diff -Nru mozc-1.11.1502.102/DEPS mozc-1.11.1522.102/DEPS --- mozc-1.11.1502.102/DEPS 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/DEPS 2013-08-28 05:25:59.000000000 +0000 @@ -33,7 +33,7 @@ "cygwin_in_chromium_revision": "66844", "gtest_revision": "645", "gmock_revision": "425", - "gyp_revision": "1589", + "gyp_revision": "1656", "jsoncpp_revision": "249", "memory_watcher_in_chromium_revision": "81926", "protobuf_revision": "462", @@ -47,7 +47,7 @@ deps = { "src/third_party/jsoncpp": - "http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@" + + "http://svn.code.sf.net/p/jsoncpp/code/trunk/jsoncpp@" + Var("jsoncpp_revision"), "src/third_party/gmock": "http://googlemock.googlecode.com/svn/trunk@" + Var("gmock_revision"), @@ -77,13 +77,13 @@ "http://src.chromium.org/svn/trunk/tools/third_party/python_26@" + Var("python_in_chromium_revision"), "src/third_party/wtl/files/include": - "https://wtl.svn.sourceforge.net/svnroot/wtl/trunk/wtl/include@" + + "http://svn.code.sf.net/p/wtl/code/trunk@" + Var("wtl_revision"), "src/third_party/sidestep": "http://src.chromium.org/svn/trunk/src/tools/memory_watcher@" + Var("memory_watcher_in_chromium_revision"), "src/third_party/zinnia/v0_04": - "https://zinnia.svn.sourceforge.net/svnroot/zinnia/zinnia@" + + "http://svn.code.sf.net/p/zinnia/code@" + Var("zinnia_revision"), }, "mac": { @@ -91,7 +91,7 @@ "https://src.chromium.org/chrome/trunk/src/tools/clang/scripts@" + Var("clang_helper_in_chromium_revision"), "src/third_party/zinnia/v0_04": - "https://zinnia.svn.sourceforge.net/svnroot/zinnia/zinnia@" + + "http://svn.code.sf.net/p/zinnia/code@" + Var("zinnia_revision"), }, "unix": { diff -Nru mozc-1.11.1502.102/android/android.gyp mozc-1.11.1522.102/android/android.gyp --- mozc-1.11.1502.102/android/android.gyp 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/android.gyp 2013-08-28 05:25:59.000000000 +0000 @@ -745,6 +745,7 @@ '../base/base.gyp:gen_character_set', '../base/base.gyp:gen_config_file_stream_data', '../base/base.gyp:gen_version_def', + '../composer/composer.gyp:gen_typing_model', '../config/config.gyp:genproto_config', '../dictionary/dictionary_base.gyp:gen_pos_map', '../dictionary/dictionary_base.gyp:genproto_dictionary', @@ -758,14 +759,6 @@ '../usage_stats/usage_stats_base.gyp:genproto_usage_stats', ], 'conditions': [ - ['enable_typing_correction==1', { - 'defines': [ - 'MOZC_ENABLE_TYPING_CORRECTION' - ], - 'dependencies': [ - '../composer/composer.gyp:gen_typing_model', - ], - }], ['branding=="GoogleJapaneseInput"', { 'dependencies': [ '../data_manager/android/android_data_manager.gyp:gen_android_embedded_data#host', @@ -871,8 +864,8 @@ '../dictionary/system/system_dictionary.gyp:install_system_dictionary_test_data', '../config/config_test.gyp:install_stats_config_util_test_data', '../rewriter/calculator/calculator.gyp:install_calculator_test_data', - '../session/session_test.gyp:install_session_handler_scenario_test_data', - '../session/session_test.gyp:install_session_handler_usage_stats_scenario_test_data', + '../data/test/session/scenario/scenario.gyp:install_session_handler_scenario_test_data', + '../data/test/session/scenario/usage_stats/usage_stats.gyp:install_session_handler_usage_stats_scenario_test_data', ], 'conditions': [ ['branding=="GoogleJapaneseInput"', { diff -Nru mozc-1.11.1502.102/android/resources_oss/res/layout/candidate_view.xml mozc-1.11.1522.102/android/resources_oss/res/layout/candidate_view.xml --- mozc-1.11.1502.102/android/resources_oss/res/layout/candidate_view.xml 
2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/layout/candidate_view.xml 2013-08-28 05:28:39.000000000 +0000 @@ -45,6 +45,7 @@ --> [XML element markup in this hunk and in the resource diffs below was stripped during extraction and is not recoverable; only file names, hunk headers, and literal values remain]
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/values/dimens.xml mozc-1.11.1522.102/android/resources_oss/res/values/dimens.xml --- mozc-1.11.1502.102/android/resources_oss/res/values/dimens.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/values/dimens.xml 2013-08-28 05:28:39.000000000 +0000 @@ -37,6 +37,8 @@ 20dip 20dip 10dip + 20dip + 10dip 0.7dip 4dip 40dip @@ -44,11 +46,6 @@ 8dip 4.5dip 10dip - - 34dip 200dip 200dip 10dip
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/values-xlarge/dimens.xml mozc-1.11.1522.102/android/resources_oss/res/values-xlarge/dimens.xml --- mozc-1.11.1502.102/android/resources_oss/res/values-xlarge/dimens.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/values-xlarge/dimens.xml 2013-08-28 05:28:39.000000000 +0000 @@ -34,9 +34,8 @@ 32dip 12dip - - 48dip + 20dip + 10dip 70dip 79.5dip
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_12keys_flick_kana.xml mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_12keys_flick_kana.xml --- mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_12keys_flick_kana.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_12keys_flick_kana.xml 2013-08-28 05:28:39.000000000 +0000 @@ -443,6 +443,16 @@ [added and changed lines; XML key markup not recoverable]
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_12keys_qwerty_abc.xml mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_12keys_qwerty_abc.xml --- mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_12keys_qwerty_abc.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_12keys_qwerty_abc.xml 2013-08-28 05:28:39.000000000 +0000 [hunks @@ -47,6 +47,20 @@ through @@ -705,5 +1209,5 @@: added lines throughout the layout; XML key markup not recoverable]
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_qwerty_abc.xml mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_qwerty_abc.xml --- mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_qwerty_abc.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_qwerty_abc.xml 2013-08-28 05:28:39.000000000 +0000 [hunks @@ -47,6 +47,20 @@ through @@ -705,5 +1209,5 @@: added lines throughout the layout; XML key markup not recoverable]
diff -Nru mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_qwerty_kana.xml mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_qwerty_kana.xml --- mozc-1.11.1502.102/android/resources_oss/res/xml/kbd_qwerty_kana.xml 2013-07-17 02:43:04.000000000 +0000 +++ mozc-1.11.1522.102/android/resources_oss/res/xml/kbd_qwerty_kana.xml 2013-08-28 05:28:39.000000000 +0000 [hunks @@ -47,6 +47,13 @@ through @@ -146,6 +209,13 @@: added lines; XML key markup not recoverable]
diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ApplicationCompatibility.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ApplicationCompatibility.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ApplicationCompatibility.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ApplicationCompatibility.java 2013-08-28
05:25:59.000000000 +0000 @@ -38,20 +38,8 @@ * */ public class ApplicationCompatibility { + private static enum CompatibilityMode { - // Set the selection position always. This operation can be dangerous. - // - // -- Background -- - // This could have some unexpected behavior, especially if the target application modifies - // the composition text, or some sent operations via InputConnection. - // Unfortunately, under the current framework, there are no ways to treat a caret position - // *correctly*. For example, it is difficult to just keep the caret position at the end of - // the composition text, because onUpdateSelection caused by *something* other than Mozc - // are sometimes just skipped. - // So, this should be just our best effort. If we hit some bad situation, - // we should be able to modify (or simply remove) this behavior to keep the caret at - // the end of the composition text. - ALWAYS_SET_CURSOR, // A flag to check if the full screen mode is supported on the application. // @@ -63,7 +51,16 @@ // of the screen. So, we don't take care of if the connected field is Omnibar or not. FULLSCREEN_MODE_SUPPORTED, - // TODO(hidehiko): Move hack for WebView in SelectionTracker. + // A flag for the applications which requires special treatment like WebEditText. + // + // -- Background -- + // WebEditText requires special treatment to track selection range. + // In addition some applications (e.g. Chrome) requires the same treatment. + // The treatment is for the applications/views where onUpdateSelection is not called back + // when they updates the composition. + // The treatment could be applicable always but the solution might be slight hacky so + // currently it is applied on white-listed applications. + PRETEND_WEB_EDIT_TEXT, } /** The default configuration */ @@ -72,41 +69,45 @@ /** The special configuration for firefox. */ private static final ApplicationCompatibility FIREFOX_INSTANCE = - new ApplicationCompatibility(EnumSet.of(CompatibilityMode.ALWAYS_SET_CURSOR, - CompatibilityMode.FULLSCREEN_MODE_SUPPORTED)); + new ApplicationCompatibility(EnumSet.of(CompatibilityMode.FULLSCREEN_MODE_SUPPORTED)); /** The special configuration for Chrome. */ private static final ApplicationCompatibility CHROME_INSTANCE = new ApplicationCompatibility(EnumSet.noneOf(CompatibilityMode.class)); + /** The special configuration for Evernote, of which onSelectionUpdate is unreliable. */ + private static final ApplicationCompatibility EVERNOTE_INSTANCE = + new ApplicationCompatibility(EnumSet.of(CompatibilityMode.PRETEND_WEB_EDIT_TEXT)); + private final EnumSet compatibilityModeSet; private ApplicationCompatibility(EnumSet compatibilityModeSet) { this.compatibilityModeSet = compatibilityModeSet; } - /** - * @return {@code true} if we should send setSelection via inputConnection always when preedit - * is set. - */ - public boolean isAlwaysSetCursorEnabled() { - return compatibilityModeSet.contains(CompatibilityMode.ALWAYS_SET_CURSOR); - } - /** @return {@code true} if the target application supports full screen mode. */ public boolean isFullScreenModeSupported() { return compatibilityModeSet.contains(CompatibilityMode.FULLSCREEN_MODE_SUPPORTED); } + /** @return {@code true} if the target application requires special behavior like WebEditText. */ + public boolean isPretendingWebEditText() { + return compatibilityModeSet.contains(CompatibilityMode.PRETEND_WEB_EDIT_TEXT); + } + /** * @return an instance for the connected application. 
*/ public static ApplicationCompatibility getInstance(EditorInfo editorInfo) { if (editorInfo != null) { - if ("com.android.chrome".equals(editorInfo.packageName)) { + String packageName = editorInfo.packageName; + if ("com.android.chrome".equals(packageName) || "com.chrome.beta".equals(packageName)) { return CHROME_INSTANCE; } - if ("org.mozilla.firefox".equals(editorInfo.packageName)) { + if ("com.evernote".equals(packageName)) { + return EVERNOTE_INSTANCE; + } + if ("org.mozilla.firefox".equals(packageName)) { return FIREFOX_INSTANCE; } } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/CandidateView.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/CandidateView.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/CandidateView.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/CandidateView.java 2013-08-28 05:25:59.000000000 +0000 @@ -44,6 +44,7 @@ import org.mozc.android.inputmethod.japanese.ui.SpanFactory; import org.mozc.android.inputmethod.japanese.view.MozcDrawableFactory; import org.mozc.android.inputmethod.japanese.view.SkinType; +import com.google.common.base.Preconditions; import android.content.Context; import android.content.res.Resources; @@ -102,6 +103,7 @@ { setBackgroundDrawableType(DrawableType.CANDIDATE_BACKGROUND); + layouter = new ConversionCandidateLayouter(); } public ConversionCandidateWordView(Context context, AttributeSet attributeSet) { @@ -111,18 +113,23 @@ resources.getInteger(R.integer.candidate_scroller_velocity_decay_rate) / 1000000f); scroller.setMinimumVelocity( resources.getInteger(R.integer.candidate_scroller_minimum_velocity)); + } + + void setCandidateTextDimension(float candidateTextSize, float descriptionTextSize) { + Preconditions.checkArgument(candidateTextSize > 0); + Preconditions.checkArgument(descriptionTextSize > 0); + + Resources resources = getResources(); - float valueTextSize = resources.getDimension(R.dimen.candidate_text_size); float valueHorizontalPadding = resources.getDimension(R.dimen.candidate_horizontal_padding_size); float valueVerticalPadding = resources.getDimension(R.dimen.candidate_vertical_padding_size); - float descriptionTextSize = resources.getDimension(R.dimen.candidate_description_text_size); float descriptionHorizontalPadding = resources.getDimension(R.dimen.symbol_description_right_padding); float descriptionVerticalPadding = resources.getDimension(R.dimen.symbol_description_bottom_padding); - candidateLayoutRenderer.setValueTextSize(valueTextSize); + candidateLayoutRenderer.setValueTextSize(candidateTextSize); candidateLayoutRenderer.setValueHorizontalPadding(valueHorizontalPadding); candidateLayoutRenderer.setValueScalingPolicy(ValueScalingPolicy.HORIZONTAL); candidateLayoutRenderer.setDescriptionTextSize(descriptionTextSize); @@ -131,7 +138,7 @@ candidateLayoutRenderer.setDescriptionLayoutPolicy(DescriptionLayoutPolicy.EXCLUSIVE); SpanFactory spanFactory = new SpanFactory(); - spanFactory.setValueTextSize(valueTextSize); + spanFactory.setValueTextSize(candidateTextSize); spanFactory.setDescriptionTextSize(descriptionTextSize); spanFactory.setDescriptionDelimiter(DESCRIPTION_DELIMITER); @@ -141,16 +148,16 @@ float candidateTextMinimumWidth = resources.getDimension(R.dimen.candidate_text_minimum_width); float candidateChunkMinimumWidth = - resources.getDimension(R.dimen.keyboard_folding_icon_size); + candidateTextSize + 
resources.getDimension(R.dimen.candidate_vertical_padding_size) * 2; - setCandidateLayouter(new ConversionCandidateLayouter( - spanFactory, - candidateWidthCompressionRate, - candidateTextMinimumWidth, - candidateChunkMinimumWidth, - valueTextSize, - valueHorizontalPadding, - valueVerticalPadding)); + ConversionCandidateLayouter layouter = ConversionCandidateLayouter.class.cast(this.layouter); + layouter.setSpanFactory(spanFactory); + layouter.setValueWidthCompressionRate(candidateWidthCompressionRate); + layouter.setMinValueWidth(candidateTextMinimumWidth); + layouter.setMinChunkWidth(candidateChunkMinimumWidth); + layouter.setValueHeight(candidateTextSize); + layouter.setValueHorizontalPadding(valueHorizontalPadding); + layouter.setValueVerticalPadding(valueVerticalPadding); } @Override @@ -312,6 +319,14 @@ getConversionCandidateWordView().setSkinType(skinType); } + void setCandidateTextDimension(float candidateTextSize, float descriptionTextSize) { + Preconditions.checkArgument(candidateTextSize > 0); + Preconditions.checkArgument(descriptionTextSize > 0); + + getConversionCandidateWordView().setCandidateTextDimension(candidateTextSize, + descriptionTextSize); + } + void setNarrowMode(boolean narrowMode) { getInputFrameFoldButton().setVisibility(narrowMode ? GONE : VISIBLE); getConversionCandidateWordView().getCandidateLayouter() diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/CandidateWordView.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/CandidateWordView.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/CandidateWordView.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/CandidateWordView.java 2013-08-28 05:25:59.000000000 +0000 @@ -343,6 +343,7 @@ // TODO(matsuzakit): The parameter is TBD (needs UX study?). protected final SnapScroller scroller = new SnapScroller(); // The CandidateLayouter which calculates the layout of candidate words. + // This fields is not final but must be set in initialization in the subclasses. @VisibleForTesting CandidateLayouter layouter; // The calculated layout, created by this.layouter. 
protected CandidateLayout calculatedLayout; @@ -392,10 +393,6 @@ return layouter; } - void setCandidateLayouter(CandidateLayouter layouter) { - this.layouter = layouter; - } - protected void setHorizontalPadding(int horizontalPadding) { this.horizontalPadding = horizontalPadding; updateLayouter(); diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ConversionCandidateWordContainerView.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ConversionCandidateWordContainerView.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ConversionCandidateWordContainerView.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ConversionCandidateWordContainerView.java 2013-08-28 05:25:59.000000000 +0000 @@ -32,6 +32,7 @@ import org.mozc.android.inputmethod.japanese.CandidateView.ConversionCandidateWordView; import org.mozc.android.inputmethod.japanese.resources.R; import org.mozc.android.inputmethod.japanese.ui.ConversionCandidateLayouter; +import com.google.common.base.Preconditions; import android.content.Context; import android.util.AttributeSet; @@ -59,10 +60,9 @@ */ public class ConversionCandidateWordContainerView extends ViewGroup { - private final int foldingIconSize = - getResources().getDimensionPixelSize(R.dimen.keyboard_folding_icon_size); + private float foldingIconSize; - public ConversionCandidateWordContainerView( + public ConversionCandidateWordContainerView( Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); } @@ -75,6 +75,13 @@ super(context); } + void setCandidateTextDimension(float candidateTextSize) { + Preconditions.checkArgument(candidateTextSize > 0); + + foldingIconSize = candidateTextSize + + getResources().getDimension(R.dimen.candidate_vertical_padding_size) * 2; + } + @Override protected void onLayout(boolean changed, int left, int top, int right, int bottom) { // Note: Don't use getMeasuredHeight/Width for this and children because #onMeasure @@ -89,11 +96,11 @@ } ConversionCandidateLayouter layouter = candidateWordView.getCandidateLayouter(); - int topMargin = (layouter.getRowHeight() - foldingIconSize) / 2; - int rightMargin = ((int) layouter.getChunkWidth() - foldingIconSize) / 2; - inputFrameFoldButton.layout(right - rightMargin - foldingIconSize, - top + topMargin, - right - rightMargin, - top + topMargin + foldingIconSize); + float topMargin = (layouter.getRowHeight() - foldingIconSize) / 2; + float rightMargin = (layouter.getChunkWidth() - foldingIconSize) / 2; + inputFrameFoldButton.layout((int) (right - rightMargin - foldingIconSize), + (int) (top + topMargin), + (int) (right - rightMargin), + (int) (top + topMargin + foldingIconSize)); } } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/HardwareKeyboard.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/HardwareKeyboard.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/HardwareKeyboard.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/HardwareKeyboard.java 2013-08-28 05:25:59.000000000 +0000 @@ -42,7 +42,11 @@ * */ public class HardwareKeyboard { - static enum CompositionSwitchMode { + + /** + * Used to switch the composition mode of harwdware keyboard. 
+ **/ + public static enum CompositionSwitchMode { TOGGLE, KANA, ALPHABET diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/JapaneseKeyboard.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/JapaneseKeyboard.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/JapaneseKeyboard.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/JapaneseKeyboard.java 2013-08-28 05:25:59.000000000 +0000 @@ -77,7 +77,7 @@ CrossingEdgeBehavior.DO_NOTHING), TWELVE_KEY_TOGGLE_QWERTY_ALPHABET( - new KeyboardSpecificationName("TWELVE_KEY_TOGGLE_QWERTY_ALPHABET", 0, 2, 0), + new KeyboardSpecificationName("TWELVE_KEY_TOGGLE_QWERTY_ALPHABET", 0, 3, 0), R.xml.kbd_12keys_qwerty_abc, CompositionMode.HALF_ASCII, SpecialRomanjiTable.QWERTY_MOBILE_TO_HALFWIDTHASCII, @@ -87,7 +87,7 @@ // Flick mode. TWELVE_KEY_FLICK_KANA( - new KeyboardSpecificationName("TWELVE_KEY_FLICK_KANA", 0, 1, 1), + new KeyboardSpecificationName("TWELVE_KEY_FLICK_KANA", 0, 1, 2), R.xml.kbd_12keys_flick_kana, CompositionMode.HIRAGANA, SpecialRomanjiTable.FLICK_TO_HIRAGANA, @@ -114,7 +114,7 @@ CrossingEdgeBehavior.COMMIT_WITHOUT_CONSUMING), TWELVE_KEY_TOGGLE_FLICK_KANA( - new KeyboardSpecificationName("TWELVE_KEY_TOGGLE_FLICK_KANA", 0, 1, 1), + new KeyboardSpecificationName("TWELVE_KEY_TOGGLE_FLICK_KANA", 0, 1, 2), R.xml.kbd_12keys_flick_kana, CompositionMode.HIRAGANA, SpecialRomanjiTable.TOGGLE_FLICK_TO_HIRAGANA, @@ -142,7 +142,7 @@ // QWERTY keyboard. QWERTY_KANA( - new KeyboardSpecificationName("QWERTY_KANA", 0, 2, 0), + new KeyboardSpecificationName("QWERTY_KANA", 0, 3, 0), R.xml.kbd_qwerty_kana, CompositionMode.HIRAGANA, SpecialRomanjiTable.QWERTY_MOBILE_TO_HIRAGANA, @@ -160,7 +160,7 @@ CrossingEdgeBehavior.DO_NOTHING), QWERTY_ALPHABET( - new KeyboardSpecificationName("QWERTY_ALPHABET", 0, 2, 0), + new KeyboardSpecificationName("QWERTY_ALPHABET", 0, 3, 0), R.xml.kbd_qwerty_abc, CompositionMode.HALF_ASCII, SpecialRomanjiTable.QWERTY_MOBILE_TO_HALFWIDTHASCII, diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/MozcService.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/MozcService.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/MozcService.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/MozcService.java 2013-08-28 05:25:59.000000000 +0000 @@ -72,6 +72,7 @@ import android.content.SharedPreferences; import android.content.SharedPreferences.OnSharedPreferenceChangeListener; import android.content.res.Configuration; +import android.graphics.Color; import android.inputmethodservice.InputMethodService; import android.media.AudioManager; import android.os.Build; @@ -84,6 +85,7 @@ import android.text.Spanned; import android.text.style.BackgroundColorSpan; import android.text.style.CharacterStyle; +import android.text.style.ForegroundColorSpan; import android.text.style.UnderlineSpan; import android.util.Log; import android.view.KeyEvent; @@ -302,14 +304,18 @@ } @Override - public void onClickHardwareKeyboardCompositionModeButton() { - hardwareKeyboard.setCompositionMode(CompositionSwitchMode.TOGGLE); - viewManager.setHardwareKeyboardCompositionMode(hardwareKeyboard.getCompositionMode()); - sendKeyWithKeyboardSpecification( - null, null, - hardwareKeyboard.getKeyboardSpecification(), - getResources().getConfiguration(), - Collections.emptyList()); + public 
void onHardwareKeyboardCompositionModeChange(CompositionSwitchMode mode) { + CompositionMode oldMode = hardwareKeyboard.getCompositionMode(); + hardwareKeyboard.setCompositionMode(mode); + CompositionMode newMode = hardwareKeyboard.getCompositionMode(); + if (oldMode != newMode) { + viewManager.setHardwareKeyboardCompositionMode(newMode); + sendKeyWithKeyboardSpecification( + null, null, + hardwareKeyboard.getKeyboardSpecification(), + getResources().getConfiguration(), + Collections.emptyList()); + } } @Override @@ -429,16 +435,21 @@ private static final String PREF_TWEAK_LOGGING_PROTOCOL_BUFFERS = "pref_tweak_logging_protocol_buffers"; + // Foreground color of characters which also have background color span. + // Without this span default color (specified by the app) is used so + // if default color is similar to specified background color the characters are hard to be read. + @VisibleForTesting static final CharacterStyle SPAN_FOREGROUND_COLOR = + new ForegroundColorSpan(Color.DKGRAY); + // Focused segment's attribute. @VisibleForTesting static final CharacterStyle SPAN_CONVERT_HIGHLIGHT = - new BackgroundColorSpan(0x8888FFFF); + new BackgroundColorSpan(0xFF88FFFF); // Cursor position. // Note that InputConnection seems not to be able to show cursor. This is a workaround. @VisibleForTesting static final CharacterStyle SPAN_BEFORE_CURSOR = - new BackgroundColorSpan(0x88FF88FF); + new BackgroundColorSpan(0xFFFFAAFF); - // To hide a caret, we use non-transparent background for partial conversion. private static final CharacterStyle SPAN_PARTIAL_SUGGESTION_COLOR = new BackgroundColorSpan(0xFFFFE0E0); @@ -750,11 +761,18 @@ } } - static boolean isWebEditText(EditorInfo editorInfo) { + /** + * @return true if connected view is WebEditText (or the application pretends it) + */ + boolean isWebEditText(EditorInfo editorInfo) { if (editorInfo == null) { return false; } + if (applicationCompatibility.isPretendingWebEditText()) { + return true; + } + // TODO(hidehiko): Refine the heuristic to check isWebEditText related stuff. MozcLog.d("inputType: " + editorInfo.inputType); int variation = editorInfo.inputType & InputType.TYPE_MASK_VARIATION; @@ -1020,6 +1038,14 @@ showStatusIcon(); // Remove memory trimming message. memoryTrimmingHandler.removeMessages(MemoryTrimmingHandler.WHAT); + // Ensure keyboard's request. + // The session might be deleted by trimMemory caused by onWindowHidden. + // Note that this logic must be placed *after* removing the messages in memoryTrimmingHandler. + // Otherwise the session might be unexpectedly deleted and newly re-created one will be used + // without appropriate request which is sent below. + changeKeyboardSpecificationAndSendKey( + null, null, currentKeyboardSpecification, getResources().getConfiguration(), + Collections.emptyList()); } @Override @@ -1219,11 +1245,10 @@ builder.append(segment.getValue()); if (segment.hasAnnotation() && segment.getAnnotation() == Annotation.HIGHLIGHT) { // Highlight for the focused conversion part. 
- builder.setSpan( - SPAN_CONVERT_HIGHLIGHT, - builder.length() - segment.getValue().length(), - builder.length(), - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + int end = builder.length(); + int begin = end - segment.getValue().length(); + builder.setSpan(SPAN_CONVERT_HIGHLIGHT, begin, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + builder.setSpan(SPAN_FOREGROUND_COLOR, begin, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } } @@ -1240,10 +1265,13 @@ if (cursor != builder.length()) { // This condition is workaround not to show unexpected background color for EditText. builder.setSpan(SPAN_PARTIAL_SUGGESTION_COLOR, cursor, builder.length(), - Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + builder.setSpan(SPAN_FOREGROUND_COLOR, cursor, builder.length(), + Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } if (cursor > 0) { builder.setSpan(SPAN_BEFORE_CURSOR, 0, cursor, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + builder.setSpan(SPAN_FOREGROUND_COLOR, 0, cursor, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); } } @@ -1262,12 +1290,9 @@ Preedit preedit = output.getPreedit(); int cursor = preedit.getCursor(); - if (!applicationCompatibility.isAlwaysSetCursorEnabled() && - (cursor == 0 || cursor == getPreeditLength(preedit))) { + if (cursor == 0 || cursor == getPreeditLength(preedit)) { // The cursor is at the beginning/ending of the preedit. So we don't anything about the // caret setting. - // Note that in some application, we need to set the caret position regardless of the - // spec. See also ApplicationCompatibility for details. return; } @@ -1504,6 +1529,9 @@ int updateStatus = selectionTracker.onUpdateSelection( oldSelStart, oldSelEnd, newSelStart, newSelEnd, candidatesStart, candidatesEnd); + if (isDebugBuild) { + MozcLog.d(selectionTracker.toString()); + } switch (updateStatus) { case SelectionTracker.DO_NOTHING: // Do nothing. diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/MozcView.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/MozcView.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/MozcView.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/MozcView.java 2013-08-28 05:25:59.000000000 +0000 @@ -30,6 +30,7 @@ package org.mozc.android.inputmethod.japanese; import org.mozc.android.inputmethod.japanese.FeedbackManager.FeedbackEvent; +import org.mozc.android.inputmethod.japanese.HardwareKeyboard.CompositionSwitchMode; import org.mozc.android.inputmethod.japanese.LayoutParamsAnimator.InterpolationListener; import org.mozc.android.inputmethod.japanese.ViewManagerInterface.LayoutAdjustment; import org.mozc.android.inputmethod.japanese.emoji.EmojiProviderType; @@ -427,7 +428,7 @@ getHardwareCompositionButton().setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { - viewEventListener.onClickHardwareKeyboardCompositionModeButton(); + viewEventListener.onHardwareKeyboardCompositionModeChange(CompositionSwitchMode.TOGGLE); } }); @@ -548,8 +549,7 @@ symbolInputView.setVisibility(View.GONE); resetFullscreenMode(); - setNarrowMode(narrowMode); - setLayoutAdjustment(layoutAdjustment); + setLayoutAdjustmentAndNarrowMode(layoutAdjustment, narrowMode); collapseDropShadowAndBackground(); updateBackgroundColor(); } @@ -696,25 +696,6 @@ return narrowMode; } - public void setNarrowMode(boolean narrowMode) { - this.narrowMode = narrowMode; - - // In narrow mode, hide software keyboard and show narrow status bar. 
- getCandidateView().setNarrowMode(narrowMode); - if (narrowMode) { - getKeyboardFrame().setVisibility(GONE); - getNarrowFrame().setVisibility(VISIBLE); - } else { - getKeyboardFrame().setVisibility(VISIBLE); - getNarrowFrame().setVisibility(GONE); - resetKeyboardFrameVisibility(); - } - - setLayoutAdjustment(layoutAdjustment); - updateInputFrameHeight(); - updateBackgroundColor(); - } - public void setHardwareCompositionButtonImage(CompositionMode compositionMode) { switch (compositionMode) { case HIRAGANA: @@ -738,10 +719,18 @@ getInputFrameHeight()); } - public void setLayoutAdjustment(LayoutAdjustment layoutAdjustment) { + /** + * Sets {@code LayoutAdjustment} and {@code narrowMode}. + * + *
<p>
They are highly dependent on one another so this method sets both at the same time. + * This decision makes caller-side simpler. + */ + public void setLayoutAdjustmentAndNarrowMode(LayoutAdjustment layoutAdjustment, + boolean narrowMode) { checkInflated(); this.layoutAdjustment = layoutAdjustment; + this.narrowMode = narrowMode; // If on narrowMode, the view is always shown with full-width regard less of given // layoutAdjustment. @@ -766,11 +755,30 @@ rightFrameStubProxy.setFrameVisibility( temporaryAdjustment == LayoutAdjustment.LEFT ? VISIBLE : GONE); - updateBackgroundColor(); + // Set candidate and desciption text size. + float candidateTextSize = layoutAdjustment == LayoutAdjustment.FILL + ? resources.getDimension(R.dimen.candidate_text_size) + : resources.getDimension(R.dimen.candidate_text_size_aligned_layout); + float descriptionTextSize = layoutAdjustment == LayoutAdjustment.FILL + ? resources.getDimension(R.dimen.candidate_description_text_size) + : resources.getDimension(R.dimen.candidate_description_text_size_aligned_layout); + getCandidateView().setCandidateTextDimension(candidateTextSize, descriptionTextSize); + getSymbolInputView().setCandidateTextDimension(candidateTextSize, descriptionTextSize); + getConversionCandidateWordContainerView().setCandidateTextDimension(candidateTextSize); + + // In narrow mode, hide software keyboard and show narrow status bar. + getCandidateView().setNarrowMode(narrowMode); + if (narrowMode) { + getKeyboardFrame().setVisibility(GONE); + getNarrowFrame().setVisibility(VISIBLE); + } else { + getKeyboardFrame().setVisibility(VISIBLE); + getNarrowFrame().setVisibility(GONE); + resetKeyboardFrameVisibility(); + } - // TODO(yoichio): Update SymbolInputView width scale. - // getSymbolInputView().setWidthScale(layoutParams.width - // / (float)resources.getDisplayMetrics().widthPixels); + updateInputFrameHeight(); + updateBackgroundColor(); } public void startLayoutAdjustmentAnimation() { @@ -948,6 +956,11 @@ return CandidateView.class.cast(findViewById(R.id.candidate_view)); } + public ConversionCandidateWordContainerView getConversionCandidateWordContainerView() { + return ConversionCandidateWordContainerView.class.cast( + findViewById(R.id.conversion_candidate_word_container_view)); + } + public View getKeyboardFrame() { return findViewById(R.id.keyboard_frame); } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/SymbolInputView.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/SymbolInputView.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/SymbolInputView.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/SymbolInputView.java 2013-08-28 05:25:59.000000000 +0000 @@ -313,6 +313,8 @@ private final EmojiProviderType emojiProviderType; private final TabHost tabHost; private final ViewPager viewPager; + private final float candidateTextSize; + private final float descriptionTextSize; private View historyViewCache = null; private int scrollState = ViewPager.SCROLL_STATE_IDLE; @@ -322,7 +324,8 @@ ViewEventListener viewEventListener, CandidateSelectListener candidateSelectListener, MajorCategory majorCategory, SkinType skinType, EmojiProviderType emojiProviderType, - TabHost tabHost, ViewPager viewPager) { + TabHost tabHost, ViewPager viewPager, + float candidateTextSize, float descriptionTextSize) { Preconditions.checkNotNull(emojiProviderType); this.context = context; @@ -334,6 +337,8 @@ 
this.emojiProviderType = emojiProviderType; this.tabHost = tabHost; this.viewPager = viewPager; + this.candidateTextSize = candidateTextSize; + this.descriptionTextSize = descriptionTextSize; } private void maybeResetHistoryView() { @@ -416,6 +421,7 @@ context.getResources().getDimension(majorCategory.minColumnWidthResourceId)); symbolCandidateView.setSkinType(skinType); symbolCandidateView.setEmojiProviderType(emojiProviderType); + symbolCandidateView.setCandidateTextDimension(candidateTextSize, descriptionTextSize); // Set candidate contents. if (position == HISTORY_INDEX) { @@ -543,10 +549,6 @@ private View scrollGuideView = null; private GestureDetector gestureDetector = null; - { - setBackgroundDrawableType(DrawableType.SYMBOL_CANDIDATE_BACKGROUND); - } - public SymbolCandidateView(Context context) { super(context, Orientation.VERTICAL); } @@ -561,22 +563,29 @@ // Shared instance initializer. { + setBackgroundDrawableType(DrawableType.SYMBOL_CANDIDATE_BACKGROUND); Resources resources = getResources(); scroller.setDecayRate( resources.getInteger(R.integer.symbol_input_scroller_velocity_decay_rate) / 1000000f); scroller.setMinimumVelocity( resources.getInteger(R.integer.symbol_input_scroller_minimum_velocity)); + layouter = new SymbolCandidateLayouter(); + } + + void setCandidateTextDimension(float textSize, float descriptionTextSize) { + Preconditions.checkArgument(textSize > 0); + Preconditions.checkArgument(descriptionTextSize > 0); + + Resources resources = getResources(); - float valueTextSize = resources.getDimension(R.dimen.candidate_text_size); float valueHorizontalPadding = resources.getDimension(R.dimen.candidate_horizontal_padding_size); - float descriptionTextSize = resources.getDimension(R.dimen.candidate_description_text_size); float descriptionHorizontalPadding = resources.getDimension(R.dimen.symbol_description_right_padding); float descriptionVerticalPadding = resources.getDimension(R.dimen.symbol_description_bottom_padding); - candidateLayoutRenderer.setValueTextSize(valueTextSize); + candidateLayoutRenderer.setValueTextSize(textSize); candidateLayoutRenderer.setValueHorizontalPadding(valueHorizontalPadding); candidateLayoutRenderer.setValueScalingPolicy(ValueScalingPolicy.UNIFORM); candidateLayoutRenderer.setDescriptionTextSize(descriptionTextSize); @@ -585,13 +594,13 @@ candidateLayoutRenderer.setDescriptionLayoutPolicy(DescriptionLayoutPolicy.OVERLAY); SpanFactory spanFactory = new SpanFactory(); - spanFactory.setValueTextSize(valueTextSize); + spanFactory.setValueTextSize(textSize); spanFactory.setDescriptionTextSize(descriptionTextSize); spanFactory.setDescriptionDelimiter(DESCRIPTION_DELIMITER); - SymbolCandidateLayouter symbolCandidateLayouter = new SymbolCandidateLayouter(spanFactory); - symbolCandidateLayouter.setRowHeight( - resources.getDimensionPixelSize(R.dimen.symbol_view_candidate_height)); - setCandidateLayouter(symbolCandidateLayouter); + + SymbolCandidateLayouter layouter = SymbolCandidateLayouter.class.cast(this.layouter); + layouter.setSpanFactory(spanFactory); + layouter.setRowHeight(resources.getDimensionPixelSize(R.dimen.symbol_view_candidate_height)); } @Override @@ -699,6 +708,10 @@ MAJOR_CATEGORY_PRESSED_BOTTOM_COLOR, MAJOR_CATEGORY_SHADOW_COLOR, BUTTON_CORNOR_RADIUS * getResources().getDisplayMetrics().density); + // Candidate text size in dip. + private float candidateTextSize; + // Description text size in dip. 
+ private float desciptionTextSize; public SymbolInputView(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); @@ -778,7 +791,8 @@ SymbolTabWidgetViewPagerAdapter adapter = new SymbolTabWidgetViewPagerAdapter( getContext(), symbolCandidateStorage, viewEventListener, symbolCandidateSelectListener, - currentMajorCategory, skinType, emojiProviderType, tabHost, candidateViewPager); + currentMajorCategory, skinType, emojiProviderType, tabHost, candidateViewPager, + candidateTextSize, desciptionTextSize); candidateViewPager.setAdapter(adapter); candidateViewPager.setOnPageChangeListener(adapter); tabHost.setOnTabChangedListener(adapter); @@ -995,8 +1009,10 @@ @Override public void setVisibility(int visibility) { + int previousVisibility = getVisibility(); super.setVisibility(visibility); - if (viewEventListener != null && visibility != View.VISIBLE) { + if (viewEventListener != null + && previousVisibility == View.VISIBLE && visibility != View.VISIBLE) { viewEventListener.onCloseSymbolInputView(); } } @@ -1009,6 +1025,14 @@ deleteKeyEventButtonTouchListener.setKeyEventHandler(keyEventHandler); } + void setCandidateTextDimension(float candidateTextSize, float descriptionTextSize) { + Preconditions.checkArgument(candidateTextSize > 0); + Preconditions.checkArgument(descriptionTextSize > 0); + + this.candidateTextSize = candidateTextSize; + this.desciptionTextSize = descriptionTextSize; + } + /** * Initializes EmojiProvider selection dialog, if necessary. * Exposed as protected for testing purpose. diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewEventDelegator.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewEventDelegator.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewEventDelegator.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewEventDelegator.java 2013-08-28 05:25:59.000000000 +0000 @@ -30,6 +30,7 @@ package org.mozc.android.inputmethod.japanese; import org.mozc.android.inputmethod.japanese.FeedbackManager.FeedbackEvent; +import org.mozc.android.inputmethod.japanese.HardwareKeyboard.CompositionSwitchMode; import org.mozc.android.inputmethod.japanese.JapaneseKeyboard.KeyboardSpecification; import org.mozc.android.inputmethod.japanese.KeycodeConverter.KeyEventInterface; import org.mozc.android.inputmethod.japanese.SymbolInputView.MajorCategory; @@ -116,8 +117,8 @@ } @Override - public void onClickHardwareKeyboardCompositionModeButton() { - delegated.onClickHardwareKeyboardCompositionModeButton(); + public void onHardwareKeyboardCompositionModeChange(CompositionSwitchMode mode) { + delegated.onHardwareKeyboardCompositionModeChange(mode); } @Override diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewEventListener.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewEventListener.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewEventListener.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewEventListener.java 2013-08-28 05:25:59.000000000 +0000 @@ -30,6 +30,7 @@ package org.mozc.android.inputmethod.japanese; import org.mozc.android.inputmethod.japanese.FeedbackManager.FeedbackEvent; +import org.mozc.android.inputmethod.japanese.HardwareKeyboard.CompositionSwitchMode; import 
org.mozc.android.inputmethod.japanese.JapaneseKeyboard.KeyboardSpecification; import org.mozc.android.inputmethod.japanese.KeycodeConverter.KeyEventInterface; import org.mozc.android.inputmethod.japanese.SymbolInputView.MajorCategory; @@ -115,8 +116,9 @@ /** * Called when the hardware_composition_button is clicked. + * @param newParam TODO(matsuzakit): */ - public void onClickHardwareKeyboardCompositionModeButton(); + public void onHardwareKeyboardCompositionModeChange(CompositionSwitchMode mode); /** * Called when the key for editor action is pressed. diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewManager.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewManager.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ViewManager.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ViewManager.java 2013-08-28 05:25:59.000000000 +0000 @@ -498,6 +498,9 @@ inflater = inflater.cloneInContext(MozcUtil.getContextWithOutOfMemoryRetrial(context)); mozcView = MozcUtil.inflateWithOutOfMemoryRetrial( MozcView.class, inflater, R.layout.mozc_view, null, false); + // Suppress update of View's internal state + // until all the updates done in this method are finished. Just in case. + mozcView.setVisibility(View.GONE); mozcView.setKeyboardHeightRatio(keyboardHeightRatio); mozcView.setEventListener( eventListener, @@ -529,7 +532,7 @@ setJapaneseKeyboard(japaneseSoftwareKeyboardModel.getKeyboardSpecification(), Collections.emptyList()); mozcView.setFullscreenMode(fullscreenMode); - mozcView.setNarrowMode(narrowMode); + mozcView.setLayoutAdjustmentAndNarrowMode(layoutAdjustment, narrowMode); // At the moment, it is necessary to set the storage to the view, *before* setting emoji // provider type. // TODO(hidehiko): Remove the restriction. @@ -539,12 +542,13 @@ mozcView.setPopupEnabled(popupEnabled); mozcView.setFlickSensitivity(flickSensitivity); mozcView.setSkinType(skinType); - mozcView.setLayoutAdjustment(layoutAdjustment); // Clear the menu dialog. 
menuDialog = null; reset(); + + mozcView.setVisibility(View.VISIBLE); return mozcView; } @@ -757,7 +761,7 @@ public void setNarrowMode(boolean isNarrowMode) { this.narrowMode = isNarrowMode; if (mozcView != null) { - mozcView.setNarrowMode(isNarrowMode); + mozcView.setLayoutAdjustmentAndNarrowMode(layoutAdjustment, isNarrowMode); } } @@ -795,7 +799,7 @@ this.layoutAdjustment = layoutAdjustment; if (mozcView != null) { - mozcView.setLayoutAdjustment(layoutAdjustment); + mozcView.setLayoutAdjustmentAndNarrowMode(layoutAdjustment, narrowMode); } } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/model/SelectionTracker.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/model/SelectionTracker.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/model/SelectionTracker.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/model/SelectionTracker.java 2013-08-28 05:25:59.000000000 +0000 @@ -35,8 +35,10 @@ import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Preedit; import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Preedit.Segment; import org.mozc.android.inputmethod.japanese.util.ArrayDeque; +import com.google.common.base.Objects; import android.content.res.Configuration; +import android.util.Log; /** * This class tracks the caret position based on the callback from MozcService. @@ -86,6 +88,12 @@ && (selectionEnd == other.selectionEnd); } + @Override + public String toString() { + return String.format("candidates(%d, %d), selection(%d, %d)", + candidatesStart, candidatesEnd, selectionStart, selectionEnd); + } + // Skipped to implement hashCode intentionally, as we don't expect use it. } @@ -108,6 +116,9 @@ private void clear() { recordQueue.clear(); + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d("clear: " + toString()); + } } private void offerInternal(int candidatesStart, int candidatesEnd, @@ -117,17 +128,22 @@ } recordQueue.offerLast( new Record(candidatesStart, candidatesEnd, selectionStart, selectionEnd)); + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d("offerInternal: " + toString()); + } } public void onStartInput( int initialSelectionStart, int initialSelectionEnd, boolean webTextView) { + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d(String.format("onStartInput: %d %d %b", + initialSelectionStart, initialSelectionEnd, webTextView)); + } this.webTextView = webTextView; if (initialSelectionStart == -1 && initialSelectionEnd == -1) { - // Ignores (-1, -1). This case can be observed when - // - The IME is not connected to any field, or - // - on FireFox beta. - // See onUpdateSelection for more details about Firefox issue. + // Ignores (-1, -1). + // This case can be observed when the IME is not connected to any field. return; } @@ -202,7 +218,6 @@ } // When both are failed, give up to return the correct position. - // TODO(hidehiko): FIX ME. E.g., firefox set initialSelectionStart, End to -1, -1... return -1; } @@ -210,6 +225,9 @@ * Should be invoked when the MozcService sends text to the connected application. */ public void onRender(DeletionRange deletionRange, String commitText, Preedit preedit) { + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d("onRender: " + Objects.firstNonNull(preedit, "").toString()); + } int preeditStartPosition = getPreeditStartPosition(); if (deletionRange != null) { // Note that deletionRange#getOffset usually returns negative value. 
@@ -280,6 +298,19 @@ } /** + * @return true if any record has the same candidate length of given {@code record} + */ + private boolean containsSeeingOnlyCandidateLength(Record record) { + int recoredLength = Math.abs(record.candidatesStart - record.candidatesEnd); + for (Record recorded : recordQueue) { + if (Math.abs(recorded.candidatesStart - recorded.candidatesEnd) == recoredLength) { + return true; + } + } + return false; + } + + /** * Should be invoked when MozcServer receives the callback {@code onUpdateSelection}. * @return the move cursor position, or one of special values * {@code DO_NOTHING, RESET_CONTEXT}. The caller should follow the result. @@ -287,16 +318,21 @@ public int onUpdateSelection(int oldSelStart, int oldSelEnd, int newSelStart, int newSelEnd, int candidatesStart, int candidatesEnd) { + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d(String.format("onUpdateSelection: %d %d %d %d %d %d", + oldSelStart, oldSelEnd, newSelStart, newSelEnd, + candidatesStart, candidatesEnd)); + MozcLog.d(recordQueue.toString()); + } Record record = new Record(candidatesStart, candidatesEnd, newSelStart, newSelEnd); - // There are five cases to come here. + // There are four cases to come here. // 1) Framework invokes this callback when the caret position is updated due to the text // change from IME, i.e. MozcService. // 2-1) During composition, users can move the caret position by tapping somewhere around the // current preedit text. // 2-2) During composition, users can make a selection region by long-tapping somewhere text. - // 3) Some applications send this message as "initialization of the caret position". - // 4) Unexpected cursor/selection moving coming from outside of MozcService. + // 3) Unexpected cursor/selection moving coming from outside of MozcService. // At first, we checks 1) state. if (recordQueue.contains(record)) { @@ -313,7 +349,7 @@ return DO_NOTHING; } - // Here, the event is not caused by MozcService. + // Here, the event is not caused by MozcService (probably). Record lastRecord = recordQueue.peekLast(); if (lastRecord != null && lastRecord.candidatesStart >= 0 && @@ -335,23 +371,41 @@ return RESET_CONTEXT; } - // Here is the case 3). - // Some applications send us the oldSel{Start, End} == (-1, -1) as the initialization of - // the caret position, such as FireFox. - if (oldSelStart == -1 && oldSelEnd == -1) { - clear(); - offerInternal(candidatesStart, candidatesEnd, newSelStart, newSelEnd); - return RESET_CONTEXT; - } - - // Here is the case 4), i.e. totally unknown state. - // This can happen, e.g., the text message is sent to the chat by tapping sending button - // or the field is filled by the application's suggestion. + // Here is the case 3), i.e. totally unknown state. + // This can happen, e.g., + // - the cursor is moved when there are no preedit + // - the text message is sent to the chat by tapping sending button + // - the field is filled by the application's suggestion // Thus, we reset the context. + // But on problematic views, which don't call onUpdateSelection when there is not preedit + // (e.g. WebView), execution flow reaches here unexpectedly. + // In such case the context is reset unexpectedly, which causes serious unpleasantness. + // Therefore fall-back logic is implemented here. + // If any recorded entry has given candidate length (candidatesEnd - candidatesStart), + // reset the queue and return DO_NOTHING instead. Such recored entry was recorded in + // previous call of onRender. 
+ // For example on problematic views following scenario would be seen. + // - onRender (commit) + // - onUpdateSelection (caused by last commit) + // - undetectable cursor move (causes record inconsistency) + // - onRender (records invalid entry but its length is correct) + // - onUpdateSelection (here) + // - Records are basically unreliable but the last one has correct length) if (candidatesStart != -1 || candidatesEnd != -1) { - // Now, we assume that the composition text is also cleared. If not log it for future - // debugging. - MozcLog.i("Unknown candidates: " + candidatesStart + ":" + candidatesEnd); + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d("Unknown candidates: " + candidatesStart + ":" + candidatesEnd); + } + if (webTextView && containsSeeingOnlyCandidateLength(record)) { + if (MozcLog.isLoggable(Log.DEBUG)) { + MozcLog.d(String.format( + "Fall-back is applied as " + + "there is a entry of which the candidate length (%d) meets expectation.", + candidatesEnd - candidatesStart)); + } + clear(); + offerInternal(candidatesStart, candidatesEnd, newSelStart, newSelEnd); + return DO_NOTHING; + } } // For the next handling, we should remember the newest position. @@ -361,4 +415,14 @@ // Tell the caller to reset the context. return RESET_CONTEXT; } + + @Override + public String toString() { + return Objects.toStringHelper(this) + .add("recordQueue", recordQueue) + .add("initialSelectionStart", initialSelectionStart) + .add("initialSelectionEnd", initialSelectionEnd) + .add("webTextView", webTextView) + .toString(); + } } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouter.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouter.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouter.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouter.java 2013-08-28 05:25:59.000000000 +0000 @@ -97,42 +97,74 @@ } } - private final SpanFactory spanFactory; + private SpanFactory spanFactory; /** Horizontal common ratio of the value size. */ - private final float valueWidthCompressionRate; + private float valueWidthCompressionRate; /** Minimum width of the value. */ - private final float minValueWidth; + private float minValueWidth; /** The Minimum width of the chunk. */ - private final float minChunkWidth; + private float minChunkWidth; /** Height of the value. */ - private final float valueHeight; + private float valueHeight; - private final float valueHorizontalPadding; - private final float valueVerticalPadding; + private float valueHorizontalPadding; + private float valueVerticalPadding; /** The current view's width. 
*/ private int viewWidth; private boolean reserveEmptySpan = false; - public ConversionCandidateLayouter( - SpanFactory spanFactory, - float valueWidthCompressionRate, - float minValueWidth, - float minChunkWidth, - float valueHeight, - float valueHorizontalPadding, - float valueVerticalPadding) { + /** + * @param spanFactory the spanFactory to set + */ + public void setSpanFactory(SpanFactory spanFactory) { this.spanFactory = spanFactory; + } + + /** + * @param valueWidthCompressionRate the valueWidthCompressionRate to set + */ + public void setValueWidthCompressionRate(float valueWidthCompressionRate) { this.valueWidthCompressionRate = valueWidthCompressionRate; + } + + /** + * @param minValueWidth the minValueWidth to set + */ + public void setMinValueWidth(float minValueWidth) { this.minValueWidth = minValueWidth; + } + + /** + * @param minChunkWidth the minChunkWidth to set + */ + public void setMinChunkWidth(float minChunkWidth) { this.minChunkWidth = minChunkWidth; + } + + /** + * @param valueHeight the valueHeight to set + */ + public void setValueHeight(float valueHeight) { this.valueHeight = valueHeight; + } + + /** + * @param valueHorizontalPadding the valueHorizontalPadding to set + */ + public void setValueHorizontalPadding(float valueHorizontalPadding) { this.valueHorizontalPadding = valueHorizontalPadding; + } + + /** + * @param valueVerticalPadding the valueVerticalPadding to set + */ + public void setValueVerticalPadding(float valueVerticalPadding) { this.valueVerticalPadding = valueVerticalPadding; } @@ -171,7 +203,8 @@ @Override public CandidateLayout layout(CandidateList candidateList) { if (minChunkWidth <= 0 || viewWidth <= 0 || - candidateList == null || candidateList.getCandidatesCount() == 0) { + candidateList == null || candidateList.getCandidatesCount() == 0 || + spanFactory == null) { return null; } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouter.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouter.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouter.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouter.java 2013-08-28 05:25:59.000000000 +0000 @@ -48,7 +48,7 @@ * */ public class SymbolCandidateLayouter implements CandidateLayouter { - private final SpanFactory spanFactory; + private SpanFactory spanFactory; /** The minimum width for each column. */ private float minColumnWidth; @@ -59,10 +59,6 @@ /** The current view's width. */ private int viewWidth; - public SymbolCandidateLayouter(SpanFactory spanFactory) { - this.spanFactory = spanFactory; - } - public void setMinColumnWidth(float minColumnWidth) { this.minColumnWidth = minColumnWidth; } @@ -95,7 +91,8 @@ @Override public CandidateLayout layout(CandidateList candidateList) { if (viewWidth <= 0 || rowHeight <= 0 || minColumnWidth <= 0 || - candidateList == null || candidateList.getCandidatesCount() == 0) { + candidateList == null || candidateList.getCandidatesCount() == 0 || + spanFactory == null) { return null; } @@ -161,4 +158,11 @@ row.setWidth(spanList.isEmpty() ? 
0 : spanList.get(spanList.size() - 1).getRight()); } } + + /** + * @param spanFactory the spanFactory to set + */ + public void setSpanFactory(SpanFactory spanFactory) { + this.spanFactory = spanFactory; + } } diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/userdictionary/UserDictionaryUtil.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/userdictionary/UserDictionaryUtil.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/userdictionary/UserDictionaryUtil.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/userdictionary/UserDictionaryUtil.java 2013-08-28 05:25:59.000000000 +0000 @@ -451,7 +451,7 @@ } /** - * Returns a new instance for Dictioanry Name Dialog. + * Returns a new instance for Dictionary Name Dialog. */ static DictionaryNameDialog createDictionaryNameDialog( Context context, int titleResourceId, DictionaryNameDialogListener listener, diff -Nru mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/util/ArrayDeque.java mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/util/ArrayDeque.java --- mozc-1.11.1502.102/android/src/com/google/android/inputmethod/japanese/util/ArrayDeque.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/src/com/google/android/inputmethod/japanese/util/ArrayDeque.java 2013-08-28 05:25:59.000000000 +0000 @@ -29,7 +29,12 @@ package org.mozc.android.inputmethod.japanese.util; +import com.google.common.base.Joiner; + +import java.util.ArrayList; import java.util.Arrays; +import java.util.Iterator; +import java.util.List; import java.util.NoSuchElementException; /** @@ -39,8 +44,10 @@ * This class should be replaced by java.util.ArrayDeque, when we start to support only API level 9 * or later. 
* + * @param type of elements + * */ -public class ArrayDeque { +public class ArrayDeque implements Iterable { private int headIndex; private int tailIndex; private final T[] elements; @@ -129,4 +136,42 @@ } return r1.equals(r2); } + + @Override + public String toString() { + int size = size(); + if (size == 0) { + return "[empty]"; + } + List resultList = new ArrayList(size); + for (int i = 0; i < size; ++i) { + resultList.add(elements[(headIndex + i) % elements.length]); + } + return new StringBuilder() + .append("[(") + .append(Joiner.on(")(").join(resultList)) + .append(")]") + .toString(); + } + + @Override + public Iterator iterator() { + return new Iterator() { + private int index = 0; + + @Override + public boolean hasNext() { + return index < size(); + } + + @Override + public T next() { + return elements[(headIndex + index++) % elements.length]; + } + + @Override + public void remove() { + throw new UnsupportedOperationException(); + }}; + } } diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateViewTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateViewTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateViewTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateViewTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -96,6 +96,7 @@ public void testCandidateWordView_update() { ConversionCandidateWordView candidateWordView = new ConversionCandidateWordView(getInstrumentation().getTargetContext(), null); + candidateWordView.setCandidateTextDimension(1, 1); candidateWordView.layout(0, 0, 320, 240); candidateWordView.scrollTo(100, 100); @@ -127,7 +128,7 @@ CandidateLayouter layouter = createMock(CandidateLayouter.class); expect(layouter.layout(isA(CandidateList.class))).andReturn(layout); expect(layouter.getPageHeight()).andReturn(100); - candidateWordView.setCandidateLayouter(layouter); + candidateWordView.layouter = layouter; replayAll(); candidateWordView.update(CandidateList.getDefaultInstance()); diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateWordViewTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateWordViewTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateWordViewTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/CandidateWordViewTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -306,7 +306,7 @@ CandidateLayouter layouter = createMock(CandidateLayouter.class); expect(layouter.layout(isA(CandidateList.class))) .andReturn(MozcLayoutUtil.createNiceCandidateLayoutMock(getMockSupport())); - candidateWordView.setCandidateLayouter(layouter); + candidateWordView.layouter = layouter; expect(layouter.getPageHeight()).andReturn(50); replayAll(); @@ -327,7 +327,7 @@ expect(layouter.layout(isA(CandidateList.class))) .andStubReturn(MozcLayoutUtil.createNiceCandidateLayoutMock(getMockSupport())); expect(layouter.getPageHeight()).andStubReturn(50); - candidateWordView.setCandidateLayouter(layouter); + candidateWordView.layouter = layouter; replayAll(); candidateWordView.updateLayouter(); @@ -349,7 +349,7 @@ expect(layouter.layout(isA(CandidateList.class))) .andStubReturn(MozcLayoutUtil.createNiceCandidateLayoutMock(getMockSupport())); 
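The ArrayDeque hunk above adds an Iterator over the fixed-size backing array by computing the physical index as (headIndex + i) % elements.length, so iteration follows logical deque order even after the head has wrapped around. Below is a minimal C++ sketch of that modular-index walk over a ring buffer; the RingBuffer class and its OfferLast/RemoveFirst/ForEach names are illustrative only and are not taken from the Mozc sources.

#include <cstddef>
#include <iostream>
#include <vector>

// Fixed-capacity ring buffer; logical order is preserved by indexing
// (head + i) % capacity, the same trick as the Java ArrayDeque iterator above.
class RingBuffer {
 public:
  explicit RingBuffer(size_t capacity)
      : elements_(capacity), head_(0), size_(0) {}

  void OfferLast(int value) {
    elements_[(head_ + size_) % elements_.size()] = value;
    if (size_ < elements_.size()) {
      ++size_;
    } else {
      head_ = (head_ + 1) % elements_.size();  // overwrites the oldest element
    }
  }

  void RemoveFirst() {
    head_ = (head_ + 1) % elements_.size();
    --size_;
  }

  // Visits elements from logical head to logical tail.
  template <typename Callback>
  void ForEach(Callback callback) const {
    for (size_t i = 0; i < size_; ++i) {
      callback(elements_[(head_ + i) % elements_.size()]);
    }
  }

 private:
  std::vector<int> elements_;
  size_t head_;
  size_t size_;
};

int main() {
  RingBuffer deque(3);
  for (int i = 0; i < 3; ++i) deque.OfferLast(i);
  deque.RemoveFirst();
  deque.OfferLast(3);
  deque.ForEach([](int v) { std::cout << v << " "; });  // prints "1 2 3"
  std::cout << std::endl;
  return 0;
}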
expect(layouter.getPageHeight()).andReturn(50); - candidateWordView.setCandidateLayouter(layouter); + candidateWordView.layouter = layouter; replayAll(); @@ -475,7 +475,7 @@ CandidateWordView candWordView = new StubCandidateWordView( getInstrumentation().getTargetContext(), Orientation.VERTICAL); CandidateLayouter layouter = createMock(CandidateLayouter.class); - candWordView.setCandidateLayouter(layouter); + candWordView.layouter = layouter; replayAll(); // Check pre-condition. diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/MozcViewTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/MozcViewTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/MozcViewTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/MozcViewTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -29,6 +29,7 @@ package org.mozc.android.inputmethod.japanese; +import static org.easymock.EasyMock.anyFloat; import static org.easymock.EasyMock.anyInt; import static org.easymock.EasyMock.capture; import static org.easymock.EasyMock.eq; @@ -38,6 +39,7 @@ import static org.easymock.EasyMock.same; import org.mozc.android.inputmethod.japanese.FeedbackManager.FeedbackEvent; +import org.mozc.android.inputmethod.japanese.HardwareKeyboard.CompositionSwitchMode; import org.mozc.android.inputmethod.japanese.InOutAnimatedFrameLayout.VisibilityChangeListener; import org.mozc.android.inputmethod.japanese.JapaneseKeyboard.KeyboardSpecification; import org.mozc.android.inputmethod.japanese.LayoutParamsAnimator.InterpolationListener; @@ -117,7 +119,7 @@ expect(mozcView.getSymbolInputView()).andStubReturn(symbolInputView); expect(mozcView.getHardwareCompositionButton()).andStubReturn(hardwareCompositionButton); expect(mozcView.getWidenButton()).andStubReturn(widenButton); - viewEventListener.onClickHardwareKeyboardCompositionModeButton(); + viewEventListener.onHardwareKeyboardCompositionModeChange(CompositionSwitchMode.TOGGLE); widenButtonClickListener.onClick(widenButton); replayAll(); @@ -418,7 +420,7 @@ LayoutInflater inflater = LayoutInflater.from(getInstrumentation().getTargetContext()); MozcView mozcView = MozcView.class.cast(inflater.inflate(R.layout.mozc_view, null)); for (TestData testData : testDataList) { - mozcView.setNarrowMode(testData.narrowMode); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.FILL, testData.narrowMode); mozcView.getCandidateView().setVisibility(testData.candidateViewVisibility); mozcView.getSymbolInputView().setVisibility(testData.symbolInputViewVisibility); assertEquals(testData.toString(), @@ -510,7 +512,7 @@ for (TestData testData : testDataList) { mozcView.setFullscreenMode(testData.fullscreenMode); mozcView.resetFullscreenMode(); - mozcView.setNarrowMode(testData.narrowMode); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.FILL, testData.narrowMode); mozcView.getCandidateView().setVisibility(testData.candidateViewVisibility); mozcView.updateInputFrameHeight(); @@ -584,13 +586,13 @@ View inputFrameButton = mozcView.getCandidateView().getInputFrameFoldButton(); View narrowFrame = mozcView.getNarrowFrame(); - mozcView.setNarrowMode(true); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.FILL, true); assertTrue(mozcView.isNarrowMode()); assertEquals(View.GONE, keyboardFrame.getVisibility()); assertEquals(View.GONE, inputFrameButton.getVisibility()); assertEquals(View.VISIBLE, 
narrowFrame.getVisibility()); - mozcView.setNarrowMode(false); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.FILL, false); assertFalse(mozcView.isNarrowMode()); assertEquals(View.VISIBLE, keyboardFrame.getVisibility()); assertEquals(View.VISIBLE, inputFrameButton.getVisibility()); @@ -605,7 +607,7 @@ MozcView mozcView = MozcView.class.cast(inflater.inflate(R.layout.mozc_view, null)); { - mozcView.setLayoutAdjustment(LayoutAdjustment.FILL); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.FILL, false); Rect keyboardSize = mozcView.getKeyboardSize(); assertEquals(resources.getDisplayMetrics().widthPixels, keyboardSize.width()); assertEquals(resources.getDimensionPixelSize(R.dimen.input_frame_height), @@ -613,7 +615,7 @@ } { - mozcView.setLayoutAdjustment(LayoutAdjustment.RIGHT); + mozcView.setLayoutAdjustmentAndNarrowMode(LayoutAdjustment.RIGHT, false); Rect keyboardSize = mozcView.getKeyboardSize(); assertEquals(resources.getDimensionPixelSize(R.dimen.ime_window_partial_width), keyboardSize.width()); @@ -623,24 +625,32 @@ } @SmallTest - public void testSetLayoutAdjustment() { + public void testSetLayoutAdjustmentAndNarrowMode() { MozcView mozcView = createViewMockBuilder(MozcView.class) .addMockedMethods("checkInflated", "getForegroundFrame", "getInputFrameHeight", - "updateBackgroundColor", "getSymbolInputView") + "updateBackgroundColor", "updateInputFrameHeight", "getSymbolInputView", + "getCandidateView", "getConversionCandidateWordContainerView", + "getKeyboardFrame", "getNarrowFrame", "resetKeyboardFrameVisibility") .createMock(); View foreGroundFrame = createViewMockBuilder(View.class) .addMockedMethods("getLayoutParams", "setLayoutParams") .createMock(); - createViewMockBuilder(View.class) - .addMockedMethod("setVisibility") - .createMock(); - createViewMockBuilder(View.class) - .addMockedMethod("setVisibility") - .createMock(); + View keyboardFrame = new View(getInstrumentation().getTargetContext()); + FrameLayout narrowFrame = new FrameLayout(getInstrumentation().getTargetContext()); SideFrameStubProxy leftFrameStubProxy = createMock(SideFrameStubProxy.class); SideFrameStubProxy rightFrameStubProxy = createMock(SideFrameStubProxy.class); VisibilityProxy.setField(mozcView, "leftFrameStubProxy", leftFrameStubProxy); VisibilityProxy.setField(mozcView, "rightFrameStubProxy", rightFrameStubProxy); + SymbolInputView symbolInputView = createViewMockBuilder(SymbolInputView.class) + .addMockedMethods("setCandidateTextDimension") + .createMock(); + CandidateView candidateView = createViewMockBuilder(CandidateView.class) + .addMockedMethods("setCandidateTextDimension", "setNarrowMode") + .createMock(); + ConversionCandidateWordContainerView conversionCandidateWordContainerView = + createViewMockBuilder(ConversionCandidateWordContainerView.class) + .addMockedMethods("setCandidateTextDimension") + .createMock(); class TestData extends Parameter { final LayoutAdjustment layoutAdjustment; @@ -687,17 +697,31 @@ Capture layoutCapture = new Capture(); resetAll(); mozcView.checkInflated(); + expectLastCall().atLeastOnce(); expect(mozcView.getInputFrameHeight()).andStubReturn(100); expect(mozcView.getForegroundFrame()).andReturn(foreGroundFrame); expect(foreGroundFrame.getLayoutParams()).andReturn(new FrameLayout.LayoutParams(0, 0)); + expect(mozcView.getCandidateView()).andStubReturn(candidateView); + expect(mozcView.getSymbolInputView()).andStubReturn(symbolInputView); + expect(mozcView.getConversionCandidateWordContainerView()) + 
.andStubReturn(conversionCandidateWordContainerView); + candidateView.setCandidateTextDimension(anyFloat(), anyFloat()); + candidateView.setNarrowMode(testData.narrowMode); + symbolInputView.setCandidateTextDimension(anyFloat(), anyFloat()); + conversionCandidateWordContainerView.setCandidateTextDimension(anyFloat()); foreGroundFrame.setLayoutParams(capture(layoutCapture)); leftFrameStubProxy.setFrameVisibility(testData.expectLeftFrameVisibility); rightFrameStubProxy.setFrameVisibility(testData.expectRightFrameVisibility); + expect(mozcView.getKeyboardFrame()).andStubReturn(keyboardFrame); + expect(mozcView.getNarrowFrame()).andStubReturn(narrowFrame); + if (!testData.narrowMode) { + mozcView.resetKeyboardFrameVisibility(); + } + mozcView.updateInputFrameHeight(); mozcView.updateBackgroundColor(); replayAll(); - mozcView.narrowMode = testData.narrowMode; - mozcView.setLayoutAdjustment(testData.layoutAdjustment); + mozcView.setLayoutAdjustmentAndNarrowMode(testData.layoutAdjustment, testData.narrowMode); verifyAll(); assertEquals(testData.toString(), testData.layoutAdjustment, mozcView.layoutAdjustment); @@ -705,6 +729,8 @@ testData.expectForegroundFrameWidth, layoutCapture.getValue().width); assertEquals(testData.toString(), testData.expectForegroundFrameGravity, layoutCapture.getValue().gravity); + assertEquals(testData.narrowMode ? View.GONE : View.VISIBLE, keyboardFrame.getVisibility()); + assertEquals(testData.narrowMode ? View.VISIBLE : View.GONE, narrowFrame.getVisibility()); } } diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/model/SelectionTrackerTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/model/SelectionTrackerTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/model/SelectionTrackerTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/model/SelectionTrackerTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -33,7 +33,9 @@ import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Preedit; import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Preedit.Segment; import org.mozc.android.inputmethod.japanese.protobuf.ProtoCommands.Preedit.Segment.Annotation; +import com.google.common.base.Strings; +import junit.framework.AssertionFailedError; import junit.framework.TestCase; import java.util.Arrays; @@ -658,4 +660,172 @@ assertEquals(SelectionTracker.DO_NOTHING, tracker.onUpdateSelection(0, 3, 1, 1, 0, 1)); assertTracker(0, 1, 1, tracker); } + + interface ScenarioPiece { + void execute(SelectionTracker tracker); + } + + static class RenderCommit implements ScenarioPiece { + + private int commitLength; + + RenderCommit(int commitLength) { + this.commitLength = commitLength; + } + + @Override + public void execute(SelectionTracker tracker) { + tracker.onRender(null, Strings.repeat("a", commitLength), null); + } + } + + static class RenderPreedit implements ScenarioPiece { + + private int preeditLength; + + RenderPreedit(int preeditLength) { + this.preeditLength = preeditLength; + } + + @Override + public void execute(SelectionTracker tracker) { + tracker.onRender(null, null, + Preedit.newBuilder() + .addSegment(Segment.newBuilder() + .setValue(Strings.repeat("a", preeditLength)) + .buildPartial()) + .setCursor(preeditLength) + .buildPartial()); + } + } + + static class Selection implements ScenarioPiece { + + private int oldSelStart; + private int oldSelEnd; + private int 
newSelStart; + private int newSelEnd; + private int candidatesStart; + private int candidatesEnd; + private int expectedResult; + + Selection(int oldSelStart, int oldSelEnd, + int newSelStart, int newSelEnd, + int candidatesStart, int candidatesEnd, + int expectedResult) { + this.oldSelStart = oldSelStart; + this.oldSelEnd = oldSelEnd; + this.newSelStart = newSelStart; + this.newSelEnd = newSelEnd; + this.candidatesStart = candidatesStart; + this.candidatesEnd = candidatesEnd; + this.expectedResult = expectedResult; + } + + @Override + public void execute(SelectionTracker tracker) { + assertEquals( + expectedResult, + tracker.onUpdateSelection(oldSelStart, oldSelEnd, newSelStart, + newSelEnd, candidatesStart, candidatesEnd)); + } + } + + static void runScenario(boolean webTextView, ScenarioPiece... scenaroPieces) { + SelectionTracker tracker = new SelectionTracker(); + tracker.onStartInput(0, 0, webTextView); + for (int i = 0; i < scenaroPieces.length; ++i) { + try { + scenaroPieces[i].execute(tracker); + } catch (AssertionFailedError e) { + throw new AssertionFailedError( + String.format("step %d (%s): %s", + i + 1, scenaroPieces[i].getClass().getSimpleName(), e.toString())); + } + } + } + + public void testBackSpaceOnWebView() { + runScenario(true, + new RenderPreedit(1), + new Selection(0, 0, 1, 1, 0, 1, SelectionTracker.DO_NOTHING), + new RenderCommit(1), + new Selection(1, 1, 1, 1, -1, -1, SelectionTracker.DO_NOTHING), + // Here undetectable cursor move. + new RenderPreedit(1), + new Selection(1, 1, 1, 1, 0, 1, SelectionTracker.DO_NOTHING)); + } + + public void testBackSpaceOnTextEdit() { + runScenario(false, + new RenderPreedit(1), + new Selection(0, 0, 1, 1, 0, 1, SelectionTracker.DO_NOTHING), + new RenderCommit(1), + new Selection(1, 1, 1, 1, -1, -1, SelectionTracker.DO_NOTHING), + // Here undetectable cursor move. + new RenderPreedit(1), + new Selection(1, 1, 1, 1, 0, 1, SelectionTracker.RESET_CONTEXT)); + } + + public void testBackSpaceOnWebView2() { + runScenario(true, + new RenderPreedit(4), + new Selection(0, 0, 4, 4, 0, 4, SelectionTracker.DO_NOTHING), + new RenderCommit(4), + new Selection(4, 4, 4, 4, -1, -1, SelectionTracker.DO_NOTHING), + // Here undetectable cursor move to the beginning. + new RenderPreedit(1), + new Selection(4, 4, 1, 1, 0, 1, SelectionTracker.DO_NOTHING)); + } + + public void testBackSpaceOnTextEdit2() { + runScenario(false, + new RenderPreedit(4), + new Selection(0, 0, 4, 4, 0, 4, SelectionTracker.DO_NOTHING), + new RenderCommit(4), + new Selection(4, 4, 4, 4, -1, -1, SelectionTracker.DO_NOTHING), + // Here undetectable cursor move to the beginning. + new RenderPreedit(1), + new Selection(4, 4, 1, 1, 0, 1, SelectionTracker.RESET_CONTEXT)); + } + + public void testQuickTypeOnWebView() { + runScenario(true, + new RenderPreedit(10), // If a user types very quickly, merged result will be arrive. + new Selection(0, 0, 10, 10, 0, 10, SelectionTracker.DO_NOTHING)); + } + + public void testQuickTypeOnTextEdit() { + runScenario(false, + new RenderPreedit(10), // If a user types very quickly, merged result will be arrive. + new Selection(0, 0, 10, 10, 0, 10, SelectionTracker.DO_NOTHING)); + } + + public void testCompletionOnWebView() { + runScenario(true, + // Type two characters. Corresponding selection updates are done later. + new RenderPreedit(1), + new RenderPreedit(2), + + // 1st call-back correspoing to 1st character. + new Selection(0, 0, 1, 1, 0, 1, SelectionTracker.DO_NOTHING), + + // Here completion is applied without selection update call-back. 
+ // The cursor moves to position 10. No preedit is shown. + + // 2nd call-back correspoing to 2nd character. + // Note that old selection is 10 because of undetactable completion. + // NOTE: What should expected here? Both resetting and keeping as-is are + // uncomfortable for the users. Here DO_NOTHING is set as expectation but + // RESET_CONTEXT is also acceptable here. + new Selection(10, 10, 12, 12, 10, 12, SelectionTracker.DO_NOTHING)); + } + + public void testCompletionOnTextEdit() { + runScenario(false, + new RenderPreedit(1), + new RenderPreedit(2), + new Selection(0, 0, 1, 1, 0, 1, SelectionTracker.DO_NOTHING), + new Selection(10, 10, 12, 12, 10, 12, SelectionTracker.RESET_CONTEXT)); + } } diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouterTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouterTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouterTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/ui/ConversionCandidateLayouterTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -58,9 +58,8 @@ }; public void testSetViewSize() { - ConversionCandidateLayouter layouter = new ConversionCandidateLayouter( - DUMMY_SPAN_FACTORY, 0, 0, 0, 0, 0, 0); - + ConversionCandidateLayouter layouter = new ConversionCandidateLayouter(); + layouter.setSpanFactory(DUMMY_SPAN_FACTORY); layouter.setViewSize(0, 0); assertTrue(layouter.setViewSize(320, 240)); @@ -70,9 +69,10 @@ } public void testPageSize() { - ConversionCandidateLayouter layouter = new ConversionCandidateLayouter( - DUMMY_SPAN_FACTORY, 0, 0, 0, VALUE_HEIGHT, 0, VALUE_VERTICAL_PADDING); - + ConversionCandidateLayouter layouter = new ConversionCandidateLayouter(); + layouter.setSpanFactory(DUMMY_SPAN_FACTORY); + layouter.setValueHeight(VALUE_HEIGHT); + layouter.setValueVerticalPadding(VALUE_VERTICAL_PADDING); layouter.setViewSize(320, 640); // The page width is equal to the view's width; diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouterTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouterTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouterTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/ui/SymbolCandidateLayouterTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -51,8 +51,8 @@ @SmallTest public void testSetViewSize() { - SymbolCandidateLayouter layouter = new SymbolCandidateLayouter(DUMMY_SPAN_FACTORY); - + SymbolCandidateLayouter layouter = new SymbolCandidateLayouter(); + layouter.setSpanFactory(DUMMY_SPAN_FACTORY); layouter.setViewSize(0, 0); // If and only if the width is changed, the layout should be invalidated. @@ -63,8 +63,8 @@ } public void testPageSize() { - SymbolCandidateLayouter layouter = new SymbolCandidateLayouter(DUMMY_SPAN_FACTORY); - + SymbolCandidateLayouter layouter = new SymbolCandidateLayouter(); + layouter.setSpanFactory(DUMMY_SPAN_FACTORY); layouter.setRowHeight(10); // The page width should be equal to the view width. 
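The layouter tests above now configure ConversionCandidateLayouter and SymbolCandidateLayouter through setters (setSpanFactory() and friends) instead of multi-argument constructors, and layout() gained an extra guard that bails out while spanFactory is still null. A rough C++ sketch of that configure-then-guard pattern follows, assuming nothing beyond what the hunks show; the LayouterSketch class and its fields are illustrative, not the Mozc types.

#include <algorithm>
#include <optional>
#include <vector>

// Illustrative configure-then-guard pattern: values arrive through setters,
// possibly long after construction, and Layout() declines to run until the
// mandatory ones are present, mirroring the new spanFactory null-check.
class LayouterSketch {
 public:
  void set_min_chunk_width(float width) { min_chunk_width_ = width; }
  void set_value_height(float height) { value_height_ = height; }
  void set_view_width(int width) { view_width_ = width; }

  // Returns std::nullopt while the layouter is not yet configured.
  std::optional<std::vector<float>> Layout(int candidate_count) const {
    if (min_chunk_width_ <= 0.0f || view_width_ <= 0 || candidate_count == 0) {
      return std::nullopt;
    }
    const int chunks_per_row =
        std::max(1, static_cast<int>(view_width_ / min_chunk_width_));
    std::vector<float> row_heights;
    for (int i = 0; i < candidate_count; i += chunks_per_row) {
      row_heights.push_back(value_height_);
    }
    return row_heights;
  }

 private:
  float min_chunk_width_ = 0.0f;
  float value_height_ = 0.0f;
  int view_width_ = 0;
};

int main() {
  LayouterSketch layouter;
  layouter.set_view_width(320);
  layouter.set_min_chunk_width(90.0f);
  layouter.set_value_height(40.0f);
  return layouter.Layout(7).has_value() ? 0 : 1;  // configured, so a layout is produced
}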
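The SelectionTrackerTest hunk earlier in this file introduces runScenario(), which executes ScenarioPiece steps in order and, when a step fails, rethrows the assertion error annotated with the 1-based step index and the step's class name. The next sketch shows the same step-indexed reporting technique in C++, with std::function steps and exceptions standing in for JUnit's AssertionFailedError; all names are illustrative.

#include <functional>
#include <iostream>
#include <stdexcept>
#include <string>
#include <utility>
#include <vector>

// Runs scenario steps in order; on failure, re-reports the error prefixed
// with the 1-based step index so a long scenario pinpoints where it broke.
void RunScenario(
    const std::vector<std::pair<std::string, std::function<void()>>> &steps) {
  for (size_t i = 0; i < steps.size(); ++i) {
    try {
      steps[i].second();
    } catch (const std::exception &e) {
      throw std::runtime_error("step " + std::to_string(i + 1) + " (" +
                               steps[i].first + "): " + e.what());
    }
  }
}

int main() {
  try {
    RunScenario({
        {"RenderPreedit", [] { /* ok */ }},
        {"Selection", [] { throw std::logic_error("unexpected RESET_CONTEXT"); }},
    });
  } catch (const std::exception &e) {
    // Prints "step 2 (Selection): unexpected RESET_CONTEXT".
    std::cerr << e.what() << std::endl;
  }
  return 0;
}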
diff -Nru mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/util/ArrayDequeTest.java mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/util/ArrayDequeTest.java --- mozc-1.11.1502.102/android/tests/src/com/google/android/inputmethod/japanese/util/ArrayDequeTest.java 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/android/tests/src/com/google/android/inputmethod/japanese/util/ArrayDequeTest.java 2013-08-28 05:25:59.000000000 +0000 @@ -33,6 +33,7 @@ import junit.framework.TestCase; +import java.util.Iterator; import java.util.NoSuchElementException; /** @@ -141,4 +142,34 @@ assertFalse(deque.contains(Integer.MAX_VALUE)); assertFalse(deque.contains(Integer.MIN_VALUE)); } + + @SmallTest + public void testIterator() { + ArrayDeque deque = new ArrayDeque(3); + + for (int i = 0; i < 3; ++i) { + deque.offerLast(i); + } + + Iterator iterator = deque.iterator(); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(0), iterator.next()); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(1), iterator.next()); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(2), iterator.next()); + assertFalse(iterator.hasNext()); + + deque.removeFirst(); + deque.offerLast(3); + + iterator = deque.iterator(); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(1), iterator.next()); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(2), iterator.next()); + assertTrue(iterator.hasNext()); + assertEquals(Integer.valueOf(3), iterator.next()); + assertFalse(iterator.hasNext()); + } } diff -Nru mozc-1.11.1502.102/base/android_util_test.cc mozc-1.11.1522.102/base/android_util_test.cc --- mozc-1.11.1502.102/base/android_util_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/android_util_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -76,7 +76,7 @@ }; string lhs; string rhs; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(testcases); ++i) { + for (size_t i = 0; i < arraysize(testcases); ++i) { const TestCase &testcase = testcases[i]; SCOPED_TRACE(testcase.line); SCOPED_TRACE(testcase.lhs); diff -Nru mozc-1.11.1502.102/base/number_util.cc mozc-1.11.1522.102/base/number_util.cc --- mozc-1.11.1502.102/base/number_util.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/number_util.cc 2013-08-28 05:26:13.000000000 +0000 @@ -157,6 +157,9 @@ } const int kInt32BufferSize = 12; // "-2147483648\0" +const int kUInt32BufferSize = 11; // "4294967295\0" +const int kInt64BufferSize = 21; // "-9223372036854775808\0" +const int kUInt64BufferSize = 21; // "18446744073709551615\0" const char kAsciiZero = '0'; const char kAsciiOne = '1'; @@ -170,6 +173,24 @@ return string(buffer, length); } +string NumberUtil::SimpleItoa(uint32 number) { + char buffer[kUInt32BufferSize]; + const int length = snprintf(buffer, kUInt32BufferSize, "%u", number); + return string(buffer, length); +} + +string NumberUtil::SimpleItoa(int64 number) { + char buffer[kInt64BufferSize]; + const int length = snprintf(buffer, kInt64BufferSize, "%lld", number); + return string(buffer, length); +} + +string NumberUtil::SimpleItoa(uint64 number) { + char buffer[kUInt64BufferSize]; + const int length = snprintf(buffer, kUInt64BufferSize, "%llu", number); + return string(buffer, length); +} + int NumberUtil::SimpleAtoi(StringPiece str) { stringstream ss; ss << str; @@ -700,6 +721,38 @@ } // namespace +bool NumberUtil::SafeStrToInt32(StringPiece str, int32 *value) { + int64 tmp; + if (!SafeStrToInt64(str, &tmp)) { + return 
false; + } + *value = tmp; + return static_cast(*value) == tmp; +} + +bool NumberUtil::SafeStrToInt64(StringPiece str, int64 *value) { + const StringPiece stripped_str = SkipWhiteSpace(str); + if (stripped_str.empty()) { + return false; + } + uint64 tmp; + if (stripped_str[0] == '-') { + StringPiece opposite_str = StringPiece(stripped_str, + 1, + stripped_str.size() - 1); + if (!SafeStrToUInt64WithBase(opposite_str, 10, &tmp)) { + return false; + } + *value = -static_cast(tmp); + return *value <= 0; + } + if (!SafeStrToUInt64WithBase(str, 10, &tmp)) { + return false; + } + *value = tmp; + return *value >= 0; +} + bool NumberUtil::SafeStrToUInt32(StringPiece str, uint32 *value) { uint64 tmp; if (!SafeStrToUInt64WithBase(str, 10, &tmp)) { diff -Nru mozc-1.11.1502.102/base/number_util.h mozc-1.11.1522.102/base/number_util.h --- mozc-1.11.1502.102/base/number_util.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/number_util.h 2013-08-28 05:26:13.000000000 +0000 @@ -44,6 +44,9 @@ public: // Convert the number to a string and append it to output. static string SimpleItoa(int32 number); + static string SimpleItoa(uint32 number); + static string SimpleItoa(int64 number); + static string SimpleItoa(uint64 number); // Convert the string to a number and return it. static int SimpleAtoi(StringPiece str); @@ -145,8 +148,10 @@ static bool ArabicToOtherRadixes(StringPiece input_num, vector *output); - // Converts the string to a 32-/64-bit unsigned int. Returns true if success - // or false if the string is in the wrong format. + // Converts the string to a 32-/64-bit signed/unsigned int. Returns true if + // success or false if the string is in the wrong format. + static bool SafeStrToInt32(StringPiece str, int32 *value); + static bool SafeStrToInt64(StringPiece str, int64 *value); static bool SafeStrToUInt32(StringPiece str, uint32 *value); static bool SafeStrToUInt64(StringPiece str, uint64 *value); static bool SafeHexStrToUInt32(StringPiece str, uint32 *value); diff -Nru mozc-1.11.1502.102/base/number_util_test.cc mozc-1.11.1522.102/base/number_util_test.cc --- mozc-1.11.1502.102/base/number_util_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/number_util_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -51,6 +51,18 @@ snprintf(buf, arraysize(buf), "%d", kint32min); EXPECT_EQ(buf, NumberUtil::SimpleItoa(kint32min)); + + snprintf(buf, arraysize(buf), "%u", kuint32max); + EXPECT_EQ(buf, NumberUtil::SimpleItoa(kuint32max)); + + snprintf(buf, arraysize(buf), "%lld", kint64max); + EXPECT_EQ(buf, NumberUtil::SimpleItoa(kint64max)); + + snprintf(buf, arraysize(buf), "%lld", kint64min); + EXPECT_EQ(buf, NumberUtil::SimpleItoa(kint64min)); + + snprintf(buf, arraysize(buf), "%llu", kuint64max); + EXPECT_EQ(buf, NumberUtil::SimpleItoa(kuint64max)); } TEST_F(NumberUtilTest, SimpleAtoi) { @@ -59,6 +71,119 @@ EXPECT_EQ(-1, NumberUtil::SimpleAtoi("-1")); } +TEST_F(NumberUtilTest, SafeStrToInt32) { + int32 value = 0xDEADBEEF; + + EXPECT_TRUE(NumberUtil::SafeStrToInt32("0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("+0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("-0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32(" \t\r\n\v\f0 \t\r\n\v\f", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32(" \t\r\n\v\f-0 \t\r\n\v\f", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + 
EXPECT_TRUE(NumberUtil::SafeStrToInt32("012345678", &value)); + EXPECT_EQ(12345678, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("-012345678", &value)); + EXPECT_EQ(-12345678, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("-2147483648", &value)); + EXPECT_EQ(kint32min, value); // min of 32-bit signed integer + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("2147483647", &value)); + EXPECT_EQ(kint32max, value); // max of 32-bit signed integer + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32(" 1", &value)); + EXPECT_EQ(1, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt32("2 ", &value)); + EXPECT_EQ(2, value); + + EXPECT_FALSE(NumberUtil::SafeStrToInt32("0x1234", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("-2147483649", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("2147483648", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("18446744073709551616", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("3e", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("0.", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32(".0", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt32("", &value)); + + // Test for StringPiece input. + const char *kString = "123 abc 789"; + EXPECT_TRUE(NumberUtil::SafeStrToInt32(StringPiece(kString, 3), + &value)); + EXPECT_EQ(123, value); + EXPECT_FALSE(NumberUtil::SafeStrToInt32(StringPiece(kString + 4, 3), + &value)); + EXPECT_TRUE(NumberUtil::SafeStrToInt32(StringPiece(kString + 8, 3), + &value)); + EXPECT_EQ(789, value); + EXPECT_TRUE(NumberUtil::SafeStrToInt32(StringPiece(kString + 7, 4), + &value)); + EXPECT_EQ(789, value); +} + +TEST_F(NumberUtilTest, SafeStrToInt64) { + int64 value = 0xDEADBEEF; + + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("+0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("-0", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64(" \t\r\n\v\f0 \t\r\n\v\f", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64(" \t\r\n\v\f-0 \t\r\n\v\f", &value)); + EXPECT_EQ(0, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("012345678", &value)); + EXPECT_EQ(12345678, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("-012345678", &value)); + EXPECT_EQ(-12345678, value); + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("-9223372036854775808", &value)); + EXPECT_EQ(kint64min, value); // min of 64-bit signed integer + value = 0xDEADBEEF; + EXPECT_TRUE(NumberUtil::SafeStrToInt64("9223372036854775807", &value)); + EXPECT_EQ(kint64max, value); // max of 64-bit signed integer + + EXPECT_FALSE(NumberUtil::SafeStrToInt64("-9223372036854775809", // overflow + &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64("9223372036854775808", // overflow + &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64("0x1234", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64("3e", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64("0.", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64(".0", &value)); + EXPECT_FALSE(NumberUtil::SafeStrToInt64("", &value)); + + // Test for StringPiece input. 
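The new NumberUtil::SafeStrToInt64 shown above handles a leading '-' by parsing the magnitude as an unsigned 64-bit value and negating it, and SafeStrToInt32 delegates to SafeStrToInt64 and validates the narrowing with a round-trip cast back to 64 bits; likewise, the SimpleItoa overloads size their stack buffers from the worst-case textual form (21 bytes for int64, since "-9223372036854775808" is 20 characters plus the terminating NUL). Below is a standalone C++ sketch of those two validation tricks built on strtoull; it is not the Mozc implementation, which goes through SafeStrToUInt64WithBase and also strips surrounding whitespace, and the *Sketch names are invented for illustration.

#include <cerrno>
#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <string>

// Parses a decimal string into int64: a leading '-' is handled by parsing the
// magnitude as unsigned and negating, and the sign check rejects magnitudes
// that do not fit into int64.
bool SafeStrToInt64Sketch(const std::string &str, int64_t *value) {
  if (str.empty()) {
    return false;
  }
  const bool negative = (str[0] == '-');
  const char *digits = str.c_str() + (negative ? 1 : 0);
  errno = 0;
  char *end = nullptr;
  const uint64_t magnitude = std::strtoull(digits, &end, 10);
  if (errno != 0 || end == digits || *end != '\0') {
    return false;  // overflow, no digits, or trailing garbage
  }
  *value = negative ? -static_cast<int64_t>(magnitude)
                    : static_cast<int64_t>(magnitude);
  return negative ? (*value <= 0) : (*value >= 0);
}

// Narrowing is valid iff widening the result again reproduces the original
// 64-bit value, the same round-trip cast used by the patch.
bool SafeStrToInt32Sketch(const std::string &str, int32_t *value) {
  int64_t wide = 0;
  if (!SafeStrToInt64Sketch(str, &wide)) {
    return false;
  }
  *value = static_cast<int32_t>(wide);
  return static_cast<int64_t>(*value) == wide;
}

int main() {
  int32_t narrow = 0;
  int64_t wide = 0;
  std::cout << SafeStrToInt32Sketch("-2147483648", &narrow) << " " << narrow << "\n";  // 1 -2147483648
  std::cout << SafeStrToInt32Sketch("2147483648", &narrow) << "\n";                    // 0: does not fit in int32
  std::cout << SafeStrToInt64Sketch("9223372036854775807", &wide) << " " << wide << "\n";
  return 0;
}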
+ const char *kString = "123 abc 789"; + EXPECT_TRUE(NumberUtil::SafeStrToInt64(StringPiece(kString, 3), + &value)); + EXPECT_EQ(123, value); + EXPECT_FALSE(NumberUtil::SafeStrToInt64(StringPiece(kString + 4, 3), + &value)); + EXPECT_TRUE(NumberUtil::SafeStrToInt64(StringPiece(kString + 8, 3), + &value)); + EXPECT_EQ(789, value); +} + TEST_F(NumberUtilTest, SafeStrToUInt32) { uint32 value = 0xDEADBEEF; diff -Nru mozc-1.11.1502.102/base/port.h mozc-1.11.1522.102/base/port.h --- mozc-1.11.1502.102/base/port.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/port.h 2013-08-28 05:26:13.000000000 +0000 @@ -167,17 +167,6 @@ #define AS_STRING_INTERNAL(x) #x -// ARRAYSIZE_UNSAFE performs essentially the same calculation as arraysize, -// but can be used on anonymous types or types defined inside functions. -// It's less safe than arraysize as it accepts some (although not all) -// pointers. Therefore, you should use arraysize whenever possible. - -#ifndef ARRAYSIZE_UNSAFE -#define ARRAYSIZE_UNSAFE(a) \ - ((sizeof(a) / sizeof(*(a))) / \ - static_cast(!(sizeof(a) % sizeof(*(a))))) -#endif // !ARRAYSIZE_UNSAFE - #include "base/flags.h" #include "base/init.h" diff -Nru mozc-1.11.1502.102/base/system_util.cc mozc-1.11.1522.102/base/system_util.cc --- mozc-1.11.1502.102/base/system_util.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/system_util.cc 2013-08-28 05:26:13.000000000 +0000 @@ -601,17 +601,54 @@ return result; } -bool GetCurrentSessionId(DWORD *session_id) { +bool GetCurrentSessionId(uint32 *session_id) { DCHECK(session_id); *session_id = 0; - if (!::ProcessIdToSessionId(::GetCurrentProcessId(), - session_id)) { + DWORD id = 0; + if (!::ProcessIdToSessionId(::GetCurrentProcessId(), &id)) { LOG(ERROR) << "cannot get session id: " << ::GetLastError(); return false; } + static_assert(sizeof(DWORD) == sizeof(uint32), + "DWORD and uint32 must be equivalent"); + *session_id = static_cast(id); return true; } +// Here we use the input desktop instead of the desktop associated with the +// current thread. One reason behind this is that some applications such as +// Adobe Reader XI use multiple desktops in a process. Basically the input +// desktop is most appropriate and important desktop for our use case. +// See http://blogs.adobe.com/asset/2012/10/new-security-capabilities-in-adobe-reader-and-acrobat-xi-now-available.html +string GetInputDesktopName() { + const HDESK desktop_handle = OpenInputDesktop(0, FALSE, DESKTOP_READOBJECTS); + if (desktop_handle == nullptr) { + return ""; + } + const string desktop_name = GetObjectNameAsString(desktop_handle); + ::CloseDesktop(desktop_handle); + return desktop_name; +} + +string GetProcessWindowStationName() { + // We must not close the returned value of GetProcessWindowStation(). 
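The GetInputDesktopName() helper added above opens the input desktop with DESKTOP_READOBJECTS and reads its name through the existing GetObjectNameAsString() helper, which this hunk does not show. Below is a hedged, Windows-only sketch of an equivalent standalone query using the documented GetUserObjectInformationW/UOI_NAME API; the function name and the minimal error handling are illustrative.

#ifdef _WIN32
#include <windows.h>
#include <string>

// Standalone sketch (not the Mozc helper): returns the name of the current
// input desktop, or an empty string on failure.  The real code funnels the
// handle through GetObjectNameAsString(); here UOI_NAME is queried directly.
std::wstring GetInputDesktopNameSketch() {
  const HDESK desktop = ::OpenInputDesktop(0, FALSE, DESKTOP_READOBJECTS);
  if (desktop == nullptr) {
    return L"";
  }
  DWORD required_bytes = 0;
  ::GetUserObjectInformationW(desktop, UOI_NAME, nullptr, 0, &required_bytes);
  std::wstring name;
  if (required_bytes > 0) {
    name.resize(required_bytes / sizeof(wchar_t));
    if (!::GetUserObjectInformationW(desktop, UOI_NAME, &name[0],
                                     required_bytes, &required_bytes)) {
      name.clear();
    }
    // Drop the trailing NUL the API writes into the buffer.
    while (!name.empty() && name.back() == L'\0') {
      name.pop_back();
    }
  }
  ::CloseDesktop(desktop);
  return name;
}
#endif  // _WIN32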
+ // http://msdn.microsoft.com/en-us/library/windows/desktop/ms683225.aspx + const HWINSTA window_station = ::GetProcessWindowStation(); + if (window_station == nullptr) { + return ""; + } + + return GetObjectNameAsString(window_station); +} + +string GetSessionIdString() { + uint32 session_id = 0; + if (!GetCurrentSessionId(&session_id)) { + return ""; + } + return NumberUtil::SimpleItoa(session_id); +} + } // namespace #endif // OS_WIN @@ -627,21 +664,25 @@ return ""; #elif defined(OS_WIN) - DWORD session_id = 0; - if (!GetCurrentSessionId(&session_id)) { + const string &session_id = GetSessionIdString(); + if (session_id.empty()) { + DLOG(ERROR) << "Failed to retrieve session id"; return ""; } - char id[64]; - snprintf(id, sizeof(id), "%u", session_id); + const string &window_station_name = GetProcessWindowStationName(); + if (window_station_name.empty()) { + DLOG(ERROR) << "Failed to retrieve window station name"; + return ""; + } - string result = id; - result += "."; - result += GetObjectNameAsString(::GetProcessWindowStation()); - result += "."; - result += GetObjectNameAsString(::GetThreadDesktop(::GetCurrentThreadId())); + const string &desktop_name = GetInputDesktopName(); + if (desktop_name.empty()) { + DLOG(ERROR) << "Failed to retrieve desktop name"; + return ""; + } - return result; + return (session_id + "." + window_station_name + "." + desktop_name); #endif // OS_LINUX, OS_MACOSX, OS_WIN } @@ -663,9 +704,8 @@ DWORDLONG conditional = 0; VER_SET_CONDITION(conditional, VER_MAJORVERSION, VER_GREATER_EQUAL); VER_SET_CONDITION(conditional, VER_MINORVERSION, VER_GREATER_EQUAL); - const BOOL result = - ::VerifyVersionInfo(&osvi, VER_MAJORVERSION | VER_MINORVERSION, - conditional); + const BOOL result = ::VerifyVersionInfo( + &osvi, VER_MAJORVERSION | VER_MINORVERSION, conditional); if (result != FALSE) { succeeded_ = true; is_ver_x_or_later_ = true; @@ -682,9 +722,8 @@ DWORDLONG conditional = 0; VER_SET_CONDITION(conditional, VER_MAJORVERSION, VER_LESS); VER_SET_CONDITION(conditional, VER_MINORVERSION, VER_LESS); - const BOOL result = - ::VerifyVersionInfo(&osvi, VER_MAJORVERSION | VER_MINORVERSION, - conditional); + const BOOL result = ::VerifyVersionInfo( + &osvi, VER_MAJORVERSION | VER_MINORVERSION, conditional); if (result != FALSE) { succeeded_ = true; is_ver_x_or_later_ = false; @@ -711,6 +750,7 @@ typedef IsWindowsVerXOrLaterCache<6, 0> IsWindowsVistaOrLaterCache; typedef IsWindowsVerXOrLaterCache<6, 1> IsWindows7OrLaterCache; typedef IsWindowsVerXOrLaterCache<6, 2> IsWindows8OrLaterCache; +typedef IsWindowsVerXOrLaterCache<6, 3> IsWindows8_1OrLaterCache; // TODO(yukawa): Use API wrapper so that unit test can emulate any case. 
class SystemDirectoryCache { @@ -748,6 +788,9 @@ if (!Singleton::get()->succeeded()) { return false; } + if (!Singleton::get()->succeeded()) { + return false; + } if (!Singleton::get()->succeeded()) { return false; } @@ -942,6 +985,15 @@ #endif // OS_WIN } +bool SystemUtil::IsWindows8_1OrLater() { +#ifdef OS_WIN + DCHECK(Singleton::get()->succeeded()); + return Singleton::get()->is_ver_x_or_later(); +#else + return false; +#endif // OS_WIN +} + namespace { volatile mozc::SystemUtil::IsWindowsX64Mode g_is_windows_x64_mode = mozc::SystemUtil::IS_WINDOWS_X64_DEFAULT_MODE; @@ -1062,13 +1114,13 @@ } string SystemUtil::GetMSCTFAsmCacheReadyEventName() { - DWORD session_id = 0; - if (!GetCurrentSessionId(&session_id)) { + const string &session_id = GetSessionIdString(); + if (session_id.empty()) { + DLOG(ERROR) << "Failed to retrieve session id"; return ""; } - const string &desktop_name = - GetObjectNameAsString(::GetThreadDesktop(::GetCurrentThreadId())); + const string &desktop_name = GetInputDesktopName(); if (desktop_name.empty()) { DLOG(ERROR) << "Failed to retrieve desktop name"; @@ -1076,8 +1128,7 @@ } // Compose "Local\MSCTF.AsmCacheReady.". - return ("Local\\MSCTF.AsmCacheReady." + desktop_name + - Util::StringPrintf("%u", session_id)); + return ("Local\\MSCTF.AsmCacheReady." + desktop_name + session_id); } #endif // OS_WIN @@ -1089,8 +1140,10 @@ OSVERSIONINFOEX osvi = { 0 }; osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX); if (GetVersionEx(reinterpret_cast(&osvi))) { - ret += "." + NumberUtil::SimpleItoa(osvi.dwMajorVersion); - ret += "." + NumberUtil::SimpleItoa(osvi.dwMinorVersion); + ret += "."; + ret += NumberUtil::SimpleItoa(static_cast(osvi.dwMajorVersion)); + ret += "."; + ret += NumberUtil::SimpleItoa(static_cast(osvi.dwMinorVersion)); ret += "." + NumberUtil::SimpleItoa(osvi.wServicePackMajor); ret += "." + NumberUtil::SimpleItoa(osvi.wServicePackMinor); } else { diff -Nru mozc-1.11.1502.102/base/system_util.h mozc-1.11.1522.102/base/system_util.h --- mozc-1.11.1502.102/base/system_util.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/system_util.h 2013-08-28 05:26:13.000000000 +0000 @@ -152,6 +152,9 @@ // returns true if the version of Windows is 6.2 or later. static bool IsWindows8OrLater(); + // returns true if the version of Windows is 6.3 or later. + static bool IsWindows8_1OrLater(); + // returns true if the version of Windows is x64 Edition. 
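IsWindows8_1OrLater() is implemented above by instantiating the existing IsWindowsVerXOrLaterCache template with major version 6 and minor version 3, the internal version number of Windows 8.1. Stripped of the caching and the "known to be older" branch, the underlying VerifyVersionInfo check looks roughly like the following sketch; the APIs are the real Win32 ones, while the function name is illustrative.

#ifdef _WIN32
#include <windows.h>

// Sketch of the version test behind IsWindows8_1OrLater(): asks the OS
// whether the running version is at least NT 6.3.
bool IsWindows8_1OrLaterSketch() {
  OSVERSIONINFOEXW osvi = {};
  osvi.dwOSVersionInfoSize = sizeof(osvi);
  osvi.dwMajorVersion = 6;
  osvi.dwMinorVersion = 3;

  DWORDLONG conditions = 0;
  VER_SET_CONDITION(conditions, VER_MAJORVERSION, VER_GREATER_EQUAL);
  VER_SET_CONDITION(conditions, VER_MINORVERSION, VER_GREATER_EQUAL);

  return ::VerifyVersionInfoW(&osvi, VER_MAJORVERSION | VER_MINORVERSION,
                              conditions) != FALSE;
}
#endif  // _WIN32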
static bool IsWindowsX64(); diff -Nru mozc-1.11.1502.102/base/trie_test.cc mozc-1.11.1522.102/base/trie_test.cc --- mozc-1.11.1502.102/base/trie_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/trie_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -65,7 +65,7 @@ { LOOKUP, "abcd", "", true, "data_abcd" }, { REMOVE, "xyz", "", false, "" }, }; - const int size = ARRAYSIZE_UNSAFE(test_cases); + const int size = arraysize(test_cases); for (int i = 0; i < size; ++i) { const TestCase& test = test_cases[i]; switch (test.type) { diff -Nru mozc-1.11.1502.102/base/win_sandbox.cc mozc-1.11.1522.102/base/win_sandbox.cc --- mozc-1.11.1502.102/base/win_sandbox.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/win_sandbox.cc 2013-08-28 05:26:13.000000000 +0000 @@ -219,102 +219,200 @@ return true; } -wstring GetSSDL(WinSandbox::ObjectSecurityType shareble_object_type, - const wstring &token_user_sid, - const wstring &token_primary_group_sid) { - const wstring &allow_user = L"(A;;GA;;;" + token_user_sid + L")"; - const wchar_t allow_rw_low_integrity[] = - L"S:(ML;;" SDDL_NO_EXECUTE_UP L";;;" SDDL_ML_LOW L")"; - - const wchar_t allow_r_low_integrity[] = - L"S:(ML;;" SDDL_NO_WRITE_UP SDDL_NO_EXECUTE_UP - L";;;" SDDL_ML_LOW L")"; +wstring Allow(const wstring &access_right, const wstring &account_sid) { + return (wstring(L"(") + SDDL_ACCESS_ALLOWED + L";;" + + access_right + L";;;" + account_sid + L")"); +} + +wstring Deny(const wstring &access_right, const wstring &account_sid) { + return (wstring(L"(") + SDDL_ACCESS_DENIED + L";;" + + access_right + L";;;" + account_sid + L")"); +} + +wstring MandatoryLevel(const wstring &mandatory_label, + const wstring &integrity_levels) { + return (wstring(L"(") + SDDL_MANDATORY_LABEL + L";;" + + mandatory_label + L";;;" + integrity_levels + L")"); +} + +// SDDL_ALL_APP_PACKAGES is available on Windows SDK 8.0 and later. +#ifndef SDDL_ALL_APP_PACKAGES +#define SDDL_ALL_APP_PACKAGES L"AC" +#endif // SDDL_ALL_APP_PACKAGES + +// SDDL for PROCESS_QUERY_INFORMATION is not defined. So use hex digits instead. +#ifndef SDDL_PROCESS_QUERY_INFORMATION +static_assert(PROCESS_QUERY_INFORMATION == 0x0400, + "PROCESS_QUERY_INFORMATION must be 0x0400"); +#define SDDL_PROCESS_QUERY_INFORMATION L"0x0400" +#endif // SDDL_PROCESS_QUERY_INFORMATION + +// SDDL for PROCESS_QUERY_LIMITED_INFORMATION is not defined. So use hex digits +// instead. +#ifndef SDDL_PROCESS_QUERY_LIMITED_INFORMATION +static_assert(PROCESS_QUERY_LIMITED_INFORMATION == 0x1000, + "PROCESS_QUERY_LIMITED_INFORMATION must be 0x1000"); +#define SDDL_PROCESS_QUERY_LIMITED_INFORMATION L"0x1000" +#endif // SDDL_PROCESS_QUERY_LIMITED_INFORMATION - wstring ssdl = L"O:" + token_user_sid - + L"G:" + token_primary_group_sid; +} // namespace +wstring WinSandbox::GetSDDL(ObjectSecurityType shareble_object_type, + const wstring &token_user_sid, + const wstring &token_primary_group_sid, + bool is_windows_vista_or_later, + bool is_windows_8_or_later) { // See http://social.msdn.microsoft.com/Forums/en-US/windowssecurity/thread/e92502b1-0b9f-4e02-9d72-e4e47e924a8f/ - // for how to ccess named objects from an AppContainer. + // for how to acess named objects from an AppContainer. 
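The rewritten GetSDDL() below builds every access control entry from the small Allow()/Deny()/MandatoryLevel() helpers instead of hand-written string literals, so, for example, Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM) yields "(A;;GA;;;SY)". The following sketch shows that composition with plain std::string; on Windows the one- and two-letter fragments come from <sddl.h>, and the k* constants here are merely stand-ins so the sketch compiles anywhere.

#include <iostream>
#include <string>

namespace {
// Stand-ins for the <sddl.h> macros used by win_sandbox.cc.
const char kAccessAllowed[] = "A";   // SDDL_ACCESS_ALLOWED
const char kAccessDenied[] = "D";    // SDDL_ACCESS_DENIED
const char kGenericAll[] = "GA";     // SDDL_GENERIC_ALL
const char kLocalSystem[] = "SY";    // SDDL_LOCAL_SYSTEM
const char kNetwork[] = "NU";        // SDDL_NETWORK

// One allow ACE: "(A;;<right>;;;<sid>)".
std::string Allow(const std::string &right, const std::string &sid) {
  return std::string("(") + kAccessAllowed + ";;" + right + ";;;" + sid + ")";
}

// One deny ACE: "(D;;<right>;;;<sid>)".
std::string Deny(const std::string &right, const std::string &sid) {
  return std::string("(") + kAccessDenied + ";;" + right + ";;;" + sid + ")";
}
}  // namespace

int main() {
  // A DACL for a sharable pipe starts by denying remote access and allowing
  // LocalSystem, as in the kSharablePipe case below.
  const std::string dacl =
      Deny(kGenericAll, kNetwork) + Allow(kGenericAll, kLocalSystem);
  std::cout << dacl << std::endl;  // (D;;GA;;;NU)(A;;GA;;;SY)
  return 0;
}

The full descriptor is then assembled in the order O:<owner SID>, G:<group SID>, D:<DACL ACEs>, S:<SACL ACEs>, which is exactly the string form the new win_sandbox_test.cc expectations compare against, for example L"O:S-8G:S-9D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)" for kPrivateObject.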
+ + wstring dacl; + wstring sacl; switch (shareble_object_type) { case WinSandbox::kSharablePipe: - // Sharable Named Pipe: - // - Deny Remote Acccess - // - Allow general access to LocalSystem - // - Allow general access to Built-in Administorators - // - Allow general access to the current user - // - Allow general access to ALL APPLICATION PACKAGES - // - Allow read/write access to low integrity - ssdl += L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)"; - if (SystemUtil::IsWindows8OrLater()) { - ssdl += L"(A;;GA;;;AC)"; - } - ssdl += allow_user; - if (SystemUtil::IsVistaOrLater()) { - ssdl += allow_rw_low_integrity; + // Deny Remote Acccess + dacl += Deny(SDDL_GENERIC_ALL, SDDL_NETWORK); + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + if (is_windows_8_or_later) { + // Allow general access to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_GENERIC_ALL, SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + if (is_windows_vista_or_later) { + // Allow read/write access to low integrity + sacl += MandatoryLevel(SDDL_NO_EXECUTE_UP, SDDL_ML_LOW); + } + break; + case WinSandbox::kLooseSharablePipe: + // Deny Remote Acccess + dacl += Deny(SDDL_GENERIC_ALL, SDDL_NETWORK); + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + if (is_windows_8_or_later) { + // Allow general access to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_GENERIC_ALL, SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + // Skip 2nd-phase ACL validation against restricted tokens. + dacl += Allow(SDDL_GENERIC_ALL, SDDL_RESTRICTED_CODE); + if (is_windows_vista_or_later) { + // Allow read/write access to low integrity + sacl += MandatoryLevel(SDDL_NO_EXECUTE_UP, SDDL_ML_LOW); } break; case WinSandbox::kSharableEvent: - // Sharable Event: - // - Allow general access to LocalSystem - // - Allow general access to Built-in Administorators - // - Allow general access to the current user - // - Allow state change/synchronize to ALL APPLICATION PACKAGES - // - Allow read/write access to low integrity - ssdl += L"D:(A;;GA;;;SY)(A;;GA;;;BA)"; - if (SystemUtil::IsWindows8OrLater()) { - ssdl += L"(A;;GX;;;AC)"; - } - ssdl += allow_user; - if (SystemUtil::IsVistaOrLater()) { - ssdl += allow_rw_low_integrity; + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + if (is_windows_8_or_later) { + // Allow state change/synchronize to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_GENERIC_EXECUTE, SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + // Skip 2nd-phase ACL validation against restricted tokens regarding + // change/synchronize. 
+ dacl += Allow(SDDL_GENERIC_EXECUTE, SDDL_RESTRICTED_CODE); + if (is_windows_vista_or_later) { + // Allow read/write access to low integrity + sacl += MandatoryLevel(SDDL_NO_EXECUTE_UP, SDDL_ML_LOW); } break; case WinSandbox::kSharableMutex: - // Sharable Mutex: - // - Allow general access to LocalSystem - // - Allow general access to Built-in Administorators - // - Allow general access to the current user - // - Allow state change/synchronize to ALL APPLICATION PACKAGES - // - Allow read/write access to low integrity - ssdl += L"D:(A;;GA;;;SY)(A;;GA;;;BA)"; - if (SystemUtil::IsWindows8OrLater()) { - ssdl += L"(A;;GX;;;AC)"; - } - ssdl += allow_user; - if (SystemUtil::IsVistaOrLater()) { - ssdl += allow_rw_low_integrity; + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + if (is_windows_8_or_later) { + // Allow state change/synchronize to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_GENERIC_EXECUTE, SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + // Skip 2nd-phase ACL validation against restricted tokens regarding + // change/synchronize. + dacl += Allow(SDDL_GENERIC_EXECUTE, SDDL_RESTRICTED_CODE); + if (is_windows_vista_or_later) { + // Allow read/write access to low integrity + sacl += MandatoryLevel(SDDL_NO_EXECUTE_UP, SDDL_ML_LOW); } break; case WinSandbox::kSharableFileForRead: - // Sharable Mutex: - // - Allow general access to LocalSystem - // - Allow general access to Built-in Administorators - // - Allow general access to the current user - // - Allow general read access to ALL APPLICATION PACKAGES - // - Allow read access to low integrity - ssdl += L"D:(A;;GA;;;SY)(A;;GA;;;BA)"; - if (SystemUtil::IsWindows8OrLater()) { - ssdl += L"(A;;GR;;;AC)"; - } - ssdl += allow_user; - if (SystemUtil::IsVistaOrLater()) { - ssdl += allow_r_low_integrity; + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + // Allow read access to low integrity + if (is_windows_8_or_later) { + // Allow general read access to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_GENERIC_READ, SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + // Skip 2nd-phase ACL validation against restricted tokens regarding + // general read access. 
+ dacl += Allow(SDDL_GENERIC_READ, SDDL_RESTRICTED_CODE); + if (is_windows_vista_or_later) { + // Allow read access to low integrity + sacl += MandatoryLevel( + SDDL_NO_WRITE_UP SDDL_NO_EXECUTE_UP, SDDL_ML_LOW); + } + break; + case WinSandbox::kIPCServerProcess: + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + if (is_windows_8_or_later) { + // Allow PROCESS_QUERY_LIMITED_INFORMATION to ALL APPLICATION PACKAGES + dacl += Allow(SDDL_PROCESS_QUERY_LIMITED_INFORMATION, + SDDL_ALL_APP_PACKAGES); + } + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); + if (is_windows_vista_or_later) { + // Allow PROCESS_QUERY_LIMITED_INFORMATION to restricted tokens + dacl += Allow(SDDL_PROCESS_QUERY_LIMITED_INFORMATION, + SDDL_RESTRICTED_CODE); + } else { + // Allow PROCESS_QUERY_INFORMATION to restricted tokens + dacl += Allow(SDDL_PROCESS_QUERY_INFORMATION, SDDL_RESTRICTED_CODE); } break; case WinSandbox::kPrivateObject: default: - // General private object: - // - Allow general access to LocalSystem - // - Allow general access to Built-in Administorators - // - Allow general access to the current user - ssdl += (L"D:(A;;GA;;;SY)(A;;GA;;;BA)" + allow_user); + // Allow general access to LocalSystem + dacl += Allow(SDDL_GENERIC_ALL, SDDL_LOCAL_SYSTEM); + // Allow general access to Built-in Administorators + dacl += Allow(SDDL_GENERIC_ALL, SDDL_BUILTIN_ADMINISTRATORS); + // Allow general access to the current user + dacl += Allow(SDDL_GENERIC_ALL, token_user_sid); break; } - return ssdl; -} + wstring sddl; + // Owner SID + sddl += ((SDDL_OWNER SDDL_DELIMINATOR) + token_user_sid); + // Primary Group SID + sddl += ((SDDL_GROUP SDDL_DELIMINATOR) + token_primary_group_sid); + // DACL + if (!dacl.empty()) { + sddl += ((SDDL_DACL SDDL_DELIMINATOR) + dacl); + } + // SACL + if (!sacl.empty()) { + sddl += ((SDDL_SACL SDDL_DELIMINATOR) + sacl); + } -} // namespace + return sddl; +} Sid::Sid(const SID *sid) { ::CopySid(sizeof(sid_), sid_, const_cast(sid)); @@ -379,13 +477,14 @@ return false; } - const wstring &ssdl = GetSSDL( - shareble_object_type, token_user_sid, token_primary_group_sid); + const wstring &sddl = GetSDDL( + shareble_object_type, token_user_sid, token_primary_group_sid, + SystemUtil::IsVistaOrLater(), SystemUtil::IsWindows8OrLater()); // Create self-relative SD PSECURITY_DESCRIPTOR self_relative_desc = nullptr; if (!::ConvertStringSecurityDescriptorToSecurityDescriptorW( - ssdl.c_str(), + sddl.c_str(), SDDL_REVISION_1, &self_relative_desc, nullptr)) { @@ -574,7 +673,7 @@ PSECURITY_ATTRIBUTES security_attributes_ptr = nullptr; SECURITY_ATTRIBUTES security_attributes = {}; - if (WinSandbox::MakeSecurityAttributes(WinSandbox::kPrivateObject, + if (WinSandbox::MakeSecurityAttributes(WinSandbox::kIPCServerProcess, &security_attributes)) { security_attributes_ptr = &security_attributes; // Override the impersonation thread token's DACL to avoid http://b/1728895 @@ -690,7 +789,8 @@ return true; } -} // anonymous namespace + +} // namespace WinSandbox::SecurityInfo::SecurityInfo() : primary_level(WinSandbox::USER_LOCKDOWN), @@ -1098,7 +1198,8 @@ return (result != FALSE); } -} // anonymous namespace + +} // namespace vector WinSandbox::GetSidsToDisable(HANDLE effective_token, TokenLevel security_level) { @@ -1328,5 +1429,6 @@ restricted_token->reset(restricted_token_ret); return true; } + } // namespace 
mozc #endif // OS_WIN diff -Nru mozc-1.11.1502.102/base/win_sandbox.h mozc-1.11.1522.102/base/win_sandbox.h --- mozc-1.11.1502.102/base/win_sandbox.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/win_sandbox.h 2013-08-28 05:26:13.000000000 +0000 @@ -109,13 +109,19 @@ // Used for an object that is inaccessible from lower sandbox level. kPrivateObject = 0, // Used for a namedpipe object that is accessible from lower sandbox level. - kSharablePipe = 1, + kSharablePipe, + // Used for a namedpipe object that is accessible from lower sandbox level + // including processes with restricted tokens. + kLooseSharablePipe, // Used for an event object that is accessible from lower sandbox level. - kSharableEvent = 2, + kSharableEvent, // Used for a mutex object that is accessible from lower sandbox level. - kSharableMutex = 3, + kSharableMutex, // Used for a file object that can be read from lower sandbox level. - kSharableFileForRead = 4, + kSharableFileForRead, + // Used for an IPC process object that is queriable from lower sandbox + // level. + kIPCServerProcess, }; static bool MakeSecurityAttributes(ObjectSecurityType shareble_object_type, SECURITY_ATTRIBUTES *security_descriptor); @@ -178,9 +184,19 @@ IntegrityLevel integrity_level, ScopedHandle* restricted_token); + protected: + // Returns SDDL for given |shareble_object_type|. + // This method is placed here for unit testing. + static wstring GetSDDL(ObjectSecurityType shareble_object_type, + const wstring &token_user_sid, + const wstring &token_primary_group_sid, + bool is_windows_vista_or_later, + bool is_windows_8_or_later); + private: DISALLOW_IMPLICIT_CONSTRUCTORS(WinSandbox); }; + } // namespace mozc #endif // OS_WIN #endif // MOZC_BASE_WIN_SANDBOX_H_ diff -Nru mozc-1.11.1502.102/base/win_sandbox_test.cc mozc-1.11.1522.102/base/win_sandbox_test.cc --- mozc-1.11.1502.102/base/win_sandbox_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/base/win_sandbox_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -28,12 +28,23 @@ // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include "base/win_sandbox.h" + #include "base/scoped_handle.h" #include "testing/base/public/googletest.h" #include "testing/base/public/gunit.h" namespace mozc { namespace { + +class TestableWinSandbox : public WinSandbox { + public: + // Change access rights. + using WinSandbox::GetSDDL; + + private: + DISALLOW_IMPLICIT_CONSTRUCTORS(TestableWinSandbox); +}; + void VerifySidContained(const vector sids, WELL_KNOWN_SID_TYPE expected_well_known_sid) { Sid expected_sid(expected_well_known_sid); @@ -46,7 +57,6 @@ } EXPECT_TRUE(false) << "Not found. 
Expected SID: " << expected_well_known_sid; } -} // anonymous namespace TEST(WinSandboxTest, GetSidsToDisable) { HANDLE process_token_ret = NULL; @@ -135,4 +145,162 @@ VerifySidContained(interactive, WinBuiltinUsersSid); } + +const wchar_t kDummyUserSID[] = L"S-8"; +const wchar_t kDummyGroupSID[] = L"S-9"; + +wstring GetSDDLForXP(WinSandbox::ObjectSecurityType type) { + return TestableWinSandbox::GetSDDL( + type, kDummyUserSID, kDummyGroupSID, false, false); +} + +wstring GetSDDLForVista(WinSandbox::ObjectSecurityType type) { + return TestableWinSandbox::GetSDDL( + type, kDummyUserSID, kDummyGroupSID, true, false); +} + +wstring GetSDDLForWin8(WinSandbox::ObjectSecurityType type) { + return TestableWinSandbox::GetSDDL( + type, kDummyUserSID, kDummyGroupSID, true, true); +} + +TEST(WinSandboxTest, GetSDDLForSharablePipe) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)", + GetSDDLForXP(WinSandbox::kSharablePipe)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)" + L"S:(ML;;NX;;;LW)", + GetSDDLForVista(WinSandbox::kSharablePipe)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;AC)(A;;GA;;;S-8)" + L"S:(ML;;NX;;;LW)", + GetSDDLForWin8(WinSandbox::kSharablePipe)); +} + +TEST(WinSandboxTest, GetSDDLForLooseSharablePipe) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GA;;;RC)", + GetSDDLForXP(WinSandbox::kLooseSharablePipe)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GA;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForVista(WinSandbox::kLooseSharablePipe)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(D;;GA;;;NU)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;AC)(A;;GA;;;S-8)" + L"(A;;GA;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForWin8(WinSandbox::kLooseSharablePipe)); +} + +TEST(WinSandboxTest, GetSDDLForSharableEvent) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GX;;;RC)", + GetSDDLForXP(WinSandbox::kSharableEvent)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GX;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForVista(WinSandbox::kSharableEvent)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GX;;;AC)(A;;GA;;;S-8)(A;;GX;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForWin8(WinSandbox::kSharableEvent)); +} + +TEST(WinSandboxTest, GetSDDLForSharableMutex) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GX;;;RC)", + GetSDDLForXP(WinSandbox::kSharableMutex)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GX;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForVista(WinSandbox::kSharableMutex)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GX;;;AC)(A;;GA;;;S-8)(A;;GX;;;RC)" + L"S:(ML;;NX;;;LW)", + GetSDDLForWin8(WinSandbox::kSharableMutex)); +} + +TEST(WinSandboxTest, GetSDDLForSharableFileForRead) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GR;;;RC)", + GetSDDLForXP(WinSandbox::kSharableFileForRead)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;GR;;;RC)" + L"S:(ML;;NWNX;;;LW)", + GetSDDLForVista(WinSandbox::kSharableFileForRead)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GR;;;AC)(A;;GA;;;S-8)(A;;GR;;;RC)" + L"S:(ML;;NWNX;;;LW)", + GetSDDLForWin8(WinSandbox::kSharableFileForRead)); +} + +TEST(WinSandboxTest, GetSDDLForIPCServerProcess) { 
+ EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;0x0400;;;RC)", + GetSDDLForXP(WinSandbox::kIPCServerProcess)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)(A;;0x1000;;;RC)", + GetSDDLForVista(WinSandbox::kIPCServerProcess)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;0x1000;;;AC)(A;;GA;;;S-8)" + L"(A;;0x1000;;;RC)", + GetSDDLForWin8(WinSandbox::kIPCServerProcess)); +} + +TEST(WinSandboxTest, GetSDDLForPrivateObject) { + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)", + GetSDDLForXP(WinSandbox::kPrivateObject)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)", + GetSDDLForVista(WinSandbox::kPrivateObject)); + EXPECT_EQ( + L"O:S-8" + L"G:S-9" + L"D:(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;S-8)", + GetSDDLForWin8(WinSandbox::kPrivateObject)); +} + +} // namespace } // namespace mozc diff -Nru mozc-1.11.1502.102/build_mozc.py mozc-1.11.1522.102/build_mozc.py --- mozc-1.11.1502.102/build_mozc.py 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/build_mozc.py 2013-08-28 05:25:59.000000000 +0000 @@ -203,10 +203,15 @@ gyp_file_names.extend(glob.glob('%s/build_tools/*/*.gyp' % SRC_DIR)) # Include tests gyp gyp_file_names.append('%s/gyp/tests.gyp' % SRC_DIR) - # Include subdirectory of dictionary - gyp_file_names.extend(glob.glob('%s/dictionary/*/*.gyp' % SRC_DIR)) + # Include subdirectories of data/test/session/scenario + gyp_file_names.extend(glob.glob('%s/data/test/session/scenario/*.gyp' % + SRC_DIR)) + gyp_file_names.extend(glob.glob('%s/data/test/session/scenario/*/*.gyp' % + SRC_DIR)) # Include subdirectories of data_manager gyp_file_names.extend(glob.glob('%s/data_manager/*/*.gyp' % SRC_DIR)) + # Include subdirectory of dictionary + gyp_file_names.extend(glob.glob('%s/dictionary/*/*.gyp' % SRC_DIR)) # Include subdirectory of rewriter gyp_file_names.extend(glob.glob('%s/rewriter/*/*.gyp' % SRC_DIR)) # Include subdirectory of win32 and breakpad for Windows @@ -249,17 +254,6 @@ return os.path.dirname(ABS_SCRIPT_PATH) -def RunPackageVerifiers(build_dir): - """Runs script to verify created packages/binaries. - - Args: - build_dir: the directory where build results are. - """ - binary_size_checker = os.path.join( - GetBuildScriptDirectoryName(), 'build_tools', 'binary_size_checker.py') - RunOrDie([binary_size_checker, '--target_directory', build_dir]) - - def AddCommonOptions(parser): """Adds the options common among the commands.""" parser.add_option('--build_base', dest='build_base', @@ -427,12 +421,6 @@ AddFeatureOption(parser, feature_name='http client', macro_name='MOZC_ENABLE_HTTP_CLIENT', option_name='http_client') - AddFeatureOption(parser, feature_name='ambiguous search', - macro_name='MOZC_ENABLE_AMBIGUOUS_SEARCH', - option_name='ambiguous_search') - AddFeatureOption(parser, feature_name='typing correction', - macro_name='MOZC_ENABLE_TYPING_CORRECTION', - option_name='typing_correction') AddFeatureOption(parser, feature_name='mode_indicator', macro_name='MOZC_ENABLE_MODE_INDICATOR', option_name='mode_indicator') @@ -828,19 +816,15 @@ windows=is_official_dev, mac=is_official_dev) SetCommandLineForFeature(option_name='http_client', - linux=is_official, + linux=False, windows=is_official, mac=is_official, chromeos=False, # not supported. android=is_official, # System dictionary is read with HttpClient in NaCl. 
nacl=True) - SetCommandLineForFeature(option_name='ambiguous_search', - android=True) - SetCommandLineForFeature(option_name='typing_correction', - android=True) SetCommandLineForFeature(option_name='mode_indicator', - windows=is_official_dev_or_oss) + windows=True) command_line.extend(['-D', 'target_platform=%s' % options.target_platform]) @@ -1118,11 +1102,6 @@ logging.error('Unsupported platform: %s', os.name) return - RunPackageVerifiers( - os.path.join(GetBuildBaseName(options, - GetMozcVersion().GetTargetPlatform()), - options.configuration)) - # Revert python path. os.environ['PYTHONPATH'] = original_python_path diff -Nru mozc-1.11.1502.102/build_tools/binary_size_checker.py mozc-1.11.1522.102/build_tools/binary_size_checker.py --- mozc-1.11.1502.102/build_tools/binary_size_checker.py 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/build_tools/binary_size_checker.py 2013-08-28 05:25:59.000000000 +0000 @@ -106,7 +106,7 @@ help='The directory which will contain target binaries ' 'and packages.') parser.add_option('--target_filename', dest='target_filename', - help='The target filename.') + help='The target filenames (comma separated).') (options, unused_args) = parser.parse_args() return options @@ -123,9 +123,10 @@ logging.error('Files in target_directory exceeds the limit.') sys.exit(-1) if options.target_filename: - if not CheckFileSize(options.target_filename): - logging.error('The target_filename exceeds the limit.') - sys.exit(-1) + for filename in options.target_filename.split(','): + if not CheckFileSize(filename): + logging.error('The target_filename exceeds the limit.') + sys.exit(-1) if __name__ == '__main__': diff -Nru mozc-1.11.1502.102/build_tools/build_tools.gyp mozc-1.11.1522.102/build_tools/build_tools.gyp --- mozc-1.11.1502.102/build_tools/build_tools.gyp 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/build_tools/build_tools.gyp 2013-08-28 05:25:59.000000000 +0000 @@ -63,12 +63,6 @@ '../rewriter/rewriter_base.gyp:install_gen_usage_rewriter_dictionary_main', ], 'conditions': [ - ['language=="pinyin"', { - 'dependencies': [ - '../languages/pinyin/pinyin.gyp:' - 'install_gen_pinyin_english_dictionary_data_main', - ] - }], ], }, ], diff -Nru mozc-1.11.1502.102/build_tools/tweak_manifest.py mozc-1.11.1522.102/build_tools/tweak_manifest.py --- mozc-1.11.1502.102/build_tools/tweak_manifest.py 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/build_tools/tweak_manifest.py 2013-08-28 05:25:59.000000000 +0000 @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2010-2013, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tweaks manifest file of NaCl Mozc. + + % python tweak_manfest.py --output=out.txt --input=in.txt \ + --version_file=version.txt + +See mozc_version.py for the detailed information for version.txt. +""" + +__author__ = "horo" + +import json +import logging +import optparse + +from build_tools import mozc_version + + + +def ParseOptions(): + """Parse command line options. + + Returns: + An options data. + """ + parser = optparse.OptionParser() + parser.add_option('--version_file', dest='version_file') + parser.add_option('--output', dest='output') + parser.add_option('--input', dest='input') + parser.add_option('--enable_cloud_sync', + action='store_true', + default=False, + dest='enable_cloud_sync') + + (options, unused_args) = parser.parse_args() + return options + + + + +def main(): + """The main function.""" + options = ParseOptions() + if options.version_file is None: + logging.error('--version_file is not specified.') + exit(-1) + if options.output is None: + logging.error('--output is not specified.') + exit(-1) + if options.input is None: + logging.error('--input is not specified.') + exit(-1) + + version = mozc_version.MozcVersion(options.version_file) + with open(options.input) as f: + result = f.read() + result = version.GetVersionInFormat(result) + with open(options.output, 'w') as f: + f.write(result) + + +if __name__ == '__main__': + main() diff -Nru mozc-1.11.1502.102/chrome/nacl/_locales/en/messages_template.json mozc-1.11.1522.102/chrome/nacl/_locales/en/messages_template.json --- mozc-1.11.1502.102/chrome/nacl/_locales/en/messages_template.json 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/_locales/en/messages_template.json 2013-08-28 05:26:13.000000000 +0000 @@ -64,10 +64,10 @@ "message": "Direct input" }, "configPunctuationMethod": { - "message": "Punctuation style" + "message": "Punctuation style:" }, "configPreeditMethod": { - "message": "Input mode" + "message": "Input mode:" }, "configPreeditMethodRomaji": { "message": "Romaji" @@ -76,10 +76,10 @@ "message": "Kana" }, "configSymbolMethod": { - "message": "Symbol style" + "message": "Symbol style:" }, "configSpaceCharacterForm": { - "message": "Space input style" + "message": "Space input style:" }, "configSpaceCharacterFormFollow": { "message": "Follow input mode" @@ -90,26 +90,14 @@ "configSpaceCharacterFormHalf": { "message": "Halfwidth" }, - "configHistoryLarningLevel": { - "message": "Adjust conversion based on previous input" - }, - "configHistoryLarningLevelDefault": { - "message": "Yes" - }, - "configHistoryLarningLevelReadOnly": { - "message": "Yes (don't record new data)" - }, - "configHistoryLarningLevelOff": { - "message": "No" - }, "configSelectionShortcut": { - "message": "Selection shortcut" + "message": "Selection shortcut:" }, "configSelectionShortcutNo": { "message": "No shortcut" }, "configShiftKeyModeSwitch": { - "message": "Shift key mode switch" + "message": "Shift key mode switch:" }, "configShiftKeyModeSwitchOff": { "message": "Off" @@ 
-121,7 +109,7 @@ "message": "Katakana" }, "configSessionKeymap": { - "message": "Keymap style" + "message": "Keymap style:" }, "configSessionKeymapAtok": { "message": "ATOK" @@ -148,7 +136,7 @@ "message": "Use system dictionary" }, "configSuggestionsSize": { - "message": "Maximum number of suggestions" + "message": "Number of suggestions:" }, "configSettingsTitle": { "message": "Japanese input settings" @@ -157,7 +145,7 @@ "message": "Basics" }, "configInputAssistanceTitle": { - "message": "Input Assistance" + "message": "Input assistance" }, "configSuggestTitle": { "message": "Suggest" @@ -165,11 +153,23 @@ "configPrivacyTitle": { "message": "Privacy" }, - "configClearUserHistory": { + "configClearHistory": { + "message": "Clear personalization data..." + }, + "configClearHistoryTitle": { "message": "Clear personalization data" }, - "configClearUserHistoryMessage": { - "message": "Do you want to clear all personalization data?" + "configClearHistoryConversionHistory": { + "message": "Conversion history" + }, + "configClearHistorySuggestionHistory": { + "message": "Suggestion history" + }, + "configClearHistoryOkButton": { + "message": "Clear personalization data" + }, + "configClearHistoryCancelButton": { + "message": "Cancel" }, "configCreditsDescription": { "message": "Copyright © 2013 Google Inc. All Rights Reserved." @@ -177,6 +177,12 @@ "configOssCreditsDescription": { "message": "This software is made possible by open source software." }, + "configDialogCancel": { + "message": "Cancel" + }, + "configDialogOk": { + "message": "OK" + }, "configSyncConfigCancel": { "message": "Cancel" }, @@ -184,7 +190,7 @@ "message": "OK" }, "configSyncStartSync": { - "message": "Start Sync" + "message": "Start Sync..." }, "configSyncStopSync": { "message": "Stop Sync" @@ -231,7 +237,7 @@ "configSyncUserDictionaryByteSizeExceeded": { "message": "Cannot save dictionaries because Sync Dictionary exceeds its binary size limit." }, - "configSyncUserDictionaryNumDicrionaryExceeded": { + "configSyncUserDictionaryNumDictionaryExceeded": { "message": "Cannot save dictionaries because the number of dictionaries for sync exceeds its limit." }, "configSyncUnknownErrorFound": { @@ -246,7 +252,127 @@ "configSyncUnknownError": { "message": "Unknown sync error" }, + "configDictionaryToolTitle": { + "message": "User dictionary" + }, + "configDictionaryToolDescription": { + "message": "Add your own words to the user dictionary in order to customize the conversion candidates." + }, + "configDictionaryToolButton": { + "message": "Manage user dictionary..." 
+ }, + "dictionaryToolPageTitle": { + "message": "User dictionary" + }, + "dictionaryToolReadingTitle": { + "message": "Reading" + }, + "dictionaryToolWordTitle": { + "message": "Word" + }, + "dictionaryToolCategoryTitle": { + "message": "Category" + }, + "dictionaryToolCommentTitle": { + "message": "Comment" + }, + "dictionaryToolCreateButton": { + "message": "New dictionary" + }, + "dictionaryToolRenameButton": { + "message": "Rename" + }, + "dictionaryToolDeleteButton": { + "message": "Delete" + }, + "dictionaryToolExportButton": { + "message": "Export" + }, + "dictionaryToolDoneButton": { + "message": "Done" + }, + "dictionaryToolReadingNewInput": { + "message": "New reading" + }, + "dictionaryToolWordNewInput": { + "message": "New word" + }, + "dictionaryToolCommentNewInput": { + "message": "Comment" + }, + "dictionaryToolDictionaryName": { + "message": "Dictionary Name" + }, + "dictionaryToolSyncableDictionaryName": { + "message": "Sync-able dictionary" + }, + "dictionaryToolDeleteDictionaryConfirm": { + "message": "Do you want to delete $dictName$?", + "placeholders": { + "dictName": { + "content": "$1", + "example": "Dictionary Name" + } + } + }, + "dictionaryToolStatuErrorGeneral": { + "message": "The operation has failed." + }, + "dictionaryToolStatuErrorFileNotFound": { + "message": "Could not open the file." + }, + "dictionaryToolStatuErrorInvalidFileFormat": { + "message": "Could not read the file." + }, + "dictionaryToolStatuErrorFileSizeLimitExceeded": { + "message": "The data size exceeds the file size limit." + }, + "dictionaryToolStatuErrorDictionarySizeLimitExceeded": { + "message": "Can't create any more dictionaries." + }, + "dictionaryToolStatuErrorEntrySizeLimitExceeded": { + "message": "Can't create any more entries in this dictionary." + }, + "dictionaryToolStatuErrorDictionaryNameEmpty": { + "message": "Please type a dictionary name." + }, + "dictionaryToolStatuErrorDictionaryNameTooLong": { + "message": "The name is too long." + }, + "dictionaryToolStatuErrorDictionaryNameContainsInvalidCharacter": { + "message": "The name contains invalid character(s)." + }, + "dictionaryToolStatuErrorDictionaryNameDuplicated": { + "message": "The name already exists." + }, + "dictionaryToolStatuErrorReadingEmpty": { + "message": "Please type the reading." + }, + "dictionaryToolStatuErrorReadingTooLong": { + "message": "The reading is too long." + }, + "dictionaryToolStatuErrorReadingContainsInvalidCharacter": { + "message": "The reading contains invalid character(s)." + }, + "dictionaryToolStatuErrorWordEmpty": { + "message": "Please type the word." + }, + "dictionaryToolStatuErrorWordTooLong": { + "message": "The word is too long." + }, + "dictionaryToolStatuErrorWordContainsInvalidCharacter": { + "message": "The word contains invalid character(s)." + }, + "dictionaryToolStatuErrorImportTooManyWords": { + "message": "The import source contains too many words." + }, + "dictionaryToolStatuErrorImportInvalidEntries": { + "message": "Some words could not be imported." + }, + "dictionaryToolStatuErrorNoUndoHistory": { + "message": "No more operations to undo." + }, "configUploadUsageStats": { - "message": "Help make Mozc better by automatically sending usage statistics and crash reports to Google."
+ "message": "Help make Mozc better by automatically sending usage statistics and crash reports to Google" } -} \ No newline at end of file +} diff -Nru mozc-1.11.1502.102/chrome/nacl/_locales/ja/messages_template.json mozc-1.11.1522.102/chrome/nacl/_locales/ja/messages_template.json --- mozc-1.11.1502.102/chrome/nacl/_locales/ja/messages_template.json 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/_locales/ja/messages_template.json 2013-08-28 05:26:13.000000000 +0000 @@ -64,10 +64,10 @@ "message": "直接入力" }, "configPunctuationMethod": { - "message": "句読点" + "message": "句読点:" }, "configPreeditMethod": { - "message": "ローマ字入力・かな入力" + "message": "ローマ字入力・かな入力:" }, "configPreeditMethodRomaji": { "message": "ローマ字入力" @@ -76,10 +76,10 @@ "message": "かな入力" }, "configSymbolMethod": { - "message": "記号" + "message": "記号:" }, "configSpaceCharacterForm": { - "message": "スペースの入力" + "message": "スペースの入力:" }, "configSpaceCharacterFormFollow": { "message": "入力モードに従う" @@ -90,26 +90,14 @@ "configSpaceCharacterFormHalf": { "message": "半角" }, - "configHistoryLarningLevel": { - "message": "学習機能" - }, - "configHistoryLarningLevelDefault": { - "message": "学習機能有効" - }, - "configHistoryLarningLevelReadOnly": { - "message": "学習機能有効(新規学習はしない)" - }, - "configHistoryLarningLevelOff": { - "message": "学習機能無効" - }, "configSelectionShortcut": { - "message": "候補選択ショートカット" + "message": "候補選択ショートカット:" }, "configSelectionShortcutNo": { "message": "なし" }, "configShiftKeyModeSwitch": { - "message": "シフトキーでの入力切替" + "message": "シフトキーでの入力切替:" }, "configShiftKeyModeSwitchOff": { "message": "オフ" @@ -121,7 +109,7 @@ "message": "カタカナ" }, "configSessionKeymap": { - "message": "キー設定の選択" + "message": "キー設定の選択:" }, "configSessionKeymapAtok": { "message": "ATOK" @@ -148,7 +136,7 @@ "message": "システム辞書からのサジェスト自動表示を有効にする" }, "configSuggestionsSize": { - "message": "サジェストの最大候補数" + "message": "サジェストの最大候補数:" }, "configSettingsTitle": { "message": "日本語入力の設定" @@ -165,11 +153,23 @@ "configPrivacyTitle": { "message": "プライバシー" }, - "configClearUserHistory": { + "configClearHistory": { + "message": "入力履歴の削除..." + }, + "configClearHistoryTitle": { "message": "入力履歴の削除" }, - "configClearUserHistoryMessage": { - "message": "すべての入力履歴が削除されます。実行しますか?" + "configClearHistoryConversionHistory": { + "message": "変換履歴" + }, + "configClearHistorySuggestionHistory": { + "message": "サジェスト用履歴" + }, + "configClearHistoryOkButton": { + "message": "履歴データの削除" + }, + "configClearHistoryCancelButton": { + "message": "キャンセル" }, "configCreditsDescription": { "message": "Copyright © 2013 Google Inc. All Rights Reserved." @@ -177,6 +177,12 @@ "configOssCreditsDescription": { "message": "本ソフトウェアはオープンソースソフトウェアを利用しています。" }, + "configDialogCancel": { + "message": "キャンセル" + }, + "configDialogOk": { + "message": "OK" + }, "configSyncConfigCancel": { "message": "キャンセル" }, @@ -184,7 +190,7 @@ "message": "OK" }, "configSyncStartSync": { - "message": "同期する" + "message": "同期する..." }, "configSyncStopSync": { "message": "同期の解除" @@ -205,7 +211,7 @@ "message": "同期機能:オフ" }, "configSyncCustomization": { - "message": "詳細設定" + "message": "詳細設定..." 
}, "configSyncAdvancedSettings": { "message": "同期の詳細設定" @@ -231,7 +237,7 @@ "configSyncUserDictionaryByteSizeExceeded": { "message": "ファイルサイズの上限を超えたため同期用辞書をセーブできません。" }, - "configSyncUserDictionaryNumDicrionaryExceeded": { + "configSyncUserDictionaryNumDictionaryExceeded": { "message": "同期用辞書の数が上限を超えたため辞書をセーブできません。" }, "configSyncUnknownErrorFound": { @@ -246,7 +252,127 @@ "configSyncUnknownError": { "message": "未知の同期エラー" }, + "configDictionaryToolTitle": { + "message": "ユーザー辞書" + }, + "configDictionaryToolDescription": { + "message": "よく使う単語をユーザー辞書に登録することができます。" + }, + "configDictionaryToolButton": { + "message": "ユーザー辞書の管理..." + }, + "dictionaryToolPageTitle": { + "message": "ユーザー辞書" + }, + "dictionaryToolReadingTitle": { + "message": "よみ" + }, + "dictionaryToolWordTitle": { + "message": "単語" + }, + "dictionaryToolCategoryTitle": { + "message": "品詞" + }, + "dictionaryToolCommentTitle": { + "message": "コメント" + }, + "dictionaryToolCreateButton": { + "message": "辞書を作成" + }, + "dictionaryToolRenameButton": { + "message": "辞書名を変更" + }, + "dictionaryToolDeleteButton": { + "message": "辞書を削除" + }, + "dictionaryToolExportButton": { + "message": "辞書をエクスポート" + }, + "dictionaryToolDoneButton": { + "message": "完了" + }, + "dictionaryToolReadingNewInput": { + "message": "新しい単語のよみ" + }, + "dictionaryToolWordNewInput": { + "message": "新しい単語" + }, + "dictionaryToolCommentNewInput": { + "message": "コメント" + }, + "dictionaryToolDictionaryName": { + "message": "辞書名" + }, + "dictionaryToolSyncableDictionaryName": { + "message": "同期用辞書" + }, + "dictionaryToolDeleteDictionaryConfirm": { + "message": "$dictName$を削除しますか?", + "placeholders": { + "dictName": { + "content": "$1", + "example": "Dictionary Name" + } + } + }, + "dictionaryToolStatuErrorGeneral": { + "message": "操作を実行できません。" + }, + "dictionaryToolStatuErrorFileNotFound": { + "message": "ファイルを開けません。" + }, + "dictionaryToolStatuErrorInvalidFileFormat": { + "message": "ファイルを読み込めません。" + }, + "dictionaryToolStatuErrorFileSizeLimitExceeded": { + "message": "ファイルが大きすぎます。" + }, + "dictionaryToolStatuErrorDictionarySizeLimitExceeded": { + "message": "これ以上辞書を作成できません。" + }, + "dictionaryToolStatuErrorEntrySizeLimitExceeded": { + "message": "一つの辞書に含む単語が多すぎます。" + }, + "dictionaryToolStatuErrorDictionaryNameEmpty": { + "message": "辞書名を入力してください。" + }, + "dictionaryToolStatuErrorDictionaryNameTooLong": { + "message": "辞書名が長すぎます。" + }, + "dictionaryToolStatuErrorDictionaryNameContainsInvalidCharacter": { + "message": "辞書名に使用できない文字が含まれています。" + }, + "dictionaryToolStatuErrorDictionaryNameDuplicated": { + "message": "その辞書名はすでに使われています。" + }, + "dictionaryToolStatuErrorReadingEmpty": { + "message": "よみを入力してください。" + }, + "dictionaryToolStatuErrorReadingTooLong": { + "message": "よみが長すぎます。" + }, + "dictionaryToolStatuErrorReadingContainsInvalidCharacter": { + "message": "よみに使用できない文字が含まれています。" + }, + "dictionaryToolStatuErrorWordEmpty": { + "message": "単語を入力してください。" + }, + "dictionaryToolStatuErrorWordTooLong": { + "message": "単語が長すぎます。" + }, + "dictionaryToolStatuErrorWordContainsInvalidCharacter": { + "message": "単語に使用できない文字が含まれています。" + }, + "dictionaryToolStatuErrorImportTooManyWords": { + "message": "インポートする単語が多すぎます。" + }, + "dictionaryToolStatuErrorImportInvalidEntries": { + "message": "インポートできない単語がありました。" + }, + "dictionaryToolStatuErrorNoUndoHistory": { + "message": "これ以上操作を取り消せません。" + }, "configUploadUsageStats": { "message": "使用統計データや障害レポートを Google に自動送信して Mozcの機能向上に役立てる" } -} \ No newline at end of file +} diff -Nru mozc-1.11.1502.102/chrome/nacl/nacl_extension.gyp 
mozc-1.11.1522.102/chrome/nacl/nacl_extension.gyp --- mozc-1.11.1502.102/chrome/nacl/nacl_extension.gyp 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/nacl_extension.gyp 2013-08-28 05:26:13.000000000 +0000 @@ -86,6 +86,8 @@ 'dictionary_downloader', '../../base/base.gyp:base', '../../base/base.gyp:nacl_js_proxy', + '../../dictionary/dictionary_base.gyp:user_dictionary', + '../../dictionary/dictionary_base.gyp:user_pos', '../../engine/engine.gyp:engine_factory', '../../net/net.gyp:http_client', '../../net/net.gyp:json_util', @@ -352,18 +354,25 @@ 'action_name': 'gen_manifest', 'inputs': [ '../../mozc_version.txt', - '../../build_tools/replace_version.py', + '../../build_tools/tweak_manifest.py', 'manifest/manifest_template.json', ], 'outputs': [ '<(gen_out_dir)/manifest.json', ], 'action': [ - 'python', '../../build_tools/replace_version.py', + 'python', '../../build_tools/tweak_manifest.py', '--version_file', '../../mozc_version.txt', '--input', 'manifest/manifest_template.json', '--output', '<(gen_out_dir)/manifest.json', ], + 'conditions': [ + ['enable_cloud_sync==1', { + 'action': [ + '--enable_cloud_sync', + ] + }], + ], }, { 'action_name': 'gen_nacl_mozc_version', diff -Nru mozc-1.11.1502.102/chrome/nacl/nacl_mozc.js mozc-1.11.1522.102/chrome/nacl/nacl_mozc.js --- mozc-1.11.1502.102/chrome/nacl/nacl_mozc.js 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/nacl_mozc.js 2013-08-28 05:26:13.000000000 +0000 @@ -137,11 +137,12 @@ this.context_ = null; /** - * Session id of Mozc's session. - * @type {number} + * Session id of Mozc's session. This id is handled as uint64 in NaCl. But + * JavaScript can't handle uint64. So we handle it as string in JavaScript. + * @type {string} * @private */ - this.sessionID_ = 0; + this.sessionID_ = ''; /** * The list of candidates. @@ -449,6 +450,15 @@ }; /** + * Gets POS list from NaCl Mozc module. + * @param {!function(Object)} callback Function to be called with results + * from NaCl module. + */ +mozc.NaclMozc.prototype.getPosList = function(callback) { + this.postNaclMozcEvent_({'type': 'GetPosList'}, callback); +}; + +/** * Sends callback command to NaCl module. * @param {!function(Object)=} opt_callback Function to be called with results * from NaCl module. @@ -937,7 +947,7 @@ this.postMozcCommand_( {'input': {'type': 'DELETE_SESSION', 'id': this.sessionID_}}, this.outputResponse_.bind(this)); - this.sessionID_ = 0; + this.sessionID_ = ''; }; /** @@ -1201,11 +1211,13 @@ } chrome.identity.getAuthToken( {interactive: !!args['interactive']}, - (function(token) { - this.naclModule_['postMessage'](JSON.stringify({ - 'jscall': 'GetAuthToken', - 'access_token': token - })); + /** @param {string=} opt_token */ + (function(opt_token) { + var result = {'jscall': 'GetAuthToken'}; + if (opt_token) { + result['access_token'] = opt_token; + } + this.naclModule_['postMessage'](JSON.stringify(result)); }).bind(this)); }; @@ -1288,12 +1300,11 @@ /** * New option page. - * @param {!HTMLDocument} domDocument Document object of the option page. - * @param {!Object} consoleObject Console object of the option page. + * @param {!Window} optionWindow Window object of the option page. * @return {!mozc.OptionPage} Option page object. 
*/ -mozc.NaclMozc.prototype.newOptionPage = function(domDocument, consoleObject) { - var optionPage = new mozc.OptionPage(this, domDocument, consoleObject); +mozc.NaclMozc.prototype.newOptionPage = function(optionWindow) { + var optionPage = new mozc.OptionPage(this, optionWindow); optionPage.initialize(); return optionPage; }; diff -Nru mozc-1.11.1502.102/chrome/nacl/nacl_session_handler.cc mozc-1.11.1522.102/chrome/nacl/nacl_session_handler.cc --- mozc-1.11.1502.102/chrome/nacl/nacl_session_handler.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/nacl_session_handler.cc 2013-08-28 05:26:13.000000000 +0000 @@ -49,6 +49,8 @@ #include "config/config_handler.h" #include "config/config.pb.h" #include "data_manager/packed/packed_data_manager.h" +#include "dictionary/user_dictionary_util.h" +#include "dictionary/user_pos.h" #include "engine/engine_factory.h" #include "engine/engine_interface.h" #include "net/http_client.h" @@ -177,6 +179,9 @@ RegisterPepperInstanceForHTTPClient(instance_); PepperFileUtil::Initialize(instance_, kFileIoFileSystemExpectedSize); LoadDictionary(); + user_pos_.reset( + new UserPOS( + packed::PackedDataManager::GetUserPosManager()->GetUserPOSData())); engine_.reset(mozc::EngineFactory::Create()); session_factory_.reset(new JapaneseSessionFactory(engine_.get())); @@ -240,11 +245,14 @@ } if (message->isMember("event") && (*message)["event"].isMember("type")) { response["event"] = Json::objectValue; - response["event"]["type"] = (*message)["event"]["type"].asString(); - if ((*message)["event"]["type"].asString() == "SyncToFile") { + const string event_type = (*message)["event"]["type"].asString(); + response["event"]["type"] = event_type; + if (event_type == "SyncToFile") { response["event"]["result"] = PepperFileUtil::SyncMmapToFile(); - } else if ((*message)["event"]["type"].asString() == "GetVersionInfo") { + } else if (event_type == "GetVersionInfo") { response["event"]["version"] = Version::GetMozcVersion(); + } else if (event_type == "GetPosList") { + GetPosList(&response); } else { response["event"]["error"] = "Unsupported event"; } @@ -263,6 +271,7 @@ } private: + // Loads the dictionary. 
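  // The zipped data image is read into |output| below and used to initialize
  // a packed::PackedDataManager, which is then registered globally; the
  // UserPOS created right after LoadDictionary() obtains its POS data from it
  // via PackedDataManager::GetUserPosManager().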
void LoadDictionary() { string output; @@ -277,6 +286,23 @@ CHECK(data_manager->InitWithZippedData(output)); mozc::packed::RegisterPackedDataManager(data_manager.release()); } + + + void GetPosList(Json::Value *response) { + (*response)["event"]["posList"] = Json::Value(Json::arrayValue); + Json::Value *pos_list = &(*response)["event"]["posList"]; + vector tmp_pos_vec; + user_pos_->GetPOSList(&tmp_pos_vec); + for (int i = 0; i < tmp_pos_vec.size(); ++i) { + (*pos_list)[i] = Json::Value(Json::objectValue); + const user_dictionary::UserDictionary::PosType pos_type = + UserDictionaryUtil::ToPosType(tmp_pos_vec[i].c_str()); + (*pos_list)[i]["type"] = + Json::Value(user_dictionary::UserDictionary::PosType_Name(pos_type)); + (*pos_list)[i]["name"] = Json::Value(tmp_pos_vec[i]); + } + } + pp::Instance *instance_; BlockingQueue *message_queue_; pp::CompletionCallbackFactory factory_; @@ -286,6 +312,7 @@ #ifdef ENABLE_CLOUD_SYNC scoped_ptr sync_handler_; #endif // ENABLE_CLOUD_SYNC + scoped_ptr user_pos_; DISALLOW_COPY_AND_ASSIGN(MozcSessionHandlerThread); }; diff -Nru mozc-1.11.1502.102/chrome/nacl/option_page.js mozc-1.11.1522.102/chrome/nacl/option_page.js --- mozc-1.11.1502.102/chrome/nacl/option_page.js 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/option_page.js 2013-08-28 05:26:13.000000000 +0000 @@ -84,7 +84,7 @@ /** * Option title information data. * @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_TITLES_ = [ @@ -127,13 +127,53 @@ { id: 'sync_title', name: chrome.i18n.getMessage('configSyncTitle') + }, + { + id: 'clear_history_title', + name: chrome.i18n.getMessage('configClearHistoryTitle') + }, + { + id: 'clear_history_conversion_history_label', + name: chrome.i18n.getMessage('configClearHistoryConversionHistory') + }, + { + id: 'clear_history_suggestion_history_label', + name: chrome.i18n.getMessage('configClearHistorySuggestionHistory') + }, + { + id: 'dictionary_tool_title', + name: chrome.i18n.getMessage('configDictionaryToolTitle') + }, + { + id: 'dictionary_tool_description', + name: chrome.i18n.getMessage('configDictionaryToolDescription') + }, + { + id: 'dictionary_tool_page_title', + name: chrome.i18n.getMessage('dictionaryToolPageTitle') + }, + { + id: 'dictionary_tool_reading_title', + name: chrome.i18n.getMessage('dictionaryToolReadingTitle') + }, + { + id: 'dictionary_tool_word_title', + name: chrome.i18n.getMessage('dictionaryToolWordTitle') + }, + { + id: 'dictionary_tool_category_title', + name: chrome.i18n.getMessage('dictionaryToolCategoryTitle') + }, + { + id: 'dictionary_tool_comment_title', + name: chrome.i18n.getMessage('dictionaryToolCommentTitle') } ]; /** * Option checkbox information data. * @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_CHECKBOXES_ = [ @@ -170,7 +210,7 @@ /** * Option selection information data. 
* @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_SELECTIONS_ = [ @@ -229,25 +269,6 @@ ] }, { - id: 'history_learning_level', - configId: 'history_learning_level', - name: chrome.i18n.getMessage('configHistoryLarningLevel'), - items: [ - { - name: chrome.i18n.getMessage('configHistoryLarningLevelDefault'), - value: 'DEFAULT_HISTORY' - }, - { - name: chrome.i18n.getMessage('configHistoryLarningLevelReadOnly'), - value: 'READ_ONLY' - }, - { - name: chrome.i18n.getMessage('configHistoryLarningLevelOff'), - value: 'NO_HISTORY' - } - ] - }, - { id: 'selection_shortcut', configId: 'selection_shortcut', name: chrome.i18n.getMessage('configSelectionShortcut'), @@ -300,7 +321,7 @@ /** * Option number information data. * @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_NUMBERS_ = [ @@ -316,13 +337,24 @@ /** * Option button information data. * @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_BUTTONS_ = [ { - id: 'clear_user_history', - name: chrome.i18n.getMessage('configClearUserHistory') + id: 'clear_history_open', + name: chrome.i18n.getMessage('configClearHistory') + }, + { + id: 'clear_history_close' + }, + { + id: 'clear_history_ok', + name: chrome.i18n.getMessage('configClearHistoryOkButton') + }, + { + id: 'clear_history_cancel', + name: chrome.i18n.getMessage('configClearHistoryCancelButton') }, { id: 'sync_config_cancel', @@ -333,19 +365,49 @@ name: chrome.i18n.getMessage('configSyncConfigOk') }, { + id: 'sync_config_close' + }, + { id: 'sync_toggle_button', name: chrome.i18n.getMessage('configSyncStartSync') }, { id: 'sync_customization_button', name: chrome.i18n.getMessage('configSyncCustomization') + }, + { + id: 'dictionary_tool_open_button', + name: chrome.i18n.getMessage('configDictionaryToolButton') + }, + { + id: 'create_dictionary_button', + name: chrome.i18n.getMessage('dictionaryToolCreateButton') + }, + { + id: 'rename_dictionary_button', + name: chrome.i18n.getMessage('dictionaryToolRenameButton') + }, + { + id: 'delete_dictionary_button', + name: chrome.i18n.getMessage('dictionaryToolDeleteButton') + }, + { + id: 'export_dictionary_button', + name: chrome.i18n.getMessage('dictionaryToolExportButton') + }, + { + id: 'dictionary_tool_done_button', + name: chrome.i18n.getMessage('dictionaryToolDoneButton') + }, + { + id: 'dictionary_tool_close' } ]; /** * Option sync checkbox information data. * @const - * @type {Array.} + * @type {!Array.} * @private */ mozc.OPTION_SYNC_CHECKBOXES_ = [ @@ -362,6 +424,55 @@ ]; /** + * Dictionary tool error messages. 
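 * Maps an error status name (e.g. 'READING_EMPTY') to the localized message
 * that the dictionary tool shows for it.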
+ * @const + * @type {!Object.} + * @private + */ +mozc.DICTIONARY_TOOL_STATUS_ERRORS_ = { + 'FILE_NOT_FOUND': + chrome.i18n.getMessage('dictionaryToolStatuErrorFileNotFound'), + 'INVALID_FILE_FORMAT': + chrome.i18n.getMessage('dictionaryToolStatuErrorInvalidFileFormat'), + 'FILE_SIZE_LIMIT_EXCEEDED': + chrome.i18n.getMessage('dictionaryToolStatuErrorFileSizeLimitExceeded'), + 'DICTIONARY_SIZE_LIMIT_EXCEEDED': + chrome.i18n.getMessage( + 'dictionaryToolStatuErrorDictionarySizeLimitExceeded'), + 'ENTRY_SIZE_LIMIT_EXCEEDED': + chrome.i18n.getMessage('dictionaryToolStatuErrorEntrySizeLimitExceeded'), + 'DICTIONARY_NAME_EMPTY': + chrome.i18n.getMessage('dictionaryToolStatuErrorDictionaryNameEmpty'), + 'DICTIONARY_NAME_TOO_LONG': + chrome.i18n.getMessage('dictionaryToolStatuErrorDictionaryNameTooLong'), + 'DICTIONARY_NAME_CONTAINS_INVALID_CHARACTER': + chrome.i18n.getMessage( + 'dictionaryToolStatuErrorDictionaryNameContainsInvalidCharacter'), + 'DICTIONARY_NAME_DUPLICATED': + chrome.i18n.getMessage( + 'dictionaryToolStatuErrorDictionaryNameDuplicated'), + 'READING_EMPTY': + chrome.i18n.getMessage('dictionaryToolStatuErrorReadingEmpty'), + 'READING_TOO_LONG': + chrome.i18n.getMessage('dictionaryToolStatuErrorReadingTooLong'), + 'READING_CONTAINS_INVALID_CHARACTER': + chrome.i18n.getMessage( + 'dictionaryToolStatuErrorReadingContainsInvalidCharacter'), + 'WORD_EMPTY': chrome.i18n.getMessage('dictionaryToolStatuErrorWordEmpty'), + 'WORD_TOO_LONG': + chrome.i18n.getMessage('dictionaryToolStatuErrorWordTooLong'), + 'WORD_CONTAINS_INVALID_CHARACTER': + chrome.i18n.getMessage( + 'dictionaryToolStatuErrorWordContainsInvalidCharacter'), + 'IMPORT_TOO_MANY_WORDS': + chrome.i18n.getMessage('dictionaryToolStatuErrorImportTooManyWords'), + 'IMPORT_INVALID_ENTRIES': + chrome.i18n.getMessage('dictionaryToolStatuErrorImportInvalidEntries'), + 'NO_UNDO_HISTORY': + chrome.i18n.getMessage('dictionaryToolStatuErrorNoUndoHistory') +}; + +/** * The refresh interval of sync status in milliseconds. * @type {number} * @private @@ -375,14 +486,24 @@ */ mozc.ENABLE_CLOUD_SYNC_ = mozc.VERSION_.DEV && mozc.VERSION_.OFFICIAL; + +/** + * In Official Stable NaCl Mozc we use simple UI version dictionary tool. + * In simple UI version the user can only have one user dictionary named + * "user dictionary". + * @type {boolean} + * @private + */ +mozc.ENABLE_SIMPLE_DICTIONARY_TOOL_ = !mozc.VERSION_.DEV && + mozc.VERSION_.OFFICIAL; + /** * An empty constructor. * @param {!mozc.NaclMozc} naclMozc NaCl Mozc. - * @param {!HTMLDocument} domDocument Document object of the option page. - * @param {!Object} consoleObject Console object of the option page. + * @param {!Window} optionWindow Window object of the option page. * @constructor */ -mozc.OptionPage = function(naclMozc, domDocument, consoleObject) { +mozc.OptionPage = function(naclMozc, optionWindow) { /** * NaclMozc object. * This value will be null when the option page is unloaded. @@ -392,12 +513,20 @@ this.naclMozc_ = naclMozc; /** + * Window object of the option page. + * This value will be null when the option page is unloaded. + * @type {Window} + * @private + */ + this.window_ = optionWindow; + + /** * Document object of the option page. * This value will be null when the option page is unloaded. - * @type {HTMLDocument} + * @type {Document} * @private */ - this.document_ = domDocument; + this.document_ = optionWindow.document; /** * Console object of the option page. 
@@ -405,7 +534,7 @@ * @type {Object} * @private */ - this.console_ = consoleObject; + this.console_ = optionWindow.console; /** * Timer ID which is used to refresh sync status. @@ -421,6 +550,44 @@ * @private */ this.lastSyncedTimestamp_ = 0; + + /** + * User dictionary session id created in NaCl module. It is set when + * the dictionary tool is opened. + * @type {string} + * @private + */ + this.userDictionarySessionId_ = ''; + + /** + * Pos type list. (example: {type: 'NOUN', name: '名詞'}) + * @type {!Array.<{type: string, name: string}>} + * @private + */ + this.posList_ = []; + + /** + * Pos type name to display name map. (example: 'NOUN' -> '名詞') + * @type {!Object.} + * @private + */ + this.posNameMap_ = {}; + + /** + * Whether cloud sync is enabled or not. + * @type {boolean} + * @private + */ + this.syncEnabled_ = false; + + /** + * Stack of Esc key handlers. An Esc key handler is pushed to it when a + * dialog box is opened and the handler will be popped when the dialog box is + * closed. It is used to close the dialog when the user presses the Esc key. + * @type {!Array.} + * @private + */ + this.escapeKeyHandlers_ = []; }; /** @@ -433,7 +600,18 @@ } this.initPages_(); this.naclMozc_.callWhenInitialized((function() { - this.naclMozc_.getConfig(this.onConfigLoaded_.bind(this)); + this.naclMozc_.getPosList((function(message) { + this.posList_ = message['posList']; + var new_category_select = + this.document_.getElementById('dictionary_tool_category_new_select'); + for (var i = 0; i < this.posList_.length; ++i) { + this.posNameMap_[this.posList_[i]['type']] = this.posList_[i]['name']; + new_category_select.appendChild( + this.createOptionElement_(this.posList_[i]['name'], + this.posList_[i]['type'])); + } + this.naclMozc_.getConfig(this.onConfigLoaded_.bind(this)); + }).bind(this)); if (mozc.ENABLE_CLOUD_SYNC_) { this.updateSyncStatus_(); } @@ -452,6 +630,7 @@ clearTimeout(this.timeoutID_); this.timeoutID_ = undefined; } + this.window_ = null; this.naclMozc_ = null; this.document_ = null; this.console_ = null; @@ -491,13 +670,13 @@ optionTitle.name; } - // A checkbox (id:"CHECK_BOX_ID") is in a DIV (id:"CHECK_BOX_ID_div") and has - // a label (id:"CHECK_BOX_ID_label"). - //

+ // A checkbox (id:'CHECK_BOX_ID') is in a DIV (id:'CHECK_BOX_ID_div') and has + // a label (id:'CHECK_BOX_ID_label'). + //
// - // + // // - // + // // // //
@@ -513,12 +692,12 @@ 'change', this.saveConfig_.bind(this), true); } - // A selection (id:"SELECTION_ID") is in a DIV (id:"SELECTION_ID_div") and has - // a label (id:"SELECTION_ID_label") - //
- // - // ... - // + // A selection (id:'SELECTION_ID') is in a DIV (id:'SELECTION_ID_div') and has + // a label (id:'SELECTION_ID_label') + //
+ // + // ... + // // //
for (var i = 0; i < mozc.OPTION_SELECTIONS_.length; ++i) { @@ -540,12 +719,12 @@ } // We use a select element for number selection. - // A selection (id:"NUMBER_ID") is in a DIV (id:"NUMBER_ID_div") and has a - // label (id:"NUMBER_ID_label") - //
- // - // ... - // + // A selection (id:'NUMBER_ID') is in a DIV (id:'NUMBER_ID_div') and has a + // label (id:'NUMBER_ID_label') + //
+ // + // ... + // // //
for (var i = 0; i < mozc.OPTION_NUMBERS_.length; ++i) { @@ -564,27 +743,29 @@ 'change', this.saveConfig_.bind(this), true); } - // A button (id:"BUTTON_ID") is in a DIV (id:"BUTTON_ID_div"). - //
- // - // + // A button (id:'BUTTON_ID') is in a DIV (id:'BUTTON_ID_div'). + //
+ // + // // //
for (var i = 0; i < mozc.OPTION_BUTTONS_.length; ++i) { var optionButton = mozc.OPTION_BUTTONS_[i]; var buttonElement = this.document_.getElementById(optionButton.id); - buttonElement.value = optionButton.name; + if (optionButton.name) { + buttonElement.value = optionButton.name; + } buttonElement.addEventListener('click', this.onButtonClick_.bind(this, optionButton.id), true); } - // A sync checkbox (id:"CHECK_BOX_ID") has a label (id:"CHECK_BOX_ID_label"). + // A sync checkbox (id:'CHECK_BOX_ID') has a label (id:'CHECK_BOX_ID_label'). // - // + // // - // + // // // for (var i = 0; i < mozc.OPTION_SYNC_CHECKBOXES_.length; ++i) { @@ -593,11 +774,35 @@ optionCheckbox.name; } + this.document_.getElementById('user_dictionary_select').addEventListener( + 'change', this.onDictionarySelectChanged_.bind(this), true); + this.document_.getElementById('dictionary_tool_reading_new_input') + .addEventListener( + 'blur', this.onDictionaryNewEntryLostFocus_.bind(this), true); + this.document_.getElementById('dictionary_tool_word_new_input') + .addEventListener( + 'blur', this.onDictionaryNewEntryLostFocus_.bind(this), true); + this.document_.getElementById('dictionary_tool_category_new_select') + .addEventListener( + 'blur', this.onDictionaryNewEntryLostFocus_.bind(this), true); + this.document_.getElementById('dictionary_tool_comment_new_input') + .addEventListener( + 'blur', this.onDictionaryNewEntryLostFocus_.bind(this), true); + // Removes cloud_sync_div if cloud sync is not enabled. if (!mozc.ENABLE_CLOUD_SYNC_) { this.document_.getElementById('settings_div').removeChild( this.document_.getElementById('cloud_sync_div')); } + + // Hides menu of user dictionary if simple dictionary tool is enabled. + if (mozc.ENABLE_SIMPLE_DICTIONARY_TOOL_) { + this.document_.getElementById('dictionary_tool_menu_span').style.display = + 'none'; + } + + this.document_.addEventListener( + 'keydown', this.onKeyDown_.bind(this), false); }; /** @@ -666,6 +871,8 @@ this.document_.getElementById(optionNumber.id).value = value; } + this.syncEnabled_ = !!config['sync_config']; + this.document_.body.style.visibility = 'visible'; }; @@ -715,6 +922,19 @@ }; /** + * Called when the user presses a key. + * @param {Event} event Event object passed from browser. + * @private + */ +mozc.OptionPage.prototype.onKeyDown_ = function(event) { + if (event.keyCode == 27) { // Escape + if (this.escapeKeyHandlers_.length) { + this.escapeKeyHandlers_[this.escapeKeyHandlers_.length - 1](); + } + } +}; + +/** * Enables the dom element which is specified by id in the option page. * @param {string} id The element ID. * @private @@ -733,21 +953,256 @@ }; /** - * Shows the dom element which is specified by id in the option page. + * Shows the overlay element which is specified by id in the option page. * @param {string} id The element ID. * @private */ -mozc.OptionPage.prototype.showElementById_ = function(id) { +mozc.OptionPage.prototype.showOverlayElementById_ = function(id) { + this.freezeMainDiv_(); this.document_.getElementById(id).style.visibility = 'visible'; }; /** - * Hides the dom element which is specified by id in the option page. + * Hides the overlay element which is specified by id in the option page. * @param {string} id The element ID. * @private */ -mozc.OptionPage.prototype.hideElementById_ = function(id) { +mozc.OptionPage.prototype.hideOverlayElementById_ = function(id) { this.document_.getElementById(id).style.visibility = 'hidden'; + this.unfreezeMainDiv_(); +}; + +/** + * Shows a confirm dialog box. 
This method makes callerPageId element + * unfocusable while showing the dialog box. + * @param {string} callerPageId The ID of caller page. + * @param {string} title The title of the dialog box. + * @param {string} message The message in the dialog box. + * @param {!function(?boolean)} callback Function to be called with the result. + * @private + */ +mozc.OptionPage.prototype.showConfirm_ = function(callerPageId, + title, + message, + callback) { + this.setChildNodesUnfocusableByTabKeyById_(callerPageId); + var confirmOverlay = this.document_.createElement('div'); + confirmOverlay.classList.add('overlay', 'confirm_overlay'); + + var confirmPage = this.document_.createElement('div'); + confirmPage.classList.add('overlay_page', 'confirm_page'); + + var closeDiv = this.document_.createElement('div'); + closeDiv.classList.add('close_button'); + + var section = this.document_.createElement('section'); + + var confirmTitle = this.document_.createElement('div'); + confirmTitle.classList.add('confirm_title'); + confirmTitle.appendChild(this.document_.createTextNode(title)); + + var confirmMessage = this.document_.createElement('div'); + confirmMessage.classList.add('confirm_message_div'); + confirmMessage.appendChild(this.document_.createTextNode(message)); + + var confirmOkCancelDiv = this.document_.createElement('div'); + confirmOkCancelDiv.classList.add('confirm_ok_cancel_div'); + + var confirmOkButton = this.document_.createElement('input'); + confirmOkButton.classList.add('confirm_ok_button'); + confirmOkButton.type = 'button'; + confirmOkButton.value = chrome.i18n.getMessage('configDialogOk'); + + var confirmCancelButton = this.document_.createElement('input'); + confirmCancelButton.classList.add('confirm_cancel_button'); + confirmCancelButton.type = 'button'; + confirmCancelButton.value = chrome.i18n.getMessage('configDialogCancel'); + + confirmOkCancelDiv.appendChild(confirmCancelButton); + confirmOkCancelDiv.appendChild(confirmOkButton); + section.appendChild(confirmTitle); + section.appendChild(confirmMessage); + section.appendChild(confirmOkCancelDiv); + confirmPage.appendChild(closeDiv); + confirmPage.appendChild(section); + confirmOverlay.appendChild(confirmPage); + + var okCallback = + (function(overlay, pageId, callbackFunc) { + this.escapeKeyHandlers_.pop(); + this.document_.body.removeChild(overlay); + this.setChildNodesFocusableByTabKeyById_(pageId); + callbackFunc(true); + }).bind(this, confirmOverlay, callerPageId, callback); + var cancelCallback = + (function(overlay, pageId, callbackFunc) { + this.escapeKeyHandlers_.pop(); + this.document_.body.removeChild(overlay); + this.setChildNodesFocusableByTabKeyById_(pageId); + callbackFunc(false); + }).bind(this, confirmOverlay, callerPageId, callback); + closeDiv.addEventListener('click', cancelCallback, true); + confirmOkButton.addEventListener('click', okCallback, true); + confirmCancelButton.addEventListener('click', cancelCallback, true); + this.escapeKeyHandlers_.push(cancelCallback); + this.document_.body.appendChild(confirmOverlay); + confirmOkButton.focus(); +}; + +/** + * Shows a prompt dialog box. This method makes callerPageId element unfocusable + * while showing the dialog box. + * @param {string} callerPageId The ID of caller page. + * @param {string} title The title of the dialog box. + * @param {string} message The message in the dialog box. + * @param {string} defaultValue The default value. + * @param {!function(?string)} callback Function to be called with the result. 
+ * If the user clicks the cancel button the result will be null. + * @private + */ +mozc.OptionPage.prototype.showPrompt_ = function(callerPageId, + title, + message, + defaultValue, + callback) { + this.setChildNodesUnfocusableByTabKeyById_(callerPageId); + var promptOverlay = this.document_.createElement('div'); + promptOverlay.classList.add('overlay', 'prompt_overlay'); + + var promptPage = this.document_.createElement('div'); + promptPage.classList.add('overlay_page', 'prompt_page'); + + var closeDiv = this.document_.createElement('div'); + closeDiv.classList.add('close_button'); + + var section = this.document_.createElement('section'); + + var promptTitle = this.document_.createElement('div'); + promptTitle.classList.add('prompt_title'); + promptTitle.appendChild(this.document_.createTextNode(title)); + + var promptMessage = this.document_.createElement('div'); + promptMessage.classList.add('prompt_message_div'); + promptMessage.appendChild(this.document_.createTextNode(message)); + + var promptOkCancelDiv = this.document_.createElement('div'); + promptOkCancelDiv.classList.add('prompt_ok_cancel_div'); + + var promptInputDiv = this.document_.createElement('div'); + promptInputDiv.classList.add('prompt_input_div'); + + var promptInput = this.document_.createElement('input'); + promptInput.classList.add('prompt_input'); + promptInput.type = 'text'; + promptInput.value = defaultValue; + promptInputDiv.appendChild(promptInput); + + var promptOkButton = this.document_.createElement('input'); + promptOkButton.classList.add('prompt_ok_button'); + promptOkButton.type = 'button'; + promptOkButton.value = chrome.i18n.getMessage('configDialogOk'); + + var promptCancelButton = this.document_.createElement('input'); + promptCancelButton.classList.add('prompt_cancel_button'); + promptCancelButton.type = 'button'; + promptCancelButton.value = chrome.i18n.getMessage('configDialogCancel'); + + promptOkCancelDiv.appendChild(promptCancelButton); + promptOkCancelDiv.appendChild(promptOkButton); + section.appendChild(promptTitle); + section.appendChild(promptMessage); + section.appendChild(promptInputDiv); + section.appendChild(promptOkCancelDiv); + promptPage.appendChild(closeDiv); + promptPage.appendChild(section); + promptOverlay.appendChild(promptPage); + + var okCallback = + (function(overlay, pageId, callbackFunc, input) { + this.escapeKeyHandlers_.pop(); + this.document_.body.removeChild(overlay); + this.setChildNodesFocusableByTabKeyById_(pageId); + callbackFunc(input.value); + }).bind(this, promptOverlay, callerPageId, callback, promptInput); + var cancelCallback = + (function(overlay, pageId, callbackFunc) { + this.escapeKeyHandlers_.pop(); + this.document_.body.removeChild(overlay); + this.setChildNodesFocusableByTabKeyById_(pageId); + callbackFunc(null); + }).bind(this, promptOverlay, callerPageId, callback); + closeDiv.addEventListener('click', cancelCallback, true); + promptOkButton.addEventListener('click', okCallback, true); + promptCancelButton.addEventListener('click', cancelCallback, true); + this.escapeKeyHandlers_.push(cancelCallback); + this.document_.body.appendChild(promptOverlay); + promptInput.focus(); +}; + +/** + * Shows an alert dialog box. This method makes callerPageId element unfocusable + * while showing the dialog box. + * @param {string} callerPageId The ID of caller page. + * @param {string} title The title of the dialog box. + * @param {string} message The message in the dialog box. + * @param {!function()=} opt_callback Function to be called when closed. 
+ * @private + */ +mozc.OptionPage.prototype.showAlert_ = function(callerPageId, + title, + message, + opt_callback) { + this.setChildNodesUnfocusableByTabKeyById_(callerPageId); + var alertOverlay = this.document_.createElement('div'); + alertOverlay.classList.add('overlay', 'alert_overlay'); + + var alertPage = this.document_.createElement('div'); + alertPage.classList.add('overlay_page', 'alert_page'); + + var closeDiv = this.document_.createElement('div'); + closeDiv.classList.add('close_button'); + + var section = this.document_.createElement('section'); + + var alertTitle = this.document_.createElement('div'); + alertTitle.classList.add('alert_title'); + alertTitle.appendChild(this.document_.createTextNode(title)); + + var alertMessage = this.document_.createElement('div'); + alertMessage.classList.add('alert_message_div'); + alertMessage.appendChild(this.document_.createTextNode(message)); + + var alertOkDiv = this.document_.createElement('div'); + alertOkDiv.classList.add('alert_ok_div'); + + var alertOkButton = this.document_.createElement('input'); + alertOkButton.classList.add('alert_ok_button'); + alertOkButton.type = 'button'; + alertOkButton.value = chrome.i18n.getMessage('configDialogOk'); + + alertOkDiv.appendChild(alertOkButton); + section.appendChild(alertTitle); + section.appendChild(alertMessage); + section.appendChild(alertOkDiv); + alertPage.appendChild(closeDiv); + alertPage.appendChild(section); + alertOverlay.appendChild(alertPage); + + var okCallback = + (function(overlay, pageId, opt_callbackFunc) { + this.escapeKeyHandlers_.pop(); + this.document_.body.removeChild(overlay); + this.setChildNodesFocusableByTabKeyById_(pageId); + if (opt_callbackFunc) { + opt_callbackFunc(); + } + }).bind(this, alertOverlay, callerPageId, opt_callback); + closeDiv.addEventListener('click', okCallback, true); + alertOkButton.addEventListener('click', okCallback, true); + + this.escapeKeyHandlers_.push(okCallback); + this.document_.body.appendChild(alertOverlay); + alertOkButton.focus(); }; /** @@ -756,62 +1211,137 @@ * @private */ mozc.OptionPage.prototype.onButtonClick_ = function(buttonId) { - if (buttonId == 'clear_user_history') { - this.onClearUserHistoryClicked_(); + if (buttonId == 'clear_history_open') { + this.onClearHistoryOpenClicked_(); + } else if (buttonId == 'clear_history_close') { + this.onClearHistoryCancelClicked_(); + } else if (buttonId == 'clear_history_ok') { + this.onClearHistoryOkClicked_(); + } else if (buttonId == 'clear_history_cancel') { + this.onClearHistoryCancelClicked_(); } else if (buttonId == 'sync_config_cancel') { this.onSyncConfigCancelClicked_(); } else if (buttonId == 'sync_config_ok') { this.onSyncConfigOkClicked_(); + } else if (buttonId == 'sync_config_close') { + this.onSyncConfigCancelClicked_(); } else if (buttonId == 'sync_toggle_button') { this.onSyncToggleButtonClicked_(); } else if (buttonId == 'sync_customization_button') { this.onSyncCustomizationButtonClicked_(); + } else if (buttonId == 'dictionary_tool_open_button') { + this.onDictionaryToolOpenButtonClicked_(); + } else if (buttonId == 'dictionary_tool_done_button') { + this.onDictionaryToolDoneButtonClicked_(); + } else if (buttonId == 'dictionary_tool_close') { + this.onDictionaryToolDoneButtonClicked_(); + } else if (buttonId == 'create_dictionary_button') { + this.onDictionaryToolCreateButtonClicked_(); + } else if (buttonId == 'rename_dictionary_button') { + this.onDictionaryToolRenameButtonClicked_(); + } else if (buttonId == 'delete_dictionary_button') { + 
this.onDictionaryToolDeleteButtonClicked_(); + } else if (buttonId == 'export_dictionary_button') { + this.onDictionaryToolExportButtonClicked_(); } }; /** - * Called when clear_user_history button is clicked. + * Called when clear_history_open button is clicked. + * This method opens the clear history dialog. * @private */ -mozc.OptionPage.prototype.onClearUserHistoryClicked_ = function() { - if (!confirm(chrome.i18n.getMessage('configClearUserHistoryMessage'))) { - return; +mozc.OptionPage.prototype.onClearHistoryOpenClicked_ = function() { + this.escapeKeyHandlers_.push(this.onClearHistoryCancelClicked_.bind(this)); + this.document_.getElementById('clear_history_conversion_history').checked = + true; + this.document_.getElementById('clear_history_suggestion_history').checked = + true; + this.showOverlayElementById_('clear_history_overlay'); + this.document_.getElementById('clear_history_cancel').focus(); +}; + +/** + * Called when clear_history_ok button is clicked. + * This method calls CLEAR_USER_HISTORY and CLEAR_USER_PREDICTION commands + * according to clear_history_conversion_history and + * clear_history_suggestion_history check boxes. + * @private + */ +mozc.OptionPage.prototype.onClearHistoryOkClicked_ = function() { + if (this.document_.getElementById( + 'clear_history_conversion_history').checked) { + this.naclMozc_.clearUserHistory(); } - this.disableElementById_('clear_user_history'); - this.naclMozc_.clearUserHistory((function(response) { - this.naclMozc_.clearUserPrediction((function(response) { - this.enableElementById_('clear_user_history'); - }).bind(this)); - }).bind(this)); + if (this.document_.getElementById( + 'clear_history_suggestion_history').checked) { + this.naclMozc_.clearUserPrediction(); + } + this.escapeKeyHandlers_.pop(); + this.hideOverlayElementById_('clear_history_overlay'); + this.document_.getElementById('clear_history_open').focus(); }; /** - * Called when sync_config_cancel button is clicked. + * Called when clear_history_cancel button or clear_history_close button is + * clicked or the user presses the Esc key while the clear history dialog is + * open. This method closes the clear history dialog. * @private */ -mozc.OptionPage.prototype.onSyncConfigCancelClicked_ = function() { +mozc.OptionPage.prototype.onClearHistoryCancelClicked_ = function() { + this.escapeKeyHandlers_.pop(); + this.hideOverlayElementById_('clear_history_overlay'); + this.document_.getElementById('clear_history_open').focus(); +}; + +/** + * Updates the cloud sync status and closes the sync dialog. + * @param {!function()=} opt_callback Function to be called when finished. + * @private + */ +mozc.OptionPage.prototype.updateSyncStatusAndCloseSyncDialog_ = + function(opt_callback) { this.naclMozc_.getCloudSyncStatus((function(res) { this.displaySyncStatus_(res); - this.hideElementById_('sync_config_overlay'); + this.escapeKeyHandlers_.pop(); + this.hideOverlayElementById_('sync_config_overlay'); + this.document_.getElementById('sync_customization_button').focus(); + if (opt_callback) { + opt_callback(); + } }).bind(this)); }; /** + * Called when sync_config_cancel button or sync_config_close button is clicked + * or the user presses the Esc key while the sync dialog is open. + * This method closes the sync dialog. + * @private + */ +mozc.OptionPage.prototype.onSyncConfigCancelClicked_ = function() { + this.updateSyncStatusAndCloseSyncDialog_(); +}; + +/** * Called when sync_config_ok button is clicked. + * This method starts sync if sync_settings or sync_user_dictionary is checked.
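 * If neither checkbox is checked, the stored sync_config is removed and an
 * empty auth code is set, which stops the sync.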
* @private */ mozc.OptionPage.prototype.onSyncConfigOkClicked_ = function() { if (!this.document_.getElementById('sync_settings').checked && !this.document_.getElementById('sync_user_dictionary').checked) { - // Stop sync - this.naclMozc_.addAuthCode( - {'access_token': ''}, - (function() { - this.naclMozc_.getCloudSyncStatus((function(res) { - this.displaySyncStatus_(res); - this.hideElementById_('sync_config_overlay'); - }).bind(this)); - }).bind(this)); + // Stop sync. + this.naclMozc_.getConfig((function(response) { + var config = response['output']['config']; + delete config['sync_config']; + this.syncEnabled_ = false; + this.naclMozc_.setConfig( + config, + this.naclMozc_.addAuthCode.bind( + this.naclMozc_, + {'access_token': ''}, + this.updateSyncStatusAndCloseSyncDialog_.bind(this, undefined))); + }).bind(this)); return; } this.naclMozc_.getConfig((function(response) { @@ -823,17 +1353,16 @@ this.document_.getElementById('sync_settings').checked; config['sync_config']['use_user_dictionary_sync'] = this.document_.getElementById('sync_user_dictionary').checked; + this.syncEnabled_ = true; this.naclMozc_.setConfig( config, (function() { this.naclMozc_.sendReload(); this.getAuthTokenAndStartCloudSync_( - this.naclMozc_.getCloudSyncStatus.bind( - this.naclMozc_, - (function(res) { - this.displaySyncStatus_(res); - this.hideElementById_('sync_config_overlay'); - }).bind(this))); + this.naclMozc_.getCloudSyncStatus.bind( + this.naclMozc_, + this.updateSyncStatusAndCloseSyncDialog_.bind(this, + undefined))); }).bind(this)); }).bind(this)); }; @@ -848,51 +1377,830 @@ function(opt_callback) { chrome.identity.getAuthToken( {interactive: true}, - (function(token) { + /** @param {string=} opt_token */ + (function(opt_token) { this.naclMozc_.addAuthCode( - {'access_token': token}, + {'access_token': opt_token ? opt_token : ''}, this.naclMozc_.startCloudSync.bind(this.naclMozc_, opt_callback)); }).bind(this)); }; /** * Called when sync_toggle_button button is clicked. + * This method opens sync dialog if the current sync status is NOSYNC. + * Otherwise this method stops sync. * @private */ mozc.OptionPage.prototype.onSyncToggleButtonClicked_ = function() { this.disableElementById_('sync_toggle_button'); this.disableElementById_('sync_customization_button'); - this.naclMozc_.getCloudSyncStatus((function(response) { - if (response['output']['cloud_sync_status']['global_status'] == 'NOSYNC') { - this.document_.getElementById('sync_settings').checked = true; - this.document_.getElementById('sync_user_dictionary').checked = true; - this.showElementById_('sync_config_overlay'); - } else { - // Stop sync - this.naclMozc_.addAuthCode( - {'access_token': ''}, - (function() { - this.naclMozc_.getCloudSyncStatus( - this.displaySyncStatus_.bind(this)); - }).bind(this)); - } - }).bind(this)); + if (!this.syncEnabled_) { + // Show sync config dialog. + this.escapeKeyHandlers_.push(this.onSyncConfigCancelClicked_.bind(this)); + this.document_.getElementById('sync_settings').checked = true; + this.document_.getElementById('sync_user_dictionary').checked = true; + this.showOverlayElementById_('sync_config_overlay'); + this.document_.getElementById('sync_config_cancel').focus(); + } else { + // Stop sync. 
+ this.naclMozc_.getConfig((function(response) { + var config = response['output']['config']; + delete config['sync_config']; + this.syncEnabled_ = false; + this.naclMozc_.setConfig( + config, + this.naclMozc_.addAuthCode.bind( + this.naclMozc_, + {'access_token': ''}, + this.naclMozc_.getCloudSyncStatus.bind( + this.naclMozc_, + this.displaySyncStatus_.bind(this)))); + }).bind(this)); + } }; /** * Called when sync_customization_button button is clicked. + * This method opens sync dialog. * @private */ mozc.OptionPage.prototype.onSyncCustomizationButtonClicked_ = function() { this.disableElementById_('sync_toggle_button'); this.disableElementById_('sync_customization_button'); this.naclMozc_.getConfig((function(response) { + this.escapeKeyHandlers_.push(this.onSyncConfigCancelClicked_.bind(this)); var sync_config = response['output']['config']['sync_config']; this.document_.getElementById('sync_settings').checked = sync_config && sync_config['use_config_sync']; this.document_.getElementById('sync_user_dictionary').checked = sync_config && sync_config['use_user_dictionary_sync']; - this.showElementById_('sync_config_overlay'); + this.showOverlayElementById_('sync_config_overlay'); + this.document_.getElementById('sync_config_cancel').focus(); + }).bind(this)); +}; + +/** + * Called when dictionary_tool_open_button button is clicked. + * This method creates a dictionary session and calls LOAD dictionary command + * and gets the user dictionary storage from NaCl Mozc module and shows a + * dictionary tool. + * @private + */ +mozc.OptionPage.prototype.onDictionaryToolOpenButtonClicked_ = function() { + this.disableElementById_('dictionary_tool_open_button'); + this.sendUserDictionaryCommand_( + {'type': 'CREATE_SESSION'}, + (function(response) { + this.userDictionarySessionId_ = response['session_id']; + this.sendUserDictionaryCommand_( + { + 'type': 'LOAD', + 'session_id': this.userDictionarySessionId_, + 'ensure_non_empty_storage': mozc.ENABLE_SIMPLE_DICTIONARY_TOOL_ + }, + this.loadStorage_.bind( + this, + (function() { + this.escapeKeyHandlers_.push( + this.onDictionaryToolDoneButtonClicked_.bind(this)); + this.showOverlayElementById_('dictionary_tool_overlay'); + this.document_.getElementById('dictionary_tool_reading_new_input') + .focus(); + }).bind(this))); + }).bind(this)); +}; + +/** + * Updates the dictionary list in 'user_dictionary_select' element according to + * dictionaries_. + * @private + */ +mozc.OptionPage.prototype.updateDictionaryList_ = function() { + var selectionElement = + this.document_.getElementById('user_dictionary_select'); + selectionElement.disabled = !this.dictionaries_.length; + var lastValue = selectionElement.value; + while (selectionElement.hasChildNodes()) { + selectionElement.removeChild(selectionElement.firstChild); + } + for (var i = 0; i < this.dictionaries_.length; ++i) { + var dictionaryName = this.dictionaries_[i].name; + // 'Sync Dictionary' is the default name of sync-able dictionary defined in + // user_dictionary_storage.cc. + if (dictionaryName == 'Sync Dictionary') { + dictionaryName = + chrome.i18n.getMessage('dictionaryToolSyncableDictionaryName'); + } + var newOption = + this.createOptionElement_(dictionaryName, + i.toString()); + if (i == lastValue) { + newOption.selected = true; + } + selectionElement.appendChild(newOption); + } +}; + +/** + * Gets the user dictionary storage from NaCl Mozc module and stores it to + * dictionaries_ and update the dictionary list and content. 
+ * @param {!function()=} opt_callback Function to be called when finished. + * @private + */ +mozc.OptionPage.prototype.loadStorage_ = function(opt_callback) { + this.getStorage_((function() { + this.updateDictionaryList_(); + this.updateDictionaryContent_(); + if (opt_callback) { + opt_callback(); + } + }).bind(this)); +}; + +/** + * Gets the user dictionary storage from NaCl Mozc module and stores it to + * dictionaries_. + * @param {!function()=} opt_callback Function to be called when finished. + * @private + */ +mozc.OptionPage.prototype.getStorage_ = function(opt_callback) { + this.sendUserDictionaryCommand_( + { + 'type': 'GET_STORAGE', + 'session_id': this.userDictionarySessionId_ + }, + (function(response) { + this.dictionaries_ = response['storage']['dictionaries']; + if (!this.dictionaries_) { + this.dictionaries_ = []; + } + if (opt_callback) { + opt_callback(); + } + }).bind(this)); +}; + +/** + * Called when dictionary_tool_done_button button or dictionary_tool_close + * button is clicked or the user presses Esc key while dictionary tool is + * opened. + * This method closes the dictionary tool. + * @private + */ +mozc.OptionPage.prototype.onDictionaryToolDoneButtonClicked_ = function() { + this.escapeKeyHandlers_.pop(); + this.enableElementById_('dictionary_tool_open_button'); + this.hideOverlayElementById_('dictionary_tool_overlay'); + this.document_.getElementById('dictionary_tool_open_button').focus(); +}; + +/** + * Called when create_dictionary_button button is clicked. + * This method displays a dialog box that prompts the user to input the new + * dictionary name and creates a dictionary. + * @private + */ +mozc.OptionPage.prototype.onDictionaryToolCreateButtonClicked_ = function() { + this.showPrompt_( + 'dictionary_tool_page', + chrome.i18n.getMessage('appName'), + chrome.i18n.getMessage('dictionaryToolDictionaryName'), + '', + (function(dictName) { + if (!dictName) { + return; + } + this.sendUserDictionaryCommand_( + { + 'type': 'CREATE_DICTIONARY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_name': dictName + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } else { + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + // TODO(horo): getStorage_ is a heavy operation. We shoud only + // update the created dictionary. + this.getStorage_((function() { + this.updateDictionaryList_(); + this.document_.getElementById('user_dictionary_select').value = + this.dictionaries_.length - 1; + this.updateDictionaryContent_(); + }).bind(this)); + } + }).bind(this)); + }).bind(this)); +}; + +/** + * Returns the selected dictionary index in user_dictionary_select element. + * @return {number} The selected dictionary index. The return value is NaN + * when there is no dictionary. + * @private + */ +mozc.OptionPage.prototype.getSelectedDictionaryIndex_ = function() { + return parseInt( + this.document_.getElementById('user_dictionary_select').value, + 10); +}; + +/** + * Called when rename_dictionary_button button is clicked. + * This method displays a dialog box that prompts the user to input the new + * dictionary name and renames the selected dictionary. 
+ * @private + */ +mozc.OptionPage.prototype.onDictionaryToolRenameButtonClicked_ = function() { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + var dictionaryId = this.dictionaries_[dictIndex]['id']; + this.showPrompt_( + 'dictionary_tool_page', + chrome.i18n.getMessage('appName'), + chrome.i18n.getMessage('dictionaryToolDictionaryName'), + this.dictionaries_[dictIndex]['name'], + (function(dictName) { + if (!dictName) { + return; + } + this.sendUserDictionaryCommand_( + { + 'type': 'RENAME_DICTIONARY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': dictionaryId, + 'dictionary_name': dictName + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } else { + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + // TODO(horo): loadStorage_ is a heavy operation. We shoud only + // update the changed dictionary. + this.loadStorage_(); + } + }).bind(this)); + }).bind(this)); +}; + +/** + * Called when delete_dictionary_button button is clicked. + * This method deletes the selecetd dictionary. + * @private + */ +mozc.OptionPage.prototype.onDictionaryToolDeleteButtonClicked_ = function() { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + this.showConfirm_( + 'dictionary_tool_page', + chrome.i18n.getMessage('appName'), + chrome.i18n.getMessage('dictionaryToolDeleteDictionaryConfirm', + this.dictionaries_[dictIndex]['name']), + (function(result) { + if (!result) { + return; + } + var dictionaryId = this.dictionaries_[dictIndex]['id']; + this.sendUserDictionaryCommand_( + { + 'type': 'DELETE_DICTIONARY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': dictionaryId + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } else { + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + // TODO(horo): loadStorage_ is a heavy operation. We shoud only + // update the deleted dictionary. + this.loadStorage_(); + } + }).bind(this)); + }).bind(this)); +}; + +/** + * Called when export_dictionary_button button is clicked. + * This method exports the selecetd dictionary. + * @private + */ +mozc.OptionPage.prototype.onDictionaryToolExportButtonClicked_ = function() { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + var data = ''; + var entries = this.dictionaries_[dictIndex]['entries']; + for (var i = 0; i < entries.length; ++i) { + data += entries[i]['key'] + '\t' + + entries[i]['value'] + '\t' + + this.posNameMap_[entries[i]['pos']] + '\t' + + entries[i]['comment'] + '\n'; + } + var blob = new Blob([data]); + var a = this.document_.createElement('a'); + a.href = this.window_.URL.createObjectURL(blob); + a.download = 'user_dict.txt'; + a.style.display = 'none'; + this.document_.body.appendChild(a); + a.click(); + this.document_.body.removeChild(a); +}; + +/** + * Called when dictionary entry element is focused. + * This method shows the input elements for 'reading' and 'word' and 'comment', + * and the select element for 'category'. + * @param {string} type Entry type ('reading', 'word', 'category' or 'comment'). 
+ * @param {number} index Index of the entry. + * @param {Event} event Event object passed from browser. + * @private + */ +mozc.OptionPage.prototype.onDictionaryEntryFocus_ = function(type, + index, + event) { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + var entryNode = + this.document_.getElementById('dictionary_tool_entry_' + index); + if (entryNode.classList.contains('dictionary_tool_entry_selected')) { + return; + } + var readingInput = + this.document_.getElementById('dictionary_tool_reading_input_' + index); + var wordInput = + this.document_.getElementById('dictionary_tool_word_input_' + index); + var categorySelect = + this.document_.getElementById('dictionary_tool_category_input_' + index); + var commentInput = + this.document_.getElementById('dictionary_tool_comment_input_' + index); + while (categorySelect.childNodes.length) { + categorySelect.removeChild(categorySelect.firstChild); + } + for (var i = 0; i < this.posList_.length; ++i) { + categorySelect.appendChild( + this.createOptionElement_(this.posList_[i]['name'], + this.posList_[i]['type'])); + } + var entry = + this.dictionaries_[dictIndex]['entries'][index]; + readingInput.value = entry['key']; + wordInput.value = entry['value']; + categorySelect.value = entry['pos']; + commentInput.value = entry['comment']; + entryNode.classList.add('dictionary_tool_entry_selected'); + var focusInput = this.document_.getElementById( + 'dictionary_tool_' + type + '_input_' + index); + focusInput.focus(); + this.document_.getElementById('dictionary_tool_reading_' + index).tabIndex = + -1; + this.document_.getElementById('dictionary_tool_word_' + index).tabIndex = + -1; + this.document_.getElementById('dictionary_tool_category_' + index).tabIndex = + -1; + this.document_.getElementById('dictionary_tool_comment_' + index).tabIndex = + -1; +}; + +/** + * Called when dictionary entry element lost focus. + * This method hides the input elements for 'reading' and 'word' and 'comment', + * and the select element for 'category'. And also this method saves the entry. + * @param {string} type Entry type ('reading', 'word', 'category' or 'comment'). + * @param {number} index Index of the entry. + * @param {Event} event Event object passed from browser. 
+ * @private + */ +mozc.OptionPage.prototype.onDictionaryEntryBlur_ = + function(type, index, event) { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + if (event && event.relatedTarget && + (event.relatedTarget.id == 'dictionary_tool_reading_input_' + index || + event.relatedTarget.id == 'dictionary_tool_word_input_' + index || + event.relatedTarget.id == 'dictionary_tool_category_input_' + index || + event.relatedTarget.id == 'dictionary_tool_comment_input_' + index)) { + return; + } + var entryNode = + this.document_.getElementById('dictionary_tool_entry_' + index); + if (!entryNode.classList.contains('dictionary_tool_entry_selected')) { + return; + } + var readingInput = + this.document_.getElementById('dictionary_tool_reading_input_' + index); + var wordInput = + this.document_.getElementById('dictionary_tool_word_input_' + index); + var categorySelect = + this.document_.getElementById('dictionary_tool_category_input_' + index); + var commentInput = + this.document_.getElementById('dictionary_tool_comment_input_' + index); + var readingStatic = + this.document_.getElementById('dictionary_tool_reading_static_' + index); + var wordStatic = + this.document_.getElementById('dictionary_tool_word_static_' + index); + var categoryStatic = + this.document_.getElementById('dictionary_tool_category_static_' + index); + var commentStatic = + this.document_.getElementById('dictionary_tool_comment_static_' + index); + + this.document_.getElementById('dictionary_tool_reading_' + index).tabIndex = + 0; + this.document_.getElementById('dictionary_tool_word_' + index).tabIndex = + 0; + this.document_.getElementById('dictionary_tool_category_' + index).tabIndex = + 0; + this.document_.getElementById('dictionary_tool_comment_' + index).tabIndex = + 0; + var oldEntry = this.dictionaries_[dictIndex]['entries'][index]; + if (readingInput.value == oldEntry['key'] && + wordInput.value == oldEntry['value'] && + commentInput.value == oldEntry['comment'] && + categorySelect.value == oldEntry['pos']) { + entryNode.classList.remove('dictionary_tool_entry_selected'); + return; + } + this.sendUserDictionaryCommand_( + { + 'type': 'EDIT_ENTRY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': this.dictionaries_[dictIndex]['id'], + 'entry_index': [index], + 'entry': { + 'key': readingInput.value, + 'value': wordInput.value, + 'comment': commentInput.value, + 'pos': categorySelect.value + } + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + this.sendUserDictionaryCommand_( + { + 'type': 'GET_ENTRY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': this.dictionaries_[dictIndex]['id'], + 'entry_index': [index] + }, + (function(response) { + this.dictionaries_[dictIndex]['entries'][index] = response['entry']; + var entry = + this.dictionaries_[dictIndex]['entries'][index]; + readingStatic.innerText = entry['key']; + wordStatic.innerText = entry['value']; + categoryStatic.innerText = this.posNameMap_[entry['pos']]; + commentStatic.innerText = entry['comment']; + entryNode.classList.remove('dictionary_tool_entry_selected'); + }).bind(this)); + }).bind(this)); +}; + +/** + * Called when dictionary_tool_entry_delete_button is clicked. + * This method deletes the entry in the dictionary. 
+ * @param {number} index Index of the dictionary entry. + * @private + */ +mozc.OptionPage.prototype.onDictionaryEntryDeleteClick_ = function(index) { + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + this.sendUserDictionaryCommand_( + { + 'type': 'DELETE_ENTRY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': this.dictionaries_[dictIndex]['id'], + 'entry_index': [index] + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } else { + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + // TODO(horo): loadStorage_ is a heavy operation. We shoud only update + // the deleted entry. + this.loadStorage_(); + } + }).bind(this)); +}; + +/** + * Shows dictionary tool error. + * @param {string} status Status string of dictionary tool. + * @private + */ +mozc.OptionPage.prototype.showDictionaryToolError_ = function(status) { + var message = mozc.DICTIONARY_TOOL_STATUS_ERRORS_[status]; + if (!message) { + message = chrome.i18n.getMessage('dictionaryToolStatuErrorGeneral') + + '[' + status + ']'; + } + this.showAlert_('dictionary_tool_page', + 'Error', + message); +}; + +/** + * Updates the dictionary entry list in 'dictionary_tool_current_area' element + * according to 'user_dictionary_select' element and dictionaries_. + * @private + */ +mozc.OptionPage.prototype.updateDictionaryContent_ = function() { + var dictIndex = this.getSelectedDictionaryIndex_(); + var currentArea = + this.document_.getElementById('dictionary_tool_current_area'); + if (isNaN(dictIndex)) { + for (var i = 0;; ++i) { + var entryDiv = + this.document_.getElementById('dictionary_tool_entry_' + i); + if (!entryDiv) { + break; + } + currentArea.removeChild(entryDiv); + } + this.disableElementById_('rename_dictionary_button'); + this.disableElementById_('delete_dictionary_button'); + this.disableElementById_('export_dictionary_button'); + this.disableElementById_('dictionary_tool_reading_new_input'); + this.disableElementById_('dictionary_tool_word_new_input'); + this.disableElementById_('dictionary_tool_category_new_select'); + this.disableElementById_('dictionary_tool_comment_new_input'); + return; + } + var dictionary = this.dictionaries_[dictIndex]; + if (dictionary['syncable']) { + this.disableElementById_('rename_dictionary_button'); + this.disableElementById_('delete_dictionary_button'); + } else { + this.enableElementById_('rename_dictionary_button'); + this.enableElementById_('delete_dictionary_button'); + } + this.enableElementById_('export_dictionary_button'); + this.enableElementById_('dictionary_tool_reading_new_input'); + this.enableElementById_('dictionary_tool_word_new_input'); + this.enableElementById_('dictionary_tool_category_new_select'); + this.enableElementById_('dictionary_tool_comment_new_input'); + if (!dictionary['entries']) { + dictionary['entries'] = []; + } + for (var i = 0; i < dictionary['entries'].length; ++i) { + var entryDiv = this.document_.getElementById('dictionary_tool_entry_' + i); + if (!entryDiv) { + entryDiv = this.createUserDictionaryEntryDiv_(i); + } + currentArea.appendChild(entryDiv); + this.document_.getElementById( + 'dictionary_tool_reading_static_' + i).innerText = + dictionary['entries'][i]['key']; + this.document_.getElementById( + 'dictionary_tool_word_static_' + i).innerText = + dictionary['entries'][i]['value']; 
+ this.document_.getElementById( + 'dictionary_tool_category_static_' + i).innerText = + this.posNameMap_[dictionary['entries'][i]['pos']]; + this.document_.getElementById( + 'dictionary_tool_comment_static_' + i).innerText = + dictionary['entries'][i]['comment']; + } + for (var i = dictionary['entries'].length;; ++i) { + var entryDiv = this.document_.getElementById('dictionary_tool_entry_' + i); + if (!entryDiv) { + break; + } + currentArea.removeChild(entryDiv); + } +}; + +/** + * Creates a dictionary entry element like the followings. + *
+ *   (markup example stripped in this copy; the structure built below is a
+ *    div.dictionary_tool_entry containing reading, word, category and comment
+ *    cells, each a focusable div holding a "static_text" div plus a normally
+ *    hidden text input (a select for the category), followed by a div with
+ *    the entry's delete button.)
+ * @param {number} index Index of the dictionary entry. + * @return {!Element} Created element. + * @private + */ +mozc.OptionPage.prototype.createUserDictionaryEntryDiv_ = function(index) { + var entryDiv = this.document_.createElement('div'); + entryDiv.id = 'dictionary_tool_entry_' + index; + entryDiv.classList.add('dictionary_tool_entry'); + var readingDiv = this.document_.createElement('div'); + var readingStaticDiv = this.document_.createElement('div'); + var readingInput = this.document_.createElement('input'); + readingDiv.id = 'dictionary_tool_reading_' + index; + readingDiv.classList.add('dictionary_tool_reading'); + readingDiv.tabIndex = 0; + readingDiv.addEventListener( + 'focus', + this.onDictionaryEntryFocus_.bind(this, 'reading', index), + true); + readingStaticDiv.id = 'dictionary_tool_reading_static_' + index; + readingStaticDiv.classList.add('static_text'); + readingInput.id = 'dictionary_tool_reading_input_' + index; + readingInput.type = 'text'; + readingInput.classList.add('dictionary_tool_entry_input'); + readingInput.addEventListener( + 'blur', + this.onDictionaryEntryBlur_.bind(this, 'reading', index), + true); + readingDiv.appendChild(readingStaticDiv); + readingDiv.appendChild(readingInput); + entryDiv.appendChild(readingDiv); + var wordDiv = this.document_.createElement('div'); + var wordStaticDiv = this.document_.createElement('div'); + var wordInput = this.document_.createElement('input'); + wordDiv.id = 'dictionary_tool_word_' + index; + wordDiv.classList.add('dictionary_tool_word'); + wordDiv.tabIndex = 0; + wordDiv.addEventListener( + 'focus', + this.onDictionaryEntryFocus_.bind(this, 'word', index), + true); + wordStaticDiv.id = 'dictionary_tool_word_static_' + index; + wordStaticDiv.classList.add('static_text'); + wordInput.id = 'dictionary_tool_word_input_' + index; + wordInput.type = 'text'; + wordInput.classList.add('dictionary_tool_entry_input'); + wordInput.addEventListener( + 'blur', + this.onDictionaryEntryBlur_.bind(this, 'word', index), + true); + wordDiv.appendChild(wordStaticDiv); + wordDiv.appendChild(wordInput); + entryDiv.appendChild(wordDiv); + var categoryDiv = this.document_.createElement('div'); + var categoryStaticDiv = this.document_.createElement('div'); + var categorySelect = this.document_.createElement('select'); + categoryDiv.id = 'dictionary_tool_category_' + index; + categoryDiv.classList.add('dictionary_tool_category'); + categoryDiv.tabIndex = 0; + categoryDiv.addEventListener( + 'focus', + this.onDictionaryEntryFocus_.bind(this, 'category', index), + true); + categoryStaticDiv.id = 'dictionary_tool_category_static_' + index; + categoryStaticDiv.classList.add('static_text'); + categorySelect.id = 'dictionary_tool_category_input_' + index; + categorySelect.classList.add('dictionary_tool_entry_select'); + categorySelect.addEventListener( + 'blur', + this.onDictionaryEntryBlur_.bind(this, 'category', index), + true); + categoryDiv.appendChild(categoryStaticDiv); + categoryDiv.appendChild(categorySelect); + entryDiv.appendChild(categoryDiv); + var commentDiv = this.document_.createElement('div'); + var commentStaticDiv = this.document_.createElement('div'); + var commentInput = this.document_.createElement('input'); + commentDiv.id = 'dictionary_tool_comment_' + index; + commentDiv.classList.add('dictionary_tool_comment'); + commentDiv.tabIndex = 0; + commentDiv.addEventListener( + 'focus', + this.onDictionaryEntryFocus_.bind(this, 'comment', index), + true); + commentStaticDiv.id = 'dictionary_tool_comment_static_' + index; + 
commentStaticDiv.classList.add('static_text'); + commentInput.id = 'dictionary_tool_comment_input_' + index; + commentInput.type = 'text'; + commentInput.classList.add('dictionary_tool_entry_input'); + commentInput.addEventListener( + 'blur', + this.onDictionaryEntryBlur_.bind(this, 'comment', index), + true); + commentDiv.appendChild(commentStaticDiv); + commentDiv.appendChild(commentInput); + entryDiv.appendChild(commentDiv); + var deleteDiv = this.document_.createElement('div'); + var deleteButton = this.document_.createElement('button'); + deleteDiv.classList.add('dictionary_tool_entry_delete_button_div'); + deleteButton.classList.add('dictionary_tool_entry_delete_button'); + deleteButton.addEventListener( + 'click', + this.onDictionaryEntryDeleteClick_.bind(this, index), + true); + deleteDiv.appendChild(deleteButton); + entryDiv.appendChild(deleteDiv); + return entryDiv; +}; + +/** + * Called when user_dictionary_select is changed. + * @private + */ +mozc.OptionPage.prototype.onDictionarySelectChanged_ = function() { + this.updateDictionaryContent_(); +}; + +/** + * Called when dictionary tool new entry input element lost focus. + * If the new reading and the new word are not empty this method creates a new + * entry in the selected dictionary. + * @param {Event} event Event object passed from browser. + * @private + */ +mozc.OptionPage.prototype.onDictionaryNewEntryLostFocus_ = function(event) { + if (event && event.relatedTarget && + (event.relatedTarget.id == 'dictionary_tool_reading_new_input' || + event.relatedTarget.id == 'dictionary_tool_word_new_input' || + event.relatedTarget.id == 'dictionary_tool_category_new_select' || + event.relatedTarget.id == 'dictionary_tool_comment_new_input')) { + return; + } + var dictIndex = this.getSelectedDictionaryIndex_(); + if (isNaN(dictIndex)) { + return; + } + var dictionaryId = this.dictionaries_[dictIndex]['id']; + var newReadingInput = + this.document_.getElementById('dictionary_tool_reading_new_input'); + var newWordInput = + this.document_.getElementById('dictionary_tool_word_new_input'); + var newCategorySelect = + this.document_.getElementById('dictionary_tool_category_new_select'); + var newCommentInput = + this.document_.getElementById('dictionary_tool_comment_new_input'); + var newReading = newReadingInput.value; + var newWord = newWordInput.value; + var newCategory = newCategorySelect.value; + var newComment = newCommentInput.value; + if (newReading == '' || newWord == '') { + return; + } + // We don't reset the value of dictionary_tool_category_new_select to help the + // user who want to add the same category entries. + newReadingInput.value = ''; + newWordInput.value = ''; + newCommentInput.value = ''; + this.sendUserDictionaryCommand_({ + 'type': 'ADD_ENTRY', + 'session_id': this.userDictionarySessionId_, + 'dictionary_id': dictionaryId, + 'entry': { + 'key': newReading, + 'value': newWord, + 'comment': newComment, + 'pos': newCategory + } + }, + (function(response) { + if (response['status'] != 'USER_DICTIONARY_COMMAND_SUCCESS') { + this.showDictionaryToolError_(response['status']); + } else { + this.sendUserDictionaryCommand_( + {'type': 'SAVE', 'session_id': this.userDictionarySessionId_}, + this.naclMozc_.sendReload.bind(this.naclMozc_, undefined)); + // TODO(horo): loadStorage_ is a heavy operation. We shoud only update + // the added entry. 
+ this.loadStorage_(newReadingInput.focus.bind(newReadingInput)); + } }).bind(this)); }; @@ -951,7 +2259,15 @@ var cloud_sync_status = response['output']['cloud_sync_status']; var sync_global_status = cloud_sync_status['global_status']; var sync_message = ''; - if (sync_global_status == 'SYNC_SUCCESS' || + this.document_.getElementById('sync_toggle_button').value = + this.syncEnabled_ ? + chrome.i18n.getMessage('configSyncStopSync') : + chrome.i18n.getMessage('configSyncStartSync'); + if (!this.syncEnabled_ || sync_global_status == 'NOSYNC') { + sync_message += chrome.i18n.getMessage('configSyncNoSync'); + this.enableElementById_('sync_toggle_button'); + this.disableElementById_('sync_customization_button'); + } else if (sync_global_status == 'SYNC_SUCCESS' || sync_global_status == 'SYNC_FAILURE') { var lastSyncedTimestamp = cloud_sync_status['last_synced_timestamp']; if (!lastSyncedTimestamp) { @@ -965,28 +2281,16 @@ this.lastSyncedTimestamp_ = lastSyncedTimestamp; this.naclMozc_.getConfig(this.onConfigLoaded_.bind(this)); } - this.document_.getElementById('sync_toggle_button').value = - chrome.i18n.getMessage('configSyncStopSync'); this.enableElementById_('sync_toggle_button'); this.enableElementById_('sync_customization_button'); } else if (sync_global_status == 'WAITSYNC') { - this.document_.getElementById('sync_toggle_button').value = - chrome.i18n.getMessage('configSyncStopSync'); sync_message += chrome.i18n.getMessage('configSyncWaiting'); this.disableElementById_('sync_toggle_button'); this.disableElementById_('sync_customization_button'); } else if (sync_global_status == 'INSYNC') { - this.document_.getElementById('sync_toggle_button').value = - chrome.i18n.getMessage('configSyncStopSync'); sync_message += chrome.i18n.getMessage('configSyncDuringSync'); this.disableElementById_('sync_toggle_button'); this.disableElementById_('sync_customization_button'); - } else if (sync_global_status == 'NOSYNC') { - this.document_.getElementById('sync_toggle_button').value = - chrome.i18n.getMessage('configSyncStartSync'); - sync_message += chrome.i18n.getMessage('configSyncNoSync'); - this.enableElementById_('sync_toggle_button'); - this.disableElementById_('sync_customization_button'); } var tooltip = ''; if (cloud_sync_status['sync_errors'] && @@ -1007,7 +2311,7 @@ } else if (error_code == 'USER_DICTIONARY_NUM_DICTIONARY_EXCEEDED') { tooltip += chrome.i18n.getMessage( - 'configSyncUserDictionaryNumDicrionaryExceeded'); + 'configSyncUserDictionaryNumDictionaryExceeded'); } else { tooltip += chrome.i18n.getMessage('configSyncUnknownErrorFound'); } @@ -1029,3 +2333,124 @@ } sync_status_div.title = tooltip; }; + +/** + * Sets the tabIndex of the all focusable elements (tabIndex >= 0) in the target + * element -1 to make them unfocusable with tab key. + * @param {string} elementId The ID of target element. + * @private + */ +mozc.OptionPage.prototype.setChildNodesUnfocusableByTabKeyById_ = + function(elementId) { + var element = this.document_.getElementById(elementId); + if (!element) { + return; + } + this.setChildNodesUnfocusableByTabKey_(element); +}; + +/** + * Resets the tabIndex of the all focusable elements in the target element which + * was set to -1 with setChildNodesUnfocusableByTabKeyById_(). + * @param {string} elementId The ID of target element. 
+ * @private + */ +mozc.OptionPage.prototype.setChildNodesFocusableByTabKeyById_ = + function(elementId) { + var element = this.document_.getElementById(elementId); + if (!element) { + return; + } + this.setChildNodesFocusableByTabKey_(element); +}; + +/** + * Sets the tabIndex of the all focusable elements (tabIndex >= 0) in the target + * element -1 to make them unfocusable with tab key. + * @param {!Element} element The target element. + * @private + */ +mozc.OptionPage.prototype.setChildNodesUnfocusableByTabKey_ = + function(element) { + if (element.tabIndex >= 0) { + element.oldTabIndex = element.tabIndex; + element.tabIndex = -1; + } + if (!element.childNodes) { + return; + } + for (var i = 0; i < element.childNodes.length; ++i) { + this.setChildNodesUnfocusableByTabKey_(element.childNodes[i]); + } +}; + +/** + * Resets the tabIndex of the all focusable elements in the target element which + * was set to -1 with setChildNodesUnfocusableByTabKey_(). + * @param {!Element} element The target element. + * @private + */ +mozc.OptionPage.prototype.setChildNodesFocusableByTabKey_ = function(element) { + if (element.oldTabIndex >= 0) { + element.tabIndex = element.oldTabIndex; + element.oldTabIndex = undefined; + } + if (!element.childNodes) { + return; + } + for (var i = 0; i < element.childNodes.length; ++i) { + this.setChildNodesFocusableByTabKey_(element.childNodes[i]); + } +}; + +/** + * Freezes the main div 'settings_div' to hide scroll bar. + * @private + */ +mozc.OptionPage.prototype.freezeMainDiv_ = function() { + var mainDiv = this.document_.getElementById('settings_div'); + if (mainDiv.classList.contains('frozen')) { + return; + } + mainDiv.style.width = this.window_.getComputedStyle(mainDiv).width; + mainDiv.oldScrollTop = this.document_.body.scrollTop; + mainDiv.classList.add('frozen'); + var vertical_position = + mainDiv.getBoundingClientRect().top - mainDiv.oldScrollTop; + mainDiv.style.top = vertical_position + 'px'; + this.setChildNodesUnfocusableByTabKey_(mainDiv); +}; + +/** + * Unfreezes the main div 'settings_div' to hide scroll bar. + * @private + */ +mozc.OptionPage.prototype.unfreezeMainDiv_ = function() { + var mainDiv = this.document_.getElementById('settings_div'); + if (!mainDiv.classList.contains('frozen')) { + return; + } + this.setChildNodesFocusableByTabKey_(mainDiv); + mainDiv.classList.remove('frozen'); + mainDiv.style.top = ''; + mainDiv.style.left = ''; + mainDiv.style.right = ''; + mainDiv.style.width = ''; + var scrollTop = mainDiv.oldScrollTop || 0; + mainDiv.oldScrollTop = undefined; + this.window_.scroll(0, scrollTop); +}; + +/** + * Sends user dictionary command to NaCl module. + * @param {!Object} command User dictionary command object to be sent. + * @param {!function(Object)=} opt_callback Function to be called with results + * from NaCl module. + * @private + */ +mozc.OptionPage.prototype.sendUserDictionaryCommand_ = + function(command, opt_callback) { + this.naclMozc_.sendUserDictionaryCommand( + command, opt_callback); +}; + diff -Nru mozc-1.11.1502.102/chrome/nacl/options.css mozc-1.11.1522.102/chrome/nacl/options.css --- mozc-1.11.1502.102/chrome/nacl/options.css 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/options.css 2013-08-28 05:26:13.000000000 +0000 @@ -29,17 +29,28 @@ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /** - * NaCl Mozc option page style seet. + * NaCl Mozc option page style sheet. + * This file is intended to keep a consistency with Chrome setting page. 
+ * The original style sheet files of Chrome setting page are here. + * https://code.google.com/p/chromium/codesearch#chromium/src/ui/webui/resources/css/ * */ body { color: rgb(48, 57, 66); - font: 75% 'Segoe UI', Arial, Meiryo, 'MS PGothic', sans-serif; + font: 75% MotoyaG04Gothic, 'Noto Sans UI', sans-serif; max-width:640px; visibility:hidden; } +#content_base_div { + position: absolute; +} + +.frozen { + position: fixed; +} + input[type='checkbox'] { -webkit-appearance: none; background-image: -webkit-linear-gradient(#ededed, #ededed 38%, #dedede); @@ -60,7 +71,9 @@ } input[type='checkbox']:checked::before { - background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAsAAAALCAYAAACprHcmAAAAcklEQVQY02NgwA/YoJgoEA/Es4DYgJBCJSBeD8SboRinBiYg7kZS2IosyQ/Eakh8LySFq4FYHFlxGRBvBOJYqMRqJMU+yApNkSRAeC0Sux3dfSCTetE0wKyXxOWhMKhTYIr9CAUXyJMzgLgBagBBgDPGAI2LGdNt0T1AAAAAAElFTkSuQmCC); + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAsAAAALCAYAAACprHcmAAAAcklEQVQY02NgwA/YoJgoEA/Es4DYgJBCJSBeD8SboRinBiYg7kZS2IosyQ/Eakh8LySFq4FYHFlxGRBvBOJYqMRqJMU+yApNkSRAeC0Sux3dfSCTetE0wKyXxOWhMKhTYIr9CAUXyJMzgLgBagBBgDPGAI2LGdNt0T1AAAAAAElFTkSuQmCC) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABYAAAAWCAYAAADEtGw7AAAA60lEQVQ4y2P4//8/Ay0wA00NpgC4AXEksgA1DNYA4vVAvBnZcEoNFgLihVBDNyMbTonBLEDchWYo3HBKDM7BYSgIZ5JrsBseQxuBmIkcg5EjCx1PBmJufGHsBMQFQMxORGTB8BIglsCXKmSBeA1U8RQgliMistYBsTq+dAxy4TQ0TSBLnPFE1iYgdkAPL3SDC/BECi4cjS0i0A12RgoGYnAxEDMSYzAIyGMJDmy4B4jZcCUdXKmCnUCwzAVifnxpklA6xhY0K4BYgVBiJyaDgIJmKtTQjUBsTEwuIjbnwYLGi9jsSZ8aZEgZDACI3DQYaeTU0AAAAABJRU5ErkJggg==) 2x); background-size: 100% 100%; content: ''; display: block; @@ -83,8 +96,8 @@ color: #444; font: inherit; margin: 0 1px 5px 0; - min-height: 2em; - min-width: 4em; + min-height: 24px; + min-width: 48px; padding-bottom: 1px; -webkit-padding-end: 10px; -webkit-padding-start: 10px; @@ -114,6 +127,17 @@ color: #aaa; } +input[type='text'] { + border: 1px solid #bfbfbf; + border-radius: 2px; + box-sizing: border-box; + color: #444; + font: inherit; + margin: 0; + min-height: 2em; + padding: 3px; +} + label { display: inline; padding: 0; @@ -148,6 +172,21 @@ top: 13px; } +h2 { + display: block; + font-size: 120%; + font-weight: normal; + line-height: 1; + margin: 0; + -webkit-margin-after: .67em; + -webkit-margin-before: .67em; + -webkit-margin-end: 0; + -webkit-margin-start: 0; + padding: 1px 0 13px; + text-shadow: white 0 1px 2px; + -webkit-user-select: none; +} + h3 { color: black; font-size: 1.2em; @@ -159,14 +198,16 @@ } section { - margin: 8px 0 24px; + margin: 8px 0 10px; max-width: 600px; -webkit-padding-start: 20px; } select { -webkit-appearance: none; - background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAAICAYAAAAbQcSUAAAAaUlEQVQoz2P4//8/A7UwdkEGhiggTsODo4g2LBEImJmZvwE1/UfHIHGQPNGGAbHCggULFrKxsf1ENgjEB4mD5EnxJoaByAZB5Yk3DNlAPj6+L8gGkWUYzMC3b982IRtEtmFQjaxYxDAwAGi4TwMYKNLfAAAAAElFTkSuQmCC), + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAAICAYAAAAbQcSUAAAAaUlEQVQoz2P4//8/A7UwdkEGhiggTsODo4g2LBEImJmZvwE1/UfHIHGQPNGGAbHCggULFrKxsf1ENgjEB4mD5EnxJoaByAZB5Yk3DNlAPj6+L8gGkWUYzMC3b982IRtEtmFQjaxYxDAwAGi4TwMYKNLfAAAAAElFTkSuQmCC) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACcAAAAQCAYAAACV3GYgAAAAWklEQVRIx+3VOwoAIAwD0B49N686CCJ+K0aHFrIl8LaKqsqvEccxcPEQo8bgNs4KTBth4HaByCMWbhWIcsDEzYCoy2xcD4hW8QWuBqJXeoXLQIwK/iEc57iDBIg+ClNpjek/AAAAAElFTkSuQmCC) 2x), -webkit-linear-gradient(#ededed, #ededed 38%, #dedede); background-position: 
right center; background-repeat: no-repeat; @@ -177,7 +218,7 @@ font: inherit; margin: 0 1px 0 0; min-height: 2em; - min-width: 240px; + min-width: 160px; padding-bottom: 1px; -webkit-padding-end: 20px; -webkit-padding-start: 6px; @@ -186,13 +227,19 @@ } select:disabled { - background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAAICAYAAAAbQcSUAAAAWklEQVQoz2P4//8/A7UwdkEGhiggTsODo4g2LBEIGhoa/uPCIHmiDQNihQULFizEZhBIHCRPijexGggzCCpPvGHoBiIbRJZhMAPfvn3bhGwQ2YZBNbJiEcPAAIgGZrTRc1ZLAAAAAElFTkSuQmCC), + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABMAAAAICAYAAAAbQcSUAAAAWklEQVQoz2P4//8/A7UwdkEGhiggTsODo4g2LBEIGhoa/uPCIHmiDQNihQULFizEZhBIHCRPijexGggzCCpPvGHoBiIbRJZhMAPfvn3bhGwQ2YZBNbJiEcPAAIgGZrTRc1ZLAAAAAElFTkSuQmCC) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACYAAAAQCAYAAAB6Hg0eAAAAX0lEQVRIx+3UOwoAIAxEwRz93TwWIoj4lwRBA9vtwlQRVZUbcyVqCUY8PQwWsFMcgFjBdnGkvSVsFUe+tYbN4ih3HrARjtrGC9bC0ep7wkocva43LOEY9d75/B/2YZsJM1B42fe2uWoAAAAASUVORK5CYII=) 2x), -webkit-linear-gradient(#f1f1f1, #f1f1f1 38%, #e6e6e6); border-color: rgba(80, 80, 80, .2); box-shadow: 0 1px 0 rgba(80, 80, 80, .08), inset 0 1px 2px rgba(255, 255, 255, .75); color: #aaa; } +#suggestions_size { + min-width: 40px; +} + option { font: inherit; font-weight: normal; @@ -206,11 +253,21 @@ text-align: right; } +a { + color: rgb(17, 85, 204); + cursor: pointer; + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + .controlled-setting-with-label { -webkit-box-align: center; display: -webkit-box; - padding-bottom: 7px; - padding-top: 7px; + padding-bottom: 3px; + padding-top: 3px; } .controlled-setting-with-label > input + span { @@ -231,7 +288,7 @@ .selection-label { display: -webkit-box; - width: 230px; + width: 150px; } .overlay { @@ -252,7 +309,7 @@ z-index: 11; } -#sync_config_page { +.overlay_page { background: white; background-color: white; -webkit-border-radius: 3px; @@ -262,21 +319,370 @@ display: -webkit-box; margin-bottom: 6px; margin-top: 6px; - min-width: 300px; padding-top: 10px; position: relative; -webkit-transition: 200ms -webkit-transform; -webkit-user-select: none; - width: 400px; z-index: 0; } -#sync_config_page input[type='button'] { - min-height: 26px; - min-width: 87px; +.overlay .overlay_page > .close_button { + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAQAAAC1QeVaAAAAUklEQVR4XqXPYQrAIAhAYW/gXd8NJxTopVqsGEhtf+L9/ERU2k/HSMFQpKcYJeNFI9Be0LCMij8cYyjj5EHIivGBkwLfrbX3IF8PqumVmnDpEG+eDsKibPG2JwAAAABJRU5ErkJggg==) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAQAAADYBBcfAAAAcUlEQVR4Xu3UQQrAQAgDQN+cD+jPbU+BuChS6G3JLTAgIlp+zBZcCIO/sRL2PXQk8qDsexhI0soS0UOhlU2jCq1shEqVzVAp2RKSku2hE/oakpEuIVnIhkeoTDc8Q2VKB6jspD2Mwkg3t3oy9n9/gAsfSKXnp9ZyP20AAAAASUVORK5CYII=) 2x); + background-position: center; + background-repeat: no-repeat; + height: 14px; + position: absolute; + right: 7px; + top: 7px; + width: 14px; + z-index: 1; +} + +.overlay .overlay_page > .close_button:hover { + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAQAAAC1QeVaAAAAnUlEQVR4XoWQQQ6CQAxFewjkJkMCyXgJPMk7AiYczyBeZEAX6AKctGIaN+bt+trk9wtGQc/IkhnoKGxqqiWxOSZalapWFZ6VrIUDExsN0a5JRBq9LoVOR0eEQMoEhKizXhhsn0p1sCWVo7CwOf1RytPL8CPvwuBUoHL6ugeK30CVD1TqK7V/hdpe+VNChhOzV8xWny/+xosHF8578W/Hmc1OOC3wmwAAAABJRU5ErkJggg==) 1x, + 
url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAQAAADYBBcfAAABSElEQVR4XpXVTY6CQBAF4A4hHMu14S6QiQmJTpSNrjpyhY5LNsICF80N0NMMBjYkLYMGLOYNRWZ4u0p/4ae6C7GCaCv1VBGVoQnaoA1NVKpr6mkL1/1EdqwOzar9nUMTK20z8LKQ1RhgZHVZTMDkY2sQYLYm9QCeNwGBmQTt+XMEs+XmAYDN+pEte6gdejfdBQHWZaWdF4wVsedFlKvHpw5qixqQvyBS3VfzUXO0JRKfliBFRkl8oQpiSJFRVCGikhBSZJToS4SGCFJimNAIbDzSnNkIDNRvqDkYGoYhxUeNSo7l0Bz4OOrKMGwOtiPxecbTxKcth2yK0pazn5v8REuAMfVO4LHKgU3VZa2d4SC76/8cZHc8OnZ/Hh07Gh09pbvydxsYwdcDy3qOyTpzZwbyvplCe34gE049dTveh1/A8a5uqTcgyjd2tBg2T+9digAAAABJRU5ErkJggg==) 2x); +} + +.overlay .overlay_page > .close_button:active { + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAQAAAC1QeVaAAAAnElEQVR4Xn2QQQqDQAxFcwjrQR6MoDC9hD2RBY9XrL3IaLuwXWg1BgldlLfLS0LyBYWMlp5ppaMh26uqqEksjoFapapZhWfetHBiYKEi2nQiUul0LjRaOiME0kpAiFprhc76C9Vhb1LZCxOL06aUt5fhRz6PtbawcPpuBxGPgwp/UK6vlP4VSnvlfwgbF0avGC0+F/yDDy9uXLHgv9HQnE67Dt6aAAAAAElFTkSuQmCC) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABwAAAAcCAQAAADYBBcfAAABTUlEQVR4XpXVsYrCQBAG4CVIHsgmhYcIaSTaLpiHSDi2uwMhnbJVsNo0wceJPo2RpBHWnCt64/2XCXf5uyEfSZzZUYwhxtOJquI6tEEXdKGNa3XQifHwvp9olJXLy7j7neUlK82IgbupbBzgIpvdtAfq95lFgJlZnQDcfgQEBhJ0288XmM/frgDYTK75/AGNT99mbkGAddkY/w6zkpi7iHL1bH+DxqMGFM4RJXZP8dIc4wmdOsJQYBSdClURQ4qMoioR14SQIqPEJxFaIkiJYUIrsPFIC2YQGGi+oeFgaBmGFF81rjlWQHPgx1EHhmFzsB065RlPdUojh4ynbuRGbsj3dAswpn4TeKwKYH112Rr/eZCjyX8OcvSyOjbrv66OzZpWx4PSU/mnOQbQvbBsh5hs82hgIS8ufWjBL2TCOlHH1fn5F7A6q6NOnojyBfK6dXRV1N6jAAAAAElFTkSuQmCC) 2x); +} + +.alert_overlay { + visibility:visible; + z-index: 12; +} +.alert_page { + width: 400px; +} + +.alert_title { + color: black; + font-size: 14px; + font-weight: normal; + line-height: 1; + margin-bottom: 10px; + text-shadow: rgb(255, 255, 255) 0px 1px 2px; +} + +.alert_ok_div { + margin-right: 20px; + margin-top: 10px; + text-align: right; +} + +.prompt_overlay { + visibility:visible; + z-index: 12; +} + +.prompt_page { + width: 400px; +} + +.prompt_title { + color: black; + font-size: 14px; + font-weight: normal; + line-height: 1; + margin-bottom: 10px; + text-shadow: rgb(255, 255, 255) 0px 1px 2px; +} + +.prompt_message_div { + margin-bottom: 10px; +} + +.prompt_input { + width: 350px +} + +.prompt_ok_cancel_div { + margin-right: 20px; + margin-top: 10px; + text-align: right; +} + +.confirm_overlay { + visibility:visible; + z-index: 12; +} + +.confirm_page { + width: 400px; +} + +.confirm_title { + color: black; + font-size: 14px; + font-weight: normal; + line-height: 1; + margin-bottom: 10px; + text-shadow: rgb(255, 255, 255) 0px 1px 2px; +} + +.confirm_message_div { + margin-bottom: 10px; +} + +.confirm_ok_cancel_div { + margin-right: 20px; + margin-top: 10px; + text-align: right; +} + +#sync_config_page { + width: 400px; } #sync_config_ok_cancel_div { margin-right: 20px; + margin-top: 10px; text-align: right; } + +#clear_history_page { + width: 400px; +} + +#clear_history_ok_cancel_div { + margin-right: 20px; + margin-top: 10px; + text-align: right; +} + +#dictionary_tool_page { + -webkit-box-flex: 1; + overflow: auto; + width: 600px; +} + +#user_dictionary_done_div { + margin-bottom: 10px; + margin-right: 20px; + text-align:right; +} + +#dictionary_tool_content_area { + -webkit-box-flex: 1; + -webkit-box-orient: vertical; + display: -webkit-box; + font-size: 12px; + margin: 0 20px 10px 20px; + min-height: 100px; + overflow: auto; +} + +#dictionary_tool_entry_title { + display: -webkit-box; + font-size: 12px; + font-weight: bold; +} + +#dictionary_tool_list_area { + border: solid 1px rgb(217, 
217, 217); + -webkit-box-flex: 1; + overflow: auto; +} + +#dictionary_tool_reading_title { + padding: 6px 3px 3px 7px; + width: 120px; +} + +#dictionary_tool_word_title { + padding: 6px 3px 3px 7px; + width: 120px; +} + +#dictionary_tool_category_title { + padding: 5px 0px 3px 10px; + width: 120px; +} + +#dictionary_tool_comment_title { + -webkit-box-flex: 1; + padding: 6px 3px 3px 7px; +} + +.dictionary_tool_entry { + display: -webkit-box; + min-height: 31px; +} + +.dictionary_tool_entry > div { + margin: 0; +} + +.dictionary_tool_reading { + padding: 10px 4px 0px 6px; + width: 120px; + word-wrap: break-word; +} +.dictionary_tool_word { + padding: 10px 4px 0px 6px; + width: 120px; + word-wrap: break-word; +} +.dictionary_tool_category { + padding: 9px 0px 1px 10px; + width: 120px; + word-wrap: break-word; +} +.dictionary_tool_comment { + -webkit-box-flex: 1; + padding: 10px 4px 0px 6px; + word-wrap: break-word; +} + +.dictionary_tool_entry_selected { + background-color: #d0d0d0; +} + +.dictionary_tool_entry_selected > div { + padding: 5px 5px 4px 5px; + margin: 0; +} + +.dictionary_tool_entry .static_text { + display: block; +} + +.dictionary_tool_entry_selected .static_text { + display: none; +} + +.dictionary_tool_entry .dictionary_tool_entry_input { + display: none; +} + +.dictionary_tool_entry_selected .dictionary_tool_entry_input { + display: block; +} + +.dictionary_tool_entry .dictionary_tool_entry_select { + display: none; +} + +.dictionary_tool_entry_selected .dictionary_tool_entry_select { + display: block; +} + +#dictionary_tool_entry_new { + display: -webkit-box; + min-height: 31px; +} + +#dictionary_tool_reading_new { + margin: 0; + padding: 3px 5px; + width: 120px; +} +#dictionary_tool_word_new { + margin: 0; + padding: 3px 5px; + width: 120px; +} +#dictionary_tool_category_new { + margin: 0; + padding: 3px 5px; + width: 120px; +} +#dictionary_tool_comment_new { + -webkit-box-flex: 1; + margin: 0; + padding: 3px 5px; +} + +.dictionary_tool_entry input[type='text'] { + box-sizing: border-box; + font-size: 12px; + margin: 0; + min-height: 22px; + padding: 0; + width: 100%; +} + +.dictionary_tool_entry select { + margin: 0; + min-height: 22px; + min-width: 100px; + -webkit-padding-start: 0px; + -webkit-padding-end: 0px; + width: 100%; +} + +#dictionary_tool_entry_new input[type='text'] { + box-sizing: border-box; + font-size: 12px; + margin: 0; + min-height: 22px; + padding: 0; + width: 100%; +} + +#dictionary_tool_entry_new select { + margin: 0; + min-height: 22px; + min-width: 100px; + -webkit-padding-end: 0px; + -webkit-padding-start: 0px; + width: 100%; +} + +.dictionary_tool_entry_delete_button_div { + -webkit-box-align: center; + -webkit-box-orient: vertical; + -webkit-box-pack: center; + display: -webkit-box; + padding: 3px 5px; +} + +.dictionary_tool_entry .dictionary_tool_entry_delete_button { + opacity: 0; +} + +.dictionary_tool_entry:hover { + background-color: rgb(182, 212, 252); +} + +.dictionary_tool_entry:hover .dictionary_tool_entry_delete_button { + opacity: 1; +} + +.dictionary_tool_entry_selected .dictionary_tool_entry_delete_button { + opacity: 1; +} + +.dictionary_tool_entry_delete_button:focus { + opacity: 1; +} + +.dictionary_tool_entry_delete_button { + background-color: transparent; + background-image: -webkit-image-set( + 
url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAQAAAC1+jfqAAAAiElEQVR42r2RsQrDMAxEBRdl8SDcX8lQPGg1GBI6lvz/h7QyRRXV0qUULwfvwZ1tenw5PxToRPWMC52eA9+WDnlh3HFQ/xBQl86NFYJqeGflkiogrOvVlIFhqURFVho3x1moGAa3deMs+LS30CAhBN5nNxeT5hbJ1zwmji2k+aF6NENIPf/hs54f0sZFUVAMigAAAABJRU5ErkJggg==) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAQAAADZc7J/AAAA9UlEQVR4Xu3UsWrCUByH0fMEouiuhrg4xohToJVGH0CHLBncEwfx/VvIFHLJBWmHDvKbv7PcP9f3L/fXwBsApZSRpUpEgbOnxwiReng6x4AvjdrNXRLkibubWqMcB9Yujk7qjhjmtZOji/U4wELuoBwQXa50kFsQA5jK+kQ/l5kSA4ZEK5Fo+3kcCIlGM8ijQEhUqkEeBUKiUPTyl4C5vZ1cbmdv/iqwclXY6aZwtXoFSLQqhVwmkytUWglxAMG7T0yCu4gD0v7ZBKeVxoEwFxIxYBPmIWEzDnyEeUj4HAfYdvmMcGYdsSUGsOzlIbHEv/uV38APrreiBRBIs3QAAAAASUVORK5CYII=) 2x); + border: none; + display: block; + height: 16px; + opacity: 1; + -webkit-transition: 150ms opacity; + width: 16px; +} + +.dictionary_tool_entry_delete_button:hover { + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAQAAAC1+jfqAAAAqklEQVR4XqWRMQ6DMAxF/1Fyilyj2SmIBUG5QcTCyJA5Z8jGhlBPgRi4TmoDraVmKFJlWYrlp/g5QfwRlwEVNWVa4WzfH9jK6kCkEkBjwxOhLghheMWMELUAqqwQ4OCbnE4LJnhr5IYdqQt4DJQjhe9u4vBBmnxHHNzRFkDGjHDo0VuTAqy2vAG4NkvXXDHxbGsIGlj3e835VFNtdugma/Jk0eXq0lP//5svi4PtO01oFfYAAAAASUVORK5CYII=) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAQAAADZc7J/AAAB4UlEQVR42u2VsWoCQRBAh+MUFP0C1V9QD4NEOxs9xBQHQVCwSJFWVBAtBNXCxk6wTkBJYUTwEwQLC61E8QP0NzZzt5g5726DkC7EYWHZ8T3WndkV2C/jLwn4hwVYBIdLn9vkLp79QcBCTDMiy3w2gQ9XeTYkEHA8vqj2rworXu3HF1YFfSWgp5QFnKVLvYvzDEKEZ5hW70oXOCtcEbQLIkx7+IQtfMBSOjU6XEF4oyOdYInZbXyOuajjDlpNeQgleIUJKUz4BDMledhqOu/AzVSmzZ49CUjCC0yvim98iqtJT2L2jKsqczsdok9XrHNexaww415lnTNwn6CM/KxJIR8bnUZHPhLO6yMoIyk2pNjLewFuE5AiY1KMMQx8Q7hQYFek4AkjxXFe1rsF84I/BTFQMGL+1Lxwl4DwdtM1gjwKohgxyLtG7SYpxALqugOMcfOKN+bFXeBsLB1uulNcRqq7/tt36k41zoL6QlxGjtd6lrahiqCi1iOFYyvXuxY8yzK33VnvUivbLlOlj/jktm0s3YnXrNIXXufHNxuOGasi8S68zkwrlnV8ZcJJsTIUxbLgQcFZWE8N0gau2p40VVcM0gYeFpSRK6445UhBuKiRgiyKw+34rLt59nb1/7+RwReVkaFtqvNBuwAAAABJRU5ErkJggg==) 2x); +} + +.dictionary_tool_entry_delete_button:active { + background-image: -webkit-image-set( + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAQAAAC1+jfqAAAARElEQVQoz2P4z4AfMlBLAYMdwxkghgEwD1XBGTC0g0sDIaYJECVwFqoChBK4WegKkJWArSJZAQErCDqSKG/iCyhaRhYA9LDIbULDzlIAAAAASUVORK5CYII=) 1x, + url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAQAAADZc7J/AAAA/ElEQVR4Xu3UsWrCUBiG4efGlIBoIMFbcnYolYJ3pg4iKGrGYFTRwaUFhYAekiDt0EG++X2W83N8/3J/DbwBMJJSsdQItcDY1VlCOImzq3Ed8OmicHASB3ns5KBw8VUNpDJrW7uAiJ3sbK1l0mqArpmFTUlQ5jYWZrrUAUSmT0SZm4qoA56JvVhs/5g3A7RLolA85A1ASOTye65NMxASK6syfxGITMzvMxG9CvRkliWwlOm9AsSOcitzU1NzK7mjuBkQvHtLK7iLBiB5PhttJSGpB8I8vM6kDuiHeUjoVwMfYR4SRtUAw1veIZzOjRhSBzCoyKFjgH/3K7+BHzg+Cgw0eSW3AAAAAElFTkSuQmCC) 2x); +} diff -Nru mozc-1.11.1502.102/chrome/nacl/options.html mozc-1.11.1522.102/chrome/nacl/options.html --- mozc-1.11.1502.102/chrome/nacl/options.html 2013-07-17 02:37:23.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/options.html 2013-08-28 05:25:43.000000000 +0000 @@ -9,9 +9,10 @@
[The markup of these options.html hunks was stripped in this copy; only the
visible strings survive. Recoverable content: the settings page is reorganized
(the second hunk grows from 165 to 253 lines), keeping the existing sections
and labels ("Japanese input settings", "Basics", "Input mode:", "Punctuation
style:", "Symbol style:", "Space input style:", "Selection shortcut:", "Keymap
style:", "Adjust conversion based on previous input:", "Input assistance",
"Shift key mode switch:", "Suggest", "Sync", "Advanced sync settings", "Clear
personalization data", "Privacy", the description of syncing settings and a
user sync-dictionary with data clearable from Google Dashboard, and the
copyright / open-source notice), renaming "Maximum number of suggestions:" to
"Number of suggestions:", and adding a "Dictionary tool" section ("Edit user
dictionaries.") plus markup for the clear-history and dictionary-tool overlay
dialogs that the new options.js operates on.]
diff -Nru mozc-1.11.1502.102/chrome/nacl/options.js mozc-1.11.1522.102/chrome/nacl/options.js --- mozc-1.11.1502.102/chrome/nacl/options.js 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/chrome/nacl/options.js 2013-08-28 05:26:13.000000000 +0000 @@ -37,7 +37,7 @@ document.addEventListener('readystatechange', function() { if (document.readyState == 'complete') { var optionPage = /** @type {!mozc.OptionPage} */ - chrome.extension.getBackgroundPage()['newOptionPage'](this, console); + chrome.extension.getBackgroundPage()['newOptionPage'](window); window.addEventListener('beforeunload', optionPage.unload.bind(optionPage)); } }, true); diff -Nru mozc-1.11.1502.102/composer/composer_test.cc mozc-1.11.1522.102/composer/composer_test.cc --- mozc-1.11.1502.102/composer/composer_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/composer_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -798,7 +798,7 @@ composer_->SetInputMode(transliteration::HIRAGANA); for (size_t test_data_index = 0; - test_data_index < ARRAYSIZE_UNSAFE(test_data_list); + test_data_index < arraysize(test_data_list); ++test_data_index) { const TestData &test_data = test_data_list[test_data_index]; composer_->SetInputFieldType(test_data.field_type_); @@ -3041,7 +3041,7 @@ TestData("ABCDEFGHI", commands::Context::TEL, true, 9), }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(test_data_list); ++i) { + for (size_t i = 0; i < arraysize(test_data_list); ++i) { const TestData &test_data = test_data_list[i]; SCOPED_TRACE(test_data.input_text); SCOPED_TRACE(test_data.field_type); diff -Nru mozc-1.11.1502.102/composer/internal/composition_test.cc mozc-1.11.1522.102/composer/internal/composition_test.cc --- mozc-1.11.1502.102/composer/internal/composition_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/internal/composition_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -109,7 +109,7 @@ // "っ" { "\xe3\x81\xa3", "ty", "tty" }, }; - static const int test_chunks_size = ARRAYSIZE_UNSAFE(test_chunks); + static const int test_chunks_size = arraysize(test_chunks); CharChunkList::iterator it; comp->MaybeSplitChunkAt(0, &it); for (int i = 0; i < test_chunks_size; ++i) { @@ -156,7 +156,7 @@ }; CharChunk *chunk = AppendChunk("", "", "", composition_.get()); - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { const TestCase& test = test_cases[i]; chunk->set_conversion(test.conversion); chunk->set_pending(test.pending); @@ -315,7 +315,7 @@ // "っ", "っ" { "\xe3\x81\xa3", "ty", "tty", 3, "", "", "", "\xe3\x81\xa3", "ty", "tty" }, }; - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { const TestCase& test = test_cases[i]; CharChunk right_orig_chunk(Transliterators::CONVERSION_STRING, NULL); right_orig_chunk.set_conversion(test.conversion); @@ -369,7 +369,7 @@ // "っ", "っ" { "\xe3\x81\xa3", "ty", "tty", 3, "", "", "", "\xe3\x81\xa3", "ty", "tty" }, }; - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { const TestCase& test = test_cases[i]; CharChunk right_orig_chunk(Transliterators::CONVERSION_STRING, NULL); right_orig_chunk.set_conversion(test.conversion); @@ -425,7 +425,7 @@ { 12, 5, 5 }, }; const size_t dummy_position = 0; - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { const TestCase& test = test_cases[i]; { // Test RAW mode diff -Nru 
mozc-1.11.1502.102/composer/internal/converter_test.cc mozc-1.11.1522.102/composer/internal/converter_test.cc --- mozc-1.11.1502.102/composer/internal/converter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/internal/converter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -89,7 +89,7 @@ { "nannkanain", "\xE3\x81\xAA\xE3\x82\x93\xE3\x81\x8B\xE3\x81\xAA\xE3\x81" "\x84\xE3\x82\x93" }, }; - static const int size = ARRAYSIZE_UNSAFE(test_cases); + static const int size = arraysize(test_cases); mozc::composer::Table table; InitTable(&table); diff -Nru mozc-1.11.1502.102/composer/internal/typing_corrector_test.cc mozc-1.11.1522.102/composer/internal/typing_corrector_test.cc --- mozc-1.11.1502.102/composer/internal/typing_corrector_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/internal/typing_corrector_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -428,7 +428,7 @@ "\xE3\x81\x8B\xE3\x81\x84\xE3\x81\x97\xE3\x82\x83"}, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestCases); ++i) { + for (size_t i = 0; i < arraysize(kTestCases); ++i) { SCOPED_TRACE(string("key: ") + kTestCases[i].keys); InsertOneByOne(kTestCases[i].keys, &corrector); vector queries; diff -Nru mozc-1.11.1502.102/composer/internal/typing_model.cc mozc-1.11.1522.102/composer/internal/typing_model.cc --- mozc-1.11.1502.102/composer/internal/typing_model.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/internal/typing_model.cc 2013-08-28 05:26:13.000000000 +0000 @@ -39,40 +39,12 @@ namespace { -#ifdef MOZC_ENABLE_TYPING_CORRECTION // These header files are automatically generated by gen_typing_model.py. #include "composer/internal/typing_model_12keys-hiragana.h" #include "composer/internal/typing_model_flick-hiragana.h" #include "composer/internal/typing_model_godan-hiragana.h" #include "composer/internal/typing_model_qwerty_mobile-hiragana.h" #include "composer/internal/typing_model_toggle_flick-hiragana.h" -#else // MOZC_ENABLE_TYPING_CORRECTION - const char *kKeyCharacters_12keysHiragana = NULL; - const size_t kKeyCharactersSize_12keysHiragana = 0; - const uint8 *kCostTable_12keysHiragana = NULL; - const size_t kCostTableSize_12keysHiragana = 0; - const int32 *kCostMappingTable_12keysHiragana = NULL; - const char *kKeyCharacters_FlickHiragana = NULL; - const size_t kKeyCharactersSize_FlickHiragana = 0; - const uint8 *kCostTable_FlickHiragana = NULL; - const size_t kCostTableSize_FlickHiragana = 0; - const int32 *kCostMappingTable_FlickHiragana = 0; - const char *kKeyCharacters_ToggleFlickHiragana = NULL; - const size_t kKeyCharactersSize_ToggleFlickHiragana = 0; - const uint8 *kCostTable_ToggleFlickHiragana = NULL; - const size_t kCostTableSize_ToggleFlickHiragana = 0; - const int32 *kCostMappingTable_ToggleFlickHiragana = NULL; - const char *kKeyCharacters_QwertyMobileHiragana = NULL; - const size_t kKeyCharactersSize_QwertyMobileHiragana = 0; - const uint8 *kCostTable_QwertyMobileHiragana = NULL; - const size_t kCostTableSize_QwertyMobileHiragana = 0; - const int32 *kCostMappingTable_QwertyMobileHiragana = NULL; - const char *kKeyCharacters_GodanHiragana = NULL; - const size_t kKeyCharactersSize_GodanHiragana = 0; - const uint8 *kCostTable_GodanHiragana = NULL; - const size_t kCostTableSize_GodanHiragana = 0; - const int32 *kCostMappingTable_GodanHiragana = NULL; -#endif // MOZC_ENABLE_TYPING_CORRECTION scoped_ptr g_typing_model_12keys_hiragana; scoped_ptr g_typing_model_flick_hiragana; diff -Nru mozc-1.11.1502.102/composer/table_test.cc 
mozc-1.11.1522.102/composer/table_test.cc --- mozc-1.11.1502.102/composer/table_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/composer/table_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -142,7 +142,7 @@ // "ん" { "nn", true, "\xe3\x82\x93", "" }, }; - static const int size = ARRAYSIZE_UNSAFE(test_cases); + static const int size = arraysize(test_cases); Table table; InitTable(&table); @@ -204,7 +204,7 @@ config::ConfigHandler::Reload(); commands::Request request; - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { config::Config config; config.set_punctuation_method(test_cases[i].method); EXPECT_TRUE(config::ConfigHandler::SetConfig(config)); @@ -253,7 +253,7 @@ config::ConfigHandler::Reload(); commands::Request request; - for (int i = 0; i < ARRAYSIZE_UNSAFE(test_cases); ++i) { + for (int i = 0; i < arraysize(test_cases); ++i) { config::Config config; config.set_symbol_method(test_cases[i].method); EXPECT_TRUE(config::ConfigHandler::SetConfig(config)); diff -Nru mozc-1.11.1502.102/config/config.proto mozc-1.11.1522.102/config/config.proto --- mozc-1.11.1502.102/config/config.proto 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/config/config.proto 2013-08-28 05:26:13.000000000 +0000 @@ -271,8 +271,6 @@ // Use typing correction feature. // e.g. sunkai -> 深海 - // Note that surely build configuration is prioritized. - // c.f. MOZC_ENABLE_TYPING_CORRECTION optional bool use_typing_correction = 66 [ default = false ]; ////////////////////////////////////////////////////////////// @@ -350,38 +348,7 @@ // Multilingual config. It resides here for historical reason. // TODO(mukai): apply extension for multilingual config. - optional PinyinConfig pinyin_config = 996; optional HangulConfig hangul_config = 998; - optional ChewingConfig chewing_config = 999; -}; - -// Config for Chinese PinYin input. Those items are based on the libpyzy -// configurations and current ChromeOS ibus-pinyin config items. -message PinyinConfig { - // These default values are not used on ChromeOS. - - // Corrects some typos or not. (e.g. gn -> ng) - optional bool correct_pinyin = 1 [default = true]; - // Accepts pinyin which is based on the pronunciation of Mandarin Chinese. - optional bool fuzzy_pinyin = 2 [default = false]; - optional bool select_with_shift = 3 [default = false]; - optional bool paging_with_minus_equal = 4 [default = true]; - optional bool paging_with_comma_period = 5 [default = true]; - optional bool auto_commit = 6 [default = false]; - optional bool double_pinyin = 7 [default = false]; - optional bool initial_mode_chinese = 8 [default = true]; - optional bool initial_mode_full_width_word = 9 [default = false]; - optional bool initial_mode_full_width_punctuation = 10 [default = true]; - optional bool initial_mode_simplified_chinese = 11 [default = true]; - - // Double pinyin shcema list. - // MSPY = 0; - // ZRM = 1; - // ABC = 2; - // ZGPY = 3; - // PYJJ = 4; - // XHE = 5; - optional int32 double_pinyin_schema = 12 [default = 0]; }; @@ -403,52 +370,3 @@ // custom hanja keymap repeated bytes hanja_keys = 2; }; - -// Config for Taiwanese chewing input. Those items are based on the -// libchewing configurations and current ChromeOS ibus-chewing config -// items. 
-message ChewingConfig { - optional bool automatic_shift_cursor = 1 [ default = false ]; - optional bool add_phrase_direction = 2 [ default = false ]; - optional bool easy_symbol_input = 3 [ default = false ]; - optional bool escape_cleans_all_buffer = 4 [ default = false ]; - optional bool force_lowercase_english = 5 [ default = false ]; - // does not make sense with the current code because libchewing does - // not care this item. - // TODO(mukai): implement it. - optional bool plain_zhuyin = 6 [ default = false ]; - optional bool phrase_choice_rearward = 7 [ default = true ]; - optional bool space_as_selection = 8 [ default = true ]; - optional uint32 maximum_chinese_character_length = 9 [ default = 40 ]; - optional uint32 candidates_per_page = 10 [ default = 10 ]; - enum KeyboardType { - DEFAULT = 0; - HSU = 1; - IBM = 2; - GIN_YIEH = 3; - ETEN = 4; - ETEN26 = 5; - DVORAK = 6; - DVORAK_HSU = 7; - DACHEN_26 = 8; - HANYU = 9; - }; - optional KeyboardType keyboard_type = 11 [ default = DEFAULT ]; - enum SelectionKeys { - SELECTION_1234567890 = 0; - SELECTION_asdfghjkl = 1; - SELECTION_asdfzxcv89 = 2; - SELECTION_asdfjkl789 = 3; - SELECTION_aoeuqjkix = 4; - SELECTION_aoeuhtnsid = 5; - SELECTION_aoeuidhtns = 6; - SELECTION_1234qweras = 7; - }; - optional SelectionKeys selection_keys = 12 [ default = SELECTION_1234567890 ]; - enum HsuSelectionKeys { - HSU_asdfjkl789 = 0; - HSU_asdfzxcv89 = 1; - }; - optional HsuSelectionKeys hsu_selection_keys = 13 - [ default = HSU_asdfjkl789 ]; -}; diff -Nru mozc-1.11.1502.102/config/config_handler_test.cc mozc-1.11.1522.102/config/config_handler_test.cc --- mozc-1.11.1502.102/config/config_handler_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/config/config_handler_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -169,7 +169,7 @@ {false, Testcase::DO_NOT_IMPOSE, false}, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestcases); ++i) { + for (size_t i = 0; i < arraysize(kTestcases); ++i) { const bool stored_config_value = kTestcases[i].stored_config_value; const bool expected = kTestcases[i].expected_value; @@ -321,8 +321,8 @@ {"<>=+-/*", config::Config::FULL_WIDTH, config::Config::LAST_FORM}, {"?!", config::Config::FULL_WIDTH, config::Config::LAST_FORM}, }; - EXPECT_EQ(output.character_form_rules_size(), ARRAYSIZE_UNSAFE(testcases)); - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(testcases); ++i) { + EXPECT_EQ(output.character_form_rules_size(), arraysize(testcases)); + for (size_t i = 0; i < arraysize(testcases); ++i) { EXPECT_EQ(output.character_form_rules(i).group(), testcases[i].group); EXPECT_EQ(output.character_form_rules(i).preedit_character_form(), diff -Nru mozc-1.11.1502.102/converter/converter.cc mozc-1.11.1522.102/converter/converter.cc --- mozc-1.11.1502.102/converter/converter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter.cc 2013-08-28 05:26:12.000000000 +0000 @@ -201,9 +201,7 @@ } SetKey(segments, conversion_key); segments->set_request_type(Segments::CONVERSION); - if (!immutable_converter_->ConvertForRequest(request, segments)) { - return false; - } + immutable_converter_->ConvertForRequest(request, segments); RewriteAndSuppressCandidates(request, segments); return IsValidSegments(request, *segments); } @@ -213,9 +211,7 @@ SetKey(segments, key); segments->set_request_type(Segments::CONVERSION); const ConversionRequest default_request; - if (!immutable_converter_->ConvertForRequest(default_request, segments)) { - return false; - } + immutable_converter_->ConvertForRequest(default_request, 
segments); RewriteAndSuppressCandidates(default_request, segments); return IsValidSegments(default_request, *segments); } @@ -307,9 +303,7 @@ DCHECK_EQ(key, segments->conversion_segment(0).key()); segments->set_request_type(request_type); - if (!predictor_->PredictForRequest(request, segments)) { - return false; - } + predictor_->PredictForRequest(request, segments); RewriteAndSuppressCandidates(request, segments); if (request_type == Segments::PARTIAL_SUGGESTION || request_type == Segments::PARTIAL_PREDICTION) { @@ -404,7 +398,8 @@ Segments::PARTIAL_PREDICTION, segments); } -bool ConverterImpl::FinishConversion(Segments *segments) const { +bool ConverterImpl::FinishConversion(const ConversionRequest &request, + Segments *segments) const { CommitUsageStats(segments, segments->history_segments_size(), segments->conversion_segments_size()); @@ -425,7 +420,7 @@ } segments->clear_revert_entries(); - rewriter_->Finish(segments); + rewriter_->Finish(request, segments); predictor_->Finish(segments); // Remove the front segments except for some segments which will be @@ -699,12 +694,8 @@ segments->set_resized(true); - if (!immutable_converter_->ConvertForRequest(request, segments)) { - return false; - } - + immutable_converter_->ConvertForRequest(request, segments); RewriteAndSuppressCandidates(request, segments); - return true; } @@ -762,12 +753,8 @@ segments->set_resized(true); - if (!immutable_converter_->ConvertForRequest(request, segments)) { - return false; - } - + immutable_converter_->ConvertForRequest(request, segments); RewriteAndSuppressCandidates(request, segments); - return true; } diff -Nru mozc-1.11.1502.102/converter/converter.h mozc-1.11.1522.102/converter/converter.h --- mozc-1.11.1502.102/converter/converter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter.h 2013-08-28 05:26:12.000000000 +0000 @@ -87,7 +87,8 @@ virtual bool StartPartialSuggestion(Segments *segments, const string &key) const; - virtual bool FinishConversion(Segments *segments) const; + virtual bool FinishConversion(const ConversionRequest &request, + Segments *segments) const; virtual bool CancelConversion(Segments *segments) const; virtual bool ResetConversion(Segments *segments) const; virtual bool RevertConversion(Segments *segments) const; diff -Nru mozc-1.11.1502.102/converter/converter_interface.h mozc-1.11.1522.102/converter/converter_interface.h --- mozc-1.11.1502.102/converter/converter_interface.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_interface.h 2013-08-28 05:26:12.000000000 +0000 @@ -93,7 +93,8 @@ // Finish conversion. // Segments are cleared. 
Context is not cleared - virtual bool FinishConversion(Segments *segments) const = 0; + virtual bool FinishConversion(const ConversionRequest &request, + Segments *segments) const = 0; // Clear segments and keep the context virtual bool CancelConversion(Segments *segments) const = 0; diff -Nru mozc-1.11.1502.102/converter/converter_main.cc mozc-1.11.1522.102/converter/converter_main.cc --- mozc-1.11.1502.102/converter/converter_main.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_main.cc 2013-08-28 05:26:12.000000000 +0000 @@ -307,7 +307,10 @@ return converter.StartSuggestionForRequest(conversion_request, segments); } } else if (func == "finishconversion" || func == "finish") { - return converter.FinishConversion(segments); + Table table; + Composer composer(&table, &request); + ConversionRequest conversion_request(&composer, &request); + return converter.FinishConversion(conversion_request, segments); } else if (func == "resetconversion" || func == "reset") { return converter.ResetConversion(segments); } else if (func == "cancelconversion" || func == "cancel") { @@ -324,7 +327,10 @@ if (!(converter.CommitSegmentValue(segments, i, 0))) return false; } } - return converter.FinishConversion(segments); + Table table; + Composer composer(&table, &request); + ConversionRequest conversion_request(&composer, &request); + return converter.FinishConversion(conversion_request, segments); } else if (func == "focussegmentvalue" || func == "focus") { CHECK_FIELDS_LENGTH(3); return converter.FocusSegmentValue(segments, diff -Nru mozc-1.11.1502.102/converter/converter_mock.cc mozc-1.11.1522.102/converter/converter_mock.cc --- mozc-1.11.1502.102/converter/converter_mock.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_mock.cc 2013-08-28 05:26:12.000000000 +0000 @@ -476,7 +476,8 @@ } } -bool ConverterMock::FinishConversion(Segments *segments) const { +bool ConverterMock::FinishConversion(const ConversionRequest &request, + Segments *segments) const { VLOG(2) << "mock function: FinishConversion"; finishconversion_input_.segments.CopyFrom(*segments); diff -Nru mozc-1.11.1502.102/converter/converter_mock.h mozc-1.11.1522.102/converter/converter_mock.h --- mozc-1.11.1502.102/converter/converter_mock.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_mock.h 2013-08-28 05:26:12.000000000 +0000 @@ -130,7 +130,8 @@ Segments *segments) const; bool StartPartialSuggestion(Segments *segments, const string &key) const; - bool FinishConversion(Segments *segments) const; + bool FinishConversion(const ConversionRequest &request, + Segments *segments) const; bool CancelConversion(Segments *segments) const; bool ResetConversion(Segments *segments) const; bool RevertConversion(Segments *segments) const; diff -Nru mozc-1.11.1502.102/converter/converter_mock_test.cc mozc-1.11.1522.102/converter/converter_mock_test.cc --- mozc-1.11.1502.102/converter/converter_mock_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_mock_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -149,7 +149,8 @@ Segments output, expect; SetSegments(&expect, "FinishConversion"); GetMock()->SetFinishConversion(&expect, true); - EXPECT_TRUE(converter->FinishConversion(&output)); + const ConversionRequest default_request; + EXPECT_TRUE(converter->FinishConversion(default_request, &output)); EXPECT_EQ(expect.DebugString(), output.DebugString()); } @@ -350,7 +351,8 @@ Segments input; SetSegments(&input, "FinishConversion"); const 
string input_str = input.DebugString(); - converter->FinishConversion(&input); + ConversionRequest default_request; + converter->FinishConversion(default_request, &input); Segments last_segment; GetMock()->GetFinishConversion(&last_segment); diff -Nru mozc-1.11.1502.102/converter/converter_test.cc mozc-1.11.1522.102/converter/converter_test.cc --- mozc-1.11.1502.102/converter/converter_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/converter/converter_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -327,8 +327,9 @@ } } EXPECT_EQ(first_value, converted) << first_value; - EXPECT_TRUE(converter->FinishConversion(&segments)); - + // TODO(team): Use StartConversionForRequest instead of StartConversion. + const ConversionRequest default_request; + EXPECT_TRUE(converter->FinishConversion(default_request, &segments)); EXPECT_TRUE(converter->StartConversion(&segments, second_key)); EXPECT_EQ(segment_num + 1, segments.segments_size()); @@ -919,7 +920,10 @@ EXPECT_EQ(1, segments.conversion_segments_size()); EXPECT_TRUE(converter->CommitSegmentValue( &segments, 0, 0)); - EXPECT_TRUE(converter->FinishConversion(&segments)); + + // TODO(team): Use StartConversionForRequest instead of StartConversion. + const ConversionRequest default_request; + EXPECT_TRUE(converter->FinishConversion(default_request, &segments)); EXPECT_TRUE(converter->StartConversion( &segments, kKey2)); @@ -1180,7 +1184,7 @@ // Note that TearDown method will reset above stubs. - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(test_data_list); ++i) { + for (size_t i = 0; i < arraysize(test_data_list); ++i) { const TestData &test_data = test_data_list[i]; Segments segments; segments.set_request_type(test_data.request_type_); diff -Nru mozc-1.11.1502.102/converter/immutable_converter.cc mozc-1.11.1522.102/converter/immutable_converter.cc --- mozc-1.11.1502.102/converter/immutable_converter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/immutable_converter.cc 2013-08-28 05:26:12.000000000 +0000 @@ -38,7 +38,6 @@ #include #include "base/base.h" -#include "base/util.h" #include "base/logging.h" #include "base/stl_util.h" #include "base/util.h" @@ -1621,6 +1620,9 @@ InsertCandidates(segments, lattice, group, max_candidates_size, ONLY_FIRST_SEGMENT); + // Note that inserted candidates might consume the entire key. + // e.g. key: "なのは", value: "ナノは" + // Erase them later. if (segments->conversion_segment(0).candidates_size() <= only_first_segment_candidate_pos) { return; @@ -1642,17 +1644,25 @@ (first_segment.candidate(0).wcost - first_segment.candidate(only_first_segment_candidate_pos).wcost)); for (size_t i = only_first_segment_candidate_pos; - i < first_segment.candidates_size(); - ++i) { + i < first_segment.candidates_size();) { static const int kOnlyFirstSegmentOffset = 300; Segment::Candidate *candidate = segments->mutable_conversion_segment(0)->mutable_candidate(i); + // If the size of candidate's key is greater than or + // equal to 1st segment's key, + // it means that the result consumes the entire key. + // Such results are not appropriate for PARTIALLY_KEY_CONSUMED so erase it. 
+ if (candidate->key.size() >= first_segment.key().size()) { + segments->mutable_conversion_segment(0)->erase_candidate(i); + continue; + } candidate->cost += (base_cost_diff + kOnlyFirstSegmentOffset); candidate->wcost += (base_wcost_diff + kOnlyFirstSegmentOffset); DCHECK(!(candidate->attributes & Segment::Candidate::PARTIALLY_KEY_CONSUMED)); candidate->attributes |= Segment::Candidate::PARTIALLY_KEY_CONSUMED; candidate->consumed_key_size = Util::CharsLen(candidate->key); + ++i; } } diff -Nru mozc-1.11.1502.102/converter/immutable_converter_test.cc mozc-1.11.1522.102/converter/immutable_converter_test.cc --- mozc-1.11.1502.102/converter/immutable_converter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/converter/immutable_converter_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -571,4 +571,42 @@ EXPECT_FALSE(AutoPartialSuggestionTestHelper(conversion_request)); } +TEST_F(ImmutableConverterTest, AutoPartialSuggestionForSingleSegment) { + const commands::Request request; + ConversionRequest conversion_request(NULL, &request); + conversion_request.set_create_partial_candidates(true); + + scoped_ptr data_and_converter( + new MockDataAndImmutableConverter); + const string kRequestKeys[] = { + // "たかまち" + "\xE3\x81\x9F\xE3\x81\x8B\xE3\x81\xBE\xE3\x81\xA1", + // "なのは" + "\xE3\x81\xAA\xE3\x81\xAE\xE3\x81\xAF", + // "まほうしょうじょ" + "\xE3\x81\xBE\xE3\x81\xBB\xE3\x81\x86\xE3\x81\x97" + "\xE3\x82\x87\xE3\x81\x86\xE3\x81\x98\xE3\x82\x87", + }; + for (size_t testcase = 0; testcase < arraysize(kRequestKeys); ++testcase) { + Segments segments; + segments.set_request_type(Segments::PREDICTION); + segments.set_max_prediction_candidates_size(10); + Segment *segment = segments.add_segment(); + segment->set_key(kRequestKeys[testcase]); + EXPECT_TRUE(data_and_converter->GetConverter()-> + ConvertForRequest(conversion_request, &segments)); + EXPECT_EQ(1, segments.conversion_segments_size()); + EXPECT_LT(0, segments.segment(0).candidates_size()); + const string &segment_key = segments.segment(0).key(); + for (size_t i = 0; i < segments.segment(0).candidates_size(); ++i) { + const Segment::Candidate &cand = segments.segment(0).candidate(i); + if (cand.attributes & Segment::Candidate::PARTIALLY_KEY_CONSUMED) { + EXPECT_LT(cand.key.size(), segment_key.size()) << cand.DebugString(); + } else { + EXPECT_GE(cand.key.size(), segment_key.size()) << cand.DebugString(); + } + } + } +} + } // namespace mozc diff -Nru mozc-1.11.1502.102/data/pinyin/english_dictionary.txt mozc-1.11.1522.102/data/pinyin/english_dictionary.txt --- mozc-1.11.1502.102/data/pinyin/english_dictionary.txt 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/data/pinyin/english_dictionary.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,22493 +0,0 @@ -the -of -to -and -a -in -for -is -on -by -with -that -this -you -not -or -are -at -it -as -be -from -your -was -have -an -more -all -can -views -will -has -but -we -do -my -new -one -up -about -which -ago -our -out -their -they -time -if -like -other -were -any -been -his -may -me -no -so -also -he -now -when -would -i -only -some -than -there -us -use -what -who -first -get -had -her -information -into -its -just -free -here -home -list -people -see -them -these -years -am -most -over -two -using -year -how -make -need -note -post -should -site -such -then -used -work -after -does -good -know -page -very -video -well -available -between -could -data -day -said -through -videos -where -back -comment -days -did -different -each -many -months -number -pm -she -way -add -because -best 
-de -email -find -help -high -made -much -off -online -right -same -system -under -want -before -being -business -comments -even -found -go -great -must -name -part -search -set -take -think -those -based -both -close -full -him -life -own -service -top -view -without -world -account -content -down -during -image -last -less -long -love -order -real -really -results -review -rights -show -something -still -while -within -added -area -around -case -code -download -et -following -line -look -next -per -please -sign -since -size -support -three -too -url -address -al -another -better -details -every -file -hours -including -little -message -old -place -price -product -products -services -via -water -website -again -below -buy -change -company -control -design -end -family -few -form -game -going -got -group -left -level -link -model -never -performance -photo -posts -read -required -reviews -say -small -spam -state -times -type -value -web -above -always -big -blog -come -contact -experience -give -given -local -looking -mail -member -might -music -news -playlist -point -power -problem -process -provide -public -quality -queue -related -reserved -sale -says -school -sex -study -today -user -watch -week -access -against -black -car -check -children -click -current -date -important -include -info -large -low -man -non -open -password -person -photos -profile -property -provided -question -second -share -suggestions -sure -things -version -why -able -active -article -book -button -care -community -development -friends -health -item -items -lot -members -play -points -possible -program -questions -rate -report -side -stars -start -terms -text -try -yet -already -analysis -away -body -call -cost -estate -example -forum -girl -hard -having -hot -images -job -keep -least -light -live -making -market -money -month -others -phone -policy -present -put -range -research -shall -single -software -students -team -test -until -white -women -working -age -called -city -class -common -complete -country -course -create -daily -done -due -easy -either -ever -features -feel -field -files -food -four -friend -function -getting -hand -house -known -let -load -main -management -means -miles -minutes -near -needs -paper -porn -private -project -result -room -run -several -shown -similar -sites -source -specific -stock -store -subject -systems -thing -adding -application -art -become -bit -cells -changes -computer -conditions -empty -energy -far -games -general -human -latest -law -links -makes -map -men -method -million -network -night -offer -often -optional -original -party -patients -personal -problems -reply -section -seen -series -short -shows -social -special -star -though -together -total -users -visit -weekly -action -activity -air -along -anyone -cell -doing -effect -effects -enough -enter -event -face -fact -further -government -head -higher -history -issue -key -later -location -logo -material -mean -mobile -monthly -movie -offers -once -past -period -posted -send -space -story -style -taken -term -thought -title -treatment -upon -various -weeks -whether -young -across -actually -among -amount -anything -areas -box -bug -card -child -color -credit -currently -early -en -events -film -five -follow -front -fun -future -gets -girls -groups -however -improved -increase -industry -interest -issues -likes -major -nice -office -options -pages -parts -pay -picture -plan -position -provides -published -receive -red -response -return -save -self -shipping -simple -standard -technology -third -training -true 
-values -whole -words -works -addition -additional -answer -articles -associated -average -bad -believe -browser -building -came -cases -comes -companies -cover -error -feedback -growth -half -heart -included -includes -individual -jobs -join -kind -la -language -learn -levels -lower -media -min -natural -pictures -prices -rather -re -received -risk -solution -someone -started -stop -table -teen -tell -vs -weight -went -window -activities -agree -almost -approach -ask -author -beautiful -books -born -cause -certain -clear -collection -connect -created -customer -customers -designed -education -eg -else -especially -everything -favorite -financial -hair -held -idea -knowledge -log -loss -mind -models -needed -oil -particular -popular -potential -pretty -previous -probably -production -professional -reading -reason -recent -record -role -sales -season -security -select -significant -song -soon -speed -staff -structure -studies -surface -takes -taking -tax -took -turn -usually -word -written -yes -allow -baby -built -channel -choose -coming -condition -considered -copy -costs -death -description -directly -domain -drive -everyone -fast -final -helpful -hope -inside -international -land -learning -leave -length -liked -likely -listing -living -located -logged -longer -looks -lyrics -matter -medical -meet -mm -move -movies -necessary -nothing -null -perfect -playback -player -programs -properties -purchase -quite -rates -region -reported -request -running -screen -seems -sent -sound -step -strong -tags -tools -topic -trying -types -understand -unit -wide -write -according -ad -applications -asked -ass -blood -blue -board -center -characters -cheap -continue -countries -couple -deal -delivery -described -developed -difference -direct -dislikes -display -edit -effective -engine -entry -environment -equipment -feature -feed -feet -gives -green -guide -happy -helps -hit -hour -ideas -increased -insurance -interesting -kids -larger -lead -legal -limited -listed -machine -methods -multiple -normal -notes -numbers -obtained -outside -percent -plus -positive -practice -pressure -reference -register -registered -release -requirements -resources -selected -server -simply -status -student -temperature -th -thanks -told -track -trade -travel -unique -update -updated -wait -wanted -woman -wrote -ability -advice -allows -apply -appropriate -background -base -basis -behind -block -bring -build -choice -cock -commercial -compared -consider -court -cut -decision -device -digital -disease -distribution -double -easily -enjoy -ensure -evidence -expected -fine -fire -flow -forward -gas -goes -guy -guys -ie -improve -instead -internet -involved -late -lines -login -lost -materials -national -nature -owner -playing -pre -print -privacy -protein -purpose -pussy -rating -ready -recently -relationship -reports -road -sample -selection -sexy -shop -six -skin -states -stay -stuff -talk -thread -tool -updates -ways -wife -win -wish -writing -yourself -album -appear -band -bed -camera -charge -co -com -complex -construction -contains -cool -database -defined -degree -discussion -door -economic -entire -existing -factors -fashion -fit -focus -fuck -global -greater -ground -highly -hold -hotel -huge -icon -impact -income -interested -itself -largest -leading -match -maximum -meeting -memory -mode -names -observed -option -paid -payment -physical -piece -population -presented -primary -prior -projects -proposed -reblogged -records -remember -remove -require -requires -resource -responsible -rest -rules -safety -saw -sell 
-sense -sharing -shoes -shopping -skills -sold -son -sources -species -stage -therefore -tube -uses -valid -wall -wedding -worth -wrong -abuse -acid -agent -although -anal -answers -appears -applied -attention -audio -bar -basic -benefits -brand -cancer -cars -categories -category -changed -character -clean -college -completely -connection -correct -cross -deals -difficult -document -dog -dress -drug -employees -excellent -except -expression -extra -eyes -factor -force -format -fucking -fully -functions -giving -gold -hear -homes -independent -index -input -marketing -modern -mother -named -notice -object -official -ones -operation -output -pain -parents -patient -performed -phase -planning -plans -political -press -protection -providing -rated -respect -round -safe -scale -schools -seconds -seem -showing -solutions -starting -storage -success -supply -target -themselves -throughout -thus -tips -touch -town -tried -useful -variety -vehicle -volume -war -accept -actions -actual -ads -allowed -alone -amateur -amazing -annual -anti -became -bottom -break -capacity -cards -cart -cash -chance -client -closed -cm -communication -components -culture -damage -deep -default -determine -determined -develop -distance -driver -elements -enable -established -eye -fall -fan -female -figure -floor -former -forms -fucked -gave -gay -generally -gift -glass -hands -heard -heat -inch -individuals -library -listings -menu -messages -metal -mg -mi -minimum -moment -morning -myself -net -older -opportunity -organization -otherwise -pics -places -plant -players -police -presence -pro -produced -quick -quickly -quote -regarding -released -relevant -setting -settings -situation -sometimes -songs -stand -statement -stores -stories -summer -theory -thinking -traffic -transfer -tree -units -vote -whose -windows -worked -accounts -act -adult -bag -bank -beginning -boy -brought -capital -career -central -claim -connected -contract -copyright -critical -custom -dark -daughter -dead -decided -detailed -died -direction -downloads -drop -exchange -expand -external -fields -filter -fixed -followed -follows -frame -frequency -friendly -goal -highest -himself -host -hotels -id -im -initial -install -internal -investment -km -led -letter -lives -male -married -mass -maybe -negative -operations -opinion -opportunities -overall -owners -parties -pattern -played -poor -powered -processes -processing -produce -professionals -purposes -radio -recommend -reduce -regular -removed -replies -rock -saying -science -sets -ship -shot -showed -signal -sports -square -standards -steel -street -successful -tag -techniques -testing -topics -towards -traditional -trial -turned -understanding -unless -voice -wants -accepted -accuracy -admin -agents -agreement -animals -approximately -automatically -awesome -began -behavior -benefit -beyond -blonde -businesses -century -classes -clients -club -cold -completed -con -concept -containing -core -covered -devices -differences -directory -displayed -documents -earlier -edited -effort -exactly -false -fans -father -finally -firm -fish -forums -fresh -ft -fuel -funny -gallery -growing -identified -immediately -ing -interface -layer -limit -lol -looked -manager -mark -measure -medium -mentioned -met -middle -military -minute -moving -multi -newsletter -offered -operating -orders -package -panel -parameters -particularly -pass -path -perform -placed -race -ratings -ratio -ray -reach -reduced -rent -resolution -respectively -responsibility -ring -rule -samples -scene -score -shape -shared -signed 
-significantly -solid -steps -stream -suggest -supported -talking -technical -thank -theme -tits -tour -votes -walk -wear -websites -advanced -advantage -advertising -algorithm -alternative -animal -argument -artist -auto -avoid -balance -ball -battery -beach -beauty -becomes -begin -bought -calls -caused -classic -clearly -clinical -combination -command -comparison -component -constant -contain -context -count -cute -dance -delivered -demand -detail -di -die -discount -doctor -doi -driving -dry -eat -edge -edition -electronic -element -enabled -equal -etc -ex -facebook -facilities -failed -failure -families -fat -favorites -felt -fight -finding -foot -foreign -generation -gone -grade -happen -heavy -holiday -increasing -influence -installed -instructions -introduced -kitchen -knew -lack -license -lots -measured -measures -missing -movement -navigation -nearly -none -nor -nude -obtain -onto -opening -owned -pair -pick -plants -plays -pool -port -prevent -previously -procedure -profiles -reaction -reasons -relative -reviewed -runs -selling -separate -sequence -serious -serve -smaller -society -soft -sort -specified -straight -strategy -strength -stress -sub -submit -super -switch -tab -task -teacher -tests -treated -trip -upper -van -vector -viewed -waiting -weather -wood -workers -accessories -accurate -addresses -affect -amp -approved -assessment -attack -bedroom -boys -brain -budget -buying -cable -carried -carry -chain -characteristics -church -cialis -claims -clicking -compare -concentration -continued -counter -coverage -creating -cycle -definitely -definition -density -developing -economy -efforts -environmental -errors -essential -estimated -evaluation -exercise -expect -express -extremely -federal -feeling -fees -filled -finish -flat -football -gain -gene -generated -guarantee -guess -happens -healthy -height -hospital -husband -ice -identify -ii -intended -kept -laws -leather -loan -locations -maintain -maintenance -manual -mature -max -meaning -mine -miss -mix -mom -motion -motor -moved -networks -north -noted -occur -park -perhaps -permission -photography -pieces -plastic -policies -powerful -practices -prepared -procedures -proper -protect -random -recommended -reduction -registration -remain -repair -represent -respective -responses -rich -sea -sector -served -session -seven -shirt -silver -south -spend -spent -spring -station -submitted -supplied -survey -sweet -teaching -ten -torrent -variable -watching -weekend -welcome -winter -won -wonderful -achieve -agency -artists -assistance -authors -babe -beat -binding -birth -boots -calculated -causes -certainly -challenge -changing -charges -cities -codes -colors -column -combined -competition -consumer -contained -corresponding -courses -cum -del -dependent -depth -designs -director -discuss -discussed -discussions -district -drivers -drugs -du -easier -ed -efficiency -efficient -eight -electric -entered -episode -experienced -exposure -expressed -fair -famous -fee -finished -fix -flag -forget -formed -funds -garden -generic -goals -goods -grow -handle -happened -housing -implementation -importance -inches -increases -indicate -indicated -induced -installation -interests -interview -kg -knows -le -leads -lesbian -linear -literature -lose -loved -majority -manage -manner -master -mouth -noise -objects -offering -opens -organic -parameter -passed -patterns -persons -progress -properly -quotes -rear -recommendations -recorded -recovery -relatively -remains -resulting -rooms -saved -searches -seat -secure -seeing -seller -sizes 
-sleep -slightly -slow -sounds -spot -stated -string -structures -sun -teachers -technique -therapy -thoughts -threads -tickets -toward -trademarks -train -transport -trust -tv -un -username -variables -viewing -visitors -wild -wine -zero -zip -administration -adults -advance -affected -ahead -applicable -array -asian -aspects -assets -attempt -aware -bags -bike -birthday -blogs -bound -brother -brown -bus -cancel -carbon -cast -cat -centre -chat -chemical -coffee -collected -communities -conducted -conference -confirm -connections -consistent -contrast -controlled -corporate -creative -criteria -cultural -cup -da -debt -dedicated -define -delete -department -depending -derived -des -diet -directions -div -dynamic -earth -employee -employment -estimate -evening -examples -exist -experiences -experiments -explain -extended -extent -facility -facts -faster -fill -flash -formation -furniture -guest -guitar -hardcore -hardware -hate -hidden -hole -homepage -identity -injury -instance -interaction -journal -killed -kit -leader -leaves -linked -listen -loves -magazine -marked -markets -matrix -mb -mid -ml -modified -monitoring -na -ok -opened -organizations -pack -parent -partner -partners -paste -pink -plate -platform -pop -portion -posting -prescription -profit -provider -reached -reality -receiving -regions -relation -relationships -remote -requests -resistance -restaurant -returned -returns -ride -rise -schedule -scheme -sheet -shirts -sister -soil -sorry -speak -starts -statistics -strategies -styles -subjects -successfully -suggested -suitable -surgery -symptoms -teams -tested -thousands -tight -trees -truly -truth -upload -virus -visual -warm -waste -west -whatever -wheel -whom -wind -www -yellow -zone -aid -announced -asking -assume -attached -authority -availability -award -biggest -bill -billion -boat -brands -broken -campaign -cent -channels -chapter -charged -chosen -clip -clothing -comfortable -comprehensive -configuration -contents -continues -controls -county -covers -cream -dates -decide -decisions -der -describe -distributed -dose -dream -drink -duty -eating -editor -educational -el -ended -ends -engineering -entertainment -equation -equivalent -exact -exclusive -experimental -experts -extension -featured -films -flight -font -forces -framework -functional -gear -guidelines -handling -hearing -helped -helping -holding -hosting -houses -ideal -improvement -indicates -industrial -iron -issued -joined -joint -keyword -kill -label -laptop -learned -leaving -letters -lights -lists -loading -managed -manufacturer -maps -mechanism -mental -milk -mini -mission -mixed -module -monitor -mostly -mouse -muscle -naked -nation -newest -novel -occurred -opinions -outdoor -participants -peace -peak -positions -possibly -presents -president -preview -promote -proof -protected -publication -purchased -raised -reader -reasonable -recipes -regional -reliable -remaining -removal -replacement -reporting -represents -restaurants -root -se -searching -secondary -sections -sexual -shares -signs -skip -snow -specifically -speech -spread -supports -television -template -tissue -totally -transmission -truck -twitter -unknown -vary -vehicles -virtual -vol -voltage -wallpaper -wave -wet -width -winning -wireless -yesterday -absolutely -achieved -agencies -agreed -alcohol -alerts -angle -anywhere -appearance -appeared -architecture -assist -association -ba -bathroom -battle -becoming -beta -bid -calendar -circuit -circumstances -clothes -coach -colour -concerned -concerns -conduct -continuous -corner -crazy 
-crime -degrees -deliver -depends -designer -detection -dick -dinner -directed -disabled -dogs -doubt -draw -east -es -estimates -evaluate -exists -expensive -expert -faith -fear -featuring -figures -filed -findings -flowers -formats -funding -gifts -graphics -greatest -guests -gun -historical -holds -horse -infection -int -integrated -interior -island -keeping -latter -leaders -liquid -lived -loans -lowest -mainly -marriage -measurement -measurements -medicine -membership -mention -mins -nuclear -numerous -occurs -oh -oz -paint -percentage -personally -plane -practical -premium -presentation -proteins -pump -quantity -rare -readers -refer -references -referred -reflect -rental -replace -residents -retail -river -roll -route -rss -salt -script -sec -secret -seemed -senior -sensitive -serving -shift -smooth -standing -statements -stone -studied -studio -sufficient -sugar -suggests -summary -tank -taxes -technologies -trading -transition -treat -trouble -turns -twice -typical -typically -unable -university -upgrade -urban -vintage -visible -vision -walking -warranty -wire -academic -accordance -activation -advertisement -alert -allowing -amounts -appreciate -approaches -approval -archive -arm -arms -assigned -attachments -automatic -banks -bath -bone -breast -brings -brunette -buildings -calling -catch -caught -challenges -civil -cleaning -climate -clips -comfort -competitive -concern -confidence -considering -consists -consumption -contributions -crack -creation -dating -decrease -delay -despite -detected -dollars -domestic -dresses -effectively -electrical -em -emergency -employed -engines -entries -experiment -extensive -extreme -farm -feat -firms -fly -focused -folder -forced -freedom -frequently -fruit -fund -galleries -generate -hi -hits -inappropriate -indeed -institutions -investigation -isolated -kinds -lady -languages -laser -launch -leadership -legs -lets -licensed -limits -lovely -luck -maintained -marks -matching -meant -mice -minor -mortgage -native -neck -node -officer -oldest -oral -ordered -origin -painting -papers -parallel -parking -payments -pc -pdf -performing -periods -phones -possibility -poster -prefer -principles -printed -programming -prove -pull -pure -putting -quarter -query -rain -raise -recipe -recognition -recognized -recording -regulation -relations -religious -representation -requirement -revealed -row -scientific -scores -seek -seeking -sensor -serial -servers -ships -sides -simulation -sit -sitting -smart -solar -split -sport -st -stable -stick -stored -strip -strongly -subscribe -suit -supplies -tape -tasks -taste -telephone -thin -till -tip -toys -tracks -translation -trends -usual -var -versions -versus -village -wearing -whereas -wonder -worldwide -writer -yearly -youth -absence -abstract -acceptance -accident -acres -ages -anyway -apartment -arts -assembly -audience -bear -begins -blocks -bodies -border -bottle -bridge -bright -broad -busy -cake -cant -capable -chair -chart -checked -checking -chicken -chief -choices -clock -closely -cloud -collect -commented -committed -communications -compatible -compliance -composition -concentrations -concerning -confirmed -consideration -consumers -contacts -contribute -contribution -conversation -conversion -cooking -criminal -crisis -curve -cutting -defense -demonstrated -desire -desired -desktop -destination -diagnosis -diameter -discover -discovered -disk -division -documentation -doors -drawing -driven -dual -duration -enjoyed -establish -excited -exciting -executive -existence -expansion -expertise 
-explained -explore -exposed -ext -fabric -factory -fail -fantastic -fishing -fitness -fluid -forest -formal -formula -forth -fourth -genes -golf -gonna -google -grant -grown -hell -hide -illegal -import -informed -inner -integration -iphone -jewelry -keyboard -keywords -kid -king -labor -lbs -lighting -machines -magnetic -manufacturing -matches -matters -mixture -nearby -nine -nm -normally -noticed -objective -obvious -officers -officials -operator -optical -originally -partial -participate -perspective -pic -pin -plasma -politics -porno -preparation -primarily -principal -probability -programme -prohibited -proud -providers -push -ran -refers -regard -regulations -releases -replaced -representative -represented -requested -respond -revenue -savings -sending -setup -severe -shit -shoot -shower -soul -span -speaking -spectrum -spending -spirit -stability -static -stopped -subscribers -sum -suppliers -supporting -tables -tattoo -tea -tech -tend -thick -titles -trademark -trailer -transaction -ultimate -usage -valuable -variation -verify -violence -visiting -wallpapers -willing -worry -worst -yeah -yield -absolute -acting -acts -adopted -aim -aircraft -airport -aka -apart -appeal -arrived -assumed -attacks -attend -awards -bands -bars -bedrooms -bigger -boards -boot -boxes -brief -buyer -bytes -candidate -cap -capture -certified -cheese -chocolate -cited -closer -commitment -committee -computers -concepts -concrete -conflict -consent -continuing -copies -correctly -cotton -coupons -crash -credits -crystal -demonstrate -describes -dimensional -domains -earn -election -encourage -enhance -entirely -eventually -evolution -examination -finance -finds -flexible -flower -followers -girlfriend -glad -guaranteed -ha -herself -hosted -hundred -hundreds -identification -ill -immediate -implemented -impossible -infrastructure -inspired -intensity -interactions -introduction -involving -journey -judge -jump -keeps -lens -liability -lock -loop -massive -meat -mechanisms -membrane -missed -molecular -moves -necessarily -neighborhood -neither -nights -operate -optimal -orange -packages -para -participation -patch -paying -permalink -permanent -pet -plenty -plug -preferred -pregnancy -pregnant -prepare -pricing -printing -protocol -provision -provisions -qualified -rank -raw -realize -receptor -relating -religion -residential -riding -risks -saving -scope -semi -seriously -shooting -signals -situations -smoking -solve -somewhat -stands -structural -subscription -subsequent -sucking -supplier -supposed -surrounding -talks -teach -teens -tells -terminal -thats -themes -tiny -tomorrow -tracking -trend -uk -understood -uploaded -utility -vacation -valve -vertical -void -von -walls -watches -weak -webcam -wheels -widely -winner -worse -ya -yours -yrs -accounting -afternoon -aged -ancient -android -apartments -army -aspect -attorney -attributes -authorities -awareness -axis -basket -beer -behalf -behaviour -believed -belt -bin -birds -bond -boobs -boundary -branch -breakfast -breaking -bringing -buffer -candidates -carefully -carrying -certificate -christmas -citation -citizens -coast -commonly -compensation -contemporary -contest -controller -conventional -convert -correlation -coupon -currency -dealer -debate -depend -determination -developers -dining -dirty -diseases -displays -distinct -divided -dollar -drawn -drinking -eligible -ending -english -enhanced -entering -equations -everyday -evil -examined -exception -expenses -export -faces -fairly -familiar -fault -favourite -feels -fell -fighting -flights 
-foods -forever -fucks -gender -genetic -graduate -granted -graph -graphic -greatly -guidance -guides -hat -heads -heating -holes -holidays -indian -indicating -instrument -instruments -interact -interactive -involves -justice -keys -leg -les -lessons -lies -listening -liver -lucky -lunch -luxury -magic -manufacturers -massage -mechanical -meetings -mile -millions -mountain -musical -newly -nodes -observations -operated -opposite -organized -outcome -outstanding -oxygen -par -paragraph -particles -perfectly -personnel -picked -pipe -plain -planned -pleasure -plot -portfolio -pounds -powder -principle -priority -producing -promotion -rapid -reducing -relief -reserve -reverse -roof -rose -rubber -rural -sad -satisfaction -satisfied -scan -scenes -shops -sick -skill -sky -slowly -slut -smile -spaces -specifications -sponsored -stages -stats -strain -stupid -substantial -superior -survival -symbol -tagged -telling -thermal -throw -tone -tonight -toy -trained -tramadol -transactions -transportation -turning -twenty -uniform -upcoming -velocity -verified -warning -western -wholesale -yards -youtube -accessible -actor -actress -addressed -adds -affiliated -affordable -algorithms -analyses -anonymous -answered -anymore -apparently -apple -applies -applying -atmosphere -attractive -auction -authorized -basketball -bass -beam -beds -bird -bonus -buyers -cam -camp -campus -capabilities -carrier -causing -chronic -classification -classified -concert -conclusion -constitutes -constructed -cookies -covering -creates -crew -cruise -dancing -dangerous -dealing -declared -decreased -deleted -differ -dimensions -disc -discounts -downloaded -draft -dvd -earned -effectiveness -electron -emails -emotional -entitled -equity -evaluated -exam -excess -execution -explanation -extend -facial -facing -fake -fellow -fiber -filters -finger -foundation -frames -fraud -functionality -fundamental -gap -god -gorgeous -grid -hence -hip -horny -household -hurt -identical -iii -imagine -implement -improvements -improving -innovative -inquiry -inspection -instant -instruction -interpretation -investigate -ion -jacket -knowing -laboratory -ladies -lake -landscape -launched -layers -layout -legislation -lie -lift -loaded -logic -losses -managers -meal -meets -mirror -modeling -modules -moments -objectives -obviously -offensive -offices -oriented -outcomes -partnership -passing -permit -pocket -printer -proceed -promise -proposal -quiet -racing -radiation -rapidly -rat -recognize -regardless -regularly -remained -researchers -reset -resistant -restricted -resulted -rice -rings -roles -sand -scheduled -seats -seed -segment -sensitivity -separated -serves -sessions -shaped -shell -shock -shots -shoulder -signature -sin -singing -sized -slide -smoke -sole -solo -somewhere -southern -speaker -speakers -stations -statistical -strategic -suck -sucks -surprise -tabs -tall -taught -teeth -temperatures -threshold -ticket -tough -treatments -union -visited -visits -wanna -wins -wondering -writers -writes -younger -zoom -acceptable -acquired -acquisition -activated -acute -adequate -administrative -affiliates -albums -alive -aluminum -analyzed -anime -apparent -appointed -appointment -arrested -assess -attended -backup -basically -bearing -beats -bet -biological -bitch -bits -blank -boss -boyfriend -broadcast -browse -browsing -bulk -buttons -cache -careful -celebrity -centers -chest -chick -chip -circle -classical -classroom -cluster -coat -commenting -completion -comply -compounds -computing -consequences -constantly -contracts 
-convenient -courts -cuts -dad -decades -delicious -depression -desk -determining -diabetes -diagram -diamond -difficulty -diverse -diversity -doctors -dreams -drives -dropped -drops -dust -duties -dynamics -ear -ease -editing -eggs -employer -enables -engaged -entity -equipped -era -examine -exit -expectations -extract -falls -feeds -fits -flying -folks -fraction -gaming -garage -generator -glasses -grand -grey -happening -hire -holder -honest -honor -hourly -html -humans -incident -industries -infected -ingredients -injection -insert -institution -interval -interviews -introduce -inventory -investigated -investors -japanese -judgment -kbps -kick -largely -lawyer -lay -league -lifestyle -losing -loving -mount -mounted -narrow -newspaper -northern -nursing -observation -ongoing -ordinary -ourselves -overview -ownership -packaging -pants -passion -phrase -physician -pixels -plates -plugin -populations -potentially -powers -prime -prints -prison -processor -proved -proven -publish -pulled -rats -reactions -realized -reblog -receipt -receiver -regulatory -repeat -restrictions -rough -satellite -scroll -seeds -sentence -sequences -serum -shipped -shut -significance -singer -slideshow -soccer -spatial -spin -stainless -strange -streets -supra -surfaces -syndrome -synthesis -talent -targets -temporary -theoretical -thousand -threat -transferred -trials -tumor -tutorial -underlying -ups -violation -watched -weapons -whenever -wow -yard -accommodation -adjust -advantages -adventure -afford -amino -arrive -assessed -asset -attempts -attitude -avatar -awarded -baseball -belief -blow -booking -boost -bowl -bread -broke -bunch -burn -burning -busty -butter -calculate -calculation -cameras -cartoon -casino -certification -characteristic -citations -claimed -closing -clubs -collections -compact -complexity -compound -considerable -constraints -cooling -cooperation -copper -crowd -curves -customs -dated -decade -deck -decline -demands -deposit -designated -detect -dictionary -dildo -discharge -discovery -disorder -disorders -dot -drama -egg -electricity -elsewhere -emissions -emphasis -enemy -enforcement -enlarge -enterprise -environments -equally -escape -everywhere -faculty -falling -fantasy -favor -fax -feeding -feelings -festival -fewer -fingers -finite -fold -forgot -founded -gate -gel -giant -grain -grew -healthcare -hearts -hentai -herein -hey -hoping -horizontal -hrs -imaging -inc -increasingly -incredible -indexed -initially -injuries -innovation -insight -inspiration -intelligence -inter -intervention -invited -involve -involvement -jeans -joy -juice -killing -laid -leaf -lesson -limitations -loads -loose -mac -mailing -maintaining -managing -memories -merely -micro -minimal -modes -modify -monster -mood -moon -murder -nurse -ocean -operational -operators -optimization -ordering -orientation -outer -outlet -overnight -painted -pairs -participating -particle -permitted -personality -personalized -pharmacy -photographs -piano -pilot -planet -pleased -portable -posters -preference -preferences -prevention -producer -promoting -proportion -propose -publisher -purchasing -purple -puts -queries -receives -recommendation -reliability -repeated -representing -reputation -resident -routine -sampling -sauce -screening -sellers -sharp -sheets -shoe -sight -signing -solely -sought -specify -spiritual -storm -streaming -strike -stronger -stuck -stunning -substance -suite -surprised -suspension -swimming -tablet -tail -thickness -thumbnail -tie -timing -tradition -transformation -variations -verification 
-victory -wash -waves -wealth -widget -wishlist -wooden -worker -workshop -absorption -acids -adjacent -adjusted -adjustment -adobe -afraid -agricultural -alarm -alpha -american -appreciated -arguments -arrangements -arrival -assistant -assumption -bacteria -balls -batteries -belong -belongs -bikini -blind -breaks -bride -brilliant -brothers -butt -canvas -capability -cats -celebrate -cellular -challenging -characterized -choosing -chose -cleared -coefficient -collaboration -colleagues -columns -combine -comedy -communicate -complaint -complaints -composed -conservation -consulting -container -convenience -converted -cook -correction -costume -council -courtesy -crucial -cultures -damaged -deemed -dental -designers -differential -dimension -downloading -drag -dude -edges -elected -electronics -embedded -emission -engage -engagement -enjoying -entities -entrance -enzyme -episodes -equilibrium -essentially -everybody -exceed -exhibit -expense -explains -fails -fiction -fitted -fitting -flows -frequent -genuine -golden -grass -gray -guard -hall -hang -heels -historic -hop -hunting -hybrid -hydrogen -hypothesis -icons -identifiable -illness -illustration -implications -impressive -incorporated -incorrect -india -instantly -intake -intellectual -invention -investments -invite -joining -kernel -lab -labels -labour -lamp -laugh -lease -levitra -lifetime -los -mad -magnitude -maker -mapping -marine -mask -measuring -medication -meter -meters -mining -mod -molecules -moral -mortality -movements -naturally -networking -neutral -nose -occasion -offline -offset -opt -pan -panels -patent -philosophy -physics -placement -pointed -portal -pot -pour -poverty -predicted -probe -processed -produces -professor -publications -pulse -purchases -quoted -rail -ranging -recall -recipient -reform -regarded -remix -reserves -resort -restore -resume -retirement -reveal -reveals -rid -rising -roads -rocks -roots -salary -sat -scenario -scratch -seal -searched -separation -settlement -sing -sleeping -slip -sodium -specialist -specification -spectra -spoke -spots -spray -stem -stocks -stroke -studying -submission -suffering -suggestion -sustainable -thinks -tied -tired -tons -torrents -tubes -universal -unusual -vacuum -vast -vice -victim -victims -vital -vitro -voted -voting -walked -weird -wise -actors -adapter -administrator -admit -adverse -aims -animation -anniversary -antibody -anxiety -anybody -arrangement -arrow -aside -assignment -attribute -authorization -banking -banner -believes -bias -bonds -bookmark -brake -breath -calcium -calculations -canada -carpet -catalog -chamber -chances -charity -checks -chemicals -childhood -coal -cognitive -colored -combat -comic -comparing -compilation -complicated -compression -concluded -confirmation -connecting -consisting -console -constitute -construct -consultation -contributed -converter -cord -counts -coupled -craft -cylinder -damages -damn -danger -debut -decent -defendant -definitions -demo -denied -dependence -developer -developments -diesel -difficulties -directors -dis -disaster -dish -dominant -drinks -drunk -earnings -ebony -encouraged -establishment -estimation -exhibition -expanded -expects -explicit -faced -facilitate -farmers -fed -females -fifth -filling -financing -fiscal -flexibility -forming -french -gained -gauge -generating -glucose -governments -grounds -guilty -guns -gym -hanging -harder -harm -headed -heading -healing -hero -highlight -highlights -hill -homemade -horses -hosts -http -illustrated -immune -impacts -implies -independently -indicator 
-inform -injured -ink -instances -integral -intermediate -kiss -knee -lesbians -libraries -lips -locally -locate -locked -logical -lung -males -margin -math -median -merchant -mesh -metabolism -migration -mistake -moderate -modification -motorcycle -mph -museum -nations -notification -notified -notify -nutrition -odd -okay -opposed -organisation -oven -packed -passes -peer -pepper -performs -pets -photographer -pizza -poker -popularity -ports -precise -precision -productivity -profits -promotional -radius -raising -rarely -reduces -refund -regards -regime -regression -rely -removing -rentals -replied -representatives -requiring -residence -resolve -resolved -returning -revised -rod -satisfy -schemes -scientists -securities -sees -selecting -separately -shortly -sidebar -situated -slave -slot -soldiers -spare -steady -steam -stood -strap -stretch -strictly -substrate -suddenly -suggesting -suppose -suspect -templates -tennis -texture -thereby -ties -tires -toilet -tongue -tourism -tower -trace -trail -trans -tries -tune -ultimately -universe -varying -venue -vessel -volunteer -wake -waters -welfare -wing -wishes -wont -worn -abroad -accused -achievement -acoustic -admission -admitted -adoption -advertise -affects -agreements -aimed -alleged -amongst -analyze -angel -angry -appearing -archives -argue -arrest -artwork -asks -assumes -assuming -assumptions -attending -audit -automated -babes -babies -badge -balanced -ban -bay -beneficial -bills -blade -blocked -bold -boundaries -branches -brush -captured -casting -casual -ceiling -ceremony -charts -cheaper -cheapest -checkout -chemistry -cloth -coding -coefficients -collective -colours -commands -commerce -commission -comparable -computed -conditioning -confident -confused -corn -corporation -couples -crop -cry -curriculum -cycles -dear -delayed -den -deprecated -destroyed -diagnostic -dies -disability -discretion -downtown -dried -eastern -echo -elegant -eliminate -embed -emerging -engineer -ethnic -evident -exercises -exhaust -extraction -focuses -forecast -formerly -founder -frozen -funded -gains -gathered -gotten -grab -grants -gross -hairy -hardly -header -heaven -hook -hopes -hospitals -hottest -identifying -induction -influenced -initiative -inputs -installing -integrity -intense -invest -jack -javascript -jet -jurisdiction -kits -knife -labeled -lands -lane -las -liable -lightbox -linking -literally -lying -magazines -makeup -mate -meals -mediated -mens -meta -methodology -mixing -mpg -muscles -nail -namely -nearest -nerve -nervous -newsletters -nike -nitrogen -nobody -numerical -obligation -officially -owns -oxide -packet -pad -paintings -partially -passage -passenger -pen -penalty -pills -pitch -prayer -prediction -preparing -pride -prize -programmes -promotions -publicly -publishing -quit -rack -ranges -ranked -rape -reaching -reads -realistic -reception -recover -reflected -reflects -res -responded -responsibilities -retain -rides -robot -rolling -rotation -router -scored -screw -sectors -segments -shapes -shorter -simultaneously -skirt -sleeve -slight -solving -somebody -speaks -stack -stomach -stops -struggle -stylish -submitting -suffer -surgical -survive -switching -symbols -talked -targeted -tension -theater -theft -thereof -threesome -throat -timely -ton -tours -transform -trigger -trips -troops -trucks -twin -ultra -uncertainty -unlikely -unlimited -unlock -validity -variance -varied -vegetables -vinyl -virtually -viruses -visitor -vivo -volunteers -wanting -weapon -whilst -winners -worship -wrap -yields -abilities 
-accompanied -acquire -activate -adjustable -advertisements -advised -affairs -affecting -affiliate -aggregate -allocation -alter -alternatives -anytime -arise -arising -arranged -artificial -assay -atoms -attachment -attempted -authentic -bandwidth -barely -barrier -baseline -beef -beliefs -bikes -binary -blame -blend -boats -boolean -bottles -buddy -cabinet -candy -cardiac -celebration -census -chains -chairs -china -chips -circles -clearance -closure -coated -collecting -commit -compete -composite -connector -consequence -conservative -consistently -contractor -controlling -convinced -cookie -copyrighted -crap -cure -curious -databases -deaths -delivers -departments -descriptions -destroy -destruction -diffusion -disclosure -discrimination -distribute -distributions -divorce -doll -dramatic -drum -durable -ears -elections -empirical -employers -encyclopedia -ensuring -essay -exceptional -exclusively -executed -exp -fastest -favourites -fetish -filing -fired -flip -flood -flux -foam -focusing -footage -frequencies -fusion -gang -gathering -globe -handbags -heavily -honey -hood -hopefully -impressed -impression -inbox -incidence -inclusion -incredibly -independence -indicators -individually -indoor -infections -inflation -inhibition -initiatives -innocent -inquiries -insulin -intelligent -intent -intervals -jail -jazz -john -joke -journals -kinda -lace -lateral -lawyers -legend -letting -lingerie -loud -lover -makers -manufactured -megaupload -mere -mess -microsoft -mild -mineral -minister -mobility -moisture -mountains -mutual -neurons -newer -notifications -notion -obligations -observe -occurring -operates -opposition -organisations -organize -overcome -overseas -pace -paths -pathway -pays -pending -perceived -perception -performances -phases -phenomenon -photoshop -physically -physicians -pixel -placing -poetry -pole -poll -por -portrait -possession -possibilities -predict -preliminary -priced -prominent -protective -protest -province -psychological -pub -pursuant -quantitative -quantities -quantum -ranking -rapidshare -ratios -reaches -readily -reasonably -receptors -reflection -regulated -relate -repairs -replica -rescue -retired -robust -romantic -roughly -safely -samsung -scales -seasons -seeks -sensors -shadow -shaft -shortcuts -ski -smell -solved -somehow -sons -specialized -speeds -sponsor -stayed -staying -steering -stereo -stimulation -strains -subsequently -substantially -substitute -suffered -suicide -surely -surveys -swing -synthetic -tablets -tap -temporal -terrible -territory -texts -theatre -theories -thesis -thirty -tile -tire -tissues -tolerance -tournament -transmitted -transparent -traveling -trick -trim -trunk -ugg -unavailable -undefined -universities -unlike -uploading -utilize -valuation -vessels -visa -volumes -wars -washing -wherein -whore -wider -wiki -workout -zones -accepting -accessed -accomplished -accurately -acknowledge -actively -administered -adopt -agenda -aggressive -aging -agriculture -alternate -amended -analytical -announcement -antenna -antibodies -apparatus -appliances -applicant -approximate -argued -armed -associate -attendees -automotive -bankruptcy -bare -batch -baths -bench -besides -blogging -bondage -bones -bow -bubble -burden -buried -cables -caps -careers -char -citizen -clause -clay -clinic -clouds -clusters -coating -colleges -completing -comprising -conclude -conclusions -configure -configured -conjunction -considerably -consist -consultant -continuously -contrary -coordination -corrected -corresponds -cos -coupling -creampie 
-creator -cups -dealers -decreases -deeper -deeply -defects -delivering -democracy -deposits -describing -designing -detector -dial -dialog -dialogue -dirt -discipline -dishes -distances -districts -doc -donation -dragon -drain -drawings -dressed -editorial -editors -efficiently -elderly -enabling -encounter -engineers -enjoys -est -excessive -excluded -expanding -exploration -expressions -extends -extensions -exterior -extracted -fabulous -finest -floors -flour -forgotten -fruits -fur -garlic -genre -geometry -german -grace -grades -gradient -habitat -hack -handed -happiness -heated -hello -heritage -highway -hiring -holders -horror -humor -ignore -implementing -imposed -infant -ins -institutional -intensive -intention -interference -interracial -invalid -ions -islands -java -jersey -junior -jury -killer -kinase -lasting -lenses -licking -lovers -manually -manufacture -marker -markers -marketplace -matched -mediafire -minds -minimize -mistakes -mothers -mounting -multimedia -nasty -necklace -neighbors -neural -newspapers -nokia -notebook -nuts -obtaining -occasionally -occurrence -ons -organizational -panties -payable -peers -penis -phentermine -photograph -php -picks -pollution -polymer -pound -precisely -premiere -prescribed -presentations -proceedings -producers -progressive -proposals -prospective -psychology -qualify -queen -radical -recruitment -reddit -refused -registry -rejected -reload -remarkable -renal -reproduction -respondents -retro -revolution -reward -routes -routing -rows -royal -russian -salad -san -secretary -secured -settled -shelf -silent -silk -simulations -sink -sisters -skilled -slope -sofa -sophisticated -soup -spoken -stockings -streams -strict -structured -suspended -swap -talented -tattoos -tears -temple -tender -therapeutic -threats -thumb -tops -towns -toxic -transcription -translated -tricks -tropical -trusted -twelve -ugly -valley -varies -venture -violent -virgin -vitamin -wage -walks -washed -weights -wheat -wildlife -wings -wisdom -witness -worried -yoga -achieving -acre -adaptation -adapted -adidas -advertisers -advise -airline -alignment -alike -alloy -altered -amenities -amplitude -analog -angles -animated -announce -annually -anticipated -antique -approx -approximation -arrange -artistic -assessments -assisted -associations -attempting -attendance -attract -attributed -authentication -backed -baking -bang -banned -barrel -bases -beads -beans -begun -belly -beneath -bio -biology -bolt -bomb -booty -boring -bra -brass -brick -bundle -calculator -calories -captain -carriers -carries -celebrities -centres -ceramic -chinese -cholesterol -chrome -cigarette -circular -cleaner -cocks -coli -collapse -collar -colorful -combinations -comics -complexes -complications -comprises -conducting -considerations -consistency -consult -coordinate -coordinates -copied -correlated -correspondence -corruption -couch -counsel -counting -creativity -crimes -criticism -crown -customized -defines -degradation -delays -demonstrates -derivatives -desert -deserve -desirable -determines -deviation -devoted -disagree -disclose -discrete -discusses -disposal -dispute -distinction -documentary -documented -dosage -doses -drill -duplicate -dying -economics -efficacy -elementary -elevated -emotions -encoding -encountered -endorsed -enemies -ensures -entertaining -epic -establishing -euro -fancy -figured -flavor -floating -folk -ford -freely -gather -generations -glory -gloves -gotta -governance -gradually -grateful -grip -handled -handles -hats -hitting -hockey -holy -households -hub 
-hungry -immigration -indication -infinite -influences -informative -initiated -integer -interfaces -investigations -judges -kidney -landing -latina -lesions -lib -liberal -likelihood -limitation -locals -logging -logos -machinery -macro -mathematical -meaningful -merchandise -metals -minority -modifications -molecule -motivation -mutant -mystery -nylon -occasions -occupied -offense -olive -onset -organ -orgasm -orgy -ought -outdoors -outfit -packing -parks -partition -partly -permits -petition -picking -pie -pill -pins -platforms -pleasant -pockets -poem -polish -polls -pose -possess -pray -precious -premier -proceeds -profession -prompt -protocols -proxy -pumps -pursue -pushed -quest -rabbit -races -recovered -relatives -relax -relaxation -relevance -replacing -representations -reproduced -respiratory -restoration -retention -revenues -rush -saves -scanning -scenarios -scheduling -scoring -screens -sealed -selective -sells -sends -sheep -shorts -signaling -singles -slim -sms -soap -socks -sorted -spa -specialty -specimens -spectral -spell -sperm -spreading -springs -spy -stones -stopping -strings -strips -struck -subscribed -substances -sudden -suits -sunglasses -surrounded -systematic -tanks -technological -tho -thoroughly -thrown -tobacco -toe -toll -transfers -transformed -trap -treating -tribute -triple -underground -undertaken -unemployment -united -updating -usa -usb -utilized -validation -valued -vendor -vendors -viral -womens -workplace -workshops -worlds -wrapped -abundance -accommodate -accumulation -adaptive -addressing -adorable -alien -alongside -annoying -arbitrary -arc -arrives -artery -assault -ate -athletes -atom -atomic -attach -attitudes -attractions -bacterial -barriers -bears -behavioral -behaviors -bell -biography -bite -blackberry -blast -blocking -blogger -bracelet -breasts -breathing -breeding -briefly -broker -bugs -cal -calibration -calm -cargo -centered -centuries -certificates -champion -characterization -charger -charging -charm -churches -cinema -circuits -circulation -closest -clutch -coaching -coastal -coil -coin -combines -commentary -comparisons -competing -competitors -computation -concentrated -confusion -conscious -consecutive -constraint -contacted -contractors -contributing -crossing -crying -crystals -customize -daughters -decay -declaration -decor -decoration -def -defeat -defence -defend -defining -demonstration -derivative -diagnosed -differentiation -differently -digit -disappear -disclosed -discussing -dominated -don -donor -dozen -earthquake -easiest -ebook -elastic -elite -employ -enhancement -enormous -erotic -ethical -evaluating -exceeded -excluding -execute -exotic -experiencing -explaining -explicitly -exploring -extending -extraordinary -fallen -faq -fatty -fever -fingering -finishing -flame -float -franchise -friendship -functioning -funeral -gaining -gardens -giveaway -graduated -graphs -gravity -guided -habits -handy -headphones -heater -heel -hormone -hose -hunt -ignored -illustrate -imagination -implied -imply -importantly -imported -incorporate -indirect -inflammatory -inserted -insights -instructor -instrumental -interpreted -introduces -invitation -ipad -ipod -isolation -judicial -kills -kissing -knees -lately -launches -legacy -legally -lemon -lighter -lightweight -limiting -lined -lip -lounge -mandatory -medications -metric -ministry -momentum -motivated -mpeg -myspace -needle -neighbor -nonlinear -nurses -odds -optimized -org -outline -outlined -overhead -painful -palm -participant -participated -passengers -passive -payday 
-peaks -penetration -pension -peptide -periodic -peripheral -phosphate -photostream -plaintiff -plots -pointing -portions -ppm -praise -predictions -premises -presenting -preserve -presidential -pressing -productive -promised -promising -promo -protecting -pulling -punishment -pushing -puzzle -qualities -quiz -rally -randomly -rap -reasoning -recycling -ref -referring -refuse -relates -relay -residual -resorts -responding -restriction -retailers -retained -retrieve -revision -rewards -rolled -romance -sake -scam -scattering -sciences -scripts -seasonal -secrets -semester -settle -shade -shame -shed -shifts -shore -silence -silly -similarity -sitemap -skinny -slots -socket -solvent -sony -sorts -specially -stamp -statute -stays -stolen -striking -submissions -succeed -sufficiently -suited -sunny -superb -supervision -supplement -surprising -sustained -switches -tale -tan -temporarily -tendency -tends -termination -testimony -tooth -torque -tourist -tract -trailers -transit -tumors -tuning -turbo -twist -unexpected -upset -urine -variability -vascular -vectors -vegetable -vulnerable -wages -waist -weighted -wherever -wires -wool -wordpress -worthy -wound -yeast -york -abandoned -abide -absent -accepts -accidents -accomplish -accordingly -addiction -adjustments -advances -agrees -align -allocated -amendment -anger -antigen -appreciation -backgrounds -bargain -basement -bat -bathrooms -bicycle -bids -bind -bleeding -blues -breed -broadband -bullet -cage -campaigns -camping -caring -cattle -celebrated -cement -cents -chairman -chapters -chloride -clarity -classifieds -coaches -cod -collaborative -combining -comparative -compatibility -compiled -complement -computational -conflicts -consciousness -consensus -considers -constitutional -containers -cooked -cooler -coronary -corporations -correspond -costumes -counties -cricket -criterion -crops -david -deer -defect -denote -departure -deployment -destinations -deviant -disabilities -disable -disappointed -displacement -displaying -distinguish -distinguished -donate -donations -dpi -dressing -drilling -dropping -drove -eco -eds -educated -elevation -emerged -encouraging -engaging -enrollment -enterprises -envelope -enzymes -equality -ethanol -ethics -excellence -exhibitors -expecting -expenditure -expired -expressing -failing -farming -fate -fatigue -fence -fibers -fifty -firing -firmware -flags -floral -forests -formulation -fragments -furnished -fuzzy -galaxy -gen -generous -gentle -ghost -governmental -grams -grows -harmful -hills -hired -honestly -horrible -hydraulic -illustrations -improves -incentives -inclusive -incomplete -infants -informal -inhibitor -intact -integrate -interventions -introducing -invasion -investing -jackets -japan -jar -jerseys -joints -jokes -jumping -keygen -kingdom -lacking -latex -laughing -laundry -lecture -legislative -legitimate -leisure -lengths -liabilities -licence -licensing -lining -linux -lit -literary -logs -lolita -london -manga -masturbating -mathematics -ments -merchants -metallic -methyl -mill -minus -mit -modem -mol -monitored -monitors -musicians -mutation -mutations -nails -narrative -nationwide -nicely -oak -occasional -occupation -oils -onion -opera -optimum -organisms -outputs -oxidation -packets -packs -pads -passionate -patches -pathways -peoples -phrases -pickup -pig -pit -plugins -poems -poly -poorly -preceding -pressed -pressures -prevalence -preventing -prevents -printable -proceeding -progression -projection -promises -promoted -promoter -proprietary -protects -publishers -pulmonary -pupils 
-puppy -qualifications -racial -radar -raises -rational -rays -recognised -reconstruction -redhead -refine -rel -relaxed -remembered -render -rendered -reporter -reservation -residues -resin -resist -resonance -restored -ribbon -ringtone -rip -roller -rolls -rounds -rpm -ruling -salaries -scanner -scared -scholarship -sentences -severity -shake -shareholders -shear -shelter -shield -shy -silicon -simulated -sir -sits -sixth -smallest -snap -soldier -specialists -specs -spectacular -spelling -spouse -squares -stakeholders -starter -stating -statistically -statutory -steal -stickers -stimulus -stud -subjected -subjective -subsidiary -subtitles -supplements -surveillance -survived -suspected -swim -switched -sword -tear -thou -threatened -throwing -timer -titled -torture -trainer -trains -translate -translations -tray -tub -tuned -tunnel -tutorials -tweets -twins -unfortunately -unto -utilities -utilization -vaccine -varieties -viewer -viewers -violations -visibility -voices -voluntary -voters -warehouse -warming -warrant -wax -whats -widespread -winds -wiring -wit -withdrawal -woods -wrestling -xanax -xbox -abnormal -acceleration -accessory -accompanying -acknowledged -acne -adapt -adventures -afterwards -alt -amplifier -anchor -answering -anterior -applicants -architectural -archived -asshole -assign -assignments -assure -assured -asthma -attacked -attorneys -auctions -australia -autumn -badges -badly -basin -beaches -beating -beautifully -beings -belonging -bend -beside -blond -boom -borders -bored -bother -bracket -breach -breakdown -bridal -broader -builds -bulb -burned -burst -cabin -cartridge -castle -catalogue -cavity -championship -cheat -cheating -chicks -christian -cleaned -climbing -clone -coins -coloring -combo -comp -competent -compute -confidential -connectivity -connects -consisted -consuming -controversial -convention -convergence -corners -corrections -counseling -cousin -crafts -crossed -cunt -custody -cycling -dans -darkness -deadline -dealt -declare -declined -decorative -decreasing -defensive -deficiency -deficit -democratic -dense -deny -deposition -detached -developmental -distant -distinctive -divine -diving -doctrine -drainage -draws -drew -drums -earliest -earrings -ebay -electrode -eligibility -enjoyable -enters -escort -essence -evolved -exceeds -exceptions -excitement -exclude -excuse -exhibited -existed -exports -failures -fame -fascinating -fatal -favour -feasible -filtering -fires -fixing -flashing -fleet -flesh -flooring -fluorescence -folders -folding -foster -fought -foursquare -fragment -framed -generates -genome -gently -geographic -governed -graduation -grammar -granny -hairstyles -handmade -harvest -headlines -helmet -hereby -highlighted -hindi -homework -honda -hung -hunter -identifies -illustrates -impaired -incentive -incoming -incubated -induce -inflammation -informational -infringement -inherent -inhibitors -injected -insertion -insulation -intersection -italian -james -jewellery -jordan -juicy -lamps -lang -lattice -lawn -lazy -leak -lean -licenses -lid -lion -lipid -locks -longest -magnet -mainstream -maintains -mall -march -marijuana -masses -mat -maximize -metres -michael -microwave -midnight -mirrors -modelling -monetary -musician -naughty -navigate -necessity -noon -notices -nucleus -nut -organs -ours -outlook -pale -patio -permanently -permissions -phenomena -photographers -pipeline -pipes -planes -podcast -politicians -pools -positioning -positively -posterior -potato -potatoes -practically -practitioners -preserved -princess -priorities 
-prizes -processors -projected -proliferation -pronounced -prototype -proudly -proximity -punch -purely -purified -qualitative -railway -ram -ranks -recommends -refresh -rehabilitation -relaxing -remarks -remedy -rendering -repeatedly -repository -residue -respected -retrieval -rifle -rigid -rope -royalty -ruled -sandwich -satisfactory -scams -scanned -scary -schedules -screws -sexually -shaved -shine -shoppers -shoulders -similarly -slides -sliding -slower -soils -spacious -specimen -sphere -spinal -sponsors -spyware -stained -sticks -strengthen -strengths -strive -struggling -subset -subtle -supporters -sustainability -sync -tackle -targeting -temp -tent -texas -theorem -thorough -threatening -threw -thy -tiles -tin -tomato -toxicity -traditions -tranny -transitions -trash -travelling -tremendous -tumblr -turkey -typing -underwear -unity -unnecessary -unwanted -upgraded -uptake -utilizing -valves -variant -vegetation -ventures -venues -verse -veteran -viable -villages -vocal -wavelength -wells -widgets -wore -zinc -abortion -accessing -achievements -acrylic -administrators -affair -affinity -algebra -ambient -analytics -analyzing -ankle -announces -apparel -approve -aqueous -arena -argues -assembled -assessing -assurance -athletic -attracted -attraction -automobile -avoided -awful -backing -balloon -banana -basics -bean -beloved -bent -billing -blessed -blowing -booth -bounded -brakes -bronze -browsers -builder -bull -bumper -cab -cakes -california -carcinoma -cardiovascular -cares -catching -challenged -charter -chef -cherry -chopped -chris -cigarettes -citing -claiming -climb -closet -collector -collision -comma -companion -competitions -completeness -comprised -concentrate -conceptual -concerts -conferences -configurations -constants -contamination -continually -contributor -contributors -conversations -conviction -cooperative -cortex -cosmetic -costly -counted -cow -crashes -creatures -crude -dairy -dam -dash -decides -decomposition -dedication -defeated -definite -deletion -demanding -denotes -deposited -develops -devil -diagrams -dietary -dig -digits -dioxide -disclaimer -discounted -dissolved -divide -divisions -dramatically -drift -drying -dumb -dump -dye -dysfunction -earning -ecological -economies -editions -electrons -eliminated -emotion -empire -employs -encryption -endless -energies -enrolled -equals -examining -executives -exhibits -explored -explosion -extensively -extracts -farmer -farms -favorited -fears -fighter -fights -fireplace -flats -florida -flowing -flu -flush -focal -fonts -foreclosure -forty -fox -fracture -freeze -freight -gambling -gameplay -gangbang -genius -governing -governor -gown -graduates -gratis -grave -greeting -grill -grocery -guarantees -habit -halloween -hazard -hazardous -headquarters -helicopter -heroes -honored -housewife -humanity -impairment -incl -incurred -infrared -inspire -inspiring -intend -intro -investigating -investor -jam -joins -jpeg -junction -junk -justify -keen -knock -lap -lawsuit -learners -lee -legendary -lending -lesser -liberty -lick -literacy -litigation -locking -lookup -loops -loses -louis -magical -maid -manuals -manuscript -marginal -marry -masturbation -maturity -mechanics -membranes -memorable -messenger -metabolic -minerals -missions -mold -monkey -mud -municipal -neat -negotiations -nipples -nominal -notable -obesity -operative -opponent -organizing -panic -pantyhose -passwords -peaceful -pearl -pencil -persistent -perspectives -pharmaceutical -physiological -pile -pine -poet -pointer -polar -polyester -pork -poses 
-posing -prayers -precipitation -preferably -preteen -privilege -prom -promotes -propagation -proportional -prospects -prostate -punk -puppies -quarters -questionnaire -react -reactive -readings -recipients -recordings -recreation -referral -regret -regulate -reject -remainder -remind -renewable -renowned -rep -replication -reservations -reviewing -ridiculous -rivers -rounded -sacred -sacrifice -salmon -satin -satisfying -scaling -scandal -scholars -scripting -seating -sediment -semantic -shallow -shipment -shout -signatures -sim -simplicity -sketch -slice -sluts -smartphone -smiling -snake -societies -soluble -soundtrack -spanish -spec -specialize -spine -spite -sporting -spotted -squad -staining -stairs -stationary -stranger -strikes -subsection -sup -surf -symmetry -syntax -systemic -teaches -tee -teenage -therein -throws -thumbs -tier -timber -tit -tomatoes -toolbar -touched -touching -tourists -traits -transient -transmit -trauma -treats -triangle -tribal -unions -unused -useless -vampire -verbal -versatile -vibration -vocabulary -weblog -whatsoever -wines -wrist -yahoo -abdominal -abundant -abused -abusive -accent -accounted -accredited -acted -additions -adhesive -advisor -aids -america -angels -announcements -apoptosis -approached -approaching -appropriately -arises -armor -arrows -arthritis -ash -asleep -atmospheric -averaged -avoiding -backs -basal -bedding -biomass -blades -blah -blown -bolts -boxing -bro -brutal -bucket -bulbs -bump -buses -butterfly -bypass -byte -cabinets -cafe -candle -canon -catalyst -catering -cation -chaos -charming -chase -cheats -chic -chromosome -clarify -clever -clue -clustering -cocktail -colon -colony -commercially -compartment -compelling -compile -complain -compressed -compromise -conditional -condo -cone -constitution -consultants -consumed -controversy -convicted -counters -courage -cracked -creature -critics -cubic -cuisine -cumulative -currents -dan -debris -deciding -decorated -delicate -delight -demographic -dentist -deployed -derive -deserves -desires -diary -dip -directories -discourse -discs -distributor -downstream -dryer -durability -eaten -endorsement -enhancing -evolutionary -exams -excel -exchanges -exclusion -fabrics -fare -favorable -fifteen -finder -fisting -fool -footwear -fractions -friction -fried -fulfill -gadgets -gal -gallon -gamma -gaps -garbage -gases -gates -generalized -geographical -geometric -glance -glue -gods -goodness -grains -gram -granite -groom -hammer -harsh -headers -hiding -hierarchy -hilarious -homeless -hoped -horizon -humidity -hypertension -idle -immigrants -incidents -incomes -indices -indigenous -inequality -initiation -inline -insufficient -insured -internationally -interpret -interviewed -intimate -invasive -inverse -invested -invisible -iso -journalist -knit -laying -lifting -lime -localization -longitudinal -ltd -magnificent -mailed -manipulation -marble -marking -masters -matrices -memorial -menus -messaging -messy -minded -mint -mixer -moderator -modest -modulation -moms -motors -mum -navy -nevertheless -niche -norm -nowhere -nutrients -nutritional -offerings -offshore -origins -outlets -overlap -parental -partnerships -pas -pasta -patience -paul -pause -pendant -petite -placebo -plated -polished -pond -positioned -postage -postal -potassium -predictive -preservation -prevented -priest -privately -probable -prohibit -promptly -proposes -proves -purse -qualifying -radial -randomized -rec -recession -recreational -recruiting -recycled -refined -reflecting -reforms -regulator -releasing -remedies 
-reminder -reminds -renewal -reproduce -reproductive -researcher -reservoir -resolutions -responsive -restart -retailer -rev -revenge -rhythm -rider -ringtones -rises -runner -salon -scans -scattered -schoolgirl -scientist -screaming -screenshot -sculpture -seminar -sensing -sewing -shades -shifting -shoots -sided -situ -skull -sometime -sooner -spark -specials -stadium -stake -starring -startup -sticker -stimulated -straightforward -stressed -stuffed -subscriber -substitution -sunset -supervisor -surfing -surplus -surprisingly -tactics -tastes -tasty -taxi -ter -terrain -terror -terrorism -terrorist -tha -thee -thu -toes -tom -tracker -traded -trails -travels -treasure -turnover -tweet -ubuntu -uncertain -undergraduate -underneath -unstable -upgrades -upgrading -upstream -vanilla -variants -vein -ventilation -vibrant -virtue -vocals -wallet -warned -warranties -weakness -weddings -weed -welding -workforce -xml -absorbed -accessibility -accreditation -acetate -adequately -advisory -advocate -aligned -allegedly -allowance -aluminium -amendments -analyst -angular -appeals -appointments -appraisal -arch -architect -arriving -associates -audiences -automation -awkward -backyard -baked -banners -beams -beast -beaten -bee -belts -benchmark -bidder -bidding -bizarre -bladder -blanket -bloggers -bloody -booked -bookmarks -bore -bounds -bout -bridges -brochure -bucks -bush -bust -buzz -calculating -camps -canal -cancellation -cancelled -cartoons -caution -celebrates -celebrating -cerebral -chassis -cheerleader -chords -cite -clearing -cleavage -coded -colleague -coloured -combustion -commander -compliant -compressor -confined -conform -confusing -connectors -consequently -consolidated -consolidation -contacting -contexts -continuity -contributes -controllers -converting -cooled -coordinator -cope -copying -correlations -corrosion -cottage -cracks -crafted -cube -cultured -daddy -dare -dawn -deadly -debug -decorating -decorations -defective -delighted -delta -depicted -desperate -dessert -diamonds -differs -disappeared -dispersion -disputes -distortion -dive -dock -dolls -donated -donors -dots -dozens -duck -eager -elimination -emailed -emerge -enclosed -encourages -endorse -endothelial -enforce -erosion -essays -eve -exceeding -excitation -exempt -expectation -expenditures -explanations -expressly -fairy -faults -feminine -fibre -fig -filename -filtered -finishes -firmly -flagged -flies -fluctuations -forcing -fork -formally -formatted -fortune -freezing -fridge -fuse -gateway -gears -genetics -genus -gif -gossip -grandchildren -graphical -guards -gum -haired -handbag -handsome -harmony -harness -hash -headache -headline -heal -herbal -herbs -highlighting -hint -hints -hoc -horn -ignition -immunity -imports -impose -inadequate -incorporating -incubation -indirectly -inexpensive -influential -ingredient -insane -insects -institute -intrinsic -invitations -invites -irrigation -ist -journalists -jungle -justified -kicked -kicks -kindly -kinetic -lag -lakes -landed -lanes -laptops -lectures -lender -ligand -liner -lonely -lord -loyalty -luxurious -mag -manages -maternal -mattress -maximal -mayor -meanings -med -medicines -mega -melt -melting -mercury -metadata -metrics -microphone -microscopy -mileage -mods -morphology -mutants -mysterious -needing -nest -nickel -nominated -normalized -norms -noting -novels -nutrient -occupational -omitted -onions -openings -opponents -optimize -organised -parade -paradigm -parenting -pcs -penalties -perceptions -pertaining -pigs -pillow -planted -planting -platinum 
-pokemon -polynomial -pos -posed -pov -preparations -primer -prince -prisoners -procurement -profitable -prolonged -prone -propecia -prospect -pulls -pursuit -qualification -quartz -racist -ranged -rankings -reactor -realm -reductions -reel -referenced -refreshing -refrigerator -reg -reinforced -rejection -relied -reminded -requesting -restrict -reversed -revolutionary -riders -rim -rocket -rotating -safer -sandals -saturated -secretion -seemingly -selections -sensation -sensory -sequential -seventh -severely -sheer -shelves -shifted -shiny -shocked -showcase -silicone -simplified -simulator -simultaneous -sings -sleeves -soda -specificity -spirits -spontaneous -stamps -statue -stepped -sterling -stimuli -stove -stretching -styling -summarized -supportive -suppression -surgeon -surround -sustain -sweat -sweater -symmetric -tailored -talents -tamil -terminals -terminated -thai -thereafter -thorns -thyroid -tiger -toddler -tones -towel -toyota -traces -trackback -trades -transmitter -transparency -trapped -traveled -tuition -unanswered -uncle -unclear -undergo -undertake -unrelated -unsubscribe -urgent -vapor -veterans -waited -ward -washer -waterproof -wears -webmaster -webpage -weekends -welcomed -whip -wholly -wifi -witnesses -wives -wolf -wondered -worksheets -zoo -accountability -accumulated -adjusting -advertised -african -airlines -airplane -allegations -alliance -altitude -altogether -alumni -amazed -amazon -ample -analysed -anatomy -antibiotics -appealing -appearances -appliance -arterial -assays -athlete -aur -avail -aviation -backward -bacon -balancing -battles -bearings -ben -bending -betting -bitter -bless -blows -bob -bonding -brackets -brave -british -bye -cave -chemotherapy -chromatography -chubby -civic -civilian -clerk -clicks -clinics -closes -coats -cocaine -coconut -cohort -col -colonial -colonies -commas -committees -commodity -compares -competence -compiler -complementary -compositions -conductivity -confirms -constrained -contests -continental -convince -coordinated -cop -corrupt -countless -cows -credentials -credited -cursor -cuz -debts -deceased -dee -defendants -deficient -deformation -demonstrating -denial -densities -dependency -depressed -detecting -diabetic -diaper -diff -digg -disciplines -dismissed -distress -domination -dough -duct -duo -dwelling -ebooks -elaborate -electro -eleven -embodiment -embrace -emo -employing -encoded -enjoyment -enquiry -enthusiasm -esteem -eternal -evaluations -examines -exploit -exploitation -expose -eyed -faithful -fashionable -fertility -fetal -fittings -fixes -fluids -fluorescent -fog -foil -forbidden -formulated -fps -gardening -gasoline -generators -gig -ginger -globally -glow -gospel -graffiti -grandmother -granting -greenhouse -handful -hazards -heights -hesitate -hiking -hobby -homogeneous -honors -honour -hottie -ideals -identifier -idiot -inability -inaccurate -inactive -inferior -inhabitants -inlet -insect -instability -installations -interfere -intracellular -intuitive -invariant -invented -investigators -invoice -jumped -justin -juvenile -knowledgeable -ladder -latency -latitude -launching -layouts -lenders -lever -leverage -lifted -lightly -lightning -listened -liter -lobby -localized -logistics -lowered -lowering -loyal -luggage -lungs -mails -malware -manifold -margins -martial -medieval -meditation -merger -merit -metro -microscope -mil -modeled -modular -motherboard -nationally -nickname -noble -notation -noun -nursery -obstacles -offenders -optic -orbit -ore -ounces -outs -overwhelming -paired -paradise 
-passport -pathology -paypal -pedal -perfume -phosphorylation -physiology -piercing -playlists -plotted -plugs -postcode -potent -practicing -predominantly -presently -prestigious -presumably -prey -primitive -printers -professionally -profitability -profound -projections -prompted -pros -protests -provincial -pumping -quarterly -quasi -rainbow -rainfall -ramp -recognizes -recorder -redistributed -registering -reliance -relies -removable -respects -resting -retreat -reuse -revealing -revisions -rewritten -rig -rival -robots -rods -roses -runtime -sailing -sampled -sanctions -satisfies -saturation -scarf -seals -securely -sender -seniors -shocks -showers -shrimp -simpler -singular -skate -skins -slaves -slices -slopes -sorting -spacing -spectroscopy -spider -spinning -spreads -standardized -steep -sticky -stiff -stills -stimulate -stir -straps -stresses -stretched -strokes -strongest -studios -substituted -substrates -succeeded -sulfate -summit -survivors -susceptible -swallow -swelling -symbolic -tales -tapes -taylor -teenagers -tel -thanked -themed -therapist -throughput -topology -tractor -traditionally -tragedy -travelers -tribe -triggered -turbine -ultrasound -unchanged -undoubtedly -unincorporated -unsigned -upward -urge -vent -ventricular -verb -vertex -vet -vicinity -villa -violate -vista -visualization -vitamins -voucher -vuitton -wardrobe -warfare -warnings -wasted -weigh -wellness -whereby -woven -xxxx -yarn -yea -aboard -acquiring -adhesion -admits -adolescents -advert -aerial -aesthetic -affiliation -aggregation -airports -allergic -ally -alterations -analysts -aperture -apples -arrays -artifacts -assists -audition -awhile -axial -axle -backwards -balcony -bamboo -behave -blessing -bluetooth -boiler -borne -boutique -branded -breathe -brightness -brokerage -brokers -btw -bubbles -budgets -builders -burner -buys -candles -capita -caption -captures -carved -categorized -cervical -chan -cheers -citizenship -coarse -collagen -commitments -communicating -composer -comprise -conception -conditioned -conditioner -conductor -conspiracy -consume -contaminated -continent -convey -coral -cough -credibility -cried -critically -crush -cult -curtain -curved -cyclic -dancer -dat -datasheet -deed -dem -demanded -descent -descriptive -designation -diets -disciplinary -disks -disney -dissemination -disturbance -dividend -docs -downloadable -drawer -drilled -eagle -eats -ecosystem -educate -educators -eighth -elephant -eliminating -embroidery -emergence -emphasize -engineered -enhances -ensemble -enthusiasts -entrepreneurs -epithelial -estimating -european -evolving -exemption -explores -exponential -fashioned -fathers -festivals -fighters -filming -filtration -fin -finals -firewall -fixtures -flavors -flooding -fore -forex -formulas -forthcoming -fossil -foundations -founding -fragrance -freeware -freezer -friday -frog -frustrated -fuels -furnace -galls -gastric -geography -glamour -gland -goat -grandfather -grasp -groundwater -guitars -gut -ham -happenings -hassle -hay -hepatitis -heterogeneous -hierarchical -hike -hollow -hooked -hormones -hospitality -hunger -hurts -ignoring -imagery -implicit -inaccuracies -inconsistent -inconvenience -incorporates -incorporation -indie -induces -inevitable -infectious -infusion -ingentaconnect -inherited -inhibit -inhibited -initiate -innovations -instructional -integrating -intentions -interim -intestinal -intra -irregular -irrelevant -isolates -jaw -jointly -judged -jumps -kelly -kim -kings -kinky -kitty -korean -labeling -lacks -lasted -latin -laughed 
-lcd -lesion -linguistic -litter -livestock -lottery -mapped -mar -marrow -mentally -mentions -mercy -merge -metropolitan -mighty -mike -mistress -mixtures -mon -mono -monsters -museums -myocardial -myth -nano -negatively -neglect -neglected -negotiate -negotiation -neighborhoods -neighboring -nerves -ness -newborn -ninja -nonprofit -northwest -notably -notch -nuclei -numbered -observer -occupy -offence -olds -omg -owing -packaged -padding -paragraphs -paris -parish -patents -payroll -penny -peptides -perfection -performers -periodically -petroleum -picnic -pictured -ping -piss -piston -plc -plumbing -png -polarization -poles -polo -potentials -poured -practitioner -prefix -privileges -probes -productions -projector -prop -proportions -proposition -prosecution -provinces -psi -psychiatric -publicity -pulses -pumpkin -pursuing -python -quad -questioned -quicker -racism -rage -raid -railroad -rails -realise -rebuild -rectangular -removes -renewed -reportedly -respondent -rims -ripped -rotor -rubbing -rude -rugby -saline -sam -schema -scholar -scoop -scooter -scrap -screened -sectional -seminars -senses -sentenced -shadows -shaking -shark -shells -sig -signup -silica -sis -skiing -smiled -smith -smoothly -snack -socially -soma -sore -souls -soy -spanking -specializes -specifies -spice -spicy -spill -spiral -splitting -spotlight -stain -stance -stems -steve -stimulating -stochastic -storing -strand -subsidiaries -subunit -successive -sucked -sue -sunlight -sunshine -supplying -surname -surroundings -surviving -susceptibility -swear -symptom -synthesized -sys -tease -teaspoon -technically -teenager -tenant -tended -terminate -textures -thankful -thrust -tide -tightly -tilt -titanium -token -topless -torn -touches -towers -traders -transcript -transformer -transported -treaty -tribes -tunes -twisted -unauthorized -uncomfortable -undergoing -understands -unfair -urged -urinary -vacancies -validate -vegas -versa -violated -viscosity -visually -volatile -vulnerability -warmth -warrior -wealthy -wired -wished -wishing -witnessed -wonders -worries -wounded -writings -xvid -youngest -abs -accelerated -accidentally -ace -activists -additionally -adhere -adsorption -advocacy -agreeing -albeit -alphabet -alright -amber -amid -ancestors -ant -anxious -api -apologize -appetite -apt -arab -arbitration -arcade -arguing -assisting -attacking -attain -autism -bail -barn -bathing -bead -beginners -beverage -biased -bible -bilateral -bis -bmw -boasts -boiling -borrow -bot -bounce -bowel -brains -branding -burns -calorie -canadian -canopy -cans -capacities -capsule -cartridges -casinos -catalytic -catches -celebrations -champions -characterize -che -cheer -chess -chi -chill -circa -civilization -clamp -clan -classy -clipboard -clones -coalition -coherent -collectively -collectors -commissioned -competitor -complaining -compliment -complimentary -concludes -concurrent -condom -congestion -conserved -constructive -constructs -conveniently -convertible -convex -cops -cores -cosmetics -creamy -crisp -critique -crowded -crushed -curly -cutter -dancers -dangers -darker -dataset -debates -definitive -dell -demon -denoted -depths -desi -deviations -diagonal -dicks -diffraction -diggs -dignity -diluted -discharged -disco -discontinued -dislike -dispatch -disposition -distal -distributors -dominate -dos -doubts -downward -drought -dull -dummy -dynamically -economical -ego -elect -electrodes -elevator -emphasized -enclosure -endogenous -energetic -eng -enquiries -ent -enthusiast -environmentally -ether -europe -evolve 
-examinations -expiration -explorer -explosive -fabrication -fade -fanpop -feasibility -filesonic -filmed -finances -financially -flap -flew -flex -flock -folded -footprint -forecasts -foreclosures -formatting -fort -fortunate -forwarded -frustration -fundraising -futures -gem -genres -gmail -goodbye -govern -grinding -groove -guardian -guessing -guilt -halo -happily -hardwood -harmonic -harry -headings -headset -healthier -hedge -hex -hotfile -humble -iconic -ignorance -impedance -implants -incest -inclined -inference -inspections -installer -interacting -inviting -irradiation -jackson -jealous -jessica -joe -journalism -justification -lasts -laughter -layered -leakage -limb -lineup -linkage -liquor -lithium -lively -locus -magnesium -mama -mandate -maple -mas -meantime -medal -mein -mexican -mines -miniature -miracle -misleading -missile -mitochondrial -mommy -motions -msg -mugs -muscular -mushrooms -naruto -neighbourhood -nick -nightmare -northeast -obituary -observing -opposing -organism -originated -ounce -outfits -owe -painter -palace -parcel -parked -pastor -petrol -phenotype -photographic -pilots -pioneer -pissing -pistol -plasmid -poison -polymers -porch -pore -pornography -portraits -postings -pots -premature -premiums -preserving -probabilities -problematic -programmed -programmers -proton -proving -pseudo -punished -purification -purity -quietly -quotations -radiator -rash -realised -realization -recombinant -recruit -redundant -reflections -reflective -regeneration -registers -reign -renew -repaired -reporters -researching -reside -responds -retaining -rom -rotate -rub -ruin -rumors -runners -rust -saddle -sail -saint -sandy -sang -scenery -schematic -scholarships -screenshots -screwed -seafood -seamless -seated -securing -segmentation -semantics -sensible -sequel -servant -servicing -shortage -shuttle -sights -similarities -simplest -sins -sixty -skeletal -slavery -smiles -snacks -socio -solids -sounded -sounding -sour -southwest -specializing -specifics -spells -splash -spokesman -statistic -stealing -sticking -stole -straw -strengthening -stripping -struggles -studs -subdivision -subscribing -supreme -surge -suspicious -sweep -tasting -taxation -technician -telecommunications -tenants -tenure -terrace -terrific -territories -textbook -textile -texting -theres -thong -threaded -throttle -thumbnails -tick -topped -touring -towels -toying -trackers -traction -trainers -transplant -transplantation -transverse -traps -trivial -tsp -umbrella -undertaking -underwater -uniquely -unlocked -usable -vagina -vaginal -validated -vegetarian -veins -vest -vinegar -volatility -volt -voyeur -watt -wedge -weighing -whites -wicked -wii -willingness -wipe -withdraw -wizard -workflow -wounds -yielded -zombie -abnormalities -absorb -academy -accompany -accord -actin -additive -admissions -adopting -advocates -aiming -alcoholic -aliens -alkaline -allergy -alloys -alteration -ambitious -amend -amplification -analogous -analytic -antibiotic -antivirus -aquatic -aromatic -attained -attributable -authorize -autonomous -auxiliary -avg -awake -axes -bachelor -backpack -bake -balances -ballet -banged -bargaining -baskets -beard -beginner -believing -beverages -bidders -biopsy -bitches -blacks -bollywood -bonuses -bovine -bowling -bracelets -braces -brazilian -breeze -broadcasting -broadly -brushes -buck -bulletin -calf -cams -cancers -capturing -causal -cease -charitable -chez -chicago -chooses -cinnamon -classics -clicked -clinically -clit -cloudy -collateral -collects -commodities -como -compensate 
-cons -constructing -continuation -contour -contracting -contraction -converts -convincing -copyrights -cortical -counterparts -countryside -cracking -craigslist -crashed -creations -creditors -criminals -crosses -crossover -cruel -crust -cushion -cutie -cyber -damaging -dances -dashed -debit -defending -deg -delightful -della -deluxe -denim -deploy -deputy -destructive -detailing -detectors -deter -diagnose -dice -dilution -dim -din -diode -directional -discovering -discriminatory -dismiss -disposed -disruption -dissertation -disturbing -dodge -dome -dominance -dorm -dorsal -ecology -economically -edits -elbow -electromagnetic -elegance -embryos -enacted -encounters -encouragement -enlarged -enriched -entertain -enthusiastic -entrepreneur -entropy -escorts -executing -expire -extracellular -extras -factories -faulty -faux -favors -fills -fireworks -fist -flames -flickr -footer -forgive -fountain -france -freak -freelance -freshly -frustrating -fulfilled -gallons -genomic -gentleman -genuinely -glorious -gloss -glove -gowns -gps -grabbed -graft -grease -grief -grounded -grouped -gucci -habitats -hacked -hacker -handler -harassment -hardest -hated -heavier -hepatic -hes -hips -historically -homeowners -hooks -huh -hygiene -hypotheses -ignorant -illumination -imagined -implant -implements -incremental -indexes -indications -indicative -influenza -inhibitory -inserts -inspirational -instructed -instructors -internally -interpretations -interrupt -interrupted -iteration -jay -jeep -jelly -jerk -judgments -karaoke -khan -kicking -kidding -kilometers -kinetics -knives -knob -knocked -labelled -laboratories -lamb -landlord -landmark -landscapes -larvae -leaks -lengthy -lifts -likewise -linen -liquids -lite -locality -lotion -lymph -mainland -malignant -mammalian -manifest -mankind -marathon -marie -mary -masks -mates -mentor -merged -merits -methanol -mexico -midst -minecraft -mis -miscellaneous -mitigation -moderators -moist -monkeys -mortgages -motif -motorcycles -mounts -multiplayer -naming -nasal -negligence -negligible -neighbours -nets -neuronal -nfl -noisy -nomination -nonetheless -nonsense -noticeable -nowadays -nucleotide -offender -offs -omissions -opener -ops -organizer -outdated -outgoing -outlines -outreach -outsourcing -oval -ovarian -overlapping -parliament -passages -peanut -pediatric -perceive -performer -permeability -personalize -philippines -philosophical -photon -pirate -planets -planner -plaque -playground -playoffs -plea -politically -politician -polymerase -populated -popup -preceded -precursor -predicting -prejudice -prep -pretend -prevailing -prisoner -prob -programmer -promedia -protesters -puzzles -quarterback -quotation -rabbits -rationale -reboot -recognise -recognizing -recurrent -redirect -regulating -regulators -relieve -relying -remark -renovation -republic -retrieved -reunion -rewarding -ridge -rigorous -ritual -rivals -rooted -roster -routinely -routines -rug -runway -salts -salvation -santa -scaled -scent -scream -sedan -seized -semiconductor -sequencing -servants -shaping -shooter -siblings -simplify -simulate -singers -skirts -sleek -sneak -sneakers -southeast -sovereign -speculation -spherical -spike -spoon -squeeze -squirt -std -steak -stepping -stirring -stitch -stocking -strangers -stripes -stripped -sturdy -subs -subscriptions -suburbs -succession -successor -suites -sums -supposedly -swift -synchronization -syrup -tactical -tailor -tar -taxable -taxpayer -taxpayers -teasing -technicians -tens -termed -terminology -textbooks -therapies -thirds -thrilled 
-timeline -tor -trafficking -tragic -trajectory -transcripts -transformations -trendy -trimmed -trio -trout -trustworthy -tuberculosis -tubing -twilight -tyres -uncertainties -underway -underwent -unfortunate -unified -uniformly -unprecedented -unpublished -unsure -upright -upside -vertices -viability -vibrator -vii -violin -vivid -vouchers -wagon -waiver -warmer -warn -wat -watts -weaker -weaknesses -welcomes -whoever -widow -winding -windshield -witch -withdrawn -woke -accents -accidental -accommodations -accountable -acknowledges -acquisitions -activist -addicted -adolescent -advancement -advertiser -aided -alias -allies -ammonia -amps -analogy -analyse -ann -annum -anyways -aortic -aquarium -architects -arose -asbestos -asia -assemble -assertion -asses -aunt -authorised -autonomy -aux -avenue -averages -averaging -awaiting -ballot -banging -bark -barrels -bash -bbs -bibliography -billions -biochemical -biodiversity -bipolar -bisexual -boarding -boil -bonded -bookmarking -borrowed -buffet -bullying -bunny -burnt -cached -caller -cane -canonical -capacitor -capsules -carbonate -cardboard -cared -carriage -cemetery -certainty -chambers -charcoal -checklist -chickens -chili -chin -cites -clash -collapsed -commenced -commercials -commissions -communicated -complained -completes -compulsory -conceived -condos -confidentiality -constituents -construed -continuum -contracted -contractual -conventions -coping -cor -corpus -cozy -crab -crane -crank -creek -crystalline -cue -cues -cultivation -cupcakes -curiosity -currencies -curtains -curvature -customizable -cylinders -dashboard -dealership -decimal -declining -deduction -deferred -dementia -depletion -destiny -destroying -detention -deutsch -dielectric -differentiate -differing -diffuse -digestion -diploma -directing -disasters -discomfort -disturbed -dividends -dividing -divorced -dotted -doubled -elder -electoral -electronically -eliminates -embroidered -escaped -establishes -ethnicity -ethyl -evenly -exchanged -exercised -exercising -exhausted -exhibitions -expires -exposures -exquisite -extinction -farther -feather -fetch -finale -fixation -fleece -flyer -fond -forwarding -foul -fractures -framing -freshman -fringe -frontal -funky -gadget -galaxies -garment -gifted -girlfriends -giveaways -glimpse -goose -gradual -grandma -gratitude -gravel -guiding -guitarist -hacking -hairstyle -handheld -handset -hardy -harvested -hath -haul -heck -histories -hollywood -hostile -hug -hull -humanitarian -humiliation -hurry -identities -idol -illusion -immigrant -impressions -impulse -influencing -inheritance -injections -inquire -inspect -insure -intel -ionic -isolate -issuance -issuing -jan -jerking -jets -jizz -jones -judgement -keynote -knight -knitting -knot -lame -lat -leap -learner -learns -learnt -leasing -legends -legislature -lend -leukemia -licked -liking -liquidity -litre -locker -lymphocytes -mailbox -malicious -mammals -mario -mars -maternity -mats -matt -mechanic -microbial -ministers -misc -mistaken -mixes -mob -moderately -modifying -mole -mug -murdered -mutually -mysql -nausea -needles -nipple -nitrate -nov -nudity -numeric -occupancy -oem -offspring -openly -optics -orbital -orchestra -originate -overlooking -overly -oversight -overtime -overweight -owning -oxidative -ozone -pains -pal -patented -patrol -peculiar -pedestrian -pee -peek -pens -peppers -percentages -perl -perpendicular -persistence -pertinent -pest -peter -phoenix -piracy -pissed -plaintiffs -playoff -plugged -plural -plush -poisoning -pony -porcelain -possessed 
-possesses -pouch -pounded -practiced -predicts -premise -prepares -prevalent -primers -proceeded -prosperity -protector -proximal -pumped -purchaser -pursued -pussies -questioning -quilt -quoting -racks -ranch -recalled -receivers -recovering -recruited -redemption -reef -refugees -refuses -regimes -reinforcement -relational -religions -remodeling -remotely -replaces -restructuring -retire -reviewer -ribs -risky -robbery -rotary -ruby -rugged -ruins -rushed -sadly -sauna -sausage -scalar -scott -secs -sediments -seismic -seldom -sentiment -settlements -sewer -sexuality -shampoo -shareholder -shaving -shes -shining -shipments -shocking -shortest -shrink -shutdown -sickness -silhouette -skating -skeleton -slab -sliced -slider -smoked -smokers -snatch -songwriter -sonic -specifying -spends -spit -sponsorship -spouses -sql -stall -staring -stat -statutes -steadily -stellar -stiffness -storms -str -streak -stripe -stumbled -suburb -suede -suffers -sulfur -sung -supervised -supplemented -suppressed -surrender -surveyed -suspicion -swallowing -sympathy -tariff -terrorists -thoughtful -throne -timed -timeout -toast -toner -tonnes -tote -tow -tracked -trait -transferring -transforming -transistor -translator -tricky -triggers -trivia -trophy -troubles -troubleshooting -trustee -tumour -tuner -turtle -tween -uncommon -unemployed -unhappy -uniforms -untreated -unveiled -usefulness -utilizes -vacancy -vacations -valium -vegan -verdict -veterinary -vis -vomiting -warns -wastewater -wasting -wayne -weighed -weighs -whichever -wig -worm -worms -worthwhile -yacht -yamaha -yummy -zipper -accelerate -acclaimed -achieves -addict -administer -admire -advisors -africa -aggression -alex -ali -allele -allergies -als -ambien -amplified -ana -angeles -annotated -anonymously -anticipate -appendix -architectures -arithmetic -arrivals -arteries -assemblies -assert -asymmetric -aus -avoidance -bait -bees -bells -beneficiaries -berry -bieber -billed -binds -bingo -births -bites -biz -bleach -blended -blew -bloom -blot -bombs -bookings -booklet -booster -borrowing -bothered -breaker -breakthrough -buckle -bud -bullets -burger -burial -caffeine -capillary -capitalism -cassette -catheter -ceilings -cereal -champagne -charities -charms -chasing -cheek -choir -chord -chorus -circulating -civilians -clarification -classify -classrooms -clauses -cleaners -cleanup -clipped -clocks -clues -coatings -comfortably -commencement -compassion -comprehension -conflicting -constituted -contingent -conversions -corridor -countdown -coupe -credible -creepy -crest -critic -crochet -crore -crowds -cucumber -cuff -curb -cured -curse -cylindrical -daytime -dec -december -decks -defender -deficits -delegation -deliberately -deliveries -dent -descending -deserved -detectable -devastating -diarrhea -differentiated -digging -dilemma -diminished -directive -disadvantage -disadvantages -disappointment -disconnect -discrepancies -dishwasher -dispatched -displaced -disposable -doggy -doubles -drained -dreaming -driveway -duly -dutch -eclipse -ecosystems -ect -eighteen -embryo -emotionally -encrypted -endangered -endurance -enforced -england -enhancements -enlargement -epidemic -equivalence -erection -essentials -establishments -ester -estrogen -exceptionally -expands -expedition -experimentally -exploited -exported -exposing -fabricated -facilitates -factual -fading -fairness -fakes -feast -feathers -fellowship -ferry -fertilizer -festive -fileserve -filler -finely -fines -floods -foregoing -foremost -fragile -frameworks -fraudulent -freshwater 
-frost -fry -fulfilling -gamers -garments -genealogy -gesture -ghetto -git -glands -glitter -gluten -goddess -graded -grading -greens -grilled -guild -happier -harbor -harvesting -hatred -headaches -herb -holistic -hometown -honesty -housed -hunters -hurricane -hyper -hypothetical -ideally -ideology -iframe -illnesses -imaginary -immense -impacted -imperative -implementations -impress -improper -indoors -inevitably -infarction -infinity -inhibits -insider -insist -insisted -insulated -intends -intentionally -intravenous -intriguing -introductory -invaluable -inverter -itunes -jennifer -judging -jurisdictions -kate -keeper -kilometres -kindness -labs -landscaping -laughs -lauren -leaked -leopard -lethal -lieu -listeners -literal -loader -lodge -lodging -lump -macrophages -madness -magnets -malaria -manipulate -marital -markedly -martin -mast -melody -melted -methodologies -microscopic -mills -minimizing -misses -missionary -mobiles -mock -molded -moncler -monoclonal -morphological -mos -mosaic -msn -municipality -mushroom -mustard -naval -necrosis -negotiated -neutron -nicole -nil -nineteenth -nissan -nod -nos -notions -obese -obj -objection -obliged -obscure -observers -obsessed -obstacle -occupations -och -odor -ohio -onwards -opted -optimistic -ornament -ornaments -orthogonal -outbreak -overlay -overlooked -override -paced -padded -paints -pairing -palette -pancreatic -paperwork -pat -peel -perimeter -permitting -perry -persist -personalised -personalities -phosphorus -photographed -pineapple -piping -pirates -planar -platelet -pleasing -pledge -plurality -podcasts -polite -pollen -pops -porous -portfolios -postcard -posture -potter -poultry -praying -precautions -predictable -pres -preschool -prescriptions -preventive -priests -privileged -probation -procedural -professors -propaganda -pulp -pupil -putative -quadratic -questionable -quizzes -radioactive -raped -realizing -rebel -recalls -receipts -recurrence -refinement -refusal -reimbursement -reluctant -remarkably -repeating -repeats -replicate -rescued -researched -reseller -resides -rests -retains -reversal -reversible -rihanna -roast -roasted -robert -rocking -rocky -ruined -ruler -sage -saints -sarah -saturday -scare -scenic -schizophrenia -schooling -sci -scratches -scrutiny -sealing -secular -seizure -seizures -separating -sharply -shutter -sincere -sinks -sizing -skies -slept -smartphones -smells -snapshot -spaced -spanning -spices -spoiler -spreadsheet -stabilization -staffing -stamped -standby -starch -sterile -steroids -stool -stranded -strawberry -struggled -subsidies -subtitle -suburban -supermarket -surgeons -surprises -suspects -swapping -swept -sympathetic -systematically -tailed -tangible -techno -telugu -testimonials -testosterone -thigh -thomas -threaten -thriller -tiffany -toggle -tolerate -topical -torch -tory -tournaments -trader -transcriptional -traumatic -travelled -trillion -troubled -trousers -tutor -typed -tyrosine -undo -unlawful -upstairs -upwards -usability -utmost -utterly -vacant -vaccination -vaccines -vague -val -vanity -vee -velocities -velvet -venous -verses -vertically -viewpoint -vocational -volleyball -welcoming -whale -wheelchair -wilderness -worksheet -workstation -wrapping -wraps -wreck -xmas -yen -yielding -yogurt -zombies -abandon -abdomen -abstraction -acidic -adapters -adherence -adj -adore -advancing -adversely -adviser -affection -aforementioned -aftermarket -aggregated -aired -airway -alarms -algae -algebraic -amateurs -ammo -analogue -anesthesia -animations -annotation -anticipation 
-antigens -antioxidant -anus -appellant -appoint -appreciators -april -arrests -assortment -asterisk -asymptotic -attends -attribution -auditory -authenticity -babysitter -backbone -bald -bareback -battlefield -beers -belonged -beneficiary -benign -bicycles -bins -blends -blessings -blouse -bodily -boob -bowls -breastfeeding -bred -breeds -bricks -broth -buddies -businessman -calculus -calendars -calibrated -cape -cater -celebs -cheeks -chevy -chlorine -chunk -circumstance -cliff -clipart -cocoa -coils -comeback -communicator -compensated -compliments -compromised -concise -conduction -confirming -conscience -consultancy -contention -cooker -cooperate -coordinating -corrupted -cosmic -counselor -courier -cowboy -cpu -crappy -crawl -creators -cultivated -curry -customization -czech -daylight -deaf -debian -debtor -decorate -deductible -deeds -deficiencies -delegates -deleting -demonstrations -denies -denying -dependencies -depreciation -desperately -detective -determinants -digestive -dil -disappointing -discoveries -discrepancy -dispersed -dissolution -distilled -disturbances -downs -drawers -drummer -dudes -dungeon -duplex -duplication -ejaculation -elasticity -embarrassing -embryonic -emphasizes -enrichment -epoxy -erected -euros -evaporation -evenings -everytime -excludes -expresses -facilitated -facilitating -faint -favored -feared -felony -fibres -fierce -financed -firefox -fisheries -fixture -flange -flashes -flavour -flaws -flora -follower -forecasting -forged -foto -fourteen -frank -freestyle -fries -frontier -fungi -fused -gaga -gasket -gastrointestinal -gays -gaze -geared -geek -gels -gems -genetically -genotype -george -germany -glazed -glitch -glossy -goodies -gourmet -graduating -grandparents -grape -grapes -grazing -greek -greetings -grind -grouping -guideline -guinea -guru -halfway -halls -halt -handbook -hangs -harmless -hates -heaters -helper -hers -heterogeneity -historian -holdings -hooker -hubby -hum -humour -hybridization -hydrochloride -hydrolysis -hype -ich -illustrator -implication -inception -incorrectly -inducing -inferred -inflatable -inorganic -inserting -insightful -inspected -installment -instrumentation -intensities -intermittent -internship -interpersonal -interpreting -inversion -inverted -iterative -ivory -jane -jean -joomla -july -jumper -ken -kissed -kisses -kitten -knots -lasers -latent -leaning -lego -ligands -limbs -lisa -listener -lobe -locale -localhost -locating -loft -longitude -lust -maize -manufactures -maria -maritime -markings -masterpiece -mating -measurable -medial -mediation -mentioning -mercedes -methane -minorities -misuse -modal -moderated -moderation -modulus -molar -monopoly -morality -multiplication -multiply -multitude -municipalities -myths -nailed -naive -nanoparticles -negotiating -neon -nested -ninth -nitric -nominations -nozzle -nudist -obey -obsolete -obstruction -oiled -onboard -oppose -oracle -ordinance -organise -oriental -originating -outward -overflow -overload -owed -owl -pakistan -parametric -parody -partitions -pavement -pcm -peas -pillows -pinch -plausible -pneumonia -pod -popping -pores -postoperative -pottery -pounding -predictor -prepaid -previews -proficiency -props -pushes -pvc -radicals -rainy -reactivity -readable -rebate -rebuilt -recombination -recurring -recycle -referrals -refurbished -regimen -regulates -relocation -remake -rename -rented -renting -repetition -repetitive -replay -reprint -resemble -resembles -residency -resistor -restoring -restraint -restrictive -retarded -revelation -reviewers -rewarded 
-rib -richer -rob -rode -rotational -routers -rugs -runoff -ryan -sac -saga -sandwiches -sans -scar -scarce -scholarly -seas -seasoned -seekers -selects -serviced -settling -sewage -shook -shortcut -sigh -signifies -sinus -sixteen -slap -slate -sleeps -slipped -smarter -sol -solvents -someday -sparse -speeches -spheres -spinach -sporty -squared -squirting -stacked -staging -stains -stakes -stirred -storey -stressful -striped -subgroup -subsidy -substantive -subway -successes -suction -sued -suitability -sunday -supplemental -suppress -survivor -sushi -sweeping -tablespoons -taco -tails -teachings -teaser -temples -tense -tensor -tenth -testified -textured -theaters -theirs -theology -thicker -thighs -thirteen -thresholds -tidal -tim -tolerant -toss -township -toxin -traced -tracing -trance -transfected -transformers -transforms -transgenic -transitional -translates -traveler -travellers -trek -triumph -tsunami -tubular -twentieth -tyre -unacceptable -unaware -unconscious -uninstall -unpaid -unrestricted -unsorted -unsuccessful -valentine -victoria -viii -violates -violet -vip -volcanic -voluntarily -voter -waking -walkthrough -warrants -warriors -washington -wastes -watershed -withstand -workload -workouts -worrying -wtf -yang -zoning -absorbing -academics -accomplishments -accumulate -advent -afforded -agar -aggregates -albumin -allocate -altering -alternating -amazingly -ambiguous -ambition -ambulance -ammonium -amy -analyzer -anderson -anna -annealing -anomalies -antagonist -antennas -ants -apex -apology -applicability -arabic -arguably -ascending -ashamed -asphalt -asserted -assistants -asylum -atrial -attenuation -attracting -audi -august -aunty -authored -backdrop -balloons -bats -beige -believers -belle -benefited -berries -biblical -biker -biking -bile -biomedical -bios -bishop -blankets -bleed -blinds -blower -bombing -borrower -breadth -breathtaking -brides -brighter -buds -bumps -bundles -caliber -camcorder -canceled -canine -canned -carbohydrate -carrot -carrots -cartilage -cascade -casing -cations -cautious -ceased -celeb -cellphone -cellulose -centralized -ceremonies -certify -chanel -chatting -chefs -chromosomes -chuck -circus -cis -cit -classmates -cleansing -clearer -clomid -cloning -clothed -codec -coherence -coincidence -collage -collisions -commence -compass -compelled -competitiveness -concealed -confronted -congenital -congregation -congress -consoles -constituent -consulted -coolant -coolest -correctness -correlate -cosplay -costing -counterpart -creep -crews -crib -criticized -culinary -cupcake -damp -datasets -deadlines -dean -debugging -declines -decoding -decree -deem -deepest -defaults -delhi -deliberate -depart -depicts -deprived -derivation -deterioration -dialysis -dictionaries -differed -digest -diplomatic -discarded -disconnected -disgusting -dismissal -distorted -distributing -doctoral -dong -dopamine -drafting -drastically -drills -dub -dubbed -electrophoresis -emergencies -enamel -encompasses -engraved -enroll -entertained -entirety -equivalents -estimator -evidenced -excerpt -explanatory -facials -fats -feeder -feminist -fiberglass -fibrosis -fictional -firearms -flare -fled -flowering -fol -folds -followup -forehead -forgiveness -formulations -fragmentation -freed -fundamentals -fungal -fungus -funk -gated -gentlemen -giants -goth -govt -grabs -gradients -grandson -graphite -grips -hacks -hairs -harbour -hardness -hatch -haven -headlights -hearings -heavenly -hem -hen -herd -herpes -hid -historians -hopeful -horns -horsepower -htc -husbands -hydro 
-iframes -illuminated -imperial -imposing -imprisonment -incompatible -increment -incur -indexing -ineffective -infamous -informations -inherently -inmates -inn -insiders -inspector -insult -insurer -intentional -intercourse -interiors -intestine -investigator -ionization -irritation -italy -jailbreak -june -kindergarten -kindle -kitchens -lan -lays -leaking -leased -leases -len -lettuce -lien -lifelong -limestone -linearly -linkedin -loser -lush -lymphoma -maiden -mains -managerial -mansion -mantle -marketed -marriages -masturbates -matte -memo -merging -merry -messed -metaphor -metre -miami -migrants -migrate -milestone -misconduct -mist -mitigate -molding -monday -morbidity -moreover -mortar -motivate -motive -motives -multiplied -murine -neighbouring -nephew -neurological -newbie -nicotine -nightly -noodles -novelty -nun -obama -obscene -obsession -omega -oneself -ontology -organisational -overwhelmed -pane -paperback -parity -parliamentary -partitioning -pathogens -peach -pellet -peroxide -perturbation -pesticides -pharmacology -pierced -pigment -pitbull -pitcher -pits -pity -pivot -pix -plastics -pointers -polarized -pollutants -portals -portrayed -ppt -presumed -prix -proactive -probabilistic -prognosis -programmable -pronunciation -proofs -prophet -psychic -purses -pyramid -quantify -quota -rachel -rant -rapper -razor -reagent -rectangle -redeem -redundancy -reflex -refusing -rehab -reinforce -relieved -remembering -renovated -repairing -reps -reputable -residing -resolving -respectful -resultant -resumes -retinal -retrospective -revival -rhetoric -ripe -risen -roofing -rubbish -rue -rushing -salsa -savvy -scalable -scalp -scars -seam -segregation -selectively -selectivity -selfish -sen -sensual -shave -sheriff -shores -shortlist -shotgun -sildenafil -sincerely -singapore -singh -sketches -slideshows -slippers -slips -slit -sludge -smash -snakes -sniper -sovereignty -sparkling -spindle -splendid -sponge -squash -stacks -starters -statewide -steer -steroid -strands -strengthened -stretches -stripper -stunt -styled -subsets -summarize -summarizes -sunrise -superficial -supernatural -supervisors -supplementary -supporter -suspend -swinger -swinging -swollen -synaptic -tandem -telescope -tempo -temptation -tempted -tensile -tensions -tents -terrestrial -territorial -tertiary -tester -theoretically -therapists -thereto -thermostat -thi -threatens -thrive -timeless -titty -tolerated -tomography -touchdown -toxins -treasures -triangular -tuna -turbulence -turbulent -unbelievable -unbiased -uncovered -underage -unofficial -unread -unveils -uranium -urges -vain -vans -vase -verifying -vide -volcano -voltages -volts -voyage -waitress -ware -watering -waveform -weakly -wee -weeds -welded -wetlands -absurd -accountant -acknowledgement -acrobat -activating -actuator -adam -addictive -additives -aerosol -alleles -alley -allied -alternatively -ambassador -ambiguity -ammunition -amusement -amusing -anemia -announcing -anomaly -apache -archaeological -ascertain -ashley -aspirations -aspirin -attracts -auditor -audits -augmented -autocad -ave -avid -awe -axe -barcode -barley -batter -beg -behavioural -benzene -bets -birthdays -blamed -blending -boiled -bookmarked -borough -bosses -boston -bouquet -boxer -brace -braking -bridesmaid -broadcasts -brochures -brushed -buffalo -bullshit -bureau -camel -cannon -carving -catalysts -chaotic -chapel -cheque -chop -cigar -citrate -citrus -climbed -cloned -cloves -coke -collaborate -comb -comedian -comfy -commissioner -committing -comparatively -competency 
-complies -compose -computations -condensation -conduit -conformation -congressional -conjugated -consultations -contaminants -contempt -contours -contradiction -convection -corrective -correlates -councils -cousins -covariance -covenant -cowgirl -crashing -craving -cries -crowned -cruises -cubes -culturally -cums -curl -customary -cytokine -cytokines -cytoplasmic -damping -darling -decals -declares -decoder -delegate -demographics -demolition -demons -demos -dentists -departed -descendants -despair -desserts -destined -det -detergent -determinations -deterministic -detrimental -devised -devotion -diagnostics -diapers -diaries -dipole -disappears -disciples -discovers -dispose -dissociation -distanced -ditch -divergence -diy -dom -donating -doped -drafted -dragged -drake -dumped -dunk -dunno -dwarf -dynamical -earthquakes -edible -efficiencies -embedding -emerges -emitted -emulator -endeavor -endure -ensured -envelopes -envy -epilepsy -epithelium -equipments -equitable -erase -err -estates -ethylene -eva -eventual -evidently -evoked -executable -explode -exporter -exporting -expressive -fares -fasting -favourable -fencing -fender -fest -fined -fishes -flick -flooded -flute -foliage -foreigners -forensic -forestry -founders -foursome -fractional -frankly -fro -fulfillment -fundamentally -furnish -furnishings -gag -gagging -gaping -genera -generalization -genital -geo -geological -gestures -gigs -glowing -gnome -goats -goto -grad -grandpa -greedy -grooming -haircut -hamster -haunted -hdtv -headphone -heap -helix -hereinafter -heuristic -highways -hinge -hobbies -holly -honeymoon -houston -hurting -hut -ideological -idiots -immature -implantation -implicated -importing -incidental -increments -incumbent -inequalities -inland -insignificant -inspectors -integrates -interpolation -intricate -intrusion -invoked -irrespective -ischemia -iterations -january -jason -jesus -jewel -jim -juices -jun -karma -lacked -ladyboy -laminate -lapse -lawful -lawsuits -leagues -leveling -lied -lily -lineage -lipstick -loci -logistic -lone -longevity -lynn -majors -maturation -maze -medals -medicinal -melanoma -mentoring -metabolites -michelle -minors -miserable -missiles -mitochondria -montage -monument -mortal -motivational -motorola -motto -mould -multicast -multivariate -mythology -nap -necklaces -neighbour -neonatal -neuron -nifty -nigga -noises -notorious -november -novice -oath -obituaries -objections -offended -optimizing -organizers -originals -oscillations -oscillator -outpatient -oversized -panty -parasite -parasites -particulars -pathetic -pathological -patrons -pear -pearls -peeing -pellets -pelvic -penguin -pensions -percentile -perky -petitioner -pharmacist -phenyl -planners -plateau -playful -ply -poets -ponds -popped -ported -postcards -pouring -praised -predecessor -prednisone -predominant -preferable -prefers -prestige -principally -principals -professionalism -profiling -progressively -prosecutor -protease -pubs -pudding -punish -quantified -questionnaires -quo -rad -radios -rains -readiness -realities -recap -reciprocal -reconciliation -recs -recursive -referee -reflux -refuge -refugee -registrar -registrations -rejects -reliably -remembers -renamed -requisite -resale -resignation -resigned -rifles -ringing -rinse -riot -robustness -rocker -rookie -runescape -rupture -sack -sadness -safari -salvage -sanctuary -satellites -scatter -sclerosis -screensaver -scrolling -scrub -sculptures -sean -secretly -seedlings -selector -separates -september -seq -settlers -shakes -showroom -signalling -sims 
-sinking -skipped -skype -slammed -slash -slender -slowing -snapped -soaked -socialist -sock -sockets -solicitation -solubility -sorrow -sourced -sourcing -spears -speedy -spelled -spikes -spirituality -spleen -sprint -stabilized -standpoint -staple -stare -starred -stitching -stringent -submissive -subunits -sucrose -superstar -swedish -sweets -swine -swings -swiss -synchronized -synchronous -synonyms -synthase -tadalafil -tagging -tanning -tapping -tasted -temps -terribly -theatrical -thief -thinner -thunder -tights -tomb -toon -toronto -touchscreen -transmitting -traveller -trays -tread -triangles -troll -trusts -ubiquitous -uncover -undergone -understandable -unfamiliar -unite -unpleasant -unreasonable -unsafe -urea -veil -ventral -verbs -vibe -vibrations -villas -violating -visions -viz -vodka -vortex -warranted -waterfall -wavelet -weave -weighting -weld -whipped -widescreen -wikipedia -wolves -wonderfully -wording -wrench -abbreviation -accession -accomplishment -accustomed -acer -acetic -acquires -actresses -adaptor -adjoining -adrenal -advantageous -adverts -advisable -advising -aerobic -affirmative -ageing -agonist -airborne -ale -alkyl -alleviate -allowable -allowances -altar -amorphous -amplitudes -annoyed -ans -ante -appealed -aroma -artworks -aspiring -assigning -assigns -assorted -asymmetry -atm -atop -attainment -attendant -attire -auditing -aureus -austin -authoritative -automobiles -avoids -await -awaited -baggage -bakery -bananas -barbie -bargains -basil -batman -beaded -beaver -beforehand -begging -behaviours -believer -bella -benchmarks -benz -binder -biographical -biosynthesis -blender -blink -blondes -blunt -blur -blush -boo -boosting -bots -bottoms -bouncing -bows -branching -brazil -britney -buff -buffers -buggy -bum -butterflies -cabbage -cannabis -capacitance -capitalist -capped -caravan -carbohydrates -carbs -cardio -caregivers -carnival -carolina -carton -carts -centred -centric -ceramics -cessation -champ -championships -characterised -chars -checkbox -chemically -chevrolet -chiefly -chimney -choke -clad -classifier -clinicians -clown -cocktails -coed -coincide -colourful -commanded -commits -communal -communist -companions -compartments -competencies -complication -composites -comprehend -computes -conceive -condemned -condensed -conducts -confidently -conformity -confront -conjugate -conquer -constructions -contemplated -contextual -contiguous -contingency -contrasting -conveyed -convictions -cords -corps -correcting -correspondent -counselling -coup -courtyard -couture -coz -crawler -creditor -crises -cruising -crushing -cutoff -cytochrome -dallas -dave -dba -decisive -defended -degeneration -degraded -dehydrogenase -dentistry -depleted -depot -deprivation -designate -detained -detects -diego -diligence -dire -directs -disappearance -discharges -discriminate -dispenser -disregard -dissipation -distinctly -distinguishing -dit -dominates -doo -dosing -doubtful -downgrade -dragging -dragoon -drank -drip -dripping -dropdown -ducks -dues -dug -duplicated -dyes -dynasty -easter -ecommerce -economists -educating -educator -effected -electrolyte -elephants -elf -embarrassed -embraced -emperor -empower -empowerment -enduring -engages -entrepreneurial -epidemiology -equip -erect -esp -espresso -evacuation -excuses -exec -exert -exhibiting -exogenous -expectancy -experimentation -exporters -extracting -fab -faded -fantasies -fauna -fermentation -fertile -fibroblasts -filthy -finer -fingerprint -fir -fishery -fishnet -flagship -flawed -flown -fluoride -fluxes -flyers 
-foolish -formations -formulate -freaking -fronts -fullest -fundraiser -furious -gauges -generosity -germ -ghosts -gladly -glamorous -globalization -glutamate -gomez -gothic -grabbing -greet -greeted -hearted -helmets -hemisphere -hereafter -heroic -heroin -hitch -hmm -homeland -homosexual -horizontally -hover -hugs -hulu -humorous -hydrophobic -hydroxy -icing -illegally -imitation -immersion -implying -infiltration -innings -innocence -insists -instantaneous -insurers -integers -interruption -interstate -interviewing -inventor -invoke -inward -iodine -irony -irradiated -ischemic -isotope -issuer -jars -johnson -kai -katy -kevin -keyboards -keypad -kidneys -kin -kittens -knocking -koi -landfill -latch -launcher -legit -lemma -licks -lions -lipids -litres -lobster -longtime -loosely -lumbar -lunar -lure -lyric -magnification -makeover -mandated -mango -manifestations -manipulated -manners -marc -mare -marketers -masturbate -maxi -meats -mediate -mer -metastatic -meth -mia -midget -milling -minimized -mismatch -moore -mosque -motifs -muslim -myriad -namespace -nationality -natives -nerd -nesting -nodded -nominee -normative -noteworthy -notifying -notwithstanding -nous -nouvelle -nova -nyc -obedience -observable -occurrences -oct -october -offences -offenses -oily -omission -overdose -paddle -palate -panda -pans -parentheses -pastry -pasture -pathogenesis -paved -penetrate -percussion -perfusion -permissible -phantom -pharmacies -phenomenal -philosopher -phosphatase -photoshoot -piles -pillar -pitched -pituitary -plague -plains -plantation -playboy -playlistitem -pleasures -pointless -polarity -polyethylene -polymerization -polymorphism -pooled -poop -poorer -popcorn -possessing -preaching -precedent -predator -predators -predetermined -predictors -preset -prevail -priori -proclaimed -prod -professions -prohibition -promoters -prompts -proposing -prose -psychologist -publishes -puppet -pups -quake -rang -rave -reacted -reacts -reagents -realism -realizes -rebels -rebuilding -recharge -rechargeable -recommending -reconstructed -redirected -redistribution -refill -refrain -refundable -regain -renders -rents -repayment -resection -resize -resonant -respiration -responsiveness -retina -revise -rewrite -richness -rio -robotic -rogue -roommate -ropes -rot -rotated -rum -rumor -rustic -safeguard -salinity -sands -sanitation -saver -scarves -scissors -scout -seams -seize -selena -sentencing -sera -servo -seventy -shaded -sharks -sheath -shelters -shortened -sic -sidewalk -signage -sketching -skyline -slam -slick -slowed -smoother -soak -softball -solder -solitary -som -soothing -sophomore -spacecraft -spacer -spain -spans -sparks -specialties -spectrometry -speeding -spins -splits -sprayed -staged -stationery -statues -stocked -storyline -stroller -stumble -submarine -summers -suzuki -sweetheart -swimsuit -swimwear -swingers -sworn -symposium -tablespoon -taller -tanned -ted -telecom -temper -tex -textiles -thesaurus -thrill -tighten -toilets -tony -tornado -totals -trajectories -transducer -treadmill -trench -trumpet -trustees -truths -turbines -turf -turtles -twat -ultrasonic -unaffected -undesirable -uneven -unforgettable -unhealthy -unpredictable -unusually -uploads -urgency -urn -uterus -utter -veggies -vents -versatility -vesicles -vicious -vigorous -virginia -waits -walker -wallets -wandering -waterfront -wavelengths -webcams -whistle -whitening -wigs -williams -workmanship -workspace -wrapper -wrinkles -yourselves -zipcode -zithromax -abc -abolished -abstracts -accelerator -accrued -acetone 
-acquainted -activates -activator -affective -affiliations -aftermath -afterward -aground -almond -aloud -amine -amplifiers -analyzes -anatomical -angelina -anthem -antimicrobial -antiques -archival -arid -armies -armour -arranging -articulated -artifact -ashes -aspire -asserts -astrology -astronomy -asynchronous -athletics -attaching -attic -audited -auditors -autoimmune -backstage -backups -bangs -barbecue -basins -bastard -bathtub -bbc -beauties -beetle -behold -bends -bilingual -biotechnology -biting -bitrate -bliss -blockade -blossom -boating -bon -bookstore -borrowers -boxed -brian -briefing -brooke -buffered -burgers -bursts -busted -butts -cages -callback -cally -camper -campuses -captions -captive -carotid -carpets -categorize -cathedral -cathode -cedar -centrally -cert -chap -cheerful -chewing -childcare -chromium -chunks -classifications -climatic -cockpit -cognition -collectible -colorectal -commissioning -commute -compost -concluding -condoms -cones -confess -confession -confuse -congratulations -consolidate -cont -contestants -contrasts -converge -converse -cookbook -corset -cottages -cougar -crafting -creams -cruiser -crunch -cuffs -curated -curing -cushions -daemon -daisy -daniel -daring -decal -declaring -deductions -defends -defenses -dents -depicting -derives -destroys -determinant -diagnoses -diaphragm -dinosaur -disadvantaged -disagreement -discloses -disclosures -disputed -distressed -divers -diversified -diversion -docking -doom -dope -doubling -downside -downstairs -dragons -drains -drastic -dreamed -droid -drunken -dubai -dumping -dwell -dyed -eagerly -earns -eclectic -edema -effluent -eigenvalues -elders -elective -electrostatic -elemental -embodied -embodiments -eminent -emitting -emma -encode -encoder -encompass -endpoint -enema -engraving -entails -entitlement -entrepreneurship -enzymatic -eric -erotica -erroneous -etching -eternity -ethernet -etiology -examiner -excretion -exemplary -exhaustive -existent -exits -exploded -exploits -externally -extremes -facets -fairs -farewell -faucet -fetus -fidelity -filesharing -fingertips -firearm -firstly -flair -flashlight -flawless -flop -floppy -forefront -forgetting -forwards -ftp -furry -furthermore -gagged -gearbox -genocide -genotypes -geology -georgia -getaway -glee -glycol -goodwill -gov -grained -gravitational -greed -grids -griffey -hackers -hallway -haze -hears -heartbeat -heavens -hectares -helium -hides -highs -homeowner -homology -hoodies -hoses -hostel -hostname -hotter -hubs -hybrids -hydrocarbon -hydrocarbons -hydroxide -hypothesized -ibid -ical -illustrating -imbalance -imminent -imperfect -implicitly -inactivation -inaugural -indispensable -indonesia -inefficient -inertia -inf -infer -influx -informing -infra -initialization -initiating -injustice -innate -installs -institutes -interacts -intercept -interdisciplinary -interleukin -interpreter -intuition -investigates -invoices -ireland -itching -johnny -kardashian -kay -korea -lad -laden -lakh -laminated -lantern -lated -lava -lecturer -legitimacy -levy -lexical -liaison -liberation -licensee -licensees -limousine -liners -locator -lolly -louder -lows -lumber -machining -macros -mana -manifestation -manure -manuscripts -marching -marina -marvelous -masculine -masked -mastery -materially -maximizing -meanwhile -mellitus -meme -mentality -mes -messing -methodological -michigan -midi -migraine -miller -mindset -mined -ministries -misery -modelled -modulated -monuments -mornings -morphine -moss -mouths -movers -mower -multinational -multiplier -mumbai 
-mums -murders -mustang -mysteries -narratives -neatly -netbook -nicer -niece -nightlife -nintendo -nitro -nucleic -objectionable -obtains -occupants -occupies -oceans -offending -ohm -opaque -orally -oscillation -outright -oxidase -oxides -oxidized -pageant -pandora -panoramic -parser -parsing -parsley -particulate -pathogen -patron -payload -pea -pedals -pencils -perceptual -perennial -perks -persists -personals -persuade -pesticide -pests -picturesque -pid -pillars -plaster -pleaded -plywood -poetic -polishing -polynomials -possessions -predicate -prerequisite -prescribe -preserves -presidency -presses -pristine -progressed -propane -prosecutors -protections -protons -pulley -pup -qualifies -quantification -quests -racer -radiant -rebellion -rebound -rebounds -recourse -recruits -refining -refractory -refunds -reggae -rehearsal -reinstall -remarked -reminiscent -remuneration -repay -repertoire -replacements -replying -respecting -retweet -reusable -revisit -rheumatoid -rhyme -rhymes -richard -richest -ridden -righteous -rightly -riots -rituals -robe -robin -roughness -roulette -roundup -rubbed -rulers -rumours -sailor -sanitary -sara -sayings -scalability -scarcely -scat -scribd -seeded -sep -septic -ses -sheds -shields -shines -shrubs -shuffle -sibling -sigma -skateboard -skeptical -skid -slaughter -sleepy -slogan -slutty -smoker -smoothing -snaps -snowboard -societal -sociology -softer -softly -solidarity -solves -soybean -spares -spawn -specialised -specialization -spectrometer -spoilers -sponsoring -sprite -spun -stabilize -stacking -stakeholder -standalone -stenosis -stimulates -stray -striker -stroll -stub -stuffing -subgroups -subsystem -sucker -summaries -superiority -supernatant -supplementation -surrounds -sustaining -swallows -sway -swivel -swords -symmetrical -syn -taboo -tackles -tactic -taps -taxa -teamed -teammates -teddy -tees -tempting -tendencies -tendon -terminus -testify -textual -thermometer -thieves -thriving -tic -tighter -toddlers -tokens -topography -topological -topping -tors -toshiba -tossed -tougher -towing -traceable -trailing -translating -transmissions -transporter -transporting -transsexual -trapping -tripod -trojan -trolley -trough -truncated -tum -tunnels -turquoise -tying -ulcer -ultraviolet -uncensored -unexpectedly -uniqueness -universally -unseen -upholstery -uterine -vanessa -vault -vested -vie -vine -volunteering -vows -wafer -walnut -wards -washable -weaving -wetland -whales -whitepages -whores -widths -william -wiped -woodland -workbook -worthless -writ -yelling -youngsters -youths -yuan -zebra -zur -abuses -abusing -achievable -acronym -acupuncture -adapting -adele -adequacy -administering -admired -adopts -adsorbed -adulthood -adventurous -aerospace -aesthetics -affidavit -affirmed -aide -airflow -aisle -akin -alice -allen -alliances -alphabetical -amounted -anaerobic -ancestor -anchored -andy -anion -anne -antagonists -antioxidants -apical -apologies -apoptotic -approves -approximations -apron -aqua -arbitrarily -arizona -arsenal -arsenic -articulate -artillery -asap -asp -aspiration -assassination -astonishing -atlanta -attacker -aura -avant -backlinks -baker -bankers -bankrupt -banquet -barred -batting -beginnings -beyonce -bezel -bittorrent -blackjack -blasts -bodied -bomber -bombshell -bottled -bras -breasted -brew -brewing -briefs -broccoli -budgeting -bulky -bulls -bully -bushes -cabins -calves -candid -canyon -capacitors -cardigan -carers -casts -casualties -catalogs -catastrophic -caves -cavities -cds -celery -censorship 
-centrifugation -cerevisiae -certifications -chained -chalk -changer -charles -charlie -checker -chennai -chew -chilled -chromatin -chromosomal -circuitry -circumference -claimant -clamps -clarified -cleanse -clergy -climax -clippings -clockwise -closures -clustered -clutter -coaster -coastline -cobalt -cole -collectibles -colorado -columnist -commencing -competed -compiling -computerized -concentrating -condominium -conductance -conductive -conferred -configuring -confinement -conservatives -conserve -consortium -containment -contend -continual -contradictory -converters -conveyor -cooks -cordless -cork -cradle -crate -crawling -crease -creed -cruelty -cruz -cyst -cytoplasm -damned -dams -darn -deception -deduced -deducted -deflection -demise -dendritic -dependable -depended -depressive -devote -diameters -digested -dildos -dilute -diminish -dinners -dipping -disrupt -disrupted -dissolve -dist -distracted -diversification -divx -dll -documenting -dolphin -dominating -donkey -downhill -downturn -doxycycline -draining -drawback -droplets -drupal -dubstep -dwellings -economist -eighty -elapsed -elongated -elusive -emailing -embassy -emblem -empowered -enactment -enforcing -enlisted -ericsson -etched -evo -excavation -excerpts -exile -exon -exploiting -exponent -exponentially -exposes -eyebrows -facet -familial -familiarity -fascinated -feb -femme -femoral -fences -fer -figuring -filings -fingered -firefighters -fishermen -flagging -flask -flavored -flaw -flea -flee -flops -fluffy -foreground -francisco -freebies -friendships -fulfil -functionally -funniest -fury -gamer -gangs -genesis -geographically -geometrical -gigantic -glare -glazing -glossary -glutathione -glycerol -granular -granules -grating -greatness -grin -grinder -grounding -growers -guessed -hail -halogen -han -handing -hanger -hardened -hardship -headlight -healed -heightened -heir -helicopters -hemorrhage -heparin -hinges -hires -histogram -hoe -homicide -homologous -hoodie -hoop -hormonal -hospitalized -hue -hydrocodone -hypoxia -icy -ignores -illustrative -imaginative -immersed -immunization -immunodeficiency -implanted -impurities -incapable -incision -inclination -ind -infantry -infinitely -infused -inhibiting -injunction -insanity -insensitive -inset -insomnia -inspires -instinct -interchange -interfering -interferon -intern -intervene -invariably -investigative -ipa -iris -ironic -isotropic -jacuzzi -jade -jailed -jewels -jimmy -jquery -junctions -jus -katie -kcal -kernels -killers -kite -knitted -knockout -kong -kung -landmarks -laura -lavender -lawmakers -legislators -lent -lexapro -librarian -libs -lifestyles -lightboxes -linkages -listens -logically -lollipop -lookin -lopez -lotus -lowers -macrophage -malayalam -malaysia -malls -malpractice -mania -manifested -mans -marvel -mash -meaningless -mediator -menstrual -methylation -microorganisms -migrant -migrated -migrating -miley -mimic -miracles -moi -monk -monomer -morgan -mosquito -motel -mucosa -mute -nationals -nba -ned -negotiable -nerdy -nikon -ninety -normalization -notebooks -numerically -observational -occlusion -occupying -oder -oops -oppression -optimism -ordination -orientations -ous -outbound -outrageous -outset -owes -pakistani -pancreas -paradox -parole -parse -participates -passions -patrick -patterned -penetrated -peninsula -perpetual -pervasive -perverted -petty -photons -pier -pies -pint -pioneering -pitching -pivotal -planetary -plating -plum -plunge -pneumatic -polling -polypeptide -populous -porosity -postponed -potency -powdered -powders -prayed 
-precursors -preferential -prescribing -presenter -presidents -pretending -pricey -prisons -profanity -proficient -prohibits -pronounce -prospectus -prostitution -provisional -provoking -pulsed -qty -quirky -rag -ration -reactors -rebates -reckon -redesign -reductase -reed -referendum -refractive -refreshed -relapse -replicated -repression -reprinted -resemblance -reservoirs -resilience -respectable -restraints -restricting -resurrection -ret -retiring -rhythms -ribbons -rick -rite -roaming -robbed -rollers -roman -roofs -routed -rubs -sailed -salads -salesman -sandstone -sapiens -sapphire -savage -screams -scuba -seconded -selves -sem -semen -sermon -serotonin -sew -shafts -sham -shortcomings -shortening -shouted -shouting -showcases -shredded -siding -silky -sine -sip -sling -slippery -slipping -snapshots -socioeconomic -solenoid -sorority -spandex -sparkle -spatially -speculative -spiders -spills -spoil -spontaneously -spraying -spur -squirrel -staircase -stalls -stared -steals -stealth -stereotypes -stern -stitched -stitches -stools -strata -strawberries -strives -striving -stunned -stylist -subordinate -subwoofer -succeeding -suffix -sugars -superintendent -supervisory -surfactant -sweaters -swell -sydney -synonymous -synopsis -syringe -systolic -tad -takeover -tangent -taped -tariffs -taxonomy -tbsp -teamwork -tearing -tedious -temperate -tempered -tenders -thanksgiving -thence -theological -thereon -thoracic -thrilling -timezone -tint -tipped -tis -titted -toned -tort -tortured -toughest -tracts -transduction -transistors -treasury -tribunal -triggering -tummy -tumours -turkish -turnaround -tutoring -undermine -unfinished -unidentified -uniformity -unix -unspecified -unwilling -upfront -upscale -valleys -victories -villagers -villain -virtues -visualize -vitality -vogue -voiced -vols -wander -warehouses -weakened -wellbeing -widest -withholding -witty -wrath -wrought -yay -yeh -yep -youthful -abbreviations -ablation -accelerating -accusations -activism -adaptations -adderall -adjective -adjusts -admiration -admitting -advisers -advises -advocated -affirm -aggressively -agility -airplanes -ajax -alexis -alkali -allocations -alpine -alternator -ambitions -amidst -ancestry -anchors -andrew -anisotropy -annotations -annuity -anonymity -apprentice -approvals -approximated -arabian -arginine -arrogant -assayed -atlas -attentive -augmentation -australian -awaits -bailout -banker -baptism -battling -benches -beware -biases -bibliographical -bindings -biographies -biscuits -blaze -blazer -blocker -blurred -boast -bordered -bounty -boyfriends -breathable -breeders -bridging -bromide -brunch -bruno -brutally -bun -bundled -bungalow -bury -caching -cafes -calculates -canoe -caramel -cashier -caspase -caste -catchment -catholic -censored -chiefs -chilling -chipset -cider -claw -clears -colder -collaboratively -coma -comm -compassionate -composers -concave -concentrates -concession -concessions -concurrently -condolences -conductors -confer -confrontation -conjecture -conn -conquered -conquest -consequential -constipation -constructor -contested -continents -contractions -corridors -cot -counselors -courteous -craftsmanship -crystallization -cumming -cur -cyrus -cysteine -daunting -dazzling -dealings -defenders -departmental -depict -depressing -descended -descriptor -desks -detox -dictate -digitally -dime -dine -dipped -directives -disabling -discard -discography -discourage -discretionary -disguise -disparity -dissection -distinctions -distraction -diva -divides -dizziness -dolphins -doomed 
-doping -dove -drafts -drifting -drone -ducts -dusty -eccentric -edged -eighteenth -eldest -electrically -elevations -elicited -elliptical -elongation -embracing -eminem -empathy -emphasizing -empirically -encompassing -endemic -endocrine -endowed -enrich -epidermal -eps -ere -erectile -eruption -escapes -escrow -esters -evaluates -exaggerated -exchanger -exchanging -excursion -experimenting -exploratory -explosions -explosives -expo -extinct -faction -facto -famed -fertilization -fifa -filament -filaments -filmmaker -fins -flakes -fluctuation -fools -footnote -forge -forks -formidable -freezes -frightened -frogs -fueled -gals -generously -glued -goggles -graceful -gracious -graders -grasses -grassroots -grated -gratefully -grille -groceries -grooves -guts -handicap -harley -heather -heirs -helpless -hemoglobin -hesitation -hippocampal -histone -homage -homosexuality -hong -horizons -hugely -humid -hun -hydration -hypnosis -ian -illinois -impartial -imperfections -imposes -imprint -inbound -indefinitely -indictment -indifferent -indulge -infancy -infertility -informs -inhalation -inherit -injector -inks -inmate -inning -inpatient -inscription -instituted -intangible -intellect -interchangeable -interstitial -intimacy -invaded -inventories -irish -irons -irritating -itinerary -ivy -jacks -jeff -joking -jolie -josh -journeys -julia -kayak -knobs -knowingly -labelling -landlords -laps -latino -legged -leggings -lest -liar -liberals -lighted -lipoprotein -lmfao -localities -logout -losers -loudly -lupus -lymphocyte -madison -mafia -mage -malfunction -manganese -manila -manpower -markup -masala -mascara -mascot -masking -mason -masonry -massacre -mastercard -mastered -maths -mechanically -megapixel -megavideo -melee -melons -memorandum -menopause -metastasis -milestones -miners -minimise -minimization -minimizes -misspelled -mobilization -modalities -monks -morally -mouthful -muff -mural -nat -nav -navel -nearer -nec -neckline -needy -nests -networked -nicki -nightclub -ning -nominate -nominees -noticing -nucleotides -nudes -nuisance -numbering -obligated -observes -ocular -onsite -openness -orbits -orderly -organising -orgasms -osteoporosis -outing -outsole -ovens -overcoming -overhaul -overlook -oversee -pacific -palms -parallels -paramount -parcels -partnered -patriotic -pavilion -paving -payoff -pci -peeled -penetrating -periphery -persecution -persona -petals -petitions -phenol -philosophers -phylogenetic -pianist -pinned -pitches -placements -plasticity -platelets -playstation -plotting -poised -polyurethane -portrayal -practicable -pragmatic -prank -preach -precipitate -predefined -prenatal -presume -presumption -progesterone -prognostic -progresses -projecting -prominence -prophecy -propositions -prosperous -prostitute -prototypes -prudent -psychologists -punched -punctuation -queens -queues -radically -radiotherapy -raining -ranger -reacting -reap -receivable -reckless -recognizable -rectal -redistribute -refrigeration -refunded -reinforcing -relays -relocated -remediation -reminders -reorganization -reprints -resembling -residences -resign -resilient -rested -resumed -righteousness -ripping -rivalry -rocked -ron -rover -russia -sacrificed -sacrifices -safest -salient -salons -sanction -sanctioned -sandal -sap -scanners -scrapbook -scratching -seamlessly -seeders -seeker -sesame -seventeen -sewn -sexes -sexiest -shading -shale -shedding -sheepskin -shortages -shouts -showcasing -sideways -siege -sightseeing -signifi -simplex -skinned -skipping -slack -slightest -sloppy -smiley -sneaker 
-snippet -soaking -sober -solver -somatic -soonest -sophistication -southeastern -spaghetti -spas -speculate -speculum -splicing -spoiled -spores -spotting -squeezed -stabbed -stash -stigma -sting -storytelling -strategically -stubborn -studded -stupidity -submerged -subspace -substitutes -subtypes -suffice -suggestive -sulphate -summed -superhero -supper -surcharge -survives -suspensions -swallowed -sweating -syllabus -symptomatic -synonym -syntactic -tapered -tapped -tart -tau -taxed -tenor -tentative -testament -testimonial -theatres -thermodynamic -thirst -thrombosis -tidy -tiled -timetable -ting -tokyo -toolkit -tout -tracer -tractors -translational -translocation -treaties -trophies -tshirt -tucked -turmoil -tutors -tweak -tweeted -twists -ulcers -unanimously -uncut -undergoes -unfold -unilateral -unloading -unparalleled -unreliable -unrest -unsecured -unsolicited -upheld -uptime -valence -vampires -vapour -varsity -vastly -vat -vending -vibrational -viewable -virtualization -viscous -visualized -vixen -vocalist -vowel -waiter -waived -wakes -walled -walmart -wand -warcraft -warez -warp -washers -watermark -waving -weary -whispered -wil -wildly -wink -winters -wisely -wiz -woes -woody -yum -zeros -aaa -abrupt -abruptly -absorbance -academia -accesses -accountants -acidity -adenosine -admissible -adolescence -aft -agile -agitation -agonists -algebras -amanda -amused -analysing -angiotensin -anode -anomalous -anthony -anthropology -aorta -appellate -appropriation -assimilation -attaches -attenuated -atypical -auch -audible -auditions -authenticated -autos -avenues -backseat -ballast -bangalore -bans -baptized -barefoot -batches -bbq -beasts -behaved -billy -bimbo -bing -biologically -bizrate -blanks -blasting -blindfolded -blindness -blockers -blooms -blueprint -bluray -bodybuilding -breakers -breeder -brightest -bros -brushing -buckets -butyl -cadmium -canals -carbide -caregiver -carpenter -cashmere -catalyzed -cellar -centrifugal -centrifuged -chased -chats -cheated -checkpoint -chiffon -chiral -chloroform -cho -chocolates -chopper -christina -chronicles -chronological -cipro -circulated -clasp -claws -cleanliness -cleans -clipping -closeup -cobra -cohesion -coincides -colonization -commanding -commentators -commissioners -commons -complexities -complied -complying -composing -compress -compromising -concomitant -condenser -congrats -connective -consciously -consequent -conspicuously -constitutive -converges -corre -cortisol -crackers -criticisms -crow -cuckold -cultivars -cupboard -cures -curls -curvy -cutaneous -cystic -cysts -cytotoxic -dads -danced -daytona -debated -declarations -defeating -degenerate -dehydration -deletions -demi -departing -deploying -dept -derby -detachment -devoid -dialect -diastolic -dimer -dimethyl -ding -dinosaurs -diodes -disappoint -disastrous -disciplined -disclaims -discouraged -discreet -dislocation -dispersal -diver -dominatrix -doorstep -downloader -drawbacks -dread -dreamweaver -duet -dumps -earnest -earphones -effector -eigenvalue -electrochemical -elegans -elicit -elk -embarrassment -embossed -emerald -emergent -emulsion -enclosures -encodes -endeavors -endeavour -ergonomic -escaping -ethic -etiquette -evaporated -exceedingly -excursions -exemptions -exiting -expansive -extremity -fashions -fearful -fedex -fellows -fenced -fiduciary -fiery -figs -filipina -finalists -fireplaces -flaps -floats -florist -fluent -foe -footsteps -formulae -fortress -fractal -fragmented -fragrances -franchises -freeway -frightening -fruitful -funnel -fuses -fuss 
-gala -germination -gestation -gin -glaze -glycoprotein -grammatical -granddaughter -graves -gravy -grim -gymnastics -halves -handicapped -handwriting -haters -hawaii -hawk -headquartered -heats -heavyweight -helical -helpers -hereditary -heroine -hexagonal -hilton -hind -hinder -hippocampus -histological -hog -holmes -homo -honoured -hooded -hopping -hops -hospitalization -housekeeping -hungarian -hunk -ico -illicit -immensely -immortal -immunology -imprisoned -inclusions -indiana -inductive -inert -inflated -infor -informa -ingestion -inhabited -initialize -inject -insecure -insoluble -instruct -insulating -insulting -intensely -interconnected -internships -interoperability -intersections -inventions -irrational -irresistible -irreversible -jams -jasmine -jenna -jon -judiciary -keel -kettle -kidnapped -killings -kinases -lactate -lambda -larval -lashes -lasix -leash -legality -lends -leo -leon -lettering -ligament -ligne -limo -liters -lizard -lobbying -logarithmic -longing -lucrative -lug -lumen -luminosity -luminous -mags -mahogany -maine -majestic -manageable -mandates -maneuver -mantra -marsh -mattresses -maxima -maya -mayo -mazda -meadow -medically -mediocre -megan -melissa -memberships -memoir -memorabilia -mentors -mergers -mermaid -mesothelioma -metastases -microarray -minaj -minced -mirrored -mitsubishi -monastery -monies -monoxide -montana -mor -morale -motivations -muddy -multiplying -mystical -nah -nanny -narrator -narrower -narrowly -nationalism -needless -negatives -nero -nexium -nexus -nina -northeastern -nostalgia -nympho -oblique -occured -olives -omit -opioid -oranges -ordinarily -oregon -originality -originates -ornamental -ost -outboard -outflow -outta -ovary -paintball -painters -pallet -panasonic -pancakes -pant -pantry -paralysis -paranoid -paranormal -pardon -pared -parker -participatory -partisan -passports -pastoral -pathogenic -paused -peacefully -peaked -peanuts -pedestal -pedigree -peep -penicillin -persuaded -persuasive -pes -pharmacological -phenotypes -pickups -pimp -pioneers -plaid -plainly -plaques -plasmids -playable -plaza -pledged -plumber -plumper -poke -polygon -poo -poorest -portray -potty -powerpoint -practise -prairie -precedence -precipitated -preclude -pregnancies -pretreatment -priceless -priming -procession -prolific -prophets -proscar -prosecuted -provocative -prozac -psychiatrist -psychiatry -psycho -psychotherapy -puff -pun -punches -punjabi -purchasers -pylori -quantitatively -queer -quits -quot -radioactivity -rai -raids -rammed -randomised -receivables -recess -reconcile -recreate -redeemed -redesigned -redevelopment -redox -referencing -refinance -reflector -regiment -rejecting -relocate -remedial -remission -remover -renaming -renovations -republican -residuals -resistivity -retardation -revert -revisited -revive -revolutionize -rigidity -roadside -roadway -ross -rounding -saggy -sailors -saliva -sampler -sandra -satire -scheduler -scientifically -scooters -screwdriver -scripture -searchable -secreted -sect -seductive -sentiments -separator -serine -servings -shaken -shattered -shepherd -shielding -shifter -shitty -shopper -shoreline -shrine -shrinking -shuts -shutting -silently -skis -slams -slang -slid -slows -smashed -smear -snail -socialism -soles -solicitor -solute -sparked -spawning -spear -spectacle -spectators -spirited -splitter -spokesperson -spool -spruce -squid -stabilizing -stair -stalk -stan -stark -starvation -stationed -stent -stew -stipulated -stockholders -strained -strapless -stricken -striptease -structurally 
-strut -stuffs -stylus -subclass -submits -substituting -substitutions -suicidal -sunflower -sunk -supermarkets -suppressor -surfers -surgeries -surrogate -swamp -swarm -swf -tack -tackling -tally -tame -tanker -tant -taper -tattooed -telecommunication -terminating -thailand -thematic -thinkers -thinning -ticks -tightening -timers -timestamp -todo -toluene -tongues -torsion -tot -totaling -townhouse -trainees -transcribed -transfection -tremendously -trending -trilogy -trimming -trolling -troop -trunks -tungsten -twisting -typo -unconditional -underscores -unequal -uninterrupted -unitary -unmatched -unopened -unrealistic -unresolved -uploader -urdu -urgently -urging -utensils -variances -veneer -vera -verge -verizon -vibrating -vines -visuals -volunteered -vulnerabilities -warmed -wary -watchers -watercolor -waveguide -wedges -wen -whiskey -whisper -wholesalers -widening -windy -winged -wipes -withheld -woo -xenical -yelled -yer -abbreviated -abound -abrasive -acetyl -ache -acknowledging -acquaintance -adams -addicts -adjectives -adjuvant -adrenaline -adversary -aforesaid -agarose -aggravated -airbag -airbags -alarming -alexander -allotment -allotted -almonds -alters -alumina -alveolar -amoxicillin -ancestral -ancillary -angled -ani -anisotropic -aquifer -arches -archiving -arcs -arctic -argon -arisen -articulation -ascribed -assembling -assertions -assures -atherosclerosis -atrophy -atv -aug -augment -autograph -automate -autopsy -avian -avocado -avril -awakening -badass -bailey -banning -bas -bays -behaves -belongings -bespoke -bishops -blinking -boilers -boosted -boosts -booths -booze -botanical -bounces -boycott -branched -brandy -brethren -brightly -brittle -broaden -broadening -brow -budding -bunk -bur -burdens -bureaucracy -bursting -calvin -capacitive -capitalization -carat -cardinal -carp -cashback -catastrophe -catcher -catchy -ceases -cello -chandelier -chant -chargers -cheering -cheesy -cher -cherish -cherished -chilly -chiropractic -chops -chunky -cisco -clashes -cliffs -climates -climbs -coagulation -codon -cohesive -cola -colitis -collaborations -collars -colouring -comforting -commanders -communicates -complemented -complements -conceal -conclusive -condemn -conducive -conferencing -configurable -conformational -conforming -conspicuous -constituency -contends -cookware -cooper -corneal -corpse -costa -coursework -covert -cracker -cranes -crawlers -creatinine -crispy -criticize -cropped -cropping -crores -crowns -curricula -cutest -cutters -cyclists -dal -davis -daycare -debuted -debuts -deceptive -deco -dedicate -defeats -deformed -degrade -delegated -delights -denominator -densely -depiction -deserving -desktops -devastated -deviants -devise -diazepam -diced -dictated -diplomacy -disparate -dispensing -disruptive -dissatisfied -dissent -distinguishes -distortions -disturb -divergent -doctrines -dolor -domino -doorway -dramas -dresser -drowning -dryers -duel -dunes -durations -dusk -dyke -eater -echoes -eff -effortless -effortlessly -elbows -elizabeth -elliptic -emily -emit -emoticons -empowering -endings -endured -engagements -enlightened -ensuing -environ -erased -estradiol -excessively -exclusions -executes -exerted -exhaustion -expansions -expiry -explodes -exposition -eyeglasses -facade -facilitation -faithfully -famine -fart -fascination -fasteners -favoured -federation -ferrari -festivities -fiat -fibrillation -fibrous -filmmakers -finalized -fisher -fisherman -flank -flattering -flavours -flipped -flipping -flourish -flushing -forage -forearm -formaldehyde 
-formalism -fossils -fostering -freaky -freedoms -frenzy -frying -fulfills -fuller -fulltext -gait -galvanized -gamble -gaseous -gatherings -gathers -gearing -gelatin -generality -genomes -geothermal -girly -glacial -glaucoma -glide -goo -greasy -grills -grit -groundbreaking -grunge -gsm -guarded -guardians -gulf -gums -hairless -hammered -handcrafted -handcuffed -hangover -harris -hatchback -hating -havoc -headband -hearty -helm -hmmm -honours -hoops -hopeless -horrific -hostility -hotmail -hotspot -housewives -humanities -hydroxyl -hyperlink -identifiers -idols -iff -immunoglobulin -imo -impair -impending -importer -imposition -improperly -incontinence -indebted -indefinite -inducible -inferences -inflicted -initialized -initials -injectors -inkjet -inoculated -insertions -integrals -intermediates -intolerance -intrigued -invade -invent -ipsum -ironing -islam -isoforms -isotopic -italic -jab -jamie -jaws -jealousy -jerks -jogging -jose -joyful -jumbo -juniors -kanye -karate -karen -khalifa -knights -lads -lagoon -leaned -leone -lessen -lewis -liberties -licences -lifespan -ling -liquidation -livelihood -lobes -locksmith -locomotive -lodged -logon -lookout -loot -lube -lubricant -luscious -lysine -lysis -macbook -machined -machinima -magnitudes -mammary -manchester -manifolds -manipulating -mappings -marched -mashed -massages -matures -melbourne -melodies -melts -metabolite -metering -microprocessor -mildly -millennium -millionaire -minimalist -minimally -missionaries -mistakenly -misunderstanding -misunderstood -modality -modifier -molds -moles -monica -morals -moth -motility -motivating -mound -mourning -movable -mozilla -mrs -mucosal -mucus -muffler -multicultural -multidisciplinary -multiples -multiplicity -murderer -muse -musica -nam -narrowed -narrowing -natal -nautical -navigating -nephews -noticeably -nouns -novelist -nurture -nurturing -nutritious -nvidia -oasis -oats -obstructive -ohms -ole -oncology -ontario -oocytes -opacity -optionally -ordinator -orkut -outage -outbreaks -outrage -overdue -overexpression -ovulation -oyster -pacing -pals -panorama -parasitic -parrot -pastel -patching -paw -peasant -peat -peritoneal -peroxidase -persisted -perturbations -phage -pharmaceuticals -phil -phosphorylated -pigeon -pigments -pigtail -pipelines -pkg -plantations -pleasantly -plethora -plume -plump -pods -policing -polka -polymorphisms -polypropylene -pope -porsche -positives -postgraduate -prada -praises -preparedness -prick -prism -probing -procure -profoundly -progressing -projectors -prominently -propensity -protagonist -pruning -psoriasis -psychosocial -puma -punching -punitive -qualitatively -quark -quart -quartet -radiative -radii -raging -rainforest -rake -ralph -ramps -raspberry -raven -rea -reasoned -receptive -recessed -recoil -reconnect -reddish -reefs -reels -registrant -regressions -regrets -regularity -reminding -remixes -remnants -rendition -repeal -replicas -repositories -reputed -researches -resins -restarted -restless -reused -reversing -revoked -revolt -revolving -rhinestone -riches -ridges -ridiculously -rift -robes -rockets -rolex -rotations -rotors -rotten -royalties -rusty -sacrificing -safeguards -salty -salute -sane -scandals -scours -scrambled -scraps -screamed -seaside -seasoning -seater -seattle -seawater -secondly -secrecy -sedimentation -seduced -selenium -seminal -senator -sensations -sepsis -sequentially -serials -shady -shalt -sharepoint -shelving -shorten -shrinkage -shrub -sid -signify -simon -simpson -simulating -sive -sizzling -skank -skateboarding 
-skillet -skincare -slabs -slain -slapped -slated -sleeper -slogans -slurry -smelling -snowboarding -snowy -soaring -soldering -sous -southwestern -spectroscopic -sri -ssh -stab -stabilizer -stamping -standardization -starving -stature -steamy -stephen -stratified -streamline -streamlined -stride -stunts -subcutaneous -subtraction -subtype -succeeds -successors -sufficiency -suitably -suitcase -sulphur -summoners -supervise -suspense -swag -swaps -sweepstakes -sweetness -syllable -syndromes -tallest -tat -tera -terrified -terrifying -thc -tides -tigers -timeliness -tinted -tonic -toolbox -tore -torso -touchdowns -trackbacks -trainee -transferable -transfusion -transmembrane -treasurer -trimester -triplet -trucking -trusting -tuck -tumble -tyler -typographical -unanimous -unconstitutional -undercover -underestimate -undue -unfolding -unlocking -unsuitable -ure -utilisation -utilised -valuations -vancouver -vanish -vanished -vengeance -ventricle -veterinarian -vial -vicodin -viewpoints -vigorously -virginity -visas -visitation -volumetric -vulgar -waive -washes -watchlist -wavy -webs -wed -wellbutrin -wilson -winch -wiper -withdrew -womb -woodworking -workings -workstations -wreath -wrinkle -wrongful -xanga -xii -xoxo -yell -zen -abandonment -abrasion -absorber -acceptor -accommodating -acetaminophen -adaptable -adhered -adipose -adorned -advocating -aeruginosa -affords -agony -airfare -airing -alan -alba -alcoholism -allegation -alleging -alphabetically -ambience -amyloid -ankles -antitrust -antonio -ape -apnea -append -applause -applet -approving -archaeology -arousal -arse -artificially -assassin -asymptomatic -atheist -authoring -autobiography -avatars -aversion -awarding -backend -backlight -bacterium -ballistic -barber -battered -beacon -bearer -bedtime -begs -benefiting -berth -bestselling -bikinis -billboard -birch -blaming -bland -blasted -blinded -blockbuster -blossoms -blotting -bmx -bona -booming -bootstrap -borderline -braid -braided -brigade -budgetary -bulge -bumpers -burberry -burglary -burnout -butcher -calculators -camouflage -campers -carbonyl -carmen -castles -casualty -categorical -catfight -celestial -cereals -cervix -cess -characterizing -cheddar -cherries -chestnut -chlorophyll -chores -cigars -cinematic -cirrhosis -classifiers -clerical -clientele -clinician -cloak -closets -clover -coaxial -coco -codeine -coffin -cohorts -collaborating -combinatorial -comforts -communion -commuter -commuting -complainant -compressive -concur -confessed -conformance -contemplating -contra -copolymer -coun -countertops -couplings -courageous -courtroom -coveted -cowboys -coworkers -crabs -cramps -crater -crave -craze -creased -creatively -crescent -crocodile -crossings -crotch -cultivate -curricular -cushioning -cyclin -damper -debating -decode -decomposed -defamatory -degrading -deity -deli -demolished -denver -dependant -deriving -dermatitis -descend -descriptors -deserted -detachable -dew -dictator -differentially -diminishing -diner -dips -disambiguation -disappearing -disjoint -disparities -displacements -disseminated -dissenting -distillation -distributes -diverted -divider -docks -documentaries -doe -dormant -downtime -droplet -drowned -duction -duke -dummies -eagles -earring -ecards -ecstasy -eczema -egypt -electrician -electrified -elle -ellipsis -eluted -embark -emulate -encore -encyclopedias -endpoints -enhancer -enlightenment -enrolment -entail -entrusted -envisioned -eos -epidemiological -epoch -equilibria -equine -esa -esophageal -estimators -evacuated -evasion -evolves 
-excision -expelled -extrusion -fallout -fastening -federally -feminism -fertilizers -fictitious -fide -fiesta -filesystem -fission -flashed -fooled -footballer -footing -foraging -foreseeable -fortunes -fountains -fractured -fraternity -freaks -freshness -frontage -frosting -furnishing -futuristic -gangster -gary -geometries -gilt -glacier -glam -glycine -gore -governors -governs -grader -grafts -gran -grenade -grievance -grove -guestbook -gui -gutter -halter -handlers -handsets -hardening -hast -haunting -hectic -hedging -heh -hemp -heres -heuristics -hexane -hikes -hitherto -holster -hon -honorable -honoring -hostage -hotspots -hotties -humane -humility -hurdles -hyderabad -hypertrophy -hypnotized -ibn -illuminate -immobilized -impacting -impairments -impulses -impurity -inaccessible -inadvertently -incense -inconsistencies -inconsistency -individualized -inflow -infringing -inhaled -injecting -inoculation -inscribed -insecurity -insults -interactivity -interconnect -interconnection -intermediary -interplay -intervening -involuntary -ios -irreducible -israel -itch -itchy -jessie -jonas -joys -jurors -kansas -kart -kerala -keychain -kia -knockers -landowners -landscaped -lathe -laundering -lavish -leaflet -ledge -librarians -lids -lifecycle -linda -linearity -linker -loaf -loosen -lubrication -lunches -macroeconomic -magnetization -malt -mammal -manor -marginally -mart -massively -mastering -masterpieces -maximise -mbt -mcg -mediates -mediators -melodic -memorize -michaels -micron -microstructure -midfielder -militia -miniskirt -misdemeanor -mms -modulate -moiety -molten -monarchy -monroe -monsoon -motorbike -motorized -muffin -muffins -multidimensional -mummy -mutated -nan -nay -necessities -neg -negligent -neuropathy -neutrality -neutrophils -newcomers -nieces -nightmares -nodules -nonzero -noodle -nook -nope -nth -nucleation -oatmeal -obligatory -obsessive -octopus -offend -offsets -oklahoma -olfactory -oligonucleotide -ordained -ordinances -orphan -orthodox -outlining -outskirts -overs -overt -oxford -oxycodone -pact -pagan -pajamas -pandemic -papa -paradigms -paraffin -parallelism -partitioned -partnering -partying -pasted -paxil -payout -peacock -pedestrians -peeling -peg -pelvis -pendants -pennsylvania -perforated -phased -phat -phenotypic -photosynthesis -physique -pilgrimage -pinpoint -pistons -pitfalls -placenta -plead -pleading -plentiful -plight -plow -podium -poisonous -pol -policeman -polyacrylamide -polyclonal -polymeric -poppy -pornographic -portability -porter -preached -preacher -predecessors -preoperative -progeny -propagate -propeller -prophylaxis -proxies -puberty -puck -puncture -purge -quickest -quilting -quilts -quitting -quotient -raced -railways -rearing -reconsider -reconstruct -recruiter -refinery -reflectance -regimens -rem -ren -reopen -repost -reptiles -resided -resisting -resistors -responders -restrained -retard -retrieving -revived -rhetorical -rhythmic -ribosomal -rinsed -ripple -robotics -rodent -rodents -rooftop -rooting -rumored -sacks -samurai -sanity -sasha -satisfactorily -sauces -sax -scarcity -scented -scrapbooking -scrape -scratched -scrolls -seduce -seduces -seduction -seeding -sensational -sequenced -seriousness -setback -shanghai -shareware -sheltered -sherlock -shielded -shooters -shovel -showdown -sighed -sightings -simplifies -simplistic -sissy -skates -skulls -sled -slug -smack -snippets -snoop -snug -soaps -sofas -softwares -soprano -soups -spacers -spamming -spanked -spared -specialise -splice -spline -spokeswoman -sporadic -sprays 
-sprinkler -squamous -squat -stabbing -staggering -stamina -staples -starr -starved -stereotype -stewardess -stewart -stormwater -stoves -strangely -strippers -stroking -structuring -sublime -subnet -subsistence -subsurface -sues -sug -sulfide -summation -summoned -summons -supervising -surveying -suture -swan -swiftly -swirl -symbolism -symphony -tactile -tain -teammate -teases -telephones -televisions -temperament -tenderness -terminates -terra -terrier -terry -theoretic -theorists -theta -thirsty -throughs -timeframe -tipping -toasted -todays -tofu -tonne -toothbrush -totality -toured -tram -tran -tranquil -translucent -transvestite -traverse -troublesome -truthful -tubs -tug -tunneling -turnout -typography -unavoidable -uncontrolled -unconventional -uncredited -underline -underlined -undertook -unprotected -unsatisfactory -unsaturated -uphold -usher -venom -ventilated -veto -victorious -vineyard -vineyards -virulence -visionary -voicemail -volkswagen -wan -watchdog -watson -weaken -weekday -wheeled -whence -whining -whipping -whoa -wholesaler -willingly -willow -winery -wonderland -wooded -workaround -wrestler -wrongly -zodiac -zoloft -zoomed -abnormality -abscess -absorbs -accented -accommodated -accompanies -accompaniment -aches -acreage -actuators -adjunct -admirable -admirers -adrenergic -adv -aerodynamic -affine -affordability -agendas -agreeable -ailments -airs -airways -ala -alabama -alas -alia -alleges -allegiance -alto -ambiance -amide -amortization -amour -analogs -analogues -aneurysm -angina -angiogenesis -angiography -anthology -antibacterial -appended -appreciable -appropriated -appropriateness -apr -aptitude -ark -aroused -arr -artnet -asa -ascent -asparagus -assaulted -assemblage -assholes -associative -astounding -attaining -auctioneer -authenticate -authorizing -autistic -automata -axles -axons -backpacks -banjo -barbara -barcelona -bartender -bashing -bassist -bedside -beep -berlin -betrayed -bicarbonate -binoculars -biochemistry -birthplace -biscuit -bitmap -blackout -bleaching -blooming -bobby -bogus -bonnet -booting -boredom -boron -bottleneck -boxers -brad -breaches -breakout -breakup -bridesmaids -brilliance -brilliantly -brine -brink -brooch -brook -brooklyn -buffering -buildup -bulldog -bureaucratic -burners -busiest -businessmen -buzzing -cadillac -cafeteria -cameo -canister -captcha -carcinomas -careless -carte -carter -cas -casserole -catalogues -cataract -categorization -catfish -cavalry -centerpiece -cgi -chaired -charlotte -chartered -cheerleaders -cheeses -chemist -childbirth -childish -chilli -cilantro -circulate -civilized -clamping -clap -cling -cloths -coached -codecs -coined -collaborated -collegiate -comet -comforter -commemorate -commentator -communism -compaction -compel -compensatory -complains -complexion -compounded -compulsive -computationally -conceptions -conditioners -congratulate -conical -constituting -consumes -contagious -contemplate -contender -contestant -contraceptive -corp -corrugated -cosy -counterfeit -covenants -cranberry -cranial -crossword -crumbs -cub -cumbersome -curling -cursed -cyclone -cytometry -cytotoxicity -darkest -dart -dealerships -dearly -decays -decentralized -deduce -deduct -delinquent -deputies -desorption -detroit -dexter -diagnosing -dialing -diana -dictates -diflucan -dimensionless -discern -discerning -discontinue -disgrace -disliked -disordered -dizzy -dogg -donna -dreaded -dreadful -duality -dubious -dun -duplicates -eddy -editable -ejection -elaborated -elevate -elites -embarked -embraces -emitter 
-emulation -encapsulated -endothelium -endowment -entitlements -entrants -equities -escalation -eukaryotic -eurozone -everlasting -excite -exclaimed -expended -extant -extractor -extraordinarily -extremities -eyewear -facilitator -factions -falsely -fam -farmland -fatalities -february -fecal -feeders -ferrous -fillers -finalist -fingerprints -firefighter -flaming -flammable -flares -flattened -flirt -fluctuate -flue -flushed -focussed -follicles -forbid -foreclosed -forgiven -forklift -fortified -fortunately -fred -freshmen -fret -fruity -fumes -functionalities -galore -ganglion -garages -gardener -gaskets -gaya -gemstone -gestational -gingerbread -glitches -goalie -golfer -golfers -grafting -graphically -grassland -grep -gripping -gypsy -haircuts -hallmark -halted -handgun -hangers -hannah -harmonics -harp -haste -heaps -henry -hereof -hereunder -hernia -hiatus -hight -hiphop -histamine -hogtied -homecoming -homelessness -homeostasis -homogeneity -honorary -horoscope -horribly -horseback -hospice -hostess -hotline -hunted -hydrated -hyperbolic -hyperplasia -hypertensive -hyundai -iced -ide -immerse -immoral -impatient -importers -inactivity -incompetent -indemnify -indifference -individuality -inexperienced -infested -initiates -inns -instincts -insufficiency -integrin -intending -interfacial -interrogation -interviewer -intimidating -intranet -intrusive -invading -inventive -inversely -iodide -iowa -irresponsible -ism -isomorphism -isotopes -jenny -jerry -jewish -jig -jigsaw -kapoor -kawasaki -kidnapping -lacrosse -lactation -lactic -lactose -ladders -lance -laparoscopic -lea -leaflets -leftover -leveraging -levied -lieutenant -lighthouse -limitless -lincoln -linens -lingering -linguistics -lockout -logger -loneliness -loo -loom -lords -lore -luckily -lucy -macroscopic -magician -manicure -maroon -marrying -maryland -mated -mayhem -mediating -meh -meningitis -meridia -microbiology -microphones -middleware -migratory -militant -militants -milking -milliseconds -ming -minnesota -mmm -moan -moaning -modernization -modulator -momma -monarch -monocytes -moose -mop -mosquitoes -mot -motorists -motorway -mover -mozzarella -mtv -mundane -musings -myosin -nanotubes -navigator -nectar -nicest -niggas -nineteen -nition -northwestern -nostalgic -numb -objected -objectively -obtainable -oceanic -oft -olivia -onward -ooh -orb -orchard -orchid -origami -orlando -orthopedic -outsiders -overheating -overpriced -overseeing -painfully -pairwise -palsy -parabolic -pastors -patiently -patronage -paycheck -peasants -pedagogical -pedo -penguins -perch -perfumes -peripherals -permutation -persuasion -pertains -philadelphia -photovoltaic -pictorial -pierce -pilgrims -pings -pistols -plagued -platter -plugging -pneumoniae -pollutant -polluted -polycarbonate -polystyrene -ponder -pong -populate -postnatal -postulated -preferentially -prehistoric -prevailed -preventative -primed -privatization -probed -procured -prog -progenitor -prohibiting -prompting -propagating -propelled -proprietor -prosecute -protectors -provoke -psychedelic -psychotic -puffy -puppets -pursuits -pus -puss -pyridine -quadrant -quaint -quantization -quarry -quenching -quilted -racket -radiology -raffle -raft -rallies -rampant -rationality -rattle -realms -realtor -reared -reckoned -recorders -recovers -recruiters -redo -refs -rejoice -relativistic -relentless -reluctance -remembrance -remodel -renaissance -repealed -reperfusion -replicates -repo -reproducible -reproductions -resisted -resonances -revelations -revocation -rigging -rigs 
-riparian -rips -rites -roam -roar -rosemary -rpg -runaway -sai -sails -saloon -samba -sank -sarcasm -sarcastic -saxophone -scaffold -scarlet -scouts -screenings -screenplay -scribed -scriptures -secluded -secretory -sedimentary -segmented -segregated -selectable -senate -senators -sensed -settles -shaker -sharper -shirtless -shove -shunt -shutters -sidewalks -sighted -sighting -silt -simmer -simplification -sinister -skew -skewed -sleeved -slew -slimming -slowdown -smashing -snowfall -soften -solicit -solicitors -sophie -sores -sorption -soundtracks -souvenir -spade -spilled -spines -spinner -spiritually -spoof -spreadsheets -spree -sprinkle -sprouts -spying -staffed -stale -stalking -standings -steamer -stencil -steven -stint -stout -strapped -stratification -strengthens -stressing -struts -stun -subscribes -subsidized -subsystems -subtract -subtracted -subtracting -sunscreen -superimposed -supple -surfaced -surfer -surpassed -surreal -surrendered -suspicions -swapped -sweatshirt -sweaty -sweeps -sweetest -swimmers -syllables -synchronize -synergy -synthesize -tabbed -tackled -tai -talkin -teas -teaspoons -telephony -tempera -tending -teri -thankfully -thankyou -theorems -theses -thrift -throbbing -thugs -thyme -tilted -titration -toothpaste -topographic -toughness -towering -transactional -transmitters -transplanted -transports -trekking -trypsin -tshirts -tuesday -tuple -turntable -tweaks -tyranny -unbalanced -undamaged -underestimated -underside -underwriting -unexplained -unfavorable -unintended -unjust -unleash -unload -unmarried -unnamed -unregistered -untouched -untrue -unveil -upbeat -uprising -vaccinated -valentines -valtrex -vanishes -vanishing -veg -vegetative -verbally -vertebral -vga -victor -vietnam -ville -violently -visceral -volvo -vomit -wade -warts -waveforms -waxing -weakening -wearer -weekdays -wetting -wheeler -whey -whimsical -whisky -wills -wiping -wisconsin -witches -witnessing -wizards -worldly -wrecked -xenon -yearbook -yoke -yup -abatement -abduction -abiding -ably -abortions -accretion -accumulating -accuse -acknowledgment -adhering -admins -admittedly -affluent -aiding -alanine -alcohols -alerted -alicia -allocating -alphabetic -alprazolam -alum -amending -amounting -amplify -anarchy -andrea -androgen -anesthetic -anglers -annealed -anon -anorexia -antabuse -anticipating -antidepressant -antisense -antiviral -appetizer -appoints -apprenticeship -appropriations -aquaculture -armored -artisans -ascertained -assaults -asserting -astronomical -ati -attackers -attractiveness -authorisation -autonomic -avalanche -avodart -aye -baba -backlash -backlink -backside -ballad -ballots -ballroom -banded -barking -bedrock -beetles -benchmarking -bib -bibliographic -biodegradable -biopsies -bitten -blake -blames -blazing -blister -blueberry -boobies -bookstores -bordering -bounced -boutiques -brawl -breakage -brewery -brighten -bronchial -brooks -bruce -brute -bumped -busting -cabling -caches -cactus -caliper -callers -calming -camaro -campaigning -campbell -capitalize -captivating -carboxylic -caribbean -carol -carousel -caster -casually -caters -celebrex -celtic -centimeters -ceremonial -characterizes -charismatic -cheesecake -chelsea -chests -chile -chloro -choking -choreography -christ -circumcision -cladding -claire -classifying -cleft -clubhouse -clueless -clutches -collaborators -collapses -colloidal -columbia -combos -commentaries -commonplace -communicative -compo -compressors -comps -conceivable -condemnation -confers -confluence -conforms -conjugation 
-constellation -constrain -contaminant -contraception -contradict -contradictions -contrasted -convened -convergent -conveying -conveys -coolers -coop -cooperating -cornerstone -corresponded -corrosive -coughing -coupler -courthouse -cravings -creases -creeping -crimson -critiques -crooked -crossdressers -culprit -cumin -customizing -cuttings -cynical -cytosolic -dagger -dailymotion -dakota -dana -dang -danny -dansk -dar -davidson -deactivated -decidedly -deems -defer -dehydrated -delaying -departures -dermal -descendant -designations -devils -devotees -diam -differentiating -diffuser -digs -dilated -disciple -discourses -discriminating -diseased -disguised -disgust -dissatisfaction -disseminate -distract -divert -dives -dmca -doodle -doubly -downgrades -downright -draped -dreamstime -dryness -drywall -duh -duvet -dyno -earthly -easing -echoed -edgy -edward -elucidate -embellished -enact -enchanting -endorses -endoscopic -energized -enforceable -enormously -enquire -ensembles -entertainer -enumerated -enumeration -envisaged -epidermis -epitope -equiv -esophagus -esteemed -estuary -etch -evans -evergreen -evils -evoke -exemplified -experiential -exploding -eyebrow -eyeshadow -faceted -faculties -fastened -favorably -feline -fiddle -fleeing -flexion -flips -flowed -flywheel -foes -folklore -follicle -footed -forging -formulating -fractionation -fragrant -fresher -freshest -fructose -fucker -gage -gall -gangbanged -gangsta -garmin -geese -generalize -generals -germs -glutamine -glycogen -goofy -gorilla -gosh -gout -grapefruit -greece -greener -gritty -groin -grub -gyms -hailed -halftime -hampered -handwritten -hare -harmed -harmonious -hauling -haunt -heartfelt -hectare -heed -hefty -hematopoietic -hens -hepatocytes -hermes -hesitant -heterosexual -hinged -hippie -hive -homozygous -hoo -hoods -hopper -hostels -howard -hugging -hunts -hypocrisy -hysteresis -illus -illusions -imagining -imitate -impetus -impractical -indebtedness -indicted -indistinguishable -infos -infrastructures -ingenious -insulator -intakes -intensified -interpretive -interrupts -intersect -intimately -introductions -intron -invocation -irregularities -irrigated -irritated -islamic -isoform -isomers -italia -jackpot -jake -janet -jargon -jeremy -jesse -jill -jitter -joseph -judgements -jug -junkie -juveniles -kappa -kara -katrina -kicker -kiosk -kiwi -klonopin -knocks -kobe -kosher -kudos -lacquer -lacs -lags -lakhs -lambs -lanterns -laterally -leaching -leashed -leptin -lesbo -levers -liberated -liens -likeness -linebacker -lipitor -literate -loam -lofty -loosing -lossless -lowing -luncheon -lurking -lymphoid -lyrical -lysates -magistrate -malignancy -malnutrition -mane -marines -marketer -matured -mayonnaise -meadows -meaty -melon -meltdown -meshes -metaphors -meteorological -metformin -methionine -methylene -mickey -midway -milky -millimeters -mindful -miner -minima -miscarriage -missouri -mites -mitotic -mitral -mixers -modded -momentarily -monogram -monolayer -monomers -monumental -moods -moron -motherhood -motocross -moulding -mountainous -mule -multilateral -mutagenesis -muzzle -myeloid -mystic -mythical -narration -nationalist -nearing -nestled -netting -newborns -newcomer -nextdoor -nighttime -nip -nocturnal -nodal -nomenclature -occult -occupant -octane -odors -officiating -ointment -optically -optimally -opting -orbitals -ordinate -orifice -ornate -orphans -outsider -outweigh -oversees -oysters -palliative -pam -pancake -parachute -parietal -parlor -parmesan -parted -pastures -patched -payer -payers -peaches 
-pears -peeps -pence -pendulum -perfected -peri -peril -petit -pharma -pharmacists -phonon -picky -piled -pizzas -plank -pleated -plumbers -policymakers -pooling -porting -portland -postpartum -pps -predation -preparatory -preterm -prized -probate -prof -projectile -projective -prolong -promulgated -propagated -proponents -propulsion -prostitutes -protesting -provisioning -provoked -pumpkins -quieter -quotas -racers -rand -randy -rangers -rarity -ratified -rearrangement -rebecca -reborn -receptionist -reclaim -reclaimed -recognises -recoverable -rectum -redness -referees -refinancing -refinements -refreshments -refrigerant -refrigerators -reimbursed -reindeer -relieving -remnant -remodeled -republika -resellers -resetting -resolves -resonator -respectfully -restitution -restores -restricts -resuspended -retaliation -reticulum -retirees -revolutions -revolver -revolves -rex -ribbed -richly -rightful -roasting -rollover -rotates -rouge -ruining -ruthless -rye -samantha -sanding -sarcoma -sash -sativa -saucepan -scares -scents -scopes -scotland -screwing -sculptor -sealant -sectoral -secures -sedation -seedling -seeming -semiconductors -sentimental -serene -sermons -setups -seventeenth -shank -shin -shingles -shoved -showcased -sieve -silencing -silhouettes -silicate -simplifying -singlet -situational -sliders -sluggish -slump -smuggling -snare -sneaky -snowman -soar -sociological -softened -solemn -sorta -spawned -specialising -spectator -spies -spiked -spoons -sprung -spurious -squeezing -squirts -stalled -startling -statically -stator -steaks -steamed -steels -stemming -stewardship -stopper -stubs -stump -subdivided -summon -sump -superman -superoxide -susan -sweetie -tam -tang -tangled -tango -taxing -taxis -taxonomic -teak -teal -tectonic -tenancy -tequila -terraced -terraces -testers -thaliana -thermally -thug -thursday -ticking -tiered -tiers -tightened -toaster -tolerances -tommy -totaled -transgender -translators -transmits -transnational -transplants -trenches -troubling -truss -tweaking -twofold -typedef -uber -umbrellas -unbeatable -unborn -undated -undead -undecided -undergraduates -understandings -unification -uninsured -unleashed -unnatural -unnecessarily -uno -unsupported -untitled -uphill -uplifting -vaguely -validating -vases -veggie -vented -vertebrate -vesicle -vests -vets -vices -vignette -villains -visor -visualizes -vita -vow -vowels -vulgaris -wagons -walkers -wandered -waterfalls -watermelon -waterways -weakest -weathering -wednesday -whirlpool -withdrawals -wrists -xiii -yachts -yellowish -yolk -zeal -zealand -zovirax -zucchini -aaron -abbey -abercrombie -aboriginal -absorbent -abstinence -acai -acclaim -accomplishing -accorded -acetonitrile -acetylcholine -aching -acuity -ada -adenocarcinoma -adept -adhesives -adultery -adware -aero -afflicted -airy -alaska -alfa -aliases -aligning -alligator -ambulatory -analytically -animate -annoy -annoyance -annular -antics -antidepressants -antiquity -anyhow -apartheid -apes -apo -apologise -appetizers -applica -appraisals -aprons -arbitrator -arched -archery -arenas -armchair -arrogance -arson -articular -artisan -assesses -assuring -asymmetrical -asymptotically -atheism -atrium -attendants -auditorium -aurora -auspices -austerity -authorizes -autodesk -autographed -avast -backlit -balconies -balm -bandage -banding -barge -barren -battalion -beau -beet -begged -behaving -bestowed -betrayal -bifurcation -biliary -binaries -binge -biodiesel -biomarkers -biometric -biotech -biotin -bitterness -blatant -bleak -blob -bloc -blurry 
-bodice -bog -boils -bolster -bolted -bombers -boner -bookshelf -bothering -bothers -bounding -breads -broadcaster -broom -brutality -bullied -bunker -buns -bushing -bushings -butler -buttocks -cad -cancels -capitalized -capitals -captains -captivity -carcass -cardiomyopathy -cartel -carve -cate -cationic -caucasian -caucus -caudal -causality -causation -cellulite -cerebellar -champs -chanting -chatter -cheaply -chem -chemo -cheques -cherokee -chills -chime -chipped -chloe -cholera -chopping -chow -chromatic -chronograph -cinemas -circadian -cisplatin -civilizations -clarifying -clarinet -clerks -clogged -clot -coasts -collide -colorless -colt -commences -commenter -compaq -competes -compositional -comprehensively -conceded -concentric -concerted -condensate -confessions -confines -confronting -consignment -contenders -controllable -controversies -convective -conversational -conveyance -copier -copolymers -corolla -corona -corpses -countertop -covalent -coverings -craftsman -crafty -craig -creatine -creme -criticised -crossroads -crunchy -crusher -cunts -curator -custodian -customised -cutlery -cvs -cyanide -cyclical -cyclist -dangling -dared -darts -denomination -dependents -desperation -detectives -deterrent -deviate -dialogues -dictatorship -dieting -digitized -diligently -disagreed -discharging -discontinuity -discontinuous -disproportionate -disqualified -dissimilar -distracting -distractions -disulfide -dns -doctorate -doth -downwards -drenched -dries -drinkers -drown -dune -dysfunctional -dysplasia -effexor -eggplant -elaboration -eld -elevators -els -elution -elves -embodies -embody -emphasised -empires -encrypt -endo -endometrial -endorsements -ensuite -entrances -envision -equator -equatorial -equestrian -eradication -eroded -erythrocytes -escorted -ethos -evangelical -eviction -excise -executions -exempted -exhibitionists -exited -expandable -expedited -expulsion -extrapolation -extrinsic -facelift -facsimile -fad -fades -fairies -famously -fanatic -farmhouse -fascia -favours -fearless -feces -femur -fenders -fetched -feud -fibroblast -figurine -filenames -finasteride -fledged -flicker -flirting -flogger -fluctuating -fluency -foci -fodder -footnotes -footprints -forgiving -formative -fra -freelancers -futile -galactic -gamertag -ganglia -garnered -geeks -geologic -gill -gis -glaciers -glanced -glider -gloomy -gmt -goalkeeper -gon -graces -graham -granule -graveyard -greg -grievances -grossly -groupings -grout -guaranteeing -guarding -guetta -gypsum -hal -hamburger -hamilton -hamsters -hart -hatched -hdd -healer -heals -heeled -helen -heme -herbicide -herds -hereto -hierarchies -hindered -hipster -histology -hitter -hoax -hobo -holland -homestead -hone -hoon -horrors -hues -hump -hurdle -hurricanes -hustle -hydrate -hydrodynamic -hydrophilic -ibm -ibuprofen -idiopathic -illuminating -imaginable -impart -impeccable -impede -impotence -inauguration -incarnation -incubator -indent -industrialized -ineligible -inertial -infect -infestation -infrequent -infringe -inhabit -initiator -injuring -insisting -insofar -insolvency -installers -instructive -intellectuals -intelligently -interception -interns -intestines -intracranial -invaders -invariance -invertebrates -invests -invoking -iran -isolating -isomer -isomorphic -issuers -italics -jammed -jamming -jordans -joystick -julie -jurisprudence -kale -keepers -kennel -ketchup -kiln -kilograms -kinematic -kingdoms -lac -laced -lagged -landline -lash -lastly -lawns -layering -leafy -leaps -lebron -ledger -leftovers -lei -lemonade -lemons 
-leucine -leukocyte -lex -lexus -liberalization -lice -ligation -limp -lindsay -lint -lockerroom -looming -looms -lorazepam -lousy -lovingly -lucid -luciferase -luke -luna -lux -lymphatic -madrid -magically -magnum -manifests -manipulations -manly -mann -manned -massager -mater -matthew -maven -maximizes -measles -mech -mediums -medley -melancholy -melatonin -mellow -menace -mend -merchandising -meridian -metaphysical -methadone -metropolis -microcontroller -microns -mildew -mineralization -miraculous -misplaced -mite -mitigating -mobs -modems -modernity -modifies -moisturizer -monde -moonlight -moulded -mucous -mulch -multilingual -murals -nada -nancy -narcotics -natalie -nate -necks -negro -neoprene -neutrino -neutrons -neutrophil -nic -nitrite -nonspecific -nonwoven -nouveau -nozzles -nuke -numerals -nutmeg -nutshell -oakley -objectivity -obnoxious -obscured -observance -octave -oddly -ond -onstage -opportunistic -oppressed -ordeal -organisers -organizes -orig -ortho -osmotic -osteoarthritis -outerwear -outliers -outperforms -outsourced -ovaries -overdrive -overloaded -overrated -overwhelmingly -overwrite -paging -painless -pamphlet -pancreatitis -parting -passionately -pastebin -pasting -paternal -patriotism -pave -pawn -paws -pcb -pct -pedagogy -pegs -pennies -penthouse -periodicals -periodontal -permeable -pertain -pervert -pesos -pete -pharmacokinetics -phenolic -phi -philosophies -phishing -physicist -phytoplankton -piercings -piezoelectric -pigeons -pioneered -pitchers -plagiarism -plat -plavix -playwright -pleads -pleasurable -pledges -pleural -poking -polio -polymorphic -pom -ponies -porcine -pornos -portrays -positional -postpone -pouches -pow -powerhouse -predicates -prerequisites -preseason -presets -primate -primates -primitives -princes -prioritize -pron -prong -prototyping -psychosis -pune -purported -purposely -puzzled -qualifier -quorum -railing -railroads -raisins -randomness -rants -rapport -ras -raster -readership -realistically -recalling -receptacle -receptions -reds -redshift -reflexes -refrigerated -refurbishment -reimburse -reinforces -relativity -relics -relish -relocating -renters -reproducing -reptile -rescues -resentment -resistive -resists -respite -responsibly -restarting -restocking -restraining -retainer -rethink -retractable -retreats -retry -returnable -reunited -rewriting -rink -riser -roadmap -roaring -rockers -rodeo -rome -rowing -rtf -ruffle -rulings -rupee -rupees -russell -sakura -sal -salivary -saltwater -sandbox -saws -schematics -scorer -scouting -scraping -screensavers -sculpted -sensitivities -separable -septum -serenity -sergeant -serif -serpent -servlet -shabby -sharpness -shawl -sheen -shenzhen -shortfall -sierra -sinful -singularity -sinners -sinusoidal -sire -siren -sitcom -sixteenth -sizable -skater -skim -slag -slant -sleeveless -slipper -sloping -slotted -smelled -smokes -smoothed -smoothness -snails -snapping -sniff -snoring -softening -softness -solitude -solos -solu -someones -sow -spammers -speciality -speculated -spitting -spooky -spore -squadron -squirrels -staffs -staggered -startups -steaming -stephanie -sterilization -stink -stormy -stratum -strife -stromal -stumbleupon -stylized -subconscious -subcontractors -subdivisions -successively -sufferers -suing -summarised -superconducting -suppressing -supremacy -surpass -swearing -swimmer -swung -synapses -synergistic -synthesizer -tachycardia -tacos -tailgate -tailoring -taping -tasked -technologically -technorati -teeny -televised -teller -testicular -testis -tetracycline 
-textbox -thanking -thickening -thicknesses -thingy -thinker -thinly -thompson -threading -throats -thrombin -tibia -timberland -timelines -tingling -tinnitus -tonal -tooling -topper -tori -tossing -trainings -trampoline -transceiver -treble -tributes -trimmer -trims -tripping -trolls -tron -troubleshoot -tunic -turner -turret -twelfth -twill -typos -ubiquitin -ugh -umbilical -undersigned -undisclosed -unforeseen -unicorn -unlucky -unnoticed -unplugged -unreleased -unstructured -uplift -upsetting -usenet -ute -utilise -utterance -vandalism -vantage -varnish -vehicular -venting -versed -vials -vigilance -vin -visibly -vitae -voids -vor -wacky -waistband -walnuts -wang -warehousing -wares -wartime -watered -watery -waved -wearable -webcast -weblogs -wholesome -widened -wifes -wildcard -wirelessly -withdrawing -withhold -workable -worsening -xiv -yah -yan -yarns -youngster -zest -abandoning -abnormally -abode -abolition -abort -aborted -abundances -academically -accelerates -accommodates -accordion -accrue -accusation -accusing -acquaintances -actuarial -acutely -acyl -adjudication -administrations -adoptive -adsense -advancements -adversity -affectionate -affirmation -affixed -aides -albicans -alfalfa -algal -alignments -allergen -allure -alluring -aloe -alternately -amaze -amenable -amenity -amines -analgesia -analgesic -andreas -anew -anions -annex -annihilation -antiretroviral -apocalypse -applaud -appreciates -argentina -arrears -artefacts -aryl -ascii -ashore -assemblages -assurances -attainable -attest -attributions -attrition -authoritarian -authorship -autocorrelation -autographs -awakened -awesomeness -axioms -axon -badminton -badongo -barium -bastards -bday -beatles -beech -benthic -billet -billionaire -biofuels -blizzard -blockage -bloke -bloodstream -boarded -bodybuilder -boldly -bony -boogie -bookmarklet -booted -booties -borrowings -boulder -bouquets -bowed -brane -brasil -bravery -bravo -breached -breakfasts -bronchitis -brood -brotherhood -brunettes -buckles -bulletins -burgundy -bustling -calibrate -calibre -calmly -camcorders -cameron -campsite -cancellations -cara -carburetor -carisoprodol -carlos -castings -catered -caterpillar -catwalk -cautiously -chainsaw -chakra -challenger -charters -chromatographic -chronically -chronicle -chronology -cir -circulatory -claimants -clam -clamped -cleanser -cleaved -cleveland -clowns -cody -coercion -coiled -collapsing -cologne -colonel -colonic -commensurate -compilations -complicate -composting -comrades -concussion -confocal -congregations -consonant -contended -contentious -contradicts -contralateral -convict -convoy -cookbooks -coordinators -corals -coriander -cornea -corticosteroids -cosmopolitan -cosmos -cove -crackdown -crankshaft -creamed -culminating -cunning -curled -custodial -deb -debtors -debugger -deceive -deceived -deciduous -decompression -deepen -deforestation -defy -degenerative -demonstrators -denominations -denoting -deployments -deportation -deserts -despatch -destroyer -detainees -deteriorate -deteriorated -detoxification -detriment -dialects -dilation -dilemmas -diligent -dimensionality -dinnerware -disbelief -disclosing -discriminant -dishonest -dismal -dispense -disperse -disrespect -dissolving -dna -doggie -dolly -dongle -donuts -dosages -doubtless -douche -downfall -dragonfly -drier -dropout -drumming -dylan -easement -eases -efflux -ejected -electorate -elegantly -eleventh -emptied -emptying -encapsulation -enchanted -enclose -ene -enrolling -entertainers -enthalpy -epidural -equalizer -equate -eradicate 
-erratic -erred -erupted -estimations -eta -eventful -exacerbated -excavated -excised -exhausting -existential -experimented -exponents -extender -extern -extravagant -eyelashes -eyelid -eyeliner -facies -factorization -falcon -favoring -fearing -fedora -feeble -feral -fermented -fiasco -figurines -finalize -flanking -flannel -flashcards -flashy -flatter -flax -flicks -floated -fluff -follicular -folly -forfeiture -foyer -frat -freeing -frosted -froze -fudge -fugitive -fullness -furnaces -garnet -gating -gemstones -gina -globes -gloom -goldfish -golfing -grasping -grate -grilling -habitual -hails -hamper -happiest -harass -harassing -hardcover -hardships -hatching -hateful -headsets -hearth -heath -heaviest -hedgehog -herbicides -herring -hoes -hoist -honeycomb -hooking -hound -hovering -howto -hui -humiliated -hymn -hyperlinks -hypersensitivity -hypnotic -hypotension -iceberg -ignite -immaculate -immersive -immunological -imp -importation -imprinted -inaccuracy -inactivated -inaugurated -incandescent -incidentally -incline -inconvenient -indemnity -indica -indigo -inductance -inducted -indulgence -ingenuity -ingested -inquired -insanely -insole -inspecting -installments -integrative -interferes -intermediaries -interpreters -interruptions -intimidation -intoxicated -intoxication -intrigue -intrinsically -intruder -invariants -ionized -ironically -isa -jacobs -jaguar -jeopardy -jockey -jog -jolly -juan -jumpers -justifies -kalba -kayaking -keith -kentucky -klein -kristen -kyle -laborer -laborers -laces -lactating -landings -landslide -latitudes -layoffs -lem -leukocytes -leveled -leveraged -lexicon -liars -liberalism -libido -licensure -lifetimes -lightest -lineages -lingual -livejournal -livelihoods -liverpool -lizards -loadings -loco -logarithm -logistical -lotto -lubricants -lumps -lund -lures -madonna -magma -majesty -mance -marathi -martini -mathematically -maturing -maxim -maximization -mecha -mejor -meticulous -microbes -midday -midwife -mindless -mischief -misguided -misinformation -misty -moans -mobil -modulating -monochrome -mononuclear -monotone -monstrous -montreal -morrow -motivates -mourn -mulberry -multilayer -mus -mussels -muted -myeloma -myocardium -nanotechnology -napkin -napkins -narrated -negativity -neglecting -neuroscience -newswires -niches -nicknames -nitrous -nobility -nolvadex -noses -notations -notepad -numbness -nuns -nurseries -nylons -obedient -olympic -ontologies -operatives -opium -oppressive -optimised -optimizations -orchestral -oregano -outages -outperform -overlaps -overpaying -overriding -overthrow -overturned -owls -paddy -palladium -paparazzi -paperbacks -parishes -parka -pastries -pathologic -pauses -pendent -percutaneous -periodical -perish -perpetrators -perseverance -personalization -perturbed -peso -peugeot -pewter -pho -phonological -phospholipid -photographing -photosynthetic -pickle -piggy -pinion -pinnacle -placental -planter -pleas -plexus -pliers -plunged -poignant -poisoned -policemen -politely -polygons -poolside -popularly -portugal -posh -powering -ppp -practised -practising -precaution -predatory -preface -prefrontal -prematurely -premiered -presenters -preservatives -pressured -pressurized -preteens -prevails -primal -proclaim -proclamation -proliferative -promos -proportionate -prosper -prosthesis -prosthetic -protested -prowess -psyche -pubic -punishing -pyramidal -pyramids -pyruvate -quarantine -radiators -radiography -ramifications -rappers -ratification -rattling -readability -readout -realisation -realtime -rearrange -recessive 
-recital -recited -recollection -reconnaissance -rectifier -rectify -redress -reformed -refraction -rein -reiterated -reliant -reloading -renewals -reopened -repaid -repeater -repellent -reproducibility -resembled -restrain -restroom -resuscitation -retailing -retrofit -retrograde -revered -reverence -revising -revoke -rhino -rico -riley -ripper -robber -roger -romeo -roommates -rosa -roundabout -rout -roy -rubble -sabotage -sacked -sausages -scaffolding -schemas -schoolgirls -scramble -scripted -scrubs -scum -seaweed -semesters -sensibility -separations -sept -sequin -severed -shack -shaky -shane -sharpen -sharpening -shellfish -shootout -showtimes -shred -sickle -sidelines -simpsons -sincerity -sitter -sixties -skeletons -skepticism -skipper -slicing -smoky -snag -snapback -snowmobile -socialization -sonar -souvenirs -spanned -specialises -speechless -spout -spurs -ssl -stalks -stallion -stances -standout -steele -straighten -streaks -strides -strollers -styrene -subdued -submersible -subtilis -sunroof -sunsets -suppresses -suv -swipe -symbian -syncs -syndicated -synthroid -syphilis -tabloid -tact -tainted -takeaway -takeoff -tangential -tara -teased -teenie -tennessee -tentatively -ternary -therefor -thermo -thine -tics -timings -tina -titan -topologies -tortilla -tortoise -touchpad -towed -transducers -transferase -transporters -treasured -treatise -tricked -trooper -tropics -trump -tryptophan -tubulin -tuners -twenties -unambiguous -unattended -unchecked -uncomplicated -underworld -undisturbed -uneasy -unethical -unfolded -unimportant -unloaded -unpopular -unquestionably -unreal -unsupervised -unwind -upland -urinating -usernames -utah -uttered -vaccinations -variational -vas -velcro -ventured -victorian -vigor -viola -virgins -voip -volcanoes -vowed -wafers -waffle -wah -walkway -wardrobes -warmly -warms -warped -weaning -weathered -weep -weeping -welder -whereabouts -whit -whopping -wicker -wicket -widen -wikis -wildfire -winnings -wipers -workplaces -wrestlers -zelda -zippered -abolish -abreast -absences -abundantly -acceptability -accrual -accuses -acoustics -actuality -adaptability -adheres -adiabatic -adjourned -adjuster -aerosols -afternoons -aggregating -agitated -airbrush -airspace -akira -alarmed -alchemy -alexa -algorithmic -alienation -allege -allergens -altitudes -ama -americans -amnesty -ampicillin -amygdala -anatomic -anchoring -andre -anecdotal -anecdotes -angela -anguish -anhydrous -anita -annexed -anodized -antiserum -apocalyptic -apostles -appraised -appreciative -apprehension -apricot -aptly -archipelago -armani -aromas -arresting -ascorbic -asians -assassins -assessor -asteroid -astrocytes -astronaut -atrocities -auctioned -audrey -autologous -automaton -awaken -aways -axiom -backlog -backroom -bactrim -bagged -bali -bangle -baton -bazaar -beanie -bearded -bel -believable -bestiality -biceps -bidirectional -bikers -bilayer -binders -binomial -biofilm -biologists -blindly -blisters -blitz -blogged -bloglovin -blueberries -bluegrass -boar -boasting -bombed -bombings -boned -bong -bookkeeping -boon -bootleg -bouncy -boyz -bracing -bran -brandon -bribe -bribery -briefcase -broadcasters -broadened -brownies -bruises -bubbly -buckling -bunnies -buzzer -calipers -calligraphy -campground -candies -capillaries -capping -carl -carrie -cartier -casein -catalysis -catenin -caveat -cayenne -centrifuge -cerebellum -characterisation -charisma -charting -cheeky -cheerleading -chevron -chiropractor -choked -christopher -chrysler -chute -cine -cipher -ciprofloxacin -circumvent 
-clans -clark -classed -classmate -cleats -clickable -clique -clog -clonal -clove -clumsy -coasters -coca -codified -coincided -colons -combating -commandments -commend -commended -commercialization -commuters -compensating -competitively -compilers -compromises -concierge -concurrency -condensing -confine -conformal -confounding -congested -congestive -conservatory -conserving -consolation -contemplation -contemporaries -convolution -corollary -coronal -correctional -corvette -counteract -covariates -craftsmen -crates -credential -crippled -cryptographic -cufflinks -culmination -cultivar -cupboards -cushioned -cuties -cutout -cyclase -cytology -dangerously -danish -dard -datum -deadlock -dearest -decaying -decking -decompose -deepening -deformations -defunct -deleterious -deletes -deliberation -delimited -delinquency -denominated -departs -depictions -deported -depositfiles -deprive -designee -deteriorating -devastation -devon -devotional -diabetics -diarrhoea -differentials -diminishes -diners -diocese -dior -diplomat -disclaim -disgusted -dislocations -dispensed -displace -disregarded -disruptions -dissected -distinguishable -distort -distrust -diurnal -docket -dominion -donut -downgraded -drapes -dreamer -dreamy -dum -dumpster -dwellers -earl -eaters -echocardiography -ecstatic -ectopic -eddie -edging -editorials -eel -electrolytes -ellipse -embargo -embolism -emptiness -enclosing -encountering -endlessly -enlightening -enticing -entrenched -epitopes -epson -escalating -evade -evaporator -evidences -excavator -excitatory -exclusivity -exerts -expedite -expeditions -explorers -extruded -eyelids -fag -fandom -fastener -fatality -faucets -fern -fetuses -fibromyalgia -fieldwork -fillet -fillings -firewalls -firewood -fisted -fists -fistula -flagler -flanges -fleas -flirty -floodplain -floss -flourished -flourishing -flung -folic -folio -foreigner -forested -forfeit -forte -fortnight -fostered -franklin -frantic -friendliness -frustrations -fulfilment -fuselage -garbled -gardeners -gateways -gazing -generalizations -genomics -giggle -giraffe -glassware -glial -glomerular -gracefully -grassy -grieving -guesses -guidebook -guise -gurus -hallucinations -handover -handshake -harming -harms -hauled -heartburn -hedges -heterozygous -hideous -hijab -hillside -histograms -hives -homebrew -homogenous -homolog -homosexuals -horde -hudson -hurried -husky -hydrochloric -hydrostatic -hymns -hyperactivity -hyphens -hypothalamus -ici -identically -ideologies -imbalances -immunized -immunoreactivity -imperialism -impoverished -impregnated -improvisation -improvised -impulsive -incarceration -indesign -inductor -inelastic -infiltrate -inflamed -inflict -inflows -informally -informant -infrequently -initio -injure -innovate -innumerable -inscriptions -insignia -insistence -inspirations -insurgents -intellectually -intercepted -interlocking -interrelated -interviewees -intimidated -intuitively -inventors -ionizing -isle -isometric -isothermal -jackie -jacquard -jest -jock -joker -jonathan -joyous -jurisdictional -kangaroo -kent -khaki -kilometer -kink -kinship -knuckle -lagging -lamborghini -lamina -laminar -larry -lawfully -lawrence -lax -leans -leech -leisurely -lenovo -ligaments -lighten -lightroom -linger -lipase -lithography -liz -lockers -lodges -lohan -lotions -luminance -luminescence -lunchtime -macular -mae -magenta -magnifying -maids -mailboxes -mam -maneuvers -maniac -manic -marcus -margarine -marquee -marshall -marvellous -masculinity -mashup -matchup -mathematician -maximized -mechanistic -meg 
-mel -mesa -methotrexate -meticulously -mica -microfiber -microfiche -midpoint -milan -milieu -milled -millimeter -mimics -ministerial -miranda -misconceptions -misfortune -misrepresentation -mister -mitosis -mocking -modifiers -moduli -modulo -monograph -monolayers -monolithic -moody -moons -mowing -murdering -musculoskeletal -musicals -muslims -mustache -muster -narcotic -neces -neil -newbies -newsroom -nhl -nicknamed -nonfiction -normality -norton -notoriously -nuances -oat -oblivion -oblivious -oblong -octet -odour -oligonucleotides -olympics -onscreen -opposes -optimality -orchids -orientated -orleans -oscar -oscillating -outings -outlaw -outraged -outsource -overboard -overlays -overlooks -overlying -oxo -oxycontin -pamela -panther -paprika -paralyzed -parchment -patchwork -paternity -patriots -patterning -pax -payback -pedicure -penal -penned -pep -perched -percocet -perforation -perfused -petitioners -philips -phonetic -photonic -physiologic -pickles -piecewise -pike -pines -pious -pla -placeholder -planters -platoon -playthrough -pleases -plex -plunger -polypeptides -pontiac -portuguese -positivity -postmenopausal -powerless -ppb -preamp -precede -precedes -precinct -predictability -prejudices -prelude -pretreated -primaries -proline -proofing -prophetic -prostaglandin -punishable -punt -puzzling -quan -quenched -querying -rabies -racially -radiance -radiographic -radiological -radon -rafting -rainwater -rams -ratchet -rca -reachable -reassuring -rebirth -reciprocity -recite -reclamation -reconciled -recoveries -rectangles -recursion -recyclable -redirects -reflectivity -reforming -regained -reigns -reloaded -reluctantly -renault -renewing -repentance -repetitions -replaceable -repressed -repulsive -repurchase -resell -resent -resigns -restorative -revamped -rica -rigged -robbers -rudder -rumble -safeguarding -salted -sassy -satis -savory -scammed -scammer -scarring -scavenger -scrambling -seniority -sensagent -sensitization -seventies -severance -sha -shah -shaman -shameless -sharma -shortcode -shrug -shrunk -sill -simulators -sited -sizeable -ska -slamming -slapping -slaughtered -slime -slum -smartest -smelly -snorkeling -socialize -sod -solitaire -solr -sonia -soot -sorghum -spd -spongebob -sportswear -sprinkled -sprocket -sputtering -squads -stag -stalker -standardised -stepwise -stevens -stoned -streamed -stressors -stretcher -strikingly -strobe -strung -stumbling -subdomains -subroutine -subsections -subtly -suffices -sultry -summing -supermodel -superseded -suppl -surfactants -surges -surnames -sweatshirts -sweden -sweeter -swore -symbolizes -synagogue -syncing -syndicate -syndication -tabletop -takers -tamoxifen -tampa -tapestry -tarot -tasteful -teaming -telegraph -telescopes -tempt -tendons -tentacle -tentacles -thaw -thermodynamics -thickened -thongs -thorn -thymus -ticker -ticketing -tickle -tightness -tiling -tillage -tilting -toad -tomcat -toms -toning -toons -totes -touted -transitioning -transitive -treason -trembling -tremor -triathlon -tributaries -triglycerides -triplicate -troy -truncation -tryin -tuples -turkeys -tweeting -unauthorised -unbearable -unbelievably -unbounded -uncanny -uncheck -undefeated -undeniable -undermined -underrated -understated -undertakes -undertakings -undisputed -undocumented -unfairly -unfit -unfolds -unheard -unifying -unisex -univariate -unlabeled -unmarked -unplug -unveiling -upholstered -urination -utterances -vardenafil -ven -veneers -veronica -vers -vert -vertebrae -vertebrates -vibes -vic -vided -vigilant -vile -viva -vocation 
-vpn -wager -waivers -wannabe -wap -warfarin -wei -werewolf -westward -whine -whispers -whistles -wickets -wilt -wineries -wiser -workflows -worsen -wrappers -wrestle -wretched -zoe -zoned -abducted -aberrant -aberrations -absentee -abyss -accelerometer -accumulates -acura -addicting -addictions -adenovirus -adherent -adjoint -ado -aesthetically -afferent -affinities -aggregator -agro -albert -aldehyde -aliquots -alma -alphanumeric -ambassadors -ame -amphetamine -amsterdam -anabolic -analyte -anchorage -annie -annuities -antecedent -anthropogenic -anticipates -antigenic -apologized -appellants -applique -appointing -archaic -ardent -aria -arkansas -armpit -aromatherapy -arrhythmias -arthur -artistry -ascend -aspartate -associating -ast -astonished -astronauts -ata -atheists -atlantic -attested -autosomal -awfully -awning -axonal -azimuth -azithromycin -bale -ballerina -bandwagon -bangkok -bangles -baroque -barracks -basalt -basing -battled -beacons -beading -beak -bearers -bebe -benzyl -betty -billboards -bioavailability -biologist -bir -blanc -blaster -blemishes -blight -bloated -blogosphere -blooded -blots -bluff -boardwalk -bodyguard -bolus -bombarded -boneless -boulders -bourgeois -bouts -brag -braids -brainstorming -breathed -brewed -brim -brisk -browns -bruised -brushless -bryan -bubbling -budgeted -buffs -buick -bullies -bullion -bumping -buoyancy -burch -burgeoning -burglar -buster -bustle -cabs -cadherin -cadre -calcite -cancelling -cancerous -canola -cantilever -capri -carboxyl -carer -carey -caries -carrera -cartons -casio -casters -catheters -cauliflower -cautioned -celexa -cemented -censor -centering -certifying -chad -chairperson -chancellor -checklists -checksum -cheered -choline -cholinergic -choral -chore -chu -cid -cindy -clams -clarifies -clays -clearances -climber -clonazepam -closeness -coals -coax -coexistence -collins -colonists -columbus -combustible -comedians -comedies -commemorative -commune -commutative -compacted -companionship -comparator -concede -conceptually -condemns -condominiums -condone -confiscated -conglomerate -congruent -conscientious -consented -consenting -conservatism -considerate -consolidating -constituencies -contingencies -contoured -contraceptives -contractile -converged -converging -cools -cooperatives -corpora -correspondingly -cosine -cosmological -cosmology -councillors -cox -coyote -crammed -cramped -crepe -criticizing -crock -crowding -cruisers -crusade -cryptic -crystallized -cubs -cucumbers -cuddle -cultivating -custard -cyberspace -cytosol -cytotec -dale -dame -danielle -darkened -dashes -debilitating -decedent -decency -defamation -deference -deformity -deliberations -delimiter -delineated -deliverables -delusion -democracies -denise -depolarization -deregulation -desiring -despatched -despise -detergents -dexamethasone -dials -dichotomy -diecast -digitize -dill -dilutions -dimers -dimmer -disagreements -disassembly -discoloration -discretization -discriminated -discriminative -discus -dispositions -disproportionately -disrupting -ditches -ditto -diversify -divisional -dodgy -domestically -donates -doubted -dreary -dressings -drifted -drones -drool -drugstore -dublin -dvr -dyeing -dynamite -earthy -eased -eastward -ecclesiastical -eden -egyptian -eighties -eject -ell -ella -elm -emigration -emits -empowers -endeavours -endoplasmic -endoscopy -enlighten -enriching -ensued -entangled -entice -entrant -episodic -equalization -equates -equivalently -errands -erythrocyte -erythromycin -ess -evacuate -evokes -excavations -exclamation 
-exclusives -excused -exertion -exhilarating -exons -expat -extradition -extremist -extremists -eyesight -fabricate -factoring -falciparum -fanfic -farmed -fascist -fda -fellowships -feudal -fiercely -fifteenth -figurative -filmmaking -finitely -fished -flake -flanked -flared -flashback -flasks -flavorful -fleeting -fleets -flocks -floorplans -flutter -folate -footbed -forcibly -foreman -forfeited -forts -fosters -fours -fowl -frag -frail -franc -fray -freebie -fringes -fronted -frugal -fullscreen -funnier -gags -gallbladder -gamut -gar -garner -garnish -garrison -garter -gasp -gauze -gazebo -gee -gent -gibson -gist -giver -glaring -gliding -glittering -goers -googled -gordon -gorge -graciously -grafted -grandeur -granularity -graphene -grasslands -gratification -greets -grenades -grower -gutters -gyrus -hairdresser -halts -hammock -handout -handouts -hanged -haplotype -harassed -hazel -hcg -headboard -hemodynamic -hemorrhoids -henceforth -henna -hep -hepatocellular -herald -highlands -highschool -hindsight -hinted -histidine -histologic -hob -holiness -hugged -hunks -hush -huts -hydrothermal -hypothalamic -hypoxic -hysterectomy -hysterical -ibibo -idiosyncratic -ids -illegitimate -illiterate -illustrious -imaged -immortality -immunofluorescence -impeachment -impeller -impossibility -improbable -incarcerated -incompetence -indecent -indentation -informants -infront -inquiring -insecticide -intensify -intensively -interdependence -interprets -interrupting -intertwined -iraq -ire -irritable -isbn -islet -islets -jacob -janvier -jobless -joey -journalistic -juggling -jugs -justices -justifying -kameez -kannada -kerosene -ketone -kewego -keyed -keyless -kickoff -kilogram -kilometre -kinematics -kneeling -kool -krishna -landfills -landowner -lanka -larva -lattices -lavigne -leaderboard -leaky -lecturers -legion -lessee -libel -libertarian -lignin -lilac -lilies -lima -limiter -liposomes -liposuction -livery -locales -localize -locomotion -lola -lookups -looping -loudspeaker -louisiana -lounges -lovable -luis -lumens -luster -magnified -magyar -mainframe -malice -malignancies -mammoth -manhattan -manhood -manifesto -manslaughter -marches -marco -marginalized -mariah -marketable -marketplaces -marries -melanogaster -memoirs -memphis -mesenchymal -messengers -meteor -methamphetamine -microblogs -microtubule -midfield -midline -midsole -midwives -mime -mingle -mlb -mls -mobilize -modding -modulates -molasses -molly -molybdenum -momentary -monetized -monologue -monotonic -monte -moot -morbid -morphism -motels -motherboards -moths -motoring -mounds -mouthpiece -mujeres -multifunctional -multilevel -multimodal -multiplex -multiplexing -murderers -myelin -nana -nary -nascent -nationalities -navigational -negate -negation -neighbourhoods -nelson -neonates -neoplasms -neoplastic -nerds -nerf -neuromuscular -neutralizing -newsfeed -nib -nigh -noncommercial -nonlinearity -norepinephrine -northward -notifies -nuggets -oaks -oftentimes -ohh -ono -onshore -oocyte -operand -optimizer -ora -ord -ordinal -orient -originator -orphanage -oscillators -oscillatory -otter -overarching -overcomes -overflowing -oxidant -oxidizing -oxy -pac -pacemaker -packers -paddling -pairings -palaces -pallets -palpable -parable -paranoia -pathophysiology -patriot -patrols -payouts -pebble -pebbles -pecans -pediatrician -penetrates -peppermint -permutations -pesky -pessimistic -pharmacokinetic -philanthropy -phobia -photocopy -pierre -pigmentation -pimples -pinning -pittsburgh -playa -poland -ponytail -populace -portraying -positron 
-postseason -potable -potion -practicality -praising -prawns -predicament -pref -preferring -prefixes -premieres -presiding -presley -pretended -prettiest -preventable -priligy -primes -profiled -pronoun -pronouns -propel -prophylactic -proponent -propranolol -propylene -proteolytic -protracted -protruding -psych -psychiatrists -psychologically -publicized -punishments -purifier -pvt -pyrolysis -queuing -quid -quired -racy -radeon -rags -raided -raiding -raj -rallied -rambling -rampage -rapids -raping -rations -realty -rebellious -recognising -reconfiguration -reconstituted -redefine -refills -regenerate -regenerative -regionally -registries -regulars -rehearsals -reid -reins -reinstated -reissue -relegated -relic -remand -rematch -remit -remorse -renin -repent -repercussions -republish -republished -resets -resized -responder -restructure -retires -retrospect -reverb -reverses -revolve -rewind -ridicule -riff -rigorously -risking -riverside -roberts -rockstar -roomy -rooster -rumour -saith -salesperson -sally -sarees -sari -satchel -saucer -savannah -savers -scammers -scoops -scoreboard -scorpion -scraped -scrapped -screenwriter -scribe -sealer -seasonally -secondhand -sedentary -seducing -septal -sequestration -sequins -setbacks -sever -sewers -sexist -shakira -shameful -sharon -shearing -shimmer -shire -shootings -shortcodes -showtime -shroud -shrugged -silenced -simulates -singly -sinner -skier -skiers -skillful -skit -slaying -slowest -smoothie -sniffing -solicited -soliciting -songwriting -soothe -sou -sowing -sown -spank -spiderman -spokes -sprawling -sprayer -sprites -sqrt -sro -stagnant -stagnation -starcraft -stares -startled -stead -sterilized -stimulant -strattera -strenuous -stricter -subbed -subcategories -subcontractor -subjectivity -submarines -subspecies -suckers -sulfuric -summarizing -summertime -supernatants -superposition -superstars -surgically -surveyor -swarovski -swirling -synthetase -tabulated -tacky -taining -tamper -tate -tavern -tec -telnet -terminator -therefrom -thor -thymidine -timeshare -tins -titans -titers -tolerable -tombs -toppings -torment -torus -tourney -tradeoff -tranquility -transgene -transients -tributary -trinity -trophic -ttl -tubules -tumbling -tunable -turbidity -twitch -tycoon -tylenol -ukulele -underlie -undermining -underscore -undone -uninstalled -unintentional -unravel -unsalted -unsuspecting -unusable -upkeep -urbanization -usd -utilising -vacated -vaio -valet -variously -vaulted -venerable -ventilator -verbatim -verifies -vernacular -vibrate -virtuous -vividly -vmware -voodoo -wales -waxed -wel -wheelchairs -whim -whisk -wick -widows -wildest -windscreen -witchcraft -woodlands -workloads -worldview -worshipped -wright -xvi -yung -zation -zedge -zeolite -zine -zipped -zooming -aad -abelian -aberration -abstractions -accolades -acidosis -acquitted -actuated -adapts -adored -adorn -adsl -advices -aeration -aerobics -affirming -affliction -afloat -afro -aha -ail -ain -aliasing -aliphatic -allegra -alluded -amazement -ambush -amends -americana -amnesia -analogies -analy -anesthetized -angelic -angioplasty -angst -anhydride -animator -anionic -announcer -antifungal -apathy -apostle -appalling -applicator -appraiser -approximating -ariel -assignee -assistive -astrological -atherosclerotic -atmospheres -aunties -aunts -auteur -aver -awoke -babysitting -backpacking -baffle -baffled -bales -ballads -balsamic -baltimore -banda -banter -barbed -barbeque -barrage -barring -barry -beachfront -begining -benevolent -bestseller -beth -billiard 
-binocular -biofuel -biologic -birmingham -bison -bistro -blackmail -bleached -blogroll -blondie -blouses -blueprints -bod -bombardment -boobed -bookcase -booklets -bootable -bosch -bosom -boulevard -bowler -bps -bradley -bragging -breakdowns -breakthroughs -breaths -bridged -briefings -brisbane -broiler -brownish -bruising -buddha -bumpy -bungee -bureaucrats -buttermilk -bylaws -cabinetry -cadet -cadets -cali -caloric -camber -canceling -candidacy -canning -capitalists -capitol -carcinogenic -cardiology -carnage -carotene -carpeting -carvings -cascading -causative -caustic -cel -cellphones -cemeteries -centerline -cerebrospinal -chandeliers -chang -chants -chastity -checkpoints -chemokine -chen -chihuahua -childs -chimeric -chipping -chitosan -christine -chuckle -circling -citric -classically -claudia -cleverly -climbers -clinging -clogs -clots -clotting -clubbing -coalitions -cob -cochlear -coder -coercive -coexist -coldplay -colds -coloration -colossal -combi -comedic -concealment -concerto -concurring -conformations -confronts -conjugates -connecticut -consents -constitutionally -conveniences -convent -conversely -copious -corporal -correspondents -cowl -cranberries -cranks -crawled -crazed -crimp -critters -crossdresser -crumble -crustal -crypto -cryptography -crystallographic -culminated -cusp -customise -cytoskeleton -deathly -deceit -decker -decoded -defences -defiance -defloration -deities -delve -demeanor -democrats -demonic -dengue -deodorant -dermatologist -derogatory -desirability -detach -devine -diablo -diaspora -diffused -digitalnature -dilatation -dimming -diploid -diplomats -disables -disclaimers -disconnection -discontent -discontinuation -disinfection -disintegration -dismay -dismissing -disposing -disrespectful -divinity -doable -docked -dogma -dolce -doublet -doug -downed
diff -Nru mozc-1.11.1502.102/data/preedit/flick-hiragana.tsv mozc-1.11.1522.102/data/preedit/flick-hiragana.tsv
--- mozc-1.11.1502.102/data/preedit/flick-hiragana.tsv 2013-07-17 02:37:29.000000000 +0000
+++ mozc-1.11.1522.102/data/preedit/flick-hiragana.tsv 2013-08-28 05:25:48.000000000 +0000
@@ -148,8 +148,8 @@
 よ* ょ
 ょ* よ
 よ` ょ
-t 「 NoTransliteration
-v 」 NoTransliteration
+t ( NoTransliteration
+v ) NoTransliteration
 9 ら NoTransliteration
 w り NoTransliteration
 x る NoTransliteration
@@ -159,9 +159,7 @@
 [ NoTransliteration
 ] NoTransliteration
 ` NoTransliteration
-( ( NoTransliteration
 ' ’ NoTransliteration
-) ) NoTransliteration
 0 わ NoTransliteration
 わ* ゎ
 ゎ* わ
diff -Nru mozc-1.11.1502.102/data/preedit/toggle_flick-hiragana.tsv mozc-1.11.1522.102/data/preedit/toggle_flick-hiragana.tsv
--- mozc-1.11.1502.102/data/preedit/toggle_flick-hiragana.tsv 2013-07-17 02:37:29.000000000 +0000
+++ mozc-1.11.1522.102/data/preedit/toggle_flick-hiragana.tsv 2013-08-28 05:25:48.000000000 +0000
@@ -285,8 +285,8 @@
 8 {?}や NewChunk NoTransliteration
 u {*}ゆ NoTransliteration
 ^ {*}よ NoTransliteration
-t 「 NoTransliteration
-v 」 NoTransliteration
+t ( NoTransliteration
+v ) NoTransliteration
 {?}や8 {?}ゆ
 {?}ゆ8 {?}よ
 {?}よ8 {?}ゃ
@@ -339,9 +339,7 @@
 [ NoTransliteration
 ] NoTransliteration
 ` NoTransliteration
-( ( NoTransliteration
 ' ’ NoTransliteration
-) ) NoTransliteration
 0 {?}わ NewChunk NoTransliteration
 + を NoTransliteration
 / ん NoTransliteration
diff -Nru mozc-1.11.1502.102/data/rules/boundary.def mozc-1.11.1522.102/data/rules/boundary.def
--- mozc-1.11.1502.102/data/rules/boundary.def 2013-07-17 02:37:29.000000000 +0000
+++ mozc-1.11.1522.102/data/rules/boundary.def 2013-08-28 05:25:48.000000000 +0000
@@ -103,6 +103,7 @@
 # Suppress incomplete X-接続 forms, e.g.:
 # - かい -> 書い and かい -> 描い (連用タ接続)
 # - いお -> 言お and ひらこ -> 開こ (未然ウ接続)
+# - こうふか -> 耕深 (ガル接続)
 # See http://mozcsuorg.appspot.com/#issue/187 for more examples. Putting large
 # penalty will be safe because users will input those words in complete forms,
 # like かいた. This rule should be SUFFIX because, if it is PREFIX,
@@ -110,6 +111,7 @@
 SUFFIX 動詞,*,*,*,連用タ接続, 7000
 SUFFIX 動詞,*,*,*,*,連用タ接続, 7000
 SUFFIX 形容詞,*,*,*,*,連用タ接続, 7000
+SUFFIX 形容詞,*,*,*,*,ガル接続, 7000
 SUFFIX 動詞,自立,*,五段・カ行イ音便,未然ウ接続, 7000
 SUFFIX 動詞,自立,*,五段・ワ行ウ音便,未然ウ接続, 7000

@@ -118,6 +120,7 @@
 # - あいの -> 会い+の (五段・ワ行促音便,丁寧連用形)
 # - ふけ -> ふけ (仮定形 of ふく, 五段・カ行イ音便)
 # - いとい -> いとい (連用形 of いとう, 五段・ワ行ウ音便,連用形)
+# - こうふか -> 高付加 vs 交付か (サ変名詞 + 助詞,か)
 # TODO(noriyukit): 仮定形 may be more agressively demoted as users will input
 # 仮定形 with particles, like すれ+ば.
 PREFIX 動詞,自立,*,*,五段・ワ行促音便,丁寧連用形,* 500
@@ -126,6 +129,7 @@
 SUFFIX 動詞,自立,*,五段・カ行イ音便,未然形, 2000
 SUFFIX 動詞,自立,*,五段・カ行イ音便,連用形, 1000
 SUFFIX 動詞,自立,*,五段・ワ行ウ音便,連用形, 1000
+SUFFIX 助詞,副助詞/並立助詞/終助詞,*,*,*,*,か 700

 #####################################################
 # Automatic rules.
diff -Nru mozc-1.11.1502.102/data/symbol/symbol.tsv mozc-1.11.1522.102/data/symbol/symbol.tsv
--- mozc-1.11.1502.102/data/symbol/symbol.tsv 2013-07-17 02:37:29.000000000 +0000
+++ mozc-1.11.1522.102/data/symbol/symbol.tsv 2013-08-28 05:25:48.000000000 +0000
@@ -58,10 +58,14 @@
 括弧閉 〛 ] 」 ) 」 終わり二重大括弧 PARENTHESIS 機種依存文字
 括弧開 { { [ ( 「 「 始め中括弧 PARENTHESIS
 括弧閉 } } ] ) 」 」 終わり中括弧 PARENTHESIS
-括弧開 〈 < ( [ 「 「 始め山括弧 PARENTHESIS
+括弧開 〈 < ( [ 「 「 始め山括弧 PARENTHESIS
 括弧閉 〉 > ) ] 」 」 終わり山括弧 PARENTHESIS
+括弧開 ‹ < ( [ 「 「 始め山括弧 PARENTHESIS
+括弧閉 › > ) ] 」 」 終わり山括弧 PARENTHESIS
 括弧開 《 << ( [ 「 「 始め二重山括弧 PARENTHESIS
 括弧閉 》 >> ) ] 」 」 終わり二重山括弧 PARENTHESIS
+括弧開 « ぎゅめ << ( [ 「 「 始め二重山括弧・ギュメ PARENTHESIS
+括弧閉 » ぎゅめ >> ) ] 」 」 終わり二重山括弧・ギュメ PARENTHESIS
 括弧開 「 [ 「 ( 「 始めかぎ括弧 PARENTHESIS
 括弧閉 」 ] 」 ) 」 終わりかぎ括弧 PARENTHESIS
 括弧開 『 [ 「 ( 「 始め二重かぎ括弧 PARENTHESIS
@@ -76,6 +80,8 @@
 記号 {} かっこ 中括弧
 記号 〈〉 かっこ 山括弧
 記号 《》 かっこ 二重山括弧
+記号 «» かっこ ぎゅめ 二重山括弧・ギュメ
+記号 ‹› かっこ 一重山括弧
 記号 「」 かっこ かぎかっこ かぎ括弧
 記号 『』 かっこ かぎかっこ 二重かぎ括弧
 記号 【】 かっこ すみつき括弧
diff -Nru mozc-1.11.1502.102/data/test/dictionary/dictionary.txt mozc-1.11.1522.102/data/test/dictionary/dictionary.txt
--- mozc-1.11.1502.102/data/test/dictionary/dictionary.txt 2013-07-17 02:38:03.000000000 +0000
+++ mozc-1.11.1522.102/data/test/dictionary/dictionary.txt 2013-08-28 05:26:12.000000000 +0000
@@ -65721,3 +65721,4 @@
 じかん 1934 2024 8483 持間
 かん 1939 1939 4553 かん
 むりょう 1939 1939 1500 無料
+まほうしょうじょまどかまぎか 2177 2177 4927 魔法少女まどか☆マギカ
diff -Nru mozc-1.11.1502.102/data/test/session/scenario/scenario.gyp mozc-1.11.1522.102/data/test/session/scenario/scenario.gyp
--- mozc-1.11.1502.102/data/test/session/scenario/scenario.gyp 1970-01-01 00:00:00.000000000 +0000
+++ mozc-1.11.1522.102/data/test/session/scenario/scenario.gyp 2013-08-28 05:26:07.000000000 +0000
@@ -0,0 +1,74 @@
+# Copyright 2010-2013, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +{ + 'variables': { + 'relative_dir': 'data/test/session/scenario', + 'gen_out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_dir)', + }, + 'targets': [ + { + 'target_name': 'install_session_handler_scenario_test_data', + 'type': 'none', + 'variables': { + 'test_data': [ + 'auto_partial_suggestion.txt', + 'b7132535_scenario.txt', + 'b7321313_scenario.txt', + 'b8703702_scenario.txt', + 'change_request.txt', + 'clear_user_prediction.txt', + 'composition_display_as.txt', + 'conversion.txt', + 'conversion_display_as.txt', + 'conversion_with_history_segment.txt', + 'conversion_with_long_history_segments.txt', + 'delete_history.txt', + 'desktop_t13n_candidates.txt', + 'insert_characters.txt', + 'mobile_qwerty_transliteration_scenario.txt', + 'mobile_t13n_candidates.txt', + 'on_off_cancel.txt', + 'partial_suggestion.txt', + 'pending_character.txt', + 'predict_and_convert.txt', + 'reconvert.txt', + 'revert.txt', + 'segment_focus.txt', + 'segment_width.txt', + 'twelvekeys_switch_inputmode_scenario.txt', + 'twelvekeys_toggle_hiragana_preedit_scenario.txt', + 'undo.txt', + ], + 'test_data_subdir': 'data/test/session/scenario', + }, + 'includes': ['../../../../gyp/install_testdata.gypi'], + }, + ], +} diff -Nru mozc-1.11.1502.102/data/test/session/scenario/usage_stats/usage_stats.gyp mozc-1.11.1522.102/data/test/session/scenario/usage_stats/usage_stats.gyp --- mozc-1.11.1502.102/data/test/session/scenario/usage_stats/usage_stats.gyp 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/data/test/session/scenario/usage_stats/usage_stats.gyp 2013-08-28 05:26:07.000000000 +0000 @@ -0,0 +1,71 @@ +# Copyright 2010-2013, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +{ + 'variables': { + 'relative_dir': 'data/test/session/scenario/usage_stats', + 'gen_out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_dir)', + }, + 'targets': [ + { + 'target_name': 'install_session_handler_usage_stats_scenario_test_data', + 'type': 'none', + 'variables': { + 'test_data': [ + "conversion.txt", + "prediction.txt", + "suggestion.txt", + "composition.txt", + "select_prediction.txt", + "select_minor_conversion.txt", + "select_minor_prediction.txt", + "mouse_select_from_suggestion.txt", + "select_t13n_by_key.txt", + "select_t13n_on_cascading_window.txt", + "switch_kana_type.txt", + "multiple_segments.txt", + "select_candidates_in_multiple_segments.txt", + "select_candidates_in_multiple_segments_and_expand_segment.txt", + "continue_input.txt", + "continuous_input.txt", + "multiple_sessions.txt", + "backspace_after_commit.txt", + "backspace_after_commit_after_backspace.txt", + "multiple_backspace_after_commit.txt", + "zero_query_suggestion.txt", + "auto_partial_suggestion.txt", + "insert_space.txt", + "numpad_in_direct_input_mode.txt", + ], + 'test_data_subdir': 'data/test/session/scenario/usage_stats', + }, + 'includes': ['../../../../../gyp/install_testdata.gypi'], + }, + ], +} \ No newline at end of file diff -Nru mozc-1.11.1502.102/data/usage_stats/stats.def mozc-1.11.1522.102/data/usage_stats/stats.def --- mozc-1.11.1502.102/data/usage_stats/stats.def 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/data/usage_stats/stats.def 2013-08-28 05:26:12.000000000 +0000 @@ -129,6 +129,10 @@ TransliterationCandidates9 TransliterationCandidatesGE10 +# The count of language aware suggestions are displayed and committed. 
+LanguageAwareSuggestionTriggered +LanguageAwareSuggestionCommitted + # The count of mouse selection command call MouseSelect @@ -300,9 +304,11 @@ SendCommand_UndoOrRewind SendCommand_ExpandSuggestion SendCommand_SendCaretLocation -SendCommand_SendLanguageBarCommand +SendCommand_ObsoleteSendLanguageBarCommand SendCommand_GetAsyncResult SendCommand_CommitRawText +SendCommand_ConvertPrevPage +SendCommand_ConvertNextPage # The count of revert in Chrome Omnibox or Google search box SendCommand_RevertInChromeOmnibox @@ -382,7 +388,7 @@ DIVIDE # Numpad [=] EQUALS -ASCII +TEXT_INPUT HANKAKU KANJI KATAKANA diff -Nru mozc-1.11.1502.102/debian/changelog mozc-1.11.1522.102/debian/changelog --- mozc-1.11.1502.102/debian/changelog 2013-08-22 12:35:30.000000000 +0000 +++ mozc-1.11.1522.102/debian/changelog 2013-09-04 12:25:53.000000000 +0000 @@ -1,8 +1,31 @@ -mozc (1:1.11.1502.102-0~ppa7.1) raring; urgency=low +mozc (1:1.11.1522.102-0~ppa7) raring; urgency=low * - -- Sawa (ikoinoba) Sun, 18 Aug 2013 15:19:46 +0900 + -- Sawa (ikoinoba) Wed, 04 Sep 2013 21:25:50 +0900 + +mozc (1.11.1522.102-0~ubuntu1~raring1) raring; urgency=low + + * New upstream release + * debian/patches/fcitx-mozc.patch: updated. + * debian/patches/caption.patch, caption-notificaion.patch, + caption-lookup-table.patch, caption-lookup-table2.patch: + removed. already in upstream. + + -- Ikuya Awashiro Fri, 30 Aug 2013 21:49:03 +0900 + +mozc (1.11.1502.102-0~ubuntu1~raring3) raring; urgency=low + + * debian/patches/caption.patch: added for showing caption. + https://gitorious.org/fcitx/mozc/commit/7d458467e8857a5fa3fa14b95eb2a069f9fc7dca + * debian/patches/caption-notificaion.patch: added for showing caption in notification. + https://gitorious.org/fcitx/mozc/commit/4b86816011a1f5d120ce8d2f8891cb47a348b50e + * debian/patches/caption-lookup-table.patch: added for showing caption in lookup table. + https://gitorious.org/fcitx/mozc/commit/54b352e773484e1a9a75467548f08851c495c57b + * debian/patches/caption-lookup-table2.patch: fix preedit issue. 
+ https://gitorious.org/fcitx/mozc/commit/7705a2df7053befa15a4ba582e1a18d6818e0e89 + + -- Ikuya Awashiro Sat, 24 Aug 2013 14:20:06 +0900 mozc (1.11.1502.102-0~ubuntu1~raring2) raring; urgency=low diff -Nru mozc-1.11.1502.102/debian/patches/caption-lookup-table.patch mozc-1.11.1522.102/debian/patches/caption-lookup-table.patch --- mozc-1.11.1502.102/debian/patches/caption-lookup-table.patch 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/caption-lookup-table.patch 2013-08-26 23:52:56.000000000 +0000 @@ -0,0 +1,151 @@ +From 54b352e773484e1a9a75467548f08851c495c57b Mon Sep 17 00:00:00 2001 +From: Weng Xuetian +Date: Sun, 25 Aug 2013 17:24:22 -0400 +Subject: [PATCH] [mozc] usage candidate table to display usage + +--- + unix/fcitx/eim.cc | 49 +++++++++++++++++++++++++++---- + unix/fcitx/fcitx_key_translator.cc | 1 + + unix/fcitx/fcitx_mozc.cc | 1 - + unix/fcitx/fcitx_mozc.h | 3 +- + 4 files changed, 45 insertions(+), 9 deletions(-) + +diff --git a/unix/fcitx/eim.cc b/src/unix/fcitx/eim.cc +index 6d4fd7c..f3ee896 100644 +--- a/unix/fcitx/eim.cc ++++ b/unix/fcitx/eim.cc +@@ -34,7 +34,7 @@ + #include + #include + #include +-#include ++#include + #include + #include "fcitx_mozc.h" + #include "mozc_connection.h" +@@ -42,6 +42,7 @@ + + typedef struct _FcitxMozcState { + mozc::fcitx::FcitxMozc* mozc; ++ int inUsageState; + } FcitxMozcState; + + +@@ -146,15 +147,43 @@ INPUT_RETURN_VALUE FcitxMozcDoInput(void* arg, FcitxKeySym _sym, unsigned int _s + FcitxInstance* instance = mozcState->mozc->GetInstance(); + FcitxInputState* input = FcitxInstanceGetInputState(mozcState->mozc->GetInstance()); + ++ if (mozcState->inUsageState) { ++ if (FcitxHotkeyIsHotKey(_sym, _state, FCITX_ESCAPE)) { ++ mozcState->inUsageState = false; ++ mozcState->mozc->process_key_event(FcitxKey_VoidSymbol, 0, 0, CheckLayout(instance), false); ++ return IRV_DISPLAY_CANDWORDS; ++ } else { ++ return IRV_DO_NOTHING; ++ } ++ } ++ + if (FcitxHotkeyIsHotKey(_sym, _state, MOZC_CTRL_ALT_H)) { + pair< string, string > usage = mozcState->mozc->GetUsage(); + if (usage.first.size() != 0 || usage.second.size() != 0) { +- FcitxFreeDesktopNotifyShow( +- instance, "fcitx-mozc-usage", +- 0, mozcState->mozc->GetIconFile("mozc.png").c_str(), +- usage.first.c_str(), usage.second.c_str(), +- NULL, -1, NULL, NULL, NULL); +- return IRV_DO_NOTHING; ++ mozcState->inUsageState = true; ++ FcitxCandidateWordList* candList = FcitxInputStateGetCandidateList(mozcState->mozc->GetInputState()); ++ FcitxInstanceCleanInputWindow(instance); ++ FcitxCandidateWordReset(candList); ++ FcitxCandidateWordSetPageSize(candList, 9); ++ FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); ++ FcitxCandidateWordSetChoose(candList, "\0\0\0\0\0\0\0\0\0\0"); ++ FcitxMessages* preedit = FcitxInputStateGetPreedit(input); ++ FcitxMessagesAddMessageAtLast(preedit, MSG_TIPS, "%s[%s]", usage.first.c_str(), _("Press Escape to go back")); ++ ++ UT_array* lines = fcitx_utils_split_string(usage.second.c_str(), '\n'); ++ utarray_foreach(line, lines, char*) { ++ FcitxCandidateWord candWord; ++ candWord.callback = NULL; ++ candWord.extraType = MSG_OTHER; ++ candWord.strExtra = NULL; ++ candWord.priv = NULL; ++ candWord.strWord = strdup(*line); ++ candWord.wordType = MSG_OTHER; ++ candWord.owner = NULL; ++ FcitxCandidateWordAppend(candList, &candWord); ++ } ++ utarray_free(lines); ++ return IRV_DISPLAY_MESSAGE; + } + } + +@@ -177,6 +206,11 @@ INPUT_RETURN_VALUE FcitxMozcDoReleaseInput(void* arg, FcitxKeySym _sym, unsigned + FcitxInputState* input = 
FcitxInstanceGetInputState(mozcState->mozc->GetInstance()); + FCITX_UNUSED(_sym); + FCITX_UNUSED(_state); ++ ++ if (mozcState->inUsageState) { ++ return IRV_DONOT_PROCESS; ++ } ++ + FcitxKeySym sym = (FcitxKeySym) FcitxInputStateGetKeySym(input); + uint32 keycode = FcitxInputStateGetKeyCode(input); + uint32 state = FcitxInputStateGetKeyState(input); +@@ -209,6 +243,7 @@ void FcitxMozcSave(void* arg) + void FcitxMozcResetIM(void* arg) + { + FcitxMozcState* mozcState = (FcitxMozcState*) arg; ++ mozcState->inUsageState = false; + mozcState->mozc->resetim(); + } + +diff --git a/unix/fcitx/fcitx_key_translator.cc b/src/unix/fcitx/fcitx_key_translator.cc +index bfeca8c..62a87d9 100644 +--- a/unix/fcitx/fcitx_key_translator.cc ++++ b/unix/fcitx/fcitx_key_translator.cc +@@ -41,6 +41,7 @@ const struct SpecialKeyMap { + uint32 from; + mozc::commands::KeyEvent::SpecialKey to; + } special_key_map[] = { ++ {FcitxKey_VoidSymbol, mozc::commands::KeyEvent::NO_SPECIALKEY}, + {FcitxKey_space, mozc::commands::KeyEvent::SPACE}, + {FcitxKey_Return, mozc::commands::KeyEvent::ENTER}, + {FcitxKey_Left, mozc::commands::KeyEvent::LEFT}, +diff --git a/unix/fcitx/fcitx_mozc.cc b/src/unix/fcitx/fcitx_mozc.cc +index 36db36c..ad31983 100644 +--- a/unix/fcitx/fcitx_mozc.cc ++++ b/unix/fcitx/fcitx_mozc.cc +@@ -46,7 +46,6 @@ + #include "unix/fcitx/mozc_connection.h" + #include "unix/fcitx/mozc_response_parser.h" + #include +-#include + + #define N_(x) (x) + +diff --git a/unix/fcitx/fcitx_mozc.h b/src/unix/fcitx/fcitx_mozc.h +index ee095c7..0e75f86 100644 +--- a/unix/fcitx/fcitx_mozc.h ++++ b/unix/fcitx/fcitx_mozc.h +@@ -122,6 +122,8 @@ public: + + std::pair GetUsage(); + ++ void DrawAll(); ++ + private: + friend class FcitxMozcTest; + +@@ -135,7 +137,6 @@ private: + bool ParseResponse ( const mozc::commands::Output &request ); + + void ClearAll(); +- void DrawAll(); + void DrawPreeditInfo(); + void DrawAux(); + +-- +1.7.1 + diff -Nru mozc-1.11.1502.102/debian/patches/caption-lookup-table2.patch mozc-1.11.1522.102/debian/patches/caption-lookup-table2.patch --- mozc-1.11.1502.102/debian/patches/caption-lookup-table2.patch 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/caption-lookup-table2.patch 2013-08-26 23:52:56.000000000 +0000 @@ -0,0 +1,52 @@ +From 7705a2df7053befa15a4ba582e1a18d6818e0e89 Mon Sep 17 00:00:00 2001 +From: Weng Xuetian +Date: Sun, 25 Aug 2013 17:32:52 -0400 +Subject: [PATCH] [mozc] don't clear preedit + +--- + src/unix/fcitx/eim.cc | 18 +++++++++++++++--- + 1 files changed, 15 insertions(+), 3 deletions(-) + +diff --git a/unix/fcitx/eim.cc b/src/unix/fcitx/eim.cc +index f3ee896..a39ab01 100644 +--- a/unix/fcitx/eim.cc ++++ b/unix/fcitx/eim.cc +@@ -150,6 +150,7 @@ INPUT_RETURN_VALUE FcitxMozcDoInput(void* arg, FcitxKeySym _sym, unsigned int _s + if (mozcState->inUsageState) { + if (FcitxHotkeyIsHotKey(_sym, _state, FCITX_ESCAPE)) { + mozcState->inUsageState = false; ++ // send a dummy key to let server send us the candidate info back without side effect + mozcState->mozc->process_key_event(FcitxKey_VoidSymbol, 0, 0, CheckLayout(instance), false); + return IRV_DISPLAY_CANDWORDS; + } else { +@@ -162,13 +163,24 @@ INPUT_RETURN_VALUE FcitxMozcDoInput(void* arg, FcitxKeySym _sym, unsigned int _s + if (usage.first.size() != 0 || usage.second.size() != 0) { + mozcState->inUsageState = true; + FcitxCandidateWordList* candList = FcitxInputStateGetCandidateList(mozcState->mozc->GetInputState()); +- FcitxInstanceCleanInputWindow(instance); ++ ++ // clear preedit, but keep client preedit ++ 
FcitxMessages* preedit = FcitxInputStateGetPreedit(input); ++ FcitxMessagesSetMessageCount(preedit, 0); ++ FcitxInputStateSetShowCursor(input, false); ++ ++ // clear aux ++ FcitxMessages* auxUp = FcitxInputStateGetAuxUp(input); ++ FcitxMessages* auxDown = FcitxInputStateGetAuxDown(input); ++ FcitxMessagesSetMessageCount(auxUp, 0); ++ FcitxMessagesSetMessageCount(auxDown, 0); ++ ++ // clear candidate table + FcitxCandidateWordReset(candList); + FcitxCandidateWordSetPageSize(candList, 9); + FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); + FcitxCandidateWordSetChoose(candList, "\0\0\0\0\0\0\0\0\0\0"); +- FcitxMessages* preedit = FcitxInputStateGetPreedit(input); +- FcitxMessagesAddMessageAtLast(preedit, MSG_TIPS, "%s[%s]", usage.first.c_str(), _("Press Escape to go back")); ++ FcitxMessagesAddMessageAtLast(preedit, MSG_TIPS, "%s [%s]", usage.first.c_str(), _("Press Escape to go back")); + + UT_array* lines = fcitx_utils_split_string(usage.second.c_str(), '\n'); + utarray_foreach(line, lines, char*) { +-- +1.7.1 + diff -Nru mozc-1.11.1502.102/debian/patches/caption-notification.patch mozc-1.11.1522.102/debian/patches/caption-notification.patch --- mozc-1.11.1502.102/debian/patches/caption-notification.patch 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/caption-notification.patch 2013-08-24 15:38:13.000000000 +0000 @@ -0,0 +1,157 @@ +From 4b86816011a1f5d120ce8d2f8891cb47a348b50e Mon Sep 17 00:00:00 2001 +From: Weng Xuetian +Date: Sat, 24 Aug 2013 11:12:27 -0400 +Subject: [PATCH] [mozc] show usage in notification + +--- + unix/fcitx/eim.cc | 19 +++++++++++++++++++ + unix/fcitx/fcitx_mozc.cc | 12 ++++++++++++ + unix/fcitx/fcitx_mozc.h | 6 ++++++ + unix/fcitx/mozc_response_parser.cc | 14 +++++++------- + 4 files changed, 44 insertions(+), 7 deletions(-) + +diff --git a/unix/fcitx/eim.cc b/src/unix/fcitx/eim.cc +index eec6fde..6d4fd7c 100644 +--- a/unix/fcitx/eim.cc ++++ b/unix/fcitx/eim.cc +@@ -34,6 +34,7 @@ + #include + #include + #include ++#include + #include + #include "fcitx_mozc.h" + #include "mozc_connection.h" +@@ -134,11 +135,29 @@ static void FcitxMozcDestroy(void *arg) + free(mozcState); + } + ++static const FcitxHotkey MOZC_CTRL_ALT_H[2] = { ++ {NULL, FcitxKey_H, FcitxKeyState_Ctrl_Alt}, ++ {NULL, FcitxKey_None, 0} ++}; ++ + INPUT_RETURN_VALUE FcitxMozcDoInput(void* arg, FcitxKeySym _sym, unsigned int _state) + { + FcitxMozcState* mozcState = (FcitxMozcState*) arg; + FcitxInstance* instance = mozcState->mozc->GetInstance(); + FcitxInputState* input = FcitxInstanceGetInputState(mozcState->mozc->GetInstance()); ++ ++ if (FcitxHotkeyIsHotKey(_sym, _state, MOZC_CTRL_ALT_H)) { ++ pair< string, string > usage = mozcState->mozc->GetUsage(); ++ if (usage.first.size() != 0 || usage.second.size() != 0) { ++ FcitxFreeDesktopNotifyShow( ++ instance, "fcitx-mozc-usage", ++ 0, mozcState->mozc->GetIconFile("mozc.png").c_str(), ++ usage.first.c_str(), usage.second.c_str(), ++ NULL, -1, NULL, NULL, NULL); ++ return IRV_DO_NOTHING; ++ } ++ } ++ + FCITX_UNUSED(_sym); + FCITX_UNUSED(_state); + FcitxKeySym sym = (FcitxKeySym) FcitxInputStateGetKeySym(input); +diff --git a/unix/fcitx/fcitx_mozc.cc b/src/unix/fcitx/fcitx_mozc.cc +index 9d9428f..36db36c 100644 +--- a/unix/fcitx/fcitx_mozc.cc ++++ b/unix/fcitx/fcitx_mozc.cc +@@ -46,6 +46,7 @@ + #include "unix/fcitx/mozc_connection.h" + #include "unix/fcitx/mozc_response_parser.h" + #include ++#include + + #define N_(x) (x) + +@@ -539,6 +540,17 @@ const std::string& FcitxMozc::GetCurrentCompositionModeIcon() { + return 
empty_string; + } + ++void FcitxMozc::SetUsage(const string& title_, const string& description_) ++{ ++ title = title_; ++ description = description_; ++} ++ ++pair< string, string > FcitxMozc::GetUsage() ++{ ++ return make_pair(title, description); ++} ++ + } // namespace fcitx + + } // namespace mozc_unix_scim +diff --git a/unix/fcitx/fcitx_mozc.h b/src/unix/fcitx/fcitx_mozc.h +index dff6de4..ee095c7 100644 +--- a/unix/fcitx/fcitx_mozc.h ++++ b/unix/fcitx/fcitx_mozc.h +@@ -118,6 +118,10 @@ public: + + bool SendCommand(const mozc::commands::SessionCommand& session_command, mozc::commands::Output* new_output); + ++ void SetUsage(const std::string& title, const std::string& description); ++ ++ std::pair GetUsage(); ++ + private: + friend class FcitxMozcTest; + +@@ -153,6 +157,8 @@ private: + + FcitxUIMenu compositionMenu; + FcitxUIMenu toolMenu; ++ string description; ++ string title; + + DISALLOW_COPY_AND_ASSIGN ( FcitxMozc ); + }; +diff --git a/unix/fcitx/mozc_response_parser.cc b/src/unix/fcitx/mozc_response_parser.cc +index 3c62197..f9d637b 100755 +--- a/unix/fcitx/mozc_response_parser.cc ++++ b/unix/fcitx/mozc_response_parser.cc +@@ -183,6 +183,8 @@ bool MozcResponseParser::ParseResponse(const mozc::commands::Output &response, + return false; + } + ++ fcitx_mozc->SetUsage("", ""); ++ + UpdateDeletionRange(response, fcitx_mozc); + + // We should check the mode field first since the response for a +@@ -303,11 +305,6 @@ void MozcResponseParser::ParseCandidates( + if (focused_index != -1 && index == focused_index) { + local_index = i; + type = MSG_FIRSTCAND; +- +- if (candidate.has_information_id()) { +- map >::iterator it = +- usage_map.find(candidate.information_id()); +- } + } else { + type = MSG_OTHER; + } +@@ -340,6 +337,7 @@ void MozcResponseParser::ParseCandidates( + value += CreateDescriptionString( + candidate.annotation().description()); + } ++ + if (use_annotation_ && focused_index != -1 && index == focused_index) { + local_index = i; + type = MSG_FIRSTCAND; +@@ -347,8 +345,10 @@ void MozcResponseParser::ParseCandidates( + if (candidate.has_information_id()) { + map >::iterator it = + usage_map.find(candidate.information_id()); +- value += CreateDescriptionString( +- it->second.second); ++ if (it != usage_map.end()) { ++ fcitx_mozc->SetUsage(it->second.first, it->second.second); ++ } ++ value += CreateDescriptionString(_("Press Ctrl+Alt+H to show usages.")); + } + } + +-- +1.7.1 + diff -Nru mozc-1.11.1502.102/debian/patches/caption.patch mozc-1.11.1522.102/debian/patches/caption.patch --- mozc-1.11.1502.102/debian/patches/caption.patch 1970-01-01 00:00:00.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/caption.patch 2013-08-24 15:38:34.000000000 +0000 @@ -0,0 +1,139 @@ +From 7d458467e8857a5fa3fa14b95eb2a069f9fc7dca Mon Sep 17 00:00:00 2001 +From: Weng Xuetian +Date: Fri, 23 Aug 2013 15:58:20 -0400 +Subject: [PATCH] [mozc] try to show usage, add a substr protection. 
+ +--- + unix/fcitx/mozc_response_parser.cc | 65 ++++++++++++++++++++++-------- + unix/fcitx/surrounding_text_util.cc | 8 ++++ + 2 files changed, 55 insertions(+), 18 deletions(-) + +diff --git a/unix/fcitx/mozc_response_parser.cc b/src/unix/fcitx/mozc_response_parser.cc +index 4753d6b..3c62197 100755 +--- a/unix/fcitx/mozc_response_parser.cc ++++ b/unix/fcitx/mozc_response_parser.cc +@@ -276,6 +276,18 @@ void MozcResponseParser::ParseCandidates( + FcitxCandidateWordSetPageSize(candList, 9); + FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); + ++ map > usage_map; ++ if (candidates.has_usages()) { ++ const commands::InformationList& usages = candidates.usages(); ++ for (size_t i = 0; i < usages.information().size(); ++i) { ++ const commands::Information& information = usages.information(i); ++ if (!information.has_id() || !information.has_description()) ++ continue; ++ usage_map[information.id()].first = information.title(); ++ usage_map[information.id()].second = information.description(); ++ } ++ } ++ + #define EMPTY_STR_CHOOSE "\0\0\0\0\0\0\0\0\0\0" + std::vector choose; + +@@ -285,14 +297,20 @@ void MozcResponseParser::ParseCandidates( + focused_index = candidates.focused_index(); + } + for (int i = 0; i < candidates.candidate_size(); ++i) { +- const uint32 index = candidates.candidate(i).index(); ++ const commands::Candidates::Candidate& candidate = candidates.candidate(i); ++ const uint32 index = candidate.index(); + FcitxMessageType type; + if (focused_index != -1 && index == focused_index) { + local_index = i; + type = MSG_FIRSTCAND; +- } +- else ++ ++ if (candidate.has_information_id()) { ++ map >::iterator it = ++ usage_map.find(candidate.information_id()); ++ } ++ } else { + type = MSG_OTHER; ++ } + int32* id = (int32*) fcitx_utils_malloc0(sizeof(int32)); + FcitxCandidateWord candWord; + candWord.callback = FcitxMozcGetCandidateWord; +@@ -305,33 +323,44 @@ void MozcResponseParser::ParseCandidates( + + string value; + if (use_annotation_ && +- candidates.candidate(i).has_annotation() && +- candidates.candidate(i).annotation().has_prefix()) { +- value = candidates.candidate(i).annotation().prefix(); ++ candidate.has_annotation() && ++ candidate.annotation().has_prefix()) { ++ value = candidate.annotation().prefix(); + } +- value += candidates.candidate(i).value(); ++ value += candidate.value(); + if (use_annotation_ && +- candidates.candidate(i).has_annotation() && +- candidates.candidate(i).annotation().has_suffix()) { +- value += candidates.candidate(i).annotation().suffix(); ++ candidate.has_annotation() && ++ candidate.annotation().has_suffix()) { ++ value += candidate.annotation().suffix(); + } + if (use_annotation_ && +- candidates.candidate(i).has_annotation() && +- candidates.candidate(i).annotation().has_description()) { ++ candidate.has_annotation() && ++ candidate.annotation().has_description()) { + // Display descriptions ([HALF][KATAKANA], [GREEK], [Black square], etc). 
+ value += CreateDescriptionString( +- candidates.candidate(i).annotation().description()); ++ candidate.annotation().description()); ++ } ++ if (use_annotation_ && focused_index != -1 && index == focused_index) { ++ local_index = i; ++ type = MSG_FIRSTCAND; ++ ++ if (candidate.has_information_id()) { ++ map >::iterator it = ++ usage_map.find(candidate.information_id()); ++ value += CreateDescriptionString( ++ it->second.second); ++ } + } + +- if (candidates.candidate(i).has_annotation() && +- candidates.candidate(i).annotation().has_shortcut()) { +- choose.push_back(candidates.candidate(i).annotation().shortcut().c_str()[0]); ++ if (candidate.has_annotation() && ++ candidate.annotation().has_shortcut()) { ++ choose.push_back(candidate.annotation().shortcut().c_str()[0]); + } + + candWord.strWord = strdup(value.c_str()); + +- if (candidates.candidate(i).has_id()) { +- const int32 cid = candidates.candidate(i).id(); ++ if (candidate.has_id()) { ++ const int32 cid = candidate.id(); + DCHECK_NE(kBadCandidateId, cid) << "Unexpected id is passed."; + *id = cid; + } else { +diff --git a/unix/fcitx/surrounding_text_util.cc b/src/unix/fcitx/surrounding_text_util.cc +index 09437e2..df7ff32 100644 +--- a/unix/fcitx/surrounding_text_util.cc ++++ b/unix/fcitx/surrounding_text_util.cc +@@ -229,6 +229,14 @@ bool GetSurroundingText(FcitxInstance* instance, + + const uint32 selection_start = min(cursor_pos, anchor_pos); + const uint32 selection_length = abs(info->relative_selected_length); ++ ++ if (selection_start > surrounding_text.length()) { ++ return false; ++ } ++ if (selection_start + selection_length > surrounding_text.length()) { ++ return false; ++ } ++ + info->preceding_text = surrounding_text.substr(0, selection_start); + Util::SubString(surrounding_text, + selection_start, +-- +1.7.1 + diff -Nru mozc-1.11.1502.102/debian/patches/fcitx-mozc.patch mozc-1.11.1522.102/debian/patches/fcitx-mozc.patch --- mozc-1.11.1502.102/debian/patches/fcitx-mozc.patch 2013-08-06 13:59:17.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/fcitx-mozc.patch 2013-08-30 12:55:08.000000000 +0000 @@ -1,9 +1,9 @@ -diff --git a/unix/fcitx/eim.cc b/src/unix/fcitx/eim.cc +diff --git a/unix/fcitx/eim.cc b/unix/fcitx/eim.cc new file mode 100644 -index 0000000..eec6fde +index 0000000..a39ab01 --- /dev/null +++ b/unix/fcitx/eim.cc -@@ -0,0 +1,201 @@ +@@ -0,0 +1,267 @@ +// Copyright 2012~2013, Weng Xuetian +// All rights reserved. 
+// @@ -40,6 +40,7 @@ +#include +#include +#include ++#include +#include +#include "fcitx_mozc.h" +#include "mozc_connection.h" @@ -47,6 +48,7 @@ + +typedef struct _FcitxMozcState { + mozc::fcitx::FcitxMozc* mozc; ++ int inUsageState; +} FcitxMozcState; + + @@ -140,11 +142,69 @@ + free(mozcState); +} + ++static const FcitxHotkey MOZC_CTRL_ALT_H[2] = { ++ {NULL, FcitxKey_H, FcitxKeyState_Ctrl_Alt}, ++ {NULL, FcitxKey_None, 0} ++}; ++ +INPUT_RETURN_VALUE FcitxMozcDoInput(void* arg, FcitxKeySym _sym, unsigned int _state) +{ + FcitxMozcState* mozcState = (FcitxMozcState*) arg; + FcitxInstance* instance = mozcState->mozc->GetInstance(); + FcitxInputState* input = FcitxInstanceGetInputState(mozcState->mozc->GetInstance()); ++ ++ if (mozcState->inUsageState) { ++ if (FcitxHotkeyIsHotKey(_sym, _state, FCITX_ESCAPE)) { ++ mozcState->inUsageState = false; ++ // send a dummy key to let server send us the candidate info back without side effect ++ mozcState->mozc->process_key_event(FcitxKey_VoidSymbol, 0, 0, CheckLayout(instance), false); ++ return IRV_DISPLAY_CANDWORDS; ++ } else { ++ return IRV_DO_NOTHING; ++ } ++ } ++ ++ if (FcitxHotkeyIsHotKey(_sym, _state, MOZC_CTRL_ALT_H)) { ++ pair< string, string > usage = mozcState->mozc->GetUsage(); ++ if (usage.first.size() != 0 || usage.second.size() != 0) { ++ mozcState->inUsageState = true; ++ FcitxCandidateWordList* candList = FcitxInputStateGetCandidateList(mozcState->mozc->GetInputState()); ++ ++ // clear preedit, but keep client preedit ++ FcitxMessages* preedit = FcitxInputStateGetPreedit(input); ++ FcitxMessagesSetMessageCount(preedit, 0); ++ FcitxInputStateSetShowCursor(input, false); ++ ++ // clear aux ++ FcitxMessages* auxUp = FcitxInputStateGetAuxUp(input); ++ FcitxMessages* auxDown = FcitxInputStateGetAuxDown(input); ++ FcitxMessagesSetMessageCount(auxUp, 0); ++ FcitxMessagesSetMessageCount(auxDown, 0); ++ ++ // clear candidate table ++ FcitxCandidateWordReset(candList); ++ FcitxCandidateWordSetPageSize(candList, 9); ++ FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); ++ FcitxCandidateWordSetChoose(candList, "\0\0\0\0\0\0\0\0\0\0"); ++ FcitxMessagesAddMessageAtLast(preedit, MSG_TIPS, "%s [%s]", usage.first.c_str(), _("Press Escape to go back")); ++ ++ UT_array* lines = fcitx_utils_split_string(usage.second.c_str(), '\n'); ++ utarray_foreach(line, lines, char*) { ++ FcitxCandidateWord candWord; ++ candWord.callback = NULL; ++ candWord.extraType = MSG_OTHER; ++ candWord.strExtra = NULL; ++ candWord.priv = NULL; ++ candWord.strWord = strdup(*line); ++ candWord.wordType = MSG_OTHER; ++ candWord.owner = NULL; ++ FcitxCandidateWordAppend(candList, &candWord); ++ } ++ utarray_free(lines); ++ return IRV_DISPLAY_MESSAGE; ++ } ++ } ++ + FCITX_UNUSED(_sym); + FCITX_UNUSED(_state); + FcitxKeySym sym = (FcitxKeySym) FcitxInputStateGetKeySym(input); @@ -164,6 +224,11 @@ + FcitxInputState* input = FcitxInstanceGetInputState(mozcState->mozc->GetInstance()); + FCITX_UNUSED(_sym); + FCITX_UNUSED(_state); ++ ++ if (mozcState->inUsageState) { ++ return IRV_DONOT_PROCESS; ++ } ++ + FcitxKeySym sym = (FcitxKeySym) FcitxInputStateGetKeySym(input); + uint32 keycode = FcitxInputStateGetKeyCode(input); + uint32 state = FcitxInputStateGetKeyState(input); @@ -196,6 +261,7 @@ +void FcitxMozcResetIM(void* arg) +{ + FcitxMozcState* mozcState = (FcitxMozcState*) arg; ++ mozcState->inUsageState = false; + mozcState->mozc->resetim(); +} + @@ -205,7 +271,7 @@ + mozcState->mozc->reset(); + +} -diff --git a/unix/fcitx/fcitx-mozc.conf b/src/unix/fcitx/fcitx-mozc.conf +diff 
--git a/unix/fcitx/fcitx-mozc.conf b/unix/fcitx/fcitx-mozc.conf new file mode 100644 index 0000000..65d0e11 --- /dev/null @@ -222,7 +288,7 @@ +SubConfig= +IMRegisterMethod=ConfigFile +LoadLocal=True -diff --git a/unix/fcitx/fcitx.gyp b/src/unix/fcitx/fcitx.gyp +diff --git a/unix/fcitx/fcitx.gyp b/unix/fcitx/fcitx.gyp new file mode 100644 index 0000000..4744279 --- /dev/null @@ -331,7 +397,7 @@ + }, + ], +} -diff --git a/unix/fcitx/fcitx_key_event_handler.cc b/src/unix/fcitx/fcitx_key_event_handler.cc +diff --git a/unix/fcitx/fcitx_key_event_handler.cc b/unix/fcitx/fcitx_key_event_handler.cc new file mode 100644 index 0000000..608c871 --- /dev/null @@ -583,7 +649,7 @@ + +} // namespace ibus +} // namespace mozc -diff --git a/unix/fcitx/fcitx_key_event_handler.h b/src/unix/fcitx/fcitx_key_event_handler.h +diff --git a/unix/fcitx/fcitx_key_event_handler.h b/unix/fcitx/fcitx_key_event_handler.h new file mode 100644 index 0000000..bc3043b --- /dev/null @@ -668,12 +734,12 @@ +} // namespace mozc + +#endif // MOZC_UNIX_FCITX_KEY_EVENT_HANDLER_H_ -diff --git a/unix/fcitx/fcitx_key_translator.cc b/src/unix/fcitx/fcitx_key_translator.cc +diff --git a/unix/fcitx/fcitx_key_translator.cc b/unix/fcitx/fcitx_key_translator.cc new file mode 100644 -index 0000000..bfeca8c +index 0000000..62a87d9 --- /dev/null +++ b/unix/fcitx/fcitx_key_translator.cc -@@ -0,0 +1,527 @@ +@@ -0,0 +1,528 @@ +// Copyright 2010-2012, Google Inc. +// Copyright 2012~2013, Weng Xuetian +// All rights reserved. @@ -717,6 +783,7 @@ + uint32 from; + mozc::commands::KeyEvent::SpecialKey to; +} special_key_map[] = { ++ {FcitxKey_VoidSymbol, mozc::commands::KeyEvent::NO_SPECIALKEY}, + {FcitxKey_space, mozc::commands::KeyEvent::SPACE}, + {FcitxKey_Return, mozc::commands::KeyEvent::ENTER}, + {FcitxKey_Left, mozc::commands::KeyEvent::LEFT}, @@ -1201,7 +1268,7 @@ + +} // namespace ibus +} // namespace mozc -diff --git a/unix/fcitx/fcitx_key_translator.h b/src/unix/fcitx/fcitx_key_translator.h +diff --git a/unix/fcitx/fcitx_key_translator.h b/unix/fcitx/fcitx_key_translator.h new file mode 100644 index 0000000..b3a6f18 --- /dev/null @@ -1328,12 +1395,12 @@ +} // namespace mozc + +#endif // MOZC_UNIX_FCITX_FCITX_KEY_TRANSLATOR_H_ -diff --git a/unix/fcitx/fcitx_mozc.cc b/src/unix/fcitx/fcitx_mozc.cc +diff --git a/unix/fcitx/fcitx_mozc.cc b/unix/fcitx/fcitx_mozc.cc new file mode 100644 -index 0000000..9d9428f +index 0000000..08be04e --- /dev/null +++ b/unix/fcitx/fcitx_mozc.cc -@@ -0,0 +1,544 @@ +@@ -0,0 +1,571 @@ +// Copyright 2012~2013, Weng Xuetian +// All rights reserved. +// @@ -1555,6 +1622,22 @@ + } +} + ++bool FcitxMozc::paging(bool prev) ++{ ++ VLOG ( 1 ) << "paging"; ++ string error; ++ mozc::commands::SessionCommand::CommandType command = ++ prev ? mozc::commands::SessionCommand::CONVERT_PREV_PAGE ++ : mozc::commands::SessionCommand::CONVERT_NEXT_PAGE; ++ mozc::commands::Output raw_response; ++ if ( connection_->TrySendCommand ( ++ command, &raw_response, &error ) ) ++ { ++ parser_->ParseResponse ( raw_response, this ); ++ return true; ++ } ++ return false; ++} + +// This function is called from SCIM framework when the ic gets focus. 
+void FcitxMozc::init() @@ -1875,15 +1958,26 @@ + return empty_string; +} + ++void FcitxMozc::SetUsage(const string& title_, const string& description_) ++{ ++ title = title_; ++ description = description_; ++} ++ ++pair< string, string > FcitxMozc::GetUsage() ++{ ++ return make_pair(title, description); ++} ++ +} // namespace fcitx + +} // namespace mozc_unix_scim -diff --git a/unix/fcitx/fcitx_mozc.h b/src/unix/fcitx/fcitx_mozc.h +diff --git a/unix/fcitx/fcitx_mozc.h b/unix/fcitx/fcitx_mozc.h new file mode 100644 -index 0000000..dff6de4 +index 0000000..b3a5128 --- /dev/null +++ b/unix/fcitx/fcitx_mozc.h -@@ -0,0 +1,165 @@ +@@ -0,0 +1,173 @@ +// Copyright 2012~2013, Weng Xuetian +// All rights reserved. +// @@ -1969,6 +2063,7 @@ + void reset(); + void init(); + void focus_out(); ++ bool paging(bool prev); + + // Functions called by the MozcResponseParser class to update UI. + @@ -2004,6 +2099,12 @@ + + bool SendCommand(const mozc::commands::SessionCommand& session_command, mozc::commands::Output* new_output); + ++ void SetUsage(const std::string& title, const std::string& description); ++ ++ std::pair GetUsage(); ++ ++ void DrawAll(); ++ +private: + friend class FcitxMozcTest; + @@ -2017,7 +2118,6 @@ + bool ParseResponse ( const mozc::commands::Output &request ); + + void ClearAll(); -+ void DrawAll(); + void DrawPreeditInfo(); + void DrawAux(); + @@ -2039,6 +2139,8 @@ + + FcitxUIMenu compositionMenu; + FcitxUIMenu toolMenu; ++ string description; ++ string title; + + DISALLOW_COPY_AND_ASSIGN ( FcitxMozc ); +}; @@ -2049,7 +2151,7 @@ + +#endif // MOZC_UNIX_FCITX_FCITX_MOZC_H_ + -diff --git a/unix/fcitx/gen_fcitx_mozc_i18n.sh b/src/unix/fcitx/gen_fcitx_mozc_i18n.sh +diff --git a/unix/fcitx/gen_fcitx_mozc_i18n.sh b/unix/fcitx/gen_fcitx_mozc_i18n.sh new file mode 100755 index 0000000..b730b82 --- /dev/null @@ -2067,7 +2169,7 @@ + msgfmt "$pofile" -o "$1/${pofile/po/mo}" +done \ No newline at end of file -diff --git a/unix/fcitx/mozc.conf b/src/unix/fcitx/mozc.conf +diff --git a/unix/fcitx/mozc.conf b/unix/fcitx/mozc.conf new file mode 100644 index 0000000..ad19230 --- /dev/null @@ -2080,7 +2182,7 @@ +Priority=1 +LangCode=ja +Parent=fcitx-mozc -diff --git a/unix/fcitx/mozc_connection.cc b/src/unix/fcitx/mozc_connection.cc +diff --git a/unix/fcitx/mozc_connection.cc b/unix/fcitx/mozc_connection.cc new file mode 100755 index 0000000..65fc446 --- /dev/null @@ -2282,7 +2384,7 @@ +} // namespace fcitx + +} // namespace mozc -diff --git a/unix/fcitx/mozc_connection.h b/src/unix/fcitx/mozc_connection.h +diff --git a/unix/fcitx/mozc_connection.h b/unix/fcitx/mozc_connection.h new file mode 100755 index 0000000..d454632 --- /dev/null @@ -2435,12 +2537,12 @@ +} // namespace mozc + +#endif // MOZC_UNIX_SCIM_MOZC_CONNECTION_H_ -diff --git a/unix/fcitx/mozc_response_parser.cc b/src/unix/fcitx/mozc_response_parser.cc +diff --git a/unix/fcitx/mozc_response_parser.cc b/unix/fcitx/mozc_response_parser.cc new file mode 100755 -index 0000000..4753d6b +index 0000000..e1d2739 --- /dev/null +++ b/unix/fcitx/mozc_response_parser.cc -@@ -0,0 +1,403 @@ +@@ -0,0 +1,456 @@ +// Copyright 2010-2012, Google Inc. +// Copyright 2012~2013, Weng Xuetian +// All rights reserved. 
@@ -2626,6 +2728,8 @@ + return false; + } + ++ fcitx_mozc->SetUsage("", ""); ++ + UpdateDeletionRange(response, fcitx_mozc); + + // We should check the mode field first since the response for a @@ -2684,9 +2788,17 @@ + } +} + ++static boolean FcitxMozcPaging(void* arg, boolean prev) ++{ ++ FcitxMozc* mozc = static_cast(arg); ++ return mozc->paging(prev); ++} ++ +void MozcResponseParser::ParseCandidates( + const mozc::commands::Candidates &candidates, FcitxMozc *fcitx_mozc) const { + const commands::Footer &footer = candidates.footer(); ++ bool hasPrev = false; ++ bool hasNext = false; + if (candidates.has_footer()) { + string auxString; + if (footer.has_label()) { @@ -2710,6 +2822,16 @@ + candidates.size()); + DCHECK_GE(result, 0) << "snprintf in ComposeAuxiliaryText failed"; + auxString += index_buf; ++ ++ if (candidates.candidate_size() > 0) { ++ ++ if (candidates.candidate(0).index() > 0) { ++ hasPrev = true; ++ } ++ if (candidates.candidate(candidates.candidate_size() - 1).index() + 1 < candidates.size()) { ++ hasNext = true; ++ } ++ } + } + fcitx_mozc->SetAuxString(auxString); + } @@ -2717,7 +2839,24 @@ + FcitxCandidateWordList* candList = FcitxInputStateGetCandidateList(fcitx_mozc->GetInputState()); + FcitxCandidateWordReset(candList); + FcitxCandidateWordSetPageSize(candList, 9); -+ FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); ++ if (candidates.has_direction() && ++ candidates.direction() == commands::Candidates::HORIZONTAL) { ++ FcitxCandidateWordSetLayoutHint(candList, CLH_Horizontal); ++ } else { ++ FcitxCandidateWordSetLayoutHint(candList, CLH_Vertical); ++ } ++ ++ map > usage_map; ++ if (candidates.has_usages()) { ++ const commands::InformationList& usages = candidates.usages(); ++ for (size_t i = 0; i < usages.information().size(); ++i) { ++ const commands::Information& information = usages.information(i); ++ if (!information.has_id() || !information.has_description()) ++ continue; ++ usage_map[information.id()].first = information.title(); ++ usage_map[information.id()].second = information.description(); ++ } ++ } + +#define EMPTY_STR_CHOOSE "\0\0\0\0\0\0\0\0\0\0" + std::vector choose; @@ -2728,14 +2867,15 @@ + focused_index = candidates.focused_index(); + } + for (int i = 0; i < candidates.candidate_size(); ++i) { -+ const uint32 index = candidates.candidate(i).index(); ++ const commands::Candidates::Candidate& candidate = candidates.candidate(i); ++ const uint32 index = candidate.index(); + FcitxMessageType type; + if (focused_index != -1 && index == focused_index) { + local_index = i; + type = MSG_FIRSTCAND; -+ } -+ else ++ } else { + type = MSG_OTHER; ++ } + int32* id = (int32*) fcitx_utils_malloc0(sizeof(int32)); + FcitxCandidateWord candWord; + candWord.callback = FcitxMozcGetCandidateWord; @@ -2748,33 +2888,47 @@ + + string value; + if (use_annotation_ && -+ candidates.candidate(i).has_annotation() && -+ candidates.candidate(i).annotation().has_prefix()) { -+ value = candidates.candidate(i).annotation().prefix(); ++ candidate.has_annotation() && ++ candidate.annotation().has_prefix()) { ++ value = candidate.annotation().prefix(); + } -+ value += candidates.candidate(i).value(); ++ value += candidate.value(); + if (use_annotation_ && -+ candidates.candidate(i).has_annotation() && -+ candidates.candidate(i).annotation().has_suffix()) { -+ value += candidates.candidate(i).annotation().suffix(); ++ candidate.has_annotation() && ++ candidate.annotation().has_suffix()) { ++ value += candidate.annotation().suffix(); + } + if (use_annotation_ && -+ 
candidates.candidate(i).has_annotation() && -+ candidates.candidate(i).annotation().has_description()) { ++ candidate.has_annotation() && ++ candidate.annotation().has_description()) { + // Display descriptions ([HALF][KATAKANA], [GREEK], [Black square], etc). + value += CreateDescriptionString( -+ candidates.candidate(i).annotation().description()); ++ candidate.annotation().description()); ++ } ++ ++ if (use_annotation_ && focused_index != -1 && index == focused_index) { ++ local_index = i; ++ type = MSG_FIRSTCAND; ++ ++ if (candidate.has_information_id()) { ++ map >::iterator it = ++ usage_map.find(candidate.information_id()); ++ if (it != usage_map.end()) { ++ fcitx_mozc->SetUsage(it->second.first, it->second.second); ++ } ++ value += CreateDescriptionString(_("Press Ctrl+Alt+H to show usages.")); ++ } + } + -+ if (candidates.candidate(i).has_annotation() && -+ candidates.candidate(i).annotation().has_shortcut()) { -+ choose.push_back(candidates.candidate(i).annotation().shortcut().c_str()[0]); ++ if (candidate.has_annotation() && ++ candidate.annotation().has_shortcut()) { ++ choose.push_back(candidate.annotation().shortcut().c_str()[0]); + } + + candWord.strWord = strdup(value.c_str()); + -+ if (candidates.candidate(i).has_id()) { -+ const int32 cid = candidates.candidate(i).id(); ++ if (candidate.has_id()) { ++ const int32 cid = candidate.id(); + DCHECK_NE(kBadCandidateId, cid) << "Unexpected id is passed."; + *id = cid; + } else { @@ -2794,6 +2948,7 @@ + else + FcitxCandidateWordSetChoose(candList, EMPTY_STR_CHOOSE); + FcitxCandidateWordSetFocus(candList, local_index); ++ FcitxCandidateWordSetOverridePaging(candList, hasPrev, hasNext, FcitxMozcPaging, fcitx_mozc, NULL); +} + +static int GetRawCursorPos(const char * str, int upos) @@ -2844,7 +2999,7 @@ +} // namespace fcitx + +} // namespace mozc -diff --git a/unix/fcitx/mozc_response_parser.h b/src/unix/fcitx/mozc_response_parser.h +diff --git a/unix/fcitx/mozc_response_parser.h b/unix/fcitx/mozc_response_parser.h new file mode 100755 index 0000000..0975055 --- /dev/null @@ -2947,7 +3102,7 @@ +} // namespace mozc + +#endif // MOZC_UNIX_FCITX_MOZC_RESPONSE_PARSER_H_ -diff --git a/unix/fcitx/po/Messages.sh b/src/unix/fcitx/po/Messages.sh +diff --git a/unix/fcitx/po/Messages.sh b/unix/fcitx/po/Messages.sh new file mode 100755 index 0000000..be34171 --- /dev/null @@ -2986,12 +3141,12 @@ +echo "Cleaning up" +rm "${WDIR}/infiles.list" +echo "Done" -diff --git a/unix/fcitx/po/de.po b/src/unix/fcitx/po/de.po +diff --git a/unix/fcitx/po/de.po b/unix/fcitx/po/de.po new file mode 100644 -index 0000000..7b3b012 +index 0000000..c42eae5 --- /dev/null +++ b/unix/fcitx/po/de.po -@@ -0,0 +1,80 @@ +@@ -0,0 +1,88 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
@@ -3002,9 +3157,9 @@ +msgstr "" +"Project-Id-Version: fcitx\n" +"Report-Msgid-Bugs-To: fcitx-dev@googlegroups.com\n" -+"POT-Creation-Date: 2013-06-26 22:02-0400\n" -+"PO-Revision-Date: 2013-06-01 10:18+0000\n" -+"Last-Translator: mar well \n" ++"POT-Creation-Date: 2013-08-25 18:02-0400\n" ++"PO-Revision-Date: 2013-08-25 10:07+0000\n" ++"Last-Translator: Xuetian Weng \n" +"Language-Team: German (http://www.transifex.com/projects/p/fcitx/language/" +"de/)\n" +"Language: de\n" @@ -3013,6 +3168,10 @@ +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + ++#: ../eim.cc:183 ++msgid "Press Escape to go back" ++msgstr "" ++ +#: ../fcitx_mozc.cc:68 +msgid "Direct" +msgstr "Direkt" @@ -3072,12 +3231,16 @@ +#: ../fcitx_mozc.cc:497 +msgid "About Mozc" +msgstr "Über Mozc" -diff --git a/unix/fcitx/po/fcitx-mozc.pot b/src/unix/fcitx/po/fcitx-mozc.pot ++ ++#: ../mozc_response_parser.cc:351 ++msgid "Press Ctrl+Alt+H to show usages." ++msgstr "" +diff --git a/unix/fcitx/po/fcitx-mozc.pot b/unix/fcitx/po/fcitx-mozc.pot new file mode 100644 -index 0000000..5b8d639 +index 0000000..79371ba --- /dev/null +++ b/unix/fcitx/po/fcitx-mozc.pot -@@ -0,0 +1,78 @@ +@@ -0,0 +1,86 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. @@ -3088,7 +3251,7 @@ +msgstr "" +"Project-Id-Version: PACKAGE VERSION\n" +"Report-Msgid-Bugs-To: fcitx-dev@googlegroups.com\n" -+"POT-Creation-Date: 2013-04-25 00:02-0400\n" ++"POT-Creation-Date: 2013-08-25 18:02-0400\n" +"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" +"Last-Translator: FULL NAME \n" +"Language-Team: LANGUAGE \n" @@ -3097,6 +3260,10 @@ +"Content-Type: text/plain; charset=CHARSET\n" +"Content-Transfer-Encoding: 8bit\n" + ++#: ../eim.cc:183 ++msgid "Press Escape to go back" ++msgstr "" ++ +#: ../fcitx_mozc.cc:68 +msgid "Direct" +msgstr "" @@ -3156,25 +3323,30 @@ +#: ../fcitx_mozc.cc:497 +msgid "About Mozc" +msgstr "" -diff --git a/unix/fcitx/po/ja.po b/src/unix/fcitx/po/ja.po ++ ++#: ../mozc_response_parser.cc:351 ++msgid "Press Ctrl+Alt+H to show usages." ++msgstr "" +diff --git a/unix/fcitx/po/ja.po b/unix/fcitx/po/ja.po new file mode 100644 -index 0000000..9e93539 +index 0000000..7867054 --- /dev/null +++ b/unix/fcitx/po/ja.po -@@ -0,0 +1,81 @@ +@@ -0,0 +1,90 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: ++# いくや あわしろ , 2013 +# いくや あわしろ , 2012 +# Xuetian Weng , 2012 +msgid "" +msgstr "" +"Project-Id-Version: fcitx\n" +"Report-Msgid-Bugs-To: fcitx-dev@googlegroups.com\n" -+"POT-Creation-Date: 2013-04-25 00:02-0400\n" -+"PO-Revision-Date: 2013-04-03 11:47+0000\n" ++"POT-Creation-Date: 2013-08-27 12:02-0400\n" ++"PO-Revision-Date: 2013-08-27 14:10+0000\n" +"Last-Translator: いくや あわしろ \n" +"Language-Team: Japanese (http://www.transifex.com/projects/p/fcitx/language/" +"ja/)\n" @@ -3184,6 +3356,10 @@ +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + ++#: ../eim.cc:183 ++msgid "Press Escape to go back" ++msgstr "Escキーを押して戻る" ++ +#: ../fcitx_mozc.cc:68 +msgid "Direct" +msgstr "直接入力" @@ -3243,24 +3419,30 @@ +#: ../fcitx_mozc.cc:497 +msgid "About Mozc" +msgstr "Mozc について" -diff --git a/unix/fcitx/po/zh_CN.po b/src/unix/fcitx/po/zh_CN.po ++ ++#: ../mozc_response_parser.cc:351 ++msgid "Press Ctrl+Alt+H to show usages." 
++msgstr "Ctrl+Alt+H キーを押して用例を表示" +diff --git a/unix/fcitx/po/zh_CN.po b/unix/fcitx/po/zh_CN.po new file mode 100644 -index 0000000..e0c395f +index 0000000..0cfc3da --- /dev/null +++ b/unix/fcitx/po/zh_CN.po -@@ -0,0 +1,79 @@ +@@ -0,0 +1,89 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. +# +# Translators: ++# Xuetian Weng , 2013 +# Xuetian Weng , 2012 ++# Xuetian Weng , 2012-2013 +msgid "" +msgstr "" +"Project-Id-Version: fcitx\n" +"Report-Msgid-Bugs-To: fcitx-dev@googlegroups.com\n" -+"POT-Creation-Date: 2013-04-25 00:02-0400\n" -+"PO-Revision-Date: 2012-04-07 03:47+0000\n" ++"POT-Creation-Date: 2013-08-28 12:59-0400\n" ++"PO-Revision-Date: 2013-08-28 17:01+0000\n" +"Last-Translator: Xuetian Weng \n" +"Language-Team: Chinese (China) \n" +"Language: zh_CN\n" @@ -3269,6 +3451,10 @@ +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + ++#: ../eim.cc:183 ++msgid "Press Escape to go back" ++msgstr "按下 Escape 返回" ++ +#: ../fcitx_mozc.cc:68 +msgid "Direct" +msgstr "直接键盘输入" @@ -3293,47 +3479,51 @@ +msgid "Half Katakana" +msgstr "半角片假名" + -+#: ../fcitx_mozc.cc:394 ../fcitx_mozc.cc:395 ../fcitx_mozc.cc:473 ++#: ../fcitx_mozc.cc:410 ../fcitx_mozc.cc:411 ../fcitx_mozc.cc:489 +msgid "Composition Mode" +msgstr "编辑模式" + -+#: ../fcitx_mozc.cc:405 ../fcitx_mozc.cc:406 ++#: ../fcitx_mozc.cc:421 ../fcitx_mozc.cc:422 +msgid "Tool" +msgstr "工具" + -+#: ../fcitx_mozc.cc:486 ++#: ../fcitx_mozc.cc:502 +msgid "Mozc Tool" +msgstr "Mozc 工具" + -+#: ../fcitx_mozc.cc:492 ++#: ../fcitx_mozc.cc:508 +msgid "Configuration Tool" +msgstr "配置工具" + -+#: ../fcitx_mozc.cc:493 ++#: ../fcitx_mozc.cc:509 +msgid "Dictionary Tool" +msgstr "词典工具" + -+#: ../fcitx_mozc.cc:494 ++#: ../fcitx_mozc.cc:510 +msgid "Hand Writing" +msgstr "手写输入" + -+#: ../fcitx_mozc.cc:495 ++#: ../fcitx_mozc.cc:511 +msgid "Character Palette" +msgstr "字符映射表" + -+#: ../fcitx_mozc.cc:496 ++#: ../fcitx_mozc.cc:512 +msgid "Add Word" +msgstr "添加单词" + -+#: ../fcitx_mozc.cc:497 ++#: ../fcitx_mozc.cc:513 +msgid "About Mozc" +msgstr "关于 Mozc" -diff --git a/unix/fcitx/po/zh_TW.po b/src/unix/fcitx/po/zh_TW.po ++ ++#: ../mozc_response_parser.cc:374 ++msgid "Press Ctrl+Alt+H to show usages." ++msgstr "按下 Ctrl+Alt+H 显示用法。" +diff --git a/unix/fcitx/po/zh_TW.po b/unix/fcitx/po/zh_TW.po new file mode 100644 -index 0000000..4843ddc +index 0000000..62bc891 --- /dev/null +++ b/unix/fcitx/po/zh_TW.po -@@ -0,0 +1,81 @@ +@@ -0,0 +1,89 @@ +# SOME DESCRIPTIVE TITLE. +# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER +# This file is distributed under the same license as the PACKAGE package. 
@@ -3345,8 +3535,8 @@ +msgstr "" +"Project-Id-Version: fcitx\n" +"Report-Msgid-Bugs-To: fcitx-dev@googlegroups.com\n" -+"POT-Creation-Date: 2013-04-25 00:02-0400\n" -+"PO-Revision-Date: 2012-04-07 03:47+0000\n" ++"POT-Creation-Date: 2013-08-25 18:02-0400\n" ++"PO-Revision-Date: 2013-08-25 10:07+0000\n" +"Last-Translator: Xuetian Weng \n" +"Language-Team: Chinese (Taiwan) (http://www.transifex.com/projects/p/fcitx/" +"language/zh_TW/)\n" @@ -3356,6 +3546,10 @@ +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=1; plural=0;\n" + ++#: ../eim.cc:183 ++msgid "Press Escape to go back" ++msgstr "" ++ +#: ../fcitx_mozc.cc:68 +msgid "Direct" +msgstr "直接鍵盤輸入" @@ -3415,12 +3609,16 @@ +#: ../fcitx_mozc.cc:497 +msgid "About Mozc" +msgstr "關於 Mozc" -diff --git a/unix/fcitx/surrounding_text_util.cc b/src/unix/fcitx/surrounding_text_util.cc ++ ++#: ../mozc_response_parser.cc:351 ++msgid "Press Ctrl+Alt+H to show usages." ++msgstr "" +diff --git a/unix/fcitx/surrounding_text_util.cc b/unix/fcitx/surrounding_text_util.cc new file mode 100644 -index 0000000..09437e2 +index 0000000..df7ff32 --- /dev/null +++ b/unix/fcitx/surrounding_text_util.cc -@@ -0,0 +1,243 @@ +@@ -0,0 +1,251 @@ +// Copyright 2010-2013, Google Inc. +// All rights reserved. +// @@ -3652,6 +3850,14 @@ + + const uint32 selection_start = min(cursor_pos, anchor_pos); + const uint32 selection_length = abs(info->relative_selected_length); ++ ++ if (selection_start > surrounding_text.length()) { ++ return false; ++ } ++ if (selection_start + selection_length > surrounding_text.length()) { ++ return false; ++ } ++ + info->preceding_text = surrounding_text.substr(0, selection_start); + Util::SubString(surrounding_text, + selection_start, @@ -3664,7 +3870,7 @@ + +} // namespace fcitx +} // namespace mozc -diff --git a/unix/fcitx/surrounding_text_util.h b/src/unix/fcitx/surrounding_text_util.h +diff --git a/unix/fcitx/surrounding_text_util.h b/unix/fcitx/surrounding_text_util.h new file mode 100644 index 0000000..5bf661d --- /dev/null diff -Nru mozc-1.11.1502.102/debian/patches/series mozc-1.11.1522.102/debian/patches/series --- mozc-1.11.1502.102/debian/patches/series 2013-08-06 13:43:02.000000000 +0000 +++ mozc-1.11.1522.102/debian/patches/series 2013-08-30 12:58:19.000000000 +0000 @@ -1,6 +1,3 @@ #support-kfreebsd.patch uim-mozc.patch fcitx-mozc.patch -#protobuf-250.patch -#fix_use_libprotobuf_3.patch -#show-aux-in-a-better-place.patch diff -Nru mozc-1.11.1502.102/dictionary/suppression_dictionary.h mozc-1.11.1522.102/dictionary/suppression_dictionary.h --- mozc-1.11.1502.102/dictionary/suppression_dictionary.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/suppression_dictionary.h 2013-08-28 05:26:12.000000000 +0000 @@ -44,7 +44,7 @@ SuppressionDictionary(); virtual ~SuppressionDictionary(); - // Lock dictioanry. + // Lock dictionary. // call Lock() before calling AddWord() or Clear(); // When the dictionary is locked, Supress() return false. 
// @@ -86,6 +86,6 @@ bool has_value_empty_; Mutex mutex_; }; -} +} // namespace mozc #endif // MOZC_DICTIONARY_SUPPRESSION_DICTIONARY_H_ diff -Nru mozc-1.11.1502.102/dictionary/system/system_dictionary_test.cc mozc-1.11.1522.102/dictionary/system/system_dictionary_test.cc --- mozc-1.11.1502.102/dictionary/system/system_dictionary_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/system/system_dictionary_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -575,7 +575,7 @@ { "\xE3\x81\xB0\xE3\x81\xB3\xE3\x81\xB6", "\xE3\x83\x90\xE3\x83\x93\xE3\x83\x96" }, }; - const size_t kKeyValuesSize = ARRAYSIZE_UNSAFE(kKeyValues); + const size_t kKeyValuesSize = arraysize(kKeyValues); scoped_ptr tokens[kKeyValuesSize]; vector source_tokens(kKeyValuesSize); for (size_t i = 0; i < kKeyValuesSize; ++i) { @@ -604,7 +604,7 @@ EXPECT_TRUE(result.end() != result.find(entry)); } // The others should not be found. - for (size_t i = 5; i < ARRAYSIZE_UNSAFE(kKeyValues); ++i) { + for (size_t i = 5; i < arraysize(kKeyValues); ++i) { const pair entry( kKeyValues[i].key, kKeyValues[i].value); EXPECT_TRUE(result.end() == result.find(entry)); diff -Nru mozc-1.11.1502.102/dictionary/system/value_dictionary.cc mozc-1.11.1522.102/dictionary/system/value_dictionary.cc --- mozc-1.11.1502.102/dictionary/system/value_dictionary.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/system/value_dictionary.cc 2013-08-28 05:26:12.000000000 +0000 @@ -245,7 +245,7 @@ return LookupPredictiveWithLimit(str, size, empty_limit_, allocator); } -// Value dictioanry is intended to use for prediction, +// Value dictionary is intended to use for prediction, // so we don't support LookupPrefix Node *ValueDictionary::LookupPrefixWithLimit( const char *str, int size, diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_importer_test.cc mozc-1.11.1522.102/dictionary/user_dictionary_importer_test.cc --- mozc-1.11.1502.102/dictionary/user_dictionary_importer_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_importer_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -417,8 +417,9 @@ vector entries; for (size_t j = 0; j < kSize[i]; ++j) { UserDictionaryImporter::RawEntry entry; - const string key = "key" + NumberUtil::SimpleItoa(j); - const string value = "value" + NumberUtil::SimpleItoa(j); + const string key = "key" + NumberUtil::SimpleItoa(static_cast(j)); + const string value = + "value" + NumberUtil::SimpleItoa(static_cast(j)); entry.key = key; entry.value = value; // entry.set_pos("名詞"); @@ -460,8 +461,9 @@ vector entries; for (size_t j = 0; j < kSize[i]; ++j) { UserDictionaryImporter::RawEntry entry; - const string key = "key" + NumberUtil::SimpleItoa(j); - const string value = "value" + NumberUtil::SimpleItoa(j); + const string key = "key" + NumberUtil::SimpleItoa(static_cast(j)); + const string value = + "value" + NumberUtil::SimpleItoa(static_cast(j)); entry.key = key; entry.value = value; // entry.set_pos("名詞"); @@ -503,8 +505,9 @@ vector entries; for (size_t j = 0; j < kSize[i]; ++j) { UserDictionaryImporter::RawEntry entry; - const string key = "key" + NumberUtil::SimpleItoa(j); - const string value = "value" + NumberUtil::SimpleItoa(j); + const string key = "key" + NumberUtil::SimpleItoa(static_cast(j)); + const string value = + "value" + NumberUtil::SimpleItoa(static_cast(j)); entry.key = key; entry.value = value; if (j % 2 == 0) { diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_session_handler.cc 
mozc-1.11.1522.102/dictionary/user_dictionary_session_handler.cc --- mozc-1.11.1502.102/dictionary/user_dictionary_session_handler.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_session_handler.cc 2013-08-28 05:26:12.000000000 +0000 @@ -558,11 +558,6 @@ uint64 id = kInvalidSessionId; while (true) { Util::GetRandomSequence(reinterpret_cast(&id), sizeof(id)); -#ifdef __native_client__ - // Because JavaScript does not support uint64. - // So we downsize the session id range from uint64 to uint32 in NaCl. - id = static_cast(id); -#endif // __native_client__ if (id != kInvalidSessionId && (session_.get() == NULL || session_id_ != id)) { diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_storage.h mozc-1.11.1522.102/dictionary/user_dictionary_storage.h --- mozc-1.11.1502.102/dictionary/user_dictionary_storage.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_storage.h 2013-08-28 05:26:12.000000000 +0000 @@ -172,7 +172,7 @@ // return mutable UserDictionary corresponding to dic_id UserDictionary *GetUserDictionary(uint64 dic_id); - // Searches a dictionary from a dictioanry name, and the dictionary id is + // Searches a dictionary from a dictionary name, and the dictionary id is // stored in "dic_id". // Returns false if the name is not found. bool GetUserDictionaryId(const string &dic_name, uint64 *dic_id); diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_storage_test.cc mozc-1.11.1522.102/dictionary/user_dictionary_storage_test.cc --- mozc-1.11.1502.102/dictionary/user_dictionary_storage_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_storage_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -206,8 +206,9 @@ const size_t dict_size = storage.dictionaries_size(); for (size_t i = 0; i < kDictionariesSize; ++i) { - EXPECT_TRUE(storage.CreateDictionary("test" + NumberUtil::SimpleItoa(i), - &id[i])); + EXPECT_TRUE(storage.CreateDictionary( + "test" + NumberUtil::SimpleItoa(static_cast(i)), + &id[i])); EXPECT_EQ(i + 1 + dict_size, storage.dictionaries_size()); } @@ -261,8 +262,9 @@ storage.Clear(); vector ids(100); for (size_t i = 0; i < ids.size(); ++i) { - EXPECT_TRUE(storage.CreateDictionary("test" + NumberUtil::SimpleItoa(i), - &ids[i])); + EXPECT_TRUE(storage.CreateDictionary( + "test" + NumberUtil::SimpleItoa(static_cast(i)), + &ids[i])); } vector alive; @@ -293,7 +295,7 @@ for (size_t i = 0; i < 1000; ++i) { UserDictionaryStorage::UserDictionaryEntry *entry = dic->add_entries(); - const string prefix = NumberUtil::SimpleItoa(i); + const string prefix = NumberUtil::SimpleItoa(static_cast(i)); // set empty fields randomly entry->set_key(prefix + "key"); entry->set_value(prefix + "value"); @@ -337,8 +339,8 @@ for (size_t i = 0; i < dic_size; ++i) { uint64 id = 0; EXPECT_TRUE( - storage1.CreateDictionary("test" + NumberUtil::SimpleItoa(i), - &id)); + storage1.CreateDictionary( + "test" + NumberUtil::SimpleItoa(static_cast(i)), &id)); const size_t entry_size = Util::Random(100) + 1; for (size_t j = 0; j < entry_size; ++j) { UserDictionaryStorage::UserDictionary *dic = diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_test.cc mozc-1.11.1522.102/dictionary/user_dictionary_test.cc --- mozc-1.11.1502.102/dictionary/user_dictionary_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -741,16 +741,20 @@ for (size_t j = 0; j < 10000; ++j) { UserDictionaryStorage::UserDictionaryEntry 
*entry = dic->add_entries(); - entry->set_key("no_suppress_key" + NumberUtil::SimpleItoa(j)); - entry->set_value("no_suppress_value" + NumberUtil::SimpleItoa(j)); + entry->set_key("no_suppress_key" + + NumberUtil::SimpleItoa(static_cast(j))); + entry->set_value("no_suppress_value" + + NumberUtil::SimpleItoa(static_cast(j))); entry->set_pos(user_dictionary::UserDictionary::NOUN); } for (size_t j = 0; j < 10; ++j) { UserDictionaryStorage::UserDictionaryEntry *entry = dic->add_entries(); - entry->set_key("suppress_key" + NumberUtil::SimpleItoa(j)); - entry->set_value("suppress_value" + NumberUtil::SimpleItoa(j)); + entry->set_key( + "suppress_key" + NumberUtil::SimpleItoa(static_cast(j))); + entry->set_value( + "suppress_value" + NumberUtil::SimpleItoa(static_cast(j))); // entry->set_pos("抑制単語"); entry->set_pos(user_dictionary::UserDictionary::SUPPRESSION_WORD); } @@ -762,8 +766,8 @@ for (size_t j = 0; j < 10; ++j) { EXPECT_TRUE(suppression_dictionary_->SuppressEntry( - "suppress_key" + NumberUtil::SimpleItoa(j), - "suppress_value" + NumberUtil::SimpleItoa(j))); + "suppress_key" + NumberUtil::SimpleItoa(static_cast(j)), + "suppress_value" + NumberUtil::SimpleItoa(static_cast(j)))); } } @@ -777,8 +781,10 @@ for (size_t j = 0; j < 10000; ++j) { UserDictionaryStorage::UserDictionaryEntry *entry = dic->add_entries(); - entry->set_key("no_suppress_key" + NumberUtil::SimpleItoa(j)); - entry->set_value("no_suppress_value" + NumberUtil::SimpleItoa(j)); + entry->set_key( + "no_suppress_key" + NumberUtil::SimpleItoa(static_cast(j))); + entry->set_value( + "no_suppress_value" + NumberUtil::SimpleItoa(static_cast(j))); entry->set_pos(user_dictionary::UserDictionary::NOUN); } @@ -788,8 +794,8 @@ for (size_t j = 0; j < 10; ++j) { EXPECT_FALSE(suppression_dictionary_->SuppressEntry( - "suppress_key" + NumberUtil::SimpleItoa(j), - "suppress_value" + NumberUtil::SimpleItoa(j))); + "suppress_key" + NumberUtil::SimpleItoa(static_cast(j)), + "suppress_value" + NumberUtil::SimpleItoa(static_cast(j)))); } } FileUtil::Unlink(filename); @@ -815,7 +821,7 @@ for (size_t j = 0; j < 10; ++j) { UserDictionaryStorage::UserDictionaryEntry *entry = dic->add_entries(); - entry->set_key("key" + NumberUtil::SimpleItoa(j)); + entry->set_key("key" + NumberUtil::SimpleItoa(static_cast(j))); entry->set_value("default"); // "名詞" entry->set_pos(user_dictionary::UserDictionary::NOUN); @@ -824,7 +830,7 @@ for (size_t j = 0; j < 10; ++j) { UserDictionaryStorage::UserDictionaryEntry *entry = dic->add_entries(); - entry->set_key("key" + NumberUtil::SimpleItoa(j)); + entry->set_key("key" + NumberUtil::SimpleItoa(static_cast(j))); entry->set_value("suggest_only"); // "サジェストのみ" entry->set_pos(user_dictionary::UserDictionary::SUGGESTION_ONLY); diff -Nru mozc-1.11.1502.102/dictionary/user_dictionary_util_test.cc mozc-1.11.1522.102/dictionary/user_dictionary_util_test.cc --- mozc-1.11.1502.102/dictionary/user_dictionary_util_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/dictionary/user_dictionary_util_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -293,7 +293,7 @@ EXPECT_TRUE(UserDictionaryUtil::IsDictionaryFull(dictionary)); } -TEST(UserDictioanryUtilTest, IsSyncDictionaryFull) { +TEST(UserDictionaryUtilTest, IsSyncDictionaryFull) { UserDictionary dictionary; dictionary.set_syncable(true); diff -Nru mozc-1.11.1502.102/engine/engine.cc mozc-1.11.1522.102/engine/engine.cc --- mozc-1.11.1502.102/engine/engine.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/engine/engine.cc 2013-08-28 05:26:13.000000000 +0000 @@ 
-236,7 +236,7 @@ rewriter_ = new RewriterImpl(converter_impl, data_manager, pos_group_.get(), - user_dictionary_.get()); + dictionary_.get()); CHECK(rewriter_); converter_impl->Init(data_manager->GetPOSMatcher(), diff -Nru mozc-1.11.1502.102/gui/config_dialog/keymap_editor.cc mozc-1.11.1522.102/gui/config_dialog/keymap_editor.cc --- mozc-1.11.1502.102/gui/config_dialog/keymap_editor.cc 2013-07-17 02:38:20.000000000 +0000 +++ mozc-1.11.1522.102/gui/config_dialog/keymap_editor.cc 2013-08-28 05:26:28.000000000 +0000 @@ -109,7 +109,7 @@ invisible_key_events_.insert(mozc::commands::KeyEvent::KANJI); invisible_key_events_.insert(mozc::commands::KeyEvent::ON); invisible_key_events_.insert(mozc::commands::KeyEvent::OFF); - invisible_key_events_.insert(mozc::commands::KeyEvent::ASCII); + invisible_key_events_.insert(mozc::commands::KeyEvent::TEXT_INPUT); } bool IsVisibleKey(const string &key) { @@ -209,6 +209,7 @@ manager.GetAvailableCommandNamePrecomposition(&command_names); manager.GetAvailableCommandNameComposition(&command_names); manager.GetAvailableCommandNameConversion(&command_names); + manager.GetAvailableCommandNameZeroQuerySuggestion(&command_names); manager.GetAvailableCommandNameSuggestion(&command_names); manager.GetAvailableCommandNamePrediction(&command_names); for (set::const_iterator itr = command_names.begin(); diff -Nru mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool.cc mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool.cc --- mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool.cc 2013-07-17 02:38:22.000000000 +0000 +++ mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool.cc 2013-08-28 05:26:30.000000000 +0000 @@ -939,7 +939,7 @@ // it's not the case. Here we simply remove the MS-IME import function // if dictionary tool is UAC-elevated QMessageBox::warning(this, window_title_, - tr("Microsoft IME dictioanry import function doesn't " + tr("Microsoft IME dictionary import function doesn't " "work on UAC-elevated process.")); return; } Binary files /tmp/XHzFtK0Zqt/mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_en.qm and /tmp/Lp87ADOiYs/mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_en.qm differ diff -Nru mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_en.ts mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_en.ts --- mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_en.ts 2013-07-17 02:38:21.000000000 +0000 +++ mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_en.ts 2013-08-28 05:26:29.000000000 +0000 @@ -309,7 +309,7 @@ - Microsoft IME dictioanry import function doesn't work on UAC-elevated process. + Microsoft IME dictionary import function doesn't work on UAC-elevated process. Binary files /tmp/XHzFtK0Zqt/mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_ja.qm and /tmp/Lp87ADOiYs/mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_ja.qm differ diff -Nru mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_ja.ts mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_ja.ts --- mozc-1.11.1502.102/gui/dictionary_tool/dictionary_tool_ja.ts 2013-07-17 02:38:20.000000000 +0000 +++ mozc-1.11.1522.102/gui/dictionary_tool/dictionary_tool_ja.ts 2013-08-28 05:26:28.000000000 +0000 @@ -228,7 +228,7 @@ - Microsoft IME dictioanry import function doesn't work on UAC-elevated process. + Microsoft IME dictionary import function doesn't work on UAC-elevated process. 
UACで昇格されたアプリケーションからは、Microsoft IME のユーザ辞書を読み込むことができません。 diff -Nru mozc-1.11.1502.102/gui/gui.gyp mozc-1.11.1522.102/gui/gui.gyp --- mozc-1.11.1502.102/gui/gui.gyp 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/gui/gui.gyp 2013-08-28 05:26:12.000000000 +0000 @@ -1108,7 +1108,7 @@ 'msvs_settings': { 'VCManifestTool': { 'AdditionalManifestFiles': 'tool/mozc_tool.exe.manifest', - 'EmbedManifest': 'false', + 'EmbedManifest': 'true', }, }, }], diff -Nru mozc-1.11.1502.102/gui/tool/mozc_tool.exe.manifest mozc-1.11.1522.102/gui/tool/mozc_tool.exe.manifest --- mozc-1.11.1502.102/gui/tool/mozc_tool.exe.manifest 2013-07-17 02:37:39.000000000 +0000 +++ mozc-1.11.1522.102/gui/tool/mozc_tool.exe.manifest 2013-08-28 05:25:52.000000000 +0000 @@ -9,6 +9,8 @@ + + diff -Nru mozc-1.11.1502.102/gyp/common.gypi mozc-1.11.1522.102/gyp/common.gypi --- mozc-1.11.1502.102/gyp/common.gypi 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/gyp/common.gypi 2013-08-28 05:26:13.000000000 +0000 @@ -362,6 +362,18 @@ 'x86_Base': { 'abstract': 1, 'msvs_settings': { + 'VCCLCompilerTool': { + 'conditions': [ + ['target_compiler=="msvs2012"', { + # Windows 7 and prior still support CPUs that lack of SSE/SSE2. + # So we explicitly disable them. We can change this setting to + # /arch:SSE2 once Windows 7 is unsupported in Mozc. + # Note that Visual C++ 2010 does not have /arch:IA32 and + # does not use these enhanced instruction set by default. + 'AdditionalOptions': '/arch:IA32', + }], + ], + }, 'VCLibrarianTool': { 'AdditionalLibraryDirectories': [ '<@(msvs_libs_x86)', @@ -700,12 +712,7 @@ 'SuppressStartupBanner': 'true', # /nologo 'TreatWChar_tAsBuiltInType': 'false', # /Zc:wchar_t- 'WarningLevel': '3', # /W3 - 'conditions': [ - ['target_compiler=="msvs2010" or target_compiler=="msvs2012"', { - 'AdditionalOptions': '/MP', # /MP - 'OmitFramePointers': 'false', # /Oy- (for Visual C++ 2010) - }], - ], + 'OmitFramePointers': 'false', # /Oy- }, 'VCLinkerTool': { 'AdditionalDependencies': [ diff -Nru mozc-1.11.1502.102/handwriting/zinnia_handwriting_test.cc mozc-1.11.1522.102/handwriting/zinnia_handwriting_test.cc --- mozc-1.11.1502.102/handwriting/zinnia_handwriting_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/handwriting/zinnia_handwriting_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -34,17 +34,22 @@ #include #include +#include "base/file_util.h" #include "testing/base/public/googletest.h" #include "testing/base/public/gunit.h" +DECLARE_string(test_srcdir); + namespace mozc { namespace handwriting { class ZinniaHandwritingTest : public ::testing::Test { protected: virtual void SetUp() { - zinnia_.reset( - new ZinniaHandwriting(ZinniaHandwriting::GetModelFileName())); + const string filepath = FileUtil::JoinPath( + FLAGS_test_srcdir, + "handwriting-ja.model"); + zinnia_.reset(new ZinniaHandwriting(filepath)); } scoped_ptr zinnia_; @@ -59,13 +64,10 @@ strokes.push_back(stroke); vector results; - - // This call returns an error because it failed to load the handwriting - // model file. However there is no implementation to specify the file - // path of the model to the constructor. - // TODO(komatsu): Enable to specify the model file to ZinniaHandwriting. 
const HandwritingStatus status = zinnia_->Recognize(strokes, &results); - EXPECT_EQ(HANDWRITING_ERROR, status); + EXPECT_EQ(HANDWRITING_NO_ERROR, status); + // "一" + EXPECT_EQ("\xE4\xB8\x80", results[0]); } TEST_F(ZinniaHandwritingTest, Commit) { diff -Nru mozc-1.11.1502.102/ipc/ipc_path_manager.cc mozc-1.11.1522.102/ipc/ipc_path_manager.cc --- mozc-1.11.1502.102/ipc/ipc_path_manager.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/ipc/ipc_path_manager.cc 2013-08-28 05:25:59.000000000 +0000 @@ -322,23 +322,6 @@ return false; } -#ifdef OS_WIN - // OpenProcess API seems to be unavailable on Win8/AppContainer. So we - // temporarily disable the verification of the path name of the peer. - bool in_appcontainer = false; - if (!WinUtil::IsProcessInAppContainer(::GetCurrentProcess(), - &in_appcontainer)) { - return false; - } - if (in_appcontainer) { - // Bypass security check. - // TODO(yukawa): Establish alternative verification mechanism for Metro. - server_pid_ = pid; - server_path_.clear(); - return true; - } -#endif // OS_WIN - // compare path name if (pid == server_pid_) { return (server_path == server_path_); diff -Nru mozc-1.11.1502.102/ipc/win32_ipc.cc mozc-1.11.1522.102/ipc/win32_ipc.cc --- mozc-1.11.1502.102/ipc/win32_ipc.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/ipc/win32_ipc.cc 2013-08-28 05:25:59.000000000 +0000 @@ -770,15 +770,6 @@ const DWORD create_file_error = ::GetLastError(); // ScopedHandle returns nullptr even when it received INVALID_HANDLE_VALUE. if (new_handle.get() != nullptr) { - DWORD mode = PIPE_READMODE_MESSAGE; - if (::SetNamedPipeHandleState(new_handle.get(), &mode, nullptr, nullptr) - == FALSE) { - const DWORD set_namedpipe_handle_state_error = ::GetLastError(); - LOG(ERROR) << "SetNamedPipeHandleState failed. error: " - << set_namedpipe_handle_state_error; - last_ipc_error_ = IPC_UNKNOWN_ERROR; - return; - } pipe_handle_.reset(new_handle.take()); MaybeDisableFileCompletionNotification(pipe_handle_.get()); if (!manager->IsValidServer(GetServerProcessIdImpl(pipe_handle_.get()), diff -Nru mozc-1.11.1502.102/languages/chewing/chewing.gyp mozc-1.11.1522.102/languages/chewing/chewing.gyp --- mozc-1.11.1502.102/languages/chewing/chewing.gyp 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/chewing.gyp 1970-01-01 00:00:00.000000000 +0000 @@ -1,103 +0,0 @@ -# Copyright 2010-2013, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -{ - 'variables': { - 'relative_dir': 'chewing', - 'gen_out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_dir)', - }, - 'targets': [ - { - 'target_name': 'chewing_session', - 'type': 'static_library', - 'sources': [ - 'session.cc', - 'chewing_session_factory.cc', - ], - 'dependencies': [ - '../../base/base.gyp:base', - '../../session/session_base.gyp:key_event_util', - '../../session/session_base.gyp:session_protocol', - ], - 'includes': [ - 'chewing_libraries.gypi', - ], - }, - { - 'target_name': 'chewing_all_test', - 'type': 'none', - }, - ], - 'conditions': [ - ['OS=="linux"', { - 'targets': [ - { - 'target_name': 'ibus_mozc_chewing', - 'type': 'executable', - 'sources': [ - 'unix/ibus/main.cc', - 'unix/ibus/mozc_engine_property.cc', - ], - 'dependencies': [ - '../../unix/ibus/ibus.gyp:ibus_mozc_lib', - ], - 'conditions': [ - ['target_platform=="ChromeOS"', { - 'dependencies': [ - '../../config/config.gyp:config_handler', - '../../config/config.gyp:config_protocol', - 'chewing_session', - ], - 'sources+': [ - 'unix/ibus/config_updater.cc', - ], - 'includes': [ - 'chewing_libraries.gypi', - ], - }], - ], - }, - { - 'target_name': 'mozc_server_chewing', - 'type': 'executable', - 'sources': [ - 'server_main.cc', - ], - 'dependencies': [ - '../../server/server.gyp:mozc_server_lib', - 'chewing_session', - ], - 'includes': [ - 'chewing_libraries.gypi', - ], - }, - ], - }], - ], -} diff -Nru mozc-1.11.1502.102/languages/chewing/chewing_libraries.gypi mozc-1.11.1522.102/languages/chewing/chewing_libraries.gypi --- mozc-1.11.1502.102/languages/chewing/chewing_libraries.gypi 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/chewing_libraries.gypi 1970-01-01 00:00:00.000000000 +0000 @@ -1,45 +0,0 @@ -# Copyright 2010-2013, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -{ - 'variables': { - 'chewing_libs': [ - 'chewing', - ], - }, - 'cflags': [ - ' - -#include "base/file_util.h" -#include "base/logging.h" -#include "base/singleton.h" -#include "engine/empty_user_data_manager.h" -#include "languages/chewing/session.h" - -#if defined(OS_CHROMEOS) -DEFINE_string(datapath, "/usr/share/chewing", - "the default path of libchewing"); -#else -DEFINE_string(datapath, "/usr/share/libchewing3/chewing", - "the default path of libchewing"); -#endif // OS_CHROMEOS - -namespace mozc { - -string GetHashPath() { - // The logic below is copied from SystemUtil::GetUserProfileDirectory(). - string dir; - char buf[1024]; - struct passwd pw, *ppw; - const uid_t uid = geteuid(); - CHECK_EQ(0, getpwuid_r(uid, &pw, buf, sizeof(buf), &ppw)) - << "Can't get passwd entry for uid " << uid << "."; - CHECK_LT(0, strlen(pw.pw_dir)) - << "Home directory for uid " << uid << " is not set."; -#if defined(OS_CHROMEOS) - dir = FileUtil::JoinPath(pw.pw_dir, "user/.chewing"); -#else - dir = FileUtil::JoinPath(pw.pw_dir, ".chewing"); -#endif // OS_CHROMEOS - return dir; -} - -namespace chewing { - -// The default session factory implementation for chewing. We do not -// use the implementation in session/session_factory.cc. We do not -// even link to it because the default session factory refers to the -// Japanese language models / Japanese vocabulary which we don't want -// here. -ChewingSessionFactory::ChewingSessionFactory() { - string hash_path = GetHashPath(); - if (!FileUtil::DirectoryExists(hash_path)) { - string hash_dir = FileUtil::Dirname(hash_path); - // In Chrome OS, hash_dir would be ~/user, which might not exist. - if (FileUtil::DirectoryExists(hash_dir) || - FileUtil::CreateDirectory(hash_dir)) { - FileUtil::CreateDirectory(hash_path); - } - } - ::chewing_Init(FLAGS_datapath.c_str(), hash_path.c_str()); -} - -ChewingSessionFactory::~ChewingSessionFactory() { - ::chewing_Terminate(); -} - -session::SessionInterface *ChewingSessionFactory::NewSession() { - return new Session(); -} - -UserDataManagerInterface *ChewingSessionFactory::GetUserDataManager() { - return Singleton::get(); -} - -} // namespace chewing -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/chewing/chewing_session_factory.h mozc-1.11.1522.102/languages/chewing/chewing_session_factory.h --- mozc-1.11.1502.102/languages/chewing/chewing_session_factory.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/chewing_session_factory.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_CHEWING_CHEWING_SESSION_FACTORY_H_ -#define MOZC_LANGUAGES_CHEWING_CHEWING_SESSION_FACTORY_H_ -#include "session/session_factory_manager.h" - -namespace mozc { -namespace session { -class SessionInterface; -} // namespace session - -namespace chewing { -class ChewingSessionFactory : public session::SessionFactoryInterface { - public: - ChewingSessionFactory(); - virtual ~ChewingSessionFactory(); - virtual session::SessionInterface *NewSession(); - virtual UserDataManagerInterface *GetUserDataManager(); - private: - bool is_available_; -}; -} // namespace chewing -} // namespace mozc - -#endif // MOZC_LANGUAGES_CHEWING_CHEWING_SESSION_FACTORY_H_ diff -Nru mozc-1.11.1502.102/languages/chewing/scoped_chewing_ptr.h mozc-1.11.1522.102/languages/chewing/scoped_chewing_ptr.h --- mozc-1.11.1502.102/languages/chewing/scoped_chewing_ptr.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/scoped_chewing_ptr.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,84 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Scoped pointer used for chewing. - -#ifndef MOZC_LANGUAGES_CHEWING_SCOPED_CHEWING_PTR_H_ -#define MOZC_LANGUAGES_CHEWING_SCOPED_CHEWING_PTR_H_ - -#include - -#include "base/port.h" - -template -class scoped_chewing_ptr { - public: - typedef PTR* element_type; - - explicit scoped_chewing_ptr(PTR* object = NULL) - : object_(object) { - } - - ~scoped_chewing_ptr() { - if (object_) { - ::chewing_free(object_); - } - } - - void reset(PTR* object = NULL) { - if (object_) { - ::chewing_free(object_); - } - object_ = object; - } - - bool operator==(PTR* that) const { - return object_ == that; - } - - bool operator!=(PTR* that) const { - return object_ != that; - } - - operator PTR*() const { - return object_; - } - - PTR* get() const { - return object_; - } - - private: - PTR* object_; - - DISALLOW_COPY_AND_ASSIGN(scoped_chewing_ptr); -}; - - -#endif // MOZC_LANGUAGES_CHEWING_SCOPED_CHEWING_PTR_H_ diff -Nru mozc-1.11.1502.102/languages/chewing/server_main.cc mozc-1.11.1522.102/languages/chewing/server_main.cc --- mozc-1.11.1502.102/languages/chewing/server_main.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/server_main.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,43 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include "languages/chewing/chewing_session_factory.h" -#include "server/mozc_server.h" -#include "session/session_factory_manager.h" - -int main(int argc, char* argv[]) { - mozc::server::InitGoogleAndMozcServer(argv[0], &argc, &argv, false); - - mozc::chewing::ChewingSessionFactory session_factory; - mozc::session::SessionFactoryManager::SetSessionFactory(&session_factory); - - const int return_value = mozc::server::MozcServer::Run(); - mozc::server::MozcServer::Finalize(); - return return_value; -} diff -Nru mozc-1.11.1502.102/languages/chewing/session.cc mozc-1.11.1522.102/languages/chewing/session.cc --- mozc-1.11.1502.102/languages/chewing/session.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/session.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,615 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/chewing/session.h" - -#include - -#include "base/base.h" -#include "base/logging.h" -#include "base/singleton.h" -#include "base/util.h" -#include "config/config_handler.h" -#include "languages/chewing/scoped_chewing_ptr.h" -#include "session/key_event_util.h" - -using mozc::commands::KeyEvent; -using mozc::config::ChewingConfig; - -namespace mozc { -namespace { -// Returns the bytes used for the characters in the text. -int BytesForChars(const string &utf8_text, int characters) { - int result = 0; - int counted = 0; - const char *text_data = utf8_text.data(); - while (result < utf8_text.size() && counted < characters) { - int charLen = Util::OneCharLen(text_data); - result += charLen; - text_data += charLen; - ++counted; - } - return result; -} - -// Holds the mapping between mozc ChewingConfig enum and libchewing -// config enum. 
-class ChewingConfigMap { - public: - ChewingConfigMap() { - keyboard_type_map_[ChewingConfig::DEFAULT] = - ::chewing_KBStr2Num(const_cast("KB_DEFAULT")); - keyboard_type_map_[ChewingConfig::HSU] = - ::chewing_KBStr2Num(const_cast("KB_HSU")); - keyboard_type_map_[ChewingConfig::IBM] = - ::chewing_KBStr2Num(const_cast("KB_IBM")); - keyboard_type_map_[ChewingConfig::GIN_YIEH] = - ::chewing_KBStr2Num(const_cast("KB_GIN_YIEH")); - keyboard_type_map_[ChewingConfig::ETEN] = - ::chewing_KBStr2Num(const_cast("KB_ET")); - keyboard_type_map_[ChewingConfig::ETEN26] = - ::chewing_KBStr2Num(const_cast("KB_ET26")); - keyboard_type_map_[ChewingConfig::DVORAK] = - ::chewing_KBStr2Num(const_cast("KB_DVORAK")); - keyboard_type_map_[ChewingConfig::DVORAK_HSU] = - ::chewing_KBStr2Num(const_cast("KB_DVORAK_HSU")); - keyboard_type_map_[ChewingConfig::DACHEN_26] = - ::chewing_KBStr2Num(const_cast("KB_DACHEN_CP26")); - keyboard_type_map_[ChewingConfig::HANYU] = - ::chewing_KBStr2Num(const_cast("KB_HANYU_PINYIN")); - - selection_keys_map_[ChewingConfig::SELECTION_1234567890] = "1234567890"; - selection_keys_map_[ChewingConfig::SELECTION_asdfghjkl] = "asdfghjkl;"; - selection_keys_map_[ChewingConfig::SELECTION_asdfzxcv89] = "asdfzxcv89"; - selection_keys_map_[ChewingConfig::SELECTION_asdfjkl789] = "asdfjkl789"; - selection_keys_map_[ChewingConfig::SELECTION_aoeuqjkix] = "aoeu;qjkix"; - selection_keys_map_[ChewingConfig::SELECTION_aoeuhtnsid] = "aoeuhtnsid"; - selection_keys_map_[ChewingConfig::SELECTION_aoeuidhtns] = "aoeuidhtns"; - selection_keys_map_[ChewingConfig::SELECTION_1234qweras] = "1234qweras"; - - hsu_selection_keys_map_[ChewingConfig::HSU_asdfjkl789] = HSU_SELKEY_TYPE1; - hsu_selection_keys_map_[ChewingConfig::HSU_asdfzxcv89] = HSU_SELKEY_TYPE2; - } - - int GetKeyboardTypeId(ChewingConfig::KeyboardType keyboard_type) const { - map::const_iterator it = - keyboard_type_map_.find(keyboard_type); - if (it != keyboard_type_map_.end()) { - return it->second; - } - return ::chewing_KBStr2Num(const_cast("KB_DEFAULT")); - } - - string GetSelectionKeys(ChewingConfig::SelectionKeys selection_keys) const { - map::const_iterator it = - selection_keys_map_.find(selection_keys); - if (it != selection_keys_map_.end()) { - return it->second; - } - - return ""; - } - - int GetHsuSelectionKeys( - ChewingConfig::HsuSelectionKeys hsu_selection_keys) const { - map::const_iterator it = - hsu_selection_keys_map_.find(hsu_selection_keys); - if (it != hsu_selection_keys_map_.end()) { - return it->second; - } - - return HSU_SELKEY_TYPE1; - } - - private: - map keyboard_type_map_; - map selection_keys_map_; - map hsu_selection_keys_map_; -}; - -uint64 g_last_config_updated = 0; -} // namespace - -namespace chewing { -Session::Session() - : context_(::chewing_new()), - state_(PRECOMPOSITION), - create_session_time_(Util::GetTime()), - last_command_time_(0), - last_config_updated_(0) { - ResetConfig(); -} - -Session::~Session() { - chewing_delete(context_); -} - -#define CHEWING_SET_CONFIG(field, name) \ - ::chewing_set_##name(context_, chewing_config.field()) - -void Session::ResetConfig() { - const ChewingConfig &chewing_config = GET_CONFIG(chewing_config); - CHEWING_SET_CONFIG(automatic_shift_cursor, autoShiftCur); - CHEWING_SET_CONFIG(add_phrase_direction, addPhraseDirection); - CHEWING_SET_CONFIG(easy_symbol_input, easySymbolInput); - CHEWING_SET_CONFIG(escape_cleans_all_buffer, escCleanAllBuf); - CHEWING_SET_CONFIG(phrase_choice_rearward, phraseChoiceRearward); - CHEWING_SET_CONFIG(space_as_selection, spaceAsSelection); - 
CHEWING_SET_CONFIG(maximum_chinese_character_length, maxChiSymbolLen); - CHEWING_SET_CONFIG(candidates_per_page, candPerPage); - - const ChewingConfigMap *config_map = Singleton::get(); - ::chewing_set_KBType( - context_, config_map->GetKeyboardTypeId(chewing_config.keyboard_type())); - ::chewing_set_hsuSelKeyType( - context_, - config_map->GetHsuSelectionKeys(chewing_config.hsu_selection_keys())); - - // Set up the selection keys - string keys = config_map->GetSelectionKeys(chewing_config.selection_keys()); - - // We always use a static size of selection keys, MAX_SELKEY (in - // chewing/global.h) because libchewing needs it. - if (keys.size() == MAX_SELKEY) { - int keys_data[MAX_SELKEY]; - for (size_t i = 0; i < keys.size(); ++i) { - keys_data[i] = static_cast(keys[i]); - } - ::chewing_set_selKey(context_, keys_data, MAX_SELKEY); - } else if (!keys.empty()) { - LOG(ERROR) << "The size of selection keys has changed in libchewing"; - } - last_config_updated_ = Util::GetTime(); -} - -#undef CHEWING_SET_CONFIG - -void Session::RenewContext() { - const int original_chi_eng_mode = ::chewing_get_ChiEngMode(context_); - const int original_shape_mode = ::chewing_get_ShapeMode(context_); - ::chewing_delete(context_); - context_ = ::chewing_new(); - ResetConfig(); - ::chewing_set_ChiEngMode(context_, original_chi_eng_mode); - ::chewing_set_ShapeMode(context_, original_shape_mode); -} - -void Session::FillCandidates(commands::Candidates *candidates) { - if (::chewing_cand_CheckDone(context_)) { - return; - } - - const int total_candidates = ::chewing_cand_TotalChoice(context_); - if (total_candidates == 0) { - return; - } - - candidates->set_size(total_candidates); - const int page_size = ::chewing_cand_ChoicePerPage(context_); - ::chewing_cand_Enumerate(context_); - scoped_chewing_ptr selkeys(::chewing_get_selKey(context_)); - const int base_rank = page_size * ::chewing_cand_CurrentPage(context_); - for (int i = 0; i < page_size && ::chewing_cand_hasNext(context_); ++i) { - scoped_chewing_ptr cand_text(::chewing_cand_String(context_)); - commands::Candidates::Candidate *new_candidate = - candidates->add_candidate(); - new_candidate->set_id(base_rank + i); - new_candidate->set_index(i); - new_candidate->set_value(cand_text.get()); - new_candidate->mutable_annotation()->set_shortcut( - string(1, static_cast(selkeys.get()[i]))); - } - candidates->set_direction(commands::Candidates::HORIZONTAL); -} - -void Session::FillOutput(commands::Command *command) { - DLOG(INFO) << ::chewing_get_KBString(context_); - commands::Output *output = command->mutable_output(); - output->mutable_key()->CopyFrom(command->input().key()); - - if (!output->consumed() && state_ == PRECOMPOSITION) { - // Do not fill the result if the key is not consumed and current - // status is PRECOMPOSITION (means not during the input). In such - // case it will fill the output of the previous status. - return; - } - - // Fill the result - if (::chewing_commit_Check(context_)) { - scoped_chewing_ptr commit_text(::chewing_commit_String(context_)); - commands::Result *result = output->mutable_result(); - result->set_type(commands::Result::STRING); - result->set_value(commit_text.get()); - if (::chewing_get_ChiEngMode(context_) == SYMBOL_MODE && - GET_CONFIG(chewing_config).force_lowercase_english()) { - Util::LowerString(result->mutable_value()); - } - } - - // Fill the preedit - // Buffer means the chinese characters which are not comitted yet. 
- string buffer; - int buffer_len = 0; - if (::chewing_buffer_Check(context_)) { - // Buffer len means the number of characters, not bytes. - buffer_len = ::chewing_buffer_Len(context_); - scoped_chewing_ptr buffer_text(::chewing_buffer_String(context_)); - buffer.assign(buffer_text.get()); - } - - // Zuin means the user-typed Zhuyin charcters. - string zuin; - int zuin_len = 0; - { - scoped_chewing_ptr zuin_text( - ::chewing_zuin_String(context_, &zuin_len)); - // zuin_len also means the number of characters, not bytes. - if (zuin_len > 0) { - zuin.assign(zuin_text.get()); - } - } - - const int cursor = ::chewing_cursor_Current(context_); - // Constract actual preedit structure. We need to insert |zuin| at - // the cursor position -- therefore, we need to split the buffer beforehand. - { - string pre_text; - string trailing_text; - int pre_len = 0; - int trailing_len = 0; - if (buffer_len > 0) { - if (cursor < buffer_len) { - int bytes = BytesForChars(buffer, cursor); - pre_text = buffer.substr(0, bytes); - trailing_text = buffer.substr(bytes); - pre_len = cursor; - trailing_len = buffer_len - cursor; - } else { - pre_text = buffer; - pre_len = buffer_len; - } - } - if (!pre_text.empty()) { - commands::Preedit::Segment *segment = - output->mutable_preedit()->add_segment(); - segment->set_annotation(commands::Preedit::Segment::UNDERLINE); - segment->set_value(pre_text); - segment->set_value_length(pre_len); - } - if (zuin_len > 0) { - commands::Preedit::Segment *segment = - output->mutable_preedit()->add_segment(); - segment->set_annotation(commands::Preedit::Segment::HIGHLIGHT); - segment->set_value(zuin); - segment->set_value_length(zuin_len); - } - if (!trailing_text.empty()) { - commands::Preedit::Segment *segment = - output->mutable_preedit()->add_segment(); - segment->set_annotation(commands::Preedit::Segment::UNDERLINE); - segment->set_value(trailing_text); - segment->set_value_length(trailing_len); - } - if (output->has_preedit() && output->preedit().segment_size() > 0) { - output->mutable_preedit()->set_cursor(cursor); - } - } - - if (output->preedit().segment_size() == 0) { - state_ = PRECOMPOSITION; - } else { - state_ = IN_CONVERSION; - } - - // Fill the candidates - // TODO(mukai): Fill the all_candidates too. - if (!::chewing_cand_CheckDone(context_)) { - int total_candidates = ::chewing_cand_TotalChoice(context_); - if (total_candidates > 0) { - FillCandidates(output->mutable_candidates()); - // Set the cursor here. - output->mutable_candidates()->set_position(cursor); - } - } - - commands::CompositionMode new_mode = commands::NUM_OF_COMPOSITIONS; - if (::chewing_get_ChiEngMode(context_) == CHINESE_MODE) { - // Currently we use HIRAGANA for the chewing input but it's not the ideal. - // TODO(mukai): use a CHEWING mode when we add it. - new_mode = commands::HIRAGANA; - } else { - // English mode: check the full/half width - if (::chewing_get_ShapeMode(context_) == FULLSHAPE_MODE) { - new_mode = commands::FULL_ASCII; - } else { - new_mode = commands::HALF_ASCII; - } - } - if (new_mode != commands::NUM_OF_COMPOSITIONS) { - output->mutable_status()->set_mode(new_mode); - output->mutable_status()->set_activated(true); - } - - DLOG(INFO) << command->DebugString(); -} - -bool Session::SendKey(commands::Command *command) { - last_command_time_ = Util::GetTime(); - if (g_last_config_updated > last_config_updated_) { - ResetConfig(); - } - - // Check the modifier keys at first. 
- const KeyEvent &key_event = command->input().key(); - bool status_updated = false; - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - - if (KeyEventUtil::IsShift(modifiers)) { - if (key_event.has_special_key()) { - if (key_event.special_key() == KeyEvent::LEFT) { - ::chewing_handle_ShiftLeft(context_); - status_updated = true; - } else if (key_event.special_key() == KeyEvent::RIGHT) { - ::chewing_handle_ShiftRight(context_); - status_updated = true; - } else if (key_event.special_key() == KeyEvent::SPACE) { - ::chewing_handle_ShiftSpace(context_); - status_updated = true; - } - } - } else if (KeyEventUtil::IsCtrl(modifiers)) { - // CtrlNum is the event just for numeric keys at the top of - // keyboard, not for the tenkeys. - if ('0' <= key_event.key_code() && key_event.key_code() <= '9') { - ::chewing_handle_CtrlNum(context_, key_event.key_code()); - status_updated = true; - } - } else { - // normal key event. - if (key_event.has_special_key()) { - // TODO(mukai): write a script to generate following clauses. - switch (key_event.special_key()) { - case KeyEvent::SPACE: - ::chewing_handle_Space(context_); - status_updated = true; - break; - case KeyEvent::ESCAPE: - ::chewing_handle_Esc(context_); - status_updated = true; - break; - case KeyEvent::ENTER: - if (!::chewing_cand_CheckDone(context_) && - ::chewing_cand_TotalChoice(context_) > 0) { - // Special hack: if candidate window pops up, Enter-key - // means the commit of the first candidate instead of - // doing nothing. - // other implementations behaviors: - // ibus-chewing: do nothing - // Mac Zhuyin: select candidate, not commit - // The current code is same as Mac Zhuyin behavior. - // TODO(mukai): verify the correct behavior. - ::chewing_handle_Default(context_, '1'); - } else { - ::chewing_handle_Enter(context_); - } - status_updated = true; - break; - case KeyEvent::DEL: - ::chewing_handle_Del(context_); - status_updated = true; - break; - case KeyEvent::BACKSPACE: - ::chewing_handle_Backspace(context_); - status_updated = true; - break; - case KeyEvent::TAB: - ::chewing_handle_Tab(context_); - status_updated = true; - break; - case KeyEvent::LEFT: - ::chewing_handle_Left(context_); - status_updated = true; - break; - case KeyEvent::RIGHT: - ::chewing_handle_Right(context_); - status_updated = true; - break; - case KeyEvent::UP: - ::chewing_handle_Up(context_); - status_updated = true; - break; - case KeyEvent::HOME: - ::chewing_handle_Home(context_); - status_updated = true; - break; - case KeyEvent::END: - ::chewing_handle_End(context_); - status_updated = true; - break; - case KeyEvent::PAGE_UP: - ::chewing_handle_PageUp(context_); - status_updated = true; - break; - case KeyEvent::PAGE_DOWN: - ::chewing_handle_PageDown(context_); - status_updated = true; - break; - case KeyEvent::CAPS_LOCK: - ::chewing_handle_Capslock(context_); - status_updated = true; - break; - case KeyEvent::NUMPAD0: - case KeyEvent::NUMPAD1: - case KeyEvent::NUMPAD2: - case KeyEvent::NUMPAD3: - case KeyEvent::NUMPAD4: - case KeyEvent::NUMPAD5: - case KeyEvent::NUMPAD6: - case KeyEvent::NUMPAD7: - case KeyEvent::NUMPAD8: - case KeyEvent::NUMPAD9: - ::chewing_handle_Numlock( - context_, '0' + key_event.special_key() - KeyEvent::NUMPAD0); - status_updated = true; - break; - default: - // do nothing - // Currently we don't handle DblTab. 
- break; - } - } else if (key_event.modifier_keys_size() == 0) { - ::chewing_handle_Default(context_, key_event.key_code()); - status_updated = true; - } - } - - command->mutable_output()->set_consumed( - status_updated && !::chewing_keystroke_CheckIgnore(context_)); - FillOutput(command); - return true; -} - -bool Session::TestSendKey(commands::Command *command) { - // TODO(mukai): implement this. - last_command_time_ = Util::GetTime(); - command->mutable_output()->set_consumed(true); - FillOutput(command); - return true; -} - -bool Session::SendCommand(commands::Command *command) { - last_command_time_ = Util::GetTime(); - if (g_last_config_updated > last_config_updated_) { - ResetConfig(); - } - - const commands::SessionCommand &session_command = command->input().command(); - bool consumed = false; - switch (session_command.type()) { - case commands::SessionCommand::REVERT: - RenewContext(); - consumed = true; - break; - case commands::SessionCommand::SUBMIT: - // TODO(mukai): think about the key customization. - ::chewing_handle_Enter(context_); - consumed = true; - break; - case commands::SessionCommand::SWITCH_INPUT_MODE: - switch (session_command.composition_mode()) { - case commands::HIRAGANA: - ::chewing_set_ChiEngMode(context_, CHINESE_MODE); - consumed = true; - break; - case commands::FULL_ASCII: - ::chewing_set_ChiEngMode(context_, SYMBOL_MODE); - ::chewing_set_ShapeMode(context_, FULLSHAPE_MODE); - consumed = true; - break; - case commands::HALF_ASCII: - ::chewing_set_ChiEngMode(context_, SYMBOL_MODE); - ::chewing_set_ShapeMode(context_, HALFSHAPE_MODE); - consumed = true; - break; - default: - // do nothing - break; - } - break; - case commands::SessionCommand::SELECT_CANDIDATE: - { - commands::Candidates candidates; - FillCandidates(&candidates); - for (size_t i = 0; i < candidates.candidate_size(); ++i) { - const commands::Candidates::Candidate &candidate = - candidates.candidate(i); - if (candidate.id() == session_command.id() && - candidate.annotation().has_shortcut()) { - ::chewing_handle_Default( - context_, candidate.annotation().shortcut()[0]); - consumed = true; - } - } - } - break; - case commands::SessionCommand::GET_STATUS: - // Do nothing here. - consumed = true; - break; - default: - // do nothing - // Following commands are ignored: - // HIGHLIGHT_CANDIDATE, SWITCH_INPUT_MODE, - // SELECT_CANDIDATE_AND_FORWARD, CONVERT_REVERSE, UNDO. - break; - } - - command->mutable_output()->set_consumed(consumed); - FillOutput(command); - return true; -} - -void Session::ReloadConfig() { - last_command_time_ = Util::GetTime(); - ResetConfig(); -} - -void Session::set_client_capability(const commands::Capability &capability) { - // Do nothing. Capability does not make sense with the current chewing. 
-} - -void Session::set_application_info( - const commands::ApplicationInfo &application_info) { - application_info_.CopyFrom(application_info); -} - -const commands::ApplicationInfo &Session::application_info() const { - return application_info_; -} - -uint64 Session::create_session_time() const { - return create_session_time_; -} - -uint64 Session::last_command_time() const { - return last_command_time_; -} - -#ifdef OS_CHROMEOS -void Session::UpdateConfig(const config::ChewingConfig &config) { - config::Config mozc_config; - mozc_config.mutable_chewing_config()->MergeFrom(config); - config::ConfigHandler::SetConfig(mozc_config); - g_last_config_updated = Util::GetTime(); -} -#endif - -} // namespace chewing -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/chewing/session.h mozc-1.11.1522.102/languages/chewing/session.h --- mozc-1.11.1502.102/languages/chewing/session.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/session.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,110 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// The Session class to wrap libchewing. - -#ifndef MOZC_LANGUAGES_CHEWING_SESSION_H_ -#define MOZC_LANGUAGES_CHEWING_SESSION_H_ - -#include - -#include "base/base.h" -#include "session/commands.pb.h" -#include "session/session_interface.h" - -namespace mozc { -namespace chewing { -class Session: public mozc::session::SessionInterface { - public: - Session(); - ~Session(); - - enum State { - PRECOMPOSITION = 0, - IN_CONVERSION = 1, - }; - - virtual bool SendKey(commands::Command *command); - - // Check if the input key event will be consumed by the session. - virtual bool TestSendKey(commands::Command *command); - - // Perform the SEND_COMMAND command defined commands.proto. - virtual bool SendCommand(commands::Command *command); - - virtual void ReloadConfig(); - - // Set client capability for this session. Used by unittest. 
- virtual void set_client_capability(const commands::Capability &capability); - - // Set application information for this session. - virtual void set_application_info( - const commands::ApplicationInfo &application_info); - - // Get application information - virtual const commands::ApplicationInfo &application_info() const; - - // Return the time when this instance was created. - virtual uint64 create_session_time() const; - - // return 0 (default value) if no command is executed in this session. - virtual uint64 last_command_time() const; - -#ifdef OS_CHROMEOS - // Update the config by bypassing session layer for Chrome OS - static void UpdateConfig(const config::ChewingConfig &config); -#endif - - private: - // Fill the candidates with the current context. - void FillCandidates(commands::Candidates *candidates); - - // Fill the output with the current context. This does not update - // 'consumed' field. The caller has the responsibility to fill it - // before calling this method. - void FillOutput(commands::Command *command); - - // Set configurations. - void ResetConfig(); - - // Throw the existing chewing context and create another one again - // to clear context completely. - void RenewContext(); - - ChewingContext *context_; - State state_; - commands::ApplicationInfo application_info_; - uint64 create_session_time_; - uint64 last_command_time_; - uint64 last_config_updated_; -}; - -} // namespace session -} // namespace mozc -#endif // MOZC_LANGUAGES_CHEWING_SESSION_H_ diff -Nru mozc-1.11.1502.102/languages/chewing/typical_chewing_key_sequences.txt mozc-1.11.1522.102/languages/chewing/typical_chewing_key_sequences.txt --- mozc-1.11.1502.102/languages/chewing/typical_chewing_key_sequences.txt 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/typical_chewing_key_sequences.txt 1970-01-01 00:00:00.000000000 +0000 @@ -1,4 +0,0 @@ -(w, no_mods) (l, no_mods) (space, no_mods) (w, no_mods) (l, no_mods) (space, no_mods) (Return, no_mods) (w, no_mods) (l, no_mods) (space, no_mods) (Return, no_mods) -(w, no_mods) (l, no_mods) (space, no_mods) (w, no_mods) (l, no_mods) (space, no_mods) (space, no_mods) (space, no_mods) (3, no_mods) (Return, no_mods) -(g, no_mods) (k, no_mods) (space, no_mods) (space, no_mods) (plus, CONTROL) (plus, CONTROL) -(t, CONTROL) diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/config_updater.cc mozc-1.11.1522.102/languages/chewing/unix/ibus/config_updater.cc --- mozc-1.11.1502.102/languages/chewing/unix/ibus/config_updater.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/config_updater.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,182 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/chewing/unix/ibus/config_updater.h" - -#include "base/base.h" -#include "base/logging.h" -#include "base/protobuf/descriptor.h" -#include "base/protobuf/message.h" -#include "base/singleton.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/chewing/session.h" -#include "unix/ibus/config_util.h" - -using mozc::config::ChewingConfig; - -namespace mozc { -namespace chewing { -#ifdef OS_CHROMEOS -namespace { -const char kChewingSectionName[] = "engine/Chewing"; - -const char kKeyboardTypeName[] = "KBType"; -const char kSelectionKeysName[] = "selKeys"; -const char kHsuSelectionKeysTypeName[] = "hsuSelKeyType"; -} // namespace - -ConfigUpdater::ConfigUpdater() { - name_to_field_["autoShiftCur"] = "automatic_shift_cursor"; - name_to_field_["addPhraseDirection"] = "add_phrase_direction"; - name_to_field_["easySymbolInput"] = "easy_symbol_input"; - name_to_field_["escCleanAllBuf"] = "escape_cleans_all_buffer"; - name_to_field_["forceLowercaseEnglish"] = "force_lowercase_english"; - name_to_field_["plainZhuyin"] = "plain_zhuyin"; - name_to_field_["phraseChoiceRearward"] = "phrase_choice_rearward"; - name_to_field_["spaceAsSelection"] = "space_as_selection"; - name_to_field_["maxChiSymbolLen"] = "maximum_chinese_character_length"; - name_to_field_["candPerPage"] = "candidates_per_page"; - name_to_field_["KBType"] = "keyboard_type"; - name_to_field_["selKeys"] = "selection_keys"; - name_to_field_["hsuSelKeyType"] = "hsu_selection_keys"; - - name_to_keyboard_type_["default"] = ChewingConfig::DEFAULT; - name_to_keyboard_type_["hsu"] = ChewingConfig::HSU; - name_to_keyboard_type_["ibm"] = ChewingConfig::IBM; - name_to_keyboard_type_["gin_yieh"] = ChewingConfig::GIN_YIEH; - name_to_keyboard_type_["eten"] = ChewingConfig::ETEN; - name_to_keyboard_type_["eten26"] = ChewingConfig::ETEN26; - name_to_keyboard_type_["dvorak"] = ChewingConfig::DVORAK; - name_to_keyboard_type_["dvorak_hsu"] = ChewingConfig::DVORAK_HSU; - name_to_keyboard_type_["dachen_26"] = ChewingConfig::DACHEN_26; - name_to_keyboard_type_["hanyu"] = ChewingConfig::HANYU; - - name_to_selection_keys_["1234567890"] = ChewingConfig::SELECTION_1234567890; - name_to_selection_keys_["asdfghjkl;"] = ChewingConfig::SELECTION_asdfghjkl; - name_to_selection_keys_["asdfzxcv89"] = ChewingConfig::SELECTION_asdfzxcv89; - name_to_selection_keys_["asdfjkl789"] = ChewingConfig::SELECTION_asdfjkl789; - name_to_selection_keys_["aoeu;qjkix"] = ChewingConfig::SELECTION_aoeuqjkix; - name_to_selection_keys_["aoeuhtnsid"] = ChewingConfig::SELECTION_aoeuhtnsid; - name_to_selection_keys_["aoeuidhtns"] = ChewingConfig::SELECTION_aoeuidhtns; - name_to_selection_keys_["1234qweras"] = ChewingConfig::SELECTION_1234qweras; - - 
name_to_hsu_keys_[1] = ChewingConfig::HSU_asdfjkl789; - name_to_hsu_keys_[2] = ChewingConfig::HSU_asdfzxcv89; -} - -void ConfigUpdater::ConfigValueChanged(IBusConfig *config, - const gchar *section, - const gchar *name, - GVariant *value, - gpointer user_data) { - Singleton::get()->UpdateConfig(section, name, value); -} - -void ConfigUpdater::UpdateConfig(const gchar *section, - const gchar *name, - GVariant *value) { - if (!section || !name || !value) { - return; - } - - if (g_strcmp0(section, kChewingSectionName) != 0) { - return; - } - - config::Config mozc_config; - config::ConfigHandler::GetConfig(&mozc_config); - config::ChewingConfig config; - config.CopyFrom(mozc_config.chewing_config()); - - if (g_strcmp0(name, kKeyboardTypeName) == 0) { - const gchar *string_value = NULL; - if (!ibus::ConfigUtil::GetString(value, &string_value) || - string_value == NULL) { - LOG(ERROR) << "Type mismatch: keyboard type is expected to be a string"; - return; - } - map::const_iterator it = - name_to_keyboard_type_.find(string_value); - if (it == name_to_keyboard_type_.end()) { - LOG(ERROR) << "Cannot find a valid keyboard type for " << string_value; - return; - } - config.set_keyboard_type(it->second); - } else if (g_strcmp0(name, kSelectionKeysName) == 0) { - const gchar *string_value = NULL; - if (!ibus::ConfigUtil::GetString(value, &string_value) || - string_value == NULL) { - LOG(ERROR) << "Type mismatch: selection keys is expected to be a string"; - return; - } - map::const_iterator it = - name_to_selection_keys_.find(string_value); - if (it == name_to_selection_keys_.end()) { - LOG(ERROR) << "Cannot find a valid selection keys for " << string_value; - return; - } - config.set_selection_keys(it->second); - } else if (g_strcmp0(name, kHsuSelectionKeysTypeName) == 0) { - gint int_value = 0; - if (!ibus::ConfigUtil::GetInteger(value, &int_value)) { - LOG(ERROR) << "Type mismatch: hsu keys is expected to be an int"; - return; - } - map::const_iterator it = - name_to_hsu_keys_.find(int_value); - if (it == name_to_hsu_keys_.end()) { - LOG(ERROR) << "Cannot find a valid hsu keys for " << int_value; - return; - } - config.set_hsu_selection_keys(it->second); - } else { - ibus::ConfigUtil::SetFieldForName(name_to_field_[name], value, &config); - } - - Session::UpdateConfig(config); -} - -const map& ConfigUpdater::name_to_field() { - return name_to_field_; -} - -void ConfigUpdater::InitConfig(IBusConfig *config) { - // Initialize the mozc config with the config loaded from ibus-memconf, which - // is the primary config storage on Chrome OS. - ibus::ConfigUtil::InitConfig( - config, - kChewingSectionName, - Singleton::get()->name_to_field()); -} - -#endif // OS_CHROMEOS - -} // namespace chewing -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/config_updater.h mozc-1.11.1522.102/languages/chewing/unix/ibus/config_updater.h --- mozc-1.11.1502.102/languages/chewing/unix/ibus/config_updater.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/config_updater.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,73 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
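For readers skimming the removal above: config_updater.cc was the Chrome OS-only glue that listened for ibus-memconf changes under the "engine/Chewing" section and copied them into mozc's ChewingConfig protobuf, translating the string- and integer-valued keys (KBType, selKeys, hsuSelKeyType) into enum fields and passing everything else through a name-to-field map. Below is a minimal, self-contained sketch of that lookup pattern; the ChewingSettings struct and enum values are simplified stand-ins for illustration, not the mozc protobuf API.

```cpp
// Sketch of the key-name -> enum translation the removed ConfigUpdater
// performed. All types below are simplified stand-ins; the real code wrote
// into the config::ChewingConfig protobuf instead.
#include <iostream>
#include <map>
#include <string>

enum KeyboardType { DEFAULT, HSU, IBM, GIN_YIEH, ETEN, ETEN26, DVORAK };

struct ChewingSettings {               // stand-in for ChewingConfig
  KeyboardType keyboard_type = DEFAULT;
  bool easy_symbol_input = false;
};

// Applies one (name, value) pair coming from the "engine/Chewing" section.
bool ApplySetting(const std::string& name, const std::string& value,
                  ChewingSettings* settings) {
  static const std::map<std::string, KeyboardType> kKeyboardTypes = {
      {"default", DEFAULT},   {"hsu", HSU},   {"ibm", IBM},
      {"gin_yieh", GIN_YIEH}, {"eten", ETEN}, {"eten26", ETEN26},
      {"dvorak", DVORAK}};
  if (name == "KBType") {              // enum-valued key needs translation
    const auto it = kKeyboardTypes.find(value);
    if (it == kKeyboardTypes.end()) return false;  // unknown keyboard type
    settings->keyboard_type = it->second;
    return true;
  }
  if (name == "easySymbolInput") {     // plain boolean key
    settings->easy_symbol_input = (value == "true");
    return true;
  }
  return false;                        // unhandled key name
}

int main() {
  ChewingSettings settings;
  ApplySetting("KBType", "hsu", &settings);
  std::cout << "keyboard_type=" << settings.keyboard_type << std::endl;
  return 0;
}
```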
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// We don't use the config updating logic for Chrome OS in mozc_engine.cc. -// Rather we use another one and invoke it from our main.cc. - -#ifndef MOZC_LANGUAGES_CHEWING_UNIX_IBUS_CONFIG_UPDATER_H_ -#define MOZC_LANGUAGES_CHEWING_UNIX_IBUS_CONFIG_UPDATER_H_ - -#include -#include -#include - -#include "config/config.pb.h" - -namespace mozc { -namespace chewing { -#ifdef OS_CHROMEOS -class ConfigUpdater { - public: - ConfigUpdater(); - - static void ConfigValueChanged(IBusConfig *config, - const gchar *section, - const gchar *name, - GVariant *value, - gpointer user_data); - void UpdateConfig(const gchar *section, - const gchar *name, - GVariant *value); - - // Initializes mozc chewing config. - static void InitConfig(IBusConfig *config); - - private: - const map& name_to_field(); - - map name_to_field_; - map name_to_keyboard_type_; - map name_to_selection_keys_; - map name_to_hsu_keys_; -}; -#endif // OS_CHROMEOS -} // namespace chewing -} // namespace mozc - -#endif // MOZC_LANGUAGES_CHEWING_UNIX_IBUS_CONFIG_UPDATER_H_ diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/main.cc mozc-1.11.1522.102/languages/chewing/unix/ibus/main.cc --- mozc-1.11.1502.102/languages/chewing/unix/ibus/main.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/main.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,156 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include - -#include "base/base.h" -#include "base/logging.h" -#include "base/version.h" -#include "config/config_handler.h" -#include "languages/chewing/chewing_session_factory.h" -#include "languages/chewing/unix/ibus/config_updater.h" -#include "languages/chewing/unix/ibus/main.h" -#include "session/session_factory_manager.h" -#include "unix/ibus/config_util.h" -#include "unix/ibus/mozc_engine.h" -#include "unix/ibus/path_util.h" - -#ifdef OS_CHROMEOS -#include "languages/chewing/session.h" -#endif // OS_CHROMEOS - -DEFINE_bool(ibus, false, "The engine is started by ibus-daemon"); - -namespace { - -IBusBus *g_bus = NULL; -#ifdef OS_CHROMEOS -// We use the ibus configuration daemon only on Chromium OS. -IBusConfig *g_config = NULL; -#endif - -#ifndef OS_CHROMEOS -void EnableVerboseLog() { - const int kDefaultVerboseLevel = 1; - if (mozc::Logging::GetVerboseLevel() < kDefaultVerboseLevel) { - mozc::Logging::SetVerboseLevel(kDefaultVerboseLevel); - } -} -#endif - -// Creates a IBusComponent object and add engine(s) to the object. -IBusComponent *GetIBusComponent() { - IBusComponent *component = ibus_component_new( - kComponentName, - kComponentDescription, - mozc::Version::GetMozcVersion().c_str(), - kComponentLicense, - kComponentAuthor, - kComponentHomepage, - "", - kComponentTextdomain); - const string icon_path = mozc::ibus::GetIconPath(kEngineIcon); - for (size_t i = 0; i < kEngineArrayLen; ++i) { - ibus_component_add_engine(component, - ibus_engine_desc_new(kEngineNameArray[i], - kEngineLongnameArray[i], - kEngineDescription, - kEngineLanguage, - kComponentLicense, - kComponentAuthor, - icon_path.c_str(), - kEngineLayoutArray[i])); - } - return component; -} - -// Initializes ibus components and adds Mozc engine. 
-void InitIBusComponent(bool executed_by_ibus_daemon) { - g_bus = ibus_bus_new(); - g_signal_connect(g_bus, - "disconnected", - G_CALLBACK(mozc::ibus::MozcEngine::Disconnected), - NULL); - -#ifdef OS_CHROMEOS - g_config = ibus_bus_get_config(g_bus); - g_object_ref_sink(g_config); - g_signal_connect(g_config, - "value-changed", - G_CALLBACK(mozc::chewing::ConfigUpdater::ConfigValueChanged), - NULL); -#endif - - IBusComponent *component = GetIBusComponent(); - IBusFactory *factory = ibus_factory_new(ibus_bus_get_connection(g_bus)); - GList *engines = ibus_component_get_engines(component); - for (GList *p = engines; p; p = p->next) { - IBusEngineDesc *engine = reinterpret_cast(p->data); - const gchar * const engine_name = ibus_engine_desc_get_name(engine); - ibus_factory_add_engine( - factory, engine_name, mozc::ibus::MozcEngine::GetType()); - } - - if (executed_by_ibus_daemon) { - ibus_bus_request_name(g_bus, kComponentName, 0); - } else { - ibus_bus_register_component(g_bus, component); - } - g_object_unref(component); -} - -} // namespace - -int main(gint argc, gchar **argv) { - InitGoogle(argv[0], &argc, &argv, true); -#ifdef OS_CHROMEOS - mozc::chewing::ChewingSessionFactory session_factory; - mozc::session::SessionFactoryManager::SetSessionFactory(&session_factory); -#endif // OS_CHROMEOS - ibus_init(); - InitIBusComponent(FLAGS_ibus); -#ifdef OS_CHROMEOS - // On Chrome OS, mozc does not store the config data to a local file. - mozc::config::ConfigHandler::SetConfigFileName("memory://chewing_config.1.db"); - mozc::chewing::ConfigUpdater::InitConfig(g_config); -#else -#ifndef NO_LOGGING - EnableVerboseLog(); -#endif // NO_LOGGING -#endif // OS_CHROMEOS - ibus_main(); - -#ifdef OS_CHROMEOS - if (g_config) { - g_object_unref(g_config); - } -#endif - - return 0; -} diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/main.h mozc-1.11.1522.102/languages/chewing/unix/ibus/main.h --- mozc-1.11.1502.102/languages/chewing/unix/ibus/main.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/main.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// This file is generated by a script at first but now maintained manually. - -#ifndef MOZC_LANGUAGES_CHEWING_UNIX_IBUS_MAIN_H_ -#define MOZC_LANGUAGES_CHEWING_UNIX_IBUS_MAIN_H_ -namespace { -const char kComponentVersion[] = "0.0.0.0"; -const char kComponentName[] = "com.google.IBus.Chewing"; -const char kComponentLicense[] = "New BSD"; -const char kComponentExec[] = "/usr/libexec/ibus-engine-mozc-chewing --ibus"; -const char kComponentTextdomain[] = "ibus-mozc-chewing"; -const char kComponentAuthor[] = "Google Inc."; -const char kComponentHomepage[] = "http://code.google.com/p/mozc/"; -const char kComponentDescription[] = "Mozc Chewing Component"; -const char kEngineRank[] = "0"; -const char kEngineDescription[] = "Mozc Chewing (Chewing Input Method)"; -const char kEngineLanguage[] = "zh_TW"; -const char kEngineIcon[] = "/usr/share/ibus-mozc/product_icon.png"; -const char* kEngineLayoutArray[] = { -"us", -}; -const char* kEngineNameArray[] = { -"mozc-chewing", -}; -const char* kEngineLongnameArray[] = { -"Mozc Chewing (Chewing)", -}; -const size_t kEngineArrayLen = 1; -} // namespace -#endif // MOZC_LANGUAGES_CHEWING_UNIX_IBUS_MAIN_H_ diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/mozc-chewing.xml mozc-1.11.1522.102/languages/chewing/unix/ibus/mozc-chewing.xml --- mozc-1.11.1502.102/languages/chewing/unix/ibus/mozc-chewing.xml 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/mozc-chewing.xml 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ - - - 0.0.0.0 - com.google.IBus.Chewing - New BSD - /usr/libexec/ibus-engine-mozc-chewing --ibus - ibus-mozc-chewing - Google Inc. - http://code.google.com/p/mozc/ - Mozc Chewing Component - - - 0 - Mozc Chewing (Chewing Input Method) - zh-TW - /usr/share/ibus-mozc/product_icon.png - us - mozc-chewing - Mozc Chewing (Chewing) - - - diff -Nru mozc-1.11.1502.102/languages/chewing/unix/ibus/mozc_engine_property.cc mozc-1.11.1522.102/languages/chewing/unix/ibus/mozc_engine_property.cc --- mozc-1.11.1502.102/languages/chewing/unix/ibus/mozc_engine_property.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/chewing/unix/ibus/mozc_engine_property.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,84 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "unix/ibus/mozc_engine_property.h" - -#include "base/base.h" -#include "session/commands.pb.h" - -namespace mozc { -namespace ibus { - -namespace { -// The list of properties used in ibus-mozc. -const MozcEngineProperty kMozcEnginePropertiesArray[] = { - { - commands::HALF_ASCII, - "CompositionMode.English", - "English", - "A", - "direct.png", - }, - // We use 'HIRAGANA' mode for the chewing Chinese input mode. - // TODO(mukai): add CHEWING mode and fix here. - { - commands::HIRAGANA, - "CompositionMode.Chinese", - "Chinese", - "\xe9\x85\xB7", // "酷" U+9177 "chewing" kanji - "hiragana.png", - }, - { - commands::FULL_ASCII, - "CompositionMode.WideLatin", - "Full-width English", - "\xef\xbc\xa1", // Full width ASCII letter A - "alpha_full.png", - }, -}; -} // namespace - -const MozcEngineProperty *kMozcEngineProperties = - &kMozcEnginePropertiesArray[0]; -// The IMEOff state is not available in Chewing. 
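The mozc_engine_property.cc removal above defined the language-bar entries the Chewing engine exposed to ibus (direct English, Chinese, full-width English), each pairing a composition mode with a property key, label, and icon, with no IME-off entry. As a rough illustration of how such a table is consumed, here is a small sketch that looks up the label for the active mode; the struct and mode enum are simplified stand-ins, not mozc's commands.proto types.

```cpp
// Minimal sketch of a composition-mode property table and a label lookup.
// CompositionMode and EngineProperty are illustrative stand-ins only.
#include <cstddef>
#include <iostream>

enum CompositionMode { HALF_ASCII, HIRAGANA, FULL_ASCII };

struct EngineProperty {
  CompositionMode mode;
  const char* key;    // property key reported to ibus
  const char* label;  // text shown in the language bar
};

const EngineProperty kProperties[] = {
    {HALF_ASCII, "CompositionMode.English", "A"},
    {HIRAGANA, "CompositionMode.Chinese", "\xE9\x85\xB7"},      // "酷"
    {FULL_ASCII, "CompositionMode.WideLatin", "\xEF\xBC\xA1"},  // "Ａ"
};

const char* LabelForMode(CompositionMode mode) {
  for (size_t i = 0; i < sizeof(kProperties) / sizeof(kProperties[0]); ++i) {
    if (kProperties[i].mode == mode) return kProperties[i].label;
  }
  return "?";  // no IME-off entry exists, mirroring the removed table
}

int main() {
  std::cout << LabelForMode(HIRAGANA) << std::endl;
  return 0;
}
```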
-const MozcEngineProperty *kMozcEnginePropertyIMEOffState = NULL; -const size_t kMozcEnginePropertiesSize = arraysize(kMozcEnginePropertiesArray); - -const commands::CompositionMode kMozcEngineInitialCompositionMode = - commands::HIRAGANA; - -const MozcEngineSwitchProperty *kMozcEngineSwitchProperties = NULL; -const size_t kMozcEngineSwitchPropertiesSize = 0; - -const MozcEngineToolProperty *kMozcEngineToolProperties = NULL; -const size_t kMozcEngineToolPropertiesSize = 0; - -const unsigned int kPageSize = 10; -} // namespace ibus -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/hangul/session_test.cc mozc-1.11.1522.102/languages/hangul/session_test.cc --- mozc-1.11.1502.102/languages/hangul/session_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/hangul/session_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -381,7 +381,7 @@ const int kCandidatesPerPage = 10; // TODO(nona): load from ibus - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kCandidates); ++i) { + for (size_t i = 0; i < arraysize(kCandidates); ++i) { EXPECT_EQ(kCandidates[i], GetNthCandidate(command, i % kCandidatesPerPage)); EXPECT_TRUE(SendSpecialKey(commands::KeyEvent::RIGHT, session_, &command)); } @@ -823,7 +823,7 @@ "\xE7\x96\x8F\xE8\xA8\xBB" // "疏註" }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kCandidates); ++i) { + for (size_t i = 0; i < arraysize(kCandidates); ++i) { EXPECT_TRUE(SendKey("t", session_, &command)); EXPECT_TRUE(SendKey("h", session_, &command)); EXPECT_TRUE(SendKey("w", session_, &command)); @@ -1039,7 +1039,7 @@ commands::KeyEvent::NUMPAD9, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kNumPadKeys); ++i) { + for (size_t i = 0; i < arraysize(kNumPadKeys); ++i) { EXPECT_TRUE(SendKey("e", session_, &command)); EXPECT_TRUE(SendKey("k", session_, &command)); EXPECT_TRUE(SendSpecialKey(commands::KeyEvent::HANJA, session_, &command)); diff -Nru mozc-1.11.1502.102/languages/hangul/unix/ibus/mozc_engine_property.cc mozc-1.11.1522.102/languages/hangul/unix/ibus/mozc_engine_property.cc --- mozc-1.11.1502.102/languages/hangul/unix/ibus/mozc_engine_property.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/hangul/unix/ibus/mozc_engine_property.cc 2013-08-28 05:26:12.000000000 +0000 @@ -67,9 +67,6 @@ const commands::CompositionMode kMozcEngineInitialCompositionMode = commands::HIRAGANA; -const MozcEngineSwitchProperty *kMozcEngineSwitchProperties = NULL; -const size_t kMozcEngineSwitchPropertiesSize = 0; - const MozcEngineToolProperty *kMozcEngineToolProperties = NULL; const size_t kMozcEngineToolPropertiesSize = 0; diff -Nru mozc-1.11.1502.102/languages/pinyin/configurable_keymap.cc mozc-1.11.1522.102/languages/pinyin/configurable_keymap.cc --- mozc-1.11.1502.102/languages/pinyin/configurable_keymap.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/configurable_keymap.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,128 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/configurable_keymap.h" - -#include - -#include "base/base.h" -#include "base/logging.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "session/commands.pb.h" -#include "session/key_event_util.h" - -namespace mozc { -using commands::KeyEvent; - -namespace pinyin { -namespace keymap { - -namespace { -const char kEnglishModeSpecialKey = 'v'; -} // namespace - -bool ConfigurableKeymap::GetCommand(const KeyEvent &key_event, - ConverterState state, - KeyCommand *key_command) { - DCHECK(key_command); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - const config::PinyinConfig &pinyin_config = GET_CONFIG(pinyin_config); - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - DCHECK(!KeyEventUtil::HasCaps(modifiers)); - - // TODO(hsumita): Refactoring these codes. 
(There are many nested block) - - if (state == INACTIVE) { - if (!pinyin_config.double_pinyin()) { - if (key_event.has_key_code() && modifiers == 0 && - key_event.key_code() == kEnglishModeSpecialKey) { - *key_command = TURN_ON_ENGLISH_MODE; - return true; - } - } - } else { // state == ACTIVE - if (pinyin_config.select_with_shift()) { - if (!key_event.has_key_code() && !key_event.has_special_key() && - KeyEventUtil::IsShift(modifiers)) { - if (modifiers & KeyEvent::LEFT_SHIFT) { - *key_command = SELECT_SECOND_CANDIDATE; - return true; - } - if (modifiers & KeyEvent::RIGHT_SHIFT) { - *key_command = SELECT_THIRD_CANDIDATE; - return true; - } - } - } - - if (pinyin_config.paging_with_minus_equal()) { - if (key_event.has_key_code() && modifiers == 0) { - const char key_code = key_event.key_code(); - if (key_code == '-') { - *key_command = FOCUS_CANDIDATE_PREV_PAGE; - return true; - } - if (key_code == '=') { - *key_command = FOCUS_CANDIDATE_NEXT_PAGE; - return true; - } - } - } - - if (pinyin_config.paging_with_comma_period()) { - if (key_event.has_key_code() && modifiers == 0) { - const char key_code = key_event.key_code(); - if (key_code == ',') { - *key_command = FOCUS_CANDIDATE_PREV_PAGE; - return true; - } - if (key_code == '.') { - *key_command = FOCUS_CANDIDATE_NEXT_PAGE; - return true; - } - } - } - - if (pinyin_config.auto_commit()) { - if (key_event.has_key_code() && ispunct(key_event.key_code()) && - modifiers == 0) { - *key_command = AUTO_COMMIT; - return true; - } - } - } - - return false; -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/configurable_keymap.h mozc-1.11.1522.102/languages/pinyin/configurable_keymap.h --- mozc-1.11.1502.102/languages/pinyin/configurable_keymap.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/configurable_keymap.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
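configurable_keymap.cc, removed above, layered user preferences on top of the default pinyin keymap: 'v' entered English mode only when double pinyin was off, left/right Shift picked the second/third candidate when select_with_shift was set, '-'/'=' and ','/'.' paged candidates when the corresponding options were on, and punctuation auto-committed when auto_commit was enabled. A condensed sketch of that config-gated dispatch follows; PinyinPrefs and KeyCommand here are simplified stand-ins rather than the mozc config and keymap types.

```cpp
// Sketch of config-dependent key dispatch while the converter is active.
// PinyinPrefs and KeyCommand are illustrative stand-ins, not mozc types.
#include <cctype>
#include <iostream>

enum KeyCommand {
  NONE,
  FOCUS_CANDIDATE_PREV_PAGE,
  FOCUS_CANDIDATE_NEXT_PAGE,
  AUTO_COMMIT,
};

struct PinyinPrefs {
  bool paging_with_minus_equal = true;
  bool paging_with_comma_period = false;
  bool auto_commit = false;
};

// Returns the command bound to an unmodified printable key, or NONE so the
// caller can fall through to the default keymap (as the removed code did).
KeyCommand GetActiveStateCommand(char key, const PinyinPrefs& prefs) {
  if (prefs.paging_with_minus_equal) {
    if (key == '-') return FOCUS_CANDIDATE_PREV_PAGE;
    if (key == '=') return FOCUS_CANDIDATE_NEXT_PAGE;
  }
  if (prefs.paging_with_comma_period) {
    if (key == ',') return FOCUS_CANDIDATE_PREV_PAGE;
    if (key == '.') return FOCUS_CANDIDATE_NEXT_PAGE;
  }
  if (prefs.auto_commit && std::ispunct(static_cast<unsigned char>(key))) {
    return AUTO_COMMIT;
  }
  return NONE;
}

int main() {
  PinyinPrefs prefs;
  std::cout << GetActiveStateCommand('=', prefs) << std::endl;  // next page
  return 0;
}
```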
- -#ifndef MOZC_LANGUAGES_PINYIN_CONFIGURABLE_KEYMAP_H_ -#define MOZC_LANGUAGES_PINYIN_CONFIGURABLE_KEYMAP_H_ - -#include "base/port.h" -#include "languages/pinyin/pinyin_constant.h" - -namespace mozc { -namespace commands { -class KeyEvent; -} // namespace commands - -namespace pinyin { -namespace keymap { - -class ConfigurableKeymap { - public: - // Parses key_event and gets command. This method does not handle CapsLock or - // Numlock keys. These keys should be removed before calling this method. - static bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command); - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(ConfigurableKeymap); -}; - -} // namespace keymap -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_CONFIGURABLE_KEYMAP_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/configurable_keymap_test.cc mozc-1.11.1522.102/languages/pinyin/configurable_keymap_test.cc --- mozc-1.11.1502.102/languages/pinyin/configurable_keymap_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/configurable_keymap_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,352 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include "languages/pinyin/configurable_keymap.h" - -#include - -#include "base/system_util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "session/commands.pb.h" -#include "session/key_parser.h" -#include "testing/base/public/googletest.h" -#include "testing/base/public/gunit.h" - -DECLARE_string(test_tmpdir); - -namespace mozc { -namespace pinyin { -namespace keymap { - -namespace { -bool GetKeyCommand(const string &key_string, ConverterState state, - KeyCommand *key_command) { - commands::KeyEvent key_event; - EXPECT_TRUE(KeyParser::ParseKey(key_string, &key_event)); - return ConfigurableKeymap::GetCommand(key_event, state, key_command); -} -} // namespace - -class ConfigurableKeymapTest : public testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } - - virtual void TearDown() { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } -}; - -TEST_F(ConfigurableKeymapTest, DoublePinyin) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - - KeyCommand key_command; - - { - config.mutable_pinyin_config()->set_double_pinyin(true); - config::ConfigHandler::SetConfig(config); - - ASSERT_FALSE(GetKeyCommand("v", ACTIVE, &key_command)); - ASSERT_FALSE(GetKeyCommand("v", INACTIVE, &key_command)); - } - - { - config.mutable_pinyin_config()->set_double_pinyin(false); - config::ConfigHandler::SetConfig(config); - - ASSERT_FALSE(GetKeyCommand("v", ACTIVE, &key_command)); - ASSERT_TRUE(GetKeyCommand("v", INACTIVE, &key_command)); - EXPECT_EQ(TURN_ON_ENGLISH_MODE, key_command); - } -} - -TEST_F(ConfigurableKeymapTest, SelectWithShift) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - - { - config.mutable_pinyin_config()->set_select_with_shift(true); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - ASSERT_TRUE(GetKeyCommand("LeftShift", ACTIVE, &key_command)); - EXPECT_EQ(SELECT_SECOND_CANDIDATE, key_command); - ASSERT_TRUE(GetKeyCommand("RightShift", ACTIVE, &key_command)); - EXPECT_EQ(SELECT_THIRD_CANDIDATE, key_command); - - EXPECT_FALSE(GetKeyCommand("Alt", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt LeftShift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl LeftShift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl LeftShift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("LeftShift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt LeftShift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl LeftShift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl LeftShift", INACTIVE, &key_command)); - - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- EXPECT_FALSE(GetKeyCommand("LeftShift BS", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("LeftShift BS", INACTIVE, &key_command)); - } - - { - config.mutable_pinyin_config()->set_select_with_shift(false); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - EXPECT_FALSE(GetKeyCommand("Alt", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Shift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift", ACTIVE, &key_command)); - - EXPECT_FALSE(GetKeyCommand("Alt", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Shift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift", INACTIVE, &key_command)); - - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("LeftShift BS", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("LeftShift BS", INACTIVE, &key_command)); - } -} - -TEST_F(ConfigurableKeymapTest, PagingWithMinusEqual) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - - { - config.mutable_pinyin_config()->set_paging_with_minus_equal(true); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - ASSERT_TRUE(GetKeyCommand("-", ACTIVE, &key_command)); - EXPECT_EQ(FOCUS_CANDIDATE_PREV_PAGE, key_command); - - ASSERT_TRUE(GetKeyCommand("=", ACTIVE, &key_command)); - EXPECT_EQ(FOCUS_CANDIDATE_NEXT_PAGE, key_command); - - EXPECT_FALSE(GetKeyCommand("Alt =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl =", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift =", ACTIVE, &key_command)); - - EXPECT_FALSE(GetKeyCommand("Alt =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl =", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift =", INACTIVE, &key_command)); - } - - { - config.mutable_pinyin_config()->set_paging_with_minus_equal(false); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - EXPECT_FALSE(GetKeyCommand("Alt =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl =", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- EXPECT_FALSE(GetKeyCommand("Alt Ctrl =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift =", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift =", ACTIVE, &key_command)); - - EXPECT_FALSE(GetKeyCommand("Alt =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl =", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift =", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift =", INACTIVE, &key_command)); - } -} - -TEST_F(ConfigurableKeymapTest, PagingWithCommaPeriod) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - - { - config.mutable_pinyin_config()->set_paging_with_comma_period(true); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - ASSERT_TRUE(GetKeyCommand(",", ACTIVE, &key_command)); - EXPECT_EQ(FOCUS_CANDIDATE_PREV_PAGE, key_command); - - ASSERT_TRUE(GetKeyCommand(".", ACTIVE, &key_command)); - EXPECT_EQ(FOCUS_CANDIDATE_NEXT_PAGE, key_command); - - EXPECT_FALSE(GetKeyCommand("Alt .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl .", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift .", ACTIVE, &key_command)); - - EXPECT_FALSE(GetKeyCommand("Alt .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl .", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift .", INACTIVE, &key_command)); - } - - { - config.mutable_pinyin_config()->set_paging_with_comma_period(false); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - EXPECT_FALSE(GetKeyCommand("Alt .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl .", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift .", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift .", ACTIVE, &key_command)); - - EXPECT_FALSE(GetKeyCommand("Alt .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl .", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- EXPECT_FALSE(GetKeyCommand("Alt Ctrl .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift .", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift .", INACTIVE, &key_command)); - } -} - -TEST_F(ConfigurableKeymapTest, AutoCommit) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - - { - config.mutable_pinyin_config()->set_auto_commit(true); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - ASSERT_TRUE(GetKeyCommand("!", ACTIVE, &key_command)); - EXPECT_EQ(AUTO_COMMIT, key_command); - - EXPECT_FALSE(GetKeyCommand("Alt !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl !", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift !", ACTIVE, &key_command)); - - ASSERT_FALSE(GetKeyCommand("!", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl !", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift !", INACTIVE, &key_command)); - - ASSERT_FALSE(GetKeyCommand("a", ACTIVE, &key_command)); - ASSERT_FALSE(GetKeyCommand("a", INACTIVE, &key_command)); - } - - { - config.mutable_pinyin_config()->set_auto_commit(false); - config::ConfigHandler::SetConfig(config); - - KeyCommand key_command; - - ASSERT_FALSE(GetKeyCommand("!", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl !", ACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_FALSE(GetKeyCommand("Alt Ctrl !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift !", ACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift !", ACTIVE, &key_command)); - - ASSERT_FALSE(GetKeyCommand("!", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl !", INACTIVE, &key_command)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- EXPECT_FALSE(GetKeyCommand("Alt Ctrl !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Shift !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Ctrl Shift !", INACTIVE, &key_command)); - EXPECT_FALSE(GetKeyCommand("Alt Ctrl Shift !", INACTIVE, &key_command)); - - ASSERT_FALSE(GetKeyCommand("a", ACTIVE, &key_command)); - ASSERT_FALSE(GetKeyCommand("a", INACTIVE, &key_command)); - } -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/default_keymap.cc mozc-1.11.1522.102/languages/pinyin/default_keymap.cc --- mozc-1.11.1502.102/languages/pinyin/default_keymap.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/default_keymap.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,233 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/default_keymap.h" - -#include - -#include "base/base.h" -#include "base/logging.h" -#include "session/commands.pb.h" -#include "session/key_event_util.h" - -namespace mozc { -namespace pinyin { -namespace keymap { - -namespace { -const char kPunctuationModeSpecialKey = '`'; -} // namespace - -// TODO(hsumita): Investigates the behavior of "@" key when converter is active. 
- -bool DefaultKeymap::GetCommand(const commands::KeyEvent &key_event, - ConverterState state, - KeyCommand *key_command) { - DCHECK(key_command); - DCHECK(!KeyEventUtil::HasCaps(KeyEventUtil::GetModifiers(key_event))); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - if (key_event.has_key_code()) { - *key_command = ProcessKeyCode(key_event, state); - return true; - } - if (key_event.has_special_key()) { - *key_command = ProcessSpecialKey(key_event, state); - return true; - } - if (key_event.modifier_keys_size() != 0) { - *key_command = ProcessModifierKey(key_event, state); - return true; - } - - LOG(ERROR) << "There is no key_code, modifier_key or special_key"; - return false; -} - -KeyCommand DefaultKeymap::ProcessKeyCode(const commands::KeyEvent &key_event, - ConverterState state) { - DCHECK(key_event.has_key_code()); - DCHECK(!key_event.has_special_key()); - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - const char key_code = key_event.key_code(); - - if (KeyEventUtil::IsCtrlShift(modifiers) && - (key_code == 'f' || key_code == 'F')) { - return TOGGLE_SIMPLIFIED_CHINESE_MODE; - } - - if (state == INACTIVE) { - if (ispunct(key_event.key_code()) && modifiers == 0) { - if (key_event.key_code() == kPunctuationModeSpecialKey) { - return TURN_ON_PUNCTUATION_MODE; - } else { - return INSERT_PUNCTUATION; - } - } - if (KeyEventUtil::HasAlt(modifiers) || KeyEventUtil::HasCtrl(modifiers)) { - return DO_NOTHING_WITHOUT_CONSUME; - } - return INSERT; - } - DCHECK_EQ(ACTIVE, state); - - if (KeyEventUtil::HasAlt(modifiers)) { - return DO_NOTHING_WITH_CONSUME; - } - - if (isalpha(key_code)) { - if (KeyEventUtil::HasCtrl(modifiers)) { - return DO_NOTHING_WITH_CONSUME; - } - if (KeyEventUtil::IsUpperAlphabet(key_event)) { - return DO_NOTHING_WITH_CONSUME; - } - return INSERT; - } - - if (isdigit(key_code)) { - if (KeyEventUtil::HasShift(modifiers)) { - return DO_NOTHING_WITH_CONSUME; - } - if (KeyEventUtil::IsCtrl(modifiers)) { - return CLEAR_CANDIDATE_FROM_HISTORY; - } - return SELECT_CANDIDATE; - } - - return DO_NOTHING_WITH_CONSUME; -} - -KeyCommand DefaultKeymap::ProcessSpecialKey( - const commands::KeyEvent &key_event, ConverterState state) { - DCHECK(key_event.has_special_key()); - DCHECK(!key_event.has_key_code()); - - const commands::KeyEvent::SpecialKey special_key = key_event.special_key(); - - if (state == INACTIVE) { - return DO_NOTHING_WITHOUT_CONSUME; - } - DCHECK_EQ(ACTIVE, state); - - // Shift key is always ignored with special key. 
- - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - const bool has_alt = KeyEventUtil::HasAlt(modifiers); - const bool has_ctrl = KeyEventUtil::HasCtrl(modifiers); - - if (has_alt && has_ctrl) { - switch (special_key) { - case commands::KeyEvent::UP: - return MOVE_CURSOR_TO_BEGINNING; - case commands::KeyEvent::DOWN: - return MOVE_CURSOR_TO_END; - default: - return DO_NOTHING_WITH_CONSUME; - } - } else if (has_alt) { - switch (special_key) { - case commands::KeyEvent::UP: - return FOCUS_CANDIDATE_PREV_PAGE; - case commands::KeyEvent::DOWN: - return FOCUS_CANDIDATE_NEXT_PAGE; - default: - return DO_NOTHING_WITH_CONSUME; - } - } else if (has_ctrl) { - switch (special_key) { - case commands::KeyEvent::RIGHT: - return MOVE_CURSOR_RIGHT_BY_WORD; - case commands::KeyEvent::LEFT: - return MOVE_CURSOR_LEFT_BY_WORD; - case commands::KeyEvent::BACKSPACE: - return REMOVE_WORD_BEFORE; - case commands::KeyEvent::DEL: - return REMOVE_WORD_AFTER; - default: - return DO_NOTHING_WITH_CONSUME; - } - } else { - switch (special_key) { - case commands::KeyEvent::ENTER: - return COMMIT; - case commands::KeyEvent::SPACE: - return SELECT_FOCUSED_CANDIDATE; - case commands::KeyEvent::UP: - return FOCUS_CANDIDATE_PREV; - case commands::KeyEvent::DOWN: - return FOCUS_CANDIDATE_NEXT; - case commands::KeyEvent::RIGHT: - return MOVE_CURSOR_RIGHT; - case commands::KeyEvent::LEFT: - return MOVE_CURSOR_LEFT; - case commands::KeyEvent::PAGE_UP: - return FOCUS_CANDIDATE_PREV_PAGE; - case commands::KeyEvent::PAGE_DOWN: - return FOCUS_CANDIDATE_NEXT_PAGE; - case commands::KeyEvent::HOME: - return MOVE_CURSOR_TO_BEGINNING; - case commands::KeyEvent::END: - return MOVE_CURSOR_TO_END; - case commands::KeyEvent::BACKSPACE: - return REMOVE_CHAR_BEFORE; - case commands::KeyEvent::DEL: - return REMOVE_CHAR_AFTER; - case commands::KeyEvent::ESCAPE: - return CLEAR; - case commands::KeyEvent::TAB: - return FOCUS_CANDIDATE_NEXT_PAGE; - default: - return DO_NOTHING_WITH_CONSUME; - } - } - - LOG(ERROR) << "We should NOT process here."; - return DO_NOTHING_WITH_CONSUME; -} - -KeyCommand DefaultKeymap::ProcessModifierKey( - const commands::KeyEvent &key_event, ConverterState state) { - DCHECK_NE(0, key_event.modifier_keys_size()); - DCHECK(!key_event.has_special_key()); - DCHECK(!key_event.has_key_code()); - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - - if (KeyEventUtil::IsShift(modifiers)) { - return TOGGLE_DIRECT_MODE; - } - - return DO_NOTHING_WITHOUT_CONSUME; -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/default_keymap.h mozc-1.11.1522.102/languages/pinyin/default_keymap.h --- mozc-1.11.1502.102/languages/pinyin/default_keymap.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/default_keymap.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,67 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. 
nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_DEFAULT_KEYMAP_H_ -#define MOZC_LANGUAGES_PINYIN_DEFAULT_KEYMAP_H_ - -#include "base/port.h" -#include "languages/pinyin/pinyin_constant.h" - -namespace mozc { -namespace commands { -class KeyEvent; -} // namespace commands - -namespace pinyin { -namespace keymap { - -class DefaultKeymap { - public: - // Parses key_event and gets command. This method does not handle CapsLock or - // Numlock keys. These keys should be removed before calling this method. - static bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command); - - private: - static KeyCommand ProcessKeyCode(const commands::KeyEvent &key_event, - ConverterState state); - static KeyCommand ProcessSpecialKey(const commands::KeyEvent &key_event, - ConverterState state); - static KeyCommand ProcessModifierKey(const commands::KeyEvent &key_event, - ConverterState state); - - // Should never be allocated. - DISALLOW_IMPLICIT_CONSTRUCTORS(DefaultKeymap); -}; - -} // namespace keymap -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_DEFAULT_KEYMAP_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/default_keymap_test.cc mozc-1.11.1522.102/languages/pinyin/default_keymap_test.cc --- mozc-1.11.1502.102/languages/pinyin/default_keymap_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/default_keymap_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,302 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
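default_keymap.cc and its header, removed above, routed every pinyin key event through three branches (printable key code, special key, modifier-only) and then switched on the modifier combination: Ctrl+Shift+F toggled simplified Chinese, digits selected candidates while Ctrl+digit cleared one from history, and the unmodified special-key switch mapped Enter, Space, arrows, paging keys, and Escape to commit, selection, cursor, and clear commands. The test file whose removal continues below encodes its expectations as a map from key name to command; a tiny table-driven sketch in that same spirit is shown here, with KeyCommand again a simplified stand-in.

```cpp
// Table-driven sketch of the unmodified special-key bindings exercised by
// the removed default_keymap_test.cc. KeyCommand is an illustrative enum.
#include <iostream>
#include <map>
#include <string>

enum KeyCommand { COMMIT, SELECT_FOCUSED_CANDIDATE, CLEAR, DO_NOTHING };

KeyCommand LookupSpecialKey(const std::string& key) {
  static const std::map<std::string, KeyCommand> kTable = {
      {"ENTER", COMMIT},
      {"SPACE", SELECT_FOCUSED_CANDIDATE},
      {"ESCAPE", CLEAR},
  };
  const auto it = kTable.find(key);
  // Unknown special keys are consumed without effect, as in the removed code.
  return it == kTable.end() ? DO_NOTHING : it->second;
}

int main() {
  std::cout << LookupSpecialKey("ENTER") << std::endl;  // prints 0 (COMMIT)
  return 0;
}
```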
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/default_keymap.h" - -#include - -#include "languages/pinyin/keymap.h" -#include "session/commands.pb.h" -#include "session/key_parser.h" -#include "testing/base/public/googletest.h" -#include "testing/base/public/gunit.h" - -namespace mozc { -namespace pinyin { -namespace keymap { -namespace { - -const char *kSpecialKeys[] = { - "SPACE", - "ENTER", - "LEFT", - "RIGHT", - "UP", - "DOWN", - "ESCAPE", - "DEL", - "BACKSPACE", - "HOME", - "END", - "TAB", - "PAGEUP", - "PAGEDOWN", -}; - -typedef map KeyTable; - -KeyCommand GetKeyCommand(const string &key_string, ConverterState state) { - commands::KeyEvent key_event; - EXPECT_TRUE(KeyParser::ParseKey(key_string, &key_event)); - KeyCommand key_command; - EXPECT_TRUE(DefaultKeymap::GetCommand(key_event, state, &key_command)); - return key_command; -} - -void TestSpecialKey(const KeyTable &key_table, const string &modifiers) { - // Converter is active. - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kSpecialKeys); ++i) { - const string key_string = modifiers + ' ' + kSpecialKeys[i]; - SCOPED_TRACE(key_string + " (converter is active)"); - const KeyCommand key_command = GetKeyCommand(key_string, ACTIVE); - - const KeyTable::const_iterator &iter = key_table.find(kSpecialKeys[i]); - if (iter == key_table.end()) { - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, key_command); - } else { - EXPECT_EQ(iter->second, key_command); - } - } - - // Converter is NOT active. All KeyEvent should NOT be consumed. 
- for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kSpecialKeys); ++i) { - const string key_string = modifiers + ' ' + kSpecialKeys[i]; - SCOPED_TRACE(key_string + " (converter is NOT active)"); - const KeyCommand key_command = GetKeyCommand(key_string, INACTIVE); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, key_command); - } -}; - -} // namespace - -TEST(DefaultKeymapTest, AlphabetKey) { - { // Converter is active - EXPECT_EQ(INSERT, GetKeyCommand("a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("A", ACTIVE)); - EXPECT_EQ(TOGGLE_SIMPLIFIED_CHINESE_MODE, - GetKeyCommand("CTRL SHIFT f", ACTIVE)); - EXPECT_EQ(TOGGLE_SIMPLIFIED_CHINESE_MODE, - GetKeyCommand("CTRL SHIFT F", ACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("ALT a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("CTRL a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT CTRL a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT SHIFT a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("CTRL SHIFT a", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT CTRL SHIFT a", ACTIVE)); - } - - { // Converter is NOT active - EXPECT_EQ(INSERT, GetKeyCommand("a", INACTIVE)); - EXPECT_EQ(INSERT, GetKeyCommand("A", INACTIVE)); - EXPECT_EQ(TOGGLE_SIMPLIFIED_CHINESE_MODE, - GetKeyCommand("CTRL SHIFT f", INACTIVE)); - EXPECT_EQ(TOGGLE_SIMPLIFIED_CHINESE_MODE, - GetKeyCommand("CTRL SHIFT F", INACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT a", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL a", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL a", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT SHIFT a", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL SHIFT a", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL SHIFT a", INACTIVE)); - } -} - -TEST(DefaultKeymapTest, NumberKey) { - { // Converter is active - EXPECT_EQ(SELECT_CANDIDATE, GetKeyCommand("1", ACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("ALT 1", ACTIVE)); - EXPECT_EQ(CLEAR_CANDIDATE_FROM_HISTORY, GetKeyCommand("CTRL 1", ACTIVE)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("ALT CTRL 1", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("ALT SHIFT 1", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("CTRL SHIFT 1", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT CTRL SHIFT 1", ACTIVE)); - } - - { // Converter is NOT active - EXPECT_EQ(INSERT, GetKeyCommand("1", INACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, GetKeyCommand("ALT 1", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, GetKeyCommand("CTRL 1", INACTIVE)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL 1", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT SHIFT 1", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL SHIFT 1", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL SHIFT 1", INACTIVE)); - } -} - -TEST(DefaultKeymapTest, PunctuationKey) { - { // Converter is active - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("!", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("`", ACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("ALT !", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, GetKeyCommand("CTRL !", ACTIVE)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT CTRL !", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT SHIFT !", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("CTRL SHIFT !", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITH_CONSUME, - GetKeyCommand("ALT CTRL SHIFT !", ACTIVE)); - } - - { // Converter is NOT active - EXPECT_EQ(INSERT_PUNCTUATION, GetKeyCommand("!", INACTIVE)); - EXPECT_EQ(TURN_ON_PUNCTUATION_MODE, GetKeyCommand("`", INACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT !", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL !", INACTIVE)); - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL !", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT SHIFT !", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL SHIFT !", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL SHIFT !", INACTIVE)); - } -} - -TEST(DefaultKeymapTest, SpecialKeyWithoutAltAndCtrl) { - KeyTable key_table; - key_table.insert(make_pair("ENTER", COMMIT)); - key_table.insert(make_pair("SPACE", SELECT_FOCUSED_CANDIDATE)); - key_table.insert(make_pair("UP", FOCUS_CANDIDATE_PREV)); - key_table.insert(make_pair("DOWN", FOCUS_CANDIDATE_NEXT)); - key_table.insert(make_pair("RIGHT", MOVE_CURSOR_RIGHT)); - key_table.insert(make_pair("LEFT", MOVE_CURSOR_LEFT)); - key_table.insert(make_pair("PAGEUP", FOCUS_CANDIDATE_PREV_PAGE)); - key_table.insert(make_pair("PAGEDOWN", FOCUS_CANDIDATE_NEXT_PAGE)); - key_table.insert(make_pair("HOME", MOVE_CURSOR_TO_BEGINNING)); - key_table.insert(make_pair("END", MOVE_CURSOR_TO_END)); - key_table.insert(make_pair("BACKSPACE", REMOVE_CHAR_BEFORE)); - key_table.insert(make_pair("DEL", REMOVE_CHAR_AFTER)); - key_table.insert(make_pair("ESCAPE", CLEAR)); - key_table.insert(make_pair("TAB", FOCUS_CANDIDATE_NEXT_PAGE)); - - TestSpecialKey(key_table, ""); - TestSpecialKey(key_table, "SHIFT"); -} - -TEST(DefaultKeymapTest, SpecialKeyWithAlt) { - KeyTable key_table; - key_table.insert(make_pair("UP", FOCUS_CANDIDATE_PREV_PAGE)); - key_table.insert(make_pair("DOWN", FOCUS_CANDIDATE_NEXT_PAGE)); - - TestSpecialKey(key_table, "ALT"); - TestSpecialKey(key_table, "ALT SHIFT"); -} - -TEST(DefaultKeymapTest, SpecialKeyWithCtrl) { - KeyTable key_table; - key_table.insert(make_pair("RIGHT", MOVE_CURSOR_RIGHT_BY_WORD)); - key_table.insert(make_pair("LEFT", MOVE_CURSOR_LEFT_BY_WORD)); - key_table.insert(make_pair("BACKSPACE", REMOVE_WORD_BEFORE)); - key_table.insert(make_pair("DEL", REMOVE_WORD_AFTER)); - - TestSpecialKey(key_table, "CTRL"); - 
TestSpecialKey(key_table, "CTRL SHIFT"); -} - -TEST(DefaultKeymapTest, SpecialKeyWithAltAndCtrl) { - KeyTable key_table; - key_table.insert(make_pair("UP", MOVE_CURSOR_TO_BEGINNING)); - key_table.insert(make_pair("DOWN", MOVE_CURSOR_TO_END)); - - TestSpecialKey(key_table, "ALT CTRL"); - TestSpecialKey(key_table, "ALT CTRL SHIFT"); -} - -TEST(DefaultKeymapTest, ModifierKey) { - { // Converter is active - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, GetKeyCommand("ALT", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL", ACTIVE)); - EXPECT_EQ(TOGGLE_DIRECT_MODE, GetKeyCommand("SHIFT", ACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT SHIFT", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME , - GetKeyCommand("CTRL SHIFT", ACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME , - GetKeyCommand("ALT CTRL SHIFT", ACTIVE)); - } - - { // Converter is NOT active - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("CTRL", INACTIVE)); - EXPECT_EQ(TOGGLE_DIRECT_MODE, GetKeyCommand("SHIFT", INACTIVE)); - - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT CTRL", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME, - GetKeyCommand("ALT SHIFT", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME , - GetKeyCommand("CTRL SHIFT", INACTIVE)); - EXPECT_EQ(DO_NOTHING_WITHOUT_CONSUME , - GetKeyCommand("ALT CTRL SHIFT", INACTIVE)); - } -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/direct_context.cc mozc-1.11.1522.102/languages/pinyin/direct_context.cc --- mozc-1.11.1502.102/languages/pinyin/direct_context.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/direct_context.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,175 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include "languages/pinyin/direct_context.h" - -#include -#include -#include - -#include "base/logging.h" -#include "base/util.h" -#include "languages/pinyin/session_config.h" - -namespace mozc { -namespace pinyin { -namespace direct { - -namespace { -const char kInputPrefixCharacter = 'v'; -} // namespace - -DirectContext::DirectContext(const SessionConfig &session_config) - : session_config_(session_config) { -} - -DirectContext::~DirectContext() { -} - -bool DirectContext::Insert(char ch) { - const string input(1, ch); - - // TODO(hsumita): Move this logic to SessionConverter. - if (session_config_.full_width_word_mode) { - Util::HalfWidthAsciiToFullWidthAscii(input, &commit_text_); - } else { - Util::FullWidthAsciiToHalfWidthAscii(input, &commit_text_); - } - - return true; -} - -void DirectContext::Commit() { - // Does nothing since all character are commited by Insert(). -} - -void DirectContext::CommitPreedit() { - // Does nothing since all character are commited by Insert(). -} - -void DirectContext::Clear() { - ClearCommitText(); -} - -void DirectContext::ClearCommitText() { - commit_text_.clear(); -} - -bool DirectContext::MoveCursorRight() { - DLOG(ERROR) << "MoveCursorRight will not be expected to call."; - return false; -} - -bool DirectContext::MoveCursorLeft() { - DLOG(ERROR) << "MoveCursorLeft will not be expected to call."; - return false; -} - -bool DirectContext::MoveCursorRightByWord() { - DLOG(ERROR) << "MoveCursorRightByWord will not be expected to call."; - return false; -} - -bool DirectContext::MoveCursorLeftByWord() { - DLOG(ERROR) << "MoveCursorLeftByWord will not be expected to call."; - return false; -} - -bool DirectContext::MoveCursorToBeginning() { - DLOG(ERROR) << "MoveCursorToBeginning will not be expected to call."; - return false; -} - -bool DirectContext::MoveCursorToEnd() { - DLOG(ERROR) << "MoveCursorToEnd will not be expected to call."; - return false; -} - -bool DirectContext::SelectCandidate(size_t index) { - DLOG(ERROR) << "SelectCandidate will not be expected to call."; - return false; -} - -bool DirectContext::FocusCandidate(size_t index) { - DLOG(ERROR) << "FocusCandidate will not be expected to call."; - return false; -} - -bool DirectContext::ClearCandidateFromHistory(size_t index) { - // This context doesn't use history. - return true; -} - -bool DirectContext::RemoveCharBefore() { - DLOG(ERROR) << "RemoveCharBefore will not be expected to call."; - return false; -} - -bool DirectContext::RemoveCharAfter() { - DLOG(ERROR) << "RemoveCharAfter will not be expected to call."; - return false; -} - -bool DirectContext::RemoveWordBefore() { - DLOG(ERROR) << "RemoveWordBefore will not be expected to call."; - return false; -} - -bool DirectContext::RemoveWordAfter() { - DLOG(ERROR) << "RemoveWordAfter will not be expected to call."; - return false; -} - -void DirectContext::ReloadConfig() { - // Direct mode does NOT use a configuration. -} - -const string &DirectContext::commit_text() const { - return commit_text_; -} - -// There is no composition text on Direct mode. -const string &DirectContext::input_text() const { return empty_text_; } -const string &DirectContext::selected_text() const { return empty_text_; } -const string &DirectContext::conversion_text() const { return empty_text_; } -const string &DirectContext::rest_text() const { return empty_text_; } -const string &DirectContext::auxiliary_text() const { return empty_text_; } -size_t DirectContext::cursor() const { return 0; } - -// There is no candidates. 
-size_t DirectContext::focused_candidate_index() const { return 0; } -bool DirectContext::GetCandidate(size_t index, Candidate *candidate) { - return false; -} -bool DirectContext::HasCandidate(size_t index) { return false; } -size_t DirectContext::PrepareCandidates(size_t required_size) { return 0; } - -} // namespace direct -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/direct_context.h mozc-1.11.1522.102/languages/pinyin/direct_context.h --- mozc-1.11.1502.102/languages/pinyin/direct_context.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/direct_context.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,105 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Direct mode context for pinyin IME. - -#ifndef MOZC_LANGUAGES_PINYIN_DIRECT_CONTEXT_H_ -#define MOZC_LANGUAGES_PINYIN_DIRECT_CONTEXT_H_ - -#include -#include - -#include "base/port.h" -#include "languages/pinyin/pinyin_context_interface.h" - -namespace mozc { -namespace pinyin { -struct SessionConfig; - -namespace direct { - -// Direct context directly commits all of input characters except for -// some situation. If |session_config.is_full_width_word_mode()| is -// true, this context converts a input character to full width. 
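The full-width conversion mentioned in the comment above is delegated to Util::HalfWidthAsciiToFullWidthAscii in the implementation. Below is a standalone sketch of that mapping, assuming the conventional offset from ASCII 0x21-0x7E to the fullwidth forms U+FF01-U+FF5E and the usual mapping of space to U+3000; the helper names are placeholders, not the real base/util.h routines.

#include <iostream>
#include <string>

// Appends the UTF-8 encoding of a BMP code point.
static void AppendUtf8(unsigned int cp, std::string *out) {
  if (cp < 0x80) {
    out->push_back(static_cast<char>(cp));
  } else if (cp < 0x800) {
    out->push_back(static_cast<char>(0xC0 | (cp >> 6)));
    out->push_back(static_cast<char>(0x80 | (cp & 0x3F)));
  } else {
    out->push_back(static_cast<char>(0xE0 | (cp >> 12)));
    out->push_back(static_cast<char>(0x80 | ((cp >> 6) & 0x3F)));
    out->push_back(static_cast<char>(0x80 | (cp & 0x3F)));
  }
}

// Maps half-width ASCII to its full-width counterpart; other bytes pass through.
std::string HalfToFullWidthAscii(const std::string &input) {
  std::string output;
  for (size_t i = 0; i < input.size(); ++i) {
    const unsigned char uc = static_cast<unsigned char>(input[i]);
    if (uc == ' ') {
      AppendUtf8(0x3000, &output);                // IDEOGRAPHIC SPACE
    } else if (uc >= 0x21 && uc <= 0x7E) {
      AppendUtf8(0xFF01 + (uc - 0x21), &output);  // FULLWIDTH '!' .. '~'
    } else {
      output.push_back(input[i]);
    }
  }
  return output;
}

int main() {
  // 'a' maps to U+FF41, i.e. the bytes "\xEF\xBD\x81" that the context tests
  // further down compare against.
  std::cout << HalfToFullWidthAscii("a") << std::endl;
  return 0;
}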
- -class DirectContext : public PinyinContextInterface { - public: - explicit DirectContext(const SessionConfig &session_config); - virtual ~DirectContext(); - - bool Insert(char ch); - void Commit(); - void CommitPreedit(); - void Clear(); - void ClearCommitText(); - - bool MoveCursorRight(); - bool MoveCursorLeft(); - bool MoveCursorRightByWord(); - bool MoveCursorLeftByWord(); - bool MoveCursorToBeginning(); - bool MoveCursorToEnd(); - - bool SelectCandidate(size_t index); - bool FocusCandidate(size_t index); - bool ClearCandidateFromHistory(size_t index); - - bool RemoveCharBefore(); - bool RemoveCharAfter(); - bool RemoveWordBefore(); - bool RemoveWordAfter(); - - void ReloadConfig(); - - const string &commit_text() const; - const string &input_text() const; - const string &selected_text() const; - const string &conversion_text() const; - const string &rest_text() const; - const string &auxiliary_text() const; - - size_t cursor() const; - size_t focused_candidate_index() const; - bool GetCandidate(size_t index, Candidate *candidates); - bool HasCandidate(size_t index); - size_t PrepareCandidates(size_t required_size); - - private: - const string empty_text_; - string commit_text_; - const SessionConfig &session_config_; - - DISALLOW_COPY_AND_ASSIGN(DirectContext); -}; - -} // namespace direct -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_DIRECT_CONTEXT_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/direct_context_test.cc mozc-1.11.1522.102/languages/pinyin/direct_context_test.cc --- mozc-1.11.1502.102/languages/pinyin/direct_context_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/direct_context_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,149 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include -#include - -#include "base/scoped_ptr.h" -#include "base/util.h" -#include "languages/pinyin/direct_context.h" -#include "languages/pinyin/session_config.h" -#include "testing/base/public/gunit.h" - -namespace mozc { -namespace pinyin { -namespace direct { - -namespace { -testing::AssertionResult CheckContext(const char *expected_commit_text_expr, - const char *actual_context_expr, - const string &expected_commit_text, - DirectContext *actual_context) { - vector error_messages; - - if (!actual_context->input_text().empty()) { - error_messages.push_back("input_text is not empty."); - } - if (!actual_context->selected_text().empty()) { - error_messages.push_back("selected_text is not empty."); - } - if (!actual_context->conversion_text().empty()) { - error_messages.push_back("conversion_text is not empty."); - } - if (!actual_context->rest_text().empty()) { - error_messages.push_back("rest_text is not empty."); - } - if (!actual_context->auxiliary_text().empty()) { - error_messages.push_back("auxiliary_text is not empty."); - } - if (actual_context->cursor() != 0) { - error_messages.push_back(Util::StringPrintf("invalid value. cursor: %d", - actual_context->cursor())); - } - if (actual_context->focused_candidate_index() != 0) { - error_messages.push_back(Util::StringPrintf( - "invalid value. focused_candidate_index: %d", - actual_context->focused_candidate_index())); - } - if (actual_context->HasCandidate(0)) { - error_messages.push_back("invalid value. there are some candidates."); - } - - if (expected_commit_text != actual_context->commit_text()) { - error_messages.push_back(Util::StringPrintf( - "commit_text is not valid.\n" - "Expected: %s\n" - "Actual: %s", - expected_commit_text.c_str(), actual_context->commit_text().c_str())); - } - - if (!error_messages.empty()) { - string error_message; - Util::JoinStrings(error_messages, "\n", &error_message); - return testing::AssertionFailure() << error_message; - } - - return testing::AssertionSuccess(); -} - -#define EXPECT_VALID_CONTEXT(expected_commit_text) \ - EXPECT_PRED_FORMAT2(CheckContext, expected_commit_text, context_.get()) -} // namespace - -class DirectContextTest : public testing::Test { - protected: - virtual void SetUp() { - session_config_.reset(new SessionConfig); - session_config_->full_width_word_mode = false; - session_config_->full_width_punctuation_mode = true; - session_config_->simplified_chinese_mode = true; - context_.reset(new DirectContext(*session_config_)); - } - - virtual void TearDown() { - } - - scoped_ptr session_config_; - scoped_ptr context_; -}; - -TEST_F(DirectContextTest, Insert) { - EXPECT_TRUE(context_->commit_text().empty()); - - context_->Insert('a'); - EXPECT_VALID_CONTEXT("a"); - - context_->Insert('b'); - EXPECT_VALID_CONTEXT("b"); - - context_->ClearCommitText(); - EXPECT_VALID_CONTEXT(""); - - context_->Insert('a'); - EXPECT_VALID_CONTEXT("a"); - - context_->Clear(); - EXPECT_VALID_CONTEXT(""); -} - -TEST_F(DirectContextTest, HalfOrFullWidthInsert) { - session_config_->full_width_word_mode = true; - context_->Insert('a'); - // "a" - EXPECT_VALID_CONTEXT("\xEF\xBD\x81"); - - session_config_->full_width_word_mode = false; - - context_->Insert('a'); - EXPECT_VALID_CONTEXT("a"); -} - -} // namespace direct -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_context.cc mozc-1.11.1522.102/languages/pinyin/english_context.cc --- mozc-1.11.1502.102/languages/pinyin/english_context.cc 2013-07-17 02:38:03.000000000 +0000 +++ 
mozc-1.11.1522.102/languages/pinyin/english_context.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,271 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/english_context.h" - -#include -#include -#include - -#include "base/logging.h" -#include "base/util.h" -#include "languages/pinyin/english_dictionary_factory.h" -#include "languages/pinyin/session_config.h" - -namespace mozc { -namespace pinyin { -namespace english { - -namespace { -const char kInputPrefixCharacter = 'v'; -const size_t kMaxWordLength = 80; -} // namespace - -EnglishContext::EnglishContext(const SessionConfig &session_config) - : session_config_(session_config) {} -EnglishContext::~EnglishContext() {} - -bool EnglishContext::Insert(char ch) { - if (!isalpha(ch)) { - return false; - } - - // Ignore a too long word to avoid since user dictionary cannot store it. - if (input_text_.size() >= kMaxWordLength) { - return false; - } - - input_text_.append(1, ch); - DCHECK_EQ(kInputPrefixCharacter, input_text_[0]); - Suggest(); - UpdateAuxiliaryText(); - return true; -} - -void EnglishContext::Commit() { - CommitPreedit(); -} - -void EnglishContext::CommitPreedit() { - if (input_text_.size() <= 1) { - DCHECK(input_text_.empty() || input_text_[0] == kInputPrefixCharacter); - Clear(); - return; - } - - const string result = input_text_.substr(1); - EnglishDictionaryFactory::GetDictionary()->LearnWord(result); - Clear(); - - // TODO(hsumita): Move this logic to SessionConverter. - if (session_config_.full_width_word_mode) { - Util::HalfWidthAsciiToFullWidthAscii(result, &commit_text_); - } else { - Util::FullWidthAsciiToHalfWidthAscii(result, &commit_text_); - } -} - -void EnglishContext::Clear() { - input_text_.clear(); - commit_text_.clear(); - auxiliary_text_.clear(); - focused_candidate_index_ = 0; - candidates_.clear(); -} - -void EnglishContext::ClearCommitText() { - commit_text_.clear(); -} - -// There is no composition text on English mode. 
-bool EnglishContext::MoveCursorRight() { - DLOG(ERROR) << "MoveCursorRight will not be expected to call."; - return false; -} - -bool EnglishContext::MoveCursorLeft() { - DLOG(ERROR) << "MoveCursorLeft will not be expected to call."; - return false; -} - -bool EnglishContext::MoveCursorRightByWord() { - DLOG(ERROR) << "MoveCursorRightByWord will not be expected to call."; - return false; -} - -bool EnglishContext::MoveCursorLeftByWord() { - DLOG(ERROR) << "MoveCursorLeftByWord will not be expected to call."; - return false; -} - -bool EnglishContext::MoveCursorToBeginning() { - DLOG(ERROR) << "MoveCursorToBeginning will not be expected to call."; - return false; -} - -bool EnglishContext::MoveCursorToEnd() { - DLOG(ERROR) << "MoveCursorToEnd will not be expected to call."; - return false; -} - -bool EnglishContext::SelectCandidate(size_t index) { - if (!FocusCandidate(index)) { - return false; - } - DCHECK_EQ(kInputPrefixCharacter, input_text_[0]); - - // Commits selected text. - const string result = candidates_[focused_candidate_index_]; - EnglishDictionaryFactory::GetDictionary()->LearnWord(result); - Clear(); - commit_text_.assign(result); - - return true; -} - -bool EnglishContext::FocusCandidate(size_t index) { - if (index >= candidates_.size()) { - return false; - } - focused_candidate_index_ = index; - return true; -} - -bool EnglishContext::ClearCandidateFromHistory(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - // Currently this method does not make sense. - // TODO(hsumita): Implements this function. - - return true; -} - -bool EnglishContext::RemoveCharBefore() { - if (!input_text_.empty()) { - input_text_.erase(input_text_.size() - 1); - Suggest(); - UpdateAuxiliaryText(); - } - return true; -} - -bool EnglishContext::RemoveCharAfter() { - return false; -} - -bool EnglishContext::RemoveWordBefore() { - Clear(); - return true; -} - -bool EnglishContext::RemoveWordAfter() { - return false; -} - -// English mode does NOT use a configuration. -void EnglishContext::ReloadConfig() {} - -const string &EnglishContext::commit_text() const { - return commit_text_; -} - -const string &EnglishContext::input_text() const { - return input_text_; -} - -// There is no composition text on English mode. -const string &EnglishContext::selected_text() const { return empty_text_; } -const string &EnglishContext::conversion_text() const { return empty_text_; } -const string &EnglishContext::rest_text() const { return empty_text_; } - -const string &EnglishContext::auxiliary_text() const { - return auxiliary_text_; -} - -// There is no composition text on English mode. 
-size_t EnglishContext::cursor() const { - return 0; -} - -size_t EnglishContext::focused_candidate_index() const { - return focused_candidate_index_; -} - -bool EnglishContext::GetCandidate(size_t index, Candidate *candidate) { - DCHECK(candidate); - if (!HasCandidate(index)) { - return false; - } - - candidate->text.assign(candidates_[index]); - return true; -} - -bool EnglishContext::HasCandidate(size_t index) { - return index < candidates_.size(); -} - -size_t EnglishContext::PrepareCandidates(size_t required_size) { - return min(required_size, candidates_.size()); -} - -void EnglishContext::Suggest() { - candidates_.clear(); - focused_candidate_index_ = 0; - auxiliary_text_.clear(); - - if (input_text_.size() <= 1) { - return; - } - DCHECK_EQ(kInputPrefixCharacter, input_text_[0]); - - string query = input_text_.substr(1); - EnglishDictionaryFactory::GetDictionary()-> - GetSuggestions(query, &candidates_); -} - -void EnglishContext::UpdateAuxiliaryText() { - auxiliary_text_.clear(); - - if (input_text_.empty()) { - return; - } - DCHECK_EQ(kInputPrefixCharacter, input_text_[0]); - auxiliary_text_.assign(1, input_text_[0]); - - if (input_text_.size() > 1) { - auxiliary_text_.append(" " + input_text_.substr(1)); - } -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_context.h mozc-1.11.1522.102/languages/pinyin/english_context.h --- mozc-1.11.1502.102/languages/pinyin/english_context.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_context.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,117 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// English mode context for pinyin IME. 
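The English-mode flow implemented above amounts to: strip the leading 'v' marker, lowercase the remainder, and run a prefix query against the English dictionary while echoing the query back as auxiliary text. A minimal self-contained sketch of that query step follows, with a hard-coded word list standing in for the dictionary; the function name and the sample list are placeholders, not the real dictionary interface.

#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>
#include <vector>

// Returns dictionary words whose prefix matches the lowercased text that
// follows the leading 'v' marker; returns nothing until a letter is typed.
std::vector<std::string> SuggestEnglish(const std::string &input_text,
                                        const std::vector<std::string> &words) {
  std::vector<std::string> suggestions;
  if (input_text.size() <= 1 || input_text[0] != 'v') {
    return suggestions;
  }
  std::string query = input_text.substr(1);
  std::transform(query.begin(), query.end(), query.begin(), ::tolower);
  for (size_t i = 0; i < words.size(); ++i) {
    if (words[i].compare(0, query.size(), query) == 0) {
      suggestions.push_back(words[i]);
    }
  }
  return suggestions;
}

int main() {
  std::vector<std::string> words;
  words.push_back("the");
  words.push_back("that");
  words.push_back("to");
  words.push_back("aaa");
  // Upper-case input is accepted but candidates stay lower-case.
  const std::vector<std::string> result = SuggestEnglish("vT", words);
  for (size_t i = 0; i < result.size(); ++i) {
    std::cout << result[i] << "\n";  // "the", "that", "to"
  }
  return 0;
}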
- -#ifndef MOZC_LANGUAGES_PINYIN_ENGLISH_CONTEXT_H_ -#define MOZC_LANGUAGES_PINYIN_ENGLISH_CONTEXT_H_ - -#include -#include - -#include "base/port.h" -#include "languages/pinyin/pinyin_context_interface.h" - -namespace mozc { -namespace pinyin { -struct SessionConfig; - -namespace english { - -// Suggests English words. -// This class suggests {"the", "to", "that", ...} for a query "vt" on a current -// implementation. -// 'v' is a special character to turn into English mode. -// This class accepts a lower / upper alphabet character. but all candidates -// consist of lower alphabet characters. - -class EnglishContext : public PinyinContextInterface { - public: - explicit EnglishContext(const SessionConfig &session_config); - virtual ~EnglishContext(); - - // Returns false if ch is a non-alphabetical character. - bool Insert(char ch); - // Commit is completely same as CommitPreedit on English context. - void Commit(); - void CommitPreedit(); - void Clear(); - void ClearCommitText(); - - bool MoveCursorRight(); - bool MoveCursorLeft(); - bool MoveCursorRightByWord(); - bool MoveCursorLeftByWord(); - bool MoveCursorToBeginning(); - bool MoveCursorToEnd(); - - bool SelectCandidate(size_t index); - bool FocusCandidate(size_t index); - bool ClearCandidateFromHistory(size_t index); - - bool RemoveCharBefore(); - bool RemoveCharAfter(); - bool RemoveWordBefore(); - bool RemoveWordAfter(); - - void ReloadConfig(); - - const string &commit_text() const; - const string &input_text() const; - const string &selected_text() const; - const string &conversion_text() const; - const string &rest_text() const; - const string &auxiliary_text() const; - - size_t cursor() const; - size_t focused_candidate_index() const; - bool GetCandidate(size_t index, Candidate *candidate); - bool HasCandidate(size_t index); - size_t PrepareCandidates(size_t required_size); - - private: - void Suggest(); - void UpdateAuxiliaryText(); - - const string empty_text_; - string input_text_; - string commit_text_; - string auxiliary_text_; - size_t focused_candidate_index_; - vector candidates_; - const SessionConfig &session_config_; - - DISALLOW_COPY_AND_ASSIGN(EnglishContext); -}; - -} // namespace english -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_ENGLISH_CONTEXT_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/english_context_test.cc mozc-1.11.1522.102/languages/pinyin/english_context_test.cc --- mozc-1.11.1502.102/languages/pinyin/english_context_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_context_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,429 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/english_context.h" - -#include -#include -#include - -#include "base/logging.h" -#include "base/port.h" -#include "base/util.h" -#include "languages/pinyin/english_dictionary_factory.h" -#include "languages/pinyin/session_config.h" -#include "testing/base/public/gmock.h" -#include "testing/base/public/gunit.h" - -using ::testing::_; -using ::testing::Return; - -namespace mozc { -namespace pinyin { -namespace english { - -namespace { -class EnglishMockDictionary : public EnglishDictionaryInterface { - public: - EnglishMockDictionary() { - const char *kWordList[] = { - "aaa", "aab", "aac", - "aa", "ab", "ac", - "a", "b", "c", - }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kWordList); ++i) { - word_list_.push_back(kWordList[i]); - } - } - virtual ~EnglishMockDictionary() {} - - void GetSuggestions(const string &prefix, vector *output) const { - DCHECK(output); - output->clear(); - - if (prefix.empty()) { - return; - } - - string normalized_prefix = prefix; - Util::LowerString(&normalized_prefix); - - for (size_t i = 0; i < word_list_.size(); ++i) { - if (Util::StartsWith(word_list_[i], normalized_prefix)) { - output->push_back(word_list_[i]); - } - } - } - - MOCK_METHOD1(LearnWord, bool(const string &word)); - - private: - vector word_list_; -}; -} // namespace - -class EnglishContextTest : public testing::Test { - protected: - virtual void SetUp() { - EXPECT_CALL(dictionary_, LearnWord(_)).WillRepeatedly(Return(true)); - EnglishDictionaryFactory::SetDictionary(&dictionary_); - - session_config_.reset(new SessionConfig); - session_config_->full_width_word_mode = false; - session_config_->full_width_punctuation_mode = true; - session_config_->simplified_chinese_mode = true; - context_.reset(new EnglishContext(*session_config_)); - } - - virtual void TearDown() { - context_.reset(NULL); - EnglishDictionaryFactory::SetDictionary(NULL); - } - - void InsertCharacterChars(const string &chars) { - for (size_t i = 0; i < chars.size(); ++i) { - context_->Insert(chars[i]); - } - } - - void CheckContext(const string &input_text, - const string &commit_text, - size_t focused_candidate_index) { - EXPECT_EQ(input_text, context_->input_text()); - EXPECT_EQ(commit_text, context_->commit_text()); - if (input_text.size() <= 1) { - EXPECT_EQ(input_text, context_->auxiliary_text()); - } else { - EXPECT_EQ("v " + input_text.substr(1), context_->auxiliary_text()); - } - EXPECT_EQ("", context_->selected_text()); - EXPECT_EQ("", context_->conversion_text()); - EXPECT_EQ("", context_->rest_text()); - - EXPECT_EQ(0, context_->cursor()); - EXPECT_EQ(focused_candidate_index, context_->focused_candidate_index()); - - if (input_text.size() <= 1) { - EXPECT_EQ(0, GetCandidatesSize()); - } else { - vector 
expected_candidates; - string query = input_text.substr(1); - Util::LowerString(&query); - dictionary_.GetSuggestions(query, &expected_candidates); - - ASSERT_EQ(expected_candidates.size(), GetCandidatesSize()); - - Candidate candidate; - for (size_t i = 0; context_->GetCandidate(i, &candidate); ++i) { - EXPECT_EQ(expected_candidates[i], candidate.text); - } - } - } - - size_t GetCandidatesSize() { - size_t size = 0; - for (; context_->HasCandidate(size); ++size) {} - return size; - } - - EnglishMockDictionary dictionary_; - std::unique_ptr session_config_; - std::unique_ptr context_; -}; - -TEST_F(EnglishContextTest, InsertTest) { - { - SCOPED_TRACE("Inserts v"); - EXPECT_TRUE(context_->Insert('v')); - CheckContext("v", "", 0); - } - - { - SCOPED_TRACE("Inserts va"); - EXPECT_TRUE(context_->Insert('a')); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Inserts vaA"); - EXPECT_TRUE(context_->Insert('A')); - CheckContext("vaA", "", 0); - } - - { - SCOPED_TRACE("Inserts 1 and fails"); - EXPECT_FALSE(context_->Insert('1')); - CheckContext("vaA", "", 0); - } -} - -TEST_F(EnglishContextTest, CommitTest) { - { - SCOPED_TRACE("Commits [v]"); - InsertCharacterChars("v"); - EXPECT_CALL(dictionary_, LearnWord(_)).Times(0); - context_->Commit(); - CheckContext("", "", 0); - } - - { - SCOPED_TRACE("Commits [va]"); - InsertCharacterChars("va"); - EXPECT_CALL(dictionary_, LearnWord("a")).Times(1); - context_->Commit(); - CheckContext("", "a", 0); - } - - context_->Clear(); - - { - SCOPED_TRACE("Commits preedit [va]"); - InsertCharacterChars("va"); - EXPECT_CALL(dictionary_, LearnWord("a")).Times(1); - context_->CommitPreedit(); - CheckContext("", "a", 0); - } - - context_->Clear(); - - { // Selects second candidate and commits. - { - SCOPED_TRACE("Focuses a 2nd candidate"); - InsertCharacterChars("va"); - context_->FocusCandidate(1); - CheckContext("va", "", 1); - } - { - SCOPED_TRACE("Commits when second candidate is focused"); - EXPECT_CALL(dictionary_, LearnWord("a")).Times(1); - context_->Commit(); - CheckContext("", "a", 0); - } - } -} - -TEST_F(EnglishContextTest, CursorTest) { - InsertCharacterChars("va"); - - { - SCOPED_TRACE("Moves cursor to left"); - EXPECT_FALSE(context_->MoveCursorLeft()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Moves cursor to right"); - EXPECT_FALSE(context_->MoveCursorRight()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Moves cursor to left by word"); - EXPECT_FALSE(context_->MoveCursorLeftByWord()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Moves cursor to right by word"); - EXPECT_FALSE(context_->MoveCursorRightByWord()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Moves cursor to to beginning"); - EXPECT_FALSE(context_->MoveCursorToBeginning()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Moves cursor to end"); - EXPECT_FALSE(context_->MoveCursorToEnd()); - CheckContext("va", "", 0); - } -} - -TEST_F(EnglishContextTest, RemoveTest) { - InsertCharacterChars("vaa"); - - { - SCOPED_TRACE("Removes a previous character"); - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Removes a previous character thrice"); - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckContext("v", "", 0); - - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckContext("", "", 0); - - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckContext("", "", 0); - } - - InsertCharacterChars("vaa"); - - { - SCOPED_TRACE("Removes a previous word"); - EXPECT_TRUE(context_->RemoveWordBefore()); - 
CheckContext("", "", 0); - } - - InsertCharacterChars("vaa"); - - { - SCOPED_TRACE("Removes an after character and make no sense"); - EXPECT_FALSE(context_->RemoveCharAfter()); - CheckContext("vaa", "", 0); - } - - { - SCOPED_TRACE("Removes an after word and make no sense"); - EXPECT_FALSE(context_->RemoveWordAfter()); - CheckContext("vaa", "", 0); - } -} - -TEST_F(EnglishContextTest, FocusCandidateIndex) { - InsertCharacterChars("vaa"); - - const size_t last_index = GetCandidatesSize() - 1; - - { - SCOPED_TRACE("Focuses a last candidate"); - EXPECT_TRUE(context_->FocusCandidate(last_index)); - CheckContext("vaa", "", last_index); - } - - { - SCOPED_TRACE("Focuses a invalid candidate and make no sense"); - EXPECT_FALSE(context_->FocusCandidate(last_index + 1)); - CheckContext("vaa", "", last_index); - } - - { - SCOPED_TRACE("Focuses a 1st candidate"); - EXPECT_TRUE(context_->FocusCandidate(0)); - CheckContext("vaa", "", 0); - } -} - -TEST_F(EnglishContextTest, SelectCandidate) { - InsertCharacterChars("vaa"); - - { - SCOPED_TRACE("Selects a 100th candidate and fails"); - EXPECT_FALSE(context_->SelectCandidate(100)); - CheckContext("vaa", "", 0); - } - - { - SCOPED_TRACE("Selects a 3rd candidate"); - EXPECT_CALL(dictionary_, LearnWord("aac")).Times(1); - EXPECT_TRUE(context_->SelectCandidate(2)); - CheckContext("", "aac", 0); - } -} - -TEST_F(EnglishContextTest, NoMatchingInput) { - const char *kInputText = "vaaaaaaaaaaa"; - const char *kCommitText = "aaaaaaaaaaa"; - - { - SCOPED_TRACE("Inserts. There are no matching words in the mock dictionary"); - InsertCharacterChars(kInputText); - CheckContext(kInputText, "", 0); - EXPECT_EQ(0, GetCandidatesSize()); - } - - { - SCOPED_TRACE("Focuses a candidate and fails."); - EXPECT_FALSE(context_->FocusCandidate(0)); - CheckContext(kInputText, "", 0); - } - - { - SCOPED_TRACE("Selects a candidate and fails."); - EXPECT_FALSE(context_->SelectCandidate(0)); - CheckContext(kInputText, "", 0); - } - - { - SCOPED_TRACE("Commits."); - EXPECT_CALL(dictionary_, LearnWord(kCommitText)).Times(1); - context_->Commit(); - CheckContext("", kCommitText, 0); - } -} - -TEST_F(EnglishContextTest, LongInput) { - ASSERT_TRUE(context_->Insert('v')); - - const size_t kMaxInputLength = 80; - for (size_t i = 0; i < kMaxInputLength - 1; ++i) { - EXPECT_TRUE(context_->Insert('a')); - } - - const string input_text = context_->input_text(); - EXPECT_FALSE(context_->Insert('a')); - EXPECT_EQ(input_text, context_->input_text()); -} - -TEST_F(EnglishContextTest, FullWidthMode) { - session_config_->full_width_word_mode = true; - - { - SCOPED_TRACE("Inserts characters with full width mode"); - InsertCharacterChars("va"); - CheckContext("va", "", 0); - } - - { - SCOPED_TRACE("Commits full width a"); - EXPECT_CALL(dictionary_, LearnWord("a")).Times(1); - context_->Commit(); - // "a" - CheckContext("", "\xEF\xBD\x81", 0); - } - - InsertCharacterChars("va"); - - { - SCOPED_TRACE("Selects aaa with full width mode"); - EXPECT_CALL(dictionary_, LearnWord("aaa")).Times(1); - context_->SelectCandidate(0); - CheckContext("", "aaa", 0); - } -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary.cc mozc-1.11.1522.102/languages/pinyin/english_dictionary.cc --- mozc-1.11.1502.102/languages/pinyin/english_dictionary.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,293 +0,0 @@ -// Copyright 2010-2013, Google Inc. 
-// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/english_dictionary.h" - -#include -#include -#include -#include - -#include "base/config_file_stream.h" -#include "base/logging.h" -#include "base/scoped_ptr.h" -#include "base/util.h" -#include "dictionary/file/codec_interface.h" -#include "dictionary/file/dictionary_file.h" -#include "storage/encrypted_string_storage.h" -#include "storage/louds/louds_trie.h" - -// TODO(hsumita): Lock user dictionary file. - -namespace mozc { -namespace pinyin { -namespace english { - -namespace { - -// Includes generated dictionary data. -#include "languages/pinyin/pinyin_embedded_english_dictionary_data.h" - -const char *kUserDictionaryFileName = "user://pinyin_english.db"; -// The last printable character in ASCII code -const char kSentinelValueForAlphabet = '~'; -// It should be less than storage size (64MByte) / entry size (~85Byte) -const size_t kMaxUserDictionarySize = 50000; -const size_t kMaxWordLength = 80; - -// Serialized user dictionary format is as following. -// Dictionary : array of Entry -// Entry : | key_length (1byte) | key (~80bytes) | used_count (4bytes) | -// -// key_length : length of key. -// key: : word registered on user dictionary entry. -// used_count : number that how many times this entry is learned. -// -// |key| should be smaller than or equal to 80 bytes, and we can store the -// length of |key| within 1byte w/o worry about signed vs unsigned conversion. -void SerializeUserDictionary(const UserDictionary &dictionary, - string *output) { - CHECK(output); - output->clear(); - - for (UserDictionary::const_iterator it = dictionary.begin(); - it != dictionary.end(); ++it) { - const string &key = it->first; - const uint32 used_count = it->second; - const char *used_count_ptr = reinterpret_cast(&used_count); - - // The length of the key should be <= 80. - // So we can save it as uint8. 
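// A worked example of the layout just described, assuming a little-endian
// host (used_count is appended in raw host byte order): a hypothetical entry
// {"cat", used_count = 3} serializes to the eight bytes
//   0x03 0x63 0x61 0x74 0x03 0x00 0x00 0x00
// i.e. one length byte (3), the key bytes "cat", then the four used_count
// bytes.  Entries are concatenated with no separator, which is why the
// deserializer below can walk the buffer using only the length prefixes.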
- CHECK_GE(80, key.size()); - output->append(1, static_cast(key.size())); - output->append(key); - output->append(used_count_ptr, used_count_ptr + 4); - } -} - -bool DeserializeUserDictionary(const string &input, - UserDictionary *dictionary) { - CHECK(dictionary); - dictionary->clear(); - - size_t index = 0; - while (index < input.size()) { - const size_t key_length = input[index]; - index += 1; - - if (index + key_length + 4 > input.size()) { - LOG(ERROR) << "Cannot parse user dictionary."; - dictionary->clear(); - return false; - } - - const string key = input.substr(index, key_length); - index += key_length; - const uint32 used_count = *reinterpret_cast( - input.substr(index, 4).c_str()); - index += 4; - - dictionary->insert(make_pair(key, used_count)); - } - - return true; -} - -// -typedef pair DictionaryEntry; -typedef map DictionaryMap; - -bool DictionaryEntryComparator(const DictionaryEntry &lhs, - const DictionaryEntry &rhs) { - if (lhs.second != rhs.second) { - return lhs.second > rhs.second; - } - return lhs.first < rhs.first; -} -} // namespace - -EnglishDictionary::EnglishDictionary() - : word_trie_(new mozc::storage::louds::LoudsTrie), - storage_(new storage::EncryptedStringStorage( - EnglishDictionary::user_dictionary_file_path())) { - Init(); -} - -EnglishDictionary::~EnglishDictionary() { -} - -namespace { - -// Used for predictive search from system dictionary. -class WordCallback : public mozc::storage::louds::LoudsTrie::Callback { - public: - WordCallback(const vector &priority_table, - DictionaryMap *dictionary_map) - : priority_table_(priority_table), dictionary_map_(dictionary_map) {} - - // Updates dictionary map on each key found using the given priority table. - virtual ResultType Run(const char *s, size_t len, int key_id) { - const string key(s, len); - (*dictionary_map_)[key] += priority_table_[key_id]; - return SEARCH_CONTINUE; - } - - private: - const vector &priority_table_; - DictionaryMap *dictionary_map_; - - DISALLOW_COPY_AND_ASSIGN(WordCallback); -}; - -} // namespace - -void EnglishDictionary::GetSuggestions(const string &input_prefix, - vector *output) const { - CHECK(output); - output->clear(); - - if (input_prefix.empty()) { - return; - } - - string prefix = input_prefix; - Util::LowerString(&prefix); - - DictionaryMap merged_entries; - { - const UserDictionary::const_iterator it_begin = - user_dictionary_.lower_bound(prefix); - const UserDictionary::const_iterator it_end = - user_dictionary_.lower_bound(prefix + kSentinelValueForAlphabet); - for (UserDictionary::const_iterator it = it_begin; it != it_end; ++it) { - merged_entries[it->first] = learning_multiplier_ * it->second; - } - } - - WordCallback callback(priority_table_, &merged_entries); - word_trie_->PredictiveSearch(prefix.c_str(), &callback); - - vector merged_vector(merged_entries.size()); - copy(merged_entries.begin(), merged_entries.end(), merged_vector.begin()); - sort(merged_vector.begin(), merged_vector.end(), DictionaryEntryComparator); - - for (size_t i = 0; i < merged_vector.size(); ++i) { - output->push_back(merged_vector[i].first); - } -} - -bool EnglishDictionary::LearnWord(const string &input_word) { - if (input_word.empty()) { - LOG(ERROR) << "Cannot learn an empty word."; - return false; - } - - if (input_word.size() > kMaxWordLength) { - LOG(ERROR) << "Cannot learn a too long word."; - return false; - } - - // TODO(hsumita): Introduce LRU algorithm. 
http://b/6047022 - if (user_dictionary_.size() < kMaxUserDictionarySize) { - string word = input_word; - Util::LowerString(&word); - - // If |word| is not registered on |user_dictionary_| yet, the value of the - // entry will be 1 since constructor of std::map initializes it with 0. - ++user_dictionary_[word]; - } - - return Sync(); -} - -void EnglishDictionary::Init() { - CHECK(priority_table_.empty()); - - vector sections; - const DictionaryFileCodecInterface *codec = - DictionaryFileCodecFactory::GetCodec(); - if (!codec->ReadSections(kPinyinEnglishDictionary_data, - kPinyinEnglishDictionary_size, §ions)) { - LOG(FATAL) - << "Cannot open English dictionary because section data is not found."; - } - - const string word_trie_section_name = - codec->GetSectionName("english_dictionary_trie"); - const string priority_table_section_name = - codec->GetSectionName("english_word_priority_table"); - const string learning_multiplier_section_name = - codec->GetSectionName("learning_multiplier"); - for (size_t i = 0; i < sections.size(); ++i) { - const DictionaryFileSection §ion = sections[i]; - if (section.name == word_trie_section_name) { - if (!word_trie_->Open(reinterpret_cast(section.ptr))) { - LOG(FATAL) << "Failed to open trie section data."; - } - } else if (section.name == priority_table_section_name) { - const float *p = reinterpret_cast(section.ptr); - const int length = section.len / sizeof(*p); - priority_table_.assign(p, p + length); - } else if (section.name == learning_multiplier_section_name) { - learning_multiplier_ = *reinterpret_cast(section.ptr); - } else { - LOG(FATAL) << "Unknown section name: " << section.name; - } - } - - ReloadUserDictionary(); -} - -bool EnglishDictionary::ReloadUserDictionary() { - user_dictionary_.clear(); - - string serialized_data; - if (!storage_->Load(&serialized_data) || - !DeserializeUserDictionary(serialized_data, &user_dictionary_)) { - LOG(ERROR) << "Cannot deserialize data."; - return false; - } - - return true; -} - -bool EnglishDictionary::Sync() { - string serialized_data; - SerializeUserDictionary(user_dictionary_, &serialized_data); - return storage_->Save(serialized_data); -} - -// static -string EnglishDictionary::user_dictionary_file_path() { - return ConfigFileStream::GetFileName(kUserDictionaryFileName); -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary.h mozc-1.11.1522.102/languages/pinyin/english_dictionary.h --- mozc-1.11.1502.102/languages/pinyin/english_dictionary.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// English dictionary class to suggest english words by prefix match. This class -// supports user dictionary to learn new words or reorder suggested words. -// This class is NOT thread-safe or process-safe. - -#ifndef MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_H_ -#define MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_H_ - -#include -#include -#include - -#include "base/port.h" -#include "base/scoped_ptr.h" -#include "languages/pinyin/english_dictionary_interface.h" - -namespace mozc { -namespace storage { - -class StringStorageInterface; - -namespace louds { -class LoudsTrie; -} // namespace louds - -} // namespace storage - -namespace pinyin { -namespace english { - -typedef map UserDictionary; - -class EnglishDictionary : public EnglishDictionaryInterface { - public: - EnglishDictionary(); - virtual ~EnglishDictionary(); - - // Gets english words starting with |prefix| from system / user dictionary, - // and sets it into |output|. Entries of |output| are ordered by priority - // based on appearance frequency, and consist of lower-case characters. - virtual void GetSuggestions(const string &prefix, - vector *output) const; - - // Boosts the priority of a word. if it is a unknown word, It will be added on - // user dictionary. Return false if failed. - virtual bool LearnWord(const string &word); - - private: - friend class EnglishDictionaryTest; - - // Loads system / user dictionary data. Don't call this method twice. - void Init(); - - // Discards user dictionary data and reloads it from storage. - bool ReloadUserDictionary(); - - // Reads user dictionary data from storage, merges it, and writes it to a - // storage. - bool Sync(); - - // Returns the path to the user dictionary file. - // For initialization or unittest use only. - static string user_dictionary_file_path(); - - // System dictionary trie data. - scoped_ptr word_trie_; - // Maps an ID of trie entries to their priority. - vector priority_table_; - // It maps words to their frequency. - UserDictionary user_dictionary_; - // Multiplier to convert from frequency to priority. - float learning_multiplier_; - // Storage instance to manage user dictionary. 
- scoped_ptr storage_; - - DISALLOW_COPY_AND_ASSIGN(EnglishDictionary); -}; - -} // namespace english -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_data_builder.cc mozc-1.11.1522.102/languages/pinyin/english_dictionary_data_builder.cc --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_data_builder.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_data_builder.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,124 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include "languages/pinyin/english_dictionary_data_builder.h" - -#include -#include -#include - -#include "base/file_stream.h" -#include "base/util.h" -#include "dictionary/file/codec_interface.h" -#include "dictionary/file/dictionary_file.h" -#include "dictionary/file/section.h" -#include "storage/louds/louds_trie_builder.h" - -namespace mozc { -namespace pinyin { -namespace english { - -using mozc::storage::louds::LoudsTrieBuilder; - -namespace { -// Priority = (1 / (sqrt(index + offset))) + used_count * multiplier -const float kIndexOffset = 10.0; -const float kLearningMultiplier = 0.02; -} // namespace - -EnglishDictionaryDataBuilder::EnglishDictionaryDataBuilder() : words_num_(0) { -} - -EnglishDictionaryDataBuilder::~EnglishDictionaryDataBuilder() { -} - -void EnglishDictionaryDataBuilder::BuildFromStream(istream *input_stream) { - DCHECK(input_stream); - - vector words; - string line; - while (getline(*input_stream, line)) { - if (line.empty() || Util::StartsWith(line, "#")) { - continue; - } - words.push_back(line); - } - - builder_.reset(new LoudsTrieBuilder); - for (size_t i = 0; i < words.size(); ++i) { - builder_->Add(words[i]); - } - builder_->Build(); - - words_num_ = words.size(); - louds_id_to_priority_.reset(new float[words_num_]); - - for (size_t i = 0; i < words.size(); ++i) { - const int word_id = builder_->GetId(words[i]); - - DCHECK_LT(word_id, words.size()); - DCHECK_NE(-1, word_id); - - louds_id_to_priority_[word_id] = 1.0 / (sqrt(kIndexOffset + i)); - } -} - -void EnglishDictionaryDataBuilder::WriteToStream(ostream *output_stream) const { - DCHECK(output_stream); - DCHECK(builder_.get()); - DCHECK(louds_id_to_priority_.get()); - - vector sections; - DictionaryFileCodecInterface *file_codec = - DictionaryFileCodecFactory::GetCodec(); - - DictionaryFileSection dictionary_trie( - builder_->image().data(), - builder_->image().size(), - file_codec->GetSectionName("english_dictionary_trie")); - sections.push_back(dictionary_trie); - - DictionaryFileSection word_priority_table( - reinterpret_cast(louds_id_to_priority_.get()), - words_num_ * static_cast(sizeof(louds_id_to_priority_[0])), - file_codec->GetSectionName("english_word_priority_table")); - sections.push_back(word_priority_table); - - DictionaryFileSection learning_multiplier( - reinterpret_cast(&kLearningMultiplier), - sizeof(kLearningMultiplier), - file_codec->GetSectionName("learning_multiplier")); - sections.push_back(learning_multiplier); - - file_codec->WriteSections(sections, output_stream); -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_data_builder.h mozc-1.11.1522.102/languages/pinyin/english_dictionary_data_builder.h --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_data_builder.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_data_builder.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,72 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Build english dictionary data for ibus-mozc-pinyin. - -#ifndef MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_DATA_BUILDER_H_ -#define MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_DATA_BUILDER_H_ - -#include - -#include "base/port.h" - -namespace mozc { -namespace storage { -namespace louds { -class LoudsTrieBuilder; -} // namespace louds -} // namespace storage - -namespace pinyin { -namespace english { - -// This class expects that word number of the english dictionary is less than -// 65536 because we use short int to reduce footprint. -class EnglishDictionaryDataBuilder { - public: - EnglishDictionaryDataBuilder(); - ~EnglishDictionaryDataBuilder(); - - void BuildFromStream(istream *input_stream); - void WriteToStream(ostream *output_stream) const; - - private: - std::unique_ptr builder_; - std::unique_ptr louds_id_to_priority_; - int words_num_; - - DISALLOW_COPY_AND_ASSIGN(EnglishDictionaryDataBuilder); -}; - -} // namespace english -} // namespace pinyin -} // namespace mozc - -// MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_DATA_BUILDER_H_ -#endif diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory.cc mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory.cc --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,58 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/english_dictionary_factory.h" - -#include "base/singleton.h" -#include "languages/pinyin/english_dictionary.h" - -namespace mozc { -namespace pinyin { -namespace english { - -namespace { -EnglishDictionaryInterface *g_dictionary = NULL; -} // namespace - -EnglishDictionaryInterface *EnglishDictionaryFactory::GetDictionary() { - if (g_dictionary == NULL) { - return Singleton::get(); - } else { - return g_dictionary; - } -} - -void EnglishDictionaryFactory::SetDictionary( - EnglishDictionaryInterface *dictionary) { - g_dictionary = dictionary; -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory.h mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory.h --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#ifndef MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_FACTORY_H_ -#define MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_FACTORY_H_ - -#include "base/port.h" -#include "languages/pinyin/english_dictionary_interface.h" - -namespace mozc { -namespace pinyin { -namespace english { - -class EnglishDictionaryFactory { - public: - // Gets dictionary instance. - // This class takes an ownership of the instance. - static EnglishDictionaryInterface *GetDictionary(); - // Sets dictionary instance. - // This class doesn't take an ownership of |*dictionary|. - // If you set an instance, you should set NULL before delete it. - static void SetDictionary(EnglishDictionaryInterface *dictionary); - - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(EnglishDictionaryFactory); -}; - -} // namespace english -} // namespace pinyin -} // namespace mozc - -// MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_FACTORY_H_ -#endif diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory_test.cc mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory_test.cc --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_factory_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_factory_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include -#include - -#include "languages/pinyin/english_dictionary_factory.h" -#include "testing/base/public/gunit.h" - -namespace mozc { -namespace pinyin { -namespace english { - -class EnglishDictionaryFactoryTest : public ::testing::Test { - protected: - virtual void SetUp() { - EnglishDictionaryFactory::SetDictionary(NULL); - } - - virtual void TearDown() { - EnglishDictionaryFactory::SetDictionary(NULL); - } -}; - -TEST_F(EnglishDictionaryFactoryTest, RegisterInstance) { - EnglishDictionaryInterface *real_instance = - EnglishDictionaryFactory::GetDictionary(); - EnglishDictionaryInterface *dummy_instance = - reinterpret_cast(1); - - EnglishDictionaryFactory::SetDictionary(dummy_instance); - EXPECT_EQ(dummy_instance, EnglishDictionaryFactory::GetDictionary()); - - EnglishDictionaryFactory::SetDictionary(NULL); - EXPECT_EQ(real_instance, EnglishDictionaryFactory::GetDictionary()); -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_interface.h mozc-1.11.1522.102/languages/pinyin/english_dictionary_interface.h --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_interface.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_interface.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,59 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// English dictionary interface for ibus-mozc-pinyin - -#ifndef MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_INTERFACE_H_ -#define MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_INTERFACE_H_ - -#include -#include - -namespace mozc { -namespace pinyin { -namespace english { - -class EnglishDictionaryInterface { - public: - virtual ~EnglishDictionaryInterface() {} - - // Gets English words starts with prefix. - virtual void GetSuggestions( - const string &prefix, vector *output) const = 0; - - // Learns word to add unknown word or to boost a word priority. 
- virtual bool LearnWord(const string &word) = 0; -}; - -} // namespace english -} // namespace pinyin -} // namespace mozc - -// MOZC_LANGUAGES_PINYIN_ENGLISH_DICTIONARY_INTERFACE_H_ -#endif diff -Nru mozc-1.11.1502.102/languages/pinyin/english_dictionary_test.cc mozc-1.11.1522.102/languages/pinyin/english_dictionary_test.cc --- mozc-1.11.1502.102/languages/pinyin/english_dictionary_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/english_dictionary_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,309 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/english_dictionary.h" - -#include -#include -#include - -#include "base/file_util.h" -#include "base/system_util.h" -#include "base/util.h" -#include "storage/encrypted_string_storage.h" -#include "testing/base/public/gmock.h" -#include "testing/base/public/gunit.h" - -// This test requires following condition to system dictionary. -// - "the" has a highest priority in words "th*" -// - "that" has a highest priority in words "tha*" -// - "of" has a highest priority in words "of*" -// Actual dictionary entries are defined on data/pinyin/english_dictionary.txt -// It should be something wrong on generating dictionary data process if above -// condition is not satisfied. 
- -DECLARE_string(test_tmpdir); - -using ::testing::DoAll; -using ::testing::Return; -using ::testing::SetArgPointee; -using ::testing::_; - -namespace mozc { -namespace pinyin { -namespace english { - -class MockStorage : public storage::StringStorageInterface { - public: - MockStorage() {} - virtual ~MockStorage() {} - - MOCK_CONST_METHOD1(Load, bool(string *output)); - MOCK_CONST_METHOD1(Save, bool(const string &input)); -}; - -class EnglishDictionaryTest : public ::testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - UnlinkUserHistoryDatabase(); - } - - virtual void TearDown() { - UnlinkUserHistoryDatabase(); - } - - // Unlinks user history database file to reset. - void UnlinkUserHistoryDatabase() { - FileUtil::Unlink(EnglishDictionary::user_dictionary_file_path()); - } - - // Sets mock user dictionary storage for unit testing. - // |*dictionary| takes a ownership of |*mock_user_dictionary_storage|. - void SetMockUserDictionaryStorage( - EnglishDictionary *dictionary, - storage::StringStorageInterface *mock_user_dictionary_storage) { - dictionary->storage_.reset(mock_user_dictionary_storage); - } - - // Reloads user dictionary for unit testing. - bool ReloadUserDictionary(EnglishDictionary *dictionary) { - return dictionary->ReloadUserDictionary(); - } -}; - -// Checks GetSuggestions() with some famous English words. -TEST_F(EnglishDictionaryTest, GetSuggestions) { - EnglishDictionary dictionary; - - { // Searches with an empty query. - vector output; - output.push_back("dummy_entry"); - dictionary.GetSuggestions("", &output); - ASSERT_TRUE(output.empty()); - } - - { // Searches with normal queries. - vector output; - dictionary.GetSuggestions("th", &output); - ASSERT_FALSE(output.empty()); - EXPECT_EQ("the", output.front()); - const size_t th_size = output.size(); - - dictionary.GetSuggestions("tHa", &output); - ASSERT_FALSE(output.empty()); - EXPECT_EQ("that", output.front()); - const size_t tha_size = output.size(); - - EXPECT_GT(th_size, tha_size); - - dictionary.GetSuggestions("OF", &output); - ASSERT_FALSE(output.empty()); - EXPECT_EQ("of", output.front()); - } - - { // Searches with an illegal querie. - vector output; - output.push_back("dummy_entry"); - dictionary.GetSuggestions("-", &output); - ASSERT_TRUE(output.empty()); - } -} - -// Checks LearnWord(). -TEST_F(EnglishDictionaryTest, LearningFunction) { - EnglishDictionary dictionary; - - const char *kQueryPrefix = "the"; - vector output; - - const string word_a = Util::StringPrintf("%s%s", kQueryPrefix, "abcde"); - const string word_b = Util::StringPrintf("%s%s", kQueryPrefix, "fghij"); - - dictionary.GetSuggestions(kQueryPrefix, &output); - ASSERT_TRUE(find(output.begin(), output.end(), word_a) == output.end()); - ASSERT_TRUE(find(output.begin(), output.end(), word_b) == output.end()); - const size_t original_size = output.size(); - - // Empty word. - EXPECT_FALSE(dictionary.LearnWord("")); - - // Too long word. - EXPECT_TRUE(dictionary.LearnWord( - "0123456789" "0123456789" "0123456789" "0123456789" - "0123456789" "0123456789" "0123456789" "0123456789")); - EXPECT_FALSE(dictionary.LearnWord( - "0123456789" "0123456789" "0123456789" "0123456789" - "0123456789" "0123456789" "0123456789" "0123456789" "0")); - - { // Learns word_a once. 
(a: 1, b: 0) - EXPECT_TRUE(dictionary.LearnWord(word_a)); - dictionary.GetSuggestions(kQueryPrefix, &output); - EXPECT_EQ(original_size + 1, output.size()); - EXPECT_NE(find(output.begin(), output.end(), word_a), output.end()); - } - - { // Learns word_b twice. (a: 1, b: 2) - EXPECT_TRUE(dictionary.LearnWord(word_b)); - EXPECT_TRUE(dictionary.LearnWord(word_b)); - dictionary.GetSuggestions(kQueryPrefix, &output); - EXPECT_EQ(original_size + 2, output.size()); - vector::iterator it_a = find(output.begin(), output.end(), word_a); - vector::iterator it_b = find(output.begin(), output.end(), word_b); - EXPECT_NE(it_a, output.end()); - EXPECT_NE(it_b, output.end()); - EXPECT_GT(it_a, it_b); - } - - { // Learns word_a twice. (a: 3, b: 2) - EXPECT_TRUE(dictionary.LearnWord(word_a)); - EXPECT_TRUE(dictionary.LearnWord(word_a)); - dictionary.GetSuggestions(kQueryPrefix, &output); - EXPECT_EQ(original_size + 2, output.size()); - vector::iterator it_a = find(output.begin(), output.end(), word_a); - vector::iterator it_b = find(output.begin(), output.end(), word_b); - EXPECT_NE(it_a, output.end()); - EXPECT_NE(it_b, output.end()); - EXPECT_LT(it_a, it_b); - } - - { // Learns word_b once. (a: 3, b: 3) - EXPECT_TRUE(dictionary.LearnWord(word_b)); - dictionary.GetSuggestions(kQueryPrefix, &output); - EXPECT_EQ(original_size + 2, output.size()); - vector::iterator it_a = find(output.begin(), output.end(), word_a); - vector::iterator it_b = find(output.begin(), output.end(), word_b); - EXPECT_NE(it_a, output.end()); - EXPECT_NE(it_b, output.end()); - EXPECT_LT(it_a, it_b); - } - - { // Learns more 100 times and moves word_a to top of candidates. - dictionary.GetSuggestions(kQueryPrefix, &output); - ASSERT_EQ(original_size + 2, output.size()); - ASSERT_NE(word_a, output[0]); - - for (size_t i = 0; i < 100; ++i) { - EXPECT_TRUE(dictionary.LearnWord(word_a)); - } - dictionary.GetSuggestions(kQueryPrefix, &output); - ASSERT_EQ(original_size + 2, output.size()); - EXPECT_EQ(word_a, output[0]); - } -} - -// Checks that LearnWord() handle upper case characters correctly. -// http://b/6136098 -TEST_F(EnglishDictionaryTest, LearnWordsContainsUpperAlphabet_Issue6136098) { - EnglishDictionary dictionary; - - vector output; - - const char *kWord = "abcDEFghi"; - const char *kLowerWord = "abcdefghi"; - - dictionary.GetSuggestions("", &output); - ASSERT_TRUE(output.empty()); - - output.clear(); - dictionary.GetSuggestions(kWord, &output); - ASSERT_TRUE(output.empty()); - - output.clear(); - EXPECT_TRUE(dictionary.LearnWord(kWord)); - dictionary.GetSuggestions(kWord, &output); - EXPECT_EQ(1, output.size()); - EXPECT_EQ(kLowerWord, output.front()); -} - -// Checks that user dictionary is correctly stored to a storage. -TEST_F(EnglishDictionaryTest, StoreUserDictionaryToStorage) { - const char *kUnknownWord = "thisisunknownword"; - vector output; - - { // Creates a dictionary and lean a new word. - EnglishDictionary dictionary; - dictionary.GetSuggestions(kUnknownWord, &output); - ASSERT_TRUE(output.empty()); - EXPECT_TRUE(dictionary.LearnWord(kUnknownWord)); - dictionary.GetSuggestions(kUnknownWord, &output); - ASSERT_EQ(1, output.size()); - ASSERT_EQ(kUnknownWord, output[0]); - } - - { // Creates another dictionary and verifies that it has a new word. - EnglishDictionary dictionary; - dictionary.GetSuggestions(kUnknownWord, &output); - EXPECT_EQ(1, output.size()); - EXPECT_EQ(kUnknownWord, output[0]); - } - - UnlinkUserHistoryDatabase(); - - { // Creates another dictionary and verifies that it doesn't have a new word. 
- EnglishDictionary dictionary; - dictionary.GetSuggestions(kUnknownWord, &output); - EXPECT_TRUE(output.empty()); - } -} - -// Checks that broken user dictionary is correctly handled. -TEST_F(EnglishDictionaryTest, InvalidUserDictionary) { - EnglishDictionary dictionary; - MockStorage *mock_storage = new MockStorage; - SetMockUserDictionaryStorage(&dictionary, mock_storage); - EXPECT_CALL(*mock_storage, Save(_)).WillRepeatedly(Return(true)); - - // Cannot open storage. - EXPECT_CALL(*mock_storage, Load(_)).WillOnce(Return(false)); - EXPECT_FALSE(ReloadUserDictionary(&dictionary)); - - // Empty storage (success) - EXPECT_CALL(*mock_storage, Load(_)).WillOnce(DoAll( - SetArgPointee<0>(""), Return(true))); - EXPECT_TRUE(ReloadUserDictionary(&dictionary)); - - const char *kWrongUserDictionaryData[] = { - "\x01", // Wrong key length (key length: 1, key: "") - "\x02" "a", // Wrong key length (key length: 2, key: "a") - "\x01" "aa", // Wrong key length (key length: 1, key: "aa") - "\x01" "a", // Wrong used count length (length == 0) - "\x01" "a" "\x00", // Wrong used count length (length != 0 && length != 4) - }; - - for (size_t i = 0; i < arraysize(kWrongUserDictionaryData); ++i) { - EXPECT_CALL(*mock_storage, Load(_)).WillOnce(DoAll( - SetArgPointee<0>(kWrongUserDictionaryData[i]), Return(true))); - EXPECT_FALSE(ReloadUserDictionary(&dictionary)); - } -} - -} // namespace english -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/gen_english_dictionary_data_main.cc mozc-1.11.1522.102/languages/pinyin/gen_english_dictionary_data_main.cc --- mozc-1.11.1502.102/languages/pinyin/gen_english_dictionary_data_main.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/gen_english_dictionary_data_main.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include - -#include "base/base.h" -#include "base/codegen_bytearray_stream.h" -#include "base/file_stream.h" -#include "base/logging.h" -#include "base/scoped_ptr.h" -#include "languages/pinyin/english_dictionary_data_builder.h" - -DEFINE_string(input, "", "space separated input text files"); -DEFINE_string(output, "", "output binary file"); - -int main(int argc, char **argv) { - InitGoogle(argv[0], &argc, &argv, false); - - mozc::pinyin::english::EnglishDictionaryDataBuilder builder; - { - mozc::InputFileStream ifs(FLAGS_input.c_str()); - builder.BuildFromStream(&ifs); - } - - scoped_ptr output_stream( - new mozc::OutputFileStream(FLAGS_output.c_str(), ios::out)); - - mozc::CodeGenByteArrayOutputStream *codegen_stream; - output_stream.reset( - codegen_stream = new mozc::CodeGenByteArrayOutputStream( - output_stream.release(), - mozc::codegenstream::OWN_STREAM)); - codegen_stream->OpenVarDef("PinyinEnglishDictionary"); - - LOG(INFO) << "Start writing file."; - builder.WriteToStream(output_stream.get()); - LOG(INFO) << "Start writing file... done."; - - return 0; -} diff -Nru mozc-1.11.1502.102/languages/pinyin/keymap.cc mozc-1.11.1522.102/languages/pinyin/keymap.cc --- mozc-1.11.1502.102/languages/pinyin/keymap.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/keymap.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,257 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/keymap.h" - -#include - -#include "base/logging.h" -#include "base/port.h" -#include "base/singleton.h" -#include "languages/pinyin/configurable_keymap.h" -#include "languages/pinyin/default_keymap.h" -#include "session/commands.pb.h" -#include "session/key_event_util.h" - -namespace mozc { -namespace pinyin { -namespace keymap { - -// TODO(hsumita): Investigates the behavior of "@" key when converter is active. 
- -//////////////////////////////////////////////////////////// -// Pinyin -//////////////////////////////////////////////////////////// - -class PinyinKeymapImpl : public KeymapInterface { - public: - PinyinKeymapImpl() {} - virtual ~PinyinKeymapImpl() {} - - bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command) const { - DCHECK(key_command); - DCHECK(!KeyEventUtil::HasCaps(KeyEventUtil::GetModifiers(key_event))); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - if (ConfigurableKeymap::GetCommand(key_event, state, key_command) || - DefaultKeymap::GetCommand(key_event, state, key_command)) { - return true; - } - - LOG(ERROR) << "Should not reach here."; - *key_command = DO_NOTHING_WITHOUT_CONSUME; - return false; - } - - private: - DISALLOW_COPY_AND_ASSIGN(PinyinKeymapImpl); -}; - -//////////////////////////////////////////////////////////// -// Direct -//////////////////////////////////////////////////////////// - -class DirectKeymapImpl : public KeymapInterface { - public: - DirectKeymapImpl() {} - virtual ~DirectKeymapImpl() {} - - bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command) const { - DCHECK(key_command); - DCHECK(!KeyEventUtil::HasCaps(KeyEventUtil::GetModifiers(key_event))); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - - if (KeyEventUtil::IsCtrlShift(modifiers) && key_event.has_key_code() && - (key_event.key_code() == 'f' || key_event.key_code() == 'F')) { - *key_command = TOGGLE_SIMPLIFIED_CHINESE_MODE; - return true; - } - - if (KeyEventUtil::HasAlt(modifiers) || KeyEventUtil::HasCtrl(modifiers)) { - *key_command = DO_NOTHING_WITHOUT_CONSUME; - return true; - } - - if (key_event.has_key_code()) { - *key_command = INSERT; - return true; - } - - if (KeyEventUtil::IsShift(modifiers) && !key_event.has_special_key()) { - *key_command = TOGGLE_DIRECT_MODE; - return true; - } - - *key_command = DO_NOTHING_WITHOUT_CONSUME; - return true; - } - - private: - DISALLOW_COPY_AND_ASSIGN(DirectKeymapImpl); -}; - -//////////////////////////////////////////////////////////// -// English -//////////////////////////////////////////////////////////// - -class EnglishKeymapImpl : public KeymapInterface { - public: - EnglishKeymapImpl() {} - virtual ~EnglishKeymapImpl() {} - - bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command) const { - DCHECK(key_command); - DCHECK(!KeyEventUtil::HasCaps(KeyEventUtil::GetModifiers(key_event))); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - if (!KeyEventUtil::HasCtrl(modifiers) && !KeyEventUtil::HasAlt(modifiers) && - key_event.has_key_code() && isalpha(key_event.key_code())) { - *key_command = INSERT; - return true; - } - - if (!ConfigurableKeymap::GetCommand(key_event, state, key_command) && - !DefaultKeymap::GetCommand(key_event, state, key_command)) { - LOG(ERROR) << "Should not reach here."; - *key_command = DO_NOTHING_WITHOUT_CONSUME; - return false; - } - - *key_command = GetOverridedCommand(*key_command); - return true; - } - - private: - // Override some commands which is differ from Pinyin keymap. Some commands - // does not make sense on this mode, so we replace these. - // To keep codes simple, and avoid leeks of conversion, we directly execute - // conversion from KeyCommand to KeyCommand instead of conversion from - // KeyEvent to KeyCommand. 
- KeyCommand GetOverridedCommand(KeyCommand key_command) const { - switch (key_command) { - case AUTO_COMMIT: return DO_NOTHING_WITH_CONSUME; - case MOVE_CURSOR_LEFT: return FOCUS_CANDIDATE_TOP; - case MOVE_CURSOR_RIGHT: return FOCUS_CANDIDATE_TOP; - case MOVE_CURSOR_LEFT_BY_WORD: return FOCUS_CANDIDATE_TOP; - case MOVE_CURSOR_RIGHT_BY_WORD: return FOCUS_CANDIDATE_TOP; - case MOVE_CURSOR_TO_BEGINNING: return FOCUS_CANDIDATE_TOP; - case MOVE_CURSOR_TO_END: return FOCUS_CANDIDATE_TOP; - case REMOVE_WORD_BEFORE: return DO_NOTHING_WITHOUT_CONSUME; - case REMOVE_WORD_AFTER: return DO_NOTHING_WITHOUT_CONSUME; - case TOGGLE_DIRECT_MODE: return DO_NOTHING_WITHOUT_CONSUME; - default: return key_command; - } - } - - DISALLOW_COPY_AND_ASSIGN(EnglishKeymapImpl); -}; - -//////////////////////////////////////////////////////////// -// Punctuation -//////////////////////////////////////////////////////////// - -class PunctuationKeymapImpl : public KeymapInterface { - public: - PunctuationKeymapImpl() {} - virtual ~PunctuationKeymapImpl() {} - - bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, KeyCommand *key_command) const { - DCHECK(key_command); - DCHECK(!KeyEventUtil::HasCaps(KeyEventUtil::GetModifiers(key_event))); - DCHECK(!KeyEventUtil::IsNumpadKey(key_event)); - - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - if (!KeyEventUtil::HasAlt(modifiers) && !KeyEventUtil::HasCtrl(modifiers) && - key_event.has_key_code()) { - *key_command = INSERT; - return true; - } - - if (!ConfigurableKeymap::GetCommand(key_event, state, key_command) && - !DefaultKeymap::GetCommand(key_event, state, key_command)) { - LOG(ERROR) << "Should not reach here."; - *key_command = DO_NOTHING_WITHOUT_CONSUME; - return false; - } - - *key_command = GetOverridedCommand(*key_command); - return true; - } - - private: - // Override some commands which is differ from Pinyin keymap. Some commands - // does not make sense on this mode, so we replace these. - // To keep codes simple, and avoid leeks of conversion, we directly execute - // conversion from KeyCommand to KeyCommand instead of conversion from - // KeyEvent to KeyCommand. - KeyCommand GetOverridedCommand(KeyCommand key_command) const { - switch (key_command) { - case AUTO_COMMIT: return INSERT; - case COMMIT: return COMMIT_PREEDIT; - case TOGGLE_DIRECT_MODE: return DO_NOTHING_WITHOUT_CONSUME; - case TURN_ON_PUNCTUATION_MODE: return INSERT; - default: return key_command; - } - } - - map key_command_map_; - - DISALLOW_COPY_AND_ASSIGN(PunctuationKeymapImpl); -}; - -//////////////////////////////////////////////////////////// -// Keymap Factory -//////////////////////////////////////////////////////////// - -const KeymapInterface *KeymapFactory::GetKeymap(KeymapMode mode) { - switch (mode) { - case PINYIN: - return Singleton::get(); - case DIRECT: - return Singleton::get(); - case ENGLISH: - return Singleton::get(); - case PUNCTUATION: - return Singleton::get(); - default: - LOG(ERROR) << "Should not reach here"; - return Singleton::get(); - } -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/keymap.h mozc-1.11.1522.102/languages/pinyin/keymap.h --- mozc-1.11.1502.102/languages/pinyin/keymap.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/keymap.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_KEYMAP_H_ -#define MOZC_LANGUAGES_PINYIN_KEYMAP_H_ - -#include - -#include "base/port.h" -#include "languages/pinyin/pinyin_constant.h" - -namespace mozc { -namespace commands { -class KeyEvent; -} // namespace commands - -namespace pinyin { -namespace keymap { - -class KeymapInterface { - public: - virtual ~KeymapInterface() {} - // Parses key_event and gets command. This method does not handle CapsLock or - // Numlock keys. These keys should be removed before calling this method. - virtual bool GetCommand(const commands::KeyEvent &key_event, - ConverterState state, - KeyCommand *key_command) const = 0; -}; - -class KeymapFactory { - public: - static const KeymapInterface *GetKeymap(KeymapMode mode); - - private: - DISALLOW_IMPLICIT_CONSTRUCTORS(KeymapFactory); -}; - -} // namespace keymap -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_KEYMAP_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/keymap_test.cc mozc-1.11.1522.102/languages/pinyin/keymap_test.cc --- mozc-1.11.1502.102/languages/pinyin/keymap_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/keymap_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,229 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/keymap.h" - -#include - -#include "base/port.h" -#include "base/system_util.h" -#include "base/util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "session/commands.pb.h" -#include "session/key_parser.h" -#include "testing/base/public/googletest.h" -#include "testing/base/public/gunit.h" - -DECLARE_string(test_tmpdir); - -namespace mozc { -namespace pinyin { -namespace keymap { - -namespace { -testing::AssertionResult CheckKeyCommand(const char *expected_command_expr, - const char *keymap_expr, - const char *keys_expr, - KeyCommand expected_command, - const KeymapInterface *keymap, - const string &keys) { - commands::KeyEvent key_event; - if (!KeyParser::ParseKey(keys, &key_event)) { - return testing::AssertionFailure() << - Util::StringPrintf("Failed to parse keys.\n" - "keys: %s (%s)", - keys.c_str(), keys_expr); - } - - KeyCommand key_command; - // It is enough that we test ACTIVE state only because - // - PinyinKeymap is simply calls DefaultKeymap and ConfigurableKeymap and - // has no complex logic. - // - Other keymaps doesn't consider any state. 
- if (!keymap->GetCommand(key_event, ACTIVE, &key_command)) { - return testing::AssertionFailure() << - Util::StringPrintf("Failed to get command.\n" - "keys: %s (%s)", keys.c_str(), keys_expr); - } - - if (expected_command != key_command) { - return testing::AssertionFailure() << - Util::StringPrintf("KeyCommand is not valid.\n" - "Expected: %d\n" - "Actual: %d", - expected_command, key_command); - } - - return testing::AssertionSuccess(); -} - -#define EXPECT_KEY_COMMAND(expected_command, keymap, keys) \ - EXPECT_PRED_FORMAT3(CheckKeyCommand, expected_command, keymap, keys) -} // namespace - -class KeymapTest : public testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config.mutable_pinyin_config()->set_paging_with_minus_equal(true); - config.mutable_pinyin_config()->set_double_pinyin(false); - config::ConfigHandler::SetConfig(config); - } - - virtual void TearDown() { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } -}; - -TEST_F(KeymapTest, PinyinKeymap) { - const KeymapInterface *keymap = KeymapFactory::GetKeymap(PINYIN); - - { - SCOPED_TRACE("Configurable keymap"); - EXPECT_KEY_COMMAND(FOCUS_CANDIDATE_PREV_PAGE, keymap, "-"); - } - - { - SCOPED_TRACE("Default keymap"); - EXPECT_KEY_COMMAND(INSERT, keymap, "a"); - } -} - -namespace { -const struct DirectKeymapTestData { - string keys; - KeyCommand command; -} kDirectKeymapTestData[] = { - { "-", INSERT }, - { "a", INSERT }, - { "!", INSERT }, - { "0", INSERT }, - { "A", INSERT }, - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - { "Ctrl a", DO_NOTHING_WITHOUT_CONSUME }, - { "Left", DO_NOTHING_WITHOUT_CONSUME }, - { "Ctrl Left", DO_NOTHING_WITHOUT_CONSUME }, - { "Home", DO_NOTHING_WITHOUT_CONSUME }, - { "Ctrl", DO_NOTHING_WITHOUT_CONSUME }, - { "Enter", DO_NOTHING_WITHOUT_CONSUME }, - { "Shift", TOGGLE_DIRECT_MODE }, - { "v", INSERT }, - { "`", INSERT }, - { "Ctrl Shift f", TOGGLE_SIMPLIFIED_CHINESE_MODE }, -}; -} // namespace - -TEST_F(KeymapTest, DirectKeymap) { - const KeymapInterface *keymap = KeymapFactory::GetKeymap(DIRECT); - - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kDirectKeymapTestData); ++i) { - const DirectKeymapTestData &data = kDirectKeymapTestData[i]; - SCOPED_TRACE(data.keys); - EXPECT_KEY_COMMAND(data.command, keymap, data.keys); - } -} - -namespace { -const struct EnglishKeymapTestData { - string keys; - KeyCommand command; -} kEnglishKeymapTestData[] = { - { "-", FOCUS_CANDIDATE_PREV_PAGE }, - { "a", INSERT }, - { "!", DO_NOTHING_WITH_CONSUME }, - { "A", INSERT }, - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. 
- { "Ctrl a", DO_NOTHING_WITH_CONSUME }, - { "Left", FOCUS_CANDIDATE_TOP }, - { "Right", FOCUS_CANDIDATE_TOP }, - { "Ctrl Left", FOCUS_CANDIDATE_TOP }, - { "Ctrl Right", FOCUS_CANDIDATE_TOP }, - { "Home", FOCUS_CANDIDATE_TOP }, - { "End", FOCUS_CANDIDATE_TOP }, - { "SHIFT", DO_NOTHING_WITHOUT_CONSUME }, - { "Ctrl BS", DO_NOTHING_WITHOUT_CONSUME }, - { "Ctrl Delete", DO_NOTHING_WITHOUT_CONSUME }, - { "v", INSERT }, - { "`", DO_NOTHING_WITH_CONSUME }, - { "Ctrl Shift f", TOGGLE_SIMPLIFIED_CHINESE_MODE }, -}; -} // namespace - -TEST_F(KeymapTest, EnglishKeymap) { - const KeymapInterface *keymap = KeymapFactory::GetKeymap(ENGLISH); - - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kEnglishKeymapTestData); ++i) { - const EnglishKeymapTestData &data = kEnglishKeymapTestData[i]; - SCOPED_TRACE(data.keys); - EXPECT_KEY_COMMAND(data.command, keymap, data.keys); - } -} - -namespace { -const struct PunctuationKeymapTestData { - string keys; - KeyCommand command; -} kPunctuationKeymapTestData[] = { - { "-", INSERT }, - { "a", INSERT }, - { "!", INSERT }, - { "0", INSERT }, - { "A", INSERT }, - // "Shift + printable key" should not be sent from mozc_engine. - // Shift key with a printable key is removed. - { "Ctrl a", DO_NOTHING_WITH_CONSUME }, - { "Left", MOVE_CURSOR_LEFT }, - { "SHIFT", DO_NOTHING_WITHOUT_CONSUME }, - { "ENTER", COMMIT_PREEDIT }, - { "v", INSERT }, - { "`", INSERT }, - { "Ctrl Shift f", TOGGLE_SIMPLIFIED_CHINESE_MODE }, -}; -} // namespace - -TEST_F(KeymapTest, PunctuationKeymap) { - const KeymapInterface *keymap = KeymapFactory::GetKeymap(PUNCTUATION); - - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kPunctuationKeymapTestData); ++i) { - const PunctuationKeymapTestData &data = kPunctuationKeymapTestData[i]; - SCOPED_TRACE(data.keys); - EXPECT_KEY_COMMAND(data.command, keymap, data.keys); - } -} - -} // namespace keymap -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin.gyp mozc-1.11.1522.102/languages/pinyin/pinyin.gyp --- mozc-1.11.1502.102/languages/pinyin/pinyin.gyp 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin.gyp 1970-01-01 00:00:00.000000000 +0000 @@ -1,550 +0,0 @@ -# Copyright 2010-2013, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -{ - 'variables': { - 'relative_dir': 'languages/pinyin', - 'gen_out_dir': '<(SHARED_INTERMEDIATE_DIR)/<(relative_dir)', - }, - 'targets': [ - { - # Meta target to set up build environment. - 'target_name': 'pinyin_build_environment', - 'type': 'none', - 'variables': { - 'pinyin_libs': [ - 'pyzy-1.0', - ], - }, - 'all_dependent_settings': { - 'cflags': [ - ' -#include -#include - -#include - -#include "base/logging.h" -#include "base/util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/pinyin/session_config.h" - -namespace mozc { -namespace pinyin { - -class ContextObserver : public ContextObserverInterface { - public: - explicit ContextObserver(const SessionConfig &session_config) - : session_config_(session_config) { - } - - virtual ~ContextObserver() {} - - virtual const string &commit_text() const { - return commit_text_; - } - - virtual void SetCommitText(const string &commit_text) { - // TODO(hsumita): Move this logic to SessionConverter. - if (session_config_.full_width_word_mode) { - Util::HalfWidthAsciiToFullWidthAscii(commit_text, &commit_text_); - } else { - commit_text_.assign(commit_text); - } - } - - virtual void ClearCommitText() { - commit_text_.clear(); - } - - // Callback interfaces which are called by libpyzy. - virtual void commitText(PyZy::InputContext *context, - const string &commit_text) { - SetCommitText(commit_text); - } - - // We don't use these function. Do nothings. - virtual void inputTextChanged(PyZy::InputContext *context) {} - virtual void cursorChanged(PyZy::InputContext *context) {} - virtual void preeditTextChanged(PyZy::InputContext *context) {} - virtual void auxiliaryTextChanged(PyZy::InputContext *context) {} - virtual void candidatesChanged(PyZy::InputContext *context) {} - - private: - string commit_text_; - const SessionConfig &session_config_; -}; - -// Apply this change to the header file. 
-PinyinContext::PinyinContext(const SessionConfig &session_config) - : session_config_(session_config), - observer_(new ContextObserver(session_config)) { - ResetContext(); - ReloadConfig(); -} - -PinyinContext::~PinyinContext() { -} - -bool PinyinContext::Insert(char ch) { - if (isdigit(ch) && input_text().empty()) { - observer_->SetCommitText(string(1, ch)); - return true; - } - return context_->insert(ch); -} - -void PinyinContext::Commit() { - context_->commit(PyZy::InputContext::TYPE_CONVERTED); -} - -void PinyinContext::CommitPreedit() { - context_->commit(PyZy::InputContext::TYPE_RAW); -} - -void PinyinContext::Clear() { - context_->reset(); - ClearCommitText(); -} - -void PinyinContext::ClearCommitText() { - observer_->ClearCommitText(); -} - -bool PinyinContext::MoveCursorRight() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorRight(); -} - -bool PinyinContext::MoveCursorLeft() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorLeft(); -} - -bool PinyinContext::MoveCursorRightByWord() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorRightByWord(); -} - -bool PinyinContext::MoveCursorLeftByWord() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorLeftByWord(); -} - -bool PinyinContext::MoveCursorToBeginning() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorToBegin(); -} - -bool PinyinContext::MoveCursorToEnd() { - if (context_->unselectCandidates()) { - return true; - } - return context_->moveCursorToEnd(); -} - -bool PinyinContext::SelectCandidate(size_t index) { - return context_->selectCandidate(index); -} - -bool PinyinContext::FocusCandidate(size_t index) { - return context_->focusCandidate(index); -} - -bool PinyinContext::ClearCandidateFromHistory(size_t index) { - return context_->resetCandidate(index); -} - -bool PinyinContext::RemoveCharBefore() { - return context_->removeCharBefore(); -} - -bool PinyinContext::RemoveCharAfter() { - return context_->removeCharAfter(); -} - -bool PinyinContext::RemoveWordBefore() { - return context_->removeWordBefore(); -} - -bool PinyinContext::RemoveWordAfter() { - return context_->removeWordAfter(); -} - -namespace { -const uint32 kIncompletePinyinOption = PINYIN_INCOMPLETE_PINYIN; -const uint32 kCorrectPinyinOption = PINYIN_CORRECT_ALL; -const uint32 kFuzzyPinyinOption = - PINYIN_FUZZY_C_CH | - PINYIN_FUZZY_Z_ZH | - PINYIN_FUZZY_S_SH | - PINYIN_FUZZY_L_N | - PINYIN_FUZZY_F_H | - PINYIN_FUZZY_K_G | - PINYIN_FUZZY_G_K | - PINYIN_FUZZY_AN_ANG | - PINYIN_FUZZY_ANG_AN | - PINYIN_FUZZY_EN_ENG | - PINYIN_FUZZY_ENG_EN | - PINYIN_FUZZY_IN_ING | - PINYIN_FUZZY_ING_IN; -} // namespace - -void PinyinContext::ReloadConfig() { - const config::PinyinConfig &config = GET_CONFIG(pinyin_config); - - // Resets a context if input method is changed. 
- if (config.double_pinyin() != double_pinyin_) { - ResetContext(); - } - - uint32 conversion_option = kIncompletePinyinOption; - if (config.correct_pinyin()) { - conversion_option |= kCorrectPinyinOption; - } - if (config.fuzzy_pinyin()) { - conversion_option |= kFuzzyPinyinOption; - } - context_->setProperty(PyZy::InputContext::PROPERTY_CONVERSION_OPTION, - PyZy::Variant::fromUnsignedInt(conversion_option)); - - context_->setProperty( - PyZy::InputContext::PROPERTY_DOUBLE_PINYIN_SCHEMA, - PyZy::Variant::fromUnsignedInt(config.double_pinyin_schema())); - - context_->setProperty( - PyZy::InputContext::PROPERTY_MODE_SIMP, - PyZy::Variant::fromBool(session_config_.simplified_chinese_mode)); -} - -const string &PinyinContext::commit_text() const { - return observer_->commit_text(); -} - -const string &PinyinContext::input_text() const { - return context_->inputText(); -} - -const string &PinyinContext::selected_text() const { - return context_->selectedText(); -} - -const string &PinyinContext::conversion_text() const { - return context_->conversionText(); -} - -const string &PinyinContext::rest_text() const { - return context_->restText(); -} - -const string &PinyinContext::auxiliary_text() const { - return context_->auxiliaryText(); -} - -size_t PinyinContext::cursor() const { - return context_->cursor(); -} - -size_t PinyinContext::focused_candidate_index() const { - return context_->focusedCandidate(); -} - -bool PinyinContext::GetCandidate(size_t index, Candidate *candidate) { - DCHECK(candidate); - - PyZy::Candidate pyzy_candidate; - if (context_->getCandidate(index, pyzy_candidate)) { - candidate->text.assign(pyzy_candidate.text); - return true; - } - return false; -} - -bool PinyinContext::HasCandidate(size_t index) { - return context_->hasCandidate(index); -} - -size_t PinyinContext::PrepareCandidates(size_t required_size) { - DCHECK_NE(0, required_size); - if (context_->hasCandidate(required_size - 1)) { - return required_size; - } - return context_->getPreparedCandidatesSize(); -} - -void PinyinContext::ResetContext() { - double_pinyin_ = GET_CONFIG(pinyin_config).double_pinyin(); - - PyZy::InputContext::InputType type = double_pinyin_ - ? PyZy::InputContext::DOUBLE_PINYIN - : PyZy::InputContext::FULL_PINYIN; - - context_.reset(PyZy::InputContext::create(type, observer_.get())); - Clear(); -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context.h mozc-1.11.1522.102/languages/pinyin/pinyin_context.h --- mozc-1.11.1502.102/languages/pinyin/pinyin_context.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,115 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
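[Editor's aside] ReloadConfig() above assembles a single libpyzy conversion option by OR-ing feature bits: the incomplete-pinyin bit is always set, and the correction and fuzzy bits are added when the corresponding config flags are on. A schematic sketch of that composition with placeholder values; the real PINYIN_* constants are defined by libpyzy, not here:

#include <cstdint>
#include <iostream>

// Hypothetical bit values chosen only for illustration.
const uint32_t kIncompleteOption = 1u << 0;
const uint32_t kCorrectOption    = 1u << 1;
const uint32_t kFuzzyOption      = (1u << 2) | (1u << 3);  // union of several fuzzy pairs

uint32_t BuildConversionOption(bool correct_pinyin, bool fuzzy_pinyin) {
  uint32_t option = kIncompleteOption;  // always enabled, as in ReloadConfig()
  if (correct_pinyin) option |= kCorrectOption;
  if (fuzzy_pinyin)   option |= kFuzzyOption;
  return option;
}

int main() {
  std::cout << BuildConversionOption(true, false) << std::endl;  // prints 3
}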
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_H_ -#define MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_H_ - -#include "languages/pinyin/pinyin_context_interface.h" - -#include -#include -#include - -#include "base/scoped_ptr.h" - -namespace mozc { -namespace pinyin { -struct SessionConfig; - -// InputContext::Observer has only pure virtual methods and static methods. -// So we can inherit it. -class ContextObserverInterface : public PyZy::InputContext::Observer { - public: - virtual ~ContextObserverInterface() {} - virtual const string &commit_text() const = 0; - virtual void SetCommitText(const string &commit_text) = 0; - virtual void ClearCommitText() = 0; -}; - -class PinyinContext : public PinyinContextInterface { - public: - explicit PinyinContext(const SessionConfig &session_config); - virtual ~PinyinContext(); - - // Wrapper functions of libpyzy. - bool Insert(char ch); - void Commit(); - void CommitPreedit(); - void Clear(); - void ClearCommitText(); - - bool MoveCursorRight(); - bool MoveCursorLeft(); - bool MoveCursorRightByWord(); - bool MoveCursorLeftByWord(); - bool MoveCursorToBeginning(); - bool MoveCursorToEnd(); - - bool SelectCandidate(size_t index); - bool FocusCandidate(size_t index); - bool ClearCandidateFromHistory(size_t index); - - bool RemoveCharBefore(); - bool RemoveCharAfter(); - bool RemoveWordBefore(); - bool RemoveWordAfter(); - - void ReloadConfig(); - - const string &commit_text() const; - const string &input_text() const; - const string &selected_text() const; - const string &conversion_text() const; - const string &rest_text() const; - const string &auxiliary_text() const; - - size_t cursor() const; - size_t focused_candidate_index() const; - bool GetCandidate(size_t index, Candidate *candidate); - bool HasCandidate(size_t index); - size_t PrepareCandidates(size_t required_size); - - private: - friend class PinyinContextTest; - void ResetContext(); - - // Double pinyin mode or not. - bool double_pinyin_; - const SessionConfig &session_config_; - // We should delete context_ before observer_. - scoped_ptr observer_; - scoped_ptr context_; - - DISALLOW_COPY_AND_ASSIGN(PinyinContext); -}; - -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context_interface.h mozc-1.11.1522.102/languages/pinyin/pinyin_context_interface.h --- mozc-1.11.1502.102/languages/pinyin/pinyin_context_interface.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context_interface.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
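[Editor's aside] The PinyinContext class above notes "We should delete context_ before observer_" and relies on member declaration order to guarantee it: C++ destroys non-static members in reverse order of declaration, so declaring observer_ first means context_ is torn down first. A self-contained sketch of that guarantee, with hypothetical names:

#include <iostream>
#include <memory>

struct Logger {
  explicit Logger(const char *name) : name_(name) {}
  ~Logger() { std::cout << "destroying " << name_ << std::endl; }
  const char *name_;
};

// Mirrors the member layout in PinyinContext: observer_ declared before
// context_, so context_ is destroyed first when a Holder goes out of scope.
struct Holder {
  std::unique_ptr<Logger> observer_{new Logger("observer")};
  std::unique_ptr<Logger> context_{new Logger("context")};
};

int main() {
  Holder holder;
}  // prints "destroying context" then "destroying observer"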
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Manage PinYin conversion engine. - -#ifndef MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_INTERFACE_H_ -#define MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_INTERFACE_H_ - -#include -#include - -#include "base/base.h" - -namespace mozc { -namespace pinyin { - -// TODO(hsumita): Appends candidate type to |Candidate|. -// Candidate type is used to indicates how the candidate is generated. -struct Candidate { - string text; -}; - -class PinyinContextInterface { - public: - virtual ~PinyinContextInterface() {} - - virtual bool Insert(char ch) = 0; - // Sets |selected_text| + |unselected_text| to |commit_text| and clears - // other context. - // |unselected_text| is pinyin of |conversion text| + |rest_text|. - virtual void Commit() = 0; - // Sets |input_text| to |commit_text| and clears other context. - virtual void CommitPreedit() = 0; - virtual void Clear() = 0; - // Clears only commit text. - virtual void ClearCommitText() = 0; - - virtual bool MoveCursorRight() = 0; - virtual bool MoveCursorLeft() = 0; - virtual bool MoveCursorRightByWord() = 0; - virtual bool MoveCursorLeftByWord() = 0; - virtual bool MoveCursorToBeginning() = 0; - virtual bool MoveCursorToEnd() = 0; - - virtual bool SelectCandidate(size_t index) = 0; - virtual bool FocusCandidate(size_t index) = 0; - // Clears specified conversion history. - // Candidate which is introduced by conversion history is also cleared. - virtual bool ClearCandidateFromHistory(size_t index) = 0; - - virtual bool RemoveCharBefore() = 0; - virtual bool RemoveCharAfter() = 0; - virtual bool RemoveWordBefore() = 0; - virtual bool RemoveWordAfter() = 0; - - // Reloads config of backend with config::PinyinConfig. - // If config::PinyinConfig::double_pinyin is changed, any informations of - // the context are cleared. - virtual void ReloadConfig() = 0; - - // Accessors - // Commit text. - virtual const string &commit_text() const = 0; - // Raw input text. It is not modified without calling Insert(), Remove*(), - // Commit*(), or Clear(). 
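[Editor's aside] The interface comments above distinguish Commit() (emits selected_text plus the pinyin of the still-unconverted part) from CommitPreedit() (emits the raw input_text). A schematic model of those two behaviors, using the "nihao" values seen later in the tests; this is an illustration of the documented semantics, not the real class:

#include <iostream>
#include <string>

struct ContextState {
  std::string input_text = "nihao";
  std::string selected_text = "\xE4\xBD\xA0";  // "你", already selected via a candidate
  std::string unselected_pinyin = "hao";       // pinyin of conversion_text + rest_text
};

std::string Commit(const ContextState &s) { return s.selected_text + s.unselected_pinyin; }
std::string CommitPreedit(const ContextState &s) { return s.input_text; }

int main() {
  ContextState state;
  std::cout << Commit(state) << std::endl;         // "你hao"
  std::cout << CommitPreedit(state) << std::endl;  // "nihao"
}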
- virtual const string &input_text() const = 0; - // Already selected text using candidate window. - virtual const string &selected_text() const = 0; - // Text which is being converted. - virtual const string &conversion_text() const = 0; - // Unsegmented and unconverted text. - virtual const string &rest_text() const = 0; - // Auxiliary text is shown on candidates window to support user operations. - virtual const string &auxiliary_text() const = 0; - - virtual size_t cursor() const = 0; - virtual size_t focused_candidate_index() const = 0; - // TODO(hsumita): Appends const qualifier to Get/HasCandidate methods. - virtual bool GetCandidate(size_t index, Candidate *candidate) = 0; - virtual bool HasCandidate(size_t index) = 0; - // Takes a required candidates size, and returns a prepared candidates size. - virtual size_t PrepareCandidates(size_t required_size) = 0; -}; - -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_INTERFACE_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock.cc mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock.cc --- mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,365 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/pinyin_context_mock.h" - -#include -#include -#include -#include - -#include "base/logging.h" -#include "base/util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" - -namespace mozc { -namespace pinyin { - -namespace { -// Used to determine word boundaries. 
-const size_t kWordSize = 3; -const char *kAuxiliaryTextPrefix = "auxiliary_text_"; -} // namespace - -PinyinContextMock::PinyinContextMock() { - double_pinyin_ = GET_CONFIG(pinyin_config).double_pinyin(); - Clear(); -} - -PinyinContextMock::~PinyinContextMock() { -} - -bool PinyinContextMock::Insert(char ch) { - if (!islower(ch)) { - return false; - } - - input_text_ += ch; - ++cursor_; - focused_candidate_index_ = 0; - Update(); - return true; -} - -void PinyinContextMock::Commit() { - string result; - result.append(selected_text_); - result.append(input_text_.substr(Util::CharsLen(selected_text_))); - Clear(); - commit_text_ = result; -} - -void PinyinContextMock::CommitPreedit() { - const string result = input_text_; - Clear(); - commit_text_ = result; -} - -void PinyinContextMock::Clear() { - ClearCommitText(); - - input_text_.clear(); - selected_text_.clear(); - conversion_text_.clear(); - rest_text_.clear(); - auxiliary_text_.clear(); - cursor_ = 0; - focused_candidate_index_ = 0; - candidates_.clear(); -} - -void PinyinContextMock::ClearCommitText() { - commit_text_.clear(); -} - -bool PinyinContextMock::MoveCursorRight() { - const size_t pos = min(input_text_.size(), cursor_ + 1); - return MoveCursorInternal(pos); -} - -bool PinyinContextMock::MoveCursorLeft() { - const size_t pos = (cursor_ == 0) ? 0 : cursor_ - 1; - return MoveCursorInternal(pos); -} - -bool PinyinContextMock::MoveCursorRightByWord() { - return MoveCursorInternal(BoundaryNext()); -} - -bool PinyinContextMock::MoveCursorLeftByWord() { - return MoveCursorInternal(BoundaryPrev()); -} - -bool PinyinContextMock::MoveCursorToBeginning() { - return MoveCursorInternal(0); -} - -bool PinyinContextMock::MoveCursorToEnd() { - return MoveCursorInternal(input_text_.size()); -} - -bool PinyinContextMock::SelectCandidate(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - selected_text_.append(candidates_[index]); - conversion_text_.clear(); - focused_candidate_index_ = 0; - - if (Util::CharsLen(selected_text_) == input_text_.size()) { - Commit(); - } else { - Update(); - } - return true; -} - -bool PinyinContextMock::FocusCandidate(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - if (input_text_.size() == cursor_) { - conversion_text_ = candidates_[index]; - } else { - conversion_text_ = input_text_.substr(Util::CharsLen(selected_text_), - Util::CharsLen(candidates_[index])); - } - rest_text_ = input_text_.substr( - Util::CharsLen(selected_text_) + Util::CharsLen(conversion_text_)); - focused_candidate_index_ = index; - return true; -} - -bool PinyinContextMock::ClearCandidateFromHistory(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - candidates_.erase(candidates_.begin() + index); - focused_candidate_index_ = 0; - UpdateConversion(); - return true; -} - -bool PinyinContextMock::RemoveCharBefore() { - if (cursor_ == 0) { - return false; - } - - input_text_.erase(cursor_ - 1, 1); - --cursor_; - focused_candidate_index_ = 0; - Update(); - return true; -} - -bool PinyinContextMock::RemoveCharAfter() { - if (cursor_ == input_text_.size()) { - return false; - } - - input_text_.erase(cursor_, 1); - rest_text_ = input_text_.substr(cursor_); - return true; -} - -bool PinyinContextMock::RemoveWordBefore() { - if (cursor_ == 0) { - return false; - } - - const size_t boundary = BoundaryPrev(); - input_text_.erase(boundary, cursor_ - boundary); - cursor_ = boundary; - focused_candidate_index_ = 0; - Update(); - return true; -} - -bool 
PinyinContextMock::RemoveWordAfter() { - if (cursor_ == input_text_.size()) { - return false; - } - - const size_t boundary = BoundaryNext(); - input_text_.erase(cursor_, boundary - cursor_); - rest_text_ = input_text_.substr(cursor_); - return true; -} - -void PinyinContextMock::ReloadConfig() { - bool new_mode = GET_CONFIG(pinyin_config).double_pinyin(); - if (new_mode != double_pinyin_) { - double_pinyin_ = new_mode; - Clear(); - } -} - -const string &PinyinContextMock::commit_text() const { - return commit_text_; -} - -const string &PinyinContextMock::input_text() const { - return input_text_; -} - -const string &PinyinContextMock::selected_text() const { - return selected_text_; -} - -const string &PinyinContextMock::conversion_text() const { - return conversion_text_; -} - -const string &PinyinContextMock::rest_text() const { - return rest_text_; -} - -const string &PinyinContextMock::auxiliary_text() const { - return auxiliary_text_; -} - -size_t PinyinContextMock::cursor() const { - return cursor_; -} - -size_t PinyinContextMock::focused_candidate_index() const { - return focused_candidate_index_; -} - -size_t PinyinContextMock::candidates_size() const { - return candidates_.size(); -} - -bool PinyinContextMock::HasCandidate(size_t index) { - return index < candidates_.size(); -} - -bool PinyinContextMock::GetCandidate(size_t index, Candidate *candidate) { - DCHECK(candidate); - if (!HasCandidate(index)) { - return false; - } - candidate->text.assign(candidates_[index]); - return true; -} - -size_t PinyinContextMock::PrepareCandidates(size_t index) { - return min(index, candidates_.size()); -} - -size_t PinyinContextMock::BoundaryNext() const { - return min(input_text_.size(), - (cursor_ + kWordSize) / kWordSize * kWordSize); -} - -size_t PinyinContextMock::BoundaryPrev() const { - if (cursor_ == 0) { - return 0; - } - return (cursor_ - 1) / kWordSize * kWordSize; -} - -void PinyinContextMock::Update() { - UpdateCandidates(); - UpdateConversion(); -} - -void PinyinContextMock::UpdateCandidates() { - const size_t selected_length = Util::CharsLen(selected_text_); - const string converting_string = input_text_.substr( - selected_length, cursor_ - selected_length); - - string base = converting_string; - Util::UpperString(&base); - - candidates_.clear(); - for (size_t i = 0; i < base.size(); ++i) { - const string &sub_text = base.substr(0, base.size() - i); - string candidate; - Util::HalfWidthAsciiToFullWidthAscii(sub_text, &candidate); - candidates_.push_back(candidate); - } -} - -void PinyinContextMock::UpdateConversion() { - conversion_text_.clear(); - rest_text_.clear(); - auxiliary_text_.clear(); - - const size_t selected_length = Util::CharsLen(selected_text_); - - if (candidates_.empty()) { - rest_text_ = input_text_.substr(selected_length); - return; - } - - if (cursor_ == input_text_.size()) { - conversion_text_ = candidates_[focused_candidate_index_]; - const size_t consumed = - Util::CharsLen(selected_text_) + Util::CharsLen(conversion_text_); - rest_text_ = input_text_.substr(consumed); - } else { - conversion_text_ = input_text_.substr( - selected_length, cursor_ - selected_length); - rest_text_ = input_text_.substr(cursor_); - } - - auxiliary_text_ = kAuxiliaryTextPrefix + input_text_.substr( - selected_length, cursor_ - selected_length); -} - -bool PinyinContextMock::MoveCursorInternal(size_t pos) { - if (pos > input_text_.size()) { - LOG(ERROR) << "Too big cursor index!"; - return false; - } - - if (pos == cursor_) { - return true; - } - - cursor_ = pos; - - 
selected_text_.clear(); - conversion_text_.clear(); - rest_text_.clear(); - focused_candidate_index_ = 0; - candidates_.clear(); - - Update(); - - return true; -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock.h mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock.h --- mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,134 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_MOCK_H_ -#define MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_MOCK_H_ - -#include "languages/pinyin/pinyin_context_interface.h" - -#include -#include - -namespace mozc { -namespace pinyin { - -// This class converts an alphabet-sequence to full-width and upper case. -// Candidates consist of all prefix of input text orderd by length. -// e.g.) input "abc" -> candidates "ABC", "AB", and "A". - -// The differences of actual PinyinContext and the mock is here. -// - Can't convert multi-characters to one character. -// - Assumes that word boundaries exist on (i % 3 == 0). -// e.g.) input "abcdefghijk" -> boundaries "abc def ghi jk". -// This boundary is used in MoveCursor*ByWord(), and RemoveWord*(), -// and isn't used in conversion process. -// - Can't manage "'" on Insert(). It is used to specify boundaries. -// - Content of auxiliary text is "auxiliary_text_" + -// tolower(to_half_width(candidates_[0])). -// - ClearCandidateFromHistory() removes specified candidate even if it is -// not a candidate from a history. - -class PinyinContextMock : public PinyinContextInterface { - public: - PinyinContextMock(); - virtual ~PinyinContextMock(); - - // Insert() returns false if ch is not an alphabet. 
- virtual bool Insert(char ch); - virtual void Commit(); - virtual void CommitPreedit(); - virtual void Clear(); - virtual void ClearCommitText(); - - virtual bool MoveCursorRight(); - virtual bool MoveCursorLeft(); - virtual bool MoveCursorRightByWord(); - virtual bool MoveCursorLeftByWord(); - virtual bool MoveCursorToBeginning(); - virtual bool MoveCursorToEnd(); - - virtual bool SelectCandidate(size_t index); - virtual bool FocusCandidate(size_t index); - virtual bool ClearCandidateFromHistory(size_t index); - - virtual bool RemoveCharBefore(); - virtual bool RemoveCharAfter(); - virtual bool RemoveWordBefore(); - virtual bool RemoveWordAfter(); - - virtual void ReloadConfig(); - - virtual const string &commit_text() const; - virtual const string &input_text() const; - virtual const string &selected_text() const; - virtual const string &conversion_text() const; - virtual const string &rest_text() const; - virtual const string &auxiliary_text() const; - - virtual size_t cursor() const; - virtual size_t focused_candidate_index() const; - virtual size_t candidates_size() const; - virtual bool HasCandidate(size_t index); - virtual bool GetCandidate(size_t index, Candidate *candidate); - virtual size_t PrepareCandidates(size_t index); - - private: - // Finds a word boundary around cursor_. - virtual size_t BoundaryNext() const; - virtual size_t BoundaryPrev() const; - - virtual void Update(); - // Converts an alphabet-sequence to full-width and upper case - virtual void UpdateCandidates(); - virtual void UpdateConversion(); - virtual bool MoveCursorInternal(size_t pos); - - // This mock expects that - // - input_text_ contains only ASCII characters. - // - commit_text_, selected_text_, conversion_text_, rest_text_, and - // candidates_ contains ASCII characters or UTF-8 characters. - string commit_text_; - string input_text_; - string selected_text_; - string conversion_text_; - string rest_text_; - string auxiliary_text_; - size_t cursor_; - size_t focused_candidate_index_; - vector candidates_; - - bool double_pinyin_; - - DISALLOW_COPY_AND_ASSIGN(PinyinContextMock); -}; - -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_PINYIN_CONTEXT_MOCK_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock_test.cc mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock_test.cc --- mozc-1.11.1502.102/languages/pinyin/pinyin_context_mock_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context_mock_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,683 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
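[Editor's aside] The mock described above produces candidates as upper-cased prefixes of the input, longest first, and snaps word motion to three-character boundaries. A self-contained sketch of the same arithmetic in plain standard C++, leaving out the full-width conversion the real mock also applies:

#include <algorithm>
#include <cctype>
#include <cstddef>
#include <iostream>
#include <string>
#include <vector>

const size_t kWordSize = 3;  // the mock's word-boundary unit

// Candidates are all prefixes of the text, ordered by decreasing length.
std::vector<std::string> MockCandidates(const std::string &text) {
  std::string upper;
  for (char c : text) {
    upper.push_back(static_cast<char>(std::toupper(static_cast<unsigned char>(c))));
  }
  std::vector<std::string> candidates;
  for (size_t len = upper.size(); len > 0; --len) {
    candidates.push_back(upper.substr(0, len));
  }
  return candidates;
}

// Word boundaries sit at multiples of kWordSize, mirroring BoundaryNext/Prev.
size_t BoundaryNext(size_t cursor, size_t input_size) {
  return std::min(input_size, (cursor + kWordSize) / kWordSize * kWordSize);
}
size_t BoundaryPrev(size_t cursor) {
  return cursor == 0 ? 0 : (cursor - 1) / kWordSize * kWordSize;
}

int main() {
  for (const std::string &c : MockCandidates("abc")) std::cout << c << " ";  // ABC AB A
  std::cout << "\n" << BoundaryNext(4, 11) << " " << BoundaryPrev(4) << "\n";  // 6 3
}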
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/pinyin_context_mock.h" - -#include -#include - -#include "base/system_util.h" -#include "base/util.h" -#include "languages/pinyin/pinyin_context_interface.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "testing/base/public/googletest.h" -#include "testing/base/public/gunit.h" - -DECLARE_string(test_tmpdir); - -// TODO(hsumita): Check return value of member functions of mock. - -namespace mozc { -namespace pinyin { -namespace { -void InsertCharacterChars(const string &chars, - PinyinContextInterface *context) { - for (size_t i = 0; i < chars.size(); ++i) { - context->Insert(chars[i]); - } -} - -// Wrapper function. It takes only half width ASCII characters. -string ToFullWidthAscii(const string &half_width) { - string full_width; - Util::HalfWidthAsciiToFullWidthAscii(half_width, &full_width); - return full_width; -} -} // namespace - -class PinyinContextMockTest : public testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } - - virtual void TearDown() { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } - - size_t GetCandidatesSize(PinyinContextInterface *context) { - size_t size = 0; - for (; context->HasCandidate(size); ++size) {} - return size; - } -}; - -TEST_F(PinyinContextMockTest, InsertTest) { - PinyinContextMock context; - - { // Insert "nihao" and commit without select a candidate. - context.Clear(); - - // Does nothing. - context.Insert('A'); - EXPECT_EQ("", context.input_text()); - - InsertCharacterChars("nihao", &context); - EXPECT_EQ("", context.commit_text()); - EXPECT_EQ("nihao", context.input_text()); - EXPECT_EQ("auxiliary_text_nihao", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - ASSERT_EQ(5, GetCandidatesSize(&context)); - - const string kBaseCandidate = "NIHAO"; - size_t size = kBaseCandidate.size(); - for (size_t i = 0; i < size; ++i) { - Candidate candidate; - EXPECT_TRUE(context.GetCandidate(i, &candidate)); - EXPECT_EQ(ToFullWidthAscii(kBaseCandidate.substr(0, size - i)), - candidate.text); - } - - // Does nothing. - context.Insert('A'); - EXPECT_EQ("nihao", context.input_text()); - } -} - -TEST_F(PinyinContextMockTest, CommitTest) { - PinyinContextMock context; - - { // Insert "nihao" and commit without select a candidate. 
- context.Clear(); - InsertCharacterChars("nihao", &context); - - context.Commit(); - EXPECT_EQ("nihao", context.commit_text()); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(0, context.candidates_size()); - } - - { // Insert "nihao" and select "NIHAO" - context.Clear(); - InsertCharacterChars("nihao", &context); - - context.SelectCandidate(0); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.commit_text()); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(0, context.candidates_size()); - } - - { // Insert "nihao", select "NI", focus "HA", commit "NIhao". - context.Clear(); - InsertCharacterChars("nihao", &context); - - context.SelectCandidate(3); - EXPECT_EQ("", context.commit_text()); - EXPECT_EQ("nihao", context.input_text()); - EXPECT_EQ("auxiliary_text_hao", context.auxiliary_text()); - EXPECT_EQ(ToFullWidthAscii("NI"), context.selected_text()); - EXPECT_EQ(ToFullWidthAscii("HAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(3, context.candidates_size()); - - context.FocusCandidate(1); - EXPECT_EQ("", context.commit_text()); - EXPECT_EQ("nihao", context.input_text()); - EXPECT_EQ("auxiliary_text_hao", context.auxiliary_text()); - EXPECT_EQ(ToFullWidthAscii("NI"), context.selected_text()); - EXPECT_EQ(ToFullWidthAscii("HA"), context.conversion_text()); - EXPECT_EQ("o", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(1, context.focused_candidate_index()); - EXPECT_EQ(3, context.candidates_size()); - - context.Commit(); - EXPECT_EQ(ToFullWidthAscii("NI") + "hao", context.commit_text()); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(0, context.candidates_size()); - } - - { // insert "nihao", select "NI", commit preedit. 
- context.Clear(); - - InsertCharacterChars("nihao", &context); - - context.SelectCandidate(3); - - context.CommitPreedit(); - EXPECT_EQ("nihao", context.commit_text()); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(0, context.candidates_size()); - } -} - -TEST_F(PinyinContextMockTest, MultiSegmentConversionTest) { - PinyinContextMock context; - - context.Clear(); - - InsertCharacterChars("abc", &context); - - context.MoveCursorLeft(); - EXPECT_EQ("", context.commit_text()); - EXPECT_EQ("abc", context.input_text()); - EXPECT_EQ("auxiliary_text_ab", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("ab", context.conversion_text()); - EXPECT_EQ("c", context.rest_text()); - EXPECT_EQ(2, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(2, context.candidates_size()); - - context.SelectCandidate(1); - EXPECT_EQ("", context.commit_text()); - EXPECT_EQ("abc", context.input_text()); - EXPECT_EQ("auxiliary_text_b", context.auxiliary_text()); - EXPECT_EQ(ToFullWidthAscii("A"), context.selected_text()); - EXPECT_EQ("b", context.conversion_text()); - EXPECT_EQ("c", context.rest_text()); - EXPECT_EQ(2, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(1, context.candidates_size()); - - context.Commit(); - EXPECT_EQ(ToFullWidthAscii("A") + "bc", context.commit_text()); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(0, context.candidates_size()); -} - -TEST_F(PinyinContextMockTest, FocusTest) { - PinyinContextMock context; - - context.Clear(); - - InsertCharacterChars("nihao", &context); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.focused_candidate_index()); - EXPECT_EQ(5, context.candidates_size()); - - context.FocusCandidate(4); - EXPECT_EQ(ToFullWidthAscii("N"), context.conversion_text()); - EXPECT_EQ("ihao", context.rest_text()); - EXPECT_EQ(4, context.focused_candidate_index()); - - context.FocusCandidate(100); - EXPECT_EQ(ToFullWidthAscii("N"), context.conversion_text()); - EXPECT_EQ("ihao", context.rest_text()); - EXPECT_EQ(4, context.focused_candidate_index()); - - context.FocusCandidate(0); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.focused_candidate_index()); -} - -TEST_F(PinyinContextMockTest, CursorTest) { - PinyinContextMock context; - InsertCharacterChars("nihao", &context); - - { // Moving test - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - - // Nothing occurs. - context.MoveCursorRight(); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - - // Nothing occurs. 
- context.MoveCursorRightByWord(); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - - context.MoveCursorLeft(); - EXPECT_EQ("niha", context.conversion_text()); - EXPECT_EQ("o", context.rest_text()); - EXPECT_EQ(4, context.cursor()); - EXPECT_EQ(4, context.candidates_size()); - - context.MoveCursorLeftByWord(); - EXPECT_EQ("nih", context.conversion_text()); - EXPECT_EQ("ao", context.rest_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.MoveCursorLeftByWord(); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("nihao", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - // Nothing occurs - context.MoveCursorLeft(); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("nihao", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - // Nothing occurs - context.MoveCursorLeftByWord(); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("nihao", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - context.MoveCursorRight(); - EXPECT_EQ("n", context.conversion_text()); - EXPECT_EQ("ihao", context.rest_text()); - EXPECT_EQ(1, context.cursor()); - EXPECT_EQ(1, context.candidates_size()); - - context.MoveCursorRightByWord(); - EXPECT_EQ("nih", context.conversion_text()); - EXPECT_EQ("ao", context.rest_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.MoveCursorRightByWord(); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - - context.MoveCursorLeftByWord(); - EXPECT_EQ("nih", context.conversion_text()); - EXPECT_EQ("ao", context.rest_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.MoveCursorToBeginning(); - EXPECT_EQ("", context.conversion_text()); - EXPECT_EQ("nihao", context.rest_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - context.MoveCursorRightByWord(); - EXPECT_EQ("nih", context.conversion_text()); - EXPECT_EQ("ao", context.rest_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.MoveCursorToEnd(); - EXPECT_EQ(ToFullWidthAscii("NIHAO"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - } - - context.Clear(); - InsertCharacterChars("nihao", &context); - - { // Confirms focused_candidate_index when cursor is not moved. 
- context.FocusCandidate(1); - ASSERT_EQ(1, context.focused_candidate_index()); - - context.MoveCursorRight(); - EXPECT_EQ(1, context.focused_candidate_index()); - - context.MoveCursorLeft(); - EXPECT_EQ(0, context.focused_candidate_index()); - - context.FocusCandidate(2); - ASSERT_EQ(2, context.focused_candidate_index()); - - context.MoveCursorRight(); - EXPECT_EQ(0, context.focused_candidate_index()); - } -} - -TEST_F(PinyinContextMockTest, RemoveTest) { - PinyinContextMock context; - - // Nothing occurs - context.RemoveCharBefore(); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - // Nothing occurs - context.RemoveCharAfter(); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - InsertCharacterChars("nihao", &context); - - context.RemoveCharBefore(); - EXPECT_EQ("niha", context.input_text()); - EXPECT_EQ("auxiliary_text_niha", context.auxiliary_text()); - EXPECT_EQ(4, context.cursor()); - EXPECT_EQ(4, context.candidates_size()); - - context.RemoveWordBefore(); - EXPECT_EQ("nih", context.input_text()); - EXPECT_EQ("auxiliary_text_nih", context.auxiliary_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.MoveCursorToBeginning(); - EXPECT_EQ("nih", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - // Nothing occurs - context.RemoveCharBefore(); - EXPECT_EQ("nih", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - // Nothing occurs - context.RemoveWordBefore(); - EXPECT_EQ("nih", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - context.RemoveCharAfter(); - EXPECT_EQ("ih", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - context.RemoveWordAfter(); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); - - InsertCharacterChars("nihao", &context); - EXPECT_EQ("nihao", context.input_text()); - EXPECT_EQ("auxiliary_text_nihao", context.auxiliary_text()); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(5, context.candidates_size()); - - context.RemoveWordBefore(); - EXPECT_EQ("nih", context.input_text()); - EXPECT_EQ("auxiliary_text_nih", context.auxiliary_text()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(3, context.candidates_size()); - - context.RemoveWordBefore(); - EXPECT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - EXPECT_EQ(0, context.cursor()); - EXPECT_EQ(0, context.candidates_size()); -} - -TEST_F(PinyinContextMockTest, FocusCandidateIndex) { - PinyinContextMock context; - - { // Insert - context.Clear(); - - InsertCharacterChars("nihao", &context); - ASSERT_EQ(5, context.candidates_size()); - - context.FocusCandidate(1); - ASSERT_EQ(1, context.focused_candidate_index()); - - context.Insert('a'); - EXPECT_EQ(0, context.focused_candidate_index()); - } - - { // FocusCandidate* - context.Clear(); - InsertCharacterChars("nihao", &context); - - ASSERT_EQ(5, context.candidates_size()); - 
context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.MoveCursorLeft(); - EXPECT_EQ(4, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - - ASSERT_EQ(4, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.MoveCursorLeftByWord(); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - - ASSERT_EQ(3, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.MoveCursorRight(); - EXPECT_EQ(4, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - - ASSERT_EQ(4, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.MoveCursorRightByWord(); - EXPECT_EQ(5, context.cursor()); - EXPECT_EQ(0, context.focused_candidate_index()); - } - - { // Remove* - context.Clear(); - InsertCharacterChars("abcdefgh", &context); - - // abcdefgh -> abcdefg - ASSERT_EQ(8, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.RemoveCharBefore(); - EXPECT_EQ("abcdefg", context.input_text()); - EXPECT_EQ("auxiliary_text_abcdefg", context.auxiliary_text()); - EXPECT_EQ(0, context.focused_candidate_index()); - - // abcdefg -> abcdef - ASSERT_EQ(7, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.RemoveWordBefore(); - EXPECT_EQ("abcdef", context.input_text()); - EXPECT_EQ("auxiliary_text_abcdef", context.auxiliary_text()); - EXPECT_EQ(0, context.focused_candidate_index()); - - context.MoveCursorLeft(); - context.MoveCursorLeft(); - - // abcd|ef -> abcd|f - // focus_candidate_index should not be changed. - ASSERT_EQ(4, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.RemoveCharAfter(); - EXPECT_EQ("abcdf", context.input_text()); - EXPECT_EQ("auxiliary_text_abcd", context.auxiliary_text()); - EXPECT_EQ(1, context.focused_candidate_index()); - - // abcd|f -> abcd - // focus_candidate_index should not be changed. 
- ASSERT_EQ(4, context.candidates_size()); - context.FocusCandidate(1); - EXPECT_EQ(1, context.focused_candidate_index()); - context.RemoveWordAfter(); - EXPECT_EQ("abcd", context.input_text()); - EXPECT_EQ("auxiliary_text_abcd", context.auxiliary_text()); - EXPECT_EQ(1, context.focused_candidate_index()); - } -} - -TEST_F(PinyinContextMockTest, ClearTest) { - PinyinContextMock context; - - { // Prepare condition - InsertCharacterChars("abcd", &context); - context.MoveCursorLeft(); - context.SelectCandidate(2); - context.FocusCandidate(1); - ASSERT_EQ("abcd", context.input_text()); - EXPECT_EQ("auxiliary_text_bc", context.auxiliary_text()); - ASSERT_EQ(ToFullWidthAscii("A"), context.selected_text()); - ASSERT_EQ("b", context.conversion_text()); - ASSERT_EQ("cd", context.rest_text()); - ASSERT_EQ(1, context.focused_candidate_index()); - ASSERT_EQ(3, context.cursor()); - ASSERT_EQ(2, context.candidates_size()); - } - - { - context.ClearCommitText(); - EXPECT_EQ("abcd", context.input_text()); - EXPECT_EQ("auxiliary_text_bc", context.auxiliary_text()); - EXPECT_EQ(ToFullWidthAscii("A"), context.selected_text()); - EXPECT_EQ("b", context.conversion_text()); - EXPECT_EQ("cd", context.rest_text()); - EXPECT_EQ(1, context.focused_candidate_index()); - EXPECT_EQ(3, context.cursor()); - EXPECT_EQ(2, context.candidates_size()); - } - - { - context.Clear(); - ASSERT_EQ("", context.input_text()); - EXPECT_EQ("", context.auxiliary_text()); - ASSERT_EQ("", context.selected_text()); - ASSERT_EQ("", context.conversion_text()); - ASSERT_EQ("", context.rest_text()); - ASSERT_EQ(0, context.focused_candidate_index()); - ASSERT_EQ(0, context.cursor()); - ASSERT_EQ(0, context.candidates_size()); - } - - { // Prepare condition - InsertCharacterChars("abc", &context); - context.CommitPreedit(); - ASSERT_EQ("", context.input_text()); - ASSERT_EQ("abc", context.commit_text()); - } - - { - context.ClearCommitText(); - ASSERT_EQ("", context.input_text()); - ASSERT_EQ("", context.commit_text()); - } -} - -TEST_F(PinyinContextMockTest, ClearCandidateFromHistory) { - PinyinContextMock context; - - InsertCharacterChars("abc", &context); - ASSERT_EQ(3, context.candidates_size()); - - context.FocusCandidate(1); - ASSERT_EQ(1, context.focused_candidate_index()); - - context.ClearCandidateFromHistory(1); - EXPECT_EQ(2, GetCandidatesSize(&context)); - - Candidate candidate; - EXPECT_TRUE(context.GetCandidate(0, &candidate)); - EXPECT_EQ(ToFullWidthAscii("ABC"), candidate.text); - EXPECT_TRUE(context.GetCandidate(1, &candidate)); - EXPECT_EQ(ToFullWidthAscii("A"), candidate.text); - - EXPECT_EQ("abc", context.input_text()); - EXPECT_EQ("", context.selected_text()); - EXPECT_EQ(ToFullWidthAscii("ABC"), context.conversion_text()); - EXPECT_EQ("", context.rest_text()); - EXPECT_EQ(0, context.focused_candidate_index()); -} - -TEST_F(PinyinContextMockTest, ReloadConfig) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config.mutable_pinyin_config()->set_double_pinyin(false); - config::ConfigHandler::SetConfig(config); - - PinyinContextMock context; - InsertCharacterChars("abc", &context); - ASSERT_EQ("abc", context.input_text()); - - config.mutable_pinyin_config()->set_double_pinyin(true); - config::ConfigHandler::SetConfig(config); - - // If config::PinyinConfig::double_pinyin is changed, any of information of - // context will be cleared. 
- context.ReloadConfig(); - EXPECT_EQ("", context.input_text()); -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_context_test.cc mozc-1.11.1522.102/languages/pinyin/pinyin_context_test.cc --- mozc-1.11.1502.102/languages/pinyin/pinyin_context_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_context_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,554 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/pinyin_context.h" - -#include -#include -#include - -#include -#include - -#include "base/logging.h" -#include "base/scoped_ptr.h" -#include "base/system_util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/pinyin/session_config.h" -#include "testing/base/public/googletest.h" -#include "testing/base/public/gunit.h" - -DECLARE_string(test_tmpdir); - -namespace mozc { -namespace pinyin { - -// This test depends on libpyzy and conversion results of libpyzy depends on -// installed dictionary and input history. We expect some conversion (e.g. -// "nihao" should be "你好"), but it is NOT ensured. For the reason, this test -// can be unstalbe. -// -// TODO(hsumita): Create a test dictionary to libpyzy. -// TODO(hsumita): Add incognito mode to libpyzy. -// TODO(hsumita): Add a test case for ClearCandidateFromHistory. 
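[Editor's aside] The expected strings in the libpyzy-backed test below are written as escaped UTF-8 bytes; a quick standalone check of that encoding, for reference only:

#include <cstdio>
#include <string>

int main() {
  // The bytes E4 BD A0 encode U+4F60 ("你") and E5 A5 BD encode U+597D ("好"),
  // matching the kNihao/kNi/kHao constants defined in the test.
  const std::string nihao = "\xE4\xBD\xA0\xE5\xA5\xBD";
  for (unsigned char c : nihao) std::printf("%02X ", static_cast<unsigned>(c));
  std::printf("\n");  // E4 BD A0 E5 A5 BD
}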
- -namespace { -// "你好" -const char *kNihao = "\xE4\xBD\xA0\xE5\xA5\xBD"; -// "你" -const char *kNi = "\xE4\xBD\xA0"; -// "好" -const char *kHao = "\xE5\xA5\xBD"; -} // namespace - -class PinyinContextTest : public testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - PyZy::InputContext::init(FLAGS_test_tmpdir, FLAGS_test_tmpdir); - - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - - session_config_.reset(new SessionConfig); - session_config_->full_width_word_mode = false; - session_config_->full_width_punctuation_mode = true; - session_config_->simplified_chinese_mode = true; - - context_.reset(new PinyinContext(*session_config_)); - } - - virtual void TearDown() { - PyZy::InputContext::finalize(); - - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } - - void InsertCharacterChars(const string &chars) { - for (size_t i = 0; i < chars.size(); ++i) { - EXPECT_TRUE(context_->Insert(chars[i])); - } - } - - bool FindCandidateIndex(const string &expected_candidate, size_t *index) { - Candidate candidate; - for (size_t i = 0; context_->GetCandidate(i, &candidate); ++i) { - if (candidate.text == expected_candidate) { - *index = i; - return true; - } - } - - LOG(ERROR) << "Can't find candidate index"; - return false; - } - - void CheckTextAccessors(const string &commit_text, - const string &input_text, - const string &selected_text, - const string &conversion_text, - const string &rest_text, - const string &auxiliary_text) { - EXPECT_EQ(commit_text, context_->commit_text()); - EXPECT_EQ(input_text, context_->input_text()); - EXPECT_EQ(selected_text, context_->selected_text()); - EXPECT_EQ(conversion_text, context_->conversion_text()); - EXPECT_EQ(rest_text, context_->rest_text()); - EXPECT_EQ(auxiliary_text, context_->auxiliary_text()); - } - - PyZy::InputContext *GetRawContext() const { - return context_->context_.get(); - } - - scoped_ptr session_config_; - scoped_ptr context_; -}; - -TEST_F(PinyinContextTest, InsertAndClear) { - { - SCOPED_TRACE("Initial state"); - CheckTextAccessors("", "", "", "", "", ""); - EXPECT_EQ("", context_->auxiliary_text()); - } - - { - SCOPED_TRACE("Input nihao and check accessors"); - InsertCharacterChars("nihao"); - - CheckTextAccessors("", "nihao", "", kNihao, "", "ni hao|"); - EXPECT_EQ(0, context_->focused_candidate_index()); - EXPECT_EQ(5, context_->cursor()); - EXPECT_TRUE(context_->HasCandidate(0)); - - Candidate candidate; - ASSERT_TRUE(context_->GetCandidate(0, &candidate)); - EXPECT_EQ(kNihao, candidate.text); - } - - { - SCOPED_TRACE("Clear state"); - context_->Clear(); - CheckTextAccessors("", "", "", "", "", ""); - EXPECT_EQ(0, context_->focused_candidate_index()); - EXPECT_EQ(0, context_->cursor()); - EXPECT_FALSE(context_->HasCandidate(0)); - } -} - -TEST_F(PinyinContextTest, SelectAndCommit) { - InsertCharacterChars("nihao"); - - { - SCOPED_TRACE("Commit"); - context_->Commit(); - // commit_text should be "nihao" because we don't call SelectCandidate(). 
- CheckTextAccessors("nihao", "", "", "", "", ""); - } - - context_->Clear(); - InsertCharacterChars("nihao"); - - { - SCOPED_TRACE("Select first candidate"); - context_->SelectCandidate(0); - CheckTextAccessors(kNihao, "", "", "", "", ""); - } - - context_->Clear(); - InsertCharacterChars("nihao"); - - { - SCOPED_TRACE("Select partially and commit"); - size_t ni_index; - ASSERT_TRUE(FindCandidateIndex(kNi, &ni_index)); - context_->SelectCandidate(ni_index); - CheckTextAccessors("", "nihao", kNi, kHao, "", "hao|"); - - context_->Commit(); - CheckTextAccessors(string(kNi) + "hao", "", "", "", "", ""); - } - - context_->Clear(); - InsertCharacterChars("nihao"); - - { - SCOPED_TRACE("Select partially and commit preedit"); - size_t ni_index; - ASSERT_TRUE(FindCandidateIndex(kNi, &ni_index)); - context_->SelectCandidate(ni_index); - CheckTextAccessors("", "nihao", kNi, kHao, "", "hao|"); - - context_->CommitPreedit(); - CheckTextAccessors("nihao", "", "", "", "", ""); - } -} - -TEST_F(PinyinContextTest, CommitText) { - InsertCharacterChars("nihao"); - context_->CommitPreedit(); - ASSERT_EQ("nihao", context_->commit_text()); - - { - SCOPED_TRACE("Clear commit text by Clear()"); - context_->Clear(); - CheckTextAccessors("", "", "", "", "", ""); - } - - context_->Clear(); - InsertCharacterChars("nihao"); - context_->CommitPreedit(); - ASSERT_EQ("nihao", context_->commit_text()); - - { - SCOPED_TRACE("Don't clear commit text by other functions"); - InsertCharacterChars("nihao"); - CheckTextAccessors("nihao", "nihao", "", kNihao, "", "ni hao|"); - } -} - -TEST_F(PinyinContextTest, ClearTest) { - InsertCharacterChars("nihao"); - ASSERT_EQ("nihao", context_->input_text()); - ASSERT_EQ("", context_->commit_text()); - - { - context_->ClearCommitText(); - EXPECT_EQ("nihao", context_->input_text()); - EXPECT_EQ("", context_->commit_text()); - } - - { - context_->Clear(); - EXPECT_EQ("", context_->input_text()); - EXPECT_EQ("", context_->commit_text()); - } - - InsertCharacterChars("nihao"); - context_->CommitPreedit(); - ASSERT_EQ("", context_->input_text()); - ASSERT_EQ("nihao", context_->commit_text()); - - { - context_->ClearCommitText(); - EXPECT_EQ("", context_->input_text()); - EXPECT_EQ("", context_->commit_text()); - } - - InsertCharacterChars("nihao"); - context_->CommitPreedit(); - ASSERT_EQ("", context_->input_text()); - ASSERT_EQ("nihao", context_->commit_text()); - - { - context_->Clear(); - EXPECT_EQ("", context_->input_text()); - EXPECT_EQ("", context_->commit_text()); - } -} - -TEST_F(PinyinContextTest, FocusCandidate) { - InsertCharacterChars("nihao"); - ASSERT_TRUE(context_->HasCandidate(2)); - ASSERT_EQ(0, context_->focused_candidate_index()); - - { - SCOPED_TRACE("Focus 3rd candidate"); - EXPECT_TRUE(context_->FocusCandidate(2)); - EXPECT_EQ(2, context_->focused_candidate_index()); - } - - { - SCOPED_TRACE("Focus 100th candidate and fail"); - EXPECT_FALSE(context_->FocusCandidate(99)); - EXPECT_EQ(2, context_->focused_candidate_index()); - } -} - -TEST_F(PinyinContextTest, MoveCursor) { - InsertCharacterChars("nihao"); - - { - SCOPED_TRACE("Move cursor left"); - context_->MoveCursorLeft(); - CheckTextAccessors("", "nihao", "", "ni ha|o", "", "ni ha|o"); - EXPECT_EQ(4, context_->cursor()); - } - - { - SCOPED_TRACE("Move cursor left by word"); - context_->MoveCursorLeftByWord(); - CheckTextAccessors("", "nihao", "", "ni|hao", "", "ni|hao"); - EXPECT_EQ(2, context_->cursor()); - } - - { - SCOPED_TRACE("Move cursor to beginning"); - context_->MoveCursorToBeginning(); - 
CheckTextAccessors("", "nihao", "", "", "nihao", ""); - EXPECT_EQ(0, context_->cursor()); - } - - { - SCOPED_TRACE("Move cursor right"); - context_->MoveCursorRight(); - CheckTextAccessors("", "nihao", "", "n|ihao", "", "n|ihao"); - EXPECT_EQ(1, context_->cursor()); - } - - { - // In current implementation of libpyzy, RemoveWordAfter removes all - // characters after cursor. - SCOPED_TRACE("Move cursor right by word"); - context_->MoveCursorRightByWord(); - CheckTextAccessors("", "nihao", "", kNihao, "", "ni hao|"); - EXPECT_EQ(5, context_->cursor()); - } - - context_->MoveCursorLeftByWord(); - ASSERT_EQ(2, context_->cursor()); - - { - SCOPED_TRACE("Move cursor to end"); - context_->MoveCursorToEnd(); - CheckTextAccessors("", "nihao", "", kNihao, "", "ni hao|"); - EXPECT_EQ(5, context_->cursor()); - } -} - -TEST_F(PinyinContextTest, UnselectCandidates) { - InsertCharacterChars("nihao"); - ASSERT_EQ("nihao", context_->input_text()); - ASSERT_TRUE(context_->selected_text().empty()); - - size_t ni_index; - ASSERT_TRUE(FindCandidateIndex(kNi, &ni_index)); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorLeft(); - EXPECT_TRUE(context_->selected_text().empty()); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorRight(); - EXPECT_TRUE(context_->selected_text().empty()); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorLeftByWord(); - EXPECT_TRUE(context_->selected_text().empty()); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorRightByWord(); - EXPECT_TRUE(context_->selected_text().empty()); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorToBeginning(); - EXPECT_TRUE(context_->selected_text().empty()); - - context_->SelectCandidate(ni_index); - ASSERT_EQ(kNi, context_->selected_text()); - context_->MoveCursorToEnd(); - EXPECT_TRUE(context_->selected_text().empty()); -} - -TEST_F(PinyinContextTest, RemoveCharacters) { - InsertCharacterChars("haohao"); - ASSERT_EQ("haohao", context_->input_text()); - - { - SCOPED_TRACE("Remove a previous character."); - context_->RemoveCharBefore(); - EXPECT_EQ("haoha", context_->input_text()); - } - - { - SCOPED_TRACE("Remove a previous word."); - context_->RemoveWordBefore(); - EXPECT_EQ("hao", context_->input_text()); - } - - context_->MoveCursorToBeginning(); - ASSERT_EQ(0, context_->cursor()); - - { - SCOPED_TRACE("Remove a next character."); - context_->RemoveCharAfter(); - EXPECT_EQ("ao", context_->input_text()); - } - - { - // In current implementation of libpyzy, RemoveWordAfter removes all - // characters after cursor. 
- SCOPED_TRACE("Remove a next word."); - context_->RemoveWordAfter(); - EXPECT_EQ("", context_->input_text()); - } -} - -namespace { -const uint32 kIncompletePinyinOption = PINYIN_INCOMPLETE_PINYIN; -const uint32 kCorrectPinyinOption = PINYIN_CORRECT_ALL; -const uint32 kFuzzyPinyinOption = - PINYIN_FUZZY_C_CH | - PINYIN_FUZZY_Z_ZH | - PINYIN_FUZZY_S_SH | - PINYIN_FUZZY_L_N | - PINYIN_FUZZY_F_H | - PINYIN_FUZZY_K_G | - PINYIN_FUZZY_G_K | - PINYIN_FUZZY_AN_ANG | - PINYIN_FUZZY_ANG_AN | - PINYIN_FUZZY_EN_ENG | - PINYIN_FUZZY_ENG_EN | - PINYIN_FUZZY_IN_ING | - PINYIN_FUZZY_ING_IN; -} // namespace - -TEST_F(PinyinContextTest, ReloadConfig) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::PinyinConfig *pinyin_config = config.mutable_pinyin_config(); - - { // full pinyin / double pinyin - pinyin_config->set_double_pinyin(false); - config::ConfigHandler::SetConfig(config); - - context_->ReloadConfig(); - InsertCharacterChars("nihao"); - EXPECT_EQ(kNihao, context_->conversion_text()); - - pinyin_config->set_double_pinyin(true); - config::ConfigHandler::SetConfig(config); - - context_->ReloadConfig(); - EXPECT_EQ("", context_->input_text()); - InsertCharacterChars("nihk"); - EXPECT_EQ(kNihao, context_->conversion_text()); - } - - const PyZy::InputContext *raw_context = GetRawContext(); - - { // conversion option - const bool kInput[][2] = { - {false, false}, {false, true}, {true, false}, {true, true}, - }; - - for (size_t i = 0; i < arraysize(kInput); ++i) { - const bool use_correct = kInput[i][0]; - const bool use_fuzzy = kInput[i][1]; - pinyin_config->set_fuzzy_pinyin(use_fuzzy); - pinyin_config->set_correct_pinyin(use_correct); - config::ConfigHandler::SetConfig(config); - context_->ReloadConfig(); - const uint32 expected = - kIncompletePinyinOption | - (use_correct ? kCorrectPinyinOption : 0) | - (use_fuzzy ? 
kFuzzyPinyinOption : 0); - const uint32 actual = raw_context->getProperty( - PyZy::InputContext::PROPERTY_CONVERSION_OPTION).getUnsignedInt(); - EXPECT_EQ(expected, actual) << use_fuzzy << "," << use_correct; - } - } - - { // double pinyin schema - ASSERT_EQ(DOUBLE_PINYIN_KEYBOARD_MSPY, - pinyin_config->double_pinyin_schema()); - ASSERT_EQ(DOUBLE_PINYIN_KEYBOARD_MSPY, raw_context->getProperty( - PyZy::InputContext::PROPERTY_DOUBLE_PINYIN_SCHEMA).getUnsignedInt()); - - pinyin_config->set_double_pinyin_schema(DOUBLE_PINYIN_KEYBOARD_ZRM); - config::ConfigHandler::SetConfig(config); - context_->ReloadConfig(); - EXPECT_EQ(DOUBLE_PINYIN_KEYBOARD_ZRM, raw_context->getProperty( - PyZy::InputContext::PROPERTY_DOUBLE_PINYIN_SCHEMA).getUnsignedInt()); - } - - { // simplified chinese - ASSERT_TRUE(session_config_->simplified_chinese_mode); - ASSERT_TRUE(raw_context->getProperty( - PyZy::InputContext::PROPERTY_MODE_SIMP).getBool()); - - session_config_->simplified_chinese_mode = false; - context_->ReloadConfig(); - EXPECT_FALSE(raw_context->getProperty( - PyZy::InputContext::PROPERTY_MODE_SIMP).getBool()); - } -} - -TEST_F(PinyinContextTest, FullWidthCommit) { - session_config_->full_width_word_mode = false; - InsertCharacterChars("nihao"); - context_->Commit(); - EXPECT_EQ("nihao", context_->commit_text()); - - session_config_->full_width_word_mode = true; - InsertCharacterChars("nihao"); - context_->Commit(); - // "nihao" - EXPECT_EQ("\xEF\xBD\x8E\xEF\xBD\x89\xEF\xBD\x88\xEF\xBD\x81\xEF\xBD\x8F", - context_->commit_text()); -} - -TEST_F(PinyinContextTest, InsertNumber_Issue6136903) { - { // Half width word mode - session_config_->full_width_word_mode = false; - - context_->Clear(); - EXPECT_TRUE(context_->Insert('1')); - EXPECT_EQ("1", context_->commit_text()); - - context_->Clear(); - InsertCharacterChars("nihao"); - EXPECT_FALSE(context_->Insert('1')); - } - - { // Full width word mode - session_config_->full_width_word_mode = true; - - context_->Clear(); - EXPECT_TRUE(context_->Insert('1')); - // "1" - EXPECT_EQ("\xEF\xBC\x91", context_->commit_text()); - - context_->Clear(); - InsertCharacterChars("nihao"); - EXPECT_FALSE(context_->Insert('1')); - } -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory.cc mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory.cc --- mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,53 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/pinyin_session_factory.h" - -#include "base/singleton.h" -#include "engine/empty_user_data_manager.h" -#include "languages/pinyin/session.h" - -namespace mozc { -namespace pinyin { -PinyinSessionFactory::PinyinSessionFactory() { -} - -PinyinSessionFactory::~PinyinSessionFactory() { -} - -session::SessionInterface *PinyinSessionFactory::NewSession() { - return new mozc::pinyin::Session(); -} - -UserDataManagerInterface *PinyinSessionFactory::GetUserDataManager() { - return Singleton::get(); -} - -} // namesapce pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory.h mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory.h --- mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,52 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
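In PinyinSessionFactory::GetUserDataManager() above, the Singleton call has likewise lost its template argument. Given that the file includes engine/empty_user_data_manager.h and the function returns a UserDataManagerInterface*, the removed line was presumably:

    return Singleton<EmptyUserDataManager>::get();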
- -#ifndef MOZC_LANGUAGES_PINYIN_PINYIN_SESSION_FACTORY_H_ -#define MOZC_LANGUAGES_PINYIN_PINYIN_SESSION_FACTORY_H_ - -#include "session/session_factory_manager.h" - -namespace mozc { - -namespace session { -class SessionInterface; -} // namespace session. - -namespace pinyin { -class PinyinSessionFactory : public session::SessionFactoryInterface { - public: - PinyinSessionFactory(); - virtual ~PinyinSessionFactory(); - virtual session::SessionInterface *NewSession(); - virtual UserDataManagerInterface *GetUserDataManager(); - private: -}; -} // namespace pinyin -} // namespace mozc -#endif // MOZC_LANGUAGES_PINYIN_PINYIN_SESSION_FACTORY_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory_test.cc mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory_test.cc --- mozc-1.11.1502.102/languages/pinyin/pinyin_session_factory_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/pinyin_session_factory_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "base/scoped_ptr.h" -#include "languages/pinyin/pinyin_session_factory.h" -#include "session/session_interface.h" -#include "testing/base/public/gunit.h" - -namespace mozc { -namespace pinyin { - -TEST(PinyinSessionFactoryTest, BasicTest) { - PinyinSessionFactory session_factory; - - scoped_ptr session(session_factory.NewSession()); - EXPECT_TRUE(session.get()); - - UserDataManagerInterface *manager = session_factory.GetUserDataManager(); - EXPECT_TRUE(manager); -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_context.cc mozc-1.11.1522.102/languages/pinyin/punctuation_context.cc --- mozc-1.11.1502.102/languages/pinyin/punctuation_context.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_context.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,412 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
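The scoped_ptr in PinyinSessionFactoryTest.BasicTest above is another declaration whose template argument did not survive the conversion to plain text; since NewSession() returns a session::SessionInterface* and the test includes session/session_interface.h, it presumably read:

    scoped_ptr<session::SessionInterface> session(session_factory.NewSession());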
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/punctuation_context.h" - -#include -#include -#include - -#include "base/logging.h" -#include "base/port.h" -#include "base/singleton.h" -#include "base/util.h" -#include "languages/pinyin/punctuation_table.h" -#include "languages/pinyin/session_config.h" - -namespace mozc { -namespace pinyin { -namespace punctuation { - -namespace { -// TODO(hsumita): Handling quote and double quote (open and close), and comma -// (especially comma after number) for direct commit. 
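In the PunctuationContext constructor that follows, the Singleton call in the initializer list has also lost its template argument. Since the file includes languages/pinyin/punctuation_table.h and table_ is a PunctuationTableInterface pointer, the removed initializer was presumably:

    table_(Singleton<PunctuationTable>::get()),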
- -const char kPunctuationSpecialKey = '`'; -} // namespace - -PunctuationContext::PunctuationContext(const SessionConfig &session_config) - : table_(Singleton::get()), - session_config_(session_config) { - ClearAll(); -} - -PunctuationContext::~PunctuationContext() {} - -bool PunctuationContext::Insert(char ch) { - if (!isgraph(ch)) { - return false; - } - - if (input_text_.empty() && ch != kPunctuationSpecialKey) { - return DirectCommit(ch); - } - - if (is_initial_state_ && !input_text_.empty()) { - is_initial_state_ = false; - input_text_.clear(); - selected_text_.clear(); - rest_text_.clear(); - cursor_ = 0; - } - - input_text_ += ch; - ++cursor_; - focused_candidate_index_ = 0; - - UpdateCandidates(); - UpdateAuxiliaryText(); - selected_text_.append(candidates_[focused_candidate_index_]); - - return true; -} - -void PunctuationContext::Commit() { - string result; - result.append(selected_text_); - result.append(rest_text_); - Clear(); - commit_text_.assign(result); -} - -void PunctuationContext::CommitPreedit() { - const string result = input_text_; - Clear(); - commit_text_.assign(result); -} - -void PunctuationContext::Clear() { - ClearCommitText(); - - is_initial_state_ = true; - input_text_.clear(); - selected_text_.clear(); - rest_text_.clear(); - auxiliary_text_.clear(); - cursor_ = 0; - focused_candidate_index_ = 0; - candidates_.clear(); -} - -void PunctuationContext::ClearCommitText() { - commit_text_.clear(); -} - -bool PunctuationContext::MoveCursorRight() { - if (cursor_ == input_text_.size()) { - return true; - } - return MoveCursorInternal(cursor_ + 1); -} - -bool PunctuationContext::MoveCursorLeft() { - if (cursor_ == 0) { - return true; - } - return MoveCursorInternal(cursor_ - 1); -} - -bool PunctuationContext::MoveCursorRightByWord() { - if (cursor_ == input_text_.size()) { - return true; - } - return MoveCursorRight(); -} - -bool PunctuationContext::MoveCursorLeftByWord() { - return MoveCursorLeft(); -} - -bool PunctuationContext::MoveCursorToBeginning() { - if (cursor_ == 0) { - return true; - } - return MoveCursorInternal(0); -} - -bool PunctuationContext::MoveCursorToEnd() { - if (cursor_ == input_text_.size()) { - return true; - } - return MoveCursorInternal(input_text_.size()); -} - -bool PunctuationContext::SelectCandidate(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - if (!FocusCandidate(index)) { - return false; - } - - Commit(); - return true; -} - -bool PunctuationContext::FocusCandidate(size_t index) { - if (index >= candidates_.size()) { - return false; - } - - focused_candidate_index_ = index; - DCHECK(!selected_text_.empty()); - selected_text_.assign( - Util::SubString(selected_text_, 0, Util::CharsLen(selected_text_) - 1)); - selected_text_.append(candidates_[focused_candidate_index_]); - - return true; -} - -bool PunctuationContext::ClearCandidateFromHistory(size_t index) { - // This context doesn't use history. 
- return true; -} - -bool PunctuationContext::RemoveCharBefore() { - if (cursor_ == 0) { - return false; - } - - input_text_.erase(cursor_ - 1, 1); - --cursor_; - - if (input_text_.empty()) { - Clear(); - return true; - } - - DCHECK(!selected_text_.empty()); - selected_text_ = - Util::SubString(selected_text_, 0, Util::CharsLen(selected_text_) - 1); - - focused_candidate_index_ = 0; - UpdateCandidates(); - UpdateAuxiliaryText(); - return true; -} - -bool PunctuationContext::RemoveCharAfter() { - if (cursor_ == input_text_.size()) { - return false; - } - - input_text_.erase(cursor_, 1); - - if (input_text_.empty()) { - Clear(); - return true; - } - - rest_text_ = Util::SubString(rest_text_, 1, Util::CharsLen(rest_text_) - 1); - - UpdateCandidates(); - UpdateAuxiliaryText(); - return true; -} - -bool PunctuationContext::RemoveWordBefore() { - if (cursor_ == 0) { - return false; - } - return RemoveCharBefore(); -} - -bool PunctuationContext::RemoveWordAfter() { - if (cursor_ == input_text_.size()) { - return false; - } - return RemoveCharAfter(); -} - -void PunctuationContext::ReloadConfig() { - // This context doesn't use config. -} - -const string &PunctuationContext::commit_text() const { - return commit_text_; -} - -const string &PunctuationContext::input_text() const { - return input_text_; -} - -const string &PunctuationContext::selected_text() const { - return selected_text_; -} - -const string &PunctuationContext::conversion_text() const { - // Conversion text is not used on this context. - return empty_text_; -} - -const string &PunctuationContext::rest_text() const { - return rest_text_; -} - -const string &PunctuationContext::auxiliary_text() const { - return auxiliary_text_; -} - -size_t PunctuationContext::cursor() const { - return cursor_; -} - -size_t PunctuationContext::focused_candidate_index() const { - return focused_candidate_index_; -} - -bool PunctuationContext::GetCandidate(size_t index, Candidate *candidate) { - DCHECK(candidate); - if (!HasCandidate(index)) { - return false; - } - - candidate->text.assign(candidates_[index]); - return true; -} - -bool PunctuationContext::HasCandidate(size_t index) { - return index < candidates_.size(); -} - -size_t PunctuationContext::PrepareCandidates(size_t required_size) { - return min(required_size, candidates_.size()); -} - -void PunctuationContext::UpdatePreviousCommitText(const string &text) { - if (!text.empty() && isdigit(text[text.size() - 1])) { - is_next_dot_half_ = true; - } else { - is_next_dot_half_ = false; - } -} - -void PunctuationContext::ClearAll() { - Clear(); - is_next_single_quote_close_ = false; - is_next_double_quote_close_ = false; - is_next_dot_half_ = false; -} - -bool PunctuationContext::DirectCommit(char ch) { - string text(1, ch); - - if (session_config_.full_width_punctuation_mode) { - if (session_config_.simplified_chinese_mode) { - table_->GetDirectCommitTextForSimplifiedChinese(ch, &text); - } else { - table_->GetDirectCommitTextForTraditionalChinese(ch, &text); - } - - // Converts some punctuation by the context. 
- if (text == "\xE2\x80\x98") { // "‘" - if (is_next_single_quote_close_) { - text.assign("\xE2\x80\x99"); // "’" - } - is_next_single_quote_close_ = !is_next_single_quote_close_; - } else if (text == "\xE2\x80\x9C") { // "“" - if (is_next_double_quote_close_) { - text.assign("\xE2\x80\x9D"); // "”" - } - is_next_double_quote_close_ = !is_next_double_quote_close_; - } else if (text == "\xE3\x80\x82" && is_next_dot_half_) { // "。" - text.assign("."); - is_next_dot_half_ = false; - } - - // We use an original character as a commit text if - // GetDirectCommitTextFor*() is failed. - } else { - // TODO(hsumita): Move this logic to SessionConverter. - if (session_config_.full_width_word_mode) { - string full_text; - Util::HalfWidthAsciiToFullWidthAscii(text, &full_text); - text.assign(full_text); - } else { - string half_text; - Util::FullWidthAsciiToHalfWidthAscii(text, &half_text); - text.assign(half_text); - } - } - - selected_text_.assign(text); - Commit(); - return true; -} - -bool PunctuationContext::MoveCursorInternal(size_t index) { - if (index > input_text_.size()) { - return false; - } - - cursor_ = index; - focused_candidate_index_ = 0; - - const string text(selected_text_ + rest_text_); - Util::SubString(text, 0, cursor_, &selected_text_); - Util::SubString(text, cursor_, Util::CharsLen(text), &rest_text_); - - UpdateCandidates(); - UpdateAuxiliaryText(); - - return true; -} - -void PunctuationContext::UpdateCandidates() { - DCHECK(!input_text_.empty()); - - candidates_.clear(); - - if (cursor_ == 0) { - return; - } - - if (is_initial_state_) { - // Show default candidates. - table_->GetDefaultCandidates(&candidates_); - return; - } - - const char key = input_text_[cursor_ - 1]; - table_->GetCandidates(key, &candidates_); -} - -void PunctuationContext::UpdateAuxiliaryText() { - DCHECK(!input_text_.empty()); - DCHECK_LE(cursor_, input_text_.size()); - - auxiliary_text_.assign(input_text_.substr(0, cursor_)); - auxiliary_text_.append("|"); - auxiliary_text_.append(input_text_.substr(cursor_)); -} - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_context.h mozc-1.11.1522.102/languages/pinyin/punctuation_context.h --- mozc-1.11.1502.102/languages/pinyin/punctuation_context.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_context.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,140 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Punctuation mode context for pinyin IME. - -#ifndef MOZC_LANGUAGES_PINYIN_PUNCTUATION_CONTEXT_H_ -#define MOZC_LANGUAGES_PINYIN_PUNCTUATION_CONTEXT_H_ - -#include -#include - -#include "base/port.h" -#include "languages/pinyin/pinyin_context_interface.h" - -// Suggests punctuation. -// If special key '`' is input first, punctuation mode is turned on, and some -// candidates related to input will be generated. -// If some key input after special key, first key is cleared from the context. -// If other punctuation key is input first, this class directly commits some -// punctuation. - -namespace mozc { -namespace pinyin { -struct SessionConfig; - -namespace punctuation { -class PunctuationTableInterface; - -class PunctuationContext : public PinyinContextInterface { - public: - explicit PunctuationContext(const SessionConfig &session_config); - virtual ~PunctuationContext(); - - virtual bool Insert(char ch); - virtual void Commit(); - virtual void CommitPreedit(); - // Clear states except for direct commit mode related states. - // Please call ClearAll() if you want to all states. - virtual void Clear(); - virtual void ClearCommitText(); - - virtual bool MoveCursorRight(); - virtual bool MoveCursorLeft(); - virtual bool MoveCursorRightByWord(); - virtual bool MoveCursorLeftByWord(); - virtual bool MoveCursorToBeginning(); - virtual bool MoveCursorToEnd(); - - virtual bool SelectCandidate(size_t index); - virtual bool FocusCandidate(size_t index); - virtual bool ClearCandidateFromHistory(size_t index); - - virtual bool RemoveCharBefore(); - virtual bool RemoveCharAfter(); - virtual bool RemoveWordBefore(); - virtual bool RemoveWordAfter(); - - virtual void ReloadConfig(); - - virtual const string &commit_text() const; - virtual const string &input_text() const; - virtual const string &selected_text() const; - virtual const string &conversion_text() const; - virtual const string &rest_text() const; - virtual const string &auxiliary_text() const; - - virtual size_t cursor() const; - virtual size_t focused_candidate_index() const; - virtual bool GetCandidate(size_t index, Candidate *candidate); - virtual bool HasCandidate(size_t index); - virtual size_t PrepareCandidates(size_t required_size); - - // In addition to Clear(), this method clears the data related to direct - // commit mode. This method is virtual for testing. - virtual void ClearAll(); - // Updates the previous commit text to insert characters considering - // commited text on direct commit mode. This method is virtual for testing. - virtual void UpdatePreviousCommitText(const string &text); - - private: - friend class PunctuationContextTest; - - bool DirectCommit(char ch); - void UpdateAuxiliaryText(); - void UpdateCandidates(); - bool MoveCursorInternal(size_t index); - - bool is_initial_state_; - string commit_text_; - // input_text contains only ASCII cahracters. 
- string input_text_; - string selected_text_; - string rest_text_; - string auxiliary_text_; - size_t cursor_; - size_t focused_candidate_index_; - vector candidates_; - const string empty_text_; - const PunctuationTableInterface *table_; - const SessionConfig &session_config_; - - // Direct mode related context. - bool is_next_single_quote_close_; - bool is_next_double_quote_close_; - bool is_next_dot_half_; - - DISALLOW_COPY_AND_ASSIGN(PunctuationContext); -}; - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_PUNCTUATION_CONTEXT_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_context_test.cc mozc-1.11.1522.102/languages/pinyin/punctuation_context_test.cc --- mozc-1.11.1502.102/languages/pinyin/punctuation_context_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_context_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,636 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
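For orientation before the test file itself, here is a minimal sketch of how the removed PunctuationContext was driven, pieced together from the implementation quoted above (the SessionConfig fields are the ones used throughout this patch, and the tests below verify exactly this behaviour):

    SessionConfig config;
    config.full_width_word_mode = false;
    config.full_width_punctuation_mode = true;
    config.simplified_chinese_mode = true;
    PunctuationContext context(config);

    // A punctuation key typed with no pending input is committed directly,
    // converted through the punctuation table.
    context.Insert(',');            // commit_text() becomes the full-width comma
    context.ClearCommitText();

    // The '`' key enters candidate mode instead of committing.
    context.Insert('`');            // input_text() == "`", default candidates offered
    context.SelectCandidate(0);     // commits the focused candidate

Note also that the fixture and mock declarations in punctuation_context_test.cc below have the same angle-bracket loss: from SetUp(), the members were presumably scoped_ptr<MockTable> table_, scoped_ptr<SessionConfig> session_config_, scoped_ptr<PunctuationContext> context_, with vector<string> for dummy_candidates_ and empty_candidates_, and vector<string> * parameters in the MOCK_CONST_METHOD2 declarations.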
- -#include -#include - -#include "base/scoped_ptr.h" -#include "base/util.h" -#include "languages/pinyin/punctuation_context.h" -#include "languages/pinyin/punctuation_table.h" -#include "languages/pinyin/session_config.h" -#include "testing/base/public/gmock.h" -#include "testing/base/public/gunit.h" - -using ::testing::DoAll; -using ::testing::Return; -using ::testing::SetArgPointee; -using ::testing::_; - -namespace mozc { -namespace pinyin { -namespace punctuation { - -namespace { -class MockTable : public PunctuationTableInterface { - public: - MockTable() { - } - virtual ~MockTable() {} - - MOCK_CONST_METHOD2(GetCandidates, - bool(char key, vector *candidates)); - MOCK_CONST_METHOD1(GetDefaultCandidates, - void(vector *candidates)); - MOCK_CONST_METHOD2(GetDirectCommitTextForSimplifiedChinese, - bool(char key, string *commit_text)); - MOCK_CONST_METHOD2(GetDirectCommitTextForTraditionalChinese, - bool(char key, string *commit_text)); -}; - -string Repeat(const string &str, int n) { - string result; - for (int i = 0; i < n; ++i) { - result.append(str); - } - return result; -} -} // namespace - -class PunctuationContextTest : public testing::Test { - protected: - virtual void SetUp() { - session_config_.reset(new SessionConfig); - session_config_->full_width_word_mode = false; - session_config_->full_width_punctuation_mode = true; - session_config_->simplified_chinese_mode = true; - - context_.reset(new PunctuationContext(*session_config_)); - table_.reset(new MockTable); - context_->table_ = table_.get(); - - dummy_candidates_.clear(); - dummy_candidates_.push_back("\xEF\xBC\x81"); // "·" - dummy_candidates_.push_back("\xEF\xBC\x8C"); // "," - dummy_candidates_.push_back("\xE3\x80\x82"); // "。" - - dummy_commit_text_.assign("__dummy_commit_text__"); - - // Default behaviors of table_ - EXPECT_CALL(*table_, GetCandidates(_, _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(dummy_candidates_), - Return(true))); - EXPECT_CALL(*table_, GetDefaultCandidates(_)) - .WillRepeatedly(SetArgPointee<0>(dummy_candidates_)); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('!', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(dummy_commit_text_), - Return(true))); - EXPECT_CALL(*table_, GetDirectCommitTextForTraditionalChinese('!', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(dummy_commit_text_), - Return(true))); - } - - virtual void TearDown() { - } - - void InsertCharacterChars(const string &chars) { - for (size_t i = 0; i < chars.size(); ++i) { - context_->Insert(chars[i]); - } - } - - void CheckComposition(const string &input_text, - const string &selected_text, - const string &rest_text) { - EXPECT_EQ(input_text, context_->input_text()); - EXPECT_EQ(selected_text, context_->selected_text()); - EXPECT_EQ("", context_->conversion_text()); - EXPECT_EQ(rest_text, context_->rest_text()); - EXPECT_EQ(Util::CharsLen(selected_text), context_->cursor()); - } - - void CheckCandidates(const vector &candidates, - size_t focused_candidate_index, - const string &auxiliary_text) { - vector actual_candidates; - const size_t candidates_size = GetCandidatesSize(); - ASSERT_EQ(candidates.size(), candidates_size); - for (size_t i = 0; i < candidates_size; ++i) { - Candidate candidate; - ASSERT_TRUE(context_->GetCandidate(i, &candidate)); - EXPECT_EQ(candidates[i], candidate.text); - } - - EXPECT_EQ(auxiliary_text, context_->auxiliary_text()); - EXPECT_EQ(focused_candidate_index, context_->focused_candidate_index()); - } - - void CheckResult(const string &commit_text) { - EXPECT_EQ(commit_text, 
context_->commit_text()); - } - - size_t GetCandidatesSize() { - size_t size = 0; - for (; context_->HasCandidate(size); ++size) {} - return size; - } - - scoped_ptr table_; - scoped_ptr session_config_; - scoped_ptr context_; - vector dummy_candidates_; - string dummy_commit_text_; - const vector empty_candidates_; -}; - -TEST_F(PunctuationContextTest, Insert) { - { - SCOPED_TRACE("Directly commit text (Success)"); - context_->Clear(); - EXPECT_TRUE(context_->Insert('!')); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_commit_text_); - } - - { - SCOPED_TRACE("Directly commit text (Failed)"); - context_->Clear(); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('!', _)) - .WillOnce(Return(false)); - - EXPECT_TRUE(context_->Insert('!')); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult("!"); - } - - { // English Mode - context_->Clear(); - - { - SCOPED_TRACE("Turn on English mode"); - EXPECT_TRUE(context_->Insert('`')); - CheckComposition("`", dummy_candidates_[0], ""); - CheckCandidates(dummy_candidates_, 0, "`|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Insert a character on English mode"); - EXPECT_TRUE(context_->Insert('!')); - CheckComposition("!", dummy_candidates_[0], ""); - CheckCandidates(dummy_candidates_, 0, "!|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Insert a additional character on English mode"); - EXPECT_TRUE(context_->Insert('!')); - CheckComposition("!!", Repeat(dummy_candidates_[0], 2), ""); - CheckCandidates(dummy_candidates_, 0, "!!|"); - CheckResult(""); - } - } - - context_->Clear(); - - { - SCOPED_TRACE("Insert a invalid character"); - EXPECT_FALSE(context_->Insert(' ')); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(""); - } -} - -TEST_F(PunctuationContextTest, Commit) { - InsertCharacterChars("`!!"); - context_->MoveCursorLeft(); - context_->FocusCandidate(1); - - { - SCOPED_TRACE("Commit"); - context_->Commit(); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_candidates_[1] + dummy_candidates_[0]); - } - - InsertCharacterChars("`!!"); - context_->MoveCursorLeft(); - context_->FocusCandidate(1); - - { - SCOPED_TRACE("CommitPreedit"); - context_->CommitPreedit(); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult("!!"); - } -} - -TEST_F(PunctuationContextTest, MoveCursor) { - const string &candidate = dummy_candidates_[0]; - - InsertCharacterChars("`"); - - { - SCOPED_TRACE("Moves cursor left [`]"); - context_->MoveCursorLeft(); - CheckComposition("`", "", candidate); - CheckCandidates(empty_candidates_, 0, "|`"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor left and does not make sense [`]"); - context_->MoveCursorLeft(); - CheckComposition("`", "", candidate); - CheckCandidates(empty_candidates_, 0, "|`"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor right [`]"); - context_->MoveCursorRight(); - CheckComposition("`", candidate, ""); - CheckCandidates(dummy_candidates_, 0, "`|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor right and does not make sense [`]"); - context_->MoveCursorRight(); - CheckComposition("`", candidate, ""); - CheckCandidates(dummy_candidates_, 0, "`|"); - CheckResult(""); - } - - context_->Clear(); - InsertCharacterChars("`!!!!"); - - { - SCOPED_TRACE("Moves cursor left [!!!!]"); - context_->MoveCursorLeft(); - CheckComposition("!!!!", Repeat(candidate, 3), 
Repeat(candidate, 1)); - CheckCandidates(dummy_candidates_, 0, "!!!|!"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor left by word [!!!!]"); - context_->MoveCursorLeftByWord(); - CheckComposition("!!!!", Repeat(candidate, 2), Repeat(candidate, 2)); - CheckCandidates(dummy_candidates_, 0, "!!|!!"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor to beginning [!!!!]"); - context_->MoveCursorToBeginning(); - CheckComposition("!!!!", Repeat(candidate, 0), Repeat(candidate, 4)); - CheckCandidates(empty_candidates_, 0, "|!!!!"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor left and does not make sense [!!!!]"); - context_->MoveCursorLeftByWord(); - CheckComposition("!!!!", Repeat(candidate, 0), Repeat(candidate, 4)); - CheckCandidates(empty_candidates_, 0, "|!!!!"); - CheckResult(""); - } - - - { - SCOPED_TRACE("Moves cursor right [!!!!]"); - context_->MoveCursorRight(); - CheckComposition("!!!!", Repeat(candidate, 1), Repeat(candidate, 3)); - CheckCandidates(dummy_candidates_, 0, "!|!!!"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor right by word [!!!!]"); - context_->MoveCursorRightByWord(); - CheckComposition("!!!!", Repeat(candidate, 2), Repeat(candidate, 2)); - CheckCandidates(dummy_candidates_, 0, "!!|!!"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor to end [!!!!]"); - context_->MoveCursorToEnd(); - CheckComposition("!!!!", Repeat(candidate, 4), Repeat(candidate, 0)); - CheckCandidates(dummy_candidates_, 0, "!!!!|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Moves cursor right and does not make sense [!!!!]"); - context_->MoveCursorRightByWord(); - CheckComposition("!!!!", Repeat(candidate, 4), Repeat(candidate, 0)); - CheckCandidates(dummy_candidates_, 0, "!!!!|"); - CheckResult(""); - } -} - -TEST_F(PunctuationContextTest, FocusCandidateIndex) { - { - SCOPED_TRACE("There are no candidates"); - context_->FocusCandidate(10); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(""); - } - - InsertCharacterChars("`"); - - { - SCOPED_TRACE("Default candidates"); - context_->FocusCandidate(1); - CheckComposition("`", dummy_candidates_[1], ""); - CheckCandidates(dummy_candidates_, 1, "`|"); - CheckResult(""); - } - - context_->Clear(); - InsertCharacterChars("`!"); - - { - SCOPED_TRACE("Normal candidates for [!]"); - context_->FocusCandidate(2); - CheckComposition("!", dummy_candidates_[2], ""); - CheckCandidates(dummy_candidates_, 2, "!|"); - CheckResult(""); - } - - context_->Clear(); - InsertCharacterChars("`!!"); - - { - SCOPED_TRACE("Normal candidates for [!!]"); - context_->FocusCandidate(1); - CheckComposition("!!", dummy_candidates_[0] + dummy_candidates_[1], ""); - CheckCandidates(dummy_candidates_, 1, "!!|"); - CheckResult(""); - } - - context_->Clear(); - InsertCharacterChars("`!!"); - context_->FocusCandidate(1); - context_->MoveCursorLeft(); - - { - SCOPED_TRACE("Normal candidates for [!|!]"); - context_->FocusCandidate(2); - CheckComposition("!!", dummy_candidates_[2], dummy_candidates_[1]); - CheckCandidates(dummy_candidates_, 2, "!|!"); - CheckResult(""); - } -} - -TEST_F(PunctuationContextTest, SelectCandidate) { - InsertCharacterChars("`"); - - { - SCOPED_TRACE("Select a non-exist candidate [`]"); - EXPECT_FALSE(context_->SelectCandidate(100)); - CheckComposition("`", dummy_candidates_[0], ""); - CheckCandidates(dummy_candidates_, 0, "`|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Select a 2nd default candidate and commit [`]"); - 
EXPECT_TRUE(context_->SelectCandidate(1)); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_candidates_[1]); - } - - InsertCharacterChars("`!"); - - { - SCOPED_TRACE("Select a 1st candidate and commit [!]"); - EXPECT_TRUE(context_->SelectCandidate(2)); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_candidates_[2]); - } - - InsertCharacterChars("`!!"); - - { - SCOPED_TRACE("Select a 3rd candidate and commit [!!]"); - EXPECT_TRUE(context_->SelectCandidate(2)); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_candidates_[0] + dummy_candidates_[2]); - } - - InsertCharacterChars("`!!"); - context_->MoveCursorLeft(); - - { - SCOPED_TRACE("Select a 2nd candidate and commit [!|!]"); - EXPECT_TRUE(context_->SelectCandidate(1)); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(dummy_candidates_[1] + dummy_candidates_[0]); - } -} - -TEST_F(PunctuationContextTest, Remove) { - const string candidate = dummy_candidates_[0]; - - { - SCOPED_TRACE("Removes a character from empty context"); - EXPECT_FALSE(context_->RemoveCharBefore()); - EXPECT_FALSE(context_->RemoveCharAfter()); - EXPECT_FALSE(context_->RemoveWordBefore()); - EXPECT_FALSE(context_->RemoveWordAfter()); - } - - context_->Clear(); - InsertCharacterChars("`"); - - { - SCOPED_TRACE("Removes a next character and doesn't make sense [`]"); - EXPECT_FALSE(context_->RemoveCharAfter()); - CheckComposition("`", candidate, ""); - CheckCandidates(dummy_candidates_, 0, "`|"); - CheckResult(""); - } - - { - SCOPED_TRACE("Removes a previous character [`]"); - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckComposition("", "", ""); - CheckCandidates(empty_candidates_, 0, ""); - CheckResult(""); - } - - context_->Clear(); - InsertCharacterChars("`0123456789"); - for (size_t i = 0; i < 5; ++i) { - context_->MoveCursorLeft(); - } - - { - SCOPED_TRACE("Removes a previous character [01234|56789]"); - EXPECT_TRUE(context_->RemoveCharBefore()); - CheckComposition("012356789", Repeat(candidate, 4), Repeat(candidate, 5)); - CheckCandidates(dummy_candidates_, 0, "0123|56789"); - CheckResult(""); - } - - { - SCOPED_TRACE("Removes a next character [0123|56789]"); - EXPECT_TRUE(context_->RemoveCharAfter()); - CheckComposition("01236789", Repeat(candidate, 4), Repeat(candidate, 4)); - CheckCandidates(dummy_candidates_, 0, "0123|6789"); - CheckResult(""); - } - - { - SCOPED_TRACE("Removes a previous word [0123|6789]"); - EXPECT_TRUE(context_->RemoveWordBefore()); - CheckComposition("0126789", Repeat(candidate, 3), Repeat(candidate, 4)); - CheckCandidates(dummy_candidates_, 0, "012|6789"); - CheckResult(""); - } - - { - SCOPED_TRACE("Removes a next word [012|6789]"); - EXPECT_TRUE(context_->RemoveWordAfter()); - CheckComposition("012789", Repeat(candidate, 3), Repeat(candidate, 3)); - CheckCandidates(dummy_candidates_, 0, "012|789"); - CheckResult(""); - } -} - -TEST_F(PunctuationContextTest, Config) { - session_config_->full_width_word_mode = false; - session_config_->full_width_punctuation_mode = true; - session_config_->simplified_chinese_mode = true; - - { // Full width punctuation with simplified chinese - context_->Clear(); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('!', _)) - .Times(1).WillOnce(Return(true)); - EXPECT_TRUE(context_->Insert('!')); - } - - { // Full width punctuation with traditional chinese - session_config_->simplified_chinese_mode = false; - 
context_->Clear(); - EXPECT_CALL(*table_, GetDirectCommitTextForTraditionalChinese('!', _)) - .Times(1).WillOnce(Return(true)); - EXPECT_TRUE(context_->Insert('!')); - } - - { // Half width punctuation - session_config_->full_width_punctuation_mode = false; - context_->Clear(); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('!', _)) - .Times(0); - EXPECT_CALL(*table_, GetDirectCommitTextForTraditionalChinese('!', _)) - .Times(0); - EXPECT_TRUE(context_->Insert('!')); - } - - { - SCOPED_TRACE("Full width word"); - session_config_->full_width_word_mode = false; - context_->Clear(); - EXPECT_TRUE(context_->Insert('!')); - CheckResult("!"); - } - - { - SCOPED_TRACE("Half width word"); - session_config_->full_width_word_mode = true; - context_->Clear(); - EXPECT_TRUE(context_->Insert('!')); - CheckResult("\xEF\xBC\x81"); // "!" - } -} - -TEST_F(PunctuationContextTest, ToggleQuotes) { - const char *kOpenSingleQuote = "\xE2\x80\x98"; // "‘" - const char *kCloseSingleQuote = "\xE2\x80\x99"; // "’" - const char *kOpenDoubleQuote = "\xE2\x80\x9C"; // "“" - const char *kCloseDoubleQuote = "\xE2\x80\x9D"; // "”" - - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('\'', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(kOpenSingleQuote), Return(true))); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('"', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(kOpenDoubleQuote), Return(true))); - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('a', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>("a"), Return(true))); - - context_->Insert('\''); - EXPECT_EQ(kOpenSingleQuote, context_->commit_text()); - context_->Insert('\''); - EXPECT_EQ(kCloseSingleQuote, context_->commit_text()); - context_->Insert('\''); - EXPECT_EQ(kOpenSingleQuote, context_->commit_text()); - - context_->Insert('"'); - EXPECT_EQ(kOpenDoubleQuote, context_->commit_text()); - context_->Insert('"'); - EXPECT_EQ(kCloseDoubleQuote, context_->commit_text()); - context_->Insert('"'); - EXPECT_EQ(kOpenDoubleQuote, context_->commit_text()); - - context_->ClearAll(); - // Opening quotes should be commited. - context_->Insert('\''); - EXPECT_EQ(kOpenSingleQuote, context_->commit_text()); - context_->Insert('"'); - EXPECT_EQ(kOpenDoubleQuote, context_->commit_text()); - - context_->Insert('a'); - ASSERT_EQ("a", context_->commit_text()); - // Closing quotes should be commited. - context_->Insert('\''); - EXPECT_EQ(kCloseSingleQuote, context_->commit_text()); - context_->Insert('"'); - EXPECT_EQ(kCloseDoubleQuote, context_->commit_text()); -} - -TEST_F(PunctuationContextTest, PeriodAfterDigit) { - const char *kDot = "\xE3\x80\x82"; // "。" - - EXPECT_CALL(*table_, GetDirectCommitTextForSimplifiedChinese('.', _)) - .WillRepeatedly(DoAll(SetArgPointee<1>(kDot), Return(true))); - - context_->Insert('.'); - EXPECT_EQ(kDot, context_->commit_text()); - - context_->UpdatePreviousCommitText("0"); - context_->Insert('.'); - EXPECT_EQ(".", context_->commit_text()); -} - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_table.cc mozc-1.11.1522.102/languages/pinyin/punctuation_table.cc --- mozc-1.11.1502.102/languages/pinyin/punctuation_table.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_table.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,494 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/punctuation_table.h" - -#include "base/logging.h" -#include "base/singleton.h" - -namespace mozc { -namespace pinyin { -namespace punctuation { - -namespace { - -const char *kPunctuationDefaultCandidatesTable[] = { - // "·", ",", "。", "「", "」", "、", ":", ";", "?", "!", - "\xC2\xB7", "\xEF\xBC\x8C", "\xE3\x80\x82", "\xE3\x80\x8C", - "\xE3\x80\x8D", "\xE3\x80\x81", "\xEF\xBC\x9A", "\xEF\xBC\x9B", - "\xEF\xBC\x9F", "\xEF\xBC\x81", -}; - -const struct PunctuationCandidatesTable { - const char key; - // We should allocate enough memory to contain candidates or we will get - // compile errors. 
- const char *candidates[11]; -} kPunctuationCandidatesTable[] = { - { // "!", "﹗", "‼", "⁉" - '!', - { "\xEF\xBC\x81", "\xEF\xB9\x97", "\xE2\x80\xBC", "\xE2\x81\x89", NULL }, - }, { // "“", "”", """ - '"', - { "\xE2\x80\x9C", "\xE2\x80\x9D", "\xEF\xBC\x82", NULL }, - }, { // "#", "﹟", "♯" - '#', - { "\xEF\xBC\x83", "\xEF\xB9\x9F", "\xE2\x99\xAF", NULL }, - }, { // "$", "€", "﹩", "¢", "£", "¥" - '$', - { "\xEF\xBC\x84", "\xE2\x82\xAC", "\xEF\xB9\xA9", "\xEF\xBF\xA0", - "\xEF\xBF\xA1", "\xEF\xBF\xA5", NULL }, - }, { // "%", "﹪", "‰", "‱", "㏙", "㏗" - '%', - { "\xEF\xBC\x85", "\xEF\xB9\xAA", "\xE2\x80\xB0", "\xE2\x80\xB1", - "\xE3\x8F\x99", "\xE3\x8F\x97", NULL }, - }, { // "&", "﹠" - '&', - { "\xEF\xBC\x86", "\xEF\xB9\xA0", NULL }, - }, { // "、", "‘", "’" - '\'', - { "\xE3\x80\x81", "\xE2\x80\x98", "\xE2\x80\x99", NULL }, - }, { // "(", "︵", "﹙" - '(', - { "\xEF\xBC\x88", "\xEF\xB8\xB5", "\xEF\xB9\x99", NULL }, - }, { // ")", "︶", "﹚" - ')', - { "\xEF\xBC\x89", "\xEF\xB8\xB6", "\xEF\xB9\x9A", NULL }, - }, { // "*", "×", "※", "╳", "﹡", "⁎", "⁑", "⁂", "⌘" - '*', - { "\xEF\xBC\x8A", "\xC3\x97", "\xE2\x80\xBB", "\xE2\x95\xB3", - "\xEF\xB9\xA1", "\xE2\x81\x8E", "\xE2\x81\x91", "\xE2\x81\x82", - "\xE2\x8C\x98", NULL }, - }, { // "+", "±", "﹢" - '+', - { "\xEF\xBC\x8B", "\xC2\xB1", "\xEF\xB9\xA2", NULL }, - }, { // ",", "、", "﹐", "﹑" - ',', - { "\xEF\xBC\x8C", "\xE3\x80\x81", "\xEF\xB9\x90", "\xEF\xB9\x91", NULL }, - }, { // "…", "—", "-", "¯", "﹉", " ̄", "﹊", "ˍ", "–", "‥" - '-', - { "\xE2\x80\xA6", "\xE2\x80\x94", "\xEF\xBC\x8D", "\xC2\xAF", - "\xEF\xB9\x89", "\xEF\xBF\xA3", "\xEF\xB9\x8A", "\xCB\x8D", - "\xE2\x80\x93", "\xE2\x80\xA5", NULL }, - }, { // "。", "·", "‧", "﹒", "." - '.', - { "\xE3\x80\x82", "\xC2\xB7", "\xE2\x80\xA7", "\xEF\xB9\x92", - "\xEF\xBC\x8E", NULL }, - }, { // "/", "÷", "↗", "↙", "∕" - '/', - { "\xEF\xBC\x8F", "\xC3\xB7", "\xE2\x86\x97", "\xE2\x86\x99", - "\xE2\x88\x95", NULL }, - }, { // "0", "0" - '0', - { "\xEF\xBC\x90", "0", NULL }, - }, { // "1", "1" - '1', - { "\xEF\xBC\x91", "1", NULL }, - }, { // "2", "2" - '2', - { "\xEF\xBC\x92", "2", NULL }, - }, { // "3", "3" - '3', - { "\xEF\xBC\x93", "3", NULL }, - }, { // "4", "4" - '4', - { "\xEF\xBC\x94", "4", NULL }, - }, { // "5", "5" - '5', - { "\xEF\xBC\x95", "5", NULL }, - }, { // "6", "6" - '6', - { "\xEF\xBC\x96", "6", NULL }, - }, { // "7", "7" - '7', - { "\xEF\xBC\x97", "7", NULL }, - }, { // "8", "8" - '8', - { "\xEF\xBC\x98", "8", NULL }, - }, { // "9", "9" - '9', - { "\xEF\xBC\x99", "9", NULL }, - }, { // ":", "︰", "﹕" - ':', - { "\xEF\xBC\x9A", "\xEF\xB8\xB0", "\xEF\xB9\x95", NULL }, - }, { // ";", "﹔" - ';', - { "\xEF\xBC\x9B", "\xEF\xB9\x94", NULL }, - }, { // "<", "〈", "《", "︽", "︿", "﹤" - '<', - { "\xEF\xBC\x9C", "\xE3\x80\x88", "\xE3\x80\x8A", "\xEF\xB8\xBD", - "\xEF\xB8\xBF", "\xEF\xB9\xA4", NULL }, - }, { // "=", "≒", "≠", "≡", "≦", "≧", "﹦" - '=', - { "\xEF\xBC\x9D", "\xE2\x89\x92", "\xE2\x89\xA0", "\xE2\x89\xA1", - "\xE2\x89\xA6", "\xE2\x89\xA7", "\xEF\xB9\xA6", NULL }, - }, { // ">", "〉", "》", "︾", "﹀", "﹥" - '>', - { "\xEF\xBC\x9E", "\xE3\x80\x89", "\xE3\x80\x8B", "\xEF\xB8\xBE", - "\xEF\xB9\x80", "\xEF\xB9\xA5", NULL }, - }, { // "?", "﹖", "⁇", "⁈" - '?', - { "\xEF\xBC\x9F", "\xEF\xB9\x96", "\xE2\x81\x87", "\xE2\x81\x88", NULL }, - }, { // "@", "⊕", "⊙", "㊣", "﹫", "◉", "◎" - '@', - { "\xEF\xBC\xA0", "\xE2\x8A\x95", "\xE2\x8A\x99", "\xE3\x8A\xA3", - "\xEF\xB9\xAB", "\xE2\x97\x89", "\xE2\x97\x8E", NULL }, - }, { // "A", "A" - 'A', - { "\xEF\xBC\xA1", "A", NULL }, - }, { // "B", "B" - 'B', - { 
"\xEF\xBC\xA2", "B", NULL }, - }, { // "C", "C" - 'C', - { "\xEF\xBC\xA3", "C", NULL }, - }, { // "D", "D" - 'D', - { "\xEF\xBC\xA4", "D", NULL }, - }, { // "E", "E" - 'E', - { "\xEF\xBC\xA5", "E", NULL }, - }, { // "F", "F" - 'F', - { "\xEF\xBC\xA6", "F", NULL }, - }, { // "G", "G" - 'G', - { "\xEF\xBC\xA7", "G", NULL }, - }, { // "H", "H" - 'H', - { "\xEF\xBC\xA8", "H", NULL }, - }, { // "I", "I" - 'I', - { "\xEF\xBC\xA9", "I", NULL }, - }, { // "J", "J" - 'J', - { "\xEF\xBC\xAA", "J", NULL }, - }, { // "K", "K" - 'K', - { "\xEF\xBC\xAB", "K", NULL }, - }, { // "L", "L" - 'L', - { "\xEF\xBC\xAC", "L", NULL }, - }, { // "M", "M" - 'M', - { "\xEF\xBC\xAD", "M", NULL }, - }, { // "N", "N" - 'N', - { "\xEF\xBC\xAE", "N", NULL }, - }, { // "O", "O" - 'O', - { "\xEF\xBC\xAF", "O", NULL }, - }, { // "P", "P" - 'P', - { "\xEF\xBC\xB0", "P", NULL }, - }, { // "Q", "Q" - 'Q', - { "\xEF\xBC\xB1", "Q", NULL }, - }, { // "R", "R" - 'R', - { "\xEF\xBC\xB2", "R", NULL }, - }, { // "S", "S" - 'S', - { "\xEF\xBC\xB3", "S", NULL }, - }, { // "T", "T" - 'T', - { "\xEF\xBC\xB4", "T", NULL }, - }, { // "U", "U" - 'U', - { "\xEF\xBC\xB5", "U", NULL }, - }, { // "V", "V" - 'V', - { "\xEF\xBC\xB6", "V", NULL }, - }, { // "W", "W" - 'W', - { "\xEF\xBC\xB7", "W", NULL }, - }, { // "X", "X" - 'X', - { "\xEF\xBC\xB8", "X", NULL }, - }, { // "Y", "Y" - 'Y', - { "\xEF\xBC\xB9", "Y", NULL }, - }, { // "Z", "Z" - 'Z', - { "\xEF\xBC\xBA", "Z", NULL }, - }, { // "「", "[", "『", "【", "「", "︻", "﹁", "﹃" - '[', - { "\xE3\x80\x8C", "\xEF\xBC\xBB", "\xE3\x80\x8E", "\xE3\x80\x90", - "\xEF\xBD\xA2", "\xEF\xB8\xBB", "\xEF\xB9\x81", "\xEF\xB9\x83", NULL }, - }, { // "\", "↖", "↘", "﹨" - '\\', - { "\xEF\xBC\xBC", "\xE2\x86\x96", "\xE2\x86\x98", "\xEF\xB9\xA8", NULL }, - }, { // "」", "]", "』", "】", "」", "︼", "﹂", "﹄" - ']', - { "\xE3\x80\x8D", "\xEF\xBC\xBD", "\xE3\x80\x8F", "\xE3\x80\x91", - "\xEF\xBD\xA3", "\xEF\xB8\xBC", "\xEF\xB9\x82", "\xEF\xB9\x84", NULL }, - }, { // "︿", "〈", "《", "︽", "﹤", "<" - '^', - { "\xEF\xB8\xBF", "\xE3\x80\x88", "\xE3\x80\x8A", "\xEF\xB8\xBD", - "\xEF\xB9\xA4", "\xEF\xBC\x9C", NULL }, - }, { // "_", "╴", "←", "→" - '_', - { "\xEF\xBC\xBF", "\xE2\x95\xB4", "\xE2\x86\x90", "\xE2\x86\x92", NULL }, - }, { // "‵", "′" - '`', - { "\xE2\x80\xB5", "\xE2\x80\xB2", NULL }, - }, { // "a", "a" - 'a', - { "\xEF\xBD\x81", "a", NULL }, - }, { // "b", "b" - 'b', - { "\xEF\xBD\x82", "b", NULL }, - }, { // "c", "c" - 'c', - { "\xEF\xBD\x83", "c", NULL }, - }, { // "d", "d" - 'd', - { "\xEF\xBD\x84", "d", NULL }, - }, { // "e", "e" - 'e', - { "\xEF\xBD\x85", "e", NULL }, - }, { // "f", "f" - 'f', - { "\xEF\xBD\x86", "f", NULL }, - }, { // "g", "g" - 'g', - { "\xEF\xBD\x87", "g", NULL }, - }, { // "h", "h" - 'h', - { "\xEF\xBD\x88", "h", NULL }, - }, { // "i", "i" - 'i', - { "\xEF\xBD\x89", "i", NULL }, - }, { // "j", "j" - 'j', - { "\xEF\xBD\x8A", "j", NULL }, - }, { // "k", "k" - 'k', - { "\xEF\xBD\x8B", "k", NULL }, - }, { // "l", "l" - 'l', - { "\xEF\xBD\x8C", "l", NULL }, - }, { // "m", "m" - 'm', - { "\xEF\xBD\x8D", "m", NULL }, - }, { // "n", "n" - 'n', - { "\xEF\xBD\x8E", "n", NULL }, - }, { // "o", "o" - 'o', - { "\xEF\xBD\x8F", "o", NULL }, - }, { // "p", "p" - 'p', - { "\xEF\xBD\x90", "p", NULL }, - }, { // "q", "q" - 'q', - { "\xEF\xBD\x91", "q", NULL }, - }, { // "r", "r" - 'r', - { "\xEF\xBD\x92", "r", NULL }, - }, { // "s", "s" - 's', - { "\xEF\xBD\x93", "s", NULL }, - }, { // "t", "t" - 't', - { "\xEF\xBD\x94", "t", NULL }, - }, { // "u", "u" - 'u', - { "\xEF\xBD\x95", "u", NULL }, - }, { // "v", "v" - 
'v', - { "\xEF\xBD\x96", "v", NULL }, - }, { // "w", "w" - 'w', - { "\xEF\xBD\x97", "w", NULL }, - }, { // "x", "x" - 'x', - { "\xEF\xBD\x98", "x", NULL }, - }, { // "y", "y" - 'y', - { "\xEF\xBD\x99", "y", NULL }, - }, { // "z", "z" - 'z', - { "\xEF\xBD\x9A", "z", NULL }, - }, { // "{", "︷", "﹛", "〔", "﹝", "︹" - '{', - { "\xEF\xBD\x9B", "\xEF\xB8\xB7", "\xEF\xB9\x9B", "\xE3\x80\x94", - "\xEF\xB9\x9D", "\xEF\xB8\xB9", NULL }, - }, { // "|", "↑", "↓", "∣", "∥", "︱", "︳", "︴", "¦" - '|', - { "\xEF\xBD\x9C", "\xE2\x86\x91", "\xE2\x86\x93", "\xE2\x88\xA3", - "\xE2\x88\xA5", "\xEF\xB8\xB1", "\xEF\xB8\xB3", "\xEF\xB8\xB4", - "\xEF\xBF\xA4", NULL }, - }, { // "}", "︸", "﹜", "〕", "﹞", "︺" - '}', - { "\xEF\xBD\x9D", "\xEF\xB8\xB8", "\xEF\xB9\x9C", "\xE3\x80\x95", - "\xEF\xB9\x9E", "\xEF\xB8\xBA", NULL }, - }, { // "~", "﹋", "﹌" - '~', - { "\xEF\xBD\x9E", "\xEF\xB9\x8B", "\xEF\xB9\x8C", NULL }, - }, -}; - -const struct PunctuationDirectCommitTable { - public: - const char key; - const char *value; -} kPunctuationDirectCommitTable[] = { - { '!', "\xEF\xBC\x81" }, // "!" - // TODO(hsumita): Consider a previously committed character. - // We should alternately insert "“" and "”". - { '"', "\xE2\x80\x9C" }, // "“" - { '$', "\xEF\xBF\xA5" }, // "¥" - // TODO(hsumita): Consider a previously committed character. - // We should alternately insert "‘" and "’". - { '\'', "\xE2\x80\x98" }, // "‘" - { '(', "\xEF\xBC\x88" }, // "(" - { ')', "\xEF\xBC\x89" }, // ")" - { ',', "\xEF\xBC\x8C" }, // "," - // TODO(hsumita): Consider a previously committed character. - // We should insert "." after a number character. - { '.', "\xE3\x80\x82" }, // "。" - { ':', "\xEF\xBC\x9A" }, // ":" - { ';', "\xEF\xBC\x9B" }, // ";" - { '<', "\xE3\x80\x8A" }, // "《" - { '>', "\xE3\x80\x8B" }, // "》" - { '?', "\xEF\xBC\x9F" }, // "?" 
- { '[', "\xE3\x80\x90" }, // "【" - { '\\', "\xE3\x80\x81" }, // "、" - { ']', "\xE3\x80\x91" }, // "】" - { '^', "\xE2\x80\xA6\xE2\x80\xA6" }, // "……" - { '_', "\xE2\x80\x94\xE2\x80\x94" }, // "——" - { '{', "\xE3\x80\x8E" }, // "『" - { '}', "\xE3\x80\x8F" }, // "』" - { '~', "\xEF\xBD\x9E" }, // "~" -}; - -} // namespace - -PunctuationTable::PunctuationTable() { - const size_t default_candidates_table_size = - ARRAYSIZE_UNSAFE(kPunctuationDefaultCandidatesTable); - const char **table = kPunctuationDefaultCandidatesTable; - default_candidates_.assign(table, table + default_candidates_table_size); - - const size_t candidates_table_size = - ARRAYSIZE_UNSAFE(kPunctuationCandidatesTable); - - for (size_t i = 0; i < candidates_table_size; ++i) { - const PunctuationCandidatesTable &data = kPunctuationCandidatesTable[i]; - - size_t candidates_num = 0; - for (size_t j = 0; j < ARRAYSIZE_UNSAFE(data.candidates); ++j) { - if (data.candidates[j] == NULL) { - candidates_num = j; - break; - } - } - DCHECK_NE(0, candidates_num); - - const vector table(data.candidates, - data.candidates + candidates_num); - conversion_map_.insert(make_pair(data.key, table)); - } - - const size_t commit_table_size = - ARRAYSIZE_UNSAFE(kPunctuationDirectCommitTable); - for (size_t i = 0; i < commit_table_size; ++i) { - const PunctuationDirectCommitTable &item = - kPunctuationDirectCommitTable[i]; - commit_map_.insert(make_pair(item.key, item.value)); - } -} - -PunctuationTable::~PunctuationTable() { -} - -bool PunctuationTable::GetCandidates(char key, - vector *candidates) const { - DCHECK(candidates); - - const map >::const_iterator iter = - conversion_map_.find(key); - if (iter == conversion_map_.end()) { - DLOG(ERROR) << "Can't find candidates for " << key; - return false; - } - candidates->assign(iter->second.begin(), iter->second.end()); - - return true; -} - -void PunctuationTable::GetDefaultCandidates(vector *candidates) const { - DCHECK(candidates); - candidates->assign(default_candidates_.begin(), default_candidates_.end()); -} - -bool PunctuationTable::GetDirectCommitTextForSimplifiedChinese( - char key, string *commit_text) const { - DCHECK(commit_text); - return GetDirectCommitTextInternal(key, commit_text); -} - -bool PunctuationTable::GetDirectCommitTextForTraditionalChinese( - char key, string *commit_text) const { - DCHECK(commit_text); - - switch (key) { - case '<': - commit_text->assign("\xEF\xBC\x8C"); // "," - return true; - case '>': - commit_text->assign("\xE3\x80\x82"); // "。" - return true; - case '[': - commit_text->assign("\xE3\x80\x8C"); // "「" - return true; - case ']': - commit_text->assign("\xE3\x80\x8D"); // "」" - return true; - default: - return GetDirectCommitTextInternal(key, commit_text); - } -}; - -bool PunctuationTable::GetDirectCommitTextInternal( - char key, string *commit_text) const { - DCHECK(commit_text); - - map::const_iterator iter = commit_map_.find(key); - if (iter == commit_map_.end()) { - return false; - } - commit_text->assign(iter->second); - return true; -} - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_table.h mozc-1.11.1522.102/languages/pinyin/punctuation_table.h --- mozc-1.11.1502.102/languages/pinyin/punctuation_table.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_table.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,87 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_PUNCTUATION_TABLE_H_ -#define MOZC_LANGUAGES_PINYIN_PUNCTUATION_TABLE_H_ - -#include -#include -#include - -#include "base/port.h" -#include "base/singleton.h" - -namespace mozc { -namespace pinyin { -namespace punctuation { - -class PunctuationTableInterface { - public: - virtual ~PunctuationTableInterface() {} - - // All candidates contain only one character. - virtual bool GetCandidates(char key, vector *candidates) const = 0; - virtual void GetDefaultCandidates(vector *candidates) const = 0; - - // Commit text may contain multi characters. - virtual bool GetDirectCommitTextForSimplifiedChinese( - char key, string *commit_text) const = 0; - // Commit text may contain multi characters. - virtual bool GetDirectCommitTextForTraditionalChinese( - char key, string *commit_text) const = 0; -}; - -class PunctuationTable : public PunctuationTableInterface { - public: - virtual ~PunctuationTable(); - bool GetCandidates(char key, vector *candidates) const; - void GetDefaultCandidates(vector *candidates) const; - - bool GetDirectCommitTextForSimplifiedChinese( - char key, string *commit_text) const; - bool GetDirectCommitTextForTraditionalChinese( - char key, string *commit_text) const; - - private: - friend class Singleton; - - bool GetDirectCommitTextInternal(char key, string *commit_text) const; - - vector default_candidates_; - map > conversion_map_; - map commit_map_; - - DISALLOW_IMPLICIT_CONSTRUCTORS(PunctuationTable); -}; - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_PUNCTUATION_TABLE_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/punctuation_table_test.cc mozc-1.11.1522.102/languages/pinyin/punctuation_table_test.cc --- mozc-1.11.1502.102/languages/pinyin/punctuation_table_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/punctuation_table_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,172 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. 
-// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include -#include -#include - -#include "languages/pinyin/punctuation_table.h" -#include "testing/base/public/gunit.h" - -namespace mozc { -namespace pinyin { -namespace punctuation { - -TEST(PunctuationTableTest, GetCandidates) { - const PunctuationTable *table = Singleton::get(); - - for (char ch = 0; ch < 128; ++ch) { - vector candidates; - if (isgraph(ch)) { - EXPECT_TRUE(table->GetCandidates(ch, &candidates)); - EXPECT_LT(0, candidates.size()); - } else { - EXPECT_FALSE(table->GetCandidates(ch, &candidates)); - EXPECT_TRUE(candidates.empty()); - } - } - - // It is too hard to test all cases, so I test only some queries. - - vector candidates; - table->GetCandidates('!', &candidates); - EXPECT_EQ(4, candidates.size()); - EXPECT_EQ(candidates[0], "\xEF\xBC\x81"); // "!" 
- EXPECT_EQ(candidates[1], "\xEF\xB9\x97"); // "﹗" - EXPECT_EQ(candidates[2], "\xE2\x80\xBC"); // "‼" - EXPECT_EQ(candidates[3], "\xE2\x81\x89"); // "⁉" - - candidates.clear(); - table->GetCandidates('0', &candidates); - EXPECT_EQ(2, candidates.size()); - EXPECT_EQ(candidates[0], "\xEF\xBC\x90"); // "0" - EXPECT_EQ(candidates[1], "0"); - - candidates.clear(); - table->GetCandidates('P', &candidates); - EXPECT_EQ(2, candidates.size()); - EXPECT_EQ(candidates[0], "\xEF\xBC\xB0"); // "P" - EXPECT_EQ(candidates[1], "P"); - - candidates.clear(); - table->GetCandidates('a', &candidates); - EXPECT_EQ(2, candidates.size()); - EXPECT_EQ(candidates[0], "\xEF\xBD\x81"); // "a" - EXPECT_EQ(candidates[1], "a"); - - candidates.clear(); - table->GetCandidates('~', &candidates); - EXPECT_EQ(3, candidates.size()); - EXPECT_EQ(candidates[0], "\xEF\xBD\x9E"); // "~" - EXPECT_EQ(candidates[1], "\xEF\xB9\x8B"); // "﹋" - EXPECT_EQ(candidates[2], "\xEF\xB9\x8C"); // "﹌" -} - -TEST(PunctuationTableTest, GetDefaultCandidates) { - PunctuationTable *table = Singleton::get(); - vector candidates; - - table->GetDefaultCandidates(&candidates); - EXPECT_EQ(10, candidates.size()); - EXPECT_EQ("\xC2\xB7" , candidates[0]); // "·" - EXPECT_EQ("\xEF\xBC\x8C", candidates[1]); // "," - EXPECT_EQ("\xE3\x80\x82", candidates[2]); // "。" - EXPECT_EQ("\xE3\x80\x8C", candidates[3]); // "「" - EXPECT_EQ("\xE3\x80\x8D", candidates[4]); // "」" - EXPECT_EQ("\xE3\x80\x81", candidates[5]); // "、" - EXPECT_EQ("\xEF\xBC\x9A", candidates[6]); // ":" - EXPECT_EQ("\xEF\xBC\x9B", candidates[7]); // ";" - EXPECT_EQ("\xEF\xBC\x9F", candidates[8]); // "?" - EXPECT_EQ("\xEF\xBC\x81", candidates[9]); // "!" -} - -TEST(PunctuationTableTest, GetDirectCommitText) { - PunctuationTable *table = Singleton::get(); - string commit_text; - - // It is too hard to test all cases, so I test only some queries. - - { // Simplified chinese test - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForSimplifiedChinese( - '!', &commit_text)); - EXPECT_EQ("\xEF\xBC\x81", commit_text); // "!" - - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForSimplifiedChinese( - '^', &commit_text)); - EXPECT_EQ("\xE2\x80\xA6\xE2\x80\xA6", commit_text); // "……" - - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForSimplifiedChinese( - '~', &commit_text)); - EXPECT_EQ("\xEF\xBD\x9E", commit_text); // "~" - } - - { // Traditional chinese test - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForTraditionalChinese( - '!', &commit_text)); - EXPECT_EQ("\xEF\xBC\x81", commit_text); // "!" 
- - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForTraditionalChinese( - '<', &commit_text)); - EXPECT_EQ("\xEF\xBC\x8C", commit_text); // "," - - commit_text.clear(); - EXPECT_TRUE(table->GetDirectCommitTextForTraditionalChinese( - '[', &commit_text)); - EXPECT_EQ("\xE3\x80\x8C", commit_text); // "「" - } - - { // Failure test - const char *kDummyText = "dummy_text"; - commit_text.assign(kDummyText); - EXPECT_FALSE(table->GetDirectCommitTextForSimplifiedChinese( - 'a', &commit_text)); - EXPECT_EQ(kDummyText, commit_text); - - EXPECT_FALSE(table->GetDirectCommitTextForSimplifiedChinese( - 'A', &commit_text)); - EXPECT_EQ(kDummyText, commit_text); - - EXPECT_FALSE(table->GetDirectCommitTextForSimplifiedChinese( - '0', &commit_text)); - EXPECT_EQ(kDummyText, commit_text); - - EXPECT_FALSE(table->GetDirectCommitTextForSimplifiedChinese( - '#', &commit_text)); - EXPECT_EQ(kDummyText, commit_text); - } -} - -} // namespace punctuation -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/server_main.cc mozc-1.11.1522.102/languages/pinyin/server_main.cc --- mozc-1.11.1502.102/languages/pinyin/server_main.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/server_main.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,48 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
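(Illustrative aside, not part of the patch.) The punctuation_table.h / punctuation_table.cc / punctuation_table_test.cc hunks above delete a static table that maps one ASCII key to an ordered list of full-width candidates, plus a smaller direct-commit map. The sketch below shows the same lookup shape in isolation; the candidate strings are copied from the deleted kPunctuationCandidatesTable, while the container layout and names are simplified stand-ins rather than the removed PunctuationTable API.

// Editorial sketch only -- not part of the deleted mozc sources above.
#include <iostream>
#include <map>
#include <string>
#include <vector>

int main() {
  // A two-entry excerpt of the deleted kPunctuationCandidatesTable:
  // '!' -> "！", "﹗", "‼", "⁉" and ',' -> "，", "、", "﹐", "﹑".
  const std::map<char, std::vector<std::string>> candidates = {
      {'!', {"\xEF\xBC\x81", "\xEF\xB9\x97", "\xE2\x80\xBC", "\xE2\x81\x89"}},
      {',', {"\xEF\xBC\x8C", "\xE3\x80\x81", "\xEF\xB9\x90", "\xEF\xB9\x91"}},
  };

  const auto it = candidates.find('!');
  if (it != candidates.end()) {
    for (const std::string &candidate : it->second) {
      std::cout << candidate << " ";
    }
    std::cout << std::endl;  // prints the four full-width variants of '!'
  }
  return 0;
}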
- -#include - -#include "languages/pinyin/pinyin_session_factory.h" -#include "server/mozc_server.h" -#include "session/session_factory_manager.h" - -int main(int argc, char* argv[]) { - mozc::server::InitGoogleAndMozcServer(argv[0], &argc, &argv, false); - - PyZy::InputContext::init(); - mozc::pinyin::PinyinSessionFactory session_factory; - mozc::session::SessionFactoryManager::SetSessionFactory(&session_factory); - - const int return_value = mozc::server::MozcServer::Run(); - - PyZy::InputContext::finalize(); - mozc::server::MozcServer::Finalize(); - return return_value; -} diff -Nru mozc-1.11.1502.102/languages/pinyin/session.cc mozc-1.11.1522.102/languages/pinyin/session.cc --- mozc-1.11.1502.102/languages/pinyin/session.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,537 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/session.h" - -#include -#include -#include - -#include "base/base.h" -#include "base/logging.h" -#include "base/util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/pinyin/keymap.h" -#include "languages/pinyin/pinyin_constant.h" -#include "languages/pinyin/session_config.h" -#include "languages/pinyin/session_converter.h" -#include "session/commands.pb.h" -#include "session/key_event_util.h" - -// To implement AutoCommit correctly, we implemented following steps. -// -// 1. Calls SelectFocusedCandidate() and fills protocol buffers with current -// context. -// 2. Discards a current context and creates a Punctuation context. -// 3. Inserts a character, which triggered AutoCommit. -// 4. Calls SelectFocusedCandidate() and appends new commit text. -// 5. Discards a Punctuation context and a create context. -// -// But it is not a natural implementation because filling a protocol buffer is a -// task of SessionConverter. I think following implementation is better. -// -// 1. 
Stores all contexts on SessionConverter. These contexts are not discarded -// until Session instance is destructed. -// 2. Implements a new method for AutoCommit on SessionConverter. -// 3. Session simply calls a new method. SessionConverter commits a current -// context, inserts a character which triggered AutoCommit to Punctuation -// context, commits Punctuation context, and merges a commit text of these -// contexts. -// -// TODO(hsumita): Refactors Session and SessionConverter to store all contexts -// on SessionConverter. (But it might be hard to rewrite -// session_converter_test.cc) - -namespace mozc { -namespace pinyin { - -namespace { -// TODO(hsumita): Unify config updating mechanism like session_handler. -uint64 g_last_config_updated = 0; - -size_t GetIndexFromKeyEvent(const commands::KeyEvent &key_event) { - // This class only accepts number keys. - DCHECK(key_event.has_key_code()); - const uint32 key_code = key_event.key_code(); - DCHECK(isdigit(key_code)); - return (key_code == '0') ? 9 : key_code - '1'; -} -} // namespace - -Session::Session() - : session_config_(new SessionConfig), - converter_(new SessionConverter(*session_config_)), - conversion_mode_(NONE), - next_conversion_mode_(NONE), - is_already_commited_(false), - create_session_time_(Util::GetTime()), - last_command_time_(0), - last_config_updated_(0) { - // Initializes session_config_ - const config::PinyinConfig &config = GET_CONFIG(pinyin_config); - session_config_->full_width_word_mode = config.initial_mode_full_width_word(); - session_config_->full_width_punctuation_mode = - config.initial_mode_full_width_punctuation(); - session_config_->simplified_chinese_mode = - config.initial_mode_simplified_chinese(); - - const ConversionMode conversion_mode = - GET_CONFIG(pinyin_config).initial_mode_chinese() ? PINYIN : DIRECT; - SwitchConversionMode(conversion_mode); - ResetConfig(); -} - -Session::~Session() { -} - -bool Session::SendKey(commands::Command *command) { - DCHECK(command); - - last_command_time_ = Util::GetTime(); - is_already_commited_ = false; - - const bool consumed = ProcessKeyEvent(command); - command->mutable_output()->set_consumed(consumed); - - if (!is_already_commited_) { - converter_->PopOutput(command->mutable_output()); - } - - SwitchConversionMode(next_conversion_mode_); - - DLOG(INFO) << command->DebugString(); - - return true; -} - -bool Session::TestSendKey(commands::Command *command) { - DCHECK(command); - - // TODO(hsumita): implement this. 
- last_command_time_ = Util::GetTime(); - if (g_last_config_updated > last_config_updated_) { - ResetConfig(); - } - - return true; -} - -bool Session::SendCommand(commands::Command *command) { - DCHECK(command); - - last_command_time_ = Util::GetTime(); - is_already_commited_ = false; - if (g_last_config_updated > last_config_updated_) { - ResetConfig(); - } - - bool consumed = ProcessCommand(command); - command->mutable_output()->set_consumed(consumed); - - if (!is_already_commited_) { - converter_->PopOutput(command->mutable_output()); - } - - SwitchConversionMode(next_conversion_mode_); - - DLOG(INFO) << command->DebugString(); - - return true; -} - -void Session::ReloadConfig() { - last_command_time_ = Util::GetTime(); - ResetConfig(); -} - -#ifdef OS_CHROMEOS -void Session::UpdateConfig(const config::PinyinConfig &config) { - config::Config mozc_config; - mozc_config.mutable_pinyin_config()->MergeFrom(config); - config::ConfigHandler::SetConfig(mozc_config); - g_last_config_updated = Util::GetTime(); -} -#endif // OS_CHROMEOS - -void Session::set_client_capability(const commands::Capability &capability) { - // Does nothing. Capability does not make sense with the current pinyin. -} - -void Session::set_application_info( - const commands::ApplicationInfo &application_info) { - application_info_.CopyFrom(application_info); -} - -const commands::ApplicationInfo &Session::application_info() const { - return application_info_; -} - -uint64 Session::create_session_time() const { - return create_session_time_; -} - -uint64 Session::last_command_time() const { - return last_command_time_; -} - -namespace { -// Returns true when key_command can change input mode to English. If this -// function returns true, it doesn't mean that always we change input mode. -// We should NOT change input mode when converter is active. -bool MaybePinyinModeCommandForKeyCommand(keymap::KeyCommand key_command) { - switch (key_command) { - case keymap::CLEAR: - case keymap::COMMIT: - case keymap::COMMIT_PREEDIT: - case keymap::SELECT_CANDIDATE: - case keymap::SELECT_FOCUSED_CANDIDATE: - case keymap::SELECT_SECOND_CANDIDATE: - case keymap::SELECT_THIRD_CANDIDATE: - case keymap::REMOVE_CHAR_BEFORE: - case keymap::REMOVE_CHAR_AFTER: - case keymap::REMOVE_WORD_BEFORE: - case keymap::REMOVE_WORD_AFTER: - return true; - default: - return false; - } -} - -// Returns true when key_command can change input mode to English. If this -// function returns true, it doesn't mean that always we change input mode. -// We should NOT change input mode when converter is active. -bool MaybePinyinModeCommandForSessionCommand( - const commands::SessionCommand &session_command) { - switch (session_command.type()) { - case commands::SessionCommand::SUBMIT: - case commands::SessionCommand::SELECT_CANDIDATE: - return true; - default: - return false; - } -} -} // namespace - -bool Session::ProcessKeyEvent(commands::Command *command) { - DCHECK(command); - - commands::KeyEvent key_event; - { - commands::KeyEvent temp_key_event; - KeyEventUtil::RemoveModifiers(command->input().key(), - commands::KeyEvent::CAPS, - &temp_key_event); - KeyEventUtil::NormalizeNumpadKey(temp_key_event, &key_event); - } - - const keymap::ConverterState converter_state = - (converter_->IsConverterActive()) - ? keymap::ACTIVE : keymap::INACTIVE; - - keymap::KeyCommand key_command; - - keymap_->GetCommand(key_event, converter_state, &key_command); - VLOG(2) << "KeyCommand: " << key_command << "\n" - << "Converter State: " - << (converter_state == keymap::ACTIVE ? 
"ACTIVE" : "INACTIVE") - << "\n" - << "Conversion Mode: " << conversion_mode_; - bool consumed = true; - - switch (key_command) { - case keymap::INSERT: - consumed = converter_->Insert(key_event); - break; - case keymap::INSERT_PUNCTUATION: - { - const ConversionMode comeback_conversion_mode = conversion_mode_; - SwitchConversionMode(PUNCTUATION); - consumed = converter_->Insert(key_event); - next_conversion_mode_ = comeback_conversion_mode; - } - break; - case keymap::COMMIT: - converter_->Commit(); - break; - case keymap::COMMIT_PREEDIT: - converter_->CommitPreedit(); - break; - case keymap::CLEAR: - converter_->Clear(); - break; - case keymap::AUTO_COMMIT: - // TODO(hsumita): Moves these logics into SessionConverter. - // Details are written on the top of this file. - converter_->SelectFocusedCandidate(); - if (converter_->IsConverterActive()) { - converter_->Commit(); - } - converter_->PopOutput(command->mutable_output()); - - { - const ConversionMode comeback_conversion_mode = conversion_mode_; - const string previous_commit_text = command->output().result().value(); - SwitchConversionMode(PUNCTUATION); - converter_->Insert(key_event); - converter_->PopOutput(command->mutable_output()); - const string punctuation_commit_text = - command->output().result().value(); - command->mutable_output()->mutable_result()->set_value( - previous_commit_text + punctuation_commit_text); - - is_already_commited_ = true; - next_conversion_mode_ = comeback_conversion_mode; - } - - break; - case keymap::MOVE_CURSOR_RIGHT: - converter_->MoveCursorRight(); - break; - case keymap::MOVE_CURSOR_LEFT: - converter_->MoveCursorLeft(); - break; - case keymap::MOVE_CURSOR_RIGHT_BY_WORD: - converter_->MoveCursorRightByWord(); - break; - case keymap::MOVE_CURSOR_LEFT_BY_WORD: - converter_->MoveCursorLeftByWord(); - break; - case keymap::MOVE_CURSOR_TO_BEGINNING: - converter_->MoveCursorToBeginning(); - break; - case keymap::MOVE_CURSOR_TO_END: - converter_->MoveCursorToEnd(); - break; - - case keymap::SELECT_CANDIDATE: - converter_->SelectCandidateOnPage(GetIndexFromKeyEvent(key_event)); - break; - case keymap::SELECT_FOCUSED_CANDIDATE: - converter_->SelectFocusedCandidate(); - break; - case keymap::SELECT_SECOND_CANDIDATE: - converter_->SelectCandidateOnPage(1); - break; - case keymap::SELECT_THIRD_CANDIDATE: - converter_->SelectCandidateOnPage(2); - break; - case keymap::FOCUS_CANDIDATE: - converter_->FocusCandidateOnPage(GetIndexFromKeyEvent(key_event)); - break; - case keymap::FOCUS_CANDIDATE_TOP: - converter_->FocusCandidate(0); - break; - case keymap::FOCUS_CANDIDATE_PREV: - converter_->FocusCandidatePrev(); - break; - case keymap::FOCUS_CANDIDATE_NEXT: - converter_->FocusCandidateNext(); - break; - case keymap::FOCUS_CANDIDATE_PREV_PAGE: - converter_->FocusCandidatePrevPage(); - break; - case keymap::FOCUS_CANDIDATE_NEXT_PAGE: - converter_->FocusCandidateNextPage(); - break; - case keymap::CLEAR_CANDIDATE_FROM_HISTORY: - converter_->ClearCandidateFromHistory(GetIndexFromKeyEvent(key_event)); - break; - - case keymap::REMOVE_CHAR_BEFORE: - converter_->RemoveCharBefore(); - break; - case keymap::REMOVE_CHAR_AFTER: - converter_->RemoveCharAfter(); - break; - case keymap::REMOVE_WORD_BEFORE: - converter_->RemoveWordBefore(); - break; - case keymap::REMOVE_WORD_AFTER: - converter_->RemoveWordAfter(); - break; - - case keymap::TOGGLE_DIRECT_MODE: - if (conversion_mode_ == DIRECT) { - SwitchConversionMode(PINYIN); - } else { - SwitchConversionMode(DIRECT); - } - break; - case keymap::TURN_ON_ENGLISH_MODE: - 
SwitchConversionMode(ENGLISH); - consumed = converter_->Insert(key_event); - break; - case keymap::TURN_ON_PUNCTUATION_MODE: - SwitchConversionMode(PUNCTUATION); - consumed = converter_->Insert(key_event); - break; - case keymap::TOGGLE_SIMPLIFIED_CHINESE_MODE: - session_config_->simplified_chinese_mode = - !session_config_->simplified_chinese_mode; - ResetConfig(); - break; - case keymap::DO_NOTHING_WITH_CONSUME: - break; - case keymap::DO_NOTHING_WITHOUT_CONSUME: - consumed = false; - break; - default: - LOG(ERROR) << "Should not reach here. command = " << key_command; - consumed = false; - break; - } - - // Turn on Pinyin mode from English or Punctuation mode. - if ((conversion_mode_ == ENGLISH || conversion_mode_ == PUNCTUATION) && - !converter_->IsConverterActive() && - MaybePinyinModeCommandForKeyCommand(key_command)) { - next_conversion_mode_ = PINYIN; - } - - return consumed; -} - -bool Session::ProcessCommand(commands::Command *command) { - DCHECK(command); - - if (!command->input().has_command()) { - return false; - } - - const commands::SessionCommand &session_command = command->input().command(); - - bool consumed = true; - - switch (session_command.type()) { - case commands::SessionCommand::REVERT: - case commands::SessionCommand::RESET_CONTEXT: - ResetContext(); - break; - case commands::SessionCommand::SUBMIT: - converter_->Commit(); - break; - case commands::SessionCommand::SELECT_CANDIDATE: - DCHECK(session_command.has_id()); - converter_->SelectCandidateOnPage(session_command.id()); - break; - case commands::SessionCommand::SEND_LANGUAGE_BAR_COMMAND: - HandleLanguageBarCommand(session_command); - break; - default: - // Does nothing. - DLOG(ERROR) << "Unexpected Session Command:" << command->DebugString(); - consumed = false; - break; - } - - // Turn on Pinyin mode from English or Punctuation mode. - if ((conversion_mode_ == ENGLISH || conversion_mode_ == PUNCTUATION) && - !converter_->IsConverterActive() && - MaybePinyinModeCommandForSessionCommand(session_command)) { - next_conversion_mode_ = PINYIN; - } - - return consumed; -} - -void Session::ResetContext() { - converter_->Clear(); -} - -void Session::ResetConfig() { - converter_->ReloadConfig(); - last_config_updated_ = Util::GetTime(); -} - -void Session::SwitchConversionMode(ConversionMode mode) { - if (mode == conversion_mode_) { - return; - } - - conversion_mode_ = mode; - next_conversion_mode_ = mode; - - switch (conversion_mode_) { - case PINYIN: - keymap_ = keymap::KeymapFactory::GetKeymap(keymap::PINYIN); - break; - case DIRECT: - keymap_ = keymap::KeymapFactory::GetKeymap(keymap::DIRECT); - break; - case ENGLISH: - keymap_ = keymap::KeymapFactory::GetKeymap(keymap::ENGLISH); - break; - case PUNCTUATION: - keymap_ = keymap::KeymapFactory::GetKeymap(keymap::PUNCTUATION); - break; - default: - LOG(ERROR) << "Should NOT reach here. 
Set a fallback context"; - conversion_mode_ = PINYIN; - keymap_ = keymap::KeymapFactory::GetKeymap(keymap::PINYIN); - break; - } - - converter_->SwitchContext(conversion_mode_); -} - -void Session::HandleLanguageBarCommand( - const commands::SessionCommand &session_command) { - DCHECK(session_command.has_language_bar_command_id()); - - switch (session_command.language_bar_command_id()) { - case commands::SessionCommand::TOGGLE_PINYIN_CHINESE_MODE: - if (conversion_mode_ == DIRECT) { - SwitchConversionMode(PINYIN); - } else { - SwitchConversionMode(DIRECT); - } - break; - case commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_WORD_MODE: - session_config_->full_width_word_mode = - !session_config_->full_width_word_mode; - break; - case commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_PUNCTUATION_MODE: - session_config_->full_width_punctuation_mode = - !session_config_->full_width_punctuation_mode; - break; - case commands::SessionCommand::TOGGLE_PINYIN_SIMPLIFIED_CHINESE_MODE: - session_config_->simplified_chinese_mode = - !session_config_->simplified_chinese_mode; - break; - default: - LOG(ERROR) << "Unknown session request. Should NOT reach here."; - break; - } - - ResetConfig(); -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/session.h mozc-1.11.1522.102/languages/pinyin/session.h --- mozc-1.11.1502.102/languages/pinyin/session.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,114 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
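(Illustrative aside, not part of the patch.) The comment block at the top of the deleted session.cc describes the AutoCommit sequence: commit the focused candidate of the active context, switch to the punctuation context, insert the key that triggered AutoCommit, and concatenate both commit strings into a single result. The sketch below shows only that control flow; the step order follows the deleted code, but the Converter type and its methods are assumptions made for illustration, not mozc's SessionConverter API.

// Editorial sketch only -- not part of the deleted mozc sources above.
#include <iostream>
#include <string>

// Stand-in for the deleted SessionConverter; only the AutoCommit-relevant
// pieces are modeled.
struct Converter {
  std::string pending;  // text the active context would commit

  // Step 1: commit whatever the active context currently holds.
  std::string CommitCurrentContext() {
    std::string out;
    out.swap(pending);
    return out;
  }

  // Steps 2-4: switch to the punctuation context, insert the trigger key,
  // and commit its full-width form ('!' -> "！" in the deleted table).
  std::string CommitThroughPunctuationContext(char key) {
    return (key == '!') ? "\xEF\xBC\x81" : std::string(1, key);
  }
};

int main() {
  Converter converter;
  converter.pending = "\xE4\xBD\xA0\xE5\xA5\xBD";  // "你好", already composed

  const std::string committed = converter.CommitCurrentContext();
  const std::string punctuation =
      converter.CommitThroughPunctuationContext('!');

  // Step 5: the result carries the concatenation of both commit strings.
  std::cout << committed + punctuation << std::endl;  // 你好！
  return 0;
}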
- -#ifndef MOZC_LANGUAGES_PINYIN_SESSION_H_ -#define MOZC_LANGUAGES_PINYIN_SESSION_H_ - -#include -#include - -#include "base/port.h" -#include "languages/pinyin/pinyin_constant.h" -#include "session/commands.pb.h" -#include "session/session_interface.h" - -namespace mozc { - -#ifdef OS_CHROMEOS -namespace config { -class PinyinConfig; -} // namespace config -#endif // OS_CHROMEOS - -namespace pinyin { -namespace keymap { -class KeymapInterface; -} // namespace keymap - -class SessionConverterInterface; -struct SessionConfig; - -class Session : public mozc::session::SessionInterface { - public: - Session(); - virtual ~Session(); - - virtual bool SendKey(commands::Command *command); - // Checks if the input key event will be consumed by the session. - virtual bool TestSendKey(commands::Command *command); - virtual bool SendCommand(commands::Command *command); - virtual void ReloadConfig(); - - // Sets client capability for this session. Used by unittest. - // TODO(hsumita): rename this function to set_client_capability_for_unittest - virtual void set_client_capability(const commands::Capability &capability); - virtual void set_application_info( - const commands::ApplicationInfo &application_info); - virtual const commands::ApplicationInfo &application_info() const; - virtual uint64 create_session_time() const; - // Returns 0 (default value) if no command is executed in this session. - virtual uint64 last_command_time() const; - -#ifdef OS_CHROMEOS - static void UpdateConfig(const config::PinyinConfig &config); -#endif - - private: - friend class PinyinSessionTest; - - bool ProcessKeyEvent(commands::Command *command); - bool ProcessCommand(commands::Command *command); - void ResetContext(); - void ResetConfig(); - // Switch conversion mode. Previous context is cleared. - // We should fill a protocol buffer before we call it if we have some data. - void SwitchConversionMode(ConversionMode mode); - void HandleLanguageBarCommand( - const commands::SessionCommand &session_command); - - std::unique_ptr session_config_; - std::unique_ptr converter_; - const keymap::KeymapInterface *keymap_; - ConversionMode conversion_mode_; - // Stores conversion mode which we should switched to at the end of SendKey() - // or SendCommand() to fill a protocol buffer correctly. - ConversionMode next_conversion_mode_; - bool is_already_commited_; - - uint64 create_session_time_; - commands::ApplicationInfo application_info_; - uint64 last_command_time_; - uint64 last_config_updated_; - - DISALLOW_COPY_AND_ASSIGN(Session); -}; - -} // namespace pinyin -} // namespace mozc -#endif // MOZC_LANGUAGES_PINYIN_SESSION_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/session_config.h mozc-1.11.1522.102/languages/pinyin/session_config.h --- mozc-1.11.1502.102/languages/pinyin/session_config.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_config.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,49 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. 
-// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// Manages session dependent config. -// Each session has an instance of this class. - -#ifndef MOZC_LANGUAGES_PINYIN_SESSION_CONFIG_H_ -#define MOZC_LANGUAGES_PINYIN_SESSION_CONFIG_H_ - -namespace mozc { -namespace pinyin { - -struct SessionConfig { - public: - bool full_width_word_mode; - bool full_width_punctuation_mode; - bool simplified_chinese_mode; -}; - -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_SESSION_CONFIG_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/session_converter.cc mozc-1.11.1522.102/languages/pinyin/session_converter.cc --- mozc-1.11.1502.102/languages/pinyin/session_converter.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_converter.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,422 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
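(Illustrative aside, not part of the patch.) The deleted session_config.h above is a plain struct of three booleans shared between the session and its contexts, and the deleted Session::HandleLanguageBarCommand() simply toggles those flags in place. A minimal sketch of that pattern follows; the field names match the removed struct, while the enum, handler signature, and initial values are assumed for illustration (the removed code initialized the flags from PinyinConfig).

// Editorial sketch only -- not part of the deleted mozc sources above.
#include <iostream>

struct SessionConfig {
  bool full_width_word_mode = false;        // initial values are arbitrary here
  bool full_width_punctuation_mode = true;
  bool simplified_chinese_mode = true;
};

enum class LanguageBarCommand {
  kToggleFullWidthWord,
  kToggleFullWidthPunctuation,
  kToggleSimplifiedChinese,
};

// Stand-in for the deleted Session::HandleLanguageBarCommand().
void HandleLanguageBarCommand(LanguageBarCommand command,
                              SessionConfig *config) {
  switch (command) {
    case LanguageBarCommand::kToggleFullWidthWord:
      config->full_width_word_mode = !config->full_width_word_mode;
      break;
    case LanguageBarCommand::kToggleFullWidthPunctuation:
      config->full_width_punctuation_mode =
          !config->full_width_punctuation_mode;
      break;
    case LanguageBarCommand::kToggleSimplifiedChinese:
      config->simplified_chinese_mode = !config->simplified_chinese_mode;
      break;
  }
}

int main() {
  SessionConfig config;
  HandleLanguageBarCommand(LanguageBarCommand::kToggleSimplifiedChinese,
                           &config);
  std::cout << std::boolalpha << config.simplified_chinese_mode
            << std::endl;  // prints: false
  return 0;
}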
- -#include "languages/pinyin/session_converter.h" - -#include -#include -#include - -#include "base/logging.h" -#include "base/util.h" -#include "languages/pinyin/direct_context.h" -#include "languages/pinyin/english_context.h" -#include "languages/pinyin/pinyin_context.h" -#include "languages/pinyin/pinyin_context_interface.h" -#include "languages/pinyin/punctuation_context.h" -#include "session/commands.pb.h" -#include "session/key_event_util.h" - -namespace mozc { -namespace pinyin { -namespace { -// TODO(hsumita): Calculate this value by the platform-specific APIs. -const size_t kCandidatesPerPage = 5; -} // namespace - -SessionConverter::SessionConverter(const SessionConfig &session_config) - : pinyin_context_(new PinyinContext(session_config)), - direct_context_(new direct::DirectContext(session_config)), - english_context_(new english::EnglishContext(session_config)), - punctuation_context_( - new punctuation::PunctuationContext(session_config)) { - context_ = pinyin_context_.get(); -} - -SessionConverter::~SessionConverter() { -} - -bool SessionConverter::IsConverterActive() const { - return !context_->input_text().empty(); -} - -bool SessionConverter::IsCandidateListVisible() { - return (context_->HasCandidate(0) || - !context_->auxiliary_text().empty()); -} - -bool SessionConverter::IsConversionTextVisible() const { - return !(context_->selected_text().empty() && - context_->conversion_text().empty() && - context_->rest_text().empty()); -} - -bool SessionConverter::Insert(const commands::KeyEvent &key_event) { - if (!key_event.has_key_code()) { - return false; - } - - const uint32 key_code = key_event.key_code(); - const uint32 modifiers = KeyEventUtil::GetModifiers(key_event); - DCHECK(!KeyEventUtil::HasCaps(modifiers)); - - char insert_character = key_code; - if (KeyEventUtil::IsShift(modifiers)) { - if (isupper(key_code)) { - insert_character = tolower(key_code); - } else if (islower(key_code)) { - insert_character = toupper(key_code); - } - } else if (modifiers != 0) { - return false; - } - - const bool result = context_->Insert(insert_character); - if (!context_->commit_text().empty()) { - punctuation_context_->UpdatePreviousCommitText(context_->commit_text()); - } - return result; -} - -void SessionConverter::Clear() { - ClearInternal(); - punctuation_context_->ClearAll(); -} - -void SessionConverter::ClearInternal() { - context_->Clear(); -} - -void SessionConverter::Commit() { - context_->Commit(); - punctuation_context_->UpdatePreviousCommitText(context_->commit_text()); -} - -void SessionConverter::CommitPreedit() { - context_->CommitPreedit(); - punctuation_context_->UpdatePreviousCommitText(context_->commit_text()); -} - -bool SessionConverter::SelectCandidateOnPage(size_t index) { - size_t absolute_index; - if (!GetAbsoluteIndex(index, &absolute_index)) { - return false; - } - return context_->SelectCandidate(absolute_index); -} - -bool SessionConverter::SelectFocusedCandidate() { - if (!context_->HasCandidate(0)) { - context_->Commit(); - return true; - } - return context_->SelectCandidate(context_->focused_candidate_index()); -} - -bool SessionConverter::FocusCandidate(size_t index) { - if (!context_->HasCandidate(index)) { - return false; - } - return context_->FocusCandidate(index); -} - -bool SessionConverter::FocusCandidateOnPage(size_t index) { - size_t absolute_index; - if (!GetAbsoluteIndex(index, &absolute_index)) { - return false; - } - return FocusCandidate(absolute_index); -} - -bool SessionConverter::FocusCandidateNext() { - return 
FocusCandidate(context_->focused_candidate_index() + 1); -} - -bool SessionConverter::FocusCandidateNextPage() { - DCHECK(context_->HasCandidate(0)); - - const size_t current_page = - context_->focused_candidate_index() / kCandidatesPerPage; - const size_t prepared_size = - context_->PrepareCandidates((current_page + 2) * kCandidatesPerPage); - - if (prepared_size <= (current_page + 1) * kCandidatesPerPage) { - return false; - } - - const size_t index = min(prepared_size - 1, - context_->focused_candidate_index() - + kCandidatesPerPage); - return context_->FocusCandidate(index); -} - -bool SessionConverter::FocusCandidatePrev() { - const size_t focused_index = context_->focused_candidate_index(); - if (focused_index == 0) { - return false; - } - return context_->FocusCandidate(focused_index - 1); -} - -bool SessionConverter::FocusCandidatePrevPage() { - if (context_->focused_candidate_index() < kCandidatesPerPage) { - return false; - } - const size_t index = context_->focused_candidate_index() - kCandidatesPerPage; - return context_->FocusCandidate(index); -} - -bool SessionConverter::ClearCandidateFromHistory(size_t index) { - size_t absolute_index; - if (!GetAbsoluteIndex(index, &absolute_index)) { - return false; - } - return context_->ClearCandidateFromHistory(absolute_index); -} - -bool SessionConverter::RemoveCharBefore() { - return context_->RemoveCharBefore(); -} - -bool SessionConverter::RemoveCharAfter() { - return context_->RemoveCharAfter(); -} - -bool SessionConverter::RemoveWordBefore() { - return context_->RemoveWordBefore(); -} - -bool SessionConverter::RemoveWordAfter() { - return context_->RemoveWordAfter(); -} - -bool SessionConverter::MoveCursorRight() { - return context_->MoveCursorRight(); -} - -bool SessionConverter::MoveCursorLeft() { - return context_->MoveCursorLeft(); -} - -bool SessionConverter::MoveCursorRightByWord() { - return context_->MoveCursorRightByWord(); -} - -bool SessionConverter::MoveCursorLeftByWord() { - return context_->MoveCursorLeftByWord(); -} - -bool SessionConverter::MoveCursorToBeginning() { - return context_->MoveCursorToBeginning(); -} - -bool SessionConverter::MoveCursorToEnd() { - return context_->MoveCursorToEnd(); -} - -void SessionConverter::FillOutput(commands::Output *output) { - DCHECK(output); - - if (!context_->commit_text().empty()) { - FillResult(output->mutable_result()); - } - - if (IsConversionTextVisible()) { - FillConversion(output->mutable_preedit()); - } - - if (IsCandidateListVisible()) { - FillCandidates(output->mutable_candidates()); - } -} - -void SessionConverter::PopOutput(commands::Output *output) { - DCHECK(output); - FillOutput(output); - context_->ClearCommitText(); -} - -void SessionConverter::FillConversion(commands::Preedit *preedit) const { - DCHECK(preedit); - DCHECK(IsConversionTextVisible()); - preedit->Clear(); - - const string texts[3] = { - context_->selected_text(), - context_->conversion_text(), - context_->rest_text(), - }; - const size_t kConversionIndex = 1; - - // Add segments - size_t total_length = 0; - for (int i = 0; i < 3; ++i) { - if (texts[i].empty()) { - continue; - } - - commands::Preedit::Segment *segment = preedit->add_segment(); - - if (i == kConversionIndex) { - segment->set_annotation(commands::Preedit::Segment::HIGHLIGHT); - preedit->set_highlighted_position(total_length); - } else { - segment->set_annotation(commands::Preedit::Segment::UNDERLINE); - } - - segment->set_value(texts[i]); - const size_t value_length = Util::CharsLen(texts[i]); - 
segment->set_value_length(value_length); - total_length += value_length; - } - - preedit->set_cursor(Util::CharsLen(context_->selected_text())); -} - -void SessionConverter::FillResult(commands::Result *result) const { - DCHECK(result); - DCHECK(!context_->commit_text().empty()); - result->Clear(); - - result->set_value(context_->commit_text()); - result->set_type(commands::Result::STRING); -} - -void SessionConverter::FillCandidates(commands::Candidates *candidates) { - DCHECK(candidates); - DCHECK(IsCandidateListVisible()); - candidates->Clear(); - - const size_t focused_index = context_->focused_candidate_index(); - const size_t candidates_begin = - focused_index - focused_index % kCandidatesPerPage; - const size_t candidates_end = - context_->PrepareCandidates(candidates_begin + kCandidatesPerPage); - const size_t candidates_size = candidates_end - candidates_begin; - - // Currently we cannot get the correct size of the all candidates with a good - // performance, and commands::Candidates::size is not used unless - // commands::Candidates::Footer::index_visible is true on ibus environment. - // So it is ok to set a dummy value. - // TODO(hsumita): Makes commands::Candidates::size optional and removes these - // statements. - const size_t kDummyCandidatesSize = 0xFFFFFFFF; - candidates->set_size(kDummyCandidatesSize); - - if (candidates_size > 0) { - for (size_t i = candidates_begin; i < candidates_end; ++i) { - commands::Candidates::Candidate *new_candidate = - candidates->add_candidate(); - new_candidate->set_id(i); - new_candidate->set_index(i); - Candidate value; - const bool result = context_->GetCandidate(i, &value); - DCHECK(result); - new_candidate->set_value(value.text); - } - - { - const string kDigits = "1234567890"; - - // Logic here is copied from SessionOutput::FillShortcuts. We - // can't reuse this at this time because SessionOutput depends on - // converter/segments.cc, which depends on the Japanese language - // model. - // TODO(hsumita): extract FillShortcuts() method to another library. 
- const size_t num_loop = candidates_end - candidates_begin; - for (size_t i = 0; i < num_loop; ++i) { - const string shortcut = kDigits.substr(i, 1); - candidates->mutable_candidate(i)->mutable_annotation()-> - set_shortcut(shortcut); - } - } - - candidates->set_focused_index(context_->focused_candidate_index()); - } - - if (!context_->auxiliary_text().empty()) { - commands::Footer *footer = candidates->mutable_footer(); - footer->set_label(context_->auxiliary_text()); - footer->set_index_visible(false); - } - - candidates->set_direction(commands::Candidates::HORIZONTAL); - candidates->set_display_type(commands::MAIN); - candidates->set_position(Util::CharsLen(context_->selected_text())); -} - -bool SessionConverter::GetAbsoluteIndex(size_t relative_index, - size_t *absolute_index) { - DCHECK(absolute_index); - - if (relative_index >= kCandidatesPerPage) { - return false; - } - - const size_t focused_index = context_->focused_candidate_index(); - const size_t current_page = focused_index / kCandidatesPerPage; - const size_t index = current_page * kCandidatesPerPage + relative_index; - - if (!context_->HasCandidate(index)) { - return false; - } - - *absolute_index = index; - return true; -} - -void SessionConverter::ReloadConfig() { - context_->ReloadConfig(); -} - -void SessionConverter::SwitchContext(ConversionMode mode) { - ClearInternal(); - - switch (mode) { - case PINYIN: - context_ = pinyin_context_.get(); - break; - case DIRECT: - context_ = direct_context_.get(); - break; - case ENGLISH: - context_ = english_context_.get(); - break; - case PUNCTUATION: - context_ = punctuation_context_.get(); - break; - default: - LOG(ERROR) << "Should NOT reach here. Fallback to Pinyin context."; - context_ = pinyin_context_.get(); - break; - } -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/session_converter.h mozc-1.11.1522.102/languages/pinyin/session_converter.h --- mozc-1.11.1502.102/languages/pinyin/session_converter.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_converter.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,139 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_H_ -#define MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_H_ - -#include "languages/pinyin/session_converter_interface.h" - -#include -#include "base/scoped_ptr.h" -#include "languages/pinyin/punctuation_context.h" - -namespace mozc { -namespace commands { -class Candidates; -class KeyEvent; -class Output; -class Preedit; -class Result; -} // namespace commands - -namespace pinyin { -namespace punctuation { -class PunctuationContext; -} // namespace punctuation - -class PinyinContextInterface; -class SessionConfig; - -class SessionConverter : public SessionConverterInterface { - public: - explicit SessionConverter(const SessionConfig &session_config); - virtual ~SessionConverter(); - - bool IsConverterActive() const; - bool Insert(const commands::KeyEvent &key_event); - void Clear(); - void Commit(); - void CommitPreedit(); - - bool SelectCandidateOnPage(size_t index); - bool SelectFocusedCandidate(); - - bool FocusCandidate(size_t index); - bool FocusCandidateOnPage(size_t index); - bool FocusCandidateNext(); - bool FocusCandidateNextPage(); - bool FocusCandidatePrev(); - bool FocusCandidatePrevPage(); - - bool ClearCandidateFromHistory(size_t index); - - bool MoveCursorRight(); - bool MoveCursorLeft(); - bool MoveCursorRightByWord(); - bool MoveCursorLeftByWord(); - bool MoveCursorToBeginning(); - bool MoveCursorToEnd(); - - bool RemoveCharBefore(); - bool RemoveCharAfter(); - bool RemoveWordBefore(); - bool RemoveWordAfter(); - - // These methods sets dummy value into commands::Candidates::size to avoid - // performance issue. http://b/6340948 - void FillOutput(commands::Output *output); - void PopOutput(commands::Output *output); - - void ReloadConfig(); - void SwitchContext(ConversionMode mode); - - private: - friend class PinyinSessionTest; - friend class SessionConverterTest; - - // Clears the context expect for some states on PunctuationContext. - void ClearInternal(); - - // IsCandidateListVisible doesn't have const qualifier because - // PinyinContextInterface may generate candidates lazily. - bool IsCandidateListVisible(); - bool IsConversionTextVisible() const; - - // Fills data. We may need to update data before call these. - void FillConversion(commands::Preedit *preedit) const; - void FillResult(commands::Result *result) const; - // FillCandidates doesn't have const qualifier because PinyinContextInterface - // may generate candidates lazily. - void FillCandidates(commands::Candidates *candidates); - - // Converts relative index to absolute index. - // Absolute index is an index from the beginning of candidates, and - // relative index is an index from the beginning of a candidates page. - bool GetAbsoluteIndex(size_t relative_index, size_t *absolute_index); - - scoped_ptr pinyin_context_; - scoped_ptr direct_context_; - scoped_ptr english_context_; - // The type of |punctuation_context_| is not PinyinContextInterface since - // we use PunctuationContext specific methods. 
- scoped_ptr punctuation_context_; - // |context_| holds the pointer of current context (pinyin, direct, english - // or punctuation), and does NOT take a ownership. - PinyinContextInterface *context_; - - DISALLOW_COPY_AND_ASSIGN(SessionConverter); -}; - -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/session_converter_interface.h mozc-1.11.1522.102/languages/pinyin/session_converter_interface.h --- mozc-1.11.1502.102/languages/pinyin/session_converter_interface.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_converter_interface.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,111 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#ifndef MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_INTERFACE_H_ -#define MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_INTERFACE_H_ - -#include "base/base.h" -#include "languages/pinyin/pinyin_constant.h" - -namespace mozc { -namespace commands { -class KeyEvent; -class Output; -class Result; -} // namespace commands - -namespace pinyin { -class PinyinContextInterface; - -class SessionConverterInterface { - public: - SessionConverterInterface() {} - virtual ~SessionConverterInterface() {} - - // Indicates if the conversion session is active or not. In general, - // Insert function makes it active, Reset, Commit and CommitPreedit - // functions make it deactive, and SelectCandidate may make it diactive. - virtual bool IsConverterActive() const = 0; - - virtual bool Insert(const commands::KeyEvent &key_event) = 0; - - virtual void Clear() = 0; - - // Fixes the conversion with the current status. - // If there are unselected text (conversion_text and rest_text), - // This function commits it as preedit text. - virtual void Commit() = 0; - - // Commits the preedit string. - virtual void CommitPreedit() = 0; - - // Selects the candidate. - // If all candidates are selected, this function calls Commit(). 
- virtual bool SelectCandidateOnPage(size_t index) = 0; - virtual bool SelectFocusedCandidate() = 0; - - virtual bool FocusCandidate(size_t index) = 0; - virtual bool FocusCandidateOnPage(size_t index) = 0; - virtual bool FocusCandidateNext() = 0; - virtual bool FocusCandidateNextPage() = 0; - virtual bool FocusCandidatePrev() = 0; - virtual bool FocusCandidatePrevPage() = 0; - - virtual bool ClearCandidateFromHistory(size_t index) = 0; - - virtual bool RemoveCharBefore() = 0; - virtual bool RemoveCharAfter() = 0; - virtual bool RemoveWordBefore() = 0; - virtual bool RemoveWordAfter() = 0; - - virtual bool MoveCursorRight() = 0; - virtual bool MoveCursorLeft() = 0; - virtual bool MoveCursorRightByWord() = 0; - virtual bool MoveCursorLeftByWord() = 0; - virtual bool MoveCursorToBeginning() = 0; - virtual bool MoveCursorToEnd() = 0; - - // Fills protocol buffers - // It doesn't have const qualifier because PinyinContextInterface may - // generate candidates lazily. - virtual void FillOutput(commands::Output *output) = 0; - // Fills protocol buffers and updates internal status for a next operation. - virtual void PopOutput(commands::Output *output) = 0; - - virtual void ReloadConfig() = 0; - // Switches the context. - virtual void SwitchContext(ConversionMode mode) = 0; - - private: - DISALLOW_COPY_AND_ASSIGN(SessionConverterInterface); -}; - -} // namespace pinyin -} // namespace mozc -#endif // MOZC_LANGUAGES_PINYIN_SESSION_CONVERTER_INTERFACE_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/session_converter_test.cc mozc-1.11.1522.102/languages/pinyin/session_converter_test.cc --- mozc-1.11.1502.102/languages/pinyin/session_converter_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_converter_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,580 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -#include -#include - -#include "base/util.h" -#include "languages/pinyin/pinyin_constant.h" -#include "languages/pinyin/pinyin_context_interface.h" -#include "languages/pinyin/session_config.h" -#include "languages/pinyin/session_converter.h" -#include "session/commands.pb.h" -#include "testing/base/public/gmock.h" -#include "testing/base/public/gunit.h" - -using ::testing::Invoke; -using ::testing::Return; -using ::testing::ReturnRef; -using ::testing::SetArgPointee; -using ::testing::_; - -namespace mozc { -namespace pinyin { - -namespace { -const char *kCommitText = "\xE4\xBD\xA0\xE5\xA5\xBD"; // "你好" -const char *kInputText = "input"; -const char *kSelectedText = "\xE9\x80\x89\xE5\x87\xBA"; // "选出" -const char *kConversionText = "\xE5\x8F\x98\xE6\x8D\xA2"; // "变换" -const char *kRestText = "\xE6\xAE\x8B\xE4\xBD\x99"; // "残余" -const char *kAuxiliaryText = "auxiliary"; -const char *kCandidateText = "\xE5\x80\x99\xE9\x80\x89" "%d"; // "候选" -const size_t kFocusedCandidateIndex = 6; -const size_t kCandidatesSize = 7; -const size_t kPageSize = 5; - -class MockContext : public PinyinContextInterface { - public: - MockContext() : candidates_size_(0) {} - virtual ~MockContext() {} - - MOCK_METHOD1(Insert, bool(char ch)); - MOCK_METHOD0(Commit, void()); - MOCK_METHOD0(CommitPreedit, void()); - MOCK_METHOD0(Clear, void()); - MOCK_METHOD0(ClearCommitText, void()); - - MOCK_METHOD0(MoveCursorRight, bool()); - MOCK_METHOD0(MoveCursorLeft, bool()); - MOCK_METHOD0(MoveCursorRightByWord, bool()); - MOCK_METHOD0(MoveCursorLeftByWord, bool()); - MOCK_METHOD0(MoveCursorToBeginning, bool()); - MOCK_METHOD0(MoveCursorToEnd, bool()); - - MOCK_METHOD1(SelectCandidate, bool(size_t index)); - MOCK_METHOD1(FocusCandidate, bool(size_t index)); - MOCK_METHOD0(FocusCandidatePrev, bool()); - MOCK_METHOD0(FocusCandidateNext, bool()); - MOCK_METHOD1(ClearCandidateFromHistory, bool(size_t index)); - - MOCK_METHOD0(RemoveCharBefore, bool()); - MOCK_METHOD0(RemoveCharAfter, bool()); - MOCK_METHOD0(RemoveWordBefore, bool()); - MOCK_METHOD0(RemoveWordAfter, bool()); - - MOCK_METHOD0(ReloadConfig, void()); - MOCK_METHOD1(SwitchContext, void(int mode)); - - MOCK_CONST_METHOD0(commit_text, const string &()); - MOCK_CONST_METHOD0(input_text, const string &()); - MOCK_CONST_METHOD0(selected_text, const string &()); - MOCK_CONST_METHOD0(conversion_text, const string &()); - MOCK_CONST_METHOD0(rest_text, const string &()); - MOCK_CONST_METHOD0(auxiliary_text, const string &()); - - MOCK_CONST_METHOD0(cursor, size_t()); - MOCK_CONST_METHOD0(focused_candidate_index, size_t()); - - virtual bool HasCandidate(size_t index) { - return index < candidates_size_; - } - - virtual bool GetCandidate(size_t index, Candidate *candidate) { - if (!HasCandidate(index)) { - return false; - } - candidate->text = Util::StringPrintf(kCandidateText, index); - return true; - } - - virtual size_t PrepareCandidates(size_t required_size) { - return min(candidates_size_, required_size); - } - - void set_candidates_size(size_t size) { - candidates_size_ = size; - } - - private: - size_t candidates_size_; -}; - -class MockPunctuationContext : public punctuation::PunctuationContext { - public: - explicit MockPunctuationContext(const SessionConfig &session_config) - : punctuation::PunctuationContext(session_config) {} - virtual ~MockPunctuationContext() {} - - MOCK_METHOD0(Clear, void()); - MOCK_METHOD0(ClearAll, void()); - MOCK_METHOD1(UpdatePreviousCommitText, void(const string &text)); -}; -} // namespace - -class SessionConverterTest : public 
testing::Test { - protected: - virtual void SetUp() { - converter_.reset(new SessionConverter(session_config_)); - - // MockContext objects convert a ASCII-sequence to full-width and upper - // case. - context_ = new MockContext; - punctuation_context_ = new MockPunctuationContext(session_config_); - - converter_->pinyin_context_.reset(context_); - converter_->direct_context_.reset(new MockContext); - converter_->english_context_.reset(new MockContext); - converter_->punctuation_context_.reset(punctuation_context_); - converter_->context_ = context_; - - ClearMockVariables(); - } - - virtual void TearDown() { - } - - void InsertCharacterChars(const string &chars) { - for (size_t i = 0; i < chars.size(); ++i) { - commands::KeyEvent key_event; - key_event.set_key_code(chars[i]); - converter_->Insert(key_event); - } - } - - void ClearMockVariables() { - candidates_.clear(); - input_text_.clear(); - selected_text_.clear(); - conversion_text_.clear(); - rest_text_.clear(); - auxiliary_text_.clear(); - commit_text_.clear(); - } - - void SetUpMockForFillOutput(bool has_input, bool has_result, - bool has_conversion, bool has_candidates, - bool has_auxiliary_text) { - ClearMockVariables(); - - if (has_result) { - commit_text_.assign(kCommitText); - } - EXPECT_CALL(*context_, commit_text()). - WillRepeatedly(ReturnRef(commit_text_)); - - if (has_input) { - input_text_.assign(kInputText); - } - - if (has_conversion) { - selected_text_.assign(kSelectedText); - conversion_text_.assign(kConversionText); - rest_text_.assign(kRestText); - } - - EXPECT_CALL(*context_, input_text()).WillRepeatedly(ReturnRef(input_text_)); - EXPECT_CALL(*context_, selected_text()) - .WillRepeatedly(ReturnRef(selected_text_)); - EXPECT_CALL(*context_, conversion_text()) - .WillRepeatedly(ReturnRef(conversion_text_)); - EXPECT_CALL(*context_, rest_text()).WillRepeatedly(ReturnRef(rest_text_)); - - if (has_candidates) { - context_->set_candidates_size(kCandidatesSize); - EXPECT_CALL(*context_, focused_candidate_index()) - .WillRepeatedly(Return(kFocusedCandidateIndex)); - } else { - context_->set_candidates_size(0); - EXPECT_CALL(*context_, focused_candidate_index()) - .WillRepeatedly(Return(0)); - } - - if (has_auxiliary_text) { - auxiliary_text_.assign(kAuxiliaryText); - } - EXPECT_CALL(*context_, auxiliary_text()) - .WillRepeatedly(ReturnRef(auxiliary_text_)); - } - - void CheckOutput(const commands::Output &output, - bool has_input, bool has_result, bool has_conversion, - bool has_candidates, bool has_auxiliary_text) { - // Currently has_input itself does not make sense on this method. 
- - if (has_result) { - ASSERT_TRUE(output.has_result()); - EXPECT_EQ(kCommitText, output.result().value()); - EXPECT_EQ(commands::Result::STRING, output.result().type()); - } else { - EXPECT_FALSE(output.has_result()); - } - - if (has_conversion) { - ASSERT_TRUE(output.has_preedit()); - const commands::Preedit &preedit = output.preedit(); - ASSERT_EQ(3, preedit.segment_size()); - - EXPECT_EQ(selected_text_, preedit.segment(0).value()); - EXPECT_EQ(Util::CharsLen(selected_text_), - preedit.segment(0).value_length()); - EXPECT_EQ(conversion_text_, preedit.segment(1).value()); - EXPECT_EQ(Util::CharsLen(conversion_text_), - preedit.segment(1).value_length()); - EXPECT_EQ(rest_text_, preedit.segment(2).value()); - EXPECT_EQ(Util::CharsLen(rest_text_), preedit.segment(2).value_length()); - - EXPECT_EQ(commands::Preedit::Segment::UNDERLINE, - preedit.segment(0).annotation()); - EXPECT_EQ(commands::Preedit::Segment::HIGHLIGHT, - preedit.segment(1).annotation()); - EXPECT_EQ(commands::Preedit::Segment::UNDERLINE, - preedit.segment(2).annotation()); - EXPECT_EQ(Util::CharsLen(kSelectedText), preedit.highlighted_position()); - EXPECT_EQ(Util::CharsLen(kSelectedText), preedit.cursor()); - } else { - EXPECT_FALSE(output.has_preedit()); - } - - if (has_candidates || has_auxiliary_text) { - ASSERT_TRUE(output.has_candidates()); - const commands::Candidates &candidates = output.candidates(); - - EXPECT_EQ(commands::Candidates::HORIZONTAL, candidates.direction()); - EXPECT_EQ(commands::MAIN, candidates.display_type()); - EXPECT_EQ(Util::CharsLen(selected_text_), candidates.position()); - - // It is very high cost to get a accurate candidates size, so we set - // a non-zero dummy value on SessionConverter. - const size_t kDummyCandidatesSize = 0xFFFFFFFF; - EXPECT_EQ(kDummyCandidatesSize, candidates.size()); - - if (has_candidates) { - EXPECT_EQ(kFocusedCandidateIndex, candidates.focused_index()); - EXPECT_EQ(kCandidatesSize - kPageSize, candidates.candidate_size()); - EXPECT_EQ(kDummyCandidatesSize, candidates.size()); - - for (size_t i = 0; i < kCandidatesSize - kPageSize; ++i) { - const commands::Candidates::Candidate &c = candidates.candidate(i); - const size_t candidate_index = i + kPageSize; - EXPECT_EQ(candidate_index, c.id()); - EXPECT_EQ(candidate_index, c.index()); - EXPECT_EQ(Util::StringPrintf(kCandidateText, candidate_index), - c.value()); - EXPECT_TRUE(c.has_annotation()); - EXPECT_EQ(Util::StringPrintf("%d", i + 1), c.annotation().shortcut()); - } - } else { - EXPECT_FALSE(candidates.has_focused_index()); - EXPECT_EQ(0, candidates.candidate_size()); - } - - if (has_auxiliary_text) { - ASSERT_TRUE(candidates.has_footer()); - const commands::Footer &footer = candidates.footer(); - EXPECT_EQ(kAuxiliaryText, footer.label()); - ASSERT_TRUE(footer.has_index_visible()); - EXPECT_FALSE(footer.index_visible()); - } else { - EXPECT_FALSE(candidates.has_footer()); - } - } else { - EXPECT_FALSE(output.has_candidates()); - } - } - - PinyinContextInterface *GetCurrentContext() { - return converter_->context_; - } - - MockContext* context_; - MockPunctuationContext *punctuation_context_; - scoped_ptr converter_; - - // variables for mock - vector candidates_; - SessionConfig session_config_; - string input_text_; - string selected_text_; - string conversion_text_; - string rest_text_; - string auxiliary_text_; - string commit_text_; -}; - -TEST_F(SessionConverterTest, IsConverterActive) { - string input_text; - EXPECT_CALL(*context_, input_text()).WillOnce(ReturnRef(input_text)); - 
EXPECT_FALSE(converter_->IsConverterActive()); - input_text = "a"; - EXPECT_CALL(*context_, input_text()).WillOnce(ReturnRef(input_text)); - EXPECT_TRUE(converter_->IsConverterActive()); -} - -TEST_F(SessionConverterTest, Insert) { - const string commit_text; - EXPECT_CALL(*context_, commit_text()).WillRepeatedly(ReturnRef(commit_text)); - - commands::KeyEvent key_event; - key_event.set_key_code('a'); - EXPECT_CALL(*context_, Insert('a')).WillOnce(Return(true)); - EXPECT_TRUE(converter_->Insert(key_event)); - EXPECT_CALL(*context_, Insert('a')).WillOnce(Return(false)); - EXPECT_FALSE(converter_->Insert(key_event)); - - key_event.add_modifier_keys(commands::KeyEvent::SHIFT); - EXPECT_CALL(*context_, Insert('A')).WillOnce(Return(true)); - EXPECT_TRUE(converter_->Insert(key_event)); - EXPECT_CALL(*context_, Insert('A')).WillOnce(Return(false)); - EXPECT_FALSE(converter_->Insert(key_event)); - - key_event.add_modifier_keys(commands::KeyEvent::CTRL); - EXPECT_CALL(*context_, Insert(_)).Times(0); - EXPECT_FALSE(converter_->Insert(key_event)); -} - -TEST_F(SessionConverterTest, Clear) { - EXPECT_CALL(*context_, Clear()).Times(1); - EXPECT_CALL(*punctuation_context_, ClearAll()).Times(1); - converter_->Clear(); -} - -TEST_F(SessionConverterTest, Commit) { - const string kText = kCommitText; - - EXPECT_CALL(*context_, Commit()).Times(1); - EXPECT_CALL(*context_, commit_text()).WillOnce(ReturnRef(kText)); - EXPECT_CALL(*punctuation_context_, - UpdatePreviousCommitText(kText)).Times(1); - converter_->Commit(); - - EXPECT_CALL(*context_, CommitPreedit()).Times(1); - EXPECT_CALL(*context_, commit_text()).WillOnce(ReturnRef(kText)); - EXPECT_CALL(*punctuation_context_, - UpdatePreviousCommitText(kText)).Times(1); - converter_->CommitPreedit(); -} - -TEST_F(SessionConverterTest, Remove) { - EXPECT_CALL(*context_, RemoveCharBefore()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->RemoveCharBefore()); - EXPECT_CALL(*context_, RemoveCharBefore()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->RemoveCharBefore()); - - EXPECT_CALL(*context_, RemoveCharAfter()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->RemoveCharAfter()); - EXPECT_CALL(*context_, RemoveCharAfter()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->RemoveCharAfter()); - - EXPECT_CALL(*context_, RemoveWordBefore()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->RemoveWordBefore()); - EXPECT_CALL(*context_, RemoveWordBefore()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->RemoveWordBefore()); - - EXPECT_CALL(*context_, RemoveWordAfter()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->RemoveWordAfter()); - EXPECT_CALL(*context_, RemoveWordAfter()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->RemoveWordAfter()); -} - -TEST_F(SessionConverterTest, MoveCursor) { - EXPECT_CALL(*context_, MoveCursorLeft()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorLeft()); - EXPECT_CALL(*context_, MoveCursorLeft()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorLeft()); - - EXPECT_CALL(*context_, MoveCursorRight()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorRight()); - EXPECT_CALL(*context_, MoveCursorRight()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorRight()); - - EXPECT_CALL(*context_, MoveCursorLeftByWord()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorLeftByWord()); - EXPECT_CALL(*context_, MoveCursorLeftByWord()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorLeftByWord()); - - EXPECT_CALL(*context_, 
MoveCursorRightByWord()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorRightByWord()); - EXPECT_CALL(*context_, MoveCursorRightByWord()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorRightByWord()); - - EXPECT_CALL(*context_, MoveCursorToBeginning()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorToBeginning()); - EXPECT_CALL(*context_, MoveCursorToBeginning()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorToBeginning()); - - EXPECT_CALL(*context_, MoveCursorToEnd()).WillOnce(Return(true)); - EXPECT_TRUE(converter_->MoveCursorToEnd()); - EXPECT_CALL(*context_, MoveCursorToEnd()).WillOnce(Return(false)); - EXPECT_FALSE(converter_->MoveCursorToEnd()); -} - -TEST_F(SessionConverterTest, SelectCandidate) { - context_->set_candidates_size(8); - EXPECT_CALL(*context_, focused_candidate_index()) - .WillRepeatedly(Return(6)); - - EXPECT_CALL(*context_, SelectCandidate(7)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->SelectCandidateOnPage(2)); - EXPECT_CALL(*context_, SelectCandidate(_)).Times(0); - EXPECT_FALSE(converter_->SelectCandidateOnPage(4)); - - EXPECT_CALL(*context_, SelectCandidate(6)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->SelectFocusedCandidate()); -} - -TEST_F(SessionConverterTest, FocusCandidate) { - context_->set_candidates_size(8); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - - EXPECT_CALL(*context_, FocusCandidate(1)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidate(1)); - EXPECT_CALL(*context_, FocusCandidate(_)).Times(0); - EXPECT_FALSE(converter_->FocusCandidate(10)); - - EXPECT_CALL(*context_, FocusCandidate(7)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidateOnPage(2)); - EXPECT_CALL(*context_, FocusCandidate(_)).Times(0); - EXPECT_FALSE(converter_->FocusCandidateOnPage(4)); - - EXPECT_CALL(*context_, FocusCandidate(7)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidateNext()); - EXPECT_CALL(*context_, FocusCandidate(7)).WillOnce(Return(false)); - EXPECT_FALSE(converter_->FocusCandidateNext()); - - EXPECT_CALL(*context_, FocusCandidate(5)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidatePrev()); - EXPECT_CALL(*context_, FocusCandidate(5)).WillOnce(Return(false)); - EXPECT_FALSE(converter_->FocusCandidatePrev()); - - // FocusCandidateNextPage - - context_->set_candidates_size(10); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - EXPECT_CALL(*context_, FocusCandidate(_)).Times(0); - EXPECT_FALSE(converter_->FocusCandidateNextPage()); - - context_->set_candidates_size(11); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - EXPECT_CALL(*context_, FocusCandidate(10)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidateNextPage()); - EXPECT_CALL(*context_, FocusCandidate(10)).WillOnce(Return(false)); - EXPECT_FALSE(converter_->FocusCandidateNextPage()); - - context_->set_candidates_size(13); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - EXPECT_CALL(*context_, FocusCandidate(11)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidateNextPage()); - EXPECT_CALL(*context_, FocusCandidate(11)).WillOnce(Return(false)); - EXPECT_FALSE(converter_->FocusCandidateNextPage()); - - // FocusCandidatePrevPage - - context_->set_candidates_size(4); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(3)); - EXPECT_CALL(*context_, FocusCandidate(_)).Times(0); - 
EXPECT_FALSE(converter_->FocusCandidatePrevPage()); - - context_->set_candidates_size(8); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - EXPECT_CALL(*context_, FocusCandidate(1)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->FocusCandidatePrevPage()); - EXPECT_CALL(*context_, FocusCandidate(1)).WillOnce(Return(false)); - EXPECT_FALSE(converter_->FocusCandidatePrevPage()); -} - -TEST_F(SessionConverterTest, ClearCandidateFromHistory) { - context_->set_candidates_size(8); - EXPECT_CALL(*context_, focused_candidate_index()).WillRepeatedly(Return(6)); - - EXPECT_CALL(*context_, ClearCandidateFromHistory(7)).WillOnce(Return(true)); - EXPECT_TRUE(converter_->ClearCandidateFromHistory(2)); - EXPECT_CALL(*context_, ClearCandidateFromHistory(_)).Times(0); - EXPECT_FALSE(converter_->ClearCandidateFromHistory(10)); -} - -TEST_F(SessionConverterTest, FillOutputAndPopOutput) { - // Has Input, Result, Conversion, Candidates, Auxiliary text or not. - const bool kStates[][5] = { - {false, false, false, false, false}, // Initial state - {false, true, false, false, false}, // Commited state - {true, false, true, true, true}, // Conversion state - {true, false, false, false, true}, // Auxiliary text only (English mode) - }; - - for (int i = 0; i < ARRAYSIZE_UNSAFE(kStates); ++i) { - const bool *state = kStates[i]; - - { - SCOPED_TRACE(Util::StringPrintf("FillOutput i=%d", i)); - commands::Output output; - SetUpMockForFillOutput(state[0], state[1], state[2], state[3], state[4]); - converter_->FillOutput(&output); - CheckOutput(output, state[0], state[1], state[2], state[3], state[4]); - } - - { - SCOPED_TRACE(Util::StringPrintf("FillOutput i=%d", i)); - EXPECT_CALL(*context_, ClearCommitText()).Times(1); - commands::Output output; - SetUpMockForFillOutput(state[0], state[1], state[2], state[3], state[4]); - converter_->PopOutput(&output); - CheckOutput(output, state[0], state[1], state[2], state[3], state[4]); - } - } -} - -TEST_F(SessionConverterTest, ReloadConfig) { - EXPECT_CALL(*context_, ReloadConfig()).Times(1); - converter_->ReloadConfig(); -} - -TEST_F(SessionConverterTest, SelectWithNoCandidate_Issue6121366) { - context_->set_candidates_size(0); - EXPECT_CALL(*context_, focused_candidate_index()) - .WillRepeatedly(Return(0)); - - EXPECT_CALL(*context_, Commit()).Times(1); - EXPECT_TRUE(converter_->SelectFocusedCandidate()); -} - -TEST_F(SessionConverterTest, SwitchConversionMode) { - ASSERT_EQ(context_, GetCurrentContext()); - - EXPECT_CALL(*context_, Clear()).Times(1); - converter_->SwitchContext(PUNCTUATION); - EXPECT_EQ(punctuation_context_, GetCurrentContext()); - - EXPECT_CALL(*punctuation_context_, Clear()).Times(1); - converter_->SwitchContext(PINYIN); - EXPECT_EQ(context_, GetCurrentContext()); -} - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/session_test.cc mozc-1.11.1522.102/languages/pinyin/session_test.cc --- mozc-1.11.1502.102/languages/pinyin/session_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/session_test.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,1012 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. 
-// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/session.h" - -#include -#include - -#include "base/scoped_ptr.h" -#include "base/system_util.h" -#include "base/util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/pinyin/pinyin_context_mock.h" -#include "languages/pinyin/session_config.h" -#include "languages/pinyin/session_converter.h" -#include "session/commands.pb.h" -#include "session/key_parser.h" -#include "testing/base/public/gunit.h" - -DECLARE_string(test_tmpdir); - -namespace mozc { -namespace pinyin { -namespace { - -// Wrapper function. It takes only half width ASCII characters. -string ToFullWidthAscii(const string &half_width) { - string full_width; - Util::HalfWidthAsciiToFullWidthAscii(half_width, &full_width); - return full_width; -} - -// This code is based on session_conveter_test::CheckConversion. -void CheckConversion(const commands::Command &command, - const string &selected_text, - const string &conversion_text, - const string &rest_text) { - const commands::Output &output = command.output(); - - // There is no preedit. - if (selected_text.empty() && conversion_text.empty() && rest_text.empty()) { - EXPECT_FALSE(output.has_preedit()); - return; - } - - ASSERT_TRUE(output.has_preedit()); - - const commands::Preedit &conversion = output.preedit(); - const size_t segments_size = (selected_text.empty() ? 0 : 1) - + (conversion_text.empty() ? 0 : 1) - + (rest_text.empty() ? 
0 : 1); - ASSERT_EQ(segments_size, conversion.segment_size()); - - size_t index = 0; - - if (!selected_text.empty()) { - EXPECT_EQ(selected_text, - conversion.segment(index).value()); - EXPECT_EQ(commands::Preedit::Segment::UNDERLINE, - conversion.segment(index).annotation()); - EXPECT_EQ(Util::CharsLen(selected_text), - conversion.segment(index).value_length()); - ++index; - } - - if (!conversion_text.empty()) { - EXPECT_EQ(conversion_text, - conversion.segment(index).value()); - EXPECT_EQ(commands::Preedit::Segment::HIGHLIGHT, - conversion.segment(index).annotation()); - EXPECT_EQ(Util::CharsLen(conversion_text), - conversion.segment(index).value_length()); - EXPECT_EQ(Util::CharsLen(selected_text), conversion.highlighted_position()); - ++index; - } - - if (!rest_text.empty()) { - EXPECT_EQ(rest_text, - conversion.segment(index).value()); - EXPECT_EQ(commands::Preedit::Segment::UNDERLINE, - conversion.segment(index).annotation()); - EXPECT_EQ(Util::CharsLen(rest_text), - conversion.segment(index).value_length()); - ++index; - } - - EXPECT_EQ(Util::CharsLen(selected_text), conversion.cursor()); -} - -void CheckCandidates(const commands::Command &command, - const string &base_text, size_t focused_index) { - // This is simple check. - // Deep comparison test is written on session_converter_test.cc. - - const commands::Output &output = command.output(); - - // There are no candidates. - if (base_text.empty()) { - ASSERT_FALSE(output.has_candidates()); - return; - } - - ASSERT_TRUE(output.has_candidates()); - ASSERT_TRUE(output.has_preedit()); - - // Converts "abc" to ("ABC", "AB", "A"). - - const commands::Candidates &candidates = output.candidates(); - // Okay to check only the candidates.size is 0 or not, because this value - // does not represent actual one due to performance issue - if (base_text.empty()) { - EXPECT_EQ(0, candidates.size()); - } else { - EXPECT_NE(0, candidates.size()); - } - EXPECT_EQ(focused_index, candidates.focused_index()); - - string focused_text = base_text.substr(0, base_text.size() - focused_index); - Util::UpperString(&focused_text); - focused_text.assign(ToFullWidthAscii(focused_text)); - const size_t kCandidatesPerPage = 5; - const size_t focused_index_in_current_page = - focused_index % kCandidatesPerPage; - EXPECT_EQ(focused_text, - candidates.candidate(focused_index_in_current_page).value()); -} - -// This code is based on session_conveter_test::CheckResult. 
-void CheckResult(const commands::Command &command, const string &result_text) { - const commands::Output &output = command.output(); - - if (result_text.empty()) { - EXPECT_FALSE(output.has_result()); - return; - } - - ASSERT_TRUE(output.has_result()); - EXPECT_EQ(result_text, output.result().value()); -} -} // namespace - -class PinyinSessionTest : public testing::Test { - protected: - virtual void SetUp() { - SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - - PyZy::InputContext::init(FLAGS_test_tmpdir, FLAGS_test_tmpdir); - - ResetSession(); - } - - virtual void TearDown() { - PyZy::InputContext::finalize(); - - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::ConfigHandler::SetConfig(config); - } - - bool SendKey(const string &key, - commands::Command *command) { - command->Clear(); - command->mutable_input()->set_type(commands::Input::SEND_KEY); - if (!KeyParser::ParseKey(key, command->mutable_input()->mutable_key())) { - return false; - } - return session_->SendKey(command); - } - - bool SendCommand(const commands::SessionCommand &session_command, - commands::Command *command) { - command->Clear(); - command->mutable_input()->set_type(commands::Input::SEND_COMMAND); - command->mutable_input()->mutable_command()->CopyFrom(session_command); - return session_->SendCommand(command); - } - - bool InsertCharacterChars(const string &chars, commands::Command *command) { - const uint32 kNoModifiers = 0; - for (int i = 0; i < chars.size(); ++i) { - command->Clear(); - commands::KeyEvent *key_event = command->mutable_input()->mutable_key(); - key_event->set_key_code(chars[i]); - key_event->set_modifiers(kNoModifiers); - if (!session_->SendKey(command)) { - return false; - } - } - return true; - } - - void ResetSession() { - session_.reset(new pinyin::Session); - - SessionConverter *converter = - new SessionConverter(*session_->session_config_); - PinyinContextMock *mock = new PinyinContextMock; - converter->pinyin_context_.reset(mock); - converter->context_ = mock; - - session_->converter_.reset(converter); - - config::ConfigHandler::Reload(); - } - - void ResetContext() { - session_->ResetContext(); - } - - ConversionMode GetConversionMode() const { - return session_->conversion_mode_; - } - - ConversionMode GetConversionModeWithSessionInstance( - const Session &session) const { - return session.conversion_mode_; - } - - const SessionConfig *GetSessionConfigWithSessionInstance( - const Session &session) const { - return session.session_config_.get(); - } - -#ifdef OS_CHROMEOS - void SessionUpdateConfig(const config::PinyinConfig &pinyin_config) { - session_->UpdateConfig(pinyin_config); - } -#endif // OS_CHROMEOS - - scoped_ptr session_; -}; - -class SessionConfigTest : public PinyinSessionTest, - public testing::WithParamInterface< - tr1::tuple > { - protected: - void SetUp() { - PinyinSessionTest::SetUp(); - - const tr1::tuple ¶m = GetParam(); - full_width_word_mode_ = tr1::get<0>(param); - full_width_punctuation_mode_ = tr1::get<1>(param); - simplified_chinese_mode_ = tr1::get<2>(param); - } - - bool full_width_word_mode_; - bool full_width_punctuation_mode_; - bool simplified_chinese_mode_; -}; - -TEST_P(SessionConfigTest, Constructor) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config::PinyinConfig *pinyin_config = config.mutable_pinyin_config(); - - 
pinyin_config->set_initial_mode_full_width_word( - full_width_word_mode_); - pinyin_config->set_initial_mode_full_width_punctuation( - full_width_punctuation_mode_); - pinyin_config->set_initial_mode_simplified_chinese( - simplified_chinese_mode_); - - config::ConfigHandler::SetConfig(config); - - Session session; - const SessionConfig *session_config = - GetSessionConfigWithSessionInstance(session); - - EXPECT_EQ(full_width_word_mode_, - session_config->full_width_word_mode); - EXPECT_EQ(full_width_punctuation_mode_, - session_config->full_width_punctuation_mode); - EXPECT_EQ(simplified_chinese_mode_, - session_config->simplified_chinese_mode); -} - -INSTANTIATE_TEST_CASE_P(SessionConfigTest, SessionConfigTest, testing::Combine( - testing::Bool(), testing::Bool(), testing::Bool())); - -TEST_F(PinyinSessionTest, Insert) { - commands::Command command; - - { - SCOPED_TRACE("Insert CAPS A (Converter is NOT active)"); - command.Clear(); - SendKey("CAPS A", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Insert CAPS a"); - command.Clear(); - SendKey("CAPS a", &command); - CheckConversion(command, "", ToFullWidthAscii("A"), ""); - CheckCandidates(command, "A", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Insert a"); - command.Clear(); - SendKey("a", &command); - CheckConversion(command, "", ToFullWidthAscii("AA"), ""); - CheckCandidates(command, "AA", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Insert CAPS A (Converter is active)"); - command.Clear(); - SendKey("CAPS A", &command); - CheckConversion(command, "", ToFullWidthAscii("AA"), ""); - CheckCandidates(command, "AA", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, ResetContext) { - commands::Command command; - - InsertCharacterChars("abc", &command); - { - SCOPED_TRACE("Resets context."); - commands::SessionCommand session_command; - session_command.set_type(commands::SessionCommand::RESET_CONTEXT); - SendCommand(session_command, &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } - - InsertCharacterChars("abc", &command); - { - SCOPED_TRACE("Reverts context."); - commands::SessionCommand session_command; - session_command.set_type(commands::SessionCommand::REVERT); - SendCommand(session_command, &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, Result) { - commands::Command command; - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Commit"); - command.Clear(); - SendKey("Enter", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "abc"); - } - - { - SCOPED_TRACE("Insert characters and check that result is cleared."); - command.Clear(); - InsertCharacterChars("def", &command); - CheckConversion(command, "", ToFullWidthAscii("DEF"), ""); - CheckCandidates(command, "def", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, SelectCandidate) { - commands::Command command; - - { - SCOPED_TRACE("Selects first candidate by space key and commit."); - command.Clear(); - InsertCharacterChars("abc", &command); - - command.Clear(); - SendKey("SPACE", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ToFullWidthAscii("ABC")); - } - - ResetContext(); - command.Clear(); - InsertCharacterChars("abcd", &command); - 
- { - SCOPED_TRACE("Selects a 4th candidate by number."); - command.Clear(); - SendKey("4", &command); - CheckConversion( - command, ToFullWidthAscii("A"), ToFullWidthAscii("BCD"), ""); - CheckCandidates(command, "BCD", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Selects a 3rd candidate by numpad."); - command.Clear(); - SendKey("NUMPAD3", &command); - CheckConversion( - command, ToFullWidthAscii("AB"), ToFullWidthAscii("CD"), ""); - CheckCandidates(command, "CD", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Selects a 2nd candidate with SessionCommand"); - command.Clear(); - commands::SessionCommand session_command; - session_command.set_type(commands::SessionCommand::SELECT_CANDIDATE); - session_command.set_id(1); - SendCommand(session_command, &command); - CheckConversion( - command, ToFullWidthAscii("ABC"), ToFullWidthAscii("D"), ""); - CheckCandidates(command, "D", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, Commit) { - commands::Command command; - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Selects a second candidate."); - command.Clear(); - SendKey("2", &command); - CheckConversion(command, ToFullWidthAscii("AB"), ToFullWidthAscii("C"), ""); - CheckCandidates(command, "C", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Commits."); - command.Clear(); - SendKey("ENTER", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ToFullWidthAscii("AB") + "c"); - } - - ResetContext(); - command.Clear(); - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Commits with Numpad"); - command.Clear(); - SendKey("SEPARATOR", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "abc"); - } - - ResetContext(); - command.Clear(); - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Commits with SessionCommand"); - command.Clear(); - commands::SessionCommand session_command; - session_command.set_type(commands::SessionCommand::SUBMIT); - SendCommand(session_command, &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "abc"); - } -} - -TEST_F(PinyinSessionTest, FocusCandidate) { - commands::Command command; - InsertCharacterChars("abcdef", &command); - - { - SCOPED_TRACE("Focuses candidate next"); - command.Clear(); - SendKey("DOWN", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDE"), "f"); - CheckCandidates(command, "ABCDEF", 1); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Focuses candidate prev"); - command.Clear(); - SendKey("UP", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEF"), ""); - CheckCandidates(command, "ABCDEF", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Focuses candidate next page"); - command.Clear(); - SendKey("PAGEDOWN", &command); - CheckConversion(command, "", ToFullWidthAscii("A"), "bcdef"); - CheckCandidates(command, "ABCDEF", 5); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Focuses candidate prev page"); - command.Clear(); - SendKey("PAGEUP", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEF"), ""); - CheckCandidates(command, "ABCDEF", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Focuses candidate next"); - command.Clear(); - SendKey("DOWN", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDE"), "f"); - CheckCandidates(command, "ABCDEF", 1); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Commits 
focused candidate."); - command.Clear(); - SendKey("SPACE", &command); - CheckConversion( - command, ToFullWidthAscii("ABCDE"), ToFullWidthAscii("F"), ""); - CheckCandidates(command, "F", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, MoveCursor) { - commands::Command command; - InsertCharacterChars("abcdefghijkl", &command); - - { - SCOPED_TRACE("Moves cursor left. abcdefghijk|l"); - command.Clear(); - SendKey("LEFT", &command); - CheckConversion(command, "", "abcdefghijk", "l"); - CheckCandidates(command, "ABCDEFGHIJK", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Moves cursor right. abcdefghijkl|"); - command.Clear(); - SendKey("RIGHT", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEFGHIJKL"), ""); - CheckCandidates(command, "ABCDEFGHIJKL", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Moves cursor to beginning. |abcdefghijkl"); - command.Clear(); - SendKey("HOME", &command); - CheckConversion(command, "", "", "abcdefghijkl"); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Moves cursor right by word. abc|defghijkl"); - command.Clear(); - SendKey("CTRL RIGHT", &command); - CheckConversion(command, "", "abc", "defghijkl"); - CheckCandidates(command, "ABC", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Moves cursor left by word. |abcdefghijkl"); - command.Clear(); - SendKey("CTRL LEFT", &command); - CheckConversion(command, "", "", "abcdefghijkl"); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Moves cursor to end. abcdefghijkl|"); - command.Clear(); - SendKey("END", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEFGHIJKL"), ""); - CheckCandidates(command, "ABCDEFGHIJKL", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, ClearCandidateFromHistory) { - commands::Command command; - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Clear 1st candidate"); - SendKey("CTRL 1", &command); - CheckConversion(command, "", ToFullWidthAscii("AB"), "c"); - CheckCandidates(command, "AB", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, BackSpaceAndDelete) { - commands::Command command; - InsertCharacterChars("abcdefghijkl", &command); - - { - SCOPED_TRACE("BackSpace"); - command.Clear(); - SendKey("BACKSPACE", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEFGHIJK"), ""); - CheckCandidates(command, "ABCDEFGHIJK", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Ctrl BackSpace"); - command.Clear(); - SendKey("CTRL BACKSPACE", &command); - CheckConversion(command, "", ToFullWidthAscii("ABCDEFGHI"), ""); - CheckCandidates(command, "ABCDEFGHI", 0); - CheckResult(command, ""); - } - - command.Clear(); - SendKey("HOME", &command); - - { - SCOPED_TRACE("Delete"); - command.Clear(); - SendKey("DEL", &command); - CheckConversion(command, "", "", "bcdefghi"); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Ctrl Delete"); - command.Clear(); - SendKey("CTRL DEL", &command); - CheckConversion(command, "", "", "efghi"); - CheckCandidates(command, "", 0); - CheckResult(command, ""); - } -} - -#ifdef OS_CHROMEOS -TEST_F(PinyinSessionTest, UpdateConfig) { - config::Config config; - config::ConfigHandler::GetConfig(&config); - const bool original_value = config.pinyin_config().correct_pinyin(); - - config.mutable_pinyin_config()->set_correct_pinyin(!original_value); - Session::UpdateConfig(config.pinyin_config()); - - 
config::ConfigHandler::GetConfig(&config); - EXPECT_NE(original_value, config.pinyin_config().correct_pinyin()); -} -#endif // OS_CHROMEOS - -TEST_F(PinyinSessionTest, SelectWithShift) { - config::Config config; - config::ConfigHandler::GetConfig(&config); - config.mutable_pinyin_config()->set_select_with_shift(true); - config::ConfigHandler::SetConfig(config); - - commands::Command command; - InsertCharacterChars("abc", &command); - - { - SCOPED_TRACE("Selects third candidate with right shift"); - command.Clear(); - SendKey("RightShift", &command); - CheckConversion(command, - ToFullWidthAscii("A"), ToFullWidthAscii("BC"), ""); - CheckCandidates(command, "bc", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Selects second candidate with left shift"); - command.Clear(); - SendKey("LeftShift", &command); - CheckConversion(command, - ToFullWidthAscii("AB"), ToFullWidthAscii("C"), ""); - CheckCandidates(command, "c", 0); - CheckResult(command, ""); - } - - { - SCOPED_TRACE("Selects non-exisntent candidate with left shift and fail"); - command.Clear(); - SendKey("LeftShift", &command); - CheckConversion(command, - ToFullWidthAscii("AB"), ToFullWidthAscii("C"), ""); - CheckCandidates(command, "c", 0); - CheckResult(command, ""); - } - - ResetSession(); - command.Clear(); - InsertCharacterChars("abcdefg", &command); - command.Clear(); - SendKey("PageDown", &command); - - { - SCOPED_TRACE("Selects second candidate with left shift on 2nd page"); - command.Clear(); - SendKey("LeftShift", &command); - CheckConversion(command, - ToFullWidthAscii("A"), ToFullWidthAscii("BCDEFG"), ""); - CheckCandidates(command, "bcdefg", 0); - CheckResult(command, ""); - } -} - -TEST_F(PinyinSessionTest, AutoCommit) { - config::Config config; - config::ConfigHandler::GetConfig(&config); - config.mutable_pinyin_config()->set_auto_commit(true); - config::ConfigHandler::SetConfig(config); - - commands::Command command; - { - SCOPED_TRACE("Inserts abc! and does auto commit"); - command.Clear(); - InsertCharacterChars("abc", &command); - - command.Clear(); - SendKey("!", &command); - EXPECT_TRUE(command.output().consumed()); - - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ToFullWidthAscii("ABC!")); - } - - ResetSession(); - - { - SCOPED_TRACE("Inserts abc, moves cursor, and does auto commit"); - command.Clear(); - InsertCharacterChars("abc", &command); - SendKey("LEFT", &command); - - command.Clear(); - SendKey("!", &command); - EXPECT_TRUE(command.output().consumed()); - - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, ToFullWidthAscii("AB") + "c" + ToFullWidthAscii("!")); - } -} - -TEST_F(PinyinSessionTest, SwitchConversionMode) { - // Real pinyin context will be used in this test because context is reset by - // switching conversion mode. - - commands::Command command; - - { - SCOPED_TRACE("English mode"); - ASSERT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - InsertCharacterChars("vt", &command); - EXPECT_EQ(ENGLISH, GetConversionMode()); - - command.Clear(); - SendKey("Enter", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "t"); - - // Makes converter active. - command.Clear(); - SendKey("n", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - - // Doesn't turn on English mode because converter is active. 
- command.Clear(); - SendKey("v", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - } - - ResetSession(); - - { - SCOPED_TRACE("Direct mode"); - ASSERT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - SendKey("Shift", &command); - EXPECT_EQ(DIRECT, GetConversionMode()); - - SendKey("a", &command); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "a"); - EXPECT_EQ(DIRECT, GetConversionMode()); - - command.Clear(); - SendKey("Shift", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - SendKey("LeftShift", &command); - EXPECT_EQ(DIRECT, GetConversionMode()); - - command.Clear(); - SendKey("RightShift", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - } - - ResetSession(); - - { - SCOPED_TRACE("Punctuation mode"); - ASSERT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - SendKey("`", &command); - EXPECT_EQ(PUNCTUATION, GetConversionMode()); - - command.Clear(); - SendKey("Enter", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - - // Makes converter active. - command.Clear(); - SendKey("n", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - - // Doesn't turn on Punctuation mode because converter is active. - command.Clear(); - SendKey("`", &command); - EXPECT_EQ(PINYIN, GetConversionMode()); - } - - ResetSession(); - - { - SCOPED_TRACE("English mode to Pinyin mode with SessionCommand"); - ASSERT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - InsertCharacterChars("vt", &command); - EXPECT_EQ(ENGLISH, GetConversionMode()); - - command.Clear(); - command.mutable_input()->mutable_command()->set_type( - commands::SessionCommand::SUBMIT); - session_->SendCommand(&command); - EXPECT_EQ(PINYIN, GetConversionMode()); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "t"); - } - - { - SCOPED_TRACE("Punctuation mode to Pinyin mode with SessionCommand"); - ASSERT_EQ(PINYIN, GetConversionMode()); - - command.Clear(); - InsertCharacterChars("`", &command); - EXPECT_EQ(PUNCTUATION, GetConversionMode()); - - command.Clear(); - command.mutable_input()->mutable_command()->set_type( - commands::SessionCommand::SUBMIT); - session_->SendCommand(&command); - EXPECT_EQ(PINYIN, GetConversionMode()); - CheckConversion(command, "", "", ""); - CheckCandidates(command, "", 0); - CheckResult(command, "\xC2\xB7"); // "·" - } -} - -TEST_F(PinyinSessionTest, InitialConversionMode) { - // Real pinyin context will be used in this test. 
- - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - config.mutable_pinyin_config()->set_initial_mode_chinese(true); - config::ConfigHandler::SetConfig(config); - - { - Session session; - EXPECT_EQ(PINYIN, GetConversionModeWithSessionInstance(session)); - } - - config.mutable_pinyin_config()->set_initial_mode_chinese(false); - config::ConfigHandler::SetConfig(config); - - { - Session session; - EXPECT_EQ(DIRECT, GetConversionModeWithSessionInstance(session)); - } -} - -TEST_F(PinyinSessionTest, HandleNumpadOnPunctuationMode_Issue6055961) { - commands::Command command; - - SendKey("`", &command); - ASSERT_EQ(PUNCTUATION, GetConversionMode()); - - command.Clear(); - SendKey("MULTIPLY", &command); - - ASSERT_TRUE(command.output().has_preedit()); - EXPECT_EQ(1, command.output().preedit().segment_size()); -} - -TEST_F(PinyinSessionTest, ToggleSimplifiedChineseMode) { - const SessionConfig *session_config = - GetSessionConfigWithSessionInstance(*session_); - ASSERT_TRUE(session_config->simplified_chinese_mode); - - commands::Command command; - SendKey("Ctrl Shift f", &command); - EXPECT_FALSE(session_config->simplified_chinese_mode); - - command.Clear(); - SendKey("Ctrl Shift F", &command); - EXPECT_TRUE(session_config->simplified_chinese_mode); -} - -TEST_F(PinyinSessionTest, SetSessionRequest) { - const SessionConfig *session_config = - GetSessionConfigWithSessionInstance(*session_); - commands::Command command; - commands::SessionCommand session_command; - session_command.set_type(commands::SessionCommand::SEND_LANGUAGE_BAR_COMMAND); - - // Chinese mode - ASSERT_EQ(PINYIN, GetConversionMode()); - session_command.set_language_bar_command_id( - commands::SessionCommand::TOGGLE_PINYIN_CHINESE_MODE); - command.Clear(); - EXPECT_TRUE(SendCommand(session_command, &command)); - EXPECT_EQ(DIRECT, GetConversionMode()); - command.Clear(); - EXPECT_TRUE(SendCommand(session_command, &command)); - EXPECT_EQ(PINYIN, GetConversionMode()); - - // Full width word mode - ASSERT_FALSE(session_config->full_width_word_mode); - session_command.set_language_bar_command_id( - commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_WORD_MODE); - command.Clear(); - EXPECT_TRUE(SendCommand(session_command, &command)); - EXPECT_TRUE(session_config->full_width_word_mode); - - // Full width punctuation mode - GetSessionConfigWithSessionInstance(*session_); - ASSERT_TRUE(session_config->full_width_punctuation_mode); - session_command.set_language_bar_command_id( - commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_PUNCTUATION_MODE); - command.Clear(); - EXPECT_TRUE(SendCommand(session_command, &command)); - EXPECT_FALSE(session_config->full_width_punctuation_mode); - - // Simplified chinese mode - GetSessionConfigWithSessionInstance(*session_); - ASSERT_TRUE(session_config->simplified_chinese_mode); - session_command.set_language_bar_command_id( - commands::SessionCommand::TOGGLE_PINYIN_SIMPLIFIED_CHINESE_MODE); - command.Clear(); - EXPECT_TRUE(SendCommand(session_command, &command)); - EXPECT_FALSE(session_config->simplified_chinese_mode); -} - -// TODO(hsumita): Implements this test. 
-// TEST_F(PinyinSessionTest, ResetConfig) { -// } - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/config_updater.cc mozc-1.11.1522.102/languages/pinyin/unix/ibus/config_updater.cc --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/config_updater.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/config_updater.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,115 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "languages/pinyin/unix/ibus/config_updater.h" - -#include -#include -#include - -#include "base/singleton.h" -#include "config/config.pb.h" -#include "config/config_handler.h" -#include "languages/pinyin/session.h" -#include "unix/ibus/config_util.h" - -// TODO(hsumita): Add test code. 
- -namespace mozc { -namespace pinyin { -#ifdef OS_CHROMEOS -namespace { -const char kPinyinSectionName[] = "engine/Pinyin"; -} // namespace - -ConfigUpdater::ConfigUpdater() { - // bool values - name_to_field_["CorrectPinyin"] = "correct_pinyin"; - name_to_field_["FuzzyPinyin"] = "fuzzy_pinyin"; - name_to_field_["ShiftSelectCandidate"] = "select_with_shift"; - name_to_field_["MinusEqualPage"] = "paging_with_minus_equal"; - name_to_field_["CommaPeriodPage"] = "paging_with_comma_period"; - name_to_field_["AutoCommit"] = "auto_commit"; - name_to_field_["DoublePinyin"] = "double_pinyin"; - name_to_field_["InitChinese"] = "initial_mode_chinese"; - name_to_field_["InitFull"] = "initial_mode_full_width_word"; - name_to_field_["InitFullPunct"] = "initial_mode_full_width_punctuation"; - name_to_field_["InitSimplifiedChinese"] = "initial_mode_simplified_chinese"; - - // int values - name_to_field_["DoublePinyinSchema"] = "double_pinyin_schema"; -} - -// static -void ConfigUpdater::ConfigValueChanged(IBusConfig *config, - const gchar *section, - const gchar *name, - GVariant *value, - gpointer user_data) { - Singleton::get()->UpdateConfig(section, name, value); -} - -void ConfigUpdater::UpdateConfig(const gchar *section, - const gchar *name, - GVariant *value) { - if (!section || !name || !value) { - return; - } - - if (g_strcmp0(section, kPinyinSectionName) != 0) { - return; - } - - config::PinyinConfig pinyin_config = GET_CONFIG(pinyin_config); - - if (!ibus::ConfigUtil::SetFieldForName( - name_to_field_[name], value, &pinyin_config)) { - return; - } - - Session::UpdateConfig(pinyin_config); -} - -const map& ConfigUpdater::name_to_field() { - return name_to_field_; -} - -// static -void ConfigUpdater::InitConfig(IBusConfig *config) { - // Initialize the mozc config with the config loaded from ibus-memconf, which - // is the primary config storage on Chrome OS. - ibus::ConfigUtil::InitConfig( - config, kPinyinSectionName, - Singleton::get()->name_to_field()); -} - -#endif // OS_CHROMEOS - -} // namespace pinyin -} // namespace mozc diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/config_updater.h mozc-1.11.1522.102/languages/pinyin/unix/ibus/config_updater.h --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/config_updater.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/config_updater.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,70 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// We don't use the config updating logic for Chrome OS in mozc_engine.cc. -// Rather we use another one and invoke it from our main.cc. - -#ifndef MOZC_LANGUAGES_PINYIN_UNIX_IBUS_CONFIG_UPDATER_H_ -#define MOZC_LANGUAGES_PINYIN_UNIX_IBUS_CONFIG_UPDATER_H_ - -#include -#include -#include - -#include "config/config.pb.h" - -namespace mozc { -namespace pinyin { -#ifdef OS_CHROMEOS -class ConfigUpdater { - public: - ConfigUpdater(); - - static void ConfigValueChanged(IBusConfig *config, - const gchar *section, - const gchar *name, - GVariant *value, - gpointer user_data); - void UpdateConfig(const gchar *section, - const gchar *name, - GVariant *value); - - // Initializes mozc pinyin config. - static void InitConfig(IBusConfig *config); - - private: - const map& name_to_field(); - - map name_to_field_; -}; -#endif // OS_CHROMEOS -} // namespace pinyin -} // namespace mozc - -#endif // MOZC_LANGUAGES_PINYIN_UNIX_IBUS_CONFIG_UPDATER_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/main.cc mozc-1.11.1522.102/languages/pinyin/unix/ibus/main.cc --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/main.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/main.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// This file is copied from chewing/unix/ibus/main.cc. 
- -#ifdef OS_CHROMEOS -#include -#endif // OS_CHROMEOS -#include - -#include "base/base.h" -#include "base/version.h" -#include "languages/pinyin/unix/ibus/main.h" -#include "unix/ibus/mozc_engine.h" -#include "unix/ibus/path_util.h" -#ifdef OS_CHROMEOS -#include "config/config_handler.h" -#include "languages/pinyin/pinyin_session_factory.h" -#include "languages/pinyin/unix/ibus/config_updater.h" -#include "session/session_factory_manager.h" -#endif // OS_CHROMEOS - -DEFINE_bool(ibus, false, "The engine is started by ibus-daemon"); - -namespace { - -IBusBus *g_bus = NULL; -#ifdef OS_CHROMEOS -// We use the ibus configuration daemon only on Chromium OS. -IBusConfig *g_config = NULL; -#endif // OS_CHROMEOS - -// Creates a IBusComponent object and add engine(s) to the object. -IBusComponent *GetIBusComponent() { - IBusComponent *component = ibus_component_new( - kComponentName, - kComponentDescription, - mozc::Version::GetMozcVersion().c_str(), - kComponentLicense, - kComponentAuthor, - kComponentHomepage, - "", - kComponentTextdomain); - const string icon_path = mozc::ibus::GetIconPath(kEngineIcon); - for (size_t i = 0; i < kEngineArrayLen; ++i) { - ibus_component_add_engine(component, - ibus_engine_desc_new(kEngineNameArray[i], - kEngineLongnameArray[i], - kEngineDescription, - kEngineLanguage, - kComponentLicense, - kComponentAuthor, - icon_path.c_str(), - kEngineLayoutArray[i])); - } - return component; -} - -// Initializes ibus components and adds Mozc engine. -void InitIBusComponent(bool executed_by_ibus_daemon) { - g_bus = ibus_bus_new(); - g_signal_connect(g_bus, - "disconnected", - G_CALLBACK(mozc::ibus::MozcEngine::Disconnected), - NULL); -#ifdef OS_CHROMEOS - g_config = ibus_bus_get_config(g_bus); - g_object_ref_sink(g_config); - g_signal_connect(g_config, - "value-changed", - G_CALLBACK(mozc::pinyin::ConfigUpdater::ConfigValueChanged), - NULL); -#endif // OS_CHROMEOS - IBusComponent *component = GetIBusComponent(); - IBusFactory *factory = ibus_factory_new(ibus_bus_get_connection(g_bus)); - GList *engines = ibus_component_get_engines(component); - for (GList *p = engines; p; p = p->next) { - IBusEngineDesc *engine = reinterpret_cast(p->data); - const gchar * const engine_name = ibus_engine_desc_get_name(engine); - ibus_factory_add_engine( - factory, engine_name, mozc::ibus::MozcEngine::GetType()); - } - - if (executed_by_ibus_daemon) { - ibus_bus_request_name(g_bus, kComponentName, 0); - } else { - ibus_bus_register_component(g_bus, component); - } - g_object_unref(component); -} - -} // namespace - -int main(gint argc, gchar **argv) { - InitGoogle(argv[0], &argc, &argv, true); -#ifdef OS_CHROMEOS - // We should initialize it before ibus_init(). 
- PyZy::InputContext::init(); -#endif // OS_CHROMEOS - - ibus_init(); - InitIBusComponent(FLAGS_ibus); -#ifdef OS_CHROMEOS - mozc::config::ConfigHandler::SetConfigFileName("memory://pinyin_config.1.db"); - mozc::pinyin::ConfigUpdater::InitConfig(g_config); - - mozc::pinyin::PinyinSessionFactory session_factory; - mozc::session::SessionFactoryManager::SetSessionFactory(&session_factory); -#endif // OS_CHROMEOS - - ibus_main(); - -#ifdef OS_CHROMEOS - PyZy::InputContext::finalize(); - if (g_config) { - g_object_unref(g_config); - } -#endif // OS_CHROMEOS - - return 0; -} diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/main.h mozc-1.11.1522.102/languages/pinyin/unix/ibus/main.h --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/main.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/main.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,61 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// This file is generated by a script at first but now maintained manually. 
- -#ifndef MOZC_LANGUAGES_PINYIN_UNIX_IBUS_MAIN_H_ -#define MOZC_LANGUAGES_PINYIN_UNIX_IBUS_MAIN_H_ -namespace { -const char kComponentVersion[] = "0.0.0.0"; -const char kComponentName[] = "com.google.IBus.Pinyin"; -const char kComponentLicense[] = "New BSD"; -const char kComponentExec[] = "/usr/libexec/ibus-engine-mozc-pinyin --ibus"; -const char kComponentTextdomain[] = "ibus-mozc-pinyin"; -const char kComponentAuthor[] = "Google Inc."; -const char kComponentHomepage[] = "http://code.google.com/p/mozc/"; -const char kComponentDescription[] = "Mozc Pinyin Component"; -const char kEngineRank[] = "0"; -const char kEngineDescription[] = "Mozc Pinyin (Pinyin Input Method)"; -const char kEngineLanguage[] = "zh-CN"; -const char kEngineIcon[] = "/usr/share/ibus-mozc/product_icon.png"; -const char *kEngineLayoutArray[] = { - "us", - "us(dvorak)", -}; -const char *kEngineNameArray[] = { - "mozc-pinyin", - "mozc-pinyin-dv", -}; -const char *kEngineLongnameArray[] = { - "Mozc Pinyin", - "Mozc Pinyin (for US Dvorak keyboard)", -}; -const size_t kEngineArrayLen = 2; -} // namespace -#endif // MOZC_LANGUAGES_PINYIN_UNIX_IBUS_MAIN_H_ diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/mozc-pinyin.xml mozc-1.11.1522.102/languages/pinyin/unix/ibus/mozc-pinyin.xml --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/mozc-pinyin.xml 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/mozc-pinyin.xml 1970-01-01 00:00:00.000000000 +0000 @@ -1,60 +0,0 @@ - - - 0.0.0.0 - com.google.IBus.Pinyin - New BSD - /usr/libexec/ibus-engine-mozc-pinyin --ibus - ibus-mozc-pinyin - Google Inc. - http://code.google.com/p/mozc/ - Mozc Pinyin Component - - - 0 - Mozc Pinyin (Pinyin Input Method) - zh-CN - /usr/share/ibus-mozc-pinyin/product_icon.png - us - mozc-pinyin - Mozc Pinyin - - - 0 - Mozc Pinyin (Pinyin Input Method) - zh-CN - /usr/share/ibus-mozc-pinyin/product_icon.png - us(dvorak) - mozc-pinyin-dv - Mozc Pinyin (for US Dvorak keyboard) - - - diff -Nru mozc-1.11.1502.102/languages/pinyin/unix/ibus/mozc_engine_property.cc mozc-1.11.1522.102/languages/pinyin/unix/ibus/mozc_engine_property.cc --- mozc-1.11.1502.102/languages/pinyin/unix/ibus/mozc_engine_property.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/languages/pinyin/unix/ibus/mozc_engine_property.cc 1970-01-01 00:00:00.000000000 +0000 @@ -1,92 +0,0 @@ -// Copyright 2010-2013, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -#include "unix/ibus/mozc_engine_property.h" - -#include "base/base.h" -#include "session/commands.pb.h" - -namespace mozc { -namespace ibus { - -namespace { -// The list of properties used in ibus-mozc-pinyin. -// In ChromeOS, we do not use toggle interface, because ChromeOS does not -// support toggle interface. -const MozcEngineSwitchProperty kMozcEngineSwitchPropertiesArray[] = { - { - commands::SessionCommand::TOGGLE_PINYIN_CHINESE_MODE, - "mode.chinese", - "CN", - "hiragana.png", - "Chinese", - }, { - commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_WORD_MODE, - "mode.full", - "Aa", - "hiragana.png", - "Full/Half width", - }, { - commands::SessionCommand::TOGGLE_PINYIN_FULL_WIDTH_PUNCTUATION_MODE, - "mode.full_punct", - ",.", - "hiragana.png", - "Full/Half width punctuation", - }, { - commands::SessionCommand::TOGGLE_PINYIN_SIMPLIFIED_CHINESE_MODE, - "mode.simp", - "\xE7\xAE\x80", // "简" - "hiragana.png", - "Simplified/Traditional Chinese", - }, -}; - -} // namespace - -const MozcEngineProperty *kMozcEngineProperties = NULL; -// The IMEOff state is not available in Pinyin. -const MozcEngineProperty *kMozcEnginePropertyIMEOffState = NULL; -const size_t kMozcEnginePropertiesSize = 0; - -const commands::CompositionMode kMozcEngineInitialCompositionMode = - commands::HIRAGANA; - -const MozcEngineSwitchProperty *kMozcEngineSwitchProperties = - &kMozcEngineSwitchPropertiesArray[0]; -const size_t kMozcEngineSwitchPropertiesSize = - arraysize(kMozcEngineSwitchPropertiesArray); - -const MozcEngineToolProperty *kMozcEngineToolProperties = NULL; -const size_t kMozcEngineToolPropertiesSize = 0; - -// iBus lookup up table size (that is, the number of candidates per page). -// TODO(hsumita) make this variable editable in config. -const unsigned int kPageSize = 5; -} // namespace ibus -} // namespace mozc diff -Nru mozc-1.11.1502.102/mac/GoogleJapaneseInputController.mm mozc-1.11.1522.102/mac/GoogleJapaneseInputController.mm --- mozc-1.11.1502.102/mac/GoogleJapaneseInputController.mm 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/mac/GoogleJapaneseInputController.mm 2013-08-28 05:26:13.000000000 +0000 @@ -866,6 +866,9 @@ return false; } NSInteger totalLength = [client length]; + if (totalLength == 0 || totalLength == NSNotFound) { + return false; + } NSRange precedingRange = NSMakeRange(0, selectedRange.location); if (selectedRange.location > kMaxSurroundingLength) { precedingRange = diff -Nru mozc-1.11.1502.102/mozc_version_template.txt mozc-1.11.1522.102/mozc_version_template.txt --- mozc-1.11.1502.102/mozc_version_template.txt 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/mozc_version_template.txt 2013-08-28 05:25:59.000000000 +0000 @@ -1,6 +1,6 @@ MAJOR=1 MINOR=11 -BUILD=1502 +BUILD=1522 REVISION=102 # ANDROID_VERSION_CODE should be the number of update times. # It must at least increase when the build updates. 
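[Editor's note on the json_util.cc change that follows: the hunk switches int64/uint64 protobuf fields from JSON numbers to decimal strings and parses them back with NumberUtil::SafeStrToInt64/SafeStrToUInt64. The likely motivation, also stated in the json_util.h comment below, is that JavaScript numbers are IEEE-754 doubles and only represent integers exactly up to 2^53. The following is a minimal, self-contained sketch of that round-trip idea; it is not mozc code, the helper names are illustrative, and the parser is only roughly what SafeStrToInt64 is expected to enforce.]

// Sketch: carry 64-bit values through JSON as decimal strings.
#include <cerrno>
#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <string>

// Serialize: emit the exact decimal representation instead of a JSON number.
std::string Int64ToJsonString(int64_t v) { return std::to_string(v); }

// Parse: accept only a complete, in-range decimal string.
bool JsonStringToInt64(const std::string &s, int64_t *out) {
  if (s.empty()) return false;
  errno = 0;
  char *end = nullptr;
  const long long v = std::strtoll(s.c_str(), &end, 10);
  if (errno == ERANGE || end != s.c_str() + s.size()) return false;
  *out = v;
  return true;
}

int main() {
  const int64_t original = 9223372036854775807LL;  // kint64max
  const std::string json_value = Int64ToJsonString(original);
  int64_t parsed = 0;
  if (JsonStringToInt64(json_value, &parsed) && parsed == original) {
    std::cout << "round-tripped " << json_value << " exactly\n";
  }
  return 0;
}

[End of editor's note; the original diff resumes below.]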
diff -Nru mozc-1.11.1502.102/net/json_util.cc mozc-1.11.1522.102/net/json_util.cc --- mozc-1.11.1502.102/net/json_util.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/net/json_util.cc 2013-08-28 05:26:13.000000000 +0000 @@ -34,6 +34,7 @@ #include "base/base.h" #include "base/logging.h" +#include "base/number_util.h" #include "base/protobuf/descriptor.h" #include "base/util.h" #include "net/jsoncpp.h" @@ -64,8 +65,8 @@ return true; } case FieldDescriptor::CPPTYPE_INT64: { - *value = Json::Value(Json::Int64( - reflection.GetRepeatedInt64(message, &field, index))); + *value = Json::Value(NumberUtil::SimpleItoa(static_cast( + reflection.GetRepeatedInt64(message, &field, index)))); return true; } case FieldDescriptor::CPPTYPE_UINT32: { @@ -74,8 +75,8 @@ return true; } case FieldDescriptor::CPPTYPE_UINT64: { - *value = Json::Value(Json::UInt64( - reflection.GetRepeatedUInt64(message, &field, index))); + *value = Json::Value(NumberUtil::SimpleItoa(static_cast( + reflection.GetRepeatedUInt64(message, &field, index)))); return true; } case FieldDescriptor::CPPTYPE_FLOAT: { @@ -124,7 +125,8 @@ return true; } case FieldDescriptor::CPPTYPE_INT64: { - *value = Json::Value(Json::Int64(reflection.GetInt64(message, &field))); + *value = Json::Value(NumberUtil::SimpleItoa(static_cast( + reflection.GetInt64(message, &field)))); return true; } case FieldDescriptor::CPPTYPE_UINT32: { @@ -132,7 +134,8 @@ return true; } case FieldDescriptor::CPPTYPE_UINT64: { - *value = Json::Value(Json::UInt64(reflection.GetUInt64(message, &field))); + *value = Json::Value(NumberUtil::SimpleItoa(static_cast( + reflection.GetUInt64(message, &field)))); return true; } case FieldDescriptor::CPPTYPE_FLOAT: { @@ -185,12 +188,18 @@ break; } case FieldDescriptor::CPPTYPE_INT64: { - if (!value.isConvertibleTo(Json::intValue)) { - DLOG(ERROR) << "value is not convertible to intValue: " + if (!value.isConvertibleTo(Json::stringValue)) { + DLOG(ERROR) << "value is not convertible to stringValue: " + << Json::FastWriter().write(value); + return false; + } + int64 int_value; + if (!NumberUtil::SafeStrToInt64(value.asString(), &int_value)) { + DLOG(ERROR) << "value is not convertible to int64: " << Json::FastWriter().write(value); return false; } - reflection->SetInt64(message, field, value.asInt64()); + reflection->SetInt64(message, field, int_value); break; } case FieldDescriptor::CPPTYPE_UINT32: { @@ -203,12 +212,18 @@ break; } case FieldDescriptor::CPPTYPE_UINT64: { - if (!value.isConvertibleTo(Json::uintValue)) { - DLOG(ERROR) << "value is not convertible to uintValue: " + if (!value.isConvertibleTo(Json::stringValue)) { + DLOG(ERROR) << "value is not convertible to stringValue: " + << Json::FastWriter().write(value); + return false; + } + uint64 uint_value; + if (!NumberUtil::SafeStrToUInt64(value.asString(), &uint_value)) { + DLOG(ERROR) << "value is not convertible to uint64: " << Json::FastWriter().write(value); return false; } - reflection->SetUInt64(message, field, value.asUInt64()); + reflection->SetUInt64(message, field, uint_value); break; } case FieldDescriptor::CPPTYPE_DOUBLE: { @@ -302,12 +317,18 @@ } case FieldDescriptor::CPPTYPE_INT64: { for (Json::ArrayIndex i = 0; i < value.size(); ++i) { - if (!value[i].isConvertibleTo(Json::intValue)) { - DLOG(ERROR) << "value is not convertible to intValue: " + int64 int_value; + if (!value[i].isConvertibleTo(Json::stringValue)) { + DLOG(ERROR) << "value is not convertible to stringValue: " + << Json::FastWriter().write(value[i]); + result = false; + } else if 
(!NumberUtil::SafeStrToInt64(value[i].asString(), + &int_value)) { + DLOG(ERROR) << "value is not convertible to int64: " << Json::FastWriter().write(value[i]); result = false; } else { - reflection->AddInt64(message, field, value[i].asInt64()); + reflection->AddInt64(message, field, int_value); } } break; @@ -326,12 +347,18 @@ } case FieldDescriptor::CPPTYPE_UINT64: { for (Json::ArrayIndex i = 0; i < value.size(); ++i) { - if (!value[i].isConvertibleTo(Json::uintValue)) { - DLOG(ERROR) << "value is not convertible to uintValue: " + uint64 uint_value; + if (!value[i].isConvertibleTo(Json::stringValue)) { + DLOG(ERROR) << "value is not convertible to stringValue: " + << Json::FastWriter().write(value[i]); + result = false; + } else if (!NumberUtil::SafeStrToUInt64(value[i].asString(), + &uint_value)) { + DLOG(ERROR) << "value is not convertible to uint64: " << Json::FastWriter().write(value[i]); result = false; } else { - reflection->AddUInt64(message, field, value[i].asUInt64()); + reflection->AddUInt64(message, field, uint_value); } } break; diff -Nru mozc-1.11.1502.102/net/json_util.h mozc-1.11.1522.102/net/json_util.h --- mozc-1.11.1502.102/net/json_util.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/net/json_util.h 2013-08-28 05:26:13.000000000 +0000 @@ -39,8 +39,12 @@ class JsonUtil { public: + // Converts protobuf message to JSON value. + // Note: JavaScript JSON parser can't handle int64/uint64. + // So it converts int64/uint64 value in protobuf to string value in JSON. static bool ProtobufMessageToJsonValue( const protobuf::Message &message, Json::Value *value); + // Converts JSON value to protobuf message. static bool JsonValueToProtobufMessage( const Json::Value &value, protobuf::Message *message); diff -Nru mozc-1.11.1502.102/net/json_util_test.cc mozc-1.11.1522.102/net/json_util_test.cc --- mozc-1.11.1502.102/net/json_util_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/net/json_util_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -70,15 +70,53 @@ TEST_CONVERT_ITEM(set_double_value, 1.0, "double_value", 1.0) TEST_CONVERT_ITEM(set_float_value, 2.0, "float_value", 2.0) TEST_CONVERT_ITEM(set_int32_value, 3, "int32_value", Json::Int(3)) - TEST_CONVERT_ITEM(set_int64_value, 4, "int64_value", Json::Int64(4)) + TEST_CONVERT_ITEM(set_int32_value, -3, "int32_value", Json::Int(-3)) + TEST_CONVERT_ITEM(set_int32_value, kint32min, + "int32_value", Json::Int(kint32min)) + TEST_CONVERT_ITEM(set_int32_value, kint32max, + "int32_value", Json::Int(kint32max)) + TEST_CONVERT_ITEM(set_int64_value, 4, "int64_value", "4") + TEST_CONVERT_ITEM(set_int64_value, -4, "int64_value", "-4") + TEST_CONVERT_ITEM(set_int64_value, kint64min, + "int64_value", "-9223372036854775808") + TEST_CONVERT_ITEM(set_int64_value, kint64max, + "int64_value", "9223372036854775807") TEST_CONVERT_ITEM(set_uint32_value, 5, "uint32_value", Json::UInt(5)) - TEST_CONVERT_ITEM(set_uint64_value, 6, "uint64_value", Json::UInt64(6)) + TEST_CONVERT_ITEM(set_uint32_value, kuint32max, + "uint32_value", Json::UInt(kuint32max)) + TEST_CONVERT_ITEM(set_uint64_value, 6, "uint64_value", "6") + TEST_CONVERT_ITEM(set_uint64_value, kuint64max, + "uint64_value", "18446744073709551615") TEST_CONVERT_ITEM(set_sint32_value, 7, "sint32_value", Json::Int(7)) - TEST_CONVERT_ITEM(set_sint64_value, 8, "sint64_value", Json::Int64(8)) + TEST_CONVERT_ITEM(set_sint32_value, -7, "sint32_value", Json::Int(-7)) + TEST_CONVERT_ITEM(set_sint32_value, kint32min, + "sint32_value", Json::Int(kint32min)) + 
TEST_CONVERT_ITEM(set_sint32_value, kint32max, + "sint32_value", Json::Int(kint32max)) + TEST_CONVERT_ITEM(set_sint64_value, 8, "sint64_value", "8") + TEST_CONVERT_ITEM(set_sint64_value, -8, "sint64_value", "-8") + TEST_CONVERT_ITEM(set_sint64_value, kint64min, + "sint64_value", "-9223372036854775808") + TEST_CONVERT_ITEM(set_sint64_value, kint64max, + "sint64_value", "9223372036854775807") TEST_CONVERT_ITEM(set_fixed32_value, 9, "fixed32_value", Json::UInt(9)) - TEST_CONVERT_ITEM(set_fixed64_value, 10, "fixed64_value", Json::UInt64(10)) + TEST_CONVERT_ITEM(set_fixed32_value, kuint32max, + "fixed32_value", Json::UInt(kuint32max)) + TEST_CONVERT_ITEM(set_fixed64_value, 10, "fixed64_value", "10") + TEST_CONVERT_ITEM(set_fixed64_value, kuint64max, + "fixed64_value", "18446744073709551615") TEST_CONVERT_ITEM(set_sfixed32_value, 11, "sfixed32_value", Json::Int(11)) - TEST_CONVERT_ITEM(set_sfixed64_value, 12, "sfixed64_value", Json::Int64(12)) + TEST_CONVERT_ITEM(set_sfixed32_value, -11, "sfixed32_value", Json::Int(-11)) + TEST_CONVERT_ITEM(set_sfixed32_value, kint32min, + "sfixed32_value", Json::Int(kint32min)) + TEST_CONVERT_ITEM(set_sfixed32_value, kint32max, + "sfixed32_value", Json::Int(kint32max)) + TEST_CONVERT_ITEM(set_sfixed64_value, 12, "sfixed64_value", "12") + TEST_CONVERT_ITEM(set_sfixed64_value, -12, "sfixed64_value", "-12") + TEST_CONVERT_ITEM(set_sfixed64_value, kint64min, + "sfixed64_value", "-9223372036854775808") + TEST_CONVERT_ITEM(set_sfixed64_value, kint64max, + "sfixed64_value", "9223372036854775807") TEST_CONVERT_ITEM(set_bool_value, true, "bool_value", true) TEST_CONVERT_ITEM(set_bool_value, false, "bool_value", false) TEST_CONVERT_ITEM(set_string_value, "string", "string_value", "string") @@ -116,33 +154,113 @@ TEST_CONVERT_REPEATED_ITEM(add_repeated_int32_value, 1, 2, 3, "repeated_int32_value", Json::Int(1), Json::Int(2), Json::Int(3)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_int32_value, + kint32min, kint32min, kint32min, + "repeated_int32_value", + Json::Int(kint32min), Json::Int(kint32min), Json::Int(kint32min)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_int32_value, + kint32max, kint32max, kint32max, + "repeated_int32_value", + Json::Int(kint32max), Json::Int(kint32max), Json::Int(kint32max)) TEST_CONVERT_REPEATED_ITEM(add_repeated_int64_value, 1, 2, 3, "repeated_int64_value", - Json::Int64(1), Json::Int64(2), Json::Int64(3)) + "1", "2", "3") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_int64_value, + kint64min, kint64min, kint64min, + "repeated_int64_value", + "-9223372036854775808", "-9223372036854775808", "-9223372036854775808") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_int64_value, + kint64max, kint64max, kint64max, + "repeated_int64_value", + "9223372036854775807", "9223372036854775807", "9223372036854775807") TEST_CONVERT_REPEATED_ITEM(add_repeated_uint32_value, 1, 2, 3, "repeated_uint32_value", Json::UInt(1), Json::UInt(2), Json::UInt(3)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_uint32_value, + kuint32max, kuint32max, kuint32max, + "repeated_uint32_value", + Json::UInt(kuint32max), Json::UInt(kuint32max), Json::UInt(kuint32max)) TEST_CONVERT_REPEATED_ITEM(add_repeated_uint64_value, 1, 2, 3, "repeated_uint64_value", - Json::UInt64(1), Json::UInt64(2), Json::UInt64(3)) + "1", "2", "3") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_uint64_value, + kuint64max, kuint64max, kuint64max, + "repeated_uint64_value", + "18446744073709551615", "18446744073709551615", "18446744073709551615") TEST_CONVERT_REPEATED_ITEM(add_repeated_sint32_value, 1, 2, 3, 
"repeated_sint32_value", Json::Int(1), Json::Int(2), Json::Int(3)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sint32_value, + kint32min, kint32min, kint32min, + "repeated_sint32_value", + Json::Int(kint32min), Json::Int(kint32min), Json::Int(kint32min)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sint32_value, + kint32max, kint32max, kint32max, + "repeated_sint32_value", + Json::Int(kint32max), Json::Int(kint32max), Json::Int(kint32max)) TEST_CONVERT_REPEATED_ITEM(add_repeated_sint64_value, 1, 2, 3, "repeated_sint64_value", - Json::Int64(1), Json::Int64(2), Json::Int64(3)) + "1", "2", "3") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sint64_value, + kint64min, kint64min, kint64min, + "repeated_sint64_value", + "-9223372036854775808", "-9223372036854775808", "-9223372036854775808") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sint64_value, + kint64max, kint64max, kint64max, + "repeated_sint64_value", + "9223372036854775807", "9223372036854775807", "9223372036854775807") TEST_CONVERT_REPEATED_ITEM(add_repeated_fixed32_value, 1, 2, 3, "repeated_fixed32_value", Json::UInt(1), Json::UInt(2), Json::UInt(3)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_fixed32_value, + kuint32max, kuint32max, kuint32max, + "repeated_fixed32_value", + Json::UInt(kuint32max), Json::UInt(kuint32max), Json::UInt(kuint32max)) TEST_CONVERT_REPEATED_ITEM(add_repeated_fixed64_value, 1, 2, 3, "repeated_fixed64_value", - Json::UInt64(1), Json::UInt64(2), Json::UInt64(3)) + "1", "2", "3") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_fixed64_value, + kuint64max, kuint64max, kuint64max, + "repeated_fixed64_value", + "18446744073709551615", "18446744073709551615", "18446744073709551615") TEST_CONVERT_REPEATED_ITEM(add_repeated_sfixed32_value, 1, 2, 3, "repeated_sfixed32_value", Json::Int(1), Json::Int(2), Json::Int(3)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sfixed32_value, + kint32min, kint32min, kint32min, + "repeated_sfixed32_value", + Json::Int(kint32min), Json::Int(kint32min), Json::Int(kint32min)) + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sfixed32_value, + kint32max, kint32max, kint32max, + "repeated_sfixed32_value", + Json::Int(kint32max), Json::Int(kint32max), Json::Int(kint32max)) TEST_CONVERT_REPEATED_ITEM(add_repeated_sfixed64_value, 1, 2, 3, "repeated_sfixed64_value", - Json::Int64(1), Json::Int64(2), Json::Int64(3)) + "1", "2", "3") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sfixed64_value, + kint64min, kint64min, kint64min, + "repeated_sfixed64_value", + "-9223372036854775808", "-9223372036854775808", "-9223372036854775808") + TEST_CONVERT_REPEATED_ITEM( + add_repeated_sfixed64_value, + kint64max, kint64max, kint64max, + "repeated_sfixed64_value", + "9223372036854775807", "9223372036854775807", "9223372036854775807") TEST_CONVERT_REPEATED_ITEM(add_repeated_bool_value, true, true, false, "repeated_bool_value", true, true, false) TEST_CONVERT_REPEATED_ITEM(add_repeated_string_value, "ABC", "DEF", "GHQ", @@ -244,6 +362,110 @@ EXPECT_PROTO_EQ(msg, new_msg); } +namespace { + +bool ParseToMessage(const string &json_string, TestMsg *message) { + message->Clear(); + Json::Value value; + EXPECT_TRUE(Json::Reader().parse(json_string, value)); + return JsonUtil::JsonValueToProtobufMessage(value, message); +} + +} // namespace + +TEST(JsonUtilTest, JsonParseTest) { + TestMsg msg; + // signed int 32 + EXPECT_FALSE(ParseToMessage("{\"int32_value\": -2147483649}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"int32_value\": -2147483648}", &msg)); + EXPECT_EQ(kint32min, msg.int32_value()); + 
EXPECT_TRUE(ParseToMessage("{\"int32_value\": 2147483647}", &msg)); + EXPECT_EQ(kint32max, msg.int32_value()); + EXPECT_FALSE(ParseToMessage("{\"int32_value\": 2147483648}", &msg)); + + EXPECT_FALSE(ParseToMessage("{\"sint32_value\": -2147483649}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"sint32_value\": -2147483648}", &msg)); + EXPECT_EQ(kint32min, msg.sint32_value()); + EXPECT_TRUE(ParseToMessage("{\"sint32_value\": 2147483647}", &msg)); + EXPECT_EQ(kint32max, msg.sint32_value()); + EXPECT_FALSE(ParseToMessage("{\"sint32_value\": 2147483648}", &msg)); + + EXPECT_FALSE(ParseToMessage("{\"sfixed32_value\": -2147483649}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"sfixed32_value\": -2147483648}", &msg)); + EXPECT_EQ(kint32min, msg.sfixed32_value()); + EXPECT_TRUE(ParseToMessage("{\"sfixed32_value\": 2147483647}", &msg)); + EXPECT_EQ(kint32max, msg.sfixed32_value()); + EXPECT_FALSE(ParseToMessage("{\"sfixed32_value\": 2147483648}", &msg)); + + // unsigned int 32 + EXPECT_FALSE(ParseToMessage("{\"uint32_value\": -1}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"uint32_value\": 0}", &msg)); + EXPECT_EQ(0, msg.uint32_value()); + EXPECT_TRUE(ParseToMessage("{\"uint32_value\": 4294967295}", &msg)); + EXPECT_EQ(kuint32max, msg.uint32_value()); + EXPECT_FALSE(ParseToMessage("{\"uint32_value\": 4294967296}", &msg)); + + EXPECT_FALSE(ParseToMessage("{\"fixed32_value\": -1}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"fixed32_value\": 0}", &msg)); + EXPECT_EQ(0, msg.fixed32_value()); + EXPECT_TRUE(ParseToMessage("{\"fixed32_value\": 4294967295}", &msg)); + EXPECT_EQ(kuint32max, msg.fixed32_value()); + EXPECT_FALSE(ParseToMessage("{\"fixed32_value\": 4294967296}", &msg)); + + // signed int 64 + EXPECT_FALSE(ParseToMessage("{\"int64_value\": \"-9223372036854775809\"}", + &msg)); + EXPECT_TRUE(ParseToMessage("{\"int64_value\": \"-9223372036854775808\"}", + &msg)); + EXPECT_EQ(kint64min, msg.int64_value()); + EXPECT_TRUE(ParseToMessage("{\"int64_value\": \"9223372036854775807\"}", + &msg)); + EXPECT_EQ(kint64max, msg.int64_value()); + EXPECT_FALSE(ParseToMessage("{\"int64_value\": \"9223372036854775808\"}", + &msg)); + + EXPECT_FALSE(ParseToMessage("{\"sint64_value\": \"-9223372036854775809\"}", + &msg)); + EXPECT_TRUE(ParseToMessage("{\"sint64_value\": \"-9223372036854775808\"}", + &msg)); + EXPECT_EQ(kint64min, msg.sint64_value()); + EXPECT_TRUE(ParseToMessage("{\"sint64_value\": \"9223372036854775807\"}", + &msg)); + EXPECT_EQ(kint64max, msg.sint64_value()); + EXPECT_FALSE(ParseToMessage("{\"sint64_value\": \"9223372036854775808\"}", + &msg)); + + EXPECT_FALSE(ParseToMessage("{\"sfixed64_value\": \"-9223372036854775809\"}", + &msg)); + EXPECT_TRUE(ParseToMessage("{\"sfixed64_value\": \"-9223372036854775808\"}", + &msg)); + EXPECT_EQ(kint64min, msg.sfixed64_value()); + EXPECT_TRUE(ParseToMessage("{\"sfixed64_value\": \"9223372036854775807\"}", + &msg)); + EXPECT_EQ(kint64max, msg.sfixed64_value()); + EXPECT_FALSE(ParseToMessage("{\"sfixed64_value\": \"9223372036854775808\"}", + &msg)); + + // unsigned int 64 + EXPECT_FALSE(ParseToMessage("{\"uint64_value\": \"-1\"}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"uint64_value\": \"0\"}", &msg)); + EXPECT_EQ(0, msg.uint64_value()); + EXPECT_TRUE(ParseToMessage("{\"uint64_value\": \"18446744073709551615\"}", + &msg)); + EXPECT_EQ(kuint64max, msg.uint64_value()); + EXPECT_FALSE(ParseToMessage("{\"uint64_value\": \"18446744073709551616\"}", + &msg)); + + EXPECT_FALSE(ParseToMessage("{\"fixed64_value\": \"-1\"}", &msg)); + EXPECT_TRUE(ParseToMessage("{\"fixed64_value\": 
\"0\"}", &msg)); + EXPECT_EQ(0, msg.fixed64_value()); + EXPECT_TRUE(ParseToMessage("{\"fixed64_value\": \"18446744073709551615\"}", + &msg)); + EXPECT_EQ(kuint64max, msg.fixed64_value()); + EXPECT_FALSE(ParseToMessage("{\"fixed64_value\": \"18446744073709551616\"}", + &msg)); +} + TEST(JsonUtilTest, FailureTest) { const char *kNumValueKeys[] = { "double_value", "float_value", "int32_value", "int64_value", @@ -269,13 +491,70 @@ EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); } } - const char *kUnsignedNumValueKeys[] = {"uint32_value", "uint64_value"}; - for (size_t i = 0; i < arraysize(kUnsignedNumValueKeys); ++i) { - Json::Value json_value; - json_value[kUnsignedNumValueKeys[i]] = -1; - TestMsg msg; - EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + const char *kNumS32ValueKeys[] = + {"int32_value", "sint32_value", "sfixed32_value"}; + const char *kNumU32ValueKeys[] = {"uint32_value", "fixed32_value"}; + const char *kNumS64ValueKeys[] = + {"int64_value", "sint64_value", "sfixed64_value"}; + const char *kNumU64ValueKeys[] = {"uint64_value", "fixed64_value"}; + for (size_t i = 0; i < arraysize(kNumS32ValueKeys); ++i) { + { + Json::Value json_value; + json_value[kNumS32ValueKeys[i]] = -2147483649ll; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + { + Json::Value json_value; + json_value[kNumS32ValueKeys[i]] = 2147483648ull; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } } + + for (size_t i = 0; i < arraysize(kNumU32ValueKeys); ++i) { + { + Json::Value json_value; + json_value[kNumU32ValueKeys[i]] = -1; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + { + Json::Value json_value; + json_value[kNumU32ValueKeys[i]] = 4294967296ull; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + } + for (size_t i = 0; i < arraysize(kNumS64ValueKeys); ++i) { + { + Json::Value json_value; + json_value[kNumS64ValueKeys[i]] = "-9223372036854775809"; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + { + Json::Value json_value; + json_value[kNumS64ValueKeys[i]] = "9223372036854775808"; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + } + for (size_t i = 0; i < arraysize(kNumU64ValueKeys); ++i) { + { + Json::Value json_value; + json_value[kNumU64ValueKeys[i]] = "-1"; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + { + Json::Value json_value; + json_value[kNumU64ValueKeys[i]] = "18446744073709551616"; + TestMsg msg; + EXPECT_FALSE(JsonUtil::JsonValueToProtobufMessage(json_value, &msg)); + } + } + { Json::Value json_value; json_value["bool_value"] = "str"; diff -Nru mozc-1.11.1502.102/prediction/dictionary_predictor.h mozc-1.11.1522.102/prediction/dictionary_predictor.h --- mozc-1.11.1502.102/prediction/dictionary_predictor.h 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/prediction/dictionary_predictor.h 2013-08-28 05:26:13.000000000 +0000 @@ -52,7 +52,7 @@ class Segments; class SuggestionFilter; -// Dictioanry-based predictor +// Dictionary-based predictor class DictionaryPredictor : public PredictorInterface { public: // Initializes a predictor with given references to submodules. 
Note that diff -Nru mozc-1.11.1502.102/prediction/dictionary_predictor_test.cc mozc-1.11.1522.102/prediction/dictionary_predictor_test.cc --- mozc-1.11.1502.102/prediction/dictionary_predictor_test.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/prediction/dictionary_predictor_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -2101,7 +2101,7 @@ "\xe6\x99\x82", false }, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestCases); ++i) { + for (size_t i = 0; i < arraysize(kTestCases); ++i) { Segments segments; MakeSegmentsForSuggestion("", &segments); @@ -2977,7 +2977,7 @@ true }, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestcases); ++i) { + for (size_t i = 0; i < arraysize(kTestcases); ++i) { const TestCase &test_case = kTestcases[i]; Segments segments; diff -Nru mozc-1.11.1502.102/prediction/user_history_predictor_test.cc mozc-1.11.1522.102/prediction/user_history_predictor_test.cc --- mozc-1.11.1502.102/prediction/user_history_predictor_test.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/prediction/user_history_predictor_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -1900,8 +1900,9 @@ vector commands(10000); for (size_t i = 0; i < commands.size(); ++i) { - commands[i].key = NumberUtil::SimpleItoa(i) + "key"; - commands[i].value = NumberUtil::SimpleItoa(i) + "value"; + commands[i].key = NumberUtil::SimpleItoa(static_cast(i)) + "key"; + commands[i].value = NumberUtil::SimpleItoa(static_cast(i)) + + "value"; const int n = Util::Random(100); if (n == 0) { commands[i].type = Command::WAIT; @@ -2971,10 +2972,7 @@ UserHistoryPredictor::LEFT_PREFIX_MATCH }, }; - // ARRAYSIZE_UNSAFE is less safe than arraysize, however we can't use - // arraysize for inner defined class. - // Please see base/port.h - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTests1); ++i) { + for (size_t i = 0; i < arraysize(kTests1); ++i) { EXPECT_EQ(kTests1[i].expect_type, UserHistoryPredictor::GetMatchTypeFromInput( // "あ", "あ" @@ -3001,7 +2999,7 @@ UserHistoryPredictor::LEFT_PREFIX_MATCH }, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTests2); ++i) { + for (size_t i = 0; i < arraysize(kTests2); ++i) { EXPECT_EQ(kTests2[i].expect_type, UserHistoryPredictor::GetMatchTypeFromInput( "", "", expanded.get(), kTests2[i].target)) @@ -3049,10 +3047,7 @@ UserHistoryPredictor::LEFT_PREFIX_MATCH }, }; - // ARRAYSIZE_UNSAFE is less safe than arraysize, however we can't use - // arraysize for inner defined class. 
- // Please see base/port.h - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTests1); ++i) { + for (size_t i = 0; i < arraysize(kTests1); ++i) { EXPECT_EQ(kTests1[i].expect_type, UserHistoryPredictor::GetMatchTypeFromInput( // "あし", "あ" @@ -3082,7 +3077,7 @@ UserHistoryPredictor::LEFT_PREFIX_MATCH }, }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTests2); ++i) { + for (size_t i = 0; i < arraysize(kTests2); ++i) { EXPECT_EQ(kTests2[i].expect_type, UserHistoryPredictor::GetMatchTypeFromInput( // "し" diff -Nru mozc-1.11.1502.102/renderer/mozc_renderer.exe.manifest mozc-1.11.1522.102/renderer/mozc_renderer.exe.manifest --- mozc-1.11.1502.102/renderer/mozc_renderer.exe.manifest 2013-07-17 02:37:40.000000000 +0000 +++ mozc-1.11.1522.102/renderer/mozc_renderer.exe.manifest 2013-08-28 05:25:52.000000000 +0000 @@ -9,6 +9,8 @@ + + diff -Nru mozc-1.11.1502.102/renderer/renderer.gyp mozc-1.11.1522.102/renderer/renderer.gyp --- mozc-1.11.1502.102/renderer/renderer.gyp 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/renderer/renderer.gyp 2013-08-28 05:25:59.000000000 +0000 @@ -481,7 +481,7 @@ 'msvs_settings': { 'VCManifestTool': { 'AdditionalManifestFiles': 'mozc_renderer.exe.manifest', - 'EmbedManifest': 'false', + 'EmbedManifest': 'true', }, }, }, diff -Nru mozc-1.11.1502.102/renderer/win32/win32_image_util.cc mozc-1.11.1522.102/renderer/win32/win32_image_util.cc --- mozc-1.11.1502.102/renderer/win32/win32_image_util.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/renderer/win32/win32_image_util.cc 2013-08-28 05:25:59.000000000 +0000 @@ -408,19 +408,30 @@ const int bitmap_width = pix_width * kDivision; const int bitmap_height = pix_height * kDivision; - BITMAPINFO bitmap_info = {}; - bitmap_info.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); - bitmap_info.bmiHeader.biWidth = bitmap_width; - bitmap_info.bmiHeader.biHeight = -bitmap_height; // top-down BMP - bitmap_info.bmiHeader.biPlanes = 1; - bitmap_info.bmiHeader.biBitCount = 1; - bitmap_info.bmiHeader.biCompression = BI_RGB; - bitmap_info.bmiHeader.biSizeImage = 0; + struct MonochromeBitmapInfo { + BITMAPINFOHEADER header; + RGBQUAD color_palette[2]; + }; + + const RGBQUAD kBackgroundColor = {0x00, 0x00, 0x00, 0x00}; + const RGBQUAD kForegroundColor = {0xff, 0xff, 0xff, 0x00}; + + MonochromeBitmapInfo bitmap_info = {}; + bitmap_info.header.biSize = sizeof(BITMAPINFOHEADER); + bitmap_info.header.biWidth = bitmap_width; + bitmap_info.header.biHeight = -bitmap_height; // top-down BMP + bitmap_info.header.biPlanes = 1; + bitmap_info.header.biBitCount = 1; // Color palettes must have 2 entries. 
+ bitmap_info.header.biCompression = BI_RGB; + bitmap_info.header.biSizeImage = 0; + bitmap_info.color_palette[0] = kBackgroundColor; // black + bitmap_info.color_palette[1] = kForegroundColor; // white uint8 *buffer = nullptr; CBitmap dib; - dib.CreateDIBSection(nullptr, &bitmap_info, DIB_RGB_COLORS, - reinterpret_cast(&buffer), nullptr, 0); + dib.CreateDIBSection( + nullptr, reinterpret_cast(&bitmap_info), + DIB_RGB_COLORS, reinterpret_cast(&buffer), nullptr, 0); CDC dc; dc.CreateCompatibleDC(nullptr); diff -Nru mozc-1.11.1502.102/renderer/win32/win32_renderer_client.cc mozc-1.11.1522.102/renderer/win32/win32_renderer_client.cc --- mozc-1.11.1502.102/renderer/win32/win32_renderer_client.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/renderer/win32/win32_renderer_client.cc 2013-08-28 05:25:59.000000000 +0000 @@ -32,6 +32,7 @@ #include "base/logging.h" #include "base/mutex.h" #include "base/scoped_handle.h" +#include "base/system_util.h" #include "base/util.h" #include "renderer/renderer_client.h" #include "renderer/renderer_command.pb.h" @@ -80,6 +81,21 @@ } void RenderLoop() { + // Wait until desktop name is ready. b/10403163 + while (SystemUtil::GetDesktopNameAsString().empty()) { + const DWORD wait_result = ::WaitForSingleObject(quit_event_.get(), 500); + const DWORD wait_error = ::GetLastError(); + if (wait_result == WAIT_OBJECT_0) { + return; + } + if (wait_result == WAIT_TIMEOUT) { + continue; + } + LOG(ERROR) << "Unknown result: " << wait_result + << ", error: " << wait_error; + return; + } + mozc::renderer::RendererClient renderer_client; while (true) { const HANDLE handles[] = {quit_event_.get(), command_event_.get()}; diff -Nru mozc-1.11.1502.102/renderer/win32/win32_renderer_util.cc mozc-1.11.1522.102/renderer/win32/win32_renderer_util.cc --- mozc-1.11.1502.102/renderer/win32/win32_renderer_util.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/renderer/win32/win32_renderer_util.cc 2013-08-28 05:25:59.000000000 +0000 @@ -744,12 +744,23 @@ static FPLogicalToPhysicalPoint GetLogicalToPhysicalPoint() { // LogicalToPhysicalPoint API is available in Vista or later. const HMODULE module = WinUtil::GetSystemModuleHandle(L"user32.dll"); - if (module == NULL) { - return NULL; + if (module == nullptr) { + return nullptr; } - void *function = ::GetProcAddress(module, "LogicalToPhysicalPoint"); - if (function == NULL) { - return NULL; + // Despite its name, LogicalToPhysicalPoint API no longer converts + // coordinates on Windows 8.1 and later. We must use + // LogicalToPhysicalPointForPerMonitorDPI API instead when it is available. + // See http://go.microsoft.com/fwlink/?LinkID=307061 + void *function = ::GetProcAddress( + module, "LogicalToPhysicalPointForPerMonitorDPI"); + if (function == nullptr) { + // When LogicalToPhysicalPointForPerMonitorDPI API does not exist but + // LogicalToPhysicalPoint API exists, LogicalToPhysicalPoint works fine. + // This is the case on Windows Vista, Windows 7 and Windows 8. 
+ function = ::GetProcAddress(module, "LogicalToPhysicalPoint"); + if (function == nullptr) { + return nullptr; + } } return reinterpret_cast(function); } diff -Nru mozc-1.11.1502.102/rewriter/command_rewriter.cc mozc-1.11.1522.102/rewriter/command_rewriter.cc --- mozc-1.11.1502.102/rewriter/command_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/command_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -224,10 +224,6 @@ candidate->content_value = candidate->value; } -void CommandRewriter::Finish(Segments *segments) { - // Do nothing in finish. -} - bool CommandRewriter::RewriteSegment(Segment *segment) const { DCHECK(segment); diff -Nru mozc-1.11.1502.102/rewriter/command_rewriter.h mozc-1.11.1522.102/rewriter/command_rewriter.h --- mozc-1.11.1502.102/rewriter/command_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/command_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -49,8 +49,6 @@ virtual bool Rewrite(const ConversionRequest &request, Segments *segments) const; - virtual void Finish(Segments *segments); - private: bool RewriteSegment(Segment *segment) const; diff -Nru mozc-1.11.1502.102/rewriter/date_rewriter_test.cc mozc-1.11.1522.102/rewriter/date_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/date_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/date_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -1005,7 +1005,7 @@ { 12, 31 } }; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(month_days_test_data); ++i) { + for (size_t i = 0; i < arraysize(month_days_test_data); ++i) { EXPECT_TRUE(rewriter.ConvertDateWithYear( 2001, month_days_test_data[i].month, diff -Nru mozc-1.11.1502.102/rewriter/emoji_rewriter.cc mozc-1.11.1522.102/rewriter/emoji_rewriter.cc --- mozc-1.11.1502.102/rewriter/emoji_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/emoji_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -252,16 +252,17 @@ return RewriteCandidates(available_emoji_carrier, segments); } -void EmojiRewriter::Finish(Segments *segments) { +void EmojiRewriter::Finish(const ConversionRequest &request, + Segments *segments) { if (!mozc::config::ConfigHandler::GetConfig().use_emoji_conversion()) { return; } // Update usage stats - for (size_t i = 0; i < segments->segments_size(); ++i) { - const Segment &segment = segments->segment(i); + for (size_t i = 0; i < segments->conversion_segments_size(); ++i) { + const Segment &segment = segments->conversion_segment(i); // Ignores segments which are not converted or not committed. - if (segment.candidates_size() <= 0 || + if (segment.candidates_size() == 0 || segment.segment_type() != Segment::FIXED_VALUE) { continue; } diff -Nru mozc-1.11.1502.102/rewriter/emoji_rewriter.h mozc-1.11.1522.102/rewriter/emoji_rewriter.h --- mozc-1.11.1502.102/rewriter/emoji_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/emoji_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -116,7 +116,7 @@ // NOTE: This method is expected to be called after the segments are processed // with COMMIT command in a SessionConverter instance. May record wrong // stats if you call this method in other situation. - virtual void Finish(Segments *segments); + virtual void Finish(const ConversionRequest &request, Segments *segments); // Returns true if the given candidate includes emoji characters. 
// TODO(peria, hidehiko): Unify this checker and IsEmojiEntry defined in diff -Nru mozc-1.11.1502.102/rewriter/emoji_rewriter_test.cc mozc-1.11.1522.102/rewriter/emoji_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/emoji_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/emoji_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -499,25 +499,25 @@ // Converting non-emoji candidates does not matter. SetSegment("test", "test", &segments); EXPECT_FALSE(rewriter_->Rewrite(request_, &segments)); - rewriter_->Finish(&segments); + rewriter_->Finish(request_, &segments); EXPECT_STATS_NOT_EXIST(kStatsKey); // Converting an emoji candidate. SetSegment("Nezumi", "test", &segments); EXPECT_TRUE(rewriter_->Rewrite(request_, &segments)); ChooseEmojiCandidate(&segments); - rewriter_->Finish(&segments); + rewriter_->Finish(request_, &segments); EXPECT_COUNT_STATS(kStatsKey, 1); SetSegment(kEmoji, "test", &segments); EXPECT_TRUE(rewriter_->Rewrite(request_, &segments)); ChooseEmojiCandidate(&segments); - rewriter_->Finish(&segments); + rewriter_->Finish(request_, &segments); EXPECT_COUNT_STATS(kStatsKey, 2); // Converting non-emoji keeps the previous usage stats. SetSegment("test", "test", &segments); EXPECT_FALSE(rewriter_->Rewrite(request_, &segments)); - rewriter_->Finish(&segments); + rewriter_->Finish(request_, &segments); EXPECT_COUNT_STATS(kStatsKey, 2); } diff -Nru mozc-1.11.1502.102/rewriter/gen_single_kanji_rewriter_data.py mozc-1.11.1522.102/rewriter/gen_single_kanji_rewriter_data.py --- mozc-1.11.1502.102/rewriter/gen_single_kanji_rewriter_data.py 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/gen_single_kanji_rewriter_data.py 2013-08-28 05:26:13.000000000 +0000 @@ -114,11 +114,11 @@ def GenNounPrefix(): - """Generates noun prefix embedded dictioanry entries.""" + """Generates noun prefix embedded dictionary entries.""" token_map = {} for entry in NOUN_PREFIX: - key = entry[0] if entry[0] != "" else None - value = entry[1] if entry[1] != "" else None + key = entry[0] if entry[0] else None + value = entry[1] if entry[1] else None rank = entry[2] token_map.setdefault(key, []).append( diff -Nru mozc-1.11.1502.102/rewriter/merger_rewriter.h mozc-1.11.1522.102/rewriter/merger_rewriter.h --- mozc-1.11.1502.102/rewriter/merger_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/merger_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -105,9 +105,9 @@ } // Hook(s) for all mutable operations - virtual void Finish(Segments *segments) { + virtual void Finish(const ConversionRequest &request, Segments *segments) { for (size_t i = 0; i < rewriters_.size(); ++i) { - rewriters_[i]->Finish(segments); + rewriters_[i]->Finish(request, segments); } } diff -Nru mozc-1.11.1502.102/rewriter/merger_rewriter_test.cc mozc-1.11.1522.102/rewriter/merger_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/merger_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/merger_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -76,7 +76,7 @@ return return_value_; } - virtual void Finish(Segments *segments) { + virtual void Finish(const ConversionRequest &request, Segments *segments) { buffer_->append(name_ + ".Finish();"); } @@ -220,11 +220,12 @@ TEST_F(MergerRewriterTest, Finish) { string call_result; + const ConversionRequest request; MergerRewriter merger; merger.AddRewriter(new TestRewriter(&call_result, "a", false)); merger.AddRewriter(new TestRewriter(&call_result, "b", false)); merger.AddRewriter(new 
TestRewriter(&call_result, "c", false)); - merger.Finish(NULL); + merger.Finish(request, NULL); EXPECT_EQ("a.Finish();" "b.Finish();" "c.Finish();", diff -Nru mozc-1.11.1502.102/rewriter/number_rewriter_test.cc mozc-1.11.1522.102/rewriter/number_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/number_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/number_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -236,7 +236,7 @@ scoped_ptr number_rewriter(CreateNumberRewriter()); - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(test_data_list); ++i) { + for (size_t i = 0; i < arraysize(test_data_list); ++i) { TestData& test_data = test_data_list[i]; Segments segments; segments.set_request_type(test_data.request_type_); diff -Nru mozc-1.11.1502.102/rewriter/rewriter.cc mozc-1.11.1522.102/rewriter/rewriter.cc --- mozc-1.11.1502.102/rewriter/rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -93,7 +93,7 @@ AddRewriter(new UserDictionaryRewriter); AddRewriter(new FocusCandidateRewriter); - AddRewriter(new TransliterationRewriter(*pos_matcher)); + AddRewriter(new TransliterationRewriter(*pos_matcher, dictionary)); AddRewriter(new EnglishVariantsRewriter); AddRewriter(new NumberRewriter(pos_matcher)); AddRewriter(new CollocationRewriter(data_manager)); diff -Nru mozc-1.11.1502.102/rewriter/rewriter_interface.h mozc-1.11.1522.102/rewriter/rewriter_interface.h --- mozc-1.11.1502.102/rewriter/rewriter_interface.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/rewriter_interface.h 2013-08-28 05:26:13.000000000 +0000 @@ -71,7 +71,7 @@ } // Hook(s) for all mutable operations - virtual void Finish(Segments *segments) {} + virtual void Finish(const ConversionRequest &request, Segments *segments) {} // sync internal data to local file system. 
virtual bool Sync() { return true; } diff -Nru mozc-1.11.1502.102/rewriter/rewriter_test.gyp mozc-1.11.1522.102/rewriter/rewriter_test.gyp --- mozc-1.11.1502.102/rewriter/rewriter_test.gyp 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/rewriter_test.gyp 2013-08-28 05:26:13.000000000 +0000 @@ -56,7 +56,6 @@ 'remove_redundant_candidate_rewriter_test.cc', 'rewriter_test.cc', 'symbol_rewriter_test.cc', - 'transliteration_rewriter_test.cc', 'unicode_rewriter_test.cc', 'user_boundary_history_rewriter_test.cc', 'user_dictionary_rewriter_test.cc', @@ -129,6 +128,38 @@ 'test_size': 'small', }, }, + { + 'target_name': 'transliteration_rewriter_test', + 'type': 'executable', + 'sources': [ + 'transliteration_rewriter_test.cc', + ], + 'dependencies': [ + '../base/base.gyp:base', + '../composer/composer.gyp:composer', + '../data_manager/testing/mock_data_manager.gyp:mock_data_manager', + '../dictionary/dictionary.gyp:dictionary_mock', + '../session/session_base.gyp:request_test_util', + '../session/session_base.gyp:session_protocol', + '../testing/testing.gyp:gtest_main', + '../usage_stats/usage_stats_test.gyp:usage_stats_testing_util', + 'rewriter.gyp:rewriter', + ], + 'conditions': [ + ['use_packed_dictionary==1', { + 'dependencies': [ + '../data_manager/packed/packed_data_manager.gyp:gen_packed_data_header_mock#host', + ], + 'hard_dependency': 1, + 'export_dependent_settings': [ + '../data_manager/packed/packed_data_manager.gyp:gen_packed_data_header_mock#host', + ], + }], + ], + 'variables': { + 'test_size': 'small', + }, + }, # Test cases meta target: this target is referred from gyp/tests.gyp { 'target_name': 'rewriter_all_test', @@ -137,6 +168,7 @@ 'calculator/calculator.gyp:calculator_all_test', 'rewriter_test', 'single_kanji_rewriter_test', + 'transliteration_rewriter_test', ], }, ], diff -Nru mozc-1.11.1502.102/rewriter/transliteration_rewriter.cc mozc-1.11.1522.102/rewriter/transliteration_rewriter.cc --- mozc-1.11.1502.102/rewriter/transliteration_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/transliteration_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -40,10 +40,12 @@ #include "composer/composer.h" #include "converter/conversion_request.h" #include "converter/segments.h" +#include "dictionary/dictionary_interface.h" #include "dictionary/pos_matcher.h" #include "session/commands.pb.h" // For T13n normalize #include "transliteration/transliteration.h" +#include "usage_stats/usage_stats.h" namespace mozc { namespace { @@ -302,8 +304,11 @@ return modified; } -TransliterationRewriter::TransliterationRewriter(const POSMatcher &pos_matcher) - : unknown_id_(pos_matcher.GetUnknownId()) {} +TransliterationRewriter::TransliterationRewriter( + const POSMatcher &pos_matcher, + const DictionaryInterface *dictionary) + : unknown_id_(pos_matcher.GetUnknownId()), + dictionary_(dictionary) {} TransliterationRewriter::~TransliterationRewriter() {} @@ -312,6 +317,10 @@ if (request.request().mixed_conversion()) { return RewriterInterface::ALL; } + if (request.request().language_aware_input() != + mozc::commands::Request::NO_LANGUAGE_AWARE_INPUT) { + return RewriterInterface::ALL; + } return RewriterInterface::CONVERSION; } @@ -376,18 +385,157 @@ return true; } +namespace { +bool IsRawQuery(const composer::Composer &composer, + const DictionaryInterface *dictionary) { + string raw_text; + composer.GetRawString(&raw_text); + + // Check if the length of text is less than or equal to three. 
+ // For example, "cat" is not treated as a raw query so far to avoid + // false negative cases. + if (raw_text.size() <= 3) { + return false; + } + + // If alphabet characters are in the middle of the composition, it is + // probably a raw query. For example, "えぁmpぇ" (example) contains + // "m" and "p" in the middle. So it is treated as a raw query. On the + // other hand, "くえry" (query) contains alphabet characters, but they + // are at the end of the string, so it cannot be determined here. + // + // Note, GetQueryForPrediction omits the trailing alphabet characters of + // the composition string and returns it. + string key; + composer.GetQueryForPrediction(&key); + if (Util::ContainsScriptType(key, Util::ALPHABET)) { + return true; + } + + // If the input text is stored in the dictionary, it is perhaps a raw query. + // For example, the input characters of "れもヴぇ" (remove) is in the + // dictionary, so it is treated as a raw text. This logic is a little + // aggressive because "たけ" (take), "ほうせ" (house) and so forth are also + // treated as raw texts. + if (dictionary->HasValue(raw_text)) { + return true; + } + + return false; +} +} // namespace + + +bool TransliterationRewriter::FillRawText( + const ConversionRequest &request, Segments *segments) const { + if (segments->conversion_segments_size() != 1 || !request.has_composer()) { + return false; + } + + if (!IsRawQuery(request.composer(), dictionary_)) { + return false; + } + + Segment *segment = segments->mutable_conversion_segment(0); + + T13nIds ids; + GetIds(*segment, &ids); + + // TODO(komatsu): GetRawString is expensive. Optimize this function or + // reuse it for IsRawQuery. + string raw_string; + request.composer().GetRawString(&raw_string); + + Segment::Candidate *candidate = segment->push_front_candidate(); + InitT13nCandidate(raw_string, raw_string, + ids.ascii_lid, ids.ascii_rid, + candidate); + candidate->attributes |= (Segment::Candidate::NO_VARIANTS_EXPANSION | + Segment::Candidate::NO_EXTRA_DESCRIPTION); + candidate->prefix = "\xE2\x86\x92 "; // "→ " + candidate->description = + // "もしかして" + "\xE3\x82\x82\xE3\x81\x97\xE3\x81\x8B\xE3\x81\x97\xE3\x81\xA6"; + + // Set usage stats + usage_stats::UsageStats::IncrementCount("LanguageAwareSuggestionTriggered"); + + return true; +} + + bool TransliterationRewriter::Rewrite( const ConversionRequest &request, Segments *segments) const { + bool modified = false; + // The current default value of language_aware_input is + // NO_LANGUAGE_AWARE_INPUT and only unittests set LANGUAGE_AWARE_SUGGESTION + // at this moment. Thus, FillRawText is not performed in the productions + // yet. + if (request.request().language_aware_input() == + mozc::commands::Request::LANGUAGE_AWARE_SUGGESTION) { + modified |= FillRawText(request, segments); + } + if (request.skip_slow_rewriters()) { - return false; + return modified; + } + + if (IsComposerApplicable(request, segments)) { + modified |= FillT13nsFromComposer(request, segments); + } else { + modified |= FillT13nsFromKey(segments); } - bool modified = IsComposerApplicable(request, segments) - ? FillT13nsFromComposer(request, segments) - : FillT13nsFromKey(segments); modified |= AddRawNumberT13nCandidates(request, segments); + return modified; } +namespace { +bool IsLangaugeAwareInputCandidate(const composer::Composer &composer, + const Segment::Candidate &candidate) { + // Check candidate.prefix to filter if the candidate is probably generated + // from LanguangeAwareInput or not. 
+ // + // "→ " + if (candidate.prefix != "\xE2\x86\x92 ") { + return false; + } + + string raw_string; + composer.GetRawString(&raw_string); + if (raw_string != candidate.value) { + return false; + } + return true; +} +} // namespace + +void TransliterationRewriter::Finish(const ConversionRequest &request, + Segments *segments) { + // Finish is used for Lanaugage aware input only at this moment. + if (request.request().language_aware_input() != + mozc::commands::Request::LANGUAGE_AWARE_SUGGESTION) { + return; + } + + if (segments->conversion_segments_size() != 1 || !request.has_composer()) { + return; + } + + // Update usage stats + const Segment &segment = segments->conversion_segment(0); + // Ignores segments which are not converted or not committed. + if (segment.candidates_size() == 0 || + segment.segment_type() != Segment::FIXED_VALUE) { + return; + } + + if (IsLangaugeAwareInputCandidate(request.composer(), + segment.candidate(0))) { + usage_stats::UsageStats::IncrementCount("LanguageAwareSuggestionCommitted"); + } +} + void TransliterationRewriter::InitT13nCandidate( const string &key, const string &value, diff -Nru mozc-1.11.1502.102/rewriter/transliteration_rewriter.h mozc-1.11.1522.102/rewriter/transliteration_rewriter.h --- mozc-1.11.1502.102/rewriter/transliteration_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/transliteration_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -39,13 +39,15 @@ namespace mozc { class ConversionRequest; +class DictionaryInterface; class POSMatcher; class Segments; class Segment; class TransliterationRewriter : public RewriterInterface { public: - explicit TransliterationRewriter(const POSMatcher &pos_matcher); + explicit TransliterationRewriter(const POSMatcher &pos_matcher, + const DictionaryInterface *dictionary); virtual ~TransliterationRewriter(); virtual int capability(const ConversionRequest &request) const; @@ -53,17 +55,22 @@ virtual bool Rewrite(const ConversionRequest &request, Segments *segments) const; + virtual void Finish(const ConversionRequest &request, Segments *segments); + private: void InitT13nCandidate(const string &key, const string &value, uint16 lid, uint16 rid, Segment::Candidate *cand) const; - // Set transliteration values into segment. If t13ns is invalid, + // Sets transliteration values into segment. If t13ns is invalid, // false is returned. bool SetTransliterations(const vector &t13ns, const string &key, Segment *segment) const; + // Fills the raw text if the query does not look like Japanese. 
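IsRawQuery above decides whether the composition was probably typed as raw ASCII text: very short input is rejected, input whose prediction key still contains alphabet characters in the middle is accepted, and otherwise the raw text is looked up in the dictionary. A stand-alone sketch of the same three-step heuristic, with a plain std::set standing in for DictionaryInterface::HasValue and plain strings standing in for the composer (the kana handling of the real code is not reproduced):

#include <cctype>
#include <set>
#include <string>

// True if any ASCII alphabet character remains in |prediction_key|
// (a rough stand-in for Util::ContainsScriptType(key, Util::ALPHABET)).
bool ContainsAlphabet(const std::string &prediction_key) {
  for (char c : prediction_key) {
    if (std::isalpha(static_cast<unsigned char>(c))) return true;
  }
  return false;
}

// |raw_text|: what was physically typed ("python", "mozuk", "query").
// |prediction_key|: the composition with trailing alphabets dropped; for
// "python" it still contains "p" and "y" in the middle, for "mozuk" no
// alphabet survives once the trailing "k" is omitted.
// |dictionary|: stand-in for DictionaryInterface::HasValue.
bool IsRawQuery(const std::string &raw_text,
                const std::string &prediction_key,
                const std::set<std::string> &dictionary) {
  if (raw_text.size() <= 3) return false;             // too short ("cat"): skip
  if (ContainsAlphabet(prediction_key)) return true;  // "python" case
  return dictionary.count(raw_text) > 0;              // "query" is a known word
}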
+ bool FillRawText(const ConversionRequest &request, + Segments *segments) const; bool FillT13nsFromComposer(const ConversionRequest &request, Segments *segments) const; bool FillT13nsFromKey(Segments *segments) const; @@ -71,6 +78,7 @@ Segments *segments) const; const uint16 unknown_id_; + const DictionaryInterface *dictionary_; }; } // namespace mozc diff -Nru mozc-1.11.1502.102/rewriter/transliteration_rewriter_test.cc mozc-1.11.1522.102/rewriter/transliteration_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/transliteration_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/transliteration_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -48,10 +48,13 @@ #include "data_manager/packed/packed_data_mock.h" #endif // MOZC_USE_PACKED_DICTIONARY #include "data_manager/user_pos_manager.h" +#include "dictionary/dictionary_mock.h" #include "dictionary/pos_matcher.h" #include "session/commands.pb.h" #include "testing/base/public/gunit.h" #include "transliteration/transliteration.h" +#include "usage_stats/usage_stats.h" +#include "usage_stats/usage_stats_testing_util.h" DECLARE_string(test_tmpdir); @@ -82,6 +85,7 @@ virtual ~TransliterationRewriterTest() {} virtual void SetUp() { + usage_stats::UsageStats::ClearAllStatsForTest(); #ifdef MOZC_USE_PACKED_DICTIONARY // Registers mocked PackedDataManager. scoped_ptr @@ -93,6 +97,7 @@ SystemUtil::SetUserProfileDirectory(FLAGS_test_tmpdir); config::ConfigHandler::GetDefaultConfig(&default_config_); config::ConfigHandler::SetConfig(default_config_); + dictionary_mock_.reset(new DictionaryMock); } virtual void TearDown() { @@ -102,11 +107,14 @@ // Unregisters mocked PackedDataManager. packed::RegisterPackedDataManager(NULL); #endif // MOZC_USE_PACKED_DICTIONARY + dictionary_mock_.reset(NULL); + usage_stats::UsageStats::ClearAllStatsForTest(); } TransliterationRewriter *CreateTransliterationRewriter() const { return new TransliterationRewriter( - *UserPosManager::GetUserPosManager()->GetPOSMatcher()); + *UserPosManager::GetUserPosManager()->GetPOSMatcher(), + dictionary_mock_.get()); } const commands::Request &default_request() const { @@ -117,6 +125,9 @@ return default_config_; } + scoped_ptr dictionary_mock_; + usage_stats::scoped_usage_stats_enabler usage_stats_enabler_; + private: const commands::Request default_request_; config::Config default_config_; @@ -1118,4 +1129,178 @@ EXPECT_EQ("s", seg.meta_candidate(transliteration::HALF_ASCII).value); } } + +namespace { +bool RewriteWithLanguageAwareInput(const TransliterationRewriter *rewriter, + const string &key, + string *composition, + Segments *segments) { + commands::Request client_request; + client_request.set_language_aware_input( + commands::Request::LANGUAGE_AWARE_SUGGESTION); + + composer::Table table; + config::Config default_config; + table.InitializeWithRequestAndConfig(client_request, default_config); + + composer::Composer composer(&table, &client_request); + InsertASCIISequence(key, &composer); + composer.GetStringForPreedit(composition); + + // Perform the rewrite command. 
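FillRawText and the Finish hook above share one convention: the language-aware candidate is marked with the prefix bytes "\xE2\x86\x92 " ("→ ") and described with the bytes for "もしかして", and on commit the same prefix plus an exact match against the raw composition is what increments LanguageAwareSuggestionCommitted. A small sketch of that build/recognize pair with a stand-in Candidate struct (not the real Segment::Candidate):

#include <string>

// UTF-8 literals used by the patch: "\xE2\x86\x92 " is "→ " and the
// description bytes spell "もしかして" ("did you mean").
const char kLanguageAwarePrefix[] = "\xE2\x86\x92 ";

struct Candidate {  // stand-in for Segment::Candidate
  std::string key, value, prefix, description;
};

// Mirrors FillRawText: the raw ASCII text becomes a "did you mean" candidate.
Candidate MakeLanguageAwareCandidate(const std::string &raw_text) {
  Candidate c;
  c.key = c.value = raw_text;
  c.prefix = kLanguageAwarePrefix;
  c.description =
      "\xE3\x82\x82\xE3\x81\x97\xE3\x81\x8B\xE3\x81\x97\xE3\x81\xA6";
  return c;
}

// Mirrors the check performed in Finish: the committed candidate is counted
// only if it carries the arrow prefix and equals the raw composition.
bool IsCommittedLanguageAwareCandidate(const Candidate &c,
                                       const std::string &raw_text) {
  return c.prefix == kLanguageAwarePrefix && c.value == raw_text;
}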
+ segments->set_request_type(Segments::SUGGESTION); + Segment *segment = segments->add_segment(); + segment->set_key(*composition); + ConversionRequest request(&composer, &client_request); + + return rewriter->Rewrite(request, segments); +} +} // namespace + +TEST_F(TransliterationRewriterTest, LanguageAwareInput) { + dictionary_mock_->AddLookupExact("query", "query", "query", 0); + + scoped_ptr t13n_rewriter( + CreateTransliterationRewriter()); + + const string &kPrefix = "\xE2\x86\x92 "; // "→ " + const string &kDidYouMean = + // "もしかして" + "\xE3\x82\x82\xE3\x81\x97\xE3\x81\x8B\xE3\x81\x97\xE3\x81\xA6"; + + { + // "python" is composed to "pyてょn", but "python" should be suggested, + // because alphabet characters are in the middle of the word. + string composition; + Segments segments; + EXPECT_TRUE(RewriteWithLanguageAwareInput(t13n_rewriter.get(), "python", + &composition, &segments)); + + // "pyてょn" + EXPECT_EQ("\xEF\xBD\x90\xEF\xBD\x99\xE3\x81\xA6\xE3\x82\x87\xEF\xBD\x8E", + composition); + const Segment::Candidate &candidate = + segments.conversion_segment(0).candidate(0); + EXPECT_EQ("python", candidate.key); + EXPECT_EQ("python", candidate.value); + EXPECT_EQ(kPrefix, candidate.prefix); + EXPECT_EQ(kDidYouMean, candidate.description); + } + + { + // "mozuk" is composed to "もずk", then "mozuk" is not suggested. + // The tailing alphabet characters are not counted. + string composition; + Segments segments; + EXPECT_TRUE(RewriteWithLanguageAwareInput(t13n_rewriter.get(), "mozuk", + &composition, &segments)); + + // "もずk" + EXPECT_EQ("\xE3\x82\x82\xE3\x81\x9A\xEF\xBD\x8B", composition); + EXPECT_EQ(0, segments.conversion_segment(0).candidates_size()); + } + + { + // "query" is composed to "くえry". Since "query" is in the dictionary + // dislike the above "mozuk" case, "query" should be suggested. + string composition; + Segments segments; + EXPECT_TRUE(RewriteWithLanguageAwareInput(t13n_rewriter.get(), "query", + &composition, &segments)); + + // "くえry" + EXPECT_EQ("\xE3\x81\x8F\xE3\x81\x88\xEF\xBD\x92\xEF\xBD\x99", composition); + const Segment::Candidate &candidate = + segments.conversion_segment(0).candidate(0); + EXPECT_EQ("query", candidate.key); + EXPECT_EQ("query", candidate.value); + EXPECT_EQ(kPrefix, candidate.prefix); + EXPECT_EQ(kDidYouMean, candidate.description); + } +} + +TEST_F(TransliterationRewriterTest, LanguageAwareInputUsageStats) { + scoped_ptr t13n_rewriter( + CreateTransliterationRewriter()); + + usage_stats::UsageStats::ClearAllStatsForTest(); + uint32 triggered = 0; + EXPECT_TRUE(usage_stats::UsageStats::IsListed( + "LanguageAwareSuggestionTriggered")); + EXPECT_FALSE(usage_stats::UsageStats::GetCountForTest( + "LanguageAwareSuggestionTriggered", &triggered)); + EXPECT_EQ(0, triggered); + + uint32 committed = 0; + EXPECT_TRUE(usage_stats::UsageStats::IsListed( + "LanguageAwareSuggestionCommitted")); + EXPECT_FALSE(usage_stats::UsageStats::GetCountForTest( + "LanguageAwareSuggestionCommitted", &committed)); + EXPECT_EQ(0, committed); + + const string kPyTeyoN = + // "pyてょn" + "\xEF\xBD\x90\xEF\xBD\x99\xE3\x81\xA6\xE3\x82\x87\xEF\xBD\x8E"; + + { + // "python" is composed to "pyてょn", but "python" should be suggested, + // because alphabet characters are in the middle of the word. 
+ string composition; + Segments segments; + EXPECT_TRUE(RewriteWithLanguageAwareInput(t13n_rewriter.get(), "python", + &composition, &segments)); + EXPECT_EQ(kPyTeyoN, composition); + + const Segment::Candidate &candidate = + segments.conversion_segment(0).candidate(0); + EXPECT_EQ("python", candidate.key); + EXPECT_EQ("python", candidate.value); + + EXPECT_TRUE(usage_stats::UsageStats::GetCountForTest( + "LanguageAwareSuggestionTriggered", &triggered)); + EXPECT_EQ(1, triggered); + } + + { + // Call Rewrite with "python" again, then call Finish. Both ...Triggered + // and ...Committed should be incremented. + // Note, RewriteWithLanguageAwareInput is not used here, because + // Finish also requires ConversionRequest. + string composition; + Segments segments; + + commands::Request client_request; + client_request.set_language_aware_input( + commands::Request::LANGUAGE_AWARE_SUGGESTION); + + composer::Table table; + config::Config default_config; + table.InitializeWithRequestAndConfig(client_request, default_config); + + composer::Composer composer(&table, &client_request); + InsertASCIISequence("python", &composer); + composer.GetStringForPreedit(&composition); + EXPECT_EQ(kPyTeyoN, composition); + + // Perform the rewrite command. + segments.set_request_type(Segments::SUGGESTION); + Segment *segment = segments.add_segment(); + segment->set_key(composition); + ConversionRequest request(&composer, &client_request); + + EXPECT_TRUE(t13n_rewriter->Rewrite(request, &segments)); + + usage_stats::UsageStats::GetCountForTest( + "LanguageAwareSuggestionTriggered", &triggered); + EXPECT_EQ(2, triggered); + + segment->set_segment_type(Segment::FIXED_VALUE); + EXPECT_LT(0, segment->candidates_size()); + + t13n_rewriter->Finish(request, &segments); + usage_stats::UsageStats::GetCountForTest( + "LanguageAwareSuggestionCommitted", &committed); + EXPECT_EQ(1, committed); + } +} } // namespace mozc diff -Nru mozc-1.11.1502.102/rewriter/unicode_rewriter_test.cc mozc-1.11.1522.102/rewriter/unicode_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/unicode_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/unicode_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -169,14 +169,14 @@ } // Mozc accepts Japanese characters - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kUcs4Utf8Data); ++i) { + for (size_t i = 0; i < arraysize(kUcs4Utf8Data); ++i) { InitSegments(kUcs4Utf8Data[i].ucs4, kUcs4Utf8Data[i].ucs4, &segments); EXPECT_TRUE(rewriter.Rewrite(request, &segments)); EXPECT_TRUE(ContainCandidate(segments, kUcs4Utf8Data[i].utf8)); } // Mozc does not accept other characters - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kMozcUnsupportedUtf8); ++i) { + for (size_t i = 0; i < arraysize(kMozcUnsupportedUtf8); ++i) { InitSegments(kMozcUnsupportedUtf8[i], kMozcUnsupportedUtf8[i], &segments); EXPECT_FALSE(rewriter.Rewrite(request, &segments)); } diff -Nru mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter.cc mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter.cc --- mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -120,7 +120,8 @@ UserBoundaryHistoryRewriter::~UserBoundaryHistoryRewriter() {} -void UserBoundaryHistoryRewriter::Finish(Segments *segments) { +void UserBoundaryHistoryRewriter::Finish(const ConversionRequest &request, + Segments *segments) { if (segments->request_type() != Segments::CONVERSION) { return; } @@ 
-147,10 +148,7 @@ } if (segments->resized()) { - // |ResizeOrInsert| does NOT call Converter::ResizeSegment since we pass - // INSERT as an argument, so we can use dummy ConversionRequest. - const ConversionRequest default_request; - ResizeOrInsert(segments, default_request, INSERT); + ResizeOrInsert(segments, request, INSERT); #ifdef OS_ANDROID // TODO(hidehiko): UsageStats requires some functionalities, e.g. network, // which are not needed for mozc's main features. diff -Nru mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter.h mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter.h --- mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -57,7 +57,7 @@ virtual bool Rewrite(const ConversionRequest &request, Segments *segments) const; - virtual void Finish(Segments *segments); + virtual void Finish(const ConversionRequest &request, Segments *segments); virtual bool Reload(); diff -Nru mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter_test.cc mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/user_boundary_history_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_boundary_history_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -154,7 +154,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -199,7 +199,7 @@ segments.set_resized(true); segments.set_user_history_enabled(true); - rewriter.Finish(&segments); + rewriter.Finish(default_request_, &segments); const string segments_str = segments.DebugString(); Segments bounded_segments; @@ -242,7 +242,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -266,7 +266,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -289,7 +289,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(false); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -312,7 +312,7 @@ bounded_segments.set_resized(false); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -335,7 +335,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -361,7 +361,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, 
&bounded_segments); const string bounded_segments_str = bounded_segments.DebugString(); Segments segments; @@ -385,7 +385,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str =bounded_segments.DebugString(); Segments segments; @@ -409,7 +409,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str =bounded_segments.DebugString(); Segments segments; @@ -432,7 +432,7 @@ bounded_segments.set_resized(true); bounded_segments.set_user_history_enabled(true); - rewriter.Finish(&bounded_segments); + rewriter.Finish(default_request_, &bounded_segments); const string bounded_segments_str =bounded_segments.DebugString(); Segments segments; diff -Nru mozc-1.11.1502.102/rewriter/user_dictionary_rewriter.cc mozc-1.11.1522.102/rewriter/user_dictionary_rewriter.cc --- mozc-1.11.1502.102/rewriter/user_dictionary_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_dictionary_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -71,7 +71,7 @@ continue; } - // find the final destination of user dictioanry + // find the final destination of user dictionary // from [move_to_start .. move_from). int move_to = -1; for (int j = move_to_start; j < static_cast(move_from); ++j) { diff -Nru mozc-1.11.1502.102/rewriter/user_segment_history_rewriter.cc mozc-1.11.1522.102/rewriter/user_segment_history_rewriter.cc --- mozc-1.11.1502.102/rewriter/user_segment_history_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_segment_history_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -782,7 +782,8 @@ return true; } -void UserSegmentHistoryRewriter::Finish(Segments *segments) { +void UserSegmentHistoryRewriter::Finish(const ConversionRequest &request, + Segments *segments) { if (segments->request_type() != Segments::CONVERSION) { return; } diff -Nru mozc-1.11.1502.102/rewriter/user_segment_history_rewriter.h mozc-1.11.1522.102/rewriter/user_segment_history_rewriter.h --- mozc-1.11.1502.102/rewriter/user_segment_history_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_segment_history_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -62,7 +62,7 @@ virtual bool Rewrite(const ConversionRequest &request, Segments *segments) const; - virtual void Finish(Segments *segments); + virtual void Finish(const ConversionRequest &request, Segments *segments); virtual bool Reload(); diff -Nru mozc-1.11.1502.102/rewriter/user_segment_history_rewriter_test.cc mozc-1.11.1522.102/rewriter/user_segment_history_rewriter_test.cc --- mozc-1.11.1502.102/rewriter/user_segment_history_rewriter_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/user_segment_history_rewriter_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -67,11 +67,13 @@ for (size_t i = 0; i < size; ++i) { Segment *segment = segments->add_segment(); CHECK(segment); - segment->set_key(string("segment") + NumberUtil::SimpleItoa(i)); + segment->set_key(string("segment") + + NumberUtil::SimpleItoa(static_cast(i))); for (size_t j = 0; j < candidate_size; ++j) { Segment::Candidate *c = segment->add_candidate(); c->content_key = segment->key(); - c->content_value = string("candidate") + NumberUtil::SimpleItoa(j); + c->content_value = 
string("candidate") + + NumberUtil::SimpleItoa(static_cast(j)); c->value = c->content_value; if (j == 0) { c->attributes |= Segment::Candidate::BEST_CANDIDATE; @@ -215,7 +217,7 @@ segments.Clear(); const ConversionRequest default_request; EXPECT_FALSE(rewriter->Rewrite(default_request, &segments)); - rewriter->Finish(&segments); + rewriter->Finish(default_request, &segments); } TEST_F(UserSegmentHistoryRewriterTest, IncognitoModeTest) { @@ -232,7 +234,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate2", @@ -253,7 +255,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -275,7 +277,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate2", @@ -301,7 +303,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -324,7 +326,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate2", @@ -350,7 +352,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); segments.set_user_history_enabled(true); @@ -375,7 +377,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); rewriter->Rewrite(request, &segments); @@ -392,7 +394,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -410,7 +412,7 @@ segments.mutable_segment(1)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(1)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); rewriter->Rewrite(request, &segments); @@ -428,7 +430,7 @@ 
segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); rewriter->Rewrite(request, &segments); @@ -447,7 +449,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -480,7 +482,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); EXPECT_EQ("candidate2", segments.segment(0).candidate(0).value); - rewriter->Finish(&segments); // learn "candidate2" + rewriter->Finish(request, &segments); // learn "candidate2" // Next timestamp of learning should be newer than previous learning. clock.PutClockForward(1, 0); @@ -504,7 +506,7 @@ segments.mutable_segment(1)->set_segment_type(Segment::FIXED_VALUE); EXPECT_EQ("candidate3", segments.segment(1).candidate(0).value); - rewriter->Finish(&segments); // learn "candidate3" + rewriter->Finish(request, &segments); // learn "candidate3" clock.PutClockForward(1, 0); @@ -532,7 +534,7 @@ segments.mutable_segment(2)->set_segment_type(Segment::FIXED_VALUE); EXPECT_EQ("candidate2", segments.segment(2).candidate(0).value); - rewriter->Finish(&segments); // learn "candidate2" + rewriter->Finish(request, &segments); // learn "candidate2" clock.PutClockForward(1, 0); @@ -581,7 +583,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -594,7 +596,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); clock.PutClockForward(1, 0); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -608,7 +610,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); clock.PutClockForward(1, 0); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -637,7 +639,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::NO_LEARNING; - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -653,7 +655,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::NO_HISTORY_LEARNING; - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -669,7 +671,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::NO_SUGGEST_LEARNING; - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate2", @@ 
-693,7 +695,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::CONTEXT_SENSITIVE; - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); rewriter->Rewrite(request, &segments); @@ -716,7 +718,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::CONTEXT_SENSITIVE; - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); // fire if even in single segment InitSegments(&segments, 1); @@ -749,7 +751,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); @@ -790,7 +792,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -807,7 +809,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ("candidate0", @@ -824,7 +826,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":other", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":other", 1); @@ -843,7 +845,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, "", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":other", 0); @@ -870,7 +872,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); @@ -895,7 +897,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); @@ -919,7 +921,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":all", 0); @@ -937,7 +939,7 @@ 
segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":all", 1); @@ -955,7 +957,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":all", 0); @@ -973,7 +975,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 0, ":all", 0); AppendCandidateSuffixWithLid(segments.mutable_segment(0), 2, ":all", 1); @@ -1000,7 +1002,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffix(segments.mutable_segment(0), 0, ":all", 1, 1); @@ -1032,7 +1034,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffix(segments.mutable_segment(0), 0, ":all", 1, 1); @@ -1065,7 +1067,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); AppendCandidateSuffix(segments.mutable_segment(0), 0, ":all", 1, 1); @@ -1099,7 +1101,7 @@ segments.mutable_segment(1)->mutable_candidate(0)->attributes |= Segment::Candidate::CONTEXT_SENSITIVE; segments.mutable_segment(1)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); EXPECT_EQ("1234", segments.segment(0).candidate(0).value); EXPECT_EQ("candidate2", @@ -1125,7 +1127,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::CONTEXT_SENSITIVE; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); EXPECT_EQ("candidate2", segments.segment(0).candidate(0).value); EXPECT_EQ("1234", @@ -1162,7 +1164,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } { @@ -1202,7 +1204,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } { @@ -1217,7 +1219,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - 
rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } { @@ -1265,7 +1267,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } { @@ -1317,7 +1319,7 @@ candidate->style = NumberUtil::NumberString::NUMBER_SEPARATED_ARABIC_FULLWIDTH; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); // full-width for separated number + rewriter->Finish(request, &segments); // full-width for separated number } { @@ -1367,7 +1369,7 @@ candidate->style = NumberUtil::NumberString::NUMBER_SEPARATED_ARABIC_HALFWIDTH; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); // half-width for separated number + rewriter->Finish(request, &segments); // half-width for separated number } { @@ -1416,7 +1418,7 @@ candidate->rid = pos_matcher().GetNumberId(); candidate->style = NumberUtil::NumberString::NUMBER_KANJI; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); // learn kanji + rewriter->Finish(request, &segments); // learn kanji } { segments.Clear(); @@ -1432,7 +1434,7 @@ candidate->style = NumberUtil::NumberString::NUMBER_SEPARATED_ARABIC_HALFWIDTH; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); // learn kanji + rewriter->Finish(request, &segments); // learn kanji } { @@ -1474,7 +1476,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -1488,7 +1490,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); clock.PutClockForward(1, 0); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -1502,7 +1504,7 @@ |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); clock.PutClockForward(1, 0); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); @@ -1531,7 +1533,7 @@ segments.mutable_segment(1)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(1)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); rewriter->Rewrite(request, &segments); @@ -1563,7 +1565,7 @@ segments.mutable_segment(1)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(1)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2); segments.mutable_segment(1)->set_key("."); @@ -1591,7 +1593,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 2, 1024); rewriter->Rewrite(request, &segments); @@ -1624,7 +1626,7 @@ segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); 
EXPECT_EQ(expected, segments.segment(0).candidate(0).value); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); InitSegments(&segments, 1); rewriter->Rewrite(request, &segments); EXPECT_EQ(expected, @@ -1658,7 +1660,7 @@ segments.mutable_segment(0)->mutable_candidate(0)->attributes |= Segment::Candidate::RERANKED; segments.mutable_segment(0)->set_segment_type(Segment::FIXED_VALUE); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } { @@ -1686,7 +1688,7 @@ } EXPECT_EQ(expectation, segments.segment(0).candidate(0).description); - rewriter->Finish(&segments); + rewriter->Finish(request, &segments); } } } // namespace mozc diff -Nru mozc-1.11.1502.102/rewriter/variants_rewriter.cc mozc-1.11.1522.102/rewriter/variants_rewriter.cc --- mozc-1.11.1502.102/rewriter/variants_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/variants_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -434,7 +434,8 @@ return modified; } -void VariantsRewriter::Finish(Segments *segments) { +void VariantsRewriter::Finish(const ConversionRequest &request, + Segments *segments) { if (segments->request_type() != Segments::CONVERSION) { return; } diff -Nru mozc-1.11.1502.102/rewriter/variants_rewriter.h mozc-1.11.1522.102/rewriter/variants_rewriter.h --- mozc-1.11.1502.102/rewriter/variants_rewriter.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/variants_rewriter.h 2013-08-28 05:26:13.000000000 +0000 @@ -58,7 +58,7 @@ virtual int capability(const ConversionRequest &request) const; virtual bool Rewrite(const ConversionRequest &request, Segments *segments) const; - virtual void Finish(Segments *segments); + virtual void Finish(const ConversionRequest &request, Segments *segments); virtual void Clear(); // Used by UserSegmentHistoryRewriter. 
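Several test hunks above replace ARRAYSIZE_UNSAFE with arraysize; the latter is the template-based element count that refuses to compile when handed a pointer instead of silently dividing sizeofs. A sketch of the conventional definition (the exact form in mozc's base library may differ slightly):

#include <cstddef>

// Classic type-safe array-length idiom: the helper is only declared, never
// defined; sizeof of its reference-to-array return type yields N at compile
// time, and passing a pointer fails to compile.
template <typename T, std::size_t N>
char (&ArraySizeHelper(T (&array)[N]))[N];

#define arraysize(array) (sizeof(ArraySizeHelper(array)))

// Usage, as in the test files above:
//   static const int kTestData[] = {1, 2, 3};
//   for (size_t i = 0; i < arraysize(kTestData); ++i) { /* ... */ }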
diff -Nru mozc-1.11.1502.102/rewriter/version_rewriter.cc mozc-1.11.1522.102/rewriter/version_rewriter.cc --- mozc-1.11.1502.102/rewriter/version_rewriter.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/rewriter/version_rewriter.cc 2013-08-28 05:26:13.000000000 +0000 @@ -98,7 +98,7 @@ const string &version_string = kVersionRewriterVersionPrefix + Version::GetMozcVersion(); - for (int i = 0; i < ARRAYSIZE_UNSAFE(kKeyCandList); ++i) { + for (int i = 0; i < arraysize(kKeyCandList); ++i) { entries_[kKeyCandList[i].key] = new VersionEntry(kKeyCandList[i].base_candidate, version_string, 9); diff -Nru mozc-1.11.1502.102/server/mozc_cache_service.exe.manifest mozc-1.11.1522.102/server/mozc_cache_service.exe.manifest --- mozc-1.11.1502.102/server/mozc_cache_service.exe.manifest 2013-07-17 02:37:41.000000000 +0000 +++ mozc-1.11.1522.102/server/mozc_cache_service.exe.manifest 2013-08-28 05:25:53.000000000 +0000 @@ -9,6 +9,8 @@ + + diff -Nru mozc-1.11.1502.102/server/mozc_server.exe.manifest mozc-1.11.1522.102/server/mozc_server.exe.manifest --- mozc-1.11.1502.102/server/mozc_server.exe.manifest 2013-07-17 02:37:41.000000000 +0000 +++ mozc-1.11.1522.102/server/mozc_server.exe.manifest 2013-08-28 05:25:53.000000000 +0000 @@ -9,6 +9,8 @@ + + diff -Nru mozc-1.11.1502.102/server/server.gyp mozc-1.11.1522.102/server/server.gyp --- mozc-1.11.1502.102/server/server.gyp 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/server/server.gyp 2013-08-28 05:25:59.000000000 +0000 @@ -95,7 +95,7 @@ 'msvs_settings': { 'VCManifestTool': { 'AdditionalManifestFiles': 'mozc_server.exe.manifest', - 'EmbedManifest': 'false', + 'EmbedManifest': 'true', }, 'VCLinkerTool': { 'DelayLoadDLLs': [ @@ -212,7 +212,7 @@ 'msvs_settings': { 'VCManifestTool': { 'AdditionalManifestFiles': 'mozc_cache_service.exe.manifest', - 'EmbedManifest': 'false', + 'EmbedManifest': 'true', }, 'VCLinkerTool': { 'AdditionalDependencies': [ diff -Nru mozc-1.11.1502.102/session/commands.proto mozc-1.11.1522.102/session/commands.proto --- mozc-1.11.1502.102/session/commands.proto 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/commands.proto 2013-08-28 05:26:12.000000000 +0000 @@ -124,7 +124,7 @@ DECIMAL = 61; // Numpad [.] DIVIDE = 62; // Numpad [/] EQUALS = 63; // Numpad [=] - ASCII = 64; + TEXT_INPUT = 64; // Meta key event representing any text input. HANKAKU = 65; KANJI = 66; KATAKANA = 67; // VK_DBE_KATAKANA(Win) @@ -398,9 +398,9 @@ // suggest window on the correct position, this message can be ignored. SEND_CARET_LOCATION = 16; - // Send a command from language bar. - // Exact command is specified by language_bar_command_id. - SEND_LANGUAGE_BAR_COMMAND = 17; + // Obsolete command. Don't simply remove this command for NUM_OF_COMMANDS. + // TODO(team): Replace this command by useful one. + OBSOLETE_SEND_LANGUAGE_BAR_COMMAND = 17; // When the server is hadling asynchronous request, the server returns the // message with callback request which session_command is GET_ASYNC_RESULT. @@ -410,10 +410,18 @@ // Commit the raw text of the composed string. COMMIT_RAW_TEXT = 19; + // Calls ConvertPrevPage session command to show the previous page of + // candidates. + CONVERT_PREV_PAGE = 20; + + // Calls ConvertNextPage session command to show the next page of + // candidates. + CONVERT_NEXT_PAGE = 21; + // Number of commands. // When new command is added, the command should use below number // and NUM_OF_COMMANDS should be incremented. 
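The commands.proto hunk here renames the special key ASCII to TEXT_INPUT, a meta key standing for any text input, and key_parser.cc further down keeps the old "ascii" spelling as a deprecated alias so existing keymap files keep working. A sketch of that alias table with a stand-in enum (SpecialKey here is hypothetical, not the generated proto type):

#include <map>
#include <string>

enum SpecialKey { TEXT_INPUT, SPACE, TAB };  // stand-in for KeyEvent::SpecialKey

// Both the old and the new spellings resolve to the same enum value, so a
// keymap file that still says "ASCII" keeps working after the rename.
std::map<std::string, SpecialKey> BuildKeycodeMap() {
  std::map<std::string, SpecialKey> m;
  m["ascii"] = TEXT_INPUT;      // deprecated spelling, kept for compatibility
  m["textinput"] = TEXT_INPUT;  // new canonical spelling
  m["space"] = SPACE;
  m["tab"] = TAB;
  return m;
}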
- NUM_OF_COMMANDS = 20; + NUM_OF_COMMANDS = 22; }; required CommandType type = 1; @@ -444,15 +452,6 @@ // position calculation. Used with SEND_CARET_LOCATION. optional Rectangle caret_rectangle = 8; - // Specifies the command id which is sent from language bar. - enum LanguageBarCommandId { - TOGGLE_PINYIN_CHINESE_MODE = 1; - TOGGLE_PINYIN_FULL_WIDTH_WORD_MODE = 2; - TOGGLE_PINYIN_FULL_WIDTH_PUNCTUATION_MODE = 3; - TOGGLE_PINYIN_SIMPLIFIED_CHINESE_MODE = 4; - }; - optional LanguageBarCommandId language_bar_command_id = 9; - // Unique number specifying an asynchronous request. optional int32 asynchronous_request_id = 10; }; @@ -713,6 +712,19 @@ } optional CrossingEdgeBehavior crossing_edge_behavior = 13 [default = DO_NOTHING]; + + // Controls the behavior of language aware input. Language aware input + // guesses the actual language regardless the input mode. For example, + // if user type "てst" it will be treated as "test". + enum LanguageAwareInputBehavior { + // Does not perform this functionarity. + NO_LANGUAGE_AWARE_INPUT = 0; + + // Adds a language aware candidate to the suggestion. + LANGUAGE_AWARE_SUGGESTION = 1; + } + optional LanguageAwareInputBehavior language_aware_input = 14 + [default = NO_LANGUAGE_AWARE_INPUT]; } // Note there is another ApplicationInfo inside RendererCommand. diff -Nru mozc-1.11.1502.102/session/internal/keymap-inl.h mozc-1.11.1522.102/session/internal/keymap-inl.h --- mozc-1.11.1502.102/session/internal/keymap-inl.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/internal/keymap-inl.h 2013-08-28 05:26:12.000000000 +0000 @@ -42,33 +42,6 @@ } // namespace commands namespace keymap { -namespace internal { - -// Returns true if the key event originates from "Raw Unicode input" -// (a.k.a. VK_PACKET on Windows). See b/4170089. -// TODO(team): Remove this function by updating KeyEventUtil::MaybeGetKeyStub. -// See b/9697808 for details. -template -inline bool HandleRawUnicodeInput(const commands::KeyEvent &key_event, - typename T::Commands* command) { - if (key_event.has_key_string() && !key_event.has_key_code()) { - *command = T::INSERT_CHARACTER; - return true; - } - return false; -} - -// Specialization for DirectInputState. This state doesn't have INSERT_CHARACTER -// command and shouldn't handle Unicode input. -// TODO(team): Remove this function by updating KeyEventUtil::MaybeGetKeyStub. -// See b/9697808 for details. 
-template <> -inline bool HandleRawUnicodeInput( - const commands::KeyEvent &, DirectInputState::Commands*) { - return false; -} - -} // namespace internal template bool KeyMap::GetCommand(const commands::KeyEvent &key_event, @@ -96,10 +69,6 @@ } } - if (internal::HandleRawUnicodeInput(key_event, command)) { - return true; - } - return false; } diff -Nru mozc-1.11.1502.102/session/internal/keymap.cc mozc-1.11.1522.102/session/internal/keymap.cc --- mozc-1.11.1502.102/session/internal/keymap.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/internal/keymap.cc 2013-08-28 05:26:12.000000000 +0000 @@ -204,7 +204,7 @@ } commands::KeyEvent key_event; - KeyParser::ParseKey("ASCII", &key_event); + KeyParser::ParseKey("TextInput", &key_event); keymap_precomposition_.AddRule(key_event, PrecompositionState::INSERT_CHARACTER); keymap_composition_.AddRule(key_event, CompositionState::INSERT_CHARACTER); diff -Nru mozc-1.11.1502.102/session/internal/keymap_test.cc mozc-1.11.1522.102/session/internal/keymap_test.cc --- mozc-1.11.1502.102/session/internal/keymap_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/internal/keymap_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -122,7 +122,7 @@ { KeyMap keymap; commands::KeyEvent init_key_event; - init_key_event.set_key_code(97); + init_key_event.set_special_key(commands::KeyEvent::TEXT_INPUT); EXPECT_TRUE(keymap.AddRule(init_key_event, PrecompositionState::INSERT_CHARACTER)); @@ -211,6 +211,39 @@ commands::KeyEvent key_event; PrecompositionState::Commands command; key_event.set_key_string("a"); + EXPECT_FALSE(keymap.GetCommand(key_event, &command)); + } + + // After adding the rule of TEXT_INPUT -> INSERT_CHARACTER, the above cases + // should return INSERT_CHARACTER. + commands::KeyEvent text_input_key_event; + text_input_key_event.set_special_key(commands::KeyEvent::TEXT_INPUT); + keymap.AddRule(text_input_key_event, PrecompositionState::INSERT_CHARACTER); + + // key_code = 97, key_string = empty + { + commands::KeyEvent key_event; + PrecompositionState::Commands command; + key_event.set_key_code(97); + EXPECT_TRUE(keymap.GetCommand(key_event, &command)); + EXPECT_EQ(PrecompositionState::INSERT_CHARACTER, command); + } + + // key_code = 97, key_string = "a" + { + commands::KeyEvent key_event; + PrecompositionState::Commands command; + key_event.set_key_code(97); + key_event.set_key_string("a"); + EXPECT_TRUE(keymap.GetCommand(key_event, &command)); + EXPECT_EQ(PrecompositionState::INSERT_CHARACTER, command); + } + + // key_code = empty, key_string = "a" + { + commands::KeyEvent key_event; + PrecompositionState::Commands command; + key_event.set_key_string("a"); EXPECT_TRUE(keymap.GetCommand(key_event, &command)); EXPECT_EQ(PrecompositionState::INSERT_CHARACTER, command); } @@ -219,7 +252,7 @@ TEST_F(KeyMapTest, GetCommandKeyStub) { KeyMap keymap; commands::KeyEvent init_key_event; - init_key_event.set_special_key(commands::KeyEvent::ASCII); + init_key_event.set_special_key(commands::KeyEvent::TEXT_INPUT); EXPECT_TRUE(keymap.AddRule(init_key_event, PrecompositionState::INSERT_CHARACTER)); @@ -249,9 +282,9 @@ istringstream iss("", istringstream::in); EXPECT_TRUE(manager.LoadStream(&iss)); - { // Check key bindings of ASCII. + { // Check key bindings of TextInput. 
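With the HandleRawUnicodeInput specialization removed from keymap-inl.h above, the generic stub path in key_event_util.cc (next hunk) takes over: a key event now falls back to the TEXT_INPUT stub when it carries either a printable key_code or a non-empty key_string. A partial sketch of that acceptance check, with a stand-in struct whose fields loosely mirror the proto accessors (the real function also screens out modifier and special keys first):

#include <string>

struct KeyEventLite {         // stand-in for commands::KeyEvent
  bool has_key_code = false;
  int key_code = 0;           // character code of the typed key
  std::string key_string;     // raw Unicode payload (e.g. VK_PACKET input)
};

// Mirrors the updated check in KeyEventUtil::MaybeGetKeyStub: reject only if
// BOTH the key_code is missing or non-printable AND the key_string is empty.
bool QualifiesForTextInputStub(const KeyEventLite &e) {
  const bool printable_code = e.has_key_code && e.key_code > 32;
  const bool has_string = !e.key_string.empty();
  return printable_code || has_string;
}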
commands::KeyEvent key_event; - KeyParser::ParseKey("ASCII", &key_event); + KeyParser::ParseKey("TextInput", &key_event); PrecompositionState::Commands fund_command; EXPECT_TRUE(manager.GetCommandPrecomposition(key_event, &fund_command)); diff -Nru mozc-1.11.1502.102/session/key_event_util.cc mozc-1.11.1522.102/session/key_event_util.cc --- mozc-1.11.1502.102/session/key_event_util.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/key_event_util.cc 2013-08-28 05:26:12.000000000 +0000 @@ -210,12 +210,14 @@ return false; } - if (!key_event.has_key_code() || key_event.key_code() <= 32) { + // Check if both key_code and key_string are invalid. + if ((!key_event.has_key_code() || key_event.key_code() <= 32) && + (!key_event.has_key_string() || key_event.key_string().empty())) { return false; } KeyEvent stub_key_event; - stub_key_event.set_special_key(KeyEvent::ASCII); + stub_key_event.set_special_key(KeyEvent::TEXT_INPUT); if (!GetKeyInformation(stub_key_event, key)) { return false; } diff -Nru mozc-1.11.1502.102/session/key_event_util_test.cc mozc-1.11.1522.102/session/key_event_util_test.cc --- mozc-1.11.1502.102/session/key_event_util_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/key_event_util_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -257,7 +257,7 @@ KeyParser::ParseKey("a", &key_event); EXPECT_TRUE(KeyEventUtil::MaybeGetKeyStub(key_event, &key)); - EXPECT_EQ(static_cast(KeyEvent::ASCII) << 32, key); + EXPECT_EQ(static_cast(KeyEvent::TEXT_INPUT) << 32, key); } namespace { @@ -448,7 +448,7 @@ { "MULTIPLY", true }, { "EQUALS", true }, { "COMMA", true }, - { "ASCII", false }, + { "TEXTINPUT", false }, }; } // namespace diff -Nru mozc-1.11.1502.102/session/key_parser.cc mozc-1.11.1522.102/session/key_parser.cc --- mozc-1.11.1502.102/session/key_parser.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/key_parser.cc 2013-08-28 05:26:12.000000000 +0000 @@ -116,7 +116,8 @@ keycode_map_["home"] = KeyEvent::HOME; keycode_map_["end"] = KeyEvent::END; keycode_map_["space"] = KeyEvent::SPACE; - keycode_map_["ascii"] = KeyEvent::ASCII; + keycode_map_["ascii"] = KeyEvent::TEXT_INPUT; // depricated + keycode_map_["textinput"] = KeyEvent::TEXT_INPUT; keycode_map_["tab"] = KeyEvent::TAB; keycode_map_["pageup"] = KeyEvent::PAGE_UP; keycode_map_["pagedown"] = KeyEvent::PAGE_DOWN; diff -Nru mozc-1.11.1502.102/session/key_parser_test.cc mozc-1.11.1522.102/session/key_parser_test.cc --- mozc-1.11.1502.102/session/key_parser_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/key_parser_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -122,7 +122,8 @@ make_pair("home", commands::KeyEvent::HOME), make_pair("end", commands::KeyEvent::END), make_pair("space", commands::KeyEvent::SPACE), - make_pair("ascii", commands::KeyEvent::ASCII), + make_pair("ascii", commands::KeyEvent::TEXT_INPUT), // depricated + make_pair("textinput", commands::KeyEvent::TEXT_INPUT), make_pair("tab", commands::KeyEvent::TAB), make_pair("pageup", commands::KeyEvent::PAGE_UP), make_pair("pagedown", commands::KeyEvent::PAGE_DOWN), diff -Nru mozc-1.11.1502.102/session/session.cc mozc-1.11.1522.102/session/session.cc --- mozc-1.11.1502.102/session/session.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session.cc 2013-08-28 05:26:12.000000000 +0000 @@ -363,6 +363,12 @@ case commands::SessionCommand::COMMIT_RAW_TEXT: result = CommitRawText(command); break; + case commands::SessionCommand::CONVERT_PREV_PAGE: + result = 
ConvertPrevPage(command); + break; + case commands::SessionCommand::CONVERT_NEXT_PAGE: + result = ConvertNextPage(command); + break; default: LOG(WARNING) << "Unknown command" << command->DebugString(); result = DoNothing(command); @@ -1733,7 +1739,8 @@ context_->mutable_converter()->CommitPreedit(context_->composer(), command->input().context()); } else { // ImeContext::CONVERSION - context_->mutable_converter()->Commit(command->input().context()); + context_->mutable_converter()->Commit(context_->composer(), + command->input().context()); } SetSessionState(ImeContext::PRECOMPOSITION, context_.get()); @@ -1829,7 +1836,8 @@ void Session::CommitFirstSegmentInternal(const commands::Context &context) { size_t size; - context_->mutable_converter()->CommitFirstSegment(context, &size); + context_->mutable_converter()->CommitFirstSegment( + context_->composer(), context, &size); if (size > 0) { // Delete the key characters of the first segment from the preedit. context_->mutable_composer()->DeleteRange(0, size); @@ -2546,6 +2554,9 @@ } bool Session::ConvertNextPage(commands::Command *command) { + if (!(context_->state() & (ImeContext::CONVERSION))) { + return DoNothing(command); + } command->mutable_output()->set_consumed(true); context_->mutable_converter()->CandidateNextPage(); Output(command); @@ -2560,6 +2571,9 @@ } bool Session::ConvertPrevPage(commands::Command *command) { + if (!(context_->state() & (ImeContext::CONVERSION))) { + return DoNothing(command); + } command->mutable_output()->set_consumed(true); context_->mutable_converter()->CandidatePrevPage(); Output(command); diff -Nru mozc-1.11.1502.102/session/session_converter.cc mozc-1.11.1522.102/session/session_converter.cc --- mozc-1.11.1502.102/session/session_converter.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_converter.cc 2013-08-28 05:26:12.000000000 +0000 @@ -652,16 +652,12 @@ const size_t previous_index = candidate_list_->focused_index(); if (!PredictWithPreferences(composer, conversion_preferences_)) { - // TODO(komatsu): Consider the case when PredictWithPreferences fails. return; } - if (previous_index < candidate_list_->size()) { - candidate_list_->MoveToId(candidate_list_->candidate(previous_index).id()); - UpdateSelectedCandidateIndex(); - } else { - // Ideally this should not happen. 
- } + DCHECK_LT(previous_index, candidate_list_->size()); + candidate_list_->MoveToId(candidate_list_->candidate(previous_index).id()); + UpdateSelectedCandidateIndex(); } void SessionConverter::Cancel() { @@ -689,7 +685,8 @@ ResetState(); } -void SessionConverter::Commit(const commands::Context &context) { +void SessionConverter::Commit(const composer::Composer &composer, + const commands::Context &context) { DCHECK(CheckState(PREDICTION | CONVERSION)); ResetResult(); @@ -705,7 +702,8 @@ GetCandidateIndexForConverter(i)); } CommitUsageStats(state_, context); - converter_->FinishConversion(segments_.get()); + ConversionRequest conversion_request(&composer, request_); + converter_->FinishConversion(conversion_request, segments_.get()); ResetState(); } @@ -753,7 +751,8 @@ 0, GetCandidateIndexForConverter(0)); CommitUsageStats(SessionConverterInterface::SUGGESTION, context); - converter_->FinishConversion(segments_.get()); + ConversionRequest conversion_request(&composer, request_); + converter_->FinishConversion(conversion_request, segments_.get()); DCHECK_EQ(0, segments_->conversion_segments_size()); ResetState(); } @@ -791,7 +790,8 @@ return CommitSuggestionInternal(composer, context, consumed_key_size); } -void SessionConverter::CommitFirstSegment(const commands::Context &context, +void SessionConverter::CommitFirstSegment(const composer::Composer &composer, + const commands::Context &context, size_t *consumed_key_size) { DCHECK(CheckState(PREDICTION | CONVERSION)); ResetResult(); @@ -800,7 +800,7 @@ // If the number of segments is one, just call Commit. if (segments_->conversion_segments_size() == 1) { - Commit(context); + Commit(composer, context); return; } @@ -848,7 +848,8 @@ segments_.get()); CommitUsageStats(SessionConverterInterface::COMPOSITION, context); - converter_->FinishConversion(segments_.get()); + ConversionRequest conversion_request(&composer, request_); + converter_->FinishConversion(conversion_request, segments_.get()); ResetState(); } diff -Nru mozc-1.11.1502.102/session/session_converter.h mozc-1.11.1522.102/session/session_converter.h --- mozc-1.11.1502.102/session/session_converter.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_converter.h 2013-08-28 05:26:12.000000000 +0000 @@ -130,7 +130,8 @@ virtual void Reset(); // Fixes the conversion with the current status. - virtual void Commit(const commands::Context &context); + virtual void Commit(const composer::Composer &composer, + const commands::Context &context); // Fixes the suggestion candidate. Stores the number of characters in the key // of the committed candidate to committed_key_size. @@ -179,7 +180,8 @@ // so Commit() method is called instead. In this case, the caller // should not delete any characters. // c.f. CommitSuggestionInternal - virtual void CommitFirstSegment(const commands::Context &context, + virtual void CommitFirstSegment(const composer::Composer &composer, + const commands::Context &context, size_t *consumed_key_size); // Commits the preedit string represented by Composer. diff -Nru mozc-1.11.1502.102/session/session_converter_interface.h mozc-1.11.1522.102/session/session_converter_interface.h --- mozc-1.11.1502.102/session/session_converter_interface.h 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_converter_interface.h 2013-08-28 05:26:12.000000000 +0000 @@ -163,7 +163,8 @@ virtual void Reset() ABSTRACT; // Fix the conversion with the current status. 
- virtual void Commit(const commands::Context &context) ABSTRACT; + virtual void Commit(const composer::Composer &composer, + const commands::Context &context) ABSTRACT; // Fix the suggestion candidate. True is returned if the selected // candidate is successfully committed. @@ -184,7 +185,8 @@ // Fix only the conversion of the first segment, and keep the rest. // The caller should delete characters from composer based on returned // |committed_key_size|. - virtual void CommitFirstSegment(const commands::Context &context, + virtual void CommitFirstSegment(const composer::Composer &composer, + const commands::Context &context, size_t *committed_key_size) ABSTRACT; // Commit the preedit string represented by Composer. diff -Nru mozc-1.11.1502.102/session/session_converter_test.cc mozc-1.11.1522.102/session/session_converter_test.cc --- mozc-1.11.1502.102/session/session_converter_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_converter_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -452,7 +452,7 @@ EXPECT_TRUE(converter.IsActive()); EXPECT_FALSE(IsCandidateListVisible(converter)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); composer_->Reset(); output.Clear(); converter.FillOutput(*composer_, &output); @@ -550,7 +550,7 @@ EXPECT_SELECTED_CANDIDATE_INDICES_EQ(converter, expected_indices); } - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); EXPECT_COUNT_STATS("Commit", 1); EXPECT_COUNT_STATS("CommitFromConversion", 1); @@ -985,7 +985,7 @@ EXPECT_EQ("\xe5\x8d\xb0\xe6\x88\xbf", conversion.segment(1).value()); } - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); expected_indices.clear(); { composer_->Reset(); @@ -1532,7 +1532,7 @@ convertermock_->SetCommitFirstSegment(&segments_after_submit, true); } size_t size; - converter.CommitFirstSegment(Context::default_instance(), &size); + converter.CommitFirstSegment(*composer_, Context::default_instance(), &size); expected_indices.erase(expected_indices.begin(), expected_indices.begin() + 1); EXPECT_FALSE(IsCandidateListVisible(converter)); @@ -2011,7 +2011,7 @@ converter.CandidateNext(*composer_); expected_indices[0] += 1; EXPECT_SELECTED_CANDIDATE_INDICES_EQ(converter, expected_indices); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); composer_->Reset(); expected_indices.clear(); EXPECT_SELECTED_CANDIDATE_INDICES_EQ(converter, expected_indices); @@ -3311,7 +3311,7 @@ composer_->InsertCharacterPreedit(kChars_Aiueo); EXPECT_TRUE(converter.Convert(*composer_)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); commands::Output output; converter.FillOutput(*composer_, &output); EXPECT_FALSE(output.has_result()); @@ -3337,7 +3337,8 @@ converter.Convert(*composer_); size_t committed_size = 0; - converter.CommitFirstSegment(Context::default_instance(), + converter.CommitFirstSegment(*composer_, + Context::default_instance(), &committed_size); EXPECT_EQ(0, committed_size); @@ -3360,7 +3361,8 @@ converter.Convert(*composer_); size_t committed_size = 0; - converter.CommitFirstSegment(Context::default_instance(), + converter.CommitFirstSegment(*composer_, + Context::default_instance(), &committed_size); EXPECT_EQ(Util::CharsLen(kKamabokono), committed_size); @@ -3424,7 +3426,7 @@ 
composer_->InsertCharacterPreedit(kChars_Aiueo); EXPECT_TRUE(converter.Convert(*composer_)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); commands::Output output; converter.FillOutput(*composer_, &output); EXPECT_FALSE(output.has_result()); @@ -3452,7 +3454,7 @@ composer_->InsertCharacterPreedit(kChars_Aiueo); EXPECT_TRUE(converter.Convert(*composer_)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); commands::Output output; converter.FillOutput(*composer_, &output); EXPECT_FALSE(output.has_result()); @@ -3480,7 +3482,7 @@ composer_->InsertCharacterPreedit(kChars_Aiueo); EXPECT_TRUE(converter.Convert(*composer_)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); commands::Output output; converter.FillOutput(*composer_, &output); EXPECT_FALSE(output.has_result()); @@ -3508,7 +3510,7 @@ composer_->InsertCharacterPreedit(kChars_Aiueo); EXPECT_TRUE(converter.Convert(*composer_)); - converter.Commit(Context::default_instance()); + converter.Commit(*composer_, Context::default_instance()); commands::Output output; converter.FillOutput(*composer_, &output); EXPECT_FALSE(output.has_result()); diff -Nru mozc-1.11.1502.102/session/session_handler.cc mozc-1.11.1522.102/session/session_handler.cc --- mozc-1.11.1502.102/session/session_handler.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_handler.cc 2013-08-28 05:26:12.000000000 +0000 @@ -829,11 +829,6 @@ SessionID id = 0; while (true) { Util::GetRandomSequence(reinterpret_cast(&id), sizeof(id)); -#ifdef __native_client__ - // Because JavaScript does not support uint64. - // So we downsize the session id range from uint64 to uint32 in NaCl. - id = static_cast(id); -#endif // __native_client__ // don't allow id == 0, as it is reserved for // "invalid id" if (id != 0 && !session_map_->HasKey(id)) { diff -Nru mozc-1.11.1502.102/session/session_handler_stress_test.cc mozc-1.11.1522.102/session/session_handler_stress_test.cc --- mozc-1.11.1502.102/session/session_handler_stress_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_handler_stress_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -33,8 +33,6 @@ #include "base/base.h" #include "base/file_util.h" -#include "config/config.pb.h" -#include "config/config_handler.h" #include "engine/engine_factory.h" #include "session/commands.pb.h" #include "session/japanese_session_factory.h" @@ -118,18 +116,11 @@ }; TEST_F(SessionHandlerStressTest, BasicStressTest) { - config::Config config; - config::ConfigHandler::GetDefaultConfig(&config); - // TOOD(all): Add a test for the case where - // use_realtime_conversion is true. 
- config.set_use_realtime_conversion(false); - config::ConfigHandler::SetConfig(config); - vector keys; commands::Output output; TestSessionClient client; size_t keyevents_size = 0; - const size_t kMaxEventSize = 50000; + const size_t kMaxEventSize = 10000; ASSERT_TRUE(client.CreateSession()); const uint32 random_seed = static_cast(FLAGS_random_seed); diff -Nru mozc-1.11.1502.102/session/session_regression_test.cc mozc-1.11.1522.102/session/session_regression_test.cc --- mozc-1.11.1502.102/session/session_regression_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_regression_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -147,11 +147,6 @@ session_factory_.reset(new session::JapaneseSessionFactory(engine_.get())); session::SessionFactoryManager::SetSessionFactory(session_factory_.get()); - config::ConfigHandler::GetDefaultConfig(&config_); - // TOOD(all): Add a test for the case where - // use_realtime_conversion is true. - config_.set_use_realtime_conversion(false); - config::ConfigHandler::SetConfig(config_); handler_.reset(new SessionHandler()); ResetSession(); CHECK(session_.get()); @@ -269,7 +264,6 @@ EXPECT_FALSE(command.output().has_result()); EXPECT_TRUE(SendKey("a", &command)); - EXPECT_FALSE(command.output().has_candidates()); #if OS_MACOSX // The MacOS default short cut of F10 is DisplayAsHalfAlphanumeric. // It does not start the conversion so output does not have any result. diff -Nru mozc-1.11.1502.102/session/session_test.cc mozc-1.11.1522.102/session/session_test.cc --- mozc-1.11.1502.102/session/session_test.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_test.cc 2013-08-28 05:26:12.000000000 +0000 @@ -43,6 +43,7 @@ #include "converter/converter_mock.h" #include "converter/segments.h" #include "data_manager/user_pos_manager.h" +#include "dictionary/dictionary_mock.h" #include "engine/engine_interface.h" #include "engine/mock_converter_engine.h" #include "engine/mock_data_engine_factory.h" @@ -554,9 +555,11 @@ session::SessionFactoryManager::SetSessionFactory(session_factory_.get()); handler_.reset(new SessionHandler); + dictionary_mock_.reset(new DictionaryMock); t13n_rewriter_.reset( new TransliterationRewriter( - *UserPosManager::GetUserPosManager()->GetPOSMatcher())); + *UserPosManager::GetUserPosManager()->GetPOSMatcher(), + dictionary_mock_.get())); } virtual void TearDown() { @@ -871,6 +874,7 @@ scoped_ptr engine_; scoped_ptr mock_data_engine_; scoped_ptr handler_; + scoped_ptr dictionary_mock_; scoped_ptr t13n_rewriter_; scoped_ptr session_factory_; scoped_ptr table_; @@ -3045,6 +3049,109 @@ EXPECT_EQ(ImeContext::PRECOMPOSITION, session->context().state()); } +TEST_F(SessionTest, ConvertNextPage_PrevPage) { + commands::Command command; + scoped_ptr session(new Session(engine_.get())); + + InitSessionToPrecomposition(session.get()); + + // Should be ignored in precomposition state. 
+ { + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_NEXT_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_TRUE(command.output().consumed()); + + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_PREV_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_TRUE(command.output().consumed()); + } + + InsertCharacterChars("aiueo", session.get(), &command); + EXPECT_PREEDIT(kAiueo, command); + + // Should be ignored in composition state. + { + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_NEXT_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_TRUE(command.output().consumed()); + EXPECT_PREEDIT(kAiueo, command) << "should do nothing"; + + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_PREV_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_TRUE(command.output().consumed()); + EXPECT_PREEDIT(kAiueo, command) << "should do nothing"; + } + + // Generate sequential candidates as follows. + // "page0-cand0" + // "page0-cand1" + // ... + // "page0-cand8" + // "page1-cand0" + // ... + // "page1-cand8" + // "page2-cand0" + // ... + // "page2-cand8" + { + Segments segments; + Segment *segment = NULL; + segment = segments.add_segment(); + segment->set_key(kAiueo); + for (int page_index = 0; page_index < 3; ++page_index) { + for (int cand_index = 0; cand_index < 9; ++cand_index) { + segment->add_candidate()->value = Util::StringPrintf( + "page%d-cand%d", page_index, cand_index); + } + } + GetConverterMock()->SetStartConversionForRequest(&segments, true); + } + + // Make sure the selected candidate changes as follows. + // -> Convert + // -> "page0-cand0" -> SendCommand/CONVERT_NEXT_PAGE + // -> "page1-cand0" -> SendCommand/CONVERT_PREV_PAGE + // -> "page0-cand0" -> SendCommand/CONVERT_PREV_PAGE + // -> "page2-cand0" + + command.Clear(); + ASSERT_TRUE(session->Convert(&command)); + EXPECT_PREEDIT("page0-cand0", command); + + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_NEXT_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_PREEDIT("page1-cand0", command); + + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_PREV_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_PREEDIT("page0-cand0", command); + + command.Clear(); + command.mutable_input()->set_type(commands::Input::SEND_COMMAND); + command.mutable_input()->mutable_command()->set_type( + commands::SessionCommand::CONVERT_PREV_PAGE); + ASSERT_TRUE(session->SendCommand(&command)); + EXPECT_PREEDIT("page2-cand0", command); +} + TEST_F(SessionTest, NeedlessClearUndoContext) { // This is a unittest against http://b/3423910. 
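[Editor's note: the following illustration is not part of the patch.] The session.cc and session_test.cc hunks above add two session commands, CONVERT_NEXT_PAGE and CONVERT_PREV_PAGE, which page through the candidate list and are consumed but otherwise ignored (DoNothing) unless the session is in the CONVERSION state. A minimal client-side sketch of driving them, modelled directly on the new ConvertNextPage_PrevPage test and assuming an already initialized mozc session object named "session", would look roughly like this:

  // Hypothetical usage sketch only; mirrors the new test above rather than an official API example.
  commands::Command command;
  command.mutable_input()->set_type(commands::Input::SEND_COMMAND);
  command.mutable_input()->mutable_command()->set_type(
      commands::SessionCommand::CONVERT_NEXT_PAGE);
  session->SendCommand(&command);  // In CONVERSION state this focuses the first candidate of the
                                   // next page; in any other state the command is consumed and ignored.

Sending CONVERT_PREV_PAGE works the same way and, as the test shows, wraps from the first page back to the last one.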
diff -Nru mozc-1.11.1502.102/session/session_test.gyp mozc-1.11.1522.102/session/session_test.gyp --- mozc-1.11.1502.102/session/session_test.gyp 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_test.gyp 2013-08-28 05:26:12.000000000 +0000 @@ -76,6 +76,7 @@ 'dependencies': [ '../converter/converter_base.gyp:converter_mock', '../data_manager/data_manager.gyp:user_pos_manager', + '../dictionary/dictionary.gyp:dictionary_mock', '../engine/engine.gyp:mock_converter_engine', "../engine/engine.gyp:mock_data_engine_factory", '../rewriter/rewriter.gyp:rewriter', @@ -229,7 +230,7 @@ 'session_handler_test_util', ], 'variables': { - 'test_size': 'small', + 'test_size': 'large', }, }, { @@ -292,77 +293,6 @@ }, }, { - 'target_name': 'install_session_handler_scenario_test_data', - 'type': 'none', - 'variables': { - 'test_data': [ - '../<(test_data_subdir)/auto_partial_suggestion.txt', - '../<(test_data_subdir)/b7132535_scenario.txt', - '../<(test_data_subdir)/b7321313_scenario.txt', - '../<(test_data_subdir)/b8703702_scenario.txt', - '../<(test_data_subdir)/change_request.txt', - '../<(test_data_subdir)/clear_user_prediction.txt', - '../<(test_data_subdir)/composition_display_as.txt', - '../<(test_data_subdir)/conversion.txt', - '../<(test_data_subdir)/conversion_display_as.txt', - '../<(test_data_subdir)/conversion_with_history_segment.txt', - '../<(test_data_subdir)/conversion_with_long_history_segments.txt', - '../<(test_data_subdir)/delete_history.txt', - '../<(test_data_subdir)/desktop_t13n_candidates.txt', - '../<(test_data_subdir)/insert_characters.txt', - '../<(test_data_subdir)/mobile_qwerty_transliteration_scenario.txt', - '../<(test_data_subdir)/mobile_t13n_candidates.txt', - '../<(test_data_subdir)/on_off_cancel.txt', - '../<(test_data_subdir)/partial_suggestion.txt', - '../<(test_data_subdir)/pending_character.txt', - '../<(test_data_subdir)/predict_and_convert.txt', - '../<(test_data_subdir)/reconvert.txt', - '../<(test_data_subdir)/revert.txt', - '../<(test_data_subdir)/segment_focus.txt', - '../<(test_data_subdir)/segment_width.txt', - '../<(test_data_subdir)/twelvekeys_switch_inputmode_scenario.txt', - '../<(test_data_subdir)/twelvekeys_toggle_hiragana_preedit_scenario.txt', - '../<(test_data_subdir)/undo.txt', - ], - 'test_data_subdir': 'data/test/session/scenario', - }, - 'includes': ['../gyp/install_testdata.gypi'], - }, - { - 'target_name': 'install_session_handler_usage_stats_scenario_test_data', - 'type': 'none', - 'variables': { - 'test_data': [ - "../<(test_data_subdir)/conversion.txt", - "../<(test_data_subdir)/prediction.txt", - "../<(test_data_subdir)/suggestion.txt", - "../<(test_data_subdir)/composition.txt", - "../<(test_data_subdir)/select_prediction.txt", - "../<(test_data_subdir)/select_minor_conversion.txt", - "../<(test_data_subdir)/select_minor_prediction.txt", - "../<(test_data_subdir)/mouse_select_from_suggestion.txt", - "../<(test_data_subdir)/select_t13n_by_key.txt", - "../<(test_data_subdir)/select_t13n_on_cascading_window.txt", - "../<(test_data_subdir)/switch_kana_type.txt", - "../<(test_data_subdir)/multiple_segments.txt", - "../<(test_data_subdir)/select_candidates_in_multiple_segments.txt", - "../<(test_data_subdir)/select_candidates_in_multiple_segments_and_expand_segment.txt", - "../<(test_data_subdir)/continue_input.txt", - "../<(test_data_subdir)/continuous_input.txt", - "../<(test_data_subdir)/multiple_sessions.txt", - "../<(test_data_subdir)/backspace_after_commit.txt", - 
"../<(test_data_subdir)/backspace_after_commit_after_backspace.txt", - "../<(test_data_subdir)/multiple_backspace_after_commit.txt", - "../<(test_data_subdir)/zero_query_suggestion.txt", - "../<(test_data_subdir)/auto_partial_suggestion.txt", - "../<(test_data_subdir)/insert_space.txt", - "../<(test_data_subdir)/numpad_in_direct_input_mode.txt", - ], - 'test_data_subdir': 'data/test/session/scenario/usage_stats', - }, - 'includes': ['../gyp/install_testdata.gypi'], - }, - { 'target_name': 'session_handler_scenario_test', 'type': 'executable', 'sources': [ @@ -370,10 +300,10 @@ ], 'dependencies': [ '../base/base.gyp:base', + '../data/test/session/scenario/scenario.gyp:install_session_handler_scenario_test_data', + '../data/test/session/scenario/usage_stats/usage_stats.gyp:install_session_handler_usage_stats_scenario_test_data', '../testing/testing.gyp:gtest_main', '../usage_stats/usage_stats_test.gyp:usage_stats_testing_util', - 'install_session_handler_scenario_test_data', - 'install_session_handler_usage_stats_scenario_test_data', 'session.gyp:session_handler', 'session_base.gyp:request_test_util', 'session_base.gyp:session_protocol', diff -Nru mozc-1.11.1502.102/session/session_watch_dog.cc mozc-1.11.1522.102/session/session_watch_dog.cc --- mozc-1.11.1502.102/session/session_watch_dog.cc 2013-07-17 02:38:03.000000000 +0000 +++ mozc-1.11.1522.102/session/session_watch_dog.cc 2013-08-28 05:26:12.000000000 +0000 @@ -247,12 +247,12 @@ } const float all_avg = - accumulate(cpu_loads, cpu_loads + cpu_loads_index, 0.0) + std::accumulate(cpu_loads, cpu_loads + cpu_loads_index, 0.0) / cpu_loads_index; const size_t latest_size = min(2, cpu_loads_index); const float latest_avg = - accumulate(cpu_loads, cpu_loads + latest_size, 0.0) + std::accumulate(cpu_loads, cpu_loads + latest_size, 0.0) / latest_size; VLOG(1) << "Average CPU load=" << all_avg diff -Nru mozc-1.11.1502.102/storage/louds/bit_vector_based_array_test.cc mozc-1.11.1522.102/storage/louds/bit_vector_based_array_test.cc --- mozc-1.11.1502.102/storage/louds/bit_vector_based_array_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/storage/louds/bit_vector_based_array_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -62,7 +62,7 @@ }; BitVectorBasedArrayBuilder builder; - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestData); ++i) { + for (size_t i = 0; i < arraysize(kTestData); ++i) { builder.Add(string(kTestData[i].element, kTestData[i].length)); } builder.SetSize(4, 2); @@ -70,7 +70,7 @@ BitVectorBasedArray array; array.Open(reinterpret_cast(builder.image().data())); - for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kTestData); ++i) { + for (size_t i = 0; i < arraysize(kTestData); ++i) { size_t length; const char *result = array.Get(i, &length); EXPECT_EQ( diff -Nru mozc-1.11.1502.102/third_party/gyp/AUTHORS mozc-1.11.1522.102/third_party/gyp/AUTHORS --- mozc-1.11.1502.102/third_party/gyp/AUTHORS 2012-12-06 21:16:39.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/AUTHORS 1970-01-01 00:00:00.000000000 +0000 @@ -1,8 +0,0 @@ -# Names should be added to this file like so: -# Name or Organization - -Google Inc. -Bloomberg Finance L.P. - -Steven Knight -Ryan Norton diff -Nru mozc-1.11.1502.102/third_party/gyp/DEPS mozc-1.11.1522.102/third_party/gyp/DEPS --- mozc-1.11.1502.102/third_party/gyp/DEPS 2012-03-14 01:29:17.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/DEPS 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -# DEPS file for gclient use in buildbot execution of gyp tests. 
-# -# (You don't need to use gclient for normal GYP development work.) - -vars = { - "chrome_trunk": "http://src.chromium.org/svn/trunk", - "googlecode_url": "http://%s.googlecode.com/svn", -} - -deps = { - "scons": - Var("chrome_trunk") + "/src/third_party/scons@44099", -} - -deps_os = { - "win": { - "third_party/cygwin": - Var("chrome_trunk") + "/deps/third_party/cygwin@66844", - - "third_party/python_26": - Var("chrome_trunk") + "/tools/third_party/python_26@89111", - - "src/third_party/pefile": - (Var("googlecode_url") % "pefile") + "/trunk@63", - }, -} diff -Nru mozc-1.11.1502.102/third_party/gyp/LICENSE mozc-1.11.1522.102/third_party/gyp/LICENSE --- mozc-1.11.1502.102/third_party/gyp/LICENSE 2009-10-22 20:02:37.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/LICENSE 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -Copyright (c) 2009 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff -Nru mozc-1.11.1502.102/third_party/gyp/MANIFEST mozc-1.11.1522.102/third_party/gyp/MANIFEST --- mozc-1.11.1502.102/third_party/gyp/MANIFEST 2009-12-11 19:32:21.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/MANIFEST 1970-01-01 00:00:00.000000000 +0000 @@ -1,21 +0,0 @@ -setup.py -gyp -LICENSE -AUTHORS -pylib/gyp/MSVSNew.py -pylib/gyp/MSVSProject.py -pylib/gyp/MSVSToolFile.py -pylib/gyp/MSVSUserFile.py -pylib/gyp/MSVSVersion.py -pylib/gyp/SCons.py -pylib/gyp/__init__.py -pylib/gyp/common.py -pylib/gyp/input.py -pylib/gyp/xcodeproj_file.py -pylib/gyp/generator/__init__.py -pylib/gyp/generator/gypd.py -pylib/gyp/generator/gypsh.py -pylib/gyp/generator/make.py -pylib/gyp/generator/msvs.py -pylib/gyp/generator/scons.py -pylib/gyp/generator/xcode.py diff -Nru mozc-1.11.1502.102/third_party/gyp/OWNERS mozc-1.11.1522.102/third_party/gyp/OWNERS --- mozc-1.11.1502.102/third_party/gyp/OWNERS 2011-04-01 22:56:20.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/OWNERS 1970-01-01 00:00:00.000000000 +0000 @@ -1 +0,0 @@ -* diff -Nru mozc-1.11.1502.102/third_party/gyp/PRESUBMIT.py mozc-1.11.1522.102/third_party/gyp/PRESUBMIT.py --- mozc-1.11.1502.102/third_party/gyp/PRESUBMIT.py 2013-01-16 20:12:08.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/PRESUBMIT.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,116 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -"""Top-level presubmit script for GYP. - -See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts -for more details about the presubmit API built into gcl. -""" - - -PYLINT_BLACKLIST = [ - # TODO: fix me. - # From SCons, not done in google style. - 'test/lib/TestCmd.py', - 'test/lib/TestCommon.py', - 'test/lib/TestGyp.py', - # Needs style fix. - 'pylib/gyp/generator/scons.py', - 'pylib/gyp/generator/xcode.py', -] - - -PYLINT_DISABLED_WARNINGS = [ - # TODO: fix me. - # Many tests include modules they don't use. - 'W0611', - # Include order doesn't properly include local files? - 'F0401', - # Some use of built-in names. - 'W0622', - # Some unused variables. - 'W0612', - # Operator not preceded/followed by space. - 'C0323', - 'C0322', - # Unnecessary semicolon. - 'W0301', - # Unused argument. - 'W0613', - # String has no effect (docstring in wrong place). - 'W0105', - # Comma not followed by space. - 'C0324', - # Access to a protected member. - 'W0212', - # Bad indent. - 'W0311', - # Line too long. - 'C0301', - # Undefined variable. - 'E0602', - # Not exception type specified. - 'W0702', - # No member of that name. - 'E1101', - # Dangerous default {}. - 'W0102', - # Others, too many to sort. - 'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231', - 'R0201', 'E0101', 'C0321', - # ************* Module copy - # W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect - 'W0104', -] - - -def CheckChangeOnUpload(input_api, output_api): - report = [] - report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api)) - return report - - -def CheckChangeOnCommit(input_api, output_api): - report = [] - - # Accept any year number from 2009 to the current year. - current_year = int(input_api.time.strftime('%Y')) - allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1))) - years_re = '(' + '|'.join(allowed_years) + ')' - - # The (c) is deprecated, but tolerate it until it's removed from all files. - license = ( - r'.*? 
Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n' - r'.*? Use of this source code is governed by a BSD-style license that ' - r'can be\n' - r'.*? found in the LICENSE file\.\n' - ) % { - 'year': years_re, - } - - report.extend(input_api.canned_checks.PanProjectChecks( - input_api, output_api, license_header=license)) - report.extend(input_api.canned_checks.CheckTreeIsOpen( - input_api, output_api, - 'http://gyp-status.appspot.com/status', - 'http://gyp-status.appspot.com/current')) - - import sys - old_sys_path = sys.path - try: - sys.path = ['pylib', 'test/lib'] + sys.path - report.extend(input_api.canned_checks.RunPylint( - input_api, - output_api, - black_list=PYLINT_BLACKLIST, - disabled_warnings=PYLINT_DISABLED_WARNINGS)) - finally: - sys.path = old_sys_path - return report - - -def GetPreferredTrySlaves(): - return ['gyp-win32', 'gyp-win64', 'gyp-linux', 'gyp-mac', 'gyp-android'] diff -Nru mozc-1.11.1502.102/third_party/gyp/README.mozc mozc-1.11.1522.102/third_party/gyp/README.mozc --- mozc-1.11.1502.102/third_party/gyp/README.mozc 2013-07-17 03:35:04.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/README.mozc 1970-01-01 00:00:00.000000000 +0000 @@ -1,12 +0,0 @@ -URL: http://code.google.com/p/gyp/ -Version: r1589 -License: New BSD License -License File: LICENSE - -Description: -This is Mozc's local copy of gyp, a tool that generates native Visual Studio, -Xcode and SCons and/or make build files from a platform-independent input -format. - -Local Modifications: -test directory is removed. diff -Nru mozc-1.11.1502.102/third_party/gyp/buildbot/buildbot_run.py mozc-1.11.1522.102/third_party/gyp/buildbot/buildbot_run.py --- mozc-1.11.1502.102/third_party/gyp/buildbot/buildbot_run.py 2013-02-21 23:36:49.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/buildbot/buildbot_run.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,150 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - - -"""Argument-less script to select what to run on the buildbots.""" - - -import os -import shutil -import subprocess -import sys - - -if sys.platform in ['win32', 'cygwin']: - EXE_SUFFIX = '.exe' -else: - EXE_SUFFIX = '' - - -BUILDBOT_DIR = os.path.dirname(os.path.abspath(__file__)) -TRUNK_DIR = os.path.dirname(BUILDBOT_DIR) -ROOT_DIR = os.path.dirname(TRUNK_DIR) -ANDROID_DIR = os.path.join(ROOT_DIR, 'android') -OUT_DIR = os.path.join(TRUNK_DIR, 'out') - - -def CallSubProcess(*args, **kwargs): - """Wrapper around subprocess.call which treats errors as build exceptions.""" - retcode = subprocess.call(*args, **kwargs) - if retcode != 0: - print '@@@STEP_EXCEPTION@@@' - sys.exit(1) - - -def PrepareAndroidTree(): - """Prepare an Android tree to run 'android' format tests.""" - if os.environ['BUILDBOT_CLOBBER'] == '1': - print '@@@BUILD_STEP Clobber Android checkout@@@' - shutil.rmtree(ANDROID_DIR) - - # The release of Android we use is static, so there's no need to do anything - # if the directory already exists. 
- if os.path.isdir(ANDROID_DIR): - return - - print '@@@BUILD_STEP Initialize Android checkout@@@' - os.mkdir(ANDROID_DIR) - CallSubProcess(['git', 'config', '--global', 'user.name', 'trybot']) - CallSubProcess(['git', 'config', '--global', - 'user.email', 'chrome-bot@google.com']) - CallSubProcess(['git', 'config', '--global', 'color.ui', 'false']) - CallSubProcess( - ['repo', 'init', - '-u', 'https://android.googlesource.com/platform/manifest', - '-b', 'android-4.2.1_r1', - '-g', 'all,-notdefault,-device,-darwin,-mips,-x86'], - cwd=ANDROID_DIR) - - print '@@@BUILD_STEP Sync Android@@@' - CallSubProcess(['repo', 'sync', '-j4'], cwd=ANDROID_DIR) - - print '@@@BUILD_STEP Build Android@@@' - CallSubProcess( - ['/bin/bash', - '-c', 'source build/envsetup.sh && lunch full-eng && make -j4'], - cwd=ANDROID_DIR) - - -def GypTestFormat(title, format=None, msvs_version=None): - """Run the gyp tests for a given format, emitting annotator tags. - - See annotator docs at: - https://sites.google.com/a/chromium.org/dev/developers/testing/chromium-build-infrastructure/buildbot-annotations - Args: - format: gyp format to test. - Returns: - 0 for sucesss, 1 for failure. - """ - if not format: - format = title - - print '@@@BUILD_STEP ' + title + '@@@' - sys.stdout.flush() - env = os.environ.copy() - if msvs_version: - env['GYP_MSVS_VERSION'] = msvs_version - command = ' '.join( - [sys.executable, 'trunk/gyptest.py', - '--all', - '--passed', - '--format', format, - '--chdir', 'trunk', - '--path', '../scons']) - if format == 'android': - # gyptest needs the environment setup from envsetup/lunch in order to build - # using the 'android' backend, so this is done in a single shell. - retcode = subprocess.call( - ['/bin/bash', - '-c', 'source build/envsetup.sh && lunch full-eng && cd %s && %s' - % (ROOT_DIR, command)], - cwd=ANDROID_DIR, env=env) - else: - retcode = subprocess.call(command, cwd=ROOT_DIR, env=env, shell=True) - if retcode: - # Emit failure tag, and keep going. - print '@@@STEP_FAILURE@@@' - return 1 - return 0 - - -def GypBuild(): - # Dump out/ directory. - print '@@@BUILD_STEP cleanup@@@' - print 'Removing %s...' % OUT_DIR - shutil.rmtree(OUT_DIR, ignore_errors=True) - print 'Done.' - - retcode = 0 - # The Android gyp bot runs on linux so this must be tested first. - if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-android': - PrepareAndroidTree() - retcode += GypTestFormat('android') - elif sys.platform.startswith('linux'): - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('scons') - retcode += GypTestFormat('make') - elif sys.platform == 'darwin': - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('xcode') - retcode += GypTestFormat('make') - elif sys.platform == 'win32': - retcode += GypTestFormat('ninja') - retcode += GypTestFormat('msvs-2008', format='msvs', msvs_version='2008') - if os.environ['BUILDBOT_BUILDERNAME'] == 'gyp-win64': - retcode += GypTestFormat('msvs-2010', format='msvs', msvs_version='2010') - else: - raise Exception('Unknown platform') - if retcode: - # TODO(bradnelson): once the annotator supports a postscript (section for - # after the build proper that could be used for cumulative failures), - # use that instead of this. This isolates the final return value so - # that it isn't misattributed to the last stage. 
- print '@@@BUILD_STEP failures@@@' - sys.exit(retcode) - - -if __name__ == '__main__': - GypBuild() diff -Nru mozc-1.11.1502.102/third_party/gyp/codereview.settings mozc-1.11.1522.102/third_party/gyp/codereview.settings --- mozc-1.11.1502.102/third_party/gyp/codereview.settings 2010-04-27 23:47:26.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/codereview.settings 1970-01-01 00:00:00.000000000 +0000 @@ -1,10 +0,0 @@ -# This file is used by gcl to get repository specific information. -CODE_REVIEW_SERVER: codereview.chromium.org -CC_LIST: gyp-developer@googlegroups.com -VIEW_VC: http://code.google.com/p/gyp/source/detail?r= -TRY_ON_UPLOAD: True -TRYSERVER_PROJECT: gyp -TRYSERVER_PATCHLEVEL: 0 -TRYSERVER_ROOT: trunk -TRYSERVER_SVN_URL: svn://svn.chromium.org/chrome-try/try-nacl - diff -Nru mozc-1.11.1502.102/third_party/gyp/gyp mozc-1.11.1522.102/third_party/gyp/gyp --- mozc-1.11.1502.102/third_party/gyp/gyp 2009-10-22 20:02:37.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/gyp 1970-01-01 00:00:00.000000000 +0000 @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import sys - -# TODO(mark): sys.path manipulation is some temporary testing stuff. -try: - import gyp -except ImportError, e: - import os.path - sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), 'pylib')) - import gyp - -if __name__ == '__main__': - sys.exit(gyp.main(sys.argv[1:])) diff -Nru mozc-1.11.1502.102/third_party/gyp/gyp.bat mozc-1.11.1522.102/third_party/gyp/gyp.bat --- mozc-1.11.1502.102/third_party/gyp/gyp.bat 2010-03-23 18:16:53.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/gyp.bat 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python "%~dp0/gyp" %* diff -Nru mozc-1.11.1502.102/third_party/gyp/gyp_dummy.c mozc-1.11.1522.102/third_party/gyp/gyp_dummy.c --- mozc-1.11.1502.102/third_party/gyp/gyp_dummy.c 2010-01-08 14:58:07.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/gyp_dummy.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,7 +0,0 @@ -/* Copyright (c) 2009 Google Inc. All rights reserved. - * Use of this source code is governed by a BSD-style license that can be - * found in the LICENSE file. */ - -int main() { - return 0; -} diff -Nru mozc-1.11.1502.102/third_party/gyp/gyptest.py mozc-1.11.1522.102/third_party/gyp/gyptest.py --- mozc-1.11.1502.102/third_party/gyp/gyptest.py 2012-09-18 18:05:05.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/gyptest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,266 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -__doc__ = """ -gyptest.py -- test runner for GYP tests. -""" - -import os -import optparse -import subprocess -import sys - -class CommandRunner: - """ - Executor class for commands, including "commands" implemented by - Python functions. 
- """ - verbose = True - active = True - - def __init__(self, dictionary={}): - self.subst_dictionary(dictionary) - - def subst_dictionary(self, dictionary): - self._subst_dictionary = dictionary - - def subst(self, string, dictionary=None): - """ - Substitutes (via the format operator) the values in the specified - dictionary into the specified command. - - The command can be an (action, string) tuple. In all cases, we - perform substitution on strings and don't worry if something isn't - a string. (It's probably a Python function to be executed.) - """ - if dictionary is None: - dictionary = self._subst_dictionary - if dictionary: - try: - string = string % dictionary - except TypeError: - pass - return string - - def display(self, command, stdout=None, stderr=None): - if not self.verbose: - return - if type(command) == type(()): - func = command[0] - args = command[1:] - s = '%s(%s)' % (func.__name__, ', '.join(map(repr, args))) - if type(command) == type([]): - # TODO: quote arguments containing spaces - # TODO: handle meta characters? - s = ' '.join(command) - else: - s = self.subst(command) - if not s.endswith('\n'): - s += '\n' - sys.stdout.write(s) - sys.stdout.flush() - - def execute(self, command, stdout=None, stderr=None): - """ - Executes a single command. - """ - if not self.active: - return 0 - if type(command) == type(''): - command = self.subst(command) - cmdargs = shlex.split(command) - if cmdargs[0] == 'cd': - command = (os.chdir,) + tuple(cmdargs[1:]) - if type(command) == type(()): - func = command[0] - args = command[1:] - return func(*args) - else: - if stdout is sys.stdout: - # Same as passing sys.stdout, except python2.4 doesn't fail on it. - subout = None - else: - # Open pipe for anything else so Popen works on python2.4. - subout = subprocess.PIPE - if stderr is sys.stderr: - # Same as passing sys.stderr, except python2.4 doesn't fail on it. - suberr = None - elif stderr is None: - # Merge with stdout if stderr isn't specified. - suberr = subprocess.STDOUT - else: - # Open pipe for anything else so Popen works on python2.4. - suberr = subprocess.PIPE - p = subprocess.Popen(command, - shell=(sys.platform == 'win32'), - stdout=subout, - stderr=suberr) - p.wait() - if stdout is None: - self.stdout = p.stdout.read() - elif stdout is not sys.stdout: - stdout.write(p.stdout.read()) - if stderr not in (None, sys.stderr): - stderr.write(p.stderr.read()) - return p.returncode - - def run(self, command, display=None, stdout=None, stderr=None): - """ - Runs a single command, displaying it first. 
- """ - if display is None: - display = command - self.display(display) - return self.execute(command, stdout, stderr) - - -class Unbuffered: - def __init__(self, fp): - self.fp = fp - def write(self, arg): - self.fp.write(arg) - self.fp.flush() - def __getattr__(self, attr): - return getattr(self.fp, attr) - -sys.stdout = Unbuffered(sys.stdout) -sys.stderr = Unbuffered(sys.stderr) - - -def find_all_gyptest_files(directory): - result = [] - for root, dirs, files in os.walk(directory): - if '.svn' in dirs: - dirs.remove('.svn') - result.extend([ os.path.join(root, f) for f in files - if f.startswith('gyptest') and f.endswith('.py') ]) - result.sort() - return result - - -def main(argv=None): - if argv is None: - argv = sys.argv - - usage = "gyptest.py [-ahlnq] [-f formats] [test ...]" - parser = optparse.OptionParser(usage=usage) - parser.add_option("-a", "--all", action="store_true", - help="run all tests") - parser.add_option("-C", "--chdir", action="store", default=None, - help="chdir to the specified directory") - parser.add_option("-f", "--format", action="store", default='', - help="run tests with the specified formats") - parser.add_option("-G", '--gyp_option', action="append", default=[], - help="Add -G options to the gyp command line") - parser.add_option("-l", "--list", action="store_true", - help="list available tests and exit") - parser.add_option("-n", "--no-exec", action="store_true", - help="no execute, just print the command line") - parser.add_option("--passed", action="store_true", - help="report passed tests") - parser.add_option("--path", action="append", default=[], - help="additional $PATH directory") - parser.add_option("-q", "--quiet", action="store_true", - help="quiet, don't print test command lines") - opts, args = parser.parse_args(argv[1:]) - - if opts.chdir: - os.chdir(opts.chdir) - - if opts.path: - extra_path = [os.path.abspath(p) for p in opts.path] - extra_path = os.pathsep.join(extra_path) - os.environ['PATH'] += os.pathsep + extra_path - - if not args: - if not opts.all: - sys.stderr.write('Specify -a to get all tests.\n') - return 1 - args = ['test'] - - tests = [] - for arg in args: - if os.path.isdir(arg): - tests.extend(find_all_gyptest_files(os.path.normpath(arg))) - else: - tests.append(arg) - - if opts.list: - for test in tests: - print test - sys.exit(0) - - CommandRunner.verbose = not opts.quiet - CommandRunner.active = not opts.no_exec - cr = CommandRunner() - - os.environ['PYTHONPATH'] = os.path.abspath('test/lib') - if not opts.quiet: - sys.stdout.write('PYTHONPATH=%s\n' % os.environ['PYTHONPATH']) - - passed = [] - failed = [] - no_result = [] - - if opts.format: - format_list = opts.format.split(',') - else: - # TODO: not duplicate this mapping from pylib/gyp/__init__.py - format_list = { - 'freebsd7': ['make'], - 'freebsd8': ['make'], - 'cygwin': ['msvs'], - 'win32': ['msvs', 'ninja'], - 'linux2': ['make', 'ninja'], - 'linux3': ['make', 'ninja'], - 'darwin': ['make', 'ninja', 'xcode'], - }[sys.platform] - - for format in format_list: - os.environ['TESTGYP_FORMAT'] = format - if not opts.quiet: - sys.stdout.write('TESTGYP_FORMAT=%s\n' % format) - - gyp_options = [] - for option in opts.gyp_option: - gyp_options += ['-G', option] - if gyp_options and not opts.quiet: - sys.stdout.write('Extra Gyp options: %s\n' % gyp_options) - - for test in tests: - status = cr.run([sys.executable, test] + gyp_options, - stdout=sys.stdout, - stderr=sys.stderr) - if status == 2: - no_result.append(test) - elif status: - failed.append(test) - else: - 
passed.append(test) - - if not opts.quiet: - def report(description, tests): - if tests: - if len(tests) == 1: - sys.stdout.write("\n%s the following test:\n" % description) - else: - fmt = "\n%s the following %d tests:\n" - sys.stdout.write(fmt % (description, len(tests))) - sys.stdout.write("\t" + "\n\t".join(tests) + "\n") - - if opts.passed: - report("Passed", passed) - report("Failed", failed) - report("No result from", no_result) - - if failed: - return 1 - else: - return 0 - - -if __name__ == "__main__": - sys.exit(main()) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSNew.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSNew.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSNew.py 2012-09-19 19:53:22.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSNew.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,339 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""New implementation of Visual Studio project generation for SCons.""" - -import os -import random - -import gyp.common - -# hashlib is supplied as of Python 2.5 as the replacement interface for md5 -# and other secure hashes. In 2.6, md5 is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import md5 otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_md5 = hashlib.md5 -except ImportError: - import md5 - _new_md5 = md5.new - - -# Initialize random number generator -random.seed() - -# GUIDs for project types -ENTRY_TYPE_GUIDS = { - 'project': '{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}', - 'folder': '{2150E333-8FDC-42A3-9474-1A3956D46DE8}', -} - -#------------------------------------------------------------------------------ -# Helper functions - - -def MakeGuid(name, seed='msvs_new'): - """Returns a GUID for the specified target name. - - Args: - name: Target name. - seed: Seed for MD5 hash. - Returns: - A GUID-line string calculated from the name and seed. - - This generates something which looks like a GUID, but depends only on the - name and seed. This means the same name/seed will always generate the same - GUID, so that projects and solutions which refer to each other can explicitly - determine the GUID to refer to explicitly. It also means that the GUID will - not change when the project for a target is rebuilt. - """ - # Calculate a MD5 signature for the seed and name. - d = _new_md5(str(seed) + str(name)).hexdigest().upper() - # Convert most of the signature to GUID form (discard the rest) - guid = ('{' + d[:8] + '-' + d[8:12] + '-' + d[12:16] + '-' + d[16:20] - + '-' + d[20:32] + '}') - return guid - -#------------------------------------------------------------------------------ - - -class MSVSSolutionEntry(object): - def __cmp__(self, other): - # Sort by name then guid (so things are in order on vs2008). - return cmp((self.name, self.get_guid()), (other.name, other.get_guid())) - - -class MSVSFolder(MSVSSolutionEntry): - """Folder in a Visual Studio project or solution.""" - - def __init__(self, path, name = None, entries = None, - guid = None, items = None): - """Initializes the folder. - - Args: - path: Full path to the folder. - name: Name of the folder. - entries: List of folder entries to nest inside this folder. May contain - Folder or Project objects. May be None, if the folder is empty. - guid: GUID to use for folder, if not None. - items: List of solution items to include in the folder project. 
May be - None, if the folder does not directly contain items. - """ - if name: - self.name = name - else: - # Use last layer. - self.name = os.path.basename(path) - - self.path = path - self.guid = guid - - # Copy passed lists (or set to empty lists) - self.entries = sorted(list(entries or [])) - self.items = list(items or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['folder'] - - def get_guid(self): - if self.guid is None: - # Use consistent guids for folders (so things don't regenerate). - self.guid = MakeGuid(self.path, seed='msvs_folder') - return self.guid - - -#------------------------------------------------------------------------------ - - -class MSVSProject(MSVSSolutionEntry): - """Visual Studio project.""" - - def __init__(self, path, name = None, dependencies = None, guid = None, - spec = None, build_file = None, config_platform_overrides = None, - fixpath_prefix = None): - """Initializes the project. - - Args: - path: Absolute path to the project file. - name: Name of project. If None, the name will be the same as the base - name of the project file. - dependencies: List of other Project objects this project is dependent - upon, if not None. - guid: GUID to use for project, if not None. - spec: Dictionary specifying how to build this project. - build_file: Filename of the .gyp file that the vcproj file comes from. - config_platform_overrides: optional dict of configuration platforms to - used in place of the default for this target. - fixpath_prefix: the path used to adjust the behavior of _fixpath - """ - self.path = path - self.guid = guid - self.spec = spec - self.build_file = build_file - # Use project filename if name not specified - self.name = name or os.path.splitext(os.path.basename(path))[0] - - # Copy passed lists (or set to empty lists) - self.dependencies = list(dependencies or []) - - self.entry_type_guid = ENTRY_TYPE_GUIDS['project'] - - if config_platform_overrides: - self.config_platform_overrides = config_platform_overrides - else: - self.config_platform_overrides = {} - self.fixpath_prefix = fixpath_prefix - self.msbuild_toolset = None - - def set_dependencies(self, dependencies): - self.dependencies = list(dependencies or []) - - def get_guid(self): - if self.guid is None: - # Set GUID from path - # TODO(rspangler): This is fragile. - # 1. We can't just use the project filename sans path, since there could - # be multiple projects with the same base name (for example, - # foo/unittest.vcproj and bar/unittest.vcproj). - # 2. The path needs to be relative to $SOURCE_ROOT, so that the project - # GUID is the same whether it's included from base/base.sln or - # foo/bar/baz/baz.sln. - # 3. The GUID needs to be the same each time this builder is invoked, so - # that we don't need to rebuild the solution when the project changes. - # 4. We should be able to handle pre-built project files by reading the - # GUID from the files. - self.guid = MakeGuid(self.name) - return self.guid - - def set_msbuild_toolset(self, msbuild_toolset): - self.msbuild_toolset = msbuild_toolset - -#------------------------------------------------------------------------------ - - -class MSVSSolution: - """Visual Studio solution.""" - - def __init__(self, path, version, entries=None, variants=None, - websiteProperties=True): - """Initializes the solution. - - Args: - path: Path to solution file. - version: Format version to emit. - entries: List of entries in solution. May contain Folder or Project - objects. May be None, if the folder is empty. 
- variants: List of build variant strings. If none, a default list will - be used. - websiteProperties: Flag to decide if the website properties section - is generated. - """ - self.path = path - self.websiteProperties = websiteProperties - self.version = version - - # Copy passed lists (or set to empty lists) - self.entries = list(entries or []) - - if variants: - # Copy passed list - self.variants = variants[:] - else: - # Use default - self.variants = ['Debug|Win32', 'Release|Win32'] - # TODO(rspangler): Need to be able to handle a mapping of solution config - # to project config. Should we be able to handle variants being a dict, - # or add a separate variant_map variable? If it's a dict, we can't - # guarantee the order of variants since dict keys aren't ordered. - - - # TODO(rspangler): Automatically write to disk for now; should delay until - # node-evaluation time. - self.Write() - - - def Write(self, writer=gyp.common.WriteOnDiff): - """Writes the solution file to disk. - - Raises: - IndexError: An entry appears multiple times. - """ - # Walk the entry tree and collect all the folders and projects. - all_entries = set() - entries_to_check = self.entries[:] - while entries_to_check: - e = entries_to_check.pop(0) - - # If this entry has been visited, nothing to do. - if e in all_entries: - continue - - all_entries.add(e) - - # If this is a folder, check its entries too. - if isinstance(e, MSVSFolder): - entries_to_check += e.entries - - all_entries = sorted(all_entries) - - # Open file and print header - f = writer(self.path) - f.write('Microsoft Visual Studio Solution File, ' - 'Format Version %s\r\n' % self.version.SolutionVersion()) - f.write('# %s\r\n' % self.version.Description()) - - # Project entries - sln_root = os.path.split(self.path)[0] - for e in all_entries: - relative_path = gyp.common.RelativePath(e.path, sln_root) - # msbuild does not accept an empty folder_name. - # use '.' in case relative_path is empty. - folder_name = relative_path.replace('/', '\\') or '.' - f.write('Project("%s") = "%s", "%s", "%s"\r\n' % ( - e.entry_type_guid, # Entry type GUID - e.name, # Folder name - folder_name, # Folder name (again) - e.get_guid(), # Entry GUID - )) - - # TODO(rspangler): Need a way to configure this stuff - if self.websiteProperties: - f.write('\tProjectSection(WebsiteProperties) = preProject\r\n' - '\t\tDebug.AspNetCompiler.Debug = "True"\r\n' - '\t\tRelease.AspNetCompiler.Debug = "False"\r\n' - '\tEndProjectSection\r\n') - - if isinstance(e, MSVSFolder): - if e.items: - f.write('\tProjectSection(SolutionItems) = preProject\r\n') - for i in e.items: - f.write('\t\t%s = %s\r\n' % (i, i)) - f.write('\tEndProjectSection\r\n') - - if isinstance(e, MSVSProject): - if e.dependencies: - f.write('\tProjectSection(ProjectDependencies) = postProject\r\n') - for d in e.dependencies: - f.write('\t\t%s = %s\r\n' % (d.get_guid(), d.get_guid())) - f.write('\tEndProjectSection\r\n') - - f.write('EndProject\r\n') - - # Global section - f.write('Global\r\n') - - # Configurations (variants) - f.write('\tGlobalSection(SolutionConfigurationPlatforms) = preSolution\r\n') - for v in self.variants: - f.write('\t\t%s = %s\r\n' % (v, v)) - f.write('\tEndGlobalSection\r\n') - - # Sort config guids for easier diffing of solution changes. 
- config_guids = [] - config_guids_overrides = {} - for e in all_entries: - if isinstance(e, MSVSProject): - config_guids.append(e.get_guid()) - config_guids_overrides[e.get_guid()] = e.config_platform_overrides - config_guids.sort() - - f.write('\tGlobalSection(ProjectConfigurationPlatforms) = postSolution\r\n') - for g in config_guids: - for v in self.variants: - nv = config_guids_overrides[g].get(v, v) - # Pick which project configuration to build for this solution - # configuration. - f.write('\t\t%s.%s.ActiveCfg = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - - # Enable project in this solution configuration. - f.write('\t\t%s.%s.Build.0 = %s\r\n' % ( - g, # Project GUID - v, # Solution build configuration - nv, # Project build config for that solution config - )) - f.write('\tEndGlobalSection\r\n') - - # TODO(rspangler): Should be able to configure this stuff too (though I've - # never seen this be any different) - f.write('\tGlobalSection(SolutionProperties) = preSolution\r\n') - f.write('\t\tHideSolutionNode = FALSE\r\n') - f.write('\tEndGlobalSection\r\n') - - # Folder mappings - # TODO(rspangler): Should omit this section if there are no folders - f.write('\tGlobalSection(NestedProjects) = preSolution\r\n') - for e in all_entries: - if not isinstance(e, MSVSFolder): - continue # Does not apply to projects, only folders - for subentry in e.entries: - f.write('\t\t%s = %s\r\n' % (subentry.get_guid(), e.get_guid())) - f.write('\tEndGlobalSection\r\n') - - f.write('EndGlobal\r\n') - - f.close() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSProject.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSProject.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSProject.py 2012-05-25 21:36:14.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSProject.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,208 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.common -import gyp.easy_xml as easy_xml - -#------------------------------------------------------------------------------ - - -class Tool(object): - """Visual Studio tool.""" - - def __init__(self, name, attrs=None): - """Initializes the tool. - - Args: - name: Tool name. - attrs: Dict of tool attributes; may be None. - """ - self._attrs = attrs or {} - self._attrs['Name'] = name - - def _GetSpecification(self): - """Creates an element for the tool. - - Returns: - A new xml.dom.Element for the tool. - """ - return ['Tool', self._attrs] - -class Filter(object): - """Visual Studio filter - that is, a virtual folder.""" - - def __init__(self, name, contents=None): - """Initializes the folder. - - Args: - name: Filter (folder) name. - contents: List of filenames and/or Filter objects contained. - """ - self.name = name - self.contents = list(contents or []) - - -#------------------------------------------------------------------------------ - - -class Writer(object): - """Visual Studio XML project writer.""" - - def __init__(self, project_path, version, name, guid=None, platforms=None): - """Initializes the project. - - Args: - project_path: Path to the project file. - version: Format version to emit. - name: Name of the project. - guid: GUID to use for project, if not None. - platforms: Array of string, the supported platforms. 
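MSVSProject.get_guid() removed above derives the project GUID from the project name via MakeGuid(), which is defined near the top of the removed MSVSNew.py and therefore falls outside this hunk. The point of the TODO list there is determinism: the same name must always hash to the same GUID so that regenerating the solution does not rewrite every .sln/.vcproj reference. A minimal standalone sketch of that idea, assuming an MD5-style digest (not part of the patch):

import hashlib

def make_guid_sketch(name, seed='msvs_new'):
    # Hash name+seed and shape the digest like a GUID; same inputs, same GUID.
    d = hashlib.md5((str(seed) + str(name)).encode('utf-8')).hexdigest().upper()
    return '{%s-%s-%s-%s-%s}' % (d[0:8], d[8:12], d[12:16], d[16:20], d[20:32])

# Regenerating a solution with the same project names yields identical GUIDs.
assert make_guid_sketch('base_unittests') == make_guid_sketch('base_unittests')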
If null, ['Win32'] - """ - self.project_path = project_path - self.version = version - self.name = name - self.guid = guid - - # Default to Win32 for platforms. - if not platforms: - platforms = ['Win32'] - - # Initialize the specifications of the various sections. - self.platform_section = ['Platforms'] - for platform in platforms: - self.platform_section.append(['Platform', {'Name': platform}]) - self.tool_files_section = ['ToolFiles'] - self.configurations_section = ['Configurations'] - self.files_section = ['Files'] - - # Keep a dict keyed on filename to speed up access. - self.files_dict = dict() - - def AddToolFile(self, path): - """Adds a tool file to the project. - - Args: - path: Relative path from project to tool file. - """ - self.tool_files_section.append(['ToolFile', {'RelativePath': path}]) - - def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools): - """Returns the specification for a configuration. - - Args: - config_type: Type of configuration node. - config_name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - Returns: - """ - # Handle defaults - if not attrs: - attrs = {} - if not tools: - tools = [] - - # Add configuration node and its attributes - node_attrs = attrs.copy() - node_attrs['Name'] = config_name - specification = [config_type, node_attrs] - - # Add tool nodes and their attributes - if tools: - for t in tools: - if isinstance(t, Tool): - specification.append(t._GetSpecification()) - else: - specification.append(Tool(t)._GetSpecification()) - return specification - - - def AddConfig(self, name, attrs=None, tools=None): - """Adds a configuration to the project. - - Args: - name: Configuration name. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - """ - spec = self._GetSpecForConfiguration('Configuration', name, attrs, tools) - self.configurations_section.append(spec) - - def _AddFilesToNode(self, parent, files): - """Adds files and/or filters to the parent node. - - Args: - parent: Destination node - files: A list of Filter objects and/or relative paths to files. - - Will call itself recursively, if the files list contains Filter objects. - """ - for f in files: - if isinstance(f, Filter): - node = ['Filter', {'Name': f.name}] - self._AddFilesToNode(node, f.contents) - else: - node = ['File', {'RelativePath': f}] - self.files_dict[f] = node - parent.append(node) - - def AddFiles(self, files): - """Adds files to the project. - - Args: - files: A list of Filter objects and/or relative paths to files. - - This makes a copy of the file/filter tree at the time of this call. If you - later add files to a Filter object which was passed into a previous call - to AddFiles(), it will not be reflected in this project. - """ - self._AddFilesToNode(self.files_section, files) - # TODO(rspangler) This also doesn't handle adding files to an existing - # filter. That is, it doesn't merge the trees. - - def AddFileConfig(self, path, config, attrs=None, tools=None): - """Adds a configuration to a file. - - Args: - path: Relative path to the file. - config: Name of configuration to add. - attrs: Dict of configuration attributes; may be None. - tools: List of tools (strings or Tool objects); may be None. - - Raises: - ValueError: Relative path does not match any file added via AddFiles(). 
- """ - # Find the file node with the right relative path - parent = self.files_dict.get(path) - if not parent: - raise ValueError('AddFileConfig: file "%s" not in project.' % path) - - # Add the config to the file node - spec = self._GetSpecForConfiguration('FileConfiguration', config, attrs, - tools) - parent.append(spec) - - def WriteIfChanged(self): - """Writes the project file.""" - # First create XML content definition - content = [ - 'VisualStudioProject', - {'ProjectType': 'Visual C++', - 'Version': self.version.ProjectVersion(), - 'Name': self.name, - 'ProjectGUID': self.guid, - 'RootNamespace': self.name, - 'Keyword': 'Win32Proj' - }, - self.platform_section, - self.tool_files_section, - self.configurations_section, - ['References'], # empty section - self.files_section, - ['Globals'] # empty section - ] - easy_xml.WriteXmlIfChanged(content, self.project_path, - encoding="Windows-1252") diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSSettings.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSSettings.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSSettings.py 2012-08-09 21:33:12.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSSettings.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1046 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Code to validate and convert settings of the Microsoft build tools. - -This file contains code to validate and convert settings of the Microsoft -build tools. The function ConvertToMSBuildSettings(), ValidateMSVSSettings(), -and ValidateMSBuildSettings() are the entry points. - -This file was created by comparing the projects created by Visual Studio 2008 -and Visual Studio 2010 for all available settings through the user interface. -The MSBuild schemas were also considered. They are typically found in the -MSBuild install directory, e.g. c:\Program Files (x86)\MSBuild -""" - -import sys -import re - -# Dictionaries of settings validators. The key is the tool name, the value is -# a dictionary mapping setting names to validation functions. -_msvs_validators = {} -_msbuild_validators = {} - - -# A dictionary of settings converters. The key is the tool name, the value is -# a dictionary mapping setting names to conversion functions. -_msvs_to_msbuild_converters = {} - - -# Tool name mapping from MSVS to MSBuild. -_msbuild_name_of_tool = {} - - -class _Tool(object): - """Represents a tool used by MSVS or MSBuild. - - Attributes: - msvs_name: The name of the tool in MSVS. - msbuild_name: The name of the tool in MSBuild. - """ - - def __init__(self, msvs_name, msbuild_name): - self.msvs_name = msvs_name - self.msbuild_name = msbuild_name - - -def _AddTool(tool): - """Adds a tool to the four dictionaries used to process settings. - - This only defines the tool. Each setting also needs to be added. - - Args: - tool: The _Tool object to be added. - """ - _msvs_validators[tool.msvs_name] = {} - _msbuild_validators[tool.msbuild_name] = {} - _msvs_to_msbuild_converters[tool.msvs_name] = {} - _msbuild_name_of_tool[tool.msvs_name] = tool.msbuild_name - - -def _GetMSBuildToolSettings(msbuild_settings, tool): - """Returns an MSBuild tool dictionary. Creates it if needed.""" - return msbuild_settings.setdefault(tool.msbuild_name, {}) - - -class _Type(object): - """Type of settings (Base class).""" - - def ValidateMSVS(self, value): - """Verifies that the value is legal for MSVS. 
- - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSVS. - """ - - def ValidateMSBuild(self, value): - """Verifies that the value is legal for MSBuild. - - Args: - value: the value to check for this type. - - Raises: - ValueError if value is not valid for MSBuild. - """ - - def ConvertToMSBuild(self, value): - """Returns the MSBuild equivalent of the MSVS value given. - - Args: - value: the MSVS value to convert. - - Returns: - the MSBuild equivalent. - - Raises: - ValueError if value is not valid. - """ - return value - - -class _String(_Type): - """A setting that's just a string.""" - - def ValidateMSVS(self, value): - if not isinstance(value, basestring): - raise ValueError('expected string; got %r' % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, basestring): - raise ValueError('expected string; got %r' % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - return ConvertVCMacrosToMSBuild(value) - - -class _StringList(_Type): - """A settings that's a list of strings.""" - - def ValidateMSVS(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): - raise ValueError('expected string list; got %r' % value) - - def ValidateMSBuild(self, value): - if not isinstance(value, basestring) and not isinstance(value, list): - raise ValueError('expected string list; got %r' % value) - - def ConvertToMSBuild(self, value): - # Convert the macros - if isinstance(value, list): - return [ConvertVCMacrosToMSBuild(i) for i in value] - else: - return ConvertVCMacrosToMSBuild(value) - - -class _Boolean(_Type): - """Boolean settings, can have the values 'false' or 'true'.""" - - def _Validate(self, value): - if value != 'true' and value != 'false': - raise ValueError('expected bool; got %r' % value) - - def ValidateMSVS(self, value): - self._Validate(value) - - def ValidateMSBuild(self, value): - self._Validate(value) - - def ConvertToMSBuild(self, value): - self._Validate(value) - return value - - -class _Integer(_Type): - """Integer settings.""" - - def __init__(self, msbuild_base=10): - _Type.__init__(self) - self._msbuild_base = msbuild_base - - def ValidateMSVS(self, value): - # Try to convert, this will raise ValueError if invalid. - self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - # Try to convert, this will raise ValueError if invalid. - int(value, self._msbuild_base) - - def ConvertToMSBuild(self, value): - msbuild_format = (self._msbuild_base == 10) and '%d' or '0x%04x' - return msbuild_format % int(value) - - -class _Enumeration(_Type): - """Type of settings that is an enumeration. - - In MSVS, the values are indexes like '0', '1', and '2'. - MSBuild uses text labels that are more representative, like 'Win32'. - - Constructor args: - label_list: an array of MSBuild labels that correspond to the MSVS index. - In the rare cases where MSVS has skipped an index value, None is - used in the array to indicate the unused spot. - new: an array of labels that are new to MSBuild. - """ - - def __init__(self, label_list, new=None): - _Type.__init__(self) - self._label_list = label_list - self._msbuild_values = set(value for value in label_list - if value is not None) - if new is not None: - self._msbuild_values.update(new) - - def ValidateMSVS(self, value): - # Try to convert. It will raise an exception if not valid. 
- self.ConvertToMSBuild(value) - - def ValidateMSBuild(self, value): - if value not in self._msbuild_values: - raise ValueError('unrecognized enumerated value %s' % value) - - def ConvertToMSBuild(self, value): - index = int(value) - if index < 0 or index >= len(self._label_list): - raise ValueError('index value (%d) not in expected range [0, %d)' % - (index, len(self._label_list))) - label = self._label_list[index] - if label is None: - raise ValueError('converted value for %s not specified.' % value) - return label - - -# Instantiate the various generic types. -_boolean = _Boolean() -_integer = _Integer() -# For now, we don't do any special validation on these types: -_string = _String() -_file_name = _String() -_folder_name = _String() -_file_list = _StringList() -_folder_list = _StringList() -_string_list = _StringList() -# Some boolean settings went from numerical values to boolean. The -# mapping is 0: default, 1: false, 2: true. -_newly_boolean = _Enumeration(['', 'false', 'true']) - - -def _Same(tool, name, setting_type): - """Defines a setting that has the same name in MSVS and MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _Renamed(tool, name, name, setting_type) - - -def _Renamed(tool, msvs_name, msbuild_name, setting_type): - """Defines a setting for which the name has changed. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting. - msbuild_name: the name of the MSBuild setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - msbuild_tool_settings[msbuild_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_name] = setting_type.ValidateMSVS - _msbuild_validators[tool.msbuild_name][msbuild_name] = ( - setting_type.ValidateMSBuild) - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _Moved(tool, settings_name, msbuild_tool_name, setting_type): - _MovedAndRenamed(tool, settings_name, msbuild_tool_name, settings_name, - setting_type) - - -def _MovedAndRenamed(tool, msvs_settings_name, msbuild_tool_name, - msbuild_settings_name, setting_type): - """Defines a setting that may have moved to a new section. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_settings_name: the MSVS name of the setting. - msbuild_tool_name: the name of the MSBuild tool to place the setting under. - msbuild_settings_name: the MSBuild name of the setting. - setting_type: the type of this setting. - """ - - def _Translate(value, msbuild_settings): - tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {}) - tool_settings[msbuild_settings_name] = setting_type.ConvertToMSBuild(value) - - _msvs_validators[tool.msvs_name][msvs_settings_name] = ( - setting_type.ValidateMSVS) - validator = setting_type.ValidateMSBuild - _msbuild_validators[msbuild_tool_name][msbuild_settings_name] = validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_settings_name] = _Translate - - -def _MSVSOnly(tool, name, setting_type): - """Defines a setting that is only found in MSVS. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. 
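_Enumeration.ConvertToMSBuild() above maps an MSVS numeric index to the MSBuild label at that position, and raises when the index hits a gap (a None placeholder for an index MSVS skipped). A standalone sketch of the same lookup rule, using the DebugInformationFormat label list defined further down in this file (this is an illustration, not the removed class itself):

def convert_enum_sketch(labels, value):
    # Mirror _Enumeration.ConvertToMSBuild: index into the label list.
    index = int(value)
    if index < 0 or index >= len(labels):
        raise ValueError('index value (%d) not in expected range [0, %d)'
                         % (index, len(labels)))
    label = labels[index]
    if label is None:
        raise ValueError('converted value for %s not specified.' % value)
    return label

debug_format = ['', 'OldStyle', None, 'ProgramDatabase', 'EditAndContinue']
print(convert_enum_sketch(debug_format, '3'))  # -> 'ProgramDatabase' (/Zi)
# '2' would raise, producing the "converted value for 2 not specified." warning
# exercised by the removed MSVSSettings_test.py.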
- """ - - def _Translate(unused_value, unused_msbuild_settings): - # Since this is for MSVS only settings, no translation will happen. - pass - - _msvs_validators[tool.msvs_name][name] = setting_type.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][name] = _Translate - - -def _MSBuildOnly(tool, name, setting_type): - """Defines a setting that is only found in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - name: the name of the setting. - setting_type: the type of this setting. - """ - _msbuild_validators[tool.msbuild_name][name] = setting_type.ValidateMSBuild - - -def _ConvertedToAdditionalOption(tool, msvs_name, flag): - """Defines a setting that's handled via a command line option in MSBuild. - - Args: - tool: a dictionary that gives the names of the tool for MSVS and MSBuild. - msvs_name: the name of the MSVS setting that if 'true' becomes a flag - flag: the flag to insert at the end of the AdditionalOptions - """ - - def _Translate(value, msbuild_settings): - if value == 'true': - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if 'AdditionalOptions' in tool_settings: - new_flags = '%s %s' % (tool_settings['AdditionalOptions'], flag) - else: - new_flags = flag - tool_settings['AdditionalOptions'] = new_flags - _msvs_validators[tool.msvs_name][msvs_name] = _boolean.ValidateMSVS - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -def _CustomGeneratePreprocessedFile(tool, msvs_name): - def _Translate(value, msbuild_settings): - tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool) - if value == '0': - tool_settings['PreprocessToFile'] = 'false' - tool_settings['PreprocessSuppressLineNumbers'] = 'false' - elif value == '1': # /P - tool_settings['PreprocessToFile'] = 'true' - tool_settings['PreprocessSuppressLineNumbers'] = 'false' - elif value == '2': # /EP /P - tool_settings['PreprocessToFile'] = 'true' - tool_settings['PreprocessSuppressLineNumbers'] = 'true' - else: - raise ValueError('value must be one of [0, 1, 2]; got %s' % value) - # Create a bogus validator that looks for '0', '1', or '2' - msvs_validator = _Enumeration(['a', 'b', 'c']).ValidateMSVS - _msvs_validators[tool.msvs_name][msvs_name] = msvs_validator - msbuild_validator = _boolean.ValidateMSBuild - msbuild_tool_validators = _msbuild_validators[tool.msbuild_name] - msbuild_tool_validators['PreprocessToFile'] = msbuild_validator - msbuild_tool_validators['PreprocessSuppressLineNumbers'] = msbuild_validator - _msvs_to_msbuild_converters[tool.msvs_name][msvs_name] = _Translate - - -fix_vc_macro_slashes_regex_list = ('IntDir', 'OutDir') -fix_vc_macro_slashes_regex = re.compile( - r'(\$\((?:%s)\))(?:[\\/]+)' % "|".join(fix_vc_macro_slashes_regex_list) -) - -def FixVCMacroSlashes(s): - """Replace macros which have excessive following slashes. - - These macros are known to have a built-in trailing slash. Furthermore, many - scripts hiccup on processing paths with extra slashes in the middle. - - This list is probably not exhaustive. Add as needed. - """ - if '$' in s: - s = fix_vc_macro_slashes_regex.sub(r'\1', s) - return s - - -def ConvertVCMacrosToMSBuild(s): - """Convert the the MSVS macros found in the string to the MSBuild equivalent. - - This list is probably not exhaustive. Add as needed. 
- """ - if '$' in s: - replace_map = { - '$(ConfigurationName)': '$(Configuration)', - '$(InputDir)': '%(RootDir)%(Directory)', - '$(InputExt)': '%(Extension)', - '$(InputFileName)': '%(Filename)%(Extension)', - '$(InputName)': '%(Filename)', - '$(InputPath)': '%(FullPath)', - '$(ParentName)': '$(ProjectFileName)', - '$(PlatformName)': '$(Platform)', - '$(SafeInputName)': '%(Filename)', - } - for old, new in replace_map.iteritems(): - s = s.replace(old, new) - s = FixVCMacroSlashes(s) - return s - - -def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr): - """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+). - - Args: - msvs_settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - - Returns: - A dictionary of MSBuild settings. The key is either the MSBuild tool name - or the empty string (for the global settings). The values are themselves - dictionaries of settings and their values. - """ - msbuild_settings = {} - for msvs_tool_name, msvs_tool_settings in msvs_settings.iteritems(): - if msvs_tool_name in _msvs_to_msbuild_converters: - msvs_tool = _msvs_to_msbuild_converters[msvs_tool_name] - for msvs_setting, msvs_value in msvs_tool_settings.iteritems(): - if msvs_setting in msvs_tool: - # Invoke the translation function. - try: - msvs_tool[msvs_setting](msvs_value, msbuild_settings) - except ValueError, e: - print >> stderr, ('Warning: while converting %s/%s to MSBuild, ' - '%s' % (msvs_tool_name, msvs_setting, e)) - else: - # We don't know this setting. Give a warning. - print >> stderr, ('Warning: unrecognized setting %s/%s ' - 'while converting to MSBuild.' % - (msvs_tool_name, msvs_setting)) - else: - print >> stderr, ('Warning: unrecognized tool %s while converting to ' - 'MSBuild.' % msvs_tool_name) - return msbuild_settings - - -def ValidateMSVSSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSVS. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msvs_validators, settings, stderr) - - -def ValidateMSBuildSettings(settings, stderr=sys.stderr): - """Validates that the names of the settings are valid for MSBuild. - - Args: - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. - """ - _ValidateSettings(_msbuild_validators, settings, stderr) - - -def _ValidateSettings(validators, settings, stderr): - """Validates that the settings are valid for MSBuild or MSVS. - - We currently only validate the names of the settings, not their values. - - Args: - validators: A dictionary of tools and their validators. - settings: A dictionary. The key is the tool name. The values are - themselves dictionaries of settings and their values. - stderr: The stream receiving the error messages. 
- """ - for tool_name in settings: - if tool_name in validators: - tool_validators = validators[tool_name] - for setting, value in settings[tool_name].iteritems(): - if setting in tool_validators: - try: - tool_validators[setting](value) - except ValueError, e: - print >> stderr, ('Warning: for %s/%s, %s' % - (tool_name, setting, e)) - else: - print >> stderr, ('Warning: unrecognized setting %s/%s' % - (tool_name, setting)) - else: - print >> stderr, ('Warning: unrecognized tool %s' % tool_name) - - -# MSVS and MBuild names of the tools. -_compile = _Tool('VCCLCompilerTool', 'ClCompile') -_link = _Tool('VCLinkerTool', 'Link') -_midl = _Tool('VCMIDLTool', 'Midl') -_rc = _Tool('VCResourceCompilerTool', 'ResourceCompile') -_lib = _Tool('VCLibrarianTool', 'Lib') -_manifest = _Tool('VCManifestTool', 'Manifest') - - -_AddTool(_compile) -_AddTool(_link) -_AddTool(_midl) -_AddTool(_rc) -_AddTool(_lib) -_AddTool(_manifest) -# Add sections only found in the MSBuild settings. -_msbuild_validators[''] = {} -_msbuild_validators['ProjectReference'] = {} -_msbuild_validators['ManifestResourceCompile'] = {} - -# Descriptions of the compiler options, i.e. VCCLCompilerTool in MSVS and -# ClCompile in MSBuild. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\cl.xml" for -# the schema of the MSBuild ClCompile settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_compile, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_compile, 'AdditionalOptions', _string_list) -_Same(_compile, 'AdditionalUsingDirectories', _folder_list) # /AI -_Same(_compile, 'AssemblerListingLocation', _file_name) # /Fa -_Same(_compile, 'BrowseInformationFile', _file_name) -_Same(_compile, 'BufferSecurityCheck', _boolean) # /GS -_Same(_compile, 'DisableLanguageExtensions', _boolean) # /Za -_Same(_compile, 'DisableSpecificWarnings', _string_list) # /wd -_Same(_compile, 'EnableFiberSafeOptimizations', _boolean) # /GT -_Same(_compile, 'EnablePREfast', _boolean) # /analyze Visible='false' -_Same(_compile, 'ExpandAttributedSource', _boolean) # /Fx -_Same(_compile, 'FloatingPointExceptions', _boolean) # /fp:except -_Same(_compile, 'ForceConformanceInForLoopScope', _boolean) # /Zc:forScope -_Same(_compile, 'ForcedIncludeFiles', _file_list) # /FI -_Same(_compile, 'ForcedUsingFiles', _file_list) # /FU -_Same(_compile, 'GenerateXMLDocumentationFiles', _boolean) # /doc -_Same(_compile, 'IgnoreStandardIncludePath', _boolean) # /X -_Same(_compile, 'MinimalRebuild', _boolean) # /Gm -_Same(_compile, 'OmitDefaultLibName', _boolean) # /Zl -_Same(_compile, 'OmitFramePointers', _boolean) # /Oy -_Same(_compile, 'PreprocessorDefinitions', _string_list) # /D -_Same(_compile, 'ProgramDataBaseFileName', _file_name) # /Fd -_Same(_compile, 'RuntimeTypeInfo', _boolean) # /GR -_Same(_compile, 'ShowIncludes', _boolean) # /showIncludes -_Same(_compile, 'SmallerTypeCheck', _boolean) # /RTCc -_Same(_compile, 'StringPooling', _boolean) # /GF -_Same(_compile, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_compile, 'TreatWChar_tAsBuiltInType', _boolean) # /Zc:wchar_t -_Same(_compile, 'UndefineAllPreprocessorDefinitions', _boolean) # /u -_Same(_compile, 'UndefinePreprocessorDefinitions', _string_list) # /U -_Same(_compile, 'UseFullPaths', _boolean) # /FC -_Same(_compile, 'WholeProgramOptimization', _boolean) # /GL -_Same(_compile, 'XMLDocumentationFileName', _file_name) - -_Same(_compile, 'AssemblerOutput', - _Enumeration(['NoListing', - 'AssemblyCode', # /FA - 'All', # /FAcs - 'AssemblyAndMachineCode', # /FAc - 
'AssemblyAndSourceCode'])) # /FAs -_Same(_compile, 'BasicRuntimeChecks', - _Enumeration(['Default', - 'StackFrameRuntimeCheck', # /RTCs - 'UninitializedLocalUsageCheck', # /RTCu - 'EnableFastChecks'])) # /RTC1 -_Same(_compile, 'BrowseInformation', - _Enumeration(['false', - 'true', # /FR - 'true'])) # /Fr -_Same(_compile, 'CallingConvention', - _Enumeration(['Cdecl', # /Gd - 'FastCall', # /Gr - 'StdCall'])) # /Gz -_Same(_compile, 'CompileAs', - _Enumeration(['Default', - 'CompileAsC', # /TC - 'CompileAsCpp'])) # /TP -_Same(_compile, 'DebugInformationFormat', - _Enumeration(['', # Disabled - 'OldStyle', # /Z7 - None, - 'ProgramDatabase', # /Zi - 'EditAndContinue'])) # /ZI -_Same(_compile, 'EnableEnhancedInstructionSet', - _Enumeration(['NotSet', - 'StreamingSIMDExtensions', # /arch:SSE - 'StreamingSIMDExtensions2'])) # /arch:SSE2 -_Same(_compile, 'ErrorReporting', - _Enumeration(['None', # /errorReport:none - 'Prompt', # /errorReport:prompt - 'Queue'], # /errorReport:queue - new=['Send'])) # /errorReport:send" -_Same(_compile, 'ExceptionHandling', - _Enumeration(['false', - 'Sync', # /EHsc - 'Async'], # /EHa - new=['SyncCThrow'])) # /EHs -_Same(_compile, 'FavorSizeOrSpeed', - _Enumeration(['Neither', - 'Speed', # /Ot - 'Size'])) # /Os -_Same(_compile, 'FloatingPointModel', - _Enumeration(['Precise', # /fp:precise - 'Strict', # /fp:strict - 'Fast'])) # /fp:fast -_Same(_compile, 'InlineFunctionExpansion', - _Enumeration(['Default', - 'OnlyExplicitInline', # /Ob1 - 'AnySuitable'], # /Ob2 - new=['Disabled'])) # /Ob0 -_Same(_compile, 'Optimization', - _Enumeration(['Disabled', # /Od - 'MinSpace', # /O1 - 'MaxSpeed', # /O2 - 'Full'])) # /Ox -_Same(_compile, 'RuntimeLibrary', - _Enumeration(['MultiThreaded', # /MT - 'MultiThreadedDebug', # /MTd - 'MultiThreadedDLL', # /MD - 'MultiThreadedDebugDLL'])) # /MDd -_Same(_compile, 'StructMemberAlignment', - _Enumeration(['Default', - '1Byte', # /Zp1 - '2Bytes', # /Zp2 - '4Bytes', # /Zp4 - '8Bytes', # /Zp8 - '16Bytes'])) # /Zp16 -_Same(_compile, 'WarningLevel', - _Enumeration(['TurnOffAllWarnings', # /W0 - 'Level1', # /W1 - 'Level2', # /W2 - 'Level3', # /W3 - 'Level4'], # /W4 - new=['EnableAllWarnings'])) # /Wall - -# Options found in MSVS that have been renamed in MSBuild. -_Renamed(_compile, 'EnableFunctionLevelLinking', 'FunctionLevelLinking', - _boolean) # /Gy -_Renamed(_compile, 'EnableIntrinsicFunctions', 'IntrinsicFunctions', - _boolean) # /Oi -_Renamed(_compile, 'KeepComments', 'PreprocessKeepComments', _boolean) # /C -_Renamed(_compile, 'ObjectFile', 'ObjectFileName', _file_name) # /Fo -_Renamed(_compile, 'OpenMP', 'OpenMPSupport', _boolean) # /openmp -_Renamed(_compile, 'PrecompiledHeaderThrough', 'PrecompiledHeaderFile', - _file_name) # Used with /Yc and /Yu -_Renamed(_compile, 'PrecompiledHeaderFile', 'PrecompiledHeaderOutputFile', - _file_name) # /Fp -_Renamed(_compile, 'UsePrecompiledHeader', 'PrecompiledHeader', - _Enumeration(['NotUsing', # VS recognized '' for this value too. - 'Create', # /Yc - 'Use'])) # /Yu -_Renamed(_compile, 'WarnAsError', 'TreatWarningAsError', _boolean) # /WX - -_ConvertedToAdditionalOption(_compile, 'DefaultCharIsUnsigned', '/J') - -# MSVS options not found in MSBuild. -_MSVSOnly(_compile, 'Detect64BitPortabilityProblems', _boolean) -_MSVSOnly(_compile, 'UseUnicodeResponseFiles', _boolean) - -# MSBuild options not found in MSVS. 
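The _Same/_Renamed tables above (and those that follow) are what ConvertToMSBuildSettings() consults to turn a VCCLCompilerTool dictionary into a ClCompile dictionary: numeric enumerations become labels, and renamed settings move to their MSBuild names. A minimal usage sketch, assuming the removed (Python 2-era) gyp.MSVSSettings module is importable (not part of the patch):

import sys
import gyp.MSVSSettings as MSVSSettings

msvs = {'VCCLCompilerTool': {'Optimization': '2',           # /O2
                             'WarningLevel': '3',            # /W3
                             'ObjectFile': '$(IntDir)\\'}}   # renamed to ObjectFileName above
msbuild = MSVSSettings.ConvertToMSBuildSettings(msvs, stderr=sys.stderr)
# Expected shape, per the directives in this file:
#   {'ClCompile': {'Optimization': 'MaxSpeed',
#                  'WarningLevel': 'Level3',
#                  'ObjectFileName': '$(IntDir)'}}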
-_MSBuildOnly(_compile, 'BuildingInIDE', _boolean) -_MSBuildOnly(_compile, 'CompileAsManaged', - _Enumeration([], new=['false', - 'true', # /clr - 'Pure', # /clr:pure - 'Safe', # /clr:safe - 'OldSyntax'])) # /clr:oldSyntax -_MSBuildOnly(_compile, 'CreateHotpatchableImage', _boolean) # /hotpatch -_MSBuildOnly(_compile, 'MultiProcessorCompilation', _boolean) # /MP -_MSBuildOnly(_compile, 'PreprocessOutputPath', _string) # /Fi -_MSBuildOnly(_compile, 'ProcessorNumber', _integer) # the number of processors -_MSBuildOnly(_compile, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_compile, 'TreatSpecificWarningsAsErrors', _string_list) # /we -_MSBuildOnly(_compile, 'UseUnicodeForAssemblerListing', _boolean) # /FAu - -# Defines a setting that needs very customized processing -_CustomGeneratePreprocessedFile(_compile, 'GeneratePreprocessedFile') - - -# Directives for converting MSVS VCLinkerTool to MSBuild Link. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\link.xml" for -# the schema of the MSBuild Link settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_link, 'AdditionalDependencies', _file_list) -_Same(_link, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH -# /MANIFESTDEPENDENCY: -_Same(_link, 'AdditionalManifestDependencies', _file_list) -_Same(_link, 'AdditionalOptions', _string_list) -_Same(_link, 'AddModuleNamesToAssembly', _file_list) # /ASSEMBLYMODULE -_Same(_link, 'AllowIsolation', _boolean) # /ALLOWISOLATION -_Same(_link, 'AssemblyLinkResource', _file_list) # /ASSEMBLYLINKRESOURCE -_Same(_link, 'BaseAddress', _string) # /BASE -_Same(_link, 'CLRUnmanagedCodeCheck', _boolean) # /CLRUNMANAGEDCODECHECK -_Same(_link, 'DelayLoadDLLs', _file_list) # /DELAYLOAD -_Same(_link, 'DelaySign', _boolean) # /DELAYSIGN -_Same(_link, 'EmbedManagedResourceFile', _file_list) # /ASSEMBLYRESOURCE -_Same(_link, 'EnableUAC', _boolean) # /MANIFESTUAC -_Same(_link, 'EntryPointSymbol', _string) # /ENTRY -_Same(_link, 'ForceSymbolReferences', _file_list) # /INCLUDE -_Same(_link, 'FunctionOrder', _file_name) # /ORDER -_Same(_link, 'GenerateDebugInformation', _boolean) # /DEBUG -_Same(_link, 'GenerateMapFile', _boolean) # /MAP -_Same(_link, 'HeapCommitSize', _string) -_Same(_link, 'HeapReserveSize', _string) # /HEAP -_Same(_link, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB -_Same(_link, 'IgnoreEmbeddedIDL', _boolean) # /IGNOREIDL -_Same(_link, 'ImportLibrary', _file_name) # /IMPLIB -_Same(_link, 'KeyContainer', _file_name) # /KEYCONTAINER -_Same(_link, 'KeyFile', _file_name) # /KEYFILE -_Same(_link, 'ManifestFile', _file_name) # /ManifestFile -_Same(_link, 'MapExports', _boolean) # /MAPINFO:EXPORTS -_Same(_link, 'MapFileName', _file_name) -_Same(_link, 'MergedIDLBaseFileName', _file_name) # /IDLOUT -_Same(_link, 'MergeSections', _string) # /MERGE -_Same(_link, 'MidlCommandFile', _file_name) # /MIDL -_Same(_link, 'ModuleDefinitionFile', _file_name) # /DEF -_Same(_link, 'OutputFile', _file_name) # /OUT -_Same(_link, 'PerUserRedirection', _boolean) -_Same(_link, 'Profile', _boolean) # /PROFILE -_Same(_link, 'ProfileGuidedDatabase', _file_name) # /PGD -_Same(_link, 'ProgramDatabaseFile', _file_name) # /PDB -_Same(_link, 'RegisterOutput', _boolean) -_Same(_link, 'SetChecksum', _boolean) # /RELEASE -_Same(_link, 'StackCommitSize', _string) -_Same(_link, 'StackReserveSize', _string) # /STACK -_Same(_link, 'StripPrivateSymbols', _file_name) # /PDBSTRIPPED -_Same(_link, 'SupportUnloadOfDelayLoadedDLL', _boolean) # /DELAY:UNLOAD -_Same(_link, 
'SuppressStartupBanner', _boolean) # /NOLOGO -_Same(_link, 'SwapRunFromCD', _boolean) # /SWAPRUN:CD -_Same(_link, 'TurnOffAssemblyGeneration', _boolean) # /NOASSEMBLY -_Same(_link, 'TypeLibraryFile', _file_name) # /TLBOUT -_Same(_link, 'TypeLibraryResourceID', _integer) # /TLBID -_Same(_link, 'UACUIAccess', _boolean) # /uiAccess='true' -_Same(_link, 'Version', _string) # /VERSION - -_Same(_link, 'EnableCOMDATFolding', _newly_boolean) # /OPT:ICF -_Same(_link, 'FixedBaseAddress', _newly_boolean) # /FIXED -_Same(_link, 'LargeAddressAware', _newly_boolean) # /LARGEADDRESSAWARE -_Same(_link, 'OptimizeReferences', _newly_boolean) # /OPT:REF -_Same(_link, 'RandomizedBaseAddress', _newly_boolean) # /DYNAMICBASE -_Same(_link, 'TerminalServerAware', _newly_boolean) # /TSAWARE - -_subsystem_enumeration = _Enumeration( - ['NotSet', - 'Console', # /SUBSYSTEM:CONSOLE - 'Windows', # /SUBSYSTEM:WINDOWS - 'Native', # /SUBSYSTEM:NATIVE - 'EFI Application', # /SUBSYSTEM:EFI_APPLICATION - 'EFI Boot Service Driver', # /SUBSYSTEM:EFI_BOOT_SERVICE_DRIVER - 'EFI ROM', # /SUBSYSTEM:EFI_ROM - 'EFI Runtime', # /SUBSYSTEM:EFI_RUNTIME_DRIVER - 'WindowsCE'], # /SUBSYSTEM:WINDOWSCE - new=['POSIX']) # /SUBSYSTEM:POSIX - -_target_machine_enumeration = _Enumeration( - ['NotSet', - 'MachineX86', # /MACHINE:X86 - None, - 'MachineARM', # /MACHINE:ARM - 'MachineEBC', # /MACHINE:EBC - 'MachineIA64', # /MACHINE:IA64 - None, - 'MachineMIPS', # /MACHINE:MIPS - 'MachineMIPS16', # /MACHINE:MIPS16 - 'MachineMIPSFPU', # /MACHINE:MIPSFPU - 'MachineMIPSFPU16', # /MACHINE:MIPSFPU16 - None, - None, - None, - 'MachineSH4', # /MACHINE:SH4 - None, - 'MachineTHUMB', # /MACHINE:THUMB - 'MachineX64']) # /MACHINE:X64 - -_Same(_link, 'AssemblyDebug', - _Enumeration(['', - 'true', # /ASSEMBLYDEBUG - 'false'])) # /ASSEMBLYDEBUG:DISABLE -_Same(_link, 'CLRImageType', - _Enumeration(['Default', - 'ForceIJWImage', # /CLRIMAGETYPE:IJW - 'ForcePureILImage', # /Switch="CLRIMAGETYPE:PURE - 'ForceSafeILImage'])) # /Switch="CLRIMAGETYPE:SAFE -_Same(_link, 'CLRThreadAttribute', - _Enumeration(['DefaultThreadingAttribute', # /CLRTHREADATTRIBUTE:NONE - 'MTAThreadingAttribute', # /CLRTHREADATTRIBUTE:MTA - 'STAThreadingAttribute'])) # /CLRTHREADATTRIBUTE:STA -_Same(_link, 'DataExecutionPrevention', - _Enumeration(['', - 'false', # /NXCOMPAT:NO - 'true'])) # /NXCOMPAT -_Same(_link, 'Driver', - _Enumeration(['NotSet', - 'Driver', # /Driver - 'UpOnly', # /DRIVER:UPONLY - 'WDM'])) # /DRIVER:WDM -_Same(_link, 'LinkTimeCodeGeneration', - _Enumeration(['Default', - 'UseLinkTimeCodeGeneration', # /LTCG - 'PGInstrument', # /LTCG:PGInstrument - 'PGOptimization', # /LTCG:PGOptimize - 'PGUpdate'])) # /LTCG:PGUpdate -_Same(_link, 'ShowProgress', - _Enumeration(['NotSet', - 'LinkVerbose', # /VERBOSE - 'LinkVerboseLib'], # /VERBOSE:Lib - new=['LinkVerboseICF', # /VERBOSE:ICF - 'LinkVerboseREF', # /VERBOSE:REF - 'LinkVerboseSAFESEH', # /VERBOSE:SAFESEH - 'LinkVerboseCLR'])) # /VERBOSE:CLR -_Same(_link, 'SubSystem', _subsystem_enumeration) -_Same(_link, 'TargetMachine', _target_machine_enumeration) -_Same(_link, 'UACExecutionLevel', - _Enumeration(['AsInvoker', # /level='asInvoker' - 'HighestAvailable', # /level='highestAvailable' - 'RequireAdministrator'])) # /level='requireAdministrator' - - -# Options found in MSVS that have been renamed in MSBuild. 
-_Renamed(_link, 'ErrorReporting', 'LinkErrorReporting', - _Enumeration(['NoErrorReport', # /ERRORREPORT:NONE - 'PromptImmediately', # /ERRORREPORT:PROMPT - 'QueueForNextLogin'], # /ERRORREPORT:QUEUE - new=['SendErrorReport'])) # /ERRORREPORT:SEND -_Renamed(_link, 'IgnoreDefaultLibraryNames', 'IgnoreSpecificDefaultLibraries', - _file_list) # /NODEFAULTLIB -_Renamed(_link, 'ResourceOnlyDLL', 'NoEntryPoint', _boolean) # /NOENTRY -_Renamed(_link, 'SwapRunFromNet', 'SwapRunFromNET', _boolean) # /SWAPRUN:NET - -_Moved(_link, 'GenerateManifest', '', _boolean) -_Moved(_link, 'IgnoreImportLibrary', '', _boolean) -_Moved(_link, 'LinkIncremental', '', _newly_boolean) -_Moved(_link, 'LinkLibraryDependencies', 'ProjectReference', _boolean) -_Moved(_link, 'UseLibraryDependencyInputs', 'ProjectReference', _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_link, 'OptimizeForWindows98', _newly_boolean) -_MSVSOnly(_link, 'UseUnicodeResponseFiles', _boolean) -# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. -_MSVSOnly(_link, 'AdditionalLibraryDirectories_excluded', _folder_list) - -# MSBuild options not found in MSVS. -_MSBuildOnly(_link, 'BuildingInIDE', _boolean) -_MSBuildOnly(_link, 'ImageHasSafeExceptionHandlers', _boolean) # /SAFESEH -_MSBuildOnly(_link, 'LinkDLL', _boolean) # /DLL Visible='false' -_MSBuildOnly(_link, 'LinkStatus', _boolean) # /LTCG:STATUS -_MSBuildOnly(_link, 'PreventDllBinding', _boolean) # /ALLOWBIND -_MSBuildOnly(_link, 'SupportNobindOfDelayLoadedDLL', _boolean) # /DELAY:NOBIND -_MSBuildOnly(_link, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_link, 'TreatLinkerWarningAsErrors', _boolean) # /WX -_MSBuildOnly(_link, 'MinimumRequiredVersion', _string) -_MSBuildOnly(_link, 'MSDOSStubFileName', _file_name) # /STUB Visible='false' -_MSBuildOnly(_link, 'SectionAlignment', _integer) # /ALIGN -_MSBuildOnly(_link, 'SpecifySectionAttributes', _string) # /SECTION -_MSBuildOnly(_link, 'ForceFileOutput', - _Enumeration([], new=['Enabled', # /FORCE - # /FORCE:MULTIPLE - 'MultiplyDefinedSymbolOnly', - 'UndefinedSymbolOnly'])) # /FORCE:UNRESOLVED -_MSBuildOnly(_link, 'CreateHotPatchableImage', - _Enumeration([], new=['Enabled', # /FUNCTIONPADMIN - 'X86Image', # /FUNCTIONPADMIN:5 - 'X64Image', # /FUNCTIONPADMIN:6 - 'ItaniumImage'])) # /FUNCTIONPADMIN:16 -_MSBuildOnly(_link, 'CLRSupportLastError', - _Enumeration([], new=['Enabled', # /CLRSupportLastError - 'Disabled', # /CLRSupportLastError:NO - # /CLRSupportLastError:SYSTEMDLL - 'SystemDlls'])) - - -# Directives for converting VCResourceCompilerTool to ResourceCompile. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\rc.xml" for -# the schema of the MSBuild ResourceCompile settings. - -_Same(_rc, 'AdditionalOptions', _string_list) -_Same(_rc, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_rc, 'Culture', _Integer(msbuild_base=16)) -_Same(_rc, 'IgnoreStandardIncludePath', _boolean) # /X -_Same(_rc, 'PreprocessorDefinitions', _string_list) # /D -_Same(_rc, 'ResourceOutputFileName', _string) # /fo -_Same(_rc, 'ShowProgress', _boolean) # /v -# There is no UI in VisualStudio 2008 to set the following properties. -# However they are found in CL and other tools. Include them here for -# completeness, as they are very likely to have the same usage pattern. -_Same(_rc, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_rc, 'UndefinePreprocessorDefinitions', _string_list) # /u - -# MSBuild options not found in MSVS. 
-_MSBuildOnly(_rc, 'NullTerminateStrings', _boolean) # /n -_MSBuildOnly(_rc, 'TrackerLogDirectory', _folder_name) - - -# Directives for converting VCMIDLTool to Midl. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\midl.xml" for -# the schema of the MSBuild Midl settings. - -_Same(_midl, 'AdditionalIncludeDirectories', _folder_list) # /I -_Same(_midl, 'AdditionalOptions', _string_list) -_Same(_midl, 'CPreprocessOptions', _string) # /cpp_opt -_Same(_midl, 'ErrorCheckAllocations', _boolean) # /error allocation -_Same(_midl, 'ErrorCheckBounds', _boolean) # /error bounds_check -_Same(_midl, 'ErrorCheckEnumRange', _boolean) # /error enum -_Same(_midl, 'ErrorCheckRefPointers', _boolean) # /error ref -_Same(_midl, 'ErrorCheckStubData', _boolean) # /error stub_data -_Same(_midl, 'GenerateStublessProxies', _boolean) # /Oicf -_Same(_midl, 'GenerateTypeLibrary', _boolean) -_Same(_midl, 'HeaderFileName', _file_name) # /h -_Same(_midl, 'IgnoreStandardIncludePath', _boolean) # /no_def_idir -_Same(_midl, 'InterfaceIdentifierFileName', _file_name) # /iid -_Same(_midl, 'MkTypLibCompatible', _boolean) # /mktyplib203 -_Same(_midl, 'OutputDirectory', _string) # /out -_Same(_midl, 'PreprocessorDefinitions', _string_list) # /D -_Same(_midl, 'ProxyFileName', _file_name) # /proxy -_Same(_midl, 'RedirectOutputAndErrors', _file_name) # /o -_Same(_midl, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_midl, 'TypeLibraryName', _file_name) # /tlb -_Same(_midl, 'UndefinePreprocessorDefinitions', _string_list) # /U -_Same(_midl, 'WarnAsError', _boolean) # /WX - -_Same(_midl, 'DefaultCharType', - _Enumeration(['Unsigned', # /char unsigned - 'Signed', # /char signed - 'Ascii'])) # /char ascii7 -_Same(_midl, 'TargetEnvironment', - _Enumeration(['NotSet', - 'Win32', # /env win32 - 'Itanium', # /env ia64 - 'X64'])) # /env x64 -_Same(_midl, 'EnableErrorChecks', - _Enumeration(['EnableCustom', - 'None', # /error none - 'All'])) # /error all -_Same(_midl, 'StructMemberAlignment', - _Enumeration(['NotSet', - '1', # Zp1 - '2', # Zp2 - '4', # Zp4 - '8'])) # Zp8 -_Same(_midl, 'WarningLevel', - _Enumeration(['0', # /W0 - '1', # /W1 - '2', # /W2 - '3', # /W3 - '4'])) # /W4 - -_Renamed(_midl, 'DLLDataFileName', 'DllDataFileName', _file_name) # /dlldata -_Renamed(_midl, 'ValidateParameters', 'ValidateAllParameters', - _boolean) # /robust - -# MSBuild options not found in MSVS. -_MSBuildOnly(_midl, 'ApplicationConfigurationMode', _boolean) # /app_config -_MSBuildOnly(_midl, 'ClientStubFile', _file_name) # /cstub -_MSBuildOnly(_midl, 'GenerateClientFiles', - _Enumeration([], new=['Stub', # /client stub - 'None'])) # /client none -_MSBuildOnly(_midl, 'GenerateServerFiles', - _Enumeration([], new=['Stub', # /client stub - 'None'])) # /client none -_MSBuildOnly(_midl, 'LocaleID', _integer) # /lcid DECIMAL -_MSBuildOnly(_midl, 'ServerStubFile', _file_name) # /sstub -_MSBuildOnly(_midl, 'SuppressCompilerWarnings', _boolean) # /no_warn -_MSBuildOnly(_midl, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_midl, 'TypeLibFormat', - _Enumeration([], new=['NewFormat', # /newtlb - 'OldFormat'])) # /oldtlb - - -# Directives for converting VCLibrarianTool to Lib. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\lib.xml" for -# the schema of the MSBuild Lib settings. 
- -_Same(_lib, 'AdditionalDependencies', _file_list) -_Same(_lib, 'AdditionalLibraryDirectories', _folder_list) # /LIBPATH -_Same(_lib, 'AdditionalOptions', _string_list) -_Same(_lib, 'ExportNamedFunctions', _string_list) # /EXPORT -_Same(_lib, 'ForceSymbolReferences', _string) # /INCLUDE -_Same(_lib, 'IgnoreAllDefaultLibraries', _boolean) # /NODEFAULTLIB -_Same(_lib, 'IgnoreSpecificDefaultLibraries', _file_list) # /NODEFAULTLIB -_Same(_lib, 'ModuleDefinitionFile', _file_name) # /DEF -_Same(_lib, 'OutputFile', _file_name) # /OUT -_Same(_lib, 'SuppressStartupBanner', _boolean) # /NOLOGO -_Same(_lib, 'UseUnicodeResponseFiles', _boolean) -_Same(_lib, 'LinkTimeCodeGeneration', _boolean) # /LTCG - -# TODO(jeanluc) _link defines the same value that gets moved to -# ProjectReference. We may want to validate that they are consistent. -_Moved(_lib, 'LinkLibraryDependencies', 'ProjectReference', _boolean) - -# TODO(jeanluc) I don't think these are genuine settings but byproducts of Gyp. -_MSVSOnly(_lib, 'AdditionalLibraryDirectories_excluded', _folder_list) - -_MSBuildOnly(_lib, 'DisplayLibrary', _string) # /LIST Visible='false' -_MSBuildOnly(_lib, 'ErrorReporting', - _Enumeration([], new=['PromptImmediately', # /ERRORREPORT:PROMPT - 'QueueForNextLogin', # /ERRORREPORT:QUEUE - 'SendErrorReport', # /ERRORREPORT:SEND - 'NoErrorReport'])) # /ERRORREPORT:NONE -_MSBuildOnly(_lib, 'MinimumRequiredVersion', _string) -_MSBuildOnly(_lib, 'Name', _file_name) # /NAME -_MSBuildOnly(_lib, 'RemoveObjects', _file_list) # /REMOVE -_MSBuildOnly(_lib, 'SubSystem', _subsystem_enumeration) -_MSBuildOnly(_lib, 'TargetMachine', _target_machine_enumeration) -_MSBuildOnly(_lib, 'TrackerLogDirectory', _folder_name) -_MSBuildOnly(_lib, 'TreatLibWarningAsErrors', _boolean) # /WX -_MSBuildOnly(_lib, 'Verbose', _boolean) - - -# Directives for converting VCManifestTool to Mt. -# See "c:\Program Files (x86)\MSBuild\Microsoft.Cpp\v4.0\1033\mt.xml" for -# the schema of the MSBuild Lib settings. - -# Options that have the same name in MSVS and MSBuild -_Same(_manifest, 'AdditionalManifestFiles', _file_list) # /manifest -_Same(_manifest, 'AdditionalOptions', _string_list) -_Same(_manifest, 'AssemblyIdentity', _string) # /identity: -_Same(_manifest, 'ComponentFileName', _file_name) # /dll -_Same(_manifest, 'GenerateCatalogFiles', _boolean) # /makecdfs -_Same(_manifest, 'InputResourceManifests', _string) # /inputresource -_Same(_manifest, 'OutputManifestFile', _file_name) # /out -_Same(_manifest, 'RegistrarScriptFile', _file_name) # /rgs -_Same(_manifest, 'ReplacementsFile', _file_name) # /replacements -_Same(_manifest, 'SuppressStartupBanner', _boolean) # /nologo -_Same(_manifest, 'TypeLibraryFile', _file_name) # /tlb: -_Same(_manifest, 'UpdateFileHashes', _boolean) # /hashupdate -_Same(_manifest, 'UpdateFileHashesSearchPath', _file_name) -_Same(_manifest, 'VerboseOutput', _boolean) # /verbose - -# Options that have moved location. -_MovedAndRenamed(_manifest, 'ManifestResourceFile', - 'ManifestResourceCompile', - 'ResourceOutputFileName', - _file_name) -_Moved(_manifest, 'EmbedManifest', '', _boolean) - -# MSVS options not found in MSBuild. -_MSVSOnly(_manifest, 'DependencyInformationFile', _file_name) -_MSVSOnly(_manifest, 'UseFAT32Workaround', _boolean) -_MSVSOnly(_manifest, 'UseUnicodeResponseFiles', _boolean) - -# MSBuild options not found in MSVS. 
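_Moved() and _MovedAndRenamed() above do more than rename: they file the converted value under a different MSBuild section. For the manifest tool, for instance, EmbedManifest ends up in the global ('') section and ManifestResourceFile becomes ManifestResourceCompile/ResourceOutputFileName. A sketch of the resulting shape, under the same import assumption as the earlier conversion example and with an illustrative file name (not part of the patch):

import sys
import gyp.MSVSSettings as MSVSSettings

msvs = {'VCManifestTool': {'EmbedManifest': 'true',
                           'ManifestResourceFile': 'my.manifest.res'}}
msbuild = MSVSSettings.ConvertToMSBuildSettings(msvs, stderr=sys.stderr)
# Expected shape, per the _Moved/_MovedAndRenamed directives above:
#   {'': {'EmbedManifest': 'true'},
#    'ManifestResourceCompile': {'ResourceOutputFileName': 'my.manifest.res'}}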
-_MSBuildOnly(_manifest, 'EnableDPIAwareness', _boolean) -_MSBuildOnly(_manifest, 'GenerateCategoryTags', _boolean) # /category -_MSBuildOnly(_manifest, 'ManifestFromManagedAssembly', - _file_name) # /managedassemblyname -_MSBuildOnly(_manifest, 'OutputResourceManifests', _string) # /outputresource -_MSBuildOnly(_manifest, 'SuppressDependencyElement', _boolean) # /nodependency -_MSBuildOnly(_manifest, 'TrackerLogDirectory', _folder_name) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSSettings_test.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSSettings_test.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSSettings_test.py 2012-05-22 19:39:22.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSSettings_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1482 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Unit tests for the MSVSSettings.py file.""" - -import StringIO -import unittest -import gyp.MSVSSettings as MSVSSettings - - -class TestSequenceFunctions(unittest.TestCase): - - def setUp(self): - self.stderr = StringIO.StringIO() - - def _ExpectedWarnings(self, expected): - """Compares recorded lines to expected warnings.""" - self.stderr.seek(0) - actual = self.stderr.read().split('\n') - actual = [line for line in actual if line] - self.assertEqual(sorted(expected), sorted(actual)) - - def testValidateMSVSSettings_tool_names(self): - """Tests that only MSVS tool names are allowed.""" - MSVSSettings.ValidateMSVSSettings( - {'VCCLCompilerTool': {}, - 'VCLinkerTool': {}, - 'VCMIDLTool': {}, - 'foo': {}, - 'VCResourceCompilerTool': {}, - 'VCLibrarianTool': {}, - 'VCManifestTool': {}, - 'ClCompile': {}}, - self.stderr) - self._ExpectedWarnings([ - 'Warning: unrecognized tool foo', - 'Warning: unrecognized tool ClCompile']) - - def testValidateMSVSSettings_settings(self): - """Tests that for invalid MSVS settings.""" - MSVSSettings.ValidateMSVSSettings( - {'VCCLCompilerTool': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': ['string1', 'string2'], - 'AdditionalUsingDirectories': 'folder1;folder2', - 'AssemblerListingLocation': 'a_file_name', - 'AssemblerOutput': '0', - 'BasicRuntimeChecks': '5', - 'BrowseInformation': 'fdkslj', - 'BrowseInformationFile': 'a_file_name', - 'BufferSecurityCheck': 'true', - 'CallingConvention': '-1', - 'CompileAs': '1', - 'DebugInformationFormat': '2', - 'DefaultCharIsUnsigned': 'true', - 'Detect64BitPortabilityProblems': 'true', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'string1;string2', - 'EnableEnhancedInstructionSet': '1', - 'EnableFiberSafeOptimizations': 'true', - 'EnableFunctionLevelLinking': 'true', - 'EnableIntrinsicFunctions': 'true', - 'EnablePREfast': 'true', - 'Enableprefast': 'bogus', - 'ErrorReporting': '1', - 'ExceptionHandling': '1', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': '1', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': '1', - 'ForceConformanceInForLoopScope': 'true', - 'ForcedIncludeFiles': 'file1;file2', - 'ForcedUsingFiles': 'file1;file2', - 'GeneratePreprocessedFile': '1', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': '1', - 'KeepComments': 'true', - 'MinimalRebuild': 'true', - 'ObjectFile': 'a_file_name', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMP': 'true', - 
'Optimization': '1', - 'PrecompiledHeaderFile': 'a_file_name', - 'PrecompiledHeaderThrough': 'a_file_name', - 'PreprocessorDefinitions': 'string1;string2', - 'ProgramDataBaseFileName': 'a_file_name', - 'RuntimeLibrary': '1', - 'RuntimeTypeInfo': 'true', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '1', - 'SuppressStartupBanner': 'true', - 'TreatWChar_tAsBuiltInType': 'true', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'UseFullPaths': 'true', - 'UsePrecompiledHeader': '1', - 'UseUnicodeResponseFiles': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '1', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': 'a_file_name', - 'ZZXYZ': 'bogus'}, - 'VCLinkerTool': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalManifestDependencies': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AddModuleNamesToAssembly': 'file1;file2', - 'AllowIsolation': 'true', - 'AssemblyDebug': '2', - 'AssemblyLinkResource': 'file1;file2', - 'BaseAddress': 'a string1', - 'CLRImageType': '2', - 'CLRThreadAttribute': '2', - 'CLRUnmanagedCodeCheck': 'true', - 'DataExecutionPrevention': '2', - 'DelayLoadDLLs': 'file1;file2', - 'DelaySign': 'true', - 'Driver': '2', - 'EmbedManagedResourceFile': 'file1;file2', - 'EnableCOMDATFolding': '2', - 'EnableUAC': 'true', - 'EntryPointSymbol': 'a string1', - 'ErrorReporting': '2', - 'FixedBaseAddress': '2', - 'ForceSymbolReferences': 'file1;file2', - 'FunctionOrder': 'a_file_name', - 'GenerateDebugInformation': 'true', - 'GenerateManifest': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': 'a string1', - 'HeapReserveSize': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreDefaultLibraryNames': 'file1;file2', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreImportLibrary': 'true', - 'ImportLibrary': 'a_file_name', - 'KeyContainer': 'a_file_name', - 'KeyFile': 'a_file_name', - 'LargeAddressAware': '2', - 'LinkIncremental': '2', - 'LinkLibraryDependencies': 'true', - 'LinkTimeCodeGeneration': '2', - 'ManifestFile': 'a_file_name', - 'MapExports': 'true', - 'MapFileName': 'a_file_name', - 'MergedIDLBaseFileName': 'a_file_name', - 'MergeSections': 'a string1', - 'MidlCommandFile': 'a_file_name', - 'ModuleDefinitionFile': 'a_file_name', - 'OptimizeForWindows98': '1', - 'OptimizeReferences': '2', - 'OutputFile': 'a_file_name', - 'PerUserRedirection': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': 'a_file_name', - 'ProgramDatabaseFile': 'a_file_name', - 'RandomizedBaseAddress': '2', - 'RegisterOutput': 'true', - 'ResourceOnlyDLL': 'true', - 'SetChecksum': 'true', - 'ShowProgress': '2', - 'StackCommitSize': 'a string1', - 'StackReserveSize': 'a string1', - 'StripPrivateSymbols': 'a_file_name', - 'SubSystem': '2', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'true', - 'SwapRunFromCD': 'true', - 'SwapRunFromNet': 'true', - 'TargetMachine': '2', - 'TerminalServerAware': '2', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'a_file_name', - 'TypeLibraryResourceID': '33', - 'UACExecutionLevel': '2', - 'UACUIAccess': 'true', - 'UseLibraryDependencyInputs': 'true', - 'UseUnicodeResponseFiles': 'true', - 'Version': 'a string1'}, - 'VCMIDLTool': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'CPreprocessOptions': 'a string1', - 'DefaultCharType': '1', - 'DLLDataFileName': 'a_file_name', - 
'EnableErrorChecks': '1', - 'ErrorCheckAllocations': 'true', - 'ErrorCheckBounds': 'true', - 'ErrorCheckEnumRange': 'true', - 'ErrorCheckRefPointers': 'true', - 'ErrorCheckStubData': 'true', - 'GenerateStublessProxies': 'true', - 'GenerateTypeLibrary': 'true', - 'HeaderFileName': 'a_file_name', - 'IgnoreStandardIncludePath': 'true', - 'InterfaceIdentifierFileName': 'a_file_name', - 'MkTypLibCompatible': 'true', - 'notgood': 'bogus', - 'OutputDirectory': 'a string1', - 'PreprocessorDefinitions': 'string1;string2', - 'ProxyFileName': 'a_file_name', - 'RedirectOutputAndErrors': 'a_file_name', - 'StructMemberAlignment': '1', - 'SuppressStartupBanner': 'true', - 'TargetEnvironment': '1', - 'TypeLibraryName': 'a_file_name', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'ValidateParameters': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '1'}, - 'VCResourceCompilerTool': { - 'AdditionalOptions': 'a string1', - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'Culture': '1003', - 'IgnoreStandardIncludePath': 'true', - 'notgood2': 'bogus', - 'PreprocessorDefinitions': 'string1;string2', - 'ResourceOutputFileName': 'a string1', - 'ShowProgress': 'true', - 'SuppressStartupBanner': 'true', - 'UndefinePreprocessorDefinitions': 'string1;string2'}, - 'VCLibrarianTool': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'ExportNamedFunctions': 'string1;string2', - 'ForceSymbolReferences': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2', - 'LinkLibraryDependencies': 'true', - 'ModuleDefinitionFile': 'a_file_name', - 'OutputFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'UseUnicodeResponseFiles': 'true'}, - 'VCManifestTool': { - 'AdditionalManifestFiles': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AssemblyIdentity': 'a string1', - 'ComponentFileName': 'a_file_name', - 'DependencyInformationFile': 'a_file_name', - 'GenerateCatalogFiles': 'true', - 'InputResourceManifests': 'a string1', - 'ManifestResourceFile': 'a_file_name', - 'OutputManifestFile': 'a_file_name', - 'RegistrarScriptFile': 'a_file_name', - 'ReplacementsFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'TypeLibraryFile': 'a_file_name', - 'UpdateFileHashes': 'truel', - 'UpdateFileHashesSearchPath': 'a_file_name', - 'UseFAT32Workaround': 'true', - 'UseUnicodeResponseFiles': 'true', - 'VerboseOutput': 'true'}}, - self.stderr) - self._ExpectedWarnings([ - 'Warning: for VCCLCompilerTool/BasicRuntimeChecks, ' - 'index value (5) not in expected range [0, 4)', - 'Warning: for VCCLCompilerTool/BrowseInformation, ' - "invalid literal for int() with base 10: 'fdkslj'", - 'Warning: for VCCLCompilerTool/CallingConvention, ' - 'index value (-1) not in expected range [0, 3)', - 'Warning: for VCCLCompilerTool/DebugInformationFormat, ' - 'converted value for 2 not specified.', - 'Warning: unrecognized setting VCCLCompilerTool/Enableprefast', - 'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ', - 'Warning: for VCLinkerTool/TargetMachine, ' - 'converted value for 2 not specified.', - 'Warning: unrecognized setting VCMIDLTool/notgood', - 'Warning: unrecognized setting VCResourceCompilerTool/notgood2', - 'Warning: for VCManifestTool/UpdateFileHashes, ' - "expected bool; got 'truel'" - '']) - - def testValidateMSBuildSettings_settings(self): - """Tests that for invalid MSBuild settings.""" - MSVSSettings.ValidateMSBuildSettings( - {'ClCompile': { - 
'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': ['string1', 'string2'], - 'AdditionalUsingDirectories': 'folder1;folder2', - 'AssemblerListingLocation': 'a_file_name', - 'AssemblerOutput': 'NoListing', - 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', - 'BrowseInformation': 'false', - 'BrowseInformationFile': 'a_file_name', - 'BufferSecurityCheck': 'true', - 'BuildingInIDE': 'true', - 'CallingConvention': 'Cdecl', - 'CompileAs': 'CompileAsC', - 'CompileAsManaged': 'Pure', - 'CreateHotpatchableImage': 'true', - 'DebugInformationFormat': 'ProgramDatabase', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'string1;string2', - 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', - 'EnableFiberSafeOptimizations': 'true', - 'EnablePREfast': 'true', - 'Enableprefast': 'bogus', - 'ErrorReporting': 'Prompt', - 'ExceptionHandling': 'SyncCThrow', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': 'Neither', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': 'Precise', - 'ForceConformanceInForLoopScope': 'true', - 'ForcedIncludeFiles': 'file1;file2', - 'ForcedUsingFiles': 'file1;file2', - 'FunctionLevelLinking': 'false', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': 'OnlyExplicitInline', - 'IntrinsicFunctions': 'false', - 'MinimalRebuild': 'true', - 'MultiProcessorCompilation': 'true', - 'ObjectFileName': 'a_file_name', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMPSupport': 'true', - 'Optimization': 'Disabled', - 'PrecompiledHeader': 'NotUsing', - 'PrecompiledHeaderFile': 'a_file_name', - 'PrecompiledHeaderOutputFile': 'a_file_name', - 'PreprocessKeepComments': 'true', - 'PreprocessorDefinitions': 'string1;string2', - 'PreprocessOutputPath': 'a string1', - 'PreprocessSuppressLineNumbers': 'false', - 'PreprocessToFile': 'false', - 'ProcessorNumber': '33', - 'ProgramDataBaseFileName': 'a_file_name', - 'RuntimeLibrary': 'MultiThreaded', - 'RuntimeTypeInfo': 'true', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '1Byte', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 'a_folder', - 'TreatSpecificWarningsAsErrors': 'string1;string2', - 'TreatWarningAsError': 'true', - 'TreatWChar_tAsBuiltInType': 'true', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'UseFullPaths': 'true', - 'UseUnicodeForAssemblerListing': 'true', - 'WarningLevel': 'TurnOffAllWarnings', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': 'a_file_name', - 'ZZXYZ': 'bogus'}, - 'Link': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalManifestDependencies': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AddModuleNamesToAssembly': 'file1;file2', - 'AllowIsolation': 'true', - 'AssemblyDebug': '', - 'AssemblyLinkResource': 'file1;file2', - 'BaseAddress': 'a string1', - 'BuildingInIDE': 'true', - 'CLRImageType': 'ForceIJWImage', - 'CLRSupportLastError': 'Enabled', - 'CLRThreadAttribute': 'MTAThreadingAttribute', - 'CLRUnmanagedCodeCheck': 'true', - 'CreateHotPatchableImage': 'X86Image', - 'DataExecutionPrevention': 'false', - 'DelayLoadDLLs': 'file1;file2', - 'DelaySign': 'true', - 'Driver': 'NotSet', - 'EmbedManagedResourceFile': 'file1;file2', - 'EnableCOMDATFolding': 'false', - 'EnableUAC': 'true', - 'EntryPointSymbol': 'a string1', - 'FixedBaseAddress': 'false', - 
'ForceFileOutput': 'Enabled', - 'ForceSymbolReferences': 'file1;file2', - 'FunctionOrder': 'a_file_name', - 'GenerateDebugInformation': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': 'a string1', - 'HeapReserveSize': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreSpecificDefaultLibraries': 'a_file_list', - 'ImageHasSafeExceptionHandlers': 'true', - 'ImportLibrary': 'a_file_name', - 'KeyContainer': 'a_file_name', - 'KeyFile': 'a_file_name', - 'LargeAddressAware': 'false', - 'LinkDLL': 'true', - 'LinkErrorReporting': 'SendErrorReport', - 'LinkStatus': 'true', - 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', - 'ManifestFile': 'a_file_name', - 'MapExports': 'true', - 'MapFileName': 'a_file_name', - 'MergedIDLBaseFileName': 'a_file_name', - 'MergeSections': 'a string1', - 'MidlCommandFile': 'a_file_name', - 'MinimumRequiredVersion': 'a string1', - 'ModuleDefinitionFile': 'a_file_name', - 'MSDOSStubFileName': 'a_file_name', - 'NoEntryPoint': 'true', - 'OptimizeReferences': 'false', - 'OutputFile': 'a_file_name', - 'PerUserRedirection': 'true', - 'PreventDllBinding': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': 'a_file_name', - 'ProgramDatabaseFile': 'a_file_name', - 'RandomizedBaseAddress': 'false', - 'RegisterOutput': 'true', - 'SectionAlignment': '33', - 'SetChecksum': 'true', - 'ShowProgress': 'LinkVerboseREF', - 'SpecifySectionAttributes': 'a string1', - 'StackCommitSize': 'a string1', - 'StackReserveSize': 'a string1', - 'StripPrivateSymbols': 'a_file_name', - 'SubSystem': 'Console', - 'SupportNobindOfDelayLoadedDLL': 'true', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'true', - 'SwapRunFromCD': 'true', - 'SwapRunFromNET': 'true', - 'TargetMachine': 'MachineX86', - 'TerminalServerAware': 'false', - 'TrackerLogDirectory': 'a_folder', - 'TreatLinkerWarningAsErrors': 'true', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'a_file_name', - 'TypeLibraryResourceID': '33', - 'UACExecutionLevel': 'AsInvoker', - 'UACUIAccess': 'true', - 'Version': 'a string1'}, - 'ResourceCompile': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'Culture': '0x236', - 'IgnoreStandardIncludePath': 'true', - 'NullTerminateStrings': 'true', - 'PreprocessorDefinitions': 'string1;string2', - 'ResourceOutputFileName': 'a string1', - 'ShowProgress': 'true', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 'a_folder', - 'UndefinePreprocessorDefinitions': 'string1;string2'}, - 'Midl': { - 'AdditionalIncludeDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'ApplicationConfigurationMode': 'true', - 'ClientStubFile': 'a_file_name', - 'CPreprocessOptions': 'a string1', - 'DefaultCharType': 'Signed', - 'DllDataFileName': 'a_file_name', - 'EnableErrorChecks': 'EnableCustom', - 'ErrorCheckAllocations': 'true', - 'ErrorCheckBounds': 'true', - 'ErrorCheckEnumRange': 'true', - 'ErrorCheckRefPointers': 'true', - 'ErrorCheckStubData': 'true', - 'GenerateClientFiles': 'Stub', - 'GenerateServerFiles': 'None', - 'GenerateStublessProxies': 'true', - 'GenerateTypeLibrary': 'true', - 'HeaderFileName': 'a_file_name', - 'IgnoreStandardIncludePath': 'true', - 'InterfaceIdentifierFileName': 'a_file_name', - 'LocaleID': '33', - 'MkTypLibCompatible': 'true', - 'OutputDirectory': 'a string1', - 'PreprocessorDefinitions': 'string1;string2', - 'ProxyFileName': 'a_file_name', - 'RedirectOutputAndErrors': 'a_file_name', - 'ServerStubFile': 'a_file_name', - 
'StructMemberAlignment': 'NotSet', - 'SuppressCompilerWarnings': 'true', - 'SuppressStartupBanner': 'true', - 'TargetEnvironment': 'Itanium', - 'TrackerLogDirectory': 'a_folder', - 'TypeLibFormat': 'NewFormat', - 'TypeLibraryName': 'a_file_name', - 'UndefinePreprocessorDefinitions': 'string1;string2', - 'ValidateAllParameters': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '1'}, - 'Lib': { - 'AdditionalDependencies': 'file1;file2', - 'AdditionalLibraryDirectories': 'folder1;folder2', - 'AdditionalOptions': 'a string1', - 'DisplayLibrary': 'a string1', - 'ErrorReporting': 'PromptImmediately', - 'ExportNamedFunctions': 'string1;string2', - 'ForceSymbolReferences': 'a string1', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2', - 'LinkTimeCodeGeneration': 'true', - 'MinimumRequiredVersion': 'a string1', - 'ModuleDefinitionFile': 'a_file_name', - 'Name': 'a_file_name', - 'OutputFile': 'a_file_name', - 'RemoveObjects': 'file1;file2', - 'SubSystem': 'Console', - 'SuppressStartupBanner': 'true', - 'TargetMachine': 'MachineX86i', - 'TrackerLogDirectory': 'a_folder', - 'TreatLibWarningAsErrors': 'true', - 'UseUnicodeResponseFiles': 'true', - 'Verbose': 'true'}, - 'Manifest': { - 'AdditionalManifestFiles': 'file1;file2', - 'AdditionalOptions': 'a string1', - 'AssemblyIdentity': 'a string1', - 'ComponentFileName': 'a_file_name', - 'EnableDPIAwareness': 'fal', - 'GenerateCatalogFiles': 'truel', - 'GenerateCategoryTags': 'true', - 'InputResourceManifests': 'a string1', - 'ManifestFromManagedAssembly': 'a_file_name', - 'notgood3': 'bogus', - 'OutputManifestFile': 'a_file_name', - 'OutputResourceManifests': 'a string1', - 'RegistrarScriptFile': 'a_file_name', - 'ReplacementsFile': 'a_file_name', - 'SuppressDependencyElement': 'true', - 'SuppressStartupBanner': 'true', - 'TrackerLogDirectory': 'a_folder', - 'TypeLibraryFile': 'a_file_name', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'a_file_name', - 'VerboseOutput': 'true'}, - 'ProjectReference': { - 'LinkLibraryDependencies': 'true', - 'UseLibraryDependencyInputs': 'true'}, - 'ManifestResourceCompile': { - 'ResourceOutputFileName': 'a_file_name'}, - '': { - 'EmbedManifest': 'true', - 'GenerateManifest': 'true', - 'IgnoreImportLibrary': 'true', - 'LinkIncremental': 'false'}}, - self.stderr) - self._ExpectedWarnings([ - 'Warning: unrecognized setting ClCompile/Enableprefast', - 'Warning: unrecognized setting ClCompile/ZZXYZ', - 'Warning: unrecognized setting Manifest/notgood3', - 'Warning: for Manifest/GenerateCatalogFiles, ' - "expected bool; got 'truel'", - 'Warning: for Lib/TargetMachine, unrecognized enumerated value ' - 'MachineX86i', - "Warning: for Manifest/EnableDPIAwareness, expected bool; got 'fal'"]) - - def testConvertToMSBuildSettings_empty(self): - """Tests an empty conversion.""" - msvs_settings = {} - expected_msbuild_settings = {} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, - self.stderr) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_minimal(self): - """Tests a minimal conversion.""" - msvs_settings = { - 'VCCLCompilerTool': { - 'AdditionalIncludeDirectories': 'dir1', - 'AdditionalOptions': '/foo', - 'BasicRuntimeChecks': '0', - }, - 'VCLinkerTool': { - 'LinkTimeCodeGeneration': '1', - 'ErrorReporting': '1', - 'DataExecutionPrevention': '2', - }, - } - expected_msbuild_settings = { - 'ClCompile': { - 'AdditionalIncludeDirectories': 'dir1', - 
'AdditionalOptions': '/foo', - 'BasicRuntimeChecks': 'Default', - }, - 'Link': { - 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', - 'LinkErrorReporting': 'PromptImmediately', - 'DataExecutionPrevention': 'true', - }, - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, - self.stderr) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_warnings(self): - """Tests conversion that generates warnings.""" - msvs_settings = { - 'VCCLCompilerTool': { - 'AdditionalIncludeDirectories': '1', - 'AdditionalOptions': '2', - # These are incorrect values: - 'BasicRuntimeChecks': '12', - 'BrowseInformation': '21', - 'UsePrecompiledHeader': '13', - 'GeneratePreprocessedFile': '14'}, - 'VCLinkerTool': { - # These are incorrect values: - 'Driver': '10', - 'LinkTimeCodeGeneration': '31', - 'ErrorReporting': '21', - 'FixedBaseAddress': '6'}, - 'VCResourceCompilerTool': { - # Custom - 'Culture': '1003'}} - expected_msbuild_settings = { - 'ClCompile': { - 'AdditionalIncludeDirectories': '1', - 'AdditionalOptions': '2'}, - 'Link': {}, - 'ResourceCompile': { - # Custom - 'Culture': '0x03eb'}} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, - self.stderr) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([ - 'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to ' - 'MSBuild, index value (12) not in expected range [0, 4)', - 'Warning: while converting VCCLCompilerTool/BrowseInformation to ' - 'MSBuild, index value (21) not in expected range [0, 3)', - 'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to ' - 'MSBuild, index value (13) not in expected range [0, 3)', - 'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to ' - 'MSBuild, value must be one of [0, 1, 2]; got 14', - - 'Warning: while converting VCLinkerTool/Driver to ' - 'MSBuild, index value (10) not in expected range [0, 4)', - 'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to ' - 'MSBuild, index value (31) not in expected range [0, 5)', - 'Warning: while converting VCLinkerTool/ErrorReporting to ' - 'MSBuild, index value (21) not in expected range [0, 3)', - 'Warning: while converting VCLinkerTool/FixedBaseAddress to ' - 'MSBuild, index value (6) not in expected range [0, 3)', - ]) - - def testConvertToMSBuildSettings_full_synthetic(self): - """Tests conversion of all the MSBuild settings.""" - msvs_settings = { - 'VCCLCompilerTool': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'AdditionalUsingDirectories': 'folder1;folder2;folder3', - 'AssemblerListingLocation': 'a_file_name', - 'AssemblerOutput': '0', - 'BasicRuntimeChecks': '1', - 'BrowseInformation': '2', - 'BrowseInformationFile': 'a_file_name', - 'BufferSecurityCheck': 'true', - 'CallingConvention': '0', - 'CompileAs': '1', - 'DebugInformationFormat': '4', - 'DefaultCharIsUnsigned': 'true', - 'Detect64BitPortabilityProblems': 'true', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'd1;d2;d3', - 'EnableEnhancedInstructionSet': '0', - 'EnableFiberSafeOptimizations': 'true', - 'EnableFunctionLevelLinking': 'true', - 'EnableIntrinsicFunctions': 'true', - 'EnablePREfast': 'true', - 'ErrorReporting': '1', - 'ExceptionHandling': '2', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': '0', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': '1', - 
'ForceConformanceInForLoopScope': 'true', - 'ForcedIncludeFiles': 'file1;file2;file3', - 'ForcedUsingFiles': 'file1;file2;file3', - 'GeneratePreprocessedFile': '1', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': '2', - 'KeepComments': 'true', - 'MinimalRebuild': 'true', - 'ObjectFile': 'a_file_name', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMP': 'true', - 'Optimization': '3', - 'PrecompiledHeaderFile': 'a_file_name', - 'PrecompiledHeaderThrough': 'a_file_name', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'ProgramDataBaseFileName': 'a_file_name', - 'RuntimeLibrary': '0', - 'RuntimeTypeInfo': 'true', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '1', - 'SuppressStartupBanner': 'true', - 'TreatWChar_tAsBuiltInType': 'true', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3', - 'UseFullPaths': 'true', - 'UsePrecompiledHeader': '1', - 'UseUnicodeResponseFiles': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '2', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': 'a_file_name'}, - 'VCLinkerTool': { - 'AdditionalDependencies': 'file1;file2;file3', - 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', - 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3', - 'AdditionalManifestDependencies': 'file1;file2;file3', - 'AdditionalOptions': 'a_string', - 'AddModuleNamesToAssembly': 'file1;file2;file3', - 'AllowIsolation': 'true', - 'AssemblyDebug': '0', - 'AssemblyLinkResource': 'file1;file2;file3', - 'BaseAddress': 'a_string', - 'CLRImageType': '1', - 'CLRThreadAttribute': '2', - 'CLRUnmanagedCodeCheck': 'true', - 'DataExecutionPrevention': '0', - 'DelayLoadDLLs': 'file1;file2;file3', - 'DelaySign': 'true', - 'Driver': '1', - 'EmbedManagedResourceFile': 'file1;file2;file3', - 'EnableCOMDATFolding': '0', - 'EnableUAC': 'true', - 'EntryPointSymbol': 'a_string', - 'ErrorReporting': '0', - 'FixedBaseAddress': '1', - 'ForceSymbolReferences': 'file1;file2;file3', - 'FunctionOrder': 'a_file_name', - 'GenerateDebugInformation': 'true', - 'GenerateManifest': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': 'a_string', - 'HeapReserveSize': 'a_string', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreDefaultLibraryNames': 'file1;file2;file3', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreImportLibrary': 'true', - 'ImportLibrary': 'a_file_name', - 'KeyContainer': 'a_file_name', - 'KeyFile': 'a_file_name', - 'LargeAddressAware': '2', - 'LinkIncremental': '1', - 'LinkLibraryDependencies': 'true', - 'LinkTimeCodeGeneration': '2', - 'ManifestFile': 'a_file_name', - 'MapExports': 'true', - 'MapFileName': 'a_file_name', - 'MergedIDLBaseFileName': 'a_file_name', - 'MergeSections': 'a_string', - 'MidlCommandFile': 'a_file_name', - 'ModuleDefinitionFile': 'a_file_name', - 'OptimizeForWindows98': '1', - 'OptimizeReferences': '0', - 'OutputFile': 'a_file_name', - 'PerUserRedirection': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': 'a_file_name', - 'ProgramDatabaseFile': 'a_file_name', - 'RandomizedBaseAddress': '1', - 'RegisterOutput': 'true', - 'ResourceOnlyDLL': 'true', - 'SetChecksum': 'true', - 'ShowProgress': '0', - 'StackCommitSize': 'a_string', - 'StackReserveSize': 'a_string', - 'StripPrivateSymbols': 'a_file_name', - 'SubSystem': '2', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'true', - 'SwapRunFromCD': 'true', - 'SwapRunFromNet': 'true', - 
'TargetMachine': '3', - 'TerminalServerAware': '2', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'a_file_name', - 'TypeLibraryResourceID': '33', - 'UACExecutionLevel': '1', - 'UACUIAccess': 'true', - 'UseLibraryDependencyInputs': 'false', - 'UseUnicodeResponseFiles': 'true', - 'Version': 'a_string'}, - 'VCResourceCompilerTool': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'Culture': '1003', - 'IgnoreStandardIncludePath': 'true', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'ResourceOutputFileName': 'a_string', - 'ShowProgress': 'true', - 'SuppressStartupBanner': 'true', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, - 'VCMIDLTool': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'CPreprocessOptions': 'a_string', - 'DefaultCharType': '0', - 'DLLDataFileName': 'a_file_name', - 'EnableErrorChecks': '2', - 'ErrorCheckAllocations': 'true', - 'ErrorCheckBounds': 'true', - 'ErrorCheckEnumRange': 'true', - 'ErrorCheckRefPointers': 'true', - 'ErrorCheckStubData': 'true', - 'GenerateStublessProxies': 'true', - 'GenerateTypeLibrary': 'true', - 'HeaderFileName': 'a_file_name', - 'IgnoreStandardIncludePath': 'true', - 'InterfaceIdentifierFileName': 'a_file_name', - 'MkTypLibCompatible': 'true', - 'OutputDirectory': 'a_string', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'ProxyFileName': 'a_file_name', - 'RedirectOutputAndErrors': 'a_file_name', - 'StructMemberAlignment': '3', - 'SuppressStartupBanner': 'true', - 'TargetEnvironment': '1', - 'TypeLibraryName': 'a_file_name', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3', - 'ValidateParameters': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '4'}, - 'VCLibrarianTool': { - 'AdditionalDependencies': 'file1;file2;file3', - 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', - 'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'ExportNamedFunctions': 'd1;d2;d3', - 'ForceSymbolReferences': 'a_string', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', - 'LinkLibraryDependencies': 'true', - 'ModuleDefinitionFile': 'a_file_name', - 'OutputFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'UseUnicodeResponseFiles': 'true'}, - 'VCManifestTool': { - 'AdditionalManifestFiles': 'file1;file2;file3', - 'AdditionalOptions': 'a_string', - 'AssemblyIdentity': 'a_string', - 'ComponentFileName': 'a_file_name', - 'DependencyInformationFile': 'a_file_name', - 'EmbedManifest': 'true', - 'GenerateCatalogFiles': 'true', - 'InputResourceManifests': 'a_string', - 'ManifestResourceFile': 'my_name', - 'OutputManifestFile': 'a_file_name', - 'RegistrarScriptFile': 'a_file_name', - 'ReplacementsFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'TypeLibraryFile': 'a_file_name', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'a_file_name', - 'UseFAT32Workaround': 'true', - 'UseUnicodeResponseFiles': 'true', - 'VerboseOutput': 'true'}} - expected_msbuild_settings = { - 'ClCompile': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string /J', - 'AdditionalUsingDirectories': 'folder1;folder2;folder3', - 'AssemblerListingLocation': 'a_file_name', - 'AssemblerOutput': 'NoListing', - 'BasicRuntimeChecks': 'StackFrameRuntimeCheck', - 'BrowseInformation': 'true', - 'BrowseInformationFile': 'a_file_name', - 'BufferSecurityCheck': 'true', - 'CallingConvention': 'Cdecl', - 'CompileAs': 
'CompileAsC', - 'DebugInformationFormat': 'EditAndContinue', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'd1;d2;d3', - 'EnableEnhancedInstructionSet': 'NotSet', - 'EnableFiberSafeOptimizations': 'true', - 'EnablePREfast': 'true', - 'ErrorReporting': 'Prompt', - 'ExceptionHandling': 'Async', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': 'Neither', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': 'Strict', - 'ForceConformanceInForLoopScope': 'true', - 'ForcedIncludeFiles': 'file1;file2;file3', - 'ForcedUsingFiles': 'file1;file2;file3', - 'FunctionLevelLinking': 'true', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': 'AnySuitable', - 'IntrinsicFunctions': 'true', - 'MinimalRebuild': 'true', - 'ObjectFileName': 'a_file_name', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMPSupport': 'true', - 'Optimization': 'Full', - 'PrecompiledHeader': 'Create', - 'PrecompiledHeaderFile': 'a_file_name', - 'PrecompiledHeaderOutputFile': 'a_file_name', - 'PreprocessKeepComments': 'true', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'PreprocessSuppressLineNumbers': 'false', - 'PreprocessToFile': 'true', - 'ProgramDataBaseFileName': 'a_file_name', - 'RuntimeLibrary': 'MultiThreaded', - 'RuntimeTypeInfo': 'true', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '1Byte', - 'SuppressStartupBanner': 'true', - 'TreatWarningAsError': 'true', - 'TreatWChar_tAsBuiltInType': 'true', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3', - 'UseFullPaths': 'true', - 'WarningLevel': 'Level2', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': 'a_file_name'}, - 'Link': { - 'AdditionalDependencies': 'file1;file2;file3', - 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', - 'AdditionalManifestDependencies': 'file1;file2;file3', - 'AdditionalOptions': 'a_string', - 'AddModuleNamesToAssembly': 'file1;file2;file3', - 'AllowIsolation': 'true', - 'AssemblyDebug': '', - 'AssemblyLinkResource': 'file1;file2;file3', - 'BaseAddress': 'a_string', - 'CLRImageType': 'ForceIJWImage', - 'CLRThreadAttribute': 'STAThreadingAttribute', - 'CLRUnmanagedCodeCheck': 'true', - 'DataExecutionPrevention': '', - 'DelayLoadDLLs': 'file1;file2;file3', - 'DelaySign': 'true', - 'Driver': 'Driver', - 'EmbedManagedResourceFile': 'file1;file2;file3', - 'EnableCOMDATFolding': '', - 'EnableUAC': 'true', - 'EntryPointSymbol': 'a_string', - 'FixedBaseAddress': 'false', - 'ForceSymbolReferences': 'file1;file2;file3', - 'FunctionOrder': 'a_file_name', - 'GenerateDebugInformation': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': 'a_string', - 'HeapReserveSize': 'a_string', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', - 'ImportLibrary': 'a_file_name', - 'KeyContainer': 'a_file_name', - 'KeyFile': 'a_file_name', - 'LargeAddressAware': 'true', - 'LinkErrorReporting': 'NoErrorReport', - 'LinkTimeCodeGeneration': 'PGInstrument', - 'ManifestFile': 'a_file_name', - 'MapExports': 'true', - 'MapFileName': 'a_file_name', - 'MergedIDLBaseFileName': 'a_file_name', - 'MergeSections': 'a_string', - 'MidlCommandFile': 'a_file_name', - 'ModuleDefinitionFile': 'a_file_name', - 'NoEntryPoint': 'true', - 'OptimizeReferences': '', - 'OutputFile': 'a_file_name', - 'PerUserRedirection': 'true', - 'Profile': 'true', - 
'ProfileGuidedDatabase': 'a_file_name', - 'ProgramDatabaseFile': 'a_file_name', - 'RandomizedBaseAddress': 'false', - 'RegisterOutput': 'true', - 'SetChecksum': 'true', - 'ShowProgress': 'NotSet', - 'StackCommitSize': 'a_string', - 'StackReserveSize': 'a_string', - 'StripPrivateSymbols': 'a_file_name', - 'SubSystem': 'Windows', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'true', - 'SwapRunFromCD': 'true', - 'SwapRunFromNET': 'true', - 'TargetMachine': 'MachineARM', - 'TerminalServerAware': 'true', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'a_file_name', - 'TypeLibraryResourceID': '33', - 'UACExecutionLevel': 'HighestAvailable', - 'UACUIAccess': 'true', - 'Version': 'a_string'}, - 'ResourceCompile': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'Culture': '0x03eb', - 'IgnoreStandardIncludePath': 'true', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'ResourceOutputFileName': 'a_string', - 'ShowProgress': 'true', - 'SuppressStartupBanner': 'true', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3'}, - 'Midl': { - 'AdditionalIncludeDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'CPreprocessOptions': 'a_string', - 'DefaultCharType': 'Unsigned', - 'DllDataFileName': 'a_file_name', - 'EnableErrorChecks': 'All', - 'ErrorCheckAllocations': 'true', - 'ErrorCheckBounds': 'true', - 'ErrorCheckEnumRange': 'true', - 'ErrorCheckRefPointers': 'true', - 'ErrorCheckStubData': 'true', - 'GenerateStublessProxies': 'true', - 'GenerateTypeLibrary': 'true', - 'HeaderFileName': 'a_file_name', - 'IgnoreStandardIncludePath': 'true', - 'InterfaceIdentifierFileName': 'a_file_name', - 'MkTypLibCompatible': 'true', - 'OutputDirectory': 'a_string', - 'PreprocessorDefinitions': 'd1;d2;d3', - 'ProxyFileName': 'a_file_name', - 'RedirectOutputAndErrors': 'a_file_name', - 'StructMemberAlignment': '4', - 'SuppressStartupBanner': 'true', - 'TargetEnvironment': 'Win32', - 'TypeLibraryName': 'a_file_name', - 'UndefinePreprocessorDefinitions': 'd1;d2;d3', - 'ValidateAllParameters': 'true', - 'WarnAsError': 'true', - 'WarningLevel': '4'}, - 'Lib': { - 'AdditionalDependencies': 'file1;file2;file3', - 'AdditionalLibraryDirectories': 'folder1;folder2;folder3', - 'AdditionalOptions': 'a_string', - 'ExportNamedFunctions': 'd1;d2;d3', - 'ForceSymbolReferences': 'a_string', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreSpecificDefaultLibraries': 'file1;file2;file3', - 'ModuleDefinitionFile': 'a_file_name', - 'OutputFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'UseUnicodeResponseFiles': 'true'}, - 'Manifest': { - 'AdditionalManifestFiles': 'file1;file2;file3', - 'AdditionalOptions': 'a_string', - 'AssemblyIdentity': 'a_string', - 'ComponentFileName': 'a_file_name', - 'GenerateCatalogFiles': 'true', - 'InputResourceManifests': 'a_string', - 'OutputManifestFile': 'a_file_name', - 'RegistrarScriptFile': 'a_file_name', - 'ReplacementsFile': 'a_file_name', - 'SuppressStartupBanner': 'true', - 'TypeLibraryFile': 'a_file_name', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'a_file_name', - 'VerboseOutput': 'true'}, - 'ManifestResourceCompile': { - 'ResourceOutputFileName': 'my_name'}, - 'ProjectReference': { - 'LinkLibraryDependencies': 'true', - 'UseLibraryDependencyInputs': 'false'}, - '': { - 'EmbedManifest': 'true', - 'GenerateManifest': 'true', - 'IgnoreImportLibrary': 'true', - 'LinkIncremental': 'false'}} - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - 
msvs_settings, - self.stderr) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - def testConvertToMSBuildSettings_actual(self): - """Tests the conversion of an actual project. - - A VS2008 project with most of the options defined was created through the - VS2008 IDE. It was then converted to VS2010. The tool settings found in - the .vcproj and .vcxproj files were converted to the two dictionaries - msvs_settings and expected_msbuild_settings. - - Note that for many settings, the VS2010 converter adds macros like - %(AdditionalIncludeDirectories) to make sure than inherited values are - included. Since the Gyp projects we generate do not use inheritance, - we removed these macros. They were: - ClCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)' - AdditionalOptions: ' %(AdditionalOptions)' - AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)' - DisableSpecificWarnings: ';%(DisableSpecificWarnings)', - ForcedIncludeFiles: ';%(ForcedIncludeFiles)', - ForcedUsingFiles: ';%(ForcedUsingFiles)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - UndefinePreprocessorDefinitions: - ';%(UndefinePreprocessorDefinitions)', - Link: - AdditionalDependencies: ';%(AdditionalDependencies)', - AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)', - AdditionalManifestDependencies: - ';%(AdditionalManifestDependencies)', - AdditionalOptions: ' %(AdditionalOptions)', - AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)', - AssemblyLinkResource: ';%(AssemblyLinkResource)', - DelayLoadDLLs: ';%(DelayLoadDLLs)', - EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)', - ForceSymbolReferences: ';%(ForceSymbolReferences)', - IgnoreSpecificDefaultLibraries: - ';%(IgnoreSpecificDefaultLibraries)', - ResourceCompile: - AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)', - AdditionalOptions: ' %(AdditionalOptions)', - PreprocessorDefinitions: ';%(PreprocessorDefinitions)', - Manifest: - AdditionalManifestFiles: ';%(AdditionalManifestFiles)', - AdditionalOptions: ' %(AdditionalOptions)', - InputResourceManifests: ';%(InputResourceManifests)', - """ - msvs_settings = { - 'VCCLCompilerTool': { - 'AdditionalIncludeDirectories': 'dir1', - 'AdditionalOptions': '/more', - 'AdditionalUsingDirectories': 'test', - 'AssemblerListingLocation': '$(IntDir)\\a', - 'AssemblerOutput': '1', - 'BasicRuntimeChecks': '3', - 'BrowseInformation': '1', - 'BrowseInformationFile': '$(IntDir)\\e', - 'BufferSecurityCheck': 'false', - 'CallingConvention': '1', - 'CompileAs': '1', - 'DebugInformationFormat': '4', - 'DefaultCharIsUnsigned': 'true', - 'Detect64BitPortabilityProblems': 'true', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'abc', - 'EnableEnhancedInstructionSet': '1', - 'EnableFiberSafeOptimizations': 'true', - 'EnableFunctionLevelLinking': 'true', - 'EnableIntrinsicFunctions': 'true', - 'EnablePREfast': 'true', - 'ErrorReporting': '2', - 'ExceptionHandling': '2', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': '2', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': '1', - 'ForceConformanceInForLoopScope': 'false', - 'ForcedIncludeFiles': 'def', - 'ForcedUsingFiles': 'ge', - 'GeneratePreprocessedFile': '2', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': '1', - 'KeepComments': 'true', - 'MinimalRebuild': 'true', - 'ObjectFile': '$(IntDir)\\b', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', 
- 'OpenMP': 'true', - 'Optimization': '3', - 'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche', - 'PrecompiledHeaderThrough': 'StdAfx.hd', - 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', - 'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb', - 'RuntimeLibrary': '3', - 'RuntimeTypeInfo': 'false', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '3', - 'SuppressStartupBanner': 'false', - 'TreatWChar_tAsBuiltInType': 'false', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'wer', - 'UseFullPaths': 'true', - 'UsePrecompiledHeader': '0', - 'UseUnicodeResponseFiles': 'false', - 'WarnAsError': 'true', - 'WarningLevel': '3', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': '$(IntDir)\\c'}, - 'VCLinkerTool': { - 'AdditionalDependencies': 'zx', - 'AdditionalLibraryDirectories': 'asd', - 'AdditionalManifestDependencies': 's2', - 'AdditionalOptions': '/mor2', - 'AddModuleNamesToAssembly': 'd1', - 'AllowIsolation': 'false', - 'AssemblyDebug': '1', - 'AssemblyLinkResource': 'd5', - 'BaseAddress': '23423', - 'CLRImageType': '3', - 'CLRThreadAttribute': '1', - 'CLRUnmanagedCodeCheck': 'true', - 'DataExecutionPrevention': '0', - 'DelayLoadDLLs': 'd4', - 'DelaySign': 'true', - 'Driver': '2', - 'EmbedManagedResourceFile': 'd2', - 'EnableCOMDATFolding': '1', - 'EnableUAC': 'false', - 'EntryPointSymbol': 'f5', - 'ErrorReporting': '2', - 'FixedBaseAddress': '1', - 'ForceSymbolReferences': 'd3', - 'FunctionOrder': 'fssdfsd', - 'GenerateDebugInformation': 'true', - 'GenerateManifest': 'false', - 'GenerateMapFile': 'true', - 'HeapCommitSize': '13', - 'HeapReserveSize': '12', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreDefaultLibraryNames': 'flob;flok', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreImportLibrary': 'true', - 'ImportLibrary': 'f4', - 'KeyContainer': 'f7', - 'KeyFile': 'f6', - 'LargeAddressAware': '2', - 'LinkIncremental': '0', - 'LinkLibraryDependencies': 'false', - 'LinkTimeCodeGeneration': '1', - 'ManifestFile': - '$(IntDir)\\$(TargetFileName).2intermediate.manifest', - 'MapExports': 'true', - 'MapFileName': 'd5', - 'MergedIDLBaseFileName': 'f2', - 'MergeSections': 'f5', - 'MidlCommandFile': 'f1', - 'ModuleDefinitionFile': 'sdsd', - 'OptimizeForWindows98': '2', - 'OptimizeReferences': '2', - 'OutputFile': '$(OutDir)\\$(ProjectName)2.exe', - 'PerUserRedirection': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', - 'ProgramDatabaseFile': 'Flob.pdb', - 'RandomizedBaseAddress': '1', - 'RegisterOutput': 'true', - 'ResourceOnlyDLL': 'true', - 'SetChecksum': 'false', - 'ShowProgress': '1', - 'StackCommitSize': '15', - 'StackReserveSize': '14', - 'StripPrivateSymbols': 'd3', - 'SubSystem': '1', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'false', - 'SwapRunFromCD': 'true', - 'SwapRunFromNet': 'true', - 'TargetMachine': '1', - 'TerminalServerAware': '1', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'f3', - 'TypeLibraryResourceID': '12', - 'UACExecutionLevel': '2', - 'UACUIAccess': 'true', - 'UseLibraryDependencyInputs': 'true', - 'UseUnicodeResponseFiles': 'false', - 'Version': '333'}, - 'VCResourceCompilerTool': { - 'AdditionalIncludeDirectories': 'f3', - 'AdditionalOptions': '/more3', - 'Culture': '3084', - 'IgnoreStandardIncludePath': 'true', - 'PreprocessorDefinitions': '_UNICODE;UNICODE2', - 'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res', - 'ShowProgress': 'true'}, - 'VCManifestTool': { - 
'AdditionalManifestFiles': 'sfsdfsd', - 'AdditionalOptions': 'afdsdafsd', - 'AssemblyIdentity': 'sddfdsadfsa', - 'ComponentFileName': 'fsdfds', - 'DependencyInformationFile': '$(IntDir)\\mt.depdfd', - 'EmbedManifest': 'false', - 'GenerateCatalogFiles': 'true', - 'InputResourceManifests': 'asfsfdafs', - 'ManifestResourceFile': - '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf', - 'OutputManifestFile': '$(TargetPath).manifestdfs', - 'RegistrarScriptFile': 'sdfsfd', - 'ReplacementsFile': 'sdffsd', - 'SuppressStartupBanner': 'false', - 'TypeLibraryFile': 'sfsd', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'sfsd', - 'UseFAT32Workaround': 'true', - 'UseUnicodeResponseFiles': 'false', - 'VerboseOutput': 'true'}} - expected_msbuild_settings = { - 'ClCompile': { - 'AdditionalIncludeDirectories': 'dir1', - 'AdditionalOptions': '/more /J', - 'AdditionalUsingDirectories': 'test', - 'AssemblerListingLocation': '$(IntDir)a', - 'AssemblerOutput': 'AssemblyCode', - 'BasicRuntimeChecks': 'EnableFastChecks', - 'BrowseInformation': 'true', - 'BrowseInformationFile': '$(IntDir)e', - 'BufferSecurityCheck': 'false', - 'CallingConvention': 'FastCall', - 'CompileAs': 'CompileAsC', - 'DebugInformationFormat': 'EditAndContinue', - 'DisableLanguageExtensions': 'true', - 'DisableSpecificWarnings': 'abc', - 'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions', - 'EnableFiberSafeOptimizations': 'true', - 'EnablePREfast': 'true', - 'ErrorReporting': 'Queue', - 'ExceptionHandling': 'Async', - 'ExpandAttributedSource': 'true', - 'FavorSizeOrSpeed': 'Size', - 'FloatingPointExceptions': 'true', - 'FloatingPointModel': 'Strict', - 'ForceConformanceInForLoopScope': 'false', - 'ForcedIncludeFiles': 'def', - 'ForcedUsingFiles': 'ge', - 'FunctionLevelLinking': 'true', - 'GenerateXMLDocumentationFiles': 'true', - 'IgnoreStandardIncludePath': 'true', - 'InlineFunctionExpansion': 'OnlyExplicitInline', - 'IntrinsicFunctions': 'true', - 'MinimalRebuild': 'true', - 'ObjectFileName': '$(IntDir)b', - 'OmitDefaultLibName': 'true', - 'OmitFramePointers': 'true', - 'OpenMPSupport': 'true', - 'Optimization': 'Full', - 'PrecompiledHeader': 'NotUsing', # Actual conversion gives '' - 'PrecompiledHeaderFile': 'StdAfx.hd', - 'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche', - 'PreprocessKeepComments': 'true', - 'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE', - 'PreprocessSuppressLineNumbers': 'true', - 'PreprocessToFile': 'true', - 'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb', - 'RuntimeLibrary': 'MultiThreadedDebugDLL', - 'RuntimeTypeInfo': 'false', - 'ShowIncludes': 'true', - 'SmallerTypeCheck': 'true', - 'StringPooling': 'true', - 'StructMemberAlignment': '4Bytes', - 'SuppressStartupBanner': 'false', - 'TreatWarningAsError': 'true', - 'TreatWChar_tAsBuiltInType': 'false', - 'UndefineAllPreprocessorDefinitions': 'true', - 'UndefinePreprocessorDefinitions': 'wer', - 'UseFullPaths': 'true', - 'WarningLevel': 'Level3', - 'WholeProgramOptimization': 'true', - 'XMLDocumentationFileName': '$(IntDir)c'}, - 'Link': { - 'AdditionalDependencies': 'zx', - 'AdditionalLibraryDirectories': 'asd', - 'AdditionalManifestDependencies': 's2', - 'AdditionalOptions': '/mor2', - 'AddModuleNamesToAssembly': 'd1', - 'AllowIsolation': 'false', - 'AssemblyDebug': 'true', - 'AssemblyLinkResource': 'd5', - 'BaseAddress': '23423', - 'CLRImageType': 'ForceSafeILImage', - 'CLRThreadAttribute': 'MTAThreadingAttribute', - 'CLRUnmanagedCodeCheck': 'true', - 'DataExecutionPrevention': '', - 'DelayLoadDLLs': 'd4', - 'DelaySign': 
'true', - 'Driver': 'UpOnly', - 'EmbedManagedResourceFile': 'd2', - 'EnableCOMDATFolding': 'false', - 'EnableUAC': 'false', - 'EntryPointSymbol': 'f5', - 'FixedBaseAddress': 'false', - 'ForceSymbolReferences': 'd3', - 'FunctionOrder': 'fssdfsd', - 'GenerateDebugInformation': 'true', - 'GenerateMapFile': 'true', - 'HeapCommitSize': '13', - 'HeapReserveSize': '12', - 'IgnoreAllDefaultLibraries': 'true', - 'IgnoreEmbeddedIDL': 'true', - 'IgnoreSpecificDefaultLibraries': 'flob;flok', - 'ImportLibrary': 'f4', - 'KeyContainer': 'f7', - 'KeyFile': 'f6', - 'LargeAddressAware': 'true', - 'LinkErrorReporting': 'QueueForNextLogin', - 'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration', - 'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest', - 'MapExports': 'true', - 'MapFileName': 'd5', - 'MergedIDLBaseFileName': 'f2', - 'MergeSections': 'f5', - 'MidlCommandFile': 'f1', - 'ModuleDefinitionFile': 'sdsd', - 'NoEntryPoint': 'true', - 'OptimizeReferences': 'true', - 'OutputFile': '$(OutDir)$(ProjectName)2.exe', - 'PerUserRedirection': 'true', - 'Profile': 'true', - 'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd', - 'ProgramDatabaseFile': 'Flob.pdb', - 'RandomizedBaseAddress': 'false', - 'RegisterOutput': 'true', - 'SetChecksum': 'false', - 'ShowProgress': 'LinkVerbose', - 'StackCommitSize': '15', - 'StackReserveSize': '14', - 'StripPrivateSymbols': 'd3', - 'SubSystem': 'Console', - 'SupportUnloadOfDelayLoadedDLL': 'true', - 'SuppressStartupBanner': 'false', - 'SwapRunFromCD': 'true', - 'SwapRunFromNET': 'true', - 'TargetMachine': 'MachineX86', - 'TerminalServerAware': 'false', - 'TurnOffAssemblyGeneration': 'true', - 'TypeLibraryFile': 'f3', - 'TypeLibraryResourceID': '12', - 'UACExecutionLevel': 'RequireAdministrator', - 'UACUIAccess': 'true', - 'Version': '333'}, - 'ResourceCompile': { - 'AdditionalIncludeDirectories': 'f3', - 'AdditionalOptions': '/more3', - 'Culture': '0x0c0c', - 'IgnoreStandardIncludePath': 'true', - 'PreprocessorDefinitions': '_UNICODE;UNICODE2', - 'ResourceOutputFileName': '$(IntDir)%(Filename)3.res', - 'ShowProgress': 'true'}, - 'Manifest': { - 'AdditionalManifestFiles': 'sfsdfsd', - 'AdditionalOptions': 'afdsdafsd', - 'AssemblyIdentity': 'sddfdsadfsa', - 'ComponentFileName': 'fsdfds', - 'GenerateCatalogFiles': 'true', - 'InputResourceManifests': 'asfsfdafs', - 'OutputManifestFile': '$(TargetPath).manifestdfs', - 'RegistrarScriptFile': 'sdfsfd', - 'ReplacementsFile': 'sdffsd', - 'SuppressStartupBanner': 'false', - 'TypeLibraryFile': 'sfsd', - 'UpdateFileHashes': 'true', - 'UpdateFileHashesSearchPath': 'sfsd', - 'VerboseOutput': 'true'}, - 'ProjectReference': { - 'LinkLibraryDependencies': 'false', - 'UseLibraryDependencyInputs': 'true'}, - '': { - 'EmbedManifest': 'false', - 'GenerateManifest': 'false', - 'IgnoreImportLibrary': 'true', - 'LinkIncremental': '' - }, - 'ManifestResourceCompile': { - 'ResourceOutputFileName': - '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'} - } - actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings( - msvs_settings, - self.stderr) - self.assertEqual(expected_msbuild_settings, actual_msbuild_settings) - self._ExpectedWarnings([]) - - -if __name__ == '__main__': - unittest.main() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSToolFile.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSToolFile.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSToolFile.py 2012-05-25 21:36:14.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSToolFile.py 1970-01-01 00:00:00.000000000 +0000 
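For reference, the MSVSSettings_test.py hunks removed above drive two public entry points, MSVSSettings.ValidateMSBuildSettings and MSVSSettings.ConvertToMSBuildSettings. A minimal sketch of the conversion call, mirroring the removed testConvertToMSBuildSettings_minimal case and assuming gyp's pylib directory is on sys.path:

import sys
import gyp.MSVSSettings as MSVSSettings

# VS2008 (.vcproj) style settings: legacy tool names with numeric enum indices.
msvs_settings = {
    'VCCLCompilerTool': {'BasicRuntimeChecks': '0'},
    'VCLinkerTool': {'LinkTimeCodeGeneration': '1'},
}
# Conversion warnings (unknown settings, out-of-range indices) are written to
# the stream passed as the second argument.
msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings, sys.stderr)
# Mappings taken from the removed minimal test:
#   VCCLCompilerTool/BasicRuntimeChecks '0'   -> ClCompile/BasicRuntimeChecks 'Default'
#   VCLinkerTool/LinkTimeCodeGeneration '1'   -> Link/LinkTimeCodeGeneration 'UseLinkTimeCodeGeneration'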
@@ -1,58 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio project reader/writer.""" - -import gyp.common -import gyp.easy_xml as easy_xml - - -class Writer(object): - """Visual Studio XML tool file writer.""" - - def __init__(self, tool_file_path, name): - """Initializes the tool file. - - Args: - tool_file_path: Path to the tool file. - name: Name of the tool file. - """ - self.tool_file_path = tool_file_path - self.name = name - self.rules_section = ['Rules'] - - def AddCustomBuildRule(self, name, cmd, description, - additional_dependencies, - outputs, extensions): - """Adds a rule to the tool file. - - Args: - name: Name of the rule. - description: Description of the rule. - cmd: Command line of the rule. - additional_dependencies: other files which may trigger the rule. - outputs: outputs of the rule. - extensions: extensions handled by the rule. - """ - rule = ['CustomBuildRule', - {'Name': name, - 'ExecutionDescription': description, - 'CommandLine': cmd, - 'Outputs': ';'.join(outputs), - 'FileExtensions': ';'.join(extensions), - 'AdditionalDependencies': - ';'.join(additional_dependencies) - }] - self.rules_section.append(rule) - - def WriteIfChanged(self): - """Writes the tool file.""" - content = ['VisualStudioToolFile', - {'Version': '8.00', - 'Name': self.name - }, - self.rules_section - ] - easy_xml.WriteXmlIfChanged(content, self.tool_file_path, - encoding="Windows-1252") diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSUserFile.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSUserFile.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSUserFile.py 2012-05-25 21:36:14.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSUserFile.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,147 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Visual Studio user preferences file writer.""" - -import os -import re -import socket # for gethostname - -import gyp.common -import gyp.easy_xml as easy_xml - - -#------------------------------------------------------------------------------ - -def _FindCommandInPath(command): - """If there are no slashes in the command given, this function - searches the PATH env to find the given command, and converts it - to an absolute path. We have to do this because MSVS is looking - for an actual file to launch a debugger on, not just a command - line. Note that this happens at GYP time, so anything needing to - be built needs to have a full path.""" - if '/' in command or '\\' in command: - # If the command already has path elements (either relative or - # absolute), then assume it is constructed properly. - return command - else: - # Search through the path list and find an existing file that - # we can access. - paths = os.environ.get('PATH','').split(os.pathsep) - for path in paths: - item = os.path.join(path, command) - if os.path.isfile(item) and os.access(item, os.X_OK): - return item - return command - -def _QuoteWin32CommandLineArgs(args): - new_args = [] - for arg in args: - # Replace all double-quotes with double-double-quotes to escape - # them for cmd shell, and then quote the whole thing if there - # are any. - if arg.find('"') != -1: - arg = '""'.join(arg.split('"')) - arg = '"%s"' % arg - - # Otherwise, if there are any spaces, quote the whole arg. 
- elif re.search(r'[ \t\n]', arg): - arg = '"%s"' % arg - new_args.append(arg) - return new_args - -class Writer(object): - """Visual Studio XML user user file writer.""" - - def __init__(self, user_file_path, version, name): - """Initializes the user file. - - Args: - user_file_path: Path to the user file. - version: Version info. - name: Name of the user file. - """ - self.user_file_path = user_file_path - self.version = version - self.name = name - self.configurations = {} - - def AddConfig(self, name): - """Adds a configuration to the project. - - Args: - name: Configuration name. - """ - self.configurations[name] = ['Configuration', {'Name': name}] - - def AddDebugSettings(self, config_name, command, environment = {}, - working_directory=""): - """Adds a DebugSettings node to the user file for a particular config. - - Args: - command: command line to run. First element in the list is the - executable. All elements of the command will be quoted if - necessary. - working_directory: other files which may trigger the rule. (optional) - """ - command = _QuoteWin32CommandLineArgs(command) - - abs_command = _FindCommandInPath(command[0]) - - if environment and isinstance(environment, dict): - env_list = ['%s="%s"' % (key, val) - for (key,val) in environment.iteritems()] - environment = ' '.join(env_list) - else: - environment = '' - - n_cmd = ['DebugSettings', - {'Command': abs_command, - 'WorkingDirectory': working_directory, - 'CommandArguments': " ".join(command[1:]), - 'RemoteMachine': socket.gethostname(), - 'Environment': environment, - 'EnvironmentMerge': 'true', - # Currently these are all "dummy" values that we're just setting - # in the default manner that MSVS does it. We could use some of - # these to add additional capabilities, I suppose, but they might - # not have parity with other platforms then. - 'Attach': 'false', - 'DebuggerType': '3', # 'auto' debugger - 'Remote': '1', - 'RemoteCommand': '', - 'HttpUrl': '', - 'PDBPath': '', - 'SQLDebugging': '', - 'DebuggerFlavor': '0', - 'MPIRunCommand': '', - 'MPIRunArguments': '', - 'MPIRunWorkingDirectory': '', - 'ApplicationCommand': '', - 'ApplicationArguments': '', - 'ShimCommand': '', - 'MPIAcceptMode': '', - 'MPIAcceptFilter': '' - }] - - # Find the config, and add it if it doesn't exist. - if config_name not in self.configurations: - self.AddConfig(config_name) - - # Add the DebugSettings onto the appropriate config. - self.configurations[config_name].append(n_cmd) - - def WriteIfChanged(self): - """Writes the user file.""" - configs = ['Configurations'] - for config, spec in sorted(self.configurations.iteritems()): - configs.append(spec) - - content = ['VisualStudioUserFile', - {'Version': self.version.ProjectVersion(), - 'Name': self.name - }, - configs] - easy_xml.WriteXmlIfChanged(content, self.user_file_path, - encoding="Windows-1252") diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSUtil.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSUtil.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSUtil.py 2013-02-06 01:01:32.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSUtil.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,74 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions shared amongst the Windows generators.""" - -import copy - -def _ShardName(name, number): - """Add a shard number to the end of a target. 
- - Arguments: - name: name of the target (foo#target) - number: shard number - Returns: - Target name with shard added (foo_1#target) - """ - parts = name.rsplit('#', 1) - parts[0] = '%s_%d' % (parts[0], number) - return '#'.join(parts) - - -def ShardTargets(target_list, target_dicts): - """Shard some targets apart to work around the linkers limits. - - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - Returns: - Tuple of the new sharded versions of the inputs. - """ - # Gather the targets to shard, and how many pieces. - targets_to_shard = {} - for t in target_dicts: - shards = int(target_dicts[t].get('msvs_shard', 0)) - if shards: - targets_to_shard[t] = shards - # Shard target_list. - new_target_list = [] - for t in target_list: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - new_target_list.append(_ShardName(t, i)) - else: - new_target_list.append(t) - # Shard target_dict. - new_target_dicts = {} - for t in target_dicts: - if t in targets_to_shard: - for i in range(targets_to_shard[t]): - name = _ShardName(t, i) - new_target_dicts[name] = copy.copy(target_dicts[t]) - new_target_dicts[name]['target_name'] = _ShardName( - new_target_dicts[name]['target_name'], i) - sources = new_target_dicts[name].get('sources', []) - new_sources = [] - for pos in range(i, len(sources), targets_to_shard[t]): - new_sources.append(sources[pos]) - new_target_dicts[name]['sources'] = new_sources - else: - new_target_dicts[t] = target_dicts[t] - # Shard dependencies. - for t in new_target_dicts: - dependencies = copy.copy(new_target_dicts[t].get('dependencies', [])) - new_dependencies = [] - for d in dependencies: - if d in targets_to_shard: - for i in range(targets_to_shard[d]): - new_dependencies.append(_ShardName(d, i)) - else: - new_dependencies.append(d) - new_target_dicts[t]['dependencies'] = new_dependencies - - return (new_target_list, new_target_dicts) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSVersion.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSVersion.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/MSVSVersion.py 2013-01-02 17:53:06.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/MSVSVersion.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,373 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
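The ShardTargets/_ShardName pair above splits an oversized static library target into several smaller ones to stay under the MSVC linker's limits. A sketch of the rewrite it performs, using an illustrative two-shard target and assuming the pre-removal gyp.MSVSUtil module is importable:

import gyp.MSVSUtil as MSVSUtil

target_list = ['base/base.gyp:big_lib#target']
target_dicts = {
    'base/base.gyp:big_lib#target': {
        'target_name': 'big_lib',
        'msvs_shard': 2,  # ask for two shards
        'sources': ['a.cc', 'b.cc', 'c.cc', 'd.cc'],
    },
}
new_list, new_dicts = MSVSUtil.ShardTargets(target_list, target_dicts)
# new_list: ['base/base.gyp:big_lib_0#target', 'base/base.gyp:big_lib_1#target']
# Sources are dealt out round-robin: big_lib_0 gets ['a.cc', 'c.cc'] and
# big_lib_1 gets ['b.cc', 'd.cc']; dependencies on the original target are
# rewritten to point at every shard.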
- -"""Handle version information related to Visual Stuio.""" - -import errno -import os -import re -import subprocess -import sys -import gyp - - -class VisualStudioVersion(object): - """Information regarding a version of Visual Studio.""" - - def __init__(self, short_name, description, - solution_version, project_version, flat_sln, uses_vcxproj, - path, sdk_based, default_toolset=None): - self.short_name = short_name - self.description = description - self.solution_version = solution_version - self.project_version = project_version - self.flat_sln = flat_sln - self.uses_vcxproj = uses_vcxproj - self.path = path - self.sdk_based = sdk_based - self.default_toolset = default_toolset - - def ShortName(self): - return self.short_name - - def Description(self): - """Get the full description of the version.""" - return self.description - - def SolutionVersion(self): - """Get the version number of the sln files.""" - return self.solution_version - - def ProjectVersion(self): - """Get the version number of the vcproj or vcxproj files.""" - return self.project_version - - def FlatSolution(self): - return self.flat_sln - - def UsesVcxproj(self): - """Returns true if this version uses a vcxproj file.""" - return self.uses_vcxproj - - def ProjectExtension(self): - """Returns the file extension for the project.""" - return self.uses_vcxproj and '.vcxproj' or '.vcproj' - - def Path(self): - """Returns the path to Visual Studio installation.""" - return self.path - - def ToolPath(self, tool): - """Returns the path to a given compiler tool. """ - return os.path.normpath(os.path.join(self.path, "VC/bin", tool)) - - def DefaultToolset(self): - """Returns the msbuild toolset version that will be used in the absence - of a user override.""" - return self.default_toolset - - def SetupScript(self, target_arch): - """Returns a command (with arguments) to be used to set up the - environment.""" - # Check if we are running in the SDK command line environment and use - # the setup script from the SDK if so. |target_arch| should be either - # 'x86' or 'x64'. - assert target_arch in ('x86', 'x64') - sdk_dir = os.environ.get('WindowsSDKDir') - if self.sdk_based and sdk_dir: - return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')), - '/' + target_arch] - else: - # We don't use VC/vcvarsall.bat for x86 because vcvarsall calls - # vcvars32, which it can only find if VS??COMNTOOLS is set, which it - # isn't always. - if target_arch == 'x86': - return [os.path.normpath( - os.path.join(self.path, 'Common7/Tools/vsvars32.bat'))] - else: - assert target_arch == 'x64' - arg = 'x86_amd64' - if (os.environ.get('PROCESSOR_ARCHITECTURE') == 'AMD64' or - os.environ.get('PROCESSOR_ARCHITEW6432') == 'AMD64'): - # Use the 64-on-64 compiler if we can. - arg = 'amd64' - return [os.path.normpath( - os.path.join(self.path, 'VC/vcvarsall.bat')), arg] - - -def _RegistryQueryBase(sysdir, key, value): - """Use reg.exe to read a particular key. - - While ideally we might use the win32 module, we would like gyp to be - python neutral, so for instance cygwin python lacks this module. - - Arguments: - sysdir: The system subdirectory to attempt to launch reg.exe from. - key: The registry key to read from. - value: The particular value to read. - Return: - stdout from reg.exe, or None for failure. 
- """ - # Skip if not on Windows or Python Win32 setup issue - if sys.platform not in ('win32', 'cygwin'): - return None - # Setup params to pass to and attempt to launch reg.exe - cmd = [os.path.join(os.environ.get('WINDIR', ''), sysdir, 'reg.exe'), - 'query', key] - if value: - cmd.extend(['/v', value]) - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - # Obtain the stdout from reg.exe, reading to the end so p.returncode is valid - # Note that the error text may be in [1] in some cases - text = p.communicate()[0] - # Check return code from reg.exe; officially 0==success and 1==error - if p.returncode: - return None - return text - - -def _RegistryQuery(key, value=None): - """Use reg.exe to read a particular key through _RegistryQueryBase. - - First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If - that fails, it falls back to System32. Sysnative is available on Vista and - up and available on Windows Server 2003 and XP through KB patch 942589. Note - that Sysnative will always fail if using 64-bit python due to it being a - virtual directory and System32 will work correctly in the first place. - - KB 942589 - http://support.microsoft.com/kb/942589/en-us. - - Arguments: - key: The registry key. - value: The particular registry value to read (optional). - Return: - stdout from reg.exe, or None for failure. - """ - text = None - try: - text = _RegistryQueryBase('Sysnative', key, value) - except OSError, e: - if e.errno == errno.ENOENT: - text = _RegistryQueryBase('System32', key, value) - else: - raise - return text - - -def _RegistryGetValue(key, value): - """Use reg.exe to obtain the value of a registry key. - - Args: - key: The registry key. - value: The particular registry value to read. - Return: - contents of the registry key's value, or None on failure. - """ - text = _RegistryQuery(key, value) - if not text: - return None - # Extract value. - match = re.search(r'REG_\w+\s+([^\r]+)\r\n', text) - if not match: - return None - return match.group(1) - - -def _RegistryKeyExists(key): - """Use reg.exe to see if a key exists. - - Args: - key: The registry key to check. - Return: - True if the key exists - """ - if not _RegistryQuery(key): - return False - return True - - -def _CreateVersion(name, path, sdk_based=False): - """Sets up MSVS project generation. - - Setup is based off the GYP_MSVS_VERSION environment variable or whatever is - autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is - passed in that doesn't match a value in versions python will throw a error. 
- """ - if path: - path = os.path.normpath(path) - versions = { - '2012': VisualStudioVersion('2012', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2012e': VisualStudioVersion('2012e', - 'Visual Studio 2012', - solution_version='12.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based, - default_toolset='v110'), - '2010': VisualStudioVersion('2010', - 'Visual Studio 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=False, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2010e': VisualStudioVersion('2010e', - 'Visual Studio 2010', - solution_version='11.00', - project_version='4.0', - flat_sln=True, - uses_vcxproj=True, - path=path, - sdk_based=sdk_based), - '2008': VisualStudioVersion('2008', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2008e': VisualStudioVersion('2008e', - 'Visual Studio 2008', - solution_version='10.00', - project_version='9.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005': VisualStudioVersion('2005', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=False, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - '2005e': VisualStudioVersion('2005e', - 'Visual Studio 2005', - solution_version='9.00', - project_version='8.00', - flat_sln=True, - uses_vcxproj=False, - path=path, - sdk_based=sdk_based), - } - return versions[str(name)] - - -def _ConvertToCygpath(path): - """Convert to cygwin path if we are using cygwin.""" - if sys.platform == 'cygwin': - p = subprocess.Popen(['cygpath', path], stdout=subprocess.PIPE) - path = p.communicate()[0].strip() - return path - - -def _DetectVisualStudioVersions(versions_to_check, force_express): - """Collect the list of installed visual studio versions. - - Returns: - A list of visual studio versions installed in descending order of - usage preference. - Base this on the registry and a quick check if devenv.exe exists. - Only versions 8-10 are considered. - Possibilities are: - 2005(e) - Visual Studio 2005 (8) - 2008(e) - Visual Studio 2008 (9) - 2010(e) - Visual Studio 2010 (10) - 2012(e) - Visual Studio 2012 (11) - Where (e) is e for express editions of MSVS and blank otherwise. - """ - version_to_year = { - '8.0': '2005', '9.0': '2008', '10.0': '2010', '11.0': '2012'} - versions = [] - for version in versions_to_check: - # Old method of searching for which VS version is installed - # We don't use the 2010-encouraged-way because we also want to get the - # path to the binaries, which it doesn't offer. - keys = [r'HKLM\Software\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\%s' % version, - r'HKLM\Software\Microsoft\VCExpress\%s' % version, - r'HKLM\Software\Wow6432Node\Microsoft\VCExpress\%s' % version] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], 'InstallDir') - if not path: - continue - path = _ConvertToCygpath(path) - # Check for full. - full_path = os.path.join(path, 'devenv.exe') - express_path = os.path.join(path, 'vcexpress.exe') - if not force_express and os.path.exists(full_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version], - os.path.join(path, '..', '..'))) - # Check for express. 
- elif os.path.exists(express_path): - # Add this one. - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..', '..'))) - - # The old method above does not work when only SDK is installed. - keys = [r'HKLM\Software\Microsoft\VisualStudio\SxS\VC7', - r'HKLM\Software\Wow6432Node\Microsoft\VisualStudio\SxS\VC7'] - for index in range(len(keys)): - path = _RegistryGetValue(keys[index], version) - if not path: - continue - path = _ConvertToCygpath(path) - versions.append(_CreateVersion(version_to_year[version] + 'e', - os.path.join(path, '..'), sdk_based=True)) - - return versions - - -def SelectVisualStudioVersion(version='auto'): - """Select which version of Visual Studio projects to generate. - - Arguments: - version: Hook to allow caller to force a particular version (vs auto). - Returns: - An object representing a visual studio project format version. - """ - # In auto mode, check environment variable for override. - if version == 'auto': - version = os.environ.get('GYP_MSVS_VERSION', 'auto') - version_map = { - 'auto': ('10.0', '9.0', '8.0', '11.0'), - '2005': ('8.0',), - '2005e': ('8.0',), - '2008': ('9.0',), - '2008e': ('9.0',), - '2010': ('10.0',), - '2010e': ('10.0',), - '2012': ('11.0',), - '2012e': ('11.0',), - } - override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH') - if override_path: - msvs_version = os.environ.get('GYP_MSVS_VERSION') - if not msvs_version or 'e' not in msvs_version: - raise ValueError('GYP_MSVS_OVERRIDE_PATH requires GYP_MSVS_VERSION to be ' - 'set to an "e" version (e.g. 2010e)') - return _CreateVersion(msvs_version, override_path, sdk_based=True) - version = str(version) - versions = _DetectVisualStudioVersions(version_map[version], 'e' in version) - if not versions: - if version == 'auto': - # Default to 2005 if we couldn't find anything - return _CreateVersion('2005', None) - else: - return _CreateVersion(version, None) - return versions[0] diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/SCons.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/SCons.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/SCons.py 2012-05-25 21:36:14.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/SCons.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,199 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -SCons generator. - -This contains class definitions and supporting functions for generating -pieces of SCons files for the different types of GYP targets. -""" - -import os - - -def WriteList(fp, list, prefix='', - separator=',\n ', - preamble=None, - postamble=None): - fp.write(preamble or '') - fp.write((separator or ' ').join([prefix + l for l in list])) - fp.write(postamble or '') - - -class TargetBase(object): - """ - Base class for a SCons representation of a GYP target. - """ - is_ignored = False - target_prefix = '' - target_suffix = '' - def __init__(self, spec): - self.spec = spec - def full_product_name(self): - """ - Returns the full name of the product being built: - - * Uses 'product_name' if it's set, else prefix + 'target_name'. - * Prepends 'product_dir' if set. - * Appends SCons suffix variables for the target type (or - product_extension). - """ - suffix = self.target_suffix - product_extension = self.spec.get('product_extension') - if product_extension: - suffix = '.' 
+ product_extension - prefix = self.spec.get('product_prefix', self.target_prefix) - name = self.spec['target_name'] - name = prefix + self.spec.get('product_name', name) + suffix - product_dir = self.spec.get('product_dir') - if product_dir: - name = os.path.join(product_dir, name) - else: - name = os.path.join(self.out_dir, name) - return name - - def write_input_files(self, fp): - """ - Writes the definition of the input files (sources). - """ - sources = self.spec.get('sources') - if not sources: - fp.write('\ninput_files = []\n') - return - preamble = '\ninput_files = [\n ' - postamble = ',\n]\n' - WriteList(fp, map(repr, sources), preamble=preamble, postamble=postamble) - - def builder_call(self): - """ - Returns the actual SCons builder call to build this target. - """ - name = self.full_product_name() - return 'env.%s(env.File(%r), input_files)' % (self.builder_name, name) - def write_target(self, fp, src_dir='', pre=''): - """ - Writes the lines necessary to build this target. - """ - fp.write('\n' + pre) - fp.write('_outputs = %s\n' % self.builder_call()) - fp.write('target_files.extend(_outputs)\n') - - -class NoneTarget(TargetBase): - """ - A GYP target type of 'none', implicitly or explicitly. - """ - def write_target(self, fp, src_dir='', pre=''): - fp.write('\ntarget_files.extend(input_files)\n') - - -class SettingsTarget(TargetBase): - """ - A GYP target type of 'settings'. - """ - is_ignored = True - - -compilable_sources_template = """ -_result = [] -for infile in input_files: - if env.compilable(infile): - if (type(infile) == type('') - and (infile.startswith(%(src_dir)r) - or not os.path.isabs(env.subst(infile)))): - # Force files below the build directory by replacing all '..' - # elements in the path with '__': - base, ext = os.path.splitext(os.path.normpath(infile)) - base = [d == '..' and '__' or d for d in base.split('/')] - base = os.path.join(*base) - object = '${OBJ_DIR}/${COMPONENT_NAME}/${TARGET_NAME}/' + base - if not infile.startswith(%(src_dir)r): - infile = %(src_dir)r + infile - infile = env.%(name)s(object, infile)[0] - else: - infile = env.%(name)s(infile)[0] - _result.append(infile) -input_files = _result -""" - -class CompilableSourcesTargetBase(TargetBase): - """ - An abstract base class for targets that compile their source files. - - We explicitly transform compilable files into object files, - even though SCons could infer that for us, because we want - to control where the object file ends up. (The implicit rules - in SCons always put the object file next to the source file.) - """ - intermediate_builder_name = None - def write_target(self, fp, src_dir='', pre=''): - if self.intermediate_builder_name is None: - raise NotImplementedError - if src_dir and not src_dir.endswith('/'): - src_dir += '/' - variables = { - 'src_dir': src_dir, - 'name': self.intermediate_builder_name, - } - fp.write(compilable_sources_template % variables) - super(CompilableSourcesTargetBase, self).write_target(fp) - - -class ProgramTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'executable'. - """ - builder_name = 'GypProgram' - intermediate_builder_name = 'StaticObject' - target_prefix = '${PROGPREFIX}' - target_suffix = '${PROGSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -class StaticLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'static_library'. 
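Not part of the patch: a small sketch of the naming rule implemented by TargetBase.full_product_name() above, using a hypothetical spec and the ProgramTarget subclass defined just above (output shown for a POSIX host, where os.path.join uses '/').

spec = {'target_name': 'foo', 'type': 'executable'}
prog = ProgramTarget(spec)
print(prog.full_product_name())
# ${TOP_BUILDDIR}/${PROGPREFIX}foo${PROGSUFFIX}
print(prog.builder_call())
# env.GypProgram(env.File('${TOP_BUILDDIR}/${PROGPREFIX}foo${PROGSUFFIX}'), input_files)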
- """ - builder_name = 'GypStaticLibrary' - intermediate_builder_name = 'StaticObject' - target_prefix = '${LIBPREFIX}' - target_suffix = '${LIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class SharedLibraryTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'shared_library'. - """ - builder_name = 'GypSharedLibrary' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${LIB_DIR}' - - -class LoadableModuleTarget(CompilableSourcesTargetBase): - """ - A GYP target type of 'loadable_module'. - """ - builder_name = 'GypLoadableModule' - intermediate_builder_name = 'SharedObject' - target_prefix = '${SHLIBPREFIX}' - target_suffix = '${SHLIBSUFFIX}' - out_dir = '${TOP_BUILDDIR}' - - -TargetMap = { - None : NoneTarget, - 'none' : NoneTarget, - 'settings' : SettingsTarget, - 'executable' : ProgramTarget, - 'static_library' : StaticLibraryTarget, - 'shared_library' : SharedLibraryTarget, - 'loadable_module' : LoadableModuleTarget, -} - - -def Target(spec): - return TargetMap[spec.get('type')](spec) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/__init__.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/__init__.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/__init__.py 2013-02-25 09:48:54.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/__init__.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,532 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import gyp.input -import optparse -import os.path -import re -import shlex -import sys -import traceback -from gyp.common import GypError - -# Default debug modes for GYP -debug = {} - -# List of "official" debug modes, but you can use anything you like. -DEBUG_GENERAL = 'general' -DEBUG_VARIABLES = 'variables' -DEBUG_INCLUDES = 'includes' - - -def DebugOutput(mode, message, *args): - if 'all' in gyp.debug or mode in gyp.debug: - ctx = ('unknown', 0, 'unknown') - try: - f = traceback.extract_stack(limit=2) - if f: - ctx = f[0][:3] - except: - pass - if args: - message %= args - print '%s:%s:%d:%s %s' % (mode.upper(), os.path.basename(ctx[0]), - ctx[1], ctx[2], message) - -def FindBuildFiles(): - extension = '.gyp' - files = os.listdir(os.getcwd()) - build_files = [] - for file in files: - if file.endswith(extension): - build_files.append(file) - return build_files - - -def Load(build_files, format, default_variables={}, - includes=[], depth='.', params=None, check=False, - circular_check=True): - """ - Loads one or more specified build files. - default_variables and includes will be copied before use. - Returns the generator for the specified format and the - data returned by loading the specified build files. - """ - if params is None: - params = {} - - flavor = None - if '-' in format: - format, params['flavor'] = format.split('-', 1) - - default_variables = copy.copy(default_variables) - - # Default variables provided by this program and its modules should be - # named WITH_CAPITAL_LETTERS to provide a distinct "best practice" namespace, - # avoiding collisions with user and automatic variables. - default_variables['GENERATOR'] = format - - # Format can be a custom python file, or by default the name of a module - # within gyp.generator. 
- if format.endswith('.py'): - generator_name = os.path.splitext(format)[0] - path, generator_name = os.path.split(generator_name) - - # Make sure the path to the custom generator is in sys.path - # Don't worry about removing it once we are done. Keeping the path - # to each generator that is used in sys.path is likely harmless and - # arguably a good idea. - path = os.path.abspath(path) - if path not in sys.path: - sys.path.insert(0, path) - else: - generator_name = 'gyp.generator.' + format - - # These parameters are passed in order (as opposed to by key) - # because ActivePython cannot handle key parameters to __import__. - generator = __import__(generator_name, globals(), locals(), generator_name) - for (key, val) in generator.generator_default_variables.items(): - default_variables.setdefault(key, val) - - # Give the generator the opportunity to set additional variables based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateVariables', None): - generator.CalculateVariables(default_variables, params) - - # Give the generator the opportunity to set generator_input_info based on - # the params it will receive in the output phase. - if getattr(generator, 'CalculateGeneratorInputInfo', None): - generator.CalculateGeneratorInputInfo(params) - - # Fetch the generator specific info that gets fed to input, we use getattr - # so we can default things and the generators only have to provide what - # they need. - generator_input_info = { - 'generator_wants_absolute_build_file_paths': - getattr(generator, 'generator_wants_absolute_build_file_paths', False), - 'generator_handles_variants': - getattr(generator, 'generator_handles_variants', False), - 'non_configuration_keys': - getattr(generator, 'generator_additional_non_configuration_keys', []), - 'path_sections': - getattr(generator, 'generator_additional_path_sections', []), - 'extra_sources_for_rules': - getattr(generator, 'generator_extra_sources_for_rules', []), - 'generator_supports_multiple_toolsets': - getattr(generator, 'generator_supports_multiple_toolsets', False), - 'generator_wants_static_library_dependencies_adjusted': - getattr(generator, - 'generator_wants_static_library_dependencies_adjusted', True), - 'generator_wants_sorted_dependencies': - getattr(generator, 'generator_wants_sorted_dependencies', False), - } - - # Process the input specific to this generator. - result = gyp.input.Load(build_files, default_variables, includes[:], - depth, generator_input_info, check, circular_check, - params['parallel']) - return [generator] + result - -def NameValueListToDict(name_value_list): - """ - Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary - of the pairs. If a string is simply NAME, then the value in the dictionary - is set to True. If VALUE can be converted to an integer, it is. - """ - result = { } - for item in name_value_list: - tokens = item.split('=', 1) - if len(tokens) == 2: - # If we can make it an int, use that, otherwise, use the string. - try: - token_value = int(tokens[1]) - except ValueError: - token_value = tokens[1] - # Set the variable to the supplied value. - result[tokens[0]] = token_value - else: - # No value supplied, treat it as a boolean and set it. 
- result[tokens[0]] = True - return result - -def ShlexEnv(env_name): - flags = os.environ.get(env_name, []) - if flags: - flags = shlex.split(flags) - return flags - -def FormatOpt(opt, value): - if opt.startswith('--'): - return '%s=%s' % (opt, value) - return opt + value - -def RegenerateAppendFlag(flag, values, predicate, env_name, options): - """Regenerate a list of command line flags, for an option of action='append'. - - The |env_name|, if given, is checked in the environment and used to generate - an initial list of options, then the options that were specified on the - command line (given in |values|) are appended. This matches the handling of - environment variables and command line flags where command line flags override - the environment, while not requiring the environment to be set when the flags - are used again. - """ - flags = [] - if options.use_environment and env_name: - for flag_value in ShlexEnv(env_name): - value = FormatOpt(flag, predicate(flag_value)) - if value in flags: - flags.remove(value) - flags.append(value) - if values: - for flag_value in values: - flags.append(FormatOpt(flag, predicate(flag_value))) - return flags - -def RegenerateFlags(options): - """Given a parsed options object, and taking the environment variables into - account, returns a list of flags that should regenerate an equivalent options - object (even in the absence of the environment variables.) - - Any path options will be normalized relative to depth. - - The format flag is not included, as it is assumed the calling generator will - set that as appropriate. - """ - def FixPath(path): - path = gyp.common.FixIfRelativePath(path, options.depth) - if not path: - return os.path.curdir - return path - - def Noop(value): - return value - - # We always want to ignore the environment when regenerating, to avoid - # duplicate or changed flags in the environment at the time of regeneration. - flags = ['--ignore-environment'] - for name, metadata in options._regeneration_metadata.iteritems(): - opt = metadata['opt'] - value = getattr(options, name) - value_predicate = metadata['type'] == 'path' and FixPath or Noop - action = metadata['action'] - env_name = metadata['env_name'] - if action == 'append': - flags.extend(RegenerateAppendFlag(opt, value, value_predicate, - env_name, options)) - elif action in ('store', None): # None is a synonym for 'store'. - if value: - flags.append(FormatOpt(opt, value_predicate(value))) - elif options.use_environment and env_name and os.environ.get(env_name): - flags.append(FormatOpt(opt, value_predicate(os.environ.get(env_name)))) - elif action in ('store_true', 'store_false'): - if ((action == 'store_true' and value) or - (action == 'store_false' and not value)): - flags.append(opt) - elif options.use_environment and env_name: - print >>sys.stderr, ('Warning: environment regeneration unimplemented ' - 'for %s flag %r env_name %r' % (action, opt, - env_name)) - else: - print >>sys.stderr, ('Warning: regeneration unimplemented for action %r ' - 'flag %r' % (action, opt)) - - return flags - -class RegeneratableOptionParser(optparse.OptionParser): - def __init__(self): - self.__regeneratable_options = {} - optparse.OptionParser.__init__(self) - - def add_option(self, *args, **kw): - """Add an option to the parser. - - This accepts the same arguments as OptionParser.add_option, plus the - following: - regenerate: can be set to False to prevent this option from being included - in regeneration. 
- env_name: name of environment variable that additional values for this - option come from. - type: adds type='path', to tell the regenerator that the values of - this option need to be made relative to options.depth - """ - env_name = kw.pop('env_name', None) - if 'dest' in kw and kw.pop('regenerate', True): - dest = kw['dest'] - - # The path type is needed for regenerating, for optparse we can just treat - # it as a string. - type = kw.get('type') - if type == 'path': - kw['type'] = 'string' - - self.__regeneratable_options[dest] = { - 'action': kw.get('action'), - 'type': type, - 'env_name': env_name, - 'opt': args[0], - } - - optparse.OptionParser.add_option(self, *args, **kw) - - def parse_args(self, *args): - values, args = optparse.OptionParser.parse_args(self, *args) - values._regeneration_metadata = self.__regeneratable_options - return values, args - -def gyp_main(args): - my_name = os.path.basename(sys.argv[0]) - - parser = RegeneratableOptionParser() - usage = 'usage: %s [options ...] [build_file ...]' - parser.set_usage(usage.replace('%s', '%prog')) - parser.add_option('-D', dest='defines', action='append', metavar='VAR=VAL', - env_name='GYP_DEFINES', - help='sets variable VAR to value VAL') - parser.add_option('-f', '--format', dest='formats', action='append', - env_name='GYP_GENERATORS', regenerate=False, - help='output formats to generate') - parser.add_option('--msvs-version', dest='msvs_version', - regenerate=False, - help='Deprecated; use -G msvs_version=MSVS_VERSION instead') - parser.add_option('-I', '--include', dest='includes', action='append', - metavar='INCLUDE', type='path', - help='files to include in all loaded .gyp files') - parser.add_option('--depth', dest='depth', metavar='PATH', type='path', - help='set DEPTH gyp variable to a relative path to PATH') - parser.add_option('-d', '--debug', dest='debug', metavar='DEBUGMODE', - action='append', default=[], help='turn on a debugging ' - 'mode for debugging GYP. Supported modes are "variables", ' - '"includes" and "general" or "all" for all of them.') - parser.add_option('-S', '--suffix', dest='suffix', default='', - help='suffix to add to generated files') - parser.add_option('-G', dest='generator_flags', action='append', default=[], - metavar='FLAG=VAL', env_name='GYP_GENERATOR_FLAGS', - help='sets generator flag FLAG to VAL') - parser.add_option('--generator-output', dest='generator_output', - action='store', default=None, metavar='DIR', type='path', - env_name='GYP_GENERATOR_OUTPUT', - help='puts generated build files under DIR') - parser.add_option('--ignore-environment', dest='use_environment', - action='store_false', default=True, regenerate=False, - help='do not read options from environment variables') - parser.add_option('--check', dest='check', action='store_true', - help='check format of gyp files') - parser.add_option('--parallel', action='store_true', - env_name='GYP_PARALLEL', - help='Use multiprocessing for speed (experimental)') - parser.add_option('--toplevel-dir', dest='toplevel_dir', action='store', - default=None, metavar='DIR', type='path', - help='directory to use as the root of the source tree') - parser.add_option('--build', dest='configs', action='append', - help='configuration for build after project generation') - # --no-circular-check disables the check for circular relationships between - # .gyp files. These relationships should not exist, but they've only been - # observed to be harmful with the Xcode generator. 
Chromium's .gyp files - # currently have some circular relationships on non-Mac platforms, so this - # option allows the strict behavior to be used on Macs and the lenient - # behavior to be used elsewhere. - # TODO(mark): Remove this option when http://crbug.com/35878 is fixed. - parser.add_option('--no-circular-check', dest='circular_check', - action='store_false', default=True, regenerate=False, - help="don't check for circular relationships between files") - - # We read a few things from ~/.gyp, so set up a var for that. - home_vars = ['HOME'] - if sys.platform in ('cygwin', 'win32'): - home_vars.append('USERPROFILE') - home = None - home_dot_gyp = None - for home_var in home_vars: - home = os.getenv(home_var) - if home != None: - home_dot_gyp = os.path.join(home, '.gyp') - if not os.path.exists(home_dot_gyp): - home_dot_gyp = None - else: - break - - # TODO(thomasvl): add support for ~/.gyp/defaults - - options, build_files_arg = parser.parse_args(args) - build_files = build_files_arg - - if not options.formats: - # If no format was given on the command line, then check the env variable. - generate_formats = [] - if options.use_environment: - generate_formats = os.environ.get('GYP_GENERATORS', []) - if generate_formats: - generate_formats = re.split('[\s,]', generate_formats) - if generate_formats: - options.formats = generate_formats - else: - # Nothing in the variable, default based on platform. - if sys.platform == 'darwin': - options.formats = ['xcode'] - elif sys.platform in ('win32', 'cygwin'): - options.formats = ['msvs'] - else: - options.formats = ['make'] - - if not options.generator_output and options.use_environment: - g_o = os.environ.get('GYP_GENERATOR_OUTPUT') - if g_o: - options.generator_output = g_o - - if not options.parallel and options.use_environment: - p = os.environ.get('GYP_PARALLEL') - options.parallel = bool(p and p != '0') - - for mode in options.debug: - gyp.debug[mode] = 1 - - # Do an extra check to avoid work when we're not debugging. - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, 'running with these options:') - for option, value in sorted(options.__dict__.items()): - if option[0] == '_': - continue - if isinstance(value, basestring): - DebugOutput(DEBUG_GENERAL, " %s: '%s'", option, value) - else: - DebugOutput(DEBUG_GENERAL, " %s: %s", option, value) - - if not build_files: - build_files = FindBuildFiles() - if not build_files: - raise GypError((usage + '\n\n%s: error: no build_file') % - (my_name, my_name)) - - # TODO(mark): Chromium-specific hack! - # For Chromium, the gyp "depth" variable should always be a relative path - # to Chromium's top-level "src" directory. If no depth variable was set - # on the command line, try to find a "src" directory by looking at the - # absolute path to each build file's directory. The first "src" component - # found will be treated as though it were the path used for --depth. - if not options.depth: - for build_file in build_files: - build_file_dir = os.path.abspath(os.path.dirname(build_file)) - build_file_dir_components = build_file_dir.split(os.path.sep) - components_len = len(build_file_dir_components) - for index in xrange(components_len - 1, -1, -1): - if build_file_dir_components[index] == 'src': - options.depth = os.path.sep.join(build_file_dir_components) - break - del build_file_dir_components[index] - - # If the inner loop found something, break without advancing to another - # build file. 
- if options.depth: - break - - if not options.depth: - raise GypError('Could not automatically locate src directory. This is' - 'a temporary Chromium feature that will be removed. Use' - '--depth as a workaround.') - - # If toplevel-dir is not set, we assume that depth is the root of our source - # tree. - if not options.toplevel_dir: - options.toplevel_dir = options.depth - - # -D on the command line sets variable defaults - D isn't just for define, - # it's for default. Perhaps there should be a way to force (-F?) a - # variable's value so that it can't be overridden by anything else. - cmdline_default_variables = {} - defines = [] - if options.use_environment: - defines += ShlexEnv('GYP_DEFINES') - if options.defines: - defines += options.defines - cmdline_default_variables = NameValueListToDict(defines) - if DEBUG_GENERAL in gyp.debug: - DebugOutput(DEBUG_GENERAL, - "cmdline_default_variables: %s", cmdline_default_variables) - - # Set up includes. - includes = [] - - # If ~/.gyp/include.gypi exists, it'll be forcibly included into every - # .gyp file that's loaded, before anything else is included. - if home_dot_gyp != None: - default_include = os.path.join(home_dot_gyp, 'include.gypi') - if os.path.exists(default_include): - print 'Using overrides found in ' + default_include - includes.append(default_include) - - # Command-line --include files come after the default include. - if options.includes: - includes.extend(options.includes) - - # Generator flags should be prefixed with the target generator since they - # are global across all generator runs. - gen_flags = [] - if options.use_environment: - gen_flags += ShlexEnv('GYP_GENERATOR_FLAGS') - if options.generator_flags: - gen_flags += options.generator_flags - generator_flags = NameValueListToDict(gen_flags) - if DEBUG_GENERAL in gyp.debug.keys(): - DebugOutput(DEBUG_GENERAL, "generator_flags: %s", generator_flags) - - # TODO: Remove this and the option after we've gotten folks to move to the - # generator flag. - if options.msvs_version: - print >>sys.stderr, \ - 'DEPRECATED: Use generator flag (-G msvs_version=' + \ - options.msvs_version + ') instead of --msvs-version=' + \ - options.msvs_version - generator_flags['msvs_version'] = options.msvs_version - - # Generate all requested formats (use a set in case we got one format request - # twice) - for format in set(options.formats): - params = {'options': options, - 'build_files': build_files, - 'generator_flags': generator_flags, - 'cwd': os.getcwd(), - 'build_files_arg': build_files_arg, - 'gyp_binary': sys.argv[0], - 'home_dot_gyp': home_dot_gyp, - 'parallel': options.parallel} - - # Start with the default variables from the command line. - [generator, flat_list, targets, data] = Load(build_files, format, - cmdline_default_variables, - includes, options.depth, - params, options.check, - options.circular_check) - - # TODO(mark): Pass |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - # NOTE: flat_list is the flattened dependency graph specifying the order - # that targets may be built. Build systems that operate serially or that - # need to have dependencies defined before dependents reference them should - # generate targets in the order specified in flat_list. 
- generator.GenerateOutput(flat_list, targets, data, params) - - if options.configs: - valid_configs = targets[flat_list[0]]['configurations'].keys() - for conf in options.configs: - if conf not in valid_configs: - raise GypError('Invalid config specified via --build: %s' % conf) - generator.PerformBuild(data, options.configs, params) - - # Done - return 0 - - -def main(args): - try: - return gyp_main(args) - except GypError, e: - sys.stderr.write("gyp: %s\n" % e) - return 1 - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/common.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/common.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/common.py 2012-12-26 16:49:26.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/common.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,489 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from __future__ import with_statement - -import errno -import filecmp -import os.path -import re -import tempfile -import sys - - -# A minimal memoizing decorator. It'll blow up if the args aren't immutable, -# among other "problems". -class memoize(object): - def __init__(self, func): - self.func = func - self.cache = {} - def __call__(self, *args): - try: - return self.cache[args] - except KeyError: - result = self.func(*args) - self.cache[args] = result - return result - - -class GypError(Exception): - """Error class representing an error, which is to be presented - to the user. The main entry point will catch and display this. - """ - pass - - -def ExceptionAppend(e, msg): - """Append a message to the given exception's message.""" - if not e.args: - e.args = (msg,) - elif len(e.args) == 1: - e.args = (str(e.args[0]) + ' ' + msg,) - else: - e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:] - - -def ParseQualifiedTarget(target): - # Splits a qualified target into a build file, target name and toolset. - - # NOTE: rsplit is used to disambiguate the Windows drive letter separator. - target_split = target.rsplit(':', 1) - if len(target_split) == 2: - [build_file, target] = target_split - else: - build_file = None - - target_split = target.rsplit('#', 1) - if len(target_split) == 2: - [target, toolset] = target_split - else: - toolset = None - - return [build_file, target, toolset] - - -def ResolveTarget(build_file, target, toolset): - # This function resolves a target into a canonical form: - # - a fully defined build file, either absolute or relative to the current - # directory - # - a target name - # - a toolset - # - # build_file is the file relative to which 'target' is defined. - # target is the qualified target. - # toolset is the default toolset for that target. - [parsed_build_file, target, parsed_toolset] = ParseQualifiedTarget(target) - - if parsed_build_file: - if build_file: - # If a relative path, parsed_build_file is relative to the directory - # containing build_file. If build_file is not in the current directory, - # parsed_build_file is not a usable path as-is. Resolve it by - # interpreting it as relative to build_file. If parsed_build_file is - # absolute, it is usable as a path regardless of the current directory, - # and os.path.join will return it as-is. 
- build_file = os.path.normpath(os.path.join(os.path.dirname(build_file), - parsed_build_file)) - # Further (to handle cases like ../cwd), make it relative to cwd) - if not os.path.isabs(build_file): - build_file = RelativePath(build_file, '.') - else: - build_file = parsed_build_file - - if parsed_toolset: - toolset = parsed_toolset - - return [build_file, target, toolset] - - -def BuildFile(fully_qualified_target): - # Extracts the build file from the fully qualified target. - return ParseQualifiedTarget(fully_qualified_target)[0] - - -def GetEnvironFallback(var_list, default): - """Look up a key in the environment, with fallback to secondary keys - and finally falling back to a default value.""" - for var in var_list: - if var in os.environ: - return os.environ[var] - return default - - -def QualifiedTarget(build_file, target, toolset): - # "Qualified" means the file that a target was defined in and the target - # name, separated by a colon, suffixed by a # and the toolset name: - # /path/to/file.gyp:target_name#toolset - fully_qualified = build_file + ':' + target - if toolset: - fully_qualified = fully_qualified + '#' + toolset - return fully_qualified - - -@memoize -def RelativePath(path, relative_to): - # Assuming both |path| and |relative_to| are relative to the current - # directory, returns a relative path that identifies path relative to - # relative_to. - - # Convert to normalized (and therefore absolute paths). - path = os.path.realpath(path) - relative_to = os.path.realpath(relative_to) - - # Split the paths into components. - path_split = path.split(os.path.sep) - relative_to_split = relative_to.split(os.path.sep) - - # Determine how much of the prefix the two paths share. - prefix_len = len(os.path.commonprefix([path_split, relative_to_split])) - - # Put enough ".." components to back up out of relative_to to the common - # prefix, and then append the part of path_split after the common prefix. - relative_split = [os.path.pardir] * (len(relative_to_split) - prefix_len) + \ - path_split[prefix_len:] - - if len(relative_split) == 0: - # The paths were the same. - return '' - - # Turn it back into a string and we're done. - return os.path.join(*relative_split) - - -@memoize -def InvertRelativePath(path, toplevel_dir=None): - """Given a path like foo/bar that is relative to toplevel_dir, return - the inverse relative path back to the toplevel_dir. - - E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path))) - should always produce the empty string, unless the path contains symlinks. - """ - if not path: - return path - toplevel_dir = '.' if toplevel_dir is None else toplevel_dir - return RelativePath(toplevel_dir, os.path.join(toplevel_dir, path)) - - -def FixIfRelativePath(path, relative_to): - # Like RelativePath but returns |path| unchanged if it is absolute. - if os.path.isabs(path): - return path - return RelativePath(path, relative_to) - - -def UnrelativePath(path, relative_to): - # Assuming that |relative_to| is relative to the current directory, and |path| - # is a path relative to the dirname of |relative_to|, returns a path that - # identifies |path| relative to the current directory. - rel_dir = os.path.dirname(relative_to) - return os.path.normpath(os.path.join(rel_dir, path)) - - -# re objects used by EncodePOSIXShellArgument. See IEEE 1003.1 XCU.2.2 at -# http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_02 -# and the documentation for various shells. 
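As a reading aid, the qualified-target format described above (/path/to/file.gyp:target_name#toolset) round-trips through the helpers like this; the file and target names are hypothetical.

full = QualifiedTarget('a/b/foo.gyp', 'bar', 'host')
print(full)                        # a/b/foo.gyp:bar#host
print(ParseQualifiedTarget(full))  # ['a/b/foo.gyp', 'bar', 'host']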
- -# _quote is a pattern that should match any argument that needs to be quoted -# with double-quotes by EncodePOSIXShellArgument. It matches the following -# characters appearing anywhere in an argument: -# \t, \n, space parameter separators -# # comments -# $ expansions (quoted to always expand within one argument) -# % called out by IEEE 1003.1 XCU.2.2 -# & job control -# ' quoting -# (, ) subshell execution -# *, ?, [ pathname expansion -# ; command delimiter -# <, >, | redirection -# = assignment -# {, } brace expansion (bash) -# ~ tilde expansion -# It also matches the empty string, because "" (or '') is the only way to -# represent an empty string literal argument to a POSIX shell. -# -# This does not match the characters in _escape, because those need to be -# backslash-escaped regardless of whether they appear in a double-quoted -# string. -_quote = re.compile('[\t\n #$%&\'()*;<=>?[{|}~]|^$') - -# _escape is a pattern that should match any character that needs to be -# escaped with a backslash, whether or not the argument matched the _quote -# pattern. _escape is used with re.sub to backslash anything in _escape's -# first match group, hence the (parentheses) in the regular expression. -# -# _escape matches the following characters appearing anywhere in an argument: -# " to prevent POSIX shells from interpreting this character for quoting -# \ to prevent POSIX shells from interpreting this character for escaping -# ` to prevent POSIX shells from interpreting this character for command -# substitution -# Missing from this list is $, because the desired behavior of -# EncodePOSIXShellArgument is to permit parameter (variable) expansion. -# -# Also missing from this list is !, which bash will interpret as the history -# expansion character when history is enabled. bash does not enable history -# by default in non-interactive shells, so this is not thought to be a problem. -# ! was omitted from this list because bash interprets "\!" as a literal string -# including the backslash character (avoiding history expansion but retaining -# the backslash), which would not be correct for argument encoding. Handling -# this case properly would also be problematic because bash allows the history -# character to be changed with the histchars shell variable. Fortunately, -# as history is not enabled in non-interactive shells and -# EncodePOSIXShellArgument is only expected to encode for non-interactive -# shells, there is no room for error here by ignoring !. -_escape = re.compile(r'(["\\`])') - -def EncodePOSIXShellArgument(argument): - """Encodes |argument| suitably for consumption by POSIX shells. - - argument may be quoted and escaped as necessary to ensure that POSIX shells - treat the returned value as a literal representing the argument passed to - this function. Parameter (variable) expansions beginning with $ are allowed - to remain intact without escaping the $, to allow the argument to contain - references to variables to be expanded by the shell. - """ - - if not isinstance(argument, str): - argument = str(argument) - - if _quote.search(argument): - quote = '"' - else: - quote = '' - - encoded = quote + re.sub(_escape, r'\\\1', argument) + quote - - return encoded - - -def EncodePOSIXShellList(list): - """Encodes |list| suitably for consumption by POSIX shells. - - Returns EncodePOSIXShellArgument for each item in list, and joins them - together using the space character as an argument separator. 
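For illustration only, here is what the quoting rules described above produce for a few representative arguments (a sketch assuming the functions from this module are in scope):

print(EncodePOSIXShellArgument('plain'))        # plain
print(EncodePOSIXShellArgument('two words'))    # "two words"
print(EncodePOSIXShellArgument('say "hi"'))     # "say \"hi\""
print(EncodePOSIXShellList(['echo', '$HOME']))  # echo "$HOME"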
- """ - - encoded_arguments = [] - for argument in list: - encoded_arguments.append(EncodePOSIXShellArgument(argument)) - return ' '.join(encoded_arguments) - - -def DeepDependencyTargets(target_dicts, roots): - """Returns the recursive list of target dependencies.""" - dependencies = set() - pending = set(roots) - while pending: - # Pluck out one. - r = pending.pop() - # Skip if visited already. - if r in dependencies: - continue - # Add it. - dependencies.add(r) - # Add its children. - spec = target_dicts[r] - pending.update(set(spec.get('dependencies', []))) - pending.update(set(spec.get('dependencies_original', []))) - return list(dependencies - set(roots)) - - -def BuildFileTargets(target_list, build_file): - """From a target_list, returns the subset from the specified build_file. - """ - return [p for p in target_list if BuildFile(p) == build_file] - - -def AllTargets(target_list, target_dicts, build_file): - """Returns all targets (direct and dependencies) for the specified build_file. - """ - bftargets = BuildFileTargets(target_list, build_file) - deptargets = DeepDependencyTargets(target_dicts, bftargets) - return bftargets + deptargets - - -def WriteOnDiff(filename): - """Write to a file only if the new contents differ. - - Arguments: - filename: name of the file to potentially write to. - Returns: - A file like object which will write to temporary file and only overwrite - the target if it differs (on close). - """ - - class Writer: - """Wrapper around file which only covers the target if it differs.""" - def __init__(self): - # Pick temporary file. - tmp_fd, self.tmp_path = tempfile.mkstemp( - suffix='.tmp', - prefix=os.path.split(filename)[1] + '.gyp.', - dir=os.path.split(filename)[0]) - try: - self.tmp_file = os.fdopen(tmp_fd, 'wb') - except Exception: - # Don't leave turds behind. - os.unlink(self.tmp_path) - raise - - def __getattr__(self, attrname): - # Delegate everything else to self.tmp_file - return getattr(self.tmp_file, attrname) - - def close(self): - try: - # Close tmp file. - self.tmp_file.close() - # Determine if different. - same = False - try: - same = filecmp.cmp(self.tmp_path, filename, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(self.tmp_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - os.chmod(self.tmp_path, 0666 & ~umask) - if sys.platform == 'win32' and os.path.exists(filename): - # NOTE: on windows (but not cygwin) rename will not replace an - # existing file, so it must be preceded with a remove. Sadly there - # is no way to make the switch atomic. - os.remove(filename) - os.rename(self.tmp_path, filename) - except Exception: - # Don't leave turds behind. 
- os.unlink(self.tmp_path) - raise - - return Writer() - - -def GetFlavor(params): - """Returns |params.flavor| if it's set, the system's default flavor else.""" - flavors = { - 'cygwin': 'win', - 'win32': 'win', - 'darwin': 'mac', - } - - if 'flavor' in params: - return params['flavor'] - if sys.platform in flavors: - return flavors[sys.platform] - if sys.platform.startswith('sunos'): - return 'solaris' - if sys.platform.startswith('freebsd'): - return 'freebsd' - if sys.platform.startswith('aix'): - return 'aix' - - return 'linux' - - -def CopyTool(flavor, out_path): - """Finds (mac|sun|win)_tool.gyp in the gyp directory and copies it - to |out_path|.""" - prefix = { 'solaris': 'sun', 'mac': 'mac', 'win': 'win' }.get(flavor, None) - if not prefix: - return - - # Slurp input file. - source_path = os.path.join( - os.path.dirname(os.path.abspath(__file__)), '%s_tool.py' % prefix) - with open(source_path) as source_file: - source = source_file.readlines() - - # Add header and write it out. - tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix) - with open(tool_path, 'w') as tool_file: - tool_file.write( - ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:])) - - # Make file executable. - os.chmod(tool_path, 0755) - - -# From Alex Martelli, -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52560 -# ASPN: Python Cookbook: Remove duplicates from a sequence -# First comment, dated 2001/10/13. -# (Also in the printed Python Cookbook.) - -def uniquer(seq, idfun=None): - if idfun is None: - idfun = lambda x: x - seen = {} - result = [] - for item in seq: - marker = idfun(item) - if marker in seen: continue - seen[marker] = 1 - result.append(item) - return result - - -class CycleError(Exception): - """An exception raised when an unexpected cycle is detected.""" - def __init__(self, nodes): - self.nodes = nodes - def __str__(self): - return 'CycleError: cycle involving: ' + str(self.nodes) - - -def TopologicallySorted(graph, get_edges): - """Topologically sort based on a user provided edge definition. - - Args: - graph: A list of node names. - get_edges: A function mapping from node name to a hashable collection - of node names which this node has outgoing edges to. - Returns: - A list containing all of the node in graph in topological order. - It is assumed that calling get_edges once for each node and caching is - cheaper than repeatedly calling get_edges. - Raises: - CycleError in the event of a cycle. - Example: - graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'} - def GetEdges(node): - return re.findall(r'\$\(([^))]\)', graph[node]) - print TopologicallySorted(graph.keys(), GetEdges) - ==> - ['a', 'c', b'] - """ - get_edges = memoize(get_edges) - visited = set() - visiting = set() - ordered_nodes = [] - def Visit(node): - if node in visiting: - raise CycleError(visiting) - if node in visited: - return - visited.add(node) - visiting.add(node) - for neighbor in get_edges(node): - Visit(neighbor) - visiting.remove(node) - ordered_nodes.insert(0, node) - for node in sorted(graph): - Visit(node) - return ordered_nodes diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/common_test.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/common_test.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/common_test.py 2012-08-28 16:30:17.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/common_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,71 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. 
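A corrected, runnable version of the usage example from the TopologicallySorted docstring above (the regex printed there is malformed and the expected output has a misplaced quote); the graph and GetEdges are illustrative.

import re
graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
def GetEdges(node):
  return re.findall(r'\$\(([^)]*)\)', graph[node])
print(TopologicallySorted(graph.keys(), GetEdges))  # ['a', 'c', 'b']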
-# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Unit tests for the common.py file.""" - -import gyp.common -import unittest -import sys - - -class TestTopologicallySorted(unittest.TestCase): - def test_Valid(self): - """Test that sorting works on a valid graph with one possible order.""" - graph = { - 'a': ['b', 'c'], - 'b': [], - 'c': ['d'], - 'd': ['b'], - } - def GetEdge(node): - return tuple(graph[node]) - self.assertEqual( - gyp.common.TopologicallySorted(graph.keys(), GetEdge), - ['a', 'c', 'd', 'b']) - - def test_Cycle(self): - """Test that an exception is thrown on a cyclic graph.""" - graph = { - 'a': ['b'], - 'b': ['c'], - 'c': ['d'], - 'd': ['a'], - } - def GetEdge(node): - return tuple(graph[node]) - self.assertRaises( - gyp.common.CycleError, gyp.common.TopologicallySorted, - graph.keys(), GetEdge) - - -class TestGetFlavor(unittest.TestCase): - """Test that gyp.common.GetFlavor works as intended""" - original_platform = '' - - def setUp(self): - self.original_platform = sys.platform - - def tearDown(self): - sys.platform = self.original_platform - - def assertFlavor(self, expected, argument, param): - sys.platform = argument - self.assertEqual(expected, gyp.common.GetFlavor(param)) - - def test_platform_default(self): - self.assertFlavor('freebsd', 'freebsd9' , {}) - self.assertFlavor('freebsd', 'freebsd10', {}) - self.assertFlavor('solaris', 'sunos5' , {}); - self.assertFlavor('solaris', 'sunos' , {}); - self.assertFlavor('linux' , 'linux2' , {}); - self.assertFlavor('linux' , 'linux3' , {}); - - def test_param(self): - self.assertFlavor('foobar', 'linux2' , {'flavor': 'foobar'}) - - -if __name__ == '__main__': - unittest.main() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/easy_xml.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/easy_xml.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/easy_xml.py 2012-08-21 17:57:33.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/easy_xml.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,157 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import re -import os - - -def XmlToString(content, encoding='utf-8', pretty=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Visual Studio files have a lot of pre-defined structures. This function makes - it easy to represent these structures as Python data structures, instead of - having to create a lot of function calls. - - Each XML element of the content is represented as a list composed of: - 1. The name of the element, a string, - 2. The attributes of the element, a dictionary (optional), and - 3+. The content of the element, if any. Strings are simple text nodes and - lists are child elements. - - Example 1: - - becomes - ['test'] - - Example 2: - - This is - it! - - - becomes - ['myelement', {'a':'value1', 'b':'value2'}, - ['childtype', 'This is'], - ['childtype', 'it!'], - ] - - Args: - content: The structured content to be converted. - encoding: The encoding to report on the first XML line. - pretty: True if we want pretty printing with indents and new lines. - - Returns: - The XML content as a string. - """ - # We create a huge list of all the elements of the file. 
- xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding] - if pretty: - xml_parts.append('\n') - _ConstructContentList(xml_parts, content, pretty) - - # Convert it to a string - return ''.join(xml_parts) - - -def _ConstructContentList(xml_parts, specification, pretty, level=0): - """ Appends the XML parts corresponding to the specification. - - Args: - xml_parts: A list of XML parts to be appended to. - specification: The specification of the element. See EasyXml docs. - pretty: True if we want pretty printing with indents and new lines. - level: Indentation level. - """ - # The first item in a specification is the name of the element. - if pretty: - indentation = ' ' * level - new_line = '\n' - else: - indentation = '' - new_line = '' - name = specification[0] - if not isinstance(name, str): - raise Exception('The first item of an EasyXml specification should be ' - 'a string. Specification was ' + str(specification)) - xml_parts.append(indentation + '<' + name) - - # Optionally in second position is a dictionary of the attributes. - rest = specification[1:] - if rest and isinstance(rest[0], dict): - for at, val in sorted(rest[0].iteritems()): - xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) - rest = rest[1:] - if rest: - xml_parts.append('>') - all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) - multi_line = not all_strings - if multi_line and new_line: - xml_parts.append(new_line) - for child_spec in rest: - # If it's a string, append a text node. - # Otherwise recurse over that child definition - if isinstance(child_spec, str): - xml_parts.append(_XmlEscape(child_spec)) - else: - _ConstructContentList(xml_parts, child_spec, pretty, level + 1) - if multi_line and indentation: - xml_parts.append(indentation) - xml_parts.append('</%s>%s' % (name, new_line)) - else: - xml_parts.append('/>%s' % new_line) - - -def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, - win32=False): - """ Writes the XML content to disk, touching the file only if it has changed. - - Args: - content: The structured content to be written. - path: Location of the file. - encoding: The encoding to report on the first line of the XML file. - pretty: True if we want pretty printing with indents and new lines. - """ - xml_string = XmlToString(content, encoding, pretty) - if win32 and os.linesep != '\r\n': - xml_string = xml_string.replace('\n', '\r\n') - - # Get the old content - try: - f = open(path, 'r') - existing = f.read() - f.close() - except: - existing = None - - # It has changed, write it - if existing != xml_string: - f = open(path, 'w') - f.write(xml_string) - f.close() - - -_xml_escape_map = { - '"': '&quot;', - "'": '&apos;', - '<': '&lt;', - '>': '&gt;', - '&': '&amp;', - '\n': '&#xA;', - '\r': '&#xD;', -} - - -_xml_escape_re = re.compile( - "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) - - -def _XmlEscape(value, attr=False): - """ Escape a string for inclusion in XML.""" - def replace(match): - m = match.string[match.start() : match.end()] - # don't replace single quotes in attrs - if attr and m == "'": - return m - return _xml_escape_map[m] - return _xml_escape_re.sub(replace, value) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/easy_xml_test.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/easy_xml_test.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/easy_xml_test.py 2012-08-21 17:57:33.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/easy_xml_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,103 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2011 Google Inc.
All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the easy_xml.py file. """ - -import gyp.easy_xml as easy_xml -import unittest -import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - - def setUp(self): - self.stderr = StringIO.StringIO() - - def test_EasyXml_simple(self): - self.assertEqual( - easy_xml.XmlToString(['test']), - '') - - self.assertEqual( - easy_xml.XmlToString(['test'], encoding='Windows-1252'), - '') - - def test_EasyXml_simple_with_attributes(self): - self.assertEqual( - easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]), - '') - - def test_EasyXml_escaping(self): - original = '\'"\r&\nfoo' - converted = '<test>\'" & foo' - converted_apos = converted.replace("'", ''') - self.assertEqual( - easy_xml.XmlToString(['test3', {'a': original}, original]), - '%s' % - (converted, converted_apos)) - - def test_EasyXml_pretty(self): - self.assertEqual( - easy_xml.XmlToString( - ['test3', - ['GrandParent', - ['Parent1', - ['Child'] - ], - ['Parent2'] - ] - ], - pretty=True), - '\n' - '\n' - ' \n' - ' \n' - ' \n' - ' \n' - ' \n' - ' \n' - '\n') - - - def test_EasyXml_complex(self): - # We want to create: - target = ( - '' - '' - '' - '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}' - 'Win32Proj' - 'automated_ui_tests' - '' - '' - '' - 'Application' - 'Unicode' - '' - '') - - xml = easy_xml.XmlToString( - ['Project', - ['PropertyGroup', {'Label': 'Globals'}, - ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'], - ['Keyword', 'Win32Proj'], - ['RootNamespace', 'automated_ui_tests'] - ], - ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}], - ['PropertyGroup', - {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'", - 'Label': 'Configuration'}, - ['ConfigurationType', 'Application'], - ['CharacterSet', 'Unicode'] - ] - ]) - self.assertEqual(xml, target) - - -if __name__ == '__main__': - unittest.main() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/android.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/android.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/android.py 2013-01-18 21:58:36.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/android.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1099 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This generates makefiles suitable for inclusion into the Android build system -# via an Android.mk file. It is based on make.py, the standard makefile -# generator. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level GypAndroid.mk. This means that all -# variables in .mk-files clobber one another, and furthermore that any -# variables set potentially clash with other Android build system variables. -# Try to avoid setting global variables where possible. - -import gyp -import gyp.common -import gyp.generator.make as make # Reuse global functions from make backend. 
-import os -import re -import subprocess - -generator_default_variables = { - 'OS': 'android', - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.so', - 'INTERMEDIATE_DIR': '$(gyp_intermediate_dir)', - 'SHARED_INTERMEDIATE_DIR': '$(gyp_shared_intermediate_dir)', - 'PRODUCT_DIR': '$(gyp_shared_intermediate_dir)', - 'SHARED_LIB_DIR': '$(builddir)/lib.$(TOOLSET)', - 'LIB_DIR': '$(obj).$(TOOLSET)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(RULE_SOURCES)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': '$(GYP_DEFAULT_CONFIGURATION)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - - -# Generator-specific gyp specs. -generator_additional_non_configuration_keys = [ - # Boolean to declare that this target does not want its name mangled. - 'android_unmangled_name', -] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -SHARED_FOOTER = """\ -# "gyp_all_modules" is a concatenation of the "gyp_all_modules" targets from -# all the included sub-makefiles. This is just here to clarify. -gyp_all_modules: -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -android_standard_include_paths = set([ - # JNI_H_INCLUDE in build/core/binary.mk - 'dalvik/libnativehelper/include/nativehelper', - # from SRC_HEADERS in build/core/config.mk - 'system/core/include', - 'hardware/libhardware/include', - 'hardware/libhardware_legacy/include', - 'hardware/ril/include', - 'dalvik/libnativehelper/include', - 'frameworks/native/include', - 'frameworks/native/opengl/include', - 'frameworks/base/include', - 'frameworks/base/opengl/include', - 'frameworks/base/native/include', - 'external/skia/include', - # TARGET_C_INCLUDES in build/core/combo/TARGET_linux-arm.mk - 'bionic/libc/arch-arm/include', - 'bionic/libc/include', - 'bionic/libstdc++/include', - 'bionic/libc/kernel/common', - 'bionic/libc/kernel/arch-arm', - 'bionic/libm/include', - 'bionic/libm/include/arm', - 'bionic/libthread_db/include', - ]) - - -# Map gyp target types to Android module classes. -MODULE_CLASSES = { - 'static_library': 'STATIC_LIBRARIES', - 'shared_library': 'SHARED_LIBRARIES', - 'executable': 'EXECUTABLES', -} - - -def IsCPPExtension(ext): - return make.COMPILABLE_EXTENSIONS.get(ext) == 'cxx' - - -def Sourceify(path): - """Convert a path to its source directory form. The Android backend does not - support options.generator_output, so this function is a noop.""" - return path - - -# Map from qualified target to path to output. -# For Android, the target of these maps is a tuple ('static', 'modulename'), -# ('dynamic', 'modulename'), or ('path', 'some/path') instead of a string, -# since we link by module. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class AndroidMkWriter(object): - """AndroidMkWriter packages up the writing of one target-specific Android.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. 
- """ - - def __init__(self, android_top_dir): - self.android_top_dir = android_top_dir - - def Write(self, qualified_target, relative_target, base_path, output_filename, - spec, configs, part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - relative_target: qualified target name relative to the root - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - make.ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.qualified_target = qualified_target - self.relative_target = relative_target - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - - self.android_class = MODULE_CLASSES.get(self.type, 'GYP') - self.android_module = self.ComputeAndroidModule(spec) - (self.android_stem, self.android_suffix) = self.ComputeOutputParts(spec) - self.output = self.output_binary = self.ComputeOutput(spec) - - # Standard header. - self.WriteLn('include $(CLEAR_VARS)\n') - - # Module class and name. - self.WriteLn('LOCAL_MODULE_CLASS := ' + self.android_class) - self.WriteLn('LOCAL_MODULE := ' + self.android_module) - # Only emit LOCAL_MODULE_STEM if it's different to LOCAL_MODULE. - # The library module classes fail if the stem is set. ComputeOutputParts - # makes sure that stem == modulename in these cases. - if self.android_stem != self.android_module: - self.WriteLn('LOCAL_MODULE_STEM := ' + self.android_stem) - self.WriteLn('LOCAL_MODULE_SUFFIX := ' + self.android_suffix) - self.WriteLn('LOCAL_MODULE_TAGS := optional') - if self.toolset == 'host': - self.WriteLn('LOCAL_IS_HOST_MODULE := true') - - # Grab output directories; needed for Actions and Rules. - self.WriteLn('gyp_intermediate_dir := $(call local-intermediates-dir)') - self.WriteLn('gyp_shared_intermediate_dir := ' - '$(call intermediates-dir-for,GYP,shared)') - self.WriteLn() - - # List files this target depends on so that actions/rules/copies/sources - # can depend on the list. - # TODO: doesn't pull in things through transitive link deps; needed? - target_dependencies = [x[1] for x in deps if x[0] == 'path'] - self.WriteLn('# Make sure our deps are built first.') - self.WriteList(target_dependencies, 'GYP_TARGET_DEPENDENCIES', - local_pathify=True) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs) - - # Rules must be early like actions. - if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs) - - # GYP generated outputs. - self.WriteList(extra_outputs, 'GYP_GENERATED_OUTPUTS', local_pathify=True) - - # Set LOCAL_ADDITIONAL_DEPENDENCIES so that Android's build rules depend - # on both our dependency targets and our generated files. 
- self.WriteLn('# Make sure our deps and generated files are built first.') - self.WriteLn('LOCAL_ADDITIONAL_DEPENDENCIES := $(GYP_TARGET_DEPENDENCIES) ' - '$(GYP_GENERATED_OUTPUTS)') - self.WriteLn() - - # Sources. - if spec.get('sources', []) or extra_sources: - self.WriteSources(spec, configs, extra_sources) - - self.WriteTarget(spec, configs, deps, link_deps, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = ('path', self.output_binary) - - # Update global list of link dependencies. - if self.type == 'static_library': - target_link_deps[qualified_target] = ('static', self.android_module) - elif self.type == 'shared_library': - target_link_deps[qualified_target] = ('shared', self.android_module) - - self.fp.close() - return self.android_module - - - def WriteActions(self, actions, extra_sources, extra_outputs): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - """ - for action in actions: - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - action['action_name'])) - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. - # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - if not out.startswith('$'): - print ('WARNING: Action for target "%s" writes output to local path ' - '"%s".' % (self.target, out)) - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - - # Prepare the actual command. - command = gyp.common.EncodePOSIXShellList(action['action']) - if 'message' in action: - quiet_cmd = 'Gyp action: %s ($@)' % action['message'] - else: - quiet_cmd = 'Gyp action: %s ($@)' % name - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the gyp_* - # variables for the action rule with an absolute version so that the - # output goes in the right place. - # Only write the gyp_* rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - main_output = make.QuoteSpaces(self.LocalPathify(outputs[0])) - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' % - main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' % - main_output) - - # Android's envsetup.sh adds a number of directories to the path including - # the built host binary directory. This causes actions/rules invoked by - # gyp to sometimes use these instead of system versions, e.g. bison. - # The built host binaries may not be suitable, and can cause errors. - # So, we remove them from the PATH using the ANDROID_BUILD_PATHS variable - # set by envsetup. 
- self.WriteLn('%s: export PATH := $(subst $(ANDROID_BUILD_PATHS),,$(PATH))' - % main_output) - - for input in inputs: - assert ' ' not in input, ( - "Spaces in action input filenames not supported (%s)" % input) - for output in outputs: - assert ' ' not in output, ( - "Spaces in action output filenames not supported (%s)" % output) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, ' '.join(map(self.LocalPathify, inputs)))) - self.WriteLn('\t@echo "%s"' % quiet_cmd) - self.WriteLn('\t$(hide)%s\n' % command) - for output in outputs[1:]: - # Make each output depend on the main output, with an empty command - # to force make to notice that the mtime has changed. - self.WriteLn('%s: %s ;' % (self.LocalPathify(output), main_output)) - - extra_outputs += outputs - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs): - """Write Makefile code for any 'rules' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - """ - if len(rules) == 0: - return - rule_trigger = '%s_rule_trigger' % self.android_module - - did_write_rule = False - for rule in rules: - if len(rule.get('rule_sources', [])) == 0: - continue - did_write_rule = True - name = make.StringToMakefileVariable('%s_%s' % (self.relative_target, - rule['rule_name'])) - self.WriteLn('\n### Generated for rule "%s":' % name) - self.WriteLn('# "%s":' % rule) - - inputs = rule.get('inputs') - for rule_source in rule.get('rule_sources', []): - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root, - rule_source_dirname) - for out in rule['outputs']] - - dirs = set() - for out in outputs: - if not out.startswith('$'): - print ('WARNING: Rule for target %s writes output to local path %s' - % (self.target, out)) - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - extra_outputs += outputs - if int(rule.get('process_outputs_as_sources', False)): - extra_sources.extend(outputs) - - components = [] - for component in rule['action']: - component = self.ExpandInputRoot(component, rule_source_root, - rule_source_dirname) - if '$(RULE_SOURCES)' in component: - component = component.replace('$(RULE_SOURCES)', - rule_source) - components.append(component) - - command = gyp.common.EncodePOSIXShellList(components) - cd_action = 'cd $(gyp_local_path)/%s; ' % self.path - command = cd_action + command - if dirs: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - # We set up a rule to build the first output, and then set up - # a rule for each additional output to depend on the first. - outputs = map(self.LocalPathify, outputs) - main_output = outputs[0] - self.WriteLn('%s: gyp_local_path := $(LOCAL_PATH)' % main_output) - self.WriteLn('%s: gyp_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_intermediate_dir)' - % main_output) - self.WriteLn('%s: gyp_shared_intermediate_dir := ' - '$(GYP_ABS_ANDROID_TOP_DIR)/$(gyp_shared_intermediate_dir)' - % main_output) - - # See explanation in WriteActions. 
- self.WriteLn('%s: export PATH := ' - '$(subst $(ANDROID_BUILD_PATHS),,$(PATH))' % main_output) - - main_output_deps = self.LocalPathify(rule_source) - if inputs: - main_output_deps += ' ' - main_output_deps += ' '.join([self.LocalPathify(f) for f in inputs]) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES)' % - (main_output, main_output_deps)) - self.WriteLn('\t%s\n' % command) - for output in outputs[1:]: - self.WriteLn('%s: %s' % (output, main_output)) - self.WriteLn('.PHONY: %s' % (rule_trigger)) - self.WriteLn('%s: %s' % (rule_trigger, main_output)) - self.WriteLn('') - if did_write_rule: - extra_sources.append(rule_trigger) # Force all rules to run. - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - """ - self.WriteLn('### Generated for copy rule.') - - variable = make.StringToMakefileVariable(self.relative_target + '_copies') - outputs = [] - for copy in copies: - for path in copy['files']: - # The Android build system does not allow generation of files into the - # source tree. The destination should start with a variable, which will - # typically be $(gyp_intermediate_dir) or - # $(gyp_shared_intermediate_dir). Note that we can't use an assertion - # because some of the gyp tests depend on this. - if not copy['destination'].startswith('$'): - print ('WARNING: Copy rule for target %s writes output to ' - 'local path %s' % (self.target, copy['destination'])) - - # LocalPathify() calls normpath, stripping trailing slashes. - path = Sourceify(self.LocalPathify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.LocalPathify(os.path.join(copy['destination'], - filename))) - - self.WriteLn('%s: %s $(GYP_TARGET_DEPENDENCIES) | $(ACP)' % - (output, path)) - self.WriteLn('\t@echo Copying: $@') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) $(ACP) -r $< $@') - self.WriteLn() - outputs.append(output) - self.WriteLn('%s = %s' % (variable, - ' '.join(map(make.QuoteSpaces, outputs)))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteSourceFlags(self, spec, configs): - """Write out the flags and include paths used to compile source files for - the current target. - - Args: - spec, configs: input from gyp. - """ - config = configs[spec['default_configuration']] - extracted_includes = [] - - self.WriteLn('\n# Flags passed to both C and C++ files.') - cflags, includes_from_cflags = self.ExtractIncludesFromCFlags( - config.get('cflags')) - extracted_includes.extend(includes_from_cflags) - self.WriteList(cflags, 'MY_CFLAGS') - - cflags_c, includes_from_cflags_c = self.ExtractIncludesFromCFlags( - config.get('cflags_c')) - extracted_includes.extend(includes_from_cflags_c) - self.WriteList(cflags_c, 'MY_CFLAGS_C') - - self.WriteList(config.get('defines'), 'MY_DEFS', prefix='-D', - quoter=make.EscapeCppDefine) - self.WriteLn('LOCAL_CFLAGS := $(MY_CFLAGS_C) $(MY_CFLAGS) $(MY_DEFS)') - - # Undefine ANDROID for host modules - # TODO: the source code should not use macro ANDROID to tell if it's host or - # target module. 
- if self.toolset == 'host': - self.WriteLn('# Undefine ANDROID for host modules') - self.WriteLn('LOCAL_CFLAGS += -UANDROID') - - self.WriteLn('\n# Include paths placed before CFLAGS/CPPFLAGS') - includes = list(config.get('include_dirs', [])) - includes.extend(extracted_includes) - includes = map(Sourceify, map(self.LocalPathify, includes)) - includes = self.NormalizeIncludePaths(includes) - self.WriteList(includes, 'LOCAL_C_INCLUDES') - self.WriteLn('LOCAL_C_INCLUDES := $(GYP_COPIED_SOURCE_ORIGIN_DIRS) ' - '$(LOCAL_C_INCLUDES)') - - self.WriteLn('\n# Flags passed to only C++ (and not C) files.') - self.WriteList(config.get('cflags_cc'), 'LOCAL_CPPFLAGS') - - - def WriteSources(self, spec, configs, extra_sources): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - We need to handle shared_intermediate directory source files as - a special case by copying them to the intermediate directory and - treating them as a genereated sources. Otherwise the Android build - rules won't pick them up. - - Args: - spec, configs: input from gyp. - extra_sources: Sources generated from Actions or Rules. - """ - sources = filter(make.Compilable, spec.get('sources', [])) - generated_not_sources = [x for x in extra_sources if not make.Compilable(x)] - extra_sources = filter(make.Compilable, extra_sources) - - # Determine and output the C++ extension used by these sources. - # We simply find the first C++ file and use that extension. - all_sources = sources + extra_sources - local_cpp_extension = '.cpp' - for source in all_sources: - (root, ext) = os.path.splitext(source) - if IsCPPExtension(ext): - local_cpp_extension = ext - break - if local_cpp_extension != '.cpp': - self.WriteLn('LOCAL_CPP_EXTENSION := %s' % local_cpp_extension) - - # We need to move any non-generated sources that are coming from the - # shared intermediate directory out of LOCAL_SRC_FILES and put them - # into LOCAL_GENERATED_SOURCES. We also need to move over any C++ files - # that don't match our local_cpp_extension, since Android will only - # generate Makefile rules for a single LOCAL_CPP_EXTENSION. - local_files = [] - for source in sources: - (root, ext) = os.path.splitext(source) - if '$(gyp_shared_intermediate_dir)' in source: - extra_sources.append(source) - elif '$(gyp_intermediate_dir)' in source: - extra_sources.append(source) - elif IsCPPExtension(ext) and ext != local_cpp_extension: - extra_sources.append(source) - else: - local_files.append(os.path.normpath(os.path.join(self.path, source))) - - # For any generated source, if it is coming from the shared intermediate - # directory then we add a Make rule to copy them to the local intermediate - # directory first. This is because the Android LOCAL_GENERATED_SOURCES - # must be in the local module intermediate directory for the compile rules - # to work properly. If the file has the wrong C++ extension, then we add - # a rule to copy that to intermediates and use the new version. - final_generated_sources = [] - # If a source file gets copied, we still need to add the orginal source - # directory as header search path, for GCC searches headers in the - # directory that contains the source file by default. 
- origin_src_dirs = [] - for source in extra_sources: - local_file = source - if not '$(gyp_intermediate_dir)/' in local_file: - basename = os.path.basename(local_file) - local_file = '$(gyp_intermediate_dir)/' + basename - (root, ext) = os.path.splitext(local_file) - if IsCPPExtension(ext) and ext != local_cpp_extension: - local_file = root + local_cpp_extension - if local_file != source: - self.WriteLn('%s: %s' % (local_file, self.LocalPathify(source))) - self.WriteLn('\tmkdir -p $(@D); cp $< $@') - origin_src_dirs.append(os.path.dirname(source)) - final_generated_sources.append(local_file) - - # We add back in all of the non-compilable stuff to make sure that the - # make rules have dependencies on them. - final_generated_sources.extend(generated_not_sources) - self.WriteList(final_generated_sources, 'LOCAL_GENERATED_SOURCES') - - origin_src_dirs = gyp.common.uniquer(origin_src_dirs) - origin_src_dirs = map(Sourceify, map(self.LocalPathify, origin_src_dirs)) - self.WriteList(origin_src_dirs, 'GYP_COPIED_SOURCE_ORIGIN_DIRS') - - self.WriteList(local_files, 'LOCAL_SRC_FILES') - - # Write out the flags used to compile the source; this must be done last - # so that GYP_COPIED_SOURCE_ORIGIN_DIRS can be used as an include path. - self.WriteSourceFlags(spec, configs) - - - def ComputeAndroidModule(self, spec): - """Return the Android module name used for a gyp spec. - - We use the complete qualified target name to avoid collisions between - duplicate targets in different directories. We also add a suffix to - distinguish gyp-generated module names. - """ - - if int(spec.get('android_unmangled_name', 0)): - assert self.type != 'shared_library' or self.target.startswith('lib') - return self.target - - if self.type == 'shared_library': - # For reasons of convention, the Android build system requires that all - # shared library modules are named 'libfoo' when generating -l flags. - prefix = 'lib_' - else: - prefix = '' - - if spec['toolset'] == 'host': - suffix = '_host_gyp' - else: - suffix = '_gyp' - - if self.path: - name = '%s%s_%s%s' % (prefix, self.path, self.target, suffix) - else: - name = '%s%s%s' % (prefix, self.target, suffix) - - return make.StringToMakefileVariable(name) - - - def ComputeOutputParts(self, spec): - """Return the 'output basename' of a gyp spec, split into filename + ext. - - Android libraries must be named the same thing as their module name, - otherwise the linker can't find them, so product_name and so on must be - ignored if we are building a library, and the "lib" prepending is - not done for Android. - """ - assert self.type != 'loadable_module' # TODO: not supported? - - target = spec['target_name'] - target_prefix = '' - target_ext = '' - if self.type == 'static_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.a' - elif self.type == 'shared_library': - target = self.ComputeAndroidModule(spec) - target_ext = '.so' - elif self.type == 'none': - target_ext = '.stamp' - elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) - - if self.type != 'static_library' and self.type != 'shared_library': - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' + product_ext - - target_stem = target_prefix + target - return (target_stem, target_ext) - - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. 
- - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - return ''.join(self.ComputeOutputParts(spec)) - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - if self.type == 'executable' and self.toolset == 'host': - # We install host executables into shared_intermediate_dir so they can be - # run by gyp rules that refer to PRODUCT_DIR. - path = '$(gyp_shared_intermediate_dir)' - elif self.type == 'shared_library': - if self.toolset == 'host': - path = '$(HOST_OUT_INTERMEDIATE_LIBRARIES)' - else: - path = '$(TARGET_OUT_INTERMEDIATE_LIBRARIES)' - else: - # Other targets just get built into their intermediate dir. - if self.toolset == 'host': - path = '$(call intermediates-dir-for,%s,%s,true)' % (self.android_class, - self.android_module) - else: - path = '$(call intermediates-dir-for,%s,%s)' % (self.android_class, - self.android_module) - - assert spec.get('product_dir') is None # TODO: not supported? - return os.path.join(path, self.ComputeOutputBasename(spec)) - - - def NormalizeLdFlags(self, ld_flags): - """ Clean up ldflags from gyp file. - Remove any ldflags that contain android_top_dir. - - Args: - ld_flags: ldflags from gyp files. - - Returns: - clean ldflags - """ - clean_ldflags = [] - for flag in ld_flags: - if self.android_top_dir in flag: - continue - clean_ldflags.append(flag) - return clean_ldflags - - def NormalizeIncludePaths(self, include_paths): - """ Normalize include_paths. - Convert absolute paths to relative to the Android top directory; - filter out include paths that are already brought in by the Android build - system. - - Args: - include_paths: A list of unprocessed include paths. - Returns: - A list of normalized include paths. - """ - normalized = [] - for path in include_paths: - if path[0] == '/': - path = gyp.common.RelativePath(path, self.android_top_dir) - - # Filter out the Android standard search path. - if path not in android_standard_include_paths: - normalized.append(path) - return normalized - - def ExtractIncludesFromCFlags(self, cflags): - """Extract includes "-I..." out from cflags - - Args: - cflags: A list of compiler flags, which may be mixed with "-I.." - Returns: - A tuple of lists: (clean_clfags, include_paths). "-I.." is trimmed. - """ - clean_cflags = [] - include_paths = [] - if cflags: - for flag in cflags: - if flag.startswith('-I'): - include_paths.append(flag[2:]) - else: - clean_cflags.append(flag) - - return (clean_cflags, include_paths) - - def ComputeAndroidLibraryModuleNames(self, libraries): - """Compute the Android module names from libraries, ie spec.get('libraries') - - Args: - libraries: the value of spec.get('libraries') - Returns: - A tuple (static_lib_modules, dynamic_lib_modules) - """ - static_lib_modules = [] - dynamic_lib_modules = [] - for libs in libraries: - # Libs can have multiple words. - for lib in libs.split(): - # Filter the system libraries, which are added by default by the Android - # build system. 
- if (lib == '-lc' or lib == '-lstdc++' or lib == '-lm' or - lib.endswith('libgcc.a')): - continue - match = re.search(r'([^/]+)\.a$', lib) - if match: - static_lib_modules.append(match.group(1)) - continue - match = re.search(r'([^/]+)\.so$', lib) - if match: - dynamic_lib_modules.append(match.group(1)) - continue - # "-lstlport" -> libstlport - if lib.startswith('-l'): - if lib.endswith('_static'): - static_lib_modules.append('lib' + lib[2:]) - else: - dynamic_lib_modules.append('lib' + lib[2:]) - return (static_lib_modules, dynamic_lib_modules) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteTargetFlags(self, spec, configs, link_deps): - """Write Makefile code to specify the link flags and library dependencies. - - spec, configs: input from gyp. - link_deps: link dependency list; see ComputeDeps() - """ - config = configs[spec['default_configuration']] - - # LDFLAGS - ldflags = list(config.get('ldflags', [])) - static_flags, dynamic_flags = self.ComputeAndroidLibraryModuleNames( - ldflags) - self.WriteLn('') - self.WriteList(self.NormalizeLdFlags(ldflags), 'LOCAL_LDFLAGS') - - # Libraries (i.e. -lfoo) - libraries = gyp.common.uniquer(spec.get('libraries', [])) - static_libs, dynamic_libs = self.ComputeAndroidLibraryModuleNames( - libraries) - - # Link dependencies (i.e. libfoo.a, libfoo.so) - static_link_deps = [x[1] for x in link_deps if x[0] == 'static'] - shared_link_deps = [x[1] for x in link_deps if x[0] == 'shared'] - self.WriteLn('') - self.WriteList(static_flags + static_libs + static_link_deps, - 'LOCAL_STATIC_LIBRARIES') - self.WriteLn('# Enable grouping to fix circular references') - self.WriteLn('LOCAL_GROUP_STATIC_LIBRARIES := true') - self.WriteLn('') - self.WriteList(dynamic_flags + dynamic_libs + shared_link_deps, - 'LOCAL_SHARED_LIBRARIES') - - - def WriteTarget(self, spec, configs, deps, link_deps, part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Rules for final target.') - - if self.type != 'none': - self.WriteTargetFlags(spec, configs, link_deps) - - # Add to the set of targets which represent the gyp 'all' target. We use the - # name 'gyp_all_modules' as the Android build system doesn't allow the use - # of the Make target 'all' and because 'all_modules' is the equivalent of - # the Make target 'all' on Android. - if part_of_all: - self.WriteLn('# Add target alias to "gyp_all_modules" target.') - self.WriteLn('.PHONY: gyp_all_modules') - self.WriteLn('gyp_all_modules: %s' % self.android_module) - self.WriteLn('') - - # Add an alias from the gyp target name to the Android module name. This - # simplifies manual builds of the target, and is required by the test - # framework. 
- if self.target != self.android_module: - self.WriteLn('# Alias gyp target name.') - self.WriteLn('.PHONY: %s' % self.target) - self.WriteLn('%s: %s' % (self.target, self.android_module)) - self.WriteLn('') - - # Add the command to trigger build of the target type depending - # on the toolset. Ex: BUILD_STATIC_LIBRARY vs. BUILD_HOST_STATIC_LIBRARY - # NOTE: This has to come last! - modifier = '' - if self.toolset == 'host': - modifier = 'HOST_' - if self.type == 'static_library': - self.WriteLn('include $(BUILD_%sSTATIC_LIBRARY)' % modifier) - elif self.type == 'shared_library': - self.WriteLn('LOCAL_PRELINK_MODULE := false') - self.WriteLn('include $(BUILD_%sSHARED_LIBRARY)' % modifier) - elif self.type == 'executable': - if self.toolset == 'host': - self.WriteLn('LOCAL_MODULE_PATH := $(gyp_shared_intermediate_dir)') - else: - # Don't install target executables for now, as it results in them being - # included in ROM. This can be revisited if there's a reason to install - # them later. - self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') - self.WriteLn('include $(BUILD_%sEXECUTABLE)' % modifier) - else: - self.WriteLn('LOCAL_MODULE_PATH := $(PRODUCT_OUT)/gyp_stamp') - self.WriteLn('LOCAL_UNINSTALLABLE_MODULE := true') - self.WriteLn() - self.WriteLn('include $(BUILD_SYSTEM)/base_rules.mk') - self.WriteLn() - self.WriteLn('$(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)') - self.WriteLn('\t$(hide) echo "Gyp timestamp: $@"') - self.WriteLn('\t$(hide) mkdir -p $(dir $@)') - self.WriteLn('\t$(hide) touch $@') - - - def WriteList(self, value_list, variable=None, prefix='', - quoter=make.QuoteIfNecessary, local_pathify=False): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - values = '' - if value_list: - value_list = [quoter(prefix + l) for l in value_list] - if local_pathify: - value_list = [self.LocalPathify(l) for l in value_list] - values = ' \\\n\t' + ' \\\n\t'.join(value_list) - self.fp.write('%s :=%s\n\n' % (variable, values)) - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def LocalPathify(self, path): - """Convert a subdirectory-relative path into a normalized path which starts - with the make variable $(LOCAL_PATH) (i.e. the top of the project tree). - Absolute paths, or paths that contain variables, are just normalized.""" - if '$(' in path or os.path.isabs(path): - # path is not a file in the project tree in this case, but calling - # normpath is still important for trimming trailing slashes. - return os.path.normpath(path) - local_path = os.path.join('$(LOCAL_PATH)', self.path, path) - local_path = os.path.normpath(local_path) - # Check that normalizing the path didn't ../ itself out of $(LOCAL_PATH) - # - i.e. that the resulting path is still inside the project tree. The - # path may legitimately have ended up containing just $(LOCAL_PATH), though, - # so we don't look for a slash. - assert local_path.startswith('$(LOCAL_PATH)'), ( - 'Path %s attempts to escape from gyp path %s !)' % (path, self.path)) - return local_path - - - def ExpandInputRoot(self, template, expansion, dirname): - if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: - return template - path = template % { - 'INPUT_ROOT': expansion, - 'INPUT_DIRNAME': dirname, - } - return path - - -def PerformBuild(data, configurations, params): - # The android backend only supports the default configuration. 
- options = params['options'] - makefile = os.path.abspath(os.path.join(options.toplevel_dir, - 'GypAndroid.mk')) - env = dict(os.environ) - env['ONE_SHOT_MAKEFILE'] = makefile - arguments = ['make', '-C', os.environ['ANDROID_BUILD_TOP'], 'gyp_all_modules'] - print 'Building: %s' % arguments - subprocess.check_call(arguments, env=env) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - limit_to_target_all = generator_flags.get('limit_to_target_all', False) - android_top_dir = os.environ.get('ANDROID_BUILD_TOP') - assert android_top_dir, '$ANDROID_BUILD_TOP not set; you need to run lunch.' - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' - makefile_name = 'GypAndroid' + options.suffix + '.mk' - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - assert not options.generator_output, ( - 'The Android backend does not support options.generator_output.') - make.ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - - root_makefile.write(header) - - # We set LOCAL_PATH just once, here, to the top of the project tree. This - # allows all the other paths we use to be relative to the Android.mk file, - # as the Android build system expects. - root_makefile.write('\nLOCAL_PATH := $(call my-dir)\n') - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - android_modules = {} - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - relative_build_file = gyp.common.RelativePath(build_file, - options.toplevel_dir) - build_files.add(relative_build_file) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. 
- relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - part_of_all = (qualified_target in needed_targets and - not int(spec.get('suppress_wildcard', False))) - if limit_to_target_all and not part_of_all: - continue - - relative_target = gyp.common.QualifiedTarget(relative_build_file, target, - toolset) - writer = AndroidMkWriter(android_top_dir) - android_module = writer.Write(qualified_target, relative_target, base_path, - output_file, spec, configs, - part_of_all=part_of_all) - if android_module in android_modules: - print ('ERROR: Android module names must be unique. The following ' - 'targets both generate Android module name %s.\n %s\n %s' % - (android_module, android_modules[android_module], - qualified_target)) - return - android_modules[android_module] = qualified_target - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - # Some tools need to know the absolute path of the top directory. - root_makefile.write('GYP_ABS_ANDROID_TOP_DIR := $(shell pwd)\n') - root_makefile.write('GYP_DEFAULT_CONFIGURATION := %s\n' % - default_configuration) - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - root_makefile.write('include $(LOCAL_PATH)/' + include_file + '\n') - root_makefile.write('\n') - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/dump_dependency_json.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/dump_dependency_json.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/dump_dependency_json.py 2012-10-26 08:14:28.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/dump_dependency_json.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,93 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import collections -import os -import gyp -import gyp.common -import gyp.msvs_emulation -import json -import sys - -generator_supports_multiple_toolsets = True - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = { -} -for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR', - 'LIB_DIR', 'SHARED_LIB_DIR']: - # Some gyp steps fail if these are empty(!). 
- generator_default_variables[dirname] = 'dir' -for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', - 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', - 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', - 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', - 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', - 'CONFIGURATION_NAME']: - generator_default_variables[unused] = '' - - -def CalculateVariables(default_variables, params): - generator_flags = params.get('generator_flags', {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault('OS', gyp.common.GetFlavor(params)) - - flavor = gyp.common.GetFlavor(params) - if flavor =='win': - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. - import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or - '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - if generator_flags.get('adjust_static_libraries', False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GenerateOutput(target_list, target_dicts, data, params): - # Map of target -> list of targets it depends on. - edges = {} - - # Queue of targets to visit. - targets_to_visit = target_list[:] - - while len(targets_to_visit) > 0: - target = targets_to_visit.pop() - if target in edges: - continue - edges[target] = [] - - for dep in target_dicts[target].get('dependencies', []): - edges[target].append(dep) - targets_to_visit.append(dep) - - filename = 'dump.json' - f = open(filename, 'w') - json.dump(edges, f) - f.close() - print 'Wrote json to %s.' % filename diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/eclipse.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/eclipse.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/eclipse.py 2013-02-13 19:15:24.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/eclipse.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,277 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""GYP backend that generates Eclipse CDT settings files. - -This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML -files that can be imported into an Eclipse CDT project. The XML file contains a -list of include paths and symbols (i.e. defines). 
- -Because a full .cproject definition is not created by this generator, it's not -possible to properly define the include dirs and symbols for each file -individually. Instead, one set of includes/symbols is generated for the entire -project. This works fairly well (and is a vast improvement in general), but may -still result in a few indexer issues here and there. - -This generator has no automated tests, so expect it to be broken. -""" - -from xml.sax.saxutils import escape -import os.path -import subprocess -import gyp -import gyp.common -import shlex - -generator_wants_static_library_dependencies_adjusted = False - -generator_default_variables = { -} - -for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: - # Some gyp steps fail if these are empty(!). - generator_default_variables[dirname] = 'dir' - -for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', - 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', - 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', - 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', - 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', - 'CONFIGURATION_NAME']: - generator_default_variables[unused] = '' - -# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as -# part of the path when dealing with generated headers. This value will be -# replaced dynamically for each configuration. -generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \ - '$SHARED_INTERMEDIATE_DIR' - - -def CalculateVariables(default_variables, params): - generator_flags = params.get('generator_flags', {}) - for key, val in generator_flags.items(): - default_variables.setdefault(key, val) - default_variables.setdefault('OS', gyp.common.GetFlavor(params)) - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - if generator_flags.get('adjust_static_libraries', False): - global generator_wants_static_library_dependencies_adjusted - generator_wants_static_library_dependencies_adjusted = True - - -def GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediate_dirs, config_name): - """Calculate the set of include directories to be used. - - Returns: - A list including all the include_dir's specified for every target followed - by any include directories that were added as cflag compiler options. - """ - - gyp_includes_set = set() - compiler_includes_list = [] - - for target_name in target_list: - target = target_dicts[target_name] - if config_name in target['configurations']: - config = target['configurations'][config_name] - - # Look for any include dirs that were explicitly added via cflags. This - # may be done in gyp files to force certain includes to come at the end. - # TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and - # remove this. - cflags = config['cflags'] - for cflag in cflags: - include_dir = '' - if cflag.startswith('-I'): - include_dir = cflag[2:] - if include_dir and not include_dir in compiler_includes_list: - compiler_includes_list.append(include_dir) - - # Find standard gyp include dirs. 
- if config.has_key('include_dirs'): - include_dirs = config['include_dirs'] - for shared_intermediate_dir in shared_intermediate_dirs: - for include_dir in include_dirs: - include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR', - shared_intermediate_dir) - if not os.path.isabs(include_dir): - base_dir = os.path.dirname(target_name) - - include_dir = base_dir + '/' + include_dir - include_dir = os.path.abspath(include_dir) - - if not include_dir in gyp_includes_set: - gyp_includes_set.add(include_dir) - - - # Generate a list that has all the include dirs. - all_includes_list = list(gyp_includes_set) - all_includes_list.sort() - for compiler_include in compiler_includes_list: - if not compiler_include in gyp_includes_set: - all_includes_list.append(compiler_include) - - # All done. - return all_includes_list - - -def GetCompilerPath(target_list, target_dicts, data): - """Determine a command that can be used to invoke the compiler. - - Returns: - If this is a gyp project that has explicit make settings, try to determine - the compiler from that. Otherwise, see if a compiler was specified via the - CC_target environment variable. - """ - - # First, see if the compiler is configured in make's settings. - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_dict = data[build_file].get('make_global_settings', {}) - for key, value in make_global_settings_dict: - if key in ['CC', 'CXX']: - return value - - # Check to see if the compiler was specified as an environment variable. - for key in ['CC_target', 'CC', 'CXX']: - compiler = os.environ.get(key) - if compiler: - return compiler - - return 'gcc' - - -def GetAllDefines(target_list, target_dicts, data, config_name): - """Calculate the defines for a project. - - Returns: - A dict that includes explict defines declared in gyp files along with all of - the default defines that the compiler uses. - """ - - # Get defines declared in the gyp files. - all_defines = {} - for target_name in target_list: - target = target_dicts[target_name] - - if config_name in target['configurations']: - config = target['configurations'][config_name] - for define in config['defines']: - split_define = define.split('=', 1) - if len(split_define) == 1: - split_define.append('1') - if split_define[0].strip() in all_defines: - # Already defined - continue - - all_defines[split_define[0].strip()] = split_define[1].strip() - - # Get default compiler defines (if possible). - cc_target = GetCompilerPath(target_list, target_dicts, data) - if cc_target: - command = shlex.split(cc_target) - command.extend(['-E', '-dM', '-']) - cpp_proc = subprocess.Popen(args=command, cwd='.', - stdin=subprocess.PIPE, stdout=subprocess.PIPE) - cpp_output = cpp_proc.communicate()[0] - cpp_lines = cpp_output.split('\n') - for cpp_line in cpp_lines: - if not cpp_line.strip(): - continue - cpp_line_parts = cpp_line.split(' ', 2) - key = cpp_line_parts[1] - if len(cpp_line_parts) >= 3: - val = cpp_line_parts[2] - else: - val = '1' - all_defines[key] = val - - return all_defines - - -def WriteIncludePaths(out, eclipse_langs, include_dirs): - """Write the includes section of a CDT settings export file.""" - - out.write('
  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths">\n') - out.write('    <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write('    <language name="%s">\n' % lang) - for include_dir in include_dirs: - out.write('      <includepath workspace_path="false">%s</includepath>\n' % - include_dir) - out.write('    </language>\n') - out.write('  </section>\n') - - -def WriteMacros(out, eclipse_langs, defines): - """Write the macros section of a CDT settings export file.""" - - out.write('  <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">\n') - out.write('    <language name="holder for library settings"></language>\n') - for lang in eclipse_langs: - out.write('    <language name="%s">\n' % lang) - for key in sorted(defines.iterkeys()): - out.write('      <macro><name>%s</name><value>%s</value></macro>\n' % - (escape(key), escape(defines[key]))) - out.write('    </language>\n') - out.write('  </section>
\n') - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.join(generator_flags.get('output_dir', 'out'), - config_name) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - # Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the - # SHARED_INTERMEDIATE_DIR. Include both possible locations. - shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'), - os.path.join(toplevel_build, 'gen')] - - if not os.path.exists(toplevel_build): - os.makedirs(toplevel_build) - out = open(os.path.join(toplevel_build, 'eclipse-cdt-settings.xml'), 'w') - - out.write('\n') - out.write('\n') - - eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File', - 'GNU C++', 'GNU C', 'Assembly'] - include_dirs = GetAllIncludeDirectories(target_list, target_dicts, - shared_intermediate_dirs, config_name) - WriteIncludePaths(out, eclipse_langs, include_dirs) - defines = GetAllDefines(target_list, target_dicts, data, config_name) - WriteMacros(out, eclipse_langs, defines) - - out.write('\n') - out.close() - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate an XML settings file that can be imported into a CDT project.""" - - if params['options'].generator_output: - raise NotImplementedError, "--generator_output not implemented for eclipse" - - user_config = params.get('generator_flags', {}).get('config', None) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, - user_config) - else: - config_names = target_dicts[target_list[0]]['configurations'].keys() - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name) - diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/gypd.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/gypd.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/gypd.py 2011-11-28 16:07:19.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/gypd.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,87 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypd output module - -This module produces gyp input as its output. Output files are given the -.gypd extension to avoid overwriting the .gyp files that they are generated -from. Internal references to .gyp files (such as those found in -"dependencies" sections) are not adjusted to point to .gypd files instead; -unlike other paths, which are relative to the .gyp or .gypd file, such paths -are relative to the directory from which gyp was run to create the .gypd file. - -This generator module is intended to be a sample and a debugging aid, hence -the "d" for "debug" in .gypd. It is useful to inspect the results of the -various merges, expansions, and conditional evaluations performed by gyp -and to see a representation of what would be fed to a generator module. 
- -It's not advisable to rename .gypd files produced by this module to .gyp, -because they will have all merges, expansions, and evaluations already -performed and the relevant constructs not present in the output; paths to -dependencies may be wrong; and various sections that do not belong in .gyp -files such as such as "included_files" and "*_excluded" will be present. -Output will also be stripped of comments. This is not intended to be a -general-purpose gyp pretty-printer; for that, you probably just want to -run "pprint.pprint(eval(open('source.gyp').read()))", which will still strip -comments but won't do all of the other things done to this module's output. - -The specific formatting of the output generated by this module is subject -to change. -""" - - -import gyp.common -import errno -import os -import pprint - - -# These variables should just be spit back out as variable references. -_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_DIRNAME', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -# gypd doesn't define a default value for OS like many other generator -# modules. Specify "-D OS=whatever" on the command line to provide a value. -generator_default_variables = { -} - -# gypd supports multiple toolsets -generator_supports_multiple_toolsets = True - -# TODO(mark): This always uses <, which isn't right. The input module should -# notify the generator to tell it which phase it is operating in, and this -# module should use < for the early phase and then switch to > for the late -# phase. Bonus points for carrying @ back into the output too. -for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - output_files = {} - for qualified_target in target_list: - [input_file, target] = \ - gyp.common.ParseQualifiedTarget(qualified_target)[0:2] - - if input_file[-4:] != '.gyp': - continue - input_file_stem = input_file[:-4] - output_file = input_file_stem + params['options'].suffix + '.gypd' - - if not output_file in output_files: - output_files[output_file] = input_file - - for output_file, input_file in output_files.iteritems(): - output = open(output_file, 'w') - pprint.pprint(data[input_file], output) - output.close() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/gypsh.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/gypsh.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/gypsh.py 2011-11-28 16:07:19.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/gypsh.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,56 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""gypsh output module - -gypsh is a GYP shell. It's not really a generator per se. All it does is -fire up an interactive Python session with a few local variables set to the -variables passed to the generator. Like gypd, it's intended as a debugging -aid, to facilitate the exploration of .gyp structures after being processed -by the input module. - -The expected usage is "gyp -f gypsh -D OS=desired_os". -""" - - -import code -import sys - - -# All of this stuff about generator variables was lovingly ripped from gypd.py. -# That module has a much better description of what's going on and why. 
-_generator_identity_variables = [ - 'EXECUTABLE_PREFIX', - 'EXECUTABLE_SUFFIX', - 'INTERMEDIATE_DIR', - 'PRODUCT_DIR', - 'RULE_INPUT_ROOT', - 'RULE_INPUT_DIRNAME', - 'RULE_INPUT_EXT', - 'RULE_INPUT_NAME', - 'RULE_INPUT_PATH', - 'SHARED_INTERMEDIATE_DIR', -] - -generator_default_variables = { -} - -for v in _generator_identity_variables: - generator_default_variables[v] = '<(%s)' % v - - -def GenerateOutput(target_list, target_dicts, data, params): - locals = { - 'target_list': target_list, - 'target_dicts': target_dicts, - 'data': data, - } - - # Use a banner that looks like the stock Python one and like what - # code.interact uses by default, but tack on something to indicate what - # locals are available, and identify gypsh. - banner='Python %s on %s\nlocals.keys() = %s\ngypsh' % \ - (sys.version, sys.platform, repr(sorted(locals.keys()))) - - code.interact(banner, local=locals) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/make.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/make.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/make.py 2013-02-26 02:59:54.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/make.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2148 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -# Notes: -# -# This is all roughly based on the Makefile system used by the Linux -# kernel, but is a non-recursive make -- we put the entire dependency -# graph in front of make and let it figure it out. -# -# The code below generates a separate .mk file for each target, but -# all are sourced by the top-level Makefile. This means that all -# variables in .mk-files clobber one another. Be careful to use := -# where appropriate for immediate evaluation, and similarly to watch -# that you're not relying on a variable value to last beween different -# .mk files. -# -# TODOs: -# -# Global settings and utility functions are currently stuffed in the -# toplevel Makefile. It may make sense to generate some .mk files on -# the side to keep the the files readable. - -import os -import re -import sys -import subprocess -import gyp -import gyp.common -import gyp.xcode_emulation -from gyp.common import GetEnvironFallback - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni', - 'SHARED_INTERMEDIATE_DIR': '$(obj)/gen', - 'PRODUCT_DIR': '$(builddir)', - 'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python. - 'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python. - 'RULE_INPUT_PATH': '$(abspath $<)', - 'RULE_INPUT_EXT': '$(suffix $<)', - 'RULE_INPUT_NAME': '$(notdir $<)', - 'CONFIGURATION_NAME': '$(BUILDTYPE)', -} - -# Make supports multiple toolsets -generator_supports_multiple_toolsets = True - -# Request sorted dependencies in the order from dependents to dependencies. -generator_wants_sorted_dependencies = False - -# Placates pylint. 
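# Illustrative aside, not part of the gyp sources in this patch: the
# RULE_INPUT_* entries in generator_default_variables above are left as
# Python %-style placeholders ("%(INPUT_ROOT)s", "%(INPUT_DIRNAME)s") so the
# generator can fill them in per rule source.  A minimal sketch of that
# substitution; the helper name and the output template are made up for
# illustration (the real expansion is done by the generator's
# ExpandInputRoot(), whose definition is outside this excerpt).
import os

def expand_rule_placeholders(template, rule_source):
    dirname, basename = os.path.split(rule_source)
    root = os.path.splitext(basename)[0]
    return template % {'INPUT_ROOT': root, 'INPUT_DIRNAME': dirname}

# e.g. a codegen rule whose outputs are declared as
# '$(obj)/gen/%(INPUT_ROOT)s.pb.cc':
print(expand_rule_placeholders('$(obj)/gen/%(INPUT_ROOT)s.pb.cc',
                               'dictionary/user_dictionary.proto'))
# -> $(obj)/gen/user_dictionary.pb.cc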
-generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Make generator. - import gyp.generator.xcode as xcode_generator - global generator_additional_non_configuration_keys - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - global generator_additional_path_sections - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'}) - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. - default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR','$(builddir)/lib.$(TOOLSET)') - default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)') - - -def CalculateGeneratorInputInfo(params): - """Calculate the generator specific info that gets fed to input (called by - gyp).""" - generator_flags = params.get('generator_flags', {}) - android_ndk_version = generator_flags.get('android_ndk_version', None) - # Android NDK requires a strict link order. - if android_ndk_version: - global generator_wants_sorted_dependencies - generator_wants_sorted_dependencies = True - - -def ensure_directory_exists(path): - dir = os.path.dirname(path) - if dir and not os.path.exists(dir): - os.makedirs(dir) - - -# The .d checking code below uses these functions: -# wildcard, sort, foreach, shell, wordlist -# wildcard can handle spaces, the rest can't. -# Since I could find no way to make foreach work with spaces in filenames -# correctly, the .d files have spaces replaced with another character. The .d -# file for -# Chromium\ Framework.framework/foo -# is for example -# out/Release/.deps/out/Release/Chromium?Framework.framework/foo -# This is the replacement character. -SPACE_REPLACEMENT = '?' - - -LINK_COMMANDS_LINUX = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) - -# We support two kinds of shared objects (.so): -# 1) shared_library, which is just bundling together many dependent libraries -# into a link line. 
-# 2) loadable_module, which is generating a module intended for dlopen(). -# -# They differ only slightly: -# In the former case, we want to package all dependent code into the .so. -# In the latter case, we want to package just the API exposed by the -# outermost module. -# This means shared_library uses --whole-archive, while loadable_module doesn't. -# (Note that --whole-archive is incompatible with the --start-group used in -# normal linking.) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -""" - -LINK_COMMANDS_MAC = """\ -quiet_cmd_alink = LIBTOOL-STATIC $@ -cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^) - -quiet_cmd_link = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -# TODO(thakis): Find out and document the difference between shared_library and -# loadable_module on mac. -quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS) - -# TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass -# -bundle -single_module here (for osmesa.so). -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - -LINK_COMMANDS_ANDROID = """\ -quiet_cmd_alink = AR($(TOOLSET)) $@ -cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^) - -quiet_cmd_alink_thin = AR($(TOOLSET)) $@ -cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^) - -# Due to circular dependencies between libraries :(, we wrap the -# special "figure out circular dependencies" flags around the entire -# input list during linking. -quiet_cmd_link = LINK($(TOOLSET)) $@ -quiet_cmd_link_host = LINK($(TOOLSET)) $@ -cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS) -cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS) - -# Other shared-object link notes: -# - Set SONAME to the library filename so our binaries don't reference -# the local, absolute paths used on the link command-line. 
-quiet_cmd_solink = SOLINK($(TOOLSET)) $@ -cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS) - -quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS) -quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@ -cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS) -""" - - -# Header of toplevel Makefile. -# This should go into the build tree, but it's easier to keep it here for now. -SHARED_HEADER = ("""\ -# We borrow heavily from the kernel build setup, though we are simpler since -# we don't have Kconfig tweaking settings on us. - -# The implicit make rules have it looking for RCS files, among other things. -# We instead explicitly write all the rules we care about. -# It's even quicker (saves ~200ms) to pass -r on the command line. -MAKEFLAGS=-r - -# The source directory tree. -srcdir := %(srcdir)s -abs_srcdir := $(abspath $(srcdir)) - -# The name of the builddir. -builddir_name ?= %(builddir)s - -# The V=1 flag on command line makes us verbosely print command lines. -ifdef V - quiet= -else - quiet=quiet_ -endif - -# Specify BUILDTYPE=Release on the command line for a release build. -BUILDTYPE ?= %(default_configuration)s - -# Directory all our build output goes into. -# Note that this must be two directories beneath src/ for unit tests to pass, -# as they reach into the src/ directory for data with relative paths. -builddir ?= $(builddir_name)/$(BUILDTYPE) -abs_builddir := $(abspath $(builddir)) -depsdir := $(builddir)/.deps - -# Object output directory. -obj := $(builddir)/obj -abs_obj := $(abspath $(obj)) - -# We build up a list of every single one of the targets so we can slurp in the -# generated dependency rule Makefiles in one pass. -all_deps := - -%(make_global_settings)s - -# C++ apps need to be linked with g++. -# -# Note: flock is used to seralize linking. Linking is a memory-intensive -# process so running parallel links can often lead to thrashing. To disable -# the serialization, override LINK via an envrionment variable as follows: -# -# export LINK=g++ -# -# This will allow make to invoke N linker processes as specified in -jN. -LINK ?= %(flock)s $(builddir)/linker.lock $(CXX.target) - -CC.target ?= %(CC.target)s -CFLAGS.target ?= $(CFLAGS) -CXX.target ?= %(CXX.target)s -CXXFLAGS.target ?= $(CXXFLAGS) -LINK.target ?= %(LINK.target)s -LDFLAGS.target ?= $(LDFLAGS) -AR.target ?= $(AR) - -# TODO(evan): move all cross-compilation logic to gyp-time so we don't need -# to replicate this environment fallback in make as well. -CC.host ?= %(CC.host)s -CFLAGS.host ?= -CXX.host ?= %(CXX.host)s -CXXFLAGS.host ?= -LINK.host ?= %(LINK.host)s -LDFLAGS.host ?= -AR.host ?= %(AR.host)s - -# Define a dir function that can handle spaces. -# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions -# "leading spaces cannot appear in the text of the first argument as written. -# These characters can be put into the argument value by variable substitution." 
-empty := -space := $(empty) $(empty) - -# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces -replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1) -unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1) -dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1))) - -# Flags to make gcc output dependency info. Note that you need to be -# careful here to use the flags that ccache and distcc can understand. -# We write to a dep file on the side first and then rename at the end -# so we can't end up with a broken dep file. -depfile = $(depsdir)/$(call replace_spaces,$@).d -DEPFLAGS = -MMD -MF $(depfile).raw - -# We have to fixup the deps output in a few ways. -# (1) the file output should mention the proper .o file. -# ccache or distcc lose the path to the target, so we convert a rule of -# the form: -# foobar.o: DEP1 DEP2 -# into -# path/to/foobar.o: DEP1 DEP2 -# (2) we want missing files not to cause us to fail to build. -# We want to rewrite -# foobar.o: DEP1 DEP2 \\ -# DEP3 -# to -# DEP1: -# DEP2: -# DEP3: -# so if the files are missing, they're just considered phony rules. -# We have to do some pretty insane escaping to get those backslashes -# and dollar signs past make, the shell, and sed at the same time. -# Doesn't work with spaces, but that's fine: .d files have spaces in -# their names replaced with other characters.""" -r""" -define fixup_dep -# The depfile may not exist if the input file didn't have any #includes. -touch $(depfile).raw -# Fixup path as in (1). -sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile) -# Add extra rules as in (2). -# We remove slashes and replace spaces with new lines; -# remove blank lines; -# delete the first line and append a colon to the remaining lines. -sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\ - grep -v '^$$' |\ - sed -e 1d -e 's|$$|:|' \ - >> $(depfile) -rm $(depfile).raw -endef -""" -""" -# Command definitions: -# - cmd_foo is the actual command to run; -# - quiet_cmd_foo is the brief-output summary of the command. - -quiet_cmd_cc = CC($(TOOLSET)) $@ -cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $< - -quiet_cmd_cxx = CXX($(TOOLSET)) $@ -cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -%(extra_commands)s -quiet_cmd_touch = TOUCH $@ -cmd_touch = touch $@ - -quiet_cmd_copy = COPY $@ -# send stderr to /dev/null to ignore messages when linking directories. -cmd_copy = ln -f "$<" "$@" 2>/dev/null || (rm -rf "$@" && cp -af "$<" "$@") - -%(link_commands)s -""" - -r""" -# Define an escape_quotes function to escape single quotes. -# This allows us to handle quotes properly as long as we always use -# use single quotes and escape_quotes. -escape_quotes = $(subst ','\'',$(1)) -# This comment is here just to include a ' to unconfuse syntax highlighting. -# Define an escape_vars function to escape '$' variable syntax. -# This allows us to read/write command lines with shell variables (e.g. -# $LD_LIBRARY_PATH), without triggering make substitution. -escape_vars = $(subst $$,$$$$,$(1)) -# Helper that expands to a shell command to echo a string exactly as it is in -# make. This uses printf instead of echo because printf's behaviour with respect -# to escape sequences is more portable than echo's across different shells -# (e.g., dash, bash). 
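# Illustrative aside, not part of the gyp sources in this patch: escape_quotes
# above uses the standard POSIX idiom for putting a single quote inside a
# single-quoted word: close the quote, emit \', reopen the quote.  A minimal
# Python sketch of the same transformation (the generator's own
# EscapeShellArgument helper, defined further below in this file, does the
# same thing):

def quote_for_posix_shell(s):
    return "'" + s.replace("'", "'\\''") + "'"

print(quote_for_posix_shell("it's fine"))
# -> 'it'\''s fine'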
-exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))' -""" -""" -# Helper to compare the command we're about to run against the command -# we logged the last time we ran the command. Produces an empty -# string (false) when the commands match. -# Tricky point: Make has no string-equality test function. -# The kernel uses the following, but it seems like it would have false -# positives, where one string reordered its arguments. -# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\ -# $(filter-out $(cmd_$@), $(cmd_$(1)))) -# We instead substitute each for the empty string into the other, and -# say they're equal if both substitutions produce the empty string. -# .d files contain """ + SPACE_REPLACEMENT + \ - """ instead of spaces, take that into account. -command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\ - $(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1)))) - -# Helper that is non-empty when a prerequisite changes. -# Normally make does this implicitly, but we force rules to always run -# so we can check their command lines. -# $? -- new prerequisites -# $| -- order-only dependencies -prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?)) - -# Helper that executes all postbuilds until one fails. -define do_postbuilds - @E=0;\\ - for p in $(POSTBUILDS); do\\ - eval $$p;\\ - E=$$?;\\ - if [ $$E -ne 0 ]; then\\ - break;\\ - fi;\\ - done;\\ - if [ $$E -ne 0 ]; then\\ - rm -rf "$@";\\ - exit $$E;\\ - fi -endef - -# do_cmd: run a command via the above cmd_foo names, if necessary. -# Should always run for a given target to handle command-line changes. -# Second argument, if non-zero, makes it do asm/C/C++ dependency munging. -# Third argument, if non-zero, makes it do POSTBUILDS processing. -# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \ - SPACE_REPLACEMENT + """ for -# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \ - """ characters. -define do_cmd -$(if $(or $(command_changed),$(prereq_changed)), - @$(call exact_echo, $($(quiet)cmd_$(1))) - @mkdir -p "$(call dirx,$@)" "$(dir $(depfile))" - $(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))), - @$(cmd_$(1)) - @echo " $(quiet_cmd_$(1)): Finished", - @$(cmd_$(1)) - ) - @$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile) - @$(if $(2),$(fixup_dep)) - $(if $(and $(3), $(POSTBUILDS)), - $(call do_postbuilds) - ) -) -endef - -# Declare the "%(default_target)s" target first so it is the default, -# even though we don't have the deps yet. -.PHONY: %(default_target)s -%(default_target)s: - -# make looks for ways to re-generate included makefiles, but in our case, we -# don't have a direct way. Explicitly telling make that it has nothing to do -# for them makes it go faster. -%%.d: ; - -# Use FORCE_DO_CMD to force a target to run. Should be coupled with -# do_cmd. -.PHONY: FORCE_DO_CMD -FORCE_DO_CMD: - -""") - -SHARED_HEADER_MAC_COMMANDS = """ -quiet_cmd_objc = CXX($(TOOLSET)) $@ -cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< - -quiet_cmd_objcxx = CXX($(TOOLSET)) $@ -cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# Commands for precompiled header files. 
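# Illustrative aside, not part of the gyp sources in this patch: the
# command_changed helper defined earlier in this header works around make's
# lack of a string-equality function by substituting each command into the
# other; only identical strings leave no residue either way.  A minimal
# Python model of that test, assuming the two command lines are plain strings:

def command_changed(logged_cmd, current_cmd):
    # A non-empty result (true) means the commands differ and the rule re-runs.
    return bool(current_cmd.replace(logged_cmd, '') or
                logged_cmd.replace(current_cmd, ''))

assert not command_changed('g++ -c foo.cc', 'g++ -c foo.cc')
assert command_changed('g++ -c foo.cc', 'g++ -O2 -c foo.cc')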
-quiet_cmd_pch_c = CXX($(TOOLSET)) $@ -cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_cc = CXX($(TOOLSET)) $@ -cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $< -quiet_cmd_pch_m = CXX($(TOOLSET)) $@ -cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $< -quiet_cmd_pch_mm = CXX($(TOOLSET)) $@ -cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $< - -# gyp-mac-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_mac_tool = MACTOOL $(4) $< -cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@" - -quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@ -cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4) - -quiet_cmd_infoplist = INFOPLIST $@ -cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@" -""" - -SHARED_HEADER_SUN_COMMANDS = """ -# gyp-sun-tool is written next to the root Makefile by gyp. -# Use $(4) for the command, since $(2) and $(3) are used as flag by do_cmd -# already. -quiet_cmd_sun_tool = SUNTOOL $(4) $< -cmd_sun_tool = ./gyp-sun-tool $(4) $< "$@" -""" - - -def WriteRootHeaderSuffixRules(writer): - extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower) - - writer.write('# Suffix rules, putting all outputs into $(obj).\n') - for ext in extensions: - writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - - writer.write('\n# Try building from generated source, too.\n') - for ext in extensions: - writer.write( - '$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - writer.write('\n') - for ext in extensions: - writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext) - writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext]) - writer.write('\n') - - -SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\ -# Suffix rules, putting all outputs into $(obj). -""") - - -SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\ -# Try building from generated source, too. -""") - - -SHARED_FOOTER = """\ -# "all" is a concatenation of the "all" targets from all the included -# sub-makefiles. This is just here to clarify. -all: - -# Add in dependency-tracking rules. $(all_deps) is the list of every single -# target in our tree. Only consider the ones with .d (dependency) info: -d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d)) -ifneq ($(d_files),) - include $(d_files) -endif -""" - -header = """\ -# This file is generated by gyp; do not edit. - -""" - -# Maps every compilable file extension to the do_cmd that compiles it. 
-COMPILABLE_EXTENSIONS = { - '.c': 'cc', - '.cc': 'cxx', - '.cpp': 'cxx', - '.cxx': 'cxx', - '.s': 'cc', - '.S': 'cc', -} - -def Compilable(filename): - """Return true if the file is compilable (should be in OBJS).""" - for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS): - if res: - return True - return False - - -def Linkable(filename): - """Return true if the file is linkable (should be on the link line).""" - return filename.endswith('.o') - - -def Target(filename): - """Translate a compilable filename to its .o target.""" - return os.path.splitext(filename)[0] + '.o' - - -def EscapeShellArgument(s): - """Quotes an argument so that it will be interpreted literally by a POSIX - shell. Taken from - http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python - """ - return "'" + s.replace("'", "'\\''") + "'" - - -def EscapeMakeVariableExpansion(s): - """Make has its own variable expansion syntax using $. We must escape it for - string to be interpreted literally.""" - return s.replace('$', '$$') - - -def EscapeCppDefine(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = EscapeShellArgument(s) - s = EscapeMakeVariableExpansion(s) - # '#' characters must be escaped even embedded in a string, else Make will - # treat it as the start of a comment. - return s.replace('#', r'\#') - - -def QuoteIfNecessary(string): - """TODO: Should this ideally be replaced with one or more of the above - functions?""" - if '"' in string: - string = '"' + string.replace('"', '\\"') + '"' - return string - - -def StringToMakefileVariable(string): - """Convert a string to a value that is acceptable as a make variable name.""" - return re.sub('[^a-zA-Z0-9_]', '_', string) - - -srcdir_prefix = '' -def Sourceify(path): - """Convert a path to its source directory form.""" - if '$(' in path: - return path - if os.path.isabs(path): - return path - return srcdir_prefix + path - - -def QuoteSpaces(s, quote=r'\ '): - return s.replace(' ', quote) - - -# Map from qualified target to path to output. -target_outputs = {} -# Map from qualified target to any linkable output. A subset -# of target_outputs. E.g. when mybinary depends on liba, we want to -# include liba in the linker line; when otherbinary depends on -# mybinary, we just want to build mybinary first. -target_link_deps = {} - - -class MakefileWriter: - """MakefileWriter packages up the writing of one target-specific foobar.mk. - - Its only real entry point is Write(), and is mostly used for namespacing. - """ - - def __init__(self, generator_flags, flavor): - self.generator_flags = generator_flags - self.flavor = flavor - - self.suffix_rules_srcdir = {} - self.suffix_rules_objdir1 = {} - self.suffix_rules_objdir2 = {} - - # Generate suffix rules for all compilable extensions. - for ext in COMPILABLE_EXTENSIONS.keys(): - # Suffix rules for source folder. - self.suffix_rules_srcdir.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - - # Suffix rules for generated source files. 
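# Illustrative aside, not part of the gyp sources in this patch: each entry in
# these suffix-rule dictionaries is the template below with the extension and
# its do_cmd name substituted via %-formatting.  For '.cc' the source-dir rule
# comes out as:
#
#   $(obj).$(TOOLSET)/$(TARGET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
#       @$(call do_cmd,cxx,1)
#
# A standalone reconstruction of that expansion:
ext, cmd = '.cc', 'cxx'
rule = ("$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n"
        "\t@$(call do_cmd,%s,1)\n" % (ext, cmd))
print(rule)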
- self.suffix_rules_objdir1.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - self.suffix_rules_objdir2.update({ext: ("""\ -$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD - @$(call do_cmd,%s,1) -""" % (ext, COMPILABLE_EXTENSIONS[ext]))}) - - - def Write(self, qualified_target, base_path, output_filename, spec, configs, - part_of_all): - """The main entry point: writes a .mk file for a single target. - - Arguments: - qualified_target: target we're generating - base_path: path relative to source root we're building in, used to resolve - target-relative paths - output_filename: output .mk file name to write - spec, configs: gyp info - part_of_all: flag indicating this target is part of 'all' - """ - ensure_directory_exists(output_filename) - - self.fp = open(output_filename, 'w') - - self.fp.write(header) - - self.qualified_target = qualified_target - self.path = base_path - self.target = spec['target_name'] - self.type = spec['type'] - self.toolset = spec['toolset'] - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - if self.flavor == 'mac': - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - else: - self.xcode_settings = None - - deps, link_deps = self.ComputeDeps(spec) - - # Some of the generation below can add extra output, sources, or - # link dependencies. All of the out params of the functions that - # follow use names like extra_foo. - extra_outputs = [] - extra_sources = [] - extra_link_deps = [] - extra_mac_bundle_resources = [] - mac_bundle_deps = [] - - if self.is_mac_bundle: - self.output = self.ComputeMacBundleOutput(spec) - self.output_binary = self.ComputeMacBundleBinaryOutput(spec) - else: - self.output = self.output_binary = self.ComputeOutput(spec) - - self.is_standalone_static_library = bool( - spec.get('standalone_static_library', 0)) - self._INSTALLABLE_TARGETS = ('executable', 'loadable_module', - 'shared_library') - if (self.is_standalone_static_library or - self.type in self._INSTALLABLE_TARGETS): - self.alias = os.path.basename(self.output) - install_path = self._InstallableTargetInstallPath() - else: - self.alias = self.output - install_path = self.output - - self.WriteLn("TOOLSET := " + self.toolset) - self.WriteLn("TARGET := " + self.target) - - # Actions must come first, since they can generate more OBJs for use below. - if 'actions' in spec: - self.WriteActions(spec['actions'], extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all) - - # Rules must be early like actions. - if 'rules' in spec: - self.WriteRules(spec['rules'], extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all) - - if 'copies' in spec: - self.WriteCopies(spec['copies'], extra_outputs, part_of_all) - - # Bundle resources. - if self.is_mac_bundle: - all_mac_bundle_resources = ( - spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources) - self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps) - self.WriteMacInfoPlist(mac_bundle_deps) - - # Sources. 
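# Illustrative aside, not part of the gyp sources in this patch: the sources
# handling below filters the target's file list down to compilable files and
# only emits suffix rules for extensions that actually occur.  A standalone
# sketch of that filtering, with COMPILABLE_EXTENSIONS copied from earlier in
# this file and made-up example paths:
import os

COMPILABLE_EXTENSIONS = {'.c': 'cc', '.cc': 'cxx', '.cpp': 'cxx',
                         '.cxx': 'cxx', '.s': 'cc', '.S': 'cc'}

sources = ['session/session.cc', 'base/util.cc', 'base/port.h', 'asm/low.S']
compilable = [s for s in sources
              if os.path.splitext(s)[1] in COMPILABLE_EXTENSIONS]
objs = [os.path.splitext(s)[0] + '.o' for s in compilable]   # Target()
extensions = set(os.path.splitext(s)[1] for s in compilable)

assert objs == ['session/session.o', 'base/util.o', 'asm/low.o']
assert extensions == set(['.cc', '.S'])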
- all_sources = spec.get('sources', []) + extra_sources - if all_sources: - self.WriteSources( - configs, deps, all_sources, extra_outputs, - extra_link_deps, part_of_all, - gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)), - self.Pchify)) - sources = filter(Compilable, all_sources) - if sources: - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1) - extensions = set([os.path.splitext(s)[1] for s in sources]) - for ext in extensions: - if ext in self.suffix_rules_srcdir: - self.WriteLn(self.suffix_rules_srcdir[ext]) - self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2) - for ext in extensions: - if ext in self.suffix_rules_objdir1: - self.WriteLn(self.suffix_rules_objdir1[ext]) - for ext in extensions: - if ext in self.suffix_rules_objdir2: - self.WriteLn(self.suffix_rules_objdir2[ext]) - self.WriteLn('# End of this set of suffix rules') - - # Add dependency from bundle to bundle binary. - if self.is_mac_bundle: - mac_bundle_deps.append(self.output_binary) - - self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps, - mac_bundle_deps, extra_outputs, part_of_all) - - # Update global list of target outputs, used in dependency tracking. - target_outputs[qualified_target] = install_path - - # Update global list of link dependencies. - if self.type in ('static_library', 'shared_library'): - target_link_deps[qualified_target] = self.output_binary - - # Currently any versions have the same effect, but in future the behavior - # could be different. - if self.generator_flags.get('android_ndk_version', None): - self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps) - - self.fp.close() - - - def WriteSubMake(self, output_filename, makefile_path, targets, build_dir): - """Write a "sub-project" Makefile. - - This is a small, wrapper Makefile that calls the top-level Makefile to build - the targets from a single gyp file (i.e. a sub-project). - - Arguments: - output_filename: sub-project Makefile name to write - makefile_path: path to the top-level Makefile - targets: list of "all" targets for this sub-project - build_dir: build output directory, relative to the sub-project - """ - ensure_directory_exists(output_filename) - self.fp = open(output_filename, 'w') - self.fp.write(header) - # For consistency with other builders, put sub-project build output in the - # sub-project dir (see test/subdirectory/gyptest-subdir-all.py). - self.WriteLn('export builddir_name ?= %s' % - os.path.join(os.path.dirname(output_filename), build_dir)) - self.WriteLn('.PHONY: all') - self.WriteLn('all:') - if makefile_path: - makefile_path = ' -C ' + makefile_path - self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets))) - self.fp.close() - - - def WriteActions(self, actions, extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all): - """Write Makefile code for any 'actions' from the gyp input. - - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - actions (used to make other pieces dependent on these - actions) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for action in actions: - name = StringToMakefileVariable('%s_%s' % (self.qualified_target, - action['action_name'])) - self.WriteLn('### Rules for action "%s":' % action['action_name']) - inputs = action['inputs'] - outputs = action['outputs'] - - # Build up a list of outputs. 
- # Collect the output dirs we'll need. - dirs = set() - for out in outputs: - dir = os.path.split(out)[0] - if dir: - dirs.add(dir) - if int(action.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - - # Write the actual command. - action_commands = action['action'] - if self.flavor == 'mac': - action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action_commands] - command = gyp.common.EncodePOSIXShellList(action_commands) - if 'message' in action: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message'])) - else: - self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name)) - if len(dirs) > 0: - command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command - - cd_action = 'cd %s; ' % Sourceify(self.path or '.') - - # command and cd_action get written to a toplevel variable called - # cmd_foo. Toplevel variables can't handle things that change per - # makefile like $(TARGET), so hardcode the target. - command = command.replace('$(TARGET)', self.target) - cd_action = cd_action.replace('$(TARGET)', self.target) - - # Set LD_LIBRARY_PATH in case the action runs an executable from this - # build which links to shared libs from this build. - # actions run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:' - '$(builddir)/lib.target:$$LD_LIBRARY_PATH; ' - 'export LD_LIBRARY_PATH; ' - '%s%s' - % (name, cd_action, command)) - self.WriteLn() - outputs = map(self.Absolutify, outputs) - # The makefile rules are all relative to the top dir, but the gyp actions - # are defined relative to their containing dir. This replaces the obj - # variable for the action rule with an absolute version so that the output - # goes in the right place. - # Only write the 'obj' and 'builddir' rules for the "primary" output (:1); - # it's superfluous for the "extra outputs", and this avoids accidentally - # writing duplicate dummy rules for those outputs. - # Same for environment. - self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0])) - self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0])) - self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv()) - - for input in inputs: - assert ' ' not in input, ( - "Spaces in action input filenames not supported (%s)" % input) - for output in outputs: - assert ' ' not in output, ( - "Spaces in action output filenames not supported (%s)" % output) - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)), - part_of_all=part_of_all, command=name) - - # Stuff the outputs in a variable so we can refer to them later. - outputs_variable = 'action_%s_outputs' % name - self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs))) - extra_outputs.append('$(%s)' % outputs_variable) - self.WriteLn() - - self.WriteLn() - - - def WriteRules(self, rules, extra_sources, extra_outputs, - extra_mac_bundle_resources, part_of_all): - """Write Makefile code for any 'rules' from the gyp input. 
- - extra_sources: a list that will be filled in with newly generated source - files, if any - extra_outputs: a list that will be filled in with any outputs of these - rules (used to make other pieces dependent on these rules) - part_of_all: flag indicating this target is part of 'all' - """ - env = self.GetSortedXcodeEnv() - for rule in rules: - name = StringToMakefileVariable('%s_%s' % (self.qualified_target, - rule['rule_name'])) - count = 0 - self.WriteLn('### Generated for rule %s:' % name) - - all_outputs = [] - - for rule_source in rule.get('rule_sources', []): - dirs = set() - (rule_source_dirname, rule_source_basename) = os.path.split(rule_source) - (rule_source_root, rule_source_ext) = \ - os.path.splitext(rule_source_basename) - - outputs = [self.ExpandInputRoot(out, rule_source_root, - rule_source_dirname) - for out in rule['outputs']] - - for out in outputs: - dir = os.path.dirname(out) - if dir: - dirs.add(dir) - if int(rule.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(rule.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - inputs = map(Sourceify, map(self.Absolutify, [rule_source] + - rule.get('inputs', []))) - actions = ['$(call do_cmd,%s_%d)' % (name, count)] - - if name == 'resources_grit': - # HACK: This is ugly. Grit intentionally doesn't touch the - # timestamp of its output file when the file doesn't change, - # which is fine in hash-based dependency systems like scons - # and forge, but not kosher in the make world. After some - # discussion, hacking around it here seems like the least - # amount of pain. - actions += ['@touch --no-create $@'] - - # See the comment in WriteCopies about expanding env vars. - outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs] - inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs] - - outputs = map(self.Absolutify, outputs) - all_outputs += outputs - # Only write the 'obj' and 'builddir' rules for the "primary" output - # (:1); it's superfluous for the "extra outputs", and this avoids - # accidentally writing duplicate dummy rules for those outputs. - self.WriteLn('%s: obj := $(abs_obj)' % outputs[0]) - self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0]) - self.WriteMakeRule(outputs, inputs + ['FORCE_DO_CMD'], actions) - for output in outputs: - assert ' ' not in output, ( - "Spaces in rule filenames not yet supported (%s)" % output) - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - action = [self.ExpandInputRoot(ac, rule_source_root, - rule_source_dirname) - for ac in rule['action']] - mkdirs = '' - if len(dirs) > 0: - mkdirs = 'mkdir -p %s; ' % ' '.join(dirs) - cd_action = 'cd %s; ' % Sourceify(self.path or '.') - - # action, cd_action, and mkdirs get written to a toplevel variable - # called cmd_foo. Toplevel variables can't handle things that change - # per makefile like $(TARGET), so hardcode the target. - if self.flavor == 'mac': - action = [gyp.xcode_emulation.ExpandEnvVars(command, env) - for command in action] - action = gyp.common.EncodePOSIXShellList(action) - action = action.replace('$(TARGET)', self.target) - cd_action = cd_action.replace('$(TARGET)', self.target) - mkdirs = mkdirs.replace('$(TARGET)', self.target) - - # Set LD_LIBRARY_PATH in case the rule runs an executable from this - # build which links to shared libs from this build. 
- # rules run on the host, so they should in theory only use host - # libraries, but until everything is made cross-compile safe, also use - # target libraries. - # TODO(piman): when everything is cross-compile safe, remove lib.target - self.WriteLn( - "cmd_%(name)s_%(count)d = LD_LIBRARY_PATH=" - "$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; " - "export LD_LIBRARY_PATH; " - "%(cd_action)s%(mkdirs)s%(action)s" % { - 'action': action, - 'cd_action': cd_action, - 'count': count, - 'mkdirs': mkdirs, - 'name': name, - }) - self.WriteLn( - 'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % { - 'count': count, - 'name': name, - }) - self.WriteLn() - count += 1 - - outputs_variable = 'rule_%s_outputs' % name - self.WriteList(all_outputs, outputs_variable) - extra_outputs.append('$(%s)' % outputs_variable) - - self.WriteLn('### Finished generating for rule: %s' % name) - self.WriteLn() - self.WriteLn('### Finished generating for all rules') - self.WriteLn('') - - - def WriteCopies(self, copies, extra_outputs, part_of_all): - """Write Makefile code for any 'copies' from the gyp input. - - extra_outputs: a list that will be filled in with any outputs of this action - (used to make other pieces dependent on this action) - part_of_all: flag indicating this target is part of 'all' - """ - self.WriteLn('### Generated for copy rule.') - - variable = StringToMakefileVariable(self.qualified_target + '_copies') - outputs = [] - for copy in copies: - for path in copy['files']: - # Absolutify() may call normpath, and will strip trailing slashes. - path = Sourceify(self.Absolutify(path)) - filename = os.path.split(path)[1] - output = Sourceify(self.Absolutify(os.path.join(copy['destination'], - filename))) - - # If the output path has variables in it, which happens in practice for - # 'copies', writing the environment as target-local doesn't work, - # because the variables are already needed for the target name. - # Copying the environment variables into global make variables doesn't - # work either, because then the .d files will potentially contain spaces - # after variable expansion, and .d file handling cannot handle spaces. - # As a workaround, manually expand variables at gyp time. Since 'copies' - # can't run scripts, there's no need to write the env then. - # WriteDoCmd() will escape spaces for .d files. 
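# Illustrative aside, not part of the gyp sources in this patch: "escape
# spaces for .d files" refers to the SPACE_REPLACEMENT scheme described near
# the top of this file -- spaces become '?' because escaping does not work
# with make's $(sort), $(foreach) and friends.  A minimal sketch of the round
# trip used by WriteDoCmd (QuoteSpaces(output, SPACE_REPLACEMENT)) and by the
# make-side replace_spaces/unreplace_spaces helpers:

SPACE_REPLACEMENT = '?'

def quote_spaces(s, quote=SPACE_REPLACEMENT):
    return s.replace(' ', quote)

def unquote_spaces(s, quote=SPACE_REPLACEMENT):
    return s.replace(quote, ' ')

path = 'out/Release/Chromium Framework.framework/foo'
quoted = quote_spaces(path)
assert quoted == 'out/Release/Chromium?Framework.framework/foo'
assert unquote_spaces(quoted) == path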
- env = self.GetSortedXcodeEnv() - output = gyp.xcode_emulation.ExpandEnvVars(output, env) - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - self.WriteDoCmd([output], [path], 'copy', part_of_all) - outputs.append(output) - self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs)))) - extra_outputs.append('$(%s)' % variable) - self.WriteLn() - - - def WriteMacBundleResources(self, resources, bundle_deps): - """Writes Makefile code for 'mac_bundle_resources'.""" - self.WriteLn('### Generated for mac_bundle_resources') - - for output, res in gyp.xcode_emulation.GetMacBundleResources( - generator_default_variables['PRODUCT_DIR'], self.xcode_settings, - map(Sourceify, map(self.Absolutify, resources))): - self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource', - part_of_all=True) - bundle_deps.append(output) - - - def WriteMacInfoPlist(self, bundle_deps): - """Write Makefile code for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - generator_default_variables['PRODUCT_DIR'], self.xcode_settings, - lambda p: Sourceify(self.Absolutify(p))) - if not info_plist: - return - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' + - os.path.basename(info_plist)) - self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D', - quoter=EscapeCppDefine) - self.WriteMakeRule([intermediate_plist], [info_plist], - ['$(call do_cmd,infoplist)', - # "Convert" the plist so that any weird whitespace changes from the - # preprocessor do not affect the XML parser in mac_tool. - '@plutil -convert xml1 $@ $@']) - info_plist = intermediate_plist - # plists can contain envvars and substitute them into the file. - self.WriteSortedXcodeEnv( - out, self.GetSortedXcodeEnv(additional_settings=extra_env)) - self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist', - part_of_all=True) - bundle_deps.append(out) - - - def WriteSources(self, configs, deps, sources, - extra_outputs, extra_link_deps, - part_of_all, precompiled_header): - """Write Makefile code for any 'sources' from the gyp input. - These are source files necessary to build the current target. - - configs, deps, sources: input from gyp. - extra_outputs: a list of extra outputs this action should be dependent on; - used to serialize action/rules before compilation - extra_link_deps: a list that will be filled in with any outputs of - compilation (to be used in link lines) - part_of_all: flag indicating this target is part of 'all' - """ - - # Write configuration-specific variables for CFLAGS, etc. 
- for configname in sorted(configs.keys()): - config = configs[configname] - self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D', - quoter=EscapeCppDefine) - - if self.flavor == 'mac': - cflags = self.xcode_settings.GetCflags(configname) - cflags_c = self.xcode_settings.GetCflagsC(configname) - cflags_cc = self.xcode_settings.GetCflagsCC(configname) - cflags_objc = self.xcode_settings.GetCflagsObjC(configname) - cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname) - else: - cflags = config.get('cflags') - cflags_c = config.get('cflags_c') - cflags_cc = config.get('cflags_cc') - - self.WriteLn("# Flags passed to all source files."); - self.WriteList(cflags, 'CFLAGS_%s' % configname) - self.WriteLn("# Flags passed to only C files."); - self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname) - self.WriteLn("# Flags passed to only C++ files."); - self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname) - if self.flavor == 'mac': - self.WriteLn("# Flags passed to only ObjC files."); - self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname) - self.WriteLn("# Flags passed to only ObjC++ files."); - self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname) - includes = config.get('include_dirs') - if includes: - includes = map(Sourceify, map(self.Absolutify, includes)) - self.WriteList(includes, 'INCS_%s' % configname, prefix='-I') - - compilable = filter(Compilable, sources) - objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable))) - self.WriteList(objs, 'OBJS') - - for obj in objs: - assert ' ' not in obj, ( - "Spaces in object filenames not supported (%s)" % obj) - self.WriteLn('# Add to the list of files we specially track ' - 'dependencies for.') - self.WriteLn('all_deps += $(OBJS)') - self.WriteLn() - - # Make sure our dependencies are built first. - if deps: - self.WriteMakeRule(['$(OBJS)'], deps, - comment = 'Make sure our dependencies are built ' - 'before any of us.', - order_only = True) - - # Make sure the actions and rules run first. - # If they generate any extra headers etc., the per-.o file dep tracking - # will catch the proper rebuilds, so order only is still ok here. - if extra_outputs: - self.WriteMakeRule(['$(OBJS)'], extra_outputs, - comment = 'Make sure our actions/rules run ' - 'before any of us.', - order_only = True) - - pchdeps = precompiled_header.GetObjDependencies(compilable, objs ) - if pchdeps: - self.WriteLn('# Dependencies from obj files to their precompiled headers') - for source, obj, gch in pchdeps: - self.WriteLn('%s: %s' % (obj, gch)) - self.WriteLn('# End precompiled header dependencies') - - if objs: - extra_link_deps.append('$(OBJS)') - self.WriteLn("""\ -# CFLAGS et al overrides must be target-local. 
-# See "Target-specific Variable Values" in the GNU Make manual.""") - self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)") - self.WriteLn("$(OBJS): GYP_CFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('c') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_CXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('cc') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE))") - if self.flavor == 'mac': - self.WriteLn("$(OBJS): GYP_OBJCFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('m') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_C_$(BUILDTYPE)) " - "$(CFLAGS_OBJC_$(BUILDTYPE))") - self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := " - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "%s " % precompiled_header.GetInclude('mm') + - "$(CFLAGS_$(BUILDTYPE)) " - "$(CFLAGS_CC_$(BUILDTYPE)) " - "$(CFLAGS_OBJCC_$(BUILDTYPE))") - - self.WritePchTargets(precompiled_header.GetPchBuildCommands()) - - # If there are any object files in our input file list, link them into our - # output. - extra_link_deps += filter(Linkable, sources) - - self.WriteLn() - - def WritePchTargets(self, pch_commands): - """Writes make rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - extra_flags = { - 'c': '$(CFLAGS_C_$(BUILDTYPE))', - 'cc': '$(CFLAGS_CC_$(BUILDTYPE))', - 'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))', - 'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))', - }[lang] - var_name = { - 'c': 'GYP_PCH_CFLAGS', - 'cc': 'GYP_PCH_CXXFLAGS', - 'm': 'GYP_PCH_OBJCFLAGS', - 'mm': 'GYP_PCH_OBJCXXFLAGS', - }[lang] - self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) + - "$(DEFS_$(BUILDTYPE)) " - "$(INCS_$(BUILDTYPE)) " - "$(CFLAGS_$(BUILDTYPE)) " + - extra_flags) - - self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input)) - self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang) - self.WriteLn('') - assert ' ' not in gch, ( - "Spaces in gch filenames not supported (%s)" % gch) - self.WriteLn('all_deps += %s' % gch) - self.WriteLn('') - - - def ComputeOutputBasename(self, spec): - """Return the 'output basename' of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - 'libfoobar.so' - """ - assert not self.is_mac_bundle - - if self.flavor == 'mac' and self.type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module'): - return self.xcode_settings.GetExecutablePath() - - target = spec['target_name'] - target_prefix = '' - target_ext = '' - if self.type == 'static_library': - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.a' - elif self.type in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - target_prefix = 'lib' - target_ext = '.so' - elif self.type == 'none': - target = '%s.stamp' % target - elif self.type != 'executable': - print ("ERROR: What output file should be generated?", - "type", self.type, "target", target) - - target_prefix = spec.get('product_prefix', target_prefix) - target = spec.get('product_name', target) - product_ext = spec.get('product_extension') - if product_ext: - target_ext = '.' 
+ product_ext - - return target_prefix + target + target_ext - - - def _InstallImmediately(self): - return self.toolset == 'target' and self.flavor == 'mac' and self.type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module') - - - def ComputeOutput(self, spec): - """Return the 'output' (full output path) of a gyp spec. - - E.g., the loadable module 'foobar' in directory 'baz' will produce - '$(obj)/baz/libfoobar.so' - """ - assert not self.is_mac_bundle - - path = os.path.join('$(obj).' + self.toolset, self.path) - if self.type == 'executable' or self._InstallImmediately(): - path = '$(builddir)' - path = spec.get('product_dir', path) - return os.path.join(path, self.ComputeOutputBasename(spec)) - - - def ComputeMacBundleOutput(self, spec): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = generator_default_variables['PRODUCT_DIR'] - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - - def ComputeMacBundleBinaryOutput(self, spec): - """Return the 'output' (full output path) to the binary in a bundle.""" - path = generator_default_variables['PRODUCT_DIR'] - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - - def ComputeDeps(self, spec): - """Compute the dependencies of a gyp spec. - - Returns a tuple (deps, link_deps), where each is a list of - filenames that will need to be put in front of make for either - building (deps) or linking (link_deps). - """ - deps = [] - link_deps = [] - if 'dependencies' in spec: - deps.extend([target_outputs[dep] for dep in spec['dependencies'] - if target_outputs[dep]]) - for dep in spec['dependencies']: - if dep in target_link_deps: - link_deps.append(target_link_deps[dep]) - deps.extend(link_deps) - # TODO: It seems we need to transitively link in libraries (e.g. -lfoo)? - # This hack makes it work: - # link_deps.extend(spec.get('libraries', [])) - return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps)) - - - def WriteDependencyOnExtraOutputs(self, target, extra_outputs): - self.WriteMakeRule([self.output_binary], extra_outputs, - comment = 'Build our special outputs first.', - order_only = True) - - - def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps, - extra_outputs, part_of_all): - """Write Makefile code to produce the final target of the gyp spec. - - spec, configs: input from gyp. - deps, link_deps: dependency lists; see ComputeDeps() - extra_outputs: any extra outputs that our target should depend on - part_of_all: flag indicating this target is part of 'all' - """ - - self.WriteLn('### Rules for final target.') - - if extra_outputs: - self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs) - self.WriteMakeRule(extra_outputs, deps, - comment=('Preserve order dependency of ' - 'special output on deps.'), - order_only = True) - - target_postbuilds = {} - if self.type != 'none': - for configname in sorted(configs.keys()): - config = configs[configname] - if self.flavor == 'mac': - ldflags = self.xcode_settings.GetLdflags(configname, - generator_default_variables['PRODUCT_DIR'], - lambda p: Sourceify(self.Absolutify(p))) - - # TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on. 
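# Illustrative aside, not part of the gyp sources in this patch:
# ComputeOutputBasename and ComputeOutput, defined earlier in this class,
# encode the naming convention for final targets.  A minimal sketch of that
# convention for the documented example (a loadable_module 'foobar' in
# directory 'baz'), ignoring product_name/product_prefix overrides and the
# mac-specific paths:

def output_basename(target_name, target_type):
    if target_type in ('static_library', 'shared_library', 'loadable_module'):
        name = target_name[3:] if target_name.startswith('lib') else target_name
        ext = '.a' if target_type == 'static_library' else '.so'
        return 'lib' + name + ext
    if target_type == 'none':
        return target_name + '.stamp'
    return target_name  # executable

assert output_basename('foobar', 'loadable_module') == 'libfoobar.so'
# ComputeOutput then joins this onto the per-toolset object directory,
# e.g. '$(obj).target/baz/libfoobar.so' (or '$(builddir)' for executables).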
- gyp_to_build = gyp.common.InvertRelativePath(self.path) - target_postbuild = self.xcode_settings.GetTargetPostbuilds( - configname, - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output))), - QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build, - self.output_binary)))) - if target_postbuild: - target_postbuilds[configname] = target_postbuild - else: - ldflags = config.get('ldflags', []) - # Compute an rpath for this output if needed. - if any(dep.endswith('.so') for dep in deps): - # We want to get the literal string "$ORIGIN" into the link command, - # so we need lots of escaping. - ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset) - ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' % - self.toolset) - self.WriteList(ldflags, 'LDFLAGS_%s' % configname) - if self.flavor == 'mac': - self.WriteList(self.xcode_settings.GetLibtoolflags(configname), - 'LIBTOOLFLAGS_%s' % configname) - libraries = spec.get('libraries') - if libraries: - # Remove duplicate entries - libraries = gyp.common.uniquer(libraries) - if self.flavor == 'mac': - libraries = self.xcode_settings.AdjustLibraries(libraries) - self.WriteList(libraries, 'LIBS') - self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) - self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary)) - - if self.flavor == 'mac': - self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' % - QuoteSpaces(self.output_binary)) - - # Postbuild actions. Like actions, but implicitly depend on the target's - # output. - postbuilds = [] - if self.flavor == 'mac': - if target_postbuilds: - postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))') - postbuilds.extend( - gyp.xcode_emulation.GetSpecPostbuildCommands(spec)) - - if postbuilds: - # Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE), - # so we must output its definition first, since we declare variables - # using ":=". - self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv()) - - for configname in target_postbuilds: - self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' % - (QuoteSpaces(self.output), - configname, - gyp.common.EncodePOSIXShellList(target_postbuilds[configname]))) - - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path])) - for i in xrange(len(postbuilds)): - if not postbuilds[i].startswith('$'): - postbuilds[i] = EscapeShellArgument(postbuilds[i]) - self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output)) - self.WriteLn('%s: POSTBUILDS := %s' % ( - QuoteSpaces(self.output), ' '.join(postbuilds))) - - # A bundle directory depends on its dependencies such as bundle resources - # and bundle binary. When all dependencies have been built, the bundle - # needs to be packaged. - if self.is_mac_bundle: - # If the framework doesn't contain a binary, then nothing depends - # on the actions -- make the framework depend on them directly too. - self.WriteDependencyOnExtraOutputs(self.output, extra_outputs) - - # Bundle dependencies. Note that the code below adds actions to this - # target, so if you move these two lines, move the lines below as well. - self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS') - self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output)) - - # After the framework is built, package it. Needs to happen before - # postbuilds, since postbuilds depend on this. 
- if self.type in ('shared_library', 'loadable_module'): - self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' % - self.xcode_settings.GetFrameworkVersion()) - - # Bundle postbuilds can depend on the whole bundle, so run them after - # the bundle is packaged, not already after the bundle binary is done. - if postbuilds: - self.WriteLn('\t@$(call do_postbuilds)') - postbuilds = [] # Don't write postbuilds for target's output. - - # Needed by test/mac/gyptest-rebuild.py. - self.WriteLn('\t@true # No-op, used by tests') - - # Since this target depends on binary and resources which are in - # nested subfolders, the framework directory will be older than - # its dependencies usually. To prevent this rule from executing - # on every build (expensive, especially with postbuilds), expliclity - # update the time on the framework directory. - self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output)) - - if postbuilds: - assert not self.is_mac_bundle, ('Postbuilds for bundles should be done ' - 'on the bundle, not the binary (target \'%s\')' % self.target) - assert 'product_dir' not in spec, ('Postbuilds do not work with ' - 'custom product_dir') - - if self.type == 'executable': - self.WriteLn('%s: LD_INPUTS := %s' % ( - QuoteSpaces(self.output_binary), - ' '.join(map(QuoteSpaces, link_deps)))) - if self.toolset == 'host' and self.flavor == 'android': - self.WriteDoCmd([self.output_binary], link_deps, 'link_host', - part_of_all, postbuilds=postbuilds) - else: - self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all, - postbuilds=postbuilds) - - elif self.type == 'static_library': - for link_dep in link_deps: - assert ' ' not in link_dep, ( - "Spaces in alink input filenames not supported (%s)" % link_dep) - if (self.flavor not in ('mac', 'win') and not - self.is_standalone_static_library): - self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin', - part_of_all, postbuilds=postbuilds) - else: - self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all, - postbuilds=postbuilds) - elif self.type == 'shared_library': - self.WriteLn('%s: LD_INPUTS := %s' % ( - QuoteSpaces(self.output_binary), - ' '.join(map(QuoteSpaces, link_deps)))) - self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all, - postbuilds=postbuilds) - elif self.type == 'loadable_module': - for link_dep in link_deps: - assert ' ' not in link_dep, ( - "Spaces in module input filenames not supported (%s)" % link_dep) - if self.toolset == 'host' and self.flavor == 'android': - self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host', - part_of_all, postbuilds=postbuilds) - else: - self.WriteDoCmd( - [self.output_binary], link_deps, 'solink_module', part_of_all, - postbuilds=postbuilds) - elif self.type == 'none': - # Write a stamp line. - self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all, - postbuilds=postbuilds) - else: - print "WARNING: no output for", self.type, target - - # Add an alias for each target (if there are any outputs). - # Installable target aliases are created below. - if ((self.output and self.output != self.target) and - (self.type not in self._INSTALLABLE_TARGETS)): - self.WriteMakeRule([self.target], [self.output], - comment='Add target alias', phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [self.target], - comment = 'Add target alias to "all" target.', - phony = True) - - # Add special-case rules for our installable targets. - # 1) They need to install to the build dir or "product" dir. 
- # 2) They get shortcuts for building (e.g. "make chrome"). - # 3) They are part of "make all". - if (self.type in self._INSTALLABLE_TARGETS or - self.is_standalone_static_library): - if self.type == 'shared_library': - file_desc = 'shared library' - elif self.type == 'static_library': - file_desc = 'static library' - else: - file_desc = 'executable' - install_path = self._InstallableTargetInstallPath() - installable_deps = [self.output] - if (self.flavor == 'mac' and not 'product_dir' in spec and - self.toolset == 'target'): - # On mac, products are created in install_path immediately. - assert install_path == self.output, '%s != %s' % ( - install_path, self.output) - - # Point the target alias to the final binary output. - self.WriteMakeRule([self.target], [install_path], - comment='Add target alias', phony = True) - if install_path != self.output: - assert not self.is_mac_bundle # See comment a few lines above. - self.WriteDoCmd([install_path], [self.output], 'copy', - comment = 'Copy this to the %s output path.' % - file_desc, part_of_all=part_of_all) - installable_deps.append(install_path) - if self.output != self.alias and self.alias != self.target: - self.WriteMakeRule([self.alias], installable_deps, - comment = 'Short alias for building this %s.' % - file_desc, phony = True) - if part_of_all: - self.WriteMakeRule(['all'], [install_path], - comment = 'Add %s to "all" target.' % file_desc, - phony = True) - - - def WriteList(self, value_list, variable=None, prefix='', - quoter=QuoteIfNecessary): - """Write a variable definition that is a list of values. - - E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out - foo = blaha blahb - but in a pretty-printed style. - """ - values = '' - if value_list: - value_list = [quoter(prefix + l) for l in value_list] - values = ' \\\n\t' + ' \\\n\t'.join(value_list) - self.fp.write('%s :=%s\n\n' % (variable, values)) - - - def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None, - postbuilds=False): - """Write a Makefile rule that uses do_cmd. - - This makes the outputs dependent on the command line that was run, - as well as support the V= make command line flag. - """ - suffix = '' - if postbuilds: - assert ',' not in command - suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS - self.WriteMakeRule(outputs, inputs, - actions = ['$(call do_cmd,%s%s)' % (command, suffix)], - comment = comment, - force = True) - # Add our outputs to the list of targets we read depfiles from. - # all_deps is only used for deps file reading, and for deps files we replace - # spaces with ? because escaping doesn't work with make's $(sort) and - # other functions. - outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs] - self.WriteLn('all_deps += %s' % ' '.join(outputs)) - - - def WriteMakeRule(self, outputs, inputs, actions=None, comment=None, - order_only=False, force=False, phony=False): - """Write a Makefile rule, with some extra tricks. 
- - outputs: a list of outputs for the rule (note: this is not directly - supported by make; see comments below) - inputs: a list of inputs for the rule - actions: a list of shell commands to run for the rule - comment: a comment to put in the Makefile above the rule (also useful - for making this Python script's code self-documenting) - order_only: if true, makes the dependency order-only - force: if true, include FORCE_DO_CMD as an order-only dep - phony: if true, the rule does not actually generate the named output, the - output is just a name to run the rule - """ - outputs = map(QuoteSpaces, outputs) - inputs = map(QuoteSpaces, inputs) - - if comment: - self.WriteLn('# ' + comment) - if phony: - self.WriteLn('.PHONY: ' + ' '.join(outputs)) - # TODO(evanm): just make order_only a list of deps instead of these hacks. - if order_only: - order_insert = '| ' - pick_output = ' '.join(outputs) - else: - order_insert = '' - pick_output = outputs[0] - if force: - force_append = ' FORCE_DO_CMD' - else: - force_append = '' - if actions: - self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0]) - self.WriteLn('%s: %s%s%s' % (pick_output, order_insert, ' '.join(inputs), - force_append)) - if actions: - for action in actions: - self.WriteLn('\t%s' % action) - if not order_only and len(outputs) > 1: - # If we have more than one output, a rule like - # foo bar: baz - # that for *each* output we must run the action, potentially - # in parallel. That is not what we're trying to write -- what - # we want is that we run the action once and it generates all - # the files. - # http://www.gnu.org/software/hello/manual/automake/Multiple-Outputs.html - # discusses this problem and has this solution: - # 1) Write the naive rule that would produce parallel runs of - # the action. - # 2) Make the outputs seralized on each other, so we won't start - # a parallel run until the first run finishes, at which point - # we'll have generated all the outputs and we're done. - self.WriteLn('%s: %s' % (' '.join(outputs[1:]), outputs[0])) - # Add a dummy command to the "extra outputs" rule, otherwise make seems to - # think these outputs haven't (couldn't have?) changed, and thus doesn't - # flag them as changed (i.e. include in '$?') when evaluating dependent - # rules, which in turn causes do_cmd() to skip running dependent commands. - self.WriteLn('%s: ;' % (' '.join(outputs[1:]))) - self.WriteLn() - - - def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps): - """Write a set of LOCAL_XXX definitions for Android NDK. - - These variable definitions will be used by Android NDK but do nothing for - non-Android applications. - - Arguments: - module_name: Android NDK module name, which must be unique among all - module names. - all_sources: A list of source files (will be filtered by Compilable). - link_deps: A list of link dependencies, which must be sorted in - the order from dependencies to dependents. - """ - if self.type not in ('executable', 'shared_library', 'static_library'): - return - - self.WriteLn('# Variable definitions for Android applications') - self.WriteLn('include $(CLEAR_VARS)') - self.WriteLn('LOCAL_MODULE := ' + module_name) - self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) ' - '$(DEFS_$(BUILDTYPE)) ' - # LOCAL_CFLAGS is applied to both of C and C++. There is - # no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C - # sources. - '$(CFLAGS_C_$(BUILDTYPE)) ' - # $(INCS_$(BUILDTYPE)) includes the prefix '-I' while - # LOCAL_C_INCLUDES does not expect it. 
So put it in - # LOCAL_CFLAGS. - '$(INCS_$(BUILDTYPE))') - # LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred. - self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))') - self.WriteLn('LOCAL_C_INCLUDES :=') - self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)') - - # Detect the C++ extension. - cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0} - default_cpp_ext = '.cpp' - for filename in all_sources: - ext = os.path.splitext(filename)[1] - if ext in cpp_ext: - cpp_ext[ext] += 1 - if cpp_ext[ext] > cpp_ext[default_cpp_ext]: - default_cpp_ext = ext - self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext) - - self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)), - 'LOCAL_SRC_FILES') - - # Filter out those which do not match prefix and suffix and produce - # the resulting list without prefix and suffix. - def DepsToModules(deps, prefix, suffix): - modules = [] - for filepath in deps: - filename = os.path.basename(filepath) - if filename.startswith(prefix) and filename.endswith(suffix): - modules.append(filename[len(prefix):-len(suffix)]) - return modules - - # Retrieve the default value of 'SHARED_LIB_SUFFIX' - params = {'flavor': 'linux'} - default_variables = {} - CalculateVariables(default_variables, params) - - self.WriteList( - DepsToModules(link_deps, - generator_default_variables['SHARED_LIB_PREFIX'], - default_variables['SHARED_LIB_SUFFIX']), - 'LOCAL_SHARED_LIBRARIES') - self.WriteList( - DepsToModules(link_deps, - generator_default_variables['STATIC_LIB_PREFIX'], - generator_default_variables['STATIC_LIB_SUFFIX']), - 'LOCAL_STATIC_LIBRARIES') - - if self.type == 'executable': - self.WriteLn('include $(BUILD_EXECUTABLE)') - elif self.type == 'shared_library': - self.WriteLn('include $(BUILD_SHARED_LIBRARY)') - elif self.type == 'static_library': - self.WriteLn('include $(BUILD_STATIC_LIBRARY)') - self.WriteLn() - - - def WriteLn(self, text=''): - self.fp.write(text + '\n') - - - def GetSortedXcodeEnv(self, additional_settings=None): - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, "$(abs_builddir)", - os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)", - additional_settings) - - - def GetSortedXcodePostbuildEnv(self): - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. - strip_save_file = self.xcode_settings.GetPerTargetSetting( - 'CHROMIUM_STRIP_SAVE_FILE', '') - # Even if strip_save_file is empty, explicitly write it. Else a postbuild - # might pick up an export from an earlier target. - return self.GetSortedXcodeEnv( - additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file}) - - - def WriteSortedXcodeEnv(self, target, env): - for k, v in env: - # For - # foo := a\ b - # the escaped space does the right thing. For - # export foo := a\ b - # it does not -- the backslash is written to the env as literal character. - # So don't escape spaces in |env[k]|. 
- self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v)) - - - def Objectify(self, path): - """Convert a path to its output directory form.""" - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset) - if not '$(obj)' in path: - path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path) - return path - - - def Pchify(self, path, lang): - """Convert a prefix header path to its output directory form.""" - path = self.Absolutify(path) - if '$(' in path: - path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' % - (self.toolset, lang)) - return path - return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path) - - - def Absolutify(self, path): - """Convert a subdirectory-relative path into a base-relative path. - Skips over paths that contain variables.""" - if '$(' in path: - # Don't call normpath in this case, as it might collapse the - # path too aggressively if it features '..'. However it's still - # important to strip trailing slashes. - return path.rstrip('/') - return os.path.normpath(os.path.join(self.path, path)) - - - def ExpandInputRoot(self, template, expansion, dirname): - if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template: - return template - path = template % { - 'INPUT_ROOT': expansion, - 'INPUT_DIRNAME': dirname, - } - return path - - - def _InstallableTargetInstallPath(self): - """Returns the location of the final output for an installable target.""" - # Xcode puts shared_library results into PRODUCT_DIR, and some gyp files - # rely on this. Emulate this behavior for mac. - if (self.type == 'shared_library' and - (self.flavor != 'mac' or self.toolset != 'target')): - # Install all shared libs into a common directory (per toolset) for - # convenient access with LD_LIBRARY_PATH. 
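Editor's aside, not part of the patch: Absolutify above deliberately skips os.path.normpath whenever a path still contains a make variable, since normpath could fold a '$(obj)/..' prefix away. A minimal standalone sketch of that behaviour, with an illustrative name:

import os

def absolutify_sketch(gyp_dir, path):
    # Paths that still contain make variables are only stripped of trailing
    # slashes; everything else is rebased onto the .gyp file's directory.
    if '$(' in path:
        return path.rstrip('/')
    return os.path.normpath(os.path.join(gyp_dir, path))

print(absolutify_sketch('chrome/app', '../common/foo.cc'))  # chrome/common/foo.cc
print(absolutify_sketch('chrome/app', '$(obj)/gen/'))       # $(obj)/gen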
- return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias) - return '$(builddir)/' + self.alias - - -def WriteAutoRegenerationRule(params, root_makefile, makefile_name, - build_files): - """Write the target to regenerate the Makefile.""" - options = params['options'] - build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir) - for filename in params['build_files_arg']] - gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'], - options.toplevel_dir) - if not gyp_binary.startswith(os.sep): - gyp_binary = os.path.join('.', gyp_binary) - root_makefile.write( - "quiet_cmd_regen_makefile = ACTION Regenerating $@\n" - "cmd_regen_makefile = %(cmd)s\n" - "%(makefile_name)s: %(deps)s\n" - "\t$(call do_cmd,regen_makefile)\n\n" % { - 'makefile_name': makefile_name, - 'deps': ' '.join(map(Sourceify, build_files)), - 'cmd': gyp.common.EncodePOSIXShellList( - [gyp_binary, '-fmake'] + - gyp.RegenerateFlags(options) + - build_files_args)}) - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - arguments = ['make'] - if options.toplevel_dir and options.toplevel_dir != '.': - arguments += '-C', options.toplevel_dir - arguments.append('BUILDTYPE=' + config) - print 'Building [%s]: %s' % (config, arguments) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - builddir_name = generator_flags.get('output_dir', 'out') - android_ndk_version = generator_flags.get('android_ndk_version', None) - default_target = generator_flags.get('default_target', 'all') - - def CalculateMakefilePath(build_file, base_name): - """Determine where to write a Makefile for a given gyp file.""" - # Paths in gyp files are relative to the .gyp file, but we want - # paths relative to the source root for the master makefile. Grab - # the path of the .gyp file as the base to relativize against. - # E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp". - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.depth) - # We write the file in the base_path directory. - output_file = os.path.join(options.depth, base_path, base_name) - if options.generator_output: - output_file = os.path.join(options.generator_output, output_file) - base_path = gyp.common.RelativePath(os.path.dirname(build_file), - options.toplevel_dir) - return base_path, output_file - - # TODO: search for the first non-'Default' target. This can go - # away when we add verification that all targets have the - # necessary configurations. - default_configuration = None - toolsets = set([target_dicts[target]['toolset'] for target in target_list]) - for target in target_list: - spec = target_dicts[target] - if spec['default_configuration'] != 'Default': - default_configuration = spec['default_configuration'] - break - if not default_configuration: - default_configuration = 'Default' - - srcdir = '.' 
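Editor's aside, not part of the patch: WriteAutoRegenerationRule above is plain %-formatting over a make-rule template. Rendering the same template with made-up values shows the rule that lands in the root Makefile; the file names and command below are hypothetical.

TEMPLATE = (
    "quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
    "cmd_regen_makefile = %(cmd)s\n"
    "%(makefile_name)s: %(deps)s\n"
    "\t$(call do_cmd,regen_makefile)\n\n")

print(TEMPLATE % {
    'makefile_name': 'Makefile',
    'deps': 'build/all.gyp build/common.gypi',
    'cmd': './gyp_binary -fmake build/all.gyp',
})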
- makefile_name = 'Makefile' + options.suffix - makefile_path = os.path.join(options.toplevel_dir, makefile_name) - if options.generator_output: - global srcdir_prefix - makefile_path = os.path.join(options.generator_output, makefile_path) - srcdir = gyp.common.RelativePath(srcdir, options.generator_output) - srcdir_prefix = '$(srcdir)/' - - flock_command= 'flock' - header_params = { - 'default_target': default_target, - 'builddir': builddir_name, - 'default_configuration': default_configuration, - 'flock': flock_command, - 'flock_index': 1, - 'link_commands': LINK_COMMANDS_LINUX, - 'extra_commands': '', - 'srcdir': srcdir, - } - if flavor == 'mac': - flock_command = './gyp-mac-tool flock' - header_params.update({ - 'flock': flock_command, - 'flock_index': 2, - 'link_commands': LINK_COMMANDS_MAC, - 'extra_commands': SHARED_HEADER_MAC_COMMANDS, - }) - elif flavor == 'android': - header_params.update({ - 'link_commands': LINK_COMMANDS_ANDROID, - }) - elif flavor == 'solaris': - header_params.update({ - 'flock': './gyp-sun-tool flock', - 'flock_index': 2, - 'extra_commands': SHARED_HEADER_SUN_COMMANDS, - }) - elif flavor == 'freebsd': - header_params.update({ - 'flock': 'lockf', - }) - - header_params.update({ - 'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'), - 'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'), - 'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'), - 'LINK.target': GetEnvironFallback(('LD_target', 'LD'), '$(LINK)'), - 'CC.host': GetEnvironFallback(('CC_host',), 'gcc'), - 'AR.host': GetEnvironFallback(('AR_host',), 'ar'), - 'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'), - 'LINK.host': GetEnvironFallback(('LD_host',), 'g++'), - }) - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings_array = data[build_file].get('make_global_settings', []) - wrappers = {} - wrappers['LINK'] = '%s $(builddir)/linker.lock' % flock_command - for key, value in make_global_settings_array: - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value - make_global_settings = '' - for key, value in make_global_settings_array: - if re.match('.*_wrapper', key): - continue - if value[0] != '$': - value = '$(abspath %s)' % value - wrapper = wrappers.get(key) - if wrapper: - value = '%s %s' % (wrapper, value) - del wrappers[key] - if key in ('CC', 'CC.host', 'CXX', 'CXX.host'): - make_global_settings += ( - 'ifneq (,$(filter $(origin %s), undefined default))\n' % key) - # Let gyp-time envvars win over global settings. - if key in os.environ: - value = os.environ[key] - make_global_settings += ' %s = %s\n' % (key, value) - make_global_settings += 'endif\n' - else: - make_global_settings += '%s ?= %s\n' % (key, value) - # TODO(ukai): define cmd when only wrapper is specified in - # make_global_settings. - - header_params['make_global_settings'] = make_global_settings - - ensure_directory_exists(makefile_path) - root_makefile = open(makefile_path, 'w') - root_makefile.write(SHARED_HEADER % header_params) - # Currently any versions have the same effect, but in future the behavior - # could be different. - if android_ndk_version: - root_makefile.write( - '# Define LOCAL_PATH for build of Android applications.\n' - 'LOCAL_PATH := $(call my-dir)\n' - '\n') - for toolset in toolsets: - root_makefile.write('TOOLSET := %s\n' % toolset) - WriteRootHeaderSuffixRules(root_makefile) - - # Put build-time support tools next to the root Makefile. 
- dest_path = os.path.dirname(makefile_path) - gyp.common.CopyTool(flavor, dest_path) - - # Find the list of targets that derive from the gyp file(s) being built. - needed_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, target_dicts, build_file): - needed_targets.add(target) - - build_files = set() - include_list = set() - for qualified_target in target_list: - build_file, target, toolset = gyp.common.ParseQualifiedTarget( - qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings_array == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets.") - - build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir)) - included_files = data[build_file]['included_files'] - for included_file in included_files: - # The included_files entries are relative to the dir of the build file - # that included them, so we have to undo that and then make them relative - # to the root dir. - relative_include_file = gyp.common.RelativePath( - gyp.common.UnrelativePath(included_file, build_file), - options.toplevel_dir) - abs_include_file = os.path.abspath(relative_include_file) - # If the include file is from the ~/.gyp dir, we should use absolute path - # so that relocating the src dir doesn't break the path. - if (params['home_dot_gyp'] and - abs_include_file.startswith(params['home_dot_gyp'])): - build_files.add(abs_include_file) - else: - build_files.add(relative_include_file) - - base_path, output_file = CalculateMakefilePath(build_file, - target + '.' + toolset + options.suffix + '.mk') - - spec = target_dicts[qualified_target] - configs = spec['configurations'] - - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - writer = MakefileWriter(generator_flags, flavor) - writer.Write(qualified_target, base_path, output_file, spec, configs, - part_of_all=qualified_target in needed_targets) - - # Our root_makefile lives at the source root. Compute the relative path - # from there to the output_file for including. - mkfile_rel_path = gyp.common.RelativePath(output_file, - os.path.dirname(makefile_path)) - include_list.add(mkfile_rel_path) - - # Write out per-gyp (sub-project) Makefiles. - depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd()) - for build_file in build_files: - # The paths in build_files were relativized above, so undo that before - # testing against the non-relativized items in target_list and before - # calculating the Makefile path. - build_file = os.path.join(depth_rel_path, build_file) - gyp_targets = [target_dicts[target]['target_name'] for target in target_list - if target.startswith(build_file) and - target in needed_targets] - # Only generate Makefiles for gyp files with targets. - if not gyp_targets: - continue - base_path, output_file = CalculateMakefilePath(build_file, - os.path.splitext(os.path.basename(build_file))[0] + '.Makefile') - makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path), - os.path.dirname(output_file)) - writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets, - builddir_name) - - - # Write out the sorted list of includes. - root_makefile.write('\n') - for include_file in sorted(include_list): - # We wrap each .mk include in an if statement so users can tell make to - # not load a file by setting NO_LOAD. 
The below make code says, only - # load the .mk file if the .mk filename doesn't start with a token in - # NO_LOAD. - root_makefile.write( - "ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n" - " $(findstring $(join ^,$(prefix)),\\\n" - " $(join ^," + include_file + ")))),)\n") - root_makefile.write(" include " + include_file + "\n") - root_makefile.write("endif\n") - root_makefile.write('\n') - - if (not generator_flags.get('standalone') - and generator_flags.get('auto_regeneration', True)): - WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files) - - root_makefile.write(SHARED_FOOTER) - - root_makefile.close() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/msvs.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/msvs.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/msvs.py 2013-02-25 22:29:21.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/msvs.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,3111 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import ntpath -import os -import posixpath -import re -import subprocess -import sys - -import gyp.common -import gyp.easy_xml as easy_xml -import gyp.MSVSNew as MSVSNew -import gyp.MSVSProject as MSVSProject -import gyp.MSVSSettings as MSVSSettings -import gyp.MSVSToolFile as MSVSToolFile -import gyp.MSVSUserFile as MSVSUserFile -import gyp.MSVSUtil as MSVSUtil -import gyp.MSVSVersion as MSVSVersion -from gyp.common import GypError - - -# Regular expression for validating Visual Studio GUIDs. If the GUID -# contains lowercase hex letters, MSVS will be fine. However, -# IncrediBuild BuildConsole will parse the solution file, but then -# silently skip building the target causing hard to track down errors. -# Note that this only happens with the BuildConsole, and does not occur -# if IncrediBuild is executed from inside Visual Studio. This regex -# validates that the string looks like a GUID with all uppercase hex -# letters. -VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$') - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '.exe', - 'STATIC_LIB_PREFIX': '', - 'SHARED_LIB_PREFIX': '', - 'STATIC_LIB_SUFFIX': '.lib', - 'SHARED_LIB_SUFFIX': '.dll', - 'INTERMEDIATE_DIR': '$(IntDir)', - 'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate', - 'OS': 'win', - 'PRODUCT_DIR': '$(OutDir)', - 'LIB_DIR': '$(OutDir)lib', - 'RULE_INPUT_ROOT': '$(InputName)', - 'RULE_INPUT_DIRNAME': '$(InputDir)', - 'RULE_INPUT_EXT': '$(InputExt)', - 'RULE_INPUT_NAME': '$(InputFileName)', - 'RULE_INPUT_PATH': '$(InputPath)', - 'CONFIGURATION_NAME': '$(ConfigurationName)', -} - - -# The msvs specific sections that hold paths -generator_additional_path_sections = [ - 'msvs_cygwin_dirs', - 'msvs_props', -] - - -generator_additional_non_configuration_keys = [ - 'msvs_cygwin_dirs', - 'msvs_cygwin_shell', - 'msvs_shard', -] - - -# List of precompiled header related keys. -precomp_keys = [ - 'msvs_precompiled_header', - 'msvs_precompiled_source', -] - - -cached_username = None - - -cached_domain = None - - -# TODO(gspencer): Switch the os.environ calls to be -# win32api.GetDomainName() and win32api.GetUserName() once the -# python version in depot_tools has been updated to work on Vista -# 64-bit. 
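Editor's aside, not part of the patch: the VALID_MSVS_GUID_CHARS pattern above only accepts uppercase hex because, as the deleted comment explains, lowercase-hex GUIDs make IncrediBuild's BuildConsole silently skip targets even though Visual Studio accepts them. A small check using the same pattern; the helper name and GUID values are arbitrary examples.

import re

VALID_MSVS_GUID_CHARS = re.compile(r'^[A-F0-9\-]+$')  # same pattern as in the deleted module

def looks_like_msvs_guid(guid):
    # True only for uppercase hex digits and dashes.
    return VALID_MSVS_GUID_CHARS.match(guid) is not None

print(looks_like_msvs_guid('E5027E46-1E1B-4D5F-9A2C-0F2A54C0F21B'))  # True
print(looks_like_msvs_guid('e5027e46-1e1b-4d5f-9a2c-0f2a54c0f21b'))  # False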
-def _GetDomainAndUserName(): - if sys.platform not in ('win32', 'cygwin'): - return ('DOMAIN', 'USERNAME') - global cached_username - global cached_domain - if not cached_domain or not cached_username: - domain = os.environ.get('USERDOMAIN') - username = os.environ.get('USERNAME') - if not domain or not username: - call = subprocess.Popen(['net', 'config', 'Workstation'], - stdout=subprocess.PIPE) - config = call.communicate()[0] - username_re = re.compile('^User name\s+(\S+)', re.MULTILINE) - username_match = username_re.search(config) - if username_match: - username = username_match.group(1) - domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE) - domain_match = domain_re.search(config) - if domain_match: - domain = domain_match.group(1) - cached_domain = domain - cached_username = username - return (cached_domain, cached_username) - -fixpath_prefix = None - - -def _NormalizedSource(source): - """Normalize the path. - - But not if that gets rid of a variable, as this may expand to something - larger than one directory. - - Arguments: - source: The path to be normalize.d - - Returns: - The normalized path. - """ - normalized = os.path.normpath(source) - if source.count('$') == normalized.count('$'): - source = normalized - return source - - -def _FixPath(path): - """Convert paths to a form that will make sense in a vcproj file. - - Arguments: - path: The path to convert, may contain / etc. - Returns: - The path with all slashes made into backslashes. - """ - if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$': - path = os.path.join(fixpath_prefix, path) - path = path.replace('/', '\\') - path = _NormalizedSource(path) - if path and path[-1] == '\\': - path = path[:-1] - return path - - -def _FixPaths(paths): - """Fix each of the paths of the list.""" - return [_FixPath(i) for i in paths] - - -def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None, - list_excluded=True): - """Converts a list split source file paths into a vcproj folder hierarchy. - - Arguments: - sources: A list of source file paths split. - prefix: A list of source file path layers meant to apply to each of sources. - excluded: A set of excluded files. - - Returns: - A hierarchy of filenames and MSVSProject.Filter objects that matches the - layout of the source tree. - For example: - _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']], - prefix=['joe']) - --> - [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']), - MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])] - """ - if not prefix: prefix = [] - result = [] - excluded_result = [] - folders = dict() - # Gather files into the final result, excluded, or folders. - for s in sources: - if len(s) == 1: - filename = _NormalizedSource('\\'.join(prefix + s)) - if filename in excluded: - excluded_result.append(filename) - else: - result.append(filename) - else: - if not folders.get(s[0]): - folders[s[0]] = [] - folders[s[0]].append(s[1:]) - # Add a folder for excluded files. - if excluded_result and list_excluded: - excluded_folder = MSVSProject.Filter('_excluded_files', - contents=excluded_result) - result.append(excluded_folder) - # Populate all the folders. 
- for f in folders: - contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f], - excluded=excluded, - list_excluded=list_excluded) - contents = MSVSProject.Filter(f, contents=contents) - result.append(contents) - - return result - - -def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False): - if not value: return - _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset) - - -def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False): - # TODO(bradnelson): ugly hack, fix this more generally!!! - if 'Directories' in setting or 'Dependencies' in setting: - if type(value) == str: - value = value.replace('/', '\\') - else: - value = [i.replace('/', '\\') for i in value] - if not tools.get(tool_name): - tools[tool_name] = dict() - tool = tools[tool_name] - if tool.get(setting): - if only_if_unset: return - if type(tool[setting]) == list: - tool[setting] += value - else: - raise TypeError( - 'Appending "%s" to a non-list setting "%s" for tool "%s" is ' - 'not allowed, previous value: %s' % ( - value, setting, tool_name, str(tool[setting]))) - else: - tool[setting] = value - - -def _ConfigPlatform(config_data): - return config_data.get('msvs_configuration_platform', 'Win32') - - -def _ConfigBaseName(config_name, platform_name): - if config_name.endswith('_' + platform_name): - return config_name[0:-len(platform_name) - 1] - else: - return config_name - - -def _ConfigFullName(config_name, config_data): - platform_name = _ConfigPlatform(config_data) - return '%s|%s' % (_ConfigBaseName(config_name, platform_name), platform_name) - - -def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path, - quote_cmd, do_setup_env): - - if [x for x in cmd if '$(InputDir)' in x]: - input_dir_preamble = ( - 'set INPUTDIR=$(InputDir)\n' - 'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n' - 'set INPUTDIR=%INPUTDIR:~0,-1%\n' - ) - else: - input_dir_preamble = '' - - if cygwin_shell: - # Find path to cygwin. - cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0]) - # Prepare command. - direct_cmd = cmd - direct_cmd = [i.replace('$(IntDir)', - '`cygpath -m "${INTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(OutDir)', - '`cygpath -m "${OUTDIR}"`') for i in direct_cmd] - direct_cmd = [i.replace('$(InputDir)', - '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd] - if has_input_path: - direct_cmd = [i.replace('$(InputPath)', - '`cygpath -m "${INPUTPATH}"`') - for i in direct_cmd] - direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd] - # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd) - direct_cmd = ' '.join(direct_cmd) - # TODO(quote): regularize quoting path names throughout the module - cmd = '' - if do_setup_env: - cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && ' - cmd += 'set CYGWIN=nontsec&& ' - if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0: - cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& ' - if direct_cmd.find('INTDIR') >= 0: - cmd += 'set INTDIR=$(IntDir)&& ' - if direct_cmd.find('OUTDIR') >= 0: - cmd += 'set OUTDIR=$(OutDir)&& ' - if has_input_path and direct_cmd.find('INPUTPATH') >= 0: - cmd += 'set INPUTPATH=$(InputPath) && ' - cmd += 'bash -c "%(cmd)s"' - cmd = cmd % {'cygwin_dir': cygwin_dir, - 'cmd': direct_cmd} - return input_dir_preamble + cmd - else: - # Convert cat --> type to mimic unix. 
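Editor's aside, not part of the patch: _ConfigBaseName and _ConfigFullName above turn gyp configuration names into the 'Name|Platform' strings Visual Studio expects, first stripping a platform suffix that is already baked into the configuration name. A compact standalone sketch, with an illustrative name:

def config_full_name_sketch(config_name, platform_name):
    # Drop a trailing '_<platform>' from the configuration name, then join with '|'.
    if config_name.endswith('_' + platform_name):
        config_name = config_name[:-len(platform_name) - 1]
    return '%s|%s' % (config_name, platform_name)

print(config_full_name_sketch('Debug_x64', 'x64'))   # Debug|x64
print(config_full_name_sketch('Release', 'Win32'))   # Release|Win32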
- if cmd[0] == 'cat': - command = ['type'] - else: - command = [cmd[0].replace('/', '\\')] - # Add call before command to ensure that commands can be tied together one - # after the other without aborting in Incredibuild, since IB makes a bat - # file out of the raw command string, and some commands (like python) are - # actually batch files themselves. - command.insert(0, 'call') - # Fix the paths - # TODO(quote): This is a really ugly heuristic, and will miss path fixing - # for arguments like "--arg=path" or "/opt:path". - # If the argument starts with a slash or dash, it's probably a command line - # switch - arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]] - arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments] - arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments] - if quote_cmd: - # Support a mode for using cmd directly. - # Convert any paths to native form (first element is used directly). - # TODO(quote): regularize quoting path names throughout the module - arguments = ['"%s"' % i for i in arguments] - # Collapse into a single command. - return input_dir_preamble + ' '.join(command + arguments) - - -def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env): - # Currently this weird argument munging is used to duplicate the way a - # python script would need to be run as part of the chrome tree. - # Eventually we should add some sort of rule_default option to set this - # per project. For now the behavior chrome needs is the default. - mcs = rule.get('msvs_cygwin_shell') - if mcs is None: - mcs = int(spec.get('msvs_cygwin_shell', 1)) - elif isinstance(mcs, str): - mcs = int(mcs) - quote_cmd = int(rule.get('msvs_quote_cmd', 1)) - return _BuildCommandLineForRuleRaw(spec, rule['action'], mcs, has_input_path, - quote_cmd, do_setup_env=do_setup_env) - - -def _AddActionStep(actions_dict, inputs, outputs, description, command): - """Merge action into an existing list of actions. - - Care must be taken so that actions which have overlapping inputs either don't - get assigned to the same input, or get collapsed into one. - - Arguments: - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - inputs: list of inputs - outputs: list of outputs - description: description of the action - command: command line to execute - """ - # Require there to be at least one input (call sites will ensure this). - assert inputs - - action = { - 'inputs': inputs, - 'outputs': outputs, - 'description': description, - 'command': command, - } - - # Pick where to stick this action. - # While less than optimal in terms of build time, attach them to the first - # input for now. - chosen_input = inputs[0] - - # Add it there. - if chosen_input not in actions_dict: - actions_dict[chosen_input] = [] - actions_dict[chosen_input].append(action) - - -def _AddCustomBuildToolForMSVS(p, spec, primary_input, - inputs, outputs, description, cmd): - """Add a custom build tool to execute something. 
- - Arguments: - p: the target project - spec: the target project dict - primary_input: input file to attach the build tool to - inputs: list of inputs - outputs: list of outputs - description: description of the action - cmd: command line to execute - """ - inputs = _FixPaths(inputs) - outputs = _FixPaths(outputs) - tool = MSVSProject.Tool( - 'VCCustomBuildTool', - {'Description': description, - 'AdditionalDependencies': ';'.join(inputs), - 'Outputs': ';'.join(outputs), - 'CommandLine': cmd, - }) - # Add to the properties of primary input for each config. - for config_name, c_data in spec['configurations'].iteritems(): - p.AddFileConfig(_FixPath(primary_input), - _ConfigFullName(config_name, c_data), tools=[tool]) - - -def _AddAccumulatedActionsToMSVS(p, spec, actions_dict): - """Add actions accumulated into an actions_dict, merging as needed. - - Arguments: - p: the target project - spec: the target project dict - actions_dict: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - """ - for primary_input in actions_dict: - inputs = set() - outputs = set() - descriptions = [] - commands = [] - for action in actions_dict[primary_input]: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) - descriptions.append(action['description']) - commands.append(action['command']) - # Add the custom build step for one input file. - description = ', and also '.join(descriptions) - command = '\r\n'.join(commands) - _AddCustomBuildToolForMSVS(p, spec, - primary_input=primary_input, - inputs=inputs, - outputs=outputs, - description=description, - cmd=command) - - -def _RuleExpandPath(path, input_file): - """Given the input file to which a rule applied, string substitute a path. - - Arguments: - path: a path to string expand - input_file: the file to which the rule applied. - Returns: - The string substituted path. - """ - path = path.replace('$(InputName)', - os.path.splitext(os.path.split(input_file)[1])[0]) - path = path.replace('$(InputDir)', os.path.dirname(input_file)) - path = path.replace('$(InputExt)', - os.path.splitext(os.path.split(input_file)[1])[1]) - path = path.replace('$(InputFileName)', os.path.split(input_file)[1]) - path = path.replace('$(InputPath)', input_file) - return path - - -def _FindRuleTriggerFiles(rule, sources): - """Find the list of files which a particular rule applies to. - - Arguments: - rule: the rule in question - sources: the set of all known source files for this project - Returns: - The list of sources that trigger a particular rule. - """ - rule_ext = rule['extension'] - return [s for s in sources if s.endswith('.' + rule_ext)] - - -def _RuleInputsAndOutputs(rule, trigger_file): - """Find the inputs and outputs generated by a rule. - - Arguments: - rule: the rule in question. - trigger_file: the main trigger for this rule. - Returns: - The pair of (inputs, outputs) involved in this rule. - """ - raw_inputs = _FixPaths(rule.get('inputs', [])) - raw_outputs = _FixPaths(rule.get('outputs', [])) - inputs = set() - outputs = set() - inputs.add(trigger_file) - for i in raw_inputs: - inputs.add(_RuleExpandPath(i, trigger_file)) - for o in raw_outputs: - outputs.add(_RuleExpandPath(o, trigger_file)) - return (inputs, outputs) - - -def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options): - """Generate a native rules file. 
- - Arguments: - p: the target project - rules: the set of rules to include - output_dir: the directory in which the project/gyp resides - spec: the project dict - options: global generator options - """ - rules_filename = '%s%s.rules' % (spec['target_name'], - options.suffix) - rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename), - spec['target_name']) - # Add each rule. - for r in rules: - rule_name = r['rule_name'] - rule_ext = r['extension'] - inputs = _FixPaths(r.get('inputs', [])) - outputs = _FixPaths(r.get('outputs', [])) - # Skip a rule with no action and no inputs. - if 'action' not in r and not r.get('rule_sources', []): - continue - cmd = _BuildCommandLineForRule(spec, r, has_input_path=True, - do_setup_env=True) - rules_file.AddCustomBuildRule(name=rule_name, - description=r.get('message', rule_name), - extensions=[rule_ext], - additional_dependencies=inputs, - outputs=outputs, - cmd=cmd) - # Write out rules file. - rules_file.WriteIfChanged() - - # Add rules file to project. - p.AddToolFile(rules_filename) - - -def _Cygwinify(path): - path = path.replace('$(OutDir)', '$(OutDirCygwin)') - path = path.replace('$(IntDir)', '$(IntDirCygwin)') - return path - - -def _GenerateExternalRules(rules, output_dir, spec, - sources, options, actions_to_add): - """Generate an external makefile to do a set of rules. - - Arguments: - rules: the list of rules to include - output_dir: path containing project and gyp files - spec: project specification data - sources: set of sources known - options: global generator options - actions_to_add: The list of actions we will add to. - """ - filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix) - mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename)) - # Find cygwin style versions of some paths. - mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n') - mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n') - # Gather stuff needed to emit all: target. - all_inputs = set() - all_outputs = set() - all_output_dirs = set() - first_outputs = [] - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - all_inputs.update(set(inputs)) - all_outputs.update(set(outputs)) - # Only use one target from each rule as the dependency for - # 'all' so we don't try to build each rule multiple times. - first_outputs.append(list(outputs)[0]) - # Get the unique output directories for this rule. - output_dirs = [os.path.split(i)[0] for i in outputs] - for od in output_dirs: - all_output_dirs.add(od) - first_outputs_cyg = [_Cygwinify(i) for i in first_outputs] - # Write out all: target, including mkdir for each output directory. - mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg)) - for od in all_output_dirs: - if od: - mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od) - mk_file.write('\n') - # Define how each output is generated. - for rule in rules: - trigger_files = _FindRuleTriggerFiles(rule, sources) - for tf in trigger_files: - # Get all the inputs and outputs for this rule for this trigger file. - inputs, outputs = _RuleInputsAndOutputs(rule, tf) - inputs = [_Cygwinify(i) for i in inputs] - outputs = [_Cygwinify(i) for i in outputs] - # Prepare the command line for this rule. - cmd = [_RuleExpandPath(c, tf) for c in rule['action']] - cmd = ['"%s"' % i for i in cmd] - cmd = ' '.join(cmd) - # Add it to the makefile. 
- mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs))) - mk_file.write('\t%s\n\n' % cmd) - # Close up the file. - mk_file.close() - - # Add makefile to list of sources. - sources.add(filename) - # Add a build action to call makefile. - cmd = ['make', - 'OutDir=$(OutDir)', - 'IntDir=$(IntDir)', - '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}', - '-f', filename] - cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True) - # Insert makefile as 0'th input, so it gets the action attached there, - # as this is easier to understand from in the IDE. - all_inputs = list(all_inputs) - all_inputs.insert(0, filename) - _AddActionStep(actions_to_add, - inputs=_FixPaths(all_inputs), - outputs=_FixPaths(all_outputs), - description='Running external rules for %s' % - spec['target_name'], - command=cmd) - - -def _EscapeEnvironmentVariableExpansion(s): - """Escapes % characters. - - Escapes any % characters so that Windows-style environment variable - expansions will leave them alone. - See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile - to understand why we have to do this. - - Args: - s: The string to be escaped. - - Returns: - The escaped string. - """ - s = s.replace('%', '%%') - return s - - -quote_replacer_regex = re.compile(r'(\\*)"') - - -def _EscapeCommandLineArgumentForMSVS(s): - """Escapes a Windows command-line argument. - - So that the Win32 CommandLineToArgv function will turn the escaped result back - into the original string. - See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx - ("Parsing C++ Command-Line Arguments") to understand why we have to do - this. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a literal quote, CommandLineToArgv requires an odd number of - # backslashes preceding it, and it produces half as many literal backslashes - # (rounded down). So we need to produce 2n+1 backslashes. - return 2 * match.group(1) + '\\"' - - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex.sub(_Replace, s) - # Now add unescaped quotes so that any whitespace is interpreted literally. - s = '"' + s + '"' - return s - - -delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)') - - -def _EscapeVCProjCommandLineArgListItem(s): - """Escapes command line arguments for MSVS. - - The VCProj format stores string lists in a single string using commas and - semi-colons as separators, which must be quoted if they are to be - interpreted literally. However, command-line arguments may already have - quotes, and the VCProj parser is ignorant of the backslash escaping - convention used by CommandLineToArgv, so the command-line quotes and the - VCProj quotes may not be the same quotes. So to store a general - command-line argument in a VCProj list, we need to parse the existing - quoting according to VCProj's convention and quote any delimiters that are - not already quoted by that convention. The quotes that we add will also be - seen by CommandLineToArgv, so if backslashes precede them then we also have - to escape those backslashes according to the CommandLineToArgv - convention. - - Args: - s: the string to be escaped. - Returns: - the escaped string. - """ - - def _Replace(match): - # For a non-literal quote, CommandLineToArgv requires an even number of - # backslashes preceding it, and it produces half as many literal - # backslashes. So we need to produce 2n backslashes. 
- return 2 * match.group(1) + '"' + match.group(2) + '"' - - segments = s.split('"') - # The unquoted segments are at the even-numbered indices. - for i in range(0, len(segments), 2): - segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i]) - # Concatenate back into a single string - s = '"'.join(segments) - if len(segments) % 2 == 0: - # String ends while still quoted according to VCProj's convention. This - # means the delimiter and the next list item that follow this one in the - # .vcproj file will be misinterpreted as part of this item. There is nothing - # we can do about this. Adding an extra quote would correct the problem in - # the VCProj but cause the same problem on the final command-line. Moving - # the item to the end of the list does works, but that's only possible if - # there's only one such item. Let's just warn the user. - print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' + - 'quotes in ' + s) - return s - - -def _EscapeCppDefineForMSVS(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgumentForMSVS(s) - s = _EscapeVCProjCommandLineArgListItem(s) - # cl.exe replaces literal # characters with = in preprocesor definitions for - # some reason. Octal-encode to work around that. - s = s.replace('#', '\\%03o' % ord('#')) - return s - - -quote_replacer_regex2 = re.compile(r'(\\+)"') - - -def _EscapeCommandLineArgumentForMSBuild(s): - """Escapes a Windows command-line argument for use by MSBuild.""" - - def _Replace(match): - return (len(match.group(1)) / 2 * 4) * '\\' + '\\"' - - # Escape all quotes so that they are interpreted literally. - s = quote_replacer_regex2.sub(_Replace, s) - return s - - -def _EscapeMSBuildSpecialCharacters(s): - escape_dictionary = { - '%': '%25', - '$': '%24', - '@': '%40', - "'": '%27', - ';': '%3B', - '?': '%3F', - '*': '%2A' - } - result = ''.join([escape_dictionary.get(c, c) for c in s]) - return result - - -def _EscapeCppDefineForMSBuild(s): - """Escapes a CPP define so that it will reach the compiler unaltered.""" - s = _EscapeEnvironmentVariableExpansion(s) - s = _EscapeCommandLineArgumentForMSBuild(s) - s = _EscapeMSBuildSpecialCharacters(s) - # cl.exe replaces literal # characters with = in preprocesor definitions for - # some reason. Octal-encode to work around that. - s = s.replace('#', '\\%03o' % ord('#')) - return s - - -def _GenerateRulesForMSVS(p, output_dir, options, spec, - sources, excluded_sources, - actions_to_add): - """Generate all the rules for a particular project. - - Arguments: - p: the project - output_dir: directory to emit rules to - options: global options passed to the generator - spec: the specification for this project - sources: the set of all known source files in this project - excluded_sources: the set of sources excluded from normal processing - actions_to_add: deferred list of actions to add in - """ - rules = spec.get('rules', []) - rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] - rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] - - # Handle rules that use a native rules file. - if rules_native: - _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options) - - # Handle external rules (non-native rules). 
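Editor's aside, not part of the patch: _EscapeCommandLineArgumentForMSVS a little above implements the CommandLineToArgv rule that a literal quote must be preceded by an odd number of backslashes, so n original backslashes become 2n+1. A self-contained sketch of the same transformation; the function name is illustrative.

import re

quote_replacer_regex = re.compile(r'(\\*)"')

def escape_for_msvs_cmdline(s):
    def _replace(match):
        # n backslashes before a quote become 2n+1, so CommandLineToArgv yields
        # n literal backslashes plus a literal quote.
        return 2 * match.group(1) + '\\"'
    # Escape embedded quotes, then wrap the whole argument so spaces survive.
    return '"' + quote_replacer_regex.sub(_replace, s) + '"'

print(escape_for_msvs_cmdline('say "hi"'))  # "say \"hi\""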
- if rules_external: - _GenerateExternalRules(rules_external, output_dir, spec, - sources, options, actions_to_add) - _AdjustSourcesForRules(rules, sources, excluded_sources) - - -def _AdjustSourcesForRules(rules, sources, excluded_sources): - # Add outputs generated by each rule (if applicable). - for rule in rules: - # Done if not processing outputs as sources. - if int(rule.get('process_outputs_as_sources', False)): - # Add in the outputs from this rule. - trigger_files = _FindRuleTriggerFiles(rule, sources) - for trigger_file in trigger_files: - inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file) - inputs = set(_FixPaths(inputs)) - outputs = set(_FixPaths(outputs)) - inputs.remove(_FixPath(trigger_file)) - sources.update(inputs) - excluded_sources.update(inputs) - sources.update(outputs) - - -def _FilterActionsFromExcluded(excluded_sources, actions_to_add): - """Take inputs with actions attached out of the list of exclusions. - - Arguments: - excluded_sources: list of source files not to be built. - actions_to_add: dict of actions keyed on source file they're attached to. - Returns: - excluded_sources with files that have actions attached removed. - """ - must_keep = set(_FixPaths(actions_to_add.keys())) - return [s for s in excluded_sources if s not in must_keep] - - -def _GetDefaultConfiguration(spec): - return spec['configurations'][spec['default_configuration']] - - -def _GetGuidOfProject(proj_path, spec): - """Get the guid for the project. - - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - Returns: - the guid. - Raises: - ValueError: if the specified GUID is invalid. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - # Decide the guid of the project. - guid = default_config.get('msvs_guid') - if guid: - if VALID_MSVS_GUID_CHARS.match(guid) is None: - raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' % - (guid, VALID_MSVS_GUID_CHARS.pattern)) - guid = '{%s}' % guid - guid = guid or MSVSNew.MakeGuid(proj_path) - return guid - - -def _GetMsbuildToolsetOfProject(proj_path, spec, version): - """Get the platform toolset for the project. - - Arguments: - proj_path: Path of the vcproj or vcxproj file to generate. - spec: The target dictionary containing the properties of the target. - version: The MSVSVersion object. - Returns: - the platform toolset string or None. - """ - # Pluck out the default configuration. - default_config = _GetDefaultConfiguration(spec) - toolset = default_config.get('msbuild_toolset') - if not toolset and version.DefaultToolset(): - toolset = version.DefaultToolset() - return toolset - - -def _GenerateProject(project, options, version, generator_flags): - """Generates a vcproj file. - - Arguments: - project: the MSVSProject object. - options: global generator options. - version: the MSVSVersion object. - generator_flags: dict of generator-specific flags. - Returns: - A list of source files that cannot be found on disk. - """ - default_config = _GetDefaultConfiguration(project.spec) - - # Skip emitting anything if told to with msvs_existing_vcproj option. 
- if default_config.get('msvs_existing_vcproj'): - return [] - - if version.UsesVcxproj(): - return _GenerateMSBuildProject(project, options, version, generator_flags) - else: - return _GenerateMSVSProject(project, options, version, generator_flags) - - -def _GenerateMSVSProject(project, options, version, generator_flags): - """Generates a .vcproj file. It may create .rules and .user files too. - - Arguments: - project: The project object we will generate the file for. - options: Global options passed to the generator. - version: The VisualStudioVersion object. - generator_flags: dict of generator-specific flags. - """ - spec = project.spec - vcproj_dir = os.path.dirname(project.path) - if vcproj_dir and not os.path.exists(vcproj_dir): - os.makedirs(vcproj_dir) - - platforms = _GetUniquePlatforms(spec) - p = MSVSProject.Writer(project.path, version, spec['target_name'], - project.guid, platforms) - - # Get directory project file is in. - project_dir = os.path.split(project.path)[0] - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - config_type = _GetMSVSConfigurationType(spec, project.build_file) - for config_name, config in spec['configurations'].iteritems(): - _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config) - - # Prepare list of sources and excluded sources. - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, - gyp_file) - - # Add rules. - actions_to_add = {} - _GenerateRulesForMSVS(p, project_dir, options, spec, - sources, excluded_sources, - actions_to_add) - list_excluded = generator_flags.get('msvs_list_excluded_files', True) - sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, project_dir, sources, excluded_sources, list_excluded)) - - # Add in files. - missing_sources = _VerifySourcesExist(sources, project_dir) - p.AddFiles(sources) - - _AddToolFilesToMSVS(p, spec) - _HandlePreCompiledHeaders(p, sources, spec) - _AddActions(actions_to_add, spec, relative_path_of_gyp_file) - _AddCopies(actions_to_add, spec) - _WriteMSVSUserFile(project.path, version, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. - excluded_sources = _FilterActionsFromExcluded( - excluded_sources, actions_to_add) - _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, - list_excluded) - _AddAccumulatedActionsToMSVS(p, spec, actions_to_add) - - # Write it out. - p.WriteIfChanged() - - return missing_sources - - -def _GetUniquePlatforms(spec): - """Returns the list of unique platforms for this spec, e.g ['win32', ...]. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The MSVSUserFile object created. - """ - # Gather list of unique platforms. - platforms = set() - for configuration in spec['configurations']: - platforms.add(_ConfigPlatform(spec['configurations'][configuration])) - platforms = list(platforms) - return platforms - - -def _CreateMSVSUserFile(proj_path, version, spec): - """Generates a .user file for the user running this Gyp program. - - Arguments: - proj_path: The path of the project file being created. The .user file - shares the same path (with an appropriate suffix). - version: The VisualStudioVersion object. - spec: The target dictionary containing the properties of the target. 
- Returns: - The MSVSUserFile object created. - """ - (domain, username) = _GetDomainAndUserName() - vcuser_filename = '.'.join([proj_path, domain, username, 'user']) - user_file = MSVSUserFile.Writer(vcuser_filename, version, - spec['target_name']) - return user_file - - -def _GetMSVSConfigurationType(spec, build_file): - """Returns the configuration type for this project. - - It's a number defined by Microsoft. May raise an exception. - - Args: - spec: The target dictionary containing the properties of the target. - build_file: The path of the gyp file. - Returns: - An integer, the configuration type. - """ - try: - config_type = { - 'executable': '1', # .exe - 'shared_library': '2', # .dll - 'loadable_module': '2', # .dll - 'static_library': '4', # .lib - 'none': '10', # Utility type - }[spec['type']] - except KeyError: - if spec.get('type'): - raise GypError('Target type %s is not a valid target type for ' - 'target %s in %s.' % - (spec['type'], spec['target_name'], build_file)) - else: - raise GypError('Missing type field for target %s in %s.' % - (spec['target_name'], build_file)) - return config_type - - -def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config): - """Adds a configuration to the MSVS project. - - Many settings in a vcproj file are specific to a configuration. This - function the main part of the vcproj file that's configuration specific. - - Arguments: - p: The target project being generated. - spec: The target dictionary containing the properties of the target. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - config: The dictionnary that defines the special processing to be done - for this configuration. - """ - # Get the information for this configuration - include_dirs, resource_include_dirs = _GetIncludeDirs(config) - libraries = _GetLibraries(spec) - out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False) - defines = _GetDefines(config) - defines = [_EscapeCppDefineForMSVS(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(config) - prebuild = config.get('msvs_prebuild') - postbuild = config.get('msvs_postbuild') - def_file = _GetModuleDefinition(spec) - precompiled_header = config.get('msvs_precompiled_header') - - # Prepare the list of tools as a dictionary. - tools = dict() - # Add in user specified msvs_settings. - msvs_settings = config.get('msvs_settings', {}) - MSVSSettings.ValidateMSVSSettings(msvs_settings) - - # Prevent default library inheritance from the environment. - _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)']) - - for tool in msvs_settings: - settings = config['msvs_settings'][tool] - for setting in settings: - _ToolAppend(tools, tool, setting, settings[setting]) - # Add the information to the appropriate tool - _ToolAppend(tools, 'VCCLCompilerTool', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(tools, 'VCResourceCompilerTool', - 'AdditionalIncludeDirectories', resource_include_dirs) - # Add in libraries. - _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries) - if out_file: - _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True) - # Add defines. - _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines) - _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions', - defines) - # Change program database directory to prevent collisions. 
- _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName', - '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True) - # Add disabled warnings. - _ToolAppend(tools, 'VCCLCompilerTool', - 'DisableSpecificWarnings', disabled_warnings) - # Add Pre-build. - _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild) - # Add Post-build. - _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2') - _ToolAppend(tools, 'VCCLCompilerTool', - 'PrecompiledHeaderThrough', precompiled_header) - _ToolAppend(tools, 'VCCLCompilerTool', - 'ForcedIncludeFiles', precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true') - # Set the module definition file if any. - if def_file: - _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file) - - _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name) - - -def _GetIncludeDirs(config): - """Returns the list of directories to be used for #include directives. - - Arguments: - config: The dictionnary that defines the special processing to be done - for this configuration. - Returns: - The list of directory paths. - """ - # TODO(bradnelson): include_dirs should really be flexible enough not to - # require this sort of thing. - include_dirs = ( - config.get('include_dirs', []) + - config.get('msvs_system_include_dirs', [])) - resource_include_dirs = config.get('resource_include_dirs', include_dirs) - include_dirs = _FixPaths(include_dirs) - resource_include_dirs = _FixPaths(resource_include_dirs) - return include_dirs, resource_include_dirs - - -def _GetLibraries(spec): - """Returns the list of libraries for this configuration. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - The list of directory paths. - """ - libraries = spec.get('libraries', []) - # Strip out -l, as it is not used on windows (but is needed so we can pass - # in libraries that are assumed to be in the default library path). - # Also remove duplicate entries, leaving only the last duplicate, while - # preserving order. - found = set() - unique_libraries_list = [] - for entry in reversed(libraries): - library = re.sub('^\-l', '', entry) - if not os.path.splitext(library)[1]: - library += '.lib' - if library not in found: - found.add(library) - unique_libraries_list.append(library) - unique_libraries_list.reverse() - return unique_libraries_list - - -def _GetOutputFilePathAndTool(spec, msbuild): - """Returns the path and tool to use for this target. - - Figures out the path of the file this spec will create and the name of - the VC tool that will create it. - - Arguments: - spec: The target dictionary containing the properties of the target. - Returns: - A triple of (file path, name of the vc tool, name of the msbuild tool) - """ - # Select a name for the output file. 
- out_file = '' - vc_tool = '' - msbuild_tool = '' - output_file_map = { - 'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'), - 'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), - 'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'), - 'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'), - } - output_file_props = output_file_map.get(spec['type']) - if output_file_props and int(spec.get('msvs_auto_output_file', 1)): - vc_tool, msbuild_tool, out_dir, suffix = output_file_props - if spec.get('standalone_static_library', 0): - out_dir = '$(OutDir)' - out_dir = spec.get('product_dir', out_dir) - product_extension = spec.get('product_extension') - if product_extension: - suffix = '.' + product_extension - elif msbuild: - suffix = '$(TargetExt)' - prefix = spec.get('product_prefix', '') - product_name = spec.get('product_name', '$(ProjectName)') - out_file = ntpath.join(out_dir, prefix + product_name + suffix) - return out_file, vc_tool, msbuild_tool - - -def _GetDefines(config): - """Returns the list of preprocessor definitions for this configuation. - - Arguments: - config: The dictionnary that defines the special processing to be done - for this configuration. - Returns: - The list of preprocessor definitions. - """ - defines = [] - for d in config.get('defines', []): - if type(d) == list: - fd = '='.join([str(dpart) for dpart in d]) - else: - fd = str(d) - defines.append(fd) - return defines - - -def _GetDisabledWarnings(config): - return [str(i) for i in config.get('msvs_disabled_warnings', [])] - - -def _GetModuleDefinition(spec): - def_file = '' - if spec['type'] in ['shared_library', 'loadable_module', 'executable']: - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] - if len(def_files) == 1: - def_file = _FixPath(def_files[0]) - elif def_files: - raise ValueError( - 'Multiple module definition files in one target, target %s lists ' - 'multiple .def files: %s' % ( - spec['target_name'], ' '.join(def_files))) - return def_file - - -def _ConvertToolsToExpectedForm(tools): - """Convert tools to a form expected by Visual Studio. - - Arguments: - tools: A dictionnary of settings; the tool name is the key. - Returns: - A list of Tool objects. - """ - tool_list = [] - for tool, settings in tools.iteritems(): - # Collapse settings with lists. - settings_fixed = {} - for setting, value in settings.iteritems(): - if type(value) == list: - if ((tool == 'VCLinkerTool' and - setting == 'AdditionalDependencies') or - setting == 'AdditionalOptions'): - settings_fixed[setting] = ' '.join(value) - else: - settings_fixed[setting] = ';'.join(value) - else: - settings_fixed[setting] = value - # Add in this tool. - tool_list.append(MSVSProject.Tool(tool, settings_fixed)) - return tool_list - - -def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name): - """Add to the project file the configuration specified by config. - - Arguments: - p: The target project being generated. - spec: the target project dict. - tools: A dictionnary of settings; the tool name is the key. - config: The dictionnary that defines the special processing to be done - for this configuration. - config_type: The configuration type, a number as defined by Microsoft. - config_name: The name of the configuration. - """ - attributes = _GetMSVSAttributes(spec, config, config_type) - # Add in this configuration. 
- tool_list = _ConvertToolsToExpectedForm(tools) - p.AddConfig(_ConfigFullName(config_name, config), - attrs=attributes, tools=tool_list) - - -def _GetMSVSAttributes(spec, config, config_type): - # Prepare configuration attributes. - prepared_attrs = {} - source_attrs = config.get('msvs_configuration_attributes', {}) - for a in source_attrs: - prepared_attrs[a] = source_attrs[a] - # Add props files. - vsprops_dirs = config.get('msvs_props', []) - vsprops_dirs = _FixPaths(vsprops_dirs) - if vsprops_dirs: - prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs) - # Set configuration type. - prepared_attrs['ConfigurationType'] = config_type - output_dir = prepared_attrs.get('OutputDirectory', - '$(SolutionDir)$(ConfigurationName)') - prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\' - if 'IntermediateDirectory' not in prepared_attrs: - intermediate = '$(ConfigurationName)\\obj\\$(ProjectName)' - prepared_attrs['IntermediateDirectory'] = _FixPath(intermediate) + '\\' - else: - intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\' - intermediate = MSVSSettings.FixVCMacroSlashes(intermediate) - prepared_attrs['IntermediateDirectory'] = intermediate - return prepared_attrs - - -def _AddNormalizedSources(sources_set, sources_array): - sources = [_NormalizedSource(s) for s in sources_array] - sources_set.update(set(sources)) - - -def _PrepareListOfSources(spec, generator_flags, gyp_file): - """Prepare list of sources and excluded sources. - - Besides the sources specified directly in the spec, adds the gyp file so - that a change to it will cause a re-compile. Also adds appropriate sources - for actions and copies. Assumes later stage will un-exclude files which - have custom build steps attached. - - Arguments: - spec: The target dictionary containing the properties of the target. - gyp_file: The name of the gyp file. - Returns: - A pair of (list of sources, list of excluded sources). - The sources will be relative to the gyp file. - """ - sources = set() - _AddNormalizedSources(sources, spec.get('sources', [])) - excluded_sources = set() - # Add in the gyp file. - if not generator_flags.get('standalone'): - sources.add(gyp_file) - - # Add in 'action' inputs and outputs. - for a in spec.get('actions', []): - inputs = a['inputs'] - inputs = [_NormalizedSource(i) for i in inputs] - # Add all inputs to sources and excluded sources. - inputs = set(inputs) - sources.update(inputs) - excluded_sources.update(inputs) - if int(a.get('process_outputs_as_sources', False)): - _AddNormalizedSources(sources, a.get('outputs', [])) - # Add in 'copies' inputs and outputs. - for cpy in spec.get('copies', []): - _AddNormalizedSources(sources, cpy.get('files', [])) - return (sources, excluded_sources) - - -def _AdjustSourcesAndConvertToFilterHierarchy( - spec, options, gyp_dir, sources, excluded_sources, list_excluded): - """Adjusts the list of sources and excluded sources. - - Also converts the sets to lists. - - Arguments: - spec: The target dictionary containing the properties of the target. - options: Global generator options. - gyp_dir: The path to the gyp file being processed. - sources: A set of sources to be included for this project. - excluded_sources: A set of sources to be excluded for this project. - Returns: - A trio of (list of sources, list of excluded sources, - path of excluded IDL file) - """ - # Exclude excluded sources coming into the generator. 
- excluded_sources.update(set(spec.get('sources_excluded', []))) - # Add excluded sources into sources for good measure. - sources.update(excluded_sources) - # Convert to proper windows form. - # NOTE: sources goes from being a set to a list here. - # NOTE: excluded_sources goes from being a set to a list here. - sources = _FixPaths(sources) - # Convert to proper windows form. - excluded_sources = _FixPaths(excluded_sources) - - excluded_idl = _IdlFilesHandledNonNatively(spec, sources) - - precompiled_related = _GetPrecompileRelatedFiles(spec) - # Find the excluded ones, minus the precompiled header related ones. - fully_excluded = [i for i in excluded_sources if i not in precompiled_related] - - # Convert to folders and the right slashes. - sources = [i.split('\\') for i in sources] - sources = _ConvertSourcesToFilterHierarchy(sources, excluded=fully_excluded, - list_excluded=list_excluded) - - return sources, excluded_sources, excluded_idl - - -def _IdlFilesHandledNonNatively(spec, sources): - # If any non-native rules use 'idl' as an extension exclude idl files. - # Gather a list here to use later. - using_idl = False - for rule in spec.get('rules', []): - if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)): - using_idl = True - break - if using_idl: - excluded_idl = [i for i in sources if i.endswith('.idl')] - else: - excluded_idl = [] - return excluded_idl - - -def _GetPrecompileRelatedFiles(spec): - # Gather a list of precompiled header related sources. - precompiled_related = [] - for _, config in spec['configurations'].iteritems(): - for k in precomp_keys: - f = config.get(k) - if f: - precompiled_related.append(_FixPath(f)) - return precompiled_related - - -def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl, - list_excluded): - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - for file_name, excluded_configs in exclusions.iteritems(): - if (not list_excluded and - len(excluded_configs) == len(spec['configurations'])): - # If we're not listing excluded files, then they won't appear in the - # project, so don't try to configure them to be excluded. - pass - else: - for config_name, config in excluded_configs: - p.AddFileConfig(file_name, _ConfigFullName(config_name, config), - {'ExcludedFromBuild': 'true'}) - - -def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl): - exclusions = {} - # Exclude excluded sources from being built. - for f in excluded_sources: - excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): - precomped = [_FixPath(config.get(i, '')) for i in precomp_keys] - # Don't do this for ones that are precompiled header related. - if f not in precomped: - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - # If any non-native rules use 'idl' as an extension exclude idl files. - # Exclude them now. - for f in excluded_idl: - excluded_configs = [] - for config_name, config in spec['configurations'].iteritems(): - excluded_configs.append((config_name, config)) - exclusions[f] = excluded_configs - return exclusions - - -def _AddToolFilesToMSVS(p, spec): - # Add in tool files (rules). 
- tool_files = set() - for _, config in spec['configurations'].iteritems(): - for f in config.get('msvs_tool_files', []): - tool_files.add(f) - for f in tool_files: - p.AddToolFile(f) - - -def _HandlePreCompiledHeaders(p, sources, spec): - # Pre-compiled header source stubs need a different compiler flag - # (generate precompiled header) and any source file not of the same - # kind (i.e. C vs. C++) as the precompiled header source stub needs - # to have use of precompiled headers disabled. - extensions_excluded_from_precompile = [] - for config_name, config in spec['configurations'].iteritems(): - source = config.get('msvs_precompiled_source') - if source: - source = _FixPath(source) - # UsePrecompiledHeader=1 for if using precompiled headers. - tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '1'}) - p.AddFileConfig(source, _ConfigFullName(config_name, config), - {}, tools=[tool]) - basename, extension = os.path.splitext(source) - if extension == '.c': - extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] - else: - extensions_excluded_from_precompile = ['.c'] - def DisableForSourceTree(source_tree): - for source in source_tree: - if isinstance(source, MSVSProject.Filter): - DisableForSourceTree(source.contents) - else: - basename, extension = os.path.splitext(source) - if extension in extensions_excluded_from_precompile: - for config_name, config in spec['configurations'].iteritems(): - tool = MSVSProject.Tool('VCCLCompilerTool', - {'UsePrecompiledHeader': '0', - 'ForcedIncludeFiles': '$(NOINHERIT)'}) - p.AddFileConfig(_FixPath(source), - _ConfigFullName(config_name, config), - {}, tools=[tool]) - # Do nothing if there was no precompiled source. - if extensions_excluded_from_precompile: - DisableForSourceTree(sources) - - -def _AddActions(actions_to_add, spec, relative_path_of_gyp_file): - # Add actions. - actions = spec.get('actions', []) - # Don't setup_env every time. When all the actions are run together in one - # batch file in VS, the PATH will grow too long. - # Membership in this set means that the cygwin environment has been set up, - # and does not need to be set up again. - have_setup_env = set() - for a in actions: - # Attach actions to the gyp file if nothing else is there. - inputs = a.get('inputs') or [relative_path_of_gyp_file] - attached_to = inputs[0] - need_setup_env = attached_to not in have_setup_env - cmd = _BuildCommandLineForRule(spec, a, has_input_path=False, - do_setup_env=need_setup_env) - have_setup_env.add(attached_to) - # Add the action. - _AddActionStep(actions_to_add, - inputs=inputs, - outputs=a.get('outputs', []), - description=a.get('message', a['action_name']), - command=cmd) - - -def _WriteMSVSUserFile(project_path, version, spec): - # Add run_as and test targets. - if 'run_as' in spec: - run_as = spec['run_as'] - action = run_as.get('action', []) - environment = run_as.get('environment', []) - working_directory = run_as.get('working_directory', '.') - elif int(spec.get('test', 0)): - action = ['$(TargetPath)', '--gtest_print_time'] - environment = [] - working_directory = '.' - else: - return # Nothing to add - # Write out the user file. 
- user_file = _CreateMSVSUserFile(project_path, version, spec) - for config_name, c_data in spec['configurations'].iteritems(): - user_file.AddDebugSettings(_ConfigFullName(config_name, c_data), - action, environment, working_directory) - user_file.WriteIfChanged() - - -def _AddCopies(actions_to_add, spec): - copies = _GetCopies(spec) - for inputs, outputs, cmd, description in copies: - _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs, - description=description, command=cmd) - - -def _GetCopies(spec): - copies = [] - # Add copies. - for cpy in spec.get('copies', []): - for src in cpy.get('files', []): - dst = os.path.join(cpy['destination'], os.path.basename(src)) - # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and - # outputs, so do the same for our generated command line. - if src.endswith('/'): - src_bare = src[:-1] - base_dir = posixpath.split(src_bare)[0] - outer_dir = posixpath.split(src_bare)[1] - cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % ( - _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir) - copies.append(([src], ['dummy_copies', dst], cmd, - 'Copying %s to %s' % (src, dst))) - else: - cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % ( - _FixPath(cpy['destination']), _FixPath(src), _FixPath(dst)) - copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst))) - return copies - - -def _GetPathDict(root, path): - # |path| will eventually be empty (in the recursive calls) if it was initially - # relative; otherwise it will eventually end up as '\', 'D:\', etc. - if not path or path.endswith(os.sep): - return root - parent, folder = os.path.split(path) - parent_dict = _GetPathDict(root, parent) - if folder not in parent_dict: - parent_dict[folder] = dict() - return parent_dict[folder] - - -def _DictsToFolders(base_path, bucket, flat): - # Convert to folders recursively. - children = [] - for folder, contents in bucket.iteritems(): - if type(contents) == dict: - folder_children = _DictsToFolders(os.path.join(base_path, folder), - contents, flat) - if flat: - children += folder_children - else: - folder_children = MSVSNew.MSVSFolder(os.path.join(base_path, folder), - name='(' + folder + ')', - entries=folder_children) - children.append(folder_children) - else: - children.append(contents) - return children - - -def _CollapseSingles(parent, node): - # Recursively explorer the tree of dicts looking for projects which are - # the sole item in a folder which has the same name as the project. Bring - # such projects up one level. - if (type(node) == dict and - len(node) == 1 and - node.keys()[0] == parent + '.vcproj'): - return node[node.keys()[0]] - if type(node) != dict: - return node - for child in node: - node[child] = _CollapseSingles(child, node[child]) - return node - - -def _GatherSolutionFolders(sln_projects, project_objects, flat): - root = {} - # Convert into a tree of dicts on path. - for p in sln_projects: - gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2] - gyp_dir = os.path.dirname(gyp_file) - path_dict = _GetPathDict(root, gyp_dir) - path_dict[target + '.vcproj'] = project_objects[p] - # Walk down from the top until we hit a folder that has more than one entry. - # In practice, this strips the top-level "src/" dir from the hierarchy in - # the solution. - while len(root) == 1 and type(root[root.keys()[0]]) == dict: - root = root[root.keys()[0]] - # Collapse singles. - root = _CollapseSingles('', root) - # Merge buckets until everything is a root entry. 
- return _DictsToFolders('', root, flat) - - -def _GetPathOfProject(qualified_target, spec, options, msvs_version): - default_config = _GetDefaultConfiguration(spec) - proj_filename = default_config.get('msvs_existing_vcproj') - if not proj_filename: - proj_filename = (spec['target_name'] + options.suffix + - msvs_version.ProjectExtension()) - - build_file = gyp.common.BuildFile(qualified_target) - proj_path = os.path.join(os.path.dirname(build_file), proj_filename) - fix_prefix = None - if options.generator_output: - project_dir_path = os.path.dirname(os.path.abspath(proj_path)) - proj_path = os.path.join(options.generator_output, proj_path) - fix_prefix = gyp.common.RelativePath(project_dir_path, - os.path.dirname(proj_path)) - return proj_path, fix_prefix - - -def _GetPlatformOverridesOfProject(spec): - # Prepare a dict indicating which project configurations are used for which - # solution configurations for this target. - config_platform_overrides = {} - for config_name, c in spec['configurations'].iteritems(): - config_fullname = _ConfigFullName(config_name, c) - platform = c.get('msvs_target_platform', _ConfigPlatform(c)) - fixed_config_fullname = '%s|%s' % ( - _ConfigBaseName(config_name, _ConfigPlatform(c)), platform) - config_platform_overrides[config_fullname] = fixed_config_fullname - return config_platform_overrides - - -def _CreateProjectObjects(target_list, target_dicts, options, msvs_version): - """Create a MSVSProject object for the targets found in target list. - - Arguments: - target_list: the list of targets to generate project objects for. - target_dicts: the dictionary of specifications. - options: global generator options. - msvs_version: the MSVSVersion object. - Returns: - A set of created projects, keyed by target. - """ - global fixpath_prefix - # Generate each project. - projects = {} - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise GypError( - 'Multiple toolsets not supported in msvs build (target %s)' % - qualified_target) - proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec, - options, msvs_version) - guid = _GetGuidOfProject(proj_path, spec) - overrides = _GetPlatformOverridesOfProject(spec) - build_file = gyp.common.BuildFile(qualified_target) - # Create object for this project. - obj = MSVSNew.MSVSProject( - proj_path, - name=spec['target_name'], - guid=guid, - spec=spec, - build_file=build_file, - config_platform_overrides=overrides, - fixpath_prefix=fixpath_prefix) - # Set project toolset if any (MS build only) - if msvs_version.UsesVcxproj(): - obj.set_msbuild_toolset( - _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version)) - projects[qualified_target] = obj - # Set all the dependencies - for project in projects.values(): - deps = project.spec.get('dependencies', []) - deps = [projects[d] for d in deps] - project.set_dependencies(deps) - return projects - - -def CalculateVariables(default_variables, params): - """Generated variables that require params to be known.""" - - generator_flags = params.get('generator_flags', {}) - - # Select project file format version (if unset, default to auto detecting). - msvs_version = MSVSVersion.SelectVisualStudioVersion( - generator_flags.get('msvs_version', 'auto')) - # Stash msvs_version for later (so we don't have to probe the system twice). - params['msvs_version'] = msvs_version - - # Set a variable so conditions can be based on msvs_version. 
- default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if (os.environ.get('PROCESSOR_ARCHITECTURE', '').find('64') >= 0 or - os.environ.get('PROCESSOR_ARCHITEW6432', '').find('64') >= 0): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - - -def PerformBuild(data, configurations, params): - options = params['options'] - msvs_version = params['msvs_version'] - devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com') - - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - sln_path = build_file_root + options.suffix + '.sln' - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - - for config in configurations: - arguments = [devenv, sln_path, '/Build', config] - print 'Building [%s]: %s' % (config, arguments) - rtn = subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - """Generate .sln and .vcproj files. - - This is the entry point for this generator. - Arguments: - target_list: List of target pairs: 'base/base.gyp:base'. - target_dicts: Dict of target properties keyed on target pair. - data: Dictionary containing per .gyp data. - """ - global fixpath_prefix - - options = params['options'] - - # Get the project file format version back out of where we stashed it in - # GeneratorCalculatedVariables. - msvs_version = params['msvs_version'] - - generator_flags = params.get('generator_flags', {}) - - # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT. - (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list, target_dicts) - - # Prepare the set of configurations. - configs = set() - for qualified_target in target_list: - spec = target_dicts[qualified_target] - for config_name, config in spec['configurations'].iteritems(): - configs.add(_ConfigFullName(config_name, config)) - configs = list(configs) - - # Figure out all the projects that will be generated and their guids - project_objects = _CreateProjectObjects(target_list, target_dicts, options, - msvs_version) - - # Generate each project. - missing_sources = [] - for project in project_objects.values(): - fixpath_prefix = project.fixpath_prefix - missing_sources.extend(_GenerateProject(project, options, msvs_version, - generator_flags)) - fixpath_prefix = None - - for build_file in data: - # Validate build_file extension - if not build_file.endswith('.gyp'): - continue - sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln' - if options.generator_output: - sln_path = os.path.join(options.generator_output, sln_path) - # Get projects in the solution, and their dependents. - sln_projects = gyp.common.BuildFileTargets(target_list, build_file) - sln_projects += gyp.common.DeepDependencyTargets(target_dicts, sln_projects) - # Create folder hierarchy. - root_entries = _GatherSolutionFolders( - sln_projects, project_objects, flat=msvs_version.FlatSolution()) - # Create solution. 
- sln = MSVSNew.MSVSSolution(sln_path, - entries=root_entries, - variants=configs, - websiteProperties=False, - version=msvs_version) - sln.Write() - - if missing_sources: - error_message = "Missing input files:\n" + \ - '\n'.join(set(missing_sources)) - if generator_flags.get('msvs_error_on_missing_sources', False): - raise GypError(error_message) - else: - print >> sys.stdout, "Warning: " + error_message - - -def _GenerateMSBuildFiltersFile(filters_path, source_files, - extension_to_rule_name): - """Generate the filters file. - - This file is used by Visual Studio to organize the presentation of source - files into folders. - - Arguments: - filters_path: The path of the file to be created. - source_files: The hierarchical structure of all the sources. - extension_to_rule_name: A dictionary mapping file extensions to rules. - """ - filter_group = [] - source_group = [] - _AppendFiltersForMSBuild('', source_files, extension_to_rule_name, - filter_group, source_group) - if filter_group: - content = ['Project', - {'ToolsVersion': '4.0', - 'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' - }, - ['ItemGroup'] + filter_group, - ['ItemGroup'] + source_group - ] - easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True) - elif os.path.exists(filters_path): - # We don't need this filter anymore. Delete the old filter file. - os.unlink(filters_path) - - -def _AppendFiltersForMSBuild(parent_filter_name, sources, - extension_to_rule_name, - filter_group, source_group): - """Creates the list of filters and sources to be added in the filter file. - - Args: - parent_filter_name: The name of the filter under which the sources are - found. - sources: The hierarchy of filters and sources to process. - extension_to_rule_name: A dictionary mapping file extensions to rules. - filter_group: The list to which filter entries will be appended. - source_group: The list to which source entries will be appeneded. - """ - for source in sources: - if isinstance(source, MSVSProject.Filter): - # We have a sub-filter. Create the name of that sub-filter. - if not parent_filter_name: - filter_name = source.name - else: - filter_name = '%s\\%s' % (parent_filter_name, source.name) - # Add the filter to the group. - filter_group.append( - ['Filter', {'Include': filter_name}, - ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]]) - # Recurse and add its dependents. - _AppendFiltersForMSBuild(filter_name, source.contents, - extension_to_rule_name, - filter_group, source_group) - else: - # It's a source. Create a source entry. - _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name) - source_entry = [element, {'Include': source}] - # Specify the filter it is part of, if any. - if parent_filter_name: - source_entry.append(['Filter', parent_filter_name]) - source_group.append(source_entry) - - -def _MapFileToMsBuildSourceType(source, extension_to_rule_name): - """Returns the group and element type of the source file. - - Arguments: - source: The source file name. - extension_to_rule_name: A dictionary mapping file extensions to rules. 
- - Returns: - A pair of (group this file should be part of, the label of element) - """ - _, ext = os.path.splitext(source) - if ext in extension_to_rule_name: - group = 'rule' - element = extension_to_rule_name[ext] - elif ext in ['.cc', '.cpp', '.c', '.cxx']: - group = 'compile' - element = 'ClCompile' - elif ext in ['.h', '.hxx']: - group = 'include' - element = 'ClInclude' - elif ext == '.rc': - group = 'resource' - element = 'ResourceCompile' - elif ext == '.idl': - group = 'midl' - element = 'Midl' - else: - group = 'none' - element = 'None' - return (group, element) - - -def _GenerateRulesForMSBuild(output_dir, options, spec, - sources, excluded_sources, - props_files_of_rules, targets_files_of_rules, - actions_to_add, extension_to_rule_name): - # MSBuild rules are implemented using three files: an XML file, a .targets - # file and a .props file. - # See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx - # for more details. - rules = spec.get('rules', []) - rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))] - rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))] - - msbuild_rules = [] - for rule in rules_native: - # Skip a rule with no action and no inputs. - if 'action' not in rule and not rule.get('rule_sources', []): - continue - msbuild_rule = MSBuildRule(rule, spec) - msbuild_rules.append(msbuild_rule) - extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name - if msbuild_rules: - base = spec['target_name'] + options.suffix - props_name = base + '.props' - targets_name = base + '.targets' - xml_name = base + '.xml' - - props_files_of_rules.add(props_name) - targets_files_of_rules.add(targets_name) - - props_path = os.path.join(output_dir, props_name) - targets_path = os.path.join(output_dir, targets_name) - xml_path = os.path.join(output_dir, xml_name) - - _GenerateMSBuildRulePropsFile(props_path, msbuild_rules) - _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules) - _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules) - - if rules_external: - _GenerateExternalRules(rules_external, output_dir, spec, - sources, options, actions_to_add) - _AdjustSourcesForRules(rules, sources, excluded_sources) - - -class MSBuildRule(object): - """Used to store information used to generate an MSBuild rule. - - Attributes: - rule_name: The rule name, sanitized to use in XML. - target_name: The name of the target. - after_targets: The name of the AfterTargets element. - before_targets: The name of the BeforeTargets element. - depends_on: The name of the DependsOn element. - compute_output: The name of the ComputeOutput element. - dirs_to_make: The name of the DirsToMake element. - inputs: The name of the _inputs element. - tlog: The name of the _tlog element. - extension: The extension this rule applies to. - description: The message displayed when this rule is invoked. - additional_dependencies: A string listing additional dependencies. - outputs: The outputs of this rule. - command: The command used to run the rule. - """ - - def __init__(self, rule, spec): - self.display_name = rule['rule_name'] - # Assure that the rule name is only characters and numbers - self.rule_name = re.sub(r'\W', '_', self.display_name) - # Create the various element names, following the example set by the - # Visual Studio 2008 to 2010 conversion. I don't know if VS2010 - # is sensitive to the exact names. 
- self.target_name = '_' + self.rule_name - self.after_targets = self.rule_name + 'AfterTargets' - self.before_targets = self.rule_name + 'BeforeTargets' - self.depends_on = self.rule_name + 'DependsOn' - self.compute_output = 'Compute%sOutput' % self.rule_name - self.dirs_to_make = self.rule_name + 'DirsToMake' - self.inputs = self.rule_name + '_inputs' - self.tlog = self.rule_name + '_tlog' - self.extension = rule['extension'] - if not self.extension.startswith('.'): - self.extension = '.' + self.extension - - self.description = MSVSSettings.ConvertVCMacrosToMSBuild( - rule.get('message', self.rule_name)) - old_additional_dependencies = _FixPaths(rule.get('inputs', [])) - self.additional_dependencies = ( - ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_additional_dependencies])) - old_outputs = _FixPaths(rule.get('outputs', [])) - self.outputs = ';'.join([MSVSSettings.ConvertVCMacrosToMSBuild(i) - for i in old_outputs]) - old_command = _BuildCommandLineForRule(spec, rule, has_input_path=True, - do_setup_env=True) - self.command = MSVSSettings.ConvertVCMacrosToMSBuild(old_command) - - -def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules): - """Generate the .props file.""" - content = ['Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}] - for rule in msbuild_rules: - content.extend([ - ['PropertyGroup', - {'Condition': "'$(%s)' == '' and '$(%s)' == '' and " - "'$(ConfigurationType)' != 'Makefile'" % (rule.before_targets, - rule.after_targets) - }, - [rule.before_targets, 'Midl'], - [rule.after_targets, 'CustomBuild'], - ], - ['PropertyGroup', - [rule.depends_on, - {'Condition': "'$(ConfigurationType)' != 'Makefile'"}, - '_SelectedFiles;$(%s)' % rule.depends_on - ], - ], - ['ItemDefinitionGroup', - [rule.rule_name, - ['CommandLineTemplate', rule.command], - ['Outputs', rule.outputs], - ['ExecutionDescription', rule.description], - ['AdditionalDependencies', rule.additional_dependencies], - ], - ] - ]) - easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules): - """Generate the .targets file.""" - content = ['Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003' - } - ] - item_group = [ - 'ItemGroup', - ['PropertyPageSchema', - {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'} - ] - ] - for rule in msbuild_rules: - item_group.append( - ['AvailableItemName', - {'Include': rule.rule_name}, - ['Targets', rule.target_name], - ]) - content.append(item_group) - - for rule in msbuild_rules: - content.append( - ['UsingTask', - {'TaskName': rule.rule_name, - 'TaskFactory': 'XamlTaskFactory', - 'AssemblyName': 'Microsoft.Build.Tasks.v4.0' - }, - ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'], - ]) - for rule in msbuild_rules: - rule_name = rule.rule_name - target_outputs = '%%(%s.Outputs)' % rule_name - target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);' - '$(MSBuildProjectFile)') % (rule_name, rule_name) - rule_inputs = '%%(%s.Identity)' % rule_name - extension_condition = ("'%(Extension)'=='.obj' or " - "'%(Extension)'=='.res' or " - "'%(Extension)'=='.rsc' or " - "'%(Extension)'=='.lib'") - remove_section = [ - 'ItemGroup', - {'Condition': "'@(SelectedFiles)' != ''"}, - [rule_name, - {'Remove': '@(%s)' % rule_name, - 'Condition': "'%(Identity)' != '@(SelectedFiles)'" - } - ] - ] - inputs_section = [ - 'ItemGroup', - [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}] 
- ] - logging_section = [ - 'ItemGroup', - [rule.tlog, - {'Include': '%%(%s.Outputs)' % rule_name, - 'Condition': ("'%%(%s.Outputs)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % - (rule_name, rule_name)) - }, - ['Source', "@(%s, '|')" % rule_name], - ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs], - ], - ] - message_section = [ - 'Message', - {'Importance': 'High', - 'Text': '%%(%s.ExecutionDescription)' % rule_name - } - ] - write_tlog_section = [ - 'WriteLinesToFile', - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - 'File': '$(IntDir)$(ProjectName).write.1.tlog', - 'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog, - rule.tlog) - } - ] - read_tlog_section = [ - 'WriteLinesToFile', - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule.tlog, rule.tlog), - 'File': '$(IntDir)$(ProjectName).read.1.tlog', - 'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog) - } - ] - command_and_input_section = [ - rule_name, - {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != " - "'true'" % (rule_name, rule_name), - 'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name, - 'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name, - 'Inputs': rule_inputs - } - ] - content.extend([ - ['Target', - {'Name': rule.target_name, - 'BeforeTargets': '$(%s)' % rule.before_targets, - 'AfterTargets': '$(%s)' % rule.after_targets, - 'Condition': "'@(%s)' != ''" % rule_name, - 'DependsOnTargets': '$(%s);%s' % (rule.depends_on, - rule.compute_output), - 'Outputs': target_outputs, - 'Inputs': target_inputs - }, - remove_section, - inputs_section, - logging_section, - message_section, - write_tlog_section, - read_tlog_section, - command_and_input_section, - ], - ['PropertyGroup', - ['ComputeLinkInputsTargets', - '$(ComputeLinkInputsTargets);', - '%s;' % rule.compute_output - ], - ['ComputeLibInputsTargets', - '$(ComputeLibInputsTargets);', - '%s;' % rule.compute_output - ], - ], - ['Target', - {'Name': rule.compute_output, - 'Condition': "'@(%s)' != ''" % rule_name - }, - ['ItemGroup', - [rule.dirs_to_make, - {'Condition': "'@(%s)' != '' and " - "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name), - 'Include': '%%(%s.Outputs)' % rule_name - } - ], - ['Link', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ['Lib', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ['ImpLib', - {'Include': '%%(%s.Identity)' % rule.dirs_to_make, - 'Condition': extension_condition - } - ], - ], - ['MakeDir', - {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" % - rule.dirs_to_make) - } - ] - ], - ]) - easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True) - - -def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules): - # Generate the .xml file - content = [ - 'ProjectSchemaDefinitions', - {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;' - 'assembly=Microsoft.Build.Framework'), - 'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml', - 'xmlns:sys': 'clr-namespace:System;assembly=mscorlib', - 'xmlns:transformCallback': - 'Microsoft.Cpp.Dev10.ConvertPropertyCallback' - } - ] - for rule in msbuild_rules: - content.extend([ - ['Rule', - {'Name': rule.rule_name, - 'PageTemplate': 'tool', - 'DisplayName': rule.display_name, - 'Order': '200' - }, - ['Rule.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': rule.rule_name - } 
- ] - ], - ['Rule.Categories', - ['Category', - {'Name': 'General'}, - ['Category.DisplayName', - ['sys:String', 'General'], - ], - ], - ['Category', - {'Name': 'Command Line', - 'Subtype': 'CommandLine' - }, - ['Category.DisplayName', - ['sys:String', 'Command Line'], - ], - ], - ], - ['StringListProperty', - {'Name': 'Inputs', - 'Category': 'Command Line', - 'IsRequired': 'true', - 'Switch': ' ' - }, - ['StringListProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': rule.rule_name, - 'SourceType': 'Item' - } - ] - ], - ], - ['StringProperty', - {'Name': 'CommandLineTemplate', - 'DisplayName': 'Command Line', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['DynamicEnumProperty', - {'Name': rule.before_targets, - 'Category': 'General', - 'EnumProvider': 'Targets', - 'IncludeInCommandLine': 'False' - }, - ['DynamicEnumProperty.DisplayName', - ['sys:String', 'Execute Before'], - ], - ['DynamicEnumProperty.Description', - ['sys:String', 'Specifies the targets for the build customization' - ' to run before.' - ], - ], - ['DynamicEnumProperty.ProviderSettings', - ['NameValuePair', - {'Name': 'Exclude', - 'Value': '^%s|^Compute' % rule.before_targets - } - ] - ], - ['DynamicEnumProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'HasConfigurationCondition': 'true' - } - ] - ], - ], - ['DynamicEnumProperty', - {'Name': rule.after_targets, - 'Category': 'General', - 'EnumProvider': 'Targets', - 'IncludeInCommandLine': 'False' - }, - ['DynamicEnumProperty.DisplayName', - ['sys:String', 'Execute After'], - ], - ['DynamicEnumProperty.Description', - ['sys:String', ('Specifies the targets for the build customization' - ' to run after.') - ], - ], - ['DynamicEnumProperty.ProviderSettings', - ['NameValuePair', - {'Name': 'Exclude', - 'Value': '^%s|^Compute' % rule.after_targets - } - ] - ], - ['DynamicEnumProperty.DataSource', - ['DataSource', - {'Persistence': 'ProjectFile', - 'ItemType': '', - 'HasConfigurationCondition': 'true' - } - ] - ], - ], - ['StringListProperty', - {'Name': 'Outputs', - 'DisplayName': 'Outputs', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['StringProperty', - {'Name': 'ExecutionDescription', - 'DisplayName': 'Execution Description', - 'Visible': 'False', - 'IncludeInCommandLine': 'False' - } - ], - ['StringListProperty', - {'Name': 'AdditionalDependencies', - 'DisplayName': 'Additional Dependencies', - 'IncludeInCommandLine': 'False', - 'Visible': 'false' - } - ], - ['StringProperty', - {'Subtype': 'AdditionalOptions', - 'Name': 'AdditionalOptions', - 'Category': 'Command Line' - }, - ['StringProperty.DisplayName', - ['sys:String', 'Additional Options'], - ], - ['StringProperty.Description', - ['sys:String', 'Additional Options'], - ], - ], - ], - ['ItemType', - {'Name': rule.rule_name, - 'DisplayName': rule.display_name - } - ], - ['FileExtension', - {'Name': '*' + rule.extension, - 'ContentType': rule.rule_name - } - ], - ['ContentType', - {'Name': rule.rule_name, - 'DisplayName': '', - 'ItemType': rule.rule_name - } - ] - ]) - easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True) - - -def _GetConfigurationAndPlatform(name, settings): - configuration = name.rsplit('_', 1)[0] - platform = settings.get('msvs_configuration_platform', 'Win32') - return (configuration, platform) - - -def _GetConfigurationCondition(name, settings): - return (r"'$(Configuration)|$(Platform)'=='%s|%s'" % - _GetConfigurationAndPlatform(name, settings)) - - -def 
_GetMSBuildProjectConfigurations(configurations): - group = ['ItemGroup', {'Label': 'ProjectConfigurations'}] - for (name, settings) in sorted(configurations.iteritems()): - configuration, platform = _GetConfigurationAndPlatform(name, settings) - designation = '%s|%s' % (configuration, platform) - group.append( - ['ProjectConfiguration', {'Include': designation}, - ['Configuration', configuration], - ['Platform', platform]]) - return [group] - - -def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name): - namespace = os.path.splitext(gyp_file_name)[0] - return [ - ['PropertyGroup', {'Label': 'Globals'}, - ['ProjectGuid', guid], - ['Keyword', 'Win32Proj'], - ['RootNamespace', namespace], - ] - ] - - -def _GetMSBuildConfigurationDetails(spec, build_file): - properties = {} - for name, settings in spec['configurations'].iteritems(): - msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file) - condition = _GetConfigurationCondition(name, settings) - character_set = msbuild_attributes.get('CharacterSet') - _AddConditionalProperty(properties, condition, 'ConfigurationType', - msbuild_attributes['ConfigurationType']) - if character_set: - _AddConditionalProperty(properties, condition, 'CharacterSet', - character_set) - return _GetMSBuildPropertyGroup(spec, 'Configuration', properties) - - -def _GetMSBuildLocalProperties(msbuild_toolset): - # Currently the only local property we support is PlatformToolset - properties = {} - if msbuild_toolset: - properties = [ - ['PropertyGroup', {'Label': 'Locals'}, - ['PlatformToolset', msbuild_toolset], - ] - ] - return properties - - -def _GetMSBuildPropertySheets(configurations): - user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props' - additional_props = {} - props_specified = False - for name, settings in sorted(configurations.iteritems()): - configuration = _GetConfigurationCondition(name, settings) - if settings.has_key('msbuild_props'): - additional_props[configuration] = _FixPaths(settings['msbuild_props']) - props_specified = True - else: - additional_props[configuration] = '' - - if not props_specified: - return [ - ['ImportGroup', - {'Label': 'PropertySheets'}, - ['Import', - {'Project': user_props, - 'Condition': "exists('%s')" % user_props, - 'Label': 'LocalAppDataPlatform' - } - ] - ] - ] - else: - sheets = [] - for condition, props in additional_props.iteritems(): - import_group = [ - 'ImportGroup', - {'Label': 'PropertySheets', - 'Condition': condition - }, - ['Import', - {'Project': user_props, - 'Condition': "exists('%s')" % user_props, - 'Label': 'LocalAppDataPlatform' - } - ] - ] - for props_file in props: - import_group.append(['Import', {'Project':props_file}]) - sheets.append(import_group) - return sheets - -def _ConvertMSVSBuildAttributes(spec, config, build_file): - config_type = _GetMSVSConfigurationType(spec, build_file) - msvs_attributes = _GetMSVSAttributes(spec, config, config_type) - msbuild_attributes = {} - for a in msvs_attributes: - if a in ['IntermediateDirectory', 'OutputDirectory']: - directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a]) - if not directory.endswith('\\'): - directory += '\\' - msbuild_attributes[a] = directory - elif a == 'CharacterSet': - msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a]) - elif a == 'ConfigurationType': - msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a]) - else: - print 'Warning: Do not know how to convert MSVS attribute ' + a - return msbuild_attributes - - -def _ConvertMSVSCharacterSet(char_set): - 
if char_set.isdigit(): - char_set = { - '0': 'MultiByte', - '1': 'Unicode', - '2': 'MultiByte', - }[char_set] - return char_set - - -def _ConvertMSVSConfigurationType(config_type): - if config_type.isdigit(): - config_type = { - '1': 'Application', - '2': 'DynamicLibrary', - '4': 'StaticLibrary', - '10': 'Utility' - }[config_type] - return config_type - - -def _GetMSBuildAttributes(spec, config, build_file): - if 'msbuild_configuration_attributes' not in config: - msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file) - - else: - config_type = _GetMSVSConfigurationType(spec, build_file) - config_type = _ConvertMSVSConfigurationType(config_type) - msbuild_attributes = config.get('msbuild_configuration_attributes', {}) - msbuild_attributes.setdefault('ConfigurationType', config_type) - output_dir = msbuild_attributes.get('OutputDirectory', - '$(SolutionDir)$(Configuration)') - msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\' - if 'IntermediateDirectory' not in msbuild_attributes: - intermediate = _FixPath('$(Configuration)') + '\\' - msbuild_attributes['IntermediateDirectory'] = intermediate - if 'CharacterSet' in msbuild_attributes: - msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet( - msbuild_attributes['CharacterSet']) - if 'TargetName' not in msbuild_attributes: - prefix = spec.get('product_prefix', '') - product_name = spec.get('product_name', '$(ProjectName)') - target_name = prefix + product_name - msbuild_attributes['TargetName'] = target_name - - # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile' - # (depending on the tool used) to avoid MSB8012 warning. - msbuild_tool_map = { - 'executable': 'Link', - 'shared_library': 'Link', - 'loadable_module': 'Link', - 'static_library': 'Lib', - } - msbuild_tool = msbuild_tool_map.get(spec['type']) - if msbuild_tool: - msbuild_settings = config['finalized_msbuild_settings'] - out_file = msbuild_settings[msbuild_tool].get('OutputFile') - if out_file: - msbuild_attributes['TargetPath'] = _FixPath(out_file) - - return msbuild_attributes - - -def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file): - # TODO(jeanluc) We could optimize out the following and do it only if - # there are actions. - # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'. - new_paths = [] - cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0] - if cygwin_dirs: - cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs) - new_paths.append(cyg_path) - # TODO(jeanluc) Change the convention to have both a cygwin_dir and a - # python_dir. 
- python_path = cyg_path.replace('cygwin\\bin', 'python_26') - new_paths.append(python_path) - if new_paths: - new_paths = '$(ExecutablePath);' + ';'.join(new_paths) - - properties = {} - for (name, configuration) in sorted(configurations.iteritems()): - condition = _GetConfigurationCondition(name, configuration) - attributes = _GetMSBuildAttributes(spec, configuration, build_file) - msbuild_settings = configuration['finalized_msbuild_settings'] - _AddConditionalProperty(properties, condition, 'IntDir', - attributes['IntermediateDirectory']) - _AddConditionalProperty(properties, condition, 'OutDir', - attributes['OutputDirectory']) - _AddConditionalProperty(properties, condition, 'TargetName', - attributes['TargetName']) - - if attributes.get('TargetPath'): - _AddConditionalProperty(properties, condition, 'TargetPath', - attributes['TargetPath']) - - if new_paths: - _AddConditionalProperty(properties, condition, 'ExecutablePath', - new_paths) - tool_settings = msbuild_settings.get('', {}) - for name, value in sorted(tool_settings.iteritems()): - formatted_value = _GetValueFormattedForMSBuild('', name, value) - _AddConditionalProperty(properties, condition, name, formatted_value) - return _GetMSBuildPropertyGroup(spec, None, properties) - - -def _AddConditionalProperty(properties, condition, name, value): - """Adds a property / conditional value pair to a dictionary. - - Arguments: - properties: The dictionary to be modified. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - condition: The condition under which the named property has the value. - name: The name of the property. - value: The value of the property. - """ - if name not in properties: - properties[name] = {} - values = properties[name] - if value not in values: - values[value] = [] - conditions = values[value] - conditions.append(condition) - - -# Regex for msvs variable references ( i.e. $(FOO) ). -MSVS_VARIABLE_REFERENCE = re.compile('\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)') - - -def _GetMSBuildPropertyGroup(spec, label, properties): - """Returns a PropertyGroup definition for the specified properties. - - Arguments: - spec: The target project dict. - label: An optional label for the PropertyGroup. - properties: The dictionary to be converted. The key is the name of the - property. The value is itself a dictionary; its key is the value and - the value a list of condition for which this value is true. - """ - group = ['PropertyGroup'] - if label: - group.append({'Label': label}) - num_configurations = len(spec['configurations']) - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_varible. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - edges = set() - for value in sorted(properties[node].keys()): - # Add to edges all $(...) references to variables. - # - # Variable references that refer to names not in properties are excluded - # These can exist for instance to refer built in definitions like - # $(SolutionDir). - # - # Self references are ignored. Self reference is used in a few places to - # append to the default value. I.e. 
PATH=$(PATH);other_path - edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value) - if v in properties and v != node])) - return edges - properties_ordered = gyp.common.TopologicallySorted( - properties.keys(), GetEdges) - # Walk properties in the reverse of a topological sort on - # user_of_variable -> used_variable as this ensures variables are - # defined before they are used. - # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - for name in reversed(properties_ordered): - values = properties[name] - for value, conditions in sorted(values.iteritems()): - if len(conditions) == num_configurations: - # If the value is the same all configurations, - # just add one unconditional entry. - group.append([name, value]) - else: - for condition in conditions: - group.append([name, {'Condition': condition}, value]) - return [group] - - -def _GetMSBuildToolSettingsSections(spec, configurations): - groups = [] - for (name, configuration) in sorted(configurations.iteritems()): - msbuild_settings = configuration['finalized_msbuild_settings'] - group = ['ItemDefinitionGroup', - {'Condition': _GetConfigurationCondition(name, configuration)} - ] - for tool_name, tool_settings in sorted(msbuild_settings.iteritems()): - # Skip the tool named '' which is a holder of global settings handled - # by _GetMSBuildConfigurationGlobalProperties. - if tool_name: - if tool_settings: - tool = [tool_name] - for name, value in sorted(tool_settings.iteritems()): - formatted_value = _GetValueFormattedForMSBuild(tool_name, name, - value) - tool.append([name, formatted_value]) - group.append(tool) - groups.append(group) - return groups - - -def _FinalizeMSBuildSettings(spec, configuration): - if 'msbuild_settings' in configuration: - converted = False - msbuild_settings = configuration['msbuild_settings'] - MSVSSettings.ValidateMSBuildSettings(msbuild_settings) - else: - converted = True - msvs_settings = configuration.get('msvs_settings', {}) - msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings) - include_dirs, resource_include_dirs = _GetIncludeDirs(configuration) - libraries = _GetLibraries(spec) - out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True) - defines = _GetDefines(configuration) - if converted: - # Visual Studio 2010 has TR1 - defines = [d for d in defines if d != '_HAS_TR1=0'] - # Warn of ignored settings - ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files'] - for ignored_setting in ignored_settings: - value = configuration.get(ignored_setting) - if value: - print ('Warning: The automatic conversion to MSBuild does not handle ' - '%s. Ignoring setting of %s' % (ignored_setting, str(value))) - - defines = [_EscapeCppDefineForMSBuild(d) for d in defines] - disabled_warnings = _GetDisabledWarnings(configuration) - # TODO(jeanluc) Validate & warn that we don't translate - # prebuild = configuration.get('msvs_prebuild') - # postbuild = configuration.get('msvs_postbuild') - def_file = _GetModuleDefinition(spec) - precompiled_header = configuration.get('msvs_precompiled_header') - - # Add the information to the appropriate tool - # TODO(jeanluc) We could optimize and generate these settings only if - # the corresponding files are found, e.g. don't generate ResourceCompile - # if you don't have any resources. 
- _ToolAppend(msbuild_settings, 'ClCompile', - 'AdditionalIncludeDirectories', include_dirs) - _ToolAppend(msbuild_settings, 'ResourceCompile', - 'AdditionalIncludeDirectories', resource_include_dirs) - # Add in libraries, note that even for empty libraries, we want this - # set, to prevent inheriting default libraries from the enviroment. - _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies', - libraries) - if out_file: - _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file, - only_if_unset=True) - # Add defines. - _ToolAppend(msbuild_settings, 'ClCompile', - 'PreprocessorDefinitions', defines) - _ToolAppend(msbuild_settings, 'ResourceCompile', - 'PreprocessorDefinitions', defines) - # Add disabled warnings. - _ToolAppend(msbuild_settings, 'ClCompile', - 'DisableSpecificWarnings', disabled_warnings) - # Turn on precompiled headers if appropriate. - if precompiled_header: - precompiled_header = os.path.split(precompiled_header)[1] - _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use') - _ToolAppend(msbuild_settings, 'ClCompile', - 'PrecompiledHeaderFile', precompiled_header) - _ToolAppend(msbuild_settings, 'ClCompile', - 'ForcedIncludeFiles', precompiled_header) - # Loadable modules don't generate import libraries; - # tell dependent projects to not expect one. - if spec['type'] == 'loadable_module': - _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true') - # Set the module definition file if any. - if def_file: - _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file) - configuration['finalized_msbuild_settings'] = msbuild_settings - - -def _GetValueFormattedForMSBuild(tool_name, name, value): - if type(value) == list: - # For some settings, VS2010 does not automatically extends the settings - # TODO(jeanluc) Is this what we want? - if name in ['AdditionalIncludeDirectories', - 'AdditionalLibraryDirectories', - 'AdditionalOptions', - 'DelayLoadDLLs', - 'DisableSpecificWarnings', - 'PreprocessorDefinitions']: - value.append('%%(%s)' % name) - # For most tools, entries in a list should be separated with ';' but some - # settings use a space. Check for those first. - exceptions = { - 'ClCompile': ['AdditionalOptions'], - 'Link': ['AdditionalOptions'], - 'Lib': ['AdditionalOptions']} - if tool_name in exceptions and name in exceptions[tool_name]: - char = ' ' - else: - char = ';' - formatted_value = char.join( - [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value]) - else: - formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value) - return formatted_value - - -def _VerifySourcesExist(sources, root_dir): - """Verifies that all source files exist on disk. - - Checks that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation but no otherwise - visible errors. - - Arguments: - sources: A recursive list of Filter/file names. - root_dir: The root directory for the relative path names. - Returns: - A list of source files that cannot be found on disk. 
- """ - missing_sources = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - missing_sources.extend(_VerifySourcesExist(source.contents, root_dir)) - else: - if '$' not in source: - full_path = os.path.join(root_dir, source) - if not os.path.exists(full_path): - missing_sources.append(full_path) - return missing_sources - - -def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name, - actions_spec, sources_handled_by_action, list_excluded): - groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule'] - grouped_sources = {} - for g in groups: - grouped_sources[g] = [] - - _AddSources2(spec, sources, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, list_excluded) - sources = [] - for g in groups: - if grouped_sources[g]: - sources.append(['ItemGroup'] + grouped_sources[g]) - if actions_spec: - sources.append(['ItemGroup'] + actions_spec) - return sources - - -def _AddSources2(spec, sources, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, - list_excluded): - extensions_excluded_from_precompile = [] - for source in sources: - if isinstance(source, MSVSProject.Filter): - _AddSources2(spec, source.contents, exclusions, grouped_sources, - extension_to_rule_name, sources_handled_by_action, - list_excluded) - else: - if not source in sources_handled_by_action: - detail = [] - excluded_configurations = exclusions.get(source, []) - if len(excluded_configurations) == len(spec['configurations']): - detail.append(['ExcludedFromBuild', 'true']) - else: - for config_name, configuration in sorted(excluded_configurations): - condition = _GetConfigurationCondition(config_name, configuration) - detail.append(['ExcludedFromBuild', - {'Condition': condition}, - 'true']) - # Add precompile if needed - for config_name, configuration in spec['configurations'].iteritems(): - precompiled_source = configuration.get('msvs_precompiled_source', '') - if precompiled_source != '': - precompiled_source = _FixPath(precompiled_source) - if not extensions_excluded_from_precompile: - # If the precompiled header is generated by a C source, we must - # not try to use it for C++ sources, and vice versa. - basename, extension = os.path.splitext(precompiled_source) - if extension == '.c': - extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx'] - else: - extensions_excluded_from_precompile = ['.c'] - - if precompiled_source == source: - condition = _GetConfigurationCondition(config_name, configuration) - detail.append(['PrecompiledHeader', - {'Condition': condition}, - 'Create' - ]) - else: - # Turn off precompiled header usage for source files of a - # different type than the file that generated the - # precompiled header. 
- for extension in extensions_excluded_from_precompile: - if source.endswith(extension): - detail.append(['PrecompiledHeader', '']) - detail.append(['ForcedIncludeFiles', '']) - - group, element = _MapFileToMsBuildSourceType(source, - extension_to_rule_name) - grouped_sources[group].append([element, {'Include': source}] + detail) - - -def _GetMSBuildProjectReferences(project): - references = [] - if project.dependencies: - group = ['ItemGroup'] - for dependency in project.dependencies: - guid = dependency.guid - project_dir = os.path.split(project.path)[0] - relative_path = gyp.common.RelativePath(dependency.path, project_dir) - project_ref = ['ProjectReference', - {'Include': relative_path}, - ['Project', guid], - ['ReferenceOutputAssembly', 'false'] - ] - for config in dependency.spec.get('configurations', {}).itervalues(): - # If it's disabled in any config, turn it off in the reference. - if config.get('msvs_2010_disable_uldi_when_referenced', 0): - project_ref.append(['UseLibraryDependencyInputs', 'false']) - break - group.append(project_ref) - references.append(group) - return references - - -def _GenerateMSBuildProject(project, options, version, generator_flags): - spec = project.spec - configurations = spec['configurations'] - project_dir, project_file_name = os.path.split(project.path) - msbuildproj_dir = os.path.dirname(project.path) - if msbuildproj_dir and not os.path.exists(msbuildproj_dir): - os.makedirs(msbuildproj_dir) - # Prepare list of sources and excluded sources. - gyp_path = _NormalizedSource(project.build_file) - relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir) - - gyp_file = os.path.split(project.build_file)[1] - sources, excluded_sources = _PrepareListOfSources(spec, generator_flags, - gyp_file) - # Add rules. - actions_to_add = {} - props_files_of_rules = set() - targets_files_of_rules = set() - extension_to_rule_name = {} - list_excluded = generator_flags.get('msvs_list_excluded_files', True) - _GenerateRulesForMSBuild(project_dir, options, spec, - sources, excluded_sources, - props_files_of_rules, targets_files_of_rules, - actions_to_add, extension_to_rule_name) - sources, excluded_sources, excluded_idl = ( - _AdjustSourcesAndConvertToFilterHierarchy(spec, options, - project_dir, sources, - excluded_sources, - list_excluded)) - _AddActions(actions_to_add, spec, project.build_file) - _AddCopies(actions_to_add, spec) - - # NOTE: this stanza must appear after all actions have been decided. - # Don't excluded sources with actions attached, or they won't run. 
- excluded_sources = _FilterActionsFromExcluded( - excluded_sources, actions_to_add) - - exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl) - actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild( - spec, actions_to_add) - - _GenerateMSBuildFiltersFile(project.path + '.filters', sources, - extension_to_rule_name) - missing_sources = _VerifySourcesExist(sources, project_dir) - - for configuration in configurations.itervalues(): - _FinalizeMSBuildSettings(spec, configuration) - - # Add attributes to root element - - import_default_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]] - import_cpp_props_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]] - import_cpp_targets_section = [ - ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]] - macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]] - - content = [ - 'Project', - {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003', - 'ToolsVersion': version.ProjectVersion(), - 'DefaultTargets': 'Build' - }] - - content += _GetMSBuildProjectConfigurations(configurations) - content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name) - content += import_default_section - content += _GetMSBuildConfigurationDetails(spec, project.build_file) - content += _GetMSBuildLocalProperties(project.msbuild_toolset) - content += import_cpp_props_section - content += _GetMSBuildExtensions(props_files_of_rules) - content += _GetMSBuildPropertySheets(configurations) - content += macro_section - content += _GetMSBuildConfigurationGlobalProperties(spec, configurations, - project.build_file) - content += _GetMSBuildToolSettingsSections(spec, configurations) - content += _GetMSBuildSources( - spec, sources, exclusions, extension_to_rule_name, actions_spec, - sources_handled_by_action, list_excluded) - content += _GetMSBuildProjectReferences(project) - content += import_cpp_targets_section - content += _GetMSBuildExtensionTargets(targets_files_of_rules) - - # TODO(jeanluc) File a bug to get rid of runas. We had in MSVS: - # has_run_as = _WriteMSVSUserFile(project.path, version, spec) - - easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True) - - return missing_sources - - -def _GetMSBuildExtensions(props_files_of_rules): - extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}] - for props_file in props_files_of_rules: - extensions.append(['Import', {'Project': props_file}]) - return [extensions] - - -def _GetMSBuildExtensionTargets(targets_files_of_rules): - targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}] - for targets_file in sorted(targets_files_of_rules): - targets_node.append(['Import', {'Project': targets_file}]) - return [targets_node] - - -def _GenerateActionsForMSBuild(spec, actions_to_add): - """Add actions accumulated into an actions_to_add, merging as needed. - - Arguments: - spec: the target project dict - actions_to_add: dictionary keyed on input name, which maps to a list of - dicts describing the actions attached to that input file. - - Returns: - A pair of (action specification, the sources handled by this action). 
- """ - sources_handled_by_action = set() - actions_spec = [] - for primary_input, actions in actions_to_add.iteritems(): - inputs = set() - outputs = set() - descriptions = [] - commands = [] - for action in actions: - inputs.update(set(action['inputs'])) - outputs.update(set(action['outputs'])) - descriptions.append(action['description']) - cmd = action['command'] - # For most actions, add 'call' so that actions that invoke batch files - # return and continue executing. msbuild_use_call provides a way to - # disable this but I have not seen any adverse effect from doing that - # for everything. - if action.get('msbuild_use_call', True): - cmd = 'call ' + cmd - commands.append(cmd) - # Add the custom build action for one input file. - description = ', and also '.join(descriptions) - - # We can't join the commands simply with && because the command line will - # get too long. See also _AddActions: cygwin's setup_env mustn't be called - # for every invocation or the command that sets the PATH will grow too - # long. - command = ( - '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands)) - _AddMSBuildAction(spec, - primary_input, - inputs, - outputs, - command, - description, - sources_handled_by_action, - actions_spec) - return actions_spec, sources_handled_by_action - - -def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description, - sources_handled_by_action, actions_spec): - command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd) - primary_input = _FixPath(primary_input) - inputs_array = _FixPaths(inputs) - outputs_array = _FixPaths(outputs) - additional_inputs = ';'.join([i for i in inputs_array - if i != primary_input]) - outputs = ';'.join(outputs_array) - sources_handled_by_action.add(primary_input) - action_spec = ['CustomBuild', {'Include': primary_input}] - action_spec.extend( - # TODO(jeanluc) 'Document' for all or just if as_sources? - [['FileType', 'Document'], - ['Command', command], - ['Message', description], - ['Outputs', outputs] - ]) - if additional_inputs: - action_spec.append(['AdditionalInputs', additional_inputs]) - actions_spec.append(action_spec) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/msvs_test.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/msvs_test.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/msvs_test.py 2012-09-24 21:42:55.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/msvs_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the msvs.py file. 
""" - -import gyp.generator.msvs as msvs -import unittest -import StringIO - - -class TestSequenceFunctions(unittest.TestCase): - - def setUp(self): - self.stderr = StringIO.StringIO() - - def test_GetLibraries(self): - self.assertEqual( - msvs._GetLibraries({}), - []) - self.assertEqual( - msvs._GetLibraries({'libraries': []}), - []) - self.assertEqual( - msvs._GetLibraries({'other':'foo', 'libraries': ['a.lib']}), - ['a.lib']) - self.assertEqual( - msvs._GetLibraries({'libraries': ['-la']}), - ['a.lib']) - self.assertEqual( - msvs._GetLibraries({'libraries': ['a.lib', 'b.lib', 'c.lib', '-lb.lib', - '-lb.lib', 'd.lib', 'a.lib']}), - ['c.lib', 'b.lib', 'd.lib', 'a.lib']) - -if __name__ == '__main__': - unittest.main() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/ninja.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/ninja.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/ninja.py 2013-02-26 02:59:54.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/ninja.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1804 +0,0 @@ -# Copyright (c) 2013 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import copy -import hashlib -import multiprocessing -import os.path -import re -import signal -import subprocess -import sys -import gyp -import gyp.common -import gyp.msvs_emulation -import gyp.MSVSUtil as MSVSUtil -import gyp.xcode_emulation - -from gyp.common import GetEnvironFallback -import gyp.ninja_syntax as ninja_syntax - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_PREFIX': 'lib', - - # Gyp expects the following variables to be expandable by the build - # system to the appropriate locations. Ninja prefers paths to be - # known at gyp time. To resolve this, introduce special - # variables starting with $! and $| (which begin with a $ so gyp knows it - # should be treated specially, but is otherwise an invalid - # ninja/shell variable) that are passed to gyp here but expanded - # before writing out into the target .ninja files; see - # ExpandSpecial. - # $! is used for variables that represent a path and that can only appear at - # the start of a string, while $| is used for variables that can appear - # anywhere in a string. - 'INTERMEDIATE_DIR': '$!INTERMEDIATE_DIR', - 'SHARED_INTERMEDIATE_DIR': '$!PRODUCT_DIR/gen', - 'PRODUCT_DIR': '$!PRODUCT_DIR', - 'CONFIGURATION_NAME': '$|CONFIGURATION_NAME', - - # Special variables that may be used by gyp 'rule' targets. - # We generate definitions for these variables on the fly when processing a - # rule. - 'RULE_INPUT_ROOT': '${root}', - 'RULE_INPUT_DIRNAME': '${dirname}', - 'RULE_INPUT_PATH': '${source}', - 'RULE_INPUT_EXT': '${ext}', - 'RULE_INPUT_NAME': '${name}', -} - -# Placates pylint. -generator_additional_non_configuration_keys = [] -generator_additional_path_sections = [] -generator_extra_sources_for_rules = [] - -# TODO: figure out how to not build extra host objects in the non-cross-compile -# case when this is enabled, and enable unconditionally. 
-generator_supports_multiple_toolsets = ( - os.environ.get('GYP_CROSSCOMPILE') or - os.environ.get('AR_host') or - os.environ.get('CC_host') or - os.environ.get('CXX_host') or - os.environ.get('AR_target') or - os.environ.get('CC_target') or - os.environ.get('CXX_target')) - - -def StripPrefix(arg, prefix): - if arg.startswith(prefix): - return arg[len(prefix):] - return arg - - -def QuoteShellArgument(arg, flavor): - """Quote a string such that it will be interpreted as a single argument - by the shell.""" - # Rather than attempting to enumerate the bad shell characters, just - # whitelist common OK ones and quote anything else. - if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg): - return arg # No quoting necessary. - if flavor == 'win': - return gyp.msvs_emulation.QuoteForRspFile(arg) - return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'" - - -def Define(d, flavor): - """Takes a preprocessor define and returns a -D parameter that's ninja- and - shell-escaped.""" - if flavor == 'win': - # cl.exe replaces literal # characters with = in preprocesor definitions for - # some reason. Octal-encode to work around that. - d = d.replace('#', '\\%03o' % ord('#')) - return QuoteShellArgument(ninja_syntax.escape('-D' + d), flavor) - - -class Target: - """Target represents the paths used within a single gyp target. - - Conceptually, building a single target A is a series of steps: - - 1) actions/rules/copies generates source/resources/etc. - 2) compiles generates .o files - 3) link generates a binary (library/executable) - 4) bundle merges the above in a mac bundle - - (Any of these steps can be optional.) - - From a build ordering perspective, a dependent target B could just - depend on the last output of this series of steps. - - But some dependent commands sometimes need to reach inside the box. - For example, when linking B it needs to get the path to the static - library generated by A. - - This object stores those paths. To keep things simple, member - variables only store concrete paths to single files, while methods - compute derived values like "the last output of the target". - """ - def __init__(self, type): - # Gyp type ("static_library", etc.) of this target. - self.type = type - # File representing whether any input dependencies necessary for - # dependent actions have completed. - self.preaction_stamp = None - # File representing whether any input dependencies necessary for - # dependent compiles have completed. - self.precompile_stamp = None - # File representing the completion of actions/rules/copies, if any. - self.actions_stamp = None - # Path to the output of the link step, if any. - self.binary = None - # Path to the file representing the completion of building the bundle, - # if any. - self.bundle = None - # On Windows, incremental linking requires linking against all the .objs - # that compose a .lib (rather than the .lib itself). That list is stored - # here. - self.component_objs = None - # Windows only. The import .lib is the output of a build step, but - # because dependents only link against the lib (not both the lib and the - # dll) we keep track of the import library here. - self.import_lib = None - - def Linkable(self): - """Return true if this is a target that can be linked against.""" - return self.type in ('static_library', 'shared_library') - - def UsesToc(self, flavor): - """Return true if the target should produce a restat rule based on a TOC - file.""" - # For bundles, the .TOC should be produced for the binary, not for - # FinalOutput(). 
But the naive approach would put the TOC file into the - # bundle, so don't do this for bundles for now. - if flavor == 'win' or self.bundle: - return False - return self.type in ('shared_library', 'loadable_module') - - def PreActionInput(self, flavor): - """Return the path, if any, that should be used as a dependency of - any dependent action step.""" - if self.UsesToc(flavor): - return self.FinalOutput() + '.TOC' - return self.FinalOutput() or self.preaction_stamp - - def PreCompileInput(self): - """Return the path, if any, that should be used as a dependency of - any dependent compile step.""" - return self.actions_stamp or self.precompile_stamp - - def FinalOutput(self): - """Return the last output of the target, which depends on all prior - steps.""" - return self.bundle or self.binary or self.actions_stamp - - -# A small discourse on paths as used within the Ninja build: -# All files we produce (both at gyp and at build time) appear in the -# build directory (e.g. out/Debug). -# -# Paths within a given .gyp file are always relative to the directory -# containing the .gyp file. Call these "gyp paths". This includes -# sources as well as the starting directory a given gyp rule/action -# expects to be run from. We call the path from the source root to -# the gyp file the "base directory" within the per-.gyp-file -# NinjaWriter code. -# -# All paths as written into the .ninja files are relative to the build -# directory. Call these paths "ninja paths". -# -# We translate between these two notions of paths with two helper -# functions: -# -# - GypPathToNinja translates a gyp path (i.e. relative to the .gyp file) -# into the equivalent ninja path. -# -# - GypPathToUniqueOutput translates a gyp path into a ninja path to write -# an output file; the result can be namespaced such that it is unique -# to the input file name as well as the output target name. - -class NinjaWriter: - def __init__(self, qualified_target, target_outputs, base_dir, build_dir, - output_file, flavor, toplevel_dir=None): - """ - base_dir: path from source root to directory containing this gyp file, - by gyp semantics, all input paths are relative to this - build_dir: path from source root to build output - toplevel_dir: path to the toplevel directory - """ - - self.qualified_target = qualified_target - self.target_outputs = target_outputs - self.base_dir = base_dir - self.build_dir = build_dir - self.ninja = ninja_syntax.Writer(output_file) - self.flavor = flavor - self.abs_build_dir = None - if toplevel_dir is not None: - self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir, - build_dir)) - self.obj_ext = '.obj' if flavor == 'win' else '.o' - if flavor == 'win': - # See docstring of msvs_emulation.GenerateEnvironmentFiles(). - self.win_env = {} - for arch in ('x86', 'x64'): - self.win_env[arch] = 'environment.' + arch - - # Relative path from build output dir to base dir. - build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir) - self.build_to_base = os.path.join(build_to_top, base_dir) - # Relative path from base dir to build dir. - base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir) - self.base_to_build = os.path.join(base_to_top, build_dir) - - def ExpandSpecial(self, path, product_dir=None): - """Expand specials like $!PRODUCT_DIR in |path|. - - If |product_dir| is None, assumes the cwd is already the product - dir. Otherwise, |product_dir| is the relative path to the product - dir. 
- """ - - PRODUCT_DIR = '$!PRODUCT_DIR' - if PRODUCT_DIR in path: - if product_dir: - path = path.replace(PRODUCT_DIR, product_dir) - else: - path = path.replace(PRODUCT_DIR + '/', '') - path = path.replace(PRODUCT_DIR + '\\', '') - path = path.replace(PRODUCT_DIR, '.') - - INTERMEDIATE_DIR = '$!INTERMEDIATE_DIR' - if INTERMEDIATE_DIR in path: - int_dir = self.GypPathToUniqueOutput('gen') - # GypPathToUniqueOutput generates a path relative to the product dir, - # so insert product_dir in front if it is provided. - path = path.replace(INTERMEDIATE_DIR, - os.path.join(product_dir or '', int_dir)) - - CONFIGURATION_NAME = '$|CONFIGURATION_NAME' - path = path.replace(CONFIGURATION_NAME, self.config_name) - - return path - - def ExpandRuleVariables(self, path, root, dirname, source, ext, name): - if self.flavor == 'win': - path = self.msvs_settings.ConvertVSMacros( - path, config=self.config_name) - path = path.replace(generator_default_variables['RULE_INPUT_ROOT'], root) - path = path.replace(generator_default_variables['RULE_INPUT_DIRNAME'], - dirname) - path = path.replace(generator_default_variables['RULE_INPUT_PATH'], source) - path = path.replace(generator_default_variables['RULE_INPUT_EXT'], ext) - path = path.replace(generator_default_variables['RULE_INPUT_NAME'], name) - return path - - def GypPathToNinja(self, path, env=None): - """Translate a gyp path to a ninja path, optionally expanding environment - variable references in |path| with |env|. - - See the above discourse on path conversions.""" - if env: - if self.flavor == 'mac': - path = gyp.xcode_emulation.ExpandEnvVars(path, env) - elif self.flavor == 'win': - path = gyp.msvs_emulation.ExpandMacros(path, env) - if path.startswith('$!'): - expanded = self.ExpandSpecial(path) - if self.flavor == 'win': - expanded = os.path.normpath(expanded) - return expanded - if '$|' in path: - path = self.ExpandSpecial(path) - assert '$' not in path, path - return os.path.normpath(os.path.join(self.build_to_base, path)) - - def GypPathToUniqueOutput(self, path, qualified=True): - """Translate a gyp path to a ninja path for writing output. - - If qualified is True, qualify the resulting filename with the name - of the target. This is necessary when e.g. compiling the same - path twice for two separate output targets. - - See the above discourse on path conversions.""" - - path = self.ExpandSpecial(path) - assert not path.startswith('$'), path - - # Translate the path following this scheme: - # Input: foo/bar.gyp, target targ, references baz/out.o - # Output: obj/foo/baz/targ.out.o (if qualified) - # obj/foo/baz/out.o (otherwise) - # (and obj.host instead of obj for cross-compiles) - # - # Why this scheme and not some other one? - # 1) for a given input, you can compute all derived outputs by matching - # its path, even if the input is brought via a gyp file with '..'. - # 2) simple files like libraries and stamps have a simple filename. - - obj = 'obj' - if self.toolset != 'target': - obj += '.' + self.toolset - - path_dir, path_basename = os.path.split(path) - if qualified: - path_basename = self.name + '.' + path_basename - return os.path.normpath(os.path.join(obj, self.base_dir, path_dir, - path_basename)) - - def WriteCollapsedDependencies(self, name, targets): - """Given a list of targets, return a path for a single file - representing the result of building all the targets or None. 
- - Uses a stamp file if necessary.""" - - assert targets == filter(None, targets), targets - if len(targets) == 0: - return None - if len(targets) > 1: - stamp = self.GypPathToUniqueOutput(name + '.stamp') - targets = self.ninja.build(stamp, 'stamp', targets) - self.ninja.newline() - return targets[0] - - def WriteSpec(self, spec, config_name, generator_flags, - case_sensitive_filesystem): - """The main entry point for NinjaWriter: write the build rules for a spec. - - Returns a Target object, which represents the output paths for this spec. - Returns None if there are no outputs (e.g. a settings-only 'none' type - target).""" - - self.config_name = config_name - self.name = spec['target_name'] - self.toolset = spec['toolset'] - config = spec['configurations'][config_name] - self.target = Target(spec['type']) - self.is_standalone_static_library = bool( - spec.get('standalone_static_library', 0)) - - self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec) - self.xcode_settings = self.msvs_settings = None - if self.flavor == 'mac': - self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec) - if self.flavor == 'win': - self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, - generator_flags) - arch = self.msvs_settings.GetArch(config_name) - self.ninja.variable('arch', self.win_env[arch]) - - # Compute predepends for all rules. - # actions_depends is the dependencies this target depends on before running - # any of its action/rule/copy steps. - # compile_depends is the dependencies this target depends on before running - # any of its compile steps. - actions_depends = [] - compile_depends = [] - # TODO(evan): it is rather confusing which things are lists and which - # are strings. Fix these. - if 'dependencies' in spec: - for dep in spec['dependencies']: - if dep in self.target_outputs: - target = self.target_outputs[dep] - actions_depends.append(target.PreActionInput(self.flavor)) - compile_depends.append(target.PreCompileInput()) - actions_depends = filter(None, actions_depends) - compile_depends = filter(None, compile_depends) - actions_depends = self.WriteCollapsedDependencies('actions_depends', - actions_depends) - compile_depends = self.WriteCollapsedDependencies('compile_depends', - compile_depends) - self.target.preaction_stamp = actions_depends - self.target.precompile_stamp = compile_depends - - # Write out actions, rules, and copies. These must happen before we - # compile any sources, so compute a list of predependencies for sources - # while we do it. - extra_sources = [] - mac_bundle_depends = [] - self.target.actions_stamp = self.WriteActionsRulesCopies( - spec, extra_sources, actions_depends, mac_bundle_depends) - - # If we have actions/rules/copies, we depend directly on those, but - # otherwise we depend on dependent target's actions/rules/copies etc. - # We never need to explicitly depend on previous target's link steps, - # because no compile ever depends on them. - compile_depends_stamp = (self.target.actions_stamp or compile_depends) - - # Write out the compilation steps, if any. 
- link_deps = [] - sources = spec.get('sources', []) + extra_sources - if sources: - pch = None - if self.flavor == 'win': - gyp.msvs_emulation.VerifyMissingSources( - sources, self.abs_build_dir, generator_flags, self.GypPathToNinja) - pch = gyp.msvs_emulation.PrecompiledHeader( - self.msvs_settings, config_name, self.GypPathToNinja, - self.GypPathToUniqueOutput, self.obj_ext) - else: - pch = gyp.xcode_emulation.MacPrefixHeader( - self.xcode_settings, self.GypPathToNinja, - lambda path, lang: self.GypPathToUniqueOutput(path + '-' + lang)) - link_deps = self.WriteSources( - config_name, config, sources, compile_depends_stamp, pch, - case_sensitive_filesystem, spec) - # Some actions/rules output 'sources' that are already object files. - link_deps += [self.GypPathToNinja(f) - for f in sources if f.endswith(self.obj_ext)] - - if self.flavor == 'win' and self.target.type == 'static_library': - self.target.component_objs = link_deps - - # Write out a link step, if needed. - output = None - if link_deps or self.target.actions_stamp or actions_depends: - output = self.WriteTarget(spec, config_name, config, link_deps, - self.target.actions_stamp or actions_depends) - if self.is_mac_bundle: - mac_bundle_depends.append(output) - - # Bundle all of the above together, if needed. - if self.is_mac_bundle: - output = self.WriteMacBundle(spec, mac_bundle_depends) - - if not output: - return None - - assert self.target.FinalOutput(), output - return self.target - - def _WinIdlRule(self, source, prebuild, outputs): - """Handle the implicit VS .idl rule for one source file. Fills |outputs| - with files that are generated.""" - outdir, output, vars, flags = self.msvs_settings.GetIdlBuildData( - source, self.config_name) - outdir = self.GypPathToNinja(outdir) - def fix_path(path, rel=None): - path = os.path.join(outdir, path) - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - path = self.ExpandRuleVariables( - path, root, dirname, source, ext, basename) - if rel: - path = os.path.relpath(path, rel) - return path - vars = [(name, fix_path(value, outdir)) for name, value in vars] - output = [fix_path(p) for p in output] - vars.append(('outdir', outdir)) - vars.append(('idlflags', flags)) - input = self.GypPathToNinja(source) - self.ninja.build(output, 'idl', input, - variables=vars, order_only=prebuild) - outputs.extend(output) - - def WriteWinIdlFiles(self, spec, prebuild): - """Writes rules to match MSVS's implicit idl handling.""" - assert self.flavor == 'win' - if self.msvs_settings.HasExplicitIdlRules(spec): - return [] - outputs = [] - for source in filter(lambda x: x.endswith('.idl'), spec['sources']): - self._WinIdlRule(source, prebuild, outputs) - return outputs - - def WriteActionsRulesCopies(self, spec, extra_sources, prebuild, - mac_bundle_depends): - """Write out the Actions, Rules, and Copies steps. 
Return a path - representing the outputs of these steps.""" - outputs = [] - extra_mac_bundle_resources = [] - - if 'actions' in spec: - outputs += self.WriteActions(spec['actions'], extra_sources, prebuild, - extra_mac_bundle_resources) - if 'rules' in spec: - outputs += self.WriteRules(spec['rules'], extra_sources, prebuild, - extra_mac_bundle_resources) - if 'copies' in spec: - outputs += self.WriteCopies(spec['copies'], prebuild, mac_bundle_depends) - - if 'sources' in spec and self.flavor == 'win': - outputs += self.WriteWinIdlFiles(spec, prebuild) - - stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs) - - if self.is_mac_bundle: - mac_bundle_resources = spec.get('mac_bundle_resources', []) + \ - extra_mac_bundle_resources - self.WriteMacBundleResources(mac_bundle_resources, mac_bundle_depends) - self.WriteMacInfoPlist(mac_bundle_depends) - - return stamp - - def GenerateDescription(self, verb, message, fallback): - """Generate and return a description of a build step. - - |verb| is the short summary, e.g. ACTION or RULE. - |message| is a hand-written description, or None if not available. - |fallback| is the gyp-level name of the step, usable as a fallback. - """ - if self.toolset != 'target': - verb += '(%s)' % self.toolset - if message: - return '%s %s' % (verb, self.ExpandSpecial(message)) - else: - return '%s %s: %s' % (verb, self.name, fallback) - - def WriteActions(self, actions, extra_sources, prebuild, - extra_mac_bundle_resources): - # Actions cd into the base directory. - env = self.GetSortedXcodeEnv() - if self.flavor == 'win': - env = self.msvs_settings.GetVSMacroEnv( - '$!PRODUCT_DIR', config=self.config_name) - all_outputs = [] - for action in actions: - # First write out a rule for the action. - name = '%s_%s' % (action['action_name'], - hashlib.md5(self.qualified_target).hexdigest()) - description = self.GenerateDescription('ACTION', - action.get('message', None), - name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(action) - if self.flavor == 'win' else False) - args = action['action'] - rule_name, _ = self.WriteNewNinjaRule(name, args, description, - is_cygwin, env=env) - - inputs = [self.GypPathToNinja(i, env) for i in action['inputs']] - if int(action.get('process_outputs_as_sources', False)): - extra_sources += action['outputs'] - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += action['outputs'] - outputs = [self.GypPathToNinja(o, env) for o in action['outputs']] - - # Then write out an edge using the rule. - self.ninja.build(outputs, rule_name, inputs, - order_only=prebuild) - all_outputs += outputs - - self.ninja.newline() - - return all_outputs - - def WriteRules(self, rules, extra_sources, prebuild, - extra_mac_bundle_resources): - env = self.GetSortedXcodeEnv() - all_outputs = [] - for rule in rules: - # First write out a rule for the rule action. - name = '%s_%s' % (rule['rule_name'], - hashlib.md5(self.qualified_target).hexdigest()) - # Skip a rule with no action and no inputs. 
- if 'action' not in rule and not rule.get('rule_sources', []): - continue - args = rule['action'] - description = self.GenerateDescription( - 'RULE', - rule.get('message', None), - ('%s ' + generator_default_variables['RULE_INPUT_PATH']) % name) - is_cygwin = (self.msvs_settings.IsRuleRunUnderCygwin(rule) - if self.flavor == 'win' else False) - rule_name, args = self.WriteNewNinjaRule( - name, args, description, is_cygwin, env=env) - - # TODO: if the command references the outputs directly, we should - # simplify it to just use $out. - - # Rules can potentially make use of some special variables which - # must vary per source file. - # Compute the list of variables we'll need to provide. - special_locals = ('source', 'root', 'dirname', 'ext', 'name') - needed_variables = set(['source']) - for argument in args: - for var in special_locals: - if ('${%s}' % var) in argument: - needed_variables.add(var) - - def cygwin_munge(path): - if is_cygwin: - return path.replace('\\', '/') - return path - - # For each source file, write an edge that generates all the outputs. - for source in rule.get('rule_sources', []): - dirname, basename = os.path.split(source) - root, ext = os.path.splitext(basename) - - # Gather the list of inputs and outputs, expanding $vars if possible. - outputs = [self.ExpandRuleVariables(o, root, dirname, - source, ext, basename) - for o in rule['outputs']] - inputs = [self.ExpandRuleVariables(i, root, dirname, - source, ext, basename) - for i in rule.get('inputs', [])] - - if int(rule.get('process_outputs_as_sources', False)): - extra_sources += outputs - if int(rule.get('process_outputs_as_mac_bundle_resources', False)): - extra_mac_bundle_resources += outputs - - extra_bindings = [] - for var in needed_variables: - if var == 'root': - extra_bindings.append(('root', cygwin_munge(root))) - elif var == 'dirname': - extra_bindings.append(('dirname', cygwin_munge(dirname))) - elif var == 'source': - # '$source' is a parameter to the rule action, which means - # it shouldn't be converted to a Ninja path. But we don't - # want $!PRODUCT_DIR in there either. - source_expanded = self.ExpandSpecial(source, self.base_to_build) - extra_bindings.append(('source', cygwin_munge(source_expanded))) - elif var == 'ext': - extra_bindings.append(('ext', ext)) - elif var == 'name': - extra_bindings.append(('name', cygwin_munge(basename))) - else: - assert var == None, repr(var) - - inputs = [self.GypPathToNinja(i, env) for i in inputs] - outputs = [self.GypPathToNinja(o, env) for o in outputs] - extra_bindings.append(('unique_name', - hashlib.md5(outputs[0]).hexdigest())) - self.ninja.build(outputs, rule_name, self.GypPathToNinja(source), - implicit=inputs, - order_only=prebuild, - variables=extra_bindings) - - all_outputs.extend(outputs) - - return all_outputs - - def WriteCopies(self, copies, prebuild, mac_bundle_depends): - outputs = [] - env = self.GetSortedXcodeEnv() - for copy in copies: - for path in copy['files']: - # Normalize the path so trailing slashes don't confuse us. - path = os.path.normpath(path) - basename = os.path.split(path)[1] - src = self.GypPathToNinja(path, env) - dst = self.GypPathToNinja(os.path.join(copy['destination'], basename), - env) - outputs += self.ninja.build(dst, 'copy', src, order_only=prebuild) - if self.is_mac_bundle: - # gyp has mac_bundle_resources to copy things into a bundle's - # Resources folder, but there's no built-in way to copy files to other - # places in the bundle. Hence, some targets use copies for this. 
Check - # if this file is copied into the current bundle, and if so add it to - # the bundle depends so that dependent targets get rebuilt if the copy - # input changes. - if dst.startswith(self.xcode_settings.GetBundleContentsFolderPath()): - mac_bundle_depends.append(dst) - - return outputs - - def WriteMacBundleResources(self, resources, bundle_depends): - """Writes ninja edges for 'mac_bundle_resources'.""" - for output, res in gyp.xcode_emulation.GetMacBundleResources( - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.xcode_settings, map(self.GypPathToNinja, resources)): - self.ninja.build(output, 'mac_tool', res, - variables=[('mactool_cmd', 'copy-bundle-resource')]) - bundle_depends.append(output) - - def WriteMacInfoPlist(self, bundle_depends): - """Write build rules for bundle Info.plist files.""" - info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist( - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.xcode_settings, self.GypPathToNinja) - if not info_plist: - return - if defines: - # Create an intermediate file to store preprocessed results. - intermediate_plist = self.GypPathToUniqueOutput( - os.path.basename(info_plist)) - defines = ' '.join([Define(d, self.flavor) for d in defines]) - info_plist = self.ninja.build(intermediate_plist, 'infoplist', info_plist, - variables=[('defines',defines)]) - - env = self.GetSortedXcodeEnv(additional_settings=extra_env) - env = self.ComputeExportEnvString(env) - - self.ninja.build(out, 'mac_tool', info_plist, - variables=[('mactool_cmd', 'copy-info-plist'), - ('env', env)]) - bundle_depends.append(out) - - def WriteSources(self, config_name, config, sources, predepends, - precompiled_header, case_sensitive_filesystem, spec): - """Write build rules to compile all of |sources|.""" - if self.toolset == 'host': - self.ninja.variable('ar', '$ar_host') - self.ninja.variable('cc', '$cc_host') - self.ninja.variable('cxx', '$cxx_host') - self.ninja.variable('ld', '$ld_host') - - extra_defines = [] - if self.flavor == 'mac': - cflags = self.xcode_settings.GetCflags(config_name) - cflags_c = self.xcode_settings.GetCflagsC(config_name) - cflags_cc = self.xcode_settings.GetCflagsCC(config_name) - cflags_objc = ['$cflags_c'] + \ - self.xcode_settings.GetCflagsObjC(config_name) - cflags_objcc = ['$cflags_cc'] + \ - self.xcode_settings.GetCflagsObjCC(config_name) - elif self.flavor == 'win': - cflags = self.msvs_settings.GetCflags(config_name) - cflags_c = self.msvs_settings.GetCflagsC(config_name) - cflags_cc = self.msvs_settings.GetCflagsCC(config_name) - extra_defines = self.msvs_settings.GetComputedDefines(config_name) - pdbpath = self.msvs_settings.GetCompilerPdbName( - config_name, self.ExpandSpecial) - if not pdbpath: - obj = 'obj' - if self.toolset != 'target': - obj += '.' 
+ self.toolset - pdbpath = os.path.normpath(os.path.join(obj, self.base_dir, - self.name + '.pdb')) - self.WriteVariableList('pdbname', [pdbpath]) - self.WriteVariableList('pchprefix', [self.name]) - else: - cflags = config.get('cflags', []) - cflags_c = config.get('cflags_c', []) - cflags_cc = config.get('cflags_cc', []) - - defines = config.get('defines', []) + extra_defines - self.WriteVariableList('defines', [Define(d, self.flavor) for d in defines]) - if self.flavor == 'win': - self.WriteVariableList('rcflags', - [QuoteShellArgument(self.ExpandSpecial(f), self.flavor) - for f in self.msvs_settings.GetRcflags(config_name, - self.GypPathToNinja)]) - - include_dirs = config.get('include_dirs', []) - if self.flavor == 'win': - include_dirs = self.msvs_settings.AdjustIncludeDirs(include_dirs, - config_name) - self.WriteVariableList('includes', - [QuoteShellArgument('-I' + self.GypPathToNinja(i), self.flavor) - for i in include_dirs]) - - pch_commands = precompiled_header.GetPchBuildCommands() - if self.flavor == 'mac': - self.WriteVariableList('cflags_pch_c', - [precompiled_header.GetInclude('c')]) - self.WriteVariableList('cflags_pch_cc', - [precompiled_header.GetInclude('cc')]) - self.WriteVariableList('cflags_pch_objc', - [precompiled_header.GetInclude('m')]) - self.WriteVariableList('cflags_pch_objcc', - [precompiled_header.GetInclude('mm')]) - - self.WriteVariableList('cflags', map(self.ExpandSpecial, cflags)) - self.WriteVariableList('cflags_c', map(self.ExpandSpecial, cflags_c)) - self.WriteVariableList('cflags_cc', map(self.ExpandSpecial, cflags_cc)) - if self.flavor == 'mac': - self.WriteVariableList('cflags_objc', map(self.ExpandSpecial, - cflags_objc)) - self.WriteVariableList('cflags_objcc', map(self.ExpandSpecial, - cflags_objcc)) - self.ninja.newline() - outputs = [] - for source in sources: - filename, ext = os.path.splitext(source) - ext = ext[1:] - obj_ext = self.obj_ext - if ext in ('cc', 'cpp', 'cxx'): - command = 'cxx' - elif ext == 'c' or (ext == 'S' and self.flavor != 'win'): - command = 'cc' - elif ext == 's' and self.flavor != 'win': # Doesn't generate .o.d files. - command = 'cc_s' - elif (self.flavor == 'win' and ext == 'asm' and - self.msvs_settings.GetArch(config_name) == 'x86' and - not self.msvs_settings.HasExplicitAsmRules(spec)): - # Asm files only get auto assembled for x86 (not x64). - command = 'asm' - # Add the _asm suffix as msvs is capable of handling .cc and - # .asm files of the same name without collision. - obj_ext = '_asm.obj' - elif self.flavor == 'mac' and ext == 'm': - command = 'objc' - elif self.flavor == 'mac' and ext == 'mm': - command = 'objcxx' - elif self.flavor == 'win' and ext == 'rc': - command = 'rc' - obj_ext = '.res' - else: - # Ignore unhandled extensions. - continue - input = self.GypPathToNinja(source) - output = self.GypPathToUniqueOutput(filename + obj_ext) - # Ninja's depfile handling gets confused when the case of a filename - # changes on a case-insensitive file system. To work around that, always - # convert .o filenames to lowercase on such file systems. See - # https://github.com/martine/ninja/issues/402 for details. 
- if not case_sensitive_filesystem: - output = output.lower() - implicit = precompiled_header.GetObjDependencies([input], [output]) - variables = [] - if self.flavor == 'win': - variables, output, implicit = precompiled_header.GetFlagsModifications( - input, output, implicit, command, cflags_c, cflags_cc, - self.ExpandSpecial) - self.ninja.build(output, command, input, - implicit=[gch for _, _, gch in implicit], - order_only=predepends, variables=variables) - outputs.append(output) - - self.WritePchTargets(pch_commands) - - self.ninja.newline() - return outputs - - def WritePchTargets(self, pch_commands): - """Writes ninja rules to compile prefix headers.""" - if not pch_commands: - return - - for gch, lang_flag, lang, input in pch_commands: - var_name = { - 'c': 'cflags_pch_c', - 'cc': 'cflags_pch_cc', - 'm': 'cflags_pch_objc', - 'mm': 'cflags_pch_objcc', - }[lang] - - map = { 'c': 'cc', 'cc': 'cxx', 'm': 'objc', 'mm': 'objcxx', } - cmd = map.get(lang) - self.ninja.build(gch, cmd, input, variables=[(var_name, lang_flag)]) - - def WriteLink(self, spec, config_name, config, link_deps): - """Write out a link step. Fills out target.binary. """ - - command = { - 'executable': 'link', - 'loadable_module': 'solink_module', - 'shared_library': 'solink', - }[spec['type']] - - implicit_deps = set() - solibs = set() - - if 'dependencies' in spec: - # Two kinds of dependencies: - # - Linkable dependencies (like a .a or a .so): add them to the link line. - # - Non-linkable dependencies (like a rule that generates a file - # and writes a stamp file): add them to implicit_deps - extra_link_deps = set() - for dep in spec['dependencies']: - target = self.target_outputs.get(dep) - if not target: - continue - linkable = target.Linkable() - if linkable: - if (self.flavor == 'win' and - target.component_objs and - self.msvs_settings.IsUseLibraryDependencyInputs(config_name)): - extra_link_deps |= set(target.component_objs) - elif self.flavor == 'win' and target.import_lib: - extra_link_deps.add(target.import_lib) - elif target.UsesToc(self.flavor): - solibs.add(target.binary) - implicit_deps.add(target.binary + '.TOC') - else: - extra_link_deps.add(target.binary) - - final_output = target.FinalOutput() - if not linkable or final_output != target.binary: - implicit_deps.add(final_output) - - link_deps.extend(list(extra_link_deps)) - - extra_bindings = [] - if self.is_mac_bundle: - output = self.ComputeMacBundleBinaryOutput() - else: - output = self.ComputeOutput(spec) - extra_bindings.append(('postbuilds', - self.GetPostbuildCommand(spec, output, output))) - - is_executable = spec['type'] == 'executable' - if self.flavor == 'mac': - ldflags = self.xcode_settings.GetLdflags(config_name, - self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']), - self.GypPathToNinja) - elif self.flavor == 'win': - manifest_name = self.GypPathToUniqueOutput( - self.ComputeOutputFileName(spec)) - ldflags, manifest_files = self.msvs_settings.GetLdflags(config_name, - self.GypPathToNinja, self.ExpandSpecial, manifest_name, is_executable) - self.WriteVariableList('manifests', manifest_files) - else: - ldflags = config.get('ldflags', []) - if is_executable and len(solibs): - ldflags.append('-Wl,-rpath=\$$ORIGIN/lib/') - ldflags.append('-Wl,-rpath-link=lib/') - self.WriteVariableList('ldflags', - gyp.common.uniquer(map(self.ExpandSpecial, - ldflags))) - - libraries = gyp.common.uniquer(map(self.ExpandSpecial, - spec.get('libraries', []))) - if self.flavor == 'mac': - libraries = self.xcode_settings.AdjustLibraries(libraries) - 
elif self.flavor == 'win': - libraries = self.msvs_settings.AdjustLibraries(libraries) - self.WriteVariableList('libs', libraries) - - self.target.binary = output - - if command in ('solink', 'solink_module'): - extra_bindings.append(('soname', os.path.split(output)[1])) - extra_bindings.append(('lib', - gyp.common.EncodePOSIXShellArgument(output))) - if self.flavor == 'win': - extra_bindings.append(('dll', output)) - if '/NOENTRY' not in ldflags: - self.target.import_lib = output + '.lib' - extra_bindings.append(('implibflag', - '/IMPLIB:%s' % self.target.import_lib)) - output = [output, self.target.import_lib] - else: - output = [output, output + '.TOC'] - - if len(solibs): - extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs))) - - self.ninja.build(output, command, link_deps, - implicit=list(implicit_deps), - variables=extra_bindings) - - def WriteTarget(self, spec, config_name, config, link_deps, compile_deps): - if spec['type'] == 'none': - # TODO(evan): don't call this function for 'none' target types, as - # it doesn't do anything, and we fake out a 'binary' with a stamp file. - self.target.binary = compile_deps - elif spec['type'] == 'static_library': - self.target.binary = self.ComputeOutput(spec) - variables = [] - postbuild = self.GetPostbuildCommand( - spec, self.target.binary, self.target.binary) - if postbuild: - variables.append(('postbuilds', postbuild)) - if self.xcode_settings: - variables.append(('libtool_flags', - self.xcode_settings.GetLibtoolflags(config_name))) - if (self.flavor not in ('mac', 'win') and not - self.is_standalone_static_library): - self.ninja.build(self.target.binary, 'alink_thin', link_deps, - order_only=compile_deps, variables=variables) - else: - if self.msvs_settings: - libflags = self.msvs_settings.GetLibFlags(config_name, - self.GypPathToNinja) - variables.append(('libflags', libflags)) - self.ninja.build(self.target.binary, 'alink', link_deps, - order_only=compile_deps, variables=variables) - else: - self.WriteLink(spec, config_name, config, link_deps) - return self.target.binary - - def WriteMacBundle(self, spec, mac_bundle_depends): - assert self.is_mac_bundle - package_framework = spec['type'] in ('shared_library', 'loadable_module') - output = self.ComputeMacBundleOutput() - postbuild = self.GetPostbuildCommand(spec, output, self.target.binary, - is_command_start=not package_framework) - variables = [] - if postbuild: - variables.append(('postbuilds', postbuild)) - if package_framework: - variables.append(('version', self.xcode_settings.GetFrameworkVersion())) - self.ninja.build(output, 'package_framework', mac_bundle_depends, - variables=variables) - else: - self.ninja.build(output, 'stamp', mac_bundle_depends, - variables=variables) - self.target.bundle = output - return output - - def GetSortedXcodeEnv(self, additional_settings=None): - """Returns the variables Xcode would set for build steps.""" - assert self.abs_build_dir - abs_build_dir = self.abs_build_dir - return gyp.xcode_emulation.GetSortedXcodeEnv( - self.xcode_settings, abs_build_dir, - os.path.join(abs_build_dir, self.build_to_base), self.config_name, - additional_settings) - - def GetSortedXcodePostbuildEnv(self): - """Returns the variables Xcode would set for postbuild steps.""" - postbuild_settings = {} - # CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack. - # TODO(thakis): It would be nice to have some general mechanism instead. 
- strip_save_file = self.xcode_settings.GetPerTargetSetting( - 'CHROMIUM_STRIP_SAVE_FILE') - if strip_save_file: - postbuild_settings['CHROMIUM_STRIP_SAVE_FILE'] = strip_save_file - return self.GetSortedXcodeEnv(additional_settings=postbuild_settings) - - def GetPostbuildCommand(self, spec, output, output_binary, - is_command_start=False): - """Returns a shell command that runs all the postbuilds, and removes - |output| if any of them fails. If |is_command_start| is False, then the - returned string will start with ' && '.""" - if not self.xcode_settings or spec['type'] == 'none' or not output: - return '' - output = QuoteShellArgument(output, self.flavor) - target_postbuilds = self.xcode_settings.GetTargetPostbuilds( - self.config_name, - os.path.normpath(os.path.join(self.base_to_build, output)), - QuoteShellArgument( - os.path.normpath(os.path.join(self.base_to_build, output_binary)), - self.flavor), - quiet=True) - postbuilds = gyp.xcode_emulation.GetSpecPostbuildCommands(spec, quiet=True) - postbuilds = target_postbuilds + postbuilds - if not postbuilds: - return '' - # Postbuilds expect to be run in the gyp file's directory, so insert an - # implicit postbuild to cd to there. - postbuilds.insert(0, gyp.common.EncodePOSIXShellList( - ['cd', self.build_to_base])) - env = self.ComputeExportEnvString(self.GetSortedXcodePostbuildEnv()) - # G will be non-null if any postbuild fails. Run all postbuilds in a - # subshell. - commands = env + ' (' + \ - ' && '.join([ninja_syntax.escape(command) for command in postbuilds]) - command_string = (commands + '); G=$$?; ' - # Remove the final output if any postbuild failed. - '((exit $$G) || rm -rf %s) ' % output + '&& exit $$G)') - if is_command_start: - return '(' + command_string + ' && ' - else: - return '$ && (' + command_string - - def ComputeExportEnvString(self, env): - """Given an environment, returns a string looking like - 'export FOO=foo; export BAR="${FOO} bar;' - that exports |env| to the shell.""" - export_str = [] - for k, v in env: - export_str.append('export %s=%s;' % - (k, ninja_syntax.escape(gyp.common.EncodePOSIXShellArgument(v)))) - return ' '.join(export_str) - - def ComputeMacBundleOutput(self): - """Return the 'output' (full output path) to a bundle output directory.""" - assert self.is_mac_bundle - path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']) - return os.path.join(path, self.xcode_settings.GetWrapperName()) - - def ComputeMacBundleBinaryOutput(self): - """Return the 'output' (full output path) to the binary in a bundle.""" - assert self.is_mac_bundle - path = self.ExpandSpecial(generator_default_variables['PRODUCT_DIR']) - return os.path.join(path, self.xcode_settings.GetExecutablePath()) - - def ComputeOutputFileName(self, spec, type=None): - """Compute the filename of the final output for the current target.""" - if not type: - type = spec['type'] - - default_variables = copy.copy(generator_default_variables) - CalculateVariables(default_variables, {'flavor': self.flavor}) - - # Compute filename prefix: the product prefix, or a default for - # the product type. - DEFAULT_PREFIX = { - 'loadable_module': default_variables['SHARED_LIB_PREFIX'], - 'shared_library': default_variables['SHARED_LIB_PREFIX'], - 'static_library': default_variables['STATIC_LIB_PREFIX'], - 'executable': default_variables['EXECUTABLE_PREFIX'], - } - prefix = spec.get('product_prefix', DEFAULT_PREFIX.get(type, '')) - - # Compute filename extension: the product extension, or a default - # for the product type. 
- DEFAULT_EXTENSION = { - 'loadable_module': default_variables['SHARED_LIB_SUFFIX'], - 'shared_library': default_variables['SHARED_LIB_SUFFIX'], - 'static_library': default_variables['STATIC_LIB_SUFFIX'], - 'executable': default_variables['EXECUTABLE_SUFFIX'], - } - extension = spec.get('product_extension') - if extension: - extension = '.' + extension - else: - extension = DEFAULT_EXTENSION.get(type, '') - - if 'product_name' in spec: - # If we were given an explicit name, use that. - target = spec['product_name'] - else: - # Otherwise, derive a name from the target name. - target = spec['target_name'] - if prefix == 'lib': - # Snip out an extra 'lib' from libs if appropriate. - target = StripPrefix(target, 'lib') - - if type in ('static_library', 'loadable_module', 'shared_library', - 'executable'): - return '%s%s%s' % (prefix, target, extension) - elif type == 'none': - return '%s.stamp' % target - else: - raise Exception('Unhandled output type %s' % type) - - def ComputeOutput(self, spec, type=None): - """Compute the path for the final output of the spec.""" - assert not self.is_mac_bundle or type - - if not type: - type = spec['type'] - - if self.flavor == 'win': - override = self.msvs_settings.GetOutputName(self.config_name, - self.ExpandSpecial) - if override: - return override - - if self.flavor == 'mac' and type in ( - 'static_library', 'executable', 'shared_library', 'loadable_module'): - filename = self.xcode_settings.GetExecutablePath() - else: - filename = self.ComputeOutputFileName(spec, type) - - if 'product_dir' in spec: - path = os.path.join(spec['product_dir'], filename) - return self.ExpandSpecial(path) - - # Some products go into the output root, libraries go into shared library - # dir, and everything else goes into the normal place. - type_in_output_root = ['executable', 'loadable_module'] - if self.flavor == 'mac' and self.toolset == 'target': - type_in_output_root += ['shared_library', 'static_library'] - elif self.flavor == 'win' and self.toolset == 'target': - type_in_output_root += ['shared_library'] - - if type in type_in_output_root or self.is_standalone_static_library: - return filename - elif type == 'shared_library': - libdir = 'lib' - if self.toolset != 'target': - libdir = os.path.join('lib', '%s' % self.toolset) - return os.path.join(libdir, filename) - else: - return self.GypPathToUniqueOutput(filename, qualified=False) - - def WriteVariableList(self, var, values): - assert not isinstance(values, str) - if values is None: - values = [] - self.ninja.variable(var, ' '.join(values)) - - def WriteNewNinjaRule(self, name, args, description, is_cygwin, env): - """Write out a new ninja "rule" statement for a given command. - - Returns the name of the new rule, and a copy of |args| with variables - expanded.""" - - if self.flavor == 'win': - args = [self.msvs_settings.ConvertVSMacros( - arg, self.base_to_build, config=self.config_name) - for arg in args] - description = self.msvs_settings.ConvertVSMacros( - description, config=self.config_name) - elif self.flavor == 'mac': - # |env| is an empty list on non-mac. - args = [gyp.xcode_emulation.ExpandEnvVars(arg, env) for arg in args] - description = gyp.xcode_emulation.ExpandEnvVars(description, env) - - # TODO: we shouldn't need to qualify names; we do it because - # currently the ninja rule namespace is global, but it really - # should be scoped to the subninja. - rule_name = self.name - if self.toolset == 'target': - rule_name += '.' + self.toolset - rule_name += '.' 
+ name - rule_name = re.sub('[^a-zA-Z0-9_]', '_', rule_name) - - # Remove variable references, but not if they refer to the magic rule - # variables. This is not quite right, as it also protects these for - # actions, not just for rules where they are valid. Good enough. - protect = [ '${root}', '${dirname}', '${source}', '${ext}', '${name}' ] - protect = '(?!' + '|'.join(map(re.escape, protect)) + ')' - description = re.sub(protect + r'\$', '_', description) - - # gyp dictates that commands are run from the base directory. - # cd into the directory before running, and adjust paths in - # the arguments to point to the proper locations. - rspfile = None - rspfile_content = None - args = [self.ExpandSpecial(arg, self.base_to_build) for arg in args] - if self.flavor == 'win': - rspfile = rule_name + '.$unique_name.rsp' - # The cygwin case handles this inside the bash sub-shell. - run_in = '' if is_cygwin else ' ' + self.build_to_base - if is_cygwin: - rspfile_content = self.msvs_settings.BuildCygwinBashCommandLine( - args, self.build_to_base) - else: - rspfile_content = gyp.msvs_emulation.EncodeRspFileList(args) - command = ('%s gyp-win-tool action-wrapper $arch ' % sys.executable + - rspfile + run_in) - else: - env = self.ComputeExportEnvString(env) - command = gyp.common.EncodePOSIXShellList(args) - command = 'cd %s; ' % self.build_to_base + env + command - - # GYP rules/actions express being no-ops by not touching their outputs. - # Avoid executing downstream dependencies in this case by specifying - # restat=1 to ninja. - self.ninja.rule(rule_name, command, description, restat=True, - rspfile=rspfile, rspfile_content=rspfile_content) - self.ninja.newline() - - return rule_name, args - - -def CalculateVariables(default_variables, params): - """Calculate additional variables for use in the build (called by gyp).""" - global generator_additional_non_configuration_keys - global generator_additional_path_sections - flavor = gyp.common.GetFlavor(params) - if flavor == 'mac': - default_variables.setdefault('OS', 'mac') - default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib') - default_variables.setdefault('SHARED_LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - default_variables.setdefault('LIB_DIR', - generator_default_variables['PRODUCT_DIR']) - - # Copy additional generator configuration data from Xcode, which is shared - # by the Mac Ninja generator. - import gyp.generator.xcode as xcode_generator - generator_additional_non_configuration_keys = getattr(xcode_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(xcode_generator, - 'generator_additional_path_sections', []) - global generator_extra_sources_for_rules - generator_extra_sources_for_rules = getattr(xcode_generator, - 'generator_extra_sources_for_rules', []) - elif flavor == 'win': - default_variables.setdefault('OS', 'win') - default_variables['EXECUTABLE_SUFFIX'] = '.exe' - default_variables['STATIC_LIB_PREFIX'] = '' - default_variables['STATIC_LIB_SUFFIX'] = '.lib' - default_variables['SHARED_LIB_PREFIX'] = '' - default_variables['SHARED_LIB_SUFFIX'] = '.dll' - generator_flags = params.get('generator_flags', {}) - - # Copy additional generator configuration data from VS, which is shared - # by the Windows Ninja generator. 
- import gyp.generator.msvs as msvs_generator - generator_additional_non_configuration_keys = getattr(msvs_generator, - 'generator_additional_non_configuration_keys', []) - generator_additional_path_sections = getattr(msvs_generator, - 'generator_additional_path_sections', []) - - # Set a variable so conditions can be based on msvs_version. - msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags) - default_variables['MSVS_VERSION'] = msvs_version.ShortName() - - # To determine processor word size on Windows, in addition to checking - # PROCESSOR_ARCHITECTURE (which reflects the word size of the current - # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which - # contains the actual word size of the system when running thru WOW64). - if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or - '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')): - default_variables['MSVS_OS_BITS'] = 64 - else: - default_variables['MSVS_OS_BITS'] = 32 - else: - operating_system = flavor - if flavor == 'android': - operating_system = 'linux' # Keep this legacy behavior for now. - default_variables.setdefault('OS', operating_system) - default_variables.setdefault('SHARED_LIB_SUFFIX', '.so') - default_variables.setdefault('SHARED_LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'lib')) - default_variables.setdefault('LIB_DIR', - os.path.join('$!PRODUCT_DIR', 'obj')) - - -def OpenOutput(path, mode='w'): - """Open |path| for writing, creating directories if necessary.""" - try: - os.makedirs(os.path.dirname(path)) - except OSError: - pass - return open(path, mode) - - -def CommandWithWrapper(cmd, wrappers, prog): - wrapper = wrappers.get(cmd, '') - if wrapper: - return wrapper + ' ' + prog - return prog - - -def GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name): - options = params['options'] - flavor = gyp.common.GetFlavor(params) - generator_flags = params.get('generator_flags', {}) - - # generator_dir: relative path from pwd to where make puts build files. - # Makes migrating from make to ninja easier, ninja doesn't put anything here. - generator_dir = os.path.relpath(params['options'].generator_output or '.') - - # output_dir: relative path from generator_dir to the build directory. - output_dir = generator_flags.get('output_dir', 'out') - - # build_dir: relative path from source root to our output files. - # e.g. "out/Debug" - build_dir = os.path.normpath(os.path.join(generator_dir, - output_dir, - config_name)) - - toplevel_build = os.path.join(options.toplevel_dir, build_dir) - - master_ninja = ninja_syntax.Writer( - OpenOutput(os.path.join(toplevel_build, 'build.ninja')), - width=120) - case_sensitive_filesystem = not os.path.exists( - os.path.join(toplevel_build, 'BUILD.NINJA')) - - # Put build-time support tools in out/{config_name}. - gyp.common.CopyTool(flavor, toplevel_build) - - # Grab make settings for CC/CXX. - # The rules are - # - The priority from low to high is gcc/g++, the 'make_global_settings' in - # gyp, the environment variable. - # - If there is no 'make_global_settings' for CC.host/CXX.host or - # 'CC_host'/'CXX_host' enviroment variable, cc_host/cxx_host should be set - # to cc/cxx. 
- if flavor == 'win': - cc = 'cl.exe' - cxx = 'cl.exe' - ld = 'link.exe' - gyp.msvs_emulation.GenerateEnvironmentFiles( - toplevel_build, generator_flags, OpenOutput) - ld_host = '$ld' - else: - cc = 'gcc' - cxx = 'g++' - ld = '$cxx' - ld_host = '$cxx_host' - - cc_host = None - cxx_host = None - cc_host_global_setting = None - cxx_host_global_setting = None - - build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0]) - make_global_settings = data[build_file].get('make_global_settings', []) - build_to_root = gyp.common.InvertRelativePath(build_dir, - options.toplevel_dir) - flock = 'flock' - if flavor == 'mac': - flock = './gyp-mac-tool flock' - wrappers = {} - if flavor != 'win': - wrappers['LINK'] = flock + ' linker.lock' - for key, value in make_global_settings: - if key == 'CC': - cc = os.path.join(build_to_root, value) - if key == 'CXX': - cxx = os.path.join(build_to_root, value) - if key == 'LD': - ld = os.path.join(build_to_root, value) - if key == 'CC.host': - cc_host = os.path.join(build_to_root, value) - cc_host_global_setting = value - if key == 'CXX.host': - cxx_host = os.path.join(build_to_root, value) - cxx_host_global_setting = value - if key == 'LD.host': - ld_host = os.path.join(build_to_root, value) - if key.endswith('_wrapper'): - wrappers[key[:-len('_wrapper')]] = os.path.join(build_to_root, value) - - cc = GetEnvironFallback(['CC_target', 'CC'], cc) - master_ninja.variable('cc', CommandWithWrapper('CC', wrappers, cc)) - cxx = GetEnvironFallback(['CXX_target', 'CXX'], cxx) - master_ninja.variable('cxx', CommandWithWrapper('CXX', wrappers, cxx)) - ld = GetEnvironFallback(['LD_target', 'LD'], ld) - - if not cc_host: - cc_host = cc - if not cxx_host: - cxx_host = cxx - - if flavor == 'win': - master_ninja.variable('ld', ld) - master_ninja.variable('idl', 'midl.exe') - master_ninja.variable('ar', 'lib.exe') - master_ninja.variable('rc', 'rc.exe') - master_ninja.variable('asm', 'ml.exe') - master_ninja.variable('mt', 'mt.exe') - master_ninja.variable('use_dep_database', '1') - else: - master_ninja.variable('ld', CommandWithWrapper('LINK', wrappers, ld)) - master_ninja.variable('ar', GetEnvironFallback(['AR_target', 'AR'], 'ar')) - - master_ninja.variable('ar_host', GetEnvironFallback(['AR_host'], 'ar')) - cc_host = GetEnvironFallback(['CC_host'], cc_host) - cxx_host = GetEnvironFallback(['CXX_host'], cxx_host) - ld_host = GetEnvironFallback(['LD_host'], ld_host) - - # The environment variable could be used in 'make_global_settings', like - # ['CC.host', '$(CC)'] or ['CXX.host', '$(CXX)'], transform them here. 
- if '$(CC)' in cc_host and cc_host_global_setting: - cc_host = cc_host_global_setting.replace('$(CC)', cc) - if '$(CXX)' in cxx_host and cxx_host_global_setting: - cxx_host = cxx_host_global_setting.replace('$(CXX)', cxx) - master_ninja.variable('cc_host', cc_host) - master_ninja.variable('cxx_host', cxx_host) - if flavor == 'win': - master_ninja.variable('ld_host', ld_host) - else: - master_ninja.variable('ld_host', CommandWithWrapper( - 'LINK', wrappers, ld_host)) - - master_ninja.newline() - - if flavor != 'win': - master_ninja.rule( - 'cc', - description='CC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'cc_s', - description='CC $out', - command=('$cc $defines $includes $cflags $cflags_c ' - '$cflags_pch_c -c $in -o $out')) - master_ninja.rule( - 'cxx', - description='CXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc ' - '$cflags_pch_cc -c $in -o $out'), - depfile='$out.d') - else: - cc_command = ('ninja -t msvc -o $out -e $arch ' - '-- ' - '$cc /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') - cxx_command = ('ninja -t msvc -o $out -e $arch ' - '-- ' - '$cxx /nologo /showIncludes /FC ' - '@$out.rsp /c $in /Fo$out /Fd$pdbname ') - master_ninja.rule( - 'cc', - description='CC $out', - command=cc_command, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_c') - master_ninja.rule( - 'cxx', - description='CXX $out', - command=cxx_command, - depfile='$out.d', - rspfile='$out.rsp', - rspfile_content='$defines $includes $cflags $cflags_cc') - master_ninja.rule( - 'idl', - description='IDL $in', - command=('%s gyp-win-tool midl-wrapper $arch $outdir ' - '$tlb $h $dlldata $iid $proxy $in ' - '$idlflags' % sys.executable)) - master_ninja.rule( - 'rc', - description='RC $in', - # Note: $in must be last otherwise rc.exe complains. - command=('%s gyp-win-tool rc-wrapper ' - '$arch $rc $defines $includes $rcflags /fo$out $in' % - sys.executable)) - master_ninja.rule( - 'asm', - description='ASM $in', - command=('%s gyp-win-tool asm-wrapper ' - '$arch $asm $defines $includes /c /Fo $out $in' % - sys.executable)) - - if flavor != 'mac' and flavor != 'win': - master_ninja.rule( - 'alink', - description='AR $out', - command='rm -f $out && $ar rcs $out $in') - master_ninja.rule( - 'alink_thin', - description='AR $out', - command='rm -f $out && $ar rcsT $out $in') - - # This allows targets that only need to depend on $lib's API to declare an - # order-only dependency on $lib.TOC and avoid relinking such downstream - # dependencies when $lib changes only in non-public ways. - # The resulting string leaves an uninterpolated %{suffix} which - # is used in the final substitution below. - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e ${lib}.TOC ]; then ' - '%(solink)s && %(extract_toc)s > ${lib}.TOC; else ' - '%(solink)s && %(extract_toc)s > ${lib}.tmp && ' - 'if ! 
cmp -s ${lib}.tmp ${lib}.TOC; then mv ${lib}.tmp ${lib}.TOC ; ' - 'fi; fi' - % { 'solink': - '$ld -shared $ldflags -o $lib -Wl,-soname=$soname %(suffix)s', - 'extract_toc': - ('{ readelf -d ${lib} | grep SONAME ; ' - 'nm -gD -f p ${lib} | cut -f1-2 -d\' \'; }')}) - - master_ninja.rule( - 'solink', - description='SOLINK $lib', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive ' - '$libs'})) - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '-Wl,--start-group $in $solibs -Wl,--end-group $libs'})) - master_ninja.rule( - 'link', - description='LINK $out', - command=('$ld $ldflags -o $out ' - '-Wl,--start-group $in $solibs -Wl,--end-group $libs')) - elif flavor == 'win': - master_ninja.rule( - 'alink', - description='LIB $out', - command=('%s gyp-win-tool link-wrapper $arch ' - '$ar /nologo /ignore:4221 /OUT:$out @$out.rsp' % - sys.executable), - rspfile='$out.rsp', - rspfile_content='$in_newline $libflags') - dlldesc = 'LINK(DLL) $dll' - dllcmd = ('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo $implibflag /DLL /OUT:$dll ' - '/PDB:$dll.pdb @$dll.rsp' % sys.executable) - dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' - 'cmd /c if exist $dll.manifest del $dll.manifest' % - sys.executable) - dllcmd += (' && %s gyp-win-tool manifest-wrapper $arch ' - '$mt -nologo -manifest $manifests -out:$dll.manifest' % - sys.executable) - master_ninja.rule('solink', description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True) - master_ninja.rule('solink_module', description=dlldesc, command=dllcmd, - rspfile='$dll.rsp', - rspfile_content='$libs $in_newline $ldflags', - restat=True) - # Note that ldflags goes at the end so that it has the option of - # overriding default settings earlier in the command line. - master_ninja.rule( - 'link', - description='LINK $out', - command=('%s gyp-win-tool link-wrapper $arch ' - '$ld /nologo /OUT:$out /PDB:$out.pdb @$out.rsp && ' - '%s gyp-win-tool manifest-wrapper $arch ' - 'cmd /c if exist $out.manifest del $out.manifest && ' - '%s gyp-win-tool manifest-wrapper $arch ' - '$mt -nologo -manifest $manifests -out:$out.manifest' % - (sys.executable, sys.executable, sys.executable)), - rspfile='$out.rsp', - rspfile_content='$in_newline $libs $ldflags') - else: - master_ninja.rule( - 'objc', - description='OBJC $out', - command=('$cc -MMD -MF $out.d $defines $includes $cflags $cflags_objc ' - '$cflags_pch_objc -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'objcxx', - description='OBJCXX $out', - command=('$cxx -MMD -MF $out.d $defines $includes $cflags $cflags_objcc ' - '$cflags_pch_objcc -c $in -o $out'), - depfile='$out.d') - master_ninja.rule( - 'alink', - description='LIBTOOL-STATIC $out, POSTBUILDS', - command='rm -f $out && ' - './gyp-mac-tool filter-libtool libtool $libtool_flags ' - '-static -o $out $in' - '$postbuilds') - - # Record the public interface of $lib in $lib.TOC. See the corresponding - # comment in the posix section above for details. - mtime_preserving_solink_base = ( - 'if [ ! -e $lib -o ! -e ${lib}.TOC ] || ' - # Always force dependent targets to relink if this library - # reexports something. Handling this correctly would require - # recursive TOC dumping but this is rare in practice, so punt. 
- 'otool -l $lib | grep -q LC_REEXPORT_DYLIB ; then ' - '%(solink)s && %(extract_toc)s > ${lib}.TOC; ' - 'else ' - '%(solink)s && %(extract_toc)s > ${lib}.tmp && ' - 'if ! cmp -s ${lib}.tmp ${lib}.TOC; then ' - 'mv ${lib}.tmp ${lib}.TOC ; ' - 'fi; ' - 'fi' - % { 'solink': '$ld -shared $ldflags -o $lib %(suffix)s', - 'extract_toc': - '{ otool -l $lib | grep LC_ID_DYLIB -A 5; ' - 'nm -gP $lib | cut -f1-2 -d\' \' | grep -v U$$; true; }'}) - - # TODO(thakis): The solink_module rule is likely wrong. Xcode seems to pass - # -bundle -single_module here (for osmesa.so). - master_ninja.rule( - 'solink', - description='SOLINK $lib, POSTBUILDS', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '$in $solibs $libs$postbuilds'})) - master_ninja.rule( - 'solink_module', - description='SOLINK(module) $lib, POSTBUILDS', - restat=True, - command=(mtime_preserving_solink_base % { - 'suffix': '$in $solibs $libs$postbuilds'})) - - master_ninja.rule( - 'link', - description='LINK $out, POSTBUILDS', - command=('$ld $ldflags -o $out ' - '$in $solibs $libs$postbuilds')) - master_ninja.rule( - 'infoplist', - description='INFOPLIST $out', - command=('$cc -E -P -Wno-trigraphs -x c $defines $in -o $out && ' - 'plutil -convert xml1 $out $out')) - master_ninja.rule( - 'mac_tool', - description='MACTOOL $mactool_cmd $in', - command='$env ./gyp-mac-tool $mactool_cmd $in $out') - master_ninja.rule( - 'package_framework', - description='PACKAGE FRAMEWORK $out, POSTBUILDS', - command='./gyp-mac-tool package-framework $out $version$postbuilds ' - '&& touch $out') - if flavor == 'win': - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='%s gyp-win-tool stamp $out' % sys.executable) - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='%s gyp-win-tool recursive-mirror $in $out' % sys.executable) - else: - master_ninja.rule( - 'stamp', - description='STAMP $out', - command='${postbuilds}touch $out') - master_ninja.rule( - 'copy', - description='COPY $in $out', - command='ln -f $in $out 2>/dev/null || (rm -rf $out && cp -af $in $out)') - master_ninja.newline() - - all_targets = set() - for build_file in params['build_files']: - for target in gyp.common.AllTargets(target_list, - target_dicts, - os.path.normpath(build_file)): - all_targets.add(target) - all_outputs = set() - - # target_outputs is a map from qualified target name to a Target object. - target_outputs = {} - # target_short_names is a map from target short name to a list of Target - # objects. - target_short_names = {} - for qualified_target in target_list: - # qualified_target is like: third_party/icu/icu.gyp:icui18n#target - build_file, name, toolset = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - this_make_global_settings = data[build_file].get('make_global_settings', []) - assert make_global_settings == this_make_global_settings, ( - "make_global_settings needs to be the same for all targets.") - - spec = target_dicts[qualified_target] - if flavor == 'mac': - gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec) - - build_file = gyp.common.RelativePath(build_file, options.toplevel_dir) - - base_path = os.path.dirname(build_file) - obj = 'obj' - if toolset != 'target': - obj += '.' 
+ toolset - output_file = os.path.join(obj, base_path, name + '.ninja') - - abs_build_dir = os.path.abspath(toplevel_build) - writer = NinjaWriter(qualified_target, target_outputs, base_path, build_dir, - OpenOutput(os.path.join(toplevel_build, output_file)), - flavor, toplevel_dir=options.toplevel_dir) - master_ninja.subninja(output_file) - - target = writer.WriteSpec( - spec, config_name, generator_flags, case_sensitive_filesystem) - if target: - if name != target.FinalOutput() and spec['toolset'] == 'target': - target_short_names.setdefault(name, []).append(target) - target_outputs[qualified_target] = target - if qualified_target in all_targets: - all_outputs.add(target.FinalOutput()) - - if target_short_names: - # Write a short name to build this target. This benefits both the - # "build chrome" case as well as the gyp tests, which expect to be - # able to run actions and build libraries by their short name. - master_ninja.newline() - master_ninja.comment('Short names for targets.') - for short_name in target_short_names: - master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in - target_short_names[short_name]]) - - if all_outputs: - master_ninja.newline() - master_ninja.build('all', 'phony', list(all_outputs)) - master_ninja.default(generator_flags.get('default_target', 'all')) - - -def PerformBuild(data, configurations, params): - options = params['options'] - for config in configurations: - builddir = os.path.join(options.toplevel_dir, 'out', config) - arguments = ['ninja', '-C', builddir] - print 'Building [%s]: %s' % (config, arguments) - subprocess.check_call(arguments) - - -def CallGenerateOutputForConfig(arglist): - # Ignore the interrupt signal so that the parent process catches it and - # kills all multiprocessing children. - signal.signal(signal.SIGINT, signal.SIG_IGN) - - (target_list, target_dicts, data, params, config_name) = arglist - GenerateOutputForConfig(target_list, target_dicts, data, params, config_name) - - -def GenerateOutput(target_list, target_dicts, data, params): - user_config = params.get('generator_flags', {}).get('config', None) - if gyp.common.GetFlavor(params) == 'win': - target_list, target_dicts = MSVSUtil.ShardTargets(target_list, target_dicts) - if user_config: - GenerateOutputForConfig(target_list, target_dicts, data, params, - user_config) - else: - config_names = target_dicts[target_list[0]]['configurations'].keys() - if params['parallel']: - try: - pool = multiprocessing.Pool(len(config_names)) - arglists = [] - for config_name in config_names: - arglists.append( - (target_list, target_dicts, data, params, config_name)) - pool.map(CallGenerateOutputForConfig, arglists) - except KeyboardInterrupt, e: - pool.terminate() - raise e - else: - for config_name in config_names: - GenerateOutputForConfig(target_list, target_dicts, data, params, - config_name) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/ninja_test.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/ninja_test.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/ninja_test.py 2012-06-11 09:47:53.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/ninja_test.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,44 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" Unit tests for the ninja.py file. 
""" - -import gyp.generator.ninja as ninja -import unittest -import StringIO -import sys -import TestCommon - - -class TestPrefixesAndSuffixes(unittest.TestCase): - if sys.platform in ('win32', 'cygwin'): - def test_BinaryNamesWindows(self): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'win') - spec = { 'target_name': 'wee' } - self.assertTrue(writer.ComputeOutputFileName(spec, 'executable'). - endswith('.exe')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.dll')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - endswith('.lib')) - - if sys.platform == 'linux2': - def test_BinaryNamesLinux(self): - writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'ninja.build', 'linux') - spec = { 'target_name': 'wee' } - self.assertTrue('.' not in writer.ComputeOutputFileName(spec, - 'executable')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - startswith('lib')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library'). - endswith('.so')) - self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library'). - endswith('.a')) - -if __name__ == '__main__': - unittest.main() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/scons.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/scons.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/scons.py 2012-09-18 18:05:05.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/scons.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1072 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import gyp -import gyp.common -import gyp.SCons as SCons -import os.path -import pprint -import re -import subprocess - - -# TODO: remove when we delete the last WriteList() call in this module -WriteList = SCons.WriteList - - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': '${LIBPREFIX}', - 'SHARED_LIB_PREFIX': '${SHLIBPREFIX}', - 'STATIC_LIB_SUFFIX': '${LIBSUFFIX}', - 'SHARED_LIB_SUFFIX': '${SHLIBSUFFIX}', - 'INTERMEDIATE_DIR': '${INTERMEDIATE_DIR}', - 'SHARED_INTERMEDIATE_DIR': '${SHARED_INTERMEDIATE_DIR}', - 'OS': 'linux', - 'PRODUCT_DIR': '$TOP_BUILDDIR', - 'SHARED_LIB_DIR': '$LIB_DIR', - 'LIB_DIR': '$LIB_DIR', - 'RULE_INPUT_ROOT': '${SOURCE.filebase}', - 'RULE_INPUT_DIRNAME': '${SOURCE.dir}', - 'RULE_INPUT_EXT': '${SOURCE.suffix}', - 'RULE_INPUT_NAME': '${SOURCE.file}', - 'RULE_INPUT_PATH': '${SOURCE.abspath}', - 'CONFIGURATION_NAME': '${CONFIG_NAME}', -} - -# Tell GYP how to process the input for us. -generator_handles_variants = True -generator_wants_absolute_build_file_paths = True - - -def FixPath(path, prefix): - if not os.path.isabs(path) and not path[0] == '$': - path = prefix + path - return path - - -header = """\ -# This file is generated; do not edit. 
-""" - - -_alias_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Alias( - ['_%(target_name)s_action'], - %(inputs)s, - _action -) -env.AlwaysBuild(_outputs) -""" - -_run_as_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -""" - -_run_as_template_suffix = """ -_run_as_target = env.Alias('run_%(target_name)s', target_files, _action) -env.Requires(_run_as_target, [ - Alias('%(target_name)s'), -]) -env.AlwaysBuild(_run_as_target) -""" - -_command_template = """ -if GetOption('verbose'): - _action = Action([%(action)s]) -else: - _action = Action([%(action)s], %(message)s) -_outputs = env.Command( - %(outputs)s, - %(inputs)s, - _action -) -""" - -# This is copied from the default SCons action, updated to handle symlinks. -_copy_action_template = """ -import shutil -import SCons.Action - -def _copy_files_or_dirs_or_symlinks(dest, src): - SCons.Node.FS.invalidate_node_memos(dest) - if SCons.Util.is_List(src) and os.path.isdir(dest): - for file in src: - shutil.copy2(file, dest) - return 0 - elif os.path.islink(src): - linkto = os.readlink(src) - os.symlink(linkto, dest) - return 0 - elif os.path.isfile(src): - return shutil.copy2(src, dest) - else: - return shutil.copytree(src, dest, 1) - -def _copy_files_or_dirs_or_symlinks_str(dest, src): - return 'Copying %s to %s ...' % (src, dest) - -GYPCopy = SCons.Action.ActionFactory(_copy_files_or_dirs_or_symlinks, - _copy_files_or_dirs_or_symlinks_str, - convert=str) -""" - -_rule_template = """ -%(name)s_additional_inputs = %(inputs)s -%(name)s_outputs = %(outputs)s -def %(name)s_emitter(target, source, env): - return (%(name)s_outputs, source + %(name)s_additional_inputs) -if GetOption('verbose'): - %(name)s_action = Action([%(action)s]) -else: - %(name)s_action = Action([%(action)s], %(message)s) -env['BUILDERS']['%(name)s'] = Builder(action=%(name)s_action, - emitter=%(name)s_emitter) - -_outputs = [] -_processed_input_files = [] -for infile in input_files: - if (type(infile) == type('') - and not os.path.isabs(infile) - and not infile[0] == '$'): - infile = %(src_dir)r + infile - if str(infile).endswith('.%(extension)s'): - _generated = env.%(name)s(infile) - env.Precious(_generated) - _outputs.append(_generated) - %(process_outputs_as_sources_line)s - else: - _processed_input_files.append(infile) -prerequisites.extend(_outputs) -input_files = _processed_input_files -""" - -_spawn_hack = """ -import re -import SCons.Platform.posix -needs_shell = re.compile('["\\'>= 2.5: - return os.sysconf('SC_NPROCESSORS_ONLN') - else: # Mac OS X with Python < 2.5: - return int(os.popen2("sysctl -n hw.ncpu")[1].read()) - # Windows: - if os.environ.has_key('NUMBER_OF_PROCESSORS'): - return max(int(os.environ.get('NUMBER_OF_PROCESSORS', '1')), 1) - return 1 # Default - -# Support PROGRESS= to show progress in different ways. -p = ARGUMENTS.get('PROGRESS') -if p == 'spinner': - Progress(['/\\r', '|\\r', '\\\\\\r', '-\\r'], - interval=5, - file=open('/dev/tty', 'w')) -elif p == 'name': - Progress('$TARGET\\r', overwrite=True, file=open('/dev/tty', 'w')) - -# Set the default -j value based on the number of processors. -SetOption('num_jobs', GetProcessorCount() + 1) - -# Have SCons use its cached dependency information. -SetOption('implicit_cache', 1) - -# Only re-calculate MD5 checksums if a timestamp has changed. 
-Decider('MD5-timestamp') - -# Since we set the -j value by default, suppress SCons warnings about being -# unable to support parallel build on versions of Python with no threading. -default_warnings = ['no-no-parallel-support'] -SetOption('warn', default_warnings + GetOption('warn')) - -AddOption('--mode', nargs=1, dest='conf_list', default=[], - action='append', help='Configuration to build.') - -AddOption('--verbose', dest='verbose', default=False, - action='store_true', help='Verbose command-line output.') - - -# -sconscript_file_map = %(sconscript_files)s - -class LoadTarget: - ''' - Class for deciding if a given target sconscript is to be included - based on a list of included target names, optionally prefixed with '-' - to exclude a target name. - ''' - def __init__(self, load): - ''' - Initialize a class with a list of names for possible loading. - - Arguments: - load: list of elements in the LOAD= specification - ''' - self.included = set([c for c in load if not c.startswith('-')]) - self.excluded = set([c[1:] for c in load if c.startswith('-')]) - - if not self.included: - self.included = set(['all']) - - def __call__(self, target): - ''' - Returns True if the specified target's sconscript file should be - loaded, based on the initialized included and excluded lists. - ''' - return (target in self.included or - ('all' in self.included and not target in self.excluded)) - -if 'LOAD' in ARGUMENTS: - load = ARGUMENTS['LOAD'].split(',') -else: - load = [] -load_target = LoadTarget(load) - -sconscript_files = [] -for target, sconscript in sconscript_file_map.iteritems(): - if load_target(target): - sconscript_files.append(sconscript) - - -target_alias_list= [] - -conf_list = GetOption('conf_list') -if conf_list: - # In case the same --mode= value was specified multiple times. - conf_list = list(set(conf_list)) -else: - conf_list = [%(default_configuration)r] - -sconsbuild_dir = Dir(%(sconsbuild_dir)s) - - -def FilterOut(self, **kw): - kw = SCons.Environment.copy_non_reserved_keywords(kw) - for key, val in kw.items(): - envval = self.get(key, None) - if envval is None: - # No existing variable in the environment, so nothing to delete. - continue - - for vremove in val: - # Use while not if, so we can handle duplicates. - while vremove in envval: - envval.remove(vremove) - - self[key] = envval - - # TODO(sgk): SCons.Environment.Append() has much more logic to deal - # with various types of values. We should handle all those cases in here - # too. (If variable is a dict, etc.) 
- - -non_compilable_suffixes = { - 'LINUX' : set([ - '.bdic', - '.css', - '.dat', - '.fragment', - '.gperf', - '.h', - '.hh', - '.hpp', - '.html', - '.hxx', - '.idl', - '.in', - '.in0', - '.in1', - '.js', - '.mk', - '.rc', - '.sigs', - '', - ]), - 'WINDOWS' : set([ - '.h', - '.hh', - '.hpp', - '.dat', - '.idl', - '.in', - '.in0', - '.in1', - ]), -} - -def compilable(env, file): - base, ext = os.path.splitext(str(file)) - if ext in non_compilable_suffixes[env['TARGET_PLATFORM']]: - return False - return True - -def compilable_files(env, sources): - return [x for x in sources if compilable(env, x)] - -def GypProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def GypTestProgram(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Program(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(*result) - return result - -def GypLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.Library(target, source, *args, **kw) - return result - -def GypLoadableModule(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.LoadableModule(target, source, *args, **kw) - return result - -def GypStaticLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.StaticLibrary(target, source, *args, **kw) - return result - -def GypSharedLibrary(env, target, source, *args, **kw): - source = compilable_files(env, source) - result = env.SharedLibrary(target, source, *args, **kw) - if env.get('INCREMENTAL'): - env.Precious(result) - return result - -def add_gyp_methods(env): - env.AddMethod(GypProgram) - env.AddMethod(GypTestProgram) - env.AddMethod(GypLibrary) - env.AddMethod(GypLoadableModule) - env.AddMethod(GypStaticLibrary) - env.AddMethod(GypSharedLibrary) - - env.AddMethod(FilterOut) - - env.AddMethod(compilable) - - -base_env = Environment( - tools = %(scons_tools)s, - INTERMEDIATE_DIR='$OBJ_DIR/${COMPONENT_NAME}/_${TARGET_NAME}_intermediate', - LIB_DIR='$TOP_BUILDDIR/lib', - OBJ_DIR='$TOP_BUILDDIR/obj', - SCONSBUILD_DIR=sconsbuild_dir.abspath, - SHARED_INTERMEDIATE_DIR='$OBJ_DIR/_global_intermediate', - SRC_DIR=Dir(%(src_dir)r), - TARGET_PLATFORM='LINUX', - TOP_BUILDDIR='$SCONSBUILD_DIR/$CONFIG_NAME', - LIBPATH=['$LIB_DIR'], -) - -if not GetOption('verbose'): - base_env.SetDefault( - ARCOMSTR='Creating library $TARGET', - ASCOMSTR='Assembling $TARGET', - CCCOMSTR='Compiling $TARGET', - CONCATSOURCECOMSTR='ConcatSource $TARGET', - CXXCOMSTR='Compiling $TARGET', - LDMODULECOMSTR='Building loadable module $TARGET', - LINKCOMSTR='Linking $TARGET', - MANIFESTCOMSTR='Updating manifest for $TARGET', - MIDLCOMSTR='Compiling IDL $TARGET', - PCHCOMSTR='Precompiling $TARGET', - RANLIBCOMSTR='Indexing $TARGET', - RCCOMSTR='Compiling resource $TARGET', - SHCCCOMSTR='Compiling $TARGET', - SHCXXCOMSTR='Compiling $TARGET', - SHLINKCOMSTR='Linking $TARGET', - SHMANIFESTCOMSTR='Updating manifest for $TARGET', - ) - -add_gyp_methods(base_env) - -for conf in conf_list: - env = base_env.Clone(CONFIG_NAME=conf) - SConsignFile(env.File('$TOP_BUILDDIR/.sconsign').abspath) - for sconscript in sconscript_files: - target_alias = env.SConscript(sconscript, exports=['env']) - if target_alias: - target_alias_list.extend(target_alias) - -Default(Alias('all', target_alias_list)) - -help_fmt = ''' -Usage: hammer 
[SCONS_OPTIONS] [VARIABLES] [TARGET] ... - -Local command-line build options: - --mode=CONFIG Configuration to build: - --mode=Debug [default] - --mode=Release - --verbose Print actual executed command lines. - -Supported command-line build variables: - LOAD=[module,...] Comma-separated list of components to load in the - dependency graph ('-' prefix excludes) - PROGRESS=type Display a progress indicator: - name: print each evaluated target name - spinner: print a spinner every 5 targets - -The following TARGET names can also be used as LOAD= module names: - -%%s -''' - -if GetOption('help'): - def columnar_text(items, width=78, indent=2, sep=2): - result = [] - colwidth = max(map(len, items)) + sep - cols = (width - indent) / colwidth - if cols < 1: - cols = 1 - rows = (len(items) + cols - 1) / cols - indent = '%%*s' %% (indent, '') - sep = indent - for row in xrange(0, rows): - result.append(sep) - for i in xrange(row, len(items), rows): - result.append('%%-*s' %% (colwidth, items[i])) - sep = '\\n' + indent - result.append('\\n') - return ''.join(result) - - load_list = set(sconscript_file_map.keys()) - target_aliases = set(map(str, target_alias_list)) - - common = load_list and target_aliases - load_only = load_list - common - target_only = target_aliases - common - help_text = [help_fmt %% columnar_text(sorted(list(common)))] - if target_only: - fmt = "The following are additional TARGET names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(target_only)))) - if load_only: - fmt = "The following are additional LOAD= module names:\\n\\n%%s\\n" - help_text.append(fmt %% columnar_text(sorted(list(load_only)))) - Help(''.join(help_text)) -""" - -# TEMPLATE END -############################################################################# - - -def GenerateSConscriptWrapper(build_file, build_file_data, name, - output_filename, sconscript_files, - default_configuration): - """ - Generates the "wrapper" SConscript file (analogous to the Visual Studio - solution) that calls all the individual target SConscript files. - """ - output_dir = os.path.dirname(output_filename) - src_dir = build_file_data['_DEPTH'] - src_dir_rel = gyp.common.RelativePath(src_dir, output_dir) - if not src_dir_rel: - src_dir_rel = '.' - scons_settings = build_file_data.get('scons_settings', {}) - sconsbuild_dir = scons_settings.get('sconsbuild_dir', '#') - scons_tools = scons_settings.get('tools', ['default']) - - sconscript_file_lines = ['dict('] - for target in sorted(sconscript_files.keys()): - sconscript = sconscript_files[target] - sconscript_file_lines.append(' %s = %r,' % (target, sconscript)) - sconscript_file_lines.append(')') - - fp = open(output_filename, 'w') - fp.write(header) - fp.write(_wrapper_template % { - 'default_configuration' : default_configuration, - 'name' : name, - 'scons_tools' : repr(scons_tools), - 'sconsbuild_dir' : repr(sconsbuild_dir), - 'sconscript_files' : '\n'.join(sconscript_file_lines), - 'src_dir' : src_dir_rel, - }) - fp.close() - - # Generate the SConstruct file that invokes the wrapper SConscript. - dir, fname = os.path.split(output_filename) - SConstruct = os.path.join(dir, 'SConstruct') - fp = open(SConstruct, 'w') - fp.write(header) - fp.write('SConscript(%s)\n' % repr(fname)) - fp.close() - - -def TargetFilename(target, build_file=None, output_suffix=''): - """Returns the .scons file name for the specified target. 
- """ - if build_file is None: - build_file, target = gyp.common.ParseQualifiedTarget(target)[:2] - output_file = os.path.join(os.path.dirname(build_file), - target + output_suffix + '.scons') - return output_file - - -def PerformBuild(data, configurations, params): - options = params['options'] - - # Due to the way we test gyp on the chromium typbots - # we need to look for 'scons.py' as well as the more common 'scons' - # TODO(sbc): update the trybots to have a more normal install - # of scons. - scons = 'scons' - paths = os.environ['PATH'].split(os.pathsep) - for scons_name in ['scons', 'scons.py']: - for path in paths: - test_scons = os.path.join(path, scons_name) - print 'looking for: %s' % test_scons - if os.path.exists(test_scons): - print "found scons: %s" % scons - scons = test_scons - break - - for config in configurations: - arguments = [scons, '-C', options.toplevel_dir, '--mode=%s' % config] - print "Building [%s]: %s" % (config, arguments) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - """ - Generates all the output files for the specified targets. - """ - options = params['options'] - - if options.generator_output: - def output_path(filename): - return filename.replace(params['cwd'], options.generator_output) - else: - def output_path(filename): - return filename - - default_configuration = None - - for qualified_target in target_list: - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in scons build (target %s)' % - qualified_target) - scons_target = SCons.Target(spec) - if scons_target.is_ignored: - continue - - # TODO: assumes the default_configuration of the first target - # non-Default target is the correct default for all targets. - # Need a better model for handle variation between targets. - if (not default_configuration and - spec['default_configuration'] != 'Default'): - default_configuration = spec['default_configuration'] - - build_file, target = gyp.common.ParseQualifiedTarget(qualified_target)[:2] - output_file = TargetFilename(target, build_file, options.suffix) - if options.generator_output: - output_file = output_path(output_file) - - if not spec.has_key('libraries'): - spec['libraries'] = [] - - # Add dependent static library targets to the 'libraries' value. - deps = spec.get('dependencies', []) - spec['scons_dependencies'] = [] - for d in deps: - td = target_dicts[d] - target_name = td['target_name'] - spec['scons_dependencies'].append("Alias('%s')" % target_name) - if td['type'] in ('static_library', 'shared_library'): - libname = td.get('product_name', target_name) - spec['libraries'].append('lib' + libname) - if td['type'] == 'loadable_module': - prereqs = spec.get('scons_prerequisites', []) - # TODO: parameterize with <(SHARED_LIBRARY_*) variables? 
- td_target = SCons.Target(td) - td_target.target_prefix = '${SHLIBPREFIX}' - td_target.target_suffix = '${SHLIBSUFFIX}' - - GenerateSConscript(output_file, spec, build_file, data[build_file]) - - if not default_configuration: - default_configuration = 'Default' - - for build_file in sorted(data.keys()): - path, ext = os.path.splitext(build_file) - if ext != '.gyp': - continue - output_dir, basename = os.path.split(path) - output_filename = path + '_main' + options.suffix + '.scons' - - all_targets = gyp.common.AllTargets(target_list, target_dicts, build_file) - sconscript_files = {} - for t in all_targets: - scons_target = SCons.Target(target_dicts[t]) - if scons_target.is_ignored: - continue - bf, target = gyp.common.ParseQualifiedTarget(t)[:2] - target_filename = TargetFilename(target, bf, options.suffix) - tpath = gyp.common.RelativePath(target_filename, output_dir) - sconscript_files[target] = tpath - - output_filename = output_path(output_filename) - if sconscript_files: - GenerateSConscriptWrapper(build_file, data[build_file], basename, - output_filename, sconscript_files, - default_configuration) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/xcode.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/xcode.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/generator/xcode.py 2013-02-03 16:48:10.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/generator/xcode.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1239 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import filecmp -import gyp.common -import gyp.xcodeproj_file -import errno -import os -import sys -import posixpath -import re -import shutil -import subprocess -import tempfile - - -# Project files generated by this module will use _intermediate_var as a -# custom Xcode setting whose value is a DerivedSources-like directory that's -# project-specific and configuration-specific. The normal choice, -# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive -# as it is likely that multiple targets within a single project file will want -# to access the same set of generated files. The other option, -# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific, -# it is not configuration-specific. INTERMEDIATE_DIR is defined as -# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION). -_intermediate_var = 'INTERMEDIATE_DIR' - -# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all -# targets that share the same BUILT_PRODUCTS_DIR. -_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR' - -_library_search_paths_var = 'LIBRARY_SEARCH_PATHS' - -generator_default_variables = { - 'EXECUTABLE_PREFIX': '', - 'EXECUTABLE_SUFFIX': '', - 'STATIC_LIB_PREFIX': 'lib', - 'SHARED_LIB_PREFIX': 'lib', - 'STATIC_LIB_SUFFIX': '.a', - 'SHARED_LIB_SUFFIX': '.dylib', - # INTERMEDIATE_DIR is a place for targets to build up intermediate products. - # It is specific to each build environment. It is only guaranteed to exist - # and be constant within the context of a project, corresponding to a single - # input file. Some build environments may allow their intermediate directory - # to be shared on a wider scale, but this is not guaranteed. 
- 'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var, - 'OS': 'mac', - 'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)', - 'LIB_DIR': '$(BUILT_PRODUCTS_DIR)', - 'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)', - 'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)', - 'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)', - 'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)', - 'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)', - 'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var, - 'CONFIGURATION_NAME': '$(CONFIGURATION)', -} - -# The Xcode-specific sections that hold paths. -generator_additional_path_sections = [ - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', - # 'mac_framework_dirs', input already handles _dirs endings. -] - -# The Xcode-specific keys that exist on targets and aren't moved down to -# configurations. -generator_additional_non_configuration_keys = [ - 'mac_bundle', - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', - 'xcode_create_dependents_test_runner', -] - -# We want to let any rules apply to files that are resources also. -generator_extra_sources_for_rules = [ - 'mac_bundle_resources', - 'mac_framework_headers', - 'mac_framework_private_headers', -] - -# Xcode's standard set of library directories, which don't need to be duplicated -# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay. -xcode_standard_library_dirs = frozenset([ - '$(SDKROOT)/usr/lib', - '$(SDKROOT)/usr/local/lib', -]) - -def CreateXCConfigurationList(configuration_names): - xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []}) - if len(configuration_names) == 0: - configuration_names = ['Default'] - for configuration_name in configuration_names: - xcbc = gyp.xcodeproj_file.XCBuildConfiguration({ - 'name': configuration_name}) - xccl.AppendProperty('buildConfigurations', xcbc) - xccl.SetProperty('defaultConfigurationName', configuration_names[0]) - return xccl - - -class XcodeProject(object): - def __init__(self, gyp_path, path, build_file_dict): - self.gyp_path = gyp_path - self.path = path - self.project = gyp.xcodeproj_file.PBXProject(path=path) - projectDirPath = gyp.common.RelativePath( - os.path.dirname(os.path.abspath(self.gyp_path)), - os.path.dirname(path) or '.') - self.project.SetProperty('projectDirPath', projectDirPath) - self.project_file = \ - gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project}) - self.build_file_dict = build_file_dict - - # TODO(mark): add destructor that cleans up self.path if created_dir is - # True and things didn't complete successfully. Or do something even - # better with "try"? - self.created_dir = False - try: - os.makedirs(self.path) - self.created_dir = True - except OSError, e: - if e.errno != errno.EEXIST: - raise - - def Finalize1(self, xcode_targets, serialize_all_tests): - # Collect a list of all of the build configuration names used by the - # various targets in the file. It is very heavily advised to keep each - # target in an entire project (even across multiple project files) using - # the same set of configuration names. - configurations = [] - for xct in self.project.GetProperty('targets'): - xccl = xct.GetProperty('buildConfigurationList') - xcbcs = xccl.GetProperty('buildConfigurations') - for xcbc in xcbcs: - name = xcbc.GetProperty('name') - if name not in configurations: - configurations.append(name) - - # Replace the XCConfigurationList attached to the PBXProject object with - # a new one specifying all of the configuration names used by the various - # targets. 
- try: - xccl = CreateXCConfigurationList(configurations) - self.project.SetProperty('buildConfigurationList', xccl) - except: - sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path) - raise - - # The need for this setting is explained above where _intermediate_var is - # defined. The comments below about wanting to avoid project-wide build - # settings apply here too, but this needs to be set on a project-wide basis - # so that files relative to the _intermediate_var setting can be displayed - # properly in the Xcode UI. - # - # Note that for configuration-relative files such as anything relative to - # _intermediate_var, for the purposes of UI tree view display, Xcode will - # only resolve the configuration name once, when the project file is - # opened. If the active build configuration is changed, the project file - # must be closed and reopened if it is desired for the tree view to update. - # This is filed as Apple radar 6588391. - xccl.SetBuildSetting(_intermediate_var, - '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)') - xccl.SetBuildSetting(_shared_intermediate_var, - '$(SYMROOT)/DerivedSources/$(CONFIGURATION)') - - # Set user-specified project-wide build settings and config files. This - # is intended to be used very sparingly. Really, almost everything should - # go into target-specific build settings sections. The project-wide - # settings are only intended to be used in cases where Xcode attempts to - # resolve variable references in a project context as opposed to a target - # context, such as when resolving sourceTree references while building up - # the tree tree view for UI display. - # Any values set globally are applied to all configurations, then any - # per-configuration values are applied. - for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems(): - xccl.SetBuildSetting(xck, xcv) - if 'xcode_config_file' in self.build_file_dict: - config_ref = self.project.AddOrGetFileInRootGroup( - self.build_file_dict['xcode_config_file']) - xccl.SetBaseConfiguration(config_ref) - build_file_configurations = self.build_file_dict.get('configurations', {}) - if build_file_configurations: - for config_name in configurations: - build_file_configuration_named = \ - build_file_configurations.get(config_name, {}) - if build_file_configuration_named: - xcc = xccl.ConfigurationNamed(config_name) - for xck, xcv in build_file_configuration_named.get('xcode_settings', - {}).iteritems(): - xcc.SetBuildSetting(xck, xcv) - if 'xcode_config_file' in build_file_configuration_named: - config_ref = self.project.AddOrGetFileInRootGroup( - build_file_configurations[config_name]['xcode_config_file']) - xcc.SetBaseConfiguration(config_ref) - - # Sort the targets based on how they appeared in the input. - # TODO(mark): Like a lot of other things here, this assumes internal - # knowledge of PBXProject - in this case, of its "targets" property. - - # ordinary_targets are ordinary targets that are already in the project - # file. run_test_targets are the targets that run unittests and should be - # used for the Run All Tests target. support_targets are the action/rule - # targets used by GYP file targets, just kept for the assert check. - ordinary_targets = [] - run_test_targets = [] - support_targets = [] - - # targets is full list of targets in the project. - targets = [] - - # does the it define it's own "all"? - has_custom_all = False - - # targets_for_all is the list of ordinary_targets that should be listed - # in this project's "All" target. 
It includes each non_runtest_target - # that does not have suppress_wildcard set. - targets_for_all = [] - - for target in self.build_file_dict['targets']: - target_name = target['target_name'] - toolset = target['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name, - toolset) - xcode_target = xcode_targets[qualified_target] - # Make sure that the target being added to the sorted list is already in - # the unsorted list. - assert xcode_target in self.project._properties['targets'] - targets.append(xcode_target) - ordinary_targets.append(xcode_target) - if xcode_target.support_target: - support_targets.append(xcode_target.support_target) - targets.append(xcode_target.support_target) - - if not int(target.get('suppress_wildcard', False)): - targets_for_all.append(xcode_target) - - if target_name.lower() == 'all': - has_custom_all = True; - - # If this target has a 'run_as' attribute, add its target to the - # targets, and add it to the test targets. - if target.get('run_as'): - # Make a target to run something. It should have one - # dependency, the parent xcode target. - xccl = CreateXCConfigurationList(configurations) - run_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run ' + target_name, - 'productName': xcode_target.GetProperty('productName'), - 'buildConfigurationList': xccl, - }, - parent=self.project) - run_target.AddDependency(xcode_target) - - command = target['run_as'] - script = '' - if command.get('working_directory'): - script = script + 'cd "%s"\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - command.get('working_directory')) - - if command.get('environment'): - script = script + "\n".join( - ['export %s="%s"' % - (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val)) - for (key, val) in command.get('environment').iteritems()]) + "\n" - - # Some test end up using sockets, files on disk, etc. and can get - # confused if more then one test runs at a time. The generator - # flag 'xcode_serialize_all_test_runs' controls the forcing of all - # tests serially. It defaults to True. To get serial runs this - # little bit of python does the same as the linux flock utility to - # make sure only one runs at a time. - command_prefix = '' - if serialize_all_tests: - command_prefix = \ -"""python -c "import fcntl, subprocess, sys -file = open('$TMPDIR/GYP_serialize_test_runs', 'a') -fcntl.flock(file.fileno(), fcntl.LOCK_EX) -sys.exit(subprocess.call(sys.argv[1:]))" """ - - # If we were unable to exec for some reason, we want to exit - # with an error, and fixup variable references to be shell - # syntax instead of xcode syntax. - script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \ - gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - gyp.common.EncodePOSIXShellList(command.get('action'))) - - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - run_target.AppendProperty('buildPhases', ssbp) - - # Add the run target to the project file. - targets.append(run_target) - run_test_targets.append(run_target) - xcode_target.test_runner = run_target - - - # Make sure that the list of targets being replaced is the same length as - # the one replacing it, but allow for the added test runner targets. - assert len(self.project._properties['targets']) == \ - len(ordinary_targets) + len(support_targets) - - self.project._properties['targets'] = targets - - # Get rid of unnecessary levels of depth in groups like the Source group. 
- self.project.RootGroupsTakeOverOnlyChildren(True) - - # Sort the groups nicely. Do this after sorting the targets, because the - # Products group is sorted based on the order of the targets. - self.project.SortGroups() - - # Create an "All" target if there's more than one target in this project - # file and the project didn't define its own "All" target. Put a generated - # "All" target first so that people opening up the project for the first - # time will build everything by default. - if len(targets_for_all) > 1 and not has_custom_all: - xccl = CreateXCConfigurationList(configurations) - all_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'All', - }, - parent=self.project) - - for target in targets_for_all: - all_target.AddDependency(target) - - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._properties. It's important to get the "All" target first, - # though. - self.project._properties['targets'].insert(0, all_target) - - # The same, but for run_test_targets. - if len(run_test_targets) > 1: - xccl = CreateXCConfigurationList(configurations) - run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget( - { - 'buildConfigurationList': xccl, - 'name': 'Run All Tests', - }, - parent=self.project) - for run_test_target in run_test_targets: - run_all_tests_target.AddDependency(run_test_target) - - # Insert after the "All" target, which must exist if there is more than - # one run_test_target. - self.project._properties['targets'].insert(1, run_all_tests_target) - - def Finalize2(self, xcode_targets, xcode_target_to_target_dict): - # Finalize2 needs to happen in a separate step because the process of - # updating references to other projects depends on the ordering of targets - # within remote project files. Finalize1 is responsible for sorting duty, - # and once all project files are sorted, Finalize2 can come in and update - # these references. - - # To support making a "test runner" target that will run all the tests - # that are direct dependents of any given target, we look for - # xcode_create_dependents_test_runner being set on an Aggregate target, - # and generate a second target that will run the tests runners found under - # the marked target. - for bf_tgt in self.build_file_dict['targets']: - if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)): - tgt_name = bf_tgt['target_name'] - toolset = bf_tgt['toolset'] - qualified_target = gyp.common.QualifiedTarget(self.gyp_path, - tgt_name, toolset) - xcode_target = xcode_targets[qualified_target] - if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget): - # Collect all the run test targets. - all_run_tests = [] - pbxtds = xcode_target.GetProperty('dependencies') - for pbxtd in pbxtds: - pbxcip = pbxtd.GetProperty('targetProxy') - dependency_xct = pbxcip.GetProperty('remoteGlobalIDString') - if hasattr(dependency_xct, 'test_runner'): - all_run_tests.append(dependency_xct.test_runner) - - # Directly depend on all the runners as they depend on the target - # that builds them. - if len(all_run_tests) > 0: - run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({ - 'name': 'Run %s Tests' % tgt_name, - 'productName': tgt_name, - }, - parent=self.project) - for run_test_target in all_run_tests: - run_all_target.AddDependency(run_test_target) - - # Insert the test runner after the related target. 
- idx = self.project._properties['targets'].index(xcode_target) - self.project._properties['targets'].insert(idx + 1, run_all_target) - - # Update all references to other projects, to make sure that the lists of - # remote products are complete. Otherwise, Xcode will fill them in when - # it opens the project file, which will result in unnecessary diffs. - # TODO(mark): This is evil because it relies on internal knowledge of - # PBXProject._other_pbxprojects. - for other_pbxproject in self.project._other_pbxprojects.keys(): - self.project.AddOrGetProjectReference(other_pbxproject) - - self.project.SortRemoteProductReferences() - - # Give everything an ID. - self.project_file.ComputeIDs() - - # Make sure that no two objects in the project file have the same ID. If - # multiple objects wind up with the same ID, upon loading the file, Xcode - # will only recognize one object (the last one in the file?) and the - # results are unpredictable. - self.project_file.EnsureNoIDCollisions() - - def Write(self): - # Write the project file to a temporary location first. Xcode watches for - # changes to the project file and presents a UI sheet offering to reload - # the project when it does change. However, in some cases, especially when - # multiple projects are open or when Xcode is busy, things don't work so - # seamlessly. Sometimes, Xcode is able to detect that a project file has - # changed but can't unload it because something else is referencing it. - # To mitigate this problem, and to avoid even having Xcode present the UI - # sheet when an open project is rewritten for inconsequential changes, the - # project file is written to a temporary file in the xcodeproj directory - # first. The new temporary file is then compared to the existing project - # file, if any. If they differ, the new file replaces the old; otherwise, - # the new project file is simply deleted. Xcode properly detects a file - # being renamed over an open project file as a change and so it remains - # able to present the "project file changed" sheet under this system. - # Writing to a temporary file first also avoids the possible problem of - # Xcode rereading an incomplete project file. - (output_fd, new_pbxproj_path) = \ - tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.', - dir=self.path) - - try: - output_file = os.fdopen(output_fd, 'wb') - - self.project_file.Print(output_file) - output_file.close() - - pbxproj_path = os.path.join(self.path, 'project.pbxproj') - - same = False - try: - same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False) - except OSError, e: - if e.errno != errno.ENOENT: - raise - - if same: - # The new file is identical to the old one, just get rid of the new - # one. - os.unlink(new_pbxproj_path) - else: - # The new file is different from the old one, or there is no old one. - # Rename the new file to the permanent name. - # - # tempfile.mkstemp uses an overly restrictive mode, resulting in a - # file that can only be read by the owner, regardless of the umask. - # There's no reason to not respect the umask here, which means that - # an extra hoop is required to fetch it and reset the new file's mode. - # - # No way to get the umask without setting a new one? Set a safe one - # and then set it back to the old value. - umask = os.umask(077) - os.umask(umask) - - os.chmod(new_pbxproj_path, 0666 & ~umask) - os.rename(new_pbxproj_path, pbxproj_path) - - except Exception: - # Don't leave turds behind. 
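The umask round-trip in the Write() hunk above (set a throwaway mask to read the current one, restore it, then chmod the freshly written project file) reads as this standalone sketch; the path argument is a placeholder, not a name from the patch.

import os

def chmod_respecting_umask(path):
    # tempfile.mkstemp creates the file as 0600 regardless of the umask.
    # There is no getter for the umask, so set a safe value, capture the
    # old one, restore it, and then grant 0666 minus the masked bits.
    umask = os.umask(0o077)
    os.umask(umask)
    os.chmod(path, 0o666 & ~umask)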
In fact, if this code was responsible for - # creating the xcodeproj directory, get rid of that too. - os.unlink(new_pbxproj_path) - if self.created_dir: - shutil.rmtree(self.path, True) - raise - - -cached_xcode_version = None -def InstalledXcodeVersion(): - """Fetches the installed version of Xcode, returns empty string if it is - unable to figure it out.""" - - global cached_xcode_version - if not cached_xcode_version is None: - return cached_xcode_version - - # Default to an empty string - cached_xcode_version = '' - - # Collect the xcodebuild's version information. - try: - import subprocess - cmd = ['/usr/bin/xcodebuild', '-version'] - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) - xcodebuild_version_info = proc.communicate()[0] - # Any error, return empty string - if proc.returncode: - xcodebuild_version_info = '' - except OSError: - # We failed to launch the tool - xcodebuild_version_info = '' - - # Pull out the Xcode version itself. - match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE) - if match_line: - cached_xcode_version = match_line.group(1) - # Done! - return cached_xcode_version - - -def AddSourceToTarget(source, type, pbxp, xct): - # TODO(mark): Perhaps source_extensions and library_extensions can be made a - # little bit fancier. - source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'] - - # .o is conceptually more of a "source" than a "library," but Xcode thinks - # of "sources" as things to compile and "libraries" (or "frameworks") as - # things to link with. Adding an object file to an Xcode target's frameworks - # phase works properly. - library_extensions = ['a', 'dylib', 'framework', 'o'] - - basename = posixpath.basename(source) - (root, ext) = posixpath.splitext(basename) - if ext: - ext = ext[1:].lower() - - if ext in source_extensions and type != 'none': - xct.SourcesPhase().AddFile(source) - elif ext in library_extensions and type != 'none': - xct.FrameworksPhase().AddFile(source) - else: - # Files that aren't added to a sources or frameworks build phase can still - # go into the project file, just not as part of a build phase. - pbxp.AddOrGetFileInRootGroup(source) - - -def AddResourceToTarget(resource, pbxp, xct): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - xct.ResourcesPhase().AddFile(resource) - - -def AddHeaderToTarget(header, pbxp, xct, is_public): - # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call - # where it's used. - settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public] - xct.HeadersPhase().AddFile(header, settings) - - -_xcode_variable_re = re.compile('(\$\((.*?)\))') -def ExpandXcodeVariables(string, expansions): - """Expands Xcode-style $(VARIABLES) in string per the expansions dict. - - In some rare cases, it is appropriate to expand Xcode variables when a - project file is generated. For any substring $(VAR) in string, if VAR is a - key in the expansions dict, $(VAR) will be replaced with expansions[VAR]. - Any $(VAR) substring in string for which VAR is not a key in the expansions - dict will remain in the returned string. 
- """ - - matches = _xcode_variable_re.findall(string) - if matches == None: - return string - - matches.reverse() - for match in matches: - (to_replace, variable) = match - if not variable in expansions: - continue - - replacement = expansions[variable] - string = re.sub(re.escape(to_replace), replacement, string) - - return string - - -def EscapeXCodeArgument(s): - """We must escape the arguments that we give to XCode so that it knows not to - split on spaces and to respect backslash and quote literals.""" - s = s.replace('\\', '\\\\') - s = s.replace('"', '\\"') - return '"' + s + '"' - - - -def PerformBuild(data, configurations, params): - options = params['options'] - - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - - for config in configurations: - arguments = ['xcodebuild', '-project', xcodeproj_path] - arguments += ['-configuration', config] - print "Building [%s]: %s" % (config, arguments) - subprocess.check_call(arguments) - - -def GenerateOutput(target_list, target_dicts, data, params): - options = params['options'] - generator_flags = params.get('generator_flags', {}) - parallel_builds = generator_flags.get('xcode_parallel_builds', True) - serialize_all_tests = \ - generator_flags.get('xcode_serialize_all_test_runs', True) - project_version = generator_flags.get('xcode_project_version', None) - skip_excluded_files = \ - not generator_flags.get('xcode_list_excluded_files', True) - xcode_projects = {} - for build_file, build_file_dict in data.iteritems(): - (build_file_root, build_file_ext) = os.path.splitext(build_file) - if build_file_ext != '.gyp': - continue - xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' - if options.generator_output: - xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) - xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) - xcode_projects[build_file] = xcp - pbxp = xcp.project - - if parallel_builds: - pbxp.SetProperty('attributes', - {'BuildIndependentTargetsInParallel': 'YES'}) - if project_version: - xcp.project_file.SetXcodeVersion(project_version) - - # Add gyp/gypi files to project - if not generator_flags.get('standalone'): - main_group = pbxp.GetProperty('mainGroup') - build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) - main_group.AppendChild(build_group) - for included_file in build_file_dict['included_files']: - build_group.AddOrGetFileByPath(included_file, False) - - xcode_targets = {} - xcode_target_to_target_dict = {} - for qualified_target in target_list: - [build_file, target_name, toolset] = \ - gyp.common.ParseQualifiedTarget(qualified_target) - - spec = target_dicts[qualified_target] - if spec['toolset'] != 'target': - raise Exception( - 'Multiple toolsets not supported in xcode build (target %s)' % - qualified_target) - configuration_names = [spec['default_configuration']] - for configuration_name in sorted(spec['configurations'].keys()): - if configuration_name not in configuration_names: - configuration_names.append(configuration_name) - xcp = xcode_projects[build_file] - pbxp = xcp.project - - # Set up the configurations for the target according to the list of names - # supplied. - xccl = CreateXCConfigurationList(configuration_names) - - # Create an XCTarget subclass object for the target. 
The type with - # "+bundle" appended will be used if the target has "mac_bundle" set. - # loadable_modules not in a mac_bundle are mapped to - # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets - # to create a single-file mh_bundle. - _types = { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.googlecode.gyp.xcode.bundle', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - 'executable+bundle': 'com.apple.product-type.application', - 'loadable_module+bundle': 'com.apple.product-type.bundle', - 'shared_library+bundle': 'com.apple.product-type.framework', - } - - target_properties = { - 'buildConfigurationList': xccl, - 'name': target_name, - } - - type = spec['type'] - is_bundle = int(spec.get('mac_bundle', 0)) - if type != 'none': - type_bundle_key = type - if is_bundle: - type_bundle_key += '+bundle' - xctarget_type = gyp.xcodeproj_file.PBXNativeTarget - try: - target_properties['productType'] = _types[type_bundle_key] - except KeyError, e: - gyp.common.ExceptionAppend(e, "-- unknown product type while " - "writing target %s" % target_name) - raise - else: - xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget - assert not is_bundle, ( - 'mac_bundle targets cannot have type none (target "%s")' % - target_name) - - target_product_name = spec.get('product_name') - if target_product_name is not None: - target_properties['productName'] = target_product_name - - xct = xctarget_type(target_properties, parent=pbxp, - force_outdir=spec.get('product_dir'), - force_prefix=spec.get('product_prefix'), - force_extension=spec.get('product_extension')) - pbxp.AppendProperty('targets', xct) - xcode_targets[qualified_target] = xct - xcode_target_to_target_dict[xct] = spec - - spec_actions = spec.get('actions', []) - spec_rules = spec.get('rules', []) - - # Xcode has some "issues" with checking dependencies for the "Compile - # sources" step with any source files/headers generated by actions/rules. - # To work around this, if a target is building anything directly (not - # type "none"), then a second target is used to run the GYP actions/rules - # and is made a dependency of this target. This way the work is done - # before the dependency checks for what should be recompiled. - support_xct = None - if type != 'none' and (spec_actions or spec_rules): - support_xccl = CreateXCConfigurationList(configuration_names); - support_target_properties = { - 'buildConfigurationList': support_xccl, - 'name': target_name + ' Support', - } - if target_product_name: - support_target_properties['productName'] = \ - target_product_name + ' Support' - support_xct = \ - gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, - parent=pbxp) - pbxp.AppendProperty('targets', support_xct) - xct.AddDependency(support_xct) - # Hang the support target off the main target so it can be tested/found - # by the generator during Finalize. - xct.support_target = support_xct - - prebuild_index = 0 - - # Add custom shell script phases for "actions" sections. - for action in spec_actions: - # There's no need to write anything into the script to ensure that the - # output directories already exist, because Xcode will look at the - # declared outputs and automatically ensure that they exist for us. - - # Do we have a message to print when this action runs? 
- message = action.get('message') - if message: - message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) - else: - message = '' - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(action['action']) - - # Convert Xcode-type variable references to sh-compatible environment - # variable references. - message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) - action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( - action_string) - - script = '' - # Include the optional message - if message_sh: - script += message_sh + '\n' - # Be sure the script runs in exec, and that if exec fails, the script - # exits signalling an error. - script += 'exec ' + action_string_sh + '\nexit 1\n' - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': action['inputs'], - 'name': 'Action "' + action['action_name'] + '"', - 'outputPaths': action['outputs'], - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. - xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # TODO(mark): Should verify that at most one of these is specified. - if int(action.get('process_outputs_as_sources', False)): - for output in action['outputs']: - AddSourceToTarget(output, type, pbxp, xct) - - if int(action.get('process_outputs_as_mac_bundle_resources', False)): - for output in action['outputs']: - AddResourceToTarget(output, pbxp, xct) - - # tgt_mac_bundle_resources holds the list of bundle resources so - # the rule processing can check against it. - if is_bundle: - tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) - else: - tgt_mac_bundle_resources = [] - - # Add custom shell script phases driving "make" for "rules" sections. - # - # Xcode's built-in rule support is almost powerful enough to use directly, - # but there are a few significant deficiencies that render them unusable. - # There are workarounds for some of its inadequacies, but in aggregate, - # the workarounds added complexity to the generator, and some workarounds - # actually require input files to be crafted more carefully than I'd like. - # Consequently, until Xcode rules are made more capable, "rules" input - # sections will be handled in Xcode output by shell script build phases - # performed prior to the compilation phase. - # - # The following problems with Xcode rules were found. The numbers are - # Apple radar IDs. I hope that these shortcomings are addressed, I really - # liked having the rules handled directly in Xcode during the period that - # I was prototyping this. - # - # 6588600 Xcode compiles custom script rule outputs too soon, compilation - # fails. This occurs when rule outputs from distinct inputs are - # interdependent. The only workaround is to put rules and their - # inputs in a separate target from the one that compiles the rule - # outputs. This requires input file cooperation and it means that - # process_outputs_as_sources is unusable. - # 6584932 Need to declare that custom rule outputs should be excluded from - # compilation. A possible workaround is to lie to Xcode about a - # rule's output, giving it a dummy file it doesn't know how to - # compile. The rule action script would need to touch the dummy. 
- # 6584839 I need a way to declare additional inputs to a custom rule. - # A possible workaround is a shell script phase prior to - # compilation that touches a rule's primary input files if any - # would-be additional inputs are newer than the output. Modifying - # the source tree - even just modification times - feels dirty. - # 6564240 Xcode "custom script" build rules always dump all environment - # variables. This is a low-prioroty problem and is not a - # show-stopper. - rules_by_ext = {} - for rule in spec_rules: - rules_by_ext[rule['extension']] = rule - - # First, some definitions: - # - # A "rule source" is a file that was listed in a target's "sources" - # list and will have a rule applied to it on the basis of matching the - # rule's "extensions" attribute. Rule sources are direct inputs to - # rules. - # - # Rule definitions may specify additional inputs in their "inputs" - # attribute. These additional inputs are used for dependency tracking - # purposes. - # - # A "concrete output" is a rule output with input-dependent variables - # resolved. For example, given a rule with: - # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], - # if the target's "sources" list contained "one.ext" and "two.ext", - # the "concrete output" for rule input "two.ext" would be "two.cc". If - # a rule specifies multiple outputs, each input file that the rule is - # applied to will have the same number of concrete outputs. - # - # If any concrete outputs are outdated or missing relative to their - # corresponding rule_source or to any specified additional input, the - # rule action must be performed to generate the concrete outputs. - - # concrete_outputs_by_rule_source will have an item at the same index - # as the rule['rule_sources'] that it corresponds to. Each item is a - # list of all of the concrete outputs for the rule_source. - concrete_outputs_by_rule_source = [] - - # concrete_outputs_all is a flat list of all concrete outputs that this - # rule is able to produce, given the known set of input files - # (rule_sources) that apply to it. - concrete_outputs_all = [] - - # messages & actions are keyed by the same indices as rule['rule_sources'] - # and concrete_outputs_by_rule_source. They contain the message and - # action to perform after resolving input-dependent variables. The - # message is optional, in which case None is stored for each rule source. - messages = [] - actions = [] - - for rule_source in rule.get('rule_sources', []): - rule_source_dirname, rule_source_basename = \ - posixpath.split(rule_source) - (rule_source_root, rule_source_ext) = \ - posixpath.splitext(rule_source_basename) - - # These are the same variable names that Xcode uses for its own native - # rule support. Because Xcode's rule engine is not being used, they - # need to be expanded as they are written to the makefile. - rule_input_dict = { - 'INPUT_FILE_BASE': rule_source_root, - 'INPUT_FILE_SUFFIX': rule_source_ext, - 'INPUT_FILE_NAME': rule_source_basename, - 'INPUT_FILE_PATH': rule_source, - 'INPUT_FILE_DIRNAME': rule_source_dirname, - } - - concrete_outputs_for_this_rule_source = [] - for output in rule.get('outputs', []): - # Fortunately, Xcode and make both use $(VAR) format for their - # variables, so the expansion is the only transformation necessary. - # Any remaning $(VAR)-type variables in the string can be given - # directly to make, which will pick up the correct settings from - # what Xcode puts into the environment. 
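For a rule applied to a source file two.ext, the expansion performed on the next lines behaves as in this sketch (file names hypothetical, mirroring the rule_input_dict built just above):

rule_input_dict = {
    'INPUT_FILE_BASE': 'two',
    'INPUT_FILE_SUFFIX': '.ext',
    'INPUT_FILE_NAME': 'two.ext',
    'INPUT_FILE_PATH': 'dir/two.ext',
    'INPUT_FILE_DIRNAME': 'dir',
}
# '$(INPUT_FILE_BASE).cc' becomes 'two.cc'; variables not present in the
# expansions dict are left in place for make/Xcode to resolve later.
ExpandXcodeVariables('$(INPUT_FILE_BASE).cc', rule_input_dict)         # 'two.cc'
ExpandXcodeVariables('$(BUILT_PRODUCTS_DIR)/two.cc', rule_input_dict)  # unchanged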
- concrete_output = ExpandXcodeVariables(output, rule_input_dict) - concrete_outputs_for_this_rule_source.append(concrete_output) - - # Add all concrete outputs to the project. - pbxp.AddOrGetFileInRootGroup(concrete_output) - - concrete_outputs_by_rule_source.append( \ - concrete_outputs_for_this_rule_source) - concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) - - # TODO(mark): Should verify that at most one of these is specified. - if int(rule.get('process_outputs_as_sources', False)): - for output in concrete_outputs_for_this_rule_source: - AddSourceToTarget(output, type, pbxp, xct) - - # If the file came from the mac_bundle_resources list or if the rule - # is marked to process outputs as bundle resource, do so. - was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources - if was_mac_bundle_resource or \ - int(rule.get('process_outputs_as_mac_bundle_resources', False)): - for output in concrete_outputs_for_this_rule_source: - AddResourceToTarget(output, pbxp, xct) - - # Do we have a message to print when this rule runs? - message = rule.get('message') - if message: - message = gyp.common.EncodePOSIXShellArgument(message) - message = ExpandXcodeVariables(message, rule_input_dict) - messages.append(message) - - # Turn the list into a string that can be passed to a shell. - action_string = gyp.common.EncodePOSIXShellList(rule['action']) - - action = ExpandXcodeVariables(action_string, rule_input_dict) - actions.append(action) - - if len(concrete_outputs_all) > 0: - # TODO(mark): There's a possibilty for collision here. Consider - # target "t" rule "A_r" and target "t_A" rule "r". - makefile_name = '%s.make' % re.sub( - '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) - makefile_path = os.path.join(xcode_projects[build_file].path, - makefile_name) - # TODO(mark): try/close? Write to a temporary file and swap it only - # if it's got changes? - makefile = open(makefile_path, 'wb') - - # make will build the first target in the makefile by default. By - # convention, it's called "all". List all (or at least one) - # concrete output for each rule source as a prerequisite of the "all" - # target. - makefile.write('all: \\\n') - for concrete_output_index in \ - xrange(0, len(concrete_outputs_by_rule_source)): - # Only list the first (index [0]) concrete output of each input - # in the "all" target. Otherwise, a parallel make (-j > 1) would - # attempt to process each input multiple times simultaneously. - # Otherwise, "all" could just contain the entire list of - # concrete_outputs_all. - concrete_output = \ - concrete_outputs_by_rule_source[concrete_output_index][0] - if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (concrete_output, eol)) - - for (rule_source, concrete_outputs, message, action) in \ - zip(rule['rule_sources'], concrete_outputs_by_rule_source, - messages, actions): - makefile.write('\n') - - # Add a rule that declares it can build each concrete output of a - # rule source. Collect the names of the directories that are - # required. 
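As a sketch of the "all" prerequisite list written a few lines above: only the first concrete output of each rule source is listed, so a parallel make does not process one input twice. For hypothetical rule sources one.ext and two.ext with outputs ['$(INPUT_FILE_BASE).cc'], the loop emits roughly the following (indentation approximate).

first_outputs = ['one.cc', 'two.cc']
print('all: \\')
for i, out in enumerate(first_outputs):
    eol = '' if i == len(first_outputs) - 1 else ' \\'
    print('    %s%s' % (out, eol))
# Output:
#   all: \
#       one.cc \
#       two.cc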
- concrete_output_dirs = [] - for concrete_output_index in xrange(0, len(concrete_outputs)): - concrete_output = concrete_outputs[concrete_output_index] - if concrete_output_index == 0: - bol = '' - else: - bol = ' ' - makefile.write('%s%s \\\n' % (bol, concrete_output)) - - concrete_output_dir = posixpath.dirname(concrete_output) - if (concrete_output_dir and - concrete_output_dir not in concrete_output_dirs): - concrete_output_dirs.append(concrete_output_dir) - - makefile.write(' : \\\n') - - # The prerequisites for this rule are the rule source itself and - # the set of additional rule inputs, if any. - prerequisites = [rule_source] - prerequisites.extend(rule.get('inputs', [])) - for prerequisite_index in xrange(0, len(prerequisites)): - prerequisite = prerequisites[prerequisite_index] - if prerequisite_index == len(prerequisites) - 1: - eol = '' - else: - eol = ' \\' - makefile.write(' %s%s\n' % (prerequisite, eol)) - - # Make sure that output directories exist before executing the rule - # action. - if len(concrete_output_dirs) > 0: - makefile.write('\t@mkdir -p "%s"\n' % - '" "'.join(concrete_output_dirs)) - - # The rule message and action have already had the necessary variable - # substitutions performed. - if message: - # Mark it with note: so Xcode picks it up in build output. - makefile.write('\t@echo note: %s\n' % message) - makefile.write('\t%s\n' % action) - - makefile.close() - - # It might be nice to ensure that needed output directories exist - # here rather than in each target in the Makefile, but that wouldn't - # work if there ever was a concrete output that had an input-dependent - # variable anywhere other than in the leaf position. - - # Don't declare any inputPaths or outputPaths. If they're present, - # Xcode will provide a slight optimization by only running the script - # phase if any output is missing or outdated relative to any input. - # Unfortunately, it will also assume that all outputs are touched by - # the script, and if the outputs serve as files in a compilation - # phase, they will be unconditionally rebuilt. Since make might not - # rebuild everything that could be declared here as an output, this - # extra compilation activity is unnecessary. With inputPaths and - # outputPaths not supplied, make will always be called, but it knows - # enough to not do anything when everything is up-to-date. - - # To help speed things up, pass -j COUNT to make so it does some work - # in parallel. Don't use ncpus because Xcode will build ncpus targets - # in parallel and if each target happens to have a rules step, there - # would be ncpus^2 things going. With a machine that has 2 quad-core - # Xeons, a build can quickly run out of processes based on - # scheduling/other tasks, and randomly failing builds are no good. - script = \ -"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" -if [ "${JOB_COUNT}" -gt 4 ]; then - JOB_COUNT=4 -fi -exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" -exit 1 -""" % makefile_name - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'name': 'Rule "' + rule['rule_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - - if support_xct: - support_xct.AppendProperty('buildPhases', ssbp) - else: - # TODO(mark): this assumes too much knowledge of the internals of - # xcodeproj_file; some of these smarts should move into xcodeproj_file - # itself. 
- xct._properties['buildPhases'].insert(prebuild_index, ssbp) - prebuild_index = prebuild_index + 1 - - # Extra rule inputs also go into the project file. Concrete outputs were - # already added when they were computed. - groups = ['inputs', 'inputs_excluded'] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith('_excluded')] - for group in groups: - for item in rule.get(group, []): - pbxp.AddOrGetFileInRootGroup(item) - - # Add "sources". - for source in spec.get('sources', []): - (source_root, source_extension) = posixpath.splitext(source) - if source_extension[1:] not in rules_by_ext: - # AddSourceToTarget will add the file to a root group if it's not - # already there. - AddSourceToTarget(source, type, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(source) - - # Add "mac_bundle_resources" and "mac_framework_private_headers" if - # it's a bundle of any type. - if is_bundle: - for resource in tgt_mac_bundle_resources: - (resource_root, resource_extension) = posixpath.splitext(resource) - if resource_extension[1:] not in rules_by_ext: - AddResourceToTarget(resource, pbxp, xct) - else: - pbxp.AddOrGetFileInRootGroup(resource) - - for header in spec.get('mac_framework_private_headers', []): - AddHeaderToTarget(header, pbxp, xct, False) - - # Add "mac_framework_headers". These can be valid for both frameworks - # and static libraries. - if is_bundle or type == 'static_library': - for header in spec.get('mac_framework_headers', []): - AddHeaderToTarget(header, pbxp, xct, True) - - # Add "copies". - pbxcp_dict = {} - for copy_group in spec.get('copies', []): - dest = copy_group['destination'] - if dest[0] not in ('/', '$'): - # Relative paths are relative to $(SRCROOT). - dest = '$(SRCROOT)/' + dest - - # Coalesce multiple "copies" sections in the same target with the same - # "destination" property into the same PBXCopyFilesBuildPhase, otherwise - # they'll wind up with ID collisions. - pbxcp = pbxcp_dict.get(dest, None) - if pbxcp is None: - pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ - 'name': 'Copy to ' + copy_group['destination'] - }, - parent=xct) - pbxcp.SetDestination(dest) - - # TODO(mark): The usual comment about this knowing too much about - # gyp.xcodeproj_file internals applies. - xct._properties['buildPhases'].insert(prebuild_index, pbxcp) - - pbxcp_dict[dest] = pbxcp - - for file in copy_group['files']: - pbxcp.AddFile(file) - - # Excluded files can also go into the project file. - if not skip_excluded_files: - for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers', - 'mac_framework_private_headers']: - excluded_key = key + '_excluded' - for item in spec.get(excluded_key, []): - pbxp.AddOrGetFileInRootGroup(item) - - # So can "inputs" and "outputs" sections of "actions" groups. - groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] - if skip_excluded_files: - groups = [x for x in groups if not x.endswith('_excluded')] - for action in spec.get('actions', []): - for group in groups: - for item in action.get(group, []): - # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not - # sources. - if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): - pbxp.AddOrGetFileInRootGroup(item) - - for postbuild in spec.get('postbuilds', []): - action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) - script = 'exec ' + action_string_sh + '\nexit 1\n' - - # Make the postbuild step depend on the output of ld or ar from this - # target. 
Apparently putting the script step after the link step isn't - # sufficient to ensure proper ordering in all cases. With an input - # declared but no outputs, the script step should run every time, as - # desired. - ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ - 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], - 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', - 'shellScript': script, - 'showEnvVarsInLog': 0, - }) - xct.AppendProperty('buildPhases', ssbp) - - # Add dependencies before libraries, because adding a dependency may imply - # adding a library. It's preferable to keep dependencies listed first - # during a link phase so that they can override symbols that would - # otherwise be provided by libraries, which will usually include system - # libraries. On some systems, ld is finicky and even requires the - # libraries to be ordered in such a way that unresolved symbols in - # earlier-listed libraries may only be resolved by later-listed libraries. - # The Mac linker doesn't work that way, but other platforms do, and so - # their linker invocations need to be constructed in this way. There's - # no compelling reason for Xcode's linker invocations to differ. - - if 'dependencies' in spec: - for dependency in spec['dependencies']: - xct.AddDependency(xcode_targets[dependency]) - # The support project also gets the dependencies (in case they are - # needed for the actions/rules to work). - if support_xct: - support_xct.AddDependency(xcode_targets[dependency]) - - if 'libraries' in spec: - for library in spec['libraries']: - xct.FrameworksPhase().AddFile(library) - # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. - # I wish Xcode handled this automatically. - library_dir = posixpath.dirname(library) - if library_dir not in xcode_standard_library_dirs and ( - not xct.HasBuildSetting(_library_search_paths_var) or - library_dir not in xct.GetBuildSetting(_library_search_paths_var)): - xct.AppendBuildSetting(_library_search_paths_var, library_dir) - - for configuration_name in configuration_names: - configuration = spec['configurations'][configuration_name] - xcbc = xct.ConfigurationNamed(configuration_name) - for include_dir in configuration.get('mac_framework_dirs', []): - xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) - for include_dir in configuration.get('include_dirs', []): - xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) - if 'defines' in configuration: - for define in configuration['defines']: - set_define = EscapeXCodeArgument(define) - xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) - if 'xcode_settings' in configuration: - for xck, xcv in configuration['xcode_settings'].iteritems(): - xcbc.SetBuildSetting(xck, xcv) - if 'xcode_config_file' in configuration: - config_ref = pbxp.AddOrGetFileInRootGroup( - configuration['xcode_config_file']) - xcbc.SetBaseConfiguration(config_ref) - - build_files = [] - for build_file, build_file_dict in data.iteritems(): - if build_file.endswith('.gyp'): - build_files.append(build_file) - - for build_file in build_files: - xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) - - for build_file in build_files: - xcode_projects[build_file].Finalize2(xcode_targets, - xcode_target_to_target_dict) - - for build_file in build_files: - xcode_projects[build_file].Write() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/input.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/input.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/input.py 
2013-02-27 14:15:28.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/input.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2679 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from compiler.ast import Const -from compiler.ast import Dict -from compiler.ast import Discard -from compiler.ast import List -from compiler.ast import Module -from compiler.ast import Node -from compiler.ast import Stmt -import compiler -import copy -import gyp.common -import multiprocessing -import optparse -import os.path -import re -import shlex -import signal -import subprocess -import sys -import threading -import time -from gyp.common import GypError - - -# A list of types that are treated as linkable. -linkable_types = ['executable', 'shared_library', 'loadable_module'] - -# A list of sections that contain links to other targets. -dependency_sections = ['dependencies', 'export_dependent_settings'] - -# base_path_sections is a list of sections defined by GYP that contain -# pathnames. The generators can provide more keys, the two lists are merged -# into path_sections, but you should call IsPathSection instead of using either -# list directly. -base_path_sections = [ - 'destination', - 'files', - 'include_dirs', - 'inputs', - 'libraries', - 'outputs', - 'sources', -] -path_sections = [] - -is_path_section_charset = set('=+?!') -is_path_section_match_re = re.compile('_(dir|file|path)s?$') - -def IsPathSection(section): - # If section ends in one of these characters, it's applied to a section - # without the trailing characters. '/' is notably absent from this list, - # because there's no way for a regular expression to be treated as a path. - while section[-1:] in is_path_section_charset: - section = section[:-1] - return section in path_sections or is_path_section_match_re.search(section) - -# base_non_configuraiton_keys is a list of key names that belong in the target -# itself and should not be propagated into its configurations. It is merged -# with a list that can come from the generator to -# create non_configuration_keys. -base_non_configuration_keys = [ - # Sections that must exist inside targets and not configurations. - 'actions', - 'configurations', - 'copies', - 'default_configuration', - 'dependencies', - 'dependencies_original', - 'link_languages', - 'libraries', - 'postbuilds', - 'product_dir', - 'product_extension', - 'product_name', - 'product_prefix', - 'rules', - 'run_as', - 'sources', - 'standalone_static_library', - 'suppress_wildcard', - 'target_name', - 'toolset', - 'toolsets', - 'type', - 'variants', - - # Sections that can be found inside targets or configurations, but that - # should not be propagated from targets into their configurations. - 'variables', -] -non_configuration_keys = [] - -# Keys that do not belong inside a configuration dictionary. -invalid_configuration_keys = [ - 'actions', - 'all_dependent_settings', - 'configurations', - 'dependencies', - 'direct_dependent_settings', - 'libraries', - 'link_settings', - 'sources', - 'standalone_static_library', - 'target_name', - 'type', -] - -# Controls how the generator want the build file paths. -absolute_build_file_paths = False - -# Controls whether or not the generator supports multiple toolsets. -multiple_toolsets = False - - -def GetIncludedBuildFiles(build_file_path, aux_data, included=None): - """Return a list of all build files included into build_file_path. 
- - The returned list will contain build_file_path as well as all other files - that it included, either directly or indirectly. Note that the list may - contain files that were included into a conditional section that evaluated - to false and was not merged into build_file_path's dict. - - aux_data is a dict containing a key for each build file or included build - file. Those keys provide access to dicts whose "included" keys contain - lists of all other files included by the build file. - - included should be left at its default None value by external callers. It - is used for recursion. - - The returned list will not contain any duplicate entries. Each build file - in the list will be relative to the current directory. - """ - - if included == None: - included = [] - - if build_file_path in included: - return included - - included.append(build_file_path) - - for included_build_file in aux_data[build_file_path].get('included', []): - GetIncludedBuildFiles(included_build_file, aux_data, included) - - return included - - -def CheckedEval(file_contents): - """Return the eval of a gyp file. - - The gyp file is restricted to dictionaries and lists only, and - repeated keys are not allowed. - - Note that this is slower than eval() is. - """ - - ast = compiler.parse(file_contents) - assert isinstance(ast, Module) - c1 = ast.getChildren() - assert c1[0] is None - assert isinstance(c1[1], Stmt) - c2 = c1[1].getChildren() - assert isinstance(c2[0], Discard) - c3 = c2[0].getChildren() - assert len(c3) == 1 - return CheckNode(c3[0], []) - - -def CheckNode(node, keypath): - if isinstance(node, Dict): - c = node.getChildren() - dict = {} - for n in range(0, len(c), 2): - assert isinstance(c[n], Const) - key = c[n].getChildren()[0] - if key in dict: - raise GypError("Key '" + key + "' repeated at level " + - repr(len(keypath) + 1) + " with key path '" + - '.'.join(keypath) + "'") - kp = list(keypath) # Make a copy of the list for descending this node. - kp.append(key) - dict[key] = CheckNode(c[n + 1], kp) - return dict - elif isinstance(node, List): - c = node.getChildren() - children = [] - for index, child in enumerate(c): - kp = list(keypath) # Copy list. - kp.append(repr(index)) - children.append(CheckNode(child, kp)) - return children - elif isinstance(node, Const): - return node.getChildren()[0] - else: - raise TypeError, "Unknown AST node at key path '" + '.'.join(keypath) + \ - "': " + repr(node) - - -def LoadOneBuildFile(build_file_path, data, aux_data, variables, includes, - is_target, check): - if build_file_path in data: - return data[build_file_path] - - if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() - else: - raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) - - build_file_data = None - try: - if check: - build_file_data = CheckedEval(build_file_contents) - else: - build_file_data = eval(build_file_contents, {'__builtins__': None}, - None) - except SyntaxError, e: - e.filename = build_file_path - raise - except Exception, e: - gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) - raise - - data[build_file_path] = build_file_data - aux_data[build_file_path] = {} - - # Scan for includes and merge them in. 
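The CheckedEval/CheckNode pair shown above admits only literal dicts, lists, and constants, and rejects duplicate dictionary keys. A behavioural sketch (inputs hypothetical):

CheckedEval("{'targets': [{'target_name': 'foo'}]}")
# -> {'targets': [{'target_name': 'foo'}]}

CheckedEval("{'a': 1, 'a': 2}")
# -> raises GypError: Key 'a' repeated at level 1 with key path ''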
- try: - if is_target: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, includes, check) - else: - LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, - aux_data, variables, None, check) - except Exception, e: - gyp.common.ExceptionAppend(e, - 'while reading includes of ' + build_file_path) - raise - - return build_file_data - - -def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, - variables, includes, check): - includes_list = [] - if includes != None: - includes_list.extend(includes) - if 'includes' in subdict: - for include in subdict['includes']: - # "include" is specified relative to subdict_path, so compute the real - # path to include by appending the provided "include" to the directory - # in which subdict_path resides. - relative_include = \ - os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) - includes_list.append(relative_include) - # Unhook the includes list, it's no longer needed. - del subdict['includes'] - - # Merge in the included files. - for include in includes_list: - if not 'included' in aux_data[subdict_path]: - aux_data[subdict_path]['included'] = [] - aux_data[subdict_path]['included'].append(include) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) - - MergeDicts(subdict, - LoadOneBuildFile(include, data, aux_data, variables, None, - False, check), - subdict_path, include) - - # Recurse into subdictionaries. - for k, v in subdict.iteritems(): - if v.__class__ == dict: - LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, variables, - None, check) - elif v.__class__ == list: - LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, variables, - check) - - -# This recurses into lists so that it can look for dicts. -def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, - variables, check): - for item in sublist: - if item.__class__ == dict: - LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, - variables, None, check) - elif item.__class__ == list: - LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, - variables, check) - -# Processes toolsets in all the targets. This recurses into condition entries -# since they can contain toolsets as well. -def ProcessToolsetsInDict(data): - if 'targets' in data: - target_list = data['targets'] - new_target_list = [] - for target in target_list: - # If this target already has an explicit 'toolset', and no 'toolsets' - # list, don't modify it further. - if 'toolset' in target and 'toolsets' not in target: - new_target_list.append(target) - continue - if multiple_toolsets: - toolsets = target.get('toolsets', ['target']) - else: - toolsets = ['target'] - # Make sure this 'toolsets' definition is only processed once. - if 'toolsets' in target: - del target['toolsets'] - if len(toolsets) > 0: - # Optimization: only do copies if more than one toolset is specified. - for build in toolsets[1:]: - new_target = copy.deepcopy(target) - new_target['toolset'] = build - new_target_list.append(new_target) - target['toolset'] = toolsets[0] - new_target_list.append(target) - data['targets'] = new_target_list - if 'conditions' in data: - for condition in data['conditions']: - if isinstance(condition, list): - for condition_dict in condition[1:]: - ProcessToolsetsInDict(condition_dict) - - -# TODO(mark): I don't love this name. 
It just means that it's going to load -# a build file that contains targets and is expected to provide a targets dict -# that contains the targets... -def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, - depth, check, load_dependencies): - # If depth is set, predefine the DEPTH variable to be a relative path from - # this build file's directory to the directory identified by depth. - if depth: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) - if d == '': - variables['DEPTH'] = '.' - else: - variables['DEPTH'] = d.replace('\\', '/') - - # If the generator needs absolue paths, then do so. - if absolute_build_file_paths: - build_file_path = os.path.abspath(build_file_path) - - if build_file_path in data['target_build_files']: - # Already loaded. - return False - data['target_build_files'].add(build_file_path) - - gyp.DebugOutput(gyp.DEBUG_INCLUDES, - "Loading Target Build File '%s'", build_file_path) - - build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, variables, - includes, True, check) - - # Store DEPTH for later use in generators. - build_file_data['_DEPTH'] = depth - - # Set up the included_files key indicating which .gyp files contributed to - # this target dict. - if 'included_files' in build_file_data: - raise GypError(build_file_path + ' must not contain included_files key') - - included = GetIncludedBuildFiles(build_file_path, aux_data) - build_file_data['included_files'] = [] - for included_file in included: - # included_file is relative to the current directory, but it needs to - # be made relative to build_file_path's directory. - included_relative = \ - gyp.common.RelativePath(included_file, - os.path.dirname(build_file_path)) - build_file_data['included_files'].append(included_relative) - - # Do a first round of toolsets expansion so that conditions can be defined - # per toolset. - ProcessToolsetsInDict(build_file_data) - - # Apply "pre"/"early" variable expansions and condition evaluations. - ProcessVariablesAndConditionsInDict( - build_file_data, PHASE_EARLY, variables, build_file_path) - - # Since some toolsets might have been defined conditionally, perform - # a second round of toolsets expansion now. - ProcessToolsetsInDict(build_file_data) - - # Look at each project's target_defaults dict, and merge settings into - # targets. - if 'target_defaults' in build_file_data: - if 'targets' not in build_file_data: - raise GypError("Unable to find targets in build file %s" % - build_file_path) - - index = 0 - while index < len(build_file_data['targets']): - # This procedure needs to give the impression that target_defaults is - # used as defaults, and the individual targets inherit from that. - # The individual targets need to be merged into the defaults. Make - # a deep copy of the defaults for each target, merge the target dict - # as found in the input file into that copy, and then hook up the - # copy with the target-specific data merged into it as the replacement - # target dict. - old_target_dict = build_file_data['targets'][index] - new_target_dict = copy.deepcopy(build_file_data['target_defaults']) - MergeDicts(new_target_dict, old_target_dict, - build_file_path, build_file_path) - build_file_data['targets'][index] = new_target_dict - index += 1 - - # No longer needed. 
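A sketch (not from the patch) of what the ProcessToolsetsInDict pass shown earlier does to a multi-toolset target once multiple_toolsets is enabled:

data = {'targets': [
    {'target_name': 'foo', 'toolsets': ['target', 'host'],
     'type': 'static_library'},
]}

ProcessToolsetsInDict(data)

# data['targets'] now holds one copy per toolset, with the 'toolsets' list
# removed and a singular 'toolset' key added:
#   [{'target_name': 'foo', 'toolset': 'host',   'type': 'static_library'},
#    {'target_name': 'foo', 'toolset': 'target', 'type': 'static_library'}]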
- del build_file_data['target_defaults'] - - # Look for dependencies. This means that dependency resolution occurs - # after "pre" conditionals and variable expansion, but before "post" - - # in other words, you can't put a "dependencies" section inside a "post" - # conditional within a target. - - dependencies = [] - if 'targets' in build_file_data: - for target_dict in build_file_data['targets']: - if 'dependencies' not in target_dict: - continue - for dependency in target_dict['dependencies']: - dependencies.append( - gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) - - if load_dependencies: - for dependency in dependencies: - try: - LoadTargetBuildFile(dependency, data, aux_data, variables, - includes, depth, check, load_dependencies) - except Exception, e: - gyp.common.ExceptionAppend( - e, 'while loading dependencies of %s' % build_file_path) - raise - else: - return (build_file_path, dependencies) - - -def CallLoadTargetBuildFile(global_flags, - build_file_path, data, - aux_data, variables, - includes, depth, check): - """Wrapper around LoadTargetBuildFile for parallel processing. - - This wrapper is used when LoadTargetBuildFile is executed in - a worker process. - """ - - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - - # Apply globals so that the worker process behaves the same. - for key, value in global_flags.iteritems(): - globals()[key] = value - - # Save the keys so we can return data that changed. - data_keys = set(data) - aux_data_keys = set(aux_data) - - result = LoadTargetBuildFile(build_file_path, data, - aux_data, variables, - includes, depth, check, False) - if not result: - return result - - (build_file_path, dependencies) = result - - data_out = {} - for key in data: - if key == 'target_build_files': - continue - if key not in data_keys: - data_out[key] = data[key] - aux_data_out = {} - for key in aux_data: - if key not in aux_data_keys: - aux_data_out[key] = aux_data[key] - - # This gets serialized and sent back to the main process via a pipe. - # It's handled in LoadTargetBuildFileCallback. - return (build_file_path, - data_out, - aux_data_out, - dependencies) - except Exception, e: - print >>sys.stderr, 'Exception: ', e - return None - - -class ParallelProcessingError(Exception): - pass - - -class ParallelState(object): - """Class to keep track of state when processing input files in parallel. - - If build files are loaded in parallel, use this to keep track of - state during farming out and processing parallel jobs. It's stored - in a global so that the callback function can have access to it. - """ - - def __init__(self): - # The multiprocessing pool. - self.pool = None - # The condition variable used to protect this object and notify - # the main loop when there might be more data to process. - self.condition = None - # The "data" dict that was passed to LoadTargetBuildFileParallel - self.data = None - # The "aux_data" dict that was passed to LoadTargetBuildFileParallel - self.aux_data = None - # The number of parallel calls outstanding; decremented when a response - # was received. - self.pending = 0 - # The set of all build files that have been scheduled, so we don't - # schedule the same one twice. - self.scheduled = set() - # A list of dependency build file paths that haven't been scheduled yet. - self.dependencies = [] - # Flag to indicate if there was an error in a child process. - self.error = False - - def LoadTargetBuildFileCallback(self, result): - """Handle the results of running LoadTargetBuildFile in another process. 
- """ - self.condition.acquire() - if not result: - self.error = True - self.condition.notify() - self.condition.release() - return - (build_file_path0, data0, aux_data0, dependencies0) = result - self.data['target_build_files'].add(build_file_path0) - for key in data0: - self.data[key] = data0[key] - for key in aux_data0: - self.aux_data[key] = aux_data0[key] - for new_dependency in dependencies0: - if new_dependency not in self.scheduled: - self.scheduled.add(new_dependency) - self.dependencies.append(new_dependency) - self.pending -= 1 - self.condition.notify() - self.condition.release() - - -def LoadTargetBuildFileParallel(build_file_path, data, aux_data, - variables, includes, depth, check): - parallel_state = ParallelState() - parallel_state.condition = threading.Condition() - parallel_state.dependencies = [build_file_path] - parallel_state.scheduled = set([build_file_path]) - parallel_state.pending = 0 - parallel_state.data = data - parallel_state.aux_data = aux_data - - try: - parallel_state.condition.acquire() - while parallel_state.dependencies or parallel_state.pending: - if parallel_state.error: - print >>sys.stderr, ( - '\n' - 'Note: an error occurred while running gyp using multiprocessing.\n' - 'For more verbose output, set GYP_PARALLEL=0 in your environment.\n' - 'If the error only occurs when GYP_PARALLEL=1, ' - 'please report a bug!') - break - if not parallel_state.dependencies: - parallel_state.condition.wait() - continue - - dependency = parallel_state.dependencies.pop() - - parallel_state.pending += 1 - data_in = {} - data_in['target_build_files'] = data['target_build_files'] - aux_data_in = {} - global_flags = { - 'path_sections': globals()['path_sections'], - 'non_configuration_keys': globals()['non_configuration_keys'], - 'absolute_build_file_paths': globals()['absolute_build_file_paths'], - 'multiple_toolsets': globals()['multiple_toolsets']} - - if not parallel_state.pool: - parallel_state.pool = multiprocessing.Pool(8) - parallel_state.pool.apply_async( - CallLoadTargetBuildFile, - args = (global_flags, dependency, - data_in, aux_data_in, - variables, includes, depth, check), - callback = parallel_state.LoadTargetBuildFileCallback) - except KeyboardInterrupt, e: - parallel_state.pool.terminate() - raise e - - parallel_state.condition.release() - if parallel_state.error: - sys.exit() - - -# Look for the bracket that matches the first bracket seen in a -# string, and return the start and end as a tuple. For example, if -# the input is something like "<(foo <(bar)) blah", then it would -# return (1, 13), indicating the entire string except for the leading -# "<" and trailing " blah". -LBRACKETS= set('{[(') -BRACKETS = {'}': '{', ']': '[', ')': '('} -def FindEnclosingBracketGroup(input_str): - stack = [] - start = -1 - for index, char in enumerate(input_str): - if char in LBRACKETS: - stack.append(char) - if start == -1: - start = index - elif char in BRACKETS: - if not stack: - return (-1, -1) - if stack.pop() != BRACKETS[char]: - return (-1, -1) - if not stack: - return (start, index + 1) - return (-1, -1) - - -canonical_int_re = re.compile('(0|-?[1-9][0-9]*)$') - - -def IsStrCanonicalInt(string): - """Returns True if |string| is in its canonical integer form. - - The canonical form is such that str(int(string)) == string. - """ - return isinstance(string, str) and canonical_int_re.match(string) - - -# This matches things like "<(asdf)", "(?P<(?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' 
- '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# This matches the same as early_variable_re, but with '>' instead of '<'. -late_variable_re = re.compile( - '(?P(?P>(?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# This matches the same as early_variable_re, but with '^' instead of '<'. -latelate_variable_re = re.compile( - '(?P(?P[\^](?:(?:!?@?)|\|)?)' - '(?P[-a-zA-Z0-9_.]+)?' - '\((?P\s*\[?)' - '(?P.*?)(\]?)\))') - -# Global cache of results from running commands so they don't have to be run -# more then once. -cached_command_results = {} - - -def FixupPlatformCommand(cmd): - if sys.platform == 'win32': - if type(cmd) == list: - cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:] - else: - cmd = re.sub('^cat ', 'type ', cmd) - return cmd - - -PHASE_EARLY = 0 -PHASE_LATE = 1 -PHASE_LATELATE = 2 - - -def ExpandVariables(input, phase, variables, build_file): - # Look for the pattern that gets expanded into variables - if phase == PHASE_EARLY: - variable_re = early_variable_re - expansion_symbol = '<' - elif phase == PHASE_LATE: - variable_re = late_variable_re - expansion_symbol = '>' - elif phase == PHASE_LATELATE: - variable_re = latelate_variable_re - expansion_symbol = '^' - else: - assert False - - input_str = str(input) - if IsStrCanonicalInt(input_str): - return int(input_str) - - # Do a quick scan to determine if an expensive regex search is warranted. - if expansion_symbol not in input_str: - return input_str - - # Get the entire list of matches as a list of MatchObject instances. - # (using findall here would return strings instead of MatchObjects). - matches = list(variable_re.finditer(input_str)) - if not matches: - return input_str - - output = input_str - # Reverse the list of matches so that replacements are done right-to-left. - # That ensures that earlier replacements won't mess up the string in a - # way that causes later calls to find the earlier substituted text instead - # of what's intended for replacement. - matches.reverse() - for match_group in matches: - match = match_group.groupdict() - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match) - # match['replace'] is the substring to look for, match['type'] - # is the character code for the replacement type (< > ! <| >| <@ - # >@ !@), match['is_array'] contains a '[' for command - # arrays, and match['content'] is the name of the variable (< >) - # or command to run (!). match['command_string'] is an optional - # command string. Currently, only 'pymod_do_main' is supported. - - # run_command is true if a ! variant is used. - run_command = '!' in match['type'] - command_string = match['command_string'] - - # file_list is true if a | variant is used. - file_list = '|' in match['type'] - - # Capture these now so we can adjust them later. - replace_start = match_group.start('replace') - replace_end = match_group.end('replace') - - # Find the ending paren, and re-evaluate the contained string. - (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) - - # Adjust the replacement range to match the entire command - # found by FindEnclosingBracketGroup (since the variable_re - # probably doesn't match the entire command if it contained - # nested variables). - replace_end = replace_start + c_end - - # Find the "real" replacement, matching the appropriate closing - # paren, and adjust the replacement start and end. - replacement = input_str[replace_start:replace_end] - - # Figure out what the contents of the variable parens are. 
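
The (?P<...>) group names in the three variable_re patterns above, along with parts of the surrounding comment and the re.compile( line, appear to have been lost when this diff was extracted (anything resembling an angle-bracketed tag was stripped). Judging from the match['replace'], match['type'], match['command_string'], match['is_array'] and match['content'] lookups further down in ExpandVariables, the early-phase pattern would have looked roughly like the reconstruction below; the late and latelate patterns differ only in using '>' and '^' in place of '<'. This is a readability sketch, not a verbatim quote of the deleted file.

import re

# Reconstructed for illustration; the group names are inferred from the
# match['...'] lookups in ExpandVariables.
early_variable_re = re.compile(
    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
    r'\((?P<is_array>\s*\[?)'
    r'(?P<content>.*?)(\]?)\))')

m = early_variable_re.search("'FOO=<(foo_value)'")
assert m.group('type') == '<' and m.group('content') == 'foo_value'

m = early_variable_re.search('<!@(python build/do_main.py --list)')
assert m.group('type') == '<!@'
assert m.group('content') == 'python build/do_main.py --list'
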
- contents_start = replace_start + c_start + 1 - contents_end = replace_end - 1 - contents = input_str[contents_start:contents_end] - - # Do filter substitution now for <|(). - # Admittedly, this is different than the evaluation order in other - # contexts. However, since filtration has no chance to run on <|(), - # this seems like the only obvious way to give them access to filters. - if file_list: - processed_variables = copy.deepcopy(variables) - ProcessListFiltersInDict(contents, processed_variables) - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, - processed_variables, build_file) - else: - # Recurse to expand variables in the contents - contents = ExpandVariables(contents, phase, variables, build_file) - - # Strip off leading/trailing whitespace so that variable matches are - # simpler below (and because they are rarely needed). - contents = contents.strip() - - # expand_to_list is true if an @ variant is used. In that case, - # the expansion should result in a list. Note that the caller - # is to be expecting a list in return, and not all callers do - # because not all are working in list context. Also, for list - # expansions, there can be no other text besides the variable - # expansion in the input string. - expand_to_list = '@' in match['type'] and input_str == replacement - - if run_command or file_list: - # Find the build file's directory, so commands can be run or file lists - # generated relative to it. - build_file_dir = os.path.dirname(build_file) - if build_file_dir == '': - # If build_file is just a leaf filename indicating a file in the - # current directory, build_file_dir might be an empty string. Set - # it to None to signal to subprocess.Popen that it should run the - # command in the current directory. - build_file_dir = None - - # Support <|(listfile.txt ...) which generates a file - # containing items from a gyp list, generated at gyp time. - # This works around actions/rules which have more inputs than will - # fit on the command line. - if file_list: - if type(contents) == list: - contents_list = contents - else: - contents_list = contents.split(' ') - replacement = contents_list[0] - path = replacement - if not os.path.isabs(path): - path = os.path.join(build_file_dir, path) - f = gyp.common.WriteOnDiff(path) - for i in contents_list[1:]: - f.write('%s\n' % i) - f.close() - - elif run_command: - use_shell = True - if match['is_array']: - contents = eval(contents) - use_shell = False - - # Check for a cached value to avoid executing commands, or generating - # file lists more than once. - # TODO(http://code.google.com/p/gyp/issues/detail?id=112): It is - # possible that the command being invoked depends on the current - # directory. For that case the syntax needs to be extended so that the - # directory is also used in cache_key (it becomes a tuple). - # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, - # someone could author a set of GYP files where each time the command - # is invoked it produces different output by design. When the need - # arises, the syntax should be extended to support no caching off a - # command's output so it is run every time. - cache_key = str(contents) - cached_value = cached_command_results.get(cache_key, None) - if cached_value is None: - gyp.DebugOutput(gyp.DEBUG_VARIABLES, - "Executing command '%s' in directory '%s'", - contents, build_file_dir) - - replacement = '' - - if command_string == 'pymod_do_main': - # (sources/) etc. 
to resolve to - # and empty list if undefined. This allows actions to: - # 'action!': [ - # '>@(_sources!)', - # ], - # 'action/': [ - # '>@(_sources/)', - # ], - replacement = [] - else: - raise GypError('Undefined variable ' + contents + - ' in ' + build_file) - else: - replacement = variables[contents] - - if isinstance(replacement, list): - for item in replacement: - if (not contents[-1] == '/' and - not isinstance(item, str) and not isinstance(item, int)): - raise GypError('Variable ' + contents + - ' must expand to a string or list of strings; ' + - 'list contains a ' + - item.__class__.__name__) - # Run through the list and handle variable expansions in it. Since - # the list is guaranteed not to contain dicts, this won't do anything - # with conditions sections. - ProcessVariablesAndConditionsInList(replacement, phase, variables, - build_file) - elif not isinstance(replacement, str) and \ - not isinstance(replacement, int): - raise GypError('Variable ' + contents + - ' must expand to a string or list of strings; ' + - 'found a ' + replacement.__class__.__name__) - - if expand_to_list: - # Expanding in list context. It's guaranteed that there's only one - # replacement to do in |input_str| and that it's this replacement. See - # above. - if isinstance(replacement, list): - # If it's already a list, make a copy. - output = replacement[:] - else: - # Split it the same way sh would split arguments. - output = shlex.split(str(replacement)) - else: - # Expanding in string context. - encoded_replacement = '' - if isinstance(replacement, list): - # When expanding a list into string context, turn the list items - # into a string in a way that will work with a subprocess call. - # - # TODO(mark): This isn't completely correct. This should - # call a generator-provided function that observes the - # proper list-to-argument quoting rules on a specific - # platform instead of just calling the POSIX encoding - # routine. - encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) - else: - encoded_replacement = replacement - - output = output[:replace_start] + str(encoded_replacement) + \ - output[replace_end:] - # Prepare for the next match iteration. - input_str = output - - # Look for more matches now that we've replaced some, to deal with - # expanding local variables (variables defined in the same - # variables block as this one). - gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output) - if isinstance(output, list): - if output and isinstance(output[0], list): - # Leave output alone if it's a list of lists. - # We don't want such lists to be stringified. - pass - else: - new_output = [] - for item in output: - new_output.append( - ExpandVariables(item, phase, variables, build_file)) - output = new_output - else: - output = ExpandVariables(output, phase, variables, build_file) - - # Convert all strings that are canonically-represented integers into integers. - if isinstance(output, list): - for index in xrange(0, len(output)): - if IsStrCanonicalInt(output[index]): - output[index] = int(output[index]) - elif IsStrCanonicalInt(output): - output = int(output) - - return output - - -def ProcessConditionsInDict(the_dict, phase, variables, build_file): - # Process a 'conditions' or 'target_conditions' section in the_dict, - # depending on phase. 
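
For readers unfamiliar with the syntax being expanded by ExpandVariables above, the sketch below shows only the two simplest cases: a plain '<(name)' substitution in string context and a whole-string '<@(name)' expansion into list context. toy_expand, the variables and the file names are invented for this note; the real code above additionally handles command execution (<!), generated file lists (<|), nested expansions, caching, and the late ('>') and latelate ('^') phases.

import re
import shlex

def toy_expand(value, variables):
    # Invented stand-in for ExpandVariables; handles one plain or
    # whole-string '@' expansion and nothing else.
    simple = re.compile(r'<(@?)\(([-a-zA-Z0-9_.]+)\)')
    m = simple.search(value)
    if not m:
        return value
    replacement = variables[m.group(2)]
    if m.group(1) == '@' and m.group(0) == value:
        # A whole-string "<@(name)" expands in list context.
        if isinstance(replacement, list):
            return list(replacement)
        return shlex.split(str(replacement))
    if isinstance(replacement, list):
        # String context flattens lists; real gyp uses
        # gyp.common.EncodePOSIXShellList for proper quoting.
        replacement = ' '.join(str(i) for i in replacement)
    return value[:m.start()] + str(replacement) + value[m.end():]

variables = {'depth': '..', 'my_sources': ['a.cc', 'b.cc']}
assert toy_expand('<(depth)/base/base.gyp', variables) == '../base/base.gyp'
assert toy_expand('<@(my_sources)', variables) == ['a.cc', 'b.cc']
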
- # early -> conditions - # late -> target_conditions - # latelate -> no conditions - # - # Each item in a conditions list consists of cond_expr, a string expression - # evaluated as the condition, and true_dict, a dict that will be merged into - # the_dict if cond_expr evaluates to true. Optionally, a third item, - # false_dict, may be present. false_dict is merged into the_dict if - # cond_expr evaluates to false. - # - # Any dict merged into the_dict will be recursively processed for nested - # conditionals and other expansions, also according to phase, immediately - # prior to being merged. - - if phase == PHASE_EARLY: - conditions_key = 'conditions' - elif phase == PHASE_LATE: - conditions_key = 'target_conditions' - elif phase == PHASE_LATELATE: - return - else: - assert False - - if not conditions_key in the_dict: - return - - conditions_list = the_dict[conditions_key] - # Unhook the conditions list, it's no longer needed. - del the_dict[conditions_key] - - for condition in conditions_list: - if not isinstance(condition, list): - raise GypError(conditions_key + ' must be a list') - if len(condition) != 2 and len(condition) != 3: - # It's possible that condition[0] won't work in which case this - # attempt will raise its own IndexError. That's probably fine. - raise GypError(conditions_key + ' ' + condition[0] + - ' must be length 2 or 3, not ' + str(len(condition))) - - [cond_expr, true_dict] = condition[0:2] - false_dict = None - if len(condition) == 3: - false_dict = condition[2] - - # Do expansions on the condition itself. Since the conditon can naturally - # contain variable references without needing to resort to GYP expansion - # syntax, this is of dubious value for variables, but someone might want to - # use a command expansion directly inside a condition. - cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, - build_file) - if not isinstance(cond_expr_expanded, str) and \ - not isinstance(cond_expr_expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ - - try: - ast_code = compile(cond_expr_expanded, '', 'eval') - - if eval(ast_code, {'__builtins__': None}, variables): - merge_dict = true_dict - else: - merge_dict = false_dict - except SyntaxError, e: - syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' - 'at character %d.' % - (str(e.args[0]), e.text, build_file, e.offset), - e.filename, e.lineno, e.offset, e.text) - raise syntax_error - except NameError, e: - gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % - (cond_expr_expanded, build_file)) - raise GypError(e) - - if merge_dict != None: - # Expand variables and nested conditinals in the merge_dict before - # merging it. - ProcessVariablesAndConditionsInDict(merge_dict, phase, - variables, build_file) - - MergeDicts(the_dict, merge_dict, build_file, build_file) - - -def LoadAutomaticVariablesFromDict(variables, the_dict): - # Any keys with plain string values in the_dict become automatic variables. - # The variable name is the key name with a "_" character prepended. - for key, value in the_dict.iteritems(): - if isinstance(value, str) or isinstance(value, int) or \ - isinstance(value, list): - variables['_' + key] = value - - -def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): - # Any keys in the_dict's "variables" dict, if it has one, becomes a - # variable. The variable name is the key name in the "variables" dict. 
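
A worked example of the condition handling described above (the variables, defines and expression are invented): each entry is [cond_expr, true_dict] with an optional false_dict, and the expression is compiled and evaluated with the variables dict as its local scope and builtins disabled. The second argument to compile() is normally the placeholder filename '<string>'; it shows up blank above, presumably lost to the same angle-bracket stripping seen elsewhere in this copy.

variables = {'OS': 'linux', 'use_foo': 1}
condition = ['OS=="linux" and use_foo==1',
             {'defines': ['USE_FOO']},      # true_dict
             {'defines': ['NO_FOO']}]       # optional false_dict

cond_expr, true_dict = condition[0:2]
false_dict = condition[2] if len(condition) == 3 else None
ast_code = compile(cond_expr, '<string>', 'eval')
if eval(ast_code, {'__builtins__': None}, variables):
    merge_dict = true_dict
else:
    merge_dict = false_dict
assert merge_dict == {'defines': ['USE_FOO']}
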
- # Variables that end with the % character are set only if they are unset in - # the variables dict. the_dict_key is the name of the key that accesses - # the_dict in the_dict's parent dict. If the_dict's parent is not a dict - # (it could be a list or it could be parentless because it is a root dict), - # the_dict_key will be None. - for key, value in the_dict.get('variables', {}).iteritems(): - if not isinstance(value, str) and not isinstance(value, int) and \ - not isinstance(value, list): - continue - - if key.endswith('%'): - variable_name = key[:-1] - if variable_name in variables: - # If the variable is already set, don't set it. - continue - if the_dict_key is 'variables' and variable_name in the_dict: - # If the variable is set without a % in the_dict, and the_dict is a - # variables dict (making |variables| a varaibles sub-dict of a - # variables dict), use the_dict's definition. - value = the_dict[variable_name] - else: - variable_name = key - - variables[variable_name] = value - - -def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, - build_file, the_dict_key=None): - """Handle all variable and command expansion and conditional evaluation. - - This function is the public entry point for all variable expansions and - conditional evaluations. The variables_in dictionary will not be modified - by this function. - """ - - # Make a copy of the variables_in dict that can be modified during the - # loading of automatics and the loading of the variables dict. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - - if 'variables' in the_dict: - # Make sure all the local variables are added to the variables - # list before we process them so that you can reference one - # variable from another. They will be fully expanded by recursion - # in ExpandVariables. - for key, value in the_dict['variables'].iteritems(): - variables[key] = value - - # Handle the associated variables dict first, so that any variable - # references within can be resolved prior to using them as variables. - # Pass a copy of the variables dict to avoid having it be tainted. - # Otherwise, it would have extra automatics added for everything that - # should just be an ordinary variable in this scope. - ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, - variables, build_file, 'variables') - - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - for key, value in the_dict.iteritems(): - # Skip "variables", which was already processed if present. - if key != 'variables' and isinstance(value, str): - expanded = ExpandVariables(value, phase, variables, build_file) - if not isinstance(expanded, str) and not isinstance(expanded, int): - raise ValueError, \ - 'Variable expansion in this context permits str and int ' + \ - 'only, found ' + expanded.__class__.__name__ + ' for ' + key - the_dict[key] = expanded - - # Variable expansion may have resulted in changes to automatics. Reload. - # TODO(mark): Optimization: only reload if no changes were made. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Process conditions in this dict. This is done after variable expansion - # so that conditions may take advantage of expanded variables. For example, - # if the_dict contains: - # {'type': '<(library_type)', - # 'conditions': [['_type=="static_library"', { ... 
}]]}, - # _type, as used in the condition, will only be set to the value of - # library_type if variable expansion is performed before condition - # processing. However, condition processing should occur prior to recursion - # so that variables (both automatic and "variables" dict type) may be - # adjusted by conditions sections, merged into the_dict, and have the - # intended impact on contained dicts. - # - # This arrangement means that a "conditions" section containing a "variables" - # section will only have those variables effective in subdicts, not in - # the_dict. The workaround is to put a "conditions" section within a - # "variables" section. For example: - # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will not result in "IS_MAC" being appended to the "defines" list in the - # current scope but would result in it being appended to the "defines" list - # within "my_subdict". By comparison: - # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, - # 'defines': ['<(define)'], - # 'my_subdict': {'defines': ['<(define)']}}, - # will append "IS_MAC" to both "defines" lists. - - # Evaluate conditions sections, allowing variable expansions within them - # as well as nested conditionals. This will process a 'conditions' or - # 'target_conditions' section, perform appropriate merging and recursive - # conditional and variable processing, and then remove the conditions section - # from the_dict if it is present. - ProcessConditionsInDict(the_dict, phase, variables, build_file) - - # Conditional processing may have resulted in changes to automatics or the - # variables dict. Reload. - variables = variables_in.copy() - LoadAutomaticVariablesFromDict(variables, the_dict) - LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) - - # Recurse into child dicts, or process child lists which may result in - # further recursion into descendant dicts. - for key, value in the_dict.iteritems(): - # Skip "variables" and string values, which were already processed if - # present. - if key == 'variables' or isinstance(value, str): - continue - if isinstance(value, dict): - # Pass a copy of the variables dict so that subdicts can't influence - # parents. - ProcessVariablesAndConditionsInDict(value, phase, variables, - build_file, key) - elif isinstance(value, list): - # The list itself can't influence the variables dict, and - # ProcessVariablesAndConditionsInList will make copies of the variables - # dict if it needs to pass it to something that can influence it. No - # copy is necessary here. - ProcessVariablesAndConditionsInList(value, phase, variables, - build_file) - elif not isinstance(value, int): - raise TypeError, 'Unknown type ' + value.__class__.__name__ + \ - ' for ' + key - - -def ProcessVariablesAndConditionsInList(the_list, phase, variables, - build_file): - # Iterate using an index so that new values can be assigned into the_list. - index = 0 - while index < len(the_list): - item = the_list[index] - if isinstance(item, dict): - # Make a copy of the variables dict so that it won't influence anything - # outside of its own scope. 
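
The '%' convention that LoadVariablesFromVariablesDict implements above is easy to miss, so here is a minimal sketch of just that rule (the function name, variables and values are invented): a key such as 'use_foo%' only supplies a default, while an unsuffixed key always overwrites.

def load_variables(variables, the_dict):
    # Invented miniature of LoadVariablesFromVariablesDict.
    for key, value in the_dict.get('variables', {}).items():
        if key.endswith('%'):
            name = key[:-1]
            if name in variables:
                continue          # already set (e.g. via -D); keep that value
        else:
            name = key
        variables[name] = value

variables = {'use_foo': 0}        # e.g. set earlier on the command line
load_variables(variables,
               {'variables': {'use_foo%': 1, 'bar_dir': 'third_party/bar'}})
assert variables == {'use_foo': 0, 'bar_dir': 'third_party/bar'}
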
- ProcessVariablesAndConditionsInDict(item, phase, variables, build_file) - elif isinstance(item, list): - ProcessVariablesAndConditionsInList(item, phase, variables, build_file) - elif isinstance(item, str): - expanded = ExpandVariables(item, phase, variables, build_file) - if isinstance(expanded, str) or isinstance(expanded, int): - the_list[index] = expanded - elif isinstance(expanded, list): - the_list[index:index+1] = expanded - index += len(expanded) - - # index now identifies the next item to examine. Continue right now - # without falling into the index increment below. - continue - else: - raise ValueError, \ - 'Variable expansion in this context permits strings and ' + \ - 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ - index - elif not isinstance(item, int): - raise TypeError, 'Unknown type ' + item.__class__.__name__ + \ - ' at index ' + index - index = index + 1 - - -def BuildTargetsDict(data): - """Builds a dict mapping fully-qualified target names to their target dicts. - - |data| is a dict mapping loaded build files by pathname relative to the - current directory. Values in |data| are build file contents. For each - |data| value with a "targets" key, the value of the "targets" key is taken - as a list containing target dicts. Each target's fully-qualified name is - constructed from the pathname of the build file (|data| key) and its - "target_name" property. These fully-qualified names are used as the keys - in the returned dict. These keys provide access to the target dicts, - the dicts in the "targets" lists. - """ - - targets = {} - for build_file in data['target_build_files']: - for target in data[build_file].get('targets', []): - target_name = gyp.common.QualifiedTarget(build_file, - target['target_name'], - target['toolset']) - if target_name in targets: - raise GypError('Duplicate target definitions for ' + target_name) - targets[target_name] = target - - return targets - - -def QualifyDependencies(targets): - """Make dependency links fully-qualified relative to the current directory. - - |targets| is a dict mapping fully-qualified target names to their target - dicts. For each target in this dict, keys known to contain dependency - links are examined, and any dependencies referenced will be rewritten - so that they are fully-qualified and relative to the current directory. - All rewritten dependencies are suitable for use as keys to |targets| or a - similar dict. - """ - - all_dependency_sections = [dep + op - for dep in dependency_sections - for op in ('', '!', '/')] - - for target, target_dict in targets.iteritems(): - target_build_file = gyp.common.BuildFile(target) - toolset = target_dict['toolset'] - for dependency_key in all_dependency_sections: - dependencies = target_dict.get(dependency_key, []) - for index in xrange(0, len(dependencies)): - dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( - target_build_file, dependencies[index], toolset) - if not multiple_toolsets: - # Ignore toolset specification in the dependency if it is specified. - dep_toolset = toolset - dependency = gyp.common.QualifiedTarget(dep_file, - dep_target, - dep_toolset) - dependencies[index] = dependency - - # Make sure anything appearing in a list other than "dependencies" also - # appears in the "dependencies" list. 
- if dependency_key != 'dependencies' and \ - dependency not in target_dict['dependencies']: - raise GypError('Found ' + dependency + ' in ' + dependency_key + - ' of ' + target + ', but not in dependencies') - - -def ExpandWildcardDependencies(targets, data): - """Expands dependencies specified as build_file:*. - - For each target in |targets|, examines sections containing links to other - targets. If any such section contains a link of the form build_file:*, it - is taken as a wildcard link, and is expanded to list each target in - build_file. The |data| dict provides access to build file dicts. - - Any target that does not wish to be included by wildcard can provide an - optional "suppress_wildcard" key in its target dict. When present and - true, a wildcard dependency link will not include such targets. - - All dependency names, including the keys to |targets| and the values in each - dependency list, must be qualified when this function is called. - """ - - for target, target_dict in targets.iteritems(): - toolset = target_dict['toolset'] - target_build_file = gyp.common.BuildFile(target) - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - - # Loop this way instead of "for dependency in" or "for index in xrange" - # because the dependencies list will be modified within the loop body. - index = 0 - while index < len(dependencies): - (dependency_build_file, dependency_target, dependency_toolset) = \ - gyp.common.ParseQualifiedTarget(dependencies[index]) - if dependency_target != '*' and dependency_toolset != '*': - # Not a wildcard. Keep it moving. - index = index + 1 - continue - - if dependency_build_file == target_build_file: - # It's an error for a target to depend on all other targets in - # the same file, because a target cannot depend on itself. - raise GypError('Found wildcard in ' + dependency_key + ' of ' + - target + ' referring to same build file') - - # Take the wildcard out and adjust the index so that the next - # dependency in the list will be processed the next time through the - # loop. - del dependencies[index] - index = index - 1 - - # Loop through the targets in the other build file, adding them to - # this target's list of dependencies in place of the removed - # wildcard. 
- dependency_target_dicts = data[dependency_build_file]['targets'] - for dependency_target_dict in dependency_target_dicts: - if int(dependency_target_dict.get('suppress_wildcard', False)): - continue - dependency_target_name = dependency_target_dict['target_name'] - if (dependency_target != '*' and - dependency_target != dependency_target_name): - continue - dependency_target_toolset = dependency_target_dict['toolset'] - if (dependency_toolset != '*' and - dependency_toolset != dependency_target_toolset): - continue - dependency = gyp.common.QualifiedTarget(dependency_build_file, - dependency_target_name, - dependency_target_toolset) - index = index + 1 - dependencies.insert(index, dependency) - - index = index + 1 - - -def Unify(l): - """Removes duplicate elements from l, keeping the first element.""" - seen = {} - return [seen.setdefault(e, e) for e in l if e not in seen] - - -def RemoveDuplicateDependencies(targets): - """Makes sure every dependency appears only once in all targets's dependency - lists.""" - for target_name, target_dict in targets.iteritems(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - target_dict[dependency_key] = Unify(dependencies) - - -def Filter(l, item): - """Removes item from l.""" - res = {} - return [res.setdefault(e, e) for e in l if e != item] - - -def RemoveSelfDependencies(targets): - """Remove self dependencies from targets that have the prune_self_dependency - variable set.""" - for target_name, target_dict in targets.iteritems(): - for dependency_key in dependency_sections: - dependencies = target_dict.get(dependency_key, []) - if dependencies: - for t in dependencies: - if t == target_name: - if targets[t].get('variables', {}).get('prune_self_dependency', 0): - target_dict[dependency_key] = Filter(dependencies, target_name) - - -class DependencyGraphNode(object): - """ - - Attributes: - ref: A reference to an object that this DependencyGraphNode represents. - dependencies: List of DependencyGraphNodes on which this one depends. - dependents: List of DependencyGraphNodes that depend on this one. - """ - - class CircularException(GypError): - pass - - def __init__(self, ref): - self.ref = ref - self.dependencies = [] - self.dependents = [] - - def FlattenToList(self): - # flat_list is the sorted list of dependencies - actually, the list items - # are the "ref" attributes of DependencyGraphNodes. Every target will - # appear in flat_list after all of its dependencies, and before all of its - # dependents. - flat_list = [] - - # in_degree_zeros is the list of DependencyGraphNodes that have no - # dependencies not in flat_list. Initially, it is a copy of the children - # of this node, because when the graph was built, nodes with no - # dependencies were made implicit dependents of the root node. - in_degree_zeros = set(self.dependents[:]) - - while in_degree_zeros: - # Nodes in in_degree_zeros have no dependencies not in flat_list, so they - # can be appended to flat_list. Take these nodes out of in_degree_zeros - # as work progresses, so that the next node to process from the list can - # always be accessed at a consistent position. - node = in_degree_zeros.pop() - flat_list.append(node.ref) - - # Look at dependents of the node just added to flat_list. Some of them - # may now belong in in_degree_zeros. 
- for node_dependent in node.dependents: - is_in_degree_zero = True - for node_dependent_dependency in node_dependent.dependencies: - if not node_dependent_dependency.ref in flat_list: - # The dependent one or more dependencies not in flat_list. There - # will be more chances to add it to flat_list when examining - # it again as a dependent of those other dependencies, provided - # that there are no cycles. - is_in_degree_zero = False - break - - if is_in_degree_zero: - # All of the dependent's dependencies are already in flat_list. Add - # it to in_degree_zeros where it will be processed in a future - # iteration of the outer loop. - in_degree_zeros.add(node_dependent) - - return flat_list - - def DirectDependencies(self, dependencies=None): - """Returns a list of just direct dependencies.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. - if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - - return dependencies - - def _AddImportedDependencies(self, targets, dependencies=None): - """Given a list of direct dependencies, adds indirect dependencies that - other dependencies have declared to export their settings. - - This method does not operate on self. Rather, it operates on the list - of dependencies in the |dependencies| argument. For each dependency in - that list, if any declares that it exports the settings of one of its - own dependencies, those dependencies whose settings are "passed through" - are added to the list. As new items are added to the list, they too will - be processed, so it is possible to import settings through multiple levels - of dependencies. - - This method is not terribly useful on its own, it depends on being - "primed" with a list of direct dependencies such as one provided by - DirectDependencies. DirectAndImportedDependencies is intended to be the - public entry point. - """ - - if dependencies == None: - dependencies = [] - - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - # Add any dependencies whose settings should be imported to the list - # if not already present. Newly-added items will be checked for - # their own imports when the list iteration reaches them. - # Rather than simply appending new items, insert them after the - # dependency that exported them. This is done to more closely match - # the depth-first method used by DeepDependencies. - add_index = 1 - for imported_dependency in \ - dependency_dict.get('export_dependent_settings', []): - if imported_dependency not in dependencies: - dependencies.insert(index + add_index, imported_dependency) - add_index = add_index + 1 - index = index + 1 - - return dependencies - - def DirectAndImportedDependencies(self, targets, dependencies=None): - """Returns a list of a target's direct dependencies and all indirect - dependencies that a dependency has advertised settings should be exported - through the dependency for. - """ - - dependencies = self.DirectDependencies(dependencies) - return self._AddImportedDependencies(targets, dependencies) - - def DeepDependencies(self, dependencies=None): - """Returns a list of all of a target's dependencies, recursively.""" - if dependencies == None: - dependencies = [] - - for dependency in self.dependencies: - # Check for None, corresponding to the root node. 
- if dependency.ref != None and dependency.ref not in dependencies: - dependencies.append(dependency.ref) - dependency.DeepDependencies(dependencies) - - return dependencies - - def LinkDependencies(self, targets, dependencies=None, initial=True): - """Returns a list of dependency targets that are linked into this target. - - This function has a split personality, depending on the setting of - |initial|. Outside callers should always leave |initial| at its default - setting. - - When adding a target to the list of dependencies, this function will - recurse into itself with |initial| set to False, to collect dependencies - that are linked into the linkable target for which the list is being built. - """ - if dependencies == None: - dependencies = [] - - # Check for None, corresponding to the root node. - if self.ref == None: - return dependencies - - # It's kind of sucky that |targets| has to be passed into this function, - # but that's presently the easiest way to access the target dicts so that - # this function can find target types. - - if 'target_name' not in targets[self.ref]: - raise GypError("Missing 'target_name' field in target.") - - if 'type' not in targets[self.ref]: - raise GypError("Missing 'type' field in target %s" % - targets[self.ref]['target_name']) - - target_type = targets[self.ref]['type'] - - is_linkable = target_type in linkable_types - - if initial and not is_linkable: - # If this is the first target being examined and it's not linkable, - # return an empty list of link dependencies, because the link - # dependencies are intended to apply to the target itself (initial is - # True) and this target won't be linked. - return dependencies - - # Don't traverse 'none' targets if explicitly excluded. - if (target_type == 'none' and - not targets[self.ref].get('dependencies_traverse', True)): - if self.ref not in dependencies: - dependencies.append(self.ref) - return dependencies - - # Executables and loadable modules are already fully and finally linked. - # Nothing else can be a link dependency of them, there can only be - # dependencies in the sense that a dependent target might run an - # executable or load the loadable_module. - if not initial and target_type in ('executable', 'loadable_module'): - return dependencies - - # The target is linkable, add it to the list of link dependencies. - if self.ref not in dependencies: - dependencies.append(self.ref) - if initial or not is_linkable: - # If this is a subsequent target and it's linkable, don't look any - # further for linkable dependencies, as they'll already be linked into - # this target linkable. Always look at dependencies of the initial - # target, and always look at dependencies of non-linkables. - for dependency in self.dependencies: - dependency.LinkDependencies(targets, dependencies, False) - - return dependencies - - -def BuildDependencyList(targets): - # Create a DependencyGraphNode for each target. Put it into a dict for easy - # access. - dependency_nodes = {} - for target, spec in targets.iteritems(): - if target not in dependency_nodes: - dependency_nodes[target] = DependencyGraphNode(target) - - # Set up the dependency links. Targets that have no dependencies are treated - # as dependent on root_node. 
- root_node = DependencyGraphNode(None) - for target, spec in targets.iteritems(): - target_node = dependency_nodes[target] - target_build_file = gyp.common.BuildFile(target) - dependencies = spec.get('dependencies') - if not dependencies: - target_node.dependencies = [root_node] - root_node.dependents.append(target_node) - else: - for dependency in dependencies: - dependency_node = dependency_nodes.get(dependency) - if not dependency_node: - raise GypError("Dependency '%s' not found while " - "trying to load target %s" % (dependency, target)) - target_node.dependencies.append(dependency_node) - dependency_node.dependents.append(target_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). If you need to figure out what's wrong, look for elements of - # targets that are not in flat_list. - if len(flat_list) != len(targets): - raise DependencyGraphNode.CircularException( - 'Some targets not reachable, cycle in dependency graph detected: ' + - ' '.join(set(flat_list) ^ set(targets))) - - return [dependency_nodes, flat_list] - - -def VerifyNoGYPFileCircularDependencies(targets): - # Create a DependencyGraphNode for each gyp file containing a target. Put - # it into a dict for easy access. - dependency_nodes = {} - for target in targets.iterkeys(): - build_file = gyp.common.BuildFile(target) - if not build_file in dependency_nodes: - dependency_nodes[build_file] = DependencyGraphNode(build_file) - - # Set up the dependency links. - for target, spec in targets.iteritems(): - build_file = gyp.common.BuildFile(target) - build_file_node = dependency_nodes[build_file] - target_dependencies = spec.get('dependencies', []) - for dependency in target_dependencies: - try: - dependency_build_file = gyp.common.BuildFile(dependency) - except GypError, e: - gyp.common.ExceptionAppend( - e, 'while computing dependencies of .gyp file %s' % build_file) - raise - - if dependency_build_file == build_file: - # A .gyp file is allowed to refer back to itself. - continue - dependency_node = dependency_nodes.get(dependency_build_file) - if not dependency_node: - raise GypError("Dependancy '%s' not found" % dependency_build_file) - if dependency_node not in build_file_node.dependencies: - build_file_node.dependencies.append(dependency_node) - dependency_node.dependents.append(build_file_node) - - - # Files that have no dependencies are treated as dependent on root_node. - root_node = DependencyGraphNode(None) - for build_file_node in dependency_nodes.itervalues(): - if len(build_file_node.dependencies) == 0: - build_file_node.dependencies.append(root_node) - root_node.dependents.append(build_file_node) - - flat_list = root_node.FlattenToList() - - # If there's anything left unvisited, there must be a circular dependency - # (cycle). - if len(flat_list) != len(dependency_nodes): - bad_files = [] - for file in dependency_nodes.iterkeys(): - if not file in flat_list: - bad_files.append(file) - raise DependencyGraphNode.CircularException, \ - 'Some files not reachable, cycle in .gyp file dependency graph ' + \ - 'detected involving some or all of: ' + \ - ' '.join(bad_files) - - -def DoDependentSettings(key, flat_list, targets, dependency_nodes): - # key should be one of all_dependent_settings, direct_dependent_settings, - # or link_settings. 
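
As a compact illustration of what BuildDependencyList and FlattenToList above accomplish, the sketch below orders a few invented targets so that every target comes after all of its dependencies, and treats anything left unordered as evidence of a cycle. Plain in-degree bookkeeping stands in here for the DependencyGraphNode machinery and its implicit root node.

def flatten(targets):
    # targets maps a qualified target name to the list of names it depends on.
    remaining = dict((t, set(d)) for t, d in targets.items())
    order = []
    ready = sorted(t for t, d in remaining.items() if not d)
    while ready:
        t = ready.pop()
        order.append(t)
        for other, deps in remaining.items():
            if t in deps:
                deps.remove(t)
                if not deps:
                    ready.append(other)
    if len(order) != len(targets):
        raise Exception('cycle in dependency graph: ' +
                        ' '.join(sorted(set(targets) - set(order))))
    return order

targets = {'app.gyp:app': ['lib.gyp:net', 'lib.gyp:base'],
           'lib.gyp:net': ['lib.gyp:base'],
           'lib.gyp:base': []}
order = flatten(targets)
assert order.index('lib.gyp:base') < order.index('lib.gyp:net')
assert order.index('lib.gyp:net') < order.index('app.gyp:app')
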
- - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - - if key == 'all_dependent_settings': - dependencies = dependency_nodes[target].DeepDependencies() - elif key == 'direct_dependent_settings': - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - elif key == 'link_settings': - dependencies = dependency_nodes[target].LinkDependencies(targets) - else: - raise GypError("DoDependentSettings doesn't know how to determine " - 'dependencies for ' + key) - - for dependency in dependencies: - dependency_dict = targets[dependency] - if not key in dependency_dict: - continue - dependency_build_file = gyp.common.BuildFile(dependency) - MergeDicts(target_dict, dependency_dict[key], - build_file, dependency_build_file) - - -def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - sort_dependencies): - # Recompute target "dependencies" properties. For each static library - # target, remove "dependencies" entries referring to other static libraries, - # unless the dependency has the "hard_dependency" attribute set. For each - # linkable target, add a "dependencies" entry referring to all of the - # target's computed list of link dependencies (including static libraries - # if no such entry is already present. - for target in flat_list: - target_dict = targets[target] - target_type = target_dict['type'] - - if target_type == 'static_library': - if not 'dependencies' in target_dict: - continue - - target_dict['dependencies_original'] = target_dict.get( - 'dependencies', [])[:] - - # A static library should not depend on another static library unless - # the dependency relationship is "hard," which should only be done when - # a dependent relies on some side effect other than just the build - # product, like a rule or action output. Further, if a target has a - # non-hard dependency, but that dependency exports a hard dependency, - # the non-hard dependency can safely be removed, but the exported hard - # dependency must be added to the target to keep the same dependency - # ordering. - dependencies = \ - dependency_nodes[target].DirectAndImportedDependencies(targets) - index = 0 - while index < len(dependencies): - dependency = dependencies[index] - dependency_dict = targets[dependency] - - # Remove every non-hard static library dependency and remove every - # non-static library dependency that isn't a direct dependency. - if (dependency_dict['type'] == 'static_library' and \ - not dependency_dict.get('hard_dependency', False)) or \ - (dependency_dict['type'] != 'static_library' and \ - not dependency in target_dict['dependencies']): - # Take the dependency out of the list, and don't increment index - # because the next dependency to analyze will shift into the index - # formerly occupied by the one being removed. - del dependencies[index] - else: - index = index + 1 - - # Update the dependencies. If the dependencies list is empty, it's not - # needed, so unhook it. - if len(dependencies) > 0: - target_dict['dependencies'] = dependencies - else: - del target_dict['dependencies'] - - elif target_type in linkable_types: - # Get a list of dependency targets that should be linked into this - # target. Add them to the dependencies list if they're not already - # present. 
- - link_dependencies = dependency_nodes[target].LinkDependencies(targets) - for dependency in link_dependencies: - if dependency == target: - continue - if not 'dependencies' in target_dict: - target_dict['dependencies'] = [] - if not dependency in target_dict['dependencies']: - target_dict['dependencies'].append(dependency) - # Sort the dependencies list in the order from dependents to dependencies. - # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. - # Note: flat_list is already sorted in the order from dependencies to - # dependents. - if sort_dependencies and 'dependencies' in target_dict: - target_dict['dependencies'] = [dep for dep in reversed(flat_list) - if dep in target_dict['dependencies']] - - -# Initialize this here to speed up MakePathRelative. -exception_re = re.compile(r'''["']?[-/$<>^]''') - - -def MakePathRelative(to_file, fro_file, item): - # If item is a relative path, it's relative to the build file dict that it's - # coming from. Fix it up to make it relative to the build file dict that - # it's going into. - # Exception: any |item| that begins with these special characters is - # returned without modification. - # / Used when a path is already absolute (shortcut optimization; - # such paths would be returned as absolute anyway) - # $ Used for build environment variables - # - Used for some build environment flags (such as -lapr-1 in a - # "libraries" section) - # < Used for our own variable and command expansions (see ExpandVariables) - # > Used for our own variable and command expansions (see ExpandVariables) - # ^ Used for our own variable and command expansions (see ExpandVariables) - # - # "/' Used when a value is quoted. If these are present, then we - # check the second character instead. - # - if to_file == fro_file or exception_re.match(item): - return item - else: - # TODO(dglazkov) The backslash/forward-slash replacement at the end is a - # temporary measure. This should really be addressed by keeping all paths - # in POSIX until actual project generation. - ret = os.path.normpath(os.path.join( - gyp.common.RelativePath(os.path.dirname(fro_file), - os.path.dirname(to_file)), - item)).replace('\\', '/') - if item[-1] == '/': - ret += '/' - return ret - -def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): - # Python documentation recommends objects which do not support hash - # set this value to None. Python library objects follow this rule. - is_hashable = lambda val: val.__hash__ - - # If x is hashable, returns whether x is in s. Else returns whether x is in l. - def is_in_set_or_list(x, s, l): - if is_hashable(x): - return x in s - return x in l - - prepend_index = 0 - - # Make membership testing of hashables in |to| (in particular, strings) - # faster. - hashable_to_set = set(x for x in to if is_hashable(x)) - for item in fro: - singleton = False - if isinstance(item, str) or isinstance(item, int): - # The cheap and easy case. - if is_paths: - to_item = MakePathRelative(to_file, fro_file, item) - else: - to_item = item - - if not isinstance(item, str) or not item.startswith('-'): - # Any string that doesn't begin with a "-" is a singleton - it can - # only appear once in a list, to be enforced by the list merge append - # or prepend. - singleton = True - elif isinstance(item, dict): - # Make a copy of the dictionary, continuing to look for paths to fix. - # The other intelligent aspects of merge processing won't apply because - # item is being merged into an empty dict. 
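
A worked example of the rewrite MakePathRelative performs when a relative path is merged from one .gyp file's dict into another's (the file names are invented, and os.path.relpath stands in for gyp.common.RelativePath, which lives outside this file):

import os

to_file = 'base/base.gyp'        # dict the value is being merged into
fro_file = 'net/net.gyp'         # dict the value came from
item = 'tools/gen_stuff.py'      # path as written in net/net.gyp

rebased = os.path.normpath(os.path.join(
    os.path.relpath(os.path.dirname(fro_file), os.path.dirname(to_file)),
    item)).replace('\\', '/')
assert rebased == '../net/tools/gen_stuff.py'

Values beginning with '/', '$', '-', '<', '>', '^' or a quote are passed through untouched, as the exception_re above spells out.
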
- to_item = {} - MergeDicts(to_item, item, to_file, fro_file) - elif isinstance(item, list): - # Recurse, making a copy of the list. If the list contains any - # descendant dicts, path fixing will occur. Note that here, custom - # values for is_paths and append are dropped; those are only to be - # applied to |to| and |fro|, not sublists of |fro|. append shouldn't - # matter anyway because the new |to_item| list is empty. - to_item = [] - MergeLists(to_item, item, to_file, fro_file) - else: - raise TypeError, \ - 'Attempt to merge list item of unsupported type ' + \ - item.__class__.__name__ - - if append: - # If appending a singleton that's already in the list, don't append. - # This ensures that the earliest occurrence of the item will stay put. - if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): - to.append(to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - else: - # If prepending a singleton that's already in the list, remove the - # existing instance and proceed with the prepend. This ensures that the - # item appears at the earliest possible position in the list. - while singleton and to_item in to: - to.remove(to_item) - - # Don't just insert everything at index 0. That would prepend the new - # items to the list in reverse order, which would be an unwelcome - # surprise. - to.insert(prepend_index, to_item) - if is_hashable(to_item): - hashable_to_set.add(to_item) - prepend_index = prepend_index + 1 - - -def MergeDicts(to, fro, to_file, fro_file): - # I wanted to name the parameter "from" but it's a Python keyword... - for k, v in fro.iteritems(): - # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give - # copy semantics. Something else may want to merge from the |fro| dict - # later, and having the same dict ref pointed to twice in the tree isn't - # what anyone wants considering that the dicts may subsequently be - # modified. - if k in to: - bad_merge = False - if isinstance(v, str) or isinstance(v, int): - if not (isinstance(to[k], str) or isinstance(to[k], int)): - bad_merge = True - elif v.__class__ != to[k].__class__: - bad_merge = True - - if bad_merge: - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[k].__class__.__name__ + \ - ' for key ' + k - if isinstance(v, str) or isinstance(v, int): - # Overwrite the existing value, if any. Cheap and easy. - is_path = IsPathSection(k) - if is_path: - to[k] = MakePathRelative(to_file, fro_file, v) - else: - to[k] = v - elif isinstance(v, dict): - # Recurse, guaranteeing copies will be made of objects that require it. - if not k in to: - to[k] = {} - MergeDicts(to[k], v, to_file, fro_file) - elif isinstance(v, list): - # Lists in dicts can be merged with different policies, depending on - # how the key in the "from" dict (k, the from-key) is written. - # - # If the from-key has ...the to-list will have this action - # this character appended:... applied when receiving the from-list: - # = replace - # + prepend - # ? set, only if to-list does not yet exist - # (none) append - # - # This logic is list-specific, but since it relies on the associated - # dict key, it's checked in this dict-oriented function. 
- ext = k[-1] - append = True - if ext == '=': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '?'] - to[list_base] = [] - elif ext == '+': - list_base = k[:-1] - lists_incompatible = [list_base + '=', list_base + '?'] - append = False - elif ext == '?': - list_base = k[:-1] - lists_incompatible = [list_base, list_base + '=', list_base + '+'] - else: - list_base = k - lists_incompatible = [list_base + '=', list_base + '?'] - - # Some combinations of merge policies appearing together are meaningless. - # It's stupid to replace and append simultaneously, for example. Append - # and prepend are the only policies that can coexist. - for list_incompatible in lists_incompatible: - if list_incompatible in fro: - raise GypError('Incompatible list policies ' + k + ' and ' + - list_incompatible) - - if list_base in to: - if ext == '?': - # If the key ends in "?", the list will only be merged if it doesn't - # already exist. - continue - if not isinstance(to[list_base], list): - # This may not have been checked above if merging in a list with an - # extension character. - raise TypeError, \ - 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ - ' into incompatible type ' + to[list_base].__class__.__name__ + \ - ' for key ' + list_base + '(' + k + ')' - else: - to[list_base] = [] - - # Call MergeLists, which will make copies of objects that require it. - # MergeLists can recurse back into MergeDicts, although this will be - # to make copies of dicts (with paths fixed), there will be no - # subsequent dict "merging" once entering a list because lists are - # always replaced, appended to, or prepended to. - is_paths = IsPathSection(list_base) - MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) - else: - raise TypeError, \ - 'Attempt to merge dict value of unsupported type ' + \ - v.__class__.__name__ + ' for key ' + k - - -def MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, visited): - # Skip if previously visted. - if configuration in visited: - return - - # Look at this configuration. - configuration_dict = target_dict['configurations'][configuration] - - # Merge in parents. - for parent in configuration_dict.get('inherit_from', []): - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, parent, visited + [configuration]) - - # Merge it into the new config. - MergeDicts(new_configuration_dict, configuration_dict, - build_file, build_file) - - # Drop abstract. - if 'abstract' in new_configuration_dict: - del new_configuration_dict['abstract'] - - -def SetUpConfigurations(target, target_dict): - # key_suffixes is a list of key suffixes that might appear on key names. - # These suffixes are handled in conditional evaluations (for =, +, and ?) - # and rules/exclude processing (for ! and /). Keys with these suffixes - # should be treated the same as keys without. - key_suffixes = ['=', '+', '?', '!', '/'] - - build_file = gyp.common.BuildFile(target) - - # Provide a single configuration by default if none exists. - # TODO(mark): Signal an error if default_configurations exists but - # configurations does not. 
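
The '=', '+' and '?' key suffixes handled above are the heart of MergeDicts' list handling, so a toy version of just the list policies may help (the function, keys and flags below are invented); the real code also rebases paths, enforces singleton semantics and type-checks the operands.

def merge_lists(to, fro):
    # Invented miniature of the list-policy handling in MergeDicts:
    #   'key='  replace, 'key+' prepend, 'key?' only if unset, 'key' append.
    for key, value in fro.items():
        if key[-1] in '=+?':
            base, ext = key[:-1], key[-1]
        else:
            base, ext = key, ''
        if ext == '?' and base in to:
            continue                      # keep whatever is already there
        if ext == '=' or base not in to:
            to[base] = []
        if ext == '+':
            to[base][0:0] = value         # prepend
        else:
            to[base].extend(value)        # append (or fill after a replace)

to = {'defines': ['A'], 'cflags': ['-O2']}
merge_lists(to, {'defines+': ['B'], 'cflags=': ['-O0'], 'ldflags?': ['-lfoo']})
assert to == {'defines': ['B', 'A'], 'cflags': ['-O0'], 'ldflags': ['-lfoo']}
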
- if not 'configurations' in target_dict: - target_dict['configurations'] = {'Default': {}} - if not 'default_configuration' in target_dict: - concrete = [i for i in target_dict['configurations'].iterkeys() - if not target_dict['configurations'][i].get('abstract')] - target_dict['default_configuration'] = sorted(concrete)[0] - - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - # Skip abstract configurations (saves work only). - if old_configuration_dict.get('abstract'): - continue - # Configurations inherit (most) settings from the enclosing target scope. - # Get the inheritance relationship right by making a copy of the target - # dict. - new_configuration_dict = copy.deepcopy(target_dict) - - # Take out the bits that don't belong in a "configurations" section. - # Since configuration setup is done before conditional, exclude, and rules - # processing, be careful with handling of the suffix characters used in - # those phases. - delete_keys = [] - for key in new_configuration_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if key_base in non_configuration_keys: - delete_keys.append(key) - - for key in delete_keys: - del new_configuration_dict[key] - - # Merge in configuration (with all its parents first). - MergeConfigWithInheritance(new_configuration_dict, build_file, - target_dict, configuration, []) - - # Put the new result back into the target dict as a configuration. - target_dict['configurations'][configuration] = new_configuration_dict - - # Now drop all the abstract ones. - for configuration in target_dict['configurations'].keys(): - old_configuration_dict = target_dict['configurations'][configuration] - if old_configuration_dict.get('abstract'): - del target_dict['configurations'][configuration] - - # Now that all of the target's configurations have been built, go through - # the target dict's keys and remove everything that's been moved into a - # "configurations" section. - delete_keys = [] - for key in target_dict: - key_ext = key[-1:] - if key_ext in key_suffixes: - key_base = key[:-1] - else: - key_base = key - if not key_base in non_configuration_keys: - delete_keys.append(key) - for key in delete_keys: - del target_dict[key] - - # Check the configurations to see if they contain invalid keys. - for configuration in target_dict['configurations'].keys(): - configuration_dict = target_dict['configurations'][configuration] - for key in configuration_dict.keys(): - if key in invalid_configuration_keys: - raise GypError('%s not allowed in the %s configuration, found in ' - 'target %s' % (key, configuration, target)) - - - -def ProcessListFiltersInDict(name, the_dict): - """Process regular expression and exclusion-based filters on lists. - - An exclusion list is in a dict key named with a trailing "!", like - "sources!". Every item in such a list is removed from the associated - main list, which in this example, would be "sources". Removed items are - placed into a "sources_excluded" list in the dict. - - Regular expression (regex) filters are contained in dict keys named with a - trailing "/", such as "sources/" to operate on the "sources" list. Regex - filters in a dict take the form: - 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], - ['include', '_mac\\.cc$'] ], - The first filter says to exclude all files ending in _linux.cc, _mac.cc, and - _win.cc. 
The second filter then includes all files ending in _mac.cc that - are now or were once in the "sources" list. Items matching an "exclude" - filter are subject to the same processing as would occur if they were listed - by name in an exclusion list (ending in "!"). Items matching an "include" - filter are brought back into the main list if previously excluded by an - exclusion list or exclusion regex filter. Subsequent matching "exclude" - patterns can still cause items to be excluded after matching an "include". - """ - - # Look through the dictionary for any lists whose keys end in "!" or "/". - # These are lists that will be treated as exclude lists and regular - # expression-based exclude/include lists. Collect the lists that are - # needed first, looking for the lists that they operate on, and assemble - # then into |lists|. This is done in a separate loop up front, because - # the _included and _excluded keys need to be added to the_dict, and that - # can't be done while iterating through it. - - lists = [] - del_lists = [] - for key, value in the_dict.iteritems(): - operation = key[-1] - if operation != '!' and operation != '/': - continue - - if not isinstance(value, list): - raise ValueError, name + ' key ' + key + ' must be list, not ' + \ - value.__class__.__name__ - - list_key = key[:-1] - if list_key not in the_dict: - # This happens when there's a list like "sources!" but no corresponding - # "sources" list. Since there's nothing for it to operate on, queue up - # the "sources!" list for deletion now. - del_lists.append(key) - continue - - if not isinstance(the_dict[list_key], list): - raise ValueError, name + ' key ' + list_key + \ - ' must be list, not ' + \ - value.__class__.__name__ + ' when applying ' + \ - {'!': 'exclusion', '/': 'regex'}[operation] - - if not list_key in lists: - lists.append(list_key) - - # Delete the lists that are known to be unneeded at this point. - for del_list in del_lists: - del the_dict[del_list] - - for list_key in lists: - the_list = the_dict[list_key] - - # Initialize the list_actions list, which is parallel to the_list. Each - # item in list_actions identifies whether the corresponding item in - # the_list should be excluded, unconditionally preserved (included), or - # whether no exclusion or inclusion has been applied. Items for which - # no exclusion or inclusion has been applied (yet) have value -1, items - # excluded have value 0, and items included have value 1. Includes and - # excludes override previous actions. All items in list_actions are - # initialized to -1 because no excludes or includes have been processed - # yet. - list_actions = list((-1,) * len(the_list)) - - exclude_key = list_key + '!' - if exclude_key in the_dict: - for exclude_item in the_dict[exclude_key]: - for index in xrange(0, len(the_list)): - if exclude_item == the_list[index]: - # This item matches the exclude_item, so set its action to 0 - # (exclude). - list_actions[index] = 0 - - # The "whatever!" list is no longer needed, dump it. - del the_dict[exclude_key] - - regex_key = list_key + '/' - if regex_key in the_dict: - for regex_item in the_dict[regex_key]: - [action, pattern] = regex_item - pattern_re = re.compile(pattern) - - if action == 'exclude': - # This item matches an exclude regex, so set its value to 0 (exclude). - action_value = 0 - elif action == 'include': - # This item matches an include regex, so set its value to 1 (include). - action_value = 1 - else: - # This is an action that doesn't make any sense. 
- raise ValueError, 'Unrecognized action ' + action + ' in ' + name + \ - ' key ' + regex_key - - for index in xrange(0, len(the_list)): - list_item = the_list[index] - if list_actions[index] == action_value: - # Even if the regex matches, nothing will change so continue (regex - # searches are expensive). - continue - if pattern_re.search(list_item): - # Regular expression match. - list_actions[index] = action_value - - # The "whatever/" list is no longer needed, dump it. - del the_dict[regex_key] - - # Add excluded items to the excluded list. - # - # Note that exclude_key ("sources!") is different from excluded_key - # ("sources_excluded"). The exclude_key list is input and it was already - # processed and deleted; the excluded_key list is output and it's about - # to be created. - excluded_key = list_key + '_excluded' - if excluded_key in the_dict: - raise GypError(name + ' key ' + excluded_key + - ' must not be present prior ' - ' to applying exclusion/regex filters for ' + list_key) - - excluded_list = [] - - # Go backwards through the list_actions list so that as items are deleted, - # the indices of items that haven't been seen yet don't shift. That means - # that things need to be prepended to excluded_list to maintain them in the - # same order that they existed in the_list. - for index in xrange(len(list_actions) - 1, -1, -1): - if list_actions[index] == 0: - # Dump anything with action 0 (exclude). Keep anything with action 1 - # (include) or -1 (no include or exclude seen for the item). - excluded_list.insert(0, the_list[index]) - del the_list[index] - - # If anything was excluded, put the excluded list into the_dict at - # excluded_key. - if len(excluded_list) > 0: - the_dict[excluded_key] = excluded_list - - # Now recurse into subdicts and lists that may contain dicts. - for key, value in the_dict.iteritems(): - if isinstance(value, dict): - ProcessListFiltersInDict(key, value) - elif isinstance(value, list): - ProcessListFiltersInList(key, value) - - -def ProcessListFiltersInList(name, the_list): - for item in the_list: - if isinstance(item, dict): - ProcessListFiltersInDict(name, item) - elif isinstance(item, list): - ProcessListFiltersInList(name, item) - - -def ValidateTargetType(target, target_dict): - """Ensures the 'type' field on the target is one of the known types. - - Arguments: - target: string, name of target. - target_dict: dict, target spec. - - Raises an exception on error. - """ - VALID_TARGET_TYPES = ('executable', 'loadable_module', - 'static_library', 'shared_library', - 'none') - target_type = target_dict.get('type', None) - if target_type not in VALID_TARGET_TYPES: - raise GypError("Target %s has an invalid target type '%s'. " - "Must be one of %s." % - (target, target_type, '/'.join(VALID_TARGET_TYPES))) - if (target_dict.get('standalone_static_library', 0) and - not target_type == 'static_library'): - raise GypError('Target %s has type %s but standalone_static_library flag is' - ' only valid for static_library type.' % (target, - target_type)) - - -def ValidateSourcesInTarget(target, target_dict, build_file): - # TODO: Check if MSVC allows this for loadable_module targets. 
- if target_dict.get('type', None) not in ('static_library', 'shared_library'): - return - sources = target_dict.get('sources', []) - basenames = {} - for source in sources: - name, ext = os.path.splitext(source) - is_compiled_file = ext in [ - '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] - if not is_compiled_file: - continue - basename = os.path.basename(name) # Don't include extension. - basenames.setdefault(basename, []).append(source) - - error = '' - for basename, files in basenames.iteritems(): - if len(files) > 1: - error += ' %s: %s\n' % (basename, ' '.join(files)) - - if error: - print('static library %s has several files with the same basename:\n' % - target + error + 'Some build systems, e.g. MSVC08, ' - 'cannot handle that.') - raise GypError('Duplicate basenames in sources section, see list above') - - -def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): - """Ensures that the rules sections in target_dict are valid and consistent, - and determines which sources they apply to. - - Arguments: - target: string, name of target. - target_dict: dict, target spec containing "rules" and "sources" lists. - extra_sources_for_rules: a list of keys to scan for rule matches in - addition to 'sources'. - """ - - # Dicts to map between values found in rules' 'rule_name' and 'extension' - # keys and the rule dicts themselves. - rule_names = {} - rule_extensions = {} - - rules = target_dict.get('rules', []) - for rule in rules: - # Make sure that there's no conflict among rule names and extensions. - rule_name = rule['rule_name'] - if rule_name in rule_names: - raise GypError('rule %s exists in duplicate, target %s' % - (rule_name, target)) - rule_names[rule_name] = rule - - rule_extension = rule['extension'] - if rule_extension in rule_extensions: - raise GypError(('extension %s associated with multiple rules, ' + - 'target %s rules %s and %s') % - (rule_extension, target, - rule_extensions[rule_extension]['rule_name'], - rule_name)) - rule_extensions[rule_extension] = rule - - # Make sure rule_sources isn't already there. It's going to be - # created below if needed. - if 'rule_sources' in rule: - raise GypError( - 'rule_sources must not exist in input, target %s rule %s' % - (target, rule_name)) - extension = rule['extension'] - - rule_sources = [] - source_keys = ['sources'] - source_keys.extend(extra_sources_for_rules) - for source_key in source_keys: - for source in target_dict.get(source_key, []): - (source_root, source_extension) = os.path.splitext(source) - if source_extension.startswith('.'): - source_extension = source_extension[1:] - if source_extension == extension: - rule_sources.append(source) - - if len(rule_sources) > 0: - rule['rule_sources'] = rule_sources - - -def ValidateRunAsInTarget(target, target_dict, build_file): - target_name = target_dict.get('target_name') - run_as = target_dict.get('run_as') - if not run_as: - return - if not isinstance(run_as, dict): - raise GypError("The 'run_as' in target %s from file %s should be a " - "dictionary." % - (target_name, build_file)) - action = run_as.get('action') - if not action: - raise GypError("The 'run_as' in target %s from file %s must have an " - "'action' section." % - (target_name, build_file)) - if not isinstance(action, list): - raise GypError("The 'action' for 'run_as' in target %s from file %s " - "must be a list." 
% - (target_name, build_file)) - working_directory = run_as.get('working_directory') - if working_directory and not isinstance(working_directory, str): - raise GypError("The 'working_directory' for 'run_as' in target %s " - "in file %s should be a string." % - (target_name, build_file)) - environment = run_as.get('environment') - if environment and not isinstance(environment, dict): - raise GypError("The 'environment' for 'run_as' in target %s " - "in file %s should be a dictionary." % - (target_name, build_file)) - - -def ValidateActionsInTarget(target, target_dict, build_file): - '''Validates the inputs to the actions in a target.''' - target_name = target_dict.get('target_name') - actions = target_dict.get('actions', []) - for action in actions: - action_name = action.get('action_name') - if not action_name: - raise GypError("Anonymous action in target %s. " - "An action must have an 'action_name' field." % - target_name) - inputs = action.get('inputs', None) - if inputs is None: - raise GypError('Action in target %s has no inputs.' % target_name) - action_command = action.get('action') - if action_command and not action_command[0]: - raise GypError("Empty action as command in target %s." % target_name) - - -def TurnIntIntoStrInDict(the_dict): - """Given dict the_dict, recursively converts all integers into strings. - """ - # Use items instead of iteritems because there's no need to try to look at - # reinserted keys and their associated values. - for k, v in the_dict.items(): - if isinstance(v, int): - v = str(v) - the_dict[k] = v - elif isinstance(v, dict): - TurnIntIntoStrInDict(v) - elif isinstance(v, list): - TurnIntIntoStrInList(v) - - if isinstance(k, int): - the_dict[str(k)] = v - del the_dict[k] - - -def TurnIntIntoStrInList(the_list): - """Given list the_list, recursively converts all integers into strings. - """ - for index in xrange(0, len(the_list)): - item = the_list[index] - if isinstance(item, int): - the_list[index] = str(item) - elif isinstance(item, dict): - TurnIntIntoStrInDict(item) - elif isinstance(item, list): - TurnIntIntoStrInList(item) - - -def VerifyNoCollidingTargets(targets): - """Verify that no two targets in the same directory share the same name. - - Arguments: - targets: A list of targets in the form 'path/to/file.gyp:target_name'. - """ - # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. - used = {} - for target in targets: - # Separate out 'path/to/file.gyp, 'target_name' from - # 'path/to/file.gyp:target_name'. - path, name = target.rsplit(':', 1) - # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. - subdir, gyp = os.path.split(path) - # Use '.' for the current directory '', so that the error messages make - # more sense. - if not subdir: - subdir = '.' - # Prepare a key like 'path/to:target_name'. - key = subdir + ':' + name - if key in used: - # Complain if this target is already used. - raise GypError('Duplicate target name "%s" in directory "%s" used both ' - 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) - used[key] = gyp - - -def Load(build_files, variables, includes, depth, generator_input_info, check, - circular_check, parallel): - # Set up path_sections and non_configuration_keys with the default data plus - # the generator-specifc data. 
- global path_sections - path_sections = base_path_sections[:] - path_sections.extend(generator_input_info['path_sections']) - - global non_configuration_keys - non_configuration_keys = base_non_configuration_keys[:] - non_configuration_keys.extend(generator_input_info['non_configuration_keys']) - - # TODO(mark) handle variants if the generator doesn't want them directly. - generator_handles_variants = \ - generator_input_info['generator_handles_variants'] - - global absolute_build_file_paths - absolute_build_file_paths = \ - generator_input_info['generator_wants_absolute_build_file_paths'] - - global multiple_toolsets - multiple_toolsets = generator_input_info[ - 'generator_supports_multiple_toolsets'] - - # A generator can have other lists (in addition to sources) be processed - # for rules. - extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] - - # Load build files. This loads every target-containing build file into - # the |data| dictionary such that the keys to |data| are build file names, - # and the values are the entire build file contents after "early" or "pre" - # processing has been done and includes have been resolved. - # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as - # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps - # track of the keys corresponding to "target" files. - data = {'target_build_files': set()} - aux_data = {} - for build_file in build_files: - # Normalize paths everywhere. This is important because paths will be - # used as keys to the data dict and for references between input files. - build_file = os.path.normpath(build_file) - try: - if parallel: - print >>sys.stderr, 'Using parallel processing.' - LoadTargetBuildFileParallel(build_file, data, aux_data, - variables, includes, depth, check) - else: - LoadTargetBuildFile(build_file, data, aux_data, - variables, includes, depth, check, True) - except Exception, e: - gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) - raise - - # Build a dict to access each target's subdict by qualified name. - targets = BuildTargetsDict(data) - - # Fully qualify all dependency links. - QualifyDependencies(targets) - - # Remove self-dependencies from targets that have 'prune_self_dependencies' - # set to 1. - RemoveSelfDependencies(targets) - - # Expand dependencies specified as build_file:*. - ExpandWildcardDependencies(targets, data) - - # Apply exclude (!) and regex (/) list filters only for dependency_sections. - for target_name, target_dict in targets.iteritems(): - tmp_dict = {} - for key_base in dependency_sections: - for op in ('', '!', '/'): - key = key_base + op - if key in target_dict: - tmp_dict[key] = target_dict[key] - del target_dict[key] - ProcessListFiltersInDict(target_name, tmp_dict) - # Write the results back to |target_dict|. - for key in tmp_dict: - target_dict[key] = tmp_dict[key] - - # Make sure every dependency appears at most once. - RemoveDuplicateDependencies(targets) - - if circular_check: - # Make sure that any targets in a.gyp don't contain dependencies in other - # .gyp files that further depend on a.gyp. - VerifyNoGYPFileCircularDependencies(targets) - - [dependency_nodes, flat_list] = BuildDependencyList(targets) - - # Check that no two targets in the same directory have the same name. - VerifyNoCollidingTargets(flat_list) - - # Handle dependent settings of various types. 
- for settings_type in ['all_dependent_settings', - 'direct_dependent_settings', - 'link_settings']: - DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) - - # Take out the dependent settings now that they've been published to all - # of the targets that require them. - for target in flat_list: - if settings_type in targets[target]: - del targets[target][settings_type] - - # Make sure static libraries don't declare dependencies on other static - # libraries, but that linkables depend on all unlinked static libraries - # that they need so that their link steps will be correct. - gii = generator_input_info - if gii['generator_wants_static_library_dependencies_adjusted']: - AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, - gii['generator_wants_sorted_dependencies']) - - # Apply "post"/"late"/"target" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATE, variables, build_file) - - # Move everything that can go into a "configurations" section into one. - for target in flat_list: - target_dict = targets[target] - SetUpConfigurations(target, target_dict) - - # Apply exclude (!) and regex (/) list filters. - for target in flat_list: - target_dict = targets[target] - ProcessListFiltersInDict(target, target_dict) - - # Apply "latelate" variable expansions and condition evaluations. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ProcessVariablesAndConditionsInDict( - target_dict, PHASE_LATELATE, variables, build_file) - - # Make sure that the rules make sense, and build up rule_sources lists as - # needed. Not all generators will need to use the rule_sources lists, but - # some may, and it seems best to build the list in a common spot. - # Also validate actions and run_as elements in targets. - for target in flat_list: - target_dict = targets[target] - build_file = gyp.common.BuildFile(target) - ValidateTargetType(target, target_dict) - # TODO(thakis): Get vpx_scale/arm/scalesystemdependent.c to be renamed to - # scalesystemdependent_arm_additions.c or similar. - if 'arm' not in variables.get('target_arch', ''): - ValidateSourcesInTarget(target, target_dict, build_file) - ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) - ValidateRunAsInTarget(target, target_dict, build_file) - ValidateActionsInTarget(target, target_dict, build_file) - - # Generators might not expect ints. Turn them into strs. - TurnIntIntoStrInDict(data) - - # TODO(mark): Return |data| for now because the generator needs a list of - # build files that came in. In the future, maybe it should just accept - # a list, and not the whole data dict. - return [flat_list, targets, data] diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/mac_tool.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/mac_tool.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/mac_tool.py 2013-02-13 18:51:16.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/mac_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,223 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions to perform Xcode-style build steps. - -These functions are executed via gyp-mac-tool when using the Makefile generator. 
-""" - -import fcntl -import os -import plistlib -import re -import shutil -import string -import subprocess -import sys - - -def main(args): - executor = MacTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class MacTool(object): - """This class performs all the Mac tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecCopyBundleResource(self, source, dest): - """Copies a resource file to the bundle/Resources directory, performing any - necessary compilation on each resource.""" - extension = os.path.splitext(source)[1].lower() - if os.path.isdir(source): - # Copy tree. - if os.path.exists(dest): - shutil.rmtree(dest) - shutil.copytree(source, dest) - elif extension == '.xib': - return self._CopyXIBFile(source, dest) - elif extension == '.strings': - self._CopyStringsFile(source, dest) - else: - shutil.copyfile(source, dest) - - def _CopyXIBFile(self, source, dest): - """Compiles a XIB file with ibtool into a binary plist in the bundle.""" - tools_dir = os.environ.get('DEVELOPER_BIN_DIR', '/usr/bin') - args = [os.path.join(tools_dir, 'ibtool'), '--errors', '--warnings', - '--notices', '--output-format', 'human-readable-text', '--compile', - dest, source] - ibtool_section_re = re.compile(r'/\*.*\*/') - ibtool_re = re.compile(r'.*note:.*is clipping its content') - ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE) - current_section_header = None - for line in ibtoolout.stdout: - if ibtool_section_re.match(line): - current_section_header = line - elif not ibtool_re.match(line): - if current_section_header: - sys.stdout.write(current_section_header) - current_section_header = None - sys.stdout.write(line) - return ibtoolout.returncode - - def _CopyStringsFile(self, source, dest): - """Copies a .strings file using iconv to reconvert the input into UTF-16.""" - input_code = self._DetectInputEncoding(source) or "UTF-8" - - # Xcode's CpyCopyStringsFile / builtin-copyStrings seems to call - # CFPropertyListCreateFromXMLData() behind the scenes; at least it prints - # CFPropertyListCreateFromXMLData(): Old-style plist parser: missing - # semicolon in dictionary. - # on invalid files. Do the same kind of validation. - import CoreFoundation - s = open(source).read() - d = CoreFoundation.CFDataCreate(None, s, len(s)) - _, error = CoreFoundation.CFPropertyListCreateFromXMLData(None, d, 0, None) - if error: - return - - fp = open(dest, 'w') - args = ['/usr/bin/iconv', '--from-code', input_code, '--to-code', - 'UTF-16', source] - subprocess.call(args, stdout=fp) - fp.close() - - def _DetectInputEncoding(self, file_name): - """Reads the first few bytes from file_name and tries to guess the text - encoding. 
Returns None as a guess if it can't detect it.""" - fp = open(file_name, 'rb') - try: - header = fp.read(3) - except e: - fp.close() - return None - fp.close() - if header.startswith("\xFE\xFF"): - return "UTF-16BE" - elif header.startswith("\xFF\xFE"): - return "UTF-16LE" - elif header.startswith("\xEF\xBB\xBF"): - return "UTF-8" - else: - return None - - def ExecCopyInfoPlist(self, source, dest): - """Copies the |source| Info.plist to the destination directory |dest|.""" - # Read the source Info.plist into memory. - fd = open(source, 'r') - lines = fd.read() - fd.close() - - # Go through all the environment variables and replace them as variables in - # the file. - for key in os.environ: - if key.startswith('_'): - continue - evar = '${%s}' % key - lines = string.replace(lines, evar, os.environ[key]) - - # Write out the file with variables replaced. - fd = open(dest, 'w') - fd.write(lines) - fd.close() - - # Now write out PkgInfo file now that the Info.plist file has been - # "compiled". - self._WritePkgInfo(dest) - - def _WritePkgInfo(self, info_plist): - """This writes the PkgInfo file from the data stored in Info.plist.""" - plist = plistlib.readPlist(info_plist) - if not plist: - return - - # Only create PkgInfo for executable types. - package_type = plist['CFBundlePackageType'] - if package_type != 'APPL': - return - - # The format of PkgInfo is eight characters, representing the bundle type - # and bundle signature, each four characters. If that is missing, four - # '?' characters are used instead. - signature_code = plist.get('CFBundleSignature', '????') - if len(signature_code) != 4: # Wrong length resets everything, too. - signature_code = '?' * 4 - - dest = os.path.join(os.path.dirname(info_plist), 'PkgInfo') - fp = open(dest, 'w') - fp.write('%s%s' % (package_type, signature_code)) - fp.close() - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - fd = os.open(lockfile, os.O_RDONLY|os.O_NOCTTY|os.O_CREAT, 0o666) - fcntl.flock(fd, fcntl.LOCK_EX) - return subprocess.call(cmd_list) - - def ExecFilterLibtool(self, *cmd_list): - """Calls libtool and filters out 'libtool: file: foo.o has no symbols'.""" - libtool_re = re.compile(r'^libtool: file: .* has no symbols$') - libtoolout = subprocess.Popen(cmd_list, stderr=subprocess.PIPE) - _, err = libtoolout.communicate() - for line in err.splitlines(): - if not libtool_re.match(line): - print >>sys.stderr, line - return libtoolout.returncode - - def ExecPackageFramework(self, framework, version): - """Takes a path to Something.framework and the Current version of that and - sets up all the symlinks.""" - # Find the name of the binary based on the part before the ".framework". - binary = os.path.basename(framework).split('.')[0] - - CURRENT = 'Current' - RESOURCES = 'Resources' - VERSIONS = 'Versions' - - if not os.path.exists(os.path.join(framework, VERSIONS, version, binary)): - # Binary-less frameworks don't seem to contain symlinks (see e.g. - # chromium's out/Debug/org.chromium.Chromium.manifest/ bundle). - return - - # Move into the framework directory to set the symlinks correctly. - pwd = os.getcwd() - os.chdir(framework) - - # Set up the Current version. - self._Relink(version, os.path.join(VERSIONS, CURRENT)) - - # Set up the root symlinks. - self._Relink(os.path.join(VERSIONS, CURRENT, binary), binary) - self._Relink(os.path.join(VERSIONS, CURRENT, RESOURCES), RESOURCES) - - # Back to where we were before! 
- os.chdir(pwd) - - def _Relink(self, dest, link): - """Creates a symlink to |dest| named |link|. If |link| already exists, - it is overwritten.""" - if os.path.lexists(link): - os.remove(link) - os.symlink(dest, link) - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/msvs_emulation.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/msvs_emulation.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/msvs_emulation.py 2013-02-25 22:29:21.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/msvs_emulation.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,771 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -""" -This module helps emulate Visual Studio 2008 behavior on top of other -build systems, primarily ninja. -""" - -import os -import re -import subprocess -import sys - -import gyp.MSVSVersion - -windows_quoter_regex = re.compile(r'(\\*)"') - -def QuoteForRspFile(arg): - """Quote a command line argument so that it appears as one argument when - processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for - Windows programs).""" - # See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment - # threads. This is actually the quoting rules for CommandLineToArgvW, not - # for the shell, because the shell doesn't do anything in Windows. This - # works more or less because most programs (including the compiler, etc.) - # use that function to handle command line arguments. - - # For a literal quote, CommandLineToArgvW requires 2n+1 backslashes - # preceding it, and results in n backslashes + the quote. So we substitute - # in 2* what we match, +1 more, plus the quote. - arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg) - - # %'s also need to be doubled otherwise they're interpreted as batch - # positional arguments. Also make sure to escape the % so that they're - # passed literally through escaping so they can be singled to just the - # original %. Otherwise, trying to pass the literal representation that - # looks like an environment variable to the shell (e.g. %PATH%) would fail. - arg = arg.replace('%', '%%') - - # These commands are used in rsp files, so no escaping for the shell (via ^) - # is necessary. - - # Finally, wrap the whole thing in quotes so that the above quote rule - # applies and whitespace isn't a word break. - return '"' + arg + '"' - - -def EncodeRspFileList(args): - """Process a list of arguments using QuoteCmdExeArgument.""" - # Note that the first argument is assumed to be the command. Don't add - # quotes around it because then built-ins like 'echo', etc. won't work. - # Take care to normpath only the path in the case of 'call ../x.bat' because - # otherwise the whole thing is incorrectly interpreted as a path and not - # normalized correctly. 
- if not args: return '' - if args[0].startswith('call '): - call, program = args[0].split(' ', 1) - program = call + ' ' + os.path.normpath(program) - else: - program = os.path.normpath(args[0]) - return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:]) - - -def _GenericRetrieve(root, default, path): - """Given a list of dictionary keys |path| and a tree of dicts |root|, find - value at path, or return |default| if any of the path doesn't exist.""" - if not root: - return default - if not path: - return root - return _GenericRetrieve(root.get(path[0]), default, path[1:]) - - -def _AddPrefix(element, prefix): - """Add |prefix| to |element| or each subelement if element is iterable.""" - if element is None: - return element - # Note, not Iterable because we don't want to handle strings like that. - if isinstance(element, list) or isinstance(element, tuple): - return [prefix + e for e in element] - else: - return prefix + element - - -def _DoRemapping(element, map): - """If |element| then remap it through |map|. If |element| is iterable then - each item will be remapped. Any elements not found will be removed.""" - if map is not None and element is not None: - if not callable(map): - map = map.get # Assume it's a dict, otherwise a callable to do the remap. - if isinstance(element, list) or isinstance(element, tuple): - element = filter(None, [map(elem) for elem in element]) - else: - element = map(element) - return element - - -def _AppendOrReturn(append, element): - """If |append| is None, simply return |element|. If |append| is not None, - then add |element| to it, adding each item in |element| if it's a list or - tuple.""" - if append is not None and element is not None: - if isinstance(element, list) or isinstance(element, tuple): - append.extend(element) - else: - append.append(element) - else: - return element - - -def _FindDirectXInstallation(): - """Try to find an installation location for the DirectX SDK. Check for the - standard environment variable, and if that doesn't exist, try to find - via the registry. May return None if not found in either location.""" - # Return previously calculated value, if there is one - if hasattr(_FindDirectXInstallation, 'dxsdk_dir'): - return _FindDirectXInstallation.dxsdk_dir - - dxsdk_dir = os.environ.get('DXSDK_DIR') - if not dxsdk_dir: - # Setup params to pass to and attempt to launch reg.exe. - cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s'] - p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - for line in p.communicate()[0].splitlines(): - if 'InstallPath' in line: - dxsdk_dir = line.split(' ')[3] + "\\" - - # Cache return value - _FindDirectXInstallation.dxsdk_dir = dxsdk_dir - return dxsdk_dir - - -class MsvsSettings(object): - """A class that understands the gyp 'msvs_...' values (especially the - msvs_settings field). They largely correpond to the VS2008 IDE DOM. This - class helps map those settings to command line options.""" - - def __init__(self, spec, generator_flags): - self.spec = spec - self.vs_version = GetVSVersion(generator_flags) - self.dxsdk_dir = _FindDirectXInstallation() - - # Try to find an installation location for the Windows DDK by checking - # the WDK_DIR environment variable, may be None. 
- self.wdk_dir = os.environ.get('WDK_DIR') - - supported_fields = [ - ('msvs_configuration_attributes', dict), - ('msvs_settings', dict), - ('msvs_system_include_dirs', list), - ('msvs_disabled_warnings', list), - ('msvs_precompiled_header', str), - ('msvs_precompiled_source', str), - ('msvs_configuration_platform', str), - ('msvs_target_platform', str), - ] - configs = spec['configurations'] - for field, default in supported_fields: - setattr(self, field, {}) - for configname, config in configs.iteritems(): - getattr(self, field)[configname] = config.get(field, default()) - - self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.']) - - def GetVSMacroEnv(self, base_to_build=None, config=None): - """Get a dict of variables mapping internal VS macro names to their gyp - equivalents.""" - target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64' - replacements = { - '$(OutDir)\\': base_to_build + '\\' if base_to_build else '', - '$(IntDir)': '$!INTERMEDIATE_DIR', - '$(InputPath)': '${source}', - '$(InputName)': '${root}', - '$(ProjectName)': self.spec['target_name'], - '$(PlatformName)': target_platform, - '$(ProjectDir)\\': '', - } - # '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when - # Visual Studio is actually installed. - if self.vs_version.Path(): - replacements['$(VSInstallDir)'] = self.vs_version.Path() - replacements['$(VCInstallDir)'] = os.path.join(self.vs_version.Path(), - 'VC') + '\\' - # Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be - # set. This happens when the SDK is sync'd via src-internal, rather than - # by typical end-user installation of the SDK. If it's not set, we don't - # want to leave the unexpanded variable in the path, so simply strip it. - replacements['$(DXSDK_DIR)'] = self.dxsdk_dir if self.dxsdk_dir else '' - replacements['$(WDK_DIR)'] = self.wdk_dir if self.wdk_dir else '' - return replacements - - def ConvertVSMacros(self, s, base_to_build=None, config=None): - """Convert from VS macro names to something equivalent.""" - env = self.GetVSMacroEnv(base_to_build, config=config) - return ExpandMacros(s, env) - - def AdjustLibraries(self, libraries): - """Strip -l from library if it's specified with that.""" - return [lib[2:] if lib.startswith('-l') else lib for lib in libraries] - - def _GetAndMunge(self, field, path, default, prefix, append, map): - """Retrieve a value from |field| at |path| or return |default|. If - |append| is specified, and the item is found, it will be appended to that - object instead of returned. If |map| is specified, results will be - remapped through |map| before being returned or appended.""" - result = _GenericRetrieve(field, default, path) - result = _DoRemapping(result, map) - result = _AddPrefix(result, prefix) - return _AppendOrReturn(append, result) - - class _GetWrapper(object): - def __init__(self, parent, field, base_path, append=None): - self.parent = parent - self.field = field - self.base_path = [base_path] - self.append = append - def __call__(self, name, map=None, prefix='', default=None): - return self.parent._GetAndMunge(self.field, self.base_path + [name], - default=default, prefix=prefix, append=self.append, map=map) - - def GetArch(self, config): - """Get architecture based on msvs_configuration_platform and - msvs_target_platform. 
Returns either 'x86' or 'x64'.""" - configuration_platform = self.msvs_configuration_platform.get(config, '') - platform = self.msvs_target_platform.get(config, '') - if not platform: # If no specific override, use the configuration's. - platform = configuration_platform - # Map from platform to architecture. - return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86') - - def _TargetConfig(self, config): - """Returns the target-specific configuration.""" - # There's two levels of architecture/platform specification in VS. The - # first level is globally for the configuration (this is what we consider - # "the" config at the gyp level, which will be something like 'Debug' or - # 'Release_x64'), and a second target-specific configuration, which is an - # override for the global one. |config| is remapped here to take into - # account the local target-specific overrides to the global configuration. - arch = self.GetArch(config) - if arch == 'x64' and not config.endswith('_x64'): - config += '_x64' - if arch == 'x86' and config.endswith('_x64'): - config = config.rsplit('_', 1)[0] - return config - - def _Setting(self, path, config, - default=None, prefix='', append=None, map=None): - """_GetAndMunge for msvs_settings.""" - return self._GetAndMunge( - self.msvs_settings[config], path, default, prefix, append, map) - - def _ConfigAttrib(self, path, config, - default=None, prefix='', append=None, map=None): - """_GetAndMunge for msvs_configuration_attributes.""" - return self._GetAndMunge( - self.msvs_configuration_attributes[config], - path, default, prefix, append, map) - - def AdjustIncludeDirs(self, include_dirs, config): - """Updates include_dirs to expand VS specific paths, and adds the system - include dirs used for platform SDK and similar.""" - config = self._TargetConfig(config) - includes = include_dirs + self.msvs_system_include_dirs[config] - includes.extend(self._Setting( - ('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[])) - return [self.ConvertVSMacros(p, config=config) for p in includes] - - def GetComputedDefines(self, config): - """Returns the set of defines that are injected to the defines list based - on other VS settings.""" - config = self._TargetConfig(config) - defines = [] - if self._ConfigAttrib(['CharacterSet'], config) == '1': - defines.extend(('_UNICODE', 'UNICODE')) - if self._ConfigAttrib(['CharacterSet'], config) == '2': - defines.append('_MBCS') - defines.extend(self._Setting( - ('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[])) - return defines - - def GetCompilerPdbName(self, config, expand_special): - """Get the pdb file name that should be used for compiler invocations, or - None if there's no explicit name specified.""" - config = self._TargetConfig(config) - pdbname = self._Setting( - ('VCCLCompilerTool', 'ProgramDataBaseFileName'), config) - if pdbname: - pdbname = expand_special(self.ConvertVSMacros(pdbname)) - return pdbname - - def GetOutputName(self, config, expand_special): - """Gets the explicitly overridden output name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - type = self.spec['type'] - root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool' - # TODO(scottmg): Handle OutputDirectory without OutputFile. 
- output_file = self._Setting((root, 'OutputFile'), config) - if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) - return output_file - - def GetPDBName(self, config, expand_special): - """Gets the explicitly overridden pdb name for a target or returns None - if it's not overridden.""" - config = self._TargetConfig(config) - output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config) - if output_file: - output_file = expand_special(self.ConvertVSMacros( - output_file, config=config)) - return output_file - - def GetCflags(self, config): - """Returns the flags that need to be added to .c and .cc compilations.""" - config = self._TargetConfig(config) - cflags = [] - cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]]) - cl = self._GetWrapper(self, self.msvs_settings[config], - 'VCCLCompilerTool', append=cflags) - cl('Optimization', - map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O') - cl('InlineFunctionExpansion', prefix='/Ob') - cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy') - cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi') - cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O') - cl('WholeProgramOptimization', map={'true': '/GL'}) - cl('WarningLevel', prefix='/W') - cl('WarnAsError', map={'true': '/WX'}) - cl('DebugInformationFormat', - map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z') - cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'}) - cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'}) - cl('MinimalRebuild', map={'true': '/Gm'}) - cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'}) - cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC') - cl('RuntimeLibrary', - map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M') - cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH') - cl('DefaultCharIsUnsigned', map={'true': '/J'}) - cl('TreatWChar_tAsBuiltInType', - map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t') - cl('EnablePREfast', map={'true': '/analyze'}) - cl('AdditionalOptions', prefix='') - cflags.extend(['/FI' + f for f in self._Setting( - ('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])]) - # ninja handles parallelism by itself, don't have the compiler do it too. - cflags = filter(lambda x: not x.startswith('/MP'), cflags) - return cflags - - def GetPrecompiledHeader(self, config, gyp_to_build_path): - """Returns an object that handles the generation of precompiled header - build steps.""" - config = self._TargetConfig(config) - return _PchHelper(self, config, gyp_to_build_path) - - def _GetPchFlags(self, config, extension): - """Get the flags to be added to the cflags for precompiled header support. - """ - config = self._TargetConfig(config) - # The PCH is only built once by a particular source file. Usage of PCH must - # only be for the same language (i.e. C vs. C++), so only include the pch - # flags when the language matches. - if self.msvs_precompiled_header[config]: - source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1] - if _LanguageMatchesForPch(source_ext, extension): - pch = os.path.split(self.msvs_precompiled_header[config])[1] - return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' 
+ pch + '.pch'] - return [] - - def GetCflagsC(self, config): - """Returns the flags that need to be added to .c compilations.""" - config = self._TargetConfig(config) - return self._GetPchFlags(config, '.c') - - def GetCflagsCC(self, config): - """Returns the flags that need to be added to .cc compilations.""" - config = self._TargetConfig(config) - return ['/TP'] + self._GetPchFlags(config, '.cc') - - def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path): - """Get and normalize the list of paths in AdditionalLibraryDirectories - setting.""" - config = self._TargetConfig(config) - libpaths = self._Setting((root, 'AdditionalLibraryDirectories'), - config, default=[]) - libpaths = [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(p, config=config))) - for p in libpaths] - return ['/LIBPATH:"' + p + '"' for p in libpaths] - - def GetLibFlags(self, config, gyp_to_build_path): - """Returns the flags that need to be added to lib commands.""" - config = self._TargetConfig(config) - libflags = [] - lib = self._GetWrapper(self, self.msvs_settings[config], - 'VCLibrarianTool', append=libflags) - libflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLibrarianTool', config, gyp_to_build_path)) - lib('LinkTimeCodeGeneration', map={'true': '/LTCG'}) - lib('AdditionalOptions') - return libflags - - def _GetDefFileAsLdflags(self, spec, ldflags, gyp_to_build_path): - """.def files get implicitly converted to a ModuleDefinitionFile for the - linker in the VS generator. Emulate that behaviour here.""" - def_file = '' - if spec['type'] in ('shared_library', 'loadable_module', 'executable'): - def_files = [s for s in spec.get('sources', []) if s.endswith('.def')] - if len(def_files) == 1: - ldflags.append('/DEF:"%s"' % gyp_to_build_path(def_files[0])) - elif len(def_files) > 1: - raise Exception("Multiple .def files") - - def GetLdflags(self, config, gyp_to_build_path, expand_special, - manifest_base_name, is_executable): - """Returns the flags that need to be added to link commands, and the - manifest files.""" - config = self._TargetConfig(config) - ldflags = [] - ld = self._GetWrapper(self, self.msvs_settings[config], - 'VCLinkerTool', append=ldflags) - self._GetDefFileAsLdflags(self.spec, ldflags, gyp_to_build_path) - ld('GenerateDebugInformation', map={'true': '/DEBUG'}) - ld('TargetMachine', map={'1': 'X86', '17': 'X64'}, prefix='/MACHINE:') - ldflags.extend(self._GetAdditionalLibraryDirectories( - 'VCLinkerTool', config, gyp_to_build_path)) - ld('DelayLoadDLLs', prefix='/DELAYLOAD:') - out = self.GetOutputName(config, expand_special) - if out: - ldflags.append('/OUT:' + out) - pdb = self.GetPDBName(config, expand_special) - if pdb: - ldflags.append('/PDB:' + pdb) - ld('AdditionalOptions', prefix='') - ld('SubSystem', map={'1': 'CONSOLE', '2': 'WINDOWS'}, prefix='/SUBSYSTEM:') - ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE') - ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL') - ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED') - ld('RandomizedBaseAddress', - map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE') - ld('DataExecutionPrevention', - map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT') - ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:') - ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:') - ld('LinkTimeCodeGeneration', map={'1': '/LTCG'}) - ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:') - ld('ResourceOnlyDLL', map={'true': '/NOENTRY'}) - 
ld('EntryPointSymbol', prefix='/ENTRY:') - ld('Profile', map={'true': '/PROFILE'}) - ld('LargeAddressAware', - map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE') - # TODO(scottmg): This should sort of be somewhere else (not really a flag). - ld('AdditionalDependencies', prefix='') - - # If the base address is not specifically controlled, DYNAMICBASE should - # be on by default. - base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED', - ldflags) - if not base_flags: - ldflags.append('/DYNAMICBASE') - - # If the NXCOMPAT flag has not been specified, default to on. Despite the - # documentation that says this only defaults to on when the subsystem is - # Vista or greater (which applies to the linker), the IDE defaults it on - # unless it's explicitly off. - if not filter(lambda x: 'NXCOMPAT' in x, ldflags): - ldflags.append('/NXCOMPAT') - - have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags) - manifest_flags, intermediate_manifest_file = self._GetLdManifestFlags( - config, manifest_base_name, is_executable and not have_def_file) - ldflags.extend(manifest_flags) - manifest_files = self._GetAdditionalManifestFiles(config, gyp_to_build_path) - manifest_files.append(intermediate_manifest_file) - - return ldflags, manifest_files - - def _GetLdManifestFlags(self, config, name, allow_isolation): - """Returns the set of flags that need to be added to the link to generate - a default manifest, as well as the name of the generated file.""" - # Add manifest flags that mirror the defaults in VS. Chromium dev builds - # do not currently use any non-default settings, but we could parse - # VCManifestTool blocks if Chromium or other projects need them in the - # future. Of particular note, we do not yet support EmbedManifest because - # it complicates incremental linking. - output_name = name + '.intermediate.manifest' - flags = [ - '/MANIFEST', - '/ManifestFile:' + output_name, - '''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"''' - ] - if allow_isolation: - flags.append('/ALLOWISOLATION') - return flags, output_name - - def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): - """Gets additional manifest files that are added to the default one - generated by the linker.""" - files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config, - default=[]) - if (self._Setting( - ('VCManifestTool', 'EmbedManifest'), config, default='') == 'true'): - print 'gyp/msvs_emulation.py: "EmbedManifest: true" not yet supported.' 
- if isinstance(files, str): - files = files.split(';') - return [os.path.normpath( - gyp_to_build_path(self.ConvertVSMacros(f, config=config))) - for f in files] - - def IsUseLibraryDependencyInputs(self, config): - """Returns whether the target should be linked via Use Library Dependency - Inputs (using component .objs of a given .lib).""" - config = self._TargetConfig(config) - uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config) - return uldi == 'true' - - def GetRcflags(self, config, gyp_to_ninja_path): - """Returns the flags that need to be added to invocations of the resource - compiler.""" - config = self._TargetConfig(config) - rcflags = [] - rc = self._GetWrapper(self, self.msvs_settings[config], - 'VCResourceCompilerTool', append=rcflags) - rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I') - rcflags.append('/I' + gyp_to_ninja_path('.')) - rc('PreprocessorDefinitions', prefix='/d') - # /l arg must be in hex without leading '0x' - rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:]) - return rcflags - - def BuildCygwinBashCommandLine(self, args, path_to_base): - """Build a command line that runs args via cygwin bash. We assume that all - incoming paths are in Windows normpath'd form, so they need to be - converted to posix style for the part of the command line that's passed to - bash. We also have to do some Visual Studio macro emulation here because - various rules use magic VS names for things. Also note that rules that - contain ninja variables cannot be fixed here (for example ${source}), so - the outer generator needs to make sure that the paths that are written out - are in posix style, if the command line will be used here.""" - cygwin_dir = os.path.normpath( - os.path.join(path_to_base, self.msvs_cygwin_dirs[0])) - cd = ('cd %s' % path_to_base).replace('\\', '/') - args = [a.replace('\\', '/').replace('"', '\\"') for a in args] - args = ["'%s'" % a.replace("'", "'\\''") for a in args] - bash_cmd = ' '.join(args) - cmd = ( - 'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir + - 'bash -c "%s ; %s"' % (cd, bash_cmd)) - return cmd - - def IsRuleRunUnderCygwin(self, rule): - """Determine if an action should be run under cygwin. If the variable is - unset, or set to 1 we use cygwin.""" - return int(rule.get('msvs_cygwin_shell', - self.spec.get('msvs_cygwin_shell', 1))) != 0 - - def _HasExplicitRuleForExtension(self, spec, extension): - """Determine if there's an explicit rule for a particular extension.""" - for rule in spec.get('rules', []): - if rule['extension'] == extension: - return True - return False - - def HasExplicitIdlRules(self, spec): - """Determine if there's an explicit rule for idl files. When there isn't we - need to generate implicit rules to build MIDL .idl files.""" - return self._HasExplicitRuleForExtension(spec, 'idl') - - def HasExplicitAsmRules(self, spec): - """Determine if there's an explicit rule for asm files. When there isn't we - need to generate implicit rules to assemble .asm files.""" - return self._HasExplicitRuleForExtension(spec, 'asm') - - def GetIdlBuildData(self, source, config): - """Determine the implicit outputs for an idl file. 
Returns output - directory, outputs, and variables and flags that are required.""" - config = self._TargetConfig(config) - midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool') - def midl(name, default=None): - return self.ConvertVSMacros(midl_get(name, default=default), - config=config) - tlb = midl('TypeLibraryName', default='${root}.tlb') - header = midl('HeaderFileName', default='${root}.h') - dlldata = midl('DLLDataFileName', default='dlldata.c') - iid = midl('InterfaceIdentifierFileName', default='${root}_i.c') - proxy = midl('ProxyFileName', default='${root}_p.c') - # Note that .tlb is not included in the outputs as it is not always - # generated depending on the content of the input idl file. - outdir = midl('OutputDirectory', default='') - output = [header, dlldata, iid, proxy] - variables = [('tlb', tlb), - ('h', header), - ('dlldata', dlldata), - ('iid', iid), - ('proxy', proxy)] - # TODO(scottmg): Are there configuration settings to set these flags? - target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64' - flags = ['/char', 'signed', '/env', target_platform, '/Oicf'] - return outdir, output, variables, flags - - -def _LanguageMatchesForPch(source_ext, pch_source_ext): - c_exts = ('.c',) - cc_exts = ('.cc', '.cxx', '.cpp') - return ((source_ext in c_exts and pch_source_ext in c_exts) or - (source_ext in cc_exts and pch_source_ext in cc_exts)) - - -class PrecompiledHeader(object): - """Helper to generate dependencies and build rules to handle generation of - precompiled headers. Interface matches the GCH handler in xcode_emulation.py. - """ - def __init__( - self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext): - self.settings = settings - self.config = config - pch_source = self.settings.msvs_precompiled_source[self.config] - self.pch_source = gyp_to_build_path(pch_source) - filename, _ = os.path.splitext(pch_source) - self.output_obj = gyp_to_unique_output(filename + obj_ext).lower() - - def _PchHeader(self): - """Get the header that will appear in an #include line for all source - files.""" - return os.path.split(self.settings.msvs_precompiled_header[self.config])[1] - - def GetObjDependencies(self, sources, objs): - """Given a list of sources files and the corresponding object files, - returns a list of the pch files that should be depended upon. 
The - additional wrapping in the return value is for interface compatability - with make.py on Mac, and xcode_emulation.py.""" - if not self._PchHeader(): - return [] - pch_ext = os.path.splitext(self.pch_source)[1] - for source in sources: - if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext): - return [(None, None, self.output_obj)] - return [] - - def GetPchBuildCommands(self): - """Not used on Windows as there are no additional build steps required - (instead, existing steps are modified in GetFlagsModifications below).""" - return [] - - def GetFlagsModifications(self, input, output, implicit, command, - cflags_c, cflags_cc, expand_special): - """Get the modified cflags and implicit dependencies that should be used - for the pch compilation step.""" - if input == self.pch_source: - pch_output = ['/Yc' + self._PchHeader()] - if command == 'cxx': - return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))], - self.output_obj, []) - elif command == 'cc': - return ([('cflags_c', map(expand_special, cflags_c + pch_output))], - self.output_obj, []) - return [], output, implicit - - -vs_version = None -def GetVSVersion(generator_flags): - global vs_version - if not vs_version: - vs_version = gyp.MSVSVersion.SelectVisualStudioVersion( - generator_flags.get('msvs_version', 'auto')) - return vs_version - -def _GetVsvarsSetupArgs(generator_flags, arch): - vs = GetVSVersion(generator_flags) - return vs.SetupScript() - -def ExpandMacros(string, expansions): - """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv - for the canonical way to retrieve a suitable dict.""" - if '$' in string: - for old, new in expansions.iteritems(): - assert '$(' not in new, new - string = string.replace(old, new) - return string - -def _ExtractImportantEnvironment(output_of_set): - """Extracts environment variables required for the toolchain to run from - a textual dump output by the cmd.exe 'set' command.""" - envvars_to_save = ( - 'goma_.*', # TODO(scottmg): This is ugly, but needed for goma. - 'include', - 'lib', - 'libpath', - 'path', - 'pathext', - 'systemroot', - 'temp', - 'tmp', - ) - env = {} - for line in output_of_set.splitlines(): - for envvar in envvars_to_save: - if re.match(envvar + '=', line.lower()): - var, setting = line.split('=', 1) - if envvar == 'path': - # Our own rules (for running gyp-win-tool) and other actions in - # Chromium rely on python being in the path. Add the path to this - # python here so that if it's not in the path when ninja is run - # later, python will still be found. - setting = os.path.dirname(sys.executable) + os.pathsep + setting - env[var.upper()] = setting - break - for required in ('SYSTEMROOT', 'TEMP', 'TMP'): - if required not in env: - raise Exception('Environment variable "%s" ' - 'required to be set to valid path' % required) - return env - -def _FormatAsEnvironmentBlock(envvar_dict): - """Format as an 'environment block' directly suitable for CreateProcess. - Briefly this is a list of key=value\0, terminated by an additional \0. See - CreateProcess documentation for more details.""" - block = '' - nul = '\0' - for key, value in envvar_dict.iteritems(): - block += key + '=' + value + nul - block += nul - return block - -def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags, open_out): - """It's not sufficient to have the absolute path to the compiler, linker, - etc. on Windows, as those tools rely on .dlls being in the PATH. 
We also - need to support both x86 and x64 compilers within the same build (to support - msvs_target_platform hackery). Different architectures require a different - compiler binary, and different supporting environment variables (INCLUDE, - LIB, LIBPATH). So, we extract the environment here, wrap all invocations - of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which - sets up the environment, and then we do not prefix the compiler with - an absolute path, instead preferring something like "cl.exe" in the rule - which will then run whichever the environment setup has put in the path. - When the following procedure to generate environment files does not - meet your requirement (e.g. for custom toolchains), you can pass - "-G ninja_use_custom_environment_files" to the gyp to suppress file - generation and use custom environment files prepared by yourself.""" - if generator_flags.get('ninja_use_custom_environment_files', 0): - return - vs = GetVSVersion(generator_flags) - for arch in ('x86', 'x64'): - args = vs.SetupScript(arch) - args.extend(('&&', 'set')) - popen = subprocess.Popen( - args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - variables, _ = popen.communicate() - env = _ExtractImportantEnvironment(variables) - env_block = _FormatAsEnvironmentBlock(env) - f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb') - f.write(env_block) - f.close() - -def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja): - """Emulate behavior of msvs_error_on_missing_sources present in the msvs - generator: Check that all regular source files, i.e. not created at run time, - exist on disk. Missing files cause needless recompilation when building via - VS, and we want this check to match for people/bots that build using ninja, - so they're not surprised when the VS build fails.""" - if int(generator_flags.get('msvs_error_on_missing_sources', 0)): - no_specials = filter(lambda x: '$' not in x, sources) - relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials] - missing = filter(lambda x: not os.path.exists(x), relative) - if missing: - # They'll look like out\Release\..\..\stuff\things.cc, so normalize the - # path for a slightly less crazy looking output. - cleaned_up = [os.path.normpath(x) for x in missing] - raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up)) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/ninja_syntax.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/ninja_syntax.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/ninja_syntax.py 2012-09-10 08:15:04.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/ninja_syntax.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,152 +0,0 @@ -# This file comes from -# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py -# Do not edit! Edit the upstream one instead. - -"""Python module for generating .ninja files. - -Note that this is emphatically not a required piece of Ninja; it's -just a helpful utility for build-file-generation systems that already -use Python. 
-""" - -import textwrap -import re - -def escape_path(word): - return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:') - -class Writer(object): - def __init__(self, output, width=78): - self.output = output - self.width = width - - def newline(self): - self.output.write('\n') - - def comment(self, text): - for line in textwrap.wrap(text, self.width - 2): - self.output.write('# ' + line + '\n') - - def variable(self, key, value, indent=0): - if value is None: - return - if isinstance(value, list): - value = ' '.join(filter(None, value)) # Filter out empty strings. - self._line('%s = %s' % (key, value), indent) - - def rule(self, name, command, description=None, depfile=None, - generator=False, restat=False, rspfile=None, rspfile_content=None): - self._line('rule %s' % name) - self.variable('command', command, indent=1) - if description: - self.variable('description', description, indent=1) - if depfile: - self.variable('depfile', depfile, indent=1) - if generator: - self.variable('generator', '1', indent=1) - if restat: - self.variable('restat', '1', indent=1) - if rspfile: - self.variable('rspfile', rspfile, indent=1) - if rspfile_content: - self.variable('rspfile_content', rspfile_content, indent=1) - - def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, - variables=None): - outputs = self._as_list(outputs) - all_inputs = self._as_list(inputs)[:] - out_outputs = list(map(escape_path, outputs)) - all_inputs = list(map(escape_path, all_inputs)) - - if implicit: - implicit = map(escape_path, self._as_list(implicit)) - all_inputs.append('|') - all_inputs.extend(implicit) - if order_only: - order_only = map(escape_path, self._as_list(order_only)) - all_inputs.append('||') - all_inputs.extend(order_only) - - self._line('build %s: %s %s' % (' '.join(out_outputs), - rule, - ' '.join(all_inputs))) - - if variables: - if isinstance(variables, dict): - iterator = variables.iteritems() - else: - iterator = iter(variables) - - for key, val in iterator: - self.variable(key, val, indent=1) - - return outputs - - def include(self, path): - self._line('include %s' % path) - - def subninja(self, path): - self._line('subninja %s' % path) - - def default(self, paths): - self._line('default %s' % ' '.join(self._as_list(paths))) - - def _count_dollars_before_index(self, s, i): - """Returns the number of '$' characters right in front of s[i].""" - dollar_count = 0 - dollar_index = i - 1 - while dollar_index > 0 and s[dollar_index] == '$': - dollar_count += 1 - dollar_index -= 1 - return dollar_count - - def _line(self, text, indent=0): - """Write 'text' word-wrapped at self.width characters.""" - leading_space = ' ' * indent - while len(leading_space) + len(text) > self.width: - # The text is too wide; wrap if possible. - - # Find the rightmost space that would obey our width constraint and - # that's not an escaped space. - available_space = self.width - len(leading_space) - len(' $') - space = available_space - while True: - space = text.rfind(' ', 0, space) - if space < 0 or \ - self._count_dollars_before_index(text, space) % 2 == 0: - break - - if space < 0: - # No such space; just use the first unescaped space we can find. - space = available_space - 1 - while True: - space = text.find(' ', space + 1) - if space < 0 or \ - self._count_dollars_before_index(text, space) % 2 == 0: - break - if space < 0: - # Give up on breaking. 
- break - - self.output.write(leading_space + text[0:space] + ' $\n') - text = text[space+1:] - - # Subsequent lines are continuations, so indent them. - leading_space = ' ' * (indent+2) - - self.output.write(leading_space + text + '\n') - - def _as_list(self, input): - if input is None: - return [] - if isinstance(input, list): - return input - return [input] - - -def escape(string): - """Escape a string such that it can be embedded into a Ninja file without - further interpretation.""" - assert '\n' not in string, 'Ninja syntax does not allow newlines' - # We only have one special metacharacter: '$'. - return string.replace('$', '$$') diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/sun_tool.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/sun_tool.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/sun_tool.py 2011-11-28 16:07:19.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/sun_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,51 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""These functions are executed via gyp-sun-tool when using the Makefile -generator.""" - -import fcntl -import os -import struct -import subprocess -import sys - - -def main(args): - executor = SunTool() - executor.Dispatch(args) - - -class SunTool(object): - """This class performs all the SunOS tooling steps. The methods can either be - executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like copy-info-plist to CopyInfoPlist""" - return name_string.title().replace('-', '') - - def ExecFlock(self, lockfile, *cmd_list): - """Emulates the most basic behavior of Linux's flock(1).""" - # Rely on exception handling to report errors. - # Note that the stock python on SunOS has a bug - # where fcntl.flock(fd, LOCK_EX) always fails - # with EBADF, that's why we use this F_SETLK - # hack instead. - fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666) - op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0) - fcntl.fcntl(fd, fcntl.F_SETLK, op) - return subprocess.call(cmd_list) - - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/win_tool.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/win_tool.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/win_tool.py 2012-09-18 21:52:51.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/win_tool.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,193 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Utility functions for Windows builds. - -These functions are executed via gyp-win-tool when using the ninja generator. 
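
The ninja_syntax module removed above is driven through its Writer class. A Python 2 sketch of that use, assuming third_party/gyp/pylib is on PYTHONPATH (matching the pre-patch tree); the rule name and file names are invented for illustration only:

import sys
from gyp import ninja_syntax

# Write a tiny .ninja fragment to stdout using the removed Writer class.
w = ninja_syntax.Writer(sys.stdout)
w.comment('Generated for illustration only.')
w.rule('cxx', command='g++ -MMD -MF $out.d -c $in -o $out',
       description='CXX $out', depfile='$out.d')
w.build('hello.o', 'cxx', inputs='hello.cc')
w.default('hello.o')
# Emits:
#   # Generated for illustration only.
#   rule cxx
#     command = g++ -MMD -MF $out.d -c $in -o $out
#     description = CXX $out
#     depfile = $out.d
#   build hello.o: cxx hello.cc
#   default hello.o
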
-""" - -from ctypes import windll, wintypes -import os -import shutil -import subprocess -import sys - -BASE_DIR = os.path.dirname(os.path.abspath(__file__)) - - -def main(args): - executor = WinTool() - exit_code = executor.Dispatch(args) - if exit_code is not None: - sys.exit(exit_code) - - -class LinkLock(object): - """A flock-style lock to limit the number of concurrent links to one. - - Uses a session-local mutex based on the file's directory. - """ - def __enter__(self): - name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_') - self.mutex = windll.kernel32.CreateMutexW( - wintypes.c_int(0), - wintypes.c_int(0), - wintypes.create_unicode_buffer(name)) - assert self.mutex - result = windll.kernel32.WaitForSingleObject( - self.mutex, wintypes.c_int(0xFFFFFFFF)) - # 0x80 means another process was killed without releasing the mutex, but - # that this process has been given ownership. This is fine for our - # purposes. - assert result in (0, 0x80), ( - "%s, %s" % (result, windll.kernel32.GetLastError())) - - def __exit__(self, type, value, traceback): - windll.kernel32.ReleaseMutex(self.mutex) - windll.kernel32.CloseHandle(self.mutex) - - -class WinTool(object): - """This class performs all the Windows tooling steps. The methods can either - be executed directly, or dispatched from an argument list.""" - - def Dispatch(self, args): - """Dispatches a string command to a method.""" - if len(args) < 1: - raise Exception("Not enough arguments") - - method = "Exec%s" % self._CommandifyName(args[0]) - return getattr(self, method)(*args[1:]) - - def _CommandifyName(self, name_string): - """Transforms a tool name like recursive-mirror to RecursiveMirror.""" - return name_string.title().replace('-', '') - - def _GetEnv(self, arch): - """Gets the saved environment from a file for a given architecture.""" - # The environment is saved as an "environment block" (see CreateProcess - # and msvs_emulation for details). We convert to a dict here. - # Drop last 2 NULs, one for list terminator, one for trailing vs. separator. - pairs = open(arch).read()[:-2].split('\0') - kvs = [item.split('=', 1) for item in pairs] - return dict(kvs) - - def ExecStamp(self, path): - """Simple stamp command.""" - open(path, 'w').close() - - def ExecRecursiveMirror(self, source, dest): - """Emulation of rm -rf out && cp -af in out.""" - if os.path.exists(dest): - if os.path.isdir(dest): - shutil.rmtree(dest) - else: - os.unlink(dest) - if os.path.isdir(source): - shutil.copytree(source, dest) - else: - shutil.copy2(source, dest) - - def ExecLinkWrapper(self, arch, *args): - """Filter diagnostic output from link that looks like: - ' Creating library ui.dll.lib and object ui.dll.exp' - This happens when there are exports from the dll or exe. - """ - with LinkLock(): - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if not line.startswith(' Creating library '): - print line - return popen.returncode - - def ExecManifestWrapper(self, arch, *args): - """Run manifest tool with environment set. 
Strip out undesirable warning - (some XML blocks are recognized by the OS loader, but not the manifest - tool).""" - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if line and 'manifest authoring warning 81010002' not in line: - print line - return popen.returncode - - def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, - *flags): - """Filter noisy filenames output from MIDL compile step that isn't - quietable via command line flags. - """ - args = ['midl', '/nologo'] + list(flags) + [ - '/out', outdir, - '/tlb', tlb, - '/h', h, - '/dlldata', dlldata, - '/iid', iid, - '/proxy', proxy, - idl] - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - # Filter junk out of stdout, and write filtered versions. Output we want - # to filter is pairs of lines that look like this: - # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl - # objidl.idl - lines = out.splitlines() - prefix = 'Processing ' - processing = set(os.path.basename(x) for x in lines if x.startswith(prefix)) - for line in lines: - if not line.startswith(prefix) and line not in processing: - print line - return popen.returncode - - def ExecAsmWrapper(self, arch, *args): - """Filter logo banner from invocations of asm.exe.""" - env = self._GetEnv(arch) - # MSVS doesn't assemble x64 asm files. - if arch == 'environment.x64': - return 0 - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if (not line.startswith('Copyright (C) Microsoft Corporation') and - not line.startswith('Microsoft (R) Macro Assembler') and - not line.startswith(' Assembling: ') and - line): - print line - return popen.returncode - - def ExecRcWrapper(self, arch, *args): - """Filter logo banner from invocations of rc.exe. Older versions of RC - don't support the /nologo flag.""" - env = self._GetEnv(arch) - popen = subprocess.Popen(args, shell=True, env=env, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - out, _ = popen.communicate() - for line in out.splitlines(): - if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and - not line.startswith('Copyright (C) Microsoft Corporation') and - line): - print line - return popen.returncode - - def ExecActionWrapper(self, arch, rspfile, *dir): - """Runs an action command line from a response file using the environment - for |arch|. If |dir| is supplied, use that as the working directory.""" - env = self._GetEnv(arch) - args = open(rspfile).read() - dir = dir[0] if dir else None - popen = subprocess.Popen(args, shell=True, env=env, cwd=dir) - popen.wait() - return popen.returncode - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xcode_emulation.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xcode_emulation.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xcode_emulation.py 2012-08-29 03:31:35.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xcode_emulation.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,1065 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. 
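
Both the producer and the consumer of the environment files described earlier disappear with this patch: msvs_emulation writes environment.x86/environment.x64 as NUL-separated key=value pairs with a trailing NUL terminator, and gyp-win-tool reads them back into a dict before spawning the toolchain. A minimal self-contained sketch of that round trip, with made-up variable values and hypothetical helper names (format_environment_block and parse_environment_block are not gyp functions):

import os
import tempfile

def format_environment_block(env):
    # key=value\0 for each entry, plus one extra \0 as the block terminator,
    # mirroring the format written for CreateProcess.
    block = ''
    for key, value in env.items():
        block += key + '=' + value + '\0'
    block += '\0'
    return block

def parse_environment_block(path):
    # Mirror of the reader: drop the two trailing NULs, split on NUL, and
    # split each entry at the first '='.
    data = open(path, 'rb').read().decode('ascii')
    pairs = data[:-2].split('\0')
    return dict(item.split('=', 1) for item in pairs)

env = {'SYSTEMROOT': r'C:\Windows', 'TEMP': r'C:\Temp', 'TMP': r'C:\Temp'}
path = os.path.join(tempfile.mkdtemp(), 'environment.x86')
with open(path, 'wb') as f:
    f.write(format_environment_block(env).encode('ascii'))
print(parse_environment_block(path) == env)  # True
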
- -""" -This module contains classes that help to emulate xcodebuild behavior on top of -other build systems, such as make and ninja. -""" - -import gyp.common -import os.path -import re -import shlex - -class XcodeSettings(object): - """A class that understands the gyp 'xcode_settings' object.""" - - # Computed lazily by _GetSdkBaseDir(). Shared by all XcodeSettings, so cached - # at class-level for efficiency. - _sdk_base_dir = None - - def __init__(self, spec): - self.spec = spec - - # Per-target 'xcode_settings' are pushed down into configs earlier by gyp. - # This means self.xcode_settings[config] always contains all settings - # for that config -- the per-target settings as well. Settings that are - # the same for all configs are implicitly per-target settings. - self.xcode_settings = {} - configs = spec['configurations'] - for configname, config in configs.iteritems(): - self.xcode_settings[configname] = config.get('xcode_settings', {}) - - # This is only non-None temporarily during the execution of some methods. - self.configname = None - - # Used by _AdjustLibrary to match .a and .dylib entries in libraries. - self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$') - - def _Settings(self): - assert self.configname - return self.xcode_settings[self.configname] - - def _Test(self, test_key, cond_key, default): - return self._Settings().get(test_key, default) == cond_key - - def _Appendf(self, lst, test_key, format_str, default=None): - if test_key in self._Settings(): - lst.append(format_str % str(self._Settings()[test_key])) - elif default: - lst.append(format_str % str(default)) - - def _WarnUnimplemented(self, test_key): - if test_key in self._Settings(): - print 'Warning: Ignoring not yet implemented key "%s".' % test_key - - def _IsBundle(self): - return int(self.spec.get('mac_bundle', 0)) != 0 - - def GetFrameworkVersion(self): - """Returns the framework version of the current target. Only valid for - bundles.""" - assert self._IsBundle() - return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A') - - def GetWrapperExtension(self): - """Returns the bundle extension (.app, .framework, .plugin, etc). Only - valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('loadable_module', 'shared_library'): - default_wrapper_extension = { - 'loadable_module': 'bundle', - 'shared_library': 'framework', - }[self.spec['type']] - wrapper_extension = self.GetPerTargetSetting( - 'WRAPPER_EXTENSION', default=default_wrapper_extension) - return '.' + self.spec.get('product_extension', wrapper_extension) - elif self.spec['type'] == 'executable': - return '.app' - else: - assert False, "Don't know extension for '%s', target '%s'" % ( - self.spec['type'], self.spec['target_name']) - - def GetProductName(self): - """Returns PRODUCT_NAME.""" - return self.spec.get('product_name', self.spec['target_name']) - - def GetFullProductName(self): - """Returns FULL_PRODUCT_NAME.""" - if self._IsBundle(): - return self.GetWrapperName() - else: - return self._GetStandaloneBinaryPath() - - def GetWrapperName(self): - """Returns the directory name of the bundle represented by this target. - Only valid for bundles.""" - assert self._IsBundle() - return self.GetProductName() + self.GetWrapperExtension() - - def GetBundleContentsFolderPath(self): - """Returns the qualified path to the bundle's contents folder. E.g. - Chromium.app/Contents or Foo.bundle/Versions/A. 
Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] == 'shared_library': - return os.path.join( - self.GetWrapperName(), 'Versions', self.GetFrameworkVersion()) - else: - # loadable_modules have a 'Contents' folder like executables. - return os.path.join(self.GetWrapperName(), 'Contents') - - def GetBundleResourceFolder(self): - """Returns the qualified path to the bundle's resource folder. E.g. - Chromium.app/Contents/Resources. Only valid for bundles.""" - assert self._IsBundle() - return os.path.join(self.GetBundleContentsFolderPath(), 'Resources') - - def GetBundlePlistPath(self): - """Returns the qualified path to the bundle's plist file. E.g. - Chromium.app/Contents/Info.plist. Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('executable', 'loadable_module'): - return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist') - else: - return os.path.join(self.GetBundleContentsFolderPath(), - 'Resources', 'Info.plist') - - def GetProductType(self): - """Returns the PRODUCT_TYPE of this target.""" - if self._IsBundle(): - return { - 'executable': 'com.apple.product-type.application', - 'loadable_module': 'com.apple.product-type.bundle', - 'shared_library': 'com.apple.product-type.framework', - }[self.spec['type']] - else: - return { - 'executable': 'com.apple.product-type.tool', - 'loadable_module': 'com.apple.product-type.library.dynamic', - 'shared_library': 'com.apple.product-type.library.dynamic', - 'static_library': 'com.apple.product-type.library.static', - }[self.spec['type']] - - def GetMachOType(self): - """Returns the MACH_O_TYPE of this target.""" - # Weird, but matches Xcode. - if not self._IsBundle() and self.spec['type'] == 'executable': - return '' - return { - 'executable': 'mh_execute', - 'static_library': 'staticlib', - 'shared_library': 'mh_dylib', - 'loadable_module': 'mh_bundle', - }[self.spec['type']] - - def _GetBundleBinaryPath(self): - """Returns the name of the bundle binary of by this target. - E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles.""" - assert self._IsBundle() - if self.spec['type'] in ('shared_library'): - path = self.GetBundleContentsFolderPath() - elif self.spec['type'] in ('executable', 'loadable_module'): - path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS') - return os.path.join(path, self.GetExecutableName()) - - def _GetStandaloneExecutableSuffix(self): - if 'product_extension' in self.spec: - return '.' + self.spec['product_extension'] - return { - 'executable': '', - 'static_library': '.a', - 'shared_library': '.dylib', - 'loadable_module': '.so', - }[self.spec['type']] - - def _GetStandaloneExecutablePrefix(self): - return self.spec.get('product_prefix', { - 'executable': '', - 'static_library': 'lib', - 'shared_library': 'lib', - # Non-bundled loadable_modules are called foo.so for some reason - # (that is, .so and no prefix) with the xcode build -- match that. - 'loadable_module': '', - }[self.spec['type']]) - - def _GetStandaloneBinaryPath(self): - """Returns the name of the non-bundle binary represented by this target. - E.g. hello_world. 
Only valid for non-bundles.""" - assert not self._IsBundle() - assert self.spec['type'] in ( - 'executable', 'shared_library', 'static_library', 'loadable_module'), ( - 'Unexpected type %s' % self.spec['type']) - target = self.spec['target_name'] - if self.spec['type'] == 'static_library': - if target[:3] == 'lib': - target = target[3:] - elif self.spec['type'] in ('loadable_module', 'shared_library'): - if target[:3] == 'lib': - target = target[3:] - - target_prefix = self._GetStandaloneExecutablePrefix() - target = self.spec.get('product_name', target) - target_ext = self._GetStandaloneExecutableSuffix() - return target_prefix + target + target_ext - - def GetExecutableName(self): - """Returns the executable name of the bundle represented by this target. - E.g. Chromium.""" - if self._IsBundle(): - return self.spec.get('product_name', self.spec['target_name']) - else: - return self._GetStandaloneBinaryPath() - - def GetExecutablePath(self): - """Returns the directory name of the bundle represented by this target. E.g. - Chromium.app/Contents/MacOS/Chromium.""" - if self._IsBundle(): - return self._GetBundleBinaryPath() - else: - return self._GetStandaloneBinaryPath() - - def _GetSdkBaseDir(self): - """Returns the root of the 'Developer' directory. On Xcode 4.2 and prior, - this is usually just /Developer. Xcode 4.3 moved that folder into the Xcode - bundle.""" - if not XcodeSettings._sdk_base_dir: - import subprocess - job = subprocess.Popen(['xcode-select', '-print-path'], - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT) - out, err = job.communicate() - if job.returncode != 0: - print out - raise Exception('Error %d running xcode-select' % job.returncode) - # The Developer folder moved in Xcode 4.3. - xcode43_sdk_path = os.path.join( - out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs') - if os.path.isdir(xcode43_sdk_path): - XcodeSettings._sdk_base_dir = xcode43_sdk_path - else: - XcodeSettings._sdk_base_dir = os.path.join(out.rstrip(), 'SDKs') - return XcodeSettings._sdk_base_dir - - def _SdkPath(self): - sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx10.5') - if sdk_root.startswith('macosx'): - return os.path.join(self._GetSdkBaseDir(), - 'MacOSX' + sdk_root[len('macosx'):] + '.sdk') - return sdk_root - - def GetCflags(self, configname): - """Returns flags that need to be added to .c, .cc, .m, and .mm - compilations.""" - # This functions (and the similar ones below) do not offer complete - # emulation of all xcode_settings keys. They're implemented on demand. - - self.configname = configname - cflags = [] - - sdk_root = self._SdkPath() - if 'SDKROOT' in self._Settings(): - cflags.append('-isysroot %s' % sdk_root) - - if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'): - cflags.append('-funsigned-char') - - if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'): - cflags.append('-fasm-blocks') - - if 'GCC_DYNAMIC_NO_PIC' in self._Settings(): - if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES': - cflags.append('-mdynamic-no-pic') - else: - pass - # TODO: In this case, it depends on the target. 
xcode passes - # mdynamic-no-pic by default for executable and possibly static lib - # according to mento - - if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'): - cflags.append('-mpascal-strings') - - self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s') - - if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'): - dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf') - if dbg_format == 'dwarf': - cflags.append('-gdwarf-2') - elif dbg_format == 'stabs': - raise NotImplementedError('stabs debug format is not supported yet.') - elif dbg_format == 'dwarf-with-dsym': - cflags.append('-gdwarf-2') - else: - raise NotImplementedError('Unknown debug format %s' % dbg_format) - - if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'): - cflags.append('-fvisibility=hidden') - - if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'): - cflags.append('-Werror') - - if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'): - cflags.append('-Wnewline-eof') - - self._Appendf(cflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') - - # TODO: - if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'): - self._WarnUnimplemented('COPY_PHASE_STRIP') - self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS') - self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS') - - # TODO: This is exported correctly, but assigning to it is not supported. - self._WarnUnimplemented('MACH_O_TYPE') - self._WarnUnimplemented('PRODUCT_TYPE') - - archs = self._Settings().get('ARCHS', ['i386']) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented('ARCHS') - archs = ['i386'] - cflags.append('-arch ' + archs[0]) - - if archs[0] in ('i386', 'x86_64'): - if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse3') - if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES', - default='NO'): - cflags.append('-mssse3') # Note 3rd 's'. 
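
GetCflags, shown here being removed, turns xcode_settings keys into compiler flags one key at a time. A heavily simplified, self-contained sketch of that mapping for a few representative keys; this is not gyp's API, and the real method handles many more keys with slightly different ordering:

def cflags_from_xcode_settings(settings):
    # Illustrative translation of a handful of xcode_settings keys.
    cflags = []
    if settings.get('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO') == 'YES':
        cflags.append('-fvisibility=hidden')
    if settings.get('GCC_TREAT_WARNINGS_AS_ERRORS', 'NO') == 'YES':
        cflags.append('-Werror')
    cflags.append('-O%s' % settings.get('GCC_OPTIMIZATION_LEVEL', 's'))
    if 'MACOSX_DEPLOYMENT_TARGET' in settings:
        cflags.append('-mmacosx-version-min=%s' %
                      settings['MACOSX_DEPLOYMENT_TARGET'])
    cflags.append('-arch ' + settings.get('ARCHS', ['i386'])[0])
    return cflags

print(cflags_from_xcode_settings({
    'GCC_OPTIMIZATION_LEVEL': '2',
    'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',
    'MACOSX_DEPLOYMENT_TARGET': '10.6',
}))
# ['-Werror', '-O2', '-mmacosx-version-min=10.6', '-arch i386']
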
- if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse4.1') - if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'): - cflags.append('-msse4.2') - - cflags += self._Settings().get('WARNING_CFLAGS', []) - - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root)) - - self.configname = None - return cflags - - def GetCflagsC(self, configname): - """Returns flags that need to be added to .c, and .m compilations.""" - self.configname = configname - cflags_c = [] - self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s') - cflags_c += self._Settings().get('OTHER_CFLAGS', []) - self.configname = None - return cflags_c - - def GetCflagsCC(self, configname): - """Returns flags that need to be added to .cc, and .mm compilations.""" - self.configname = configname - cflags_cc = [] - if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'): - cflags_cc.append('-fno-rtti') - if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'): - cflags_cc.append('-fno-exceptions') - if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'): - cflags_cc.append('-fvisibility-inlines-hidden') - if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'): - cflags_cc.append('-fno-threadsafe-statics') - if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'): - cflags_cc.append('-Wno-invalid-offsetof') - - other_ccflags = [] - - for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']): - # TODO: More general variable expansion. Missing in many other places too. - if flag in ('$inherited', '$(inherited)', '${inherited}'): - flag = '$OTHER_CFLAGS' - if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'): - other_ccflags += self._Settings().get('OTHER_CFLAGS', []) - else: - other_ccflags.append(flag) - cflags_cc += other_ccflags - - self.configname = None - return cflags_cc - - def _AddObjectiveCGarbageCollectionFlags(self, flags): - gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported') - if gc_policy == 'supported': - flags.append('-fobjc-gc') - elif gc_policy == 'required': - flags.append('-fobjc-gc-only') - - def GetCflagsObjC(self, configname): - """Returns flags that need to be added to .m compilations.""" - self.configname = configname - cflags_objc = [] - - self._AddObjectiveCGarbageCollectionFlags(cflags_objc) - - self.configname = None - return cflags_objc - - def GetCflagsObjCC(self, configname): - """Returns flags that need to be added to .mm compilations.""" - self.configname = configname - cflags_objcc = [] - self._AddObjectiveCGarbageCollectionFlags(cflags_objcc) - if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'): - cflags_objcc.append('-fobjc-call-cxx-cdtors') - self.configname = None - return cflags_objcc - - def GetInstallNameBase(self): - """Return DYLIB_INSTALL_NAME_BASE for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if (self.spec['type'] != 'shared_library' and - (self.spec['type'] != 'loadable_module' or self._IsBundle())): - return None - install_base = self.GetPerTargetSetting( - 'DYLIB_INSTALL_NAME_BASE', - default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib') - return install_base - - def _StandardizePath(self, path): - """Do :standardizepath processing for path.""" - # I'm not quite sure what :standardizepath does. 
Just call normpath(), - # but don't let @executable_path/../foo collapse to foo. - if '/' in path: - prefix, rest = '', path - if path.startswith('@'): - prefix, rest = path.split('/', 1) - rest = os.path.normpath(rest) # :standardizepath - path = os.path.join(prefix, rest) - return path - - def GetInstallName(self): - """Return LD_DYLIB_INSTALL_NAME for this target.""" - # Xcode sets this for shared_libraries, and for nonbundled loadable_modules. - if (self.spec['type'] != 'shared_library' and - (self.spec['type'] != 'loadable_module' or self._IsBundle())): - return None - - default_install_name = \ - '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)' - install_name = self.GetPerTargetSetting( - 'LD_DYLIB_INSTALL_NAME', default=default_install_name) - - # Hardcode support for the variables used in chromium for now, to - # unblock people using the make build. - if '$' in install_name: - assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/' - '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), ( - 'Variables in LD_DYLIB_INSTALL_NAME are not generally supported ' - 'yet in target \'%s\' (got \'%s\')' % - (self.spec['target_name'], install_name)) - - install_name = install_name.replace( - '$(DYLIB_INSTALL_NAME_BASE:standardizepath)', - self._StandardizePath(self.GetInstallNameBase())) - if self._IsBundle(): - # These are only valid for bundles, hence the |if|. - install_name = install_name.replace( - '$(WRAPPER_NAME)', self.GetWrapperName()) - install_name = install_name.replace( - '$(PRODUCT_NAME)', self.GetProductName()) - else: - assert '$(WRAPPER_NAME)' not in install_name - assert '$(PRODUCT_NAME)' not in install_name - - install_name = install_name.replace( - '$(EXECUTABLE_PATH)', self.GetExecutablePath()) - return install_name - - def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path): - """Checks if ldflag contains a filename and if so remaps it from - gyp-directory-relative to build-directory-relative.""" - # This list is expanded on demand. - # They get matched as: - # -exported_symbols_list file - # -Wl,exported_symbols_list file - # -Wl,exported_symbols_list,file - LINKER_FILE = '(\S+)' - WORD = '\S+' - linker_flags = [ - ['-exported_symbols_list', LINKER_FILE], # Needed for NaCl. - ['-unexported_symbols_list', LINKER_FILE], - ['-reexported_symbols_list', LINKER_FILE], - ['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting. - ] - for flag_pattern in linker_flags: - regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern)) - m = regex.match(ldflag) - if m: - ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \ - ldflag[m.end(1):] - # Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS, - # TODO(thakis): Update ffmpeg.gyp): - if ldflag.startswith('-L'): - ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):]) - return ldflag - - def GetLdflags(self, configname, product_dir, gyp_to_build_path): - """Returns flags that need to be passed to the linker. - - Args: - configname: The name of the configuration to get ld flags for. - product_dir: The directory where products such static and dynamic - libraries are placed. This is added to the library search path. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. - """ - self.configname = configname - ldflags = [] - - # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS - # can contain entries that depend on this. Explicitly absolutify these. 
- for ldflag in self._Settings().get('OTHER_LDFLAGS', []): - ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path)) - - if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'): - ldflags.append('-Wl,-dead_strip') - - if self._Test('PREBINDING', 'YES', default='NO'): - ldflags.append('-Wl,-prebind') - - self._Appendf( - ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s') - self._Appendf( - ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s') - self._Appendf( - ldflags, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s') - if 'SDKROOT' in self._Settings(): - ldflags.append('-isysroot ' + self._SdkPath()) - - for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []): - ldflags.append('-L' + gyp_to_build_path(library_path)) - - if 'ORDER_FILE' in self._Settings(): - ldflags.append('-Wl,-order_file ' + - '-Wl,' + gyp_to_build_path( - self._Settings()['ORDER_FILE'])) - - archs = self._Settings().get('ARCHS', ['i386']) - if len(archs) != 1: - # TODO: Supporting fat binaries will be annoying. - self._WarnUnimplemented('ARCHS') - archs = ['i386'] - ldflags.append('-arch ' + archs[0]) - - # Xcode adds the product directory by default. - ldflags.append('-L' + product_dir) - - install_name = self.GetInstallName() - if install_name: - ldflags.append('-install_name ' + install_name.replace(' ', r'\ ')) - - for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []): - ldflags.append('-Wl,-rpath,' + rpath) - - config = self.spec['configurations'][self.configname] - framework_dirs = config.get('mac_framework_dirs', []) - for directory in framework_dirs: - ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath())) - - self.configname = None - return ldflags - - def GetLibtoolflags(self, configname): - """Returns flags that need to be passed to the static linker. - - Args: - configname: The name of the configuration to get ld flags for. - """ - self.configname = configname - libtoolflags = [] - - for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []): - libtoolflags.append(libtoolflag) - # TODO(thakis): ARCHS? - - self.configname = None - return libtoolflags - - def GetPerTargetSettings(self): - """Gets a list of all the per-target settings. This will only fetch keys - whose values are the same across all configurations.""" - first_pass = True - result = {} - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = dict(self.xcode_settings[configname]) - first_pass = False - else: - for key, value in self.xcode_settings[configname].iteritems(): - if key not in result: - continue - elif result[key] != value: - del result[key] - return result - - def GetPerTargetSetting(self, setting, default=None): - """Tries to get xcode_settings.setting from spec. Assumes that the setting - has the same value in all configurations and throws otherwise.""" - first_pass = True - result = None - for configname in sorted(self.xcode_settings.keys()): - if first_pass: - result = self.xcode_settings[configname].get(setting, None) - first_pass = False - else: - assert result == self.xcode_settings[configname].get(setting, None), ( - "Expected per-target setting for '%s', got per-config setting " - "(target %s)" % (setting, spec['target_name'])) - if result is None: - return default - return result - - def _GetStripPostbuilds(self, configname, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - neccessary to strip this target's binary. 
These should be run as postbuilds - before the actual postbuilds run.""" - self.configname = configname - - result = [] - if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and - self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')): - - default_strip_style = 'debugging' - if self._IsBundle(): - default_strip_style = 'non-global' - elif self.spec['type'] == 'executable': - default_strip_style = 'all' - - strip_style = self._Settings().get('STRIP_STYLE', default_strip_style) - strip_flags = { - 'all': '', - 'non-global': '-x', - 'debugging': '-S', - }[strip_style] - - explicit_strip_flags = self._Settings().get('STRIPFLAGS', '') - if explicit_strip_flags: - strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags) - - if not quiet: - result.append('echo STRIP\\(%s\\)' % self.spec['target_name']) - result.append('strip %s %s' % (strip_flags, output_binary)) - - self.configname = None - return result - - def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet): - """Returns a list of shell commands that contain the shell commands - neccessary to massage this target's debug information. These should be run - as postbuilds before the actual postbuilds run.""" - self.configname = configname - - # For static libraries, no dSYMs are created. - result = [] - if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and - self._Test( - 'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and - self.spec['type'] != 'static_library'): - if not quiet: - result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name']) - result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM')) - - self.configname = None - return result - - def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False): - """Returns a list of shell commands that contain the shell commands - to run as postbuilds for this target, before the actual postbuilds.""" - # dSYMs need to build before stripping happens. - return ( - self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) + - self._GetStripPostbuilds(configname, output_binary, quiet)) - - def _AdjustLibrary(self, library): - if library.endswith('.framework'): - l = '-framework ' + os.path.splitext(os.path.basename(library))[0] - else: - m = self.library_re.match(library) - if m: - l = '-l' + m.group(1) - else: - l = library - return l.replace('$(SDKROOT)', self._SdkPath()) - - def AdjustLibraries(self, libraries): - """Transforms entries like 'Cocoa.framework' in libraries into entries like - '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc. - """ - libraries = [ self._AdjustLibrary(library) for library in libraries] - return libraries - - -class MacPrefixHeader(object): - """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature. - - This feature consists of several pieces: - * If GCC_PREFIX_HEADER is present, all compilations in that project get an - additional |-include path_to_prefix_header| cflag. - * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is - instead compiled, and all other compilations in the project get an - additional |-include path_to_compiled_header| instead. - + Compiled prefix headers have the extension gch. There is one gch file for - every language used in the project (c, cc, m, mm), since gch files for - different languages aren't compatible. - + gch files themselves are built with the target's normal cflags, but they - obviously don't get the |-include| flag. 
Instead, they need a -x flag that - describes their language. - + All o files in the target need to depend on the gch file, to make sure - it's built before any o file is built. - - This class helps with some of these tasks, but it needs help from the build - system for writing dependencies to the gch files, for writing build commands - for the gch files, and for figuring out the location of the gch files. - """ - def __init__(self, xcode_settings, - gyp_path_to_build_path, gyp_path_to_build_output): - """If xcode_settings is None, all methods on this class are no-ops. - - Args: - gyp_path_to_build_path: A function that takes a gyp-relative path, - and returns a path relative to the build directory. - gyp_path_to_build_output: A function that takes a gyp-relative path and - a language code ('c', 'cc', 'm', or 'mm'), and that returns a path - to where the output of precompiling that path for that language - should be placed (without the trailing '.gch'). - """ - # This doesn't support per-configuration prefix headers. Good enough - # for now. - self.header = None - self.compile_headers = False - if xcode_settings: - self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER') - self.compile_headers = xcode_settings.GetPerTargetSetting( - 'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO' - self.compiled_headers = {} - if self.header: - if self.compile_headers: - for lang in ['c', 'cc', 'm', 'mm']: - self.compiled_headers[lang] = gyp_path_to_build_output( - self.header, lang) - self.header = gyp_path_to_build_path(self.header) - - def GetInclude(self, lang): - """Gets the cflags to include the prefix header for language |lang|.""" - if self.compile_headers and lang in self.compiled_headers: - return '-include %s' % self.compiled_headers[lang] - elif self.header: - return '-include %s' % self.header - else: - return '' - - def _Gch(self, lang): - """Returns the actual file name of the prefix header for language |lang|.""" - assert self.compile_headers - return self.compiled_headers[lang] + '.gch' - - def GetObjDependencies(self, sources, objs): - """Given a list of source files and the corresponding object files, returns - a list of (source, object, gch) tuples, where |gch| is the build-directory - relative path to the gch file each object file depends on. |compilable[i]| - has to be the source file belonging to |objs[i]|.""" - if not self.header or not self.compile_headers: - return [] - - result = [] - for source, obj in zip(sources, objs): - ext = os.path.splitext(source)[1] - lang = { - '.c': 'c', - '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc', - '.m': 'm', - '.mm': 'mm', - }.get(ext, None) - if lang: - result.append((source, obj, self._Gch(lang))) - return result - - def GetPchBuildCommands(self): - """Returns [(path_to_gch, language_flag, language, header)]. - |path_to_gch| and |header| are relative to the build directory. - """ - if not self.header or not self.compile_headers: - return [] - return [ - (self._Gch('c'), '-x c-header', 'c', self.header), - (self._Gch('cc'), '-x c++-header', 'cc', self.header), - (self._Gch('m'), '-x objective-c-header', 'm', self.header), - (self._Gch('mm'), '-x objective-c++-header', 'mm', self.header), - ] - - -def MergeGlobalXcodeSettingsToSpec(global_dict, spec): - """Merges the global xcode_settings dictionary into each configuration of the - target represented by spec. For keys that are both in the global and the local - xcode_settings dict, the local key gets precendence. 
- """ - # The xcode generator special-cases global xcode_settings and does something - # that amounts to merging in the global xcode_settings into each local - # xcode_settings dict. - global_xcode_settings = global_dict.get('xcode_settings', {}) - for config in spec['configurations'].values(): - if 'xcode_settings' in config: - new_settings = global_xcode_settings.copy() - new_settings.update(config['xcode_settings']) - config['xcode_settings'] = new_settings - - -def IsMacBundle(flavor, spec): - """Returns if |spec| should be treated as a bundle. - - Bundles are directories with a certain subdirectory structure, instead of - just a single file. Bundle rules do not produce a binary but also package - resources into that directory.""" - is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac') - if is_mac_bundle: - assert spec['type'] != 'none', ( - 'mac_bundle targets cannot have type none (target "%s")' % - spec['target_name']) - return is_mac_bundle - - -def GetMacBundleResources(product_dir, xcode_settings, resources): - """Yields (output, resource) pairs for every resource in |resources|. - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - resources: A list of bundle resources, relative to the build directory. - """ - dest = os.path.join(product_dir, - xcode_settings.GetBundleResourceFolder()) - for res in resources: - output = dest - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. - assert ' ' not in res, ( - "Spaces in resource filenames not supported (%s)" % res) - - # Split into (path,file). - res_parts = os.path.split(res) - - # Now split the path into (prefix,maybe.lproj). - lproj_parts = os.path.split(res_parts[0]) - # If the resource lives in a .lproj bundle, add that to the destination. - if lproj_parts[1].endswith('.lproj'): - output = os.path.join(output, lproj_parts[1]) - - output = os.path.join(output, res_parts[1]) - # Compiled XIB files are referred to by .nib. - if output.endswith('.xib'): - output = output[0:-3] + 'nib' - - yield output, res - - -def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path): - """Returns (info_plist, dest_plist, defines, extra_env), where: - * |info_plist| is the sourc plist path, relative to the - build directory, - * |dest_plist| is the destination plist path, relative to the - build directory, - * |defines| is a list of preprocessor defines (empty if the plist - shouldn't be preprocessed, - * |extra_env| is a dict of env variables that should be exported when - invoking |mac_tool copy-info-plist|. - - Only call this for mac bundle targets. - - Args: - product_dir: Path to the directory containing the output bundle, - relative to the build directory. - xcode_settings: The XcodeSettings of the current target. - gyp_to_build_path: A function that converts paths relative to the - current gyp file to paths relative to the build direcotry. - """ - info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE') - if not info_plist: - return None, None, [], {} - - # The make generator doesn't support it, so forbid it everywhere - # to keep the generators more interchangable. 
- assert ' ' not in info_plist, ( - "Spaces in Info.plist filenames not supported (%s)" % info_plist) - - info_plist = gyp_path_to_build_path(info_plist) - - # If explicitly set to preprocess the plist, invoke the C preprocessor and - # specify any defines as -D flags. - if xcode_settings.GetPerTargetSetting( - 'INFOPLIST_PREPROCESS', default='NO') == 'YES': - # Create an intermediate file based on the path. - defines = shlex.split(xcode_settings.GetPerTargetSetting( - 'INFOPLIST_PREPROCESSOR_DEFINITIONS', default='')) - else: - defines = [] - - dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath()) - extra_env = xcode_settings.GetPerTargetSettings() - - return info_plist, dest_plist, defines, extra_env - - -def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, - additional_settings=None): - """Return the environment variables that Xcode would set. See - http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153 - for a full list. - - Args: - xcode_settings: An XcodeSettings object. If this is None, this function - returns an empty dict. - built_products_dir: Absolute path to the built products dir. - srcroot: Absolute path to the source root. - configuration: The build configuration name. - additional_settings: An optional dict with more values to add to the - result. - """ - if not xcode_settings: return {} - - # This function is considered a friend of XcodeSettings, so let it reach into - # its implementation details. - spec = xcode_settings.spec - - # These are filled in on a as-needed basis. - env = { - 'BUILT_PRODUCTS_DIR' : built_products_dir, - 'CONFIGURATION' : configuration, - 'PRODUCT_NAME' : xcode_settings.GetProductName(), - # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME - 'SRCROOT' : srcroot, - 'SOURCE_ROOT': '${SRCROOT}', - # This is not true for static libraries, but currently the env is only - # written for bundles: - 'TARGET_BUILD_DIR' : built_products_dir, - 'TEMP_DIR' : '${TMPDIR}', - } - if xcode_settings.GetPerTargetSetting('SDKROOT'): - env['SDKROOT'] = xcode_settings._SdkPath() - else: - env['SDKROOT'] = '' - - if spec['type'] in ( - 'executable', 'static_library', 'shared_library', 'loadable_module'): - env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName() - env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath() - env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName() - mach_o_type = xcode_settings.GetMachOType() - if mach_o_type: - env['MACH_O_TYPE'] = mach_o_type - env['PRODUCT_TYPE'] = xcode_settings.GetProductType() - if xcode_settings._IsBundle(): - env['CONTENTS_FOLDER_PATH'] = \ - xcode_settings.GetBundleContentsFolderPath() - env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \ - xcode_settings.GetBundleResourceFolder() - env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath() - env['WRAPPER_NAME'] = xcode_settings.GetWrapperName() - - install_name = xcode_settings.GetInstallName() - if install_name: - env['LD_DYLIB_INSTALL_NAME'] = install_name - install_name_base = xcode_settings.GetInstallNameBase() - if install_name_base: - env['DYLIB_INSTALL_NAME_BASE'] = install_name_base - - if not additional_settings: - additional_settings = {} - else: - # Flatten lists to strings. 
- for k in additional_settings: - if not isinstance(additional_settings[k], str): - additional_settings[k] = ' '.join(additional_settings[k]) - additional_settings.update(env) - - for k in additional_settings: - additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k]) - - return additional_settings - - -def _NormalizeEnvVarReferences(str): - """Takes a string containing variable references in the form ${FOO}, $(FOO), - or $FOO, and returns a string with all variable references in the form ${FOO}. - """ - # $FOO -> ${FOO} - str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str) - - # $(FOO) -> ${FOO} - matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str) - for match in matches: - to_replace, variable = match - assert '$(' not in match, '$($(FOO)) variables not supported: ' + match - str = str.replace(to_replace, '${' + variable + '}') - - return str - - -def ExpandEnvVars(string, expansions): - """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the - expansions list. If the variable expands to something that references - another variable, this variable is expanded as well if it's in env -- - until no variables present in env are left.""" - for k, v in reversed(expansions): - string = string.replace('${' + k + '}', v) - string = string.replace('$(' + k + ')', v) - string = string.replace('$' + k, v) - return string - - -def _TopologicallySortedEnvVarKeys(env): - """Takes a dict |env| whose values are strings that can refer to other keys, - for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of - env such that key2 is after key1 in L if env[key2] refers to env[key1]. - - Throws an Exception in case of dependency cycles. - """ - # Since environment variables can refer to other variables, the evaluation - # order is important. Below is the logic to compute the dependency graph - # and sort it. - regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}') - def GetEdges(node): - # Use a definition of edges such that user_of_variable -> used_varible. - # This happens to be easier in this case, since a variable's - # definition contains all variables it references in a single string. - # We can then reverse the result of the topological sort at the end. - # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG)) - matches = set([v for v in regex.findall(env[node]) if v in env]) - for dependee in matches: - assert '${' not in dependee, 'Nested variables not supported: ' + dependee - return matches - - try: - # Topologically sort, and then reverse, because we used an edge definition - # that's inverted from the expected result of this function (see comment - # above). 
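
ExpandEnvVars and _TopologicallySortedEnvVarKeys exist because one Xcode variable may reference another, so expansion has to happen in dependency order. A self-contained sketch of that idea with invented variable values; the toy depth-first ordering below, unlike the real code, does not detect cycles:

import re

env = {
    'BUILT_PRODUCTS_DIR': '/tmp/out/Release',
    'TARGET_BUILD_DIR': '${BUILT_PRODUCTS_DIR}',
    'INFOPLIST_PATH': '${TARGET_BUILD_DIR}/Info.plist',
}

ref = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')

def sorted_keys(env):
    # Depth-first ordering: emit a key only after every key it references.
    order = []
    def visit(key):
        if key in order:
            return
        for dep in ref.findall(env[key]):
            if dep in env:
                visit(dep)
        order.append(key)
    for key in sorted(env):
        visit(key)
    return order

expanded = {}
for key in sorted_keys(env):
    value = env[key]
    for done_key in expanded:
        value = value.replace('${' + done_key + '}', expanded[done_key])
    expanded[key] = value

print(expanded['INFOPLIST_PATH'])  # /tmp/out/Release/Info.plist
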
- order = gyp.common.TopologicallySorted(env.keys(), GetEdges) - order.reverse() - return order - except gyp.common.CycleError, e: - raise Exception( - 'Xcode environment variables are cyclically dependent: ' + str(e.nodes)) - - -def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot, - configuration, additional_settings=None): - env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration, - additional_settings) - return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)] - - -def GetSpecPostbuildCommands(spec, quiet=False): - """Returns the list of postbuilds explicitly defined on |spec|, in a form - executable by a shell.""" - postbuilds = [] - for postbuild in spec.get('postbuilds', []): - if not quiet: - postbuilds.append('echo POSTBUILD\\(%s\\) %s' % ( - spec['target_name'], postbuild['postbuild_name'])) - postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action'])) - return postbuilds diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xcodeproj_file.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xcodeproj_file.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xcodeproj_file.py 2013-02-13 19:12:31.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xcodeproj_file.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,2870 +0,0 @@ -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Xcode project file generator. - -This module is both an Xcode project file generator and a documentation of the -Xcode project file format. Knowledge of the project file format was gained -based on extensive experience with Xcode, and by making changes to projects in -Xcode.app and observing the resultant changes in the associated project files. - -XCODE PROJECT FILES - -The generator targets the file format as written by Xcode 3.2 (specifically, -3.2.6), but past experience has taught that the format has not changed -significantly in the past several years, and future versions of Xcode are able -to read older project files. - -Xcode project files are "bundled": the project "file" from an end-user's -perspective is actually a directory with an ".xcodeproj" extension. The -project file from this module's perspective is actually a file inside this -directory, always named "project.pbxproj". This file contains a complete -description of the project and is all that is needed to use the xcodeproj. -Other files contained in the xcodeproj directory are simply used to store -per-user settings, such as the state of various UI elements in the Xcode -application. - -The project.pbxproj file is a property list, stored in a format almost -identical to the NeXTstep property list format. The file is able to carry -Unicode data, and is encoded in UTF-8. The root element in the property list -is a dictionary that contains several properties of minimal interest, and two -properties of immense interest. The most important property is a dictionary -named "objects". The entire structure of the project is represented by the -children of this property. The objects dictionary is keyed by unique 96-bit -values represented by 24 uppercase hexadecimal characters. Each value in the -objects dictionary is itself a dictionary, describing an individual object. - -Each object in the dictionary is a member of a class, which is identified by -the "isa" property of each object. A variety of classes are represented in a -project file. 
Objects can refer to other objects by ID, using the 24-character -hexadecimal object key. A project's objects form a tree, with a root object -of class PBXProject at the root. As an example, the PBXProject object serves -as parent to an XCConfigurationList object defining the build configurations -used in the project, a PBXGroup object serving as a container for all files -referenced in the project, and a list of target objects, each of which defines -a target in the project. There are several different types of target object, -such as PBXNativeTarget and PBXAggregateTarget. In this module, this -relationship is expressed by having each target type derive from an abstract -base named XCTarget. - -The project.pbxproj file's root dictionary also contains a property, sibling to -the "objects" dictionary, named "rootObject". The value of rootObject is a -24-character object key referring to the root PBXProject object in the -objects dictionary. - -In Xcode, every file used as input to a target or produced as a final product -of a target must appear somewhere in the hierarchy rooted at the PBXGroup -object referenced by the PBXProject's mainGroup property. A PBXGroup is -generally represented as a folder in the Xcode application. PBXGroups can -contain other PBXGroups as well as PBXFileReferences, which are pointers to -actual files. - -Each XCTarget contains a list of build phases, represented in this module by -the abstract base XCBuildPhase. Examples of concrete XCBuildPhase derivations -are PBXSourcesBuildPhase and PBXFrameworksBuildPhase, which correspond to the -"Compile Sources" and "Link Binary With Libraries" phases displayed in the -Xcode application. Files used as input to these phases (for example, source -files in the former case and libraries and frameworks in the latter) are -represented by PBXBuildFile objects, referenced by elements of "files" lists -in XCTarget objects. Each PBXBuildFile object refers to a PBXBuildFile -object as a "weak" reference: it does not "own" the PBXBuildFile, which is -owned by the root object's mainGroup or a descendant group. In most cases, the -layer of indirection between an XCBuildPhase and a PBXFileReference via a -PBXBuildFile appears extraneous, but there's actually one reason for this: -file-specific compiler flags are added to the PBXBuildFile object so as to -allow a single file to be a member of multiple targets while having distinct -compiler flags for each. These flags can be modified in the Xcode applciation -in the "Build" tab of a File Info window. - -When a project is open in the Xcode application, Xcode will rewrite it. As -such, this module is careful to adhere to the formatting used by Xcode, to -avoid insignificant changes appearing in the file when it is used in the -Xcode application. This will keep version control repositories happy, and -makes it possible to compare a project file used in Xcode to one generated by -this module to determine if any significant changes were made in the -application. - -Xcode has its own way of assigning 24-character identifiers to each object, -which is not duplicated here. Because the identifier only is only generated -once, when an object is created, and is then left unchanged, there is no need -to attempt to duplicate Xcode's behavior in this area. The generator is free -to select any identifier, even at random, to refer to the objects it creates, -and Xcode will retain those identifiers and use them when subsequently -rewriting the project file. 
However, the generator would choose new random -identifiers each time the project files are generated, leading to difficulties -comparing "used" project files to "pristine" ones produced by this module, -and causing the appearance of changes as every object identifier is changed -when updated projects are checked in to a version control repository. To -mitigate this problem, this module chooses identifiers in a more deterministic -way, by hashing a description of each object as well as its parent and ancestor -objects. This strategy should result in minimal "shift" in IDs as successive -generations of project files are produced. - -THIS MODULE - -This module introduces several classes, all derived from the XCObject class. -Nearly all of the "brains" are built into the XCObject class, which understands -how to create and modify objects, maintain the proper tree structure, compute -identifiers, and print objects. For the most part, classes derived from -XCObject need only provide a _schema class object, a dictionary that -expresses what properties objects of the class may contain. - -Given this structure, it's possible to build a minimal project file by creating -objects of the appropriate types and making the proper connections: - - config_list = XCConfigurationList() - group = PBXGroup() - project = PBXProject({'buildConfigurationList': config_list, - 'mainGroup': group}) - -With the project object set up, it can be added to an XCProjectFile object. -XCProjectFile is a pseudo-class in the sense that it is a concrete XCObject -subclass that does not actually correspond to a class type found in a project -file. Rather, it is used to represent the project file's root dictionary. -Printing an XCProjectFile will print the entire project file, including the -full "objects" dictionary. - - project_file = XCProjectFile({'rootObject': project}) - project_file.ComputeIDs() - project_file.Print() - -Xcode project files are always encoded in UTF-8. This module will accept -strings of either the str class or the unicode class. Strings of class str -are assumed to already be encoded in UTF-8. Obviously, if you're just using -ASCII, you won't encounter difficulties because ASCII is a UTF-8 subset. -Strings of class unicode are handled properly and encoded in UTF-8 when -a project file is output. -""" - -import gyp.common -import posixpath -import re -import struct -import sys - -# hashlib is supplied as of Python 2.5 as the replacement interface for sha -# and other secure hashes. In 2.6, sha is deprecated. Import hashlib if -# available, avoiding a deprecation warning under 2.6. Import sha otherwise, -# preserving 2.4 compatibility. -try: - import hashlib - _new_sha1 = hashlib.sha1 -except ImportError: - import sha - _new_sha1 = sha.new - - -# See XCObject._EncodeString. This pattern is used to determine when a string -# can be printed unquoted. Strings that match this pattern may be printed -# unquoted. Strings that do not match must be quoted and may be further -# transformed to be properly encoded. Note that this expression matches the -# characters listed with "+", for 1 or more occurrences: if a string is empty, -# it must not match this pattern, because it needs to be encoded as "". -_unquoted = re.compile('^[A-Za-z0-9$./_]+$') - -# Strings that match this pattern are quoted regardless of what _unquoted says. -# Oddly, Xcode will quote any string with a run of three or more underscores. 
-_quoted = re.compile('___') - -# This pattern should match any character that needs to be escaped by -# XCObject._EncodeString. See that function. -_escaped = re.compile('[\\\\"]|[^ -~]') - - -# Used by SourceTreeAndPathFromPath -_path_leading_variable = re.compile('^\$\((.*?)\)(/(.*))?$') - -def SourceTreeAndPathFromPath(input_path): - """Given input_path, returns a tuple with sourceTree and path values. - - Examples: - input_path (source_tree, output_path) - '$(VAR)/path' ('VAR', 'path') - '$(VAR)' ('VAR', None) - 'path' (None, 'path') - """ - - source_group_match = _path_leading_variable.match(input_path) - if source_group_match: - source_tree = source_group_match.group(1) - output_path = source_group_match.group(3) # This may be None. - else: - source_tree = None - output_path = input_path - - return (source_tree, output_path) - -def ConvertVariablesToShellSyntax(input_string): - return re.sub('\$\((.*?)\)', '${\\1}', input_string) - -class XCObject(object): - """The abstract base of all class types used in Xcode project files. - - Class variables: - _schema: A dictionary defining the properties of this class. The keys to - _schema are string property keys as used in project files. Values - are a list of four or five elements: - [ is_list, property_type, is_strong, is_required, default ] - is_list: True if the property described is a list, as opposed - to a single element. - property_type: The type to use as the value of the property, - or if is_list is True, the type to use for each - element of the value's list. property_type must - be an XCObject subclass, or one of the built-in - types str, int, or dict. - is_strong: If property_type is an XCObject subclass, is_strong - is True to assert that this class "owns," or serves - as parent, to the property value (or, if is_list is - True, values). is_strong must be False if - property_type is not an XCObject subclass. - is_required: True if the property is required for the class. - Note that is_required being True does not preclude - an empty string ("", in the case of property_type - str) or list ([], in the case of is_list True) from - being set for the property. - default: Optional. If is_requried is True, default may be set - to provide a default value for objects that do not supply - their own value. If is_required is True and default - is not provided, users of the class must supply their own - value for the property. - Note that although the values of the array are expressed in - boolean terms, subclasses provide values as integers to conserve - horizontal space. - _should_print_single_line: False in XCObject. Subclasses whose objects - should be written to the project file in the - alternate single-line format, such as - PBXFileReference and PBXBuildFile, should - set this to True. - _encode_transforms: Used by _EncodeString to encode unprintable characters. - The index into this list is the ordinal of the - character to transform; each value is a string - used to represent the character in the output. XCObject - provides an _encode_transforms list suitable for most - XCObject subclasses. - _alternate_encode_transforms: Provided for subclasses that wish to use - the alternate encoding rules. Xcode seems - to use these rules when printing objects in - single-line format. Subclasses that desire - this behavior should set _encode_transforms - to _alternate_encode_transforms. - _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs - to construct this object's ID. 
Most classes that need custom - hashing behavior should do it by overriding Hashables, - but in some cases an object's parent may wish to push a - hashable value into its child, and it can do so by appending - to _hashables. - Attributes: - id: The object's identifier, a 24-character uppercase hexadecimal string. - Usually, objects being created should not set id until the entire - project file structure is built. At that point, UpdateIDs() should - be called on the root object to assign deterministic values for id to - each object in the tree. - parent: The object's parent. This is set by a parent XCObject when a child - object is added to it. - _properties: The object's property dictionary. An object's properties are - described by its class' _schema variable. - """ - - _schema = {} - _should_print_single_line = False - - # See _EncodeString. - _encode_transforms = [] - i = 0 - while i < ord(' '): - _encode_transforms.append('\\U%04x' % i) - i = i + 1 - _encode_transforms[7] = '\\a' - _encode_transforms[8] = '\\b' - _encode_transforms[9] = '\\t' - _encode_transforms[10] = '\\n' - _encode_transforms[11] = '\\v' - _encode_transforms[12] = '\\f' - _encode_transforms[13] = '\\n' - - _alternate_encode_transforms = list(_encode_transforms) - _alternate_encode_transforms[9] = chr(9) - _alternate_encode_transforms[10] = chr(10) - _alternate_encode_transforms[11] = chr(11) - - def __init__(self, properties=None, id=None, parent=None): - self.id = id - self.parent = parent - self._properties = {} - self._hashables = [] - self._SetDefaultsFromSchema() - self.UpdateProperties(properties) - - def __repr__(self): - try: - name = self.Name() - except NotImplementedError: - return '<%s at 0x%x>' % (self.__class__.__name__, id(self)) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Copy(self): - """Make a copy of this object. - - The new object will have its own copy of lists and dicts. Any XCObject - objects owned by this object (marked "strong") will be copied in the - new object, even those found in lists. If this object has any weak - references to other XCObjects, the same references are added to the new - object without making a copy. - """ - - that = self.__class__(id=self.id, parent=self.parent) - for key, value in self._properties.iteritems(): - is_strong = self._schema[key][2] - - if isinstance(value, XCObject): - if is_strong: - new_value = value.Copy() - new_value.parent = that - that._properties[key] = new_value - else: - that._properties[key] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - that._properties[key] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe to - # call Copy. - that._properties[key] = [] - for item in value: - new_item = item.Copy() - new_item.parent = that - that._properties[key].append(new_item) - else: - that._properties[key] = value[:] - elif isinstance(value, dict): - # dicts are never strong. - if is_strong: - raise TypeError, 'Strong dict for key ' + key + ' in ' + \ - self.__class__.__name__ - else: - that._properties[key] = value.copy() - else: - raise TypeError, 'Unexpected type ' + value.__class__.__name__ + \ - ' for key ' + key + ' in ' + self.__class__.__name__ - - return that - - def Name(self): - """Return the name corresponding to an object. - - Not all objects necessarily need to be nameable, and not all that do have - a "name" property. Override as needed. 
- """ - - # If the schema indicates that "name" is required, try to access the - # property even if it doesn't exist. This will result in a KeyError - # being raised for the property that should be present, which seems more - # appropriate than NotImplementedError in this case. - if 'name' in self._properties or \ - ('name' in self._schema and self._schema['name'][3]): - return self._properties['name'] - - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement Name' - - def Comment(self): - """Return a comment string for the object. - - Most objects just use their name as the comment, but PBXProject uses - different values. - - The returned comment is not escaped and does not have any comment marker - strings applied to it. - """ - - return self.Name() - - def Hashables(self): - hashables = [self.__class__.__name__] - - name = self.Name() - if name != None: - hashables.append(name) - - hashables.extend(self._hashables) - - return hashables - - def HashablesForChild(self): - return None - - def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None): - """Set "id" properties deterministically. - - An object's "id" property is set based on a hash of its class type and - name, as well as the class type and name of all ancestor objects. As - such, it is only advisable to call ComputeIDs once an entire project file - tree is built. - - If recursive is True, recurse into all descendant objects and update their - hashes. - - If overwrite is True, any existing value set in the "id" property will be - replaced. - """ - - def _HashUpdate(hash, data): - """Update hash with data's length and contents. - - If the hash were updated only with the value of data, it would be - possible for clowns to induce collisions by manipulating the names of - their objects. By adding the length, it's exceedingly less likely that - ID collisions will be encountered, intentionally or not. - """ - - hash.update(struct.pack('>i', len(data))) - hash.update(data) - - if seed_hash is None: - seed_hash = _new_sha1() - - hash = seed_hash.copy() - - hashables = self.Hashables() - assert len(hashables) > 0 - for hashable in hashables: - _HashUpdate(hash, hashable) - - if recursive: - hashables_for_child = self.HashablesForChild() - if hashables_for_child is None: - child_hash = hash - else: - assert len(hashables_for_child) > 0 - child_hash = seed_hash.copy() - for hashable in hashables_for_child: - _HashUpdate(child_hash, hashable) - - for child in self.Children(): - child.ComputeIDs(recursive, overwrite, child_hash) - - if overwrite or self.id is None: - # Xcode IDs are only 96 bits (24 hex characters), but a SHA-1 digest is - # is 160 bits. Instead of throwing out 64 bits of the digest, xor them - # into the portion that gets used. - assert hash.digest_size % 4 == 0 - digest_int_count = hash.digest_size / 4 - digest_ints = struct.unpack('>' + 'I' * digest_int_count, hash.digest()) - id_ints = [0, 0, 0] - for index in xrange(0, digest_int_count): - id_ints[index % 3] ^= digest_ints[index] - self.id = '%08X%08X%08X' % tuple(id_ints) - - def EnsureNoIDCollisions(self): - """Verifies that no two objects have the same ID. Checks all descendants. 
- """ - - ids = {} - descendants = self.Descendants() - for descendant in descendants: - if descendant.id in ids: - other = ids[descendant.id] - raise KeyError, \ - 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \ - (descendant.id, str(descendant._properties), - str(other._properties), self._properties['rootObject'].Name()) - ids[descendant.id] = descendant - - def Children(self): - """Returns a list of all of this object's owned (strong) children.""" - - children = [] - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong) = attributes[0:3] - if is_strong and property in self._properties: - if not is_list: - children.append(self._properties[property]) - else: - children.extend(self._properties[property]) - return children - - def Descendants(self): - """Returns a list of all of this object's descendants, including this - object. - """ - - children = self.Children() - descendants = [self] - for child in children: - descendants.extend(child.Descendants()) - return descendants - - def PBXProjectAncestor(self): - # The base case for recursion is defined at PBXProject.PBXProjectAncestor. - if self.parent: - return self.parent.PBXProjectAncestor() - return None - - def _EncodeComment(self, comment): - """Encodes a comment to be placed in the project file output, mimicing - Xcode behavior. - """ - - # This mimics Xcode behavior by wrapping the comment in "/*" and "*/". If - # the string already contains a "*/", it is turned into "(*)/". This keeps - # the file writer from outputting something that would be treated as the - # end of a comment in the middle of something intended to be entirely a - # comment. - - return '/* ' + comment.replace('*/', '(*)/') + ' */' - - def _EncodeTransform(self, match): - # This function works closely with _EncodeString. It will only be called - # by re.sub with match.group(0) containing a character matched by the - # the _escaped expression. - char = match.group(0) - - # Backslashes (\) and quotation marks (") are always replaced with a - # backslash-escaped version of the same. Everything else gets its - # replacement from the class' _encode_transforms array. - if char == '\\': - return '\\\\' - if char == '"': - return '\\"' - return self._encode_transforms[ord(char)] - - def _EncodeString(self, value): - """Encodes a string to be placed in the project file output, mimicing - Xcode behavior. - """ - - # Use quotation marks when any character outside of the range A-Z, a-z, 0-9, - # $ (dollar sign), . (period), and _ (underscore) is present. Also use - # quotation marks to represent empty strings. - # - # Escape " (double-quote) and \ (backslash) by preceding them with a - # backslash. - # - # Some characters below the printable ASCII range are encoded specially: - # 7 ^G BEL is encoded as "\a" - # 8 ^H BS is encoded as "\b" - # 11 ^K VT is encoded as "\v" - # 12 ^L NP is encoded as "\f" - # 127 ^? DEL is passed through as-is without escaping - # - In PBXFileReference and PBXBuildFile objects: - # 9 ^I HT is passed through as-is without escaping - # 10 ^J NL is passed through as-is without escaping - # 13 ^M CR is passed through as-is without escaping - # - In other objects: - # 9 ^I HT is encoded as "\t" - # 10 ^J NL is encoded as "\n" - # 13 ^M CR is encoded as "\n" rendering it indistinguishable from - # 10 ^J NL - # All other nonprintable characters within the ASCII range (0 through 127 - # inclusive) are encoded as "\U001f" referring to the Unicode code point in - # hexadecimal. 
For example, character 14 (^N SO) is encoded as "\U000e". - # Characters above the ASCII range are passed through to the output encoded - # as UTF-8 without any escaping. These mappings are contained in the - # class' _encode_transforms list. - - if _unquoted.search(value) and not _quoted.search(value): - return value - - return '"' + _escaped.sub(self._EncodeTransform, value) + '"' - - def _XCPrint(self, file, tabs, line): - file.write('\t' * tabs + line) - - def _XCPrintableValue(self, tabs, value, flatten_list=False): - """Returns a representation of value that may be printed in a project file, - mimicing Xcode's behavior. - - _XCPrintableValue can handle str and int values, XCObjects (which are - made printable by returning their id property), and list and dict objects - composed of any of the above types. When printing a list or dict, and - _should_print_single_line is False, the tabs parameter is used to determine - how much to indent the lines corresponding to the items in the list or - dict. - - If flatten_list is True, single-element lists will be transformed into - strings. - """ - - printable = '' - comment = None - - if self._should_print_single_line: - sep = ' ' - element_tabs = '' - end_tabs = '' - else: - sep = '\n' - element_tabs = '\t' * (tabs + 1) - end_tabs = '\t' * tabs - - if isinstance(value, XCObject): - printable += value.id - comment = value.Comment() - elif isinstance(value, str): - printable += self._EncodeString(value) - elif isinstance(value, unicode): - printable += self._EncodeString(value.encode('utf-8')) - elif isinstance(value, int): - printable += str(value) - elif isinstance(value, list): - if flatten_list and len(value) <= 1: - if len(value) == 0: - printable += self._EncodeString('') - else: - printable += self._EncodeString(value[0]) - else: - printable = '(' + sep - for item in value: - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item, flatten_list) + \ - ',' + sep - printable += end_tabs + ')' - elif isinstance(value, dict): - printable = '{' + sep - for item_key, item_value in sorted(value.iteritems()): - printable += element_tabs + \ - self._XCPrintableValue(tabs + 1, item_key, flatten_list) + ' = ' + \ - self._XCPrintableValue(tabs + 1, item_value, flatten_list) + ';' + \ - sep - printable += end_tabs + '}' - else: - raise TypeError, "Can't make " + value.__class__.__name__ + ' printable' - - if comment != None: - printable += ' ' + self._EncodeComment(comment) - - return printable - - def _XCKVPrint(self, file, tabs, key, value): - """Prints a key and value, members of an XCObject's _properties dictionary, - to file. - - tabs is an int identifying the indentation level. If the class' - _should_print_single_line variable is True, tabs is ignored and the - key-value pair will be followed by a space insead of a newline. - """ - - if self._should_print_single_line: - printable = '' - after_kv = ' ' - else: - printable = '\t' * tabs - after_kv = '\n' - - # Xcode usually prints remoteGlobalIDString values in PBXContainerItemProxy - # objects without comments. Sometimes it prints them with comments, but - # the majority of the time, it doesn't. To avoid unnecessary changes to - # the project file after Xcode opens it, don't write comments for - # remoteGlobalIDString. 
This is a sucky hack and it would certainly be - # cleaner to extend the schema to indicate whether or not a comment should - # be printed, but since this is the only case where the problem occurs and - # Xcode itself can't seem to make up its mind, the hack will suffice. - # - # Also see PBXContainerItemProxy._schema['remoteGlobalIDString']. - if key == 'remoteGlobalIDString' and isinstance(self, - PBXContainerItemProxy): - value_to_print = value.id - else: - value_to_print = value - - # PBXBuildFile's settings property is represented in the output as a dict, - # but a hack here has it represented as a string. Arrange to strip off the - # quotes so that it shows up in the output as expected. - if key == 'settings' and isinstance(self, PBXBuildFile): - strip_value_quotes = True - else: - strip_value_quotes = False - - # In another one-off, let's set flatten_list on buildSettings properties - # of XCBuildConfiguration objects, because that's how Xcode treats them. - if key == 'buildSettings' and isinstance(self, XCBuildConfiguration): - flatten_list = True - else: - flatten_list = False - - try: - printable_key = self._XCPrintableValue(tabs, key, flatten_list) - printable_value = self._XCPrintableValue(tabs, value_to_print, - flatten_list) - if strip_value_quotes and len(printable_value) > 1 and \ - printable_value[0] == '"' and printable_value[-1] == '"': - printable_value = printable_value[1:-1] - printable += printable_key + ' = ' + printable_value + ';' + after_kv - except TypeError, e: - gyp.common.ExceptionAppend(e, - 'while printing key "%s"' % key) - raise - - self._XCPrint(file, 0, printable) - - def Print(self, file=sys.stdout): - """Prints a reprentation of this object to file, adhering to Xcode output - formatting. - """ - - self.VerifyHasRequiredProperties() - - if self._should_print_single_line: - # When printing an object in a single line, Xcode doesn't put any space - # between the beginning of a dictionary (or presumably a list) and the - # first contained item, so you wind up with snippets like - # ...CDEF = {isa = PBXFileReference; fileRef = 0123... - # If it were me, I would have put a space in there after the opening - # curly, but I guess this is just another one of those inconsistencies - # between how Xcode prints PBXFileReference and PBXBuildFile objects as - # compared to other objects. Mimic Xcode's behavior here by using an - # empty string for sep. - sep = '' - end_tabs = 0 - else: - sep = '\n' - end_tabs = 2 - - # Start the object. For example, '\t\tPBXProject = {\n'. - self._XCPrint(file, 2, self._XCPrintableValue(2, self) + ' = {' + sep) - - # "isa" isn't in the _properties dictionary, it's an intrinsic property - # of the class which the object belongs to. Xcode always outputs "isa" - # as the first element of an object dictionary. - self._XCKVPrint(file, 3, 'isa', self.__class__.__name__) - - # The remaining elements of an object dictionary are sorted alphabetically. - for property, value in sorted(self._properties.iteritems()): - self._XCKVPrint(file, 3, property, value) - - # End the object. - self._XCPrint(file, end_tabs, '};\n') - - def UpdateProperties(self, properties, do_copy=False): - """Merge the supplied properties into the _properties dictionary. - - The input properties must adhere to the class schema or a KeyError or - TypeError exception will be raised. If adding an object of an XCObject - subclass and the schema indicates a strong relationship, the object's - parent will be set to this object. 
- - If do_copy is True, then lists, dicts, strong-owned XCObjects, and - strong-owned XCObjects in lists will be copied instead of having their - references added. - """ - - if properties is None: - return - - for property, value in properties.iteritems(): - # Make sure the property is in the schema. - if not property in self._schema: - raise KeyError, property + ' not in ' + self.__class__.__name__ - - # Make sure the property conforms to the schema. - (is_list, property_type, is_strong) = self._schema[property][0:3] - if is_list: - if value.__class__ != list: - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + \ - ' must be list, not ' + value.__class__.__name__ - for item in value: - if not isinstance(item, property_type) and \ - not (item.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - 'item of ' + property + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - item.__class__.__name__ - elif not isinstance(value, property_type) and \ - not (value.__class__ == unicode and property_type == str): - # Accept unicode where str is specified. str is treated as - # UTF-8-encoded. - raise TypeError, \ - property + ' of ' + self.__class__.__name__ + ' must be ' + \ - property_type.__name__ + ', not ' + value.__class__.__name__ - - # Checks passed, perform the assignment. - if do_copy: - if isinstance(value, XCObject): - if is_strong: - self._properties[property] = value.Copy() - else: - self._properties[property] = value - elif isinstance(value, str) or isinstance(value, unicode) or \ - isinstance(value, int): - self._properties[property] = value - elif isinstance(value, list): - if is_strong: - # If is_strong is True, each element is an XCObject, so it's safe - # to call Copy. - self._properties[property] = [] - for item in value: - self._properties[property].append(item.Copy()) - else: - self._properties[property] = value[:] - elif isinstance(value, dict): - self._properties[property] = value.copy() - else: - raise TypeError, "Don't know how to copy a " + \ - value.__class__.__name__ + ' object for ' + \ - property + ' in ' + self.__class__.__name__ - else: - self._properties[property] = value - - # Set up the child's back-reference to this object. Don't use |value| - # any more because it may not be right if do_copy is true. - if is_strong: - if not is_list: - self._properties[property].parent = self - else: - for item in self._properties[property]: - item.parent = self - - def HasProperty(self, key): - return key in self._properties - - def GetProperty(self, key): - return self._properties[key] - - def SetProperty(self, key, value): - self.UpdateProperties({key: value}) - - def DelProperty(self, key): - if key in self._properties: - del self._properties[key] - - def AppendProperty(self, key, value): - # TODO(mark): Support ExtendProperty too (and make this call that)? - - # Schema validation. - if not key in self._schema: - raise KeyError, key + ' not in ' + self.__class__.__name__ - - (is_list, property_type, is_strong) = self._schema[key][0:3] - if not is_list: - raise TypeError, key + ' of ' + self.__class__.__name__ + ' must be list' - if not isinstance(value, property_type): - raise TypeError, 'item of ' + key + ' of ' + self.__class__.__name__ + \ - ' must be ' + property_type.__name__ + ', not ' + \ - value.__class__.__name__ - - # If the property doesn't exist yet, create a new empty list to receive the - # item. 
- if not key in self._properties: - self._properties[key] = [] - - # Set up the ownership link. - if is_strong: - value.parent = self - - # Store the item. - self._properties[key].append(value) - - def VerifyHasRequiredProperties(self): - """Ensure that all properties identified as required by the schema are - set. - """ - - # TODO(mark): A stronger verification mechanism is needed. Some - # subclasses need to perform validation beyond what the schema can enforce. - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and not property in self._properties: - raise KeyError, self.__class__.__name__ + ' requires ' + property - - def _SetDefaultsFromSchema(self): - """Assign object default values according to the schema. This will not - overwrite properties that have already been set.""" - - defaults = {} - for property, attributes in self._schema.iteritems(): - (is_list, property_type, is_strong, is_required) = attributes[0:4] - if is_required and len(attributes) >= 5 and \ - not property in self._properties: - default = attributes[4] - - defaults[property] = default - - if len(defaults) > 0: - # Use do_copy=True so that each new object gets its own copy of strong - # objects, lists, and dicts. - self.UpdateProperties(defaults, do_copy=True) - - -class XCHierarchicalElement(XCObject): - """Abstract base for PBXGroup and PBXFileReference. Not represented in a - project file.""" - - # TODO(mark): Do name and path belong here? Probably so. - # If path is set and name is not, name may have a default value. Name will - # be set to the basename of path, if the basename of path is different from - # the full value of path. If path is already just a leaf name, name will - # not be set. - _schema = XCObject._schema.copy() - _schema.update({ - 'comments': [0, str, 0, 0], - 'fileEncoding': [0, str, 0, 0], - 'includeInIndex': [0, int, 0, 0], - 'indentWidth': [0, int, 0, 0], - 'lineEnding': [0, int, 0, 0], - 'sourceTree': [0, str, 0, 1, ''], - 'tabWidth': [0, int, 0, 0], - 'usesTabs': [0, int, 0, 0], - 'wrapsLines': [0, int, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - if 'path' in self._properties and not 'name' in self._properties: - path = self._properties['path'] - name = posixpath.basename(path) - if name != '' and path != name: - self.SetProperty('name', name) - - if 'path' in self._properties and \ - (not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == ''): - # If the pathname begins with an Xcode variable like "$(SDKROOT)/", take - # the variable out and make the path be relative to that variable by - # assigning the variable name as the sourceTree. - (source_tree, path) = SourceTreeAndPathFromPath(self._properties['path']) - if source_tree != None: - self._properties['sourceTree'] = source_tree - if path != None: - self._properties['path'] = path - if source_tree != None and path is None and \ - not 'name' in self._properties: - # The path was of the form "$(SDKROOT)" with no path following it. - # This object is now relative to that variable, so it has no path - # attribute of its own. It does, however, keep a name. 
- del self._properties['path'] - self._properties['name'] = source_tree - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - elif 'path' in self._properties: - return self._properties['path'] - else: - # This happens in the case of the root PBXGroup. - return None - - def Hashables(self): - """Custom hashables for XCHierarchicalElements. - - XCHierarchicalElements are special. Generally, their hashes shouldn't - change if the paths don't change. The normal XCObject implementation of - Hashables adds a hashable for each object, which means that if - the hierarchical structure changes (possibly due to changes caused when - TakeOverOnlyChild runs and encounters slight changes in the hierarchy), - the hashes will change. For example, if a project file initially contains - a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent - a/b. If someone later adds a/f2 to the project file, a/b can no longer be - collapsed, and f1 winds up with parent b and grandparent a. That would - be sufficient to change f1's hash. - - To counteract this problem, hashables for all XCHierarchicalElements except - for the main group (which has neither a name nor a path) are taken to be - just the set of path components. Because hashables are inherited from - parents, this provides assurance that a/b/f1 has the same set of hashables - whether its parent is b or a/b. - - The main group is a special case. As it is permitted to have no name or - path, it is permitted to use the standard XCObject hash mechanism. This - is not considered a problem because there can be only one main group. - """ - - if self == self.PBXProjectAncestor()._properties['mainGroup']: - # super - return XCObject.Hashables(self) - - hashables = [] - - # Put the name in first, ensuring that if TakeOverOnlyChild collapses - # children into a top-level group like "Source", the name always goes - # into the list of hashables without interfering with path components. - if 'name' in self._properties: - # Make it less likely for people to manipulate hashes by following the - # pattern of always pushing an object type value onto the list first. - hashables.append(self.__class__.__name__ + '.name') - hashables.append(self._properties['name']) - - # NOTE: This still has the problem that if an absolute path is encountered, - # including paths with a sourceTree, they'll still inherit their parents' - # hashables, even though the paths aren't relative to their parents. This - # is not expected to be much of a problem in practice. - path = self.PathFromSourceTreeAndPath() - if path != None: - components = path.split(posixpath.sep) - for component in components: - hashables.append(self.__class__.__name__ + '.path') - hashables.append(component) - - hashables.extend(self._hashables) - - return hashables - - def Compare(self, other): - # Allow comparison of these types. PBXGroup has the highest sort rank; - # PBXVariantGroup is treated as equal to PBXFileReference. - valid_class_types = { - PBXFileReference: 'file', - PBXGroup: 'group', - PBXVariantGroup: 'file', - } - self_type = valid_class_types[self.__class__] - other_type = valid_class_types[other.__class__] - - if self_type == other_type: - # If the two objects are of the same sort rank, compare their names. - return cmp(self.Name(), other.Name()) - - # Otherwise, sort groups before everything else. 
- if self_type == 'group': - return -1 - return 1 - - def CompareRootGroup(self, other): - # This function should be used only to compare direct children of the - # containing PBXProject's mainGroup. These groups should appear in the - # listed order. - # TODO(mark): "Build" is used by gyp.generator.xcode, perhaps the - # generator should have a way of influencing this list rather than having - # to hardcode for the generator here. - order = ['Source', 'Intermediates', 'Projects', 'Frameworks', 'Products', - 'Build'] - - # If the groups aren't in the listed order, do a name comparison. - # Otherwise, groups in the listed order should come before those that - # aren't. - self_name = self.Name() - other_name = other.Name() - self_in = isinstance(self, PBXGroup) and self_name in order - other_in = isinstance(self, PBXGroup) and other_name in order - if not self_in and not other_in: - return self.Compare(other) - if self_name in order and not other_name in order: - return -1 - if other_name in order and not self_name in order: - return 1 - - # If both groups are in the listed order, go by the defined order. - self_index = order.index(self_name) - other_index = order.index(other_name) - if self_index < other_index: - return -1 - if self_index > other_index: - return 1 - return 0 - - def PathFromSourceTreeAndPath(self): - # Turn the object's sourceTree and path properties into a single flat - # string of a form comparable to the path parameter. If there's a - # sourceTree property other than "", wrap it in $(...) for the - # comparison. - components = [] - if self._properties['sourceTree'] != '': - components.append('$(' + self._properties['sourceTree'] + ')') - if 'path' in self._properties: - components.append(self._properties['path']) - - if len(components) > 0: - return posixpath.join(*components) - - return None - - def FullPath(self): - # Returns a full path to self relative to the project file, or relative - # to some other source tree. Start with self, and walk up the chain of - # parents prepending their paths, if any, until no more parents are - # available (project-relative path) or until a path relative to some - # source tree is found. - xche = self - path = None - while isinstance(xche, XCHierarchicalElement) and \ - (path is None or \ - (not path.startswith('/') and not path.startswith('$'))): - this_path = xche.PathFromSourceTreeAndPath() - if this_path != None and path != None: - path = posixpath.join(this_path, path) - elif this_path != None: - path = this_path - xche = xche.parent - - return path - - -class PBXGroup(XCHierarchicalElement): - """ - Attributes: - _children_by_path: Maps pathnames of children of this PBXGroup to the - actual child XCHierarchicalElement objects. - _variant_children_by_name_and_path: Maps (name, path) tuples of - PBXVariantGroup children to the actual child PBXVariantGroup objects. 
- """ - - _schema = XCHierarchicalElement._schema.copy() - _schema.update({ - 'children': [1, XCHierarchicalElement, 1, 1, []], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCHierarchicalElement.__init__(self, properties, id, parent) - self._children_by_path = {} - self._variant_children_by_name_and_path = {} - for child in self._properties.get('children', []): - self._AddChildToDicts(child) - - def Hashables(self): - # super - hashables = XCHierarchicalElement.Hashables(self) - - # It is not sufficient to just rely on name and parent to build a unique - # hashable : a node could have two child PBXGroup sharing a common name. - # To add entropy the hashable is enhanced with the names of all its - # children. - for child in self._properties.get('children', []): - child_name = child.Name() - if child_name != None: - hashables.append(child_name) - - return hashables - - def HashablesForChild(self): - # To avoid a circular reference the hashables used to compute a child id do - # not include the child names. - return XCHierarchicalElement.Hashables(self) - - def _AddChildToDicts(self, child): - # Sets up this PBXGroup object's dicts to reference the child properly. - child_path = child.PathFromSourceTreeAndPath() - if child_path: - if child_path in self._children_by_path: - raise ValueError, 'Found multiple children with path ' + child_path - self._children_by_path[child_path] = child - - if isinstance(child, PBXVariantGroup): - child_name = child._properties.get('name', None) - key = (child_name, child_path) - if key in self._variant_children_by_name_and_path: - raise ValueError, 'Found multiple PBXVariantGroup children with ' + \ - 'name ' + str(child_name) + ' and path ' + \ - str(child_path) - self._variant_children_by_name_and_path[key] = child - - def AppendChild(self, child): - # Callers should use this instead of calling - # AppendProperty('children', child) directly because this function - # maintains the group's dicts. - self.AppendProperty('children', child) - self._AddChildToDicts(child) - - def GetChildByName(self, name): - # This is not currently optimized with a dict as GetChildByPath is because - # it has few callers. Most callers probably want GetChildByPath. This - # function is only useful to get children that have names but no paths, - # which is rare. The children of the main group ("Source", "Products", - # etc.) is pretty much the only case where this likely to come up. - # - # TODO(mark): Maybe this should raise an error if more than one child is - # present with the same name. - if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if child.Name() == name: - return child - - return None - - def GetChildByPath(self, path): - if not path: - return None - - if path in self._children_by_path: - return self._children_by_path[path] - - return None - - def GetChildByRemoteObject(self, remote_object): - # This method is a little bit esoteric. Given a remote_object, which - # should be a PBXFileReference in another project file, this method will - # return this group's PBXReferenceProxy object serving as a local proxy - # for the remote PBXFileReference. - # - # This function might benefit from a dict optimization as GetChildByPath - # for some workloads, but profiling shows that it's not currently a - # problem. 
- if not 'children' in self._properties: - return None - - for child in self._properties['children']: - if not isinstance(child, PBXReferenceProxy): - continue - - container_proxy = child._properties['remoteRef'] - if container_proxy._properties['remoteGlobalIDString'] == remote_object: - return child - - return None - - def AddOrGetFileByPath(self, path, hierarchical): - """Returns an existing or new file reference corresponding to path. - - If hierarchical is True, this method will create or use the necessary - hierarchical group structure corresponding to path. Otherwise, it will - look in and create an item in the current group only. - - If an existing matching reference is found, it is returned, otherwise, a - new one will be created, added to the correct group, and returned. - - If path identifies a directory by virtue of carrying a trailing slash, - this method returns a PBXFileReference of "folder" type. If path - identifies a variant, by virtue of it identifying a file inside a directory - with an ".lproj" extension, this method returns a PBXVariantGroup - containing the variant named by path, and possibly other variants. For - all other paths, a "normal" PBXFileReference will be returned. - """ - - # Adding or getting a directory? Directories end with a trailing slash. - is_dir = False - if path.endswith('/'): - is_dir = True - path = posixpath.normpath(path) - if is_dir: - path = path + '/' - - # Adding or getting a variant? Variants are files inside directories - # with an ".lproj" extension. Xcode uses variants for localization. For - # a variant path/to/Language.lproj/MainMenu.nib, put a variant group named - # MainMenu.nib inside path/to, and give it a variant named Language. In - # this example, grandparent would be set to path/to and parent_root would - # be set to Language. - variant_name = None - parent = posixpath.dirname(path) - grandparent = posixpath.dirname(parent) - parent_basename = posixpath.basename(parent) - (parent_root, parent_ext) = posixpath.splitext(parent_basename) - if parent_ext == '.lproj': - variant_name = parent_root - if grandparent == '': - grandparent = None - - # Putting a directory inside a variant group is not currently supported. - assert not is_dir or variant_name is None - - path_split = path.split(posixpath.sep) - if len(path_split) == 1 or \ - ((is_dir or variant_name != None) and len(path_split) == 2) or \ - not hierarchical: - # The PBXFileReference or PBXVariantGroup will be added to or gotten from - # this PBXGroup, no recursion necessary. - if variant_name is None: - # Add or get a PBXFileReference. - file_ref = self.GetChildByPath(path) - if file_ref != None: - assert file_ref.__class__ == PBXFileReference - else: - file_ref = PBXFileReference({'path': path}) - self.AppendChild(file_ref) - else: - # Add or get a PBXVariantGroup. The variant group name is the same - # as the basename (MainMenu.nib in the example above). grandparent - # specifies the path to the variant group itself, and path_split[-2:] - # is the path of the specific variant relative to its group. 
- variant_group_name = posixpath.basename(path) - variant_group_ref = self.AddOrGetVariantGroupByNameAndPath( - variant_group_name, grandparent) - variant_path = posixpath.sep.join(path_split[-2:]) - variant_ref = variant_group_ref.GetChildByPath(variant_path) - if variant_ref != None: - assert variant_ref.__class__ == PBXFileReference - else: - variant_ref = PBXFileReference({'name': variant_name, - 'path': variant_path}) - variant_group_ref.AppendChild(variant_ref) - # The caller is interested in the variant group, not the specific - # variant file. - file_ref = variant_group_ref - return file_ref - else: - # Hierarchical recursion. Add or get a PBXGroup corresponding to the - # outermost path component, and then recurse into it, chopping off that - # path component. - next_dir = path_split[0] - group_ref = self.GetChildByPath(next_dir) - if group_ref != None: - assert group_ref.__class__ == PBXGroup - else: - group_ref = PBXGroup({'path': next_dir}) - self.AppendChild(group_ref) - return group_ref.AddOrGetFileByPath(posixpath.sep.join(path_split[1:]), - hierarchical) - - def AddOrGetVariantGroupByNameAndPath(self, name, path): - """Returns an existing or new PBXVariantGroup for name and path. - - If a PBXVariantGroup identified by the name and path arguments is already - present as a child of this object, it is returned. Otherwise, a new - PBXVariantGroup with the correct properties is created, added as a child, - and returned. - - This method will generally be called by AddOrGetFileByPath, which knows - when to create a variant group based on the structure of the pathnames - passed to it. - """ - - key = (name, path) - if key in self._variant_children_by_name_and_path: - variant_group_ref = self._variant_children_by_name_and_path[key] - assert variant_group_ref.__class__ == PBXVariantGroup - return variant_group_ref - - variant_group_properties = {'name': name} - if path != None: - variant_group_properties['path'] = path - variant_group_ref = PBXVariantGroup(variant_group_properties) - self.AppendChild(variant_group_ref) - - return variant_group_ref - - def TakeOverOnlyChild(self, recurse=False): - """If this PBXGroup has only one child and it's also a PBXGroup, take - it over by making all of its children this object's children. - - This function will continue to take over only children when those children - are groups. If there are three PBXGroups representing a, b, and c, with - c inside b and b inside a, and a and b have no other children, this will - result in a taking over both b and c, forming a PBXGroup for a/b/c. - - If recurse is True, this function will recurse into children and ask them - to collapse themselves by taking over only children as well. Assuming - an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f - (d1, d2, and f are files, the rest are groups), recursion will result in - a group for a/b/c containing a group for d3/e. - """ - - # At this stage, check that child class types are PBXGroup exactly, - # instead of using isinstance. The only subclass of PBXGroup, - # PBXVariantGroup, should not participate in reparenting in the same way: - # reparenting by merging different object types would be wrong. - while len(self._properties['children']) == 1 and \ - self._properties['children'][0].__class__ == PBXGroup: - # Loop to take over the innermost only-child group possible. - - child = self._properties['children'][0] - - # Assume the child's properties, including its children. 
Save a copy - # of this object's old properties, because they'll still be needed. - # This object retains its existing id and parent attributes. - old_properties = self._properties - self._properties = child._properties - self._children_by_path = child._children_by_path - - if not 'sourceTree' in self._properties or \ - self._properties['sourceTree'] == '': - # The child was relative to its parent. Fix up the path. Note that - # children with a sourceTree other than "" are not relative to - # their parents, so no path fix-up is needed in that case. - if 'path' in old_properties: - if 'path' in self._properties: - # Both the original parent and child have paths set. - self._properties['path'] = posixpath.join(old_properties['path'], - self._properties['path']) - else: - # Only the original parent has a path, use it. - self._properties['path'] = old_properties['path'] - if 'sourceTree' in old_properties: - # The original parent had a sourceTree set, use it. - self._properties['sourceTree'] = old_properties['sourceTree'] - - # If the original parent had a name set, keep using it. If the original - # parent didn't have a name but the child did, let the child's name - # live on. If the name attribute seems unnecessary now, get rid of it. - if 'name' in old_properties and old_properties['name'] != None and \ - old_properties['name'] != self.Name(): - self._properties['name'] = old_properties['name'] - if 'name' in self._properties and 'path' in self._properties and \ - self._properties['name'] == self._properties['path']: - del self._properties['name'] - - # Notify all children of their new parent. - for child in self._properties['children']: - child.parent = self - - # If asked to recurse, recurse. - if recurse: - for child in self._properties['children']: - if child.__class__ == PBXGroup: - child.TakeOverOnlyChild(recurse) - - def SortGroup(self): - self._properties['children'] = \ - sorted(self._properties['children'], cmp=lambda x,y: x.Compare(y)) - - # Recurse. - for child in self._properties['children']: - if isinstance(child, PBXGroup): - child.SortGroup() - - -class XCFileLikeElement(XCHierarchicalElement): - # Abstract base for objects that can be used as the fileRef property of - # PBXBuildFile. - - def PathHashables(self): - # A PBXBuildFile that refers to this object will call this method to - # obtain additional hashables specific to this XCFileLikeElement. Don't - # just use this object's hashables, they're not specific and unique enough - # on their own (without access to the parent hashables.) Instead, provide - # hashables that identify this object by path by getting its hashables as - # well as the hashables of ancestor XCHierarchicalElement objects. - - hashables = [] - xche = self - while xche != None and isinstance(xche, XCHierarchicalElement): - xche_hashables = xche.Hashables() - for index in xrange(0, len(xche_hashables)): - hashables.insert(index, xche_hashables[index]) - xche = xche.parent - return hashables - - -class XCContainerPortal(XCObject): - # Abstract base for objects that can be used as the containerPortal property - # of PBXContainerItemProxy. - pass - - -class XCRemoteObject(XCObject): - # Abstract base for objects that can be used as the remoteGlobalIDString - # property of PBXContainerItemProxy. 
- pass - - -class PBXFileReference(XCFileLikeElement, XCContainerPortal, XCRemoteObject): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'explicitFileType': [0, str, 0, 0], - 'lastKnownFileType': [0, str, 0, 0], - 'name': [0, str, 0, 0], - 'path': [0, str, 0, 1], - }) - - # Weird output rules for PBXFileReference. - _should_print_single_line = True - # super - _encode_transforms = XCFileLikeElement._alternate_encode_transforms - - def __init__(self, properties=None, id=None, parent=None): - # super - XCFileLikeElement.__init__(self, properties, id, parent) - if 'path' in self._properties and self._properties['path'].endswith('/'): - self._properties['path'] = self._properties['path'][:-1] - is_dir = True - else: - is_dir = False - - if 'path' in self._properties and \ - not 'lastKnownFileType' in self._properties and \ - not 'explicitFileType' in self._properties: - # TODO(mark): This is the replacement for a replacement for a quick hack. - # It is no longer incredibly sucky, but this list needs to be extended. - extension_map = { - 'a': 'archive.ar', - 'app': 'wrapper.application', - 'bdic': 'file', - 'bundle': 'wrapper.cfbundle', - 'c': 'sourcecode.c.c', - 'cc': 'sourcecode.cpp.cpp', - 'cpp': 'sourcecode.cpp.cpp', - 'css': 'text.css', - 'cxx': 'sourcecode.cpp.cpp', - 'dylib': 'compiled.mach-o.dylib', - 'framework': 'wrapper.framework', - 'h': 'sourcecode.c.h', - 'hxx': 'sourcecode.cpp.h', - 'icns': 'image.icns', - 'java': 'sourcecode.java', - 'js': 'sourcecode.javascript', - 'm': 'sourcecode.c.objc', - 'mm': 'sourcecode.cpp.objcpp', - 'nib': 'wrapper.nib', - 'o': 'compiled.mach-o.objfile', - 'pdf': 'image.pdf', - 'pl': 'text.script.perl', - 'plist': 'text.plist.xml', - 'pm': 'text.script.perl', - 'png': 'image.png', - 'py': 'text.script.python', - 'r': 'sourcecode.rez', - 'rez': 'sourcecode.rez', - 's': 'sourcecode.asm', - 'storyboard': 'file.storyboard', - 'strings': 'text.plist.strings', - 'ttf': 'file', - 'xcconfig': 'text.xcconfig', - 'xcdatamodel': 'wrapper.xcdatamodel', - 'xib': 'file.xib', - 'y': 'sourcecode.yacc', - } - - if is_dir: - file_type = 'folder' - else: - basename = posixpath.basename(self._properties['path']) - (root, ext) = posixpath.splitext(basename) - # Check the map using a lowercase extension. - # TODO(mark): Maybe it should try with the original case first and fall - # back to lowercase, in case there are any instances where case - # matters. There currently aren't. - if ext != '': - ext = ext[1:].lower() - - # TODO(mark): "text" is the default value, but "file" is appropriate - # for unrecognized files not containing text. Xcode seems to choose - # based on content. - file_type = extension_map.get(ext, 'text') - - self._properties['lastKnownFileType'] = file_type - - -class PBXVariantGroup(PBXGroup, XCFileLikeElement): - """PBXVariantGroup is used by Xcode to represent localizations.""" - # No additions to the schema relative to PBXGroup. - pass - - -# PBXReferenceProxy is also an XCFileLikeElement subclass. It is defined below -# because it uses PBXContainerItemProxy, defined below. 
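To illustrate the extension-based file typing that PBXFileReference applies above, here is a small standalone sketch; the helper name guess_last_known_file_type and the trimmed-down mapping are assumptions for illustration, not part of the module:

  import posixpath

  # Abbreviated form of the extension_map used by PBXFileReference above.
  _EXTENSION_TO_FILE_TYPE = {
      'c': 'sourcecode.c.c',
      'cc': 'sourcecode.cpp.cpp',
      'h': 'sourcecode.c.h',
      'mm': 'sourcecode.cpp.objcpp',
      'png': 'image.png',
  }

  def guess_last_known_file_type(path):
      # A trailing slash marks a directory, which Xcode types as "folder".
      if path.endswith('/'):
          return 'folder'
      ext = posixpath.splitext(posixpath.basename(path))[1]
      if ext:
          ext = ext[1:].lower()
      # Unrecognized extensions fall back to "text", as in the code above.
      return _EXTENSION_TO_FILE_TYPE.get(ext, 'text')

  # guess_last_known_file_type('gyp/input.cc')   -> 'sourcecode.cpp.cpp'
  # guess_last_known_file_type('Resources/img/') -> 'folder'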
- - -class XCBuildConfiguration(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'baseConfigurationReference': [0, PBXFileReference, 0, 0], - 'buildSettings': [0, dict, 0, 1, {}], - 'name': [0, str, 0, 1], - }) - - def HasBuildSetting(self, key): - return key in self._properties['buildSettings'] - - def GetBuildSetting(self, key): - return self._properties['buildSettings'][key] - - def SetBuildSetting(self, key, value): - # TODO(mark): If a list, copy? - self._properties['buildSettings'][key] = value - - def AppendBuildSetting(self, key, value): - if not key in self._properties['buildSettings']: - self._properties['buildSettings'][key] = [] - self._properties['buildSettings'][key].append(value) - - def DelBuildSetting(self, key): - if key in self._properties['buildSettings']: - del self._properties['buildSettings'][key] - - def SetBaseConfiguration(self, value): - self._properties['baseConfigurationReference'] = value - -class XCConfigurationList(XCObject): - # _configs is the default list of configurations. - _configs = [ XCBuildConfiguration({'name': 'Debug'}), - XCBuildConfiguration({'name': 'Release'}) ] - - _schema = XCObject._schema.copy() - _schema.update({ - 'buildConfigurations': [1, XCBuildConfiguration, 1, 1, _configs], - 'defaultConfigurationIsVisible': [0, int, 0, 1, 1], - 'defaultConfigurationName': [0, str, 0, 1, 'Release'], - }) - - def Name(self): - return 'Build configuration list for ' + \ - self.parent.__class__.__name__ + ' "' + self.parent.Name() + '"' - - def ConfigurationNamed(self, name): - """Convenience accessor to obtain an XCBuildConfiguration by name.""" - for configuration in self._properties['buildConfigurations']: - if configuration._properties['name'] == name: - return configuration - - raise KeyError, name - - def DefaultConfiguration(self): - """Convenience accessor to obtain the default XCBuildConfiguration.""" - return self.ConfigurationNamed(self._properties['defaultConfigurationName']) - - def HasBuildSetting(self, key): - """Determines the state of a build setting in all XCBuildConfiguration - child objects. - - If all child objects have key in their build settings, and the value is the - same in all child objects, returns 1. - - If no child objects have the key in their build settings, returns 0. - - If some, but not all, child objects have the key in their build settings, - or if any children have different values for the key, returns -1. - """ - - has = None - value = None - for configuration in self._properties['buildConfigurations']: - configuration_has = configuration.HasBuildSetting(key) - if has is None: - has = configuration_has - elif has != configuration_has: - return -1 - - if configuration_has: - configuration_value = configuration.GetBuildSetting(key) - if value is None: - value = configuration_value - elif value != configuration_value: - return -1 - - if not has: - return 0 - - return 1 - - def GetBuildSetting(self, key): - """Gets the build setting for key. - - All child XCConfiguration objects must have the same value set for the - setting, or a ValueError will be raised. - """ - - # TODO(mark): This is wrong for build settings that are lists. The list - # contents should be compared (and a list copy returned?) 
- - value = None - for configuration in self._properties['buildConfigurations']: - configuration_value = configuration.GetBuildSetting(key) - if value is None: - value = configuration_value - else: - if value != configuration_value: - raise ValueError, 'Variant values for ' + key - - return value - - def SetBuildSetting(self, key, value): - """Sets the build setting for key to value in all child - XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.SetBuildSetting(key, value) - - def AppendBuildSetting(self, key, value): - """Appends value to the build setting for key, which is treated as a list, - in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.AppendBuildSetting(key, value) - - def DelBuildSetting(self, key): - """Deletes the build setting key from all child XCBuildConfiguration - objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.DelBuildSetting(key) - - def SetBaseConfiguration(self, value): - """Sets the build configuration in all child XCBuildConfiguration objects. - """ - - for configuration in self._properties['buildConfigurations']: - configuration.SetBaseConfiguration(value) - - -class PBXBuildFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'fileRef': [0, XCFileLikeElement, 0, 1], - 'settings': [0, str, 0, 0], # hack, it's a dict - }) - - # Weird output rules for PBXBuildFile. - _should_print_single_line = True - _encode_transforms = XCObject._alternate_encode_transforms - - def Name(self): - # Example: "main.cc in Sources" - return self._properties['fileRef'].Name() + ' in ' + self.parent.Name() - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # It is not sufficient to just rely on Name() to get the - # XCFileLikeElement's name, because that is not a complete pathname. - # PathHashables returns hashables unique enough that no two - # PBXBuildFiles should wind up with the same set of hashables, unless - # someone adds the same file multiple times to the same target. That - # would be considered invalid anyway. - hashables.extend(self._properties['fileRef'].PathHashables()) - - return hashables - - -class XCBuildPhase(XCObject): - """Abstract base for build phase classes. Not represented in a project - file. - - Attributes: - _files_by_path: A dict mapping each path of a child in the files list by - path (keys) to the corresponding PBXBuildFile children (values). - _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys) - to the corresponding PBXBuildFile children (values). - """ - - # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't - # actually have a "files" list. XCBuildPhase should not have "files" but - # another abstract subclass of it should provide this, and concrete build - # phase types that do have "files" lists should be derived from that new - # abstract subclass. XCBuildPhase should only provide buildActionMask and - # runOnlyForDeploymentPostprocessing, and not files or the various - # file-related methods and attributes. 
- - _schema = XCObject._schema.copy() - _schema.update({ - 'buildActionMask': [0, int, 0, 1, 0x7fffffff], - 'files': [1, PBXBuildFile, 1, 1, []], - 'runOnlyForDeploymentPostprocessing': [0, int, 0, 1, 0], - }) - - def __init__(self, properties=None, id=None, parent=None): - # super - XCObject.__init__(self, properties, id, parent) - - self._files_by_path = {} - self._files_by_xcfilelikeelement = {} - for pbxbuildfile in self._properties.get('files', []): - self._AddBuildFileToDicts(pbxbuildfile) - - def FileGroup(self, path): - # Subclasses must override this by returning a two-element tuple. The - # first item in the tuple should be the PBXGroup to which "path" should be - # added, either as a child or deeper descendant. The second item should - # be a boolean indicating whether files should be added into hierarchical - # groups or one single flat group. - raise NotImplementedError, \ - self.__class__.__name__ + ' must implement FileGroup' - - def _AddPathToDict(self, pbxbuildfile, path): - """Adds path to the dict tracking paths belonging to this build phase. - - If the path is already a member of this build phase, raises an exception. - """ - - if path in self._files_by_path: - raise ValueError, 'Found multiple build files with path ' + path - self._files_by_path[path] = pbxbuildfile - - def _AddBuildFileToDicts(self, pbxbuildfile, path=None): - """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts. - - If path is specified, then it is the path that is being added to the - phase, and pbxbuildfile must contain either a PBXFileReference directly - referencing that path, or it must contain a PBXVariantGroup that itself - contains a PBXFileReference referencing the path. - - If path is not specified, either the PBXFileReference's path or the paths - of all children of the PBXVariantGroup are taken as being added to the - phase. - - If the path is already present in the phase, raises an exception. - - If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile - are already present in the phase, referenced by a different PBXBuildFile - object, raises an exception. This does not raise an exception when - a PBXFileReference or PBXVariantGroup reappear and are referenced by the - same PBXBuildFile that has already introduced them, because in the case - of PBXVariantGroup objects, they may correspond to multiple paths that are - not all added simultaneously. When this situation occurs, the path needs - to be added to _files_by_path, but nothing needs to change in - _files_by_xcfilelikeelement, and the caller should have avoided adding - the PBXBuildFile if it is already present in the list of children. - """ - - xcfilelikeelement = pbxbuildfile._properties['fileRef'] - - paths = [] - if path != None: - # It's best when the caller provides the path. - if isinstance(xcfilelikeelement, PBXVariantGroup): - paths.append(path) - else: - # If the caller didn't provide a path, there can be either multiple - # paths (PBXVariantGroup) or one. - if isinstance(xcfilelikeelement, PBXVariantGroup): - for variant in xcfilelikeelement._properties['children']: - paths.append(variant.FullPath()) - else: - paths.append(xcfilelikeelement.FullPath()) - - # Add the paths first, because if something's going to raise, the - # messages provided by _AddPathToDict are more useful owing to its - # having access to a real pathname and not just an object's Name(). 
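The bookkeeping described above amounts to two indexes per phase, one keyed by path and one keyed by file-like element; a genuinely new PBXBuildFile that reuses an existing path or element is rejected, while the same PBXBuildFile may keep adding further variant paths. A standalone sketch of that invariant (the class and names here are hypothetical, not the gyp classes):

class PhaseIndex(object):
    """Tracks which build file owns each path and file element (sketch)."""
    def __init__(self):
        self.by_path = {}
        self.by_element = {}

    def add(self, build_file, element, path):
        if path in self.by_path:
            raise ValueError('Found multiple build files with path ' + path)
        owner = self.by_element.get(element)
        if owner is not None and owner is not build_file:
            raise ValueError('Found multiple build files for ' + element)
        self.by_path[path] = build_file
        self.by_element[element] = build_file

index = PhaseIndex()
index.add('buildfile-1', 'InfoPlist.strings', 'en.lproj/InfoPlist.strings')
# The same build file may add more variant paths for its group ...
index.add('buildfile-1', 'InfoPlist.strings', 'ja.lproj/InfoPlist.strings')
# ... but a different build file reusing the element, or any duplicate
# path, would raise ValueError, as in _AddPathToDict/_AddBuildFileToDicts.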
- for a_path in paths: - self._AddPathToDict(pbxbuildfile, a_path) - - # If another PBXBuildFile references this XCFileLikeElement, there's a - # problem. - if xcfilelikeelement in self._files_by_xcfilelikeelement and \ - self._files_by_xcfilelikeelement[xcfilelikeelement] != pbxbuildfile: - raise ValueError, 'Found multiple build files for ' + \ - xcfilelikeelement.Name() - self._files_by_xcfilelikeelement[xcfilelikeelement] = pbxbuildfile - - def AppendBuildFile(self, pbxbuildfile, path=None): - # Callers should use this instead of calling - # AppendProperty('files', pbxbuildfile) directly because this function - # maintains the object's dicts. Better yet, callers can just call AddFile - # with a pathname and not worry about building their own PBXBuildFile - # objects. - self.AppendProperty('files', pbxbuildfile) - self._AddBuildFileToDicts(pbxbuildfile, path) - - def AddFile(self, path, settings=None): - (file_group, hierarchical) = self.FileGroup(path) - file_ref = file_group.AddOrGetFileByPath(path, hierarchical) - - if file_ref in self._files_by_xcfilelikeelement and \ - isinstance(file_ref, PBXVariantGroup): - # There's already a PBXBuildFile in this phase corresponding to the - # PBXVariantGroup. path just provides a new variant that belongs to - # the group. Add the path to the dict. - pbxbuildfile = self._files_by_xcfilelikeelement[file_ref] - self._AddBuildFileToDicts(pbxbuildfile, path) - else: - # Add a new PBXBuildFile to get file_ref into the phase. - if settings is None: - pbxbuildfile = PBXBuildFile({'fileRef': file_ref}) - else: - pbxbuildfile = PBXBuildFile({'fileRef': file_ref, 'settings': settings}) - self.AppendBuildFile(pbxbuildfile, path) - - -class PBXHeadersBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Headers' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXResourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Resources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXSourcesBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Sources' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - -class PBXFrameworksBuildPhase(XCBuildPhase): - # No additions to the schema relative to XCBuildPhase. - - def Name(self): - return 'Frameworks' - - def FileGroup(self, path): - (root, ext) = posixpath.splitext(path) - if ext != '': - ext = ext[1:].lower() - if ext == 'o': - # .o files are added to Xcode Frameworks phases, but conceptually aren't - # frameworks, they're more like sources or intermediates. Redirect them - # to show up in one of those other groups. 
- return self.PBXProjectAncestor().RootGroupForPath(path) - else: - return (self.PBXProjectAncestor().FrameworksGroup(), False) - - -class PBXShellScriptBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'inputPaths': [1, str, 0, 1, []], - 'name': [0, str, 0, 0], - 'outputPaths': [1, str, 0, 1, []], - 'shellPath': [0, str, 0, 1, '/bin/sh'], - 'shellScript': [0, str, 0, 1], - 'showEnvVarsInLog': [0, int, 0, 0], - }) - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'ShellScript' - - -class PBXCopyFilesBuildPhase(XCBuildPhase): - _schema = XCBuildPhase._schema.copy() - _schema.update({ - 'dstPath': [0, str, 0, 1], - 'dstSubfolderSpec': [0, int, 0, 1], - 'name': [0, str, 0, 0], - }) - - # path_tree_re matches "$(DIR)/path" or just "$(DIR)". Match group 1 is - # "DIR", match group 3 is "path" or None. - path_tree_re = re.compile('^\\$\\((.*)\\)(/(.*)|)$') - - # path_tree_to_subfolder maps names of Xcode variables to the associated - # dstSubfolderSpec property value used in a PBXCopyFilesBuildPhase object. - path_tree_to_subfolder = { - 'BUILT_PRODUCTS_DIR': 16, # Products Directory - # Other types that can be chosen via the Xcode UI. - # TODO(mark): Map Xcode variable names to these. - # : 1, # Wrapper - # : 6, # Executables: 6 - # : 7, # Resources - # : 15, # Java Resources - # : 10, # Frameworks - # : 11, # Shared Frameworks - # : 12, # Shared Support - # : 13, # PlugIns - } - - def Name(self): - if 'name' in self._properties: - return self._properties['name'] - - return 'CopyFiles' - - def FileGroup(self, path): - return self.PBXProjectAncestor().RootGroupForPath(path) - - def SetDestination(self, path): - """Set the dstSubfolderSpec and dstPath properties from path. - - path may be specified in the same notation used for XCHierarchicalElements, - specifically, "$(DIR)/path". - """ - - path_tree_match = self.path_tree_re.search(path) - if path_tree_match: - # Everything else needs to be relative to an Xcode variable. - path_tree = path_tree_match.group(1) - relative_path = path_tree_match.group(3) - - if path_tree in self.path_tree_to_subfolder: - subfolder = self.path_tree_to_subfolder[path_tree] - if relative_path is None: - relative_path = '' - else: - # The path starts with an unrecognized Xcode variable - # name like $(SRCROOT). Xcode will still handle this - # as an "absolute path" that starts with the variable. - subfolder = 0 - relative_path = path - elif path.startswith('/'): - # Special case. Absolute paths are in dstSubfolderSpec 0. - subfolder = 0 - relative_path = path[1:] - else: - raise ValueError, 'Can\'t use path %s in a %s' % \ - (path, self.__class__.__name__) - - self._properties['dstPath'] = relative_path - self._properties['dstSubfolderSpec'] = subfolder - - -class PBXBuildRule(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'compilerSpec': [0, str, 0, 1], - 'filePatterns': [0, str, 0, 0], - 'fileType': [0, str, 0, 1], - 'isEditable': [0, int, 0, 1, 1], - 'outputFiles': [1, str, 0, 1, []], - 'script': [0, str, 0, 0], - }) - - def Name(self): - # Not very inspired, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. 
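To make the SetDestination parsing above concrete: "$(BUILT_PRODUCTS_DIR)/plugins" maps to the known dstSubfolderSpec 16 with "plugins" as dstPath, an unrecognized variable such as $(SRCROOT) falls back to spec 0 with the whole string kept as the path, and a leading "/" is treated as an absolute path under spec 0. A standalone re-implementation of just that decision, for illustration:

import re

PATH_TREE_RE = re.compile(r'^\$\((.*)\)(/(.*)|)$')
SUBFOLDERS = {'BUILT_PRODUCTS_DIR': 16}  # Products Directory

def split_destination(path):
    match = PATH_TREE_RE.search(path)
    if match:
        tree, rel = match.group(1), match.group(3)
        if tree in SUBFOLDERS:
            return SUBFOLDERS[tree], rel or ''
        # Unknown variable: treated as an "absolute path" that happens to
        # start with the variable, so the whole string is preserved.
        return 0, path
    if path.startswith('/'):
        return 0, path[1:]
    raise ValueError("Can't use path %s here" % path)

assert split_destination('$(BUILT_PRODUCTS_DIR)/plugins') == (16, 'plugins')
assert split_destination('$(BUILT_PRODUCTS_DIR)') == (16, '')
assert split_destination('$(SRCROOT)/out') == (0, '$(SRCROOT)/out')
assert split_destination('/usr/local/lib') == (0, 'usr/local/lib')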
- hashables.append(self._properties['fileType']) - if 'filePatterns' in self._properties: - hashables.append(self._properties['filePatterns']) - return hashables - - -class PBXContainerItemProxy(XCObject): - # When referencing an item in this project file, containerPortal is the - # PBXProject root object of this project file. When referencing an item in - # another project file, containerPortal is a PBXFileReference identifying - # the other project file. - # - # When serving as a proxy to an XCTarget (in this project file or another), - # proxyType is 1. When serving as a proxy to a PBXFileReference (in another - # project file), proxyType is 2. Type 2 is used for references to the - # producs of the other project file's targets. - # - # Xcode is weird about remoteGlobalIDString. Usually, it's printed without - # a comment, indicating that it's tracked internally simply as a string, but - # sometimes it's printed with a comment (usually when the object is initially - # created), indicating that it's tracked as a project file object at least - # sometimes. This module always tracks it as an object, but contains a hack - # to prevent it from printing the comment in the project file output. See - # _XCKVPrint. - _schema = XCObject._schema.copy() - _schema.update({ - 'containerPortal': [0, XCContainerPortal, 0, 1], - 'proxyType': [0, int, 0, 1], - 'remoteGlobalIDString': [0, XCRemoteObject, 0, 1], - 'remoteInfo': [0, str, 0, 1], - }) - - def __repr__(self): - props = self._properties - name = '%s.gyp:%s' % (props['containerPortal'].Name(), props['remoteInfo']) - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. - hashables.extend(self._properties['containerPortal'].Hashables()) - hashables.extend(self._properties['remoteGlobalIDString'].Hashables()) - return hashables - - -class PBXTargetDependency(XCObject): - # The "target" property accepts an XCTarget object, and obviously not - # NoneType. But XCTarget is defined below, so it can't be put into the - # schema yet. The definition of PBXTargetDependency can't be moved below - # XCTarget because XCTarget's own schema references PBXTargetDependency. - # Python doesn't deal well with this circular relationship, and doesn't have - # a real way to do forward declarations. To work around, the type of - # the "target" property is reset below, after XCTarget is defined. - # - # At least one of "name" and "target" is required. - _schema = XCObject._schema.copy() - _schema.update({ - 'name': [0, str, 0, 0], - 'target': [0, None.__class__, 0, 0], - 'targetProxy': [0, PBXContainerItemProxy, 1, 1], - }) - - def __repr__(self): - name = self._properties.get('name') or self._properties['target'].Name() - return '<%s %r at 0x%x>' % (self.__class__.__name__, name, id(self)) - - def Name(self): - # Admittedly not the best name, but it's what Xcode uses. - return self.__class__.__name__ - - def Hashables(self): - # super - hashables = XCObject.Hashables(self) - - # Use the hashables of the weak objects that this object refers to. 
- hashables.extend(self._properties['targetProxy'].Hashables()) - return hashables - - -class PBXReferenceProxy(XCFileLikeElement): - _schema = XCFileLikeElement._schema.copy() - _schema.update({ - 'fileType': [0, str, 0, 1], - 'path': [0, str, 0, 1], - 'remoteRef': [0, PBXContainerItemProxy, 1, 1], - }) - - -class XCTarget(XCRemoteObject): - # An XCTarget is really just an XCObject, the XCRemoteObject thing is just - # to allow PBXProject to be used in the remoteGlobalIDString property of - # PBXContainerItemProxy. - # - # Setting a "name" property at instantiation may also affect "productName", - # which may in turn affect the "PRODUCT_NAME" build setting in children of - # "buildConfigurationList". See __init__ below. - _schema = XCRemoteObject._schema.copy() - _schema.update({ - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'buildPhases': [1, XCBuildPhase, 1, 1, []], - 'dependencies': [1, PBXTargetDependency, 1, 1, []], - 'name': [0, str, 0, 1], - 'productName': [0, str, 0, 1], - }) - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCRemoteObject.__init__(self, properties, id, parent) - - # Set up additional defaults not expressed in the schema. If a "name" - # property was supplied, set "productName" if it is not present. Also set - # the "PRODUCT_NAME" build setting in each configuration, but only if - # the setting is not present in any build configuration. - if 'name' in self._properties: - if not 'productName' in self._properties: - self.SetProperty('productName', self._properties['name']) - - if 'productName' in self._properties: - if 'buildConfigurationList' in self._properties: - configs = self._properties['buildConfigurationList'] - if configs.HasBuildSetting('PRODUCT_NAME') == 0: - configs.SetBuildSetting('PRODUCT_NAME', - self._properties['productName']) - - def AddDependency(self, other): - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject == other_pbxproject: - # Add a dependency to another target in the same project file. - container = PBXContainerItemProxy({'containerPortal': pbxproject, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name()}) - dependency = PBXTargetDependency({'target': other, - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - else: - # Add a dependency to a target in a different project file. - other_project_ref = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[1] - container = PBXContainerItemProxy({ - 'containerPortal': other_project_ref, - 'proxyType': 1, - 'remoteGlobalIDString': other, - 'remoteInfo': other.Name(), - }) - dependency = PBXTargetDependency({'name': other.Name(), - 'targetProxy': container}) - self.AppendProperty('dependencies', dependency) - - # Proxy all of these through to the build configuration list. 
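For orientation, here is a sketch of how AddDependency is typically driven, assuming gyp's pylib is on sys.path and the module is importable as gyp.xcodeproj_file (the import path and target names are assumptions; the classes and calls are the ones defined in this file):

import gyp.xcodeproj_file as xf   # assumption: gyp/pylib is on sys.path

project = xf.PBXProject(path='sample.xcodeproj')
lib = xf.PBXNativeTarget(
    {'name': 'util', 'productType': 'com.apple.product-type.library.static'},
    parent=project)
app = xf.PBXNativeTarget(
    {'name': 'demo', 'productType': 'com.apple.product-type.application'},
    parent=project)
project.AppendProperty('targets', lib)
project.AppendProperty('targets', app)

# Same project file: AddDependency builds a proxyType 1
# PBXContainerItemProxy whose containerPortal is the PBXProject itself,
# and (because util is a static library) PBXNativeTarget.AddDependency
# also adds its product to demo's Frameworks phase.
app.AddDependency(lib)

For a target in a different project file the same call instead goes through AddOrGetProjectReference, and the proxy's containerPortal becomes the PBXFileReference for the other project.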
- - def ConfigurationNamed(self, name): - return self._properties['buildConfigurationList'].ConfigurationNamed(name) - - def DefaultConfiguration(self): - return self._properties['buildConfigurationList'].DefaultConfiguration() - - def HasBuildSetting(self, key): - return self._properties['buildConfigurationList'].HasBuildSetting(key) - - def GetBuildSetting(self, key): - return self._properties['buildConfigurationList'].GetBuildSetting(key) - - def SetBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].SetBuildSetting(key, \ - value) - - def AppendBuildSetting(self, key, value): - return self._properties['buildConfigurationList'].AppendBuildSetting(key, \ - value) - - def DelBuildSetting(self, key): - return self._properties['buildConfigurationList'].DelBuildSetting(key) - - -# Redefine the type of the "target" property. See PBXTargetDependency._schema -# above. -PBXTargetDependency._schema['target'][1] = XCTarget - - -class PBXNativeTarget(XCTarget): - # buildPhases is overridden in the schema to be able to set defaults. - # - # NOTE: Contrary to most objects, it is advisable to set parent when - # constructing PBXNativeTarget. A parent of an XCTarget must be a PBXProject - # object. A parent reference is required for a PBXNativeTarget during - # construction to be able to set up the target defaults for productReference, - # because a PBXBuildFile object must be created for the target and it must - # be added to the PBXProject's mainGroup hierarchy. - _schema = XCTarget._schema.copy() - _schema.update({ - 'buildPhases': [1, XCBuildPhase, 1, 1, - [PBXSourcesBuildPhase(), PBXFrameworksBuildPhase()]], - 'buildRules': [1, PBXBuildRule, 1, 1, []], - 'productReference': [0, PBXFileReference, 0, 1], - 'productType': [0, str, 0, 1], - }) - - # Mapping from Xcode product-types to settings. The settings are: - # filetype : used for explicitFileType in the project file - # prefix : the prefix for the file name - # suffix : the suffix for the filen ame - _product_filetypes = { - 'com.apple.product-type.application': ['wrapper.application', - '', '.app'], - 'com.apple.product-type.bundle': ['wrapper.cfbundle', - '', '.bundle'], - 'com.apple.product-type.framework': ['wrapper.framework', - '', '.framework'], - 'com.apple.product-type.library.dynamic': ['compiled.mach-o.dylib', - 'lib', '.dylib'], - 'com.apple.product-type.library.static': ['archive.ar', - 'lib', '.a'], - 'com.apple.product-type.tool': ['compiled.mach-o.executable', - '', ''], - 'com.googlecode.gyp.xcode.bundle': ['compiled.mach-o.dylib', - '', '.so'], - } - - def __init__(self, properties=None, id=None, parent=None, - force_outdir=None, force_prefix=None, force_extension=None): - # super - XCTarget.__init__(self, properties, id, parent) - - if 'productName' in self._properties and \ - 'productType' in self._properties and \ - not 'productReference' in self._properties and \ - self._properties['productType'] in self._product_filetypes: - products_group = None - pbxproject = self.PBXProjectAncestor() - if pbxproject != None: - products_group = pbxproject.ProductsGroup() - - if products_group != None: - (filetype, prefix, suffix) = \ - self._product_filetypes[self._properties['productType']] - # Xcode does not have a distinct type for loadable modules that are - # pure BSD targets (not in a bundle wrapper). GYP allows such modules - # to be specified by setting a target type to loadable_module without - # having mac_bundle set. 
These are mapped to the pseudo-product type - # com.googlecode.gyp.xcode.bundle. - # - # By picking up this special type and converting it to a dynamic - # library (com.apple.product-type.library.dynamic) with fix-ups, - # single-file loadable modules can be produced. - # - # MACH_O_TYPE is changed to mh_bundle to produce the proper file type - # (as opposed to mh_dylib). In order for linking to succeed, - # DYLIB_CURRENT_VERSION and DYLIB_COMPATIBILITY_VERSION must be - # cleared. They are meaningless for type mh_bundle. - # - # Finally, the .so extension is forcibly applied over the default - # (.dylib), unless another forced extension is already selected. - # .dylib is plainly wrong, and .bundle is used by loadable_modules in - # bundle wrappers (com.apple.product-type.bundle). .so seems an odd - # choice because it's used as the extension on many other systems that - # don't distinguish between linkable shared libraries and non-linkable - # loadable modules, but there's precedent: Python loadable modules on - # Mac OS X use an .so extension. - if self._properties['productType'] == 'com.googlecode.gyp.xcode.bundle': - self._properties['productType'] = \ - 'com.apple.product-type.library.dynamic' - self.SetBuildSetting('MACH_O_TYPE', 'mh_bundle') - self.SetBuildSetting('DYLIB_CURRENT_VERSION', '') - self.SetBuildSetting('DYLIB_COMPATIBILITY_VERSION', '') - if force_extension is None: - force_extension = suffix[1:] - - if force_extension is not None: - # If it's a wrapper (bundle), set WRAPPER_EXTENSION. - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_EXTENSION', force_extension) - else: - # Extension override. - suffix = '.' + force_extension - self.SetBuildSetting('EXECUTABLE_EXTENSION', force_extension) - - if filetype.startswith('compiled.mach-o.executable'): - product_name = self._properties['productName'] - product_name += suffix - suffix = '' - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - # Xcode handles most prefixes based on the target type, however there - # are exceptions. If a "BSD Dynamic Library" target is added in the - # Xcode UI, Xcode sets EXECUTABLE_PREFIX. This check duplicates that - # behavior. - if force_prefix is not None: - prefix = force_prefix - if filetype.startswith('wrapper.'): - self.SetBuildSetting('WRAPPER_PREFIX', prefix) - else: - self.SetBuildSetting('EXECUTABLE_PREFIX', prefix) - - if force_outdir is not None: - self.SetBuildSetting('TARGET_BUILD_DIR', force_outdir) - - # TODO(tvl): Remove the below hack. - # http://code.google.com/p/gyp/issues/detail?id=122 - - # Some targets include the prefix in the target_name. These targets - # really should just add a product_name setting that doesn't include - # the prefix. For example: - # target_name = 'libevent', product_name = 'event' - # This check cleans up for them. 
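A hedged sketch of what the fix-up above produces for a gyp loadable_module that is not a mac_bundle, again assuming the gyp.xcodeproj_file import path; the asserted values follow directly from the constructor logic above:

import gyp.xcodeproj_file as xf   # assumption: gyp/pylib is on sys.path

project = xf.PBXProject(path='sample.xcodeproj')
module = xf.PBXNativeTarget(
    {'name': 'pyext', 'productType': 'com.googlecode.gyp.xcode.bundle'},
    parent=project)

# The pseudo product type is rewritten to a plain dynamic library ...
assert (module.GetProperty('productType') ==
        'com.apple.product-type.library.dynamic')
# ... built as an mh_bundle with the dylib versioning settings cleared ...
assert module.GetBuildSetting('MACH_O_TYPE') == 'mh_bundle'
assert module.GetBuildSetting('DYLIB_CURRENT_VERSION') == ''
# ... and the product picks up the forced .so extension.
assert module.GetBuildSetting('EXECUTABLE_EXTENSION') == 'so'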
- product_name = self._properties['productName'] - prefix_len = len(prefix) - if prefix_len and (product_name[:prefix_len] == prefix): - product_name = product_name[prefix_len:] - self.SetProperty('productName', product_name) - self.SetBuildSetting('PRODUCT_NAME', product_name) - - ref_props = { - 'explicitFileType': filetype, - 'includeInIndex': 0, - 'path': prefix + product_name + suffix, - 'sourceTree': 'BUILT_PRODUCTS_DIR', - } - file_ref = PBXFileReference(ref_props) - products_group.AppendChild(file_ref) - self.SetProperty('productReference', file_ref) - - def GetBuildPhaseByType(self, type): - if not 'buildPhases' in self._properties: - return None - - the_phase = None - for phase in self._properties['buildPhases']: - if isinstance(phase, type): - # Some phases may be present in multiples in a well-formed project file, - # but phases like PBXSourcesBuildPhase may only be present singly, and - # this function is intended as an aid to GetBuildPhaseByType. Loop - # over the entire list of phases and assert if more than one of the - # desired type is found. - assert the_phase is None - the_phase = phase - - return the_phase - - def HeadersPhase(self): - headers_phase = self.GetBuildPhaseByType(PBXHeadersBuildPhase) - if headers_phase is None: - headers_phase = PBXHeadersBuildPhase() - - # The headers phase should come before the resources, sources, and - # frameworks phases, if any. - insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] - if isinstance(phase, PBXResourcesBuildPhase) or \ - isinstance(phase, PBXSourcesBuildPhase) or \ - isinstance(phase, PBXFrameworksBuildPhase): - insert_at = index - break - - self._properties['buildPhases'].insert(insert_at, headers_phase) - headers_phase.parent = self - - return headers_phase - - def ResourcesPhase(self): - resources_phase = self.GetBuildPhaseByType(PBXResourcesBuildPhase) - if resources_phase is None: - resources_phase = PBXResourcesBuildPhase() - - # The resources phase should come before the sources and frameworks - # phases, if any. 
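The ordering rule in HeadersPhase above, and the analogous one in ResourcesPhase that follows, is simply "insert the new phase before the first phase of any later kind, else append". A standalone sketch of that helper, with plain tuples standing in for the phase classes:

def insert_phase(phases, new_phase, later_kinds):
    """Insert new_phase before the first phase whose kind is in later_kinds,
    or append it if none is present."""
    insert_at = len(phases)
    for index, (kind, _) in enumerate(phases):
        if kind in later_kinds:
            insert_at = index
            break
    phases.insert(insert_at, new_phase)

phases = [('Sources', 'main.cc'), ('Frameworks', 'Cocoa.framework')]
insert_phase(phases, ('Headers', 'demo.h'),
             later_kinds={'Resources', 'Sources', 'Frameworks'})
assert [kind for kind, _ in phases] == ['Headers', 'Sources', 'Frameworks']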
- insert_at = len(self._properties['buildPhases']) - for index in xrange(0, len(self._properties['buildPhases'])): - phase = self._properties['buildPhases'][index] - if isinstance(phase, PBXSourcesBuildPhase) or \ - isinstance(phase, PBXFrameworksBuildPhase): - insert_at = index - break - - self._properties['buildPhases'].insert(insert_at, resources_phase) - resources_phase.parent = self - - return resources_phase - - def SourcesPhase(self): - sources_phase = self.GetBuildPhaseByType(PBXSourcesBuildPhase) - if sources_phase is None: - sources_phase = PBXSourcesBuildPhase() - self.AppendProperty('buildPhases', sources_phase) - - return sources_phase - - def FrameworksPhase(self): - frameworks_phase = self.GetBuildPhaseByType(PBXFrameworksBuildPhase) - if frameworks_phase is None: - frameworks_phase = PBXFrameworksBuildPhase() - self.AppendProperty('buildPhases', frameworks_phase) - - return frameworks_phase - - def AddDependency(self, other): - # super - XCTarget.AddDependency(self, other) - - static_library_type = 'com.apple.product-type.library.static' - shared_library_type = 'com.apple.product-type.library.dynamic' - framework_type = 'com.apple.product-type.framework' - if isinstance(other, PBXNativeTarget) and \ - 'productType' in self._properties and \ - self._properties['productType'] != static_library_type and \ - 'productType' in other._properties and \ - (other._properties['productType'] == static_library_type or \ - ((other._properties['productType'] == shared_library_type or \ - other._properties['productType'] == framework_type) and \ - ((not other.HasBuildSetting('MACH_O_TYPE')) or - other.GetBuildSetting('MACH_O_TYPE') != 'mh_bundle'))): - - file_ref = other.GetProperty('productReference') - - pbxproject = self.PBXProjectAncestor() - other_pbxproject = other.PBXProjectAncestor() - if pbxproject != other_pbxproject: - other_project_product_group = \ - pbxproject.AddOrGetProjectReference(other_pbxproject)[0] - file_ref = other_project_product_group.GetChildByRemoteObject(file_ref) - - self.FrameworksPhase().AppendProperty('files', - PBXBuildFile({'fileRef': file_ref})) - - -class PBXAggregateTarget(XCTarget): - pass - - -class PBXProject(XCContainerPortal): - # A PBXProject is really just an XCObject, the XCContainerPortal thing is - # just to allow PBXProject to be used in the containerPortal property of - # PBXContainerItemProxy. - """ - - Attributes: - path: "sample.xcodeproj". TODO(mark) Document me! - _other_pbxprojects: A dictionary, keyed by other PBXProject objects. Each - value is a reference to the dict in the - projectReferences list associated with the keyed - PBXProject. 
- """ - - _schema = XCContainerPortal._schema.copy() - _schema.update({ - 'attributes': [0, dict, 0, 0], - 'buildConfigurationList': [0, XCConfigurationList, 1, 1, - XCConfigurationList()], - 'compatibilityVersion': [0, str, 0, 1, 'Xcode 3.2'], - 'hasScannedForEncodings': [0, int, 0, 1, 1], - 'mainGroup': [0, PBXGroup, 1, 1, PBXGroup()], - 'projectDirPath': [0, str, 0, 1, ''], - 'projectReferences': [1, dict, 0, 0], - 'projectRoot': [0, str, 0, 1, ''], - 'targets': [1, XCTarget, 1, 1, []], - }) - - def __init__(self, properties=None, id=None, parent=None, path=None): - self.path = path - self._other_pbxprojects = {} - # super - return XCContainerPortal.__init__(self, properties, id, parent) - - def Name(self): - name = self.path - if name[-10:] == '.xcodeproj': - name = name[:-10] - return posixpath.basename(name) - - def Path(self): - return self.path - - def Comment(self): - return 'Project object' - - def Children(self): - # super - children = XCContainerPortal.Children(self) - - # Add children that the schema doesn't know about. Maybe there's a more - # elegant way around this, but this is the only case where we need to own - # objects in a dictionary (that is itself in a list), and three lines for - # a one-off isn't that big a deal. - if 'projectReferences' in self._properties: - for reference in self._properties['projectReferences']: - children.append(reference['ProductGroup']) - - return children - - def PBXProjectAncestor(self): - return self - - def _GroupByName(self, name): - if not 'mainGroup' in self._properties: - self.SetProperty('mainGroup', PBXGroup()) - - main_group = self._properties['mainGroup'] - group = main_group.GetChildByName(name) - if group is None: - group = PBXGroup({'name': name}) - main_group.AppendChild(group) - - return group - - # SourceGroup and ProductsGroup are created by default in Xcode's own - # templates. - def SourceGroup(self): - return self._GroupByName('Source') - - def ProductsGroup(self): - return self._GroupByName('Products') - - # IntermediatesGroup is used to collect source-like files that are generated - # by rules or script phases and are placed in intermediate directories such - # as DerivedSources. - def IntermediatesGroup(self): - return self._GroupByName('Intermediates') - - # FrameworksGroup and ProjectsGroup are top-level groups used to collect - # frameworks and projects. - def FrameworksGroup(self): - return self._GroupByName('Frameworks') - - def ProjectsGroup(self): - return self._GroupByName('Projects') - - def RootGroupForPath(self, path): - """Returns a PBXGroup child of this object to which path should be added. - - This method is intended to choose between SourceGroup and - IntermediatesGroup on the basis of whether path is present in a source - directory or an intermediates directory. For the purposes of this - determination, any path located within a derived file directory such as - PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates - directory. - - The returned value is a two-element tuple. The first element is the - PBXGroup, and the second element specifies whether that group should be - organized hierarchically (True) or as a single flat list (False). - """ - - # TODO(mark): make this a class variable and bind to self on call? - # Also, this list is nowhere near exhaustive. - # INTERMEDIATE_DIR and SHARED_INTERMEDIATE_DIR are used by - # gyp.generator.xcode. There should probably be some way for that module - # to push the names in, rather than having to hard-code them here. 
- source_tree_groups = { - 'DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - 'PROJECT_DERIVED_FILE_DIR': (self.IntermediatesGroup, True), - 'SHARED_INTERMEDIATE_DIR': (self.IntermediatesGroup, True), - } - - (source_tree, path) = SourceTreeAndPathFromPath(path) - if source_tree != None and source_tree in source_tree_groups: - (group_func, hierarchical) = source_tree_groups[source_tree] - group = group_func() - return (group, hierarchical) - - # TODO(mark): make additional choices based on file extension. - - return (self.SourceGroup(), True) - - def AddOrGetFileInRootGroup(self, path): - """Returns a PBXFileReference corresponding to path in the correct group - according to RootGroupForPath's heuristics. - - If an existing PBXFileReference for path exists, it will be returned. - Otherwise, one will be created and returned. - """ - - (group, hierarchical) = self.RootGroupForPath(path) - return group.AddOrGetFileByPath(path, hierarchical) - - def RootGroupsTakeOverOnlyChildren(self, recurse=False): - """Calls TakeOverOnlyChild for all groups in the main group.""" - - for group in self._properties['mainGroup']._properties['children']: - if isinstance(group, PBXGroup): - group.TakeOverOnlyChild(recurse) - - def SortGroups(self): - # Sort the children of the mainGroup (like "Source" and "Products") - # according to their defined order. - self._properties['mainGroup']._properties['children'] = \ - sorted(self._properties['mainGroup']._properties['children'], - cmp=lambda x,y: x.CompareRootGroup(y)) - - # Sort everything else by putting group before files, and going - # alphabetically by name within sections of groups and files. SortGroup - # is recursive. - for group in self._properties['mainGroup']._properties['children']: - if not isinstance(group, PBXGroup): - continue - - if group.Name() == 'Products': - # The Products group is a special case. Instead of sorting - # alphabetically, sort things in the order of the targets that - # produce the products. To do this, just build up a new list of - # products based on the targets. - products = [] - for target in self._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - product = target._properties['productReference'] - # Make sure that the product is already in the products group. - assert product in group._properties['children'] - products.append(product) - - # Make sure that this process doesn't miss anything that was already - # in the products group. - assert len(products) == len(group._properties['children']) - group._properties['children'] = products - else: - group.SortGroup() - - def AddOrGetProjectReference(self, other_pbxproject): - """Add a reference to another project file (via PBXProject object) to this - one. - - Returns [ProductGroup, ProjectRef]. ProductGroup is a PBXGroup object in - this project file that contains a PBXReferenceProxy object for each - product of each PBXNativeTarget in the other project file. ProjectRef is - a PBXFileReference to the other project file. - - If this project file already references the other project file, the - existing ProductGroup and ProjectRef are returned. The ProductGroup will - still be updated if necessary. - """ - - if not 'projectReferences' in self._properties: - self._properties['projectReferences'] = [] - - product_group = None - project_ref = None - - if not other_pbxproject in self._other_pbxprojects: - # This project file isn't yet linked to the other one. Establish the - # link. 
- product_group = PBXGroup({'name': 'Products'}) - - # ProductGroup is strong. - product_group.parent = self - - # There's nothing unique about this PBXGroup, and if left alone, it will - # wind up with the same set of hashables as all other PBXGroup objects - # owned by the projectReferences list. Add the hashables of the - # remote PBXProject that it's related to. - product_group._hashables.extend(other_pbxproject.Hashables()) - - # The other project reports its path as relative to the same directory - # that this project's path is relative to. The other project's path - # is not necessarily already relative to this project. Figure out the - # pathname that this project needs to use to refer to the other one. - this_path = posixpath.dirname(self.Path()) - projectDirPath = self.GetProperty('projectDirPath') - if projectDirPath: - if posixpath.isabs(projectDirPath[0]): - this_path = projectDirPath - else: - this_path = posixpath.join(this_path, projectDirPath) - other_path = gyp.common.RelativePath(other_pbxproject.Path(), this_path) - - # ProjectRef is weak (it's owned by the mainGroup hierarchy). - project_ref = PBXFileReference({ - 'lastKnownFileType': 'wrapper.pb-project', - 'path': other_path, - 'sourceTree': 'SOURCE_ROOT', - }) - self.ProjectsGroup().AppendChild(project_ref) - - ref_dict = {'ProductGroup': product_group, 'ProjectRef': project_ref} - self._other_pbxprojects[other_pbxproject] = ref_dict - self.AppendProperty('projectReferences', ref_dict) - - # Xcode seems to sort this list case-insensitively - self._properties['projectReferences'] = \ - sorted(self._properties['projectReferences'], cmp=lambda x,y: - cmp(x['ProjectRef'].Name().lower(), - y['ProjectRef'].Name().lower())) - else: - # The link already exists. Pull out the relevnt data. - project_ref_dict = self._other_pbxprojects[other_pbxproject] - product_group = project_ref_dict['ProductGroup'] - project_ref = project_ref_dict['ProjectRef'] - - self._SetUpProductReferences(other_pbxproject, product_group, project_ref) - - return [product_group, project_ref] - - def _SetUpProductReferences(self, other_pbxproject, product_group, - project_ref): - # TODO(mark): This only adds references to products in other_pbxproject - # when they don't exist in this pbxproject. Perhaps it should also - # remove references from this pbxproject that are no longer present in - # other_pbxproject. Perhaps it should update various properties if they - # change. - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - - other_fileref = target._properties['productReference'] - if product_group.GetChildByRemoteObject(other_fileref) is None: - # Xcode sets remoteInfo to the name of the target and not the name - # of its product, despite this proxy being a reference to the product. - container_item = PBXContainerItemProxy({ - 'containerPortal': project_ref, - 'proxyType': 2, - 'remoteGlobalIDString': other_fileref, - 'remoteInfo': target.Name() - }) - # TODO(mark): Does sourceTree get copied straight over from the other - # project? Can the other project ever have lastKnownFileType here - # instead of explicitFileType? (Use it if so?) Can path ever be - # unset? (I don't think so.) Can other_fileref have name set, and - # does it impact the PBXReferenceProxy if so? These are the questions - # that perhaps will be answered one day. 
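A concrete example of the path fix-up above: both project files record their paths relative to a common base directory, so the stored reference has to be re-expressed relative to this project's own directory (plus any projectDirPath). A standalone sketch using posixpath.relpath as a stand-in for gyp.common.RelativePath:

import posixpath

def reference_path(this_project_path, other_project_path, project_dir_path=''):
    """Path this project should store to refer to the other project file."""
    this_dir = posixpath.dirname(this_project_path)
    if project_dir_path:
        if posixpath.isabs(project_dir_path[0]):   # mirrors the check above
            this_dir = project_dir_path
        else:
            this_dir = posixpath.join(this_dir, project_dir_path)
    return posixpath.relpath(other_project_path, this_dir or '.')

assert (reference_path('out/app/app.xcodeproj', 'out/base/base.xcodeproj')
        == '../base/base.xcodeproj')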
- reference_proxy = PBXReferenceProxy({ - 'fileType': other_fileref._properties['explicitFileType'], - 'path': other_fileref._properties['path'], - 'sourceTree': other_fileref._properties['sourceTree'], - 'remoteRef': container_item, - }) - - product_group.AppendChild(reference_proxy) - - def SortRemoteProductReferences(self): - # For each remote project file, sort the associated ProductGroup in the - # same order that the targets are sorted in the remote project file. This - # is the sort order used by Xcode. - - def CompareProducts(x, y, remote_products): - # x and y are PBXReferenceProxy objects. Go through their associated - # PBXContainerItem to get the remote PBXFileReference, which will be - # present in the remote_products list. - x_remote = x._properties['remoteRef']._properties['remoteGlobalIDString'] - y_remote = y._properties['remoteRef']._properties['remoteGlobalIDString'] - x_index = remote_products.index(x_remote) - y_index = remote_products.index(y_remote) - - # Use the order of each remote PBXFileReference in remote_products to - # determine the sort order. - return cmp(x_index, y_index) - - for other_pbxproject, ref_dict in self._other_pbxprojects.iteritems(): - # Build up a list of products in the remote project file, ordered the - # same as the targets that produce them. - remote_products = [] - for target in other_pbxproject._properties['targets']: - if not isinstance(target, PBXNativeTarget): - continue - remote_products.append(target._properties['productReference']) - - # Sort the PBXReferenceProxy children according to the list of remote - # products. - product_group = ref_dict['ProductGroup'] - product_group._properties['children'] = sorted( - product_group._properties['children'], - cmp=lambda x, y: CompareProducts(x, y, remote_products)) - - -class XCProjectFile(XCObject): - _schema = XCObject._schema.copy() - _schema.update({ - 'archiveVersion': [0, int, 0, 1, 1], - 'classes': [0, dict, 0, 1, {}], - 'objectVersion': [0, int, 0, 1, 45], - 'rootObject': [0, PBXProject, 1, 1], - }) - - def SetXcodeVersion(self, version): - version_to_object_version = { - '2.4': 45, - '3.0': 45, - '3.1': 45, - '3.2': 46, - } - if not version in version_to_object_version: - supported_str = ', '.join(sorted(version_to_object_version.keys())) - raise Exception( - 'Unsupported Xcode version %s (supported: %s)' % - ( version, supported_str ) ) - compatibility_version = 'Xcode %s' % version - self._properties['rootObject'].SetProperty('compatibilityVersion', - compatibility_version) - self.SetProperty('objectVersion', version_to_object_version[version]); - - def ComputeIDs(self, recursive=True, overwrite=True, hash=None): - # Although XCProjectFile is implemented here as an XCObject, it's not a - # proper object in the Xcode sense, and it certainly doesn't have its own - # ID. Pass through an attempt to update IDs to the real root object. - if recursive: - self._properties['rootObject'].ComputeIDs(recursive, overwrite, hash) - - def Print(self, file=sys.stdout): - self.VerifyHasRequiredProperties() - - # Add the special "objects" property, which will be caught and handled - # separately during printing. This structure allows a fairly standard - # loop do the normal printing. 
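Putting the pieces together, a typical end-to-end use of XCProjectFile looks roughly like the sketch below, assuming the gyp.xcodeproj_file import path; per the table above, SetXcodeVersion('3.2') maps to objectVersion 46 and compatibilityVersion 'Xcode 3.2':

import sys
import gyp.xcodeproj_file as xf   # assumption: gyp/pylib is on sys.path

project = xf.PBXProject(path='sample.xcodeproj')
project_file = xf.XCProjectFile({'rootObject': project})

project_file.SetXcodeVersion('3.2')   # objectVersion 46, 'Xcode 3.2'
project_file.ComputeIDs()             # assign deterministic object IDs
project_file.Print(sys.stdout)        # emit the project.pbxproj text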
- self._properties['objects'] = {} - self._XCPrint(file, 0, '// !$*UTF8*$!\n') - if self._should_print_single_line: - self._XCPrint(file, 0, '{ ') - else: - self._XCPrint(file, 0, '{\n') - for property, value in sorted(self._properties.iteritems(), - cmp=lambda x, y: cmp(x, y)): - if property == 'objects': - self._PrintObjects(file) - else: - self._XCKVPrint(file, 1, property, value) - self._XCPrint(file, 0, '}\n') - del self._properties['objects'] - - def _PrintObjects(self, file): - if self._should_print_single_line: - self._XCPrint(file, 0, 'objects = {') - else: - self._XCPrint(file, 1, 'objects = {\n') - - objects_by_class = {} - for object in self.Descendants(): - if object == self: - continue - class_name = object.__class__.__name__ - if not class_name in objects_by_class: - objects_by_class[class_name] = [] - objects_by_class[class_name].append(object) - - for class_name in sorted(objects_by_class): - self._XCPrint(file, 0, '\n') - self._XCPrint(file, 0, '/* Begin ' + class_name + ' section */\n') - for object in sorted(objects_by_class[class_name], - cmp=lambda x, y: cmp(x.id, y.id)): - object.Print(file) - self._XCPrint(file, 0, '/* End ' + class_name + ' section */\n') - - if self._should_print_single_line: - self._XCPrint(file, 0, '}; ') - else: - self._XCPrint(file, 1, '};\n') diff -Nru mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xml_fix.py mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xml_fix.py --- mozc-1.11.1502.102/third_party/gyp/pylib/gyp/xml_fix.py 2011-11-28 16:07:19.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylib/gyp/xml_fix.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,69 +0,0 @@ -# Copyright (c) 2011 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Applies a fix to CR LF TAB handling in xml.dom. - -Fixes this: http://code.google.com/p/chromium/issues/detail?id=76293 -Working around this: http://bugs.python.org/issue5752 -TODO(bradnelson): Consider dropping this when we drop XP support. -""" - - -import xml.dom.minidom - - -def _Replacement_write_data(writer, data, is_attrib=False): - """Writes datachars to writer.""" - data = data.replace("&", "&").replace("<", "<") - data = data.replace("\"", """).replace(">", ">") - if is_attrib: - data = data.replace( - "\r", " ").replace( - "\n", " ").replace( - "\t", " ") - writer.write(data) - - -def _Replacement_writexml(self, writer, indent="", addindent="", newl=""): - # indent = current indentation - # addindent = indentation to add to higher levels - # newl = newline string - writer.write(indent+"<" + self.tagName) - - attrs = self._get_attributes() - a_names = attrs.keys() - a_names.sort() - - for a_name in a_names: - writer.write(" %s=\"" % a_name) - _Replacement_write_data(writer, attrs[a_name].value, is_attrib=True) - writer.write("\"") - if self.childNodes: - writer.write(">%s" % newl) - for node in self.childNodes: - node.writexml(writer, indent + addindent, addindent, newl) - writer.write("%s%s" % (indent, self.tagName, newl)) - else: - writer.write("/>%s" % newl) - - -class XmlFix(object): - """Object to manage temporary patching of xml.dom.minidom.""" - - def __init__(self): - # Preserve current xml.dom.minidom functions. - self.write_data = xml.dom.minidom._write_data - self.writexml = xml.dom.minidom.Element.writexml - # Inject replacement versions of a function and a method. 
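XmlFix above is a small save/patch/restore monkey-patching idiom around xml.dom.minidom. A generic standalone sketch of the same shape, with a do-nothing wrapper standing in for the real replacement that escapes CR, LF and TAB in attribute values:

import io
import xml.dom.minidom as minidom

# Save the original, inject a replacement, restore afterwards: the same
# save/patch/restore shape that XmlFix applies to _write_data and
# Element.writexml.
_original_writexml = minidom.Element.writexml

def _patched_writexml(self, writer, indent='', addindent='', newl=''):
    # Placeholder replacement for illustration; simply defers to the original.
    return _original_writexml(self, writer, indent, addindent, newl)

minidom.Element.writexml = _patched_writexml
try:
    doc = minidom.parseString('<keyboard label="a b"/>')
    doc.documentElement.writexml(io.StringIO())
finally:
    minidom.Element.writexml = _original_writexml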
- xml.dom.minidom._write_data = _Replacement_write_data - xml.dom.minidom.Element.writexml = _Replacement_writexml - - def Cleanup(self): - if self.write_data: - xml.dom.minidom._write_data = self.write_data - xml.dom.minidom.Element.writexml = self.writexml - self.write_data = None - - def __del__(self): - self.Cleanup() diff -Nru mozc-1.11.1502.102/third_party/gyp/pylintrc mozc-1.11.1522.102/third_party/gyp/pylintrc --- mozc-1.11.1502.102/third_party/gyp/pylintrc 2011-11-28 16:31:34.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/pylintrc 1970-01-01 00:00:00.000000000 +0000 @@ -1,307 +0,0 @@ -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=yes - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). -# C0103: Invalid name "NN" (should match [a-z_][a-z0-9_]{2,30}$) -# C0111: Missing docstring -# C0302: Too many lines in module (NN) -# R0902: Too many instance attributes (N/7) -# R0903: Too few public methods (N/2) -# R0904: Too many public methods (NN/20) -# R0912: Too many branches (NN/12) -# R0913: Too many arguments (N/5) -# R0914: Too many local variables (NN/15) -# R0915: Too many statements (NN/50) -# W0141: Used builtin function 'map' -# W0142: Used * or ** magic -# W0232: Class has no __init__ method -# W0511: TODO -# W0603: Using the global statement -# -# These should be enabled eventually: -# C0112: Empty docstring -# C0301: Line too long (NN/80) -# C0321: More than one statement on single line -# C0322: Operator not preceded by a space -# C0323: Operator not followed by a space -# C0324: Comma not followed by a space -# E0101: Explicit return in __init__ -# E0102: function already defined line NN -# E1002: Use of super on an old style class -# E1101: Instance of 'XX' has no 'YY' member -# E1103: Instance of 'XX' has no 'XX' member (but some types could not be inferred) -# E0602: Undefined variable 'XX' -# F0401: Unable to import 'XX' -# R0201: Method could be a function -# R0801: Similar lines in N files -# W0102: Dangerous default value {} as argument -# W0104: Statement seems to have no effect -# W0105: String statement has no effect -# W0108: Lambda may not be necessary -# W0201: Attribute 'XX' defined outside __init__ -# W0212: Access to a protected member XX of a client class -# W0221: Arguments number differs from overridden method -# W0223: Method 'XX' is abstract in class 'YY' but is not overridden -# W0231: __init__ method from base class 'XX' is not called -# W0301: Unnecessary semicolon -# W0311: Bad indentation. 
Found NN spaces, expected NN -# W0401: Wildcard import XX -# W0402: Uses of a deprecated module 'string' -# W0403: Relative import 'XX', should be 'YY.XX' -# W0404: Reimport 'XX' (imported line NN) -# W0601: Global variable 'XX' undefined at the module level -# W0602: Using global for 'XX' but no assignment is done -# W0611: Unused import pprint -# W0612: Unused variable 'XX' -# W0613: Unused argument 'XX' -# W0614: Unused import XX from wildcard import -# W0621: Redefining name 'XX' from outer scope (line NN) -# W0622: Redefining built-in 'NN' -# W0631: Using possibly undefined loop variable 'XX' -# W0701: Raising a string exception -# W0702: No exception type(s) specified -disable=C0103,C0111,C0302,R0902,R0903,R0904,R0912,R0913,R0914,R0915,W0141,W0142,W0232,W0511,W0603,C0112,C0301,C0321,C0322,C0323,C0324,E0101,E0102,E1002,E1101,E1103,E0602,F0401,R0201,R0801,W0102,W0104,W0105,W0108,W0201,W0212,W0221,W0223,W0231,W0301,W0311,W0401,W0402,W0403,W0404,W0601,W0602,W0611,W0612,W0613,W0614,W0621,W0622,W0631,W0701,W0702 - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html -output-format=text - -# Include message's id in output -include-ids=yes - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=no - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (RP0004). -comment=no - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# A regular expression matching the beginning of the name of dummy variables -# (i.e. not used). -dummy-variables-rgx=_|dummy - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins= - - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# When zope mode is activated, add a predefined set of Zope acquired attributes -# to generated-members. -zope=no - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,XXX,TODO - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. 
-ignore-docstrings=yes - - -[FORMAT] - -# Maximum number of characters on a single line. -max-line-length=80 - -# Maximum number of lines in a module -max-module-lines=1000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-z0-9_]{2,30}$ - -# Regular expression which should only match correct list comprehension / -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,k,ex,Run,_ - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Regular expression which should only match functions or classes name which do -# not require a docstring -no-docstring-rgx=__.*__ - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.* - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -max-branchs=12 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - - -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. -ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. 
-valid-classmethod-first-arg=cls - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,string,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=Exception diff -Nru mozc-1.11.1502.102/third_party/gyp/samples/samples mozc-1.11.1522.102/third_party/gyp/samples/samples --- mozc-1.11.1502.102/third_party/gyp/samples/samples 2009-10-22 20:02:37.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/samples/samples 1970-01-01 00:00:00.000000000 +0000 @@ -1,81 +0,0 @@ -#!/usr/bin/python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -import os.path -import shutil -import sys - - -gyps = [ - 'app/app.gyp', - 'base/base.gyp', - 'build/temp_gyp/googleurl.gyp', - 'build/all.gyp', - 'build/common.gypi', - 'build/external_code.gypi', - 'chrome/test/security_tests/security_tests.gyp', - 'chrome/third_party/hunspell/hunspell.gyp', - 'chrome/chrome.gyp', - 'media/media.gyp', - 'net/net.gyp', - 'printing/printing.gyp', - 'sdch/sdch.gyp', - 'skia/skia.gyp', - 'testing/gmock.gyp', - 'testing/gtest.gyp', - 'third_party/bzip2/bzip2.gyp', - 'third_party/icu38/icu38.gyp', - 'third_party/libevent/libevent.gyp', - 'third_party/libjpeg/libjpeg.gyp', - 'third_party/libpng/libpng.gyp', - 'third_party/libxml/libxml.gyp', - 'third_party/libxslt/libxslt.gyp', - 'third_party/lzma_sdk/lzma_sdk.gyp', - 'third_party/modp_b64/modp_b64.gyp', - 'third_party/npapi/npapi.gyp', - 'third_party/sqlite/sqlite.gyp', - 'third_party/zlib/zlib.gyp', - 'v8/tools/gyp/v8.gyp', - 'webkit/activex_shim/activex_shim.gyp', - 'webkit/activex_shim_dll/activex_shim_dll.gyp', - 'webkit/build/action_csspropertynames.py', - 'webkit/build/action_cssvaluekeywords.py', - 'webkit/build/action_jsconfig.py', - 'webkit/build/action_makenames.py', - 'webkit/build/action_maketokenizer.py', - 'webkit/build/action_useragentstylesheets.py', - 'webkit/build/rule_binding.py', - 'webkit/build/rule_bison.py', - 'webkit/build/rule_gperf.py', - 'webkit/tools/test_shell/test_shell.gyp', - 'webkit/webkit.gyp', -] - - -def Main(argv): - if len(argv) != 3 or argv[1] not in ['push', 'pull']: - print 'Usage: %s push/pull PATH_TO_CHROME' % argv[0] - return 1 - - path_to_chrome = argv[2] - - for g in gyps: - chrome_file = os.path.join(path_to_chrome, g) - local_file = os.path.join(os.path.dirname(argv[0]), os.path.split(g)[1]) - if argv[1] == 'push': - print 'Copying %s to %s' % (local_file, chrome_file) - shutil.copyfile(local_file, chrome_file) - elif argv[1] == 'pull': - print 'Copying %s to %s' % (chrome_file, local_file) - shutil.copyfile(chrome_file, local_file) - else: - assert False - - return 0 - - -if __name__ == '__main__': - sys.exit(Main(sys.argv)) diff -Nru mozc-1.11.1502.102/third_party/gyp/samples/samples.bat mozc-1.11.1522.102/third_party/gyp/samples/samples.bat --- mozc-1.11.1502.102/third_party/gyp/samples/samples.bat 2009-10-22 20:02:37.000000000 +0000 +++ 
mozc-1.11.1522.102/third_party/gyp/samples/samples.bat 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -@rem Copyright (c) 2009 Google Inc. All rights reserved. -@rem Use of this source code is governed by a BSD-style license that can be -@rem found in the LICENSE file. - -@python %~dp0/samples %* diff -Nru mozc-1.11.1502.102/third_party/gyp/setup.py mozc-1.11.1522.102/third_party/gyp/setup.py --- mozc-1.11.1502.102/third_party/gyp/setup.py 2010-01-08 14:58:07.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/setup.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,26 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2009 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -from distutils.core import setup -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.command.install_scripts import install_scripts - -setup( - name='gyp', - version='0.1', - description='Generate Your Projects', - author='Chromium Authors', - author_email='chromium-dev@googlegroups.com', - url='http://code.google.com/p/gyp', - package_dir = {'': 'pylib'}, - packages=['gyp', 'gyp.generator'], - - scripts = ['gyp'], - cmdclass = {'install': install, - 'install_lib': install_lib, - 'install_scripts': install_scripts}, -) diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/README mozc-1.11.1522.102/third_party/gyp/tools/README --- mozc-1.11.1502.102/third_party/gyp/tools/README 2009-02-09 19:15:51.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,15 +0,0 @@ -pretty_vcproj: - Usage: pretty_vcproj.py "c:\path\to\vcproj.vcproj" [key1=value1] [key2=value2] - - They key/value pair are used to resolve vsprops name. - - For example, if I want to diff the base.vcproj project: - - pretty_vcproj.py z:\dev\src-chrome\src\base\build\base.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > orignal.txt - pretty_vcproj.py z:\dev\src-chrome\src\base\base_gyp.vcproj "$(SolutionDir)=z:\dev\src-chrome\src\chrome\\" "$(CHROMIUM_BUILD)=" "$(CHROME_BUILD_TYPE)=" > gyp.txt - - And you can use your favorite diff tool to see the changes. - - Note: In the case of base.vcproj, the original vcproj is one level up the generated one. - I suggest you do a search and replace for '"..\' and replace it with '"' in original.txt - before you perform the diff. \ No newline at end of file diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/Xcode/README mozc-1.11.1522.102/third_party/gyp/tools/Xcode/README --- mozc-1.11.1502.102/third_party/gyp/tools/Xcode/README 2011-09-08 16:16:33.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/Xcode/README 1970-01-01 00:00:00.000000000 +0000 @@ -1,5 +0,0 @@ -Specifications contains syntax formatters for Xcode 3. These do not appear to be supported yet on Xcode 4. To use these with Xcode 3 please install both the gyp.pbfilespec and gyp.xclangspec files in - -~/Library/Application Support/Developer/Shared/Xcode/Specifications/ - -and restart Xcode. 
\ No newline at end of file diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/Xcode/Specifications/gyp.pbfilespec mozc-1.11.1522.102/third_party/gyp/tools/Xcode/Specifications/gyp.pbfilespec --- mozc-1.11.1502.102/third_party/gyp/tools/Xcode/Specifications/gyp.pbfilespec 2011-09-08 16:16:33.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/Xcode/Specifications/gyp.pbfilespec 1970-01-01 00:00:00.000000000 +0000 @@ -1,27 +0,0 @@ -/* - gyp.pbfilespec - GYP source file spec for Xcode 3 - - There is not much documentation available regarding the format - of .pbfilespec files. As a starting point, see for instance the - outdated documentation at: - http://maxao.free.fr/xcode-plugin-interface/specifications.html - and the files in: - /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ - - Place this file in directory: - ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ -*/ - -( - { - Identifier = sourcecode.gyp; - BasedOn = sourcecode; - Name = "GYP Files"; - Extensions = ("gyp", "gypi"); - MIMETypes = ("text/gyp"); - Language = "xcode.lang.gyp"; - IsTextFile = YES; - IsSourceFile = YES; - } -) diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/Xcode/Specifications/gyp.xclangspec mozc-1.11.1522.102/third_party/gyp/tools/Xcode/Specifications/gyp.xclangspec --- mozc-1.11.1502.102/third_party/gyp/tools/Xcode/Specifications/gyp.xclangspec 2011-09-08 16:16:33.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/Xcode/Specifications/gyp.xclangspec 1970-01-01 00:00:00.000000000 +0000 @@ -1,226 +0,0 @@ -/* - Copyright (c) 2011 Google Inc. All rights reserved. - Use of this source code is governed by a BSD-style license that can be - found in the LICENSE file. - - gyp.xclangspec - GYP language specification for Xcode 3 - - There is not much documentation available regarding the format - of .xclangspec files. As a starting point, see for instance the - outdated documentation at: - http://maxao.free.fr/xcode-plugin-interface/specifications.html - and the files in: - /Developer/Library/PrivateFrameworks/XcodeEdit.framework/Versions/A/Resources/ - - Place this file in directory: - ~/Library/Application Support/Developer/Shared/Xcode/Specifications/ -*/ - -( - - { - Identifier = "xcode.lang.gyp.keyword"; - Syntax = { - Words = ( - "and", - "or", - " (caar gyp-parse-history) target-point) - (setq gyp-parse-history (cdr gyp-parse-history)))) - -(defun gyp-parse-point () - "The point of the last parse state added by gyp-parse-to." - (caar gyp-parse-history)) - -(defun gyp-parse-sections () - "A list of section symbols holding at the last parse state point." - (cdar gyp-parse-history)) - -(defun gyp-inside-dictionary-p () - "Predicate returning true if the parser is inside a dictionary." - (not (eq (cadar gyp-parse-history) 'list))) - -(defun gyp-add-parse-history (point sections) - "Add parse state SECTIONS to the parse history at POINT so that parsing can be - resumed instantly." - (while (>= (caar gyp-parse-history) point) - (setq gyp-parse-history (cdr gyp-parse-history))) - (setq gyp-parse-history (cons (cons point sections) gyp-parse-history))) - -(defun gyp-parse-to (target-point) - "Parses from (point) to TARGET-POINT adding the parse state information to - gyp-parse-state-history. Parsing stops if TARGET-POINT is reached or if a - string literal has been parsed. Returns nil if no further parsing can be - done, otherwise returns the position of the start of a parsed string, leaving - the point at the end of the string." 
- (let ((parsing t) - string-start) - (while parsing - (setq string-start nil) - ;; Parse up to a character that starts a sexp, or if the nesting - ;; level decreases. - (let ((state (parse-partial-sexp (gyp-parse-point) - target-point - -1 - t)) - (sections (gyp-parse-sections))) - (if (= (nth 0 state) -1) - (setq sections (cdr sections)) ; pop out a level - (cond ((looking-at-p "['\"]") ; a string - (setq string-start (point)) - (forward-sexp 1) - (if (gyp-inside-dictionary-p) - ;; Look for sections inside a dictionary - (let ((section (gyp-section-name - (buffer-substring-no-properties - (+ 1 string-start) - (- (point) 1))))) - (setq sections (cons section (cdr sections))))) - ;; Stop after the string so it can be fontified. - (setq target-point (point))) - ((looking-at-p "{") - ;; Inside a dictionary. Increase nesting. - (forward-char 1) - (setq sections (cons 'unknown sections))) - ((looking-at-p "\\[") - ;; Inside a list. Increase nesting - (forward-char 1) - (setq sections (cons 'list sections))) - ((not (eobp)) - ;; other - (forward-char 1)))) - (gyp-add-parse-history (point) sections) - (setq parsing (< (point) target-point)))) - string-start)) - -(defun gyp-section-at-point () - "Transform the last parse state, which is a list of nested sections and return - the section symbol that should be used to determine font-lock information for - the string. Can return nil indicating the string should not have any attached - section." - (let ((sections (gyp-parse-sections))) - (cond - ((eq (car sections) 'conditions) - ;; conditions can occur in a variables section, but we still want to - ;; highlight it as a keyword. - nil) - ((and (eq (car sections) 'list) - (eq (cadr sections) 'list)) - ;; conditions and sources can have items in [[ ]] - (caddr sections)) - (t (cadr sections))))) - -(defun gyp-section-match (limit) - "Parse from (point) to LIMIT returning by means of match data what was - matched. The group of the match indicates what style font-lock should apply. - See also `gyp-add-font-lock-keywords'." - (gyp-invalidate-parse-states-after (point)) - (let ((group nil) - (string-start t)) - (while (and (< (point) limit) - (not group) - string-start) - (setq string-start (gyp-parse-to limit)) - (if string-start - (setq group (case (gyp-section-at-point) - ('dependencies 1) - ('variables 2) - ('conditions 2) - ('sources 3) - ('defines 4) - (nil nil))))) - (if group - (progn - ;; Set the match data to indicate to the font-lock mechanism the - ;; highlighting to be performed. - (set-match-data (append (list string-start (point)) - (make-list (* (1- group) 2) nil) - (list (1+ string-start) (1- (point))))) - t)))) - -;;; Please see http://code.google.com/p/gyp/wiki/GypLanguageSpecification for -;;; canonical list of keywords. -(defun gyp-add-font-lock-keywords () - "Add gyp-mode keywords to font-lock mechanism." - ;; TODO(jknotten): Move all the keyword highlighting into gyp-section-match - ;; so that we can do the font-locking in a single font-lock pass. 
- (font-lock-add-keywords - nil - (list - ;; Top-level keywords - (list (concat "['\"]\\(" - (regexp-opt (list "action" "action_name" "actions" "cflags" - "conditions" "configurations" "copies" "defines" - "dependencies" "destination" - "direct_dependent_settings" - "export_dependent_settings" "extension" "files" - "include_dirs" "includes" "inputs" "libraries" - "link_settings" "mac_bundle" "message" - "msvs_external_rule" "outputs" "product_name" - "process_outputs_as_sources" "rules" "rule_name" - "sources" "suppress_wildcard" - "target_conditions" "target_defaults" - "target_defines" "target_name" "toolsets" - "targets" "type" "variables" "xcode_settings")) - "[!/+=]?\\)") 1 'font-lock-keyword-face t) - ;; Type of target - (list (concat "['\"]\\(" - (regexp-opt (list "loadable_module" "static_library" - "shared_library" "executable" "none")) - "\\)") 1 'font-lock-type-face t) - (list "\\(?:target\\|action\\)_name['\"]\\s-*:\\s-*['\"]\\([^ '\"]*\\)" 1 - 'font-lock-function-name-face t) - (list 'gyp-section-match - (list 1 'font-lock-function-name-face t t) ; dependencies - (list 2 'font-lock-variable-name-face t t) ; variables, conditions - (list 3 'font-lock-constant-face t t) ; sources - (list 4 'font-lock-preprocessor-face t t)) ; preprocessor - ;; Variable expansion - (list "<@?(\\([^\n )]+\\))" 1 'font-lock-variable-name-face t) - ;; Command expansion - (list " "%s"' % (src, dst) - - print '}' - - -def main(): - if len(sys.argv) < 2: - print >>sys.stderr, __doc__ - print >>sys.stderr - print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) - return 1 - - edges = LoadEdges('dump.json', sys.argv[1:]) - - WriteGraph(edges) - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/pretty_gyp.py mozc-1.11.1522.102/third_party/gyp/tools/pretty_gyp.py --- mozc-1.11.1502.102/third_party/gyp/tools/pretty_gyp.py 2012-05-25 21:36:14.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/pretty_gyp.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,155 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Pretty-prints the contents of a GYP file.""" - -import sys -import re - - -# Regex to remove comments when we're counting braces. -COMMENT_RE = re.compile(r'\s*#.*') - -# Regex to remove quoted strings when we're counting braces. -# It takes into account quoted quotes, and makes sure that the quotes match. -# NOTE: It does not handle quotes that span more than one line, or -# cases where an escaped quote is preceeded by an escaped backslash. -QUOTE_RE_STR = r'(?P[\'"])(.*?)(? 0: - after = True - - # This catches the special case of a closing brace having something - # other than just whitespace ahead of it -- we don't want to - # unindent that until after this line is printed so it stays with - # the previous indentation level. - if cnt < 0 and closing_prefix_re.match(stripline): - after = True - return (cnt, after) - - -def prettyprint_input(lines): - """Does the main work of indenting the input based on the brace counts.""" - indent = 0 - basic_offset = 2 - last_line = "" - for line in lines: - if COMMENT_RE.match(line): - print line - else: - line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix. 
- if len(line) > 0: - (brace_diff, after) = count_braces(line) - if brace_diff != 0: - if after: - print " " * (basic_offset * indent) + line - indent += brace_diff - else: - indent += brace_diff - print " " * (basic_offset * indent) + line - else: - print " " * (basic_offset * indent) + line - else: - print "" - last_line = line - - -def main(): - if len(sys.argv) > 1: - data = open(sys.argv[1]).read().splitlines() - else: - data = sys.stdin.read().splitlines() - # Split up the double braces. - lines = split_double_braces(data) - - # Indent and print the output. - prettyprint_input(lines) - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/pretty_sln.py mozc-1.11.1522.102/third_party/gyp/tools/pretty_sln.py --- mozc-1.11.1502.102/third_party/gyp/tools/pretty_sln.py 2012-05-22 21:06:31.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/pretty_sln.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,168 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Prints the information in a sln file in a diffable way. - - It first outputs each projects in alphabetical order with their - dependencies. - - Then it outputs a possible build order. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import re -import sys -import pretty_vcproj - -def BuildProject(project, built, projects, deps): - # if all dependencies are done, we can build it, otherwise we try to build the - # dependency. - # This is not infinite-recursion proof. - for dep in deps[project]: - if dep not in built: - BuildProject(dep, built, projects, deps) - print project - built.append(project) - -def ParseSolution(solution_file): - # All projects, their clsid and paths. - projects = dict() - - # A list of dependencies associated with a project. - dependencies = dict() - - # Regular expressions that matches the SLN format. - # The first line of a project definition. - begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942' - '}"\) = "(.*)", "(.*)", "(.*)"$')) - # The last line of a project definition. - end_project = re.compile('^EndProject$') - # The first line of a dependency list. - begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$') - # The last line of a dependency list. - end_dep = re.compile('EndProjectSection$') - # A line describing a dependency. - dep_line = re.compile(' *({.*}) = ({.*})$') - - in_deps = False - solution = open(solution_file) - for line in solution: - results = begin_project.search(line) - if results: - # Hack to remove icu because the diff is too different. - if results.group(1).find('icu') != -1: - continue - # We remove "_gyp" from the names because it helps to diff them. - current_project = results.group(1).replace('_gyp', '') - projects[current_project] = [results.group(2).replace('_gyp', ''), - results.group(3), - results.group(2)] - dependencies[current_project] = [] - continue - - results = end_project.search(line) - if results: - current_project = None - continue - - results = begin_dep.search(line) - if results: - in_deps = True - continue - - results = end_dep.search(line) - if results: - in_deps = False - continue - - results = dep_line.search(line) - if results and in_deps and current_project: - dependencies[current_project].append(results.group(1)) - continue - - # Change all dependencies clsid to name instead. 
- for project in dependencies: - # For each dependencies in this project - new_dep_array = [] - for dep in dependencies[project]: - # Look for the project name matching this cldis - for project_info in projects: - if projects[project_info][1] == dep: - new_dep_array.append(project_info) - dependencies[project] = sorted(new_dep_array) - - return (projects, dependencies) - -def PrintDependencies(projects, deps): - print "---------------------------------------" - print "Dependencies for all projects" - print "---------------------------------------" - print "-- --" - - for (project, dep_list) in sorted(deps.items()): - print "Project : %s" % project - print "Path : %s" % projects[project][0] - if dep_list: - for dep in dep_list: - print " - %s" % dep - print "" - - print "-- --" - -def PrintBuildOrder(projects, deps): - print "---------------------------------------" - print "Build order " - print "---------------------------------------" - print "-- --" - - built = [] - for (project, _) in sorted(deps.items()): - if project not in built: - BuildProject(project, built, projects, deps) - - print "-- --" - -def PrintVCProj(projects): - - for project in projects: - print "-------------------------------------" - print "-------------------------------------" - print project - print project - print project - print "-------------------------------------" - print "-------------------------------------" - - project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]), - projects[project][2])) - - pretty = pretty_vcproj - argv = [ '', - project_path, - '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]), - ] - argv.extend(sys.argv[3:]) - pretty.main(argv) - -def main(): - # check if we have exactly 1 parameter. - if len(sys.argv) < 2: - print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0] - return 1 - - (projects, deps) = ParseSolution(sys.argv[1]) - PrintDependencies(projects, deps) - PrintBuildOrder(projects, deps) - - if '--recursive' in sys.argv: - PrintVCProj(projects) - return 0 - - -if __name__ == '__main__': - sys.exit(main()) diff -Nru mozc-1.11.1502.102/third_party/gyp/tools/pretty_vcproj.py mozc-1.11.1522.102/third_party/gyp/tools/pretty_vcproj.py --- mozc-1.11.1502.102/third_party/gyp/tools/pretty_vcproj.py 2012-05-22 21:06:31.000000000 +0000 +++ mozc-1.11.1522.102/third_party/gyp/tools/pretty_vcproj.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,329 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Google Inc. All rights reserved. -# Use of this source code is governed by a BSD-style license that can be -# found in the LICENSE file. - -"""Make the format of a vcproj really pretty. - - This script normalize and sort an xml. It also fetches all the properties - inside linked vsprops and include them explicitly in the vcproj. - - It outputs the resulting xml to stdout. -""" - -__author__ = 'nsylvain (Nicolas Sylvain)' - -import os -import sys - -from xml.dom.minidom import parse -from xml.dom.minidom import Node - -REPLACEMENTS = dict() -ARGUMENTS = None - - -class CmpTuple(object): - """Compare function between 2 tuple.""" - def __call__(self, x, y): - return cmp(x[0], y[0]) - - -class CmpNode(object): - """Compare function between 2 xml nodes.""" - - def __call__(self, x, y): - def get_string(node): - node_string = "node" - node_string += node.nodeName - if node.nodeValue: - node_string += node.nodeValue - - if node.attributes: - # We first sort by name, if present. 
- node_string += node.getAttribute("Name") - - all_nodes = [] - for (name, value) in node.attributes.items(): - all_nodes.append((name, value)) - - all_nodes.sort(CmpTuple()) - for (name, value) in all_nodes: - node_string += name - node_string += value - - return node_string - - return cmp(get_string(x), get_string(y)) - - -def PrettyPrintNode(node, indent=0): - if node.nodeType == Node.TEXT_NODE: - if node.data.strip(): - print '%s%s' % (' '*indent, node.data.strip()) - return - - if node.childNodes: - node.normalize() - # Get the number of attributes - attr_count = 0 - if node.attributes: - attr_count = node.attributes.length - - # Print the main tag - if attr_count == 0: - print '%s<%s>' % (' '*indent, node.nodeName) - else: - print '%s<%s' % (' '*indent, node.nodeName) - - all_attributes = [] - for (name, value) in node.attributes.items(): - all_attributes.append((name, value)) - all_attributes.sort(CmpTuple()) - for (name, value) in all_attributes: - print '%s %s="%s"' % (' '*indent, name, value) - print '%s>' % (' '*indent) - if node.nodeValue: - print '%s %s' % (' '*indent, node.nodeValue) - - for sub_node in node.childNodes: - PrettyPrintNode(sub_node, indent=indent+2) - print '%s' % (' '*indent, node.nodeName) - - -def FlattenFilter(node): - """Returns a list of all the node and sub nodes.""" - node_list = [] - - if (node.attributes and - node.getAttribute('Name') == '_excluded_files'): - # We don't add the "_excluded_files" filter. - return [] - - for current in node.childNodes: - if current.nodeName == 'Filter': - node_list.extend(FlattenFilter(current)) - else: - node_list.append(current) - - return node_list - - -def FixFilenames(filenames, current_directory): - new_list = [] - for filename in filenames: - if filename: - for key in REPLACEMENTS: - filename = filename.replace(key, REPLACEMENTS[key]) - os.chdir(current_directory) - filename = filename.strip('"\' ') - if filename.startswith('$'): - new_list.append(filename) - else: - new_list.append(os.path.abspath(filename)) - return new_list - - -def AbsoluteNode(node): - """Makes all the properties we know about in this node absolute.""" - if node.attributes: - for (name, value) in node.attributes.items(): - if name in ['InheritedPropertySheets', 'RelativePath', - 'AdditionalIncludeDirectories', - 'IntermediateDirectory', 'OutputDirectory', - 'AdditionalLibraryDirectories']: - # We want to fix up these paths - path_list = value.split(';') - new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1])) - node.setAttribute(name, ';'.join(new_list)) - if not value: - node.removeAttribute(name) - - -def CleanupVcproj(node): - """For each sub node, we call recursively this function.""" - for sub_node in node.childNodes: - AbsoluteNode(sub_node) - CleanupVcproj(sub_node) - - # Normalize the node, and remove all extranous whitespaces. - for sub_node in node.childNodes: - if sub_node.nodeType == Node.TEXT_NODE: - sub_node.data = sub_node.data.replace("\r", "") - sub_node.data = sub_node.data.replace("\n", "") - sub_node.data = sub_node.data.rstrip() - - # Fix all the semicolon separated attributes to be sorted, and we also - # remove the dups. 
- if node.attributes: - for (name, value) in node.attributes.items(): - sorted_list = sorted(value.split(';')) - unique_list = [] - for i in sorted_list: - if not unique_list.count(i): - unique_list.append(i) - node.setAttribute(name, ';'.join(unique_list)) - if not value: - node.removeAttribute(name) - - if node.childNodes: - node.normalize() - - # For each node, take a copy, and remove it from the list. - node_array = [] - while node.childNodes and node.childNodes[0]: - # Take a copy of the node and remove it from the list. - current = node.childNodes[0] - node.removeChild(current) - - # If the child is a filter, we want to append all its children - # to this same list. - if current.nodeName == 'Filter': - node_array.extend(FlattenFilter(current)) - else: - node_array.append(current) - - - # Sort the list. - node_array.sort(CmpNode()) - - # Insert the nodes in the correct order. - for new_node in node_array: - # But don't append empty tool node. - if new_node.nodeName == 'Tool': - if new_node.attributes and new_node.attributes.length == 1: - # This one was empty. - continue - if new_node.nodeName == 'UserMacro': - continue - node.appendChild(new_node) - - -def GetConfiguationNodes(vcproj): - #TODO(nsylvain): Find a better way to navigate the xml. - nodes = [] - for node in vcproj.childNodes: - if node.nodeName == "Configurations": - for sub_node in node.childNodes: - if sub_node.nodeName == "Configuration": - nodes.append(sub_node) - - return nodes - - -def GetChildrenVsprops(filename): - dom = parse(filename) - if dom.documentElement.attributes: - vsprops = dom.documentElement.getAttribute('InheritedPropertySheets') - return FixFilenames(vsprops.split(';'), os.path.dirname(filename)) - return [] - -def SeekToNode(node1, child2): - # A text node does not have properties. - if child2.nodeType == Node.TEXT_NODE: - return None - - # Get the name of the current node. - current_name = child2.getAttribute("Name") - if not current_name: - # There is no name. We don't know how to merge. - return None - - # Look through all the nodes to find a match. - for sub_node in node1.childNodes: - if sub_node.nodeName == child2.nodeName: - name = sub_node.getAttribute("Name") - if name == current_name: - return sub_node - - # No match. We give up. - return None - - -def MergeAttributes(node1, node2): - # No attributes to merge? - if not node2.attributes: - return - - for (name, value2) in node2.attributes.items(): - # Don't merge the 'Name' attribute. - if name == 'Name': - continue - value1 = node1.getAttribute(name) - if value1: - # The attribute exist in the main node. If it's equal, we leave it - # untouched, otherwise we concatenate it. - if value1 != value2: - node1.setAttribute(name, ';'.join([value1, value2])) - else: - # The attribute does nto exist in the main node. We append this one. - node1.setAttribute(name, value2) - - # If the attribute was a property sheet attributes, we remove it, since - # they are useless. - if name == 'InheritedPropertySheets': - node1.removeAttribute(name) - - -def MergeProperties(node1, node2): - MergeAttributes(node1, node2) - for child2 in node2.childNodes: - child1 = SeekToNode(node1, child2) - if child1: - MergeProperties(child1, child2) - else: - node1.appendChild(child2.cloneNode(True)) - - -def main(argv): - """Main function of this vcproj prettifier.""" - global ARGUMENTS - ARGUMENTS = argv - - # check if we have exactly 1 parameter. 
- if len(argv) < 2: - print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] ' - '[key2=value2]' % argv[0]) - return 1 - - # Parse the keys - for i in range(2, len(argv)): - (key, value) = argv[i].split('=') - REPLACEMENTS[key] = value - - # Open the vcproj and parse the xml. - dom = parse(argv[1]) - - # First thing we need to do is find the Configuration Node and merge them - # with the vsprops they include. - for configuration_node in GetConfiguationNodes(dom.documentElement): - # Get the property sheets associated with this configuration. - vsprops = configuration_node.getAttribute('InheritedPropertySheets') - - # Fix the filenames to be absolute. - vsprops_list = FixFilenames(vsprops.strip().split(';'), - os.path.dirname(argv[1])) - - # Extend the list of vsprops with all vsprops contained in the current - # vsprops. - for current_vsprops in vsprops_list: - vsprops_list.extend(GetChildrenVsprops(current_vsprops)) - - # Now that we have all the vsprops, we need to merge them. - for current_vsprops in vsprops_list: - MergeProperties(configuration_node, - parse(current_vsprops).documentElement) - - # Now that everything is merged, we need to cleanup the xml. - CleanupVcproj(dom.documentElement) - - # Finally, we use the prett xml function to print the vcproj back to the - # user. - #print dom.toprettyxml(newl="\n") - PrettyPrintNode(dom.documentElement) - return 0 - - -if __name__ == '__main__': - sys.exit(main(sys.argv)) diff -Nru mozc-1.11.1502.102/unix/ibus/gen_mozc_xml.py mozc-1.11.1522.102/unix/ibus/gen_mozc_xml.py --- mozc-1.11.1502.102/unix/ibus/gen_mozc_xml.py 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/gen_mozc_xml.py 2013-08-28 05:25:59.000000000 +0000 @@ -46,10 +46,8 @@ # a product name, 'Mozc' or 'Google Japanese Input'. IBUS_COMPONENT_PROPS = { 'name': 'com.google.IBus.Mozc', - 'description': '%s Component', - # TODO(yusukes): Support Linux distributions other than Gentoo/ChromeOS. - # For example, Ubuntu uses /usr/lib/ibus-mozc/. - 'exec': '/usr/libexec/ibus-engine-mozc --ibus', + 'description': '%(product_name)s Component', + 'exec': '%(ibus_mozc_path)s --ibus', # TODO(mazda): Generate the version number. 'version': '0.0.0.0', 'author': 'Google Inc.', @@ -60,17 +58,17 @@ # Information to generate part of mozc.xml. IBUS_ENGINE_COMMON_PROPS = { - 'description': '%s (Japanese Input Method)', + 'description': '%(product_name)s (Japanese Input Method)', 'language': 'ja', - 'icon': '/usr/share/ibus-mozc/product_icon.png', + 'icon': '%(ibus_mozc_icon_path)s', 'rank': '80', } # Information to generate part of mozc.xml for IBus 1.5 or later. IBUS_1_5_ENGINE_COMMON_PROPS = { - 'description': '%s (Japanese Input Method)', + 'description': '%(product_name)s (Japanese Input Method)', 'language': 'ja', - 'icon': '/usr/share/ibus-mozc/product_icon.png', + 'icon': '%(ibus_mozc_icon_path)s', 'rank': '80', 'symbol': 'あ', } @@ -83,7 +81,7 @@ # DO NOT change the engine name 'mozc-jp'. The names is referenced by # unix/ibus/mozc_engine.cc. 'name': ['mozc-jp'], - 'longname': ['%s'], + 'longname': ['%(product_name)s'], 'layout': ['jp'], }, # On Linux (IBus >= 1.5), we use special label 'default' for the keyboard @@ -92,7 +90,7 @@ # DO NOT change the engine name 'mozc-jp'. The names is referenced by # unix/ibus/mozc_engine.cc. 'name': ['mozc-jp'], - 'longname': ['%s'], + 'longname': ['%(product_name)s'], 'layout': ['default'], }, # On Chrome/Chromium OS, we provide three engines. @@ -100,8 +98,9 @@ # DO NOT change the engine name 'mozc-jp'. 
The names is referenced by # unix/ibus/mozc_engine.cc. 'name': ['mozc-jp', 'mozc', 'mozc-dv'], - 'longname': ['%s (Japanese keyboard layout)', '%s (US keyboard layout)', - '%s (US Dvorak keyboard layout)'], + 'longname': ['%(product_name)s (Japanese keyboard layout)', + '%(product_name)s (US keyboard layout)', + '%(product_name)s (US Dvorak keyboard layout)'], 'layout': ['jp', 'us', 'us(dvorak)'], }, } @@ -123,22 +122,16 @@ #endif // %s""" -def EmbedProductName(product_name, format_string): - if format_string.find('%s') != -1: - return format_string % product_name - return format_string +def OutputXmlElement(param_dict, element_name, value): + print ' <%s>%s' % (element_name, (value % param_dict), element_name) -def OutputXmlElement(product_name, element_name, value): - print ' <%s>%s' % (element_name, EmbedProductName(product_name, value), - element_name) - - -def OutputXml(product_name, component, engine_common, engines, setup_arg): +def OutputXml(param_dict, component, engine_common, engines, setup_arg): """Outputs a XML data for ibus-daemon. Args: - product_name: 'Mozc' or 'Google Japanese Input' + param_dict: A dictionary to embed options into output string. + For example, {'product_name': 'Mozc'}. component: A dictionary from a property name to a property value of the ibus-mozc component. For example, {'name': 'com.google.IBus.Mozc'}. engine_common: A dictionary from a property name to a property value that @@ -148,31 +141,31 @@ """ print '' for key in component: - OutputXmlElement(product_name, key, component[key]) + OutputXmlElement(param_dict, key, component[key]) print '' for i in range(len(engines['name'])): print '' for key in engine_common: - OutputXmlElement(product_name, key, engine_common[key]) + OutputXmlElement(param_dict, key, engine_common[key]) if setup_arg: - OutputXmlElement(product_name, 'setup', ' '.join(setup_arg)) + OutputXmlElement(param_dict, 'setup', ' '.join(setup_arg)) for key in engines: - OutputXmlElement(product_name, key, engines[key][i]) + OutputXmlElement(param_dict, key, engines[key][i]) print '' print '' print '' -def OutputCppVariable(product_name, prefix, variable_name, value): +def OutputCppVariable(param_dict, prefix, variable_name, value): print 'const char k%s%s[] = "%s";' % (prefix, variable_name.capitalize(), - EmbedProductName(product_name, value)) + (value % param_dict)) -def OutputCpp(product_name, component, engine_common, engines): +def OutputCpp(param_dict, component, engine_common, engines): """Outputs a C++ header file for mozc/unix/ibus/main.cc. Args: - product_name: see OutputXml. + param_dict: see OutputXml. component: ditto. engine_common: ditto. engines: ditto. 
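A minimal sketch (editor's illustration, not part of the patch) of the keyed substitution that replaces the old positional '%s' formatting in gen_mozc_xml.py: the dictionary keys mirror the param_dict built in main(), and the values are examples taken from the non-ChromeOS branch of ibus.gyp.

  # Keys mirror param_dict in gen_mozc_xml.py; values are example settings
  # from ibus.gyp and are illustrative only.
  param_dict = {
      'product_name': 'Mozc',
      'ibus_mozc_path': '/usr/lib/ibus-mozc/ibus-engine-mozc',
      'ibus_mozc_icon_path': '/usr/share/ibus-mozc/product_icon.png',
  }

  templates = [
      '%(product_name)s (Japanese Input Method)',
      '%(ibus_mozc_path)s --ibus',
      '%(ibus_mozc_icon_path)s',
      'ja',  # strings without placeholders pass through unchanged
  ]

  for template in templates:
    print template % param_dict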
@@ -180,13 +173,13 @@ guard_name = 'MOZC_UNIX_IBUS_MAIN_H_' print CPP_HEADER % (guard_name, guard_name) for key in component: - OutputCppVariable(product_name, 'Component', key, component[key]) + OutputCppVariable(param_dict, 'Component', key, component[key]) for key in engine_common: - OutputCppVariable(product_name, 'Engine', key, engine_common[key]) + OutputCppVariable(param_dict, 'Engine', key, engine_common[key]) for key in engines: print 'const char* kEngine%sArray[] = {' % key.capitalize() for i in range(len(engines[key])): - print '"%s",' % EmbedProductName(product_name, engines[key][i]) + print '"%s",' % (engines[key][i] % param_dict) print '};' print 'const size_t kEngineArrayLen = %s;' % len(engines['name']) print CPP_FOOTER % guard_name @@ -216,6 +209,10 @@ parser.add_option('--branding', dest='branding', default=None, help='GoogleJapaneseInput for the ChromeOS official build. ' 'Otherwise, Mozc.') + parser.add_option('--ibus_mozc_path', dest='ibus_mozc_path', default='', + help='The absolute path of ibus_mozc executable.') + parser.add_option('--ibus_mozc_icon_path', dest='ibus_mozc_icon_path', + default='', help='The absolute path of ibus_mozc icon.') parser.add_option('--server_dir', dest='server_dir', default='', help='The absolute directory path to be installed the ' 'server executable.') @@ -235,15 +232,16 @@ platform = 'Linux-IBus1.5' common_props = IBUS_1_5_ENGINE_COMMON_PROPS + param_dict = {'product_name': PRODUCT_NAMES[options.branding], + 'ibus_mozc_path': options.ibus_mozc_path, + 'ibus_mozc_icon_path': options.ibus_mozc_icon_path} + if options.output_cpp: - OutputCpp(PRODUCT_NAMES[options.branding], IBUS_COMPONENT_PROPS, - common_props, + OutputCpp(param_dict, IBUS_COMPONENT_PROPS, common_props, IBUS_ENGINES_PROPS[platform]) else: - OutputXml(PRODUCT_NAMES[options.branding], IBUS_COMPONENT_PROPS, - common_props, - IBUS_ENGINES_PROPS[platform], - setup_arg) + OutputXml(param_dict, IBUS_COMPONENT_PROPS, common_props, + IBUS_ENGINES_PROPS[platform], setup_arg) return 0 if __name__ == '__main__': diff -Nru mozc-1.11.1502.102/unix/ibus/ibus.gyp mozc-1.11.1522.102/unix/ibus/ibus.gyp --- mozc-1.11.1502.102/unix/ibus/ibus.gyp 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/ibus.gyp 2013-08-28 05:25:59.000000000 +0000 @@ -53,6 +53,13 @@ '../../usage_stats/usage_stats_base.gyp:usage_stats', ], 'conditions': [ + ['target_platform=="ChromeOS"', { + 'ibus_mozc_icon_path': '/usr/share/ibus-mozc/product_icon.png', + 'ibus_mozc_path': '/usr/libexec/ibus-engine-mozc', + }, { # else + 'ibus_mozc_icon_path%': '/usr/share/ibus-mozc/product_icon.png', + 'ibus_mozc_path%': '/usr/lib/ibus-mozc/ibus-engine-mozc', + }], # enable_x11_selection_monitor represents if ibus_mozc uses X11 selection # monitor or not. 
['target_platform=="Linux" and language=="japanese"', { @@ -114,6 +121,8 @@ '--platform=ChromeOS', '--branding=GoogleJapaneseInput', '--pkg_config_command=<(pkg_config_command)', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ['target_platform=="ChromeOS" and branding!="GoogleJapaneseInput"', { @@ -124,6 +133,8 @@ '--platform=ChromeOS', '--branding=Mozc', '--pkg_config_command=<(pkg_config_command)', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ['target_platform!="ChromeOS"', { @@ -135,6 +146,8 @@ '--branding=Mozc', '--server_dir=<(server_dir)', '--pkg_config_command=<(pkg_config_command)', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ], @@ -255,7 +268,9 @@ './gen_mozc_xml.py', '--platform=ChromeOS', '--branding=GoogleJapaneseInput', - '--output_cpp' + '--output_cpp', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ['target_platform=="ChromeOS" and branding!="GoogleJapaneseInput"', { @@ -265,7 +280,9 @@ './gen_mozc_xml.py', '--platform=ChromeOS', '--branding=Mozc', - '--output_cpp' + '--output_cpp', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ['target_platform!="ChromeOS"', { @@ -275,7 +292,9 @@ './gen_mozc_xml.py', '--platform=Linux', '--branding=Mozc', - '--output_cpp' + '--output_cpp', + '--ibus_mozc_path=<(ibus_mozc_path)', + '--ibus_mozc_icon_path=<(ibus_mozc_icon_path)', ], }], ], diff -Nru mozc-1.11.1502.102/unix/ibus/mozc_engine.h mozc-1.11.1522.102/unix/ibus/mozc_engine.h --- mozc-1.11.1502.102/unix/ibus/mozc_engine.h 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/mozc_engine.h 2013-08-28 05:25:59.000000000 +0000 @@ -170,7 +170,6 @@ // TODO(nona): Introduce CandidateWindowHandlerManager to avoid direct access. scoped_ptr gtk_candidate_window_handler_; scoped_ptr ibus_candidate_window_handler_; - vector prop_switch_properties_; config::Config::PreeditMethod preedit_method_; // Unique IDs of candidates that are currently shown. diff -Nru mozc-1.11.1502.102/unix/ibus/mozc_engine_property.cc mozc-1.11.1522.102/unix/ibus/mozc_engine_property.cc --- mozc-1.11.1502.102/unix/ibus/mozc_engine_property.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/mozc_engine_property.cc 2013-08-28 05:25:59.000000000 +0000 @@ -134,9 +134,6 @@ const commands::CompositionMode kMozcEngineInitialCompositionMode = commands::HIRAGANA; -const MozcEngineSwitchProperty *kMozcEngineSwitchProperties = NULL; -const size_t kMozcEngineSwitchPropertiesSize = 0; - const MozcEngineToolProperty *kMozcEngineToolProperties = &kMozcEngineToolPropertiesArray[0]; const size_t kMozcEngineToolPropertiesSize = diff -Nru mozc-1.11.1502.102/unix/ibus/mozc_engine_property.h mozc-1.11.1522.102/unix/ibus/mozc_engine_property.h --- mozc-1.11.1502.102/unix/ibus/mozc_engine_property.h 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/mozc_engine_property.h 2013-08-28 05:25:59.000000000 +0000 @@ -54,19 +54,6 @@ extern const commands::CompositionMode kMozcEngineInitialCompositionMode; -struct MozcEngineSwitchProperty { - // Specifies the command id to trigger. - commands::SessionCommand::LanguageBarCommandId id; - const char *key; // IBus property key. - const char *label; // text for the menu. - const char *icon; // icon. - const char *tooltip; // tooltip. -}; - -// This pointer should be NULL when properties size is 0. 
-extern const MozcEngineSwitchProperty *kMozcEngineSwitchProperties; -extern const size_t kMozcEngineSwitchPropertiesSize; - struct MozcEngineToolProperty { const char *key; // IBus property key for the MozcTool. const char *mode; // command line passed as --mode= diff -Nru mozc-1.11.1502.102/unix/ibus/property_handler.cc mozc-1.11.1522.102/unix/ibus/property_handler.cc --- mozc-1.11.1502.102/unix/ibus/property_handler.cc 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/property_handler.cc 2013-08-28 05:25:59.000000000 +0000 @@ -74,7 +74,6 @@ is_activated_(true) { AppendCompositionPropertyToPanel(); - AppendSwitchPropertyToPanel(); #ifndef OS_CHROMEOS AppendToolPropertyToPanel(); #endif @@ -97,12 +96,6 @@ prop_mozc_tool_ = NULL; } - for (size_t i = 0; i < prop_switch_properties_.size(); ++i) { - // The ref counter will drop to one. - g_object_unref(prop_switch_properties_[i]); - } - prop_switch_properties_.clear(); - if (prop_root_) { // Destroy all objects under the root. g_object_unref(prop_root_); @@ -235,41 +228,6 @@ ibus_prop_list_append(prop_root_, prop_mozc_tool_); } -// TODO(nona): do not use kMozcEngine*** directory. -void PropertyHandler::AppendSwitchPropertyToPanel() { - if (kMozcEngineSwitchProperties == NULL || - kMozcEngineSwitchPropertiesSize == 0) { - return; - } - - for (size_t i = 0; i < kMozcEngineSwitchPropertiesSize; ++i) { - const MozcEngineSwitchProperty &entry = kMozcEngineSwitchProperties[i]; - IBusText *label = ibus_text_new_from_string( - translator_->MaybeTranslate(entry.label).c_str()); - IBusText *tooltip = ibus_text_new_from_string( - translator_->MaybeTranslate(entry.tooltip).c_str()); - IBusProperty *item = ibus_property_new(entry.key, - PROP_TYPE_NORMAL, - label, - GetIconPath(entry.icon).c_str(), - tooltip, - TRUE, - TRUE, - PROP_STATE_UNCHECKED, - NULL); - g_object_set_data(G_OBJECT(item), kGObjectDataKey, (gpointer)&entry); - prop_switch_properties_.push_back(item); - - // We have to sink |*item| here so ibus_engine_update_property() call in - // PropertyActivate() does not destruct the object. 
- g_object_ref_sink(item); - } - - for (size_t i = 0; i < prop_switch_properties_.size(); ++i) { - ibus_prop_list_append(prop_root_, prop_switch_properties_[i]); - } -} - void PropertyHandler::Update(IBusEngine *engine, const commands::Output &output) { if (output.has_status() && @@ -388,25 +346,6 @@ } #endif - for (size_t i = 0; i < prop_switch_properties_.size(); ++i) { - IBusProperty *prop = prop_switch_properties_[i]; - if (!g_strcmp0(property_name, ibus_property_get_key(prop))) { - const MozcEngineSwitchProperty *entry = - reinterpret_cast( - g_object_get_data(G_OBJECT(prop), kGObjectDataKey)); - DCHECK(entry->id); - commands::Output output; - commands::SessionCommand command; - command.set_language_bar_command_id(entry->id); - command.set_type(commands::SessionCommand::SEND_LANGUAGE_BAR_COMMAND); - if (!client_->SendCommand(command, &output)) { - LOG(ERROR) << "cannot send command to update session config: " - << entry->id; - } - return; - } - } - if (property_state != PROP_STATE_CHECKED) { return; } diff -Nru mozc-1.11.1502.102/unix/ibus/property_handler.h mozc-1.11.1522.102/unix/ibus/property_handler.h --- mozc-1.11.1502.102/unix/ibus/property_handler.h 2013-07-17 02:37:50.000000000 +0000 +++ mozc-1.11.1522.102/unix/ibus/property_handler.h 2013-08-28 05:25:59.000000000 +0000 @@ -67,7 +67,6 @@ // Appends tool properties into panel void AppendToolPropertyToPanel(); // Appends switch properties into panel - void AppendSwitchPropertyToPanel(); void UpdateCompositionModeIcon( IBusEngine* engine, const commands::CompositionMode new_composition_mode); void SetCompositionMode(IBusEngine *engine, @@ -76,7 +75,6 @@ IBusPropList *prop_root_; IBusProperty *prop_composition_mode_; IBusProperty *prop_mozc_tool_; - vector prop_switch_properties_; client::ClientInterface *client_; scoped_ptr translator_; commands::CompositionMode original_composition_mode_; diff -Nru mozc-1.11.1502.102/usage_stats/upload_util.cc mozc-1.11.1522.102/usage_stats/upload_util.cc --- mozc-1.11.1502.102/usage_stats/upload_util.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/upload_util.cc 2013-08-28 05:26:13.000000000 +0000 @@ -38,12 +38,15 @@ namespace { const char kStatServerAddress[] = "http://clients4.google.com/tbproxy/usagestats"; +const char kStatServerSecureAddress[] = + "https://clients4.google.com/tbproxy/usagestats"; const char kStatServerSourceId[] = "sourceid=ime"; const char kStatServerAddedSendHeader[] = "Content-Type: application/x-www-form-urlencoded"; } // namespace -UploadUtil::UploadUtil() { +UploadUtil::UploadUtil() + : use_https_(false) { } UploadUtil::~UploadUtil() { @@ -62,6 +65,10 @@ optional_url_params_ = optional_url_params; } +void UploadUtil::SetUseHttps(bool use_https) { + use_https_ = use_https; +} + void UploadUtil::AddCountValue(const string &name, uint32 count) { string encoded_name; Util::EncodeURI(name, &encoded_name); @@ -113,7 +120,9 @@ bool UploadUtil::Upload() { DCHECK(!stat_header_.empty()); const string header_values = stat_header_ + stat_values_; - string url = string(kStatServerAddress) + "?" + string(kStatServerSourceId); + string url = + string(use_https_ ? kStatServerSecureAddress : kStatServerAddress) + + "?" 
+ string(kStatServerSourceId); if (!optional_url_params_.empty()) { url.append("&"); Util::AppendCGIParams(optional_url_params_, &url); diff -Nru mozc-1.11.1502.102/usage_stats/upload_util.h mozc-1.11.1522.102/usage_stats/upload_util.h --- mozc-1.11.1502.102/usage_stats/upload_util.h 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/upload_util.h 2013-08-28 05:26:13.000000000 +0000 @@ -62,6 +62,9 @@ int elapsed_sec, const vector > &optional_url_params); + // Enables using HTTPS. Default is false. + void SetUseHttps(bool use_https); + // Adds count data. // &:c= void AddCountValue(const string &name, uint32 count); @@ -89,6 +92,7 @@ string stat_header_; string stat_values_; vector > optional_url_params_; + bool use_https_; DISALLOW_COPY_AND_ASSIGN(UploadUtil); }; } // namespace mozc::usage_stats diff -Nru mozc-1.11.1502.102/usage_stats/upload_util_test.cc mozc-1.11.1522.102/usage_stats/upload_util_test.cc --- mozc-1.11.1502.102/usage_stats/upload_util_test.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/upload_util_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -127,5 +127,34 @@ EXPECT_TRUE(uploader.Upload()); } } + +TEST(UploadUtilTest, UploadSecureTest) { + HTTPClientMock client; + HTTPClient::SetHTTPClientHandler(&client); + const string base_url = "https://clients4.google.com/tbproxy/usagestats"; + { + HTTPClientMock::Result result; + result.expected_url = base_url + "?sourceid=ime&hl=ja&v=test"; + result.expected_request = "Test&100&Count:c=100"; + client.set_result(result); + + UploadUtil uploader; + vector > params; + params.push_back(make_pair("hl", "ja")); + params.push_back(make_pair("v", "test")); + uploader.SetHeader("Test", 100, params); + uploader.AddCountValue("Count", 100); + uploader.SetUseHttps(true); + EXPECT_TRUE(uploader.Upload()); + + uploader.RemoveAllValues(); + result.expected_request = "Test&100"; + client.set_result(result); + EXPECT_TRUE(uploader.Upload()); + + uploader.AddCountValue("Count", 1000); + EXPECT_FALSE(uploader.Upload()); + } +} } // namespace usage_stats } // namespace mozc diff -Nru mozc-1.11.1502.102/usage_stats/usage_stats_testing_util.cc mozc-1.11.1522.102/usage_stats/usage_stats_testing_util.cc --- mozc-1.11.1502.102/usage_stats/usage_stats_testing_util.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/usage_stats_testing_util.cc 2013-08-28 05:26:13.000000000 +0000 @@ -68,10 +68,10 @@ case Stats::TIMING: type_string = "Timing"; value_string = string() + - "num:" + NumberUtil::SimpleItoa(stats.num_timings()) + " " + - "total:" + NumberUtil::SimpleItoa(stats.total_time()) + " " + - "min:" + NumberUtil::SimpleItoa(stats.min_time()) + " " + - "max:" + NumberUtil::SimpleItoa(stats.max_time()); + "num:" + NumberUtil::SimpleItoa(stats.num_timings()) + " total:" + + NumberUtil::SimpleItoa(static_cast(stats.total_time())) + + " min:" + NumberUtil::SimpleItoa(stats.min_time()) + + " max:" + NumberUtil::SimpleItoa(stats.max_time()); break; case Stats::VIRTUAL_KEYBOARD: type_string = "Virtual Keyboard"; diff -Nru mozc-1.11.1502.102/usage_stats/usage_stats_uploader.cc mozc-1.11.1522.102/usage_stats/usage_stats_uploader.cc --- mozc-1.11.1502.102/usage_stats/usage_stats_uploader.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/usage_stats_uploader.cc 2013-08-28 05:26:13.000000000 +0000 @@ -359,6 +359,11 @@ UploadUtil uploader; uploader.SetHeader("Daily", elapsed_sec, params); +#ifdef __native_client__ + // In NaCl Mozc we use HTTPS to send usage stats to follow 
Chrome OS + // convention. https://code.google.com/p/chromium/issues/detail?id=255327 + uploader.SetUseHttps(true); +#endif // __native_client__ LoadStats(&uploader); // Just check for confirming that we can insert the value to upload_key. diff -Nru mozc-1.11.1502.102/usage_stats/usage_stats_uploader_test.cc mozc-1.11.1522.102/usage_stats/usage_stats_uploader_test.cc --- mozc-1.11.1502.102/usage_stats/usage_stats_uploader_test.cc 2013-07-17 02:38:05.000000000 +0000 +++ mozc-1.11.1522.102/usage_stats/usage_stats_uploader_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -127,7 +127,12 @@ const uint32 kOneDaySec = 24 * 60 * 60; // 24 hours const uint32 kHalfDaySec = 12 * 60 * 60; // 12 hours -const char kBaseUrl[] = "http://clients4.google.com/tbproxy/usagestats"; +const char kBaseUrl[] = +#ifdef __native_client__ + "https://clients4.google.com/tbproxy/usagestats"; +#else // __native_client__ + "http://clients4.google.com/tbproxy/usagestats"; +#endif // __native_client__ const char kTestClientId[] = "TestClientId"; const char kCountStatsKey[] = "Commit"; const uint32 kCountStatsDefaultValue = 100; diff -Nru mozc-1.11.1502.102/win32/base/keyevent_handler.cc mozc-1.11.1522.102/win32/base/keyevent_handler.cc --- mozc-1.11.1502.102/win32/base/keyevent_handler.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/base/keyevent_handler.cc 2013-08-28 05:26:13.000000000 +0000 @@ -478,8 +478,11 @@ // Instead of using the actual toggle state of Kana-lock key, an expected // toggle state of the Kana-lock is emulated based on the IME open/close // state and conversion mode. See b/3046717 for details. + // Note that we never set |key_string| when Ctrl key is pressed because + // no valid Kana character will be generated with Ctrl key. See b/9684668. const bool use_kana_input = behavior.prefer_kana_input && ime_state.open && + !keyboard_status_wo_kana_lock.IsPressed(VK_CONTROL) && ((ime_state.logical_conversion_mode & IME_CMODE_NATIVE) == IME_CMODE_NATIVE); diff -Nru mozc-1.11.1502.102/win32/base/keyevent_handler_test.cc mozc-1.11.1522.102/win32/base/keyevent_handler_test.cc --- mozc-1.11.1502.102/win32/base/keyevent_handler_test.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/base/keyevent_handler_test.cc 2013-08-28 05:26:13.000000000 +0000 @@ -1932,6 +1932,7 @@ initial_state.open = true; KeyboardStatus keyboard_status; + keyboard_status.SetState('A', kPressed); const VirtualKey virtual_key = VirtualKey::FromVirtualKey('A'); const LParamKeyInfo lparam(CreateLParam( @@ -2055,6 +2056,86 @@ } TEST_F(KeyEventHandlerTest, + CheckKeyCodeWhenAlphabeticalKeyIsPressedWithCtrlInKanaMode) { + // When a user presses an alphabet key and a control key, keyboard-layout + // drivers produce a control code (0x01,...,0x20), to which the session + // server assigns its own code. This should not be passed to the server + // as a Kana-input character. See b/9684668. + + // Force ImeSwitchUtil to reflect the config. 
+ config::ImeSwitchUtil::Reload(); + const bool kKanaLocked = true; + + Output mock_output; + mock_output.set_consumed(true); + + MockState mock(mock_output); + KeyboardMock keyboard(kKanaLocked); + + InputState next_state; + KeyEventHandlerResult result; + + InputBehavior behavior; + behavior.prefer_kana_input = kKanaLocked; + behavior.disabled = false; + + Context context; + + // Press 'Ctrl+A' + { + InputState initial_state; + initial_state.logical_conversion_mode = + IME_CMODE_NATIVE | IME_CMODE_FULLSHAPE | IME_CMODE_ROMAN; + initial_state.visible_conversion_mode = + initial_state.logical_conversion_mode; + initial_state.open = true; + + KeyboardStatus keyboard_status; + keyboard_status.SetState(VK_CONTROL, kPressed); + keyboard_status.SetState('A', kPressed); + + const VirtualKey virtual_key = VirtualKey::FromVirtualKey('A'); + const LParamKeyInfo lparam(CreateLParam( + 0x0001, // repeat_count + 0x1e, // scan_code + false, // is_extended_key, + false, // has_context_code, + false, // is_previous_state_down, + false)); // is_in_transition_state + EXPECT_EQ(0x1e0001, lparam.lparam()); + + Output output; + result = TestableKeyEventHandler::ImeProcessKey( + virtual_key, lparam.GetScanCode(), lparam.IsKeyDownInImeProcessKey(), + keyboard_status, behavior, initial_state, context, + mock.mutable_client(), &keyboard, &next_state, &output); + + EXPECT_TRUE(result.succeeded); + EXPECT_TRUE(result.should_be_eaten); + EXPECT_TRUE(result.should_be_sent_to_server); + } + + { + commands::Input actual_input; + EXPECT_TRUE(mock.GetGeneratedRequest(&actual_input)); + EXPECT_EQ(commands::Input::TEST_SEND_KEY, actual_input.type()); + EXPECT_TRUE(actual_input.has_key()); + EXPECT_TRUE(actual_input.key().has_key_code()); + EXPECT_EQ('a', actual_input.key().key_code()); + EXPECT_FALSE(actual_input.key().has_key_string()); + EXPECT_TRUE(actual_input.key().has_activated()); + EXPECT_TRUE(actual_input.key().activated()); + EXPECT_TRUE(actual_input.key().has_mode()); + EXPECT_EQ(commands::HIRAGANA, actual_input.key().mode()); + EXPECT_FALSE(actual_input.key().has_modifiers()); + EXPECT_EQ(1, actual_input.key().modifier_keys_size()); + EXPECT_EQ(commands::KeyEvent::CTRL, actual_input.key().modifier_keys(0)); + EXPECT_FALSE(actual_input.key().has_special_key()); + } +} + + +TEST_F(KeyEventHandlerTest, Issue2801503_ModeChangeWhenIMEIsGoingToBeTurnedOff) { const bool kKanaLocked = false; @@ -2654,7 +2735,7 @@ } } -TEST_F(KeyEventHandlerTest, Issue3504241_VKPacketByQuestionKey) { +TEST_F(KeyEventHandlerTest, Issue3504241_VKPacketAsRawInput) { // To fix b/3504241, VK_PACKET must be supported. // Force ImeSwitchUtil to reflect the config. 
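As a rough illustration of the control-code collision that the new CheckKeyCodeWhenAlphabeticalKeyIsPressedWithCtrlInKanaMode test above guards against (an editor's sketch assuming the usual ASCII convention, not code from the patch): with Ctrl held, the keyboard layout reports the letter's control code, so the event has to reach the server as a key code plus a CTRL modifier rather than as a Kana key_string.

  # Sketch: Ctrl combined with a letter yields an ASCII control code
  # (0x01 for Ctrl+A ... 0x1a for Ctrl+Z), which is why such events must
  # not be forwarded as Kana text.
  for letter in ('A', 'Z'):
    control_code = ord(letter) & 0x1f
    print 'Ctrl+%s -> 0x%02x' % (letter, control_code)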
diff -Nru mozc-1.11.1502.102/win32/broker/mozc_broker.exe.manifest mozc-1.11.1522.102/win32/broker/mozc_broker.exe.manifest --- mozc-1.11.1502.102/win32/broker/mozc_broker.exe.manifest 2013-07-17 02:37:44.000000000 +0000 +++ mozc-1.11.1522.102/win32/broker/mozc_broker.exe.manifest 2013-08-28 05:25:53.000000000 +0000 @@ -9,6 +9,8 @@ + + diff -Nru mozc-1.11.1502.102/win32/broker/mozc_broker.gypi mozc-1.11.1522.102/win32/broker/mozc_broker.gypi --- mozc-1.11.1502.102/win32/broker/mozc_broker.gypi 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/broker/mozc_broker.gypi 2013-08-28 05:26:13.000000000 +0000 @@ -58,7 +58,7 @@ 'msvs_settings': { 'VCManifestTool': { 'AdditionalManifestFiles': 'mozc_broker.exe.manifest', - 'EmbedManifest': 'false', + 'EmbedManifest': 'true', }, }, }], diff -Nru mozc-1.11.1502.102/win32/ime/ime.gyp mozc-1.11.1522.102/win32/ime/ime.gyp --- mozc-1.11.1502.102/win32/ime/ime.gyp 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/ime/ime.gyp 2013-08-28 05:26:13.000000000 +0000 @@ -155,6 +155,9 @@ 'VCLinkerTool': { 'BaseAddress': '0x06000000', }, + 'VCManifestTool': { + 'EmbedManifest': 'true', + }, }, }, ] diff -Nru mozc-1.11.1502.102/win32/installer/installer.gyp mozc-1.11.1522.102/win32/installer/installer.gyp --- mozc-1.11.1502.102/win32/installer/installer.gyp 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/installer/installer.gyp 2013-08-28 05:26:13.000000000 +0000 @@ -339,6 +339,34 @@ 'mozc_installers_win', ], }, + { + 'target_name': 'mozc_installers_win_size_check', + 'type': 'none', + 'actions': [ + { + 'action_name': 'mozc_installers_win_size_check', + 'variables': { + 'python_command': 'python', + }, + 'inputs': [ + '<(mozc_32bit_msi)', + '<(mozc_64bit_msi)', + ], + 'outputs': [ + '<(PRODUCT_DIR)/mozc_installers_win_size_check_dummy', + ], + 'action': [ + '<(python_command)', + '../../build_tools/binary_size_checker.py', + '--target_filename', + '<(mozc_32bit_msi),<(mozc_64bit_msi)', + ], + }, + ], + 'dependencies': [ + 'mozc_installers_win', + ], + }, ], }], ], diff -Nru mozc-1.11.1502.102/win32/tip/tip.gyp mozc-1.11.1522.102/win32/tip/tip.gyp --- mozc-1.11.1502.102/win32/tip/tip.gyp 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip.gyp 2013-08-28 05:26:13.000000000 +0000 @@ -174,6 +174,9 @@ 'VCLinkerTool': { 'BaseAddress': '0x06000000', }, + 'VCManifestTool': { + 'EmbedManifest': 'true', + }, }, }, ], diff -Nru mozc-1.11.1502.102/win32/tip/tip_dll_module.cc mozc-1.11.1522.102/win32/tip/tip_dll_module.cc --- mozc-1.11.1502.102/win32/tip/tip_dll_module.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_dll_module.cc 2013-08-28 05:26:13.000000000 +0000 @@ -75,18 +75,11 @@ } } -// Release the global resources attached to this module. -void TipFreeGlobalObjects() { - // Free all singleton instances - SingletonFinalizer::Finalize(); - +void TipShutdownCrashReportHandler() { if (CrashReportHandler::IsInitialized()) { // Uninitialize the breakpad. CrashReportHandler::Uninitialize(); } - - // We intentionaly call google::protobuf::ShutdownProtobufLibrary from - // DllProcessDetachImpl rather than here. See b/2126375 for details. } class ModuleImpl { @@ -101,7 +94,19 @@ static LONG Release() { if (::InterlockedDecrement(&ref_count_) == 0) { if (!in_unit_test_) { - CallOnce(&g_uninitialize_once, TipFreeGlobalObjects); + // |ref_count_| is now decremented to be 0. So our DLL is likely to be + // unloaded soon. 
Here is the good point to release global resources + // that should not be unloaded in DllMain due to the loader lock. + // However, it should also be noted that there is a chance that + // AddRef() is called again and the application continues to use Mozc + // client DLL. Actually we can observe this situation inside + // "Visual Studio 2012 Remote Debugging Monitor" running on Windows 8. + // Thus we must not shut down libraries that cannot be designed to be + // re-initializable. For instance, we must not call following + // functions here. + // - SingletonFinalizer::Finalize() // see b/10233768 + // - google::protobuf::ShutdownProtobufLibrary() // see b/2126375 + CallOnce(&g_uninitialize_once, TipShutdownCrashReportHandler); } } return ref_count_; diff -Nru mozc-1.11.1502.102/win32/tip/tip_edit_session.cc mozc-1.11.1522.102/win32/tip/tip_edit_session.cc --- mozc-1.11.1502.102/win32/tip/tip_edit_session.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_edit_session.cc 2013-08-28 05:26:13.000000000 +0000 @@ -447,8 +447,9 @@ } TipSurroundingTextInfo info; - if (!TipSurroundingText::PrepareForReconversion( - text_service, context, &info)) { + bool need_async_edit_session = false; + if (!TipSurroundingText::PrepareForReconversionFromIme( + text_service, context, &info, &need_async_edit_session)) { return false; } @@ -494,9 +495,11 @@ return false; } - // TSF spec guarantees that reconverting from application can always be a - // synchronous operation. - return TipEditSession::OnOutputReceivedSync(text_service, context, output); + if (need_async_edit_session) { + return TipEditSession::OnOutputReceivedAsync(text_service, context, output); + } else { + return TipEditSession::OnOutputReceivedSync(text_service, context, output); + } } bool UndoCommint(TipTextService *text_service, ITfContext *context) { @@ -629,11 +632,16 @@ DISALLOW_COPY_AND_ASSIGN(SyncEditSessionImpl); }; -} // namespace +enum EditSessionMode { + kDontCare = 0, + kAsync, + kSync, +}; -bool TipEditSession::OnOutputReceivedSync(TipTextService *text_service, - ITfContext *context, - const Output &new_output) { +bool OnOutputReceivedImpl(TipTextService *text_service, + ITfContext *context, + const Output &new_output, + EditSessionMode mode) { if (new_output.has_callback() && new_output.callback().has_session_command() && new_output.callback().session_command().has_type()) { @@ -654,11 +662,27 @@ CComPtr edit_session(new SyncEditSessionImpl( text_service, context, new_output)); + DWORD edit_session_flag = TF_ES_READWRITE; + switch (mode) { + case kAsync: + edit_session_flag |= TF_ES_ASYNC; + break; + case kSync: + edit_session_flag |= TF_ES_SYNC; + break; + case kDontCare: + edit_session_flag |= TF_ES_ASYNCDONTCARE; + break; + default: + DCHECK(false) << "unknown mode: " << mode; + break; + } + HRESULT edit_session_result = S_OK; const HRESULT hr = context->RequestEditSession( text_service->GetClientID(), edit_session, - TF_ES_SYNC | TF_ES_READWRITE, + edit_session_flag, &edit_session_result); if (FAILED(hr)) { return false; @@ -666,6 +690,20 @@ return SUCCEEDED(edit_session_result); } +} // namespace + +bool TipEditSession::OnOutputReceivedSync(TipTextService *text_service, + ITfContext *context, + const Output &new_output) { + return OnOutputReceivedImpl(text_service, context, new_output, kSync); +} + +bool TipEditSession::OnOutputReceivedAsync(TipTextService *text_service, + ITfContext *context, + const Output &new_output) { + return OnOutputReceivedImpl(text_service, context, new_output, kAsync); +} + 
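The OnOutputReceivedImpl refactoring above boils down to choosing the right scheduling flag for ITfContext::RequestEditSession. Below is a minimal sketch of that mapping, assuming only the Windows SDK's msctf.h; EditSessionFlags and the local EditSessionMode enum are illustrative stand-ins, not symbols from the patch.

#include <windows.h>
#include <msctf.h>

enum class EditSessionMode { kDontCare, kAsync, kSync };

// Translates the requested mode into RequestEditSession flags. All three
// variants ask for a read/write session; only the scheduling differs.
DWORD EditSessionFlags(EditSessionMode mode) {
  DWORD flags = TF_ES_READWRITE;
  switch (mode) {
    case EditSessionMode::kSync:
      flags |= TF_ES_SYNC;           // caller blocks until the session runs
      break;
    case EditSessionMode::kAsync:
      flags |= TF_ES_ASYNC;          // session is always queued
      break;
    case EditSessionMode::kDontCare:
      flags |= TF_ES_ASYNCDONTCARE;  // TSF decides sync vs. async
      break;
  }
  return flags;
}

As the header comment later in this patch notes, a synchronous read/write edit session is guaranteed to be available only in a few contexts (the key event handler and ITfFnReconversion::QueryRange), which is why the sync/async decision is threaded through to the callers instead of always requesting TF_ES_SYNC.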
bool TipEditSession::OnLayoutChangedAsync( TipTextService *text_service, ITfContext *context) { return OnLayoutChangedAsyncImpl(text_service, context); @@ -691,10 +729,10 @@ HRESULT edit_session_result = S_OK; const HRESULT hr = context->RequestEditSession( - text_service->GetClientID(), - edit_session, - TF_ES_ASYNCDONTCARE | TF_ES_READ, - &edit_session_result); + text_service->GetClientID(), + edit_session, + TF_ES_ASYNCDONTCARE | TF_ES_READ, + &edit_session_result); if (FAILED(hr)) { return false; } @@ -815,6 +853,13 @@ return OnSessionCommandAsync(text_service, context, session_command); } +bool TipEditSession::CanceleCompositionAsync( + TipTextService *text_service, ITfContext *context) { + SessionCommand command; + command.set_type(SessionCommand::REVERT); + return OnSessionCommandAsync(text_service, context, command); +} + bool TipEditSession::HilightCandidateAsync(TipTextService *text_service, ITfContext *context, int candidate_id) { diff -Nru mozc-1.11.1502.102/win32/tip/tip_edit_session.h mozc-1.11.1522.102/win32/tip/tip_edit_session.h --- mozc-1.11.1502.102/win32/tip/tip_edit_session.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_edit_session.h 2013-08-28 05:26:13.000000000 +0000 @@ -49,12 +49,18 @@ class TipEditSession { public: // Begins a sync edit session with |new_output| to update the context. Note - // that sync edit session is guaranteed to be capable only in key event + // that sync edit session is guaranteed to be capable only in key event // handler and ITfFnReconversion::QueryRange. In other cases, you should use - // an async edit session to update the context. + // OnOutputReceivedAsync instead. static bool OnOutputReceivedSync(TipTextService *text_service, ITfContext *context, const commands::Output &new_output); + + // Begins an async edit session with |new_output| to update the context. + static bool OnOutputReceivedAsync(TipTextService *text_service, + ITfContext *context, + const commands::Output &new_output); + // Begins a sync edit session to invoke reconversion that is initialized by // the application. static bool ReconvertFromApplicationSync(TipTextService *text_service, @@ -77,8 +83,12 @@ ITfContext *context, WPARAM wparam, LPARAM lparam); + // Begins an async edit session to submit the current candidate. static bool SubmitAsync(TipTextService *text_service, ITfContext *context); + // Begins an async edit session to cancel the current composition. + static bool CanceleCompositionAsync(TipTextService *text_service, + ITfContext *context); // Begins an async edit session to highlight the candidate specified by // |candidate_id|. 
static bool HilightCandidateAsync(TipTextService *text_service, diff -Nru mozc-1.11.1502.102/win32/tip/tip_edit_session_impl.cc mozc-1.11.1522.102/win32/tip/tip_edit_session_impl.cc --- mozc-1.11.1502.102/win32/tip/tip_edit_session_impl.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_edit_session_impl.cc 2013-08-28 05:26:13.000000000 +0000 @@ -46,6 +46,7 @@ #include "win32/base/input_state.h" #include "win32/base/string_util.h" #include "win32/tip/tip_composition_util.h" +#include "win32/tip/tip_edit_session.h" #include "win32/tip/tip_input_mode_manager.h" #include "win32/tip/tip_private_context.h" #include "win32/tip/tip_range_util.h" @@ -115,9 +116,9 @@ return result; } -CComPtr CreateCompositioin(TipTextService *text_service, - ITfContext *context, - TfEditCookie write_cookie) { +CComPtr CreateComposition(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie) { CComQIPtr composition_context = context; if (!composition_context) { return nullptr; @@ -153,14 +154,17 @@ // 3. Call ITfComposition::ShiftStart to shrink the composition range. Note // that the text that is pushed out from the composition range is // interpreted as the "committed text". -// See also b/8406545. +// 4. Update the caret position explicitly. Note that some applications +// such as WPF's TextBox do not update the caret position automatically +// when an composition is commited. +// See also b/8406545 and b/9747361. CComPtr CommitText(TipTextService *text_service, ITfContext *context, TfEditCookie write_cookie, CComPtr composition, const Output &output) { if (!composition) { - composition = CreateCompositioin(text_service, context, write_cookie); + composition = CreateComposition(text_service, context, write_cookie); } if (!composition) { return nullptr; @@ -215,6 +219,14 @@ if (FAILED(result)) { return nullptr; } + // We need to update the caret position manually for WPF's TextBox, where + // caret position is not updated automatically when a composition text is + // committed by ITfComposition::ShiftStart. 
+ result = TipRangeUtil::SetSelection( + context, write_cookie, new_composition_start, TF_AE_END); + if (FAILED(result)) { + return nullptr; + } return composition; } @@ -232,13 +244,22 @@ if (FAILED(result)) { return result; } - result = composition_range->SetText(write_cookie, 0, L"", 0); + BOOL is_empty = FALSE; + result = composition_range->IsEmpty(write_cookie, &is_empty); if (FAILED(result)) { return result; } - result = ClearReadingProperties(context, composition_range, write_cookie); - if (FAILED(result)) { - return result; + if (is_empty != TRUE) { + wstring str; + TipRangeUtil::GetText(composition_range, write_cookie, &str); + result = composition_range->SetText(write_cookie, 0, L"", 0); + if (FAILED(result)) { + return result; + } + result = ClearReadingProperties(context, composition_range, write_cookie); + if (FAILED(result)) { + return result; + } } } @@ -265,7 +286,7 @@ if (FAILED(result)) { return result; } - composition = CreateCompositioin(text_service, context, write_cookie); + composition = CreateComposition(text_service, context, write_cookie); } if (!composition) { return E_FAIL; @@ -385,52 +406,60 @@ return result; } -HRESULT DoEditSessionImpl(TipTextService *text_service, - ITfContext *context, - TfEditCookie write_cookie, - const Output &output) { - HRESULT result = S_OK; - +HRESULT UpdatePrivateContext(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + const Output &output) { TipPrivateContext *private_context = text_service->GetPrivateContext(context); - if (private_context != nullptr) { - private_context->mutable_last_output()->CopyFrom(output); - if (output.has_status()) { - const Status &status = output.status(); - TipInputModeManager *input_mode_manager = - text_service->GetThreadContext()->GetInputModeManager(); - const TipInputModeManager::NotifyActionSet action_set = - input_mode_manager->OnReceiveCommand(status.activated(), - status.comeback_mode(), - status.mode()); - if ((action_set & TipInputModeManager::kNotifySystemOpenClose) == - TipInputModeManager::kNotifySystemOpenClose) { - TipStatus::SetIMEOpen(text_service->GetThreadManager(), - text_service->GetClientID(), - input_mode_manager->GetEffectiveOpenClose()); - } - if ((action_set & TipInputModeManager::kNotifySystemConversionMode) == - TipInputModeManager::kNotifySystemConversionMode) { - const CompositionMode mozc_mode = static_cast( - input_mode_manager->GetEffectiveConversionMode()); - uint32 native_mode = 0; - if (ConversionModeUtil::ToNativeMode( - mozc_mode, - private_context->input_behavior().prefer_kana_input, - &native_mode)) { - TipStatus::SetInputModeConversion(text_service->GetThreadManager(), - text_service->GetClientID(), - native_mode); - } - } + if (private_context == nullptr) { + return S_FALSE; + } + private_context->mutable_last_output()->CopyFrom(output); + if (!output.has_status()) { + return S_FALSE; + } + + const Status &status = output.status(); + TipInputModeManager *input_mode_manager = + text_service->GetThreadContext()->GetInputModeManager(); + const TipInputModeManager::NotifyActionSet action_set = + input_mode_manager->OnReceiveCommand(status.activated(), + status.comeback_mode(), + status.mode()); + if ((action_set & TipInputModeManager::kNotifySystemOpenClose) == + TipInputModeManager::kNotifySystemOpenClose) { + TipStatus::SetIMEOpen(text_service->GetThreadManager(), + text_service->GetClientID(), + input_mode_manager->GetEffectiveOpenClose()); + } + + if ((action_set & TipInputModeManager::kNotifySystemConversionMode) == + 
TipInputModeManager::kNotifySystemConversionMode) { + const CompositionMode mozc_mode = static_cast( + input_mode_manager->GetEffectiveConversionMode()); + uint32 native_mode = 0; + if (ConversionModeUtil::ToNativeMode( + mozc_mode, + private_context->input_behavior().prefer_kana_input, + &native_mode)) { + TipStatus::SetInputModeConversion(text_service->GetThreadManager(), + text_service->GetClientID(), + native_mode); } } + return S_OK; +} +HRESULT UpdatePreeditAndComposition(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + const Output &output) { CComPtr composition = CComQIPtr( TipCompositionUtil::GetComposition(context, write_cookie)); // Clear the display attributes first. if (composition) { - result = TipCompositionUtil::ClearDisplayAttributes( + const HRESULT result = TipCompositionUtil::ClearDisplayAttributes( context, composition, write_cookie); if (FAILED(result)) { return result; @@ -450,6 +479,27 @@ text_service, context, composition, write_cookie, output); } +HRESULT DoEditSessionInComposition(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + const Output &output) { + const HRESULT result = UpdatePrivateContext( + text_service, context, write_cookie, output); + if (FAILED(result)) { + return result; + } + return UpdatePreeditAndComposition( + text_service, context, write_cookie, output); +} + +HRESULT DoEditSessionAfterComposition(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + const Output &output) { + return UpdatePrivateContext( + text_service, context, write_cookie, output); +} + HRESULT OnEndEditImpl(TipTextService *text_service, ITfContext *context, TfEditCookie write_cookie, @@ -469,7 +519,7 @@ result = TipRangeUtil::GetDefaultSelection( context, write_cookie, &selection_range, &active_sel_end); if (FAILED(result)) { - return result; + return result; } vector input_scopes; result = TipRangeUtil::GetInputScopes( @@ -487,7 +537,7 @@ } CComPtr composition_view = - TipCompositionUtil::GetComposition(context, write_cookie); + TipCompositionUtil::GetComposition(context, write_cookie); if (!composition_view) { // If there is no composition, nothing to check. return S_OK; @@ -526,9 +576,14 @@ } if (!TipRangeUtil::IsRangeCovered( write_cookie, selected_range, composition_range)) { - if (!TipEditSessionImpl::OnCompositionTerminated( - text_service, context, composition, write_cookie)) { - return E_FAIL; + // We enqueue another edit session to sync the composition state between + // the application and Mozc server because we are already in + // ITfTextEditSink::OnEndEdit and some operations (e.g., + // ITfComposition::EndComposition) result in failure in this edit + // session. + result = TipEditSession::SubmitAsync(text_service, context); + if (FAILED(result)) { + return result; } // Cancels further operations. return S_OK; @@ -542,13 +597,19 @@ } if (is_empty) { // When the composition range is empty, we assume the composition is - // canceled by the application or something. Actually CUAS does this - // when it receives NI_COMPOSITIONSTR/CPS_CANCEL. You can see this as - // Excel's auto-completion. If this happens, send REVERT command to - // the server to keep the state consistent. See b/1793331 for details. - if (!TipEditSessionImpl::OnCompositionReverted( - text_service, context, composition, write_cookie)) { - return E_FAIL; + // canceled by the application or something. Actually CUAS does this when + // it receives NI_COMPOSITIONSTR/CPS_CANCEL. 
You can see this as Excel's + // auto-completion. If this happens, send REVERT command to the server to + // keep the state consistent. See b/1793331 for details. + + // We enqueue another edit session to sync the composition state between + // the application and Mozc server because we are already in + // ITfTextEditSink::OnEndEdit and some operations (e.g., + // ITfComposition::EndComposition) result in failure in this edit session. + result = TipEditSession::CanceleCompositionAsync(text_service, context); + *update_ui = false; + if (FAILED(result)) { + return result; } } return S_OK; @@ -556,74 +617,64 @@ } // namespace -HRESULT TipEditSessionImpl::UpdateContext( - TipTextService *text_service, - ITfContext *context, - TfEditCookie write_cookie, - const commands::Output &output) { - const HRESULT result = DoEditSessionImpl( - text_service, context, write_cookie, output); - UpdateUI(text_service, context, write_cookie); +HRESULT TipEditSessionImpl::OnEndEdit(TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + ITfEditRecord *edit_record) { + bool update_ui = false; + const HRESULT result = OnEndEditImpl( + text_service, context, write_cookie, edit_record, &update_ui); + if (update_ui) { + TipEditSessionImpl::UpdateUI(text_service, context, write_cookie); + } return result; } -bool TipEditSessionImpl::OnCompositionReverted(TipTextService *text_service, - ITfContext *context, - ITfComposition *composition, - TfEditCookie write_cookie) { - // Ignore any error. - TipCompositionUtil::ClearDisplayAttributes( - context, composition, write_cookie); - - TipPrivateContext *private_context = text_service->GetPrivateContext(context); - if (!private_context) { - return false; +HRESULT TipEditSessionImpl::OnCompositionTerminated( + TipTextService *text_service, + ITfContext *context, + ITfComposition *composition, + TfEditCookie write_cookie) { + if (text_service == nullptr) { + return E_FAIL; } - - Output output; - SessionCommand command; - command.set_type(SessionCommand::REVERT); - if (!private_context->GetClient()->SendCommand(command, &output)) { - return false; + if (context == nullptr) { + return E_FAIL; } - return SUCCEEDED(UpdateContext(text_service, context, write_cookie, output)); -} - -bool TipEditSessionImpl::OnCompositionTerminated(TipTextService *text_service, - ITfContext *context, - ITfComposition *composition, - TfEditCookie write_cookie) { - if (!composition) { - return false; + // Clear the display attributes first. + if (composition) { + const HRESULT result = TipCompositionUtil::ClearDisplayAttributes( + context, composition, write_cookie); + if (FAILED(result)) { + return result; + } } - // Ignore any error. 
- TipCompositionUtil::ClearDisplayAttributes( - context, composition, write_cookie); - TipPrivateContext *private_context = text_service->GetPrivateContext(context); - if (!private_context) { - return false; - } - Output output; SessionCommand command; command.set_type(SessionCommand::SUBMIT); + Output output; + TipPrivateContext *private_context = text_service->GetPrivateContext(context); + if (private_context == nullptr) { + return E_FAIL; + } if (!private_context->GetClient()->SendCommand(command, &output)) { - return false; + return E_FAIL; } - return SUCCEEDED(UpdateContext(text_service, context, write_cookie, output)); + const HRESULT result = DoEditSessionAfterComposition( + text_service, context, write_cookie, output); + UpdateUI(text_service, context, write_cookie); + return result; } -HRESULT TipEditSessionImpl::OnEndEdit(TipTextService *text_service, - ITfContext *context, - TfEditCookie write_cookie, - ITfEditRecord *edit_record) { - bool update_ui = false; - const HRESULT result = OnEndEditImpl( - text_service, context, write_cookie, edit_record, &update_ui); - if (update_ui) { - TipEditSessionImpl::UpdateUI(text_service, context, write_cookie); - } +HRESULT TipEditSessionImpl::UpdateContext( + TipTextService *text_service, + ITfContext *context, + TfEditCookie write_cookie, + const commands::Output &output) { + const HRESULT result = DoEditSessionInComposition( + text_service, context, write_cookie, output); + UpdateUI(text_service, context, write_cookie); return result; } diff -Nru mozc-1.11.1502.102/win32/tip/tip_edit_session_impl.h mozc-1.11.1522.102/win32/tip/tip_edit_session_impl.h --- mozc-1.11.1502.102/win32/tip/tip_edit_session_impl.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_edit_session_impl.h 2013-08-28 05:26:13.000000000 +0000 @@ -51,16 +51,11 @@ // TODO(yukawa): Use more descriptive class name. class TipEditSessionImpl { public: - // A high level logic to handle on-composition-reverted event. - static bool OnCompositionReverted(TipTextService *text_service, - ITfContext *context, - ITfComposition *composition, - TfEditCookie write_cookie); // A high level logic to handle on-composition-terminated event. - static bool OnCompositionTerminated(TipTextService *text_service, - ITfContext *context, - ITfComposition *composition, - TfEditCookie write_cookie); + static HRESULT OnCompositionTerminated(TipTextService *text_service, + ITfContext *context, + ITfComposition *composition, + TfEditCookie write_cookie); // Does post-edit status checking for composition (if exists). 
For example, diff -Nru mozc-1.11.1502.102/win32/tip/tip_keyevent_handler.cc mozc-1.11.1522.102/win32/tip/tip_keyevent_handler.cc --- mozc-1.11.1502.102/win32/tip/tip_keyevent_handler.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_keyevent_handler.cc 2013-08-28 05:26:13.000000000 +0000 @@ -41,6 +41,7 @@ #include #include "base/util.h" +#include "client/client_interface.h" #include "session/commands.pb.h" #include "win32/base/conversion_mode_util.h" #include "win32/base/deleter.h" @@ -67,7 +68,8 @@ using mozc::commands::Context; using mozc::commands::Output; using std::unique_ptr; -typedef commands::CompositionMode CompositionMode; +typedef mozc::commands::CompositionMode CompositionMode; +typedef mozc::commands::SessionCommand SessionCommand; namespace { @@ -105,12 +107,6 @@ if (L'A' <= ucs2 && ucs2 <= L'Z') { return VirtualKey::FromVirtualKey(ucs2); } - if (ucs2 == kTouchKeyboardNextPage) { - return VirtualKey::FromVirtualKey(VK_NEXT); - } - if (ucs2 == kTouchKeyboardPreviousPage) { - return VirtualKey::FromVirtualKey(VK_PRIOR); - } // Emulate IME_PROP_ACCEPT_WIDE_VKEY. return VirtualKey::FromCombinedVirtualKey(ucs2 << 16 | VK_PACKET); @@ -247,8 +243,15 @@ DLOG(FATAL) << "this action is not applicable to OnTestKey."; break; } - } + // Handle NextPage/PrevPage button on the on-screen keyboard. + if (key_info.IsKeyDownInImeProcessKey() && + ((vk.wide_char() == kTouchKeyboardNextPage) || + (vk.wide_char() == kTouchKeyboardPreviousPage))) { + *eaten = TRUE; + return S_OK; + } + } // Make an immutable snapshot of |private_context->ime_behavior_|, which // cannot be substituted for by const reference. @@ -412,6 +415,26 @@ // KeyEventHandler::ImeToAsciiEx. temporal_output.CopyFrom( private_context->GetDeleter()->pending_output()); + } else if (open && is_key_down && + (vk.wide_char() == kTouchKeyboardPreviousPage)) { + // Handle PrevPage button on the on-screen keyboard. + SessionCommand command; + command.set_type(SessionCommand::CONVERT_PREV_PAGE); + if (!private_context->GetClient()->SendCommand(command, &temporal_output)) { + *eaten = FALSE; + return E_FAIL; + } + ignore_this_keyevent = false; + } else if (open && is_key_down && + (vk.wide_char() == kTouchKeyboardNextPage)) { + // Handle NextPage button on the on-screen keyboard. + SessionCommand command; + command.set_type(SessionCommand::CONVERT_NEXT_PAGE); + if (!private_context->GetClient()->SendCommand(command, &temporal_output)) { + *eaten = FALSE; + return E_FAIL; + } + ignore_this_keyevent = false; } else { InputBehavior behavior = private_context->input_behavior(); diff -Nru mozc-1.11.1502.102/win32/tip/tip_surrounding_text.cc mozc-1.11.1522.102/win32/tip/tip_surrounding_text.cc --- mozc-1.11.1502.102/win32/tip/tip_surrounding_text.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_surrounding_text.cc 2013-08-28 05:26:13.000000000 +0000 @@ -455,14 +455,19 @@ return true; } -bool TipSurroundingText::PrepareForReconversion( +bool TipSurroundingText::PrepareForReconversionFromIme( TipTextService *text_service, ITfContext *context, - TipSurroundingTextInfo *info) { + TipSurroundingTextInfo *info, + bool *need_async_reconversion) { if (info == nullptr) { return false; } + if (need_async_reconversion == nullptr) { + return false; + } *info = TipSurroundingTextInfo(); + *need_async_reconversion = false; if (PrepareForReconversionTSF(text_service, context, info)) { // Here we assume selection text info is valid iff |info->is_transitory| is // false. 
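The PrepareForReconversionFromIme change above keeps the TSF-native path as the synchronous default and asks for an asynchronous follow-up only when it has to fall back to the IMM32-style path. Below is a self-contained sketch of that out-parameter pattern with purely hypothetical types and helpers (SurroundingInfo, PrepareViaTsf, PrepareViaImm32 are illustrative names, not the mozc ones).

#include <string>

struct SurroundingInfo {
  std::wstring preceding_text;
  bool is_transitory = false;
};

// Placeholder for the TSF-based path; the real code queries the ITfContext.
bool PrepareViaTsf(SurroundingInfo *info) { return false; }
// Placeholder for the IMM32-style fallback path.
bool PrepareViaImm32(SurroundingInfo *info) { return true; }

bool PrepareForReconversion(SurroundingInfo *info, bool *need_async) {
  if (info == nullptr || need_async == nullptr) {
    return false;
  }
  *info = SurroundingInfo();
  *need_async = false;  // the TSF path can be finished synchronously
  if (PrepareViaTsf(info) && !info->is_transitory) {
    return true;
  }
  if (!PrepareViaImm32(info)) {
    return false;
  }
  *need_async = true;   // IMM32-style reconversion needs an async edit session
  return true;
}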
@@ -471,7 +476,12 @@ return true; } } - return PrepareForReconversionIMM32(context, info); + if (!PrepareForReconversionIMM32(context, info)) { + return false; + } + // IMM32-like reconversion requires async edit session. + *need_async_reconversion = true; + return true; } bool TipSurroundingText::DeletePrecedingText( diff -Nru mozc-1.11.1502.102/win32/tip/tip_surrounding_text.h mozc-1.11.1522.102/win32/tip/tip_surrounding_text.h --- mozc-1.11.1502.102/win32/tip/tip_surrounding_text.h 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_surrounding_text.h 2013-08-28 05:26:13.000000000 +0000 @@ -78,10 +78,14 @@ // moves the anchor position of the selection at the end of the range. // Another difference is that this method uses IMM32 message when fails to // retrieve/update the selection. + // In order to emulate the IMM32 reconversion, we need to use async edit + // session if |need_async_reconversion| is set to be true. See + // IMN_PRIVATE/kNotifyReconvertFromIME in IMM32-Mozc about IMM32 reconversion. // TODO(yukawa): Consider to unify this method with TipSurroundingText::Get. - static bool PrepareForReconversion(TipTextService *text_service, - ITfContext *context, - TipSurroundingTextInfo *info); + static bool PrepareForReconversionFromIme(TipTextService *text_service, + ITfContext *context, + TipSurroundingTextInfo *info, + bool *need_async_reconversion); // Returns true when succeeds to delete preceeding text from the beginning of // the selected range. diff -Nru mozc-1.11.1502.102/win32/tip/tip_text_service.cc mozc-1.11.1522.102/win32/tip/tip_text_service.cc --- mozc-1.11.1502.102/win32/tip/tip_text_service.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_text_service.cc 2013-08-28 05:26:13.000000000 +0000 @@ -161,6 +161,119 @@ #define SPI_SETTHREADLOCALINPUTSETTINGS 0x104F #endif // SPI_SETTHREADLOCALINPUTSETTINGS +// ITfFnGetPreferredTouchKeyboardLayout is available on Windows 8 SDK and later. 
+#ifndef TKBL_UNDEFINED +#define TKBL_UNDEFINED 0x0000 +#define TKBL_CLASSIC_TRADITIONAL_CHINESE_PHONETIC 0x0404 +#define TKBL_CLASSIC_TRADITIONAL_CHINESE_CHANGJIE 0xF042 +#define TKBL_CLASSIC_TRADITIONAL_CHINESE_DAYI 0xF043 +#define TKBL_OPT_JAPANESE_ABC 0x0411 +#define TKBL_OPT_KOREAN_HANGUL_2_BULSIK 0x0412 +#define TKBL_OPT_SIMPLIFIED_CHINESE_PINYIN 0x0804 +#define TKBL_OPT_TRADITIONAL_CHINESE_PHONETIC 0x0404 + +enum TKBLayoutType { + TKBLT_UNDEFINED = 0, + TKBLT_CLASSIC = 1, + TKBLT_OPTIMIZED = 2 +}; + +// {5F309A41-590A-4ACC-A97F-D8EFFF13FDFC} +const IID IID_ITfFnGetPreferredTouchKeyboardLayout = { + 0x5f309a41, 0x590a, 0x4acc, {0xa9, 0x7f, 0xd8, 0xef, 0xff, 0x13, 0xfd, 0xfc} +}; + +// Note: "5F309A41-590A-4ACC-A97F-D8EFFF13FDFC" is equivalent to +// IID_ITfFnGetPreferredTouchKeyboardLayout +struct __declspec(uuid("5F309A41-590A-4ACC-A97F-D8EFFF13FDFC")) +ITfFnGetPreferredTouchKeyboardLayout : public ITfFunction { + public: + virtual HRESULT STDMETHODCALLTYPE GetLayout(TKBLayoutType *layout_type, + WORD *preferred_layout_id) = 0; +}; +#endif // !TKBL_UNDEFINED + +#ifdef GOOGLE_JAPANESE_INPUT_BUILD +const wchar_t kGetPreferredTouchKeyboardLayoutFunctionDisplayName[] = + L"Google Japanese Input: GetPreferredTouchKeyboardLayout Function"; +#else +const wchar_t kGetPreferredTouchKeyboardLayoutFunctionDisplayName[] = + L"Mozc: GetPreferredTouchKeyboardLayout Function"; +#endif + +class ITfFnGetPreferredTouchKeyboardLayoutImpl + : public ITfFnGetPreferredTouchKeyboardLayout { + public: + static ITfFnGetPreferredTouchKeyboardLayout *New() { + return new ITfFnGetPreferredTouchKeyboardLayoutImpl(); + } + + // The IUnknown interface methods. + STDMETHODIMP QueryInterface(REFIID interface_id, void **object) { + if (!object) { + return E_INVALIDARG; + } + + // Find a matching interface from the ones implemented by this object. + // This object implements IUnknown and ITfEditSession. + if (::IsEqualIID(interface_id, IID_IUnknown)) { + *object = static_cast(this); + } else if (IsEqualIID(interface_id, IID_ITfFunction)) { + *object = static_cast(this); + } else if (IsEqualIID(interface_id, + IID_ITfFnGetPreferredTouchKeyboardLayout)) { + *object = static_cast(this); + } else { + *object = nullptr; + return E_NOINTERFACE; + } + + AddRef(); + return S_OK; + } + + STDMETHODIMP_(ULONG) AddRef() { + return ref_count_.AddRefImpl(); + } + + STDMETHODIMP_(ULONG) Release() { + const ULONG count = ref_count_.ReleaseImpl(); + if (count == 0) { + delete this; + } + return count; + } + + private: + ITfFnGetPreferredTouchKeyboardLayoutImpl() {} + + // The ITfFunction interface method. + virtual HRESULT STDMETHODCALLTYPE GetDisplayName(BSTR *name) { + if (name == nullptr) { + return E_INVALIDARG; + } + *name = + CComBSTR(kGetPreferredTouchKeyboardLayoutFunctionDisplayName).Detach(); + return S_OK; + } + + // ITfFnGetPreferredTouchKeyboardLayout + virtual HRESULT STDMETHODCALLTYPE GetLayout(TKBLayoutType *layout_type, + WORD *preferred_layout_id) { + if (layout_type != nullptr) { + *layout_type = TKBLT_OPTIMIZED; + } + if (preferred_layout_id != nullptr) { + *preferred_layout_id = TKBL_OPT_JAPANESE_ABC; + } + return S_OK; + } + + TipRefCount ref_count_; + + DISALLOW_COPY_AND_ASSIGN(ITfFnGetPreferredTouchKeyboardLayoutImpl); +}; + HRESULT SpawnTool(const string &command) { if (!Process::SpawnMozcProcess(kMozcTool, "--mode=" + command)) { return E_FAIL; @@ -319,11 +432,10 @@ // Implements the ITfCompositionSink::OnCompositionTerminated() function. 
// This function is called by Windows when an ongoing composition is // terminated by applications. - virtual STDMETHODIMP OnCompositionTerminated(TfEditCookie cookie, + virtual STDMETHODIMP OnCompositionTerminated(TfEditCookie write_cookie, ITfComposition *composition) { - TipEditSessionImpl::OnCompositionTerminated( - text_service_, context_, composition, cookie); - return S_OK; + return TipEditSessionImpl::OnCompositionTerminated( + text_service_, context_, composition, write_cookie); } private: @@ -809,7 +921,6 @@ } } - return result; } @@ -1038,6 +1149,11 @@ (*unknown)->AddRef(); return S_OK; } + if (::IsEqualGUID(IID_ITfFnGetPreferredTouchKeyboardLayout, iid)) { + *unknown = ITfFnGetPreferredTouchKeyboardLayoutImpl::New(); + (*unknown)->AddRef(); + return S_OK; + } return E_NOINTERFACE; } @@ -1064,11 +1180,6 @@ case TipLangBarCallback::kHalfAlphanumeric: case TipLangBarCallback::kFullAlphanumeric: case TipLangBarCallback::kHalfKatakana: { - bool use_kana_input = false; - TipPrivateContext *context = GetFocusedPrivateContext(); - if (context != nullptr) { - use_kana_input = context->input_behavior().prefer_kana_input; - } const commands::CompositionMode mozc_mode = GetMozcMode(menu_id); return TipEditSession::SwitchInputModeAsync(this, mozc_mode); } @@ -1095,11 +1206,6 @@ ? S_OK : E_FAIL; } - bool use_kana_input = false; - TipPrivateContext *context = GetFocusedPrivateContext(); - if (context != nullptr) { - use_kana_input = context->input_behavior().prefer_kana_input; - } // Like MSIME 2012, switch to Hiragana mode when the LangBar button is // clicked. return TipEditSession::SwitchInputModeAsync(this, commands::HIRAGANA); diff -Nru mozc-1.11.1502.102/win32/tip/tip_ui_element_immersive.cc mozc-1.11.1522.102/win32/tip/tip_ui_element_immersive.cc --- mozc-1.11.1502.102/win32/tip/tip_ui_element_immersive.cc 2013-07-17 02:38:04.000000000 +0000 +++ mozc-1.11.1522.102/win32/tip/tip_ui_element_immersive.cc 2013-08-28 05:26:13.000000000 +0000 @@ -96,6 +96,12 @@ #endif // GOOGLE_JAPANESE_INPUT_BUILD +#ifndef EVENT_OBJECT_IME_SHOW +#define EVENT_OBJECT_IME_SHOW 0x8027 +#define EVENT_OBJECT_IME_HIDE 0x8028 +#define EVENT_OBJECT_IME_CHANGE 0x8029 +#endif // EVENT_OBJECT_IME_SHOW + // Represents the module handle of this module. 
volatile HMODULE g_module = nullptr; @@ -259,7 +265,8 @@ text_service, context, TipUiElementDelegateFactory::kImmersiveCandidateWindow)), working_area_(renderer::win32::WorkingAreaFactory::Create()), - window_(window_handle) { + window_(window_handle), + window_visible_(false) { } // Destructor is kept as non-virtual because this class is designed to be @@ -352,12 +359,38 @@ return false; } + void ShowWindow(bool content_changed) { + window_.ShowWindow(SW_SHOWNA); + if (!window_visible_) { + ::NotifyWinEvent( + EVENT_OBJECT_IME_SHOW, window_.m_hWnd, OBJID_WINDOW, CHILDID_SELF); + } else if (content_changed) { + ::NotifyWinEvent( + EVENT_OBJECT_IME_CHANGE, window_.m_hWnd, OBJID_WINDOW, CHILDID_SELF); + } + window_visible_ = true; + } + + void HideWindow() { + window_.ShowWindow(SW_HIDE); + if (window_visible_) { + ::NotifyWinEvent( + EVENT_OBJECT_IME_HIDE, window_.m_hWnd, OBJID_WINDOW, CHILDID_SELF); + } + window_visible_ = false; + } + void Render(const RenderingInfo &info) { if (!info.output.has_candidates()) { - window_.ShowWindow(SW_HIDE); + HideWindow(); return; } + const bool content_changed = + (rendering_info_.target_rect != info.target_rect) || + (rendering_info_.output.SerializeAsString() != + info.output.SerializeAsString()); + rendering_info_.target_rect = info.target_rect; rendering_info_.output.CopyFrom(info.output); RenderImpl(info); @@ -365,9 +398,9 @@ BOOL shown = FALSE; delegate_->IsShown(&shown); if (!shown) { - window_.ShowWindow(SW_HIDE); + HideWindow(); } else { - window_.ShowWindow(SW_SHOWNA); + ShowWindow(content_changed); } } @@ -540,6 +573,7 @@ unique_ptr delegate_; unique_ptr working_area_; CWindow window_; + bool window_visible_; TableLayout table_layout_; RenderingInfo rendering_info_; DISALLOW_COPY_AND_ASSIGN(TipImmersiveUiElementImpl);
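The window_visible_ flag introduced above exists so that the immersive candidate window raises EVENT_OBJECT_IME_SHOW/HIDE only on real visibility transitions, and EVENT_OBJECT_IME_CHANGE when an already-visible window's content changes. A minimal stand-alone sketch of the same bookkeeping follows, assuming <windows.h>/<oleacc.h> and a caller-supplied HWND; ImeWindowVisibilityNotifier is an illustrative name, not a class from the patch.

#include <windows.h>
#include <oleacc.h>  // CHILDID_SELF

#ifndef EVENT_OBJECT_IME_SHOW  // present only in Windows 8+ SDK headers
#define EVENT_OBJECT_IME_SHOW   0x8027
#define EVENT_OBJECT_IME_HIDE   0x8028
#define EVENT_OBJECT_IME_CHANGE 0x8029
#endif  // EVENT_OBJECT_IME_SHOW

class ImeWindowVisibilityNotifier {
 public:
  explicit ImeWindowVisibilityNotifier(HWND hwnd) : hwnd_(hwnd) {}

  // Call after showing the window (e.g. ShowWindow(SW_SHOWNA)).
  void OnShown(bool content_changed) {
    if (!visible_) {
      ::NotifyWinEvent(EVENT_OBJECT_IME_SHOW, hwnd_, OBJID_WINDOW, CHILDID_SELF);
    } else if (content_changed) {
      ::NotifyWinEvent(EVENT_OBJECT_IME_CHANGE, hwnd_, OBJID_WINDOW, CHILDID_SELF);
    }
    visible_ = true;
  }

  // Call after hiding the window (e.g. ShowWindow(SW_HIDE)).
  void OnHidden() {
    if (visible_) {
      ::NotifyWinEvent(EVENT_OBJECT_IME_HIDE, hwnd_, OBJID_WINDOW, CHILDID_SELF);
    }
    visible_ = false;
  }

 private:
  HWND hwnd_;
  bool visible_ = false;
};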