rally-0.9.1/0000775000567000056710000000000013073420067014007 5ustar jenkinsjenkins00000000000000rally-0.9.1/AUTHORS0000664000567000056710000003322313073420065015060 0ustar jenkinsjenkins00000000000000Aaron Rosen Abe Music Adam Zhang Albert White Alberto Planas Alex Alex Krzos Alexander Chadin Alexander Gubanov Alexander Maretskiy Alexander Nevenchannyy Alexandr Nevenchannyy Alexei Kornienko Alexei Kornienko Alina Nesterova Allen Gao Anastasia Kuznetsova Andreas Jaeger Andrew F. Ly Andrew McDermott Andrey Andrey Kurilin Andrey Kurilin Andy Botting Angus Lees Anh Tran Anthony Lee Anthony Washington Anton Arefiev Anton Frolov Anton Kremenetsky Anton Staroverov Anton Studenov Antonio Messina Anusha Kadambala Anwar Ruwaifa Arata Notsu Arx Cruz Aswad Rangnekar Atsushi SAKAI Bertrand Lallau Bo Chi Bo Chi Boris Pavlovic Brandon Clifford Brian Downs Brooke Trimble Bruno Semperlotti Béla Vancsics Cady_Chen Cao Xuan Hoang Carlos L. Torres Castulo J. Martinez ChangBo Guo(gcb) Chaozhe.Chen Chris St. Pierre Christian Berendt Christian Berendt Ciprian Barbu Clint Byrum Cuong Nguyen Cyril Roelandt Danny Choi Dao Cong Tien Darla Ahlert Davanum Srinivas Dave McCowan Dave Spano David Paterson Dina Belova Dmitriy Uvarenkov Dmitry Ratushnyy Doug Hellmann Doug Hellmann Duncan Thomas Edgar Magana Egor Tolmachev Endre Karlson Evgeniy Evgeny Ivanov Evgeny Sikachev Fei Long Wang Filip Hubik Flavio Percoco George Tian Gleb Stepanov Hai Shi Harsh Shah Hiroki Aramaki Hugh Ma Hugh Saunders Igor Degtiarov Igor Pavlovic Ihar Hrachyshka Ikuo Kumagai Illia Khudoshyn Ilya Kharin Ilya Popov Ilya Shakhat Ilya Sokolov Ilya Tyaptin Iswarya_Vakati Ivan Kolodyazhny Ivan Lozgachev Jan Klare Javier Pena Jean-Philippe Braun Jens Rosenboom Jeremy Stanley Jesse Keating Jesse Pretorius Jiantao He Jin Hase Joe Gordon Joe H. 
Rahme Joe Talerico John Schwarz John Wu Joris Roovers Joseph Bajin Joshua Harlow Juha Kosonen Julia Varigina Julian Edwards Julien Vey Kahou Lei Kai Zhang Ken Pepple Kevin Benton Kevin Tsang Kiran Kiran Ranganath Kirill Shileev Kiseok Kim Kui Shi Kumar Rishabh Kun Huang Kyle Jorgensen LIU Yulong Lee Yarwood Li Ma Li Tianqing Li, Chen Lianhao Lu Lingxian Kong LingxianKong Liping Mao LipingMao LiuNanke Lu lei Lucio Seki Luong Anh Tuan Luz Luz Cazares Lv Fumei Mahito OGURA Maksym Iarmak Maplalabs Marcela Bonell Marcin Iwinski Marco Ceppi Marco Morais Marian Krcmarik Mark Wagner Michal Dulko Michal Gershenzon Michal Rostecki Michał Jastrzębski Mike Durnosvistov Mike Fedosin Mikhail Dubov Mikhail Lelyakin Mohammad Banikazemi Monty Taylor Mouad Benchchaoui Mykhailo Dovgal Neependra Khare Nguyen Hung Phuong Nikita Konovalov Nir Magnezi Nobuto MURATA Nobuto MURATA Oleg Anufriev Oleg Gelbukh Oleh Anufriiev Oleksandr Savatieiev Olga Kopylova Om Prakash Pandey Ondřej Nový OpenStack Release Bot Paul Belanger Pavel Boldin Pierre Padrixe Piyush Piyush Raman Srivastava Prabhjyot Singh Sodhi Pradeep K Surisetty Pradeep Kumar Surisetty Prateek Arora Rafi Khardalian Rajath Agasthya Rajath Agasthya Rajesh Tailor Raphael Badin Raphael Badin RobberPhex Rodion Promyshlennikov Rohan Kanade Roman Prykhodchenko Roman Vasilets Ross Martyn Sai Sindhur Malleni Sergey Galkin Sergey Kraynev Sergey Lukjanov Sergey Nikitin Sergey Novikov Sergey Skripnick Sergey Skripnick Sergey Vasilenko Sergey Vilgelm Serhii Skrypnik Serhii Vasheka Shahifali Agrawal Shaifali Agrawal Shane Wang Shunde Zhang Shuquan Huang Sindhur Sirisha Areti Spyros Trigazis Sreeram Vancheeswaran Srinivas Sakhamuri Srinivas Sakhamuri Staroverov Anton Steve Heyman Steve Wilkerson Subhash Dasyam Sumant Murke Sunil Mamillapalli Swapnil Kulkarni Swapnil Kulkarni Takeaki Matsumoto Takyuki Mitsui Tetsuo Nakamura Thomas Bechtold Timothy R. 
Chavez Timur Kozhevnikov Timur Nurlygayanov TimurNurlygayanov Tom Patzig Travis Truman Tzanetos Balitsaris Tzanetos Balitsaris Valeriy Ponomaryov Victor Estrada Victor Ryzhenkin Victor Sergeyev Victoria Martínez de la Cruz Walter A. Boring IV Wataru Takase Wenwu Peng Winnie Tsang YAMAMOTO Takashi Yair Fried Yan Ning Yaroslav Isakov Yaroslav Lobankov Yatin Kumbhare Yoshifumi Sumida Yuki Nishiwaki Yuki Nishiwaki Yuriy Nesenenko Yusuke Ide Zhao Lei Zhidong Yu Zhongcheng Lao ali-mohsin april astarove astaroverov bailinzhang boden chandidas chen-li chen-li chenaidong1 chenhb-zte dagnello dagnello dineshbhor ekonstantinov fandeliang fhubik gecong1973 gengchc2 gtt116 haishi harikrishna-mallavolu hayderimran7 hayderimran7 hgangwx huang.huayong hyunsun jacobliberman janonymous jianghuaw kairat_kushaev kbaikov kiran-r kong leiyashuai lewis li,chen lingxiankong linwwu liyingjun liyingjun maxinjian melissaml mitsutu yoshida morganrOL msdubov msdubov nayna-patel ningy ningyan nmagnezi obutenko obutenko pboros pomeo92 qianlin ravikumar-venkatesan ravikumar-venkatesan rook sathyanarayana-pvr shiyanchdev simpleranchero smurashov sskripnick sudhir_agarwal sunguoshuai tcs temujin theSimplex twm2016 tynorth-cisco uppi vagrant vaidyanath veena venkatamahesh vgusev vigny_wu <398776277@qq.com> vrovachev wangxf xiaozhuangqing xuchao yanyanhu yaohelan yuhui_inspur yuli yuriy_n yuyafei zhanghao zhangyanxian zhangzhang zhufl zoukeke rally-0.9.1/rally.egg-info/0000775000567000056710000000000013073420067016624 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally.egg-info/requires.txt0000664000567000056710000000257213073420065021230 0ustar jenkinsjenkins00000000000000alembic>=0.8.10,<=0.9.1 decorator>=3.4.0,<=4.0.11 Jinja2>=2.8,!=2.9.0,!=2.9.1,!=2.9.2,!=2.9.3,!=2.9.4,<=2.9.5 jsonschema>=2.0.0,!=2.5.0,<3.0.0 netaddr>=0.7.13,!=0.7.16,<=0.7.19 oslo.config>=3.22.0,<=3.23.0 oslo.db>=4.15.0,<=4.18.0 oslo.i18n>=2.1.0,<=3.14.0 oslo.log>=3.11.0,<=3.21.0 oslo.serialization>=1.10.0,<=2.17.0 
oslo.utils>=3.20.0,<=3.23.0 paramiko>=2.0,<=2.1.2 pbr==2.0.0 PrettyTable>=0.7.1,<0.8 PyYAML>=3.10.0,<=3.12 python-subunit>=0.0.18,<=1.2.0 requests>=2.10.0,!=2.12.2,!=2.13.0,<=2.13.0 SQLAlchemy>=1.0.10,<=1.1.6 sphinx>=1.5.1,<=1.5.3 six>=1.9.0,<=1.10.0 virtualenv boto>=2.32.1,<=2.46.1 gnocchiclient>=2.7.0,<=3.1.1 keystoneauth1>=2.18.0,<=2.19.0 os-faults>=0.1.5,<=0.1.11 python-ceilometerclient>=2.5.0,<=2.8.0 python-cinderclient>=1.6.0,!=1.7.0,!=1.7.1,<=2.0.1 python-designateclient>=1.5.0,<=2.6.0 python-glanceclient>=2.5.0,<=2.6.0 python-heatclient>=1.6.1,<=1.8.0 python-ironicclient>=1.11.0,<=1.11.1 python-keystoneclient>=3.8.0,<=3.10.0 python-magnumclient>=2.0.0,<=2.5.0 python-manilaclient>=1.12.0,<=1.14.0 python-mistralclient>=2.0.0,<=3.0.0 python-monascaclient>=1.1.0,<=1.5.0 python-muranoclient>=0.8.2,<=0.12.0 python-neutronclient>=5.1.0,<=6.1.0 python-novaclient==7.1.0 python-saharaclient==1.1.0 python-senlinclient>=1.1.0,<=1.2.0 python-swiftclient>=3.2.0,<=3.3.0 python-troveclient>=2.2.0,<=2.8.0 python-watcherclient>=0.23.0,<=1.0.0 python-zaqarclient>=1.0.0,<=1.4.0 rally-0.9.1/rally.egg-info/not-zip-safe0000664000567000056710000000000113073420044021045 0ustar jenkinsjenkins00000000000000 rally-0.9.1/rally.egg-info/PKG-INFO0000664000567000056710000001474313073420065017730 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: rally Version: 0.9.1 Summary: Benchmark System for OpenStack Home-page: http://docs.openstack.org/developer/rally/ Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: Apache License, Version 2.0 Description: ===== Rally ===== Team and repository tags ======================== .. image:: http://governance.openstack.org/badges/rally.svg :target: http://governance.openstack.org/reference/tags/index.html .. image:: https://img.shields.io/pypi/v/rally.svg :target: https://pypi.python.org/pypi/rally/ :alt: Latest Version .. 
image:: https://img.shields.io/badge/gitter-join_chat-ff69b4.svg :target: https://gitter.im/rally-dev/Lobby :alt: Gitter Chat .. image:: https://img.shields.io/badge/tasks-trello_board-blue.svg :target: https://trello.com/b/DoD8aeZy/rally :alt: Trello Board .. image:: https://img.shields.io/github/license/openstack/rally.svg :target: https://www.apache.org/licenses/LICENSE-2.0 :alt: Apache License, Version 2.0 What is Rally ============= Rally is a Benchmark-as-a-Service project for OpenStack. Rally is intended to provide the community with a benchmarking tool that is capable of performing **specific**, **complicated** and **reproducible** test cases on **real deployment** scenarios. If you are here, you are probably familiar with OpenStack and you also know that it's a really huge ecosystem of cooperative services. When something fails, performs slowly or doesn't scale, it's really hard to answer different questions on "what", "why" and "where" has happened. Another reason why you could be here is that you would like to build an OpenStack CI/CD system that will allow you to improve SLA, performance and stability of OpenStack continuously. The OpenStack QA team mostly works on CI/CD that ensures that new patches don't break some specific single node installation of OpenStack. On the other hand it's clear that such CI/CD is only an indication and does not cover all cases (e.g. if a cloud works well on a single node installation it doesn't mean that it will continue to do so on a 1k servers installation under high load as well). Rally aims to fix this and help us to answer the question "How does OpenStack work at scale?". To make it possible, we are going to automate and unify all steps that are required for benchmarking OpenStack at scale: multi-node OS deployment, verification, benchmarking & profiling. **Rally** workflow can be visualized by the following diagram: .. 
image:: doc/source/images/Rally-Actions.png :alt: Rally Architecture Who Is Using Rally ================== .. image:: doc/source/images/Rally_who_is_using.png :alt: Who is Using Rally Documentation ============= `Rally documentation on ReadTheDocs `_ is a perfect place to start learning about Rally. It provides you with an **easy** and **illustrative** guidance through this benchmarking tool. For example, check out the `Rally step-by-step tutorial `_ that explains, in a series of lessons, how to explore the power of Rally in benchmarking your OpenStack clouds. Architecture ------------ In terms of software architecture, Rally is built of 4 main components: 1. **Server Providers** - provide servers (virtual servers), with ssh access, in one L3 network. 2. **Deploy Engines** - deploy OpenStack cloud on servers that are presented by **Server Providers** 3. **Verification** - component that runs tempest (or another specific set of tests) against a deployed cloud, collects results & presents them in human readable form. 4. **Benchmark engine** - allows to write parameterized benchmark scenarios & run them against the cloud. Use Cases --------- There are 3 major high level Rally Use Cases: .. 
image:: doc/source/images/Rally-UseCases.png :alt: Rally Use Cases Typical cases where Rally aims to help are: - Automate measuring & profiling focused on how new code changes affect the OS performance; - Using Rally profiler to detect scaling & performance issues; - Investigate how different deployments affect the OS performance: - Find the set of suitable OpenStack deployment architectures; - Create deployment specifications for different loads (amount of controllers, swift nodes, etc.); - Automate the search for hardware best suited for particular OpenStack cloud; - Automate the production cloud specification generation: - Determine terminal loads for basic cloud operations: VM start & stop, Block Device create/destroy & various OpenStack API methods; - Check performance of basic cloud operations in case of different loads. Links ---------------------- * Free software: Apache license * Documentation: http://rally.readthedocs.org/en/latest/ * Source: http://git.openstack.org/cgit/openstack/rally * Bugs: http://bugs.launchpad.net/rally * Step-by-step tutorial: http://rally.readthedocs.org/en/latest/tutorial.html * RoadMap: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g * Launchpad page: https://launchpad.net/rally * Gitter chat: https://gitter.im/rally-dev/Lobby * Trello board: https://trello.com/b/DoD8aeZy/rally Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 
rally-0.9.1/rally.egg-info/entry_points.txt0000664000567000056710000000021413073420065022115 0ustar jenkinsjenkins00000000000000[console_scripts] rally = rally.cli.main:main rally-manage = rally.cli.manage:main [oslo.config.opts] rally = rally.common.opts:list_opts rally-0.9.1/rally.egg-info/SOURCES.txt0000664000567000056710000025330713073420067020522 0ustar jenkinsjenkins00000000000000.coveragerc .dockerignore AUTHORS CONTRIBUTING.rst ChangeLog Dockerfile LICENSE README.rst babel.cfg bindep.txt install_rally.sh optional-requirements.txt requirements.txt setup.cfg setup.py test-requirements.txt tox.ini certification/openstack/README.rst certification/openstack/task.yaml certification/openstack/task_arguments.yaml certification/openstack/macro/macro.yaml certification/openstack/scenario/authentication.yaml certification/openstack/scenario/cinder.yaml certification/openstack/scenario/glance.yaml certification/openstack/scenario/keystone.yaml certification/openstack/scenario/neutron.yaml certification/openstack/scenario/nova.yaml devstack/README.rst devstack/features.yaml devstack/plugin.sh devstack/settings devstack/lib/rally doc/README.rst doc/ext/__init__.py doc/ext/cli_reference.py doc/ext/include_vars.py doc/ext/plugin_reference.py doc/ext/utils.py doc/feature_request/README.rst doc/feature_request/capture_task_logging.rst doc/feature_request/check_queue_perfdata.rst doc/feature_request/comparing_results_of_2_tasks.rst doc/feature_request/distributed_load_generation.rst doc/feature_request/explicitly_specify_existing_users_for_scenarios.rst doc/feature_request/historical_performance_data.rst doc/feature_request/installation_script_enhancements.rst doc/feature_request/installing_isolated.rst doc/feature_request/launch_specific_benchmark.rst doc/feature_request/multi_scenarios_load_gen.rst doc/feature_request/multiple_attach_volume.rst doc/feature_request/persistence_benchmark_env.rst doc/feature_request/production_ready_cleanup.rst 
doc/feature_request/implemented/LDAP_support.rst doc/feature_request/implemented/add_possibility_to_specify_concurrency_for_tempest.rst doc/feature_request/implemented/stop_scenario_after_several_errors.rst doc/release_notes/archive.rst doc/release_notes/latest.rst doc/release_notes/archive/v0.0.1.rst doc/release_notes/archive/v0.0.2.rst doc/release_notes/archive/v0.0.3.rst doc/release_notes/archive/v0.0.4.rst doc/release_notes/archive/v0.1.0.rst doc/release_notes/archive/v0.1.1.rst doc/release_notes/archive/v0.1.2.rst doc/release_notes/archive/v0.2.0.rst doc/release_notes/archive/v0.3.0.rst doc/release_notes/archive/v0.3.1.rst doc/release_notes/archive/v0.3.2.rst doc/release_notes/archive/v0.3.3.rst doc/release_notes/archive/v0.4.0.rst doc/release_notes/archive/v0.5.0.rst doc/release_notes/archive/v0.6.0.rst doc/release_notes/archive/v0.7.0.rst doc/release_notes/archive/v0.8.0.rst doc/release_notes/archive/v0.8.1.rst doc/release_notes/archive/v0.9.0.rst doc/release_notes/archive/v0.9.1.rst doc/source/Makefile doc/source/cli_reference.rst doc/source/conf.py doc/source/contribute.rst doc/source/feature_request doc/source/feature_requests.rst doc/source/index.rst doc/source/_templates/openstackrally/layout.html doc/source/_templates/openstackrally/theme.conf doc/source/_templates/openstackrally/_static/img.css doc/source/images/Amqp_rpc_single_reply_queue.png doc/source/images/Hook-Aggregated-Report.png doc/source/images/Hook-Per-Hook-Report.png doc/source/images/Hook-Results.png doc/source/images/Rally-Actions.png doc/source/images/Rally-Plugins.png doc/source/images/Rally-UseCases.png doc/source/images/Rally_Architecture.png doc/source/images/Rally_Distributed_Runner.png doc/source/images/Rally_QA.png doc/source/images/Rally_VM_list.png doc/source/images/Rally_snapshot_vm.png doc/source/images/Rally_who_is_using.png doc/source/images/Report-Abort-on-SLA-task-1.png doc/source/images/Report-Abort-on-SLA-task-2.png doc/source/images/Report-Collage.png 
doc/source/images/Report-Multiple-Configurations-Overview.png doc/source/images/Report-Multiple-Overview.png doc/source/images/Report-Overview.png doc/source/images/Report-SLA-Overview.png doc/source/images/Report-SLA-Scenario.png doc/source/images/Report-Scenario-Atomic.png doc/source/images/Report-Scenario-Overview.png doc/source/images/Report-Task-Actions-durations.png doc/source/images/Report-Task-Distribution.png doc/source/images/Report-Task-Failures.png doc/source/images/Report-Task-Input-file.png doc/source/images/Report-Task-Load-profile.png doc/source/images/Report-Task-Overview.png doc/source/images/Report-Task-SLA.png doc/source/images/Report-Task-Scenario-Data-Aggregated.png doc/source/images/Report-Task-Scenario-Data-Per-iteration.png doc/source/images/Report-Task-Subtask-configuration.png doc/source/images/Report-Task-Total-durations.png doc/source/images/Report-Trends-Atomic-actions.png doc/source/images/Report-Trends-Configuration.png doc/source/images/Report-Trends-Overview.png doc/source/images/Report-Trends-Total.png doc/source/images/Report-Trends-single-run.png doc/source/images/Report-Verify-filter-by-status.png doc/source/images/Report-Verify-for-4-Verifications.png doc/source/images/Report-Verify-toggle-tags.png doc/source/images/Report-Verify-tracebacks.png doc/source/images/Report-Verify-xfail.png doc/source/install_and_upgrade/db_migrations.rst doc/source/install_and_upgrade/index.rst doc/source/install_and_upgrade/install.rst doc/source/miscellaneous/concepts.rst doc/source/overview/glossary.rst doc/source/overview/index.rst doc/source/overview/overview.rst doc/source/overview/stories doc/source/overview/user_stories.rst doc/source/plugins/index.rst doc/source/plugins/plugin_reference.rst doc/source/plugins/implementation/context_plugin.rst doc/source/plugins/implementation/hook_and_trigger_plugins.rst doc/source/plugins/implementation/runner_plugin.rst doc/source/plugins/implementation/scenario_plugin.rst 
doc/source/plugins/implementation/sla_plugin.rst doc/source/project_info/index.rst doc/source/project_info/release_notes doc/source/project_info/release_notes.rst doc/source/quick_start/gates.rst doc/source/quick_start/index.rst doc/source/quick_start/tutorial.rst doc/source/quick_start/tutorial/step_0_installation.rst doc/source/quick_start/tutorial/step_10_verifying_cloud_via_tempest_verifier.rst doc/source/quick_start/tutorial/step_1_setting_up_env_and_running_benchmark_from_samples.rst doc/source/quick_start/tutorial/step_2_input_task_format.rst doc/source/quick_start/tutorial/step_3_benchmarking_with_existing_users.rst doc/source/quick_start/tutorial/step_4_adding_success_criteria_for_benchmarks.rst doc/source/quick_start/tutorial/step_5_task_templates.rst doc/source/quick_start/tutorial/step_6_aborting_load_generation_on_sla_failure.rst doc/source/quick_start/tutorial/step_7_working_with_multple_openstack_clouds.rst doc/source/quick_start/tutorial/step_8_discovering_more_plugins.rst doc/source/quick_start/tutorial/step_9_deploying_openstack.rst doc/source/task/index.rst doc/source/verification/cli_reference.rst doc/source/verification/index.rst doc/source/verification/overview.rst doc/source/verification/reports.rst doc/source/verification/verifiers.rst doc/source/verification/howto/add_new_reporter.rst doc/source/verification/howto/add_support_for_new_tool.rst doc/source/verification/howto/index.rst doc/source/verification/howto/migrate_from_old_design.rst doc/specs/README.rst doc/specs/template.rst doc/specs/implemented/README.rst doc/specs/implemented/class-based-scenarios.rst doc/specs/implemented/consistent_resource_names.rst doc/specs/implemented/db_refactoring.rst doc/specs/implemented/hook_plugins.rst doc/specs/implemented/improve_scenario_output_format.rst doc/specs/implemented/sla_pd_plugin.rst doc/specs/implemented/split_plugins.rst doc/specs/implemented/verification_refactoring.rst doc/specs/in-progress/README.rst 
doc/specs/in-progress/cleanup_refactoring.rst doc/specs/in-progress/deployment_type.rst doc/specs/in-progress/distributed_runner.rst doc/specs/in-progress/improve_atomic_actions_format.rst doc/specs/in-progress/new_rally_input_task_format.rst doc/specs/in-progress/pluggable-types.rst doc/specs/in-progress/pluggable_validators.rst doc/specs/in-progress/refactor_scenario_utils.rst doc/specs/in-progress/task_and_verification_export.rst doc/user_stories/keystone/authenticate.rst doc/user_stories/nova/boot_server.rst etc/rally.bash_completion etc/rally/rally-config-generator.conf etc/rally/rally.conf.sample rally/__init__.py rally/api.py rally/consts.py rally/exceptions.py rally/osclients.py rally-jobs/README.rst rally-jobs/certifcation_task_args.yaml rally-jobs/cinder.yaml rally-jobs/heat.yaml rally-jobs/nova.yaml rally-jobs/rally-designate.yaml rally-jobs/rally-ironic.yaml rally-jobs/rally-keystone-api-v2.yaml rally-jobs/rally-magnum.yaml rally-jobs/rally-manila-no-ss.yaml rally-jobs/rally-manila.yaml rally-jobs/rally-mistral.yaml rally-jobs/rally-monasca.yaml rally-jobs/rally-mos.yaml rally-jobs/rally-murano.yaml rally-jobs/rally-neutron-existing-users.yaml rally-jobs/rally-neutron-extensions.yaml rally-jobs/rally-neutron.yaml rally-jobs/rally-senlin.yaml rally-jobs/rally-watcher.yaml rally-jobs/rally-zaqar.yaml rally-jobs/rally.yaml rally-jobs/rally_args.yaml rally-jobs/sahara-clusters.yaml rally-jobs/unstable-neutron.yaml rally-jobs/extra/README.rst rally-jobs/extra/autoscaling_group.yaml.template rally-jobs/extra/autoscaling_policy.yaml.template rally-jobs/extra/default.yaml.template rally-jobs/extra/fake-image.img rally-jobs/extra/hook_example_script.sh rally-jobs/extra/install_benchmark.sh rally-jobs/extra/instance_test.sh rally-jobs/extra/mistral_input.json rally-jobs/extra/mistral_params.json rally-jobs/extra/mistral_wb.yaml rally-jobs/extra/random_strings.yaml.template rally-jobs/extra/resource_group.yaml.template 
rally-jobs/extra/resource_group_server_with_volume.yaml.template rally-jobs/extra/resource_group_with_constraint.yaml.template rally-jobs/extra/resource_group_with_outputs.yaml.template rally-jobs/extra/server_with_ports.yaml.template rally-jobs/extra/server_with_volume.yaml.template rally-jobs/extra/updated_autoscaling_policy_inplace.yaml.template rally-jobs/extra/updated_random_strings_add.yaml.template rally-jobs/extra/updated_random_strings_delete.yaml.template rally-jobs/extra/updated_random_strings_replace.yaml.template rally-jobs/extra/updated_resource_group_increase.yaml.template rally-jobs/extra/updated_resource_group_reduce.yaml.template rally-jobs/extra/murano/applications/README.rst rally-jobs/extra/murano/applications/HelloReporter/io.murano.apps.HelloReporter.zip rally-jobs/extra/murano/applications/HelloReporter/io.murano.apps.HelloReporter/manifest.yaml rally-jobs/extra/murano/applications/HelloReporter/io.murano.apps.HelloReporter/Classes/HelloReporter.yaml rally-jobs/extra/murano/applications/HelloReporter/io.murano.apps.HelloReporter/UI/ui.yaml rally-jobs/extra/workload/wordpress_heat_template.yaml rally-jobs/extra/workload/wp-instances.yaml rally-jobs/plugins/README.rst rally-jobs/plugins/__init__.py rally-jobs/plugins/fake_plugin.py rally-jobs/plugins/rally_profile.py rally-jobs/plugins/test_relative_import/__init__.py rally-jobs/plugins/test_relative_import/zzz.py rally.egg-info/PKG-INFO rally.egg-info/SOURCES.txt rally.egg-info/dependency_links.txt rally.egg-info/entry_points.txt rally.egg-info/not-zip-safe rally.egg-info/pbr.json rally.egg-info/requires.txt rally.egg-info/top_level.txt rally/aas/__init__.py rally/cli/__init__.py rally/cli/cliutils.py rally/cli/envutils.py rally/cli/main.py rally/cli/manage.py rally/cli/commands/__init__.py rally/cli/commands/deployment.py rally/cli/commands/plugin.py rally/cli/commands/task.py rally/cli/commands/verify.py rally/common/__init__.py rally/common/broker.py rally/common/fileutils.py 
rally/common/i18n.py rally/common/logging.py rally/common/opts.py rally/common/sshutils.py rally/common/streaming_algorithms.py rally/common/utils.py rally/common/version.py rally/common/yamlutils.py rally/common/db/__init__.py rally/common/db/api.py rally/common/db/sqlalchemy/__init__.py rally/common/db/sqlalchemy/alembic.ini rally/common/db/sqlalchemy/api.py rally/common/db/sqlalchemy/models.py rally/common/db/sqlalchemy/types.py rally/common/db/sqlalchemy/migrations/README.rst rally/common/db/sqlalchemy/migrations/env.py rally/common/db/sqlalchemy/migrations/script.py.mako rally/common/db/sqlalchemy/migrations/versions/08e1515a576c_fix_invalid_verification_logs.py rally/common/db/sqlalchemy/migrations/versions/3177d36ea270_merge_credentials_from_users_and_admin.py rally/common/db/sqlalchemy/migrations/versions/32fada9b2fde_remove_admin_domain_name.py rally/common/db/sqlalchemy/migrations/versions/37fdbb373e8d_fix_test_results_for_verifications.py rally/common/db/sqlalchemy/migrations/versions/484cd9413e66_new_db_schema_for_verification_component.py rally/common/db/sqlalchemy/migrations/versions/4ef544102ba7_change_task_status_enum.py rally/common/db/sqlalchemy/migrations/versions/54e844ebfbc3_update_deployment_configs.py rally/common/db/sqlalchemy/migrations/versions/6ad4f426f005_add_hooks_to_task_result.py rally/common/db/sqlalchemy/migrations/versions/92aaaa2a6bb3_refactor_credentials.py rally/common/db/sqlalchemy/migrations/versions/a6f364988fc2_change_tag_type_enum.py rally/common/db/sqlalchemy/migrations/versions/ca3626f62937_init_migration.py rally/common/db/sqlalchemy/migrations/versions/e654a0648db0_refactor_task_results.py rally/common/db/sqlalchemy/migrations/versions/f33f4610dcda_change_verification_statuses.py rally/common/io/__init__.py rally/common/io/junit.py rally/common/io/subunit_v2.py rally/common/objects/__init__.py rally/common/objects/credential.py rally/common/objects/deploy.py rally/common/objects/task.py 
rally/common/objects/verification.py rally/common/objects/verifier.py rally/common/plugin/__init__.py rally/common/plugin/discover.py rally/common/plugin/info.py rally/common/plugin/meta.py rally/common/plugin/plugin.py rally/deployment/__init__.py rally/deployment/engine.py rally/deployment/engines/__init__.py rally/deployment/engines/devstack.py rally/deployment/engines/existing.py rally/deployment/engines/lxc.py rally/deployment/engines/multihost.py rally/deployment/engines/devstack/install.sh rally/deployment/engines/lxc/start.sh rally/deployment/serverprovider/__init__.py rally/deployment/serverprovider/__main__.py rally/deployment/serverprovider/provider.py rally/deployment/serverprovider/providers/__init__.py rally/deployment/serverprovider/providers/cobbler.py rally/deployment/serverprovider/providers/existing.py rally/deployment/serverprovider/providers/lxc.py rally/deployment/serverprovider/providers/openstack.py rally/deployment/serverprovider/providers/virsh.py rally/deployment/serverprovider/providers/lxc/configure_container.sh rally/deployment/serverprovider/providers/lxc/lxc-install.sh rally/deployment/serverprovider/providers/lxc/tunnel-local.sh rally/deployment/serverprovider/providers/lxc/tunnel-remote.sh rally/deployment/serverprovider/providers/virsh/get_domain_ip.sh rally/plugins/__init__.py rally/plugins/common/__init__.py rally/plugins/common/types.py rally/plugins/common/context/__init__.py rally/plugins/common/context/dummy.py rally/plugins/common/exporter/__init__.py rally/plugins/common/exporter/file_system.py rally/plugins/common/hook/__init__.py rally/plugins/common/hook/sys_call.py rally/plugins/common/runners/__init__.py rally/plugins/common/runners/constant.py rally/plugins/common/runners/rps.py rally/plugins/common/runners/serial.py rally/plugins/common/scenarios/__init__.py rally/plugins/common/scenarios/dummy/__init__.py rally/plugins/common/scenarios/dummy/dummy.py rally/plugins/common/scenarios/requests/__init__.py 
rally/plugins/common/scenarios/requests/http_requests.py rally/plugins/common/scenarios/requests/utils.py rally/plugins/common/sla/__init__.py rally/plugins/common/sla/failure_rate.py rally/plugins/common/sla/iteration_time.py rally/plugins/common/sla/max_average_duration.py rally/plugins/common/sla/max_average_duration_per_atomic.py rally/plugins/common/sla/outliers.py rally/plugins/common/sla/performance_degradation.py rally/plugins/common/trigger/__init__.py rally/plugins/common/trigger/event.py rally/plugins/common/trigger/periodic.py rally/plugins/common/verification/__init__.py rally/plugins/common/verification/reporters.py rally/plugins/common/verification/testr.py rally/plugins/openstack/__init__.py rally/plugins/openstack/scenario.py rally/plugins/openstack/service.py rally/plugins/openstack/types.py rally/plugins/openstack/cleanup/__init__.py rally/plugins/openstack/cleanup/base.py rally/plugins/openstack/cleanup/manager.py rally/plugins/openstack/cleanup/resources.py rally/plugins/openstack/context/__init__.py rally/plugins/openstack/context/api_versions.py rally/plugins/openstack/context/fuel.py rally/plugins/openstack/context/ceilometer/__init__.py rally/plugins/openstack/context/ceilometer/samples.py rally/plugins/openstack/context/cinder/__init__.py rally/plugins/openstack/context/cinder/volume_types.py rally/plugins/openstack/context/cinder/volumes.py rally/plugins/openstack/context/cleanup/__init__.py rally/plugins/openstack/context/cleanup/admin.py rally/plugins/openstack/context/cleanup/base.py rally/plugins/openstack/context/cleanup/user.py rally/plugins/openstack/context/dataplane/__init__.py rally/plugins/openstack/context/dataplane/heat.py rally/plugins/openstack/context/designate/__init__.py rally/plugins/openstack/context/designate/zones.py rally/plugins/openstack/context/ec2/__init__.py rally/plugins/openstack/context/ec2/servers.py rally/plugins/openstack/context/glance/__init__.py rally/plugins/openstack/context/glance/images.py 
rally/plugins/openstack/context/heat/__init__.py rally/plugins/openstack/context/heat/stacks.py rally/plugins/openstack/context/keystone/__init__.py rally/plugins/openstack/context/keystone/existing_users.py rally/plugins/openstack/context/keystone/roles.py rally/plugins/openstack/context/keystone/users.py rally/plugins/openstack/context/magnum/__init__.py rally/plugins/openstack/context/magnum/cluster_templates.py rally/plugins/openstack/context/magnum/clusters.py rally/plugins/openstack/context/manila/__init__.py rally/plugins/openstack/context/manila/consts.py rally/plugins/openstack/context/manila/manila_security_services.py rally/plugins/openstack/context/manila/manila_share_networks.py rally/plugins/openstack/context/manila/manila_shares.py rally/plugins/openstack/context/monasca/__init__.py rally/plugins/openstack/context/monasca/metrics.py rally/plugins/openstack/context/murano/__init__.py rally/plugins/openstack/context/murano/murano_environments.py rally/plugins/openstack/context/murano/murano_packages.py rally/plugins/openstack/context/network/__init__.py rally/plugins/openstack/context/network/allow_ssh.py rally/plugins/openstack/context/network/existing_network.py rally/plugins/openstack/context/network/networks.py rally/plugins/openstack/context/neutron/__init__.py rally/plugins/openstack/context/neutron/lbaas.py rally/plugins/openstack/context/nova/__init__.py rally/plugins/openstack/context/nova/flavors.py rally/plugins/openstack/context/nova/keypairs.py rally/plugins/openstack/context/nova/servers.py rally/plugins/openstack/context/quotas/__init__.py rally/plugins/openstack/context/quotas/cinder_quotas.py rally/plugins/openstack/context/quotas/designate_quotas.py rally/plugins/openstack/context/quotas/manila_quotas.py rally/plugins/openstack/context/quotas/neutron_quotas.py rally/plugins/openstack/context/quotas/nova_quotas.py rally/plugins/openstack/context/quotas/quotas.py rally/plugins/openstack/context/sahara/__init__.py 
rally/plugins/openstack/context/sahara/sahara_cluster.py rally/plugins/openstack/context/sahara/sahara_image.py rally/plugins/openstack/context/sahara/sahara_input_data_sources.py rally/plugins/openstack/context/sahara/sahara_job_binaries.py rally/plugins/openstack/context/sahara/sahara_output_data_sources.py rally/plugins/openstack/context/senlin/__init__.py rally/plugins/openstack/context/senlin/profiles.py rally/plugins/openstack/context/swift/__init__.py rally/plugins/openstack/context/swift/objects.py rally/plugins/openstack/context/swift/utils.py rally/plugins/openstack/context/vm/__init__.py rally/plugins/openstack/context/vm/custom_image.py rally/plugins/openstack/context/vm/image_command_customizer.py rally/plugins/openstack/context/watcher/__init__.py rally/plugins/openstack/context/watcher/audit_templates.py rally/plugins/openstack/hook/__init__.py rally/plugins/openstack/hook/fault_injection.py rally/plugins/openstack/scenarios/__init__.py rally/plugins/openstack/scenarios/authenticate/__init__.py rally/plugins/openstack/scenarios/authenticate/authenticate.py rally/plugins/openstack/scenarios/ceilometer/__init__.py rally/plugins/openstack/scenarios/ceilometer/alarms.py rally/plugins/openstack/scenarios/ceilometer/events.py rally/plugins/openstack/scenarios/ceilometer/meters.py rally/plugins/openstack/scenarios/ceilometer/queries.py rally/plugins/openstack/scenarios/ceilometer/resources.py rally/plugins/openstack/scenarios/ceilometer/samples.py rally/plugins/openstack/scenarios/ceilometer/stats.py rally/plugins/openstack/scenarios/ceilometer/traits.py rally/plugins/openstack/scenarios/ceilometer/utils.py rally/plugins/openstack/scenarios/cinder/__init__.py rally/plugins/openstack/scenarios/cinder/utils.py rally/plugins/openstack/scenarios/cinder/volume_backups.py rally/plugins/openstack/scenarios/cinder/volume_types.py rally/plugins/openstack/scenarios/cinder/volumes.py rally/plugins/openstack/scenarios/designate/__init__.py 
rally/plugins/openstack/scenarios/designate/basic.py rally/plugins/openstack/scenarios/designate/utils.py rally/plugins/openstack/scenarios/ec2/__init__.py rally/plugins/openstack/scenarios/ec2/servers.py rally/plugins/openstack/scenarios/ec2/utils.py rally/plugins/openstack/scenarios/fuel/__init__.py rally/plugins/openstack/scenarios/fuel/environments.py rally/plugins/openstack/scenarios/fuel/nodes.py rally/plugins/openstack/scenarios/fuel/utils.py rally/plugins/openstack/scenarios/glance/__init__.py rally/plugins/openstack/scenarios/glance/images.py rally/plugins/openstack/scenarios/glance/utils.py rally/plugins/openstack/scenarios/heat/__init__.py rally/plugins/openstack/scenarios/heat/stacks.py rally/plugins/openstack/scenarios/heat/utils.py rally/plugins/openstack/scenarios/ironic/__init__.py rally/plugins/openstack/scenarios/ironic/nodes.py rally/plugins/openstack/scenarios/ironic/utils.py rally/plugins/openstack/scenarios/keystone/__init__.py rally/plugins/openstack/scenarios/keystone/basic.py rally/plugins/openstack/scenarios/keystone/utils.py rally/plugins/openstack/scenarios/magnum/__init__.py rally/plugins/openstack/scenarios/magnum/cluster_templates.py rally/plugins/openstack/scenarios/magnum/clusters.py rally/plugins/openstack/scenarios/magnum/utils.py rally/plugins/openstack/scenarios/manila/__init__.py rally/plugins/openstack/scenarios/manila/shares.py rally/plugins/openstack/scenarios/manila/utils.py rally/plugins/openstack/scenarios/mistral/__init__.py rally/plugins/openstack/scenarios/mistral/executions.py rally/plugins/openstack/scenarios/mistral/utils.py rally/plugins/openstack/scenarios/mistral/workbooks.py rally/plugins/openstack/scenarios/monasca/__init__.py rally/plugins/openstack/scenarios/monasca/metrics.py rally/plugins/openstack/scenarios/monasca/utils.py rally/plugins/openstack/scenarios/murano/__init__.py rally/plugins/openstack/scenarios/murano/environments.py rally/plugins/openstack/scenarios/murano/packages.py 
rally/plugins/openstack/scenarios/murano/utils.py rally/plugins/openstack/scenarios/neutron/__init__.py rally/plugins/openstack/scenarios/neutron/loadbalancer_v1.py rally/plugins/openstack/scenarios/neutron/loadbalancer_v2.py rally/plugins/openstack/scenarios/neutron/network.py rally/plugins/openstack/scenarios/neutron/security_groups.py rally/plugins/openstack/scenarios/neutron/utils.py rally/plugins/openstack/scenarios/nova/__init__.py rally/plugins/openstack/scenarios/nova/agents.py rally/plugins/openstack/scenarios/nova/aggregates.py rally/plugins/openstack/scenarios/nova/availability_zones.py rally/plugins/openstack/scenarios/nova/flavors.py rally/plugins/openstack/scenarios/nova/floating_ips_bulk.py rally/plugins/openstack/scenarios/nova/hosts.py rally/plugins/openstack/scenarios/nova/hypervisors.py rally/plugins/openstack/scenarios/nova/images.py rally/plugins/openstack/scenarios/nova/keypairs.py rally/plugins/openstack/scenarios/nova/networks.py rally/plugins/openstack/scenarios/nova/security_group.py rally/plugins/openstack/scenarios/nova/server_groups.py rally/plugins/openstack/scenarios/nova/servers.py rally/plugins/openstack/scenarios/nova/services.py rally/plugins/openstack/scenarios/nova/utils.py rally/plugins/openstack/scenarios/quotas/__init__.py rally/plugins/openstack/scenarios/quotas/quotas.py rally/plugins/openstack/scenarios/quotas/utils.py rally/plugins/openstack/scenarios/sahara/__init__.py rally/plugins/openstack/scenarios/sahara/clusters.py rally/plugins/openstack/scenarios/sahara/consts.py rally/plugins/openstack/scenarios/sahara/jobs.py rally/plugins/openstack/scenarios/sahara/node_group_templates.py rally/plugins/openstack/scenarios/sahara/utils.py rally/plugins/openstack/scenarios/senlin/__init__.py rally/plugins/openstack/scenarios/senlin/clusters.py rally/plugins/openstack/scenarios/senlin/utils.py rally/plugins/openstack/scenarios/swift/__init__.py rally/plugins/openstack/scenarios/swift/objects.py 
rally/plugins/openstack/scenarios/swift/utils.py rally/plugins/openstack/scenarios/vm/__init__.py rally/plugins/openstack/scenarios/vm/utils.py rally/plugins/openstack/scenarios/vm/vmtasks.py rally/plugins/openstack/scenarios/watcher/__init__.py rally/plugins/openstack/scenarios/watcher/basic.py rally/plugins/openstack/scenarios/watcher/utils.py rally/plugins/openstack/scenarios/zaqar/__init__.py rally/plugins/openstack/scenarios/zaqar/basic.py rally/plugins/openstack/scenarios/zaqar/utils.py rally/plugins/openstack/services/__init__.py rally/plugins/openstack/services/heat/__init__.py rally/plugins/openstack/services/heat/main.py rally/plugins/openstack/services/identity/__init__.py rally/plugins/openstack/services/identity/identity.py rally/plugins/openstack/services/identity/keystone_common.py rally/plugins/openstack/services/identity/keystone_v2.py rally/plugins/openstack/services/identity/keystone_v3.py rally/plugins/openstack/verification/__init__.py rally/plugins/openstack/verification/tempest/__init__.py rally/plugins/openstack/verification/tempest/config.ini rally/plugins/openstack/verification/tempest/config.py rally/plugins/openstack/verification/tempest/consts.py rally/plugins/openstack/verification/tempest/context.py rally/plugins/openstack/verification/tempest/manager.py rally/plugins/openstack/wrappers/__init__.py rally/plugins/openstack/wrappers/cinder.py rally/plugins/openstack/wrappers/glance.py rally/plugins/openstack/wrappers/keystone.py rally/plugins/openstack/wrappers/network.py rally/plugins/workload/__init__.py rally/plugins/workload/siege.py rally/task/__init__.py rally/task/atomic.py rally/task/context.py rally/task/engine.py rally/task/exporter.py rally/task/functional.py rally/task/hook.py rally/task/runner.py rally/task/scenario.py rally/task/service.py rally/task/sla.py rally/task/trigger.py rally/task/types.py rally/task/utils.py rally/task/validation.py rally/task/processing/__init__.py rally/task/processing/charts.py 
rally/task/processing/plot.py rally/task/processing/utils.py rally/ui/__init__.py rally/ui/utils.py rally/ui/templates/base.html rally/ui/templates/base.mako rally/ui/templates/ci/index.html rally/ui/templates/ci/index_verify.html rally/ui/templates/libs/README.rst rally/ui/templates/libs/angular.1.3.3.min.js rally/ui/templates/libs/d3.3.4.13.min.js rally/ui/templates/libs/nv.d3.1.1.15-beta.min.css rally/ui/templates/libs/nv.d3.1.1.15-beta.min.js rally/ui/templates/task/directive_widget.js rally/ui/templates/task/report.html rally/ui/templates/task/trends.html rally/ui/templates/verification/report.html rally/verification/__init__.py rally/verification/context.py rally/verification/manager.py rally/verification/reporter.py rally/verification/utils.py samples/README.rst samples/deployments/README.rst samples/deployments/existing-keystone-v3.json samples/deployments/existing-with-given-endpoint.json samples/deployments/existing-with-predefined-users.json samples/deployments/existing.json samples/deployments/for_deploying_openstack_with_rally/README.rst samples/deployments/for_deploying_openstack_with_rally/devstack-by-cobbler.json samples/deployments/for_deploying_openstack_with_rally/devstack-in-existing-servers.json samples/deployments/for_deploying_openstack_with_rally/devstack-in-lxc.json samples/deployments/for_deploying_openstack_with_rally/devstack-in-openstack.json samples/deployments/for_deploying_openstack_with_rally/devstack-lxc-engine-in-existing-servers.json samples/deployments/for_deploying_openstack_with_rally/devstack-lxc-engine-in-existing-servers.rst samples/deployments/for_deploying_openstack_with_rally/multihost.json samples/deployments/for_deploying_openstack_with_rally/multihost.rst samples/plugins/unpack_plugins_samples.sh samples/plugins/context/context_plugin.py samples/plugins/context/test_context.json samples/plugins/context/test_context.yaml samples/plugins/runner/runner_plugin.py samples/plugins/runner/test_runner.json 
samples/plugins/runner/test_runner.yaml samples/plugins/scenario/scenario_plugin.py samples/plugins/scenario/test_scenario.json samples/plugins/scenario/test_scenario.yaml samples/plugins/sla/sla_plugin.py samples/plugins/sla/test_sla.json samples/plugins/sla/test_sla.yaml samples/tasks/README.rst samples/tasks/contexts/README.rst samples/tasks/contexts/flavors/boot.json samples/tasks/contexts/flavors/boot.yaml samples/tasks/contexts/large-ops/create-volume-large-scale.json samples/tasks/contexts/large-ops/create-volume-large-scale.yaml samples/tasks/runners/README.rst samples/tasks/runners/constant/constant-for-duration.json samples/tasks/runners/constant/constant-for-duration.yaml samples/tasks/runners/constant/constant-timeout.json samples/tasks/runners/constant/constant-timeout.yaml samples/tasks/runners/rps/rps.json samples/tasks/runners/rps/rps.yaml samples/tasks/runners/serial/serial.json samples/tasks/runners/serial/serial.yaml samples/tasks/scenarios/README.rst samples/tasks/scenarios/authenticate/keystone.json samples/tasks/scenarios/authenticate/keystone.yaml samples/tasks/scenarios/authenticate/token-validate-ceilometer.json samples/tasks/scenarios/authenticate/token-validate-ceilometer.yaml samples/tasks/scenarios/authenticate/token-validate-cinder.json samples/tasks/scenarios/authenticate/token-validate-cinder.yaml samples/tasks/scenarios/authenticate/token-validate-glance.json samples/tasks/scenarios/authenticate/token-validate-glance.yaml samples/tasks/scenarios/authenticate/token-validate-heat.json samples/tasks/scenarios/authenticate/token-validate-heat.yaml samples/tasks/scenarios/authenticate/token-validate-monasca.json samples/tasks/scenarios/authenticate/token-validate-monasca.yaml samples/tasks/scenarios/authenticate/token-validate-neutron.json samples/tasks/scenarios/authenticate/token-validate-neutron.yaml samples/tasks/scenarios/authenticate/token-validate-nova.json samples/tasks/scenarios/authenticate/token-validate-nova.yaml 
samples/tasks/scenarios/ceilometer/all-list-meters.json samples/tasks/scenarios/ceilometer/all-list-meters.yaml samples/tasks/scenarios/ceilometer/all-list-resources.json samples/tasks/scenarios/ceilometer/all-list-resources.yaml samples/tasks/scenarios/ceilometer/create-alarm-and-get-history.json samples/tasks/scenarios/ceilometer/create-alarm-and-get-history.yaml samples/tasks/scenarios/ceilometer/create-alarm.json samples/tasks/scenarios/ceilometer/create-alarm.yaml samples/tasks/scenarios/ceilometer/create-and-delete-alarm.json samples/tasks/scenarios/ceilometer/create-and-delete-alarm.yaml samples/tasks/scenarios/ceilometer/create-and-get-alarm.json samples/tasks/scenarios/ceilometer/create-and-get-alarm.yaml samples/tasks/scenarios/ceilometer/create-and-list-alarm.json samples/tasks/scenarios/ceilometer/create-and-list-alarm.yaml samples/tasks/scenarios/ceilometer/create-and-query-alarm-history.json samples/tasks/scenarios/ceilometer/create-and-query-alarm-history.yaml samples/tasks/scenarios/ceilometer/create-and-query-alarms.json samples/tasks/scenarios/ceilometer/create-and-query-alarms.yaml samples/tasks/scenarios/ceilometer/create-and-query-samples.json samples/tasks/scenarios/ceilometer/create-and-query-samples.yaml samples/tasks/scenarios/ceilometer/create-and-update-alarm.json samples/tasks/scenarios/ceilometer/create-and-update-alarm.yaml samples/tasks/scenarios/ceilometer/create-meter-and-get-stats.json samples/tasks/scenarios/ceilometer/create-meter-and-get-stats.yaml samples/tasks/scenarios/ceilometer/create-user-and-get-event.json samples/tasks/scenarios/ceilometer/create-user-and-get-event.yaml samples/tasks/scenarios/ceilometer/create-user-and-list-event-types.json samples/tasks/scenarios/ceilometer/create-user-and-list-event-types.yaml samples/tasks/scenarios/ceilometer/create-user-and-list-events.json samples/tasks/scenarios/ceilometer/create-user-and-list-events.yaml 
samples/tasks/scenarios/ceilometer/create-user-and-list-trait-descriptions.json samples/tasks/scenarios/ceilometer/create-user-and-list-trait-descriptions.yaml samples/tasks/scenarios/ceilometer/create-user-and-list-traits.json samples/tasks/scenarios/ceilometer/create-user-and-list-traits.yaml samples/tasks/scenarios/ceilometer/get-stats.json samples/tasks/scenarios/ceilometer/get-stats.yaml samples/tasks/scenarios/ceilometer/get-tenant-resources.json samples/tasks/scenarios/ceilometer/get-tenant-resources.yaml samples/tasks/scenarios/ceilometer/list-alarms.json samples/tasks/scenarios/ceilometer/list-alarms.yaml samples/tasks/scenarios/ceilometer/list-matched-samples.json samples/tasks/scenarios/ceilometer/list-matched-samples.yaml samples/tasks/scenarios/ceilometer/list-meters.json samples/tasks/scenarios/ceilometer/list-meters.yaml samples/tasks/scenarios/ceilometer/list-resources.json samples/tasks/scenarios/ceilometer/list-resources.yaml samples/tasks/scenarios/ceilometer/list-samples.json samples/tasks/scenarios/ceilometer/list-samples.yaml samples/tasks/scenarios/cinder/create-and-accept-transfer.json samples/tasks/scenarios/cinder/create-and-accept-transfer.yaml samples/tasks/scenarios/cinder/create-and-attach-volume.json samples/tasks/scenarios/cinder/create-and-attach-volume.yaml samples/tasks/scenarios/cinder/create-and-delete-encryption-type.json samples/tasks/scenarios/cinder/create-and-delete-encryption-type.yaml samples/tasks/scenarios/cinder/create-and-delete-snapshot.json samples/tasks/scenarios/cinder/create-and-delete-snapshot.yaml samples/tasks/scenarios/cinder/create-and-delete-volume-type.json samples/tasks/scenarios/cinder/create-and-delete-volume-type.yaml samples/tasks/scenarios/cinder/create-and-delete-volume.json samples/tasks/scenarios/cinder/create-and-delete-volume.yaml samples/tasks/scenarios/cinder/create-and-extend-volume.json samples/tasks/scenarios/cinder/create-and-extend-volume.yaml 
samples/tasks/scenarios/cinder/create-and-get-volume.json samples/tasks/scenarios/cinder/create-and-get-volume.yaml samples/tasks/scenarios/cinder/create-and-list-encryption-type.json samples/tasks/scenarios/cinder/create-and-list-encryption-type.yaml samples/tasks/scenarios/cinder/create-and-list-snapshots.json samples/tasks/scenarios/cinder/create-and-list-snapshots.yaml samples/tasks/scenarios/cinder/create-and-list-volume-backups.json samples/tasks/scenarios/cinder/create-and-list-volume-backups.yaml samples/tasks/scenarios/cinder/create-and-list-volume.json samples/tasks/scenarios/cinder/create-and-list-volume.yaml samples/tasks/scenarios/cinder/create-and-restore-volume-backup.json samples/tasks/scenarios/cinder/create-and-restore-volume-backup.yaml samples/tasks/scenarios/cinder/create-and-set-volume-type-keys.json samples/tasks/scenarios/cinder/create-and-set-volume-type-keys.yaml samples/tasks/scenarios/cinder/create-and-update-readonly-flag.json samples/tasks/scenarios/cinder/create-and-update-readonly-flag.yaml samples/tasks/scenarios/cinder/create-and-update-volume.json samples/tasks/scenarios/cinder/create-and-update-volume.yaml samples/tasks/scenarios/cinder/create-and-upload-volume-to-image.json samples/tasks/scenarios/cinder/create-and-upload-volume-to-image.yaml samples/tasks/scenarios/cinder/create-encryption-type.json samples/tasks/scenarios/cinder/create-encryption-type.yaml samples/tasks/scenarios/cinder/create-from-image-and-delete-volume.json samples/tasks/scenarios/cinder/create-from-image-and-delete-volume.yaml samples/tasks/scenarios/cinder/create-from-volume-and-delete-volume.json samples/tasks/scenarios/cinder/create-from-volume-and-delete-volume.yaml samples/tasks/scenarios/cinder/create-incremental-volume-backup.json samples/tasks/scenarios/cinder/create-incremental-volume-backup.yaml samples/tasks/scenarios/cinder/create-nested-snapshots-and-attach-volume.json 
samples/tasks/scenarios/cinder/create-nested-snapshots-and-attach-volume.yaml samples/tasks/scenarios/cinder/create-snapshot-and-attach-volume.json samples/tasks/scenarios/cinder/create-snapshot-and-attach-volume.yaml samples/tasks/scenarios/cinder/create-volume-and-clone.json samples/tasks/scenarios/cinder/create-volume-and-clone.yaml samples/tasks/scenarios/cinder/create-volume-backup.json samples/tasks/scenarios/cinder/create-volume-backup.yaml samples/tasks/scenarios/cinder/create-volume-from-snapshot.json samples/tasks/scenarios/cinder/create-volume-from-snapshot.yaml samples/tasks/scenarios/cinder/create-volume.json samples/tasks/scenarios/cinder/create-volume.yaml samples/tasks/scenarios/cinder/list-transfers.json samples/tasks/scenarios/cinder/list-transfers.yaml samples/tasks/scenarios/cinder/list-types.json samples/tasks/scenarios/cinder/list-types.yaml samples/tasks/scenarios/cinder/list-volumes.json samples/tasks/scenarios/cinder/list-volumes.yaml samples/tasks/scenarios/cinder/modify-volume-metadata.json samples/tasks/scenarios/cinder/modify-volume-metadata.yaml samples/tasks/scenarios/designate/create-and-delete-domain.json samples/tasks/scenarios/designate/create-and-delete-domain.yaml samples/tasks/scenarios/designate/create-and-delete-records.json samples/tasks/scenarios/designate/create-and-delete-records.yaml samples/tasks/scenarios/designate/create-and-delete-recordsets.json samples/tasks/scenarios/designate/create-and-delete-recordsets.yaml samples/tasks/scenarios/designate/create-and-delete-server.json samples/tasks/scenarios/designate/create-and-delete-server.yaml samples/tasks/scenarios/designate/create-and-delete-zone.json samples/tasks/scenarios/designate/create-and-delete-zone.yaml samples/tasks/scenarios/designate/create-and-list-domain.json samples/tasks/scenarios/designate/create-and-list-domain.yaml samples/tasks/scenarios/designate/create-and-list-records.json samples/tasks/scenarios/designate/create-and-list-records.yaml 
samples/tasks/scenarios/designate/create-and-list-recordsets.json samples/tasks/scenarios/designate/create-and-list-recordsets.yaml samples/tasks/scenarios/designate/create-and-list-servers.json samples/tasks/scenarios/designate/create-and-list-servers.yaml samples/tasks/scenarios/designate/create-and-list-zones.json samples/tasks/scenarios/designate/create-and-list-zones.yaml samples/tasks/scenarios/designate/create-and-update-domain.json samples/tasks/scenarios/designate/create-and-update-domain.yaml samples/tasks/scenarios/designate/list-domains.json samples/tasks/scenarios/designate/list-domains.yaml samples/tasks/scenarios/designate/list-records.json samples/tasks/scenarios/designate/list-records.yaml samples/tasks/scenarios/designate/list-recordsets.json samples/tasks/scenarios/designate/list-recordsets.yaml samples/tasks/scenarios/designate/list-servers.json samples/tasks/scenarios/designate/list-servers.yaml samples/tasks/scenarios/designate/list-zones.json samples/tasks/scenarios/designate/list-zones.yaml samples/tasks/scenarios/dummy/dummy-exception-probability.json samples/tasks/scenarios/dummy/dummy-exception-probability.yaml samples/tasks/scenarios/dummy/dummy-exception.json samples/tasks/scenarios/dummy/dummy-exception.yaml samples/tasks/scenarios/dummy/dummy-failure.json samples/tasks/scenarios/dummy/dummy-failure.yaml samples/tasks/scenarios/dummy/dummy-output.json samples/tasks/scenarios/dummy/dummy-output.yaml samples/tasks/scenarios/dummy/dummy-random-action.json samples/tasks/scenarios/dummy/dummy-random-action.yaml samples/tasks/scenarios/dummy/dummy-random-fail-in-atomic.json samples/tasks/scenarios/dummy/dummy-random-fail-in-atomic.yaml samples/tasks/scenarios/dummy/dummy-timed-atomic-actions.json samples/tasks/scenarios/dummy/dummy-timed-atomic-actions.yaml samples/tasks/scenarios/dummy/dummy.json samples/tasks/scenarios/dummy/dummy.yaml samples/tasks/scenarios/ec2/boot.json samples/tasks/scenarios/ec2/boot.yaml 
samples/tasks/scenarios/ec2/list-servers.json samples/tasks/scenarios/ec2/list-servers.yaml samples/tasks/scenarios/fuel/add-and-remove-nodes.json samples/tasks/scenarios/fuel/add-and-remove-nodes.yaml samples/tasks/scenarios/fuel/create-and-delete-environments.json samples/tasks/scenarios/fuel/create-and-delete-environments.yaml samples/tasks/scenarios/fuel/create-and-list-environments.json samples/tasks/scenarios/fuel/create-and-list-environments.yaml samples/tasks/scenarios/glance/create-and-delete-image.json samples/tasks/scenarios/glance/create-and-delete-image.yaml samples/tasks/scenarios/glance/create-and-list-image.json samples/tasks/scenarios/glance/create-and-list-image.yaml samples/tasks/scenarios/glance/create-image-and-boot-instances.json samples/tasks/scenarios/glance/create-image-and-boot-instances.yaml samples/tasks/scenarios/glance/list-images.json samples/tasks/scenarios/glance/list-images.yaml samples/tasks/scenarios/heat/create-and-delete-stack-resource-group.json samples/tasks/scenarios/heat/create-and-delete-stack-resource-group.yaml samples/tasks/scenarios/heat/create-and-delete-stack-with-delay.json samples/tasks/scenarios/heat/create-and-delete-stack-with-delay.yaml samples/tasks/scenarios/heat/create-and-delete-stack-with-neutron.json samples/tasks/scenarios/heat/create-and-delete-stack-with-neutron.yaml samples/tasks/scenarios/heat/create-and-delete-stack-with-volume.json samples/tasks/scenarios/heat/create-and-delete-stack-with-volume.yaml samples/tasks/scenarios/heat/create-and-delete-stack.json samples/tasks/scenarios/heat/create-and-delete-stack.yaml samples/tasks/scenarios/heat/create-and-list-stack.json samples/tasks/scenarios/heat/create-and-list-stack.yaml samples/tasks/scenarios/heat/create-check-delete-stack.json samples/tasks/scenarios/heat/create-check-delete-stack.yaml samples/tasks/scenarios/heat/create-snapshot-restore-delete-stack.json samples/tasks/scenarios/heat/create-snapshot-restore-delete-stack.yaml 
samples/tasks/scenarios/heat/create-stack-and-list-output-resource-group.json samples/tasks/scenarios/heat/create-stack-and-list-output-resource-group.yaml samples/tasks/scenarios/heat/create-stack-and-scale.json samples/tasks/scenarios/heat/create-stack-and-scale.yaml samples/tasks/scenarios/heat/create-stack-and-show-output-resource-group.json samples/tasks/scenarios/heat/create-stack-and-show-output-resource-group.yaml samples/tasks/scenarios/heat/create-suspend-resume-delete-stack.json samples/tasks/scenarios/heat/create-suspend-resume-delete-stack.yaml samples/tasks/scenarios/heat/create-update-delete-stack-add-res.json samples/tasks/scenarios/heat/create-update-delete-stack-add-res.yaml samples/tasks/scenarios/heat/create-update-delete-stack-del-res.json samples/tasks/scenarios/heat/create-update-delete-stack-del-res.yaml samples/tasks/scenarios/heat/create-update-delete-stack-increase.json samples/tasks/scenarios/heat/create-update-delete-stack-increase.yaml samples/tasks/scenarios/heat/create-update-delete-stack-inplace.json samples/tasks/scenarios/heat/create-update-delete-stack-inplace.yaml samples/tasks/scenarios/heat/create-update-delete-stack-reduce.json samples/tasks/scenarios/heat/create-update-delete-stack-reduce.yaml samples/tasks/scenarios/heat/create-update-delete-stack-replace.json samples/tasks/scenarios/heat/create-update-delete-stack-replace.yaml samples/tasks/scenarios/heat/list-stack-and-event.json samples/tasks/scenarios/heat/list-stack-and-event.yaml samples/tasks/scenarios/heat/list-stack-and-resources.json samples/tasks/scenarios/heat/list-stack-and-resources.yaml samples/tasks/scenarios/heat/templates/autoscaling-group.yaml.template samples/tasks/scenarios/heat/templates/autoscaling-policy.yaml.template samples/tasks/scenarios/heat/templates/default.yaml.template samples/tasks/scenarios/heat/templates/random-strings.yaml.template samples/tasks/scenarios/heat/templates/resource-group-server-with-volume.yaml.template 
samples/tasks/scenarios/heat/templates/resource-group-with-constraint.yaml.template samples/tasks/scenarios/heat/templates/resource-group-with-outputs.yaml.template samples/tasks/scenarios/heat/templates/resource-group.yaml.template samples/tasks/scenarios/heat/templates/server-with-ports.yaml.template samples/tasks/scenarios/heat/templates/server-with-volume.yaml.template samples/tasks/scenarios/heat/templates/updated-autoscaling-policy-inplace.yaml.template samples/tasks/scenarios/heat/templates/updated-random-strings-add.yaml.template samples/tasks/scenarios/heat/templates/updated-random-strings-delete.yaml.template samples/tasks/scenarios/heat/templates/updated-random-strings-replace.yaml.template samples/tasks/scenarios/heat/templates/updated-resource-group-increase.yaml.template samples/tasks/scenarios/heat/templates/updated-resource-group-reduce.yaml.template samples/tasks/scenarios/ironic/create-and-delete-node.json samples/tasks/scenarios/ironic/create-and-delete-node.yaml samples/tasks/scenarios/ironic/create-and-list-node.json samples/tasks/scenarios/ironic/create-and-list-node.yaml samples/tasks/scenarios/keystone/add-and-remove-user-role.json samples/tasks/scenarios/keystone/add-and-remove-user-role.yaml samples/tasks/scenarios/keystone/authenticate-user-and-validate-token.json samples/tasks/scenarios/keystone/authenticate-user-and-validate-token.yaml samples/tasks/scenarios/keystone/create-add-and-list-user-roles.json samples/tasks/scenarios/keystone/create-add-and-list-user-roles.yaml samples/tasks/scenarios/keystone/create-and-delete-ec2credential.json samples/tasks/scenarios/keystone/create-and-delete-ec2credential.yaml samples/tasks/scenarios/keystone/create-and-delete-role.json samples/tasks/scenarios/keystone/create-and-delete-role.yaml samples/tasks/scenarios/keystone/create-and-delete-service.json samples/tasks/scenarios/keystone/create-and-delete-service.yaml samples/tasks/scenarios/keystone/create-and-delete-user.json 
samples/tasks/scenarios/keystone/create-and-delete-user.yaml samples/tasks/scenarios/keystone/create-and-get-role.json samples/tasks/scenarios/keystone/create-and-get-role.yaml samples/tasks/scenarios/keystone/create-and-list-ec2credentials.json samples/tasks/scenarios/keystone/create-and-list-ec2credentials.yaml samples/tasks/scenarios/keystone/create-and-list-roles.json samples/tasks/scenarios/keystone/create-and-list-roles.yaml samples/tasks/scenarios/keystone/create-and-list-services.json samples/tasks/scenarios/keystone/create-and-list-services.yaml samples/tasks/scenarios/keystone/create-and-list-tenants.json samples/tasks/scenarios/keystone/create-and-list-tenants.yaml samples/tasks/scenarios/keystone/create-and-list-users.json samples/tasks/scenarios/keystone/create-and-list-users.yaml samples/tasks/scenarios/keystone/create-and-update-user.json samples/tasks/scenarios/keystone/create-and-update-user.yaml samples/tasks/scenarios/keystone/create-tenant-with-users.json samples/tasks/scenarios/keystone/create-tenant-with-users.yaml samples/tasks/scenarios/keystone/create-tenant.json samples/tasks/scenarios/keystone/create-tenant.yaml samples/tasks/scenarios/keystone/create-update-and-delete-tenant.json samples/tasks/scenarios/keystone/create-update-and-delete-tenant.yaml samples/tasks/scenarios/keystone/create-user-set-enabled-and-delete.json samples/tasks/scenarios/keystone/create-user-set-enabled-and-delete.yaml samples/tasks/scenarios/keystone/create-user-update-password.json samples/tasks/scenarios/keystone/create-user-update-password.yaml samples/tasks/scenarios/keystone/create-user.json samples/tasks/scenarios/keystone/create-user.yaml samples/tasks/scenarios/keystone/get-entities.json samples/tasks/scenarios/keystone/get-entities.yaml samples/tasks/scenarios/magnum/create-and-list-clusters.json samples/tasks/scenarios/magnum/create-and-list-clusters.yaml samples/tasks/scenarios/magnum/list-cluster-templates.json 
samples/tasks/scenarios/magnum/list-cluster-templates.yaml samples/tasks/scenarios/magnum/list-clusters.json samples/tasks/scenarios/magnum/list-clusters.yaml samples/tasks/scenarios/manila/attach-security-service-to-share-network.json samples/tasks/scenarios/manila/attach-security-service-to-share-network.yaml samples/tasks/scenarios/manila/create-security-service-and-delete.json samples/tasks/scenarios/manila/create-security-service-and-delete.yaml samples/tasks/scenarios/manila/create-share-network-and-delete.json samples/tasks/scenarios/manila/create-share-network-and-delete.yaml samples/tasks/scenarios/manila/create-share-network-and-list.json samples/tasks/scenarios/manila/create-share-network-and-list.yaml samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-delete.json samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-delete.yaml samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-list.json samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-list.yaml samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-set-metadata.json samples/tasks/scenarios/manila/create-share-with-autocreated-share-networks-and-set-metadata.yaml samples/tasks/scenarios/manila/create-share-without-share-networks-and-delete.json samples/tasks/scenarios/manila/create-share-without-share-networks-and-delete.yaml samples/tasks/scenarios/manila/create-share-without-share-networks-and-list.json samples/tasks/scenarios/manila/create-share-without-share-networks-and-list.yaml samples/tasks/scenarios/manila/create-share-without-share-networks-and-set-metadata.json samples/tasks/scenarios/manila/create-share-without-share-networks-and-set-metadata.yaml samples/tasks/scenarios/manila/list-share-servers.json samples/tasks/scenarios/manila/list-share-servers.yaml samples/tasks/scenarios/manila/list-shares.json samples/tasks/scenarios/manila/list-shares.yaml 
samples/tasks/scenarios/mistral/create-delete-execution-with-workflow-name.json samples/tasks/scenarios/mistral/create-delete-execution-with-workflow-name.yaml samples/tasks/scenarios/mistral/create-delete-execution.json samples/tasks/scenarios/mistral/create-delete-execution.yaml samples/tasks/scenarios/mistral/create-delete-workbook.json samples/tasks/scenarios/mistral/create-delete-workbook.yaml samples/tasks/scenarios/mistral/create-execution-with-inputs.json samples/tasks/scenarios/mistral/create-execution-with-inputs.yaml samples/tasks/scenarios/mistral/create-execution-with-params.json samples/tasks/scenarios/mistral/create-execution-with-params.yaml samples/tasks/scenarios/mistral/create-execution-with-workflow-name.json samples/tasks/scenarios/mistral/create-execution-with-workflow-name.yaml samples/tasks/scenarios/mistral/create-execution.json samples/tasks/scenarios/mistral/create-execution.yaml samples/tasks/scenarios/mistral/create-workbook.json samples/tasks/scenarios/mistral/create-workbook.yaml samples/tasks/scenarios/mistral/list-executions.json samples/tasks/scenarios/mistral/list-executions.yaml samples/tasks/scenarios/mistral/list-workbooks.json samples/tasks/scenarios/mistral/list-workbooks.yaml samples/tasks/scenarios/monasca/list-metrics.json samples/tasks/scenarios/monasca/list-metrics.yaml samples/tasks/scenarios/murano/create-and-delete-environment.json samples/tasks/scenarios/murano/create-and-delete-environment.yaml samples/tasks/scenarios/murano/create-and-deploy-environment.json samples/tasks/scenarios/murano/create-and-deploy-environment.yaml samples/tasks/scenarios/murano/import-and-delete-package.json samples/tasks/scenarios/murano/import-and-delete-package.yaml samples/tasks/scenarios/murano/import-and-filter-applications.json samples/tasks/scenarios/murano/import-and-filter-applications.yaml samples/tasks/scenarios/murano/import-and-list-packages.json samples/tasks/scenarios/murano/import-and-list-packages.yaml 
samples/tasks/scenarios/murano/list-environments.json samples/tasks/scenarios/murano/list-environments.yaml samples/tasks/scenarios/murano/package-lifecycle.json samples/tasks/scenarios/murano/package-lifecycle.yaml samples/tasks/scenarios/neutron/create-and-delete-floating-ips.json samples/tasks/scenarios/neutron/create-and-delete-floating-ips.yaml samples/tasks/scenarios/neutron/create-and-delete-healthmonitors.json samples/tasks/scenarios/neutron/create-and-delete-healthmonitors.yaml samples/tasks/scenarios/neutron/create-and-delete-networks.json samples/tasks/scenarios/neutron/create-and-delete-networks.yaml samples/tasks/scenarios/neutron/create-and-delete-pools.json samples/tasks/scenarios/neutron/create-and-delete-pools.yaml samples/tasks/scenarios/neutron/create-and-delete-ports.json samples/tasks/scenarios/neutron/create-and-delete-ports.yaml samples/tasks/scenarios/neutron/create-and-delete-routers.json samples/tasks/scenarios/neutron/create-and-delete-routers.yaml samples/tasks/scenarios/neutron/create-and-delete-security-groups.json samples/tasks/scenarios/neutron/create-and-delete-security-groups.yaml samples/tasks/scenarios/neutron/create-and-delete-subnets.json samples/tasks/scenarios/neutron/create-and-delete-subnets.yaml samples/tasks/scenarios/neutron/create-and-delete-vips.json samples/tasks/scenarios/neutron/create-and-delete-vips.yaml samples/tasks/scenarios/neutron/create-and-list-floating-ips.json samples/tasks/scenarios/neutron/create-and-list-floating-ips.yaml samples/tasks/scenarios/neutron/create-and-list-healthmonitors.json samples/tasks/scenarios/neutron/create-and-list-healthmonitors.yaml samples/tasks/scenarios/neutron/create-and-list-loadbalancers.json samples/tasks/scenarios/neutron/create-and-list-loadbalancers.yaml samples/tasks/scenarios/neutron/create-and-list-networks.json samples/tasks/scenarios/neutron/create-and-list-networks.yaml samples/tasks/scenarios/neutron/create-and-list-pools.json 
samples/tasks/scenarios/neutron/create-and-list-pools.yaml samples/tasks/scenarios/neutron/create-and-list-ports.json samples/tasks/scenarios/neutron/create-and-list-ports.yaml samples/tasks/scenarios/neutron/create-and-list-routers.json samples/tasks/scenarios/neutron/create-and-list-routers.yaml samples/tasks/scenarios/neutron/create-and-list-security-groups.json samples/tasks/scenarios/neutron/create-and-list-security-groups.yaml samples/tasks/scenarios/neutron/create-and-list-subnets.json samples/tasks/scenarios/neutron/create-and-list-subnets.yaml samples/tasks/scenarios/neutron/create-and-list-vips.json samples/tasks/scenarios/neutron/create-and-list-vips.yaml samples/tasks/scenarios/neutron/create-and-show-network.json samples/tasks/scenarios/neutron/create-and-show-network.yaml samples/tasks/scenarios/neutron/create-and-update-healthmonitors.json samples/tasks/scenarios/neutron/create-and-update-healthmonitors.yaml samples/tasks/scenarios/neutron/create-and-update-networks.json samples/tasks/scenarios/neutron/create-and-update-networks.yaml samples/tasks/scenarios/neutron/create-and-update-pools.json samples/tasks/scenarios/neutron/create-and-update-pools.yaml samples/tasks/scenarios/neutron/create-and-update-ports.json samples/tasks/scenarios/neutron/create-and-update-ports.yaml samples/tasks/scenarios/neutron/create-and-update-routers.json samples/tasks/scenarios/neutron/create-and-update-routers.yaml samples/tasks/scenarios/neutron/create-and-update-security-groups.json samples/tasks/scenarios/neutron/create-and-update-security-groups.yaml samples/tasks/scenarios/neutron/create-and-update-subnets.json samples/tasks/scenarios/neutron/create-and-update-subnets.yaml samples/tasks/scenarios/neutron/create-and-update-vips.json samples/tasks/scenarios/neutron/create-and-update-vips.yaml samples/tasks/scenarios/neutron/list-agents.json samples/tasks/scenarios/neutron/list-agents.yaml samples/tasks/scenarios/nova/boot-and-associate-floating-ip.json 
samples/tasks/scenarios/nova/boot-and-associate-floating-ip.yaml samples/tasks/scenarios/nova/boot-and-block-migrate.json samples/tasks/scenarios/nova/boot-and-block-migrate.yaml samples/tasks/scenarios/nova/boot-and-delete-multiple.json samples/tasks/scenarios/nova/boot-and-delete-multiple.yaml samples/tasks/scenarios/nova/boot-and-delete-server-with-keypairs.json samples/tasks/scenarios/nova/boot-and-delete-server-with-keypairs.yaml samples/tasks/scenarios/nova/boot-and-delete-server-with-secgroups.json samples/tasks/scenarios/nova/boot-and-delete-server-with-secgroups.yaml samples/tasks/scenarios/nova/boot-and-delete.json samples/tasks/scenarios/nova/boot-and-delete.yaml samples/tasks/scenarios/nova/boot-and-get-console-server.json samples/tasks/scenarios/nova/boot-and-get-console-server.yaml samples/tasks/scenarios/nova/boot-and-list.json samples/tasks/scenarios/nova/boot-and-list.yaml samples/tasks/scenarios/nova/boot-and-live-migrate.json samples/tasks/scenarios/nova/boot-and-live-migrate.yaml samples/tasks/scenarios/nova/boot-and-migrate.json samples/tasks/scenarios/nova/boot-and-migrate.yaml samples/tasks/scenarios/nova/boot-and-rebuild.json samples/tasks/scenarios/nova/boot-and-rebuild.yaml samples/tasks/scenarios/nova/boot-and-show-server.json samples/tasks/scenarios/nova/boot-and-show-server.yaml samples/tasks/scenarios/nova/boot-and-update-server.json samples/tasks/scenarios/nova/boot-and-update-server.yaml samples/tasks/scenarios/nova/boot-bounce-delete.json samples/tasks/scenarios/nova/boot-bounce-delete.yaml samples/tasks/scenarios/nova/boot-from-volume-and-delete.json samples/tasks/scenarios/nova/boot-from-volume-and-delete.yaml samples/tasks/scenarios/nova/boot-from-volume-and-resize.json samples/tasks/scenarios/nova/boot-from-volume-and-resize.yaml samples/tasks/scenarios/nova/boot-from-volume-snapshot.json samples/tasks/scenarios/nova/boot-from-volume-snapshot.yaml samples/tasks/scenarios/nova/boot-from-volume.json 
samples/tasks/scenarios/nova/boot-from-volume.yaml samples/tasks/scenarios/nova/boot-lock-unlock-and-delete.json samples/tasks/scenarios/nova/boot-lock-unlock-and-delete.yaml samples/tasks/scenarios/nova/boot-server-and-add-secgroup.json samples/tasks/scenarios/nova/boot-server-and-add-secgroup.yaml samples/tasks/scenarios/nova/boot-server-associate-and-dissociate-floating-ip.json samples/tasks/scenarios/nova/boot-server-associate-and-dissociate-floating-ip.yaml samples/tasks/scenarios/nova/boot-server-attach-created-volume-and-live-migrate.json samples/tasks/scenarios/nova/boot-server-attach-created-volume-and-live-migrate.yaml samples/tasks/scenarios/nova/boot-server-attach-created-volume-and-resize.json samples/tasks/scenarios/nova/boot-server-attach-created-volume-and-resize.yaml samples/tasks/scenarios/nova/boot-server-from-volume-and-live-migrate.json samples/tasks/scenarios/nova/boot-server-from-volume-and-live-migrate.yaml samples/tasks/scenarios/nova/boot-snapshot-boot-delete.json samples/tasks/scenarios/nova/boot-snapshot-boot-delete.yaml samples/tasks/scenarios/nova/boot.json samples/tasks/scenarios/nova/boot.yaml samples/tasks/scenarios/nova/create-aggregate-add-and-remove-host.json samples/tasks/scenarios/nova/create-aggregate-add-and-remove-host.yaml samples/tasks/scenarios/nova/create-aggregate-add-host-and-boot-server.json samples/tasks/scenarios/nova/create-aggregate-add-host-and-boot-server.yaml samples/tasks/scenarios/nova/create-and-delete-aggregate.json samples/tasks/scenarios/nova/create-and-delete-aggregate.yaml samples/tasks/scenarios/nova/create-and-delete-flavor.json samples/tasks/scenarios/nova/create-and-delete-flavor.yaml samples/tasks/scenarios/nova/create-and-delete-floating-ips-bulk.json samples/tasks/scenarios/nova/create-and-delete-floating-ips-bulk.yaml samples/tasks/scenarios/nova/create-and-delete-keypair.json samples/tasks/scenarios/nova/create-and-delete-keypair.yaml samples/tasks/scenarios/nova/create-and-delete-network.json 
samples/tasks/scenarios/nova/create-and-delete-network.yaml samples/tasks/scenarios/nova/create-and-delete-secgroups.json samples/tasks/scenarios/nova/create-and-delete-secgroups.yaml samples/tasks/scenarios/nova/create-and-get-aggregate-details.json samples/tasks/scenarios/nova/create-and-get-aggregate-details.yaml samples/tasks/scenarios/nova/create-and-get-flavor.json samples/tasks/scenarios/nova/create-and-get-flavor.yaml samples/tasks/scenarios/nova/create-and-get-keypair.json samples/tasks/scenarios/nova/create-and-get-keypair.yaml samples/tasks/scenarios/nova/create-and-list-aggregates.json samples/tasks/scenarios/nova/create-and-list-aggregates.yaml samples/tasks/scenarios/nova/create-and-list-flavor-access.json samples/tasks/scenarios/nova/create-and-list-flavor-access.yaml samples/tasks/scenarios/nova/create-and-list-floating-ips-bulk.json samples/tasks/scenarios/nova/create-and-list-floating-ips-bulk.yaml samples/tasks/scenarios/nova/create-and-list-keypairs.json samples/tasks/scenarios/nova/create-and-list-keypairs.yaml samples/tasks/scenarios/nova/create-and-list-networks.json samples/tasks/scenarios/nova/create-and-list-networks.yaml samples/tasks/scenarios/nova/create-and-list-secgroups.json samples/tasks/scenarios/nova/create-and-list-secgroups.yaml samples/tasks/scenarios/nova/create-and-list-server-groups.json samples/tasks/scenarios/nova/create-and-list-server-groups.yaml samples/tasks/scenarios/nova/create-and-update-aggregate.json samples/tasks/scenarios/nova/create-and-update-aggregate.yaml samples/tasks/scenarios/nova/create-and-update-secgroups.json samples/tasks/scenarios/nova/create-and-update-secgroups.yaml samples/tasks/scenarios/nova/create-flavor-and-add-tenant-access.json samples/tasks/scenarios/nova/create-flavor-and-add-tenant-access.yaml samples/tasks/scenarios/nova/create-flavor-and-set-keys.json samples/tasks/scenarios/nova/create-flavor-and-set-keys.yaml samples/tasks/scenarios/nova/create-flavor.json 
samples/tasks/scenarios/nova/create-flavor.yaml samples/tasks/scenarios/nova/list-agents.json samples/tasks/scenarios/nova/list-agents.yaml samples/tasks/scenarios/nova/list-aggregates.json samples/tasks/scenarios/nova/list-aggregates.yaml samples/tasks/scenarios/nova/list-and-get-hosts.json samples/tasks/scenarios/nova/list-and-get-hosts.yaml samples/tasks/scenarios/nova/list-and-get-hypervisors.json samples/tasks/scenarios/nova/list-and-get-hypervisors.yaml samples/tasks/scenarios/nova/list-and-get-uptime-hypervisors.json samples/tasks/scenarios/nova/list-and-get-uptime-hypervisors.yaml samples/tasks/scenarios/nova/list-and-search-hypervisor.json samples/tasks/scenarios/nova/list-and-search-hypervisor.yaml samples/tasks/scenarios/nova/list-availability-zones.json samples/tasks/scenarios/nova/list-availability-zones.yaml samples/tasks/scenarios/nova/list-flavors.json samples/tasks/scenarios/nova/list-flavors.yaml samples/tasks/scenarios/nova/list-hosts.json samples/tasks/scenarios/nova/list-hosts.yaml samples/tasks/scenarios/nova/list-hypervisors.json samples/tasks/scenarios/nova/list-hypervisors.yaml samples/tasks/scenarios/nova/list-images.json samples/tasks/scenarios/nova/list-images.yaml samples/tasks/scenarios/nova/list-servers.json samples/tasks/scenarios/nova/list-servers.yaml samples/tasks/scenarios/nova/list-services.json samples/tasks/scenarios/nova/list-services.yaml samples/tasks/scenarios/nova/pause-and-unpause.json samples/tasks/scenarios/nova/pause-and-unpause.yaml samples/tasks/scenarios/nova/resize-server.json samples/tasks/scenarios/nova/resize-server.yaml samples/tasks/scenarios/nova/resize-shutoff-server.json samples/tasks/scenarios/nova/resize-shutoff-server.yaml samples/tasks/scenarios/nova/shelve-and-unshelve.json samples/tasks/scenarios/nova/shelve-and-unshelve.yaml samples/tasks/scenarios/nova/statistics-hypervisors.json samples/tasks/scenarios/nova/statistics-hypervisors.yaml samples/tasks/scenarios/nova/suspend-and-resume.json 
samples/tasks/scenarios/nova/suspend-and-resume.yaml samples/tasks/scenarios/quotas/cinder-get.json samples/tasks/scenarios/quotas/cinder-get.yaml samples/tasks/scenarios/quotas/cinder-update-and-delete.json samples/tasks/scenarios/quotas/cinder-update-and-delete.yaml samples/tasks/scenarios/quotas/cinder-update.json samples/tasks/scenarios/quotas/cinder-update.yaml samples/tasks/scenarios/quotas/neutron-update.json samples/tasks/scenarios/quotas/neutron-update.yaml samples/tasks/scenarios/quotas/nova-get.json samples/tasks/scenarios/quotas/nova-get.yaml samples/tasks/scenarios/quotas/nova-update-and-delete.json samples/tasks/scenarios/quotas/nova-update-and-delete.yaml samples/tasks/scenarios/quotas/nova-update.json samples/tasks/scenarios/quotas/nova-update.yaml samples/tasks/scenarios/requests/check-random-request.json samples/tasks/scenarios/requests/check-random-request.yaml samples/tasks/scenarios/requests/check-request.json samples/tasks/scenarios/requests/check-request.yaml samples/tasks/scenarios/sahara/create-and-delete-cluster.json samples/tasks/scenarios/sahara/create-and-delete-cluster.yaml samples/tasks/scenarios/sahara/create-and-list-node-group-templates.json samples/tasks/scenarios/sahara/create-and-list-node-group-templates.yaml samples/tasks/scenarios/sahara/create-delete-node-group-templates.json samples/tasks/scenarios/sahara/create-delete-node-group-templates.yaml samples/tasks/scenarios/sahara/create-scale-delete-cluster.json samples/tasks/scenarios/sahara/create-scale-delete-cluster.yaml samples/tasks/scenarios/sahara/jobs/dfsio-job-sequence-scaling.json samples/tasks/scenarios/sahara/jobs/dfsio-job-sequence-scaling.yaml samples/tasks/scenarios/sahara/jobs/dfsio-job-sequence.json samples/tasks/scenarios/sahara/jobs/dfsio-job-sequence.yaml samples/tasks/scenarios/sahara/jobs/java-action-job.json samples/tasks/scenarios/sahara/jobs/java-action-job.yaml samples/tasks/scenarios/sahara/jobs/pig-script-job.json 
samples/tasks/scenarios/sahara/jobs/pig-script-job.yaml samples/tasks/scenarios/senlin/create-and-delete-profile-cluster.json samples/tasks/scenarios/senlin/create-and-delete-profile-cluster.yaml samples/tasks/scenarios/swift/create-container-and-object-then-delete-all.json samples/tasks/scenarios/swift/create-container-and-object-then-delete-all.yaml samples/tasks/scenarios/swift/create-container-and-object-then-download-object.json samples/tasks/scenarios/swift/create-container-and-object-then-download-object.yaml samples/tasks/scenarios/swift/create-container-and-object-then-list-objects.json samples/tasks/scenarios/swift/create-container-and-object-then-list-objects.yaml samples/tasks/scenarios/swift/list-and-download-objects-in-containers.json samples/tasks/scenarios/swift/list-and-download-objects-in-containers.yaml samples/tasks/scenarios/swift/list-objects-in-containers.json samples/tasks/scenarios/swift/list-objects-in-containers.yaml samples/tasks/scenarios/vm/boot-runcommand-delete-custom-image.json samples/tasks/scenarios/vm/boot-runcommand-delete-custom-image.yaml samples/tasks/scenarios/vm/boot-runcommand-delete-script-inline.json samples/tasks/scenarios/vm/boot-runcommand-delete-script-inline.yaml samples/tasks/scenarios/vm/boot-runcommand-delete-with-disk.json samples/tasks/scenarios/vm/boot-runcommand-delete-with-disk.yaml samples/tasks/scenarios/vm/boot-runcommand-delete.json samples/tasks/scenarios/vm/boot-runcommand-delete.yaml samples/tasks/scenarios/vm/dd-load-test.json samples/tasks/scenarios/vm/dd-load-test.yaml samples/tasks/scenarios/watcher/create-audit-and-delete.json samples/tasks/scenarios/watcher/create-audit-and-delete.yaml samples/tasks/scenarios/watcher/create-audit-template-and-delete.json samples/tasks/scenarios/watcher/create-audit-template-and-delete.yaml samples/tasks/scenarios/watcher/list-audit-templates.json samples/tasks/scenarios/watcher/list-audit-templates.yaml samples/tasks/scenarios/workload/wordpress.json 
samples/tasks/scenarios/workload/wordpress.yaml samples/tasks/scenarios/zaqar/create-queue.json samples/tasks/scenarios/zaqar/create-queue.yaml samples/tasks/scenarios/zaqar/producer-consumer.json samples/tasks/scenarios/zaqar/producer-consumer.yaml samples/tasks/sla/README.rst samples/tasks/sla/create-and-delete-user.json samples/tasks/sla/create-and-delete-user.yaml samples/tasks/support/README.rst samples/tasks/support/instance_linpack.sh samples/tasks/support/instance_test.sh tests/README.rst tests/__init__.py tests/ci/README.rst tests/ci/__init__.py tests/ci/cover.sh tests/ci/osresources.py tests/ci/pytest_launcher.py tests/ci/rally-gate.sh tests/ci/rally_app.py tests/ci/rally_gate_functions.sh tests/ci/rally_verify.py tests/ci/render.py tests/ci/sync_requirements.py tests/ci/test_install.sh tests/ci/wip-rally-gate.py tests/ci/hooks/certification_post_test_hook.sh tests/functional/__init__.py tests/functional/test_certification_task.py tests/functional/test_cli_deployment.py tests/functional/test_cli_plugin.py tests/functional/test_cli_task.py tests/functional/test_cli_verify.py tests/functional/test_lib_api.py tests/functional/test_task_samples.py tests/functional/utils.py tests/functional/extra/test_fake_scenario.json tests/functional/extra/fake_dir1/fake_plugin1.py tests/functional/extra/fake_dir2/fake_plugin2.py tests/hacking/README.rst tests/hacking/__init__.py tests/hacking/checks.py tests/unit/__init__.py tests/unit/fakes.py tests/unit/test.py tests/unit/test_api.py tests/unit/test_ddt.py tests/unit/test_hacking.py tests/unit/test_logging.py tests/unit/test_mock.py tests/unit/test_osclients.py tests/unit/test_pytest_launcher.py tests/unit/test_resources.py tests/unit/test_test_ddt.py tests/unit/test_test_mock.py tests/unit/cli/__init__.py tests/unit/cli/test_cliutils.py tests/unit/cli/test_envutils.py tests/unit/cli/test_manage.py tests/unit/cli/commands/__init__.py tests/unit/cli/commands/test_deployment.py tests/unit/cli/commands/test_plugin.py 
tests/unit/cli/commands/test_task.py tests/unit/cli/commands/test_verify.py tests/unit/common/__init__.py tests/unit/common/test_broker.py tests/unit/common/test_fileutils.py tests/unit/common/test_logging.py tests/unit/common/test_sshutils.py tests/unit/common/test_streaming_algorithms.py tests/unit/common/test_utils.py tests/unit/common/test_version.py tests/unit/common/test_yamlutils.py tests/unit/common/db/__init__.py tests/unit/common/db/test_api.py tests/unit/common/db/test_migrations.py tests/unit/common/db/test_migrations_base.py tests/unit/common/db/test_types.py tests/unit/common/io/__init__.py tests/unit/common/io/subunit_v2.stream tests/unit/common/io/test_junit.py tests/unit/common/io/test_subunit_v2.py tests/unit/common/objects/__init__.py tests/unit/common/objects/test_credential.py tests/unit/common/objects/test_deploy.py tests/unit/common/objects/test_task.py tests/unit/common/objects/test_verification.py tests/unit/common/objects/test_verifier.py tests/unit/common/plugin/__init__.py tests/unit/common/plugin/test_discover.py tests/unit/common/plugin/test_info.py tests/unit/common/plugin/test_meta.py tests/unit/common/plugin/test_plugin.py tests/unit/deployment/__init__.py tests/unit/deployment/test_engine.py tests/unit/deployment/test_lxc.py tests/unit/deployment/test_multihost.py tests/unit/deployment/engines/__init__.py tests/unit/deployment/engines/test_devstack.py tests/unit/deployment/engines/test_existing.py tests/unit/deployment/serverprovider/__init__.py tests/unit/deployment/serverprovider/test_provider.py tests/unit/deployment/serverprovider/providers/__init__.py tests/unit/deployment/serverprovider/providers/test_cobbler.py tests/unit/deployment/serverprovider/providers/test_existing.py tests/unit/deployment/serverprovider/providers/test_lxc.py tests/unit/deployment/serverprovider/providers/test_openstack.py tests/unit/deployment/serverprovider/providers/test_virsh.py tests/unit/doc/__init__.py tests/unit/doc/missed_docstrings.txt 
tests/unit/doc/test_docstrings.py tests/unit/doc/test_format.py tests/unit/doc/test_jsonschemas.py tests/unit/doc/test_specs.py tests/unit/doc/test_task_samples.py tests/unit/doc/wrong_format.txt tests/unit/plugins/__init__.py tests/unit/plugins/common/__init__.py tests/unit/plugins/common/test_types.py tests/unit/plugins/common/context/__init__.py tests/unit/plugins/common/context/test_dummy.py tests/unit/plugins/common/exporter/__init__.py tests/unit/plugins/common/exporter/test_file_system.py tests/unit/plugins/common/hook/__init__.py tests/unit/plugins/common/hook/test_sys_call.py tests/unit/plugins/common/runners/__init__.py tests/unit/plugins/common/runners/test_constant.py tests/unit/plugins/common/runners/test_rps.py tests/unit/plugins/common/runners/test_serial.py tests/unit/plugins/common/scenarios/__init__.py tests/unit/plugins/common/scenarios/dummy/__init__.py tests/unit/plugins/common/scenarios/dummy/test_dummy.py tests/unit/plugins/common/scenarios/requests/__init__.py tests/unit/plugins/common/scenarios/requests/test_http_requests.py tests/unit/plugins/common/scenarios/requests/test_utils.py tests/unit/plugins/common/sla/__init__.py tests/unit/plugins/common/sla/test_failure_rate.py tests/unit/plugins/common/sla/test_iteration_time.py tests/unit/plugins/common/sla/test_max_average_duration.py tests/unit/plugins/common/sla/test_max_average_duration_per_atomic.py tests/unit/plugins/common/sla/test_ouliers.py tests/unit/plugins/common/sla/test_performance_degradation.py tests/unit/plugins/common/trigger/__init__.py tests/unit/plugins/common/trigger/test_event.py tests/unit/plugins/common/trigger/test_periodic.py tests/unit/plugins/common/verification/__init__.py tests/unit/plugins/common/verification/junit_report.xml tests/unit/plugins/common/verification/test_reporters.py tests/unit/plugins/common/verification/test_testr.py tests/unit/plugins/openstack/__init__.py tests/unit/plugins/openstack/test_scenario.py 
tests/unit/plugins/openstack/test_service.py tests/unit/plugins/openstack/test_types.py tests/unit/plugins/openstack/cleanup/__init__.py tests/unit/plugins/openstack/cleanup/test_base.py tests/unit/plugins/openstack/cleanup/test_manager.py tests/unit/plugins/openstack/cleanup/test_resources.py tests/unit/plugins/openstack/context/__init__.py tests/unit/plugins/openstack/context/test_api_versions.py tests/unit/plugins/openstack/context/test_fuel.py tests/unit/plugins/openstack/context/ceilometer/__init__.py tests/unit/plugins/openstack/context/ceilometer/test_samples.py tests/unit/plugins/openstack/context/cinder/__init__.py tests/unit/plugins/openstack/context/cinder/test_volume_types.py tests/unit/plugins/openstack/context/cinder/test_volumes.py tests/unit/plugins/openstack/context/cleanup/__init__.py tests/unit/plugins/openstack/context/cleanup/test_admin.py tests/unit/plugins/openstack/context/cleanup/test_user.py tests/unit/plugins/openstack/context/dataplane/__init__.py tests/unit/plugins/openstack/context/dataplane/test_heat.py tests/unit/plugins/openstack/context/designate/__init__.py tests/unit/plugins/openstack/context/designate/test_zones.py tests/unit/plugins/openstack/context/ec2/__init__.py tests/unit/plugins/openstack/context/ec2/test_servers.py tests/unit/plugins/openstack/context/glance/__init__.py tests/unit/plugins/openstack/context/glance/test_images.py tests/unit/plugins/openstack/context/heat/__init__.py tests/unit/plugins/openstack/context/heat/test_stacks.py tests/unit/plugins/openstack/context/keystone/__init__.py tests/unit/plugins/openstack/context/keystone/test_existing_users.py tests/unit/plugins/openstack/context/keystone/test_roles.py tests/unit/plugins/openstack/context/keystone/test_users.py tests/unit/plugins/openstack/context/magnum/__init__.py tests/unit/plugins/openstack/context/magnum/test_cluster_templates.py tests/unit/plugins/openstack/context/magnum/test_clusters.py tests/unit/plugins/openstack/context/manila/__init__.py 
tests/unit/plugins/openstack/context/manila/test_manila_security_services.py tests/unit/plugins/openstack/context/manila/test_manila_share_networks.py tests/unit/plugins/openstack/context/manila/test_manila_shares.py tests/unit/plugins/openstack/context/monasca/__init__.py tests/unit/plugins/openstack/context/monasca/test_metrics.py tests/unit/plugins/openstack/context/murano/__init__.py tests/unit/plugins/openstack/context/murano/test_murano_environments.py tests/unit/plugins/openstack/context/murano/test_murano_packages.py tests/unit/plugins/openstack/context/network/__init__.py tests/unit/plugins/openstack/context/network/test_allow_ssh.py tests/unit/plugins/openstack/context/network/test_network.py tests/unit/plugins/openstack/context/neutron/__init__.py tests/unit/plugins/openstack/context/neutron/test_existing_network.py tests/unit/plugins/openstack/context/neutron/test_lbaas.py tests/unit/plugins/openstack/context/nova/__init__.py tests/unit/plugins/openstack/context/nova/test_flavors.py tests/unit/plugins/openstack/context/nova/test_keypairs.py tests/unit/plugins/openstack/context/nova/test_servers.py tests/unit/plugins/openstack/context/quotas/__init__.py tests/unit/plugins/openstack/context/quotas/test_cinder_quotas.py tests/unit/plugins/openstack/context/quotas/test_designate_quotas.py tests/unit/plugins/openstack/context/quotas/test_manila_quotas.py tests/unit/plugins/openstack/context/quotas/test_neutron_quotas.py tests/unit/plugins/openstack/context/quotas/test_nova_quotas.py tests/unit/plugins/openstack/context/quotas/test_quotas.py tests/unit/plugins/openstack/context/sahara/__init__.py tests/unit/plugins/openstack/context/sahara/test_sahara_cluster.py tests/unit/plugins/openstack/context/sahara/test_sahara_image.py tests/unit/plugins/openstack/context/sahara/test_sahara_input_data_sources.py tests/unit/plugins/openstack/context/sahara/test_sahara_job_binaries.py tests/unit/plugins/openstack/context/sahara/test_sahara_output_data_sources.py 
tests/unit/plugins/openstack/context/senlin/__init__.py tests/unit/plugins/openstack/context/senlin/test_profiles.py tests/unit/plugins/openstack/context/swift/__init__.py tests/unit/plugins/openstack/context/swift/test_objects.py tests/unit/plugins/openstack/context/swift/test_utils.py tests/unit/plugins/openstack/context/vm/__init__.py tests/unit/plugins/openstack/context/vm/test_custom_image.py tests/unit/plugins/openstack/context/vm/test_image_command_customizer.py tests/unit/plugins/openstack/context/watcher/__init__.py tests/unit/plugins/openstack/context/watcher/test_audit_templates.py tests/unit/plugins/openstack/hook/__init__.py tests/unit/plugins/openstack/hook/test_fault_injection.py tests/unit/plugins/openstack/scenarios/__init__.py tests/unit/plugins/openstack/scenarios/authenticate/__init__.py tests/unit/plugins/openstack/scenarios/authenticate/test_authenticate.py tests/unit/plugins/openstack/scenarios/ceilometer/__init__.py tests/unit/plugins/openstack/scenarios/ceilometer/test_alarms.py tests/unit/plugins/openstack/scenarios/ceilometer/test_events.py tests/unit/plugins/openstack/scenarios/ceilometer/test_meters.py tests/unit/plugins/openstack/scenarios/ceilometer/test_queries.py tests/unit/plugins/openstack/scenarios/ceilometer/test_resources.py tests/unit/plugins/openstack/scenarios/ceilometer/test_samples.py tests/unit/plugins/openstack/scenarios/ceilometer/test_stats.py tests/unit/plugins/openstack/scenarios/ceilometer/test_traits.py tests/unit/plugins/openstack/scenarios/ceilometer/test_utils.py tests/unit/plugins/openstack/scenarios/cinder/__init__.py tests/unit/plugins/openstack/scenarios/cinder/test_utils.py tests/unit/plugins/openstack/scenarios/cinder/test_volume_backups.py tests/unit/plugins/openstack/scenarios/cinder/test_volume_types.py tests/unit/plugins/openstack/scenarios/cinder/test_volumes.py tests/unit/plugins/openstack/scenarios/designate/__init__.py tests/unit/plugins/openstack/scenarios/designate/test_basic.py 
tests/unit/plugins/openstack/scenarios/designate/test_utils.py tests/unit/plugins/openstack/scenarios/ec2/__init__.py tests/unit/plugins/openstack/scenarios/ec2/test_servers.py tests/unit/plugins/openstack/scenarios/ec2/test_utils.py tests/unit/plugins/openstack/scenarios/fuel/__init__.py tests/unit/plugins/openstack/scenarios/fuel/test_environments.py tests/unit/plugins/openstack/scenarios/fuel/test_nodes.py tests/unit/plugins/openstack/scenarios/fuel/test_utils.py tests/unit/plugins/openstack/scenarios/glance/__init__.py tests/unit/plugins/openstack/scenarios/glance/test_images.py tests/unit/plugins/openstack/scenarios/glance/test_utils.py tests/unit/plugins/openstack/scenarios/heat/__init__.py tests/unit/plugins/openstack/scenarios/heat/test_stacks.py tests/unit/plugins/openstack/scenarios/heat/test_utils.py tests/unit/plugins/openstack/scenarios/ironic/__init__.py tests/unit/plugins/openstack/scenarios/ironic/test_nodes.py tests/unit/plugins/openstack/scenarios/ironic/test_utils.py tests/unit/plugins/openstack/scenarios/keystone/__init__.py tests/unit/plugins/openstack/scenarios/keystone/test_basic.py tests/unit/plugins/openstack/scenarios/keystone/test_utils.py tests/unit/plugins/openstack/scenarios/magnum/__init__.py tests/unit/plugins/openstack/scenarios/magnum/test_cluster_templates.py tests/unit/plugins/openstack/scenarios/magnum/test_clusters.py tests/unit/plugins/openstack/scenarios/magnum/test_utils.py tests/unit/plugins/openstack/scenarios/manila/__init__.py tests/unit/plugins/openstack/scenarios/manila/test_shares.py tests/unit/plugins/openstack/scenarios/manila/test_utils.py tests/unit/plugins/openstack/scenarios/mistral/__init__.py tests/unit/plugins/openstack/scenarios/mistral/test_executions.py tests/unit/plugins/openstack/scenarios/mistral/test_utils.py tests/unit/plugins/openstack/scenarios/mistral/test_workbooks.py tests/unit/plugins/openstack/scenarios/monasca/__init__.py tests/unit/plugins/openstack/scenarios/monasca/test_metrics.py 
tests/unit/plugins/openstack/scenarios/monasca/test_utils.py tests/unit/plugins/openstack/scenarios/murano/__init__.py tests/unit/plugins/openstack/scenarios/murano/test_environments.py tests/unit/plugins/openstack/scenarios/murano/test_packages.py tests/unit/plugins/openstack/scenarios/murano/test_utils.py tests/unit/plugins/openstack/scenarios/neutron/__init__.py tests/unit/plugins/openstack/scenarios/neutron/test_loadbalancer_v1.py tests/unit/plugins/openstack/scenarios/neutron/test_loadbalancer_v2.py tests/unit/plugins/openstack/scenarios/neutron/test_network.py tests/unit/plugins/openstack/scenarios/neutron/test_security_groups.py tests/unit/plugins/openstack/scenarios/neutron/test_utils.py tests/unit/plugins/openstack/scenarios/nova/__init__.py tests/unit/plugins/openstack/scenarios/nova/test_agents.py tests/unit/plugins/openstack/scenarios/nova/test_aggregates.py tests/unit/plugins/openstack/scenarios/nova/test_availability_zones.py tests/unit/plugins/openstack/scenarios/nova/test_flavors.py tests/unit/plugins/openstack/scenarios/nova/test_floating_ips_bulk.py tests/unit/plugins/openstack/scenarios/nova/test_hosts.py tests/unit/plugins/openstack/scenarios/nova/test_hypervisors.py tests/unit/plugins/openstack/scenarios/nova/test_images.py tests/unit/plugins/openstack/scenarios/nova/test_keypairs.py tests/unit/plugins/openstack/scenarios/nova/test_networks.py tests/unit/plugins/openstack/scenarios/nova/test_security_group.py tests/unit/plugins/openstack/scenarios/nova/test_server_groups.py tests/unit/plugins/openstack/scenarios/nova/test_servers.py tests/unit/plugins/openstack/scenarios/nova/test_services.py tests/unit/plugins/openstack/scenarios/nova/test_utils.py tests/unit/plugins/openstack/scenarios/quotas/__init__.py tests/unit/plugins/openstack/scenarios/quotas/test_quotas.py tests/unit/plugins/openstack/scenarios/quotas/test_utils.py tests/unit/plugins/openstack/scenarios/sahara/__init__.py tests/unit/plugins/openstack/scenarios/sahara/test_clusters.py 
tests/unit/plugins/openstack/scenarios/sahara/test_jobs.py tests/unit/plugins/openstack/scenarios/sahara/test_node_group_templates.py tests/unit/plugins/openstack/scenarios/sahara/test_utils.py tests/unit/plugins/openstack/scenarios/senlin/__init__.py tests/unit/plugins/openstack/scenarios/senlin/test_clusters.py tests/unit/plugins/openstack/scenarios/senlin/test_utils.py tests/unit/plugins/openstack/scenarios/swift/__init__.py tests/unit/plugins/openstack/scenarios/swift/test_objects.py tests/unit/plugins/openstack/scenarios/swift/test_utils.py tests/unit/plugins/openstack/scenarios/vm/__init__.py tests/unit/plugins/openstack/scenarios/vm/test_utils.py tests/unit/plugins/openstack/scenarios/vm/test_vmtasks.py tests/unit/plugins/openstack/scenarios/watcher/__init__.py tests/unit/plugins/openstack/scenarios/watcher/test_basic.py tests/unit/plugins/openstack/scenarios/watcher/test_utils.py tests/unit/plugins/openstack/scenarios/zaqar/__init__.py tests/unit/plugins/openstack/scenarios/zaqar/test_basic.py tests/unit/plugins/openstack/scenarios/zaqar/test_utils.py tests/unit/plugins/openstack/services/__init__.py tests/unit/plugins/openstack/services/heat/__init__.py tests/unit/plugins/openstack/services/heat/test_main.py tests/unit/plugins/openstack/services/identity/__init__.py tests/unit/plugins/openstack/services/identity/test_identity.py tests/unit/plugins/openstack/services/identity/test_keystone_common.py tests/unit/plugins/openstack/services/identity/test_keystone_v2.py tests/unit/plugins/openstack/services/identity/test_keystone_v3.py tests/unit/plugins/openstack/verification/__init__.py tests/unit/plugins/openstack/verification/tempest/__init__.py tests/unit/plugins/openstack/verification/tempest/test_config.py tests/unit/plugins/openstack/verification/tempest/test_context.py tests/unit/plugins/openstack/verification/tempest/test_manager.py tests/unit/plugins/openstack/wrappers/__init__.py tests/unit/plugins/openstack/wrappers/test_cinder.py 
tests/unit/plugins/openstack/wrappers/test_glance.py tests/unit/plugins/openstack/wrappers/test_keystone.py tests/unit/plugins/openstack/wrappers/test_network.py tests/unit/plugins/workload/__init__.py tests/unit/plugins/workload/test_siege.py tests/unit/rally_jobs/__init__.py tests/unit/rally_jobs/test_jobs.py tests/unit/task/__init__.py tests/unit/task/test_atomic.py tests/unit/task/test_context.py tests/unit/task/test_engine.py tests/unit/task/test_exporter.py tests/unit/task/test_functional.py tests/unit/task/test_hook.py tests/unit/task/test_runner.py tests/unit/task/test_scenario.py tests/unit/task/test_services.py tests/unit/task/test_sla.py tests/unit/task/test_trigger.py tests/unit/task/test_types.py tests/unit/task/test_utils.py tests/unit/task/test_validation.py tests/unit/task/processing/__init__.py tests/unit/task/processing/test_charts.py tests/unit/task/processing/test_plot.py tests/unit/task/processing/test_utils.py tests/unit/ui/__init__.py tests/unit/ui/test_utils.py tests/unit/verification/__init__.py tests/unit/verification/test_context.py tests/unit/verification/test_manager.py tests/unit/verification/test_reporter.py tests/unit/verification/test_utils.pyrally-0.9.1/rally.egg-info/pbr.json0000664000567000056710000000005613073420065020301 0ustar jenkinsjenkins00000000000000{"git_version": "ef2b617", "is_release": true}rally-0.9.1/rally.egg-info/top_level.txt0000664000567000056710000000000613073420065021350 0ustar jenkinsjenkins00000000000000rally rally-0.9.1/rally.egg-info/dependency_links.txt0000664000567000056710000000000113073420065022670 0ustar jenkinsjenkins00000000000000 rally-0.9.1/babel.cfg0000664000567000056710000000001713073417716015542 0ustar jenkinsjenkins00000000000000[python: **.py]rally-0.9.1/LICENSE0000664000567000056710000002404113073417716015024 0ustar jenkinsjenkins00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. 
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: You must give any other recipients of the Work or Derivative Works a copy of this License; and You must cause any modified files to carry prominent notices stating that You changed the files; and You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; 
within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. rally-0.9.1/.coveragerc0000664000567000056710000000024613073417716016141 0ustar jenkinsjenkins00000000000000[run] branch = True source = rally [report] ignore_errors = True precision = 3 omit = */migrations/versions/ca3626f62937_init_migration.py [html] directory = cover rally-0.9.1/tox.ini0000664000567000056710000000462013073417720015326 0ustar jenkinsjenkins00000000000000[tox] minversion = 1.6 skipsdist = True envlist = py35,py34,py27,pep8 [testenv] setenv = VIRTUAL_ENV={envdir} LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=C PYTHONHASHSEED=0 TOX_ENV_NAME={envname} whitelist_externals = find rm make deps = -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt install_command = pip install -c https://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt -U {opts} {packages} usedevelop = True commands = find . 
-type f -name "*.pyc" -delete python {toxinidir}/tests/ci/pytest_launcher.py tests/unit --posargs={posargs} distribute = false basepython = python2.7 passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY [testenv:pep8] commands = flake8 distribute = false [testenv:py34] basepython = python3.4 [testenv:py35] basepython = python3.5 [testenv:venv] commands = {posargs} [testenv:debug] commands = oslo_debug_helper -t tests {posargs} [testenv:debug34] basepython = python3.4 commands = oslo_debug_helper -t tests {posargs} [testenv:debug35] basepython = python3.5 commands = oslo_debug_helper -t tests {posargs} [testenv:cli] sitepackages = True commands = find . -type f -name "*.pyc" -delete python {toxinidir}/tests/ci/pytest_launcher.py "tests/functional" --timeout -1 --posargs={posargs} [testenv:cover] commands = {toxinidir}/tests/ci/cover.sh {posargs} [testenv:docs] changedir = doc/source commands = rm -rf _build make html [testenv:genconfig] commands = oslo-config-generator --config-file etc/rally/rally-config-generator.conf [testenv:requirements] deps = requests[security] commands = python {toxinidir}/tests/ci/sync_requirements.py {posargs} [flake8] ignore = H703,H105 show-source = true exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,tools,build,setup.py [hacking] import_exceptions = rally.common.i18n local-check-factory = tests.hacking.checks.factory [testenv:bindep] # Do not install any requirements. We want this to be fast and work even if # system dependencies are missing, since it's used to tell you what system # dependencies are missing! This also means that bindep must be installed # separately, outside of the requirements files. 
deps = bindep commands = bindep [testenv:self] # TODO(astudenov): Add real script here that will run rally self test commands = python -c 'print "SUCCESS"' rally-0.9.1/etc/0000775000567000056710000000000013073420067014562 5ustar jenkinsjenkins00000000000000rally-0.9.1/etc/rally.bash_completion0000664000567000056710000000746713073417716021022 0ustar jenkinsjenkins00000000000000#!/bin/bash # Standalone _filedir() alternative. # This exempts from dependence of bash completion routines function _rally_filedir() { test "${1}" \ && COMPREPLY=( \ $(compgen -f -- "${cur}" | grep -E "${1}") \ $(compgen -o plusdirs -- "${cur}") ) \ || COMPREPLY=( \ $(compgen -o plusdirs -f -- "${cur}") \ $(compgen -d -- "${cur}") ) } _rally() { declare -A SUBCOMMANDS declare -A OPTS OPTS["deployment_check"]="--deployment" OPTS["deployment_config"]="--deployment" OPTS["deployment_create"]="--name --fromenv --filename --no-use" OPTS["deployment_destroy"]="--deployment" OPTS["deployment_list"]="" OPTS["deployment_recreate"]="--filename --deployment" OPTS["deployment_show"]="--deployment" OPTS["deployment_use"]="--deployment" OPTS["plugin_list"]="--name --namespace --plugin-base" OPTS["plugin_show"]="--name --namespace" OPTS["task_abort"]="--uuid --soft" OPTS["task_delete"]="--force --uuid" OPTS["task_detailed"]="--uuid --iterations-data" OPTS["task_export"]="--uuid --connection" OPTS["task_list"]="--deployment --all-deployments --status --uuids-only" OPTS["task_report"]="--tasks --out --open --html --html-static --junit" OPTS["task_results"]="--uuid" OPTS["task_sla-check"]="--uuid --json" OPTS["task_sla_check"]="--uuid --json" OPTS["task_start"]="--deployment --task --task-args --task-args-file --tag --no-use --abort-on-sla-failure" OPTS["task_status"]="--uuid" OPTS["task_trends"]="--out --open --tasks" OPTS["task_use"]="--uuid" OPTS["task_validate"]="--deployment --task --task-args --task-args-file" OPTS["verify_add-verifier-ext"]="--id --source --version --extra-settings" 
OPTS["verify_configure-verifier"]="--id --deployment-id --reconfigure --extend --override --show" OPTS["verify_create-verifier"]="--name --type --namespace --source --version --system-wide --extra-settings --no-use" OPTS["verify_delete"]="--uuid" OPTS["verify_delete-verifier"]="--id --deployment-id --force" OPTS["verify_delete-verifier-ext"]="--id --name" OPTS["verify_import"]="--id --deployment-id --file --run-args --no-use" OPTS["verify_list"]="--id --deployment-id --tag --status" OPTS["verify_list-plugins"]="--namespace" OPTS["verify_list-verifier-exts"]="--id" OPTS["verify_list-verifier-tests"]="--id --pattern" OPTS["verify_list-verifiers"]="--status" OPTS["verify_report"]="--uuid --type --to --open" OPTS["verify_rerun"]="--uuid --deployment-id --failed --tag --concurrency --detailed --no-use" OPTS["verify_show"]="--uuid --sort-by --detailed" OPTS["verify_show-verifier"]="--id" OPTS["verify_start"]="--id --deployment-id --tag --pattern --concurrency --load-list --skip-list --xfail-list --detailed --no-use" OPTS["verify_update-verifier"]="--id --update-venv --version --system-wide --no-system-wide" OPTS["verify_use"]="--uuid" OPTS["verify_use-verifier"]="--id" for OPT in ${!OPTS[*]} ; do CMD=${OPT%%_*} CMDSUB=${OPT#*_} SUBCOMMANDS[${CMD}]+="${CMDSUB} " done COMMANDS="${!SUBCOMMANDS[*]}" COMPREPLY=() local cur="${COMP_WORDS[COMP_CWORD]}" local prev="${COMP_WORDS[COMP_CWORD-1]}" if [[ $cur =~ ^(\.|\~|\/) ]] || [[ $prev =~ ^--out(|put-file)$ ]] ; then _rally_filedir elif [[ $prev =~ ^--(task|filename)$ ]] ; then _rally_filedir "\.json|\.yaml|\.yml" elif [ $COMP_CWORD == "1" ] ; then COMPREPLY=($(compgen -W "$COMMANDS" -- ${cur})) elif [ $COMP_CWORD == "2" ] ; then COMPREPLY=($(compgen -W "${SUBCOMMANDS[${prev}]}" -- ${cur})) else COMMAND="${COMP_WORDS[1]}_${COMP_WORDS[2]}" COMPREPLY=($(compgen -W "${OPTS[$COMMAND]}" -- ${cur})) fi return 0 } complete -o filenames -F _rally rallyrally-0.9.1/etc/rally/0000775000567000056710000000000013073420067015705 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/etc/rally/rally.conf.sample0000664000567000056710000006041113073417720021163 0ustar jenkinsjenkins00000000000000[DEFAULT] # # From oslo.log # # If set to true, the logging level will be set to DEBUG instead of # the default INFO level. (boolean value) # Note: This option can be changed without restarting. #debug = false # DEPRECATED: If set to false, the logging level will be set to # WARNING instead of the default INFO level. (boolean value) # This option is deprecated for removal. # Its value may be silently ignored in the future. #verbose = true # The name of a logging configuration file. This file is appended to # any existing logging configuration files. For details about logging # configuration files, see the Python logging module documentation. # Note that when logging configuration files are used then all logging # configuration is set in the configuration file and other logging # configuration options are ignored (for example, # logging_context_format_string). (string value) # Note: This option can be changed without restarting. # Deprecated group/name - [DEFAULT]/log_config #log_config_append = # Defines the format string for %%(asctime)s in log records. Default: # %(default)s . This option is ignored if log_config_append is set. # (string value) #log_date_format = %Y-%m-%d %H:%M:%S # (Optional) Name of log file to send logging output to. If no default # is set, logging will go to stderr as defined by use_stderr. This # option is ignored if log_config_append is set. (string value) # Deprecated group/name - [DEFAULT]/logfile #log_file = # (Optional) The base directory used for relative log_file paths. # This option is ignored if log_config_append is set. (string value) # Deprecated group/name - [DEFAULT]/logdir #log_dir = # Uses logging handler designed to watch file system. When log file is # moved or removed this handler will open a new log file with # specified path instantaneously. 
It makes sense only if log_file # option is specified and Linux platform is used. This option is # ignored if log_config_append is set. (boolean value) #watch_log_file = false # Use syslog for logging. Existing syslog format is DEPRECATED and # will be changed later to honor RFC5424. This option is ignored if # log_config_append is set. (boolean value) #use_syslog = false # Syslog facility to receive log lines. This option is ignored if # log_config_append is set. (string value) #syslog_log_facility = LOG_USER # Log output to standard error. This option is ignored if # log_config_append is set. (boolean value) #use_stderr = true # Format string to use for log messages with context. (string value) #logging_context_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [%(request_id)s %(user_identity)s] %(instance)s%(message)s # Format string to use for log messages when context is undefined. # (string value) #logging_default_format_string = %(asctime)s.%(msecs)03d %(process)d %(levelname)s %(name)s [-] %(instance)s%(message)s # Additional data to append to log message when logging level for the # message is DEBUG. (string value) #logging_debug_format_suffix = %(funcName)s %(pathname)s:%(lineno)d # Prefix each line of exception output with this format. (string # value) #logging_exception_prefix = %(asctime)s.%(msecs)03d %(process)d ERROR %(name)s %(instance)s # Defines the format string for %(user_identity)s that is used in # logging_context_format_string. (string value) #logging_user_identity_format = %(user)s %(tenant)s %(domain)s %(user_domain)s %(project_domain)s # List of package logging levels in logger=LEVEL pairs. This option is # ignored if log_config_append is set. 
(list value) #default_log_levels = amqp=WARN,amqplib=WARN,boto=WARN,qpid=WARN,sqlalchemy=WARN,suds=INFO,oslo.messaging=INFO,iso8601=WARN,requests.packages.urllib3.connectionpool=WARN,urllib3.connectionpool=WARN,websocket=WARN,requests.packages.urllib3.util.retry=WARN,urllib3.util.retry=WARN,keystonemiddleware=WARN,routes.middleware=WARN,stevedore=WARN,taskflow=WARN,keystoneauth=WARN,oslo.cache=INFO,dogpile.core.dogpile=INFO # Enables or disables publication of error events. (boolean value) #publish_errors = false # The format for an instance that is passed with the log message. # (string value) #instance_format = "[instance: %(uuid)s] " # The format for an instance UUID that is passed with the log message. # (string value) #instance_uuid_format = "[instance: %(uuid)s] " # Enables or disables fatal status of deprecations. (boolean value) #fatal_deprecations = false # # From rally # # Print debugging output only for Rally. Off-site components stay # quiet. (boolean value) #rally_debug = false # HTTP timeout for any of OpenStack service in seconds (floating point # value) #openstack_client_http_timeout = 180.0 # Size of raw result chunk in iterations (integer value) # Minimum value: 1 #raw_result_chunk_size = 1000 [benchmark] # # From rally # # Time to sleep after creating a resource before polling for it status # (floating point value) #cinder_volume_create_prepoll_delay = 2.0 # Time to wait for cinder volume to be created. (floating point value) #cinder_volume_create_timeout = 600.0 # Interval between checks when waiting for volume creation. (floating # point value) #cinder_volume_create_poll_interval = 2.0 # Time to wait for cinder volume to be deleted. (floating point value) #cinder_volume_delete_timeout = 600.0 # Interval between checks when waiting for volume deletion. (floating # point value) #cinder_volume_delete_poll_interval = 2.0 # Time to wait for cinder backup to be restored. 
(floating point # value) #cinder_backup_restore_timeout = 600.0 # Interval between checks when waiting for backup restoring. (floating # point value) #cinder_backup_restore_poll_interval = 2.0 # Time to sleep after boot before polling for status (floating point # value) #ec2_server_boot_prepoll_delay = 1.0 # Server boot timeout (floating point value) #ec2_server_boot_timeout = 300.0 # Server boot poll interval (floating point value) #ec2_server_boot_poll_interval = 1.0 # Time to sleep after creating a resource before polling for it status # (floating point value) #glance_image_create_prepoll_delay = 2.0 # Time to wait for glance image to be created. (floating point value) #glance_image_create_timeout = 120.0 # Interval between checks when waiting for image creation. (floating # point value) #glance_image_create_poll_interval = 1.0 # Time(in sec) to sleep after creating a resource before polling for # it status. (floating point value) #heat_stack_create_prepoll_delay = 2.0 # Time(in sec) to wait for heat stack to be created. (floating point # value) #heat_stack_create_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack # creation. (floating point value) #heat_stack_create_poll_interval = 1.0 # Time(in sec) to wait for heat stack to be deleted. (floating point # value) #heat_stack_delete_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack # deletion. (floating point value) #heat_stack_delete_poll_interval = 1.0 # Time(in sec) to wait for stack to be checked. (floating point value) #heat_stack_check_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack # checking. (floating point value) #heat_stack_check_poll_interval = 1.0 # Time(in sec) to sleep after updating a resource before polling for # it status. (floating point value) #heat_stack_update_prepoll_delay = 2.0 # Time(in sec) to wait for stack to be updated. 
(floating point value) #heat_stack_update_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack update. # (floating point value) #heat_stack_update_poll_interval = 1.0 # Time(in sec) to wait for stack to be suspended. (floating point # value) #heat_stack_suspend_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack suspend. # (floating point value) #heat_stack_suspend_poll_interval = 1.0 # Time(in sec) to wait for stack to be resumed. (floating point value) #heat_stack_resume_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack resume. # (floating point value) #heat_stack_resume_poll_interval = 1.0 # Time(in sec) to wait for stack snapshot to be created. (floating # point value) #heat_stack_snapshot_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack snapshot # to be created. (floating point value) #heat_stack_snapshot_poll_interval = 1.0 # Time(in sec) to wait for stack to be restored from snapshot. # (floating point value) #heat_stack_restore_timeout = 3600.0 # Time interval(in sec) between checks when waiting for stack to be # restored. (floating point value) #heat_stack_restore_poll_interval = 1.0 # Time (in sec) to wait for stack to scale up or down. (floating point # value) #heat_stack_scale_timeout = 3600.0 # Time interval (in sec) between checks when waiting for a stack to # scale up or down. (floating point value) #heat_stack_scale_poll_interval = 1.0 # Interval(in sec) between checks when waiting for node creation. # (floating point value) #ironic_node_create_poll_interval = 1.0 # Time(in sec) to sleep after creating a resource before polling for # the status. (floating point value) #magnum_cluster_create_prepoll_delay = 5.0 # Time(in sec) to wait for magnum cluster to be created. (floating # point value) #magnum_cluster_create_timeout = 1200.0 # Time interval(in sec) between checks when waiting for cluster # creation. 
(floating point value) #magnum_cluster_create_poll_interval = 1.0 # Delay between creating Manila share and polling for its status. # (floating point value) #manila_share_create_prepoll_delay = 2.0 # Timeout for Manila share creation. (floating point value) #manila_share_create_timeout = 300.0 # Interval between checks when waiting for Manila share creation. # (floating point value) #manila_share_create_poll_interval = 3.0 # Timeout for Manila share deletion. (floating point value) #manila_share_delete_timeout = 180.0 # Interval between checks when waiting for Manila share deletion. # (floating point value) #manila_share_delete_poll_interval = 2.0 # mistral execution timeout (integer value) #mistral_execution_timeout = 200 # Delay between creating Monasca metrics and polling for its elements. # (floating point value) #monasca_metric_create_prepoll_delay = 15.0 # A timeout in seconds for an environment deploy (integer value) # Deprecated group/name - [benchmark]/deploy_environment_timeout #murano_deploy_environment_timeout = 1200 # Deploy environment check interval in seconds (integer value) # Deprecated group/name - [benchmark]/deploy_environment_check_interval #murano_deploy_environment_check_interval = 5 # Time to sleep after start before polling for status (floating point # value) #nova_server_start_prepoll_delay = 0.0 # Server start timeout (floating point value) #nova_server_start_timeout = 300.0 # Server start poll interval (floating point value) #nova_server_start_poll_interval = 1.0 # Time to sleep after stop before polling for status (floating point # value) #nova_server_stop_prepoll_delay = 0.0 # Server stop timeout (floating point value) #nova_server_stop_timeout = 300.0 # Server stop poll interval (floating point value) #nova_server_stop_poll_interval = 2.0 # Time to sleep after boot before polling for status (floating point # value) #nova_server_boot_prepoll_delay = 1.0 # Server boot timeout (floating point value) #nova_server_boot_timeout = 300.0 # 
Server boot poll interval (floating point value) #nova_server_boot_poll_interval = 1.0 # Time to sleep after delete before polling for status (floating point # value) #nova_server_delete_prepoll_delay = 2.0 # Server delete timeout (floating point value) #nova_server_delete_timeout = 300.0 # Server delete poll interval (floating point value) #nova_server_delete_poll_interval = 2.0 # Time to sleep after reboot before polling for status (floating point # value) #nova_server_reboot_prepoll_delay = 2.0 # Server reboot timeout (floating point value) #nova_server_reboot_timeout = 300.0 # Server reboot poll interval (floating point value) #nova_server_reboot_poll_interval = 2.0 # Time to sleep after rebuild before polling for status (floating # point value) #nova_server_rebuild_prepoll_delay = 1.0 # Server rebuild timeout (floating point value) #nova_server_rebuild_timeout = 300.0 # Server rebuild poll interval (floating point value) #nova_server_rebuild_poll_interval = 1.0 # Time to sleep after rescue before polling for status (floating point # value) #nova_server_rescue_prepoll_delay = 2.0 # Server rescue timeout (floating point value) #nova_server_rescue_timeout = 300.0 # Server rescue poll interval (floating point value) #nova_server_rescue_poll_interval = 2.0 # Time to sleep after unrescue before polling for status (floating # point value) #nova_server_unrescue_prepoll_delay = 2.0 # Server unrescue timeout (floating point value) #nova_server_unrescue_timeout = 300.0 # Server unrescue poll interval (floating point value) #nova_server_unrescue_poll_interval = 2.0 # Time to sleep after suspend before polling for status (floating # point value) #nova_server_suspend_prepoll_delay = 2.0 # Server suspend timeout (floating point value) #nova_server_suspend_timeout = 300.0 # Server suspend poll interval (floating point value) #nova_server_suspend_poll_interval = 2.0 # Time to sleep after resume before polling for status (floating point # value) 
#nova_server_resume_prepoll_delay = 2.0 # Server resume timeout (floating point value) #nova_server_resume_timeout = 300.0 # Server resume poll interval (floating point value) #nova_server_resume_poll_interval = 2.0 # Time to sleep after pause before polling for status (floating point # value) #nova_server_pause_prepoll_delay = 2.0 # Server pause timeout (floating point value) #nova_server_pause_timeout = 300.0 # Server pause poll interval (floating point value) #nova_server_pause_poll_interval = 2.0 # Time to sleep after unpause before polling for status (floating # point value) #nova_server_unpause_prepoll_delay = 2.0 # Server unpause timeout (floating point value) #nova_server_unpause_timeout = 300.0 # Server unpause poll interval (floating point value) #nova_server_unpause_poll_interval = 2.0 # Time to sleep after shelve before polling for status (floating point # value) #nova_server_shelve_prepoll_delay = 2.0 # Server shelve timeout (floating point value) #nova_server_shelve_timeout = 300.0 # Server shelve poll interval (floating point value) #nova_server_shelve_poll_interval = 2.0 # Time to sleep after unshelve before polling for status (floating # point value) #nova_server_unshelve_prepoll_delay = 2.0 # Server unshelve timeout (floating point value) #nova_server_unshelve_timeout = 300.0 # Server unshelve poll interval (floating point value) #nova_server_unshelve_poll_interval = 2.0 # Time to sleep after image_create before polling for status (floating # point value) #nova_server_image_create_prepoll_delay = 0.0 # Server image_create timeout (floating point value) #nova_server_image_create_timeout = 300.0 # Server image_create poll interval (floating point value) #nova_server_image_create_poll_interval = 2.0 # Time to sleep after image_delete before polling for status (floating # point value) #nova_server_image_delete_prepoll_delay = 0.0 # Server image_delete timeout (floating point value) #nova_server_image_delete_timeout = 300.0 # Server image_delete poll 
interval (floating point value) #nova_server_image_delete_poll_interval = 2.0 # Time to sleep after resize before polling for status (floating point # value) #nova_server_resize_prepoll_delay = 2.0 # Server resize timeout (floating point value) #nova_server_resize_timeout = 400.0 # Server resize poll interval (floating point value) #nova_server_resize_poll_interval = 5.0 # Time to sleep after resize_confirm before polling for status # (floating point value) #nova_server_resize_confirm_prepoll_delay = 0.0 # Server resize_confirm timeout (floating point value) #nova_server_resize_confirm_timeout = 200.0 # Server resize_confirm poll interval (floating point value) #nova_server_resize_confirm_poll_interval = 2.0 # Time to sleep after resize_revert before polling for status # (floating point value) #nova_server_resize_revert_prepoll_delay = 0.0 # Server resize_revert timeout (floating point value) #nova_server_resize_revert_timeout = 200.0 # Server resize_revert poll interval (floating point value) #nova_server_resize_revert_poll_interval = 2.0 # Time to sleep after live_migrate before polling for status (floating # point value) #nova_server_live_migrate_prepoll_delay = 1.0 # Server live_migrate timeout (floating point value) #nova_server_live_migrate_timeout = 400.0 # Server live_migrate poll interval (floating point value) #nova_server_live_migrate_poll_interval = 2.0 # Time to sleep after migrate before polling for status (floating # point value) #nova_server_migrate_prepoll_delay = 1.0 # Server migrate timeout (floating point value) #nova_server_migrate_timeout = 400.0 # Server migrate poll interval (floating point value) #nova_server_migrate_poll_interval = 2.0 # Nova volume detach timeout (floating point value) #nova_detach_volume_timeout = 200.0 # Nova volume detach poll interval (floating point value) #nova_detach_volume_poll_interval = 2.0 # A timeout in seconds for a cluster create operation (integer value) # Deprecated group/name - 
[benchmark]/cluster_create_timeout #sahara_cluster_create_timeout = 1800 # A timeout in seconds for a cluster delete operation (integer value) # Deprecated group/name - [benchmark]/cluster_delete_timeout #sahara_cluster_delete_timeout = 900 # Cluster status polling interval in seconds (integer value) # Deprecated group/name - [benchmark]/cluster_check_interval #sahara_cluster_check_interval = 5 # A timeout in seconds for a Job Execution to complete (integer value) # Deprecated group/name - [benchmark]/job_execution_timeout #sahara_job_execution_timeout = 600 # Job Execution status polling interval in seconds (integer value) # Deprecated group/name - [benchmark]/job_check_interval #sahara_job_check_interval = 5 # Amount of workers one proxy should serve to. (integer value) #sahara_workers_per_proxy = 20 # Interval between checks when waiting for a VM to become pingable # (floating point value) #vm_ping_poll_interval = 1.0 # Time to wait for a VM to become pingable (floating point value) #vm_ping_timeout = 120.0 # Watcher audit launch interval (floating point value) #watcher_audit_launch_poll_interval = 2.0 # Watcher audit launch timeout (integer value) #watcher_audit_launch_timeout = 300 [cleanup] # # From rally # # A timeout in seconds for deleting resources (integer value) #resource_deletion_timeout = 600 # Number of cleanup threads to run (integer value) #cleanup_threads = 20 [database] # # From oslo.db # # DEPRECATED: The file name to use with SQLite. (string value) # Deprecated group/name - [DEFAULT]/sqlite_db # This option is deprecated for removal. # Its value may be silently ignored in the future. # Reason: Should use config option connection or slave_connection to # connect the database. #sqlite_db = oslo.sqlite # If True, SQLite uses synchronous mode. (boolean value) # Deprecated group/name - [DEFAULT]/sqlite_synchronous #sqlite_synchronous = true # The back end to use for the database. 
(string value) # Deprecated group/name - [DEFAULT]/db_backend #backend = sqlalchemy # The SQLAlchemy connection string to use to connect to the database. # (string value) # Deprecated group/name - [DEFAULT]/sql_connection # Deprecated group/name - [DATABASE]/sql_connection # Deprecated group/name - [sql]/connection #connection = # The SQLAlchemy connection string to use to connect to the slave # database. (string value) #slave_connection = # The SQL mode to be used for MySQL sessions. This option, including # the default, overrides any server-set SQL mode. To use whatever SQL # mode is set by the server configuration, set this to no value. # Example: mysql_sql_mode= (string value) #mysql_sql_mode = TRADITIONAL # Timeout before idle SQL connections are reaped. (integer value) # Deprecated group/name - [DEFAULT]/sql_idle_timeout # Deprecated group/name - [DATABASE]/sql_idle_timeout # Deprecated group/name - [sql]/idle_timeout #idle_timeout = 3600 # Minimum number of SQL connections to keep open in a pool. (integer # value) # Deprecated group/name - [DEFAULT]/sql_min_pool_size # Deprecated group/name - [DATABASE]/sql_min_pool_size #min_pool_size = 1 # Maximum number of SQL connections to keep open in a pool. Setting a # value of 0 indicates no limit. (integer value) # Deprecated group/name - [DEFAULT]/sql_max_pool_size # Deprecated group/name - [DATABASE]/sql_max_pool_size #max_pool_size = 5 # Maximum number of database connection retries during startup. Set to # -1 to specify an infinite retry count. (integer value) # Deprecated group/name - [DEFAULT]/sql_max_retries # Deprecated group/name - [DATABASE]/sql_max_retries #max_retries = 10 # Interval between retries of opening a SQL connection. (integer # value) # Deprecated group/name - [DEFAULT]/sql_retry_interval # Deprecated group/name - [DATABASE]/reconnect_interval #retry_interval = 10 # If set, use this value for max_overflow with SQLAlchemy. 
(integer # value) # Deprecated group/name - [DEFAULT]/sql_max_overflow # Deprecated group/name - [DATABASE]/sqlalchemy_max_overflow #max_overflow = 50 # Verbosity of SQL debugging information: 0=None, 100=Everything. # (integer value) # Minimum value: 0 # Maximum value: 100 # Deprecated group/name - [DEFAULT]/sql_connection_debug #connection_debug = 0 # Add Python stack traces to SQL as comment strings. (boolean value) # Deprecated group/name - [DEFAULT]/sql_connection_trace #connection_trace = false # If set, use this value for pool_timeout with SQLAlchemy. (integer # value) # Deprecated group/name - [DATABASE]/sqlalchemy_pool_timeout #pool_timeout = # Enable the experimental use of database reconnect on connection # lost. (boolean value) #use_db_reconnect = false # Seconds between retries of a database transaction. (integer value) #db_retry_interval = 1 # If True, increases the interval between retries of a database # operation up to db_max_retry_interval. (boolean value) #db_inc_retry_interval = true # If db_inc_retry_interval is set, the maximum seconds between retries # of a database operation. (integer value) #db_max_retry_interval = 10 # Maximum retries in case of connection error or deadlock error before # error is raised. Set to -1 to specify an infinite retry count. # (integer value) #db_max_retries = 20 [roles_context] # # From rally # # How many concurrent threads to use for serving roles context # (integer value) #resource_management_workers = 30 [tempest] # # From rally # # image URL (string value) #img_url = http://download.cirros-cloud.net/0.3.5/cirros-0.3.5-x86_64-disk.img # Image disk format to use when creating the image (string value) #img_disk_format = qcow2 # Image container format to use when creating the image (string value) #img_container_format = bare # Regular expression for name of a public image to discover it in the # cloud and use it for the tests. 
Note that when Rally is searching # for the image, case insensitive matching is performed. Specify # nothing ('img_name_regex =') if you want to disable discovering. In # this case Rally will create needed resources by itself if the values # for the corresponding config options are not specified in the # Tempest config file (string value) #img_name_regex = ^.*(cirros|testvm).*$ # Role required for users to be able to create Swift containers # (string value) #swift_operator_role = Member # User role that has reseller admin (string value) #swift_reseller_admin_role = ResellerAdmin # Role required for users to be able to manage Heat stacks (string # value) #heat_stack_owner_role = heat_stack_owner # Role for Heat template-defined users (string value) #heat_stack_user_role = heat_stack_user # Primary flavor RAM size used by most of the test cases (integer # value) #flavor_ref_ram = 64 # Alternate reference flavor RAM size used by test thatneed two # flavors, like those that resize an instance (integer value) #flavor_ref_alt_ram = 128 # RAM size flavor used for orchestration test cases (integer value) #heat_instance_type_ram = 64 [users_context] # # From rally # # How many concurrent threads use for serving users context (integer # value) #resource_management_workers = 20 # ID of domain in which projects will be created. (string value) #project_domain = default # ID of domain in which users will be created. (string value) #user_domain = default # The default role name of the keystone. 
(string value) #keystone_default_role = member rally-0.9.1/etc/rally/rally-config-generator.conf0000664000567000056710000000015713073417716023140 0ustar jenkinsjenkins00000000000000[DEFAULT] output_file = etc/rally/rally.conf.sample namespace = rally namespace = oslo.db namespace = oslo.log rally-0.9.1/PKG-INFO0000664000567000056710000001474313073420067015115 0ustar jenkinsjenkins00000000000000Metadata-Version: 1.1 Name: rally Version: 0.9.1 Summary: Benchmark System for OpenStack Home-page: http://docs.openstack.org/developer/rally/ Author: OpenStack Author-email: openstack-dev@lists.openstack.org License: Apache License, Version 2.0 Description: ===== Rally ===== Team and repository tags ======================== .. image:: http://governance.openstack.org/badges/rally.svg :target: http://governance.openstack.org/reference/tags/index.html .. image:: https://img.shields.io/pypi/v/rally.svg :target: https://pypi.python.org/pypi/rally/ :alt: Latest Version .. image:: https://img.shields.io/badge/gitter-join_chat-ff69b4.svg :target: https://gitter.im/rally-dev/Lobby :alt: Gitter Chat .. image:: https://img.shields.io/badge/tasks-trello_board-blue.svg :target: https://trello.com/b/DoD8aeZy/rally :alt: Trello Board .. image:: https://img.shields.io/github/license/openstack/rally.svg :target: https://www.apache.org/licenses/LICENSE-2.0 :alt: Apache License, Version 2.0 What is Rally ============= Rally is a Benchmark-as-a-Service project for OpenStack. Rally is intended to provide the community with a benchmarking tool that is capable of performing **specific**, **complicated** and **reproducible** test cases on **real deployment** scenarios. If you are here, you are probably familiar with OpenStack and you also know that it's a really huge ecosystem of cooperative services. When something fails, performs slowly or doesn't scale, it's really hard to answer different questions on "what", "why" and "where" has happened. 
Another reason why you could be here is that you would like to build an OpenStack CI/CD system that will allow you to improve SLA, performance and stability of OpenStack continuously. The OpenStack QA team mostly works on CI/CD that ensures that new patches don't break some specific single node installation of OpenStack. On the other hand it's clear that such CI/CD is only an indication and does not cover all cases (e.g. if a cloud works well on a single node installation it doesn't mean that it will continue to do so on a 1k servers installation under high load as well). Rally aims to fix this and help us to answer the question "How does OpenStack work at scale?". To make it possible, we are going to automate and unify all steps that are required for benchmarking OpenStack at scale: multi-node OS deployment, verification, benchmarking & profiling. **Rally** workflow can be visualized by the following diagram: .. image:: doc/source/images/Rally-Actions.png :alt: Rally Architecture Who Is Using Rally ================== .. image:: doc/source/images/Rally_who_is_using.png :alt: Who is Using Rally Documentation ============= `Rally documentation on ReadTheDocs `_ is a perfect place to start learning about Rally. It provides you with an **easy** and **illustrative** guidance through this benchmarking tool. For example, check out the `Rally step-by-step tutorial `_ that explains, in a series of lessons, how to explore the power of Rally in benchmarking your OpenStack clouds. Architecture ------------ In terms of software architecture, Rally is built of 4 main components: 1. **Server Providers** - provide servers (virtual servers), with ssh access, in one L3 network. 2. **Deploy Engines** - deploy OpenStack cloud on servers that are presented by **Server Providers** 3. **Verification** - component that runs tempest (or another specific set of tests) against a deployed cloud, collects results & presents them in human readable form. 4. 
**Benchmark engine** - allows to write parameterized benchmark scenarios & run them against the cloud. Use Cases --------- There are 3 major high level Rally Use Cases: .. image:: doc/source/images/Rally-UseCases.png :alt: Rally Use Cases Typical cases where Rally aims to help are: - Automate measuring & profiling focused on how new code changes affect the OS performance; - Using Rally profiler to detect scaling & performance issues; - Investigate how different deployments affect the OS performance: - Find the set of suitable OpenStack deployment architectures; - Create deployment specifications for different loads (amount of controllers, swift nodes, etc.); - Automate the search for hardware best suited for particular OpenStack cloud; - Automate the production cloud specification generation: - Determine terminal loads for basic cloud operations: VM start & stop, Block Device create/destroy & various OpenStack API methods; - Check performance of basic cloud operations in case of different loads. 
Links ---------------------- * Free software: Apache license * Documentation: http://rally.readthedocs.org/en/latest/ * Source: http://git.openstack.org/cgit/openstack/rally * Bugs: http://bugs.launchpad.net/rally * Step-by-step tutorial: http://rally.readthedocs.org/en/latest/tutorial.html * RoadMap: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g * Launchpad page: https://launchpad.net/rally * Gitter chat: https://gitter.im/rally-dev/Lobby * Trello board: https://trello.com/b/DoD8aeZy/rally Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Developers Classifier: Intended Audience :: Information Technology Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 Classifier: Programming Language :: Python :: 2.7 Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.4 Classifier: Programming Language :: Python :: 3.5 rally-0.9.1/setup.cfg0000664000567000056710000000211613073420067015630 0ustar jenkinsjenkins00000000000000[metadata] name = rally summary = Benchmark System for OpenStack description-file = README.rst author = OpenStack author-email = openstack-dev@lists.openstack.org home-page = http://docs.openstack.org/developer/rally/ license = Apache License, Version 2.0 classifier = Environment :: OpenStack Intended Audience :: Developers Intended Audience :: Information Technology License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 Programming Language :: Python :: 3.5 [files] packages = rally data_files = etc/bash_completion.d = etc/rally.bash_completion [entry_points] 
console_scripts = rally = rally.cli.main:main rally-manage = rally.cli.manage:main oslo.config.opts = rally = rally.common.opts:list_opts [global] setup-hooks = pbr.hooks.setup_hook [build_sphinx] all_files = 1 build-dir = doc/build source-dir = doc/source [egg_info] tag_build = tag_date = 0 rally-0.9.1/test-requirements.txt0000664000567000056710000000201313073417716020253 0ustar jenkinsjenkins00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. hacking>=0.12.0,!=0.13.0,<0.14 # Apache Software License pytest>=2.7,<=3.0.7 # MIT # py.test plugin for measuring coverage. pytest-cov>=2.2.1,<=2.4.0 # MIT # py.test plugin for generating HTML reports pytest-html>=1.10.0,<=1.14.2 # Mozilla Public License 2.0 (MPL 2.0) coverage>=4.0,<=4.3.4 # Apache License, Version 2.0 ddt>=1.0.1,<=1.1.1 mock>=2.0,<=2.0.0 python-dateutil>=2.4.2,<=2.6.0 # Simplified BSD testtools>=1.4.0,<=2.2.0 oslosphinx>=4.7.0,<=4.11.0 # Apache Software License oslotest>=1.10.0,<=2.14.0 # Apache Software License testresources>=0.2.4,<=2.0.1 testscenarios>=0.4,<=0.5.0 rally-0.9.1/rally/0000775000567000056710000000000013073420067015132 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/__init__.py0000664000567000056710000000000013073417716017240 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/0000775000567000056710000000000013073420067016422 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/__init__.py0000664000567000056710000000000013073417716020530 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/fileutils.py0000664000567000056710000000742213073417716021010 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import tempfile import zipfile def _read_env_file(path, except_env=None): """Read the environment variable file. :param path: the path of the file :param except_env: the environment variable to avoid in the output :returns: the content of the original file except the line starting with the except_env parameter """ output = [] if os.path.exists(path): with open(path, "r") as env_file: content = env_file.readlines() for line in content: if except_env is None or not line.startswith("%s=" % except_env): output.append(line) return output def load_env_file(path): """Load the environment variable file into os.environ. :param path: the path of the file """ if os.path.exists(path): content = _read_env_file(path) for line in content: (key, sep, value) = line.partition("=") os.environ[key] = value.rstrip() def _rewrite_env_file(path, initial_content): """Rewrite the environment variable file. :param path: the path of the file :param initial_content: the original content of the file """ with open(path, "w+") as env_file: for line in initial_content: env_file.write(line) def update_env_file(path, env_key, env_value): """Update the environment variable file. :param path: the path of the file :param env_key: the key to update :param env_value: the value of the property to update """ output = _read_env_file(path, env_key) output.append("%s=%s" % (env_key, env_value)) _rewrite_env_file(path, output) def update_globals_file(key, value): """Update the globals variables file. 
:param key: the key to update :param value: the value to update """ dir = os.path.expanduser("~/.rally/") if not os.path.exists(dir): os.makedirs(dir) expanded_path = os.path.join(dir, "globals") update_env_file(expanded_path, key, "%s\n" % value) def pack_dir(source_directory, zip_name=None): """Archive content of the directory into .zip Zip content of the source folder excluding root directory into zip archive. When zip_name is specified, it would be used as a destination for the archive. Otherwise method would try to use temporary file as a destination for the archive. :param source_directory: root of the newly created archive. Directory is added recursively. :param zip_name: destination zip file name. :raises IOError: whenever there are IO issues. :returns: path to the newly created zip archive either specified via zip_name or a temporary one. """ if not zip_name: fp = tempfile.NamedTemporaryFile(delete=False) zip_name = fp.name zipf = zipfile.ZipFile(zip_name, mode="w") try: for root, dirs, files in os.walk(source_directory): for f in files: abspath = os.path.join(root, f) relpath = os.path.relpath(abspath, source_directory) zipf.write(abspath, relpath) finally: zipf.close() return zip_name rally-0.9.1/rally/common/objects/0000775000567000056710000000000013073420067020053 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/objects/__init__.py0000664000567000056710000000206013073417716022171 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Contains the Rally objects.""" from rally.common.objects.credential import Credential # noqa from rally.common.objects.deploy import Deployment # noqa from rally.common.objects.task import Subtask # noqa from rally.common.objects.task import Task # noqa from rally.common.objects.task import Workload # noqa from rally.common.objects.verification import Verification # noqa from rally.common.objects.verifier import Verifier # noqa rally-0.9.1/rally/common/objects/deploy.py0000664000567000056710000001017613073417720021730 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import datetime as dt import jsonschema from rally.common.i18n import _, _LW from rally.common import db from rally.common import logging from rally import consts from rally import exceptions LOG = logging.getLogger(__name__) CREDENTIALS_SCHEMA = { "type": "object", "patternProperties": { ".*": { "type": "array", "items": { "type": "object", "properties": { "admin": {"type": "object"}, "users": { "type": "array", "items": {"type": "object"} } }, "required": ["admin", "users"], "additionalProperties": False, }, } }, "minProperties": 1, } class Deployment(object): """Represents a deployment object.""" def __init__(self, deployment=None, **attributes): if deployment: self.deployment = deployment else: self.deployment = db.deployment_create(attributes) def __getitem__(self, key): # TODO(astudenov): remove this in future releases if key == "admin" or key == "users": LOG.warning(_LW("deployment.%s is deprecated in Rally 0.9.0. " "Use deployment.get_credentials_for('openstack')" "['%s'] to get credentials.") % (key, key)) return self.get_credentials_for("openstack")[key] return self.deployment[key] @staticmethod def get(deploy): return Deployment(db.deployment_get(deploy)) @staticmethod def list(status=None, parent_uuid=None, name=None): return db.deployment_list(status, parent_uuid, name) @staticmethod def delete_by_uuid(uuid): db.deployment_delete(uuid) def _update(self, values): self.deployment = db.deployment_update(self.deployment["uuid"], values) def update_status(self, status): self._update({"status": status}) def update_name(self, name): self._update({"name": name}) def update_config(self, config): self._update({"config": config}) def update_credentials(self, credentials): jsonschema.validate(credentials, CREDENTIALS_SCHEMA) self._update({"credentials": credentials}) def get_credentials_for(self, namespace): try: return self.deployment["credentials"][namespace][0] except (KeyError, IndexError) as e: LOG.exception(e) raise exceptions.RallyException(_( "No 
credentials found for %s") % namespace) def set_started(self): self._update({"started_at": dt.datetime.now(), "status": consts.DeployStatus.DEPLOY_STARTED}) def set_completed(self): self._update({"completed_at": dt.datetime.now(), "status": consts.DeployStatus.DEPLOY_FINISHED}) def add_resource(self, provider_name, type=None, info=None): return db.resource_create({ "deployment_uuid": self.deployment["uuid"], "provider_name": provider_name, "type": type, "info": info, }) def get_resources(self, provider_name=None, type=None): return db.resource_get_all(self.deployment["uuid"], provider_name=provider_name, type=type) @staticmethod def delete_resource(resource_id): db.resource_delete(resource_id) def delete(self): db.deployment_delete(self.deployment["uuid"]) rally-0.9.1/rally/common/objects/verifier.py0000664000567000056710000000565713073417720022257 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import db from rally import exceptions from rally.verification import manager class Verifier(object): """Represents a verifier object.""" def __init__(self, verifier): """Init a verifier object. 
:param verifier: Dict representation of a verifier in the database """ self._db_entry = verifier self._deployment = None self._manager = None def __getattr__(self, attr): return self._db_entry[attr] def __getitem__(self, item): return self._db_entry[item] def __str__(self): return "'%s' (UUID=%s)" % (self.name, self.uuid) @classmethod def create(cls, name, vtype, namespace, source, version, system_wide, extra_settings=None): db_entry = db.verifier_create(name=name, vtype=vtype, namespace=namespace, source=source, version=version, system_wide=system_wide, extra_settings=extra_settings) return cls(db_entry) @classmethod def get(cls, verifier_id): return cls(db.verifier_get(verifier_id)) @classmethod def list(cls, status=None): return [cls(db_entry) for db_entry in db.verifier_list(status)] @staticmethod def delete(verifier_id): db.verifier_delete(verifier_id) def update_status(self, status): self.update_properties(status=status) def update_properties(self, **properties): self._db_entry = db.verifier_update(self.uuid, **properties) def set_deployment(self, deployment_id): from rally.common import objects self._deployment = objects.Deployment.get(deployment_id) @property def deployment(self): if self._deployment is None: raise exceptions.RallyException( "Verifier is not linked to any deployment. Please, call " "`set_deployment` method.") return self._deployment @property def manager(self): # lazy load manager to be able to use non-plugin related stuff without # loading plugins if not self._manager: self._manager = manager.VerifierManager.get(self.type, self.namespace)(self) return self._manager rally-0.9.1/rally/common/objects/task.py0000664000567000056710000005316413073417720021402 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import datetime as dt import uuid from rally.common import db from rally.common.i18n import _LE from rally import consts from rally import exceptions from rally.task.processing import charts OUTPUT_SCHEMA = { "type": "object", "properties": { "additive": { "type": "array", "items": { "type": "object", "properties": { "title": {"type": "string"}, "description": {"type": "string"}, "chart_plugin": {"type": "string"}, "data": { "type": "array", "items": { "type": "array", "items": [{"type": "string"}, {"type": "number"}], "additionalItems": False}}, "label": {"type": "string"}, "axis_label": {"type": "string"}}, "required": ["title", "chart_plugin", "data"], "additionalProperties": False } }, "complete": { "type": "array", "items": { "type": "object", "properties": { "title": {"type": "string"}, "description": {"type": "string"}, "chart_plugin": {"type": "string"}, "data": {"anyOf": [ {"type": "array", "items": { "type": "array", "items": [ {"type": "string"}, {"anyOf": [ {"type": "array", "items": {"type": "array", "items": [{"type": "number"}, {"type": "number"}] }}, {"type": "number"}]}]}}, {"type": "object", "properties": { "cols": {"type": "array", "items": {"type": "string"}}, "rows": { "type": "array", "items": { "type": "array", "items": {"anyOf": [{"type": "string"}, {"type": "number"}]}} } }, "required": ["cols", "rows"], "additionalProperties": False}, {"type": "array", "items": {"type": "string"}}, ]}, "label": {"type": "string"}, "axis_label": {"type": "string"} }, "required": ["title", "chart_plugin", "data"], "additionalProperties": 
False } } }, "required": ["additive", "complete"], "additionalProperties": False } HOOK_RUN_RESULT_SCHEMA = { "type": "object", "properties": { "started_at": {"type": "number"}, "finished_at": {"type": "number"}, "triggered_by": { "type": "object", "properties": {"event_type": {"type": "string"}, "value": {}}, "required": ["event_type", "value"], "additionalProperties": False }, "status": {"type": "string"}, "error": { "type": "array", "minItems": 3, "maxItems": 3, "items": {"type": "string"}, }, "output": OUTPUT_SCHEMA, }, "required": ["finished_at", "triggered_by", "status"], "additionalProperties": False } HOOK_RESULTS_SCHEMA = { "type": "object", "properties": { "config": {"type": "object"}, "results": {"type": "array", "items": HOOK_RUN_RESULT_SCHEMA}, "summary": {"type": "object"} }, "required": ["config", "results", "summary"], "additionalProperties": False, } TASK_RESULT_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "key": { "type": "object", "properties": { "kw": { "type": "object" }, "name": { "type": "string" }, "pos": { "type": "integer" }, }, "required": ["kw", "name", "pos"] }, "sla": { "type": "array", "items": { "type": "object", "properties": { "criterion": { "type": "string" }, "detail": { "type": "string" }, "success": { "type": "boolean" } } } }, "hooks": {"type": "array", "items": HOOK_RESULTS_SCHEMA}, "result": { "type": "array", "items": { "type": "object", "properties": { "atomic_actions": { "type": "object" }, "duration": { "type": "number" }, "error": { "type": "array" }, "idle_duration": { "type": "number" }, # NOTE(amaretskiy): "scenario_output" is deprecated # in favor of "output" "scenario_output": { "type": "object", "properties": { "data": { "type": "object" }, "errors": { "type": "string" }, }, "required": ["data", "errors"] }, "output": OUTPUT_SCHEMA }, "required": ["atomic_actions", "duration", "error", "idle_duration"] }, "minItems": 1 }, "load_duration": { "type": "number", }, "full_duration": { 
"type": "number", }, "created_at": { "type": "string" } }, "required": ["key", "sla", "result", "load_duration", "full_duration"], "additionalProperties": False } TASK_EXTENDED_RESULT_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "key": { "type": "object", "properties": { "kw": { "type": "object" }, "name": { "type": "string" }, "pos": { "type": "integer" }, }, "required": ["kw", "name", "pos"] }, "sla": { "type": "array", "items": { "type": "object", "properties": { "criterion": { "type": "string" }, "detail": { "type": "string" }, "success": { "type": "boolean" } } } }, "hooks": {"type": "array", "items": HOOK_RESULTS_SCHEMA}, "iterations": { "type": "array", "items": { "type": "object", "properties": { "timestamp": { "type": "number" }, "atomic_actions": { "type": "object" }, "duration": { "type": "number" }, "error": { "type": "array" }, "idle_duration": { "type": "number" }, "output": OUTPUT_SCHEMA }, "required": ["atomic_actions", "duration", "error", "idle_duration", "output"] }, "minItems": 1 }, "created_at": { "anyOf": [ {"type": "string", "format": "date-time"}, {"type": "null"} ] }, "updated_at": { "anyOf": [ {"type": "string", "format": "date-time"}, {"type": "null"} ] }, "info": { "type": "object", "properties": { "atomic": {"type": "object"}, "iterations_count": {"type": "integer"}, "iterations_failed": {"type": "integer"}, "min_duration": {"type": "number"}, "max_duration": {"type": "number"}, "tstamp_start": {"type": "number"}, "full_duration": {"type": "number"}, "load_duration": {"type": "number"} } } }, "required": ["key", "sla", "iterations", "info"], "additionalProperties": False } class Task(object): """Represents a task object. 
Task states graph INIT -> VALIDATING |-> VALIDATION_FAILED |-> ABORTING -> ABORTED |-> SOFT_ABORTING -> ABORTED |-> CRASHED |-> VALIDATED |-> RUNNING |-> FINISHED |-> ABORTING -> ABORTED |-> SOFT_ABORTING -> ABORTED |-> CRASHED """ # NOTE(andreykurilin): The following stages doesn't contain check for # current status of task. We should add it in the future, since "abort" # cmd should work everywhere. # TODO(andreykurilin): allow abort for each state. NOT_IMPLEMENTED_STAGES_FOR_ABORT = [consts.TaskStatus.VALIDATING, consts.TaskStatus.INIT] def __init__(self, task=None, temporary=False, **attributes): """Task object init :param task: dictionary like object, that represents a task :param temporary: whenever this param is True the task will be created with a random UUID and no database record. Used for special purposes, like task config validation. """ self.is_temporary = temporary if self.is_temporary: self.task = task or {"uuid": str(uuid.uuid4())} self.task.update(attributes) else: self.task = task or db.task_create(attributes) def __getitem__(self, key): return self.task[key] def to_dict(self): db_task = self.task deployment_name = db.deployment_get( self.task["deployment_uuid"])["name"] db_task["deployment_name"] = deployment_name return db_task @staticmethod def get_detailed(task_id): return db.api.task_get_detailed(task_id) @staticmethod def get(uuid): return Task(db.task_get(uuid)) @staticmethod def get_status(uuid): return db.task_get_status(uuid) @staticmethod def list(status=None, deployment=None): return [Task(db_task) for db_task in db.task_list(status, deployment)] @staticmethod def delete_by_uuid(uuid, status=None): db.task_delete(uuid, status=status) def _update(self, values): if not self.is_temporary: self.task = db.task_update(self.task["uuid"], values) def update_status(self, status, allowed_statuses=None): if allowed_statuses: db.task_update_status(self.task["uuid"], status, allowed_statuses) else: self._update({"status": status}) def 
set_validation_failed(self, log): self._update({"status": consts.TaskStatus.VALIDATION_FAILED, "validation_result": log}) def set_failed(self, etype, msg, etraceback): self._update({"status": consts.TaskStatus.CRASHED, "validation_result": { "etype": etype, "msg": msg, "trace": etraceback}}) def add_subtask(self, **subtask): return Subtask(self.task["uuid"], **subtask) def get_results(self): return db.task_result_get_all_by_uuid(self.task["uuid"]) @classmethod def extend_results(cls, results, serializable=False): """Modify and extend results with aggregated data. This is a workaround method that tries to adapt task results to schema of planned DB refactoring, so this method is expected to be simplified after DB refactoring since all the data should be taken as-is directly from the database. Each scenario results have extra `info' with aggregated data, and iterations data is represented by iterator - this simplifies its future implementation as generator and gives ability to process arbitrary number of iterations with low memory usage. 
:param results: list of db.sqlalchemy.models.TaskResult :param serializable: bool, whether to convert json non-serializable types (like datetime) to serializable ones :returns: list of dicts, each dict represents scenario results: key - dict, scenario input data sla - list, SLA results iterations - if serializable, then iterator with iterations data, otherwise a list created_at - if serializable, then str datetime, otherwise absent updated_at - if serializable, then str datetime, otherwise absent info: atomic - dict where key is one of atomic action names and value is dict {min_duration: number, max_duration: number} iterations_count - int number of iterations iterations_failed - int number of iterations with errors min_duration - float minimum iteration duration max_duration - float maximum iteration duration tstamp_start - float timestamp of the first iteration full_duration - float full scenario duration load_duration - float load scenario duration """ extended = [] for scenario_result in results: scenario = dict(scenario_result) tstamp_start = 0 min_duration = 0 max_duration = 0 iterations_failed = 0 atomic = collections.OrderedDict() for itr in scenario["data"]["raw"]: for atomic_name, duration in itr["atomic_actions"].items(): duration = duration or 0 if atomic_name not in atomic: atomic[atomic_name] = {"min_duration": duration, "max_duration": duration} elif duration < atomic[atomic_name]["min_duration"]: atomic[atomic_name]["min_duration"] = duration elif duration > atomic[atomic_name]["max_duration"]: atomic[atomic_name]["max_duration"] = duration if not tstamp_start or itr["timestamp"] < tstamp_start: tstamp_start = itr["timestamp"] if "output" not in itr: itr["output"] = {"additive": [], "complete": []} # NOTE(amaretskiy): Deprecated "scenario_output" # is supported for backward compatibility if ("scenario_output" in itr and itr["scenario_output"]["data"]): itr["output"]["additive"].append( {"items": itr["scenario_output"]["data"].items(), "title": 
"Scenario output", "description": "", "chart": "OutputStackedAreaChart"}) del itr["scenario_output"] if itr["error"]: iterations_failed += 1 else: duration = itr["duration"] or 0 if not min_duration or duration < min_duration: min_duration = duration if not max_duration or duration > max_duration: max_duration = duration for k in "created_at", "updated_at": if serializable: # NOTE(amaretskiy): convert datetime to str, # because json.dumps() does not like datetime if scenario[k] and isinstance(scenario[k], dt.datetime): scenario[k] = scenario[k].strftime("%Y-%d-%mT%H:%M:%S") else: del scenario[k] durations_stat = charts.MainStatsTable( {"iterations_count": len(scenario["data"]["raw"]), "atomic": atomic}) for itr in scenario["data"]["raw"]: durations_stat.add_iteration(itr) scenario["info"] = { "stat": durations_stat.render(), "atomic": atomic, "iterations_count": len(scenario["data"]["raw"]), "iterations_failed": iterations_failed, "min_duration": min_duration, "max_duration": max_duration, "tstamp_start": tstamp_start, "full_duration": scenario["data"]["full_duration"], "load_duration": scenario["data"]["load_duration"]} iterations = sorted(scenario["data"]["raw"], key=lambda itr: itr["timestamp"]) if serializable: scenario["iterations"] = list(iterations) else: scenario["iterations"] = iter(iterations) scenario["sla"] = scenario["data"]["sla"] scenario["hooks"] = scenario["data"].get("hooks", []) del scenario["data"] del scenario["task_uuid"] del scenario["id"] extended.append(scenario) return extended def delete(self, status=None): db.task_delete(self.task["uuid"], status=status) def abort(self, soft=False): current_status = self.get_status(self.task["uuid"]) if current_status in self.NOT_IMPLEMENTED_STAGES_FOR_ABORT: raise exceptions.RallyException( _LE("Failed to abort task '%(uuid)s'. It doesn't implemented " "for '%(stages)s' stages. 
Current task status is " "'%(status)s'.") % {"uuid": self.task["uuid"], "status": current_status, "stages": ", ".join(self.NOT_IMPLEMENTED_STAGES_FOR_ABORT)}) elif current_status in [consts.TaskStatus.FINISHED, consts.TaskStatus.CRASHED, consts.TaskStatus.ABORTED]: raise exceptions.RallyException( _LE("Failed to abort task '%s', since it already " "finished.") % self.task.uuid) new_status = (consts.TaskStatus.SOFT_ABORTING if soft else consts.TaskStatus.ABORTING) self.update_status(new_status, allowed_statuses=( consts.TaskStatus.RUNNING, consts.TaskStatus.SOFT_ABORTING)) class Subtask(object): """Represents a subtask object.""" def __init__(self, task_uuid, **attributes): self.subtask = db.subtask_create(task_uuid, **attributes) def __getitem__(self, key): return self.subtask[key] def add_workload(self, key): return Workload(self.subtask["task_uuid"], self.subtask["uuid"], key) class Workload(object): """Represents a workload object.""" def __init__(self, task_uuid, subtask_uuid, key): self.workload = db.workload_create(task_uuid, subtask_uuid, key) def __getitem__(self, key): return self.workload[key] def add_workload_data(self, chunk_order, workload_data): db.workload_data_create(self.workload["task_uuid"], self.workload["uuid"], chunk_order, workload_data) def set_results(self, data): db.workload_set_results(self.workload["uuid"], data) rally-0.9.1/rally/common/objects/credential.py0000664000567000056710000000437413073417720022551 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
from rally import consts


class Credential(object):
    """Authentication credentials and endpoint data for a single user.

    Wraps everything Rally needs to talk to an OpenStack cloud on behalf
    of one user: Keystone auth URL, user/password, project and domain
    scoping, plus TLS options.
    """

    def __init__(self, auth_url, username, password, tenant_name=None,
                 project_name=None,
                 permission=consts.EndpointPermission.USER,
                 region_name=None, endpoint_type=None,
                 domain_name=None, endpoint=None, user_domain_name=None,
                 project_domain_name=None, https_insecure=False,
                 https_cacert=None):
        """Store the supplied credential fields on the instance.

        :param auth_url: Keystone authentication URL
        :param username: user name
        :param password: user password
        :param tenant_name: project/tenant name (Keystone v2 wording)
        :param project_name: alias for tenant_name; used only when
                             tenant_name is falsy
        :param permission: endpoint permission level (admin or user)
        :param region_name: region to select endpoints from
        :param endpoint_type: endpoint interface (public/internal/admin)
        :param domain_name: domain for v3 authentication
        :param endpoint: explicit endpoint override
        :param user_domain_name: domain the user belongs to (v3)
        :param project_domain_name: domain the project belongs to (v3)
        :param https_insecure: skip TLS certificate verification if True
        :param https_cacert: path to a CA certificate bundle
        """
        self.auth_url = auth_url
        self.username = username
        self.password = password
        # `project_name` is an alias for `tenant_name`; the first
        # non-empty value wins.
        self.tenant_name = tenant_name or project_name
        self.permission = permission
        self.region_name = region_name
        self.endpoint_type = endpoint_type
        self.domain_name = domain_name
        self.user_domain_name = user_domain_name
        self.project_domain_name = project_domain_name
        self.endpoint = endpoint
        # NOTE: attribute names intentionally drop the "https_" prefix.
        self.insecure = https_insecure
        self.cacert = https_cacert

    def to_dict(self, include_permission=False):
        """Serialize the credential to a plain dict.

        :param include_permission: also emit the "permission" key if True
        :returns: dict with the same keys the constructor accepts
                  (using "https_insecure"/"https_cacert" key names)
        """
        # Most keys map 1:1 onto identically named attributes.
        result = {attr: getattr(self, attr)
                  for attr in ("auth_url", "username", "password",
                               "tenant_name", "region_name", "endpoint_type",
                               "domain_name", "endpoint", "user_domain_name",
                               "project_domain_name")}
        # These two serialized key names differ from the attribute names.
        result["https_insecure"] = self.insecure
        result["https_cacert"] = self.cacert
        if include_permission:
            result["permission"] = self.permission
        return result
class Verification(object):
    """Represents a verification object."""

    def __init__(self, verification):
        """Init a verification object.

        :param verification: Dict representation of a verification
                             in the database
        """
        self._db_entry = verification

    def __getattr__(self, attr):
        # Fall back to the underlying DB record for unknown attributes.
        return self._db_entry[attr]

    def __getitem__(self, item):
        return self._db_entry[item]

    @classmethod
    def create(cls, verifier_id, deployment_id, tags=None, run_args=None):
        """Insert a new verification record and wrap it."""
        db_entry = db.verification_create(verifier_id, deployment_id,
                                          tags, run_args)
        return cls(db_entry)

    @classmethod
    def get(cls, verification_uuid):
        """Fetch a single verification by UUID."""
        return cls(db.verification_get(verification_uuid))

    @classmethod
    def list(cls, verifier_id=None, deployment_id=None, tags=None,
             status=None):
        """List verifications, optionally filtered by the given fields."""
        return [cls(entry)
                for entry in db.verification_list(verifier_id, deployment_id,
                                                  tags, status)]

    def delete(self):
        db.verification_delete(self.uuid)

    def _update(self, **properties):
        # Keep the cached DB record in sync with what was persisted.
        self._db_entry = db.verification_update(self.uuid, **properties)

    def update_status(self, status):
        self._update(status=status)

    def finish(self, totals, tests):
        """Persist final results and derive the terminal status.

        :param totals: dict of aggregate counters (failures,
                       unexpected_success, ...)
        :param tests: per-test results to store
        """
        has_bad_results = (totals.get("failures", 0) or
                           totals.get("unexpected_success", 0))
        if has_bad_results:
            status = consts.VerificationStatus.FAILED
        else:
            status = consts.VerificationStatus.FINISHED
        self._update(status=status, tests=tests, **totals)

    def set_error(self, error_message):
        # TODO(andreykurilin): Save error message in the database.
        self.update_status(consts.VerificationStatus.CRASHED)
def inner(self, test_id=None, test_status=None, timestamp=None, file_name=None, file_bytes=None, mime_type=None, test_tags=None, runnable=True, eof=False, route_code=None): if not test_id: return if (test_id.startswith("setUpClass (") or test_id.startswith("tearDown (")): test_id = test_id[test_id.find("(") + 1:-1] tags = _parse_test_tags(test_id) if mime_type: mime_type, charset = mime_type.split("; ")[:2] charset = charset.split("=")[1] else: charset = None func(self, test_id, test_status, timestamp, tags, file_name, file_bytes, test_tags, mime_type, charset) return inner def _parse_test_tags(test_id): tags = [] if test_id.find("[") > -1: tags = test_id.split("[")[1][:-1].split(",") return tags class SubunitV2StreamResult(object): def __init__(self, expected_failures=None, skipped_tests=None, live=False, logger_name=None): self._tests = {} self._expected_failures = expected_failures or {} self._skipped_tests = skipped_tests or {} self._live = live self._logger = logging.getLogger(logger_name or __name__) self._timestamps = {} # NOTE(andreykurilin): _first_timestamp and _last_timestamp variables # are designed to calculate the total time of tests execution. self._first_timestamp = None self._last_timestamp = None # Store unknown entities and process them later. 
self._unknown_entities = {} self._is_parsed = False @staticmethod def _get_test_name(test_id): return test_id.split("[")[0] if test_id.find("[") > -1 else test_id def _check_expected_failure(self, test_id): if (test_id in self._expected_failures or self._get_test_name(test_id) in self._expected_failures): if self._tests[test_id]["status"] == "fail": self._tests[test_id]["status"] = "xfail" if self._expected_failures[test_id]: self._tests[test_id]["reason"] = ( self._expected_failures[test_id]) elif self._tests[test_id]["status"] == "success": self._tests[test_id]["status"] = "uxsuccess" def _process_skipped_tests(self): for t_id in self._skipped_tests.copy(): if t_id not in self._tests: status = "skip" name = self._get_test_name(t_id) self._tests[t_id] = {"status": status, "name": name, "duration": "%.3f" % 0, "tags": _parse_test_tags(t_id)} if self._skipped_tests[t_id]: self._tests[t_id]["reason"] = self._skipped_tests[t_id] status += ": %s" % self._tests[t_id]["reason"] if self._live: self._logger.info("{-} %s ... %s", name, status) self._skipped_tests.pop(t_id) def _parse(self): # NOTE(andreykurilin): When whole test class is marked as skipped or # failed, there is only one event with reason and status. So we should # modify all tests of test class manually. for test_id in self._unknown_entities: known_test_ids = filter(lambda t: t == test_id or t.startswith( "%s." 
% test_id), self._tests) for t_id in known_test_ids: if self._tests[t_id]["status"] == "init": self._tests[t_id]["status"] = ( self._unknown_entities[test_id]["status"]) if self._unknown_entities[test_id].get("reason"): self._tests[t_id]["reason"] = ( self._unknown_entities[test_id]["reason"]) elif self._unknown_entities[test_id].get("traceback"): self._tests[t_id]["traceback"] = ( self._unknown_entities[test_id]["traceback"]) # decode data for test_id in self._tests: for file_name in ["traceback", "reason"]: # TODO(andreykurilin): decode fields based on mime_type if file_name in self._tests[test_id]: self._tests[test_id][file_name] = ( encodeutils.safe_decode( self._tests[test_id][file_name])) self._is_parsed = True @property def tests(self): if not self._is_parsed: self._parse() return self._tests @property def totals(self): td = 0 if self._first_timestamp: td = (self._last_timestamp - self._first_timestamp).total_seconds() return {"tests_count": len(self.tests), "tests_duration": "%.3f" % td, "failures": len(self.filter_tests("fail")), "skipped": len(self.filter_tests("skip")), "success": len(self.filter_tests("success")), "unexpected_success": len(self.filter_tests("uxsuccess")), "expected_failures": len(self.filter_tests("xfail"))} @prepare_input_args def status(self, test_id=None, test_status=None, timestamp=None, tags=None, file_name=None, file_bytes=None, worker=None, mime_type=None, charset=None): if timestamp: if not self._first_timestamp: self._first_timestamp = timestamp self._last_timestamp = timestamp if test_status == "exists": self._tests[test_id] = {"status": "init", "name": self._get_test_name(test_id), "duration": "%.3f" % 0, "tags": tags if tags else []} elif test_id in self._tests: if test_status == "inprogress": # timestamp of test start self._timestamps[test_id] = timestamp self._tests[test_id]["timestamp"] = timestamp.strftime( "%Y-%m-%dT%H:%M:%S%z") elif test_status: self._tests[test_id]["duration"] = "%.3f" % ( timestamp - 
self._timestamps[test_id]).total_seconds() self._tests[test_id]["status"] = test_status self._check_expected_failure(test_id) else: if file_name in ["traceback", "reason"]: if file_name not in self._tests[test_id]: self._tests[test_id][file_name] = file_bytes else: self._tests[test_id][file_name] += file_bytes else: self._unknown_entities.setdefault(test_id, {"name": test_id}) self._unknown_entities[test_id]["status"] = test_status if file_name in ["traceback", "reason"]: if file_name not in self._unknown_entities[test_id]: self._unknown_entities[test_id][file_name] = file_bytes else: self._unknown_entities[test_id][file_name] += file_bytes if self._skipped_tests: self._process_skipped_tests() if self._live and test_status not in (None, "exists", "inprogress"): duration = "" if test_id in self._tests: status = self._tests[test_id]["status"] duration = " [%ss]" % self._tests[test_id]["duration"] else: status = test_status status += duration if "xfail" in status or "skip" in status: if test_id in self._tests: reason = self._tests[test_id].get("reason") else: reason = self._unknown_entities[test_id].get("reason") if reason: status += ": %s" % reason w = "{%s} " % worker.pop().split("-")[1] if worker else "-" self._logger.info( "%s ... 
%s", w + self._get_test_name(test_id), status) def filter_tests(self, status): """Filter tests by given status.""" filtered_tests = {} for test in self.tests: if self.tests[test]["status"] == status: filtered_tests[test] = self.tests[test] return filtered_tests def parse(stream, expected_failures=None, skipped_tests=None, live=False, logger_name=None): results = SubunitV2StreamResult(expected_failures, skipped_tests, live, logger_name) v2.ByteStreamToStreamResult(stream, "non-subunit").run(results) return results def parse_file(filename, expected_failures=None, skipped_tests=None, live=False, logger_name=None): with open(filename, "rb") as stream: return parse(stream, expected_failures, skipped_tests, live, logger_name) rally-0.9.1/rally/common/io/junit.py0000664000567000056710000000451213073417716020545 0ustar jenkinsjenkins00000000000000# Copyright 2015: eNovance # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import xml.etree.ElementTree as ET


class JUnit(object):
    """Minimal builder for JUnit-style XML test reports."""

    SUCCESS = "success"
    FAILURE = "failure"
    ERROR = "error"

    def __init__(self, test_suite_name):
        """Create an empty test suite.

        :param test_suite_name: value of the <testsuite> "name" attribute
        """
        self.test_suite_name = test_suite_name
        self.test_cases = []
        self.n_tests = 0
        self.n_failures = 0
        self.n_errors = 0
        self.total_time = 0.0

    def add_test(self, test_name, time, outcome=SUCCESS, message=""):
        """Record one test case result.

        :param test_name: dotted name; text before the first dot becomes
                          the JUnit "classname", the rest the "name"
        :param time: test duration in seconds
        :param outcome: one of SUCCESS, FAILURE or ERROR
        :param message: failure/error message (ignored for SUCCESS)
        :raises ValueError: if outcome is not a recognized constant
        """
        # NOTE: validate the outcome *before* mutating any state, so an
        # invalid call cannot leave the suite with a recorded test case
        # that is missing from the counters (the previous implementation
        # appended first and raised afterwards).
        if outcome not in (JUnit.SUCCESS, JUnit.FAILURE, JUnit.ERROR):
            raise ValueError("Unexpected outcome %s" % outcome)

        class_name, name = test_name.split(".", 1)
        self.test_cases.append({
            "classname": class_name,
            "name": name,
            # "%.2f" already yields a str; no extra str() wrapper needed.
            "time": "%.2f" % time,
            "outcome": outcome,
            "message": message
        })

        if outcome == JUnit.FAILURE:
            self.n_failures += 1
        elif outcome == JUnit.ERROR:
            self.n_errors += 1
        self.n_tests += 1
        self.total_time += time

    def to_xml(self):
        """Render the suite as a JUnit XML document string.

        :returns: str, the serialized <testsuite> element
        """
        xml = ET.Element("testsuite", {
            "name": self.test_suite_name,
            "tests": str(self.n_tests),
            "time": "%.2f" % self.total_time,
            "failures": str(self.n_failures),
            "errors": str(self.n_errors),
        })
        for test_case in self.test_cases:
            outcome = test_case.pop("outcome")
            message = test_case.pop("message")
            if outcome in [JUnit.FAILURE, JUnit.ERROR]:
                # Failures/errors get a child element carrying the message.
                sub = ET.SubElement(xml, "testcase", test_case)
                sub.append(ET.Element(outcome, {"message": message}))
            else:
                xml.append(ET.Element("testcase", test_case))
        return ET.tostring(xml, encoding="utf-8").decode("utf-8")
"""oslo.i18n integration module for rally. See http://docs.openstack.org/developer/oslo.i18n/usage.html . """ import oslo_i18n _translators = oslo_i18n.TranslatorFactory(domain="rally") # The primary translation function using the well-known name "_" _ = _translators.primary # Translators for log levels. # # The abbreviated names are meant to reflect the usual use of a short # name like '_'. The "L" is for "log" and the other letter comes from # the level. _LI = _translators.log_info _LW = _translators.log_warning _LE = _translators.log_error _LC = _translators.log_critical rally-0.9.1/rally/common/version.py0000664000567000056710000000203413073417716020467 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from pbr import version as pbr_version from rally.common.db import api RALLY_VENDOR = "OpenStack Foundation" RALLY_PRODUCT = "OpenStack Rally" RALLY_PACKAGE = None # OS distro package version suffix loaded = False version_info = pbr_version.VersionInfo("rally") def version_string(): return version_info.semantic_version().debian_string() def database_revision(): return api.schema_revision(detailed=True) rally-0.9.1/rally/common/sshutils.py0000664000567000056710000002447413073417716020674 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """High level ssh library. Usage examples: Execute command and get output: ssh = sshclient.SSH("root", "example.com", port=33) status, stdout, stderr = ssh.execute("ps ax") if status: raise Exception("Command failed with non-zero status.") print stdout.splitlines() Execute command with huge output: class PseudoFile(object): def write(chunk): if "error" in chunk: email_admin(chunk) ssh = sshclient.SSH("root", "example.com") ssh.run("tail -f /var/log/syslog", stdout=PseudoFile(), timeout=False) Execute local script on remote side: ssh = sshclient.SSH("user", "example.com") status, out, err = ssh.execute("/bin/sh -s arg1 arg2", stdin=open("~/myscript.sh", "r")) Upload file: ssh = sshclient.SSH("user", "example.com") ssh.run("cat > ~/upload/file.gz", stdin=open("/store/file.gz", "rb")) Eventlet: eventlet.monkey_patch(select=True, time=True) or eventlet.monkey_patch() or sshclient = eventlet.import_patched("opentstack.common.sshclient") """ import os import select import socket import time import paramiko import six from rally.common import logging from rally import exceptions LOG = logging.getLogger(__name__) class SSH(object): """Represent ssh connection.""" def __init__(self, user, host, port=22, pkey=None, key_filename=None, password=None): """Initialize SSH client. 
:param user: ssh username :param host: hostname or ip address of remote ssh server :param port: remote ssh port :param pkey: RSA or DSS private key string or file object :param key_filename: private key filename :param password: password """ self.user = user self.host = host self.port = port self.pkey = self._get_pkey(pkey) if pkey else None self.password = password self.key_filename = key_filename self._client = False def _get_pkey(self, key): if isinstance(key, six.string_types): key = six.moves.StringIO(key) errors = [] for key_class in (paramiko.rsakey.RSAKey, paramiko.dsskey.DSSKey): try: return key_class.from_private_key(key) except paramiko.SSHException as e: errors.append(e) raise exceptions.SSHError("Invalid pkey: %s" % (errors)) def _get_client(self): if self._client: return self._client try: self._client = paramiko.SSHClient() self._client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self._client.connect(self.host, username=self.user, port=self.port, pkey=self.pkey, key_filename=self.key_filename, password=self.password, timeout=1) return self._client except Exception as e: message = ("Exception %(exception_type)s was raised " "during connect to %(user)s@%(host)s:%(port)s. " "Exception value is: %(exception)r") self._client = False raise exceptions.SSHError(message % {"exception": e, "user": self.user, "host": self.host, "port": self.port, "exception_type": type(e)}) def close(self): self._client.close() self._client = False def run(self, cmd, stdin=None, stdout=None, stderr=None, raise_on_error=True, timeout=3600): """Execute specified command on the server. :param cmd: Command to be executed. :param stdin: Open file or string to pass to stdin. :param stdout: Open file to connect to stdout. :param stderr: Open file to connect to stderr. :param raise_on_error: If False then exit code will be return. If True then exception will be raised if non-zero code. :param timeout: Timeout in seconds for command execution. Default 1 hour. 
No timeout if set to 0. """ client = self._get_client() if isinstance(stdin, six.string_types): stdin = six.moves.StringIO(stdin) return self._run(client, cmd, stdin=stdin, stdout=stdout, stderr=stderr, raise_on_error=raise_on_error, timeout=timeout) def _run(self, client, cmd, stdin=None, stdout=None, stderr=None, raise_on_error=True, timeout=3600): if isinstance(cmd, (list, tuple)): cmd = " ".join(six.moves.shlex_quote(str(p)) for p in cmd) transport = client.get_transport() session = transport.open_session() session.exec_command(cmd) start_time = time.time() data_to_send = "" stderr_data = None # If we have data to be sent to stdin then `select' should also # check for stdin availability. if stdin and not stdin.closed: writes = [session] else: writes = [] while True: # Block until data can be read/write. r, w, e = select.select([session], writes, [session], 1) if session.recv_ready(): data = session.recv(4096) LOG.debug("stdout: %r" % data) if stdout is not None: stdout.write(data.decode("utf8")) continue if session.recv_stderr_ready(): stderr_data = session.recv_stderr(4096) LOG.debug("stderr: %r" % stderr_data) if stderr is not None: stderr.write(stderr_data.decode("utf8")) continue if session.send_ready(): if stdin is not None and not stdin.closed: if not data_to_send: data_to_send = stdin.read(4096) if not data_to_send: stdin.close() session.shutdown_write() writes = [] continue sent_bytes = session.send(data_to_send) LOG.debug("sent: %s" % data_to_send[:sent_bytes]) data_to_send = data_to_send[sent_bytes:] if session.exit_status_ready(): break if timeout and (time.time() - timeout) > start_time: args = {"cmd": cmd, "host": self.host} raise exceptions.SSHTimeout("Timeout executing command " "'%(cmd)s' on host %(host)s" % args) if e: raise exceptions.SSHError("Socket error.") exit_status = session.recv_exit_status() if 0 != exit_status and raise_on_error: fmt = "Command '%(cmd)s' failed with exit_status %(status)d." 
details = fmt % {"cmd": cmd, "status": exit_status} if stderr_data: details += " Last stderr data: '%s'." % stderr_data raise exceptions.SSHError(details) return exit_status def execute(self, cmd, stdin=None, timeout=3600): """Execute the specified command on the server. :param cmd: Command to be executed, can be a list. :param stdin: Open file to be sent on process stdin. :param timeout: Timeout for execution of the command. :returns: tuple (exit_status, stdout, stderr) """ stdout = six.moves.StringIO() stderr = six.moves.StringIO() exit_status = self.run(cmd, stderr=stderr, stdout=stdout, stdin=stdin, timeout=timeout, raise_on_error=False) stdout.seek(0) stderr.seek(0) return (exit_status, stdout.read(), stderr.read()) def wait(self, timeout=120, interval=1): """Wait for the host will be available via ssh.""" start_time = time.time() while True: try: return self.execute("uname") except (socket.error, exceptions.SSHError) as e: LOG.debug("Ssh is still unavailable: %r" % e) time.sleep(interval) if time.time() > (start_time + timeout): raise exceptions.SSHTimeout("Timeout waiting for '%s'" % self.host) def _put_file_sftp(self, localpath, remotepath, mode=None): client = self._get_client() with client.open_sftp() as sftp: sftp.put(localpath, remotepath) if mode is None: mode = 0o777 & os.stat(localpath).st_mode sftp.chmod(remotepath, mode) def _put_file_shell(self, localpath, remotepath, mode=None): cmd = ["cat > %s" % remotepath] if mode is not None: cmd.append("chmod 0%o %s" % (mode, remotepath)) with open(localpath, "rb") as localfile: cmd = "; ".join(cmd) self.run(cmd, stdin=localfile) def put_file(self, localpath, remotepath, mode=None): """Copy specified local file to the server. :param localpath: Local filename. :param remotepath: Remote filename. 
:param mode: Permissions to set after upload """ import socket try: self._put_file_sftp(localpath, remotepath, mode=mode) except (paramiko.SSHException, socket.error): self._put_file_shell(localpath, remotepath, mode=mode) rally-0.9.1/rally/common/db/0000775000567000056710000000000013073420067017007 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/db/__init__.py0000664000567000056710000000124013073417716021124 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.db.api import * # noqa rally-0.9.1/rally/common/db/sqlalchemy/0000775000567000056710000000000013073420067021151 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/db/sqlalchemy/models.py0000664000567000056710000003440713073417720023020 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ SQLAlchemy models for rally data. 
""" import uuid from oslo_db.sqlalchemy.compat import utils as compat_utils from oslo_db.sqlalchemy import models from oslo_utils import timeutils import sqlalchemy as sa from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import schema from rally.common.db.sqlalchemy import types as sa_types from rally import consts BASE = declarative_base() def UUID(): return str(uuid.uuid4()) class RallyBase(models.ModelBase): metadata = None created_at = sa.Column(sa.DateTime, default=lambda: timeutils.utcnow()) updated_at = sa.Column(sa.DateTime, default=lambda: timeutils.utcnow(), onupdate=lambda: timeutils.utcnow()) def save(self, session=None): # NOTE(LimingWu): We can't direct import the api module. That will # result in the cyclic reference import since the api has imported # this module. from rally.common.db.sqlalchemy import api as sa_api if session is None: session = sa_api.get_session() super(RallyBase, self).save(session=session) class Deployment(BASE, RallyBase): """Represent a deployment of OpenStack.""" __tablename__ = "deployments" __table_args__ = ( sa.Index("deployment_uuid", "uuid", unique=True), sa.Index("deployment_parent_uuid", "parent_uuid"), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) parent_uuid = sa.Column( sa.String(36), sa.ForeignKey(uuid, use_alter=True, name="fk_parent_uuid"), default=None, ) name = sa.Column(sa.String(255), unique=True) started_at = sa.Column(sa.DateTime) completed_at = sa.Column(sa.DateTime) # XXX(akscram): Do we need to explicitly store a name of the # deployment engine? 
# engine_name = sa.Column(sa.String(36)) config = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False, ) credentials = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) status = sa.Column( sa.Enum(*consts.DeployStatus, name="enum_deploy_status"), name="enum_deployments_status", default=consts.DeployStatus.DEPLOY_INIT, nullable=False, ) parent = sa.orm.relationship( "Deployment", backref=sa.orm.backref("subdeploys"), remote_side=[uuid], foreign_keys=parent_uuid, ) class Resource(BASE, RallyBase): """Represent a resource of a deployment.""" __tablename__ = "resources" __table_args__ = ( sa.Index("resource_deployment_uuid", "deployment_uuid"), sa.Index("resource_provider_name", "deployment_uuid", "provider_name"), sa.Index("resource_type", "deployment_uuid", "type"), sa.Index("resource_provider_name_and_type", "deployment_uuid", "provider_name", "type"), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) provider_name = sa.Column(sa.String(255)) type = sa.Column(sa.String(255)) info = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False, ) deployment_uuid = sa.Column( sa.String(36), sa.ForeignKey(Deployment.uuid), nullable=False, ) deployment = sa.orm.relationship( Deployment, backref=sa.orm.backref("resources"), foreign_keys=deployment_uuid, primaryjoin=(deployment_uuid == Deployment.uuid), ) class Task(BASE, RallyBase): """Represents a task.""" __tablename__ = "tasks" __table_args__ = ( sa.Index("task_uuid", "uuid", unique=True), sa.Index("task_status", "status"), sa.Index("task_deployment", "deployment_uuid"), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) deployment_uuid = sa.Column( sa.String(36), sa.ForeignKey(Deployment.uuid), nullable=False, ) deployment = sa.orm.relationship( Deployment, backref=sa.orm.backref("tasks"), foreign_keys=deployment_uuid, primaryjoin=(deployment_uuid == Deployment.uuid), ) 
input_task = sa.Column(sa.Text, default="") title = sa.Column(sa.String(64), default="") description = sa.Column(sa.Text, default="") validation_result = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) validation_duration = sa.Column(sa.Float) task_duration = sa.Column(sa.Float) pass_sla = sa.Column(sa.Boolean) status = sa.Column(sa.String(36), default=consts.TaskStatus.INIT) class Subtask(BASE, RallyBase): __tablename__ = "subtasks" __table_args__ = ( sa.Index("subtask_uuid", "uuid", unique=True), sa.Index("subtask_status", "status"), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) task_uuid = sa.Column( sa.String(36), sa.ForeignKey(Task.uuid), nullable=False, ) task = sa.orm.relationship( Task, backref=sa.orm.backref("subtasks"), foreign_keys=task_uuid, primaryjoin=(task_uuid == Task.uuid), ) title = sa.Column(sa.String(64), default="") description = sa.Column(sa.Text, default="") context = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) sla = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) run_in_parallel = sa.Column(sa.Boolean, default=False, nullable=False) duration = sa.Column(sa.Float) pass_sla = sa.Column(sa.Boolean) status = sa.Column(sa.String(36), default=consts.SubtaskStatus.RUNNING) class Workload(BASE, RallyBase): __tablename__ = "workloads" __table_args__ = ( sa.Index("workload_uuid", "uuid", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) task_uuid = sa.Column( sa.String(36), sa.ForeignKey(Task.uuid), nullable=False, ) subtask_uuid = sa.Column( sa.String(36), sa.ForeignKey(Subtask.uuid), nullable=False, ) subtask = sa.orm.relationship( Subtask, backref=sa.orm.backref("workloads"), foreign_keys=subtask_uuid, primaryjoin=(subtask_uuid == Subtask.uuid), ) name = sa.Column(sa.String(64), nullable=False) description = 
sa.Column(sa.Text, default="") position = sa.Column(sa.Integer, default=0, nullable=False) runner = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) runner_type = sa.Column(sa.String(64), nullable=False) context = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) sla = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) args = sa.Column( sa_types.JSONEncodedDict, default={}, nullable=False) hooks = sa.Column( sa_types.JSONEncodedList, default=[], nullable=False) sla_results = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) context_execution = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) start_time = sa.Column(sa.DateTime, default=lambda: timeutils.utcnow()) load_duration = sa.Column(sa.Float, default=0) full_duration = sa.Column(sa.Float, default=0) min_duration = sa.Column(sa.Float, default=0) max_duration = sa.Column(sa.Float, default=0) total_iteration_count = sa.Column(sa.Integer, default=0) failed_iteration_count = sa.Column(sa.Integer, default=0) statistics = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) pass_sla = sa.Column(sa.Boolean) _profiling_data = sa.Column(sa.Text, default="") class WorkloadData(BASE, RallyBase): __tablename__ = "workloaddata" __table_args__ = ( sa.Index("workload_data_uuid", "uuid", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) task_uuid = sa.Column( sa.String(36), sa.ForeignKey(Task.uuid), nullable=False, ) workload_uuid = sa.Column( sa.String(36), sa.ForeignKey(Workload.uuid), nullable=False, ) workload = sa.orm.relationship( Workload, backref=sa.orm.backref("workload_data"), foreign_keys=workload_uuid, primaryjoin=(workload_uuid == Workload.uuid), ) chunk_order = sa.Column(sa.Integer, nullable=False) iteration_count = sa.Column(sa.Integer, nullable=False) failed_iteration_count = sa.Column(sa.Integer, nullable=False) 
chunk_size = sa.Column(sa.Integer, nullable=False) compressed_chunk_size = sa.Column(sa.Integer, nullable=False) started_at = sa.Column(sa.DateTime, default=lambda: timeutils.utcnow(), nullable=False) finished_at = sa.Column(sa.DateTime, default=lambda: timeutils.utcnow(), nullable=False) # chunk_data = sa.Column(sa.Text, nullable=False) chunk_data = sa.Column( sa_types.MutableJSONEncodedDict, default={}, nullable=False) class Tag(BASE, RallyBase): __tablename__ = "tags" __table_args__ = ( sa.Index("d_type_tag", "uuid", "type", "tag", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) type = sa.Column(sa.String(36), nullable=False) tag = sa.Column(sa.String(255), nullable=False) class Verifier(BASE, RallyBase): """Represents a verifier.""" __tablename__ = "verifiers" __table_args__ = ( sa.Index("verifier_uuid", "uuid", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) name = sa.Column(sa.String(255), unique=True) description = sa.Column(sa.Text) type = sa.Column(sa.String(255), nullable=False) namespace = sa.Column(sa.String(255)) source = sa.Column(sa.String(255)) version = sa.Column(sa.String(255)) system_wide = sa.Column(sa.Boolean) status = sa.Column(sa.String(36), default=consts.VerifierStatus.INIT, nullable=False) extra_settings = sa.Column(sa_types.MutableJSONEncodedDict) class Verification(BASE, RallyBase): """Represents a verification.""" __tablename__ = "verifications" __table_args__ = ( sa.Index("verification_uuid", "uuid", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) verifier_uuid = sa.Column(sa.String(36), sa.ForeignKey(Verifier.uuid), nullable=False) deployment_uuid = sa.Column(sa.String(36), sa.ForeignKey(Deployment.uuid), nullable=False) run_args = 
sa.Column(sa_types.MutableJSONEncodedDict) status = sa.Column(sa.String(36), default=consts.VerificationStatus.INIT, nullable=False) tests_count = sa.Column(sa.Integer, default=0) failures = sa.Column(sa.Integer, default=0) skipped = sa.Column(sa.Integer, default=0) success = sa.Column(sa.Integer, default=0) unexpected_success = sa.Column(sa.Integer, default=0) expected_failures = sa.Column(sa.Integer, default=0) tests_duration = sa.Column(sa.Float, default=0.0) tests = sa.Column(sa_types.MutableJSONEncodedDict, default={}) class Worker(BASE, RallyBase): __tablename__ = "workers" __table_args__ = ( schema.UniqueConstraint("hostname", name="uniq_worker@hostname"), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) hostname = sa.Column(sa.String(255)) # TODO(boris-42): Remove it after oslo.db > 1.4.1 will be released. def drop_all_objects(engine): """Drop all database objects. Drops all database objects remaining on the default schema of the given engine. Per-db implementations will also need to drop items specific to those systems, such as sequences, custom types (e.g. pg ENUM), etc. """ with engine.begin() as conn: inspector = sa.inspect(engine) metadata = schema.MetaData() tbs = [] all_fks = [] for table_name in inspector.get_table_names(): fks = [] for fk in inspector.get_foreign_keys(table_name): if not fk["name"]: continue fks.append( schema.ForeignKeyConstraint((), (), name=fk["name"])) table = schema.Table(table_name, metadata, *fks) tbs.append(table) all_fks.extend(fks) if engine.name != "sqlite": for fkc in all_fks: conn.execute(schema.DropConstraint(fkc)) for table in tbs: conn.execute(schema.DropTable(table)) if engine.name == "postgresql": if compat_utils.sqla_100: enums = [e["name"] for e in sa.inspect(conn).get_enums()] else: enums = conn.dialect._load_enums(conn).keys() for e in enums: conn.execute("DROP TYPE %s" % e) def drop_db(): # NOTE(LimingWu): We can't direct import the api module. 
That will # result in the cyclic reference import since the api has imported # this module. from rally.common.db.sqlalchemy import api as sa_api drop_all_objects(sa_api.get_engine()) rally-0.9.1/rally/common/db/sqlalchemy/__init__.py0000664000567000056710000000000013073417716023257 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/0000775000567000056710000000000013073420067023325 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/0000775000567000056710000000000013073420067025175 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/92aaaa2a6bb3_refactor_credentials.py0000664000567000056710000000452513073417716033764 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """refactor_credentials Revision ID: 92aaaa2a6bb3 Revises: 4ef544102ba7 Create Date: 2017-02-01 12:52:43.499663 """ # revision identifiers, used by Alembic. 
revision = "92aaaa2a6bb3" down_revision = "4ef544102ba7" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions deployments_helper = sa.Table( "deployments", sa.MetaData(), sa.Column("name", sa.String(255), unique=True), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("credentials", sa.PickleType, nullable=True), sa.Column("new_credentials", sa_types.MutableJSONEncodedDict, default={}, nullable=False) ) def upgrade(): with op.batch_alter_table("deployments") as batch_op: batch_op.add_column( sa.Column("new_credentials", sa_types.MutableJSONEncodedDict, default={})) connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): creds = {} for cred_type, cred_obj in deployment.credentials: creds.setdefault(cred_type, []) creds[cred_type].append(cred_obj) connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( new_credentials=creds)) with op.batch_alter_table("deployments") as batch_op: batch_op.drop_column("credentials") batch_op.alter_column("new_credentials", new_column_name="credentials", existing_type=sa_types.MutableJSONEncodedDict, nullable=False) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/32fada9b2fde_remove_admin_domain_name.py0000664000567000056710000000411713073417716034671 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Remove admin domain name Revision ID: 32fada9b2fde Revises: 5b983f0c9b9a Create Date: 2016-08-29 08:32:30.818019 """ # revision identifiers, used by Alembic. revision = "32fada9b2fde" down_revision = "6ad4f426f005" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions deployments_helper = sa.Table( "deployments", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column( "config", sa_types.MutableJSONEncodedDict, default={}, nullable=False, ) ) def upgrade(): connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): conf = deployment.config if conf["type"] != "ExistingCloud": continue should_update = False if "admin_domain_name" in conf["admin"]: del conf["admin"]["admin_domain_name"] should_update = True if "users" in conf: for user in conf["users"]: if "admin_domain_name" in user: del user["admin_domain_name"] should_update = True if should_update: connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( config=conf)) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/54e844ebfbc3_update_deployment_configs.py0000664000567000056710000000561613073417716035002 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """Update_deployment_configs Previously we had bad deployment config validation Revision ID: 54e844ebfbc3 Revises: 3177d36ea270 Create Date: 2016-07-24 14:53:39.323105 """ # revision identifiers, used by Alembic. revision = "54e844ebfbc3" down_revision = "3177d36ea270" branch_labels = None depends_on = None from alembic import op # noqa import sqlalchemy as sa # noqa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions deployments_helper = sa.Table( "deployments", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column( "config", sa_types.MutableJSONEncodedDict, default={}, nullable=False, ) ) def _check_user_entry(user): """Fixes wrong format of users.""" if "tenant_name" in user: keys = set(user.keys()) if keys == {"username", "password", "tenant_name", "project_domain_name", "user_domain_name"}: if (user["user_domain_name"] == "" and user["project_domain_name"] == ""): # it is credentials of keystone v2 and they were created # --fromenv del user["user_domain_name"] del user["project_domain_name"] return True else: # it looks like keystone v3 credentials user["project_name"] = user.pop("tenant_name") return True def upgrade(): connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): conf = deployment.config if conf["type"] != "ExistingCloud": continue should_update = False if _check_user_entry(conf["admin"]): should_update = True if "users" in conf: for user in conf["users"]: if _check_user_entry(user): should_update = True if conf.get("endpoint_type") == "public": del conf["endpoint_type"] should_update = True if should_update: connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( config=conf)) def downgrade(): raise exceptions.DowngradeNotSupported() 
././@LongLink0000000000000000000000000000015000000000000011211 Lustar 00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/f33f4610dcda_change_verification_statuses.pyrally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/f33f4610dcda_change_verification_statuses0000664000567000056710000000355013073417716035024 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change verification statuses Revision ID: f33f4610dcda Revises: a6f364988fc2 Create Date: 2017-01-23 13:56:30.999593 """ # revision identifiers, used by Alembic. 
revision = "f33f4610dcda" down_revision = "a6f364988fc2" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally import exceptions verifications_helper = sa.Table( "verifications", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("failures", sa.Integer, default=0), sa.Column("unexpected_success", sa.Integer, default=0), sa.Column("status", sa.String(36), nullable=False) ) def upgrade(): connection = op.get_bind() for v in connection.execute(verifications_helper.select()): new_status = v.status if v.status == "finished" and ( v.failures != 0 or v.unexpected_success != 0): new_status = "failed" elif v.status == "failed": new_status = "crashed" else: pass if new_status != v.status: connection.execute(verifications_helper.update().where( verifications_helper.c.id == v.id).values( status=new_status)) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/ca3626f62937_init_migration.py0000664000567000056710000002002513073417716032336 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Init migration Revision ID: ca3626f62937 Revises: Create Date: 2016-01-07 00:27:39.687814 """ # revision identifiers, used by Alembic. 
revision = "ca3626f62937" down_revision = None branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa import rally from rally.common.db.sqlalchemy import api from rally import exceptions def upgrade(): dialect = api.get_engine().dialect deployments_columns = [ sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("parent_uuid", sa.String(length=36), nullable=True), sa.Column("name", sa.String(length=255), nullable=True), sa.Column("started_at", sa.DateTime(), nullable=True), sa.Column("completed_at", sa.DateTime(), nullable=True), sa.Column( "config", rally.common.db.sqlalchemy.types.MutableJSONEncodedDict(), nullable=False), sa.Column("admin", sa.PickleType(), nullable=True), sa.Column("users", sa.PickleType(), nullable=False), sa.Column("enum_deployments_status", sa.Enum( "cleanup->failed", "cleanup->finished", "cleanup->started", "deploy->failed", "deploy->finished", "deploy->inconsistent", "deploy->init", "deploy->started", "deploy->subdeploy", name="enum_deploy_status"), nullable=False), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("name") ] if dialect.name.startswith("sqlite"): deployments_columns.append( sa.ForeignKeyConstraint( ["parent_uuid"], [u"deployments.uuid"], name="fk_parent_uuid", use_alter=True) ) # commands auto generated by Alembic - please adjust! 
op.create_table("deployments", *deployments_columns) op.create_index("deployment_parent_uuid", "deployments", ["parent_uuid"], unique=False) op.create_index("deployment_uuid", "deployments", ["uuid"], unique=True) if not dialect.name.startswith("sqlite"): op.create_foreign_key("fk_parent_uuid", "deployments", "deployments", ["parent_uuid"], ["uuid"]) op.create_table( "workers", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("hostname", sa.String(length=255), nullable=True), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("hostname", name="uniq_worker@hostname") ) op.create_table( "resources", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("provider_name", sa.String(length=255), nullable=True), sa.Column("type", sa.String(length=255), nullable=True), sa.Column( "info", rally.common.db.sqlalchemy.types.MutableJSONEncodedDict(), nullable=False), sa.Column("deployment_uuid", sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"]), sa.PrimaryKeyConstraint("id") ) op.create_index("resource_deployment_uuid", "resources", ["deployment_uuid"], unique=False) op.create_index("resource_provider_name", "resources", ["deployment_uuid", "provider_name"], unique=False) op.create_index("resource_provider_name_and_type", "resources", ["deployment_uuid", "provider_name", "type"], unique=False) op.create_index("resource_type", "resources", ["deployment_uuid", "type"], unique=False) op.create_table( "tasks", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("status", sa.Enum( "aborted", "aborting", "cleaning up", "failed", 
"finished", "init", "paused", "running", "setting up", "soft_aborting", "verifying", name="enum_tasks_status"), nullable=False), sa.Column("verification_log", sa.Text(), nullable=True), sa.Column("tag", sa.String(length=64), nullable=True), sa.Column("deployment_uuid", sa.String(length=36), nullable=False), sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_index("task_deployment", "tasks", ["deployment_uuid"], unique=False) op.create_index("task_status", "tasks", ["status"], unique=False) op.create_index("task_uuid", "tasks", ["uuid"], unique=True) op.create_table( "verifications", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("deployment_uuid", sa.String(length=36), nullable=False), sa.Column("status", sa.Enum( "aborted", "aborting", "cleaning up", "failed", "finished", "init", "paused", "running", "setting up", "soft_aborting", "verifying", name="enum_tasks_status"), nullable=False), sa.Column("set_name", sa.String(length=20), nullable=True), sa.Column("tests", sa.Integer(), nullable=True), sa.Column("errors", sa.Integer(), nullable=True), sa.Column("failures", sa.Integer(), nullable=True), sa.Column("time", sa.Float(), nullable=True), sa.ForeignKeyConstraint(["deployment_uuid"], [u"deployments.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_index("verification_uuid", "verifications", ["uuid"], unique=True) op.create_table( "task_results", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column( "key", rally.common.db.sqlalchemy.types.MutableJSONEncodedDict(), nullable=False), sa.Column( "data", rally.common.db.sqlalchemy.types.MutableJSONEncodedDict(), nullable=False), sa.Column("task_uuid", 
sa.String(length=36), nullable=True), sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_table( "verification_results", sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("verification_uuid", sa.String(length=36), nullable=True), sa.Column( "data", rally.common.db.sqlalchemy.types.MutableJSONEncodedDict(), nullable=False), sa.ForeignKeyConstraint(["verification_uuid"], ["verifications.uuid"]), sa.PrimaryKeyConstraint("id") ) # end Alembic commands def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/a6f364988fc2_change_tag_type_enum.py0000664000567000056710000000351013073417716033557 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change tag type enum Revision ID: a6f364988fc2 Revises: 37fdbb373e8d Create Date: 2017-01-17 18:47:10.700459 """ # revision identifiers, used by Alembic. 
revision = "a6f364988fc2" down_revision = "37fdbb373e8d" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally import exceptions TAG_TYPES = ["task", "subtask"] tag_helper = sa.Table( "tags", sa.MetaData(), sa.Column("id", sa.Integer(), nullable=False), sa.Column("type", sa.Enum(*TAG_TYPES, name="enum_tag_types"), nullable=False), sa.Column("new_type", sa.String(36), nullable=False) ) def upgrade(): with op.batch_alter_table("tags") as batch_op: batch_op.add_column( sa.Column("new_type", sa.String(36))) op.execute(tag_helper.update().values(new_type=tag_helper.c.type)) op.drop_index("d_type_tag", "tags") with op.batch_alter_table("tags") as batch_op: batch_op.drop_column("type") batch_op.alter_column("new_type", new_column_name="type", existing_type=sa.String(36), nullable=False) op.create_index("d_type_tag", "tags", ["uuid", "type", "tag"], unique=True) def downgrade(): raise exceptions.DowngradeNotSupported() ././@LongLink0000000000000000000000000000015100000000000011212 Lustar 00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/08e1515a576c_fix_invalid_verification_logs.pyrally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/08e1515a576c_fix_invalid_verification_log0000664000567000056710000000740713073417716034574 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""fix invalid verification logs Revision ID: 08e1515a576c Revises: 54e844ebfbc3 Create Date: 2016-09-12 15:47:11.279610 """ # revision identifiers, used by Alembic. revision = "08e1515a576c" down_revision = "54e844ebfbc3" branch_labels = None depends_on = None import json import uuid from alembic import op import sqlalchemy as sa from rally import consts from rally import exceptions def UUID(): return str(uuid.uuid4()) task_helper = sa.Table( "tasks", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("uuid", sa.String(36), default=UUID, nullable=False), sa.Column("status", sa.Enum(*list(consts.TaskStatus), name="enum_tasks_status"), default=consts.TaskStatus.INIT, nullable=False), sa.Column("verification_log", sa.Text, default=""), sa.Column("tag", sa.String(64), default=""), sa.Column("deployment_uuid", sa.String(36), nullable=False) ) def _make_trace(etype, emsg, raw_trace=None): trace = "Traceback (most recent call last):\n" if raw_trace is None: trace += "\n\t\t...n/a..\n\n" else: trace += "".join(json.loads(raw_trace)) trace += "%s: %s" % (etype, emsg) return trace def upgrade(): connection = op.get_bind() for task in connection.execute(task_helper.select()): verification_log = task.verification_log if not verification_log: continue new_value = None verification_log = json.loads(verification_log) if isinstance(verification_log, list): new_value = {"etype": verification_log[0], "msg": verification_log[1], "trace": verification_log[2]} if new_value["trace"].startswith("["): # NOTE(andreykurilin): For several cases traceback was # transmitted as list instead of string. 
new_value["trace"] = _make_trace(*verification_log) else: if verification_log.startswith("No such file"): new_value = {"etype": IOError.__name__, "msg": verification_log} new_value["trace"] = _make_trace(new_value["etype"], new_value["msg"]) elif verification_log.startswith("Task config is invalid"): new_value = {"etype": exceptions.InvalidTaskException.__name__, "msg": verification_log} new_value["trace"] = _make_trace(new_value["etype"], new_value["msg"]) elif verification_log.startswith("Failed to load task"): new_value = {"etype": "FailedToLoadTask", "msg": verification_log} new_value["trace"] = _make_trace(new_value["etype"], new_value["msg"]) if new_value: connection.execute(task_helper.update().where( task_helper.c.id == task.id).values( verification_log=json.dumps(new_value))) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/4ef544102ba7_change_task_status_enum.py0000664000567000056710000001310213073417716034261 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Change task status enum Revision ID: 4ef544102ba7 Revises: 3177d36ea270 Create Date: 2016-04-22 21:28:50.745316 """ # revision identifiers, used by Alembic. 
revision = "4ef544102ba7" down_revision = "f33f4610dcda" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import consts from rally import exceptions OLD_STATUS = [ "aborted", "aborting", "cleaning up", "failed", "finished", "init", "paused", "running", "setting up", "soft_aborting", "verifying" ] OLD_ENUM = sa.Enum(*OLD_STATUS, name="enum_tasks_status") WITHOUT_CHANGES = ( "init", "running", "aborted", "aborting", "soft_aborting", "paused", "finished" ) OLD_TO_NEW = [ ("verifying", "validating",), ("failed", "crashed",) ] task = sa.Table( "tasks", sa.MetaData(), sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("deployment_uuid", sa.String(length=36), nullable=False), sa.Column("title", sa.String(length=64), default=""), sa.Column("description", sa.Text(), default=""), sa.Column("input_task", sa.Text(), default=""), sa.Column("validation_duration", sa.Float()), sa.Column("task_duration", sa.Float()), sa.Column("pass_sla", sa.Boolean()), sa.Column("status", OLD_ENUM, nullable=False), sa.Column("new_status", sa.String(36), default=consts.TaskStatus.INIT), sa.Column( "validation_result", sa_types.MutableJSONEncodedDict(), default={}, nullable=False ) ) subtask = sa.Table( "subtasks", sa.MetaData(), sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("task_uuid", sa.String(length=36), nullable=False), sa.Column("title", sa.String(length=64), default=""), sa.Column("description", sa.Text(), default=""), sa.Column( "context", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "sla", 
sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column("duration", sa.Float()), sa.Column( "run_in_parallel", sa.Boolean(), default=False, nullable=False), sa.Column("pass_sla", sa.Boolean()), sa.Column("status", OLD_ENUM, nullable=False), sa.Column("new_status", sa.String(36), default=consts.SubtaskStatus.RUNNING), sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ), sa.PrimaryKeyConstraint("id") ) def upgrade(): # Workaround for Alemic bug #89 # https://bitbucket.org/zzzeek/alembic/issue/89 with op.batch_alter_table("tasks") as batch_op: batch_op.add_column(sa.Column("new_status", sa.String(36), default=consts.TaskStatus.INIT)) with op.batch_alter_table("subtasks") as batch_op: batch_op.add_column(sa.Column("new_status", sa.String(36), default=consts.SubtaskStatus.RUNNING)) op.execute( task.update() .where(task.c.status.in_(WITHOUT_CHANGES)) .values({"new_status": task.c.status})) for old, new in OLD_TO_NEW: op.execute( task.update() .where(task.c.status == op.inline_literal(old)) .values({"new_status": new})) # NOTE(rvasilets): Assume that set_failed was used only in causes of # validation failed op.execute( task.update().where( (task.c.status == op.inline_literal("failed")) & (task.c.validation_result == {})).values( {"new_status": "crashed", "validation_result": {}})) op.execute( task.update().where( (task.c.status == op.inline_literal("failed")) & (task.c.validation_result != {})).values( {"new_status": "validation_failed", "validation_result": task.c.validation_result})) op.drop_index("task_status", "tasks") op.drop_index("subtask_status", "subtasks") # NOTE(boris-42): Statuses "setting up", "cleaning up" were not used with op.batch_alter_table("tasks") as batch_op: batch_op.drop_column("status") batch_op.alter_column("new_status", new_column_name="status", existing_type=sa.String(36)) with op.batch_alter_table("subtasks") as batch_op: batch_op.drop_column("status") batch_op.alter_column("new_status", new_column_name="status", 
existing_type=sa.String(36)) op.create_index("task_status", "tasks", ["status"]) op.create_index("subtask_status", "subtasks", ["status"]) def downgrade(): raise exceptions.DowngradeNotSupported() ././@LongLink0000000000000000000000000000015600000000000011217 Lustar 00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/37fdbb373e8d_fix_test_results_for_verifications.pyrally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/37fdbb373e8d_fix_test_results_for_verific0000664000567000056710000000322613073417716035102 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Fix test results for verifications Revision ID: 37fdbb373e8d Revises: 484cd9413e66 Create Date: 2016-12-29 19:54:23.804525 """ # revision identifiers, used by Alembic. 
revision = "37fdbb373e8d" down_revision = "484cd9413e66" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions verifications_helper = sa.Table( "verifications", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("tests", sa_types.MutableJSONEncodedDict, default={}) ) def upgrade(): connection = op.get_bind() for v in connection.execute(verifications_helper.select()): tests = v.tests for test in tests.values(): duration = test.pop("time") test["duration"] = duration connection.execute( verifications_helper.update().where( verifications_helper.c.id == v.id).values(tests=tests)) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/6ad4f426f005_add_hooks_to_task_result.py0000664000567000056710000000321513073417716034447 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """add hooks to task result Adds empty hooks list to existing task results Revision ID: 6ad4f426f005 Revises: 08e1515a576c Create Date: 2016-09-13 18:11:47.703023 """ # revision identifiers, used by Alembic. 
revision = "6ad4f426f005" down_revision = "08e1515a576c" branch_labels = None depends_on = None from alembic import op # noqa import sqlalchemy as sa # noqa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions task_results_helper = sa.Table( "task_results", sa.MetaData(), sa.Column("id", sa.Integer(), nullable=False), sa.Column("data", sa_types.MutableJSONEncodedDict(), nullable=False), ) def upgrade(): connection = op.get_bind() for task_result in connection.execute(task_results_helper.select()): data = task_result.data data["hooks"] = [] connection.execute( task_results_helper.update().where( task_results_helper.c.id == task_result.id).values( data=data)) def downgrade(): raise exceptions.DowngradeNotSupported() ././@LongLink0000000000000000000000000000016200000000000011214 Lustar 00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/3177d36ea270_merge_credentials_from_users_and_admin.pyrally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/3177d36ea270_merge_credentials_from_users0000664000567000056710000000452013073417716034605 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Merge credentials from users and admin Revision ID: 3177d36ea270 Revises: ca3626f62937 Create Date: 2016-03-01 16:01:38.747048 """ # revision identifiers, used by Alembic. 
revision = "3177d36ea270" down_revision = "ca3626f62937" branch_labels = None depends_on = None from alembic import op import sqlalchemy as sa from rally import exceptions deployments_helper = sa.Table( "deployments", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("admin", sa.types.PickleType, nullable=True), sa.Column("users", sa.types.PickleType, default=[], nullable=False), sa.Column("credentials", sa.types.PickleType, nullable=True), ) def upgrade(): with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.add_column( sa.Column("credentials", sa.PickleType(), nullable=True)) connection = op.get_bind() for deployment in connection.execute(deployments_helper.select()): creds = [ ["openstack", { "admin": deployment.admin, "users": deployment.users }] ] connection.execute( deployments_helper.update().where( deployments_helper.c.id == deployment.id).values( credentials=creds)) with op.batch_alter_table("deployments", schema=None) as batch_op: batch_op.alter_column("credentials", existing_type=sa.PickleType, existing_nullable=True, nullable=False) batch_op.drop_column("admin") batch_op.drop_column("users") def downgrade(): raise exceptions.DowngradeNotSupported() ././@LongLink0000000000000000000000000000016400000000000011216 Lustar 00000000000000rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/484cd9413e66_new_db_schema_for_verification_component.pyrally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/484cd9413e66_new_db_schema_for_verificati0000664000567000056710000001714413073417716034537 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Provide new db schema for verification component Revision ID: 484cd9413e66 Revises: e654a0648db0 Create Date: 2016-11-04 17:04:24.614075 """ # revision identifiers, used by Alembic. revision = "484cd9413e66" down_revision = "e654a0648db0" branch_labels = None depends_on = None import uuid from alembic import op from oslo_utils import timeutils import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions TASK_STATUSES = ["aborted", "aborting", "cleaning up", "failed", "finished", "init", "paused", "running", "setting up", "soft_aborting", "verifying"] _MAP_OLD_TO_NEW_TEST_STATUSES = { "OK": "success", "FAIL": "fail", "SKIP": "skip" } def UUID(): return str(uuid.uuid4()) verification_helper = sa.Table( "verifications", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("uuid", sa.String(36), nullable=False), sa.Column("deployment_uuid", sa.String(36), nullable=False), sa.Column("status", sa.Enum(*TASK_STATUSES, name="enum_tasks_status"), default="init", nullable=False), sa.Column("set_name", sa.String(20)), sa.Column("tests", sa.Integer), sa.Column("errors", sa.Integer), sa.Column("failures", sa.Integer), sa.Column("time", sa.Float), sa.Column("created_at", sa.DateTime), sa.Column("updated_at", sa.DateTime) ) results_helper = sa.Table( "verification_results", sa.MetaData(), sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("verification_uuid", sa.String(36), nullable=False), sa.Column("data", sa_types.MutableJSONEncodedDict, nullable=False, 
default={}), sa.Column("created_at", sa.DateTime), sa.Column("updated_at", sa.DateTime) ) def upgrade(): connection = op.get_bind() # create new table to store all verifiers verifiers_table = op.create_table( "verifiers", sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("uuid", sa.String(36), default=UUID, nullable=False), sa.Column("name", sa.String(255), unique=True), sa.Column("description", sa.Text), sa.Column("type", sa.String(255), nullable=False), sa.Column("namespace", sa.String(255)), sa.Column("source", sa.String(255)), sa.Column("version", sa.String(255)), sa.Column("system_wide", sa.Boolean), sa.Column("status", sa.String(36), default="init", nullable=False), sa.Column("extra_settings", sa_types.MutableJSONEncodedDict, nullable=True), sa.Column("created_at", sa.DateTime), sa.Column("updated_at", sa.DateTime) ) op.create_index("verifier_uuid", "verifiers", ["uuid"], unique=True) verifications_table = op.create_table( "verifications_new", sa.Column("id", sa.Integer, primary_key=True, autoincrement=True), sa.Column("uuid", sa.String(36), default=UUID, nullable=False), sa.Column("verifier_uuid", sa.String(36), nullable=False), sa.Column("deployment_uuid", sa.String(36), nullable=False), sa.Column("run_args", sa_types.MutableJSONEncodedDict), sa.Column("status", sa.String(36), default="init", nullable=False), sa.Column("tests_count", sa.Integer, default=0), sa.Column("failures", sa.Integer, default=0), sa.Column("skipped", sa.Integer, default=0), sa.Column("success", sa.Integer, default=0), sa.Column("unexpected_success", sa.Integer, default=0), sa.Column("expected_failures", sa.Integer, default=0), sa.Column("tests_duration", sa.Float, default=0.0), sa.Column("tests", sa_types.MutableJSONEncodedDict, default={}), sa.Column("created_at", sa.DateTime), sa.Column("updated_at", sa.DateTime), sa.ForeignKeyConstraint(["verifier_uuid"], ["verifiers.uuid"]), sa.ForeignKeyConstraint(["deployment_uuid"], ["deployments.uuid"]) ) 
default_verifier = None for vresult in connection.execute(results_helper.select()): if default_verifier is None: vuuid = UUID() connection.execute( verifiers_table.insert(), [{ "uuid": vuuid, "name": "DefaultTempestVerifier", "description": "It is the default verifier to assign all " "migrated verification results for", "type": "tempest", "namespace": "openstack", "source": "n/a", "version": "n/a", "system_wide": False, "status": "init", "created_at": timeutils.utcnow(), "updated_at": timeutils.utcnow() }] ) default_verifier = connection.execute( verifiers_table.select().where( verifiers_table.c.uuid == vuuid)).first() data = vresult.data if "errors" in data: # it is a very old format... for test in data["test_cases"].keys(): old_status = data["test_cases"][test]["status"] new_status = _MAP_OLD_TO_NEW_TEST_STATUSES.get( old_status, old_status.lower()) data["test_cases"][test]["status"] = new_status if "failure" in data["test_cases"][test]: data["test_cases"][test]["traceback"] = data[ "test_cases"][test]["failure"]["log"] data["test_cases"][test].pop("failure") verifications = connection.execute( verification_helper.select().where( verification_helper.c.uuid == vresult.verification_uuid)) # for each verification result we have single verification object verification = verifications.first() connection.execute( verifications_table.insert(), [{"uuid": verification.uuid, "verifier_uuid": default_verifier.uuid, "deployment_uuid": verification.deployment_uuid, "run_args": {"pattern": "set=%s" % verification.set_name}, "status": verification.status, "tests": data["test_cases"], "tests_count": data["tests"], "failures": data["failures"], "skipped": data["skipped"], "success": data["success"], "unexpected_success": data.get("unexpected_success", 0), "expected_failures": data.get("expected_failures", 0), "tests_duration": data["time"], "created_at": vresult.created_at, "updated_at": vresult.updated_at }]) op.drop_table("verification_results") op.drop_table("verifications") 
op.rename_table("verifications_new", "verifications") op.create_index( "verification_uuid", "verifications", ["uuid"], unique=True) def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/versions/e654a0648db0_refactor_task_results.py0000664000567000056710000003621713073417716034013 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Refactor task results Revision ID: e654a0648db0 Revises: 3177d36ea270 Create Date: 2016-04-01 14:36:56.373349 """ # revision identifiers, used by Alembic. 
revision = "e654a0648db0" down_revision = "32fada9b2fde" branch_labels = None depends_on = None import datetime as dt import json import uuid from alembic import op import sqlalchemy as sa from rally.common.db.sqlalchemy import types as sa_types from rally import exceptions taskhelper = sa.Table( "tasks", sa.MetaData(), sa.Column("created_at", sa.DateTime(), nullable=True), sa.Column("updated_at", sa.DateTime(), nullable=True), sa.Column("id", sa.Integer(), nullable=False), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("status", sa.Enum( "aborted", "aborting", "cleaning up", "failed", "finished", "init", "paused", "running", "setting up", "soft_aborting", "verifying", name="enum_tasks_status"), nullable=False), sa.Column("verification_log", sa.Text(), nullable=True), sa.Column("tag", sa.String(length=64), nullable=True), sa.Column("deployment_uuid", sa.String(length=36), nullable=False), sa.Column("title", sa.String(length=64), default=""), sa.Column("description", sa.Text(), default=""), sa.Column("input_task", sa.Text(), default=""), sa.Column("validation_duration", sa.Float()), sa.Column("task_duration", sa.Float()), sa.Column("pass_sla", sa.Boolean()), sa.Column( "validation_result", sa_types.MutableJSONEncodedDict(), default={}, nullable=False ) ) task_result_helper = sa.Table( "task_results", sa.MetaData(), sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column( "key", sa_types.MutableJSONEncodedDict(), nullable=False), sa.Column( "data", sa_types.MutableJSONEncodedDict(), nullable=False), sa.Column("task_uuid", sa.String(length=36), nullable=True) ) taghelper = sa.Table( "tags", sa.MetaData(), sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("tag", sa.String(length=255), 
nullable=False), sa.Column( "type", sa.Enum( "task", "subtask", name="enum_tag_types"), nullable=False) ) def upgrade(): conn = op.get_bind() subtask_table = op.create_table( "subtasks", sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("task_uuid", sa.String(length=36), nullable=False), sa.Column("title", sa.String(length=64), default=""), sa.Column("description", sa.Text(), default=""), sa.Column( "context", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "sla", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column("duration", sa.Float()), sa.Column( "run_in_parallel", sa.Boolean(), default=False, nullable=False), sa.Column("pass_sla", sa.Boolean()), sa.Column( "status", sa.Enum( "finished", "running", "crashed", name="enum_subtasks_status"), nullable=False), sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_index("subtask_uuid", "subtasks", ["uuid"], unique=True) op.create_index("subtask_status", "subtasks", ["status"], unique=False) workload_table = op.create_table( "workloads", sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("task_uuid", sa.String(length=36), nullable=False), sa.Column("subtask_uuid", sa.String(length=36), nullable=False), sa.Column("name", sa.String(length=64), nullable=False), sa.Column("description", sa.Text(), default=""), sa.Column("position", sa.Integer(), default=0, nullable=False), sa.Column( "runner_type", sa.String(length=64), nullable=False), sa.Column( "runner", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "args", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), 
sa.Column( "context", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "hooks", sa_types.MutableJSONEncodedList(), default=[], nullable=False), sa.Column( "sla", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "sla_results", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column( "context_execution", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column("load_duration", sa.Float(), default=0), sa.Column("full_duration", sa.Float(), default=0), sa.Column("min_duration", sa.Float(), default=0), sa.Column("max_duration", sa.Float(), default=0), sa.Column("total_iteration_count", sa.Integer(), default=0), sa.Column("failed_iteration_count", sa.Integer(), default=0), sa.Column("pass_sla", sa.Boolean()), sa.Column( "statistics", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.Column("start_time", sa.DateTime()), sa.Column("_profiling_data", sa.Text(), default=""), sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ), sa.ForeignKeyConstraint(["subtask_uuid"], ["subtasks.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_index("workload_uuid", "workloads", ["uuid"], unique=True) workloaddata_table = op.create_table( "workloaddata", sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("task_uuid", sa.String(length=36), nullable=False), sa.Column("workload_uuid", sa.String(length=36), nullable=False), sa.Column("chunk_order", sa.Integer(), nullable=False), sa.Column("iteration_count", sa.Integer(), nullable=False), sa.Column("failed_iteration_count", sa.Integer(), nullable=False), sa.Column("chunk_size", sa.Integer(), nullable=False), sa.Column( "compressed_chunk_size", sa.Integer(), nullable=False), sa.Column("started_at", sa.DateTime(), nullable=False), sa.Column("finished_at", sa.DateTime(), 
nullable=False), # sa.Column("chunk_data", sa.Text(), nullable=False), sa.Column( "chunk_data", sa_types.MutableJSONEncodedDict(), default={}, nullable=False), sa.ForeignKeyConstraint(["task_uuid"], ["tasks.uuid"], ), sa.ForeignKeyConstraint(["workload_uuid"], ["workloads.uuid"], ), sa.PrimaryKeyConstraint("id") ) op.create_index( "workload_data_uuid", "workloaddata", ["uuid"], unique=True) tag_table = op.create_table( "tags", sa.Column("created_at", sa.DateTime()), sa.Column("updated_at", sa.DateTime()), sa.Column("id", sa.Integer(), nullable=False, autoincrement=True), sa.Column("uuid", sa.String(length=36), nullable=False), sa.Column("tag", sa.String(length=255), nullable=False), sa.Column( "type", sa.Enum( "task", "subtask", name="enum_tag_types"), nullable=False), sa.PrimaryKeyConstraint("id") ) op.create_index( "d_type_tag", "tags", ["uuid", "type", "tag"], unique=True) with op.batch_alter_table("tasks") as batch_op: batch_op.add_column( sa.Column("title", sa.String(length=64), default="") ) batch_op.add_column( sa.Column("description", sa.Text(), default="") ) batch_op.add_column( sa.Column("input_task", sa.Text(), default="") ) batch_op.add_column( sa.Column("validation_duration", sa.Float()) ) batch_op.add_column( sa.Column("task_duration", sa.Float()) ) batch_op.add_column( sa.Column("pass_sla", sa.Boolean()) ) batch_op.add_column( sa.Column( "validation_result", sa_types.MutableJSONEncodedDict(), default={}) ) for task in conn.execute(taskhelper.select()): if task.tag: conn.execute( tag_table.insert(), [{ "uuid": task.uuid, "type": "task", "tag": task.tag, "created_at": task.created_at, "updated_at": task.updated_at }] ) task_results = conn.execute( task_result_helper.select(). 
where(task_result_helper.c.task_uuid == task.uuid) ) pass_sla = True task_duration = 0 for task_result in task_results: raw_data = task_result.data.get("raw", []) iter_count = len(raw_data) failed_iter_count = 0 max_duration = 0 min_duration = -1 for d in raw_data: if d.get("error"): failed_iter_count += 1 duration = d.get("duration", 0) if duration > max_duration: max_duration = duration if min_duration < 0 or min_duration > duration: min_duration = duration sla = task_result.data.get("sla", []) success = all([s.get("success") for s in sla]) if not success: pass_sla = False task_duration += task_result.data.get("full_duration", 0) delta = dt.timedelta( seconds=task_result.data.get("full_duration", 0)) start = task_result.created_at - delta subtask_uuid = str(uuid.uuid4()) conn.execute( subtask_table.insert(), [{ "uuid": subtask_uuid, "task_uuid": task.uuid, "created_at": task_result.created_at, "updated_at": task_result.updated_at, # NOTE(ikhudoshyn) We don't have info on subtask status "status": "finished", "duration": task_result.data.get("full_duration", 0), "pass_sla": success }] ) workload_uuid = str(uuid.uuid4()) conn.execute( workload_table.insert(), [{ "created_at": task_result.created_at, "updated_at": task_result.updated_at, "uuid": workload_uuid, "task_uuid": task.uuid, "subtask_uuid": subtask_uuid, "name": task_result.key["name"], "position": task_result.key["pos"], "runner_type": task_result.key["kw"]["runner"]["type"], "runner": task_result.key["kw"]["runner"], "context": task_result.key["kw"].get("context", {}), "sla": task_result.key["kw"].get("sla", {}), "args": task_result.key["kw"].get("args", {}), "sla_results": {"sla": sla}, "context_execution": {}, "load_duration": task_result.data.get("load_duration", 0), "full_duration": task_result.data.get("full_duration", 0), "min_duration": min_duration, "max_duration": max_duration, "total_iteration_count": iter_count, "failed_iteration_count": failed_iter_count, "pass_sla": success, "statistics": {}, 
"start_time": start, }] ) conn.execute( workloaddata_table.insert(), [{ "uuid": str(uuid.uuid4()), "task_uuid": task.uuid, "workload_uuid": workload_uuid, "chunk_order": 0, "iteration_count": iter_count, "failed_iteration_count": failed_iter_count, "chunk_data": {"raw": raw_data}, # TODO(ikhudoshyn) "chunk_size": 0, "compressed_chunk_size": 0, "started_at": start, "finished_at": task_result.created_at }] ) task_verification_log = {} if task.verification_log: task_verification_log = json.loads(task.verification_log) conn.execute( taskhelper.update().where(taskhelper.c.uuid == task.uuid), { "pass_sla": pass_sla, "task_duration": task_duration, "validation_duration": 0, "validation_result": task_verification_log } ) # TODO(ikhudoshyn) update workload's statistics with op.batch_alter_table("tasks") as batch_op: batch_op.drop_column("tag") batch_op.drop_column("verification_log") batch_op.alter_column( "validation_result", existing_type=sa_types.MutableJSONEncodedDict(), nullable=False) op.drop_table("task_results") def downgrade(): raise exceptions.DowngradeNotSupported() rally-0.9.1/rally/common/db/sqlalchemy/migrations/env.py0000664000567000056710000000303313073417716024475 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from alembic import context from rally.common.db.sqlalchemy import api from rally.common.db.sqlalchemy import models # add your model's MetaData object here # for 'autogenerate' support # from myapp import mymodel target_metadata = models.BASE.metadata # other values from the config, defined by the needs of env.py, # can be acquired: # my_important_option = config.get_main_option("my_important_option") # ... etc. def run_migrations_online(): """Run migrations in 'online' mode. In this scenario we need to create an Engine and associate a connection with the context. """ engine = api.get_engine() with engine.connect() as connection: context.configure(connection=connection, render_as_batch=True, target_metadata=target_metadata) with context.begin_transaction(): context.run_migrations() run_migrations_online() rally-0.9.1/rally/common/db/sqlalchemy/migrations/script.py.mako0000664000567000056710000000223613073417716026143 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """${message} Revision ID: ${up_revision} Revises: ${down_revision | comma,n} Create Date: ${create_date} """ # revision identifiers, used by Alembic. 
revision = "${up_revision}" down_revision = "${down_revision}" branch_labels = ${repr(branch_labels)} depends_on = ${repr(depends_on)} from alembic import op import sqlalchemy as sa ${imports if imports else ""} ${"from rally import exceptions" if not downgrades else ""} def upgrade(): ${upgrades if upgrades else "pass"} def downgrade(): ${downgrades if downgrades else "raise exceptions.DowngradeNotSupported()"} rally-0.9.1/rally/common/db/sqlalchemy/migrations/README.rst0000664000567000056710000000526113073417716025027 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _db_migrations: Database upgrade in Rally ========================= Information for users --------------------- Rally supports DB schema versioning (schema versions are called *revisions*) and migration (upgrade to the latest revision). End user is provided with the following possibilities: - Print current revision of DB. .. code-block:: shell rally-manage db revision - Upgrade existing DB to the latest state. This is needed when previously existing Rally installation is being upgraded to a newer version. In this case user should issue command .. code-block:: shell rally-manage db upgrade **AFTER** upgrading Rally package. DB schema will get upgraded to the latest state and all existing data will be kept. **WARNING** Rally does NOT support DB schema downgrade. One should consider backing up existing database in order to be able to rollback the change. 
Information for developers -------------------------- DB migration in Rally is implemented via package *alembic*. It is highly recommended to get familiar with it's documentation available by the link_ before proceeding. If developer is about to change existing DB schema they should create a new DB revision and a migration script with the following command. .. code-block:: shell alembic --config rally/common/db/sqlalchemy/alembic.ini revision -m or .. code-block:: shell alembic --config rally/common/db/sqlalchemy/alembic.ini revision --autogenerate -m It will generate migration script -- a file named `_.py` located in `rally/common/db/sqlalchemy/migrations/versions`. Alembic with parameter ``--autogenerate`` makes some "routine" job for developer, for example it makes some SQLite compatible batch expressions for migrations. Generated script should then be checked, edited if it is needed to be and added to Rally source tree. **WARNING** Even though alembic supports schema downgrade, migration scripts provided along with Rally do not contain actual code for downgrade. .. references: .. _link: https://alembic.readthedocs.org rally-0.9.1/rally/common/db/sqlalchemy/api.py0000664000567000056710000006725313073417720022313 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" SQLAlchemy implementation for DB.API """ import datetime as dt import json import os import time import alembic from alembic import config as alembic_config import alembic.migration as alembic_migration from alembic import script as alembic_script from oslo_config import cfg from oslo_db import exception as db_exc from oslo_db.sqlalchemy import session as db_session from oslo_utils import timeutils from sqlalchemy import or_ from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm import load_only as sa_loadonly from rally.common.db import api as db_api from rally.common.db.sqlalchemy import models from rally.common.i18n import _ from rally import consts from rally import exceptions CONF = cfg.CONF _FACADE = None INITIAL_REVISION_UUID = "ca3626f62937" def _create_facade_lazily(): global _FACADE if _FACADE is None: _FACADE = db_session.EngineFacade.from_config(CONF) return _FACADE def get_engine(): facade = _create_facade_lazily() return facade.get_engine() def get_session(**kwargs): facade = _create_facade_lazily() return facade.get_session(**kwargs) def get_backend(): """The backend is this module itself.""" return Connection() def _alembic_config(): path = os.path.join(os.path.dirname(__file__), "alembic.ini") config = alembic_config.Config(path) return config class Connection(object): def engine_reset(self): global _FACADE _FACADE = None def schema_cleanup(self): models.drop_db() def schema_revision(self, config=None, engine=None, detailed=False): """Current database revision. 
:param config: Instance of alembic config :param engine: Instance of DB engine :param detailed: whether to return a dict with detailed data :rtype detailed: bool :returns: Database revision :rtype: string :rtype: dict """ engine = engine or get_engine() with engine.connect() as conn: context = alembic_migration.MigrationContext.configure(conn) revision = context.get_current_revision() if detailed: config = config or _alembic_config() sc_dir = alembic_script.ScriptDirectory.from_config(config) return {"revision": revision, "current_head": sc_dir.get_current_head()} return revision def schema_upgrade(self, revision=None, config=None, engine=None): """Used for upgrading database. :param revision: Desired database version :type revision: string :param config: Instance of alembic config :param engine: Instance of DB engine """ revision = revision or "head" config = config or _alembic_config() engine = engine or get_engine() if self.schema_revision() is None: self.schema_stamp(INITIAL_REVISION_UUID, config=config) alembic.command.upgrade(config, revision or "head") def schema_create(self, config=None, engine=None): """Create database schema from models description. Can be used for initial installation instead of upgrade('head'). :param config: Instance of alembic config :param engine: Instance of DB engine """ engine = engine or get_engine() # NOTE(viktors): If we will use metadata.create_all() for non empty db # schema, it will only add the new tables, but leave # existing as is. So we should avoid of this situation. if self.schema_revision(engine=engine) is not None: raise db_exc.DbMigrationError("DB schema is already under version" " control. Use upgrade() instead") models.BASE.metadata.create_all(engine) self.schema_stamp("head", config=config) def schema_stamp(self, revision, config=None): """Stamps database with provided revision. Don't run any migrations. 
:param revision: Should match one from repository or head - to stamp database with most recent revision :type revision: string :param config: Instance of alembic config """ config = config or _alembic_config() return alembic.command.stamp(config, revision=revision) def model_query(self, model, session=None): """The helper method to create query. :param model: The instance of :class:`rally.common.db.sqlalchemy.models.RallyBase` to request it. :param session: Reuse the session object or get new one if it is None. :returns: The query object. :raises Exception: when the model is not a sublcass of :class:`rally.common.db.sqlalchemy.models.RallyBase`. """ session = session or get_session() query = session.query(model) def issubclassof_rally_base(obj): return isinstance(obj, type) and issubclass(obj, models.RallyBase) if not issubclassof_rally_base(model): raise Exception(_("The model should be a subclass of RallyBase")) return query def _tags_get(self, uuid, tag_type): tags = (self.model_query(models.Tag). filter_by(uuid=uuid, type=tag_type).all()) return list(set(t.tag for t in tags)) def _uuids_by_tags_get(self, tag_type, tags): tags = (self.model_query(models.Tag). 
filter(models.Tag.type == tag_type, models.Tag.tag.in_(tags)).all()) return list(set(tag.uuid for tag in tags)) def _task_get(self, uuid, load_only=None, session=None): pre_query = self.model_query(models.Task, session=session) if load_only: pre_query = pre_query.options(sa_loadonly(load_only)) task = pre_query.filter_by(uuid=uuid).first() if not task: raise exceptions.TaskNotFound(uuid=uuid) return task def _make_old_task(self, task): tags = self._tags_get(task.uuid, consts.TagType.TASK) tag = tags[0] if tags else "" return { "id": task.id, "uuid": task.uuid, "deployment_uuid": task.deployment_uuid, "status": task.status, "created_at": task.created_at, "updated_at": task.updated_at, "tag": tag, "verification_log": json.dumps(task.validation_result) } def _make_old_task_result(self, workload, workload_data_list): raw_data = [data for workload_data in workload_data_list for data in workload_data.chunk_data["raw"]] return { "id": workload.id, "task_uuid": workload.task_uuid, "created_at": workload.created_at, "updated_at": workload.updated_at, "key": { "name": workload.name, "pos": workload.position, "kw": { "args": workload.args, "runner": workload.runner, "context": workload.context, "sla": workload.sla, "hooks": [r["config"] for r in workload.hooks], } }, "data": { "raw": raw_data, "load_duration": workload.load_duration, "full_duration": workload.full_duration, "sla": workload.sla_results["sla"], "hooks": workload.hooks } } def _task_workload_data_get_all(self, workload_uuid): return (self.model_query(models.WorkloadData). filter_by(workload_uuid=workload_uuid). 
order_by(models.WorkloadData.chunk_order.asc())) # @db_api.serialize def task_get(self, uuid): task = self._task_get(uuid) return self._make_old_task(task) # @db_api.serialize def task_get_detailed(self, uuid): task = self.task_get(uuid) task["results"] = self._task_result_get_all_by_uuid(uuid) return task @db_api.serialize def task_get_status(self, uuid): return self._task_get(uuid, load_only="status").status # @db_api.serialize def task_get_detailed_last(self): task = (self.model_query(models.Task). order_by(models.Task.id.desc()).first()) task = self._make_old_task(task) task["results"] = self._task_result_get_all_by_uuid(task["uuid"]) return task # @db_api.serialize def task_create(self, values): new_tag = values.pop("tag", None) # TODO(ikhudoshyn): currently 'input_task' # does not come in 'values' # After completely switching to the new # DB schema in API we should reconstruct # input_task's from associated workloads # the same is true for 'pass_sla', # 'task_duration', 'validation_result' # and 'validation_duration' task = models.Task() task.update(values) task.save() if new_tag: tag = models.Tag() tag.update({ "uuid": task.uuid, "type": consts.TagType.TASK, "tag": new_tag }) tag.save() return self._make_old_task(task) # @db_api.serialize def task_update(self, uuid, values): session = get_session() values.pop("uuid", None) new_tag = values.pop("tag", None) with session.begin(): task = self._task_get(uuid, session=session) task.update(values) if new_tag: tag = models.Tag() tag.update({ "uuid": uuid, "type": consts.TagType.TASK, "tag": new_tag }) tag.save() return self._make_old_task(task) def task_update_status(self, uuid, statuses, status_value): session = get_session() result = ( session.query(models.Task).filter( models.Task.uuid == uuid, models.Task.status.in_( statuses)). 
update({"status": status_value}, synchronize_session=False) ) if not result: status = " or ".join(statuses) msg = _("Task with uuid='%(uuid)s' and in statuses:'" "%(statuses)s' not found.'") % {"uuid": uuid, "statuses": status} raise exceptions.RallyException(msg) return result # @db_api.serialize def task_list(self, status=None, deployment=None): query = self.model_query(models.Task) filters = {} if status is not None: filters["status"] = status if deployment is not None: filters["deployment_uuid"] = self.deployment_get( deployment)["uuid"] if filters: query = query.filter_by(**filters) return [self._make_old_task(task) for task in query.all()] def task_delete(self, uuid, status=None): session = get_session() with session.begin(): query = base_query = (self.model_query(models.Task). filter_by(uuid=uuid)) if status is not None: query = base_query.filter_by(status=status) (self.model_query(models.WorkloadData).filter_by(task_uuid=uuid). delete(synchronize_session=False)) (self.model_query(models.Workload).filter_by(task_uuid=uuid). delete(synchronize_session=False)) (self.model_query(models.Subtask).filter_by(task_uuid=uuid). delete(synchronize_session=False)) (self.model_query(models.Tag).filter_by( uuid=uuid, type=consts.TagType.TASK). delete(synchronize_session=False)) count = query.delete(synchronize_session=False) if not count: if status is not None: task = base_query.first() if task: raise exceptions.TaskInvalidStatus(uuid=uuid, require=status, actual=task.status) raise exceptions.TaskNotFound(uuid=uuid) def _task_result_get_all_by_uuid(self, uuid): results = [] workloads = (self.model_query(models.Workload). 
filter_by(task_uuid=uuid).all()) for workload in workloads: workload_data_list = self._task_workload_data_get_all( workload.uuid) results.append( self._make_old_task_result(workload, workload_data_list)) return results # @db_api.serialize def task_result_get_all_by_uuid(self, uuid): return self._task_result_get_all_by_uuid(uuid) @db_api.serialize def subtask_create(self, task_uuid, title, description=None, context=None): subtask = models.Subtask(task_uuid=task_uuid) subtask.update({ "title": title, "description": description or "", "context": context or {}, }) subtask.save() return subtask @db_api.serialize def workload_create(self, task_uuid, subtask_uuid, key): workload = models.Workload(task_uuid=task_uuid, subtask_uuid=subtask_uuid) workload.update({ "name": key["name"], "position": key["pos"], "runner": key["kw"]["runner"], "runner_type": key["kw"]["runner"]["type"], "context": key["kw"].get("context", {}), "sla": key["kw"].get("sla", {}), "args": key["kw"].get("args", {}), "context_execution": {}, "statistics": {}, }) workload.save() return workload @db_api.serialize def workload_data_create(self, task_uuid, workload_uuid, chunk_order, data): workload_data = models.WorkloadData(task_uuid=task_uuid, workload_uuid=workload_uuid) raw_data = data.get("raw", []) iter_count = len(raw_data) failed_iter_count = 0 started_at = float("inf") finished_at = 0 for d in raw_data: if d.get("error"): failed_iter_count += 1 timestamp = d["timestamp"] duration = d["duration"] finished = timestamp + duration if timestamp < started_at: started_at = timestamp if finished > finished_at: finished_at = finished now = time.time() if started_at == float("inf"): started_at = now if finished_at == 0: finished_at = now workload_data.update({ "task_uuid": task_uuid, "workload_uuid": workload_uuid, "chunk_order": chunk_order, "iteration_count": iter_count, "failed_iteration_count": failed_iter_count, "chunk_data": {"raw": raw_data}, # TODO(ikhudoshyn) "chunk_size": 0, 
"compressed_chunk_size": 0, "started_at": dt.datetime.fromtimestamp(started_at), "finished_at": dt.datetime.fromtimestamp(finished_at) }) workload_data.save() return workload_data @db_api.serialize def workload_set_results(self, workload_uuid, data): workload = self.model_query(models.Workload).filter_by( uuid=workload_uuid).first() workload_data_list = self._task_workload_data_get_all(workload.uuid) raw_data = [raw for workload_data in workload_data_list for raw in workload_data.chunk_data["raw"]] iter_count = len(raw_data) failed_iter_count = 0 max_duration = 0 min_duration = 0 success = True for d in raw_data: if d.get("error"): failed_iter_count += 1 duration = d.get("duration", 0) if duration > max_duration: max_duration = duration if min_duration and min_duration > duration: min_duration = duration sla = data.get("sla", []) # TODO(ikhudoshyn): if no SLA was specified and there are # failed iterations is it success? # NOTE(ikhudoshyn): we call it 'pass_sla' # for the sake of consistency with other models # so if no SLAs were specified, then we assume pass_sla == True success = all([s.get("success") for s in sla]) now = timeutils.utcnow() delta = dt.timedelta(seconds=data.get("full_duration", 0)) start = now - delta workload.update({ "task_uuid": workload.task_uuid, "subtask_uuid": workload.subtask_uuid, "sla_results": {"sla": sla}, "context_execution": {}, "hooks": data.get("hooks", []), "load_duration": data.get("load_duration", 0), "full_duration": data.get("full_duration", 0), "min_duration": min_duration, "max_duration": max_duration, "total_iteration_count": iter_count, "failed_iteration_count": failed_iter_count, # TODO(ikhudoshyn) "start_time": start, "statistics": {}, "pass_sla": success }) # TODO(ikhudoshyn): if pass_sla is False, # then update task's and subtask's pass_sla # TODO(ikhudoshyn): update task.task_duration # and subtask.duration workload.save() return workload def _deployment_get(self, deployment, session=None): stored_deployment = 
self.model_query( models.Deployment, session=session).filter_by(name=deployment).first() if not stored_deployment: stored_deployment = self.model_query( models.Deployment, session=session).filter_by(uuid=deployment).first() if not stored_deployment: raise exceptions.DeploymentNotFound(deployment=deployment) return stored_deployment @db_api.serialize def deployment_create(self, values): deployment = models.Deployment() try: deployment.update(values) deployment.save() except db_exc.DBDuplicateEntry: raise exceptions.DeploymentNameExists(deployment=values["name"]) return deployment def deployment_delete(self, uuid): session = get_session() with session.begin(): count = (self.model_query(models.Resource, session=session). filter_by(deployment_uuid=uuid).count()) if count: raise exceptions.DeploymentIsBusy(uuid=uuid) count = (self.model_query(models.Deployment, session=session). filter_by(uuid=uuid).delete(synchronize_session=False)) if not count: raise exceptions.DeploymentNotFound(deployment=uuid) @db_api.serialize def deployment_get(self, deployment): return self._deployment_get(deployment) @db_api.serialize def deployment_update(self, deployment, values): session = get_session() values.pop("uuid", None) with session.begin(): dpl = self._deployment_get(deployment, session=session) dpl.update(values) return dpl @db_api.serialize def deployment_list(self, status=None, parent_uuid=None, name=None): query = (self.model_query(models.Deployment). filter_by(parent_uuid=parent_uuid)) if name: query = query.filter_by(name=name) if status: query = query.filter_by(status=status) return query.all() @db_api.serialize def resource_create(self, values): resource = models.Resource() resource.update(values) resource.save() return resource @db_api.serialize def resource_get_all(self, deployment_uuid, provider_name=None, type=None): query = (self.model_query(models.Resource). 
filter_by(deployment_uuid=deployment_uuid)) if provider_name is not None: query = query.filter_by(provider_name=provider_name) if type is not None: query = query.filter_by(type=type) return query.all() def resource_delete(self, id): count = (self.model_query(models.Resource). filter_by(id=id).delete(synchronize_session=False)) if not count: raise exceptions.ResourceNotFound(id=id) @db_api.serialize def verifier_create(self, name, vtype, namespace, source, version, system_wide, extra_settings=None): verifier = models.Verifier() properties = {"name": name, "type": vtype, "namespace": namespace, "source": source, "extra_settings": extra_settings, "version": version, "system_wide": system_wide} verifier.update(properties) verifier.save() return verifier @db_api.serialize def verifier_get(self, verifier_id): return self._verifier_get(verifier_id) def _verifier_get(self, verifier_id, session=None): verifier = self.model_query( models.Verifier, session=session).filter( or_(models.Verifier.name == verifier_id, models.Verifier.uuid == verifier_id)).first() if not verifier: raise exceptions.ResourceNotFound(id=verifier_id) return verifier @db_api.serialize def verifier_list(self, status=None): query = self.model_query(models.Verifier) if status: query = query.filter_by(status=status) return query.all() def verifier_delete(self, verifier_id): session = get_session() with session.begin(): query = self.model_query( models.Verifier, session=session).filter( or_(models.Verifier.name == verifier_id, models.Verifier.uuid == verifier_id)) count = query.delete(synchronize_session=False) if not count: raise exceptions.ResourceNotFound(id=verifier_id) @db_api.serialize def verifier_update(self, verifier_id, properties): session = get_session() with session.begin(): verifier = self._verifier_get(verifier_id) verifier.update(properties) verifier.save() return verifier @db_api.serialize def verification_create(self, verifier_id, deployment_id, tags=None, run_args=None): verifier = 
self._verifier_get(verifier_id) deployment = self._deployment_get(deployment_id) verification = models.Verification() verification.update({"verifier_uuid": verifier.uuid, "deployment_uuid": deployment["uuid"], "run_args": run_args}) verification.save() if tags: for t in set(tags): tag = models.Tag() tag.update({"uuid": verification.uuid, "type": consts.TagType.VERIFICATION, "tag": t}) tag.save() return verification @db_api.serialize def verification_get(self, verification_uuid): verification = self._verification_get(verification_uuid) verification.tags = sorted(self._tags_get(verification.uuid, consts.TagType.VERIFICATION)) return verification def _verification_get(self, verification_uuid, session=None): verification = self.model_query( models.Verification, session=session).filter_by( uuid=verification_uuid).first() if not verification: raise exceptions.ResourceNotFound(id=verification_uuid) return verification @db_api.serialize def verification_list(self, verifier_id=None, deployment_id=None, tags=None, status=None): session = get_session() with session.begin(): filter_by = {} if verifier_id: verifier = self._verifier_get(verifier_id, session=session) filter_by["verifier_uuid"] = verifier.uuid if deployment_id: deployment = self._deployment_get(deployment_id, session=session) filter_by["deployment_uuid"] = deployment.uuid if status: filter_by["status"] = status query = self.model_query(models.Verification, session=session) if filter_by: query = query.filter_by(**filter_by) def add_tags_to_verifications(verifications): for verification in verifications: verification.tags = sorted(self._tags_get( verification.uuid, consts.TagType.VERIFICATION)) return verifications if tags: uuids = self._uuids_by_tags_get( consts.TagType.VERIFICATION, tags) query = query.filter(models.Verification.uuid.in_(uuids)) return add_tags_to_verifications(query.all()) def verification_delete(self, verification_uuid): session = get_session() with session.begin(): count = self.model_query( 
models.Verification, session=session).filter_by( uuid=verification_uuid).delete(synchronize_session=False) if not count: raise exceptions.ResourceNotFound(id=verification_uuid) @db_api.serialize def verification_update(self, verification_uuid, properties): session = get_session() with session.begin(): verification = self._verification_get(verification_uuid) verification.update(properties) verification.save() return verification @db_api.serialize def register_worker(self, values): try: worker = models.Worker() worker.update(values) worker.update({"updated_at": timeutils.utcnow()}) worker.save() return worker except db_exc.DBDuplicateEntry: raise exceptions.WorkerAlreadyRegistered( worker=values["hostname"]) @db_api.serialize def get_worker(self, hostname): try: return (self.model_query(models.Worker). filter_by(hostname=hostname).one()) except NoResultFound: raise exceptions.WorkerNotFound(worker=hostname) def unregister_worker(self, hostname): count = (self.model_query(models.Worker). filter_by(hostname=hostname).delete()) if count == 0: raise exceptions.WorkerNotFound(worker=hostname) def update_worker(self, hostname): count = (self.model_query(models.Worker). filter_by(hostname=hostname). update({"updated_at": timeutils.utcnow()})) if count == 0: raise exceptions.WorkerNotFound(worker=hostname) rally-0.9.1/rally/common/db/sqlalchemy/types.py0000664000567000056710000001023413073417716022676 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the
# License for the specific language governing permissions and limitations
# under the License.

import collections
import json

from sqlalchemy.dialects import mysql as mysql_types
from sqlalchemy.ext import mutable
from sqlalchemy import types as sa_types


class LongText(sa_types.TypeDecorator):
    """TEXT column that widens to LONGTEXT on MySQL.

    MySQL can store only 64kb in a Text column, while e.g. psql or sqlite
    can store more than 1GB. Task results may not fit into 64kb, so for
    MySQL this type maps to LONGTEXT, which allows storing up to 4GiB.
    """

    def load_dialect_impl(self, dialect):
        if dialect.name == "mysql":
            return dialect.type_descriptor(mysql_types.LONGTEXT)
        return dialect.type_descriptor(sa_types.Text)


class JSONEncodedDict(LongText):
    """Represents an immutable dict as a json-encoded string."""

    impl = sa_types.Text

    def process_bind_param(self, value, dialect):
        if value is None:
            return None
        return json.dumps(value, sort_keys=False)

    def process_result_value(self, value, dialect):
        if value is None:
            return None
        # Preserve key order on the way back out of the database.
        return json.loads(value,
                          object_pairs_hook=collections.OrderedDict)


class JSONEncodedList(JSONEncodedDict):
    """Represents an immutable list as a json-encoded string."""

    def process_result_value(self, value, dialect):
        if value is None:
            return None
        return json.loads(value)


class MutableDict(mutable.Mutable, dict):
    """dict that reports in-place mutations to SQLAlchemy."""

    @classmethod
    def coerce(cls, key, value):
        """Convert plain dictionaries to MutableDict."""
        if isinstance(value, MutableDict):
            return value
        if isinstance(value, dict):
            return MutableDict(value)
        # this call will raise ValueError
        return mutable.Mutable.coerce(key, value)

    def __setitem__(self, key, value):
        """Detect dictionary set events and emit change events."""
        dict.__setitem__(self, key, value)
        self.changed()

    def __delitem__(self, key):
        """Detect dictionary del events and emit change events."""
        dict.__delitem__(self, key)
        self.changed()


class MutableList(mutable.Mutable, list):
    """list that reports in-place mutations to SQLAlchemy."""

    @classmethod
    def coerce(cls, key, value):
        """Convert plain lists to MutableList."""
        if isinstance(value, MutableList):
            return value
        if isinstance(value, list):
            return MutableList(value)
        # this call will raise ValueError
        return mutable.Mutable.coerce(key, value)

    def append(self, value):
        """Detect list add events and emit change events."""
        list.append(self, value)
        self.changed()

    def remove(self, value):
        """Remove an item by value and emit change events."""
        list.remove(self, value)
        self.changed()

    def __setitem__(self, key, value):
        """Detect list set events and emit change events."""
        list.__setitem__(self, key, value)
        self.changed()

    def __delitem__(self, i):
        """Detect list del events and emit change events."""
        list.__delitem__(self, i)
        self.changed()


class MutableJSONEncodedList(JSONEncodedList):
    """Represent a mutable structure as a json-encoded string."""


class MutableJSONEncodedDict(JSONEncodedDict):
    """Represent a mutable structure as a json-encoded string."""


MutableDict.associate_with(MutableJSONEncodedDict)
MutableList.associate_with(MutableJSONEncodedList)

# rally-0.9.1/rally/common/db/sqlalchemy/alembic.ini0000664000567000056710000000272413073417716023262 0ustar jenkinsjenkins00000000000000
# A generic, single database configuration.

# [alembic]
# path to migration scripts
# script_location = rally.common.db.sqlalchemy:migrations

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions.
When using multiple version # directories, initial revisions must be specified with --version-path # version_locations = %(here)s/bar %(here)s/bat alembic/versions # the output encoding used when revision files # are written from script.py.mako # output_encoding = utf-8 sqlalchemy.url = driver://user:pass@localhost/dbname # Logging configuration [loggers] keys = root,sqlalchemy,alembic [handlers] keys = console [formatters] keys = generic [logger_root] level = WARN handlers = console qualname = [logger_sqlalchemy] level = WARN handlers = qualname = sqlalchemy.engine [logger_alembic] level = INFO handlers = qualname = alembic [handler_console] class = StreamHandler args = (sys.stderr,) level = NOTSET formatter = generic [formatter_generic] format = %(levelname)-5.5s [%(name)s] %(message)s datefmt = %H:%M:%S rally-0.9.1/rally/common/db/api.py0000664000567000056710000004037213073417720020142 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Defines interface for DB access. The underlying driver is loaded as a :class:`LazyPluggable`. Functions in this module are imported into the rally.common.db namespace. Call these functions from rally.common.db namespace, not the rally.common.db.api namespace. All functions in this module return objects that implement a dictionary-like interface. Currently, many of these objects are sqlalchemy objects that implement a dictionary interface. 
However, a future goal is to have all of these objects be simple dictionaries. **Related Flags** :backend: string to lookup in the list of LazyPluggable backends. `sqlalchemy` is the only supported backend right now. :connection: string specifying the sqlalchemy connection to use, like: `sqlite:///var/lib/cinder/cinder.sqlite`. :enable_new_services: when adding a new service to the database, is it in the pool of available hardware (Default: True) """ import datetime as dt from oslo_config import cfg from oslo_db import api as db_api from oslo_db import options as db_options import six from rally.common.i18n import _ CONF = cfg.CONF db_options.set_defaults(CONF, connection="sqlite:////tmp/rally.sqlite") IMPL = None def serialize(fn): def conv(data): if data is None: return None if isinstance(data, (six.integer_types, six.string_types, six.text_type, dt.date, dt.time, float, )): return data if isinstance(data, dict): return {k: conv(v) for k, v in data.items()} if isinstance(data, (list, tuple)): return [conv(i) for i in data] if hasattr(data, "_as_dict"): result = data._as_dict() for k, v in result.items(): result[k] = conv(v) return result raise ValueError(_("Can not serialize %s") % data) def wrapper(*args, **kwargs): result = fn(*args, **kwargs) return conv(result) return wrapper def get_impl(): global IMPL if not IMPL: _BACKEND_MAPPING = {"sqlalchemy": "rally.common.db.sqlalchemy.api"} IMPL = db_api.DBAPI.from_config(CONF, backend_mapping=_BACKEND_MAPPING) return IMPL def engine_reset(): """Reset DB engine.""" get_impl().engine_reset() def schema_cleanup(): """Drop DB schema. 
This method drops existing database.""" get_impl().schema_cleanup() def schema_upgrade(revision=None): """Migrate the database to `revision` or the most recent revision.""" return get_impl().schema_upgrade(revision) def schema_create(): """Create database schema from models description.""" return get_impl().schema_create() def schema_revision(detailed=False): """Return the schema revision.""" return get_impl().schema_revision(detailed=detailed) def schema_stamp(revision): """Stamps database with provided revision.""" return get_impl().schema_stamp(revision) def task_get(uuid): """Returns task by uuid. :param uuid: UUID of the task. :raises TaskNotFound: if the task does not exist. :returns: task dict with data on the task. """ return get_impl().task_get(uuid) def task_get_status(uuid): """Returns task by uuid. :param uuid: UUID of the task. :raises TaskNotFound: if the task does not exist. :returns: task dict with data on the task. """ return get_impl().task_get_status(uuid) def task_get_detailed_last(): """Returns the most recently created task.""" return get_impl().task_get_detailed_last() def task_get_detailed(uuid): """Returns task with results by uuid. :param uuid: UUID of the task. :returns: task dict with data on the task and its results. """ return get_impl().task_get_detailed(uuid) def task_create(values): """Create task record in DB. :param values: dict with record values. :returns: task dict with data on the task. """ return get_impl().task_create(values) def task_update(uuid, values): """Update task by values. :param uuid: UUID of the task. :param values: dict with record values. :raises TaskNotFound: if the task does not exist. :returns: new updated task dict with data on the task. """ return get_impl().task_update(uuid, values) def task_update_status(task_uuid, status, allowed_statuses): """Update task status with specified value. :param task_uuid: string with UUID of Task instance. :param status: new value to be written into db instead of status. 
:param allowed_statuses: list of expected statuses to update in db. :raises RallyException: if task not found with specified status. :returns: the count of rows match as returned by the database's "row count" feature """ return get_impl().task_update_status(task_uuid, allowed_statuses, status) def task_list(status=None, deployment=None): """Get a list of tasks. :param status: Task status to filter the returned list on. If set to None, all the tasks will be returned. :param deployment: Deployment UUID to filter the returned list on. If set to None, tasks from all deployments will be returned. :returns: A list of dicts with data on the tasks. """ return get_impl().task_list(status=status, deployment=deployment) def task_delete(uuid, status=None): """Delete a task. This method removes the task by the uuid, but if the status argument is specified, then the task is removed only when these statuses are equal otherwise an exception is raised. :param uuid: UUID of the task. :raises TaskNotFound: if the task does not exist. :raises TaskInvalidStatus: if the status of the task does not equal to the status argument. """ return get_impl().task_delete(uuid, status=status) def task_result_get_all_by_uuid(task_uuid): """Get list of task results. :param task_uuid: string with UUID of Task instance. :returns: list instances of TaskResult. """ return get_impl().task_result_get_all_by_uuid(task_uuid) def subtask_create(task_uuid, title, description=None, context=None): """Create a subtask. :param task_uuid: string with UUID of Task instance. :param title: subtask title. :param description: subtask description. :param context: subtask context dict. :returns: a dict with data on the subtask. """ return get_impl().subtask_create(task_uuid, title, description, context) def workload_create(task_uuid, subtask_uuid, key): """Create a workload. :param task_uuid: string with UUID of Task instance. :param subtask_uuid: string with UUID of Subtask instance. 
:param key: dict with record values on the workload. :returns: a dict with data on the workload. """ return get_impl().workload_create(task_uuid, subtask_uuid, key) def workload_data_create(task_uuid, workload_uuid, chunk_order, data): """Create a workload data. :param task_uuid: string with UUID of Task instance. :param workload_uuid: string with UUID of Workload instance. :param chunk_order: ordinal index of workload data. :param data: dict with record values on the workload data. :returns: a dict with data on the workload data. """ return get_impl().workload_data_create(task_uuid, workload_uuid, chunk_order, data) def workload_set_results(workload_uuid, data): """Set workload results. :param workload_uuid: string with UUID of Workload instance. :param data: dict with workload results. :returns: a dict with data on the workload. """ return get_impl().workload_set_results(workload_uuid, data) def deployment_create(values): """Create a deployment from the values dictionary. :param values: dict with record values on the deployment. :returns: a dict with data on the deployment. """ return get_impl().deployment_create(values) def deployment_delete(uuid): """Delete a deployment by UUID. :param uuid: UUID of the deployment. :raises DeploymentNotFound: if the deployment does not exist. :raises DeploymentIsBusy: if the resource is not enough. """ return get_impl().deployment_delete(uuid) def deployment_get(deployment): """Get a deployment by UUID. :param deployment: UUID or name of the deployment. :raises DeploymentNotFound: if the deployment does not exist. :returns: a dict with data on the deployment. """ return get_impl().deployment_get(deployment) def deployment_update(uuid, values): """Update a deployment by values. :param uuid: UUID of the deployment. :param values: dict with items to update. :raises DeploymentNotFound: if the deployment does not exist. :returns: a dict with data on the deployment. 
""" return get_impl().deployment_update(uuid, values) def deployment_list(status=None, parent_uuid=None, name=None): """Get list of deployments. :param status: if None returns any deployments with any status. :param parent_uuid: filter by parent. If None, return only "root" deployments. :param name: name of deployment. :returns: a list of dicts with data on the deployments. """ return get_impl().deployment_list(status=status, parent_uuid=parent_uuid, name=name) def resource_create(values): """Create a resource from the values dictionary. :param values: a dict with data on the resource. :returns: a dict with updated data on the resource. """ return get_impl().resource_create(values) def resource_get_all(deployment_uuid, provider_name=None, type=None): """Return resources of a deployment. :param deployment_uuid: filter by uuid of a deployment :param provider_name: filter by provider_name, if is None, then return all providers :param type: filter by type, if is None, then return all types :returns: a list of dicts with data on a resource """ return get_impl().resource_get_all(deployment_uuid, provider_name=provider_name, type=type) def resource_delete(id): """Delete a resource. :param id: ID of a resource. :raises ResourceNotFound: if the resource does not exist. """ return get_impl().resource_delete(id) def verifier_create(name, vtype, namespace, source, version, system_wide, extra_settings=None): """Create a verifier record. 
:param name: verifier name :param vtype: verifier plugin name :param namespace: verifier plugin namespace :param source: path or URL to a verifier repo :param version: branch, tag or commit ID of a verifier repo :param system_wide: whether or not to use the system-wide environment :param extra: verifier-specific installation options :returns: a dict with verifier data """ return get_impl().verifier_create(name=name, vtype=vtype, namespace=namespace, source=source, version=version, system_wide=system_wide, extra_settings=extra_settings) def verifier_get(verifier_id): """Get a verifier record. :param verifier_id: verifier name or UUID :raises ResourceNotFound: if verifier does not exist :returns: a dict with verifier data """ return get_impl().verifier_get(verifier_id) def verifier_list(status=None): """Get all verifier records. :param status: status to filter verifiers by :returns: a list of dicts with verifiers data """ return get_impl().verifier_list(status) def verifier_delete(verifier_id): """Delete a verifier record. :param verifier_id: verifier name or UUID :raises ResourceNotFound: if verifier does not exist """ get_impl().verifier_delete(verifier_id) def verifier_update(verifier_id, **properties): """Update a verifier record. :param verifier_id: verifier name or UUID :param properties: a dict with new properties to update verifier record :raises ResourceNotFound: if verifier does not exist :returns: the updated dict with verifier data """ return get_impl().verifier_update(verifier_id, properties) def verification_create(verifier_uuid, deployment_uuid, tags=None, run_args=None): """Create a verification record. 
:param verifier_uuid: verification UUID :param deployment_uuid: deployment UUID :param tags: a list of tags to assign them to verification :param run_args: a dict with run arguments for verification :returns: a dict with verification data """ return get_impl().verification_create(verifier_uuid, deployment_uuid, tags, run_args) def verification_get(verification_uuid): """Get a verification record. :param verification_uuid: verification UUID :raises ResourceNotFound: if verification does not exist :returns: a dict with verification data """ return get_impl().verification_get(verification_uuid) def verification_list(verifier_id=None, deployment_id=None, tags=None, status=None): """List all verification records. :param verifier_id: verifier name or UUID to filter verifications by :param deployment_id: deployment name or UUID to filter verifications by :param tags: tags to filter verifications by :param status: status to filter verifications by :returns: a list of dicts with verifications data """ return get_impl().verification_list(verifier_id, deployment_id, tags, status) def verification_delete(verification_uuid): """Delete a verification record. :param verification_uuid: verification UUID :raises ResourceNotFound: if verification does not exist """ return get_impl().verification_delete(verification_uuid) def verification_update(uuid, **properties): """Update a verification record. :param uuid: verification UUID :param properties: a dict with new properties to update verification record :raises ResourceNotFound: if verification does not exist :returns: the updated dict with verification data """ return get_impl().verification_update(uuid, properties) def register_worker(values): """Register a new worker service at the specified hostname. :param values: A dict of values which must contain the following: { "hostname": the unique hostname which identifies this worker service. } :returns: A worker. 
:raises WorkerAlreadyRegistered: if worker already registered """ return get_impl().register_worker(values) def get_worker(hostname): """Retrieve a worker service record from the database. :param hostname: The hostname of the worker service. :returns: A worker. :raises WorkerNotFound: if worker not found """ return get_impl().get_worker(hostname) def unregister_worker(hostname): """Unregister this worker with the service registry. :param hostname: The hostname of the worker service. :raises WorkerNotFound: if worker not found """ get_impl().unregister_worker(hostname) def update_worker(hostname): """Mark a worker as active by updating its "updated_at" property. :param hostname: The hostname of this worker service. :raises WorkerNotFound: if worker not found """ get_impl().update_worker(hostname) rally-0.9.1/rally/common/plugin/0000775000567000056710000000000013073420067017720 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/plugin/plugin.py0000664000567000056710000002253313073417720021577 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from rally.common.i18n import _LE from rally.common.plugin import discover from rally.common.plugin import info from rally.common.plugin import meta from rally import exceptions def deprecated(reason, rally_version): """Mark plugin as deprecated. 
:param reason: Message that describes reason of plugin deprecation :param rally_version: Deprecated since this version of Rally """ def decorator(plugin): plugin._set_deprecated(reason, rally_version) return plugin return decorator def base(): """Mark Plugin as a base. .. warning:: This decorator should be added the line before six.add_metaclass if it is used. """ def wrapper(cls): if not issubclass(cls, Plugin): raise exceptions.RallyException(_LE( "Plugin's Base can be only a subclass of Plugin class.")) parent = cls._get_base() if parent != Plugin: raise exceptions.RallyException(_LE( "'%(plugin_cls)s' can not be marked as plugin base, since it " "inherits from '%(parent)s' which is also plugin base.") % { "plugin_cls": cls.__name__, "parent": parent.__name__}) cls.base_ref = cls return cls return wrapper def configure(name, namespace="default", hidden=False): """Use this decorator to configure plugin's attributes. :param name: name of plugin that is used for searching purpose :param namespace: plugin namespace :param hidden: if True the plugin will be marked as hidden and can be loaded only explicitly """ def decorator(plugin): plugin._configure(name, namespace) plugin._meta_set("hidden", hidden) return plugin return decorator def from_func(plugin_baseclass=None): """Add all plugin's methods to function object. Rally benchmark scenarios are different from all other plugins in Rally. Usually 1 plugin is 1 class and we can easily use Plugin() as base for all of them to avoid code duplication. In case of benchmark scenarios 1 class can contain any amount of scenarios that are just methods of this class. To make Rally code cleaner, these methods should look/work like other Plugins. This decorator makes all dirty work for us, it creates dynamically new class, adds plugin instance and aliases for all non-private methods of Plugin instance to passed function. 
For example, @plugin.from_func() def my_plugin_like_func(a, b): pass assert my_plugin_like_func.get_name() == "my_plugin_like_func" assert my_plugin_like_func.get_all() == [] As a result, adding plugin behavior for benchmark scenarios fully unifies work with benchmark scenarios and other kinds of plugins. :param plugin_baseclass: if specified, subclass of this class will be used to add behavior of plugin to function else, subclass of Plugin will be used. :returns: Function decorator that adds plugin behavior to function """ if plugin_baseclass: if not issubclass(plugin_baseclass, Plugin): raise TypeError("plugin_baseclass should be subclass of %s " % Plugin) class FuncPlugin(plugin_baseclass): is_classbased = False else: class FuncPlugin(Plugin): is_classbased = False def decorator(func): func._plugin = FuncPlugin # NOTE(boris-42): This is required by Plugin.get_all method to # return func instead of FuncPlugin that will be # auto discovered. FuncPlugin.func_ref = func # NOTE(boris-42): Make aliases from func to all public Plugin fields for field in dir(func._plugin): if not field.startswith("__"): obj = getattr(func._plugin, field) if callable(obj): setattr(func, field, obj) return func return decorator class Plugin(meta.MetaMixin, info.InfoMixin): """Base class for all Plugins in Rally.""" @classmethod def _configure(cls, name, namespace="default"): """Init plugin and set common meta information. For now it sets only name of plugin, that is an actual identifier. Plugin name should be unique, otherwise exception is raised. :param name: Plugin name :param namespace: Plugins with the same name are allowed only if they are in various namespaces. 
""" cls._meta_init() cls._set_name_and_namespace(name, namespace) return cls @classmethod def unregister(cls): """Removes all plugin meta information and makes it undiscoverable.""" cls._meta_clear() @classmethod def _get_base(cls): return getattr(cls, "base_ref", Plugin) @classmethod def _set_name_and_namespace(cls, name, namespace): try: existing_plugin = cls._get_base().get(name=name, namespace=namespace) except exceptions.PluginNotFound: cls._meta_set("name", name) cls._meta_set("namespace", namespace) else: raise exceptions.PluginWithSuchNameExists( name=name, namespace=namespace, existing_path=( sys.modules[existing_plugin.__module__].__file__), new_path=sys.modules[cls.__module__].__file__ ) @classmethod def _set_deprecated(cls, reason, rally_version): """Mark plugin as deprecated. :param reason: Message that describes reason of plugin deprecation :param rally_version: Deprecated since this version of Rally """ cls._meta_set("deprecated", { "reason": reason, "rally_version": rally_version }) return cls @classmethod def get(cls, name, namespace=None, allow_hidden=False): """Return plugin by its name from specified namespace. This method iterates over all subclasses of cls and returns plugin by name from specified namespace. If namespace is not specified, it will return first found plugin from any of namespaces. 
:param name: Plugin's name :param namespace: Namespace where to search for plugins :param allow_hidden: if False and found plugin is hidden then PluginNotFound will be raised """ potential_result = [] for p in cls.get_all(namespace=namespace, allow_hidden=True): if p.get_name() == name: potential_result.append(p) if len(potential_result) == 1: plugin = potential_result[0] if allow_hidden or not plugin.is_hidden(): return plugin elif potential_result: hint = _LE("Try to choose the correct Plugin base or namespace to " "search in.") if namespace: needle = "%s at %s namespace" % (name, namespace) else: needle = "%s at any of namespaces" % name raise exceptions.MultipleMatchesFound( needle=needle, haystack=", ".join(p.get_name() for p in potential_result), hint=hint) raise exceptions.PluginNotFound( name=name, namespace=namespace or "any of") @classmethod def get_all(cls, namespace=None, allow_hidden=False): """Return all subclass plugins of plugin. All plugins that are not configured will be ignored. :param namespace: return only plugins from specified namespace. :param allow_hidden: if False return only non hidden plugins """ plugins = [] for p in discover.itersubclasses(cls): if not issubclass(p, Plugin): continue if not p._meta_is_inited(raise_exc=False): continue if namespace and namespace != p.get_namespace(): continue if not allow_hidden and p.is_hidden(): continue plugins.append(getattr(p, "func_ref", p)) return plugins @classmethod def get_name(cls): """Return name of plugin.""" return cls._meta_get("name") @classmethod def get_namespace(cls): """"Return namespace of plugin, e.g. 
default or openstack.""" return cls._meta_get("namespace") @classmethod def is_hidden(cls): """Return True if plugin is hidden.""" return cls._meta_get("hidden", False) @classmethod def is_deprecated(cls): """Return deprecation details for deprecated plugins.""" return cls._meta_get("deprecated", False) rally-0.9.1/rally/common/plugin/__init__.py0000664000567000056710000000000013073417716022026 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/common/plugin/info.py0000664000567000056710000000654613073417716021247 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re from sphinx.util import docstrings PARAM_OR_RETURNS_REGEX = re.compile(":(?:param|returns)") RETURNS_REGEX = re.compile(":returns: (?P.*)", re.S) PARAM_REGEX = re.compile(":param (?P[\*\w]+): (?P.*?)" "(?:(?=:param)|(?=:return)|(?=:raises)|\Z)", re.S) def reindent(string): return "\n".join(l.strip() for l in string.strip().split("\n")) def parse_docstring(docstring): """Parse the docstring into its components. :returns: a dictionary of form { "short_description": ..., "long_description": ..., "params": [{"name": ..., "doc": ...}, ...], "returns": ... 
} """ short_description = long_description = returns = "" params = [] if docstring: docstring = "\n".join(docstrings.prepare_docstring(docstring)) lines = docstring.split("\n", 1) short_description = lines[0] if len(lines) > 1: long_description = lines[1].strip() params_returns_desc = None match = PARAM_OR_RETURNS_REGEX.search(long_description) if match: long_desc_end = match.start() params_returns_desc = long_description[long_desc_end:].strip() long_description = long_description[:long_desc_end].rstrip() if params_returns_desc: params = [ {"name": name, "doc": "\n".join(docstrings.prepare_docstring(doc))} for name, doc in PARAM_REGEX.findall(params_returns_desc) ] match = RETURNS_REGEX.search(params_returns_desc) if match: returns = reindent(match.group("doc")) return { "short_description": short_description, "long_description": long_description, "params": params, "returns": returns } class InfoMixin(object): @classmethod def _get_doc(cls): """Return documentary of class By default it returns docstring of class, but it can be overridden for example for cases like merging own docstring with parent """ return cls.__doc__ @classmethod def get_info(cls): plugin_ = getattr(cls, "func_ref", cls) doc = parse_docstring(cls._get_doc()) return { "name": plugin_.get_name(), "namespace": plugin_.get_namespace(), "module": plugin_.__module__, "title": doc["short_description"], "description": doc["long_description"], "parameters": doc["params"], "schema": getattr(cls, "CONFIG_SCHEMA", None), "returns": doc["returns"] } rally-0.9.1/rally/common/plugin/meta.py0000664000567000056710000000775613073417716021246 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import copy

# Meta keys whose DEFAULT_META values are merged across the MRO (extended
# for lists, updated for dicts) instead of being overridden by subclasses.
DEFAULT_META_CONCATENATION = {
    "context": "dict",
    "validators": "list",
}


class MetaMixin(object):
    """Safe way to store meta information related to class object.

    Information is kept in a per-class dict created by ``_meta_init()``.
    Without this explicit initialization, a plain class attribute dict
    would be shared between a base class and all of its subclasses, so a
    subclass writing to it would silently mutate the base class's meta.
    Accessors raise ReferenceError when the class has not called
    ``_meta_init()`` itself, which prevents exactly that bug.
    """

    @classmethod
    def _meta_init(cls):
        """Initialize meta for this class."""
        cls._meta = {}

        # Walk the MRO base-to-derived so subclasses override (or, for the
        # keys in DEFAULT_META_CONCATENATION, extend) parents' defaults.
        for klass in reversed(cls.__mro__):
            for key, value in vars(klass).get("DEFAULT_META", {}).items():
                if key in DEFAULT_META_CONCATENATION:
                    if DEFAULT_META_CONCATENATION[key] == "list":
                        cls._meta.setdefault(key, [])
                        cls._meta[key].extend(value)
                    elif DEFAULT_META_CONCATENATION[key] == "dict":
                        cls._meta.setdefault(key, {})
                        cls._meta[key].update(value)
                else:
                    # Deep copy so later in-place edits of meta never leak
                    # back into the class's DEFAULT_META.
                    cls._meta[key] = copy.deepcopy(value)

    @classmethod
    def _meta_clear(cls):
        cls._meta.clear()   # NOTE(boris-42): make sure that meta is deleted
        delattr(cls, "_meta")

    @classmethod
    def _meta_is_inited(cls, raise_exc=True):
        """Check if meta is initialized.

        It means that this class has own cls._meta object (not pointer to
        parent cls._meta)
        """
        if vars(cls).get("_meta") is None:
            if raise_exc:
                raise ReferenceError(
                    "Trying to use MetaMixin before initialization %s. "
                    "Call _meta_init() before using it" % cls)
            return False
        return True

    @classmethod
    def _meta_get(cls, key, default=None):
        """Get value corresponding to key in meta data."""
        cls._meta_is_inited()
        return cls._meta.get(key, default)

    @classmethod
    def _meta_set(cls, key, value):
        """Set value for key in meta."""
        cls._meta_is_inited()
        cls._meta[key] = value

    @classmethod
    def _meta_setdefault(cls, key, value):
        """Set default value for key in meta."""
        cls._meta_is_inited()
        cls._meta.setdefault(key, value)

# rally-0.9.1/rally/common/plugin/discover.py0000664000567000056710000000737413073417716022122 0ustar jenkinsjenkins00000000000000
# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
LOG = logging.getLogger(__name__)


def itersubclasses(cls, seen=None):
    """Generator over all subclasses of a given class in depth first order."""
    seen = seen or set()
    try:
        direct = cls.__subclasses__()
    except TypeError:
        # Only `type` itself needs the extra argument here.
        direct = cls.__subclasses__(cls)
    for child in direct:
        if child in seen:
            continue
        seen.add(child)
        yield child
        for descendant in itersubclasses(child, seen):
            yield descendant


def import_modules_from_package(package):
    """Import modules from package and append into sys.modules

    :param package: Full package name. For example: rally.deployment.engines
    """
    pieces = [os.path.dirname(rally.__file__), ".."] + package.split(".")
    package_dir = os.path.join(*pieces)
    for root, dirs, files in os.walk(package_dir):
        for fname in files:
            if fname.startswith("__") or not fname.endswith(".py"):
                continue
            # Turn the filesystem path back into a dotted module path,
            # cutting off everything before the package root.
            new_package = ".".join(root.split(os.sep)).split("....")[1]
            module_name = "%s.%s" % (new_package, fname[:-3])
            if module_name not in sys.modules:
                sys.modules[module_name] = importutils.import_module(
                    module_name)


def load_plugins(dir_or_file):
    """Load rally plugin modules from a directory tree or a single file."""
    if os.path.isdir(dir_or_file):
        directory = dir_or_file
        LOG.info(_("Loading plugins from directories %s/*")
                 % directory.rstrip("/"))

        # First collect every candidate (module name, containing dir) pair,
        # then import them in a second pass.
        pending = []
        for root, dirs, files in os.walk(directory, followlinks=True):
            pending.extend((name[:-3], root)
                           for name in files if name.endswith(".py"))

        for plugin, directory in pending:
            if directory not in sys.path:
                sys.path.append(directory)
            fullpath = os.path.join(directory, plugin)
            try:
                fp, pathname, descr = imp.find_module(plugin, [directory])
                imp.load_module(plugin, fp, pathname, descr)
                fp.close()
                LOG.info(_("\t Loaded module with plugins: %s.py") % fullpath)
            except Exception as e:
                # Best effort: a broken plugin must not abort discovery.
                LOG.warning(
                    "\t Failed to load module with plugins %(path)s.py: %(e)s"
                    % {"path": fullpath, "e": e})
                if logging.is_debug():
                    LOG.exception(e)
    elif os.path.isfile(dir_or_file):
        plugin_file = dir_or_file
        LOG.info(_("Loading plugins from file %s") % plugin_file)
        if plugin_file not in sys.path:
            sys.path.append(plugin_file)
        try:
            plugin_name = os.path.splitext(plugin_file.split("/")[-1])[0]
            imp.load_source(plugin_name, plugin_file)
            LOG.info(_("\t Loaded module with plugins: %s.py") % plugin_name)
        except Exception as e:
            LOG.warning(_(
                "\t Failed to load module with plugins %(path)s: %(e)s")
                % {"path": plugin_file, "e": e})
            if logging.is_debug():
                LOG.exception(e)
def list_opts():
    """Return all Rally configuration options, grouped by config section.

    Used by oslo.config's option discovery (e.g. for sample config
    generation).

    :returns: list of (section name, iterable of oslo.config opts) pairs
    """
    benchmark_opts = itertools.chain(
        cinder_utils.CINDER_BENCHMARK_OPTS,
        ec2_utils.EC2_BENCHMARK_OPTS,
        glance_utils.GLANCE_BENCHMARK_OPTS,
        heat_utils.HEAT_BENCHMARK_OPTS,
        ironic_utils.IRONIC_BENCHMARK_OPTS,
        magnum_utils.MAGNUM_BENCHMARK_OPTS,
        manila_utils.MANILA_BENCHMARK_OPTS,
        mistral_utils.MISTRAL_BENCHMARK_OPTS,
        monasca_utils.MONASCA_BENCHMARK_OPTS,
        murano_utils.MURANO_BENCHMARK_OPTS,
        nova_utils.NOVA_BENCHMARK_OPTS,
        sahara_utils.SAHARA_BENCHMARK_OPTS,
        vm_utils.VM_BENCHMARK_OPTS,
        watcher_utils.WATCHER_BENCHMARK_OPTS,
    )
    default_opts = itertools.chain(logging.DEBUG_OPTS,
                                   osclients.OSCLIENTS_OPTS,
                                   engine.TASK_ENGINE_OPTS)
    return [
        ("DEFAULT", default_opts),
        ("benchmark", benchmark_opts),
        ("tempest", itertools.chain(tempest_conf.TEMPEST_OPTS)),
        ("roles_context", itertools.chain(roles.ROLES_CONTEXT_OPTS)),
        ("users_context", itertools.chain(users.USER_CONTEXT_OPTS)),
        ("cleanup", itertools.chain(cleanup_base.CLEANUP_OPTS)),
    ]
" "Off-site components stay quiet.")] CONF = cfg.CONF CONF.register_cli_opts(DEBUG_OPTS) oslogging.register_options(CONF) log.RDEBUG = log.DEBUG + 1 log.addLevelName(log.RDEBUG, "RALLYDEBUG") CRITICAL = log.CRITICAL DEBUG = log.DEBUG ERROR = log.ERROR FATAL = log.FATAL INFO = log.INFO NOTSET = log.NOTSET RDEBUG = log.RDEBUG WARN = log.WARN WARNING = log.WARNING def setup(product_name, version="unknown"): dbg_color = handlers.ColorHandler.LEVEL_COLORS[log.DEBUG] handlers.ColorHandler.LEVEL_COLORS[log.RDEBUG] = dbg_color oslogging.setup(CONF, product_name, version) if CONF.rally_debug: oslogging.getLogger( project=product_name).logger.setLevel(log.RDEBUG) class RallyContextAdapter(oslogging.KeywordArgumentAdapter): def debug(self, msg, *args, **kwargs): self.log(log.RDEBUG, msg, *args, **kwargs) def getLogger(name="unknown", version="unknown"): if name not in oslogging._loggers: oslogging._loggers[name] = RallyContextAdapter(log.getLogger(name), {"project": "rally", "version": version}) return oslogging._loggers[name] LOG = getLogger(__name__) class ExceptionLogger(object): """Context that intercepts and logs exceptions. Usage:: LOG = logging.getLogger(__name__) ... def foobar(): with ExceptionLogger(LOG, "foobar warning") as e: return house_of_raising_exception() if e.exception: raise e.exception # remove if not required """ def __init__(self, logger, warn=None): self.logger = logger self.warn = warn self.exception = None def __enter__(self): return self def __exit__(self, type_, value, traceback): if value: self.exception = value if self.warn: self.logger.warning(self.warn) self.logger.debug(value) if is_debug(): self.logger.exception(value) return True class CatcherHandler(log.handlers.BufferingHandler): def __init__(self): log.handlers.BufferingHandler.__init__(self, 0) def shouldFlush(self): return False def emit(self, record): self.buffer.append(record) class LogCatcher(object): """Context manager that catches log messages. 
User can make an assertion on their content or fetch them all. Usage:: LOG = logging.getLogger(__name__) ... def foobar(): with LogCatcher(LOG) as catcher_in_rye: LOG.warning("Running Kids") catcher_in_rye.assertInLogs("Running Kids") """ def __init__(self, logger): self.logger = getattr(logger, "logger", logger) self.handler = CatcherHandler() def __enter__(self): self.logger.addHandler(self.handler) return self def __exit__(self, type_, value, traceback): self.logger.removeHandler(self.handler) def assertInLogs(self, msg): """Assert that `msg' is a substring at least of one logged message. :param msg: Substring to look for. :return: Log messages where the `msg' was found. Raises AssertionError if none. """ in_logs = [record.msg for record in self.handler.buffer if msg in record.msg] if not in_logs: raise AssertionError("Expected `%s' is not in logs" % msg) return in_logs def fetchLogRecords(self): """Returns all logged Records.""" return self.handler.buffer def fetchLogs(self): """Returns all logged messages.""" return [record.msg for record in self.handler.buffer] def _log_wrapper(obj, log_function, msg, **kw): """A logging wrapper for any method of a class. Class instances that use this decorator should have self.task or self.deployment attribute. The wrapper produces logs messages both before and after the method execution, in the following format (example for tasks): "Task | Starting: " [Method execution...] "Task | Completed: " :param obj: task or deployment which must be attribute of "self" :param log_function: Logging method to be used, e.g. 
LOG.info :param msg: Text message (possibly parameterized) to be put to the log :param **kw: Parameters for msg """ def decorator(f): @functools.wraps(f) def wrapper(self, *args, **kwargs): params = {"msg": msg % kw, "obj_name": obj.title(), "uuid": getattr(self, obj)["uuid"]} log_function(_("%(obj_name)s %(uuid)s | Starting: %(msg)s") % params) result = f(self, *args, **kwargs) log_function(_("%(obj_name)s %(uuid)s | Completed: %(msg)s") % params) return result return wrapper return decorator def log_task_wrapper(log_function, msg, **kw): return _log_wrapper("task", log_function, msg, **kw) def log_deploy_wrapper(log_function, msg, **kw): return _log_wrapper("deployment", log_function, msg, **kw) def log_verification_wrapper(log_function, msg, **kw): return _log_wrapper("verification", log_function, msg, **kw) def log_deprecated(message, rally_version, log_function=None, once=False): """A wrapper marking a certain method as deprecated. :param message: Message that describes why the method was deprecated :param rally_version: version of Rally when the method was deprecated :param log_function: Logging method to be used, e.g. LOG.info :param once: Show only once (default is each) """ log_function = log_function or LOG.warning def decorator(f): @functools.wraps(f) def wrapper(*args, **kwargs): if (not once) or (not getattr(f, "_warned_dep_method", False)): log_function("'%(func)s' is deprecated in Rally v%(version)s: " "%(msg)s" % {"msg": message, "version": rally_version, "func": f.__name__}) setattr(f, "_warned_dep_method", once) return f(*args, **kwargs) return wrapper return decorator def log_deprecated_args(message, rally_version, deprecated_args, log_function=None, once=False): """A wrapper marking certain arguments as deprecated. :param message: Message that describes why the arguments were deprecated :param rally_version: version of Rally when the arguments were deprecated :param deprecated_args: List of deprecated args. 
:param log_function: Logging method to be used, e.g. LOG.info :param once: Show only once (default is each) """ log_function = log_function or LOG.warning def decorator(f): @functools.wraps(f) def wrapper(*args, **kwargs): if (not once) or (not getattr(f, "_warned_dep_args", False)): deprecated = ", ".join([ "`%s'" % x for x in deprecated_args if x in kwargs]) if deprecated: log_function( "%(msg)s (args %(args)s deprecated in Rally " "v%(version)s)" % {"msg": message, "version": rally_version, "args": deprecated}) setattr(f, "_warned_dep_args", once) result = f(*args, **kwargs) return result return wrapper return decorator def is_debug(): return CONF.debug or CONF.rally_debug rally-0.9.1/rally/common/utils.py0000664000567000056710000006224513073417720020147 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
LOG = logging.getLogger(__name__)


class ImmutableMixin(object):
    """Mixin that makes instances read-only once __init__ has finished."""

    _inited = False

    def __init__(self):
        # After this flag flips, __setattr__ rejects all further writes.
        self._inited = True

    def __setattr__(self, key, value):
        if self._inited:
            raise exceptions.ImmutableException()
        super(ImmutableMixin, self).__setattr__(key, value)


class EnumMixin(object):
    """Mixin that iterates over the values of all public class attributes."""

    def __iter__(self):
        for k, v in moves.map(lambda x: (x, getattr(self, x)), dir(self)):
            if not k.startswith("_"):
                yield v


class StdOutCapture(object):
    """Context manager replacing sys.stdout with a StringIO buffer."""

    def __init__(self):
        self.stdout = sys.stdout  # original stream, restored on exit

    def __enter__(self):
        sys.stdout = moves.StringIO()
        return sys.stdout

    def __exit__(self, type, value, traceback):
        sys.stdout = self.stdout


class StdErrCapture(object):
    """Context manager replacing sys.stderr with a StringIO buffer."""

    def __init__(self):
        self.stderr = sys.stderr  # original stream, restored on exit

    def __enter__(self):
        sys.stderr = moves.StringIO()
        return sys.stderr

    def __exit__(self, type, value, traceback):
        sys.stderr = self.stderr


class Timer(object):
    """Timer based on context manager interface."""

    def __enter__(self):
        self.error = None
        self.start = time.time()
        return self

    def timestamp(self):
        # Wall-clock time when the "with" block was entered.
        return self.start

    def finish_timestamp(self):
        # Wall-clock time when the "with" block exited; valid only after
        # __exit__ has run.
        return self.finish

    def __exit__(self, type, value, tb):
        self.finish = time.time()
        if type:
            # Preserve exception info for the caller; the exception itself
            # still propagates (no True returned here).
            self.error = (type, value, tb)

    def duration(self):
        return self.finish - self.start


class Struct(object):
    """Simple object exposing the given keyword arguments as attributes."""

    def __init__(self, **entries):
        self.__dict__.update(entries)

    def __getitem__(self, item, default=None):
        # NOTE(review): subscription syntax (obj["x"]) never passes
        # `default`; it is only usable via a direct __getitem__ call --
        # confirm this is intentional.
        return getattr(self, item, default)


class RAMInt(object):
    """Share RAM integer, for IPC.

    This class represents iterable which refers directly to an integer value
    stored in RAM. Being a true system-level singleton, this allows safely
    share integer among processes and threads.
    """

    def __init__(self, base_value=0):
        # "I" = unsigned int shared between processes.
        self.__int = multiprocessing.Value("I", base_value)

    def __int__(self):
        return self.__int.value

    def __str__(self):
        return str(self.__int.value)

    def __iter__(self):
        return self

    def __next__(self):
        with self.__int._lock:
            value = self.__int.value
            self.__int.value += 1
            if self.__int.value > value:
                return value
            # Unsigned overflow wrapped around -- stop iterating rather
            # than hand out duplicate values.
            raise StopIteration

    def next(self):
        # Python 2 iterator protocol compatibility.
        return self.__next__()

    def reset(self):
        with self.__int._lock:
            self.__int.value = 0


def get_method_class(func):
    """Return the class that defined the given method.

    :param func: function to get the class for.
    :returns: class object or None if func is not an instance method.
    """
    if hasattr(func, "im_class"):
        # this check works in Python 2
        for cls in inspect.getmro(func.im_class):
            if func.__name__ in cls.__dict__:
                return cls
    elif hasattr(func, "__qualname__") and inspect.isfunction(func):
        # this check works in Python 3
        # NOTE(review): the ".<locals>." literal here had been mangled to
        # ".." by an earlier text extraction (angle-bracketed text was
        # stripped); restored to match the __qualname__ format documented
        # by the language.
        cls = getattr(
            inspect.getmodule(func),
            func.__qualname__.split(".<locals>.", 1)[0].rsplit(".", 1)[0])
        if isinstance(cls, type):
            return cls
    else:
        return None


def first_index(lst, predicate):
    """Return the index of the first element that matches a predicate.

    :param lst: list to find the matching element in.
    :param predicate: predicate object.
    :returns: the index of the first matching element or None if no element
              matches the predicate.
    """
    for i, e in enumerate(lst):
        if predicate(e):
            return i
    return None


@logging.log_deprecated(message="Its not used elsewhere in Rally already.",
                        rally_version="0.4.1")
def distance(s1, s2):
    """Computes the edit distance between two strings.

    The edit distance is the Levenshtein distance. The larger the return value,
    the more edits are required to transform one string into the other.

    :param s1: First string to compare
    :param s2: Second string to compare
    :returns: Integer distance between two strings
    """
    n = range(0, len(s1) + 1)
    for y in range(1, len(s2) + 1):
        # `l` is the previous row of the DP table, `n` the current one.
        l, n = n, [y]
        for x in moves.range(1, len(s1) + 1):
            n.append(min(l[x] + 1, n[-1] + 1,
                         l[x - 1] + (s2[y - 1] != s1[x - 1])))
    return n[-1]


def retry(times, func, *args, **kwargs):
    """Try to execute multiple times function mitigating exceptions.

    :param times: Amount of attempts to execute function
    :param func: Function that should be executed
    :param args: *args that are passed to func
    :param kwargs: **kwargs that are passed to func
    :raises Exception: Raise any exception that can raise func
    :returns: Result of func(*args, **kwargs)
    """
    for i in range(times):
        try:
            return func(*args, **kwargs)
        except Exception:
            # Swallow every failure except the last attempt's.
            if i == times - 1:
                raise


def iterate_per_tenants(users):
    """Iterate of a single arbitrary user from each tenant

    :type users: list of users
    :return: iterator of a single user from each tenant
    """
    processed_tenants = set()
    for user in users:
        if user["tenant_id"] not in processed_tenants:
            processed_tenants.add(user["tenant_id"])
            yield (user, user["tenant_id"])
""" _resource_name_placeholder_re = re.compile( "^(?P.*?)(?PX{3,})(?P[^X]+?)(?PX{3,})" "(?P.*)$") RESOURCE_NAME_FORMAT = "rally_XXXXXXXX_XXXXXXXX" RESOURCE_NAME_ALLOWED_CHARACTERS = string.ascii_letters + string.digits @classmethod def _generate_random_part(cls, length): """Generate a random string. :param length: The length of the random string. :returns: string, randomly-generated string of the specified length containing only characters from cls.RESOURCE_NAME_ALLOWED_CHARACTERS """ return "".join(random.choice(cls.RESOURCE_NAME_ALLOWED_CHARACTERS) for i in range(length)) @classmethod def _generate_task_id_part(cls, task_id, length): # NOTE(stpierre): the first part of the random name is a # subset of the task ID task_id_part = task_id.replace("-", "")[0:length] if len(task_id_part) < length: LOG.debug("Task ID %(task_id)s cannot be included in a random " "name because it is too short. Format: %(format)s" % {"task_id": task_id, "format": cls.RESOURCE_NAME_FORMAT}) elif any(char not in cls.RESOURCE_NAME_ALLOWED_CHARACTERS for char in task_id_part): LOG.debug("Task ID %(task_id)s cannot be included in a random " "name because it includes disallowed characters. " "Allowed characters are: %(chars)s" % {"task_id": task_id, "chars": cls.RESOURCE_NAME_ALLOWED_CHARACTERS}) else: return task_id_part # NOTE(stpierre): either the task UUID is shorter than the # task portion; or the portion of the task ID that we # would use contains only characters in # resource_name_allowed_characters. try: # NOTE(stpierre): seed pRNG with task ID so that all random # names with the same task ID have the same task ID part random.seed(task_id) return cls._generate_random_part(length) finally: random.seed() def generate_random_name(self): """Generate pseudo-random resource name for scenarios. The name follows a deterministic pattern, which helps support out-of-band cleanup of Rally-created resources. If possible, a portion of the task ID will be used in the random name. 
If the task ID contains characters that are not allowed by the 'RESOURCE_NAME_ALLOWED_CHARACTERS' class variable, then a random string, seeded with the task ID, will be generated for the task portion of the random name. :returns: str, pseudo-random name """ if hasattr(self, "task"): task_id = self.task["uuid"] elif hasattr(self, "verification"): task_id = self.verification["uuid"] match = self._resource_name_placeholder_re.match( self.RESOURCE_NAME_FORMAT) if match is None: raise ValueError("%s is not a valid resource name format" % self.RESOURCE_NAME_FORMAT) parts = match.groupdict() return "".join([ parts["prefix"], self._generate_task_id_part(task_id, len(parts["task"])), parts["sep"], self._generate_random_part(len(parts["rand"])), parts["suffix"]]) @classmethod def name_matches_object(cls, name, task_id=None, exact=True): """Determine if a resource name could have been created by this class. :param name: The resource name to check against this class's RESOURCE_NAME_FORMAT. :param task_id: The task ID that must match the task portion of the random name :param exact: If False, then additional information may follow the expected name. (For instance, this is useful when bulk creating instances, since Nova automatically appends a UUID to each instance created thusly.) 
:returns: bool """ match = cls._resource_name_placeholder_re.match( cls.RESOURCE_NAME_FORMAT) parts = match.groupdict() subst = { "prefix": re.escape(parts["prefix"]), "sep": re.escape(parts["sep"]), "suffix": re.escape(parts["suffix"]), "chars": re.escape(cls.RESOURCE_NAME_ALLOWED_CHARACTERS), "rand_length": len(parts["rand"])} if task_id: subst["task_id"] = cls._generate_task_id_part(task_id, len(parts["task"])) else: subst["task_id"] = "[%s]{%s}" % (subst["chars"], len(parts["task"])) subst["extra"] = "" if exact else ".*" name_re = re.compile( "%(prefix)s%(task_id)s%(sep)s" "[%(chars)s]{%(rand_length)s}%(suffix)s%(extra)s$" % subst) return bool(name_re.match(name)) def name_matches_object(name, *objects, **kwargs): """Determine if a resource name could have been created by given objects. The object(s) must implement RandomNameGeneratorMixin. It will often be more efficient to pass a list of classes to name_matches_object() than to perform multiple name_matches_object() calls, since this function will deduplicate identical name generation options. :param name: The resource name to check against the object's RESOURCE_NAME_FORMAT. :param *objects: Classes or objects to fetch random name generation parameters from. :param **kwargs: Additional keyword args. See the docstring for RandomNameGenerator.name_matches_object() for details on what args are recognized. :returns: bool """ unique_rng_options = {} for obj in objects: key = (obj.RESOURCE_NAME_FORMAT, obj.RESOURCE_NAME_ALLOWED_CHARACTERS) if key not in unique_rng_options: unique_rng_options[key] = obj return any(obj.name_matches_object(name, **kwargs) for obj in unique_rng_options.values()) def merge(length, *sources): """Merge lists of lists. Each source produces (or contains) lists of ordered items. Items of each list must be greater or equal to all items of the previous list (that implies that items must be comparable). 
def merge(length, *sources):
    """Merge lists of lists.

    Each source produces (or contains) lists of ordered items.
    Items of each list must be greater or equal to all items of
    the previous list (that implies that items must be comparable).

    The function merges the sources into lists with the length
    equal to given one, except the last list which can be shorter.

    Example:
        it1 = iter([[1, 3, 5], [5, 7, 9, 14], [17, 21, 36, 41]])
        it2 = iter([[2, 2, 4], [9, 10], [16, 19, 23, 26, 91]])
        it3 = iter([[5], [5, 7, 11, 14, 14, 19, 23]])

        it = merge(10, it1, it2, it3)

        for i in it:
            print i

    prints out:
        [1, 2, 2, 3, 4, 5, 5, 5, 5, 7, 7, 9, 9, 10]
        [11, 14, 14, 14, 16, 17, 19, 19, 21, 23, 23]
        [26, 36, 41, 91]

    :param: length, length of generated lists, except the last one.
    :param: sources, generators that produce lists of items to merge
    """
    # Per-source buffer of not-yet-merged items plus the (possibly
    # exhausted) generator it came from.
    streams = [
        {"data": [], "gen": src}
        for src in sources]

    out_chunk = []
    while True:
        while len(out_chunk) < length:
            # Least right item among streams
            lri = None

            # Refresh data if needed
            for s in streams:
                if s["gen"] and not s["data"]:
                    try:
                        # Skip over empty lists until real data arrives.
                        while not s["data"]:
                            s["data"] = next(s["gen"])
                    except StopIteration:
                        s["gen"] = None

                # ... and define least right item
                if s["data"]:
                    rightmost_item = s["data"][-1]
                    if (lri is None) or (rightmost_item < lri):
                        lri = rightmost_item

            # No more data to merge
            if lri is None:
                break

            # Everything <= lri is guaranteed to be safe to emit: no
            # source can produce a smaller item later.
            to_merge = []
            for s in streams:
                if s["data"]:
                    pos = bisect.bisect_right(s["data"], lri)
                    to_merge.append(s["data"][:pos])
                    s["data"] = s["data"][pos:]

            out_chunk += heapq.merge(*to_merge)

        if out_chunk:
            if len(out_chunk) > length:
                yield out_chunk[:length]
                out_chunk = out_chunk[length:]
            else:
                yield out_chunk
                out_chunk = []
        else:
            return


def interruptable_sleep(sleep_time, atomic_delay=0.1):
    """Return after sleep_time seconds.

    Divide sleep_time by atomic_delay, and call time.sleep N times.
    This should give a chance to interrupt current thread.

    :param sleep_time: idle time of method (in seconds).
    :param atomic_delay: parameter with which  time.sleep would be called
                         int(sleep_time / atomic_delay) times.
    """
    if atomic_delay <= 0:
        raise ValueError("atomic_delay should be > 0")

    if sleep_time >= 0:
        if sleep_time < 1:
            # Short waits are cheap enough to do in one shot.
            return time.sleep(sleep_time)

        for x in moves.xrange(int(sleep_time / atomic_delay)):
            time.sleep(atomic_delay)

        # Sleep the fractional remainder, if any.
        left = sleep_time - (int(sleep_time / atomic_delay)) * atomic_delay
        if left:
            time.sleep(left)
    else:
        raise ValueError("sleep_time should be >= 0")


def terminate_thread(thread_ident, exc_type=exceptions.ThreadTimeoutException):
    """Terminate a python thread.

    Use PyThreadState_SetAsyncExc to terminate thread.

    :param thread_ident: threading.Thread.ident value
    :param exc_type: an Exception type to be raised
    """
    # NOTE(review): this asynchronously schedules `exc_type` in the target
    # thread via the CPython C API; the exception is delivered at the next
    # bytecode boundary, so termination is not immediate.
    ctypes.pythonapi.PyThreadState_SetAsyncExc(
        ctypes.c_long(thread_ident), ctypes.py_object(exc_type))


def timeout_thread(queue):
    """Terminate threads by timeout.

    Function need to be run in separate thread. Its designed to terminate
    threads which are running longer then timeout.

    Parent thread will put tuples (thread_ident, deadline) in the queue,
    where `thread_ident` is Thread.ident value of thread to watch, and
    `deadline` is timestamp when thread should be terminated. Also tuple
    (None, None) should be put when all threads are exited and no more
    threads to watch.

    :param queue: Queue object to communicate with parent thread.
    """
    all_threads = collections.deque()
    while True:
        if not all_threads:
            timeout = None
        else:
            # Wait no longer than the earliest deadline in the queue.
            thread, deadline = all_threads[0]
            timeout = deadline - time.time()
        try:
            next_thread = queue.get(timeout=timeout)
            all_threads.append(next_thread)
        except (moves.queue.Empty, ValueError):
            # NOTE(rvasilets) Empty means that timeout was occurred.
            # ValueError means that timeout lower than 0.
            if thread.isAlive():
                LOG.info("Thread %s is timed out. Terminating." %
                         thread.ident)
                terminate_thread(thread.ident)
            all_threads.popleft()

        # NOTE(review): after a timeout, `next_thread` still holds the value
        # received on a previous successful get(); a (None, None) sentinel
        # would already have returned then, so this re-check is safe.
        if next_thread == (None, None,):
            return


class LockedDict(dict):
    """This represents dict which can be locked for updates.

    It is read-only by default, but it can be updated via context manager
    interface:

        d = LockedDict(foo="bar")
        d["spam"] = 42  # RuntimeError
        with d.unlocked():
            d["spam"] = 42  # Works
    """

    def __init__(self, *args, **kwargs):
        super(LockedDict, self).__init__(*args, **kwargs)
        self._is_locked = True
        self._is_ready_to_be_unlocked = False

        # Recursively freeze contained values: nested dicts become
        # LockedDicts, nested lists become tuples of locked values.
        def lock(obj):
            if isinstance(obj, dict):
                return LockedDict(obj)
            elif isinstance(obj, list):
                return tuple([lock(v) for v in obj])
            return obj

        with self.unlocked():
            for k, v in self.items():
                self[k] = lock(v)

    def _check_is_unlocked(self):
        if self._is_locked:
            raise RuntimeError("Trying to change read-only dict %r" % self)

    def unlocked(self):
        # Arms the context manager: the actual unlock happens in __enter__.
        self._is_ready_to_be_unlocked = True
        return self

    def __deepcopy__(self, memo=None):
        # Deep copies are returned as plain, mutable containers.
        def unlock(obj):
            if isinstance(obj, LockedDict):
                obj = dict(obj)
                for k, v in obj.items():
                    obj[k] = unlock(v)
            elif type(obj) == tuple:
                obj = tuple([unlock(v) for v in obj])
            return obj

        return copy.deepcopy(unlock(self), memo=memo)

    def __enter__(self, *args):
        # Only unlock when entered through unlocked(); a bare
        # `with locked_dict:` keeps the dict read-only.
        if self._is_ready_to_be_unlocked:
            self._is_locked = False

    def __exit__(self, *args):
        self._is_ready_to_be_unlocked = False
        self._is_locked = True

    def __setitem__(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).__setitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).__delitem__(*args, **kwargs)

    def pop(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).pop(*args, **kwargs)

    def popitem(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).popitem(*args, **kwargs)

    def update(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).update(*args, **kwargs)

    def setdefault(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).setdefault(*args, **kwargs)

    def clear(self, *args, **kwargs):
        self._check_is_unlocked()
        return super(LockedDict, self).clear(*args, **kwargs)
def format_float_to_str(num):
    """Format number into human-readable float format.

    More precise it convert float into the string and remove redundant
    zeros from the floating part. It will format the number by the
    following examples:

        0.0000001 -> 0.0
        0.000000  -> 0.0
        37        -> 37.0
        1.0000001 -> 1.0
        1.0000011 -> 1.000001
        1.0000019 -> 1.000002

    :param num: Number to be formatted
    :return: string representation of the number
    """
    # "%f" renders with exactly 6 decimal places, which also rounds.
    num_str = "%f" % num
    float_part = num_str.split(".")[1].rstrip("0") or "0"
    return num_str.split(".")[0] + "." + float_part


class DequeAsQueue(object):
    """Allows to use some of Queue methods on collections.deque."""

    def __init__(self, deque):
        # :param deque: collections.deque instance to wrap
        self.deque = deque

    def qsize(self):
        return len(self.deque)

    def put(self, value):
        self.deque.append(value)

    def get(self):
        return self.deque.popleft()

    def empty(self):
        # Fixed: Queue.empty() must return True when there is nothing
        # queued; the previous `bool(self.deque)` was inverted relative to
        # the Queue contract this class claims to mirror.
        # NOTE(review): audit callers in case any relied on the inverted
        # semantics.
        return not self.deque


class Stopwatch(object):
    """Allows to sleep till specified time since start."""

    def __init__(self, stop_event=None):
        """Creates Stopwatch.

        :param stop_event: optional threading.Event to use for waiting
            allows to interrupt sleep. If not provided time.sleep
            will be used instead.
        """
        self._stop_event = stop_event

    def start(self):
        # Must be called before sleep(); records the reference timestamp.
        self._start_time = time.time()

    def sleep(self, sec):
        """Sleeps till specified second since start."""
        target_time = self._start_time + sec
        current_time = time.time()
        if current_time >= target_time:
            # Deadline already passed -- nothing to wait for.
            return
        time_to_sleep = target_time - current_time
        self._sleep(time_to_sleep)

    def _sleep(self, sec):
        if self._stop_event:
            # Event.wait() returns early if the event gets set, which is
            # exactly the interruption hook callers opted into.
            self._stop_event.wait(sec)
        else:
            interruptable_sleep(sec)


def generate_random_path(root_dir=None):
    """Generates a vacant name for a file or dir at the specified place.

    :param root_dir: Name of a directory to generate path in. If None
        (default behaviour), temporary directory (i.e /tmp in linux) will
        be used.
    """
    root_dir = root_dir or tempfile.gettempdir()
    path = None
    # NOTE(review): this is check-then-use (TOCTOU); another process could
    # create the same path between the existence check and first use.
    while path is None:
        candidate = os.path.join(root_dir, str(uuid.uuid4()))
        if not os.path.exists(candidate):
            path = candidate
    return path


class BackupHelper(object):
    """Copies directories aside and restores them on rollback.

    Can be used as a context manager: on an exception inside the "with"
    block, all backed-up paths are restored and registered rollback
    actions executed.
    """

    def __init__(self):
        # All backups live under one throwaway temp directory, cleaned up
        # in __del__.
        self._tempdir = generate_random_path()
        os.mkdir(self._tempdir)
        self._stored_data = {}
        self._rollback_actions = []

    def backup(self, original_path):
        """Copy original_path into the backup area.

        :param original_path: directory to preserve
        :raises RallyException: if the path was already backed up
        """
        if original_path in self._stored_data:
            raise exceptions.RallyException(
                _LE("Failed to back up %s since it was already stored.") %
                original_path)
        backup_path = generate_random_path(self._tempdir)
        LOG.debug("Creating backup of %s in %s" %
                  (original_path, backup_path))
        try:
            shutil.copytree(original_path, backup_path, symlinks=True)
        except Exception:
            # Ooops. something went wrong -- restore whatever was already
            # stored before propagating.
            self.rollback()
            raise
        self._stored_data[original_path] = backup_path

    def rollback(self):
        """Restore every backed-up path and run registered actions."""
        LOG.debug("Performing rollback of stored data.")
        for original_path, stored_path in self._stored_data.copy().items():
            if os.path.exists(original_path):
                shutil.rmtree(original_path)
            shutil.copytree(stored_path, original_path, symlinks=True)
            # not to delete the same path in __del__ method
            self._stored_data.pop(original_path)
        for m, args, kwargs in self._rollback_actions:
            m(*args, **kwargs)

    def add_rollback_action(self, method, *args, **kwargs):
        """Register an extra callable to run during rollback."""
        self._rollback_actions.append((method, args, kwargs))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Roll back only on failure; the exception still propagates.
        if exc_type is not None:
            self.rollback()

    def __call__(self, path):
        # Shorthand: helper(path) backs the path up and returns the helper,
        # enabling `with BackupHelper()(path):`.
        self.backup(path)
        return self

    def __del__(self):
        for path in self._stored_data.values():
            if os.path.exists(path):
                LOG.debug("Deleting %s" % path)
                shutil.rmtree(path)
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import threading from rally.common.i18n import _LW from rally.common import logging LOG = logging.getLogger(__name__) def _consumer(consume, queue): """Infinity worker that consumes tasks from queue. :param consume: method that consumes an object removed from the queue :param queue: deque object to popleft() objects from """ cache = {} while True: if not queue: break else: try: args = queue.popleft() except IndexError: # consumed by other thread continue try: consume(cache, args) except Exception as e: LOG.warning(_LW("Failed to consume a task from the queue: %s") % e) if logging.is_debug(): LOG.exception(e) def _publisher(publish, queue): """Calls a publish method that fills queue with jobs. :param publish: method that fills the queue :param queue: deque object to be filled by the publish() method """ try: publish(queue) except Exception as e: LOG.warning(_LW("Failed to publish a task to the queue: %s") % e) if logging.is_debug(): LOG.exception(e) def run(publish, consume, consumers_count=1): """Run broker. publish() put to queue, consume() process one element from queue. When publish() is finished and elements from queue are processed process is finished all consumers threads are cleaned. 
def run(publish, consume, consumers_count=1):
    """Run broker.

    publish() put to queue, consume() process one element from queue.
    When publish() is finished and elements from queue are processed
    process is finished all consumers threads are cleaned.

    :param publish: Function that puts values to the queue
    :param consume: Function that processes a single value from the queue
    :param consumers_count: Number of consumers
    """
    queue = collections.deque()
    # NOTE: the queue is fully populated before any consumer thread is
    # started, so consumers may safely exit as soon as they find it empty.
    _publisher(publish, queue)
    consumers = []
    for i in range(consumers_count):
        consumer = threading.Thread(target=_consumer, args=(consume, queue))
        consumer.start()
        consumers.append(consumer)
    # Block until every consumer has drained its share of the queue.
    for consumer in consumers:
        consumer.join()
class MeanComputation(StreamingAlgorithm):
    """Compute mean for a stream of numbers."""

    def __init__(self):
        self.total = 0.0
        self.count = 0

    def add(self, value):
        # Keep a running sum and a counter; the mean is derived on demand.
        self.total += value
        self.count += 1

    def merge(self, other):
        self.total += other.total
        self.count += other.count

    def result(self):
        if not self.count:
            # No values were processed yet -- the mean is undefined.
            return None
        return self.total / self.count
class StdDevComputation(StreamingAlgorithm):
    """Compute standard deviation for a stream of numbers."""

    def __init__(self):
        self.count = 0
        # NOTE(msdubov): To compute std, we need the auxiliary variables below.
        # dev_sum accumulates the sum of squared deviations from the running
        # mean; mean is kept in sync with mean_computation.result().
        self.dev_sum = 0.0
        self.mean_computation = MeanComputation()
        self.mean = 0.0

    def add(self, value):
        # NOTE(msdubov): This streaming method for std computation appears
        #                in "The Art of Computer Programming" by D. Knuth,
        #                Vol 2, p. 232, 3rd edition.
        self.count += 1
        mean_prev = self.mean
        self.mean_computation.add(value)
        self.mean = self.mean_computation.result()
        # Single-pass update: (value - old_mean) * (value - new_mean) keeps
        # dev_sum equal to the sum of squared deviations seen so far.
        self.dev_sum = self.dev_sum + (value - mean_prev) * (value - self.mean)

    def merge(self, other):
        # Nothing to merge if the other instance has not seen any values.
        if not other.mean_computation.count:
            return
        dev_sum1 = self.dev_sum
        count1 = self.count
        mean1 = self.mean

        dev_sum2 = other.dev_sum
        count2 = other.count
        mean2 = other.mean

        self.mean_computation.merge(other.mean_computation)
        self.mean = self.mean_computation.result()

        self.count += other.count
        # Combine the two partial results through the sum-of-squares
        # identity sum((x - m)^2) == sum(x^2) - n * m^2, applied to both
        # halves and re-centered around the merged mean.
        self.dev_sum = (dev_sum1 + count1 * mean1 ** 2
                        + dev_sum2 + count2 * mean2 ** 2
                        - self.count * self.mean ** 2)

    def result(self):
        # NOTE(amaretskiy): Need at least two values to be processed
        if self.count < 2:
            return None
        # Sample standard deviation (Bessel's correction: n - 1).
        return math.sqrt(self.dev_sum / (self.count - 1))
:param length: count of the measurements """ if not 0 < percent < 1: raise ValueError("Unexpected percent: %s" % percent) self._percent = percent self._graph_zipper = utils.GraphZipper(length, 10000) def add(self, value): self._graph_zipper.add_point(value) def merge(self, other): # TODO(ikhudoshyn): Implement me raise NotImplementedError() def result(self): results = list( map(lambda x: x[1], self._graph_zipper.get_zipped_graph())) if results: # NOTE(amaretskiy): Calculate percentile of a list of values results.sort() k = (len(results) - 1) * self._percent f = math.floor(k) c = math.ceil(k) if f == c: return results[int(k)] d0 = results[int(f)] * (c - k) d1 = results[int(c)] * (k - f) return (d0 + d1) return None class IncrementComputation(StreamingAlgorithm): """Simple incremental counter.""" def __init__(self): self._count = 0 def add(self, *args): self._count += 1 def merge(self, other): self._count += other._count def result(self): return self._count class DegradationComputation(StreamingAlgorithm): """Calculates degradation from a stream of numbers Finds min and max values from a stream and then calculates ratio between them in percentage. Works only with positive numbers. 
""" def __init__(self): self.min_value = MinComputation() self.max_value = MaxComputation() def add(self, value): if value <= 0.0: raise ValueError("Unexpected value: %s" % value) self.min_value.add(value) self.max_value.add(value) def merge(self, other): min_result = other.min_value.result() if min_result is not None: self.min_value.add(min_result) max_result = other.max_value.result() if max_result is not None: self.max_value.add(max_result) def result(self): min_result = self.min_value.result() max_result = self.max_value.result() if min_result is None or max_result is None: return 0.0 return (max_result / min_result - 1) * 100.0 rally-0.9.1/rally/common/yamlutils.py0000664000567000056710000000447213073417716021035 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import json import yaml from yaml import constructor from yaml import loader from yaml import nodes from yaml import parser from yaml import resolver ParserError = parser.ParserError # NOTE(andreykurilin): Jinja2 uses __repr__ methods of objects while rendering # templates. Such behaviour converts OrderedDict to the string like # "OrderedDict([('foo', 'xxx'), ('bar', 'yyy')])" # which breaks json/yaml load. # In 99% of cases, we are rendering templates based on the dicts obtained # after yaml.safe_load which uses collections.OrderedDict , so writing here # the workaround with overridden __repr__ method looks like the best choice. 
def _construct_mapping(loader, node, deep=False):
    """Build an OrderedDict from a YAML mapping node, rejecting duplicates.

    :param loader: yaml loader instance constructing the document
    :param node: yaml node to construct the mapping from
    :param deep: construct nested objects eagerly (passed through to yaml)
    :raises constructor.ConstructorError: if a mapping key occurs twice
    :returns: OrderedDict preserving the key order of the document
    """
    # A set gives O(1) duplicate detection instead of the previous O(n)
    # list membership test per key; mapping keys must be hashable anyway
    # to be stored in the resulting OrderedDict.
    seen_keys = set()
    if isinstance(node, nodes.MappingNode):
        for key_node, value_node in node.value:
            key = loader.construct_object(key_node, deep=deep)
            if key in seen_keys:
                raise constructor.ConstructorError(
                    "while constructing a mapping",
                    node.start_mark,
                    "the key (%s) is redefined" % key,
                    key_node.start_mark)
            seen_keys.add(key)
    return OrderedDict(loader.construct_pairs(node))
def validate_args(fn, *args, **kwargs):
    """Check that the supplied args are sufficient for calling a function.

    >>> validate_args(lambda a: None)
    Traceback (most recent call last):
        ...
    MissingArgs: Missing argument(s): a
    >>> validate_args(lambda a, b, c, d: None, 0, c=1)
    Traceback (most recent call last):
        ...
    MissingArgs: Missing argument(s): b, d

    :param fn: the function to check
    :param args: the positional arguments supplied
    :param kwargs: the keyword arguments supplied
    :raises MissingArgs: if any required argument is not supplied
    """
    # NOTE: inspect.getargspec() was deprecated in Python 3.0 and removed
    # in Python 3.11; prefer getfullargspec() when available while keeping
    # Python 2 compatibility.
    if hasattr(inspect, "getfullargspec"):
        argspec = inspect.getfullargspec(fn)
    else:
        argspec = inspect.getargspec(fn)

    num_defaults = len(argspec.defaults or [])
    # Arguments without defaults are the required ones.
    required_args = argspec.args[:len(argspec.args) - num_defaults]

    # NOTE: if fn is a bound method, "self" is implicitly supplied.
    if getattr(fn, "__self__", None):
        required_args.pop(0)

    missing_required_args = required_args[len(args):]
    missing = [arg for arg in missing_required_args if arg not in kwargs]
    if missing:
        raise MissingArgs(missing)
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None,
               normalize_field_names=False,
               table_label=None, print_header=True, print_border=True,
               out=sys.stdout):
    """Print a list or objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :param normalize_field_names: If True, field names will be transformed,
        e.g. "Field Name" -> "field_name", otherwise they will be used
        unchanged.
    :param table_label: Label to use as header for the whole table.
    :param print_header: print table header.
    :param print_border: print table border.
    :param out: stream to write output to.
    :raises ValueError: if field_labels and fields differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # NOTE: apply "%" so the message is actually interpolated; the
        # mapping used to be passed as a second ValueError argument and
        # the placeholders were never filled in.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {"labels": field_labels, "fields": fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {"sortby": field_labels[sortby_index]}

    pt = prettytable.PrettyTable(field_labels)
    pt.align = "l"

    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                field_name = field
                if normalize_field_names:
                    if field_name not in mixed_case_fields:
                        field_name = field_name.lower()
                    field_name = field_name.replace(" ", "_").replace("-", "_")

                if isinstance(o, dict):
                    data = o.get(field_name, "")
                else:
                    data = getattr(o, field_name, "")
                row.append(data)
        pt.add_row(row)

    if not print_border or not print_header:
        pt.set_style(prettytable.PLAIN_COLUMNS)
        pt.left_padding_width = 0
        pt.right_padding_width = 1

    table_body = pt.get_string(header=print_header,
                               border=print_border,
                               **kwargs) + "\n"

    table_header = ""

    if table_label:
        # The width of the first rendered line gives the table width.
        table_width = table_body.index("\n")
        table_header = make_table_header(table_label, table_width)
        table_header += "\n"

    if six.PY3:
        if table_header:
            out.write(encodeutils.safe_encode(table_header).decode())
        out.write(encodeutils.safe_encode(table_body).decode())
    else:
        if table_header:
            out.write(encodeutils.safe_encode(table_header))
        out.write(encodeutils.safe_encode(table_body))
""" formatters = formatters or {} mixed_case_fields = mixed_case_fields or [] if not fields: if isinstance(obj, dict): fields = sorted(obj.keys()) else: fields = [name for name in dir(obj) if (not name.startswith("_") and not callable(getattr(obj, name)))] pt = prettytable.PrettyTable([property_label, value_label], caching=False) pt.align = "l" for field_name in fields: if field_name in formatters: data = formatters[field_name](obj) else: field = field_name if normalize_field_names: if field not in mixed_case_fields: field = field_name.lower() field = field.replace(" ", "_").replace("-", "_") if isinstance(obj, dict): data = obj.get(field, "") else: data = getattr(obj, field, "") # convert dict to str to check length if isinstance(data, (dict, list)): data = json.dumps(data) if wrap > 0: data = textwrap.fill(six.text_type(data), wrap) # if value has a newline, add in multiple rows # e.g. fault with stacktrace if (data and isinstance(data, six.string_types) and (r"\n" in data or "\r" in data)): # "\r" would break the table, so remove it. if "\r" in data: data = data.replace("\r", "") lines = data.strip().split(r"\n") col1 = field_name for line in lines: pt.add_row([col1, line]) col1 = "" else: if data is None: data = "-" pt.add_row([field_name, data]) table_body = pt.get_string(header=print_header, border=print_border) + "\n" table_header = "" if table_label: table_width = table_body.index("\n") table_header = make_table_header(table_label, table_width) table_header += "\n" if six.PY3: if table_header: out.write(encodeutils.safe_encode(table_header).decode()) out.write(encodeutils.safe_encode(table_body).decode()) else: if table_header: out.write(encodeutils.safe_encode(table_header)) out.write(encodeutils.safe_encode(table_body)) def make_table_header(table_label, table_width, junction_char="+", horizontal_char="-", vertical_char="|"): """Generalized way make a table header string. 
def make_table_header(table_label, table_width,
                      junction_char="+", horizontal_char="-",
                      vertical_char="|"):
    """Generalized way make a table header string.

    :param table_label: label to print on header
    :param table_width: total width of table
    :param junction_char: character used where vertical and
        horizontal lines meet.
    :param horizontal_char: character used for horizontal lines.
    :param vertical_char: character used for vertical lines.
    :raises ValueError: if the label does not fit within table_width
    :returns: string
    """
    if len(table_label) >= (table_width - 2):
        # NOTE: the former message was missing a space between "total" and
        # "width" and never interpolated the label into "%s".
        raise ValueError(_("Table header %s is longer than total "
                           "width of the table.") % table_label)

    label_and_space_width = table_width - len(table_label) - 2
    # Uneven leftover space puts the extra column on the right side.
    padding = 0 if label_and_space_width % 2 == 0 else 1
    half_table_width = label_and_space_width // 2
    left_spacing = (" " * half_table_width)
    right_spacing = (" " * (half_table_width + padding))

    border_line = "".join((junction_char,
                           (horizontal_char * (table_width - 2)),
                           junction_char,))

    label_line = "".join((vertical_char,
                          left_spacing,
                          table_label,
                          right_spacing,
                          vertical_char,))

    return "\n".join((border_line, label_line,))
def pretty_float_formatter(field, ndigits=None):
    """Create a float value formatter function for the given field.

    :param field: str name of an object, which value should be formatted
    :param ndigits: int number of digits after decimal point to round
                    default is None - this disables rounding
    :returns: field formatter function
    """
    def _format_field(item):
        # Dicts are indexed by key; anything else is read as an attribute.
        if type(item) == dict:
            raw = item[field]
        else:
            raw = getattr(item, field)
        # Strictly ints and floats only; any other type (including bool,
        # which is excluded by the exact type check) renders as "n/a".
        if type(raw) not in (int, float):
            return "n/a"
        return round(raw, ndigits) if ndigits else raw

    return _format_field
kwargs["metavar"] = "<%s>" % args[0].replace( "--", "").replace("-", "_") return func return _decorator def alias(command_name): """Allow cli to use alias command name instead of function name. :param command_name: desired command name """ def decorator(func): func.alias = command_name return func return decorator def deprecated_args(*args, **kwargs): def _decorator(func): if "release" not in kwargs: raise ValueError("'release' is required keyword argument of " "'deprecated_args' decorator.") func.__dict__.setdefault("args", []).insert(0, (args, kwargs)) func.__dict__.setdefault("deprecated_args", []) func.deprecated_args.append(args[0]) help_msg = "[Deprecated since Rally %s] " % kwargs.pop("release") if "alternative" in kwargs: help_msg += "Use '%s' instead. " % kwargs.pop("alternative") if "help" in kwargs: help_msg += kwargs["help"] kwargs["help"] = help_msg return func return _decorator def help_group(uuid): """Label cli method with specific group. Joining methods by groups allows to compose more user-friendly help messages in CLI. :param uuid: Name of group to find common methods. It will be used for sorting groups in help message, so you can start uuid with some number (i.e "1_launcher", "2_management") to put groups in proper order. Note: default group had "0" uuid. """ def wrapper(func): func.help_group = uuid return func return wrapper def _methods_of(cls): """Get all callable methods of a class that don't start with underscore. :returns: a list of tuples of the form (method_name, method) """ # The idea of unbound methods exists in Python 2 and was removed in # Python 3, so "inspect.ismethod" is used here for Python 2 and # "inspect.isfunction" for Python 3. 
all_methods = inspect.getmembers( cls, predicate=lambda x: inspect.ismethod(x) or inspect.isfunction(x)) methods = [m for m in all_methods if not m[0].startswith("_")] help_groups = {} for m in methods: group = getattr(m[1], "help_group", "0") help_groups.setdefault(group, []).append(m) if len(help_groups) > 1: # we should sort methods by groups methods = [] for group in sorted(help_groups.items(), key=lambda x: x[0]): if methods: # None -> empty line between groups methods.append((None, None)) methods.extend(group[1]) return methods def _compose_category_description(category): descr_pairs = _methods_of(category) description = "" doc = category.__doc__ if doc: description = doc.strip() if descr_pairs: description += "\n\nCommands:\n" sublen = lambda item: len(item[0]) if item[0] else 0 first_column_len = max(map(sublen, descr_pairs)) + MARGIN for item in descr_pairs: if item[0] is None: description += "\n" continue name = getattr(item[1], "alias", item[0].replace("_", "-")) if item[1].__doc__: doc = info.parse_docstring( item[1].__doc__)["short_description"] else: doc = "" name += " " * (first_column_len - len(name)) description += " %s%s\n" % (name, doc) return description def _compose_action_description(action_fn): description = "" if action_fn.__doc__: parsed_doc = info.parse_docstring(action_fn.__doc__) short = parsed_doc.get("short_description") long = parsed_doc.get("long_description") description = "%s\n\n%s" % (short, long) if long else short return description def _add_command_parsers(categories, subparsers): # INFO(oanufriev) This monkey patching makes our custom parser class to be # used instead of native. This affects all subparsers down from # 'subparsers' parameter of this function (categories and actions). 
def validate_deprecated_args(argv, fn):
    """Warn about every deprecated argument of *fn* present on the CLI.

    :param argv: full command line (sys.argv-style list of strings)
    :param fn: action function; its optional ``deprecated_args`` attribute
        lists the argument strings that should trigger a warning
    """
    # Only check when the command line actually invokes this action and
    # the action has deprecated arguments registered.
    if len(argv) <= 3 or argv[2] != fn.__name__:
        return
    deprecated = getattr(fn, "deprecated_args", None)
    if not deprecated:
        return
    passed_args = argv[3:]
    for arg_name in deprecated:
        if arg_name in passed_args:
            LOG.warning("Deprecated argument %s for %s." % (arg_name,
                                                            fn.__name__))
All plugins in the specified" " directories and subdirectories will be imported. Plugins in" " /opt/rally/plugins and ~/.rally/plugins will always be " "imported.") CONF.register_cli_opt(cfg.ListOpt("plugin-paths", default=os.environ.get( "RALLY_PLUGIN_PATHS"), help=help_msg)) try: rapi = api.API(config_args=argv[1:], skip_db_check=True) except exceptions.RallyException as e: print(e) return(2) if CONF.category.name == "version": print(CONF.version) return(0) if CONF.category.name == "bash-completion": print(_generate_bash_completion_script()) return(0) fn = CONF.category.action_fn fn_args = [encodeutils.safe_decode(arg) for arg in CONF.category.action_args] # api instance always is the first argument fn_args.insert(0, rapi) fn_kwargs = {} for k in CONF.category.action_kwargs: v = getattr(CONF.category, "action_kwarg_" + k) if v is None: continue if isinstance(v, six.string_types): v = encodeutils.safe_decode(v) fn_kwargs[k] = v # call the action with the remaining arguments # check arguments try: validate_args(fn, *fn_args, **fn_kwargs) except MissingArgs as e: # NOTE(mikal): this isn't the most helpful error message ever. It is # long, and tells you a lot of things you probably don't want to know # if you just got a single arg wrong. 
print(fn.__doc__) CONF.print_help() print("Missing arguments:") for missing in e.missing: for arg in fn.args: if arg[1].get("dest", "").endswith(missing): print(" " + arg[0][0]) break return(1) try: validate_deprecated_args(argv, fn) # skip db check for db and plugin commands if CONF.category.name not in ("db", "plugin"): rapi.check_db_revision() if getattr(fn, "_suppress_warnings", False): with warnings.catch_warnings(): warnings.simplefilter("ignore") ret = fn(*fn_args, **fn_kwargs) else: ret = fn(*fn_args, **fn_kwargs) return(ret) except (IOError, TypeError, ValueError, exceptions.RallyException, jsonschema.ValidationError) as e: if logging.is_debug(): LOG.exception(e) print(e) return 1 except sqlalchemy.exc.OperationalError as e: if logging.is_debug(): LOG.exception(e) print(e) print("Looks like Rally can't connect to its DB.") print("Make sure that connection string in rally.conf is proper:") print(CONF.database.connection) return 1 except Exception: print(_("Command failed, please check log for more info")) raise def _generate_bash_completion_script(): from rally.cli import main bash_data = """#!/bin/bash # Standalone _filedir() alternative. 
# This exempts from dependence of bash completion routines function _rally_filedir() { test "${1}" \\ && COMPREPLY=( \\ $(compgen -f -- "${cur}" | grep -E "${1}") \\ $(compgen -o plusdirs -- "${cur}") ) \\ || COMPREPLY=( \\ $(compgen -o plusdirs -f -- "${cur}") \\ $(compgen -d -- "${cur}") ) } _rally() { declare -A SUBCOMMANDS declare -A OPTS %(data)s for OPT in ${!OPTS[*]} ; do CMD=${OPT%%%%_*} CMDSUB=${OPT#*_} SUBCOMMANDS[${CMD}]+="${CMDSUB} " done COMMANDS="${!SUBCOMMANDS[*]}" COMPREPLY=() local cur="${COMP_WORDS[COMP_CWORD]}" local prev="${COMP_WORDS[COMP_CWORD-1]}" if [[ $cur =~ ^(\.|\~|\/) ]] || [[ $prev =~ ^--out(|put-file)$ ]] ; then _rally_filedir elif [[ $prev =~ ^--(task|filename)$ ]] ; then _rally_filedir "\\.json|\\.yaml|\\.yml" elif [ $COMP_CWORD == "1" ] ; then COMPREPLY=($(compgen -W "$COMMANDS" -- ${cur})) elif [ $COMP_CWORD == "2" ] ; then COMPREPLY=($(compgen -W "${SUBCOMMANDS[${prev}]}" -- ${cur})) else COMMAND="${COMP_WORDS[1]}_${COMP_WORDS[2]}" COMPREPLY=($(compgen -W "${OPTS[$COMMAND]}" -- ${cur})) fi return 0 } complete -o filenames -F _rally rally """ completion = [] for category, cmds in main.categories.items(): for name, command in _methods_of(cmds): if name is None: continue command_name = getattr(command, "alias", name.replace("_", "-")) args_list = [] for arg in getattr(command, "args", []): if getattr(command, "deprecated_args", []): if arg[0][0] not in command.deprecated_args: args_list.append(arg[0][0]) else: args_list.append(arg[0][0]) args = " ".join(args_list) completion.append(""" OPTS["{cat}_{cmd}"]="{args}"\n""".format( cat=category, cmd=command_name, args=args)) return bash_data % {"data": "".join(sorted(completion))} rally-0.9.1/rally/cli/commands/0000775000567000056710000000000013073420067017502 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/cli/commands/plugin.py0000664000567000056710000001061713073417716021366 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. 
class PluginCommands(object):
    """Set of commands that allow you to manage Rally plugins."""

    @staticmethod
    def _print_plugins_list(plugin_list):
        # Render the given plugins as a table with one row per plugin.
        formatters = {
            "Name": lambda p: p.get_name(),
            "Namespace": lambda p: p.get_namespace(),
            "Title": lambda p: p.get_info()["title"],
            "Plugin base": lambda p: p._get_base().__name__
        }

        cliutils.print_list(plugin_list, formatters=formatters,
                            normalize_field_names=True,
                            fields=["Plugin base", "Name", "Namespace",
                                    "Title"])

    @cliutils.args("--name", dest="name", type=str, help="Plugin name.")
    @cliutils.args("--namespace", dest="namespace", type=str,
                   help="Plugin namespace.")
    @plugins.ensure_plugins_are_loaded
    def show(self, api, name, namespace=None):
        """Show detailed information about a Rally plugin."""
        name_lw = name.lower()
        all_plugins = plugin.Plugin.get_all(namespace=namespace)
        # Substring match first; an exact name match wins if several
        # plugins contain the requested name.
        found = [p for p in all_plugins if name_lw in p.get_name().lower()]
        exact_match = [p for p in found if name_lw == p.get_name().lower()]

        if not found:
            if namespace:
                print(
                    "There is no plugin: %(name)s in %(namespace)s namespace"
                    % {"name": name, "namespace": namespace}
                )
            else:
                print("There is no plugin: %s" % name)

        elif len(found) == 1 or exact_match:
            plugin_ = found[0] if len(found) == 1 else exact_match[0]
            plugin_info = plugin_.get_info()
            print(cliutils.make_header(plugin_info["title"]))
            print("NAME\n\t%s" % plugin_info["name"])
            print("NAMESPACE\n\t%s" % plugin_info["namespace"])
            print("MODULE\n\t%s" % plugin_info["module"])
            if plugin_info["description"]:
                print("DESCRIPTION\n\t", end="")
                print("\n\t".join(plugin_info["description"].split("\n")))
            if plugin_info["parameters"]:
                print("PARAMETERS")
                rows = [utils.Struct(name=p["name"],
                                     description="%s\n" % p["doc"])
                        for p in plugin_info["parameters"]]
                cliutils.print_list(rows, fields=["name", "description"])
        else:
            # Multiple partial matches and no exact one: show them all.
            print("Multiple plugins found:")
            self._print_plugins_list(found)

    @cliutils.args(
        "--name", dest="name", type=str,
        help="List only plugins that match the given name.")
    @cliutils.args(
        "--namespace", dest="namespace", type=str,
        help="List only plugins that are in the specified namespace.")
    @cliutils.args(
        "--plugin-base", dest="base_cls", type=str,
        help="Plugin base class.")
    @plugins.ensure_plugins_are_loaded
    def list(self, api, name=None, namespace=None, base_cls=None):
        """List all Rally plugins that match name and namespace."""
        all_plugins = plugin.Plugin.get_all(namespace=namespace)
        matched = all_plugins
        if name:
            # Case-insensitive substring filter on the plugin name.
            name_lw = name.lower()
            matched = [p for p in all_plugins
                       if name_lw in p.get_name().lower()]

        if base_cls:
            matched = [p for p in matched
                       if p._get_base().__name__ == base_cls]

        if not all_plugins:
            print("There is no plugin namespace: %s" % namespace)
        elif not matched:
            print("There is no plugin: %s" % name)
        else:
            self._print_plugins_list(matched)
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Rally command: task""" from __future__ import print_function import json import os import sys import traceback import webbrowser import jsonschema from oslo_utils import uuidutils import six from six.moves.urllib import parse as urlparse from rally.cli import cliutils from rally.cli import envutils from rally.common import fileutils from rally.common.i18n import _ from rally.common.io import junit from rally.common import logging from rally.common import utils as rutils from rally.common import version from rally.common import yamlutils as yaml from rally import consts from rally import exceptions from rally import plugins from rally.task import exporter from rally.task.processing import plot LOG = logging.getLogger(__name__) class FailedToLoadTask(exceptions.RallyException): msg_fmt = _("Failed to load task") class TaskCommands(object): """Set of commands that allow you to manage benchmarking tasks and results. """ def _load_task(self, api, task_file, task_args=None, task_args_file=None): """Load tasks template from file and render it with passed args. :param task_file: Path to file with input task :param task_args: JSON or YAML representation of dict with args that will be used to render input task with jinja2 :param task_args_file: Path to file with JSON or YAML representation of dict, that will be used to render input with jinja2. If both specified task_args and task_args_file they will be merged. task_args has bigger priority so it will update values from task_args_file. 
:returns: Str with loaded and rendered task """ print(cliutils.make_header("Preparing input task")) def print_invalid_header(source_name, args): print(_("Invalid %(source)s passed: \n\n %(args)s \n") % {"source": source_name, "args": args}, file=sys.stderr) def parse_task_args(src_name, args): try: kw = args and yaml.safe_load(args) kw = {} if kw is None else kw except yaml.ParserError as e: print_invalid_header(src_name, args) print(_("%(source)s has to be YAML or JSON. Details:" "\n\n%(err)s\n") % {"source": src_name, "err": e}, file=sys.stderr) raise TypeError() if not isinstance(kw, dict): print_invalid_header(src_name, args) print(_("%(src)s has to be dict, actually %(src_type)s\n") % {"src": src_name, "src_type": type(kw)}, file=sys.stderr) raise TypeError() return kw try: kw = {} if task_args_file: with open(task_args_file) as f: kw.update(parse_task_args("task_args_file", f.read())) kw.update(parse_task_args("task_args", task_args)) except TypeError: raise FailedToLoadTask() with open(task_file) as f: try: input_task = f.read() task_dir = os.path.expanduser( os.path.dirname(task_file)) or "./" rendered_task = api.task.render_template(input_task, task_dir, **kw) except Exception as e: print(_("Failed to render task template:\n%(task)s\n%(err)s\n") % {"task": input_task, "err": e}, file=sys.stderr) raise FailedToLoadTask() print(_("Task is:\n%s\n") % rendered_task) try: parsed_task = yaml.safe_load(rendered_task) except Exception as e: print(_("Wrong format of rendered input task. 
It should be " "YAML or JSON.\n%s") % e, file=sys.stderr) raise FailedToLoadTask() print(_("Task syntax is correct :)")) return parsed_task def _load_and_validate_task(self, api, task, task_args, task_args_file, deployment, task_instance=None): try: input_task = self._load_task(api, task, task_args, task_args_file) except Exception as err: if task_instance: task_instance.set_validation_failed({ "etype": err.__class__.__name__, "msg": str(err), "trace": json.dumps(traceback.format_exc())}) raise api.task.validate(deployment, input_task, task_instance) print(_("Task config is valid :)")) return input_task @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of a deployment.") @cliutils.args("--task", "--filename", metavar="", help="Path to the input task file.") @cliutils.args("--task-args", metavar="", dest="task_args", help="Input task args (JSON dict). These args are used " "to render the Jinja2 template in the input task.") @cliutils.args("--task-args-file", metavar="", dest="task_args_file", help="Path to the file with input task args (dict in " "JSON/YAML). These args are used " "to render the Jinja2 template in the input task.") @envutils.with_default_deployment(cli_arg_name="deployment") @plugins.ensure_plugins_are_loaded def validate(self, api, task, deployment=None, task_args=None, task_args_file=None): """Validate a task configuration file. This will check that task configuration file has valid syntax and all required options of scenarios, contexts, SLA and runners are set. If both task_args and task_args_file are specified, they will be merged. task_args has a higher priority so it will override values from task_args_file. :param task: Path to the input task file. :param task_args: Input task args (JSON dict). These args are used to render the Jinja2 template in the input task. :param task_args_file: Path to the file with input task args (dict in JSON/YAML). 
These args are used to render the Jinja2 template in the input task. :param deployment: UUID or name of the deployment """ try: self._load_and_validate_task(api, task, task_args, task_args_file, deployment) except (exceptions.InvalidTaskException, FailedToLoadTask) as e: print(e, file=sys.stderr) return(1) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of a deployment.") @cliutils.args("--task", "--filename", metavar="", help="Path to the input task file") @cliutils.args("--task-args", dest="task_args", metavar="", help="Input task args (JSON dict). These args are used " "to render the Jinja2 template in the input task.") @cliutils.args("--task-args-file", dest="task_args_file", metavar="", help="Path to the file with input task args (dict in " "JSON/YAML). These args are used " "to render the Jinja2 template in the input task.") @cliutils.args("--tag", help="Tag for this task") @cliutils.args("--no-use", action="store_false", dest="do_use", help="Don't set new task as default for future operations.") @cliutils.args("--abort-on-sla-failure", action="store_true", dest="abort_on_sla_failure", help="Abort the execution of a benchmark scenario when" "any SLA check for it fails.") @envutils.with_default_deployment(cli_arg_name="deployment") @plugins.ensure_plugins_are_loaded def start(self, api, task, deployment=None, task_args=None, task_args_file=None, tag=None, do_use=False, abort_on_sla_failure=False): """Start benchmark task. If both task_args and task_args_file are specified, they will be merged. task_args has a higher priority so it will override values from task_args_file. :param task: Path to the input task file. :param task_args: Input task args (JSON dict). These args are used to render the Jinja2 template in the input task. :param task_args_file: Path to the file with input task args (dict in JSON/YAML). These args are used to render the Jinja2 template in the input task. 
:param deployment: UUID or name of the deployment :param tag: optional tag for this task :param do_use: if True, the new task will be stored as the default one for future operations :param abort_on_sla_failure: if True, the execution of a benchmark scenario will stop when any SLA check for it fails """ try: task_instance = api.task.create(deployment, tag) print("Running Rally version", version.version_string()) input_task = self._load_and_validate_task( api, task, task_args, task_args_file, deployment, task_instance=task_instance) print(cliutils.make_header( _("Task %(tag)s %(uuid)s: started") % {"uuid": task_instance["uuid"], "tag": task_instance["tag"]})) print("Benchmarking... This can take a while...\n") print("To track task status use:\n") print("\trally task status\n\tor\n\trally task detailed\n") if do_use: self.use(api, task_instance["uuid"]) api.task.start(deployment, input_task, task=task_instance, abort_on_sla_failure=abort_on_sla_failure) self.detailed(api, task_id=task_instance["uuid"]) except exceptions.DeploymentNotFinishedStatus as e: print(_("Cannot start a task on unfinished deployment: %s") % e) return 1 except (exceptions.InvalidTaskException, FailedToLoadTask) as e: task_instance.set_validation_failed({ "etype": type(e).__name__, "msg": str(e), "trace": json.dumps(traceback.format_exc())}) print(e, file=sys.stderr) return(1) @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.") @envutils.with_default_task_id @cliutils.args( "--soft", action="store_true", help="Abort task after current scenario finishes execution.") def abort(self, api, task_id=None, soft=False): """Abort a running benchmarking task. :param task_id: Task uuid :param soft: if set to True, task should be aborted after execution of current scenario """ if soft: print("INFO: please be informed that soft abort won't stop " "a running scenario, but will prevent new ones from " "starting. 
If you are running task with only one " "scenario, soft abort will not help at all.") api.task.abort(task_id, soft, async=False) print("Task %s successfully stopped." % task_id) @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task") @envutils.with_default_task_id def status(self, api, task_id=None): """Display the current status of a task. :param task_id: Task uuid Returns current status of task """ task = api.task.get(task_id) print(_("Task %(task_id)s: %(status)s") % {"task_id": task_id, "status": task["status"]}) @cliutils.args("--uuid", type=str, dest="task_id", help=("UUID of task. If --uuid is \"last\" the results of " " the most recently created task will be displayed.")) @cliutils.args("--iterations-data", dest="iterations_data", action="store_true", help="Print detailed results for each iteration.") @envutils.with_default_task_id def detailed(self, api, task_id=None, iterations_data=False): """Print detailed information about given task. :param task_id: str, task uuid :param iterations_data: bool, include results for each iteration """ task = api.task.get_detailed(task_id, extended_results=True) if not task: print("The task %s can not be found" % task_id) return 1 print() print("-" * 80) print(_("Task %(task_id)s: %(status)s") % {"task_id": task_id, "status": task["status"]}) if task["status"] == consts.TaskStatus.CRASHED or task["status"] == ( consts.TaskStatus.VALIDATION_FAILED): print("-" * 80) verification = yaml.safe_load(task["verification_log"]) if logging.is_debug(): print(yaml.safe_load(verification["trace"])) else: print(verification["etype"]) print(verification["msg"]) print(_("\nFor more details run:\nrally -d task detailed %s") % task["uuid"]) return 0 elif task["status"] not in [consts.TaskStatus.FINISHED, consts.TaskStatus.ABORTED]: print("-" * 80) print(_("\nThe task %s marked as '%s'. 
Results " "available when it is '%s'.") % ( task_id, task["status"], consts.TaskStatus.FINISHED)) return 0 for result in task["results"]: key = result["key"] print("-" * 80) print() print("test scenario %s" % key["name"]) print("args position %s" % key["pos"]) print("args values:") print(json.dumps(key["kw"], indent=2)) print() iterations = [] iterations_headers = ["iteration", "duration"] iterations_actions = [] output = [] task_errors = [] if iterations_data: for i, atomic_name in enumerate(result["info"]["atomic"], 1): action = "%i. %s" % (i, atomic_name) iterations_headers.append(action) iterations_actions.append((atomic_name, action)) for idx, itr in enumerate(result["iterations"], 1): if iterations_data: row = {"iteration": idx, "duration": itr["duration"]} for name, action in iterations_actions: row[action] = itr["atomic_actions"].get(name, 0) iterations.append(row) if "output" in itr: iteration_output = itr["output"] else: iteration_output = {"additive": [], "complete": []} # NOTE(amaretskiy): "scenario_output" is supported # for backward compatibility if ("scenario_output" in itr and itr["scenario_output"]["data"]): iteration_output["additive"].append( {"data": itr["scenario_output"]["data"].items(), "title": "Scenario output", "description": "", "chart_plugin": "StackedArea"}) for idx, additive in enumerate(iteration_output["additive"]): if len(output) <= idx + 1: output_table = plot.charts.OutputStatsTable( result["info"], title=additive["title"]) output.append(output_table) output[idx].add_iteration(additive["data"]) if itr.get("error"): task_errors.append(TaskCommands._format_task_error(itr)) self._print_task_errors(task_id, task_errors) cols = plot.charts.MainStatsTable.columns float_cols = result["info"]["stat"]["cols"][1:7] formatters = dict(zip(float_cols, [cliutils.pretty_float_formatter(col, 3) for col in float_cols])) rows = [dict(zip(cols, r)) for r in result["info"]["stat"]["rows"]] cliutils.print_list(rows, fields=cols, formatters=formatters, 
table_label="Response Times (sec)", sortby_index=None) print() if iterations_data: formatters = dict(zip(iterations_headers[1:], [cliutils.pretty_float_formatter(col, 3) for col in iterations_headers[1:]])) cliutils.print_list(iterations, fields=iterations_headers, table_label="Atomics per iteration", formatters=formatters) print() if output: cols = plot.charts.OutputStatsTable.columns float_cols = cols[1:7] formatters = dict(zip(float_cols, [cliutils.pretty_float_formatter(col, 3) for col in float_cols])) for out in output: data = out.render() rows = [dict(zip(cols, r)) for r in data["data"]["rows"]] if rows: # NOTE(amaretskiy): print title explicitly because # prettytable fails if title length is too long print(data["title"]) cliutils.print_list(rows, fields=cols, formatters=formatters) print() print(_("Load duration: %s") % rutils.format_float_to_str(result["info"]["load_duration"])) print(_("Full duration: %s") % rutils.format_float_to_str(result["info"]["full_duration"])) print("\nHINTS:") print(_("* To plot HTML graphics with this data, run:")) print("\trally task report %s --out output.html\n" % task["uuid"]) print(_("* To generate a JUnit report, run:")) print("\trally task report %s --junit --out output.xml\n" % task["uuid"]) print(_("* To get raw JSON output of task results, run:")) print("\trally task results %s\n" % task["uuid"]) @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.") @envutils.with_default_task_id @cliutils.suppress_warnings def results(self, api, task_id=None): """Display raw task results. This will produce a lot of output data about every iteration. :param task_id: Task uuid """ task = api.task.get(task_id) finished_statuses = (consts.TaskStatus.FINISHED, consts.TaskStatus.ABORTED) if task["status"] not in finished_statuses: print(_("Task status is %s. 
Results available when it is one " "of %s.") % (task["status"], ", ".join(finished_statuses))) return 1 results = [{"key": x["key"], "result": x["data"]["raw"], "sla": x["data"]["sla"], "hooks": x["data"].get("hooks", []), "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x.get("created_at").strftime( "%Y-%d-%mT%H:%M:%S")} for x in task.get_results()] print(json.dumps(results, sort_keys=False, indent=4)) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of a deployment.") @cliutils.args("--all-deployments", action="store_true", dest="all_deployments", help="List tasks from all deployments.") @cliutils.args("--status", type=str, dest="status", help="List tasks with specified status." " Available statuses: %s" % ", ".join(consts.TaskStatus)) @cliutils.args("--uuids-only", action="store_true", dest="uuids_only", help="List task UUIDs only.") @envutils.with_default_deployment(cli_arg_name="deployment") def list(self, api, deployment=None, all_deployments=False, status=None, uuids_only=False): """List tasks, started and finished. Displayed tasks can be filtered by status or deployment. By default 'rally task list' will display tasks from the active deployment without filtering by status. :param deployment: UUID or name of deployment :param status: task status to filter by. 
Available task statuses are in rally.consts.TaskStatus :param all_deployments: display tasks from all deployments :param uuids_only: list task UUIDs only """ filters = {} headers = ["uuid", "deployment_name", "created_at", "duration", "status", "tag"] if status in consts.TaskStatus: filters.setdefault("status", status) elif status: print(_("Error: Invalid task status '%s'.\n" "Available statuses: %s") % ( status, ", ".join(consts.TaskStatus)), file=sys.stderr) return(1) if not all_deployments: filters.setdefault("deployment", deployment) task_list = [task.to_dict() for task in api.task.list(**filters)] for x in task_list: x["duration"] = x["updated_at"] - x["created_at"] if uuids_only: if task_list: cliutils.print_list(task_list, ["uuid"], print_header=False, print_border=False) elif task_list: cliutils.print_list( task_list, headers, sortby_index=headers.index("created_at")) else: if status: print(_("There are no tasks in '%s' status. " "To run a new task, use:\n" "\trally task start") % status) else: print(_("There are no tasks. 
To run a new task, use:\n" "\trally task start")) @cliutils.args("--out", metavar="", type=str, dest="out", required=False, help="Path to output file.") @cliutils.args("--open", dest="open_it", action="store_true", help="Open the output in a browser.") @cliutils.args("--tasks", dest="tasks", nargs="+", help="UUIDs of tasks, or JSON files with task results") @cliutils.suppress_warnings def trends(self, api, *args, **kwargs): """Generate workloads trends HTML report.""" tasks = kwargs.get("tasks", []) or list(args) if not tasks: print(_("ERROR: At least one task must be specified"), file=sys.stderr) return 1 results = [] for task_id in tasks: if os.path.exists(os.path.expanduser(task_id)): with open(os.path.expanduser(task_id), "r") as inp_js: task_results = json.load(inp_js) for result in task_results: try: jsonschema.validate( result, api.task.TASK_RESULT_SCHEMA) except jsonschema.ValidationError as e: print(_("ERROR: Invalid task result format in %s") % task_id, file=sys.stderr) print(six.text_type(e), file=sys.stderr) return 1 elif uuidutils.is_uuid_like(task_id): task_results = map( lambda x: {"key": x["key"], "sla": x["data"]["sla"], "hooks": x["data"].get("hooks", []), "result": x["data"]["raw"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"]}, api.task.get(task_id).get_results()) else: print(_("ERROR: Invalid UUID or file name passed: %s") % task_id, file=sys.stderr) return 1 results.extend(task_results) result = plot.trends(results) out = kwargs.get("out") if out: output_file = os.path.expanduser(out) with open(output_file, "w+") as f: f.write(result) if kwargs.get("open_it"): webbrowser.open_new_tab("file://" + os.path.realpath(out)) else: print(result) @cliutils.args("--tasks", dest="tasks", nargs="+", help="UUIDs of tasks, or JSON files with task results") @cliutils.args("--out", metavar="", type=str, dest="out", required=False, help="Path to output file.") @cliutils.args("--open", dest="open_it", 
action="store_true", help="Open the output in a browser.") @cliutils.args("--html", dest="out_format", action="store_const", const="html", help="Generate the report in HTML.") @cliutils.args("--html-static", dest="out_format", action="store_const", const="html_static", help=("Generate the report in HTML with embedded " "JS and CSS, so it will not depend on " "Internet availability.")) @cliutils.args("--junit", dest="out_format", action="store_const", const="junit", help="Generate the report in the JUnit format.") @envutils.default_from_global("tasks", envutils.ENV_TASK, "tasks") @cliutils.suppress_warnings def report(self, api, tasks=None, out=None, open_it=False, out_format="html"): """Generate report file for specified task. :param task_id: UUID, task identifier :param tasks: list, UUIDs od tasks or pathes files with tasks results :param out: str, output file name :param open_it: bool, whether to open output file in web browser :param out_format: output format (junit, html or html_static) """ tasks = isinstance(tasks, list) and tasks or [tasks] results = [] message = [] processed_names = {} for task_file_or_uuid in tasks: if os.path.exists(os.path.expanduser(task_file_or_uuid)): with open(os.path.expanduser(task_file_or_uuid), "r") as inp_js: tasks_results = json.load(inp_js) for result in tasks_results: try: jsonschema.validate( result, api.task.TASK_RESULT_SCHEMA) except jsonschema.ValidationError as e: print(_("ERROR: Invalid task result format in %s") % task_file_or_uuid, file=sys.stderr) print(six.text_type(e), file=sys.stderr) return 1 elif uuidutils.is_uuid_like(task_file_or_uuid): tasks_results = map( lambda x: {"key": x["key"], "sla": x["data"]["sla"], "hooks": x["data"].get("hooks", []), "result": x["data"]["raw"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x["created_at"]}, api.task.get(task_file_or_uuid).get_results()) else: print(_("ERROR: Invalid UUID or file name passed: %s" ) % 
task_file_or_uuid, file=sys.stderr) return 1 for task_result in tasks_results: if task_result["key"]["name"] in processed_names: processed_names[task_result["key"]["name"]] += 1 task_result["key"]["pos"] = processed_names[ task_result["key"]["name"]] else: processed_names[task_result["key"]["name"]] = 0 results.append(task_result) if out_format.startswith("html"): result = plot.plot(results, include_libs=(out_format == "html_static")) elif out_format == "junit": test_suite = junit.JUnit("Rally test suite") for result in results: if isinstance(result["sla"], list): message = ",".join([sla["detail"] for sla in result["sla"] if not sla["success"]]) if message: outcome = junit.JUnit.FAILURE else: outcome = junit.JUnit.SUCCESS test_suite.add_test(result["key"]["name"], result["full_duration"], outcome, message) result = test_suite.to_xml() else: print(_("Invalid output format: %s") % out_format, file=sys.stderr) return 1 if out: output_file = os.path.expanduser(out) with open(output_file, "w+") as f: f.write(result) if open_it: webbrowser.open_new_tab("file://" + os.path.realpath(out)) else: print(result) @cliutils.args("--force", action="store_true", help="force delete") @cliutils.args("--uuid", type=str, dest="task_id", nargs="*", metavar="", help="UUID of task or a list of task UUIDs.") @envutils.with_default_task_id def delete(self, api, task_id=None, force=False): """Delete task and its results. 
:param task_id: Task uuid or a list of task uuids :param force: Force delete or not """ def _delete_single_task(tid, force): try: api.task.delete(tid, force=force) print("Successfully deleted task `%s`" % tid) except exceptions.TaskInvalidStatus as e: print(e) print("Use '--force' option to delete the task with vague " "state.") if isinstance(task_id, list): for tid in task_id: _delete_single_task(tid, force) else: _delete_single_task(task_id, force) @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.") @cliutils.args("--json", dest="tojson", action="store_true", help="Output in JSON format.") @envutils.with_default_task_id @cliutils.alias("sla_check") def sla_check_deprecated(self, api, task_id=None, tojson=False): """DEPRECATED since Rally 0.8.0, use `rally task sla-check` instead.""" return self.sla_check(api, task_id=task_id, tojson=tojson) @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of task.") @cliutils.args("--json", dest="tojson", action="store_true", help="Output in JSON format.") @envutils.with_default_task_id def sla_check(self, api, task_id=None, tojson=False): """Display SLA check results table. :param task_id: Task uuid. :returns: Number of failed criteria. 
""" results = api.task.get(task_id).get_results() failed_criteria = 0 data = [] STATUS_PASS = "PASS" STATUS_FAIL = "FAIL" for result in results: key = result["key"] for sla in sorted(result["data"]["sla"], key=lambda x: x["criterion"]): success = sla.pop("success") sla["status"] = success and STATUS_PASS or STATUS_FAIL sla["benchmark"] = key["name"] sla["pos"] = key["pos"] failed_criteria += int(not success) data.append(sla if tojson else rutils.Struct(**sla)) if tojson: print(json.dumps(data, sort_keys=False)) else: cliutils.print_list(data, ("benchmark", "pos", "criterion", "status", "detail")) return failed_criteria @cliutils.args("--uuid", type=str, dest="task_id", help="UUID of the task") @cliutils.deprecated_args("--task", dest="task_id", type=str, release="0.2.0", alternative="--uuid") def use(self, api, task_id): """Set active task. :param task_id: Task uuid. """ print("Using task: %s" % task_id) api.task.get(task_id) fileutils.update_globals_file("RALLY_TASK", task_id) @cliutils.args("--uuid", dest="uuid", type=str, required=True, help="UUID of a the task.") @cliutils.args("--connection", dest="connection_string", type=str, required=True, help="Connection url to the task export system.") @plugins.ensure_plugins_are_loaded def export(self, api, uuid, connection_string): """Export task results to the custom task's exporting system. :param uuid: UUID of the task :param connection_string: string used to connect to the system """ parsed_obj = urlparse.urlparse(connection_string) try: client = exporter.Exporter.get(parsed_obj.scheme)( connection_string) except exceptions.InvalidConnectionString as e: if logging.is_debug(): LOG.exception(e) print(e) return 1 except exceptions.PluginNotFound as e: if logging.is_debug(): LOG.exception(e) msg = ("\nPlease check your connection string. 
The format of " "`connection` should be plugin-name://" ":@:/.") print(str(e) + msg) return 1 try: client.export(uuid) except (IOError, exceptions.RallyException) as e: if logging.is_debug(): LOG.exception(e) print(e) return 1 print(_("Task %(uuid)s results was successfully exported to %(" "connection)s using %(name)s plugin.") % { "uuid": uuid, "connection": connection_string, "name": parsed_obj.scheme }) @staticmethod def _print_task_errors(task_id, task_errors): print(cliutils.make_header("Task %s has %d error(s)" % (task_id, len(task_errors)))) for err_data in task_errors: print(*err_data, sep="\n") print("-" * 80) @staticmethod def _format_task_error(data): error_type = _("Unknown type") error_message = _("Rally hasn't caught anything yet") error_traceback = _("No traceback available.") try: error_type = data["error"][0] error_message = data["error"][1] error_traceback = data["error"][2] except IndexError: pass return ("%(error_type)s: %(error_message)s\n" % {"error_type": error_type, "error_message": error_message}, error_traceback) rally-0.9.1/rally/cli/commands/verify.py0000664000567000056710000011413713073417720021371 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Rally command: verify""" from __future__ import print_function import json import os import webbrowser from six.moves import configparser from rally.cli import cliutils from rally.cli import envutils from rally.common import fileutils from rally.common.i18n import _ from rally.common import logging from rally.common import yamlutils as yaml from rally import exceptions from rally import plugins LIST_VERIFIERS_HINT = ("HINT: You can list all verifiers, executing " "command `rally verify list-verifiers`.") LIST_DEPLOYMENTS_HINT = ("HINT: You can list all deployments, executing " "command `rally deployment list`.") LIST_VERIFICATIONS_HINT = ("HINT: You can list all verifications, executing " "command `rally verify list`.") DEFAULT_REPORT_TYPES = ("HTML", "HTML-Static", "JSON", "JUnit-XML") class VerifyCommands(object): """Verify an OpenStack cloud via a verifier.""" @staticmethod def _print_totals(totals): print("\n======\n" "Totals" "\n======\n" "\nRan: %(tests_count)s tests in %(tests_duration)s sec.\n" " - Success: %(success)s\n" " - Skipped: %(skipped)s\n" " - Expected failures: %(expected_failures)s\n" " - Unexpected success: %(unexpected_success)s\n" " - Failures: %(failures)s\n" % totals) @staticmethod def _print_failures(h_text, failures, symbol="-"): print("\n%s" % cliutils.make_header( h_text, size=len(h_text), symbol=symbol).strip()) for f in failures: header = "%s\n%s\n" % (f["name"], "-" * len(f["name"])) failure = "\n%s%s\n" % (header, f["traceback"].strip()) print(failure) @cliutils.args("--namespace", dest="namespace", type=str, metavar="", required=False, help="Namespace name (for example, openstack).") @plugins.ensure_plugins_are_loaded def list_plugins(self, api, namespace=None): """List all plugins for verifiers management.""" if namespace: namespace = namespace.lower() verifier_plugins = api.verifier.list_plugins(namespace) fields = ["Plugin name", "Namespace", "Description"] if logging.is_debug(): fields.append("Location") 
cliutils.print_list(verifier_plugins, fields, formatters={"Plugin name": lambda p: p["name"]}, normalize_field_names=True) @cliutils.help_group("verifier") @cliutils.args("--name", dest="name", type=str, required=True, help="Verifier name (for example, 'My verifier').") @cliutils.args("--type", dest="vtype", type=str, required=True, help="Verifier plugin name. HINT: You can list all " "verifier plugins, executing command `rally verify " "list-plugins`.") @cliutils.args("--namespace", dest="namespace", type=str, metavar="", required=False, help="Verifier plugin namespace. Should be specified in " "case of two verifier plugins with equal names but " "in different namespaces.") @cliutils.args("--source", dest="source", type=str, required=False, help="Path or URL to the repo to clone verifier from.") @cliutils.args("--version", dest="version", type=str, required=False, help="Branch, tag or commit ID to checkout before " "verifier installation (the 'master' branch is used " "by default).") @cliutils.args("--system-wide", dest="system_wide", action="store_true", required=False, help="Use the system-wide environment for verifier instead " "of a virtual environment.") @cliutils.args("--extra-settings", dest="extra", type=str, required=False, help="Extra installation settings for verifier.") @cliutils.args("--no-use", dest="do_use", action="store_false", help="Not to set the created verifier as the default " "verifier for future operations.") @plugins.ensure_plugins_are_loaded def create_verifier(self, api, name, vtype, namespace="", source=None, version=None, system_wide=False, extra=None, do_use=True): """Create a verifier.""" verifier_uuid = api.verifier.create( name, vtype=vtype, namespace=namespace, source=source, version=version, system_wide=system_wide, extra_settings=extra) if do_use: self.use_verifier(api, verifier_uuid) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, required=True, help="Verifier name or UUID. 
" + LIST_VERIFIERS_HINT) def use_verifier(self, api, verifier_id): """Choose a verifier to use for the future operations.""" verifier = api.verifier.get(verifier_id) fileutils.update_globals_file(envutils.ENV_VERIFIER, verifier.uuid) print(_("Using verifier '%s' (UUID=%s) as the default verifier " "for the future operations.") % (verifier.name, verifier.uuid)) @cliutils.help_group("verifier") @cliutils.args("--status", dest="status", type=str, required=False, help="Status to filter verifiers by.") @plugins.ensure_plugins_are_loaded def list_verifiers(self, api, status=None): """List all verifiers.""" verifiers = api.verifier.list(status) if verifiers: fields = ["UUID", "Name", "Type", "Namespace", "Created at", "Updated at", "Status", "Version", "System-wide", "Active"] cv = envutils.get_global(envutils.ENV_VERIFIER) formatters = { "Created at": lambda v: v.created_at.replace(microsecond=0), "Updated at": lambda v: v.updated_at.replace(microsecond=0), "Active": lambda v: u"\u2714" if v.uuid == cv else "", } cliutils.print_list(verifiers, fields, formatters=formatters, normalize_field_names=True, sortby_index=4) elif status: print(_("There are no verifiers with status '%s'.") % status) else: print(_("There are no verifiers. You can create verifier, using " "command `rally verify create-verifier`.")) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. 
" + LIST_VERIFIERS_HINT) @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def show_verifier(self, api, verifier_id=None): """Show detailed information about a verifier.""" verifier = api.verifier.get(verifier_id) fields = ["UUID", "Status", "Created at", "Updated at", "Active", "Name", "Description", "Type", "Namespace", "Source", "Version", "System-wide", "Extra settings", "Location"] used_verifier = envutils.get_global(envutils.ENV_VERIFIER) formatters = { "Created at": lambda v: v.created_at.replace(microsecond=0), "Updated at": lambda v: v.updated_at.replace(microsecond=0), "Active": lambda v: u"\u2714" if v.uuid == used_verifier else None, "Extra settings": lambda v: (json.dumps(v.extra_settings, indent=4) if v.extra_settings else None), "Location": lambda v: v.manager.repo_dir } if not verifier.system_wide: fields.append("Venv location") formatters["Venv location"] = lambda v: v.manager.venv_dir cliutils.print_dict(verifier, fields=fields, formatters=formatters, normalize_field_names=True, print_header=False, table_label="Verifier") print(_("Attention! All you do in the verifier repository or " "verifier virtual environment, you do it at your own risk!")) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, required=True, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", required=False, help="Deployment name or UUID. If specified, only the " "deployment-specific data will be deleted for " "verifier. " + LIST_DEPLOYMENTS_HINT) @cliutils.args("--force", dest="force", action="store_true", required=False, help="Delete all stored verifications of the specified " "verifier. If a deployment specified, only " "verifications of this deployment will be deleted. " "Use this argument carefully! 
You can delete " "verifications that may be important to you.") @plugins.ensure_plugins_are_loaded def delete_verifier(self, api, verifier_id, deployment=None, force=False): """Delete a verifier.""" api.verifier.delete(verifier_id, deployment, force) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--update-venv", dest="update_venv", action="store_true", required=False, help="Update the virtual environment for verifier.") @cliutils.args("--version", dest="version", type=str, required=False, help="Branch, tag or commit ID to checkout. HINT: Specify " "the same version to pull the latest repo code.") @cliutils.args("--system-wide", dest="system_wide", action="store_true", required=False, help="Switch to using the system-wide environment.") @cliutils.args("--no-system-wide", dest="no_system_wide", action="store_true", required=False, help="Switch to using the virtual environment. " "If the virtual environment doesn't exist, " "it will be created.") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def update_verifier(self, api, verifier_id=None, version=None, system_wide=None, no_system_wide=None, update_venv=None): """Update a verifier.""" if not (version or system_wide or no_system_wide or update_venv): print(_("At least one of the following arguments should be " "provided: '--update-venv', '--version', '--system-wide', " "'--no-system-wide'.")) return 1 msg = _("Arguments '--%s' and '--%s' cannot be used simultaneously. 
" "You can use only one of the mentioned arguments.") if update_venv and system_wide: print(msg % ("update-venv", "system-wide")) return 1 if system_wide and no_system_wide: print(msg % ("system-wide", "no-system-wide")) return 1 system_wide = False if no_system_wide else (system_wide or None) api.verifier.update(verifier_id, system_wide=system_wide, version=version, update_venv=update_venv) print(_("HINT: In some cases the verifier config file should be " "updated as well. Use `rally verify configure-verifier` " "command to update the config file.")) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", help="Deployment name or UUID. " + LIST_DEPLOYMENTS_HINT) @cliutils.args("--reconfigure", dest="reconfigure", action="store_true", required=False, help="Reconfigure verifier.") @cliutils.args("--extend", dest="extra_options", type=str, metavar="", required=False, help="Extend verifier configuration with extra options. " "If options are already present, the given ones will " "override them. Can be a path to a regular config " "file or just a json/yaml.") @cliutils.args("--override", dest="new_configuration", type=str, metavar="", required=False, help="Override verifier configuration by another one " "from a given source.") @cliutils.args("--show", dest="show", action="store_true", required=False, help="Show verifier configuration.") @envutils.with_default_deployment(cli_arg_name="deployment-id") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def configure_verifier(self, api, verifier_id=None, deployment=None, reconfigure=False, extra_options=None, new_configuration=None, show=False): """Configure a verifier for a specific deployment.""" # TODO(ylobankov): Add an ability to read extra options from # a json or yaml file. 
if new_configuration and (extra_options or reconfigure): print(_("Argument '--override' cannot be used with arguments " "'--reconfigure' and '--extend'.")) return 1 if new_configuration: if not os.path.exists(new_configuration): print(_("File '%s' not found.") % new_configuration) return 1 with open(new_configuration) as f: config = f.read() api.verifier.override_configuration(verifier_id, deployment, config) else: if extra_options: if os.path.isfile(extra_options): conf = configparser.ConfigParser() conf.read(extra_options) extra_options = dict(conf._sections) for s in extra_options: extra_options[s] = dict(extra_options[s]) extra_options[s].pop("__name__", None) defaults = dict(conf.defaults()) if defaults: extra_options["DEFAULT"] = dict(conf.defaults()) else: extra_options = yaml.safe_load(extra_options) config = api.verifier.configure(verifier_id, deployment, extra_options=extra_options, reconfigure=reconfigure) if show: print("\n%s\n" % config.strip()) @cliutils.help_group("verifier") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--pattern", dest="pattern", type=str, required=False, help="Pattern which will be used for matching. Can be a " "regexp or a verifier-specific entity (for example, " "in case of Tempest you can specify 'set=smoke').") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def list_verifier_tests(self, api, verifier_id=None, pattern=""): """List all verifier tests.""" tests = api.verifier.list_tests(verifier_id, pattern) if tests: for test in tests: print(test) else: print(_("No tests found.")) @cliutils.help_group("verifier-ext") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. 
" + LIST_VERIFIERS_HINT) @cliutils.args("--source", dest="source", type=str, required=True, help="Path or URL to the repo to clone verifier " "extension from.") @cliutils.args("--version", dest="version", type=str, required=False, help="Branch, tag or commit ID to checkout before " "installation of the verifier extension (the " "'master' branch is used by default).") @cliutils.args("--extra-settings", dest="extra", type=str, required=False, help="Extra installation settings for verifier extension.") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def add_verifier_ext(self, api, verifier_id=None, source=None, version=None, extra=None): """Add a verifier extension.""" api.verifier.add_extension(verifier_id, source=source, version=version, extra_settings=extra) @cliutils.help_group("verifier-ext") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def list_verifier_exts(self, api, verifier_id=None): """List all verifier extensions.""" verifier_exts = api.verifier.list_extensions(verifier_id) if verifier_exts: fields = ["Name", "Entry point"] if logging.is_debug(): fields.append("Location") cliutils.print_list(verifier_exts, fields, normalize_field_names=True) else: print(_("There are no verifier extensions. You can add " "verifier extension, using command `rally verify " "add-verifier-ext`.")) def _print_details_after_run(self, results): failures = results.filter_tests("fail").values() if failures: h_text = "Failed %d %s - output below:" % ( len(failures), "tests" if len(failures) > 1 else "test") self._print_failures(h_text, failures, "=") else: print(_("\nCongratulations! Verification doesn't have failed " "tests! :)")) @cliutils.help_group("verifier-ext") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. 
" + LIST_VERIFIERS_HINT) @cliutils.args("--name", type=str, required=True, help="Verifier extension name.") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def delete_verifier_ext(self, api, verifier_id=None, name=None): """Delete a verifier extension.""" api.verifier.delete_extension(verifier_id, name) @cliutils.help_group("verification") @cliutils.args("--id", dest="verifier_id", type=str, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", help="Deployment name or UUID. " + LIST_DEPLOYMENTS_HINT) @cliutils.args("--tag", nargs="+", dest="tags", type=str, required=False, help="Mark verification with a tag or a few tags.") @cliutils.args("--pattern", dest="pattern", type=str, required=False, help="Pattern which will be used for running tests. Can be " "a regexp or a verifier-specific entity (for example, " "in case of Tempest you can specify 'set=smoke').") @cliutils.args("--concurrency", dest="concur", type=int, metavar="", required=False, help="How many processes to be used for running verifier " "tests. The default value (0) auto-detects your CPU " "count.") @cliutils.args("--load-list", dest="load_list", type=str, metavar="", required=False, help="Path to a file with a list of tests to run.") @cliutils.args("--skip-list", dest="skip_list", type=str, metavar="", required=False, help="Path to a file with a list of tests to skip. " "Format: json or yaml like a dictionary where keys " "are test names and values are reasons.") @cliutils.args("--xfail-list", dest="xfail_list", type=str, metavar="", required=False, help="Path to a file with a list of tests that will be " "considered as expected failures. 
" "Format: json or yaml like a dictionary where keys " "are test names and values are reasons.") @cliutils.args("--detailed", dest="detailed", action="store_true", required=False, help="Show verification details such as errors of failed " "tests.") @cliutils.args("--no-use", dest="do_use", action="store_false", help="Not to set the finished verification as the default " "verification for future operations.") @envutils.with_default_deployment(cli_arg_name="deployment-id") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def start(self, api, verifier_id=None, deployment=None, tags=None, pattern=None, concur=0, load_list=None, skip_list=None, xfail_list=None, detailed=False, do_use=True): """Start a verification (run verifier tests).""" if pattern and load_list: print(_("Arguments '--pattern' and '--load-list' cannot be used " "simultaneously. You can use only one of the mentioned " "arguments.")) return 1 def parse(filename): with open(filename, "r") as f: return yaml.safe_load(f.read()) if load_list: if not os.path.exists(load_list): print(_("File '%s' not found.") % load_list) return 1 with open(load_list, "r") as f: load_list = [test for test in f.read().split("\n") if test] if skip_list: if not os.path.exists(skip_list): print(_("File '%s' not found.") % skip_list) return 1 skip_list = parse(skip_list) if xfail_list: if not os.path.exists(xfail_list): print(_("File '%s' not found.") % xfail_list) return 1 xfail_list = parse(xfail_list) run_args = {key: value for key, value in ( ("pattern", pattern), ("load_list", load_list), ("skip_list", skip_list), ("xfail_list", xfail_list), ("concurrency", concur)) if value} try: verification, results = api.verification.start( verifier_id, deployment, tags=tags, **run_args) except exceptions.DeploymentNotFinishedStatus as e: print(_("Cannot start a verefication on " "unfinished deployment: %s") % e) return 1 if detailed: self._print_details_after_run(results) self._print_totals(results.totals) if 
do_use: self.use(api, verification.uuid) else: print(_("Verification UUID: %s.") % verification.uuid) @cliutils.help_group("verification") @cliutils.args("--uuid", dest="verification_uuid", type=str, required=True, help="Verification UUID. " + LIST_VERIFICATIONS_HINT) def use(self, api, verification_uuid): """Choose a verification to use for the future operations.""" verification = api.verification.get(verification_uuid) fileutils.update_globals_file( envutils.ENV_VERIFICATION, verification.uuid) print(_("Using verification (UUID=%s) as the default verification " "for the future operations.") % verification.uuid) @cliutils.help_group("verification") @cliutils.args("--uuid", dest="verification_uuid", type=str, help="Verification UUID. " + LIST_VERIFICATIONS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", help="Deployment name or UUID. " + LIST_DEPLOYMENTS_HINT) @cliutils.args("--failed", dest="failed", required=False, help="Rerun only failed tests.", action="store_true") @cliutils.args("--tag", nargs="+", dest="tags", type=str, required=False, help="Mark verification with a tag or a few tags.") @cliutils.args("--concurrency", dest="concur", type=int, metavar="", required=False, help="How many processes to be used for running verifier " "tests. 
The default value (0) auto-detects your CPU " "count.") @cliutils.args("--detailed", dest="detailed", action="store_true", required=False, help="Show verification details such as errors of failed " "tests.") @cliutils.args("--no-use", dest="do_use", action="store_false", help="Not to set the finished verification as the default " "verification for future operations.") @envutils.with_default_verification_uuid @envutils.with_default_deployment(cli_arg_name="deployment-id") @plugins.ensure_plugins_are_loaded def rerun(self, api, verification_uuid=None, deployment=None, tags=None, concur=None, failed=False, detailed=False, do_use=True): """Rerun tests from a verification for a specific deployment.""" verification, results = api.verification.rerun( verification_uuid, deployment_id=deployment, failed=failed, tags=tags, concurrency=concur) if detailed: self._print_details_after_run(results) self._print_totals(results.totals) if do_use: self.use(api, verification.uuid) else: print(_("Verification UUID: %s.") % verification.uuid) @cliutils.help_group("verification") @cliutils.args("--uuid", dest="verification_uuid", type=str, help="Verification UUID. 
" + LIST_VERIFICATIONS_HINT) @cliutils.args("--sort-by", metavar="", dest="sort_by", type=str, required=False, choices=("name", "duration", "status"), help="Sort tests by 'name', 'duration' or 'status'.") @cliutils.args("--detailed", dest="detailed", action="store_true", required=False, help="Show verification details such as run arguments " "and errors of failed tests.") @envutils.with_default_verification_uuid def show(self, api, verification_uuid=None, sort_by="name", detailed=False): """Show detailed information about a verification.""" verification = api.verification.get(verification_uuid) verifier = api.verifier.get(verification.verifier_uuid) deployment = api.deployment.get(verification.deployment_uuid) def run_args_formatter(v): run_args = [] for k in sorted(v.run_args): if k in ("load_list", "skip_list", "xfail_list"): value = "(value is too long, %s)" if detailed: value %= "will be displayed separately" else: value %= "use 'detailed' flag to display it" else: value = v.run_args[k] run_args.append("%s: %s" % (k, value)) return "\n".join(run_args) # Main table fields = ["UUID", "Status", "Started at", "Finished at", "Duration", "Run arguments", "Tags", "Verifier name", "Verifier type", "Deployment name", "Tests count", "Tests duration, sec", "Success", "Skipped", "Expected failures", "Unexpected success", "Failures"] formatters = { "Started at": lambda v: v.created_at.replace(microsecond=0), "Finished at": lambda v: v.updated_at.replace(microsecond=0), "Duration": lambda v: (v.updated_at.replace(microsecond=0) - v.created_at.replace(microsecond=0)), "Run arguments": run_args_formatter, "Tags": lambda v: ", ".join(v.tags) or None, "Verifier name": lambda v: "%s (UUID: %s)" % (verifier.name, verifier.uuid), "Verifier type": ( lambda v: "%s (namespace: %s)" % (verifier.type, verifier.namespace)), "Deployment name": ( lambda v: "%s (UUID: %s)" % (deployment["name"], deployment["uuid"])), "Tests duration, sec": lambda v: v.tests_duration } 
cliutils.print_dict(verification, fields, formatters=formatters, normalize_field_names=True, print_header=False, table_label="Verification") if detailed: h = _("Run arguments") print("\n%s" % cliutils.make_header(h, len(h)).strip()) print("\n%s\n" % json.dumps(verification.run_args, indent=4)) # Tests table tests = verification.tests values = [tests[test_id] for test_id in tests] fields = ["Name", "Duration, sec", "Status"] formatters = {"Duration, sec": lambda v: v["duration"]} index = ("name", "duration", "status").index(sort_by) cliutils.print_list(values, fields, formatters=formatters, table_label="Tests", normalize_field_names=True, sortby_index=index) if detailed: failures = [t for t in tests.values() if t["status"] == "fail"] if failures: self._print_failures("Failures", failures) else: print(_("\nCongratulations! Verification doesn't have failed " "tests! :)")) @cliutils.help_group("verification") @cliutils.args("--id", dest="verifier_id", type=str, required=False, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", required=False, help="Deployment name or UUID. 
" + LIST_DEPLOYMENTS_HINT) @cliutils.args("--tag", nargs="+", dest="tags", type=str, required=False, help="Tags to filter verifications by.") @cliutils.args("--status", dest="status", type=str, required=False, help="Status to filter verifications by.") def list(self, api, verifier_id=None, deployment=None, tags=None, status=None): """List all verifications.""" verifications = api.verification.list(verifier_id, deployment, tags, status) if verifications: fields = ["UUID", "Tags", "Verifier name", "Deployment name", "Started at", "Finished at", "Duration", "Status"] formatters = { "Tags": lambda v: ", ".join(v.tags) or "-", "Verifier name": ( lambda v: api.verifier.get(v.verifier_uuid).name), "Deployment name": ( lambda v: api.deployment.get(v.deployment_uuid)["name"]), "Started at": lambda v: v.created_at.replace(microsecond=0), "Finished at": lambda v: v.updated_at.replace(microsecond=0), "Duration": lambda v: (v.updated_at.replace(microsecond=0) - v.created_at.replace(microsecond=0)) } cliutils.print_list(verifications, fields, formatters=formatters, normalize_field_names=True, sortby_index=4) elif verifier_id or deployment or status or tags: print(_("There are no verifications that meet specified filter " "arguments.")) else: print(_("There are no verifications. You can start verification, " "using command `rally verify start`.")) @cliutils.help_group("verification") @cliutils.args("--uuid", nargs="+", dest="verification_uuid", type=str, required=True, help="UUIDs of verifications. " + LIST_VERIFICATIONS_HINT) def delete(self, api, verification_uuid): """Delete a verification or a few verifications.""" if not isinstance(verification_uuid, list): verification_uuid = [verification_uuid] for v_uuid in verification_uuid: api.verification.delete(v_uuid) @cliutils.help_group("verification") @cliutils.args("--uuid", nargs="+", dest="verification_uuid", type=str, help="UUIDs of verifications. 
" + LIST_VERIFICATIONS_HINT) @cliutils.args("--type", dest="output_type", type=str, required=False, default="json", help="Report type (Defaults to JSON). Out-of-the-box types:" " %s. HINT: You can list all types, executing `rally " "plugin list --plugin-base VerificationReporter` " "command." % ", ".join(DEFAULT_REPORT_TYPES)) @cliutils.args("--to", dest="output_dest", type=str, metavar="", required=False, help="Report destination. Can be a path to a file (in case " "of HTML, JSON, etc. types) to save the report to or " "a connection string. It depends on the report type.") @cliutils.args("--open", dest="open_it", action="store_true", required=False, help="Open the output file in a browser.") @envutils.with_default_verification_uuid @plugins.ensure_plugins_are_loaded def report(self, api, verification_uuid=None, output_type=None, output_dest=None, open_it=None): """Generate a report for a verification or a few verifications.""" if not isinstance(verification_uuid, list): verification_uuid = [verification_uuid] result = api.verification.report(verification_uuid, output_type, output_dest) if "files" in result: print(_("Saving the report to '%s' file. 
It may take some time.") % output_dest) for path in result["files"]: full_path = os.path.abspath(os.path.expanduser(path)) if not os.path.exists(os.path.dirname(full_path)): os.makedirs(os.path.dirname(full_path)) with open(full_path, "w") as f: f.write(result["files"][path]) print(_("The report has been successfully saved.")) if open_it: if "open" not in result: print(_("Cannot open '%s' report in the browser because " "report type doesn't support it.") % output_type) return 1 webbrowser.open_new_tab( "file://" + os.path.abspath(result["open"])) if "print" in result: # NOTE(andreykurilin): we need a separation between logs and # printed information to be able to parse output h = _("Verification Report") print("\n%s\n%s" % (cliutils.make_header(h, len(h)), result["print"])) @cliutils.help_group("verification") @cliutils.args("--id", dest="verifier_id", type=str, required=False, help="Verifier name or UUID. " + LIST_VERIFIERS_HINT) @cliutils.args("--deployment-id", dest="deployment", type=str, metavar="", required=False, help="Deployment name or UUID. " + LIST_DEPLOYMENTS_HINT) @cliutils.args("--file", dest="file_to_parse", type=str, metavar="", required=True, help="File to import test results from.") @cliutils.args("--run-args", dest="run_args", type=str, required=False, help="Arguments that might be used when running tests. 
For " "example, '{concurrency: 2, pattern: set=identity}'.") @cliutils.args("--no-use", dest="do_use", action="store_false", help="Not to set the created verification as the default " "verification for future operations.") @cliutils.alias("import") @envutils.with_default_deployment(cli_arg_name="deployment-id") @envutils.with_default_verifier_id() @plugins.ensure_plugins_are_loaded def import_results(self, api, verifier_id=None, deployment=None, file_to_parse=None, run_args=None, do_use=True): """Import results of a test run into the Rally database.""" if not os.path.exists(file_to_parse): print(_("File '%s' not found.") % file_to_parse) return 1 with open(file_to_parse, "r") as f: data = f.read() run_args = yaml.safe_load(run_args) if run_args else {} verification, results = api.verification.import_results( verifier_id, deployment, data, **run_args) self._print_totals(results.totals) if do_use: self.use(verification.uuid) else: print(_("Verification UUID: %s.") % verification.uuid) rally-0.9.1/rally/cli/commands/deployment.py0000664000567000056710000003247113073417720022245 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Rally command: deployment""" from __future__ import print_function import json import os import re import sys import jsonschema from keystoneclient import exceptions as keystone_exceptions from six.moves.urllib import parse from rally.cli import cliutils from rally.cli import envutils from rally.common import fileutils from rally.common.i18n import _ from rally.common import utils from rally.common import yamlutils as yaml from rally import exceptions from rally import plugins class DeploymentCommands(object): """Set of commands that allow you to manage deployments.""" @cliutils.args("--name", type=str, required=True, help="Name of the deployment.") @cliutils.args("--fromenv", action="store_true", help="Read environment variables instead of config file.") @cliutils.args("--filename", type=str, required=False, metavar="", help="Path to the configuration file of the deployment.") @cliutils.args("--no-use", action="store_false", dest="do_use", help="Don't set new deployment as default for" " future operations.") @plugins.ensure_plugins_are_loaded def create(self, api, name, fromenv=False, filename=None, do_use=False): """Create new deployment. This command will create a new deployment record in rally database. In the case of ExistingCloud deployment engine, it will use the cloud represented in the configuration. If the cloud doesn't exist, Rally can deploy a new one for you with Devstack or Fuel. Different deployment engines exist for these cases. If you use the ExistingCloud deployment engine, you can pass the deployment config by environment variables with ``--fromenv``: OS_USERNAME OS_PASSWORD OS_AUTH_URL OS_TENANT_NAME or OS_PROJECT_NAME OS_ENDPOINT_TYPE or OS_INTERFACE OS_ENDPOINT OS_REGION_NAME OS_CACERT OS_INSECURE OS_IDENTITY_API_VERSION All other deployment engines need more complex configuration data, so it should be stored in a configuration file. 
You can use physical servers, LXC containers, KVM virtual machines or virtual machines in OpenStack for deploying the cloud. Except physical servers, Rally can create cluster nodes for you. Interaction with virtualization software, OpenStack cloud or physical servers is provided by server providers. :param fromenv: boolean, read environment instead of config file :param filename: path to the configuration file :param name: name of the deployment """ if fromenv: config = {"type": "ExistingCloud"} config.update(envutils.get_creds_from_env_vars()) else: if not filename: print("Either --filename or --fromenv is required.") return(1) filename = os.path.expanduser(filename) with open(filename, "rb") as deploy_file: config = yaml.safe_load(deploy_file.read()) try: deployment = api.deployment.create(config, name) except jsonschema.ValidationError: print(_("Config schema validation error: %s.") % sys.exc_info()[1]) return(1) except exceptions.DeploymentNameExists: print(_("Error: %s") % sys.exc_info()[1]) return(1) self.list(api, deployment_list=[deployment]) if do_use: self.use(api, deployment["uuid"]) @cliutils.args("--filename", type=str, required=False, metavar="", help="Path to the configuration file of the deployment.") @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of the deployment.") @envutils.with_default_deployment() @plugins.ensure_plugins_are_loaded def recreate(self, api, deployment=None, filename=None): """Destroy and create an existing deployment. Unlike 'deployment destroy', the deployment database record will not be deleted, so the deployment UUID stays the same. 
:param deployment: UUID or name of the deployment """ config = None if filename: with open(filename, "rb") as deploy_file: config = yaml.safe_load(deploy_file.read()) api.deployment.recreate(deployment, config) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of the deployment.") @envutils.with_default_deployment() @plugins.ensure_plugins_are_loaded def destroy(self, api, deployment=None): """Destroy existing deployment. This will delete all containers, virtual machines, OpenStack instances or Fuel clusters created during Rally deployment creation. Also it will remove the deployment record from the Rally database. :param deployment: UUID or name of the deployment """ api.deployment.destroy(deployment) def list(self, api, deployment_list=None): """List existing deployments.""" headers = ["uuid", "created_at", "name", "status", "active"] current_deployment = envutils.get_global("RALLY_DEPLOYMENT") deployment_list = deployment_list or api.deployment.list() table_rows = [] if deployment_list: for t in deployment_list: r = [str(t[column]) for column in headers[:-1]] r.append("" if t["uuid"] != current_deployment else "*") table_rows.append(utils.Struct(**dict(zip(headers, r)))) cliutils.print_list(table_rows, headers, sortby_index=headers.index("created_at")) else: print(_("There are no deployments. " "To create a new deployment, use:" "\nrally deployment create")) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of the deployment.") @envutils.with_default_deployment() @cliutils.suppress_warnings def config(self, api, deployment=None): """Display configuration of the deployment. Output is the configuration of the deployment in a pretty-printed JSON format. 
:param deployment: UUID or name of the deployment """ deploy = api.deployment.get(deployment) result = deploy["config"] print(json.dumps(result, sort_keys=True, indent=4)) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of the deployment.") @envutils.with_default_deployment() def show(self, api, deployment=None): """Show the credentials of the deployment. :param deployment: UUID or name of the deployment """ headers = ["auth_url", "username", "password", "tenant_name", "region_name", "endpoint_type"] table_rows = [] deployment = api.deployment.get(deployment) creds = deployment.get_credentials_for("openstack") users = creds["users"] admin = creds["admin"] credentials = users + [admin] if admin else users for ep in credentials: data = ["***" if m == "password" else ep.get(m, "") for m in headers] table_rows.append(utils.Struct(**dict(zip(headers, data)))) cliutils.print_list(table_rows, headers) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of the deployment.") @envutils.with_default_deployment() def check(self, api, deployment=None): """Check keystone authentication and list all available services. 
:param deployment: UUID or name of the deployment """ headers = ["services", "type", "status"] table_rows = [] try: deployment = api.deployment.get(deployment) except exceptions.DeploymentNotFound: print(_("Deployment %s is not found.") % deployment) return(1) try: services = api.deployment.check(deployment) except keystone_exceptions.ConnectionRefused: admin = deployment.get_credentials_for("openstack")["admin"] print(_("Unable to connect %s.") % admin["auth_url"]) return(1) except exceptions.InvalidArgumentsException: data = ["keystone", "identity", "Error"] table_rows.append(utils.Struct(**dict(zip(headers, data)))) print(_("Authentication Issues: %s.") % sys.exc_info()[1]) return(1) for serv_type, serv in services.items(): data = [serv, serv_type, "Available"] table_rows.append(utils.Struct(**dict(zip(headers, data)))) print(_("keystone endpoints are valid and following" " services are available:")) cliutils.print_list(table_rows, headers) if "__unknown__" in services.values(): print(_( "NOTE: '__unknown__' service name means that Keystone service " "catalog doesn't return name for this service and Rally can " "not identify service by its type. 
BUT you still can use " "such services with api_versions context, specifying type of " "service (execute `rally plugin show api_versions` for more " "details).")) def _update_openrc_deployment_file(self, deployment, credential): openrc_path = os.path.expanduser("~/.rally/openrc-%s" % deployment) with open(openrc_path, "w+") as env_file: env_file.write("export OS_AUTH_URL='%(auth_url)s'\n" "export OS_USERNAME='%(username)s'\n" "export OS_PASSWORD='%(password)s'\n" "export OS_TENANT_NAME='%(tenant_name)s'\n" % credential) if credential.get("region_name"): env_file.write("export OS_REGION_NAME='%s'\n" % credential["region_name"]) if credential.get("endpoint_type"): env_file.write("export OS_ENDPOINT_TYPE='%sURL'\n" % credential["endpoint_type"]) env_file.write("export OS_INTERFACE='%s'\n" % credential["endpoint_type"]) if credential.get("endpoint"): env_file.write("export OS_ENDPOINT='%s'\n" % credential["endpoint"]) if credential.get("https_cacert"): env_file.write("export OS_CACERT='%s'\n" % credential["https_cacert"]) if re.match(r"^/v3/?$", parse.urlparse( credential["auth_url"]).path) is not None: env_file.write("export OS_USER_DOMAIN_NAME='%s'\n" "export OS_PROJECT_DOMAIN_NAME='%s'\n" % (credential["user_domain_name"], credential["project_domain_name"])) expanded_path = os.path.expanduser("~/.rally/openrc") if os.path.exists(expanded_path): os.remove(expanded_path) os.symlink(openrc_path, expanded_path) @cliutils.args("--deployment", dest="deployment", type=str, metavar="", required=False, help="UUID or name of a deployment.") def use(self, api, deployment): """Set active deployment. 
:param deployment: UUID or name of the deployment """ try: deployment = api.deployment.get(deployment) print("Using deployment: %s" % deployment["uuid"]) fileutils.update_globals_file("RALLY_DEPLOYMENT", deployment["uuid"]) creds = deployment.get_credentials_for("openstack") self._update_openrc_deployment_file( deployment["uuid"], creds["admin"] or creds["users"][0]) print("~/.rally/openrc was updated\n\nHINTS:\n" "\n* To use standard OpenStack clients, set up your env by " "running:\n\tsource ~/.rally/openrc\n" " OpenStack clients are now configured, e.g run:\n\t" "openstack image list") except exceptions.DeploymentNotFound: print("Deployment %s is not found." % deployment) return 1 rally-0.9.1/rally/cli/envutils.py0000664000567000056710000001314513073417716020137 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os

import decorator
from oslo_utils import strutils

from rally.common import fileutils
from rally.common.i18n import _
from rally import exceptions

ENV_DEPLOYMENT = "RALLY_DEPLOYMENT"
ENV_TASK = "RALLY_TASK"
ENV_VERIFIER = "RALLY_VERIFIER"
ENV_VERIFICATION = "RALLY_VERIFICATION"
ENVVARS = [ENV_DEPLOYMENT, ENV_TASK, ENV_VERIFIER, ENV_VERIFICATION]

MSG_MISSING_ARG = _("Missing argument: --%(arg_name)s")


def clear_global(global_key):
    """Drop one Rally global from ~/.rally/globals and the process env."""
    path = os.path.expanduser("~/.rally/globals")
    if os.path.exists(path):
        fileutils.update_env_file(path, global_key, "\n")
    if global_key in os.environ:
        os.environ.pop(global_key)


def clear_env():
    """Clear every known Rally global variable."""
    for envvar in ENVVARS:
        clear_global(envvar)


def get_global(global_key, do_raise=False):
    """Return a Rally global, loading ~/.rally/globals on first miss.

    :param global_key: name of the global variable
    :param do_raise: raise InvalidArgumentsException when the value is empty
    """
    if global_key not in os.environ:
        fileutils.load_env_file(os.path.expanduser("~/.rally/globals"))
    value = os.environ.get(global_key)
    if not value and do_raise:
        raise exceptions.InvalidArgumentsException("%s env is missing"
                                                   % global_key)
    return value


def default_from_global(arg_name, env_name, cli_arg_name,
                        message=MSG_MISSING_ARG):
    """Build a decorator that defaults *arg_name* from a Rally global.

    When the decorated function receives ``None`` for *arg_name*, the value
    is looked up via :func:`get_global`; if still empty, *message* is printed
    and 1 is returned instead of calling the function.
    """
    def substitute_default(f, *args, **kwargs):
        arg_index = f.__code__.co_varnames.index(arg_name)
        args = list(args)
        if args[arg_index] is None:
            args[arg_index] = get_global(env_name)
            if not args[arg_index]:
                print(message % {"arg_name": cli_arg_name})
                return(1)
        return f(*args, **kwargs)
    return decorator.decorator(substitute_default)


def with_default_deployment(cli_arg_name="uuid"):
    """Default the ``deployment`` argument from RALLY_DEPLOYMENT."""
    return default_from_global("deployment", ENV_DEPLOYMENT, cli_arg_name,
                               message=_("There is no default deployment.\n"
                                         "\tPlease use command:\n"
                                         "\trally deployment use "
                                         "|"
                                         "\nor pass uuid of deployment to "
                                         "the --%(arg_name)s argument of "
                                         "this command"))


def with_default_verifier_id(cli_arg_name="id"):
    """Default the ``verifier_id`` argument from RALLY_VERIFIER."""
    return default_from_global("verifier_id", ENV_VERIFIER, cli_arg_name)


with_default_task_id = default_from_global("task_id", ENV_TASK, "uuid")
with_default_verification_uuid = default_from_global("verification_uuid",
                                                     ENV_VERIFICATION,
                                                     "uuid")


def get_creds_from_env_vars():
    """Assemble admin credentials from the OS_* environment variables.

    :raises exceptions.ValidationError: when a required variable is unset
    :returns: dict with auth_url, admin creds and connection options
    """
    required_env_vars = ["OS_AUTH_URL", "OS_USERNAME", "OS_PASSWORD"]
    missing_env_vars = [v for v in required_env_vars if v not in os.environ]
    if missing_env_vars:
        msg = ("The following environment variables are "
               "required but not set: %s" % " ".join(missing_env_vars))
        raise exceptions.ValidationError(message=msg)
    creds = {
        "auth_url": os.environ["OS_AUTH_URL"],
        "admin": {
            "username": os.environ["OS_USERNAME"],
            "password": os.environ["OS_PASSWORD"],
            "tenant_name": get_project_name_from_env()
        },
        "endpoint_type": get_endpoint_type_from_env(),
        "endpoint": os.environ.get("OS_ENDPOINT"),
        "region_name": os.environ.get("OS_REGION_NAME", ""),
        "https_cacert": os.environ.get("OS_CACERT", ""),
        "https_insecure": strutils.bool_from_string(
            os.environ.get("OS_INSECURE"))
    }
    user_domain_name = os.environ.get("OS_USER_DOMAIN_NAME")
    project_domain_name = os.environ.get("OS_PROJECT_DOMAIN_NAME")
    identity_api_version = os.environ.get(
        "OS_IDENTITY_API_VERSION", os.environ.get("IDENTITY_API_VERSION"))
    if (identity_api_version == "3" or
            (identity_api_version is None and
             (user_domain_name or project_domain_name))):
        # it is Keystone v3 and it has another config scheme
        creds["admin"]["project_name"] = creds["admin"].pop("tenant_name")
        creds["admin"]["user_domain_name"] = user_domain_name or "Default"
        project_domain_name = project_domain_name or "Default"
        creds["admin"]["project_domain_name"] = project_domain_name
    return creds


def get_project_name_from_env():
    """Return OS_PROJECT_NAME or OS_TENANT_NAME, whichever is set."""
    tenant_name = os.environ.get("OS_PROJECT_NAME",
                                 os.environ.get("OS_TENANT_NAME"))
    if tenant_name is None:
        raise exceptions.ValidationError("Either the OS_PROJECT_NAME or "
                                         "OS_TENANT_NAME environment variable "
                                         "is required, but neither is set.")
    return tenant_name


def get_endpoint_type_from_env():
    """Return the endpoint type, stripping the legacy "URL" suffix."""
    endpoint_type = os.environ.get("OS_ENDPOINT_TYPE",
                                   os.environ.get("OS_INTERFACE"))
    if endpoint_type and "URL" in endpoint_type:
        endpoint_type = endpoint_type.replace("URL", "")
    return endpoint_type
rally-0.9.1/rally/cli/main.py0000664000567000056710000000223313073417716017206 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """CLI interface for Rally.""" from __future__ import print_function import sys from rally.cli import cliutils from rally.cli.commands import deployment from rally.cli.commands import plugin from rally.cli.commands import task from rally.cli.commands import verify categories = { "deployment": deployment.DeploymentCommands, "plugin": plugin.PluginCommands, "task": task.TaskCommands, "verify": verify.VerifyCommands } def main(): return cliutils.run(sys.argv, categories) if __name__ == "__main__": sys.exit(main()) rally-0.9.1/rally/cli/manage.py0000664000567000056710000000432013073417716017511 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""CLI interface for Rally DB management.""" from __future__ import print_function import contextlib import sys from rally.cli import cliutils from rally.cli import envutils from rally.common import db @contextlib.contextmanager def output_migration_result(method_name): """Print migration result.""" print("%s started." % method_name.capitalize()) start_revision = db.schema_revision() yield print("%s processed." % method_name.capitalize()) current_revision = db.schema_revision() if start_revision != current_revision: print("Database migrated successfully " "from {start} to {end} revision.".format(start=start_revision, end=current_revision)) else: print("Database is already up to date") class DBCommands(object): """Commands for DB management.""" def recreate(self, api): """Drop and create Rally database. This will delete all existing data. """ db.schema_cleanup() db.schema_create() envutils.clear_env() def create(self, api): """Create Rally database.""" db.schema_create() def upgrade(self, api): """Upgrade Rally database to the latest state.""" with output_migration_result("upgrade"): db.schema_upgrade() def revision(self, api): """Print current Rally database revision UUID.""" print(db.schema_revision()) def main(): categories = {"db": DBCommands} return cliutils.run(sys.argv, categories) if __name__ == "__main__": sys.exit(main()) rally-0.9.1/rally/plugins/0000775000567000056710000000000013073420067016613 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/__init__.py0000664000567000056710000000237313073417716020740 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import decorator from rally.common.plugin import discover PLUGINS_LOADED = False def load(): global PLUGINS_LOADED if not PLUGINS_LOADED: discover.import_modules_from_package("rally.deployment.engines") discover.import_modules_from_package("rally.deployment.serverprovider") discover.import_modules_from_package("rally.plugins") discover.load_plugins("/opt/rally/plugins/") discover.load_plugins(os.path.expanduser("~/.rally/plugins/")) PLUGINS_LOADED = True @decorator.decorator def ensure_plugins_are_loaded(f, *args, **kwargs): load() return f(*args, **kwargs) rally-0.9.1/rally/plugins/openstack/0000775000567000056710000000000013073420067020602 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/0000775000567000056710000000000013073420067022425 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/__init__.py0000664000567000056710000000000013073417716024533 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/identity/0000775000567000056710000000000013073420067024256 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/identity/__init__.py0000664000567000056710000000000013073417716026364 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/identity/keystone_common.py0000664000567000056710000001550113073417716030052 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import osclients from rally.plugins.openstack.services.identity import identity from rally.task import atomic class UnifiedKeystoneMixin(object): @staticmethod def _unify_service(service): return identity.Service(id=service.id, name=service.name) @staticmethod def _unify_role(role): return identity.Role(id=role.id, name=role.name) def delete_user(self, user_id): """Deletes user by its id.""" return self._impl.delete_user(user_id) def get_user(self, user_id): """Get user.""" return self._unify_user(self._impl.get_user(user_id)) def create_service(self, name=None, service_type=None, description=None): """Creates keystone service.""" return self._unify_service(self._impl.create_service( name=name, service_type=service_type, description=description)) def delete_service(self, service_id): """Deletes service.""" return self._impl.delete_service(service_id) def get_service(self, service_id): """Get service.""" return self._unify_service(self._impl.get_service(service_id)) def get_service_by_name(self, name): """List all services to find proper one.""" return self._unify_service(self._impl.get_service_by_name(name)) def get_role(self, role_id): """Get role.""" return self._unify_role(self._impl.get_role(role_id)) def delete_role(self, role_id): """Deletes role.""" return self._impl.delete_role(role_id) def list_ec2credentials(self, user_id): """List of access/secret pairs for a user_id. 
:param user_id: List all ec2-credentials for User ID :returns: Return ec2-credentials list """ return self._impl.list_ec2credentials(user_id) def delete_ec2credential(self, user_id, access): """Delete ec2credential. :param user_id: User ID for which to delete credential :param access: access key for ec2credential to delete """ return self._impl.delete_ec2credential(user_id=user_id, access=access) def fetch_token(self): """Authenticate user token.""" return self._impl.fetch_token() def validate_token(self, token): """Validate user token. :param token: Auth token to validate """ return self._impl.validate_token(token) class KeystoneMixin(object): def list_users(self): aname = "keystone_v%s.list_users" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).users.list() def delete_user(self, user_id): """Deletes user by its id.""" aname = "keystone_v%s.delete_user" % self.version with atomic.ActionTimer(self, aname): self._clients.keystone(self.version).users.delete(user_id) def get_user(self, user_id): """Get user by its id.""" aname = "keystone_v%s.get_user" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).users.get(user_id) def delete_service(self, service_id): """Deletes service.""" aname = "keystone_v%s.delete_service" % self.version with atomic.ActionTimer(self, aname): self._clients.keystone(self.version).services.delete(service_id) def list_services(self): """List all services.""" aname = "keystone_v%s.list_services" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).services.list() def get_service(self, service_id): """Get service.""" aname = "keystone_v%s.get_services" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).services.get( service_id) def get_service_by_name(self, name): """List all services to find proper one.""" for s in self.list_services(): if s.name == name: return 
s def delete_role(self, role_id): """Deletes role.""" aname = "keystone_v%s.delete_role" % self.version with atomic.ActionTimer(self, aname): self._clients.keystone(self.version).roles.delete(role_id) def list_roles(self): """List all roles.""" aname = "keystone_v%s.list_roles" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).roles.list() def get_role(self, role_id): """Get role.""" aname = "keystone_v%s.get_role" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).roles.get(role_id) def list_ec2credentials(self, user_id): """List of access/secret pairs for a user_id. :param user_id: List all ec2-credentials for User ID :returns: Return ec2-credentials list """ aname = "keystone_v%s.list_ec2creds" % self.version with atomic.ActionTimer(self, aname): return self._clients.keystone(self.version).ec2.list(user_id) def delete_ec2credential(self, user_id, access): """Delete ec2credential. :param user_id: User ID for which to delete credential :param access: access key for ec2credential to delete """ aname = "keystone_v%s.delete_ec2creds" % self.version with atomic.ActionTimer(self, aname): self._clients.keystone(self.version).ec2.delete(user_id=user_id, access=access) def fetch_token(self): """Authenticate user token.""" cred = self._clients.credential aname = "keystone_v%s.fetch_token" % self.version with atomic.ActionTimer(self, aname): clients = osclients.Clients(credential=cred, api_info=self._clients.api_info) return clients.keystone.auth_ref.auth_token def validate_token(self, token): """Validate user token. :param token: Auth token to validate """ aname = "keystone_v%s.validate_token" % self.version with atomic.ActionTimer(self, aname): self._clients.keystone(self.version).tokens.validate(token) rally-0.9.1/rally/plugins/openstack/services/identity/keystone_v2.py0000664000567000056710000003162113073417716027112 0ustar jenkinsjenkins00000000000000# All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid from rally.plugins.openstack import service from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_common from rally.task import atomic @service.service("keystone", service_type="identity", version="2") class KeystoneV2Service(service.Service, keystone_common.KeystoneMixin): @atomic.action_timer("keystone_v2.create_tenant") def create_tenant(self, tenant_name=None): tenant_name = tenant_name or self.generate_random_name() return self._clients.keystone("2").tenants.create(tenant_name) @atomic.action_timer("keystone_v2.update_tenant") def update_tenant(self, tenant_id, name=None, enabled=None, description=None): """Update tenant name and description. 
:param tenant_id: Id of tenant to update :param name: tenant name to be set (if boolean True, random name will be set) :param enabled: enabled status of project :param description: tenant description to be set (if boolean True, random description will be set) """ if name is True: name = self.generate_random_name() if description is True: description = self.generate_random_name() self._clients.keystone("2").tenants.update( tenant_id, name=name, description=description, enabled=enabled) @atomic.action_timer("keystone_v2.delete_tenant") def delete_tenant(self, tenant_id): return self._clients.keystone("2").tenants.delete(tenant_id) @atomic.action_timer("keystone_v2.list_tenants") def list_tenants(self): return self._clients.keystone("2").tenants.list() @atomic.action_timer("keystone_v2.get_tenant") def get_tenant(self, tenant_id): """Get tenant.""" return self._clients.keystone("2").tenants.get(tenant_id) @atomic.action_timer("keystone_v2.create_user") def create_user(self, username=None, password=None, email=None, tenant_id=None, enabled=True): username = username or self.generate_random_name() password = password or str(uuid.uuid4()) email = email or (username + "@rally.me") return self._clients.keystone("2").users.create(name=username, password=password, email=email, tenant_id=tenant_id, enabled=enabled) @atomic.action_timer("keystone_v2.create_users") def create_users(self, tenant_id, number_of_users, user_create_args=None): """Create specified amount of users. 
:param tenant_id: Id of tenant :param number_of_users: number of users to create :param user_create_args: additional user creation arguments """ users = [] for _i in range(number_of_users): users.append(self.create_user(tenant_id=tenant_id, **(user_create_args or {}))) return users @atomic.action_timer("keystone_v2.update_user") def update_user(self, user_id, **kwargs): allowed_args = ("name", "email", "enabled") restricted = set(kwargs) - set(allowed_args) if restricted: raise NotImplementedError( "Failed to update '%s', since Keystone V2 allows to update " "only '%s'." % ("', '".join(restricted), "', '".join(allowed_args))) self._clients.keystone("2").users.update(user_id, **kwargs) @atomic.action_timer("keystone_v2.update_user_password") def update_user_password(self, user_id, password): self._clients.keystone("2").users.update_password(user_id, password=password) @atomic.action_timer("keystone_v2.create_service") def create_service(self, name=None, service_type=None, description=None): """Creates keystone service. 
:param name: name of service to create :param service_type: type of the service :param description: description of the service :returns: keystone service instance """ name = name or self.generate_random_name() service_type = service_type or "rally_test_type" description = description or self.generate_random_name() return self._clients.keystone("2").services.create( name, service_type=service_type, description=description) @atomic.action_timer("keystone_v2.create_role") def create_role(self, name=None): name = name or self.generate_random_name() return self._clients.keystone("2").roles.create(name) @atomic.action_timer("keystone_v2.add_role") def add_role(self, role_id, user_id, tenant_id): self._clients.keystone("2").roles.add_user_role( user=user_id, role=role_id, tenant=tenant_id) @atomic.action_timer("keystone_v2.list_roles") def list_roles(self): """List all roles.""" return self._clients.keystone("2").roles.list() @atomic.action_timer("keystone_v2.list_roles_for_user") def list_roles_for_user(self, user_id, tenant_id=None): return self._clients.keystone("2").roles.roles_for_user( user_id, tenant_id) @atomic.action_timer("keystone_v2.revoke_role") def revoke_role(self, role_id, user_id, tenant_id): self._clients.keystone("2").roles.remove_user_role(user=user_id, role=role_id, tenant=tenant_id) @atomic.action_timer("keystone_v2.create_ec2creds") def create_ec2credentials(self, user_id, tenant_id): """Create ec2credentials. 
:param user_id: User ID for which to create credentials :param tenant_id: Tenant ID for which to create credentials :returns: Created ec2-credentials object """ return self._clients.keystone("2").ec2.create(user_id, tenant_id=tenant_id) @service.compat_layer(KeystoneV2Service) class UnifiedKeystoneV2Service(keystone_common.UnifiedKeystoneMixin, identity.Identity): """Compatibility layer for Keystone V2.""" @staticmethod def _check_domain(domain_name): if domain_name.lower() != "default": raise NotImplementedError("Domain functionality not implemented " "in Keystone v2") @staticmethod def _unify_tenant(tenant): return identity.Project(id=tenant.id, name=tenant.name, domain_id="default") @staticmethod def _unify_user(user): return identity.User(id=user.id, name=user.name, project_id=getattr(user, "tenantId", None), domain_id="default") def create_project(self, project_name=None, domain_name="Default"): """Creates new project/tenant and return project object. :param project_name: Name of project to be created. :param domain_name: Restricted for Keystone V2. Should not be set or "Default" is expected. 
""" self._check_domain(domain_name) tenant = self._impl.create_tenant(project_name) return self._unify_tenant(tenant) def update_project(self, project_id, name=None, enabled=None, description=None): """Update project name, enabled and description :param project_id: Id of project to update :param name: project name to be set :param enabled: enabled status of project :param description: project description to be set """ self._impl.update_tenant(tenant_id=project_id, name=name, enabled=enabled, description=description) def delete_project(self, project_id): """Deletes project.""" return self._impl.delete_tenant(project_id) def list_projects(self): """List all projects.""" return [self._unify_tenant(t) for t in self._impl.list_tenants()] def get_project(self, project_id): """Get project.""" return self._unify_tenant(self._impl.get_tenant(project_id)) def create_user(self, username=None, password=None, project_id=None, domain_name="Default", enabled=True, default_role="member"): """Create user. :param username: name of user :param password: user password :param project_id: user's default project :param domain_name: Restricted for Keystone V2. Should not be set or "Default" is expected. :param enabled: whether the user is enabled. :param default_role: Restricted for Keystone V2. Should not be set or "member" is expected. """ self._check_domain(domain_name) user = self._impl.create_user(username=username, password=password, tenant_id=project_id, enabled=enabled) return self._unify_user(user) def create_users(self, tenant_id, number_of_users, user_create_args=None): """Create specified amount of users. 
:param tenant_id: Id of tenant :param number_of_users: number of users to create :param user_create_args: additional user creation arguments """ if user_create_args and "domain_name" in user_create_args: self._check_domain(user_create_args["domain_name"]) return [self._unify_user(u) for u in self._impl.create_users( tenant_id=tenant_id, number_of_users=number_of_users, user_create_args=user_create_args)] def list_users(self): """List all users.""" return [self._unify_user(u) for u in self._impl.list_users()] def update_user(self, user_id, enabled=None, name=None, email=None, password=None): if password is not None: self._impl.update_user_password(user_id=user_id, password=password) update_args = {} if enabled is not None: update_args["enabled"] = enabled if name is not None: update_args["name"] = name if email is not None: update_args["email"] = email if update_args: self._impl.update_user(user_id, **update_args) def list_services(self): """List all services.""" return [self._unify_service(s) for s in self._impl.list_services()] def create_role(self, name=None, domain_name=None): """Add role to user.""" if domain_name is not None: raise NotImplementedError("Domain functionality not implemented " "in Keystone v2") return self._unify_role(self._impl.create_role(name)) def add_role(self, role_id, user_id, project_id): """Add role to user.""" self._impl.add_role(role_id=role_id, user_id=user_id, tenant_id=project_id) def revoke_role(self, role_id, user_id, project_id): """Revokes a role from a user.""" return self._impl.revoke_role(role_id=role_id, user_id=user_id, tenant_id=project_id) def list_roles(self, user_id=None, project_id=None, domain_name=None): """List all roles.""" if domain_name: raise NotImplementedError("Domain functionality not implemented " "in Keystone v2") if user_id: roles = self._impl.list_roles_for_user(user_id, tenant_id=project_id) else: roles = self._impl.list_roles() return [self._unify_role(role) for role in roles] def 
create_ec2credentials(self, user_id, project_id): """Create ec2credentials. :param user_id: User ID for which to create credentials :param project_id: Project ID for which to create credentials :returns: Created ec2-credentials object """ return self._impl.create_ec2credentials(user_id=user_id, tenant_id=project_id) rally-0.9.1/rally/plugins/openstack/services/identity/identity.py0000664000567000056710000002271013073417716026472 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections from rally.plugins.openstack import service Project = collections.namedtuple("Project", ["id", "name", "domain_id"]) User = collections.namedtuple("User", ["id", "name", "project_id", "domain_id"]) Service = collections.namedtuple("Service", ["id", "name"]) Role = collections.namedtuple("Role", ["id", "name"]) class Identity(service.UnifiedOpenStackService): @classmethod def is_applicable(cls, clients): cloud_version = clients.keystone().version.split(".")[0][1:] return cloud_version == cls._meta_get("impl")._meta_get("version") @service.should_be_overridden def create_project(self, project_name=None, domain_name="Default"): """Creates new project/tenant and return project object. :param project_name: Name of project to be created. :param domain_name: Name or id of domain where to create project, for those service implementations that don't support domains you should use None or 'Default' value. 
""" return self._impl.create_project(project_name, domain_name=domain_name) @service.should_be_overridden def update_project(self, project_id, name=None, enabled=None, description=None): """Update project name, enabled and description :param project_id: Id of project to update :param name: project name to be set :param enabled: enabled status of project :param description: project description to be set """ self._impl.update_project(project_id, name=name, enabled=enabled, description=description) @service.should_be_overridden def delete_project(self, project_id): """Deletes project.""" return self._impl.delete_project(project_id) @service.should_be_overridden def list_projects(self): """List all projects.""" return self._impl.list_projects() @service.should_be_overridden def get_project(self, project_id): """Get project.""" return self._impl.get_project(project_id) @service.should_be_overridden def create_user(self, username=None, password=None, project_id=None, domain_name="Default", enabled=True, default_role="member"): """Create user. :param username: name of user :param password: user password :param project_id: user's default project :param domain_name: Name or id of domain where to create user, for those service implementations that don't support domains you should use None or 'Default' value. :param enabled: whether the user is enabled. :param default_role: Name of role, for implementations that don't support domains this argument must be None or 'member'. """ return self._impl.create_user(username=username, password=password, project_id=project_id, domain_name=domain_name, default_role=default_role) @service.should_be_overridden def create_users(self, owner_id, number_of_users, user_create_args=None): """Create specified amount of users. 
:param owner_id: Id of tenant/project :param number_of_users: number of users to create :param user_create_args: additional user creation arguments """ return self._impl.create_users(owner_id, number_of_users=number_of_users, user_create_args=user_create_args) @service.should_be_overridden def delete_user(self, user_id): """Deletes user by its id.""" self._impl.delete_user(user_id) @service.should_be_overridden def list_users(self): """List all users.""" return self._impl.list_users() @service.should_be_overridden def update_user(self, user_id, enabled=None, name=None, email=None, password=None): return self._impl.update_user(user_id, enabled=enabled, name=name, email=email, password=password) @service.should_be_overridden def get_user(self, user_id): """Get user.""" return self._impl.get_user(user_id) @service.should_be_overridden def create_service(self, name=None, service_type=None, description=None): """Creates keystone service with random name. :param name: name of service to create :param service_type: type of the service :param description: description of the service """ return self._impl.create_service(name=name, service_type=service_type, description=description) @service.should_be_overridden def delete_service(self, service_id): """Deletes service.""" self._impl.delete_service(service_id) @service.should_be_overridden def list_services(self): """List all services.""" return self._impl.list_services() @service.should_be_overridden def get_service(self, service_id): """Get service.""" return self._impl.get_service(service_id) @service.should_be_overridden def create_role(self, name=None, domain_name=None): """Create role with specific name :param name: role name :param domain_name: Name or id of domain where to create role, for those service implementations that don't support domains you should use None or 'Default' value. 
""" return self._impl.create_role(name=name, domain_name=domain_name) @service.should_be_overridden def add_role(self, role_id, user_id, project_id): """Add role to user.""" return self._impl.add_role(role_id=role_id, user_id=user_id, project_id=project_id) @service.should_be_overridden def delete_role(self, role_id): """Deletes role.""" self._impl.delete_role(role_id) @service.should_be_overridden def revoke_role(self, role_id, user_id, project_id): """Revokes a role from a user.""" return self._impl.revoke_role(role_id=role_id, user_id=user_id, project_id=project_id) @service.should_be_overridden def list_roles(self, user_id=None, project_id=None, domain_name=None): """List all roles. :param user_id: filter in role grants for the specified user on a resource. Domain or project must be specified. :param project_id: filter in role grants on the specified project. user_id should be specified :param domain_name: filter in role grants on the specified domain. user_id should be specified """ return self._impl.list_roles(user_id=user_id, project_id=project_id, domain_name=domain_name) @service.should_be_overridden def get_role(self, role_id): """Get role.""" return self._impl.get_role(role_id) @service.should_be_overridden def get_service_by_name(self, name): """List all services to find proper one.""" return self._impl.get_service_by_name(name) @service.should_be_overridden def create_ec2credentials(self, user_id, project_id): """Create ec2credentials. :param user_id: User ID for which to create credentials :param project_id: Project ID for which to create credentials :returns: Created ec2-credentials object """ return self._impl.create_ec2credentials(user_id=user_id, project_id=project_id) @service.should_be_overridden def list_ec2credentials(self, user_id): """List of access/secret pairs for a user_id. 
:param user_id: List all ec2-credentials for User ID :returns: Return ec2-credentials list """ return self._impl.list_ec2credentials(user_id) @service.should_be_overridden def delete_ec2credential(self, user_id, access): """Delete ec2credential. :param user_id: User ID for which to delete credential :param access: access key for ec2credential to delete """ return self._impl.delete_ec2credential(user_id=user_id, access=access) @service.should_be_overridden def fetch_token(self): """Authenticate user token.""" return self._impl.fetch_token() @service.should_be_overridden def validate_token(self, token): """Validate user token. :param token: Auth token to validate """ return self._impl.validate_token(token) rally-0.9.1/rally/plugins/openstack/services/identity/keystone_v3.py0000664000567000056710000003355713073417716027125 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common import logging from rally import exceptions from rally.plugins.openstack import service from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_common from rally.task import atomic LOG = logging.getLogger(__name__) @service.service("keystone", service_type="identity", version="3") class KeystoneV3Service(service.Service, keystone_common.KeystoneMixin): def _get_domain_id(self, domain_name_or_id): from keystoneclient import exceptions as kc_exceptions try: # First try to find domain by ID return self._clients.keystone("3").domains.get( domain_name_or_id).id except kc_exceptions.NotFound: # Domain not found by ID, try to find it by name domains = self._clients.keystone("3").domains.list( name=domain_name_or_id) if domains: return domains[0].id # Domain not found by name raise exceptions.GetResourceNotFound( resource="KeystoneDomain(%s)" % domain_name_or_id) @atomic.action_timer("keystone_v3.create_project") def create_project(self, project_name=None, domain_name="Default"): project_name = project_name or self.generate_random_name() domain_id = self._get_domain_id(domain_name) return self._clients.keystone("3").projects.create(name=project_name, domain=domain_id) @atomic.action_timer("keystone_v3.update_project") def update_project(self, project_id, name=None, enabled=None, description=None): """Update tenant name and description. 
:param project_id: Id of project to update :param name: project name to be set (if boolean True, random name will be set) :param enabled: enabled status of project :param description: project description to be set (if boolean True, random description will be set) """ if name is True: name = self.generate_random_name() if description is True: description = self.generate_random_name() self._clients.keystone("3").projects.update( project_id, name=name, description=description, enabled=enabled) @atomic.action_timer("keystone_v3.delete_project") def delete_project(self, project_id): self._clients.keystone("3").projects.delete(project_id) @atomic.action_timer("keystone_v3.list_projects") def list_projects(self): return self._clients.keystone("3").projects.list() @atomic.action_timer("keystone_v3.get_project") def get_project(self, project_id): """Get project.""" return self._clients.keystone("3").projects.get(project_id) @atomic.action_timer("keystone_v3.create_user") def create_user(self, username=None, password=None, project_id=None, domain_name="Default", enabled=True, default_role="member"): """Create user. :param username: name of user :param password: user password :param project_id: user's default project :param domain_name: Name or id of domain where to create project. :param enabled: whether the user is enabled. :param default_role: user's default role """ domain_id = self._get_domain_id(domain_name) username = username or self.generate_random_name() user = self._clients.keystone("3").users.create( name=username, password=password, default_project=project_id, domain=domain_id, enabled=enabled) if project_id: # we can't setup role without project_id for role in self.list_roles(): if default_role in role.name.lower(): self.add_role(role_id=role.id, user_id=user.id, project_id=project_id) break else: LOG.warning("Unable to set %s role to created user." 
% default_role) return user @atomic.action_timer("keystone_v3.create_users") def create_users(self, project_id, number_of_users, user_create_args=None): """Create specified amount of users. :param project_id: Id of project :param number_of_users: number of users to create :param user_create_args: additional user creation arguments """ users = [] for _i in range(number_of_users): users.append(self.create_user(project_id=project_id, **(user_create_args or {}))) return users @atomic.action_timer("keystone_v3.update_user") def update_user(self, user_id, name=None, domain_name=None, project_id=None, password=None, email=None, description=None, enabled=None, default_project=None): domain = None if domain_name: domain = self._get_domain_id(domain_name) self._clients.keystone("3").users.update( user_id, name=name, domain=domain, project=project_id, password=password, email=email, description=description, enabled=enabled, default_project=default_project) @atomic.action_timer("keystone_v3.create_service") def create_service(self, name=None, service_type=None, description=None, enabled=True): """Creates keystone service. 
:param name: name of service to create :param service_type: type of the service :param description: description of the service :param enabled: whether the service appears in the catalog :returns: keystone service instance """ name = name or self.generate_random_name() service_type = service_type or "rally_test_type" description = description or self.generate_random_name() return self._clients.keystone("3").services.create( name, type=service_type, description=description, enabled=enabled) @atomic.action_timer("keystone_v3.create_role") def create_role(self, name=None, domain_name=None): domain_id = None if domain_name: domain_id = self._get_domain_id(domain_name) name = name or self.generate_random_name() return self._clients.keystone("3").roles.create(name, domain=domain_id) @atomic.action_timer("keystone_v3.add_role") def add_role(self, role_id, user_id, project_id): self._clients.keystone("3").roles.grant(role=role_id, user=user_id, project=project_id) @atomic.action_timer("keystone_v3.list_roles") def list_roles(self, user_id=None, project_id=None, domain_name=None): """List all roles.""" domain_id = None if domain_name: domain_id = self._get_domain_id(domain_name) return self._clients.keystone("3").roles.list(user=user_id, project=project_id, domain=domain_id) @atomic.action_timer("keystone_v3.revoke_role") def revoke_role(self, role_id, user_id, project_id): self._clients.keystone("3").roles.revoke(role=role_id, user=user_id, project=project_id) @atomic.action_timer("keystone_v3.create_domain") def create_domain(self, name, description=None, enabled=True): return self._clients.keystone("3").domains.create( name, description=description, enabled=enabled) @atomic.action_timer("keystone_v3.create_ec2creds") def create_ec2credentials(self, user_id, project_id): """Create ec2credentials. 
:param user_id: User ID for which to create credentials :param project_id: Tenant ID for which to create credentials :returns: Created ec2-credentials object """ return self._clients.keystone("3").ec2.create(user_id, project_id=project_id) @service.compat_layer(KeystoneV3Service) class UnifiedKeystoneV3Service(keystone_common.UnifiedKeystoneMixin, identity.Identity): @staticmethod def _unify_project(project): return identity.Project(id=project.id, name=project.name, domain_id=project.domain_id) @staticmethod def _unify_user(user): # When user has default_project_id that is None user.default_project_id # will raise AttributeError project_id = getattr(user, "project_id", getattr(user, "default_project_id", None)) return identity.User(id=user.id, name=user.name, project_id=project_id, domain_id=user.domain_id) def create_project(self, project_name=None, domain_name="Default"): """Creates new project/tenant and return project object. :param project_name: Name of project to be created. :param domain_name: Name or id of domain where to create project, """ project = self._impl.create_project(project_name, domain_name=domain_name) return self._unify_project(project) def update_project(self, project_id, name=None, enabled=None, description=None): """Update project name, enabled and description :param project_id: Id of project to update :param name: project name to be set :param enabled: enabled status of project :param description: project description to be set """ self._impl.update_project(project_id=project_id, name=name, enabled=enabled, description=description) def delete_project(self, project_id): """Deletes project.""" return self._impl.delete_project(project_id) def list_projects(self): """List all projects.""" return [self._unify_project(p) for p in self._impl.list_projects()] def get_project(self, project_id): """Get project.""" return self._unify_project(self._impl.get_project(project_id)) def create_user(self, username=None, password=None, project_id=None, 
domain_name="Default", enabled=True, default_role="member"): """Create user. :param username: name of user :param password: user password :param project_id: user's default project :param domain_name: Name or id of domain where to create project, :param enabled: whether the user is enabled. :param default_role: Name of default user's role """ return self._unify_user(self._impl.create_user( username=username, password=password, project_id=project_id, domain_name=domain_name, default_role=default_role, enabled=enabled)) def create_users(self, project_id, number_of_users, user_create_args=None): """Create specified amount of users. :param project_id: Id of project :param number_of_users: number of users to create :param user_create_args: additional user creation arguments """ return [self._unify_user(u) for u in self._impl.create_users( project_id=project_id, number_of_users=number_of_users, user_create_args=user_create_args)] def list_users(self): """List all users.""" return [self._unify_user(u) for u in self._impl.list_users()] def update_user(self, user_id, enabled=None, name=None, email=None, password=None): return self._impl.update_user(user_id, enabled=enabled, name=name, email=email, password=password) def list_services(self): """List all services.""" return [self._unify_service(s) for s in self._impl.list_services()] def create_role(self, name=None, domain_name=None): """Add role to user.""" return self._unify_role(self._impl.create_role( name, domain_name=domain_name)) def add_role(self, role_id, user_id, project_id): """Add role to user.""" self._impl.add_role(role_id=role_id, user_id=user_id, project_id=project_id) def revoke_role(self, role_id, user_id, project_id): """Revokes a role from a user.""" return self._impl.revoke_role(role_id=role_id, user_id=user_id, project_id=project_id) def list_roles(self, user_id=None, project_id=None, domain_name=None): """List all roles.""" return [self._unify_role(role) for role in self._impl.list_roles( 
user_id=user_id, project_id=project_id, domain_name=domain_name)] def create_ec2credentials(self, user_id, project_id): """Create ec2credentials. :param user_id: User ID for which to create credentials :param project_id: Project ID for which to create credentials :returns: Created ec2-credentials object """ return self._impl.create_ec2credentials(user_id=user_id, project_id=project_id) rally-0.9.1/rally/plugins/openstack/services/heat/0000775000567000056710000000000013073420067023346 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/heat/__init__.py0000664000567000056710000000000013073417716025454 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/services/heat/main.py0000664000567000056710000000561713073417716024664 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from rally.common import utils as common_utils from rally.task import atomic from rally.task import utils CONF = cfg.CONF class Stack(common_utils.RandomNameGeneratorMixin): """Represent heat stack. Usage: >>> stack = Stack(scenario, task, "template.yaml", parameters={"nodes": 3}) >>> run_benchmark(stack) >>> stack.update(nodes=4) >>> run_benchmark(stack) """ def __init__(self, scenario, task, template, files, parameters=None): """Init heat wrapper. 
:param Scenario scenario: scenario instance :param Task task: task instance :param str template: template file path :param dict files: dict with file name and path :param dict parameters: parameters for template """ self.scenario = scenario self.task = task self.template = open(template).read() self.files = {} self.parameters = parameters for name, path in files.items(): self.files[name] = open(path).read() def _wait(self, ready_statuses, failure_statuses): self.stack = utils.wait_for_status( self.stack, check_interval=CONF.benchmark.heat_stack_create_poll_interval, timeout=CONF.benchmark.heat_stack_create_timeout, ready_statuses=ready_statuses, failure_statuses=failure_statuses, update_resource=utils.get_from_manager(), ) def create(self): with atomic.ActionTimer(self.scenario, "heat.create"): self.stack = self.scenario.clients("heat").stacks.create( stack_name=self.scenario.generate_random_name(), template=self.template, files=self.files, parameters=self.parameters) self.stack_id = self.stack["stack"]["id"] self.stack = self.scenario.clients( "heat").stacks.get(self.stack_id) self._wait(["CREATE_COMPLETE"], ["CREATE_FAILED"]) def update(self, data): self.parameters.update(data) with atomic.ActionTimer(self.scenario, "heat.update"): self.scenario.clients("heat").stacks.update( self.stack_id, template=self.template, files=self.files, parameters=self.parameters) self._wait(["UPDATE_COMPLETE"], ["UPDATE_FAILED"]) rally-0.9.1/rally/plugins/openstack/__init__.py0000664000567000056710000000000013073417716022710 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/service.py0000664000567000056710000000273713073417716022634 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.task import service as base_service service = base_service.service compat_layer = base_service.compat_layer Service = base_service.Service should_be_overridden = base_service.should_be_overridden class UnifiedOpenStackService(base_service.UnifiedService): def discover_impl(self): impl_cls, impls = super(UnifiedOpenStackService, self).discover_impl() if not impl_cls: # Nova-network is not listed in keystone catalog and we can not # assume that it is enabled if neutron is missed. Since such # discovery needs an external call, it is done only if needed. for impl in impls: o = impl._meta_get("impl") if (o._meta_get("name") == consts.Service.NOVA_NET and impl.is_applicable(self._clients)): return impl, impls return impl_cls, impls rally-0.9.1/rally/plugins/openstack/wrappers/0000775000567000056710000000000013073420067022445 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/wrappers/__init__.py0000664000567000056710000000000013073417716024553 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/wrappers/network.py0000664000567000056710000004342313073417716024525 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import netaddr import six from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally import exceptions from rally.task import utils as task_utils from neutronclient.common import exceptions as neutron_exceptions from novaclient import exceptions as nova_exceptions LOG = logging.getLogger(__name__) cidr_incr = utils.RAMInt() def generate_cidr(start_cidr="10.2.0.0/24"): """Generate next CIDR for network or subnet, without IP overlapping. This is process and thread safe, because `cidr_incr' points to value stored directly in RAM. This guarantees that CIDRs will be serial and unique even under hard multiprocessing/threading load. :param start_cidr: start CIDR str :returns: next available CIDR str """ cidr = str(netaddr.IPNetwork(start_cidr).next(next(cidr_incr))) LOG.debug("CIDR generated: %s" % cidr) return cidr class NetworkWrapperException(exceptions.RallyException): msg_fmt = _("%(message)s") @six.add_metaclass(abc.ABCMeta) class NetworkWrapper(object): """Base class for network service implementations. We actually have two network services implementations, with different API: NovaNetwork and Neutron. The idea is (at least to try) to use unified service, which hides most differences and routines behind the scenes. This allows to significantly re-use and simplify code. """ START_CIDR = "10.2.0.0/24" SERVICE_IMPL = None def __init__(self, clients, owner, config=None): """Returns available network wrapper instance. 
:param clients: rally.osclients.Clients instance :param owner: The object that owns resources created by this wrapper instance. It will be used to generate random names, so must implement rally.common.utils.RandomNameGeneratorMixin :param config: The configuration of the network wrapper. Currently only one config option is recognized, 'start_cidr', and only for Nova network. :returns: NetworkWrapper subclass instance """ if hasattr(clients, self.SERVICE_IMPL): self.client = getattr(clients, self.SERVICE_IMPL)() else: self.client = clients(self.SERVICE_IMPL) self.config = config or {} self.owner = owner self.start_cidr = self.config.get("start_cidr", self.START_CIDR) @abc.abstractmethod def create_network(self): """Create network.""" @abc.abstractmethod def delete_network(self): """Delete network.""" @abc.abstractmethod def list_networks(self): """List networks.""" @abc.abstractmethod def create_floating_ip(self): """Create floating IP.""" @abc.abstractmethod def delete_floating_ip(self): """Delete floating IP.""" @abc.abstractmethod def supports_extension(self): """Checks whether a network extension is supported.""" class NovaNetworkWrapper(NetworkWrapper): SERVICE_IMPL = consts.Service.NOVA def __init__(self, *args, **kwargs): super(NovaNetworkWrapper, self).__init__(*args, **kwargs) self.skip_cidrs = [n.cidr for n in self.client.networks.list()] def _generate_cidr(self): cidr = generate_cidr(start_cidr=self.start_cidr) while cidr in self.skip_cidrs: cidr = generate_cidr(start_cidr=self.start_cidr) return cidr def _marshal_network_object(self, net_obj): """Convert a Network object to a dict. This helps keep return values from the NovaNetworkWrapper compatible with those from NeutronWrapper. :param net_obj: The Network object to convert to a dict """ return {"id": net_obj.id, "cidr": net_obj.cidr, "name": net_obj.label, "status": "ACTIVE", "external": False, "tenant_id": net_obj.project_id} def create_network(self, tenant_id, **kwargs): """Create network. 
:param tenant_id: str, tenant ID :param **kwargs: Additional keyword arguments. Only ``network_create_args`` is honored; other arguments are accepted for compatibility, but are not used here. ``network_create_args`` can be used to provide additional arbitrary network creation arguments. :returns: dict, network data """ cidr = self._generate_cidr() label = self.owner.generate_random_name() network_create_args = kwargs.get("network_create_args", {}) network = self.client.networks.create( project_id=tenant_id, cidr=cidr, label=label, **network_create_args) return self._marshal_network_object(network) def delete_network(self, network): self.client.networks.disassociate(network["id"], disassociate_host=False, disassociate_project=True) return self.client.networks.delete(network["id"]) def list_networks(self): return [self._marshal_network_object(n) for n in self.client.networks.list()] def create_floating_ip(self, ext_network=None, **kwargs): """Allocate a floating ip from the given nova-network pool :param ext_network: name or external network, str :param **kwargs: for compatibility, not used here :returns: floating IP dict """ if not ext_network: try: ext_network = self.client.floating_ip_pools.list()[0].name except IndexError: raise NetworkWrapperException("No floating IP pools found") fip = self.client.floating_ips.create(ext_network) return {"id": fip.id, "ip": fip.ip} def _get_floating_ip(self, fip_id, do_raise=False): try: fip = self.client.floating_ips.get(fip_id) except nova_exceptions.NotFound: if not do_raise: return None raise exceptions.GetResourceNotFound( resource="Floating IP %s" % fip_id) return fip.id def delete_floating_ip(self, fip_id, wait=False): """Delete floating IP. 
:param fip_id: int floating IP id :param wait: if True then wait to return until floating ip is deleted """ self.client.floating_ips.delete(fip_id) if not wait: return task_utils.wait_for_status( fip_id, ready_statuses=["deleted"], check_deletion=True, update_resource=lambda i: self._get_floating_ip(i, do_raise=True)) def supports_extension(self, extension): """Check whether a Nova-network extension is supported :param extension: str Nova network extension :returns: result tuple. Always (True, "") for secgroups in nova-network :rtype: (bool, string) """ # TODO(rkiran): Add other extensions whenever necessary if extension == "security-group": return True, "" return False, _("Nova driver does not support %s") % (extension) class NeutronWrapper(NetworkWrapper): SERVICE_IMPL = consts.Service.NEUTRON SUBNET_IP_VERSION = 4 LB_METHOD = "ROUND_ROBIN" LB_PROTOCOL = "HTTP" @property def external_networks(self): return self.client.list_networks(**{ "router:external": True})["networks"] def get_network(self, net_id=None, name=None): net = None try: if net_id: net = self.client.show_network(net_id)["network"] else: for net in self.client.list_networks(name=name)["networks"]: break return {"id": net["id"], "name": net["name"], "tenant_id": net["tenant_id"], "status": net["status"], "external": net["router:external"], "subnets": net["subnets"], "router_id": None} except (TypeError, neutron_exceptions.NeutronClientException): raise NetworkWrapperException( "Network not found: %s" % (name or net_id)) def create_router(self, external=False, **kwargs): """Create neutron router. 
:param external: bool, whether to set setup external_gateway_info :param **kwargs: POST /v2.0/routers request options :returns: neutron router dict """ kwargs["name"] = self.owner.generate_random_name() if external and "external_gateway_info" not in kwargs: for net in self.external_networks: kwargs["external_gateway_info"] = { "network_id": net["id"], "enable_snat": True} return self.client.create_router({"router": kwargs})["router"] def create_v1_pool(self, tenant_id, subnet_id, **kwargs): """Create LB Pool (v1). :param tenant_id: str, pool tenant id :param subnet_id: str, neutron subnet-id :param **kwargs: extra options :returns: neutron lb-pool dict """ pool_args = { "pool": { "tenant_id": tenant_id, "name": self.owner.generate_random_name(), "subnet_id": subnet_id, "lb_method": kwargs.get("lb_method", self.LB_METHOD), "protocol": kwargs.get("protocol", self.LB_PROTOCOL) } } return self.client.create_pool(pool_args) def _generate_cidr(self): # TODO(amaretskiy): Generate CIDRs unique for network, not cluster return generate_cidr(start_cidr=self.start_cidr) def create_network(self, tenant_id, **kwargs): """Create network. The following keyword arguments are accepted: * add_router: Create an external router and add an interface to each subnet created. Default: False * subnets_num: Number of subnets to create per network. Default: 0 * dns_nameservers: Nameservers for each subnet. Default: 8.8.8.8, 8.8.4.4 * network_create_args: Additional network creation arguments. :param tenant_id: str, tenant ID :param kwargs: Additional options, left open-ended for compatbilitiy. See above for recognized keyword args. 
:returns: dict, network data """ network_args = {"network": kwargs.get("network_create_args", {})} network_args["network"].update({ "tenant_id": tenant_id, "name": self.owner.generate_random_name()}) network = self.client.create_network(network_args)["network"] router = None if kwargs.get("add_router", False): router = self.create_router(external=True, tenant_id=tenant_id) subnets = [] subnets_num = kwargs.get("subnets_num", 0) for i in range(subnets_num): subnet_args = { "subnet": { "tenant_id": tenant_id, "network_id": network["id"], "name": self.owner.generate_random_name(), "ip_version": self.SUBNET_IP_VERSION, "cidr": self._generate_cidr(), "enable_dhcp": True, "dns_nameservers": kwargs.get("dns_nameservers", ["8.8.8.8", "8.8.4.4"]) } } subnet = self.client.create_subnet(subnet_args)["subnet"] subnets.append(subnet["id"]) if router: self.client.add_interface_router(router["id"], {"subnet_id": subnet["id"]}) return {"id": network["id"], "name": network["name"], "status": network["status"], "subnets": subnets, "external": network.get("router:external", False), "router_id": router and router["id"] or None, "tenant_id": tenant_id} def delete_v1_pool(self, pool_id): """Delete LB Pool (v1) :param pool_id: str, Lb-Pool-id """ self.client.delete_pool(pool_id) def delete_network(self, network): if self.supports_extension("dhcp_agent_scheduler")[0]: net_dhcps = self.client.list_dhcp_agent_hosting_networks( network["id"])["agents"] for net_dhcp in net_dhcps: self.client.remove_network_from_dhcp_agent(net_dhcp["id"], network["id"]) if network["router_id"]: self.client.remove_gateway_router(network["router_id"]) for port in self.client.list_ports(network_id=network["id"])["ports"]: if port["device_owner"] in ( "network:router_interface", "network:router_interface_distributed", "network:ha_router_replicated_interface", "network:router_gateway"): self.client.remove_interface_router( port["device_id"], {"port_id": port["id"]}) else: self.client.delete_port(port["id"]) for 
subnet_id in network["subnets"]: self._delete_subnet(subnet_id) responce = self.client.delete_network(network["id"]) if network["router_id"]: self.client.delete_router(network["router_id"]) return responce def _delete_subnet(self, subnet_id): self.client.delete_subnet(subnet_id) def list_networks(self): return self.client.list_networks()["networks"] def create_port(self, network_id, **kwargs): """Create neutron port. :param network_id: neutron network id :param **kwargs: POST /v2.0/ports request options :returns: neutron port dict """ kwargs["network_id"] = network_id kwargs["name"] = self.owner.generate_random_name() return self.client.create_port({"port": kwargs})["port"] def create_floating_ip(self, ext_network=None, tenant_id=None, port_id=None, **kwargs): """Create Neutron floating IP. :param ext_network: floating network name or dict :param tenant_id: str tenant id :param port_id: str port id :param **kwargs: for compatibility, not used here :returns: floating IP dict """ if not tenant_id: raise ValueError("Missed tenant_id") net_id = None if type(ext_network) is dict: net_id = ext_network["id"] elif ext_network: ext_net = self.get_network(name=ext_network) if not ext_net["external"]: raise NetworkWrapperException("Network is not external: %s" % ext_network) net_id = ext_net["id"] else: ext_networks = self.external_networks if not ext_networks: raise NetworkWrapperException( "Failed to allocate floating IP: " "no external networks found") net_id = ext_networks[0]["id"] kwargs = {"floatingip": {"floating_network_id": net_id, "tenant_id": tenant_id}} if port_id: kwargs["floatingip"]["port_id"] = port_id fip = self.client.create_floatingip(kwargs)["floatingip"] return {"id": fip["id"], "ip": fip["floating_ip_address"]} def delete_floating_ip(self, fip_id, **kwargs): """Delete floating IP. 
:param fip_id: int floating IP id :param **kwargs: for compatibility, not used here """ self.client.delete_floatingip(fip_id) def supports_extension(self, extension): """Check whether a neutron extension is supported :param extension: str, neutron extension :returns: result tuple :rtype: (bool, string) """ extensions = self.client.list_extensions().get("extensions", []) if any(ext.get("alias") == extension for ext in extensions): return True, "" return False, _("Neutron driver does not support %s") % (extension) def wrap(clients, owner, config=None): """Returns available network wrapper instance. :param clients: rally.osclients.Clients instance :param owner: The object that owns resources created by this wrapper instance. It will be used to generate random names, so must implement rally.common.utils.RandomNameGeneratorMixin :param config: The configuration of the network wrapper. Currently only one config option is recognized, 'start_cidr', and only for Nova network. :returns: NetworkWrapper subclass instance """ if hasattr(clients, "services"): services = clients.services() else: services = clients("services") if consts.Service.NEUTRON in services.values(): return NeutronWrapper(clients, owner, config=config) return NovaNetworkWrapper(clients, owner, config=config) rally-0.9.1/rally/plugins/openstack/wrappers/cinder.py0000664000567000056710000000603213073417716024273 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import abc from rally.common import logging from rally import exceptions import six LOG = logging.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class CinderWrapper(object): def __init__(self, client, owner): self.owner = owner self.client = client @abc.abstractmethod def create_volume(self, volume): """Creates new volume.""" @abc.abstractmethod def update_volume(self, volume): """Updates name and description for this volume.""" @abc.abstractmethod def create_snapshot(self, volume_id): """Creates a volume snapshot.""" class CinderV1Wrapper(CinderWrapper): def create_volume(self, size, **kwargs): kwargs["display_name"] = self.owner.generate_random_name() volume = self.client.volumes.create(size, **kwargs) return volume def update_volume(self, volume, **update_args): update_args["display_name"] = self.owner.generate_random_name() update_args["display_description"] = ( update_args.get("display_description")) self.client.volumes.update(volume, **update_args) def create_snapshot(self, volume_id, **kwargs): kwargs["display_name"] = self.owner.generate_random_name() snapshot = self.client.volume_snapshots.create(volume_id, **kwargs) return snapshot class CinderV2Wrapper(CinderWrapper): def create_volume(self, size, **kwargs): kwargs["name"] = self.owner.generate_random_name() volume = self.client.volumes.create(size, **kwargs) return volume def update_volume(self, volume, **update_args): update_args["name"] = self.owner.generate_random_name() update_args["description"] = update_args.get("description") self.client.volumes.update(volume, **update_args) def create_snapshot(self, volume_id, **kwargs): kwargs["name"] = self.owner.generate_random_name() snapshot = self.client.volume_snapshots.create(volume_id, **kwargs) return snapshot def wrap(client, owner): """Returns cinderclient wrapper based on cinder client version.""" version = client.choose_version() if version 
== "1": return CinderV1Wrapper(client(), owner) elif version == "2": return CinderV2Wrapper(client(), owner) else: msg = "This version of API %s could not be identified." % version LOG.warning(msg) raise exceptions.InvalidArgumentsException(msg) rally-0.9.1/rally/plugins/openstack/wrappers/glance.py0000664000567000056710000001702113073417720024253 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import os import time from rally.common import logging from rally.common import utils as rutils from rally import exceptions from rally.task import utils from glanceclient import exc as glance_exc from oslo_config import cfg import requests import six LOG = logging.getLogger(__name__) GLANCE_BENCHMARK_OPTS = [ cfg.FloatOpt("glance_image_create_prepoll_delay", default=2.0, help="Time to sleep after creating a resource before " "polling for it status"), cfg.FloatOpt("glance_image_create_timeout", default=120.0, help="Time to wait for glance image to be created."), cfg.FloatOpt("glance_image_create_poll_interval", default=1.0, help="Interval between checks when waiting for image " "creation.") ] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(GLANCE_BENCHMARK_OPTS, group=benchmark_group) @six.add_metaclass(abc.ABCMeta) class GlanceWrapper(object): def __init__(self, client, owner): self.owner = owner self.client = client def 
get_image(self, image): """Gets image. This serves to fetch the latest data on the image for the various wait_for_*() functions. Must raise rally.exceptions.GetResourceNotFound if the resource is not found or deleted. """ # NOTE(stpierre): This function actually has a single # implementation that works for both Glance v1 and Glance v2, # but since we need to use this function in both wrappers, it # gets implemented here. try: return self.client.images.get(image.id) except glance_exc.HTTPNotFound: raise exceptions.GetResourceNotFound(resource=image) @abc.abstractmethod def create_image(self, container_format, image_location, disk_format): """Creates new image. Accepts all Glance v2 parameters. """ @abc.abstractmethod def set_visibility(self, image, visibility="public"): """Set an existing image to public or private.""" @abc.abstractmethod def list_images(self, **filters): """List images. Accepts all Glance v2 filters. """ class GlanceV1Wrapper(GlanceWrapper): def create_image(self, container_format, image_location, disk_format, **kwargs): kw = { "container_format": container_format, "disk_format": disk_format, } kw.update(kwargs) if "name" not in kw: kw["name"] = self.owner.generate_random_name() if "visibility" in kw: kw["is_public"] = kw.pop("visibility") == "public" image_location = os.path.expanduser(image_location) try: if os.path.isfile(image_location): kw["data"] = open(image_location) else: kw["copy_from"] = image_location image = self.client.images.create(**kw) rutils.interruptable_sleep(CONF.benchmark. glance_image_create_prepoll_delay) image = utils.wait_for_status( image, ["active"], update_resource=self.get_image, timeout=CONF.benchmark.glance_image_create_timeout, check_interval=CONF.benchmark. 
glance_image_create_poll_interval) finally: if "data" in kw: kw["data"].close() return image def set_visibility(self, image, visibility="public"): self.client.images.update(image.id, is_public=(visibility == "public")) def list_images(self, **filters): kwargs = {"filters": filters} if "owner" in filters: # NOTE(stpierre): in glance v1, "owner" is not a filter, # so we need to handle it separately. kwargs["owner"] = kwargs["filters"].pop("owner") visibility = kwargs["filters"].pop("visibility", None) images = self.client.images.list(**kwargs) # NOTE(stpierre): Glance v1 isn't smart enough to filter on # public/private images, so we have to do it manually. if visibility is not None: is_public = visibility == "public" return [i for i in images if i.is_public is is_public] return images class GlanceV2Wrapper(GlanceWrapper): def create_image(self, container_format, image_location, disk_format, **kwargs): kw = { "container_format": container_format, "disk_format": disk_format, } kw.update(kwargs) if "name" not in kw: kw["name"] = self.owner.generate_random_name() if "is_public" in kw: LOG.warning("is_public is not supported by Glance v2, and is " "deprecated in Rally v0.8.0") kw["visibility"] = "public" if kw.pop("is_public") else "private" image_location = os.path.expanduser(image_location) image = self.client.images.create(**kw) rutils.interruptable_sleep(CONF.benchmark. glance_image_create_prepoll_delay) start = time.time() image = utils.wait_for_status( image, ["queued"], update_resource=self.get_image, timeout=CONF.benchmark.glance_image_create_timeout, check_interval=CONF.benchmark. 
glance_image_create_poll_interval) timeout = time.time() - start image_data = None response = None try: if os.path.isfile(image_location): image_data = open(image_location) else: response = requests.get(image_location, stream=True) image_data = response.raw self.client.images.upload(image.id, image_data) finally: if image_data is not None: image_data.close() if response is not None: response.close() return utils.wait_for_status( image, ["active"], update_resource=self.get_image, timeout=timeout, check_interval=CONF.benchmark. glance_image_create_poll_interval) def set_visibility(self, image, visibility="public"): self.client.images.update(image.id, visibility=visibility) def list_images(self, **filters): return self.client.images.list(filters=filters) def wrap(client, owner): """Returns glanceclient wrapper based on glance client version.""" version = client.choose_version() if version == "1": return GlanceV1Wrapper(client(), owner) elif version == "2": return GlanceV2Wrapper(client(), owner) else: msg = "Version %s of the glance API could not be identified." % version LOG.warning(msg) raise exceptions.InvalidArgumentsException(msg) rally-0.9.1/rally/plugins/openstack/wrappers/keystone.py0000664000567000056710000002322713073417716024675 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import abc import collections from keystoneclient import exceptions import six from rally.common import logging LOG = logging.getLogger(__name__) Project = collections.namedtuple("Project", ["id", "name", "domain_id"]) User = collections.namedtuple("User", ["id", "name", "project_id", "domain_id"]) Service = collections.namedtuple("Service", ["id", "name"]) Role = collections.namedtuple("Role", ["id", "name"]) @six.add_metaclass(abc.ABCMeta) class KeystoneWrapper(object): def __init__(self, client): self.client = client LOG.warning( "Class %s is deprecated since Rally 0.8.0 and will be removed " "soon. Use " "rally.plugins.openstack.services.identity.identity.Identity " "instead." % self.__class__) def __getattr__(self, attr_name): return getattr(self.client, attr_name) @abc.abstractmethod def create_project(self, project_name, domain_name="Default"): """Creates new project/tenant and return project object. :param project_name: Name of project to be created. :param domain_name: Name or id of domain where to create project, for implementations that don't support domains this argument must be None or 'Default'. """ @abc.abstractmethod def delete_project(self, project_id): """Deletes project.""" @abc.abstractmethod def create_user(self, username, password, email=None, project_id=None, domain_name="Default", default_role="member"): """Create user. :param username: name of user :param password: user password :param project: user's default project :param domain_name: Name or id of domain where to create project, for implementations that don't support domains this argument must be None or 'Default'. 
:param default_role: user's default role """ @abc.abstractmethod def delete_user(self, user_id): """Deletes user.""" @abc.abstractmethod def list_users(self): """List all users.""" @abc.abstractmethod def list_projects(self): """List all projects/tenants.""" def delete_service(self, service_id): """Deletes service.""" self.client.services.delete(service_id) def list_services(self): """List all services.""" return map(KeystoneWrapper._wrap_service, self.client.services.list()) def create_role(self, name, **kwargs): """create a role. :param name: name of role :param kwargs: Optional additional arguments for roles creation """ def delete_role(self, role_id): """Deletes role.""" self.client.roles.delete(role_id) def list_roles(self): """List all roles.""" return map(KeystoneWrapper._wrap_role, self.client.roles.list()) @abc.abstractmethod def add_role(self, role_id, user_id, project_id): """Assign role to user.""" @abc.abstractmethod def remove_role(self, role_id, user_id, project_id): """Remove role from user.""" @staticmethod def _wrap_service(service): return Service(id=service.id, name=service.name) @staticmethod def _wrap_role(role): return Role(id=role.id, name=role.name) class KeystoneV2Wrapper(KeystoneWrapper): def _check_domain(self, domain_name): if domain_name.lower() != "default": raise NotImplementedError("Domain functionality not implemented " "in Keystone v2") @staticmethod def _wrap_v2_tenant(tenant): return Project(id=tenant.id, name=tenant.name, domain_id="default") @staticmethod def _wrap_v2_role(role): return Role(id=role.id, name=role.name) @staticmethod def _wrap_v2_user(user): return User(id=user.id, name=user.name, project_id=getattr(user, "tenantId", None), domain_id="default") def create_project(self, project_name, domain_name="Default"): self._check_domain(domain_name) tenant = self.client.tenants.create(project_name) return KeystoneV2Wrapper._wrap_v2_tenant(tenant) def delete_project(self, project_id): self.client.tenants.delete(project_id) 
def create_user(self, username, password, email=None, project_id=None, domain_name="Default", default_role="member"): # NOTE(liuyulong): For v2 wrapper the `default_role` here is not used. self._check_domain(domain_name) user = self.client.users.create(username, password, email, project_id) return KeystoneV2Wrapper._wrap_v2_user(user) def delete_user(self, user_id): self.client.users.delete(user_id) def list_users(self): return map(KeystoneV2Wrapper._wrap_v2_user, self.client.users.list()) def list_projects(self): return map(KeystoneV2Wrapper._wrap_v2_tenant, self.client.tenants.list()) def create_role(self, name): role = self.client.roles.create(name) return KeystoneV2Wrapper._wrap_v2_role(role) def add_role(self, role_id, user_id, project_id): self.client.roles.add_user_role(user_id, role_id, tenant=project_id) def remove_role(self, role_id, user_id, project_id): self.client.roles.remove_user_role(user_id, role_id, tenant=project_id) class KeystoneV3Wrapper(KeystoneWrapper): def _get_domain_id(self, domain_name_or_id): try: # First try to find domain by ID return self.client.domains.get(domain_name_or_id).id except exceptions.NotFound: # Domain not found by ID, try to find it by name domains = self.client.domains.list(name=domain_name_or_id) if domains: return domains[0].id # Domain not found by name, raise original NotFound exception raise @staticmethod def _wrap_v3_project(project): return Project(id=project.id, name=project.name, domain_id=project.domain_id) @staticmethod def _wrap_v3_role(role): return Role(id=role.id, name=role.name) @staticmethod def _wrap_v3_user(user): # When user has default_project_id that is None user.default_project_id # will raise AttributeError project_id = getattr(user, "default_project_id", None) return User(id=user.id, name=user.name, project_id=project_id, domain_id=user.domain_id) def create_project(self, project_name, domain_name="Default"): domain_id = self._get_domain_id(domain_name) project = self.client.projects.create( 
name=project_name, domain=domain_id) return KeystoneV3Wrapper._wrap_v3_project(project) def delete_project(self, project_id): self.client.projects.delete(project_id) def create_user(self, username, password, email=None, project_id=None, domain_name="Default", default_role="member"): domain_id = self._get_domain_id(domain_name) user = self.client.users.create(name=username, password=password, default_project=project_id, email=email, domain=domain_id) for role in self.client.roles.list(): if default_role in role.name.lower(): self.client.roles.grant(role.id, user=user.id, project=project_id) break else: LOG.warning( "Unable to set %s role to created user." % default_role) return KeystoneV3Wrapper._wrap_v3_user(user) def delete_user(self, user_id): self.client.users.delete(user_id) def list_users(self): return map(KeystoneV3Wrapper._wrap_v3_user, self.client.users.list()) def list_projects(self): return map(KeystoneV3Wrapper._wrap_v3_project, self.client.projects.list()) def create_role(self, name, domain, **kwargs): role = self.client.roles.create(name, domain=domain, **kwargs) return KeystoneV3Wrapper._wrap_v3_role(role) def add_role(self, role_id, user_id, project_id): self.client.roles.grant(role_id, user=user_id, project=project_id) def remove_role(self, role_id, user_id, project_id): self.client.roles.revoke(role_id, user=user_id, project=project_id) def wrap(client): """Returns keystone wrapper based on keystone client version.""" LOG.warning("Method wrap from %s and whole Keystone wrappers are " "deprecated since Rally 0.8.0 and will be removed soon. Use " "rally.plugins.openstack.services.identity.identity.Identity " "instead." % __file__) if client.version == "v2.0": return KeystoneV2Wrapper(client) elif client.version == "v3": return KeystoneV3Wrapper(client) else: raise NotImplementedError( "Wrapper for version %s is not implemented." 
% client.version) rally-0.9.1/rally/plugins/openstack/scenario.py0000664000567000056710000001064313073417720022765 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import random from rally import osclients from rally.task import scenario configure = functools.partial(scenario.configure, namespace="openstack") class OpenStackScenario(scenario.Scenario): """Base class for all OpenStack scenarios.""" def __init__(self, context=None, admin_clients=None, clients=None): super(OpenStackScenario, self).__init__(context) if context: api_info = {} if "api_versions" in context.get("config", {}): api_versions = context["config"]["api_versions"] for service in api_versions: api_info[service] = { "version": api_versions[service].get("version"), "service_type": api_versions[service].get( "service_type")} if admin_clients is None and "admin" in context: self._admin_clients = osclients.Clients( context["admin"]["credential"], api_info) if clients is None: if "users" in context and "user" not in context: self._choose_user(context) if "user" in context: self._clients = osclients.Clients( context["user"]["credential"], api_info) if admin_clients: self._admin_clients = admin_clients if clients: self._clients = clients def _choose_user(self, context): """Choose one user from users context We are choosing on each iteration one user """ if context["user_choice_method"] == "random": user = 
random.choice(context["users"]) tenant = context["tenants"][user["tenant_id"]] else: # Second and last case - 'round_robin'. tenants_amount = len(context["tenants"]) # NOTE(amaretskiy): iteration is subtracted by `1' because it # starts from `1' but we count from `0' iteration = context["iteration"] - 1 tenant_index = int(iteration % tenants_amount) tenant_id = sorted(context["tenants"].keys())[tenant_index] tenant = context["tenants"][tenant_id] users = context["tenants"][tenant_id]["users"] user_index = int((iteration / tenants_amount) % len(users)) user = users[user_index] context["user"], context["tenant"] = user, tenant def clients(self, client_type, version=None): """Returns a python openstack client of the requested type. The client will be that for one of the temporary non-administrator users created before the benchmark launch. :param client_type: Client type ("nova"/"glance" etc.) :param version: client version ("1"/"2" etc.) :returns: Standard python OpenStack client instance """ client = getattr(self._clients, client_type) return client(version) if version is not None else client() def admin_clients(self, client_type, version=None): """Returns a python admin openstack client of the requested type. :param client_type: Client type ("nova"/"glance" etc.) :param version: client version ("1"/"2" etc.) 
:returns: Python openstack client object """ client = getattr(self._admin_clients, client_type) return client(version) if version is not None else client() @classmethod def validate(cls, name, config, admin=None, users=None, deployment=None): if admin: admin = osclients.Clients(admin) if users: users = [osclients.Clients(user["credential"]) for user in users] super(OpenStackScenario, cls).validate( name=name, config=config, admin=admin, users=users, deployment=deployment) rally-0.9.1/rally/plugins/openstack/hook/0000775000567000056710000000000013073420067021542 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/hook/__init__.py0000664000567000056710000000000013073417716023650 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/hook/fault_injection.py0000664000567000056710000000501613073417716025302 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os_faults from rally.common import logging from rally.common import objects from rally import consts from rally.task import hook LOG = logging.getLogger(__name__) @hook.configure(name="fault_injection") class FaultInjectionHook(hook.Hook): """Performs fault injection using os-faults library. 
Configuration: action - string that represents an action (more info in [1]) verify - whether to verify connection to cloud nodes or not This plugin discovers extra config of ExistingCloud and looks for "cloud_config" field. If cloud_config is present then it will be used to connect to the cloud by os-faults. Another option is to provide os-faults config file through OS_FAULTS_CONFIG env variable. Format of the config can be found in [1]. [1] http://os-faults.readthedocs.io/en/latest/usage.html """ CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "action": {"type": "string"}, "verify": {"type": "boolean"}, }, "required": [ "action", ], "additionalProperties": False, } def get_cloud_config(self): deployment = objects.Deployment.get(self.task["deployment_uuid"]) deployment_config = deployment["config"] if deployment_config["type"] != "ExistingCloud": return None extra_config = deployment_config.get("extra", {}) return extra_config.get("cloud_config") def run(self): # get cloud configuration cloud_config = self.get_cloud_config() # connect to the cloud injector = os_faults.connect(cloud_config) # verify that all nodes are available if self.config.get("verify"): injector.verify() LOG.debug("Injecting fault: %s", self.config["action"]) os_faults.human_api(injector, self.config["action"]) rally-0.9.1/rally/plugins/openstack/verification/0000775000567000056710000000000013073420067023264 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/verification/__init__.py0000664000567000056710000000000013073417716025372 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/verification/tempest/0000775000567000056710000000000013073420067024745 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/verification/tempest/__init__.py0000664000567000056710000000000013073417716027053 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/verification/tempest/config.ini0000664000567000056710000000150013073417716026716 0ustar jenkinsjenkins00000000000000[DEFAULT] debug = True use_stderr = False log_file = [auth] use_dynamic_credentials = True [compute] image_ref = image_ref_alt = flavor_ref = flavor_ref_alt = fixed_network_name = [compute-feature-enabled] live_migration = False resize = True vnc_console = True attach_encrypted_volume = False [data-processing] [identity] [image-feature-enabled] deactivate_image = True [input-scenario] ssh_user_regex = [["^.*[Cc]irros.*$", "cirros"], ["^.*[Tt]est[VvMm].*$", "cirros"], ["^.*rally_verify.*$", "cirros"]] [network] [network-feature-enabled] ipv6_subnet_attributes = True ipv6 = True [object-storage] [oslo_concurrency] lock_path = [orchestration] instance_type = [scenario] img_dir = img_file = [service_available] [validation] run_validation = True image_ssh_user = cirros [volume-feature-enabled] bootable = True rally-0.9.1/rally/plugins/openstack/verification/tempest/manager.py0000664000567000056710000002213213073417716026740 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import re import shutil import subprocess from rally.common.i18n import _LE from rally.common import yamlutils as yaml from rally import exceptions from rally.plugins.common.verification import testr from rally.plugins.openstack.verification.tempest import config from rally.plugins.openstack.verification.tempest import consts from rally.verification import manager from rally.verification import utils AVAILABLE_SETS = (list(consts.TempestTestSets) + list(consts.TempestApiTestSets) + list(consts.TempestScenarioTestSets)) @manager.configure(name="tempest", namespace="openstack", default_repo="https://git.openstack.org/openstack/tempest", context={"tempest": {}, "testr": {}}) class TempestManager(testr.TestrLauncher): """Tempest verifier. **Description**: Quote from official documentation: This is a set of integration tests to be run against a live OpenStack cluster. Tempest has batteries of tests for OpenStack API validation, Scenarios, and other specific tests useful in validating an OpenStack deployment. Rally supports features listed below: * *cloning Tempest*: repository and version can be specified * *installation*: system-wide with checking existence of required packages or in virtual environment * *configuration*: options are discovered via OpenStack API, but you can override them if you need * *running*: pre-creating all required resources(i.e images, tenants, etc), prepare arguments, launching Tempest, live-progress output * *results*: all verifications are stored in db, you can built reports, compare verification at whatever you want time. Appeared in Rally 0.8.0 *(actually, it appeared long time ago with first revision of Verification Component, but 0.8.0 is mentioned since it is first release after Verification Component redesign)* """ RUN_ARGS = {"set": "Name of predefined set of tests. 
Known names: %s" % ", ".join(AVAILABLE_SETS)} @property def run_environ(self): env = super(TempestManager, self).run_environ env["TEMPEST_CONFIG_DIR"] = os.path.dirname(self.configfile) env["TEMPEST_CONFIG"] = os.path.basename(self.configfile) # TODO(andreykurilin): move it to Testr base class env["OS_TEST_PATH"] = os.path.join(self.repo_dir, "tempest/test_discover") return env @property def configfile(self): return os.path.join(self.home_dir, "tempest.conf") def validate_args(self, args): """Validate given arguments.""" super(TempestManager, self).validate_args(args) if args.get("pattern"): pattern = args["pattern"].split("=", 1) if len(pattern) == 1: pass # it is just a regex elif pattern[0] == "set": if pattern[1] not in AVAILABLE_SETS: raise exceptions.ValidationError( "Test set '%s' not found in available " "Tempest test sets. Available sets are '%s'." % (pattern[1], "', '".join(AVAILABLE_SETS))) else: raise exceptions.ValidationError( "'pattern' argument should be a regexp or set name " "(format: 'tempest.api.identity.v3', 'set=smoke').") def configure(self, extra_options=None): """Configure Tempest.""" utils.create_dir(self.home_dir) tcm = config.TempestConfigfileManager(self.verifier.deployment) return tcm.create(self.configfile, extra_options) def is_configured(self): """Check whether Tempest is configured or not.""" return os.path.exists(self.configfile) def get_configuration(self): """Get Tempest configuration.""" with open(self.configfile) as f: return f.read() def extend_configuration(self, extra_options): """Extend Tempest configuration with extra options.""" return utils.extend_configfile(extra_options, self.configfile) def override_configuration(self, new_configuration): """Override Tempest configuration by new configuration.""" with open(self.configfile, "w") as f: f.write(new_configuration) def install_extension(self, source, version=None, extra_settings=None): """Install a Tempest plugin.""" if extra_settings: raise NotImplementedError( _LE("'%s' 
verifiers don't support extra installation settings " "for extensions.") % self.get_name()) version = version or "master" egg = re.sub("\.git$", "", os.path.basename(source.strip("/"))) full_source = "git+{0}@{1}#egg={2}".format(source, version, egg) # NOTE(ylobankov): Use 'develop mode' installation to provide an # ability to advanced users to change tests or # develop new ones in verifier repo on the fly. cmd = ["pip", "install", "--src", os.path.join(self.base_dir, "extensions"), "-e", full_source] if self.verifier.system_wide: cmd.insert(2, "--no-deps") utils.check_output(cmd, cwd=self.base_dir, env=self.environ) # Very often Tempest plugins are inside projects and requirements # for plugins are listed in the test-requirements.txt file. test_reqs_path = os.path.join(self.base_dir, "extensions", egg, "test-requirements.txt") if os.path.exists(test_reqs_path): if not self.verifier.system_wide: utils.check_output(["pip", "install", "-r", test_reqs_path], cwd=self.base_dir, env=self.environ) else: self.check_system_wide(reqs_file_path=test_reqs_path) def list_extensions(self): """List all installed Tempest plugins.""" # TODO(andreykurilin): find a better way to list tempest plugins cmd = ("from tempest.test_discover import plugins; " "plugins_manager = plugins.TempestTestPluginManager(); " "plugins_map = plugins_manager.get_plugin_load_tests_tuple(); " "plugins_list = [" " {'name': p.name, " " 'entry_point': p.entry_point_target, " " 'location': plugins_map[p.name][1]} " " for p in plugins_manager.ext_plugins.extensions]; " "print(plugins_list)") try: output = utils.check_output(["python", "-c", cmd], cwd=self.base_dir, env=self.environ, debug_output=False).strip() except subprocess.CalledProcessError: raise exceptions.RallyException( "Cannot list installed Tempest plugins for verifier %s." 
% self.verifier) return yaml.safe_load(output) def uninstall_extension(self, name): """Uninstall a Tempest plugin.""" for ext in self.list_extensions(): if ext["name"] == name and os.path.exists(ext["location"]): shutil.rmtree(ext["location"]) break else: raise exceptions.RallyException( "There is no Tempest plugin with name '%s'. " "Are you sure that it was installed?" % name) def list_tests(self, pattern=""): """List all Tempest tests.""" if pattern: pattern = self._transform_pattern(pattern) return super(TempestManager, self).list_tests(pattern) def prepare_run_args(self, run_args): """Prepare 'run_args' for testr context.""" if run_args.get("pattern"): run_args["pattern"] = self._transform_pattern(run_args["pattern"]) return run_args @staticmethod def _transform_pattern(pattern): """Transform pattern into Tempest-specific pattern.""" parsed_pattern = pattern.split("=", 1) if len(parsed_pattern) == 2: if parsed_pattern[0] == "set": if parsed_pattern[1] in consts.TempestTestSets: return "smoke" if parsed_pattern[1] == "smoke" else "" elif parsed_pattern[1] in consts.TempestApiTestSets: return "tempest.api.%s" % parsed_pattern[1] else: return "tempest.%s" % parsed_pattern[1] return pattern # it is just a regex rally-0.9.1/rally/plugins/openstack/verification/tempest/config.py0000664000567000056710000002346313073417720026576 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import inspect
import os

from oslo_config import cfg
import six
from six.moves import configparser
from six.moves.urllib import parse

from rally.common import objects
from rally import osclients
from rally.verification import utils


TEMPEST_OPTS = [
    cfg.StrOpt("img_url",
               default="http://download.cirros-cloud.net/"
                       "0.3.5/cirros-0.3.5-x86_64-disk.img",
               help="image URL"),
    cfg.StrOpt("img_disk_format",
               default="qcow2",
               help="Image disk format to use when creating the image"),
    cfg.StrOpt("img_container_format",
               default="bare",
               help="Image container format to use when creating the image"),
    cfg.StrOpt("img_name_regex",
               default="^.*(cirros|testvm).*$",
               help="Regular expression for name of a public image to "
                    "discover it in the cloud and use it for the tests. "
                    "Note that when Rally is searching for the image, case "
                    "insensitive matching is performed. Specify nothing "
                    "('img_name_regex =') if you want to disable discovering. "
                    "In this case Rally will create needed resources by "
                    "itself if the values for the corresponding config "
                    "options are not specified in the Tempest config file"),
    cfg.StrOpt("swift_operator_role",
               default="Member",
               help="Role required for users "
                    "to be able to create Swift containers"),
    cfg.StrOpt("swift_reseller_admin_role",
               default="ResellerAdmin",
               help="User role that has reseller admin"),
    cfg.StrOpt("heat_stack_owner_role",
               default="heat_stack_owner",
               help="Role required for users "
                    "to be able to manage Heat stacks"),
    cfg.StrOpt("heat_stack_user_role",
               default="heat_stack_user",
               help="Role for Heat template-defined users"),
    # NOTE: these options are IntOpts, so their defaults must be integers.
    # They previously used string defaults ("64", "128", "64"); oslo.config
    # does not coerce default values, so str objects leaked into numeric
    # comparisons (flavor.ram == flv_ram) and "%d" formatting downstream.
    cfg.IntOpt("flavor_ref_ram",
               default=64,
               help="Primary flavor RAM size used by most of the test cases"),
    cfg.IntOpt("flavor_ref_alt_ram",
               default=128,
               help="Alternate reference flavor RAM size used by test that"
                    "need two flavors, like those that resize an instance"),
    cfg.IntOpt("heat_instance_type_ram",
               default=64,
               help="RAM size flavor used for orchestration test cases")
]

CONF = cfg.CONF
CONF.register_opts(TEMPEST_OPTS, "tempest")
CONF.import_opt("glance_image_delete_timeout",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")
CONF.import_opt("glance_image_delete_poll_interval",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")


class TempestConfigfileManager(object):
    """Class to create a Tempest config file."""

    def __init__(self, deployment):
        # Admin credential of the deployment is used both for the [auth]
        # section and for service discovery.
        self.credential = deployment.get_credentials_for("openstack")["admin"]
        self.clients = osclients.Clients(objects.Credential(**self.credential))
        self.available_services = self.clients.services().values()

        self.conf = configparser.ConfigParser()

    def _get_service_type_by_service_name(self, service_name):
        """Reverse lookup of a service type in the service catalog.

        Returns None implicitly when the service name is not present.
        """
        for s_type, s_name in self.clients.services().items():
            if s_name == service_name:
                return s_type

    def _configure_auth(self, section_name="auth"):
        """Fill the [auth] section from the admin credential."""
        self.conf.set(section_name, "admin_username",
                      self.credential["username"])
        self.conf.set(section_name, "admin_password",
                      self.credential["password"])
        self.conf.set(section_name, "admin_project_name",
                      self.credential["tenant_name"])
        # Keystone v3 related parameter
        self.conf.set(section_name, "admin_domain_name",
                      self.credential["user_domain_name"] or "Default")

    # Sahara has two service types: 'data_processing' and 'data-processing'.
    # 'data_processing' is deprecated, but it can be used in previous
    # OpenStack releases. So we need to configure the 'catalog_type' option
    # to support environments where 'data_processing' is used as service
    # type for Sahara.
    def _configure_data_processing(self, section_name="data-processing"):
        """Point the Sahara catalog_type at the type the cloud exposes."""
        if "sahara" in self.available_services:
            self.conf.set(section_name, "catalog_type",
                          self._get_service_type_by_service_name("sahara"))

    def _configure_identity(self, section_name="identity"):
        """Fill the [identity] section from the admin credential.

        Derives both the v2.0 ('uri') and v3 ('uri_v3') auth endpoints
        from the deployment's auth_url, whichever version it points at.
        """
        self.conf.set(section_name, "region",
                      self.credential["region_name"])
        auth_url = self.credential["auth_url"]
        if "/v2" not in auth_url and "/v3" not in auth_url:
            # Unversioned endpoint: assume Keystone v2 and append the
            # version path.
            auth_version = "v2"
            auth_url_v2 = parse.urljoin(auth_url, "/v2.0")
        else:
            # Versioned endpoint: take "v2"/"v3" from the first two chars
            # of the URL path, then rewrite the path to the v2.0 form.
            url_path = parse.urlparse(auth_url).path
            auth_version = url_path[1:3]
            auth_url_v2 = auth_url.replace(url_path, "/v2.0")
        self.conf.set(section_name, "auth_version", auth_version)
        self.conf.set(section_name, "uri", auth_url_v2)
        self.conf.set(section_name, "uri_v3",
                      auth_url_v2.replace("/v2.0", "/v3"))

        self.conf.set(section_name, "disable_ssl_certificate_validation",
                      str(self.credential["https_insecure"]))
        self.conf.set(section_name, "ca_certificates_file",
                      self.credential["https_cacert"])

    # The compute section is configured in context class for Tempest
    # resources. Options which are configured there: 'image_ref',
    # 'image_ref_alt', 'flavor_ref', 'flavor_ref_alt'.
    def _configure_network(self, section_name="network"):
        """Configure networking for the tests.

        With Neutron: use the first ACTIVE external network as the public
        network. With nova-network: use the first named network as the
        fixed/ssh network instead.
        """
        if "neutron" in self.available_services:
            neutronclient = self.clients.neutron()
            public_nets = [net for net
                           in neutronclient.list_networks()["networks"]
                           if net["status"] == "ACTIVE" and
                           net["router:external"] is True]
            if public_nets:
                net_id = public_nets[0]["id"]
                net_name = public_nets[0]["name"]
                self.conf.set(section_name, "public_network_id", net_id)
                self.conf.set(section_name, "floating_network_name", net_name)
        else:
            novaclient = self.clients.nova()
            net_name = next(net.human_id for net in novaclient.networks.list()
                            if net.human_id is not None)
            self.conf.set("compute", "fixed_network_name", net_name)
            self.conf.set("validation", "network_for_ssh", net_name)

    def _configure_network_feature_enabled(
            self, section_name="network-feature-enabled"):
        """Advertise every available Neutron extension alias to Tempest."""
        if "neutron" in self.available_services:
            neutronclient = self.clients.neutron()
            extensions = neutronclient.list_ext("extensions", "/extensions",
                                                retrieve_all=True)
            aliases = [ext["alias"] for ext in extensions["extensions"]]
            aliases_str = ",".join(aliases)
            self.conf.set(section_name, "api_extensions", aliases_str)

    def _configure_object_storage(self, section_name="object-storage"):
        """Set Swift roles from the rally 'tempest' config options."""
        self.conf.set(section_name, "operator_role",
                      CONF.tempest.swift_operator_role)
        self.conf.set(section_name, "reseller_admin_role",
                      CONF.tempest.swift_reseller_admin_role)

    def _configure_service_available(self, section_name="service_available"):
        """Mark each known service as available or not in the catalog."""
        services = ["cinder", "glance", "heat", "ironic", "neutron", "nova",
                    "sahara", "swift"]
        for service in services:
            # Convert boolean to string because ConfigParser fails
            # on attempt to get option with boolean value
            self.conf.set(section_name, service,
                          str(service in self.available_services))

    def _configure_validation(self, section_name="validation"):
        # Floating IPs need Neutron; fall back to fixed addressing otherwise.
        if "neutron" in self.available_services:
            self.conf.set(section_name, "connect_method", "floating")
        else:
            self.conf.set(section_name, "connect_method", "fixed")

    def _configure_orchestration(self, section_name="orchestration"):
        """Set Heat roles from the rally 'tempest' config options."""
        self.conf.set(section_name, "stack_owner_role",
                      CONF.tempest.heat_stack_owner_role)
        self.conf.set(section_name, "stack_user_role",
                      CONF.tempest.heat_stack_user_role)

    def create(self, conf_path, extra_options=None):
        """Render the Tempest config file to ``conf_path``.

        Starts from the bundled config.ini template, runs every
        ``_configure_*`` method found on this class via introspection,
        applies ``extra_options``, writes the file and returns the same
        content as a string with a warning banner prepended.
        """
        self.conf.read(os.path.join(os.path.dirname(__file__), "config.ini"))
        for name, method in inspect.getmembers(self, inspect.ismethod):
            if name.startswith("_configure_"):
                method()

        if extra_options:
            utils.add_extra_options(extra_options, self.conf)

        with open(conf_path, "w") as configfile:
            self.conf.write(configfile)

        raw_conf = six.StringIO()
        raw_conf.write("# Some empty values of options will be replaced while "
                       "creating required resources (images, flavors, etc).\n")
        self.conf.write(raw_conf)

        return raw_conf.getvalue()
rally-0.9.1/rally/plugins/openstack/verification/tempest/context.py0000664000567000056710000003512113073417720027007 0ustar jenkinsjenkins00000000000000# Copyright 2017: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re

import requests
from six.moves import configparser

from rally.common.i18n import _
from rally.common import logging
from rally.common import objects
from rally import exceptions
from rally import osclients
from rally.plugins.openstack.verification.tempest import config as conf
from rally.plugins.openstack.wrappers import glance
from rally.plugins.openstack.wrappers import network
from rally.task import utils as task_utils
from rally.verification import context
from rally.verification import utils


LOG = logging.getLogger(__name__)


@context.configure("tempest", order=900)
class TempestContext(context.VerifierContext):
    """Context class to create/delete resources needed for Tempest."""

    RESOURCE_NAME_FORMAT = "rally_verify_XXXXXXXX_XXXXXXXX"

    def __init__(self, ctx):
        super(TempestContext, self).__init__(ctx)

        creds = self.verifier.deployment.get_credentials_for("openstack")
        self.clients = osclients.Clients(objects.Credential(**creds["admin"]))
        self.available_services = self.clients.services().values()

        self.conf = configparser.ConfigParser()
        self.conf_path = self.verifier.manager.configfile

        self.data_dir = self.verifier.manager.home_dir
        self.image_name = "tempest-image"

        # Bookkeeping of resources created by setup() so cleanup() removes
        # only what this context created, never pre-existing resources.
        self._created_roles = []
        self._created_images = []
        self._created_flavors = []
        self._created_networks = []

    def setup(self):
        """Create missing roles/images/flavors/networks and patch config."""
        self.conf.read(self.conf_path)

        utils.create_dir(self.data_dir)

        self._create_tempest_roles()

        self._configure_option("DEFAULT", "log_file",
                               os.path.join(self.data_dir, "tempest.log"))
        self._configure_option("oslo_concurrency", "lock_path",
                               os.path.join(self.data_dir, "lock_files"))
        self._configure_option("scenario", "img_dir", self.data_dir)
        self._configure_option("scenario", "img_file", self.image_name,
                               helper_method=self._download_image)
        self._configure_option("compute", "image_ref",
                               helper_method=self._discover_or_create_image)
        self._configure_option("compute", "image_ref_alt",
                               helper_method=self._discover_or_create_image)
        self._configure_option("compute", "flavor_ref",
                               helper_method=self._discover_or_create_flavor,
                               flv_ram=conf.CONF.tempest.flavor_ref_ram)
        self._configure_option("compute", "flavor_ref_alt",
                               helper_method=self._discover_or_create_flavor,
                               flv_ram=conf.CONF.tempest.flavor_ref_alt_ram)
        if "neutron" in self.available_services:
            neutronclient = self.clients.neutron()
            if neutronclient.list_networks(shared=True)["networks"]:
                # If the OpenStack cloud has some shared networks, we will
                # create our own shared network and specify its name in the
                # Tempest config file. Such approach will allow us to avoid
                # failures of Tempest tests with error "Multiple possible
                # networks found". Otherwise the default behavior defined in
                # Tempest will be used and Tempest itself will manage network
                # resources.
                LOG.debug("Shared networks found. "
                          "'fixed_network_name' option should be configured.")
                self._configure_option(
                    "compute", "fixed_network_name",
                    helper_method=self._create_network_resources)
        if "heat" in self.available_services:
            self._configure_option(
                "orchestration", "instance_type",
                helper_method=self._discover_or_create_flavor,
                flv_ram=conf.CONF.tempest.heat_instance_type_ram)

        with open(self.conf_path, "w") as configfile:
            self.conf.write(configfile)

    def cleanup(self):
        """Delete created resources and blank their config references."""
        # Tempest tests may take more than 1 hour and we should remove all
        # cached clients sessions to avoid tokens expiration when deleting
        # Tempest resources.
        self.clients.clear()

        self._cleanup_tempest_roles()
        self._cleanup_images()
        self._cleanup_flavors()
        if "neutron" in self.available_services:
            self._cleanup_network_resources()

        with open(self.conf_path, "w") as configfile:
            self.conf.write(configfile)

    def _create_tempest_roles(self):
        """Create the Swift/Heat roles that Tempest expects, if missing."""
        keystoneclient = self.clients.verified_keystone()
        roles = [conf.CONF.tempest.swift_operator_role,
                 conf.CONF.tempest.swift_reseller_admin_role,
                 conf.CONF.tempest.heat_stack_owner_role,
                 conf.CONF.tempest.heat_stack_user_role]
        existing_roles = set(role.name for role in keystoneclient.roles.list())
        for role in roles:
            if role not in existing_roles:
                LOG.debug("Creating role '%s'." % role)
                self._created_roles.append(keystoneclient.roles.create(role))

    def _configure_option(self, section, option, value=None,
                          helper_method=None, *args, **kwargs):
        """Set a config option only when it is still empty.

        User-provided values already present in the config file are never
        overridden. When ``helper_method`` is given it is called to obtain
        the resource whose name (for network options) or id provides the
        value.
        """
        option_value = self.conf.get(section, option)
        if not option_value:
            LOG.debug("Option '%s' from '%s' section "
                      "is not configured." % (option, section))
            if helper_method:
                res = helper_method(*args, **kwargs)
                if res:
                    value = res["name"] if "network" in option else res.id
            LOG.debug("Setting value '%s' to option '%s'." % (value, option))
            self.conf.set(section, option, value)
            LOG.debug("Option '{opt}' is configured. "
                      "{opt} = {value}".format(opt=option, value=value))
        else:
            LOG.debug("Option '{opt}' is already configured "
                      "in Tempest config file. {opt} = {opt_val}"
                      .format(opt=option, opt_val=option_value))

    def _discover_image(self):
        """Return the first active public image matching img_name_regex.

        Returns None implicitly when nothing matches.
        """
        LOG.debug("Trying to discover a public image with name matching "
                  "regular expression '%s'. Note that case insensitive "
                  "matching is performed." % conf.CONF.tempest.img_name_regex)
        glance_wrapper = glance.wrap(self.clients.glance, self)
        images = glance_wrapper.list_images(status="active",
                                            visibility="public")
        for image in images:
            if image.name and re.match(conf.CONF.tempest.img_name_regex,
                                       image.name, re.IGNORECASE):
                LOG.debug("The following public "
                          "image discovered: '%s'." % image.name)
                return image
        LOG.debug("There is no public image with name matching regular "
                  "expression '%s'." % conf.CONF.tempest.img_name_regex)

    def _download_image_from_source(self, target_path, image=None):
        """Write the image to target_path from Glance or from img_url.

        :raises RallyException: when the HTTP download fails.
        """
        if image:
            LOG.debug("Downloading image '%s' "
                      "from Glance to %s." % (image.name, target_path))
            with open(target_path, "wb") as image_file:
                for chunk in self.clients.glance().images.data(image.id):
                    image_file.write(chunk)
        else:
            LOG.debug("Downloading image from %s "
                      "to %s." % (conf.CONF.tempest.img_url, target_path))
            try:
                response = requests.get(conf.CONF.tempest.img_url, stream=True)
            except requests.ConnectionError as err:
                msg = _("Failed to download image. "
                        "Possibly there is no connection to Internet. "
                        "Error: %s.") % (str(err) or "unknown")
                raise exceptions.RallyException(msg)

            if response.status_code == 200:
                with open(target_path, "wb") as image_file:
                    for chunk in response.iter_content(chunk_size=1024):
                        if chunk:   # filter out keep-alive new chunks
                            image_file.write(chunk)
                            image_file.flush()
            else:
                if response.status_code == 404:
                    msg = _("Failed to download image. Image was not found.")
                else:
                    msg = _("Failed to download image. "
                            "HTTP error code %d.") % response.status_code
                raise exceptions.RallyException(msg)

        LOG.debug("The image has been successfully downloaded!")

    def _download_image(self):
        """Ensure the test image exists on disk in data_dir."""
        image_path = os.path.join(self.data_dir, self.image_name)
        if os.path.isfile(image_path):
            LOG.debug("Image is already downloaded to %s." % image_path)
            return
        if conf.CONF.tempest.img_name_regex:
            image = self._discover_image()
            if image:
                return self._download_image_from_source(image_path, image)
        self._download_image_from_source(image_path)

    def _discover_or_create_image(self):
        """Reuse a discovered public image or upload a new one."""
        if conf.CONF.tempest.img_name_regex:
            image = self._discover_image()
            if image:
                LOG.debug("Using image '%s' (ID = %s) "
                          "for the tests." % (image.name, image.id))
                return image

        params = {
            "name": self.generate_random_name(),
            "disk_format": conf.CONF.tempest.img_disk_format,
            "container_format": conf.CONF.tempest.img_container_format,
            "image_location": os.path.join(self.data_dir, self.image_name),
            "visibility": "public"
        }
        LOG.debug("Creating image '%s'." % params["name"])
        glance_wrapper = glance.wrap(self.clients.glance, self)
        image = glance_wrapper.create_image(**params)
        LOG.debug("Image '%s' (ID = %s) has been "
                  "successfully created!" % (image.name, image.id))
        self._created_images.append(image)

        return image

    def _discover_or_create_flavor(self, flv_ram):
        """Reuse a flavor with the given RAM (1 VCPU, 0 disk) or create it."""
        novaclient = self.clients.nova()

        LOG.debug("Trying to discover a flavor with the following "
                  "properties: RAM = %dMB, VCPUs = 1, disk = 0GB." % flv_ram)
        for flavor in novaclient.flavors.list():
            if (flavor.ram == flv_ram and
                    flavor.vcpus == 1 and flavor.disk == 0):
                LOG.debug("The following flavor discovered: '{0}'. "
                          "Using flavor '{0}' (ID = {1}) for the tests."
                          .format(flavor.name, flavor.id))
                return flavor

        LOG.debug("There is no flavor with the mentioned properties.")

        params = {
            "name": self.generate_random_name(),
            "ram": flv_ram,
            "vcpus": 1,
            "disk": 0
        }
        LOG.debug("Creating flavor '%s' with the following properties: RAM "
                  "= %dMB, VCPUs = 1, disk = 0GB." % (params["name"], flv_ram))
        flavor = novaclient.flavors.create(**params)
        LOG.debug("Flavor '%s' (ID = %s) has been "
                  "successfully created!" % (flavor.name, flavor.id))
        self._created_flavors.append(flavor)

        return flavor

    def _create_network_resources(self):
        """Create a shared network (with subnet and router) for the tests."""
        neutron_wrapper = network.NeutronWrapper(self.clients, self)
        tenant_id = self.clients.keystone.auth_ref.project_id
        LOG.debug("Creating network resources: network, subnet, router.")
        net = neutron_wrapper.create_network(
            tenant_id, subnets_num=1, add_router=True,
            network_create_args={"shared": True})
        LOG.debug("Network resources have been successfully created!")
        self._created_networks.append(net)
        return net

    def _cleanup_tempest_roles(self):
        keystoneclient = self.clients.keystone()
        for role in self._created_roles:
            LOG.debug("Deleting role '%s'." % role.name)
            keystoneclient.roles.delete(role.id)
            LOG.debug("Role '%s' has been deleted." % role.name)

    def _cleanup_images(self):
        glance_wrapper = glance.wrap(self.clients.glance, self)
        for image in self._created_images:
            LOG.debug("Deleting image '%s'." % image.name)
            self.clients.glance().images.delete(image.id)
            task_utils.wait_for_status(
                image, ["deleted", "pending_delete"],
                check_deletion=True,
                update_resource=glance_wrapper.get_image,
                timeout=conf.CONF.benchmark.glance_image_delete_timeout,
                check_interval=conf.CONF.benchmark.
                glance_image_delete_poll_interval)
            LOG.debug("Image '%s' has been deleted." % image.name)
            self._remove_opt_value_from_config("compute", image.id)

    def _cleanup_flavors(self):
        novaclient = self.clients.nova()
        for flavor in self._created_flavors:
            LOG.debug("Deleting flavor '%s'." % flavor.name)
            novaclient.flavors.delete(flavor.id)
            LOG.debug("Flavor '%s' has been deleted." % flavor.name)
            # A flavor id may be referenced from either section.
            self._remove_opt_value_from_config("compute", flavor.id)
            self._remove_opt_value_from_config("orchestration", flavor.id)

    def _cleanup_network_resources(self):
        neutron_wrapper = network.NeutronWrapper(self.clients, self)
        for net in self._created_networks:
            LOG.debug("Deleting network resources: router, subnet, network.")
            neutron_wrapper.delete_network(net)
            self._remove_opt_value_from_config("compute", net["name"])
            LOG.debug("Network resources have been deleted.")

    def _remove_opt_value_from_config(self, section, opt_value):
        """Blank every option in ``section`` whose value equals opt_value."""
        for option, value in self.conf.items(section):
            if opt_value == value:
                LOG.debug("Removing value '%s' of option '%s' "
                          "from Tempest config file." % (opt_value, option))
                self.conf.set(section, option, "")
                LOG.debug("Value '%s' has been removed." % opt_value)
rally-0.9.1/rally/plugins/openstack/verification/tempest/consts.py0000664000567000056710000000263413073417716026634 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common import utils


class _TempestApiTestSets(utils.ImmutableMixin, utils.EnumMixin):
    """Names of Tempest API test sets (mapped to tempest.api.<name>)."""
    BAREMETAL = "baremetal"
    CLUSTERING = "clustering"
    COMPUTE = "compute"
    DATA_PROCESSING = "data_processing"
    DATABASE = "database"
    IDENTITY = "identity"
    IMAGE = "image"
    MESSAGING = "messaging"
    NETWORK = "network"
    OBJECT_STORAGE = "object_storage"
    ORCHESTRATION = "orchestration"
    TELEMETRY = "telemetry"
    VOLUME = "volume"


class _TempestScenarioTestSets(utils.ImmutableMixin, utils.EnumMixin):
    """Names of Tempest scenario test sets (mapped to tempest.<name>)."""
    SCENARIO = "scenario"


class _TempestTestSets(utils.ImmutableMixin, utils.EnumMixin):
    """Top-level Tempest test sets selectable via a "set=<name>" pattern."""
    FULL = "full"
    SMOKE = "smoke"


# Shared immutable instances used when translating "set=<name>" patterns
# into Tempest-specific test selectors.
TempestApiTestSets = _TempestApiTestSets()
TempestScenarioTestSets = _TempestScenarioTestSets()
TempestTestSets = _TempestTestSets()
rally-0.9.1/rally/plugins/openstack/context/0000775000567000056710000000000013073420067022266 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/watcher/0000775000567000056710000000000013073420067023723 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/watcher/__init__.py0000664000567000056710000000000013073417716026031 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/watcher/audit_templates.py0000664000567000056710000001010213073417720027455 0ustar jenkinsjenkins00000000000000# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random import six from rally.common.i18n import _ from rally.common import logging from rally import consts from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.watcher import utils as watcher_utils from rally.plugins.openstack import types from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="audit_templates", order=550) class AuditTemplateGenerator(context.Context): """Context class for adding temporary audit template for benchmarks.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "audit_templates_per_admin": {"type": "integer", "minimum": 1}, "fill_strategy": {"enum": ["round_robin", "random", None]}, "params": { "type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "object", "properties": { "goal": { "type": "object", "properties": { "name": { "type": "string" } } }, "strategy": { "type": "object", "properties": { "name": { "type": "string" } } }, }, }, } }, "additionalProperties": False, "required": ["params"] } DEFAULT_CONFIG = { "audit_templates_per_admin": 1, "fill_strategy": "round_robin" } @logging.log_task_wrapper(LOG.info, _("Enter context: `Audit Templates`")) def setup(self): watcher_scenario = watcher_utils.WatcherScenario( {"admin": self.context["admin"], "task": self.context["task"], "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) clients = osclients.Clients(self.context["admin"]["credential"]) self.context["audit_templates"] = [] for i in six.moves.range(self.config["audit_templates_per_admin"]): cfg_size = len(self.config["params"]) if self.config["fill_strategy"] == "round_robin": audit_params = self.config["params"][i % cfg_size] elif self.config["fill_strategy"] == "random": audit_params = random.choice(self.config["params"]) goal_id = types.WatcherGoal.transform( clients=clients, resource_config=audit_params["goal"]) strategy_id 
= types.WatcherStrategy.transform( clients=clients, resource_config=audit_params["strategy"]) audit_template = watcher_scenario._create_audit_template( goal_id, strategy_id) self.context["audit_templates"].append(audit_template.uuid) @logging.log_task_wrapper(LOG.info, _("Exit context: `Audit Templates`")) def cleanup(self): resource_manager.cleanup(names=["watcher.action_plan", "watcher.audit_template"], admin=self.context.get("admin", [])) rally-0.9.1/rally/plugins/openstack/context/fuel.py0000664000567000056710000001027513073417716023607 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections

from rally.common import broker
from rally.common.i18n import _
from rally.common import logging
from rally import consts
from rally import exceptions
from rally.plugins.openstack.scenarios.fuel import utils as fuel_utils
from rally.task import context as base


LOG = logging.getLogger(__name__)


@base.configure(name="fuel_environments", order=110)
class FuelEnvGenerator(base.Context):
    """Context for generating Fuel environments."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "environments": {
                "type": "integer",
                "minimum": 1
            },
            "release_id": {
                "type": "integer"
            },
            "network_provider": {
                "type": "string"
            },
            "net_segment_type": {
                "type": "string"
            },
            "deployment_mode": {
                "type": "string"
            },
            "resource_management_workers": {
                "type": "integer",
                "minimum": 1
            },
        },
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "environments": 5,
        "release_id": 1,
        "network_provider": "neutron",
        "deployment_mode": "ha_compact",
        "net_segment_type": "vlan",
        "resource_management_workers": 2
    }

    def _create_envs(self):
        """Create the configured number of environments concurrently.

        Uses the publish/consume broker with
        ``resource_management_workers`` worker threads; returns the list
        of created environment ids.
        """
        threads = self.config["resource_management_workers"]

        envs = collections.deque()

        def publish(queue):
            kwargs = {"release_id": self.config["release_id"],
                      "network_provider": self.config["network_provider"],
                      "deployment_mode": self.config["deployment_mode"],
                      "net_segment_type": self.config["net_segment_type"]}
            for i in range(self.config["environments"]):
                queue.append(kwargs)

        def consume(cache, kwargs):
            env_id = self.fscenario._create_environment(**kwargs)
            envs.append(env_id)

        broker.run(publish, consume, threads)
        return list(envs)

    def _delete_envs(self):
        """Delete every environment recorded in the context, concurrently."""
        threads = self.config["resource_management_workers"]

        def publish(queue):
            queue.extend(self.context["fuel"]["environments"])

        def consume(cache, env_id):
            self.fscenario._delete_environment(env_id)

        broker.run(publish, consume, threads)
        # Drop the bookkeeping once the environments are gone.
        self.context["fuel"] = {}

    @logging.log_task_wrapper(LOG.info,
                              _("Enter context: `fuel_environments`"))
    def setup(self):
        """Create Fuel environments, using the broker pattern."""
        self.context.setdefault("fuel", {})
        self.context["fuel"].setdefault("environments", [])

        threads = self.config["resource_management_workers"]

        LOG.debug("Creating %(envs)d environments using %(threads)s threads" %
                  {"envs": self.config["environments"], "threads": threads})
        self.fscenario = fuel_utils.FuelScenario(self.context)
        self.context["fuel"]["environments"] = self._create_envs()

        # Fail the whole context if any environment creation silently failed.
        if len(self.context[
                "fuel"]["environments"]) != self.config["environments"]:
            raise exceptions.ContextSetupFailure(
                ctx_name=self.get_name(),
                msg=_("failed to create the requested"
                      " number of environments."))

    @logging.log_task_wrapper(LOG.info,
                              _("Exit context: `fuel_environments`"))
    def cleanup(self):
        """Delete environments, using the broker pattern."""
        self._delete_envs()
rally-0.9.1/rally/plugins/openstack/context/__init__.py0000664000567000056710000000000013073417716024374 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/magnum/0000775000567000056710000000000013073420067023552 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/magnum/__init__.py0000664000567000056710000000000013073417716025660 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/magnum/cluster_templates.py0000664000567000056710000001033713073417720027671 0ustar jenkinsjenkins00000000000000# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.magnum import utils as magnum_utils
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="cluster_templates", order=470)
class ClusterTemplateGenerator(context.Context):
    """Context class for generating temporary cluster model for benchmarks."""

    # The schema mirrors the keyword arguments accepted by Magnum cluster
    # template creation; the whole validated config is forwarded verbatim.
    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "image_id": {
                "type": "string"
            },
            "flavor_id": {
                "type": "string"
            },
            "master_flavor_id": {
                "type": "string"
            },
            "external_network_id": {
                "type": "string"
            },
            "fixed_network": {
                "type": "string"
            },
            "fixed_subnet": {
                "type": "string"
            },
            "dns_nameserver": {
                "type": "string"
            },
            "docker_volume_size": {
                "type": "integer"
            },
            "labels": {
                "type": "string"
            },
            "coe": {
                "type": "string"
            },
            "http_proxy": {
                "type": "string"
            },
            "https_proxy": {
                "type": "string"
            },
            "no_proxy": {
                "type": "string"
            },
            "network_driver": {
                "type": "string"
            },
            "tls_disabled": {
                "type": "boolean"
            },
            "public": {
                "type": "boolean"
            },
            "registry_enabled": {
                "type": "boolean"
            },
            "volume_driver": {
                "type": "string"
            },
            "server_type": {
                "type": "string"
            },
            "docker_storage_driver": {
                "type": "string"
            },
            "master_lb_enabled": {
                "type": "boolean"
            }
        },
        "required": ["image_id", "external_network_id", "coe"],
        "additionalProperties": False
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `ClusterTemplate`"))
    def setup(self):
        """Create one Nova keypair and one cluster template per tenant.

        The template uuid is stored per tenant under the
        "cluster_template" key.
        """
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            nova_scenario = nova_utils.NovaScenario({
                "user": user,
                "task": self.context["task"],
                "config": {"api_versions": self.context["config"].get(
                    "api_versions", [])}
            })
            keypair = nova_scenario._create_keypair()

            magnum_scenario = magnum_utils.MagnumScenario({
                "user": user,
                "task": self.context["task"],
                "config": {"api_versions": self.context["config"].get(
                    "api_versions", [])}
            })

            cluster_template = magnum_scenario._create_cluster_template(
                keypair_id=keypair, **self.config)

            ct_uuid = cluster_template.uuid
            self.context["tenants"][tenant_id]["cluster_template"] = ct_uuid

    @logging.log_task_wrapper(LOG.info, _("Exit context: `ClusterTemplate`"))
    def cleanup(self):
        """Delete the created cluster templates and keypairs."""
        resource_manager.cleanup(
            names=["magnum.cluster_templates", "nova.keypairs"],
            users=self.context.get("users", []))
rally-0.9.1/rally/plugins/openstack/context/magnum/clusters.py0000664000567000056710000000507413073417720026000 0ustar jenkinsjenkins00000000000000# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.magnum import utils as magnum_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="clusters", order=480) class ClusterGenerator(context.Context): """Context class for generating temporary cluster for benchmarks.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "cluster_template_uuid": { "type": "string" }, "node_count": { "type": "integer", "minimum": 1, }, }, "additionalProperties": False } DEFAULT_CONFIG = {"node_count": 1} @logging.log_task_wrapper(LOG.info, _("Enter context: `Cluster`")) def setup(self): for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): magnum_scenario = magnum_utils.MagnumScenario({ "user": user, "task": self.context["task"], "config": {"api_versions": self.context["config"].get( "api_versions", [])} }) # create a cluster ct_uuid = self.config.get("cluster_template_uuid", None) if ct_uuid is None: ctx = self.context["tenants"][tenant_id] ct_uuid = ctx.get("cluster_template") cluster = magnum_scenario._create_cluster( cluster_template=ct_uuid, node_count=self.config.get("node_count")) self.context["tenants"][tenant_id]["cluster"] = cluster.uuid @logging.log_task_wrapper(LOG.info, _("Exit context: `Cluster`")) def cleanup(self): resource_manager.cleanup( names=["magnum.clusters"], users=self.context.get("users", [])) rally-0.9.1/rally/plugins/openstack/context/cinder/0000775000567000056710000000000013073420067023532 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/cinder/__init__.py0000664000567000056710000000000013073417716025640 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/cinder/volumes.py0000775000567000056710000000606613073417720025613 
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="volumes", order=420)
class VolumeGenerator(context.Context):
    """Pre-create a number of Cinder volumes for every tenant."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "size": {
                "type": "integer",
                "minimum": 1
            },
            "type": {
                "oneOf": [{"type": "string",
                           "description": "a string-like type of volume to "
                                          "create."},
                          {"type": "null",
                           "description": "Use default type for volume to "
                                          "create."}]
            },
            "volumes_per_tenant": {
                "type": "integer",
                "minimum": 1
            }
        },
        "required": ["size"],
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "volumes_per_tenant": 1
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Volumes`"))
    def setup(self):
        """Create the configured number of volumes in each tenant."""
        vol_size = self.config["size"]
        vol_type = self.config.get("type", None)
        per_tenant = self.config["volumes_per_tenant"]

        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            tenant_ctx = self.context["tenants"][tenant_id]
            tenant_ctx.setdefault("volumes", [])
            scenario = cinder_utils.CinderScenario(
                {"user": user,
                 "task": self.context["task"],
                 "config": self.context["config"]})
            for _idx in range(per_tenant):
                new_volume = scenario._create_volume(vol_size,
                                                     volume_type=vol_type)
                # Store the raw volume info dict, not the client object.
                tenant_ctx["volumes"].append(new_volume._info)

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Volumes`"))
    def cleanup(self):
        """Delete the volumes created by this context."""
        resource_manager.cleanup(
            names=["cinder.volumes"],
            users=self.context.get("users", []),
            api_versions=self.context["config"].get("api_versions"),
            superclass=cinder_utils.CinderScenario,
            task_id=self.context["task"]["uuid"])
from rally.common.i18n import _ from rally.common import logging from rally import consts from rally import osclients from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="volume_types", order=410) class VolumeTypeGenerator(context.Context): """Context class for adding volumes types for benchmarks.""" CONFIG_SCHEMA = { "type": "array", "$schema": consts.JSON_SCHEMA, "items": {"type": "string"} } @logging.log_task_wrapper(LOG.info, _("Enter context: `volume_types`")) def setup(self): admin_clients = osclients.Clients( self.context.get("admin", {}).get("credential"), api_info=self.context["config"].get("api_versions")) cinder = admin_clients.cinder() self.context["volume_types"] = [] for vtype_name in self.config: LOG.debug("Creating Cinder volume type %s" % vtype_name) vtype = cinder.volume_types.create(vtype_name) self.context["volume_types"].append({"id": vtype.id, "name": vtype_name}) @logging.log_task_wrapper(LOG.info, _("Exit context: `volume_types`")) def cleanup(self): admin_clients = osclients.Clients( self.context.get("admin", {}).get("credential"), api_info=self.context["config"].get("api_versions")) cinder = admin_clients.cinder() for vtype in self.context["volume_types"]: LOG.debug("Deleting Cinder volume type %s" % vtype["name"]) cinder.volume_types.delete(vtype["id"]) rally-0.9.1/rally/plugins/openstack/context/ec2/0000775000567000056710000000000013073420067022737 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/ec2/__init__.py0000664000567000056710000000000013073417716025045 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/ec2/servers.py0000664000567000056710000000710413073417720025006 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.ec2 import utils as ec2_utils
from rally.plugins.openstack import types
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="ec2_servers", order=460)
class EC2ServerGenerator(context.Context):
    """Context class for adding temporary servers for benchmarks.

    Servers are added for each tenant.
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "image": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                }
            },
            "flavor": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                }
            },
            "servers_per_tenant": {
                "type": "integer",
                "minimum": 1
            }
        },
        "required": ["image", "flavor", "servers_per_tenant"],
        "additionalProperties": False
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `EC2 Servers`"))
    def setup(self):
        """Boot servers_per_tenant EC2 servers in every tenant."""
        image_cfg = self.config["image"]
        flavor_cfg = self.config["flavor"]

        # Resolve the image name to an id once, using the first user's
        # credential.
        clients = osclients.Clients(self.context["users"][0]["credential"])
        image_id = types.EC2Image.transform(clients=clients,
                                            resource_config=image_cfg)

        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            LOG.debug("Booting servers for tenant %s " % (user["tenant_id"]))
            scenario = ec2_utils.EC2Scenario(
                clients=osclients.Clients(user["credential"]))

            LOG.debug(
                "Calling _boot_servers with "
                "image_id={image_id} flavor_name={flavor_name} "
                "servers_per_tenant={servers_per_tenant}".format(
                    image_id=image_id,
                    flavor_name=flavor_cfg["name"],
                    servers_per_tenant=self.config["servers_per_tenant"]))

            booted = scenario._boot_servers(
                image_id, flavor_cfg["name"],
                self.config["servers_per_tenant"])

            # Only the server ids are kept in the context.
            self.context["tenants"][tenant_id]["ec2_servers"] = [
                server.id for server in booted]

    @logging.log_task_wrapper(LOG.info, _("Exit context: `EC2 Servers`"))
    def cleanup(self):
        """Delete the EC2 servers created by this context."""
        resource_manager.cleanup(
            names=["ec2.servers"],
            users=self.context.get("users", []),
            superclass=ec2_utils.EC2Scenario,
            task_id=self.context["task"]["uuid"])
# Copyright 2014: Rackspace UK
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common.i18n import _
from rally.common import logging
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="keypair", order=310)
class Keypair(context.Context):

    # NOTE(andreykurilin): "type" != "null", since we need to support backward
    #   compatibility(previously empty dict was valid) and I hope in near
    #   future, we will extend this context to accept keys.
    CONFIG_SCHEMA = {"type": "object",
                     "additionalProperties": False}

    def _generate_keypair(self, credential):
        """Create one Nova keypair under a name that is not yet taken."""
        nova = osclients.Clients(credential).nova()

        # NOTE(hughsaunders): If keypair exists, it should re-generate name.
        taken = {kp.name for kp in nova.keypairs.list()}
        name = self.generate_random_name()
        while name in taken:
            name = self.generate_random_name()

        created = nova.keypairs.create(name)
        return {"private": created.private_key,
                "public": created.public_key,
                "name": name,
                "id": created.id}

    @logging.log_task_wrapper(LOG.info, _("Enter context: `keypair`"))
    def setup(self):
        """Attach a freshly generated keypair dict to every user."""
        for user in self.context["users"]:
            user["keypair"] = self._generate_keypair(user["credential"])

    @logging.log_task_wrapper(LOG.info, _("Exit context: `keypair`"))
    def cleanup(self):
        """Delete the keypairs created by this context."""
        resource_manager.cleanup(
            names=["nova.keypairs"],
            users=self.context.get("users", []),
            superclass=self.__class__,
            task_id=self.context["task"]["uuid"])
from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
from rally.plugins.openstack import types
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="servers", order=430)
class ServerGenerator(context.Context):
    """Context class for adding temporary servers for benchmarks.

    Servers are added for each tenant.
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "properties": {
            "image": {
                "description": "Name of image to boot server(s) from.",
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                }
            },
            "flavor": {
                "description": "Name of flavor to boot server(s) with.",
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                }
            },
            "servers_per_tenant": {
                "description": "Number of servers to boot in each Tenant.",
                "type": "integer",
                "minimum": 1
            },
            "auto_assign_nic": {
                "description": "True if NICs should be assigned.",
                "type": "boolean",
            },
            "nics": {
                "type": "array",
                "description": "List of networks to attach to server.",
                "items": {"oneOf": [
                    {"type": "object",
                     "properties": {"net-id": {"type": "string"}},
                     "description": "Network ID in a format like OpenStack API"
                                    " expects to see."},
                    {"type": "string",
                     "description": "Network ID."}]}
            }
        },
        "required": ["image", "flavor"],
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "servers_per_tenant": 5,
        "auto_assign_nic": False
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Servers`"))
    def setup(self):
        # Boot ``servers_per_tenant`` servers in every tenant and store
        # their ids under the tenant's "servers" key.
        image = self.config["image"]
        flavor = self.config["flavor"]
        auto_nic = self.config["auto_assign_nic"]
        servers_per_tenant = self.config["servers_per_tenant"]
        kwargs = {"nics": self.config.get("nics", [])}

        # Image/flavor names are resolved to ids once, with the first
        # user's credential; the same ids are then reused for all tenants.
        clients = osclients.Clients(self.context["users"][0]["credential"])
        image_id = types.GlanceImage.transform(clients=clients,
                                               resource_config=image)
        flavor_id = types.Flavor.transform(clients=clients,
                                           resource_config=flavor)

        for iter_, (user, tenant_id) in enumerate(rutils.iterate_per_tenants(
                self.context["users"])):
            LOG.debug("Booting servers for user tenant %s "
                      % (user["tenant_id"]))
            # Minimal per-iteration context so NovaScenario can generate
            # names and bind the boot to the current user/tenant.
            tmp_context = {"user": user,
                           "tenant": self.context["tenants"][tenant_id],
                           "task": self.context["task"],
                           "iteration": iter_}
            nova_scenario = nova_utils.NovaScenario(tmp_context)

            LOG.debug("Calling _boot_servers with image_id=%(image_id)s "
                      "flavor_id=%(flavor_id)s "
                      "servers_per_tenant=%(servers_per_tenant)s"
                      % {"image_id": image_id,
                         "flavor_id": flavor_id,
                         "servers_per_tenant": servers_per_tenant})

            servers = nova_scenario._boot_servers(image_id, flavor_id,
                                                  requests=servers_per_tenant,
                                                  auto_assign_nic=auto_nic,
                                                  **kwargs)

            # Only the server ids are kept; the client objects are dropped.
            current_servers = [server.id for server in servers]

            LOG.debug("Adding booted servers %s to context" % current_servers)

            self.context["tenants"][tenant_id][
                "servers"] = current_servers

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Servers`"))
    def cleanup(self):
        # Delegate deletion of all booted servers to the generic cleanup
        # manager, scoped to this task's resources.
        resource_manager.cleanup(names=["nova.servers"],
                                 users=self.context.get("users", []),
                                 superclass=nova_utils.NovaScenario,
                                 task_id=self.context["task"]["uuid"])
from novaclient import exceptions as nova_exceptions

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="flavors", order=340)
class FlavorsGenerator(context.Context):
    """Context creates a list of flavors."""

    CONFIG_SCHEMA = {
        "type": "array",
        "$schema": consts.JSON_SCHEMA,
        "items": {
            "type": "object",
            "properties": {
                "name": {
                    "type": "string",
                },
                "ram": {
                    "type": "integer",
                    "minimum": 1
                },
                "vcpus": {
                    "type": "integer",
                    "minimum": 1
                },
                "disk": {
                    "type": "integer",
                    "minimum": 0
                },
                "swap": {
                    "type": "integer",
                    "minimum": 0
                },
                "ephemeral": {
                    "type": "integer",
                    "minimum": 0
                },
                "extra_specs": {
                    "type": "object",
                    "additionalProperties": {
                        "type": "string"
                    }
                }
            },
            "additionalProperties": False,
            "required": ["name", "ram"]
        }
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `flavors`"))
    def setup(self):
        """Create list of flavors."""
        self.context["flavors"] = {}

        # Flavor creation is an admin-only operation.
        clients = osclients.Clients(self.context["admin"]["credential"])
        for flavor_config in self.config:
            # extra_specs is pulled out before building FlavorConfig,
            # because FlavorConfig ignores it and nova's create() does
            # not accept it; it is applied separately via set_keys below.
            extra_specs = flavor_config.get("extra_specs")

            flavor_config = FlavorConfig(**flavor_config)
            try:
                flavor = clients.nova().flavors.create(**flavor_config)
            except nova_exceptions.Conflict as e:
                # A flavor with this name already exists: reuse it and
                # skip registration in the context (it was not created
                # by us, so it must not be deleted in cleanup either).
                LOG.warning("Using already existing flavor %s" %
                            flavor_config["name"])
                if logging.is_debug():
                    LOG.exception(e)
                continue

            if extra_specs:
                flavor.set_keys(extra_specs)

            self.context["flavors"][flavor_config["name"]] = flavor.to_dict()
            LOG.debug("Created flavor with id '%s'" % flavor.id)

    @logging.log_task_wrapper(LOG.info, _("Exit context: `flavors`"))
    def cleanup(self):
        """Delete created flavors."""
        clients = osclients.Clients(self.context["admin"]["credential"])
        for flavor in self.context["flavors"].values():
            # A failed delete is logged but does not abort cleanup of
            # the remaining flavors.
            with logging.ExceptionLogger(
                    LOG, _("Can't delete flavor %s") % flavor["id"]):
                rutils.retry(3, clients.nova().flavors.delete, flavor["id"])
                LOG.debug("Flavor is deleted %s" % flavor["id"])


class FlavorConfig(dict):
    def __init__(self, name, ram, vcpus=1, disk=0, swap=0, ephemeral=0,
                 extra_specs=None):
        """Flavor configuration for context and flavor & image validation code.

        Context code uses this code to provide default values for flavor
        creation.  Validation code uses this class as a Flavor instance to
        check image validity against a flavor that is to be created by
        the context.

        :param name: name of the newly created flavor
        :param ram: RAM amount for the flavor (MBs)
        :param vcpus: VCPUs amount for the flavor
        :param disk: disk amount for the flavor (GBs)
        :param swap: swap amount for the flavor (MBs)
        :param ephemeral: ephemeral disk amount for the flavor (GBs)
        :param extra_specs: is ignored
        """
        super(FlavorConfig, self).__init__(
            name=name, ram=ram, vcpus=vcpus, disk=disk,
            swap=swap, ephemeral=ephemeral)
        # Mirror the dict entries as attributes so the object can stand
        # in for a novaclient Flavor during validation.
        self.__dict__.update(self)
from oslo_config import cfg

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import osclients
from rally.plugins.openstack.wrappers import glance as glance_wrapper
from rally.task import context
from rally.task import utils

CONF = cfg.CONF
# Deletion timeout/poll-interval options are defined by the glance
# scenario utils module; import them into the "benchmark" group here.
CONF.import_opt("glance_image_delete_timeout",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")
CONF.import_opt("glance_image_delete_poll_interval",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")

LOG = logging.getLogger(__name__)


@context.configure(name="images", order=410)
class ImageGenerator(context.Context):
    """Context class for adding images to each user for benchmarks."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "image_url": {
                "type": "string",
            },
            "image_type": {
                "enum": ["qcow2", "raw", "vhd", "vmdk", "vdi", "iso", "aki",
                         "ari", "ami"],
            },
            "image_container": {
                "type": "string",
            },
            "image_name": {
                "type": "string",
            },
            "min_ram": {  # megabytes
                "type": "integer",
                "minimum": 0
            },
            "min_disk": {  # gigabytes
                "type": "integer",
                "minimum": 0
            },
            "images_per_tenant": {
                "type": "integer",
                "minimum": 1
            },
            "image_args": {
                "type": "object",
                "additionalProperties": True
            }
        },
        "required": ["image_url", "image_type", "image_container",
                     "images_per_tenant"],
        "additionalProperties": False
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Images`"))
    def setup(self):
        # Create images_per_tenant images in every tenant and record the
        # ids under the tenant's "images" key.
        image_url = self.config["image_url"]
        image_type = self.config["image_type"]
        image_container = self.config["image_container"]
        images_per_tenant = self.config["images_per_tenant"]
        image_name = self.config.get("image_name")

        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            current_images = []
            clients = osclients.Clients(
                user["credential"],
                api_info=self.context["config"].get("api_versions"))
            glance_wrap = glance_wrapper.wrap(clients.glance, self)

            kwargs = self.config.get("image_args", {})
            # "min_ram"/"min_disk" are legacy top-level options; fold them
            # into the generic image_args kwargs with a deprecation warning.
            if self.config.get("min_ram") is not None:
                LOG.warning("The 'min_ram' argument is deprecated; specify "
                            "arbitrary arguments with 'image_args' instead")
                kwargs["min_ram"] = self.config["min_ram"]
            if self.config.get("min_disk") is not None:
                LOG.warning("The 'min_disk' argument is deprecated; specify "
                            "arbitrary arguments with 'image_args' instead")
                kwargs["min_disk"] = self.config["min_disk"]
            if "is_public" in kwargs:
                # Warn only; the argument is still passed through.
                LOG.warning("The 'is_public' argument is deprecated since "
                            "Rally 0.8.0; specify visibility arguments "
                            "instead")

            for i in range(images_per_tenant):
                # With a configured name the first image keeps it as-is
                # and subsequent ones get a numeric suffix; otherwise a
                # random name is generated per image.
                if image_name and i > 0:
                    cur_name = image_name + str(i)
                elif image_name:
                    cur_name = image_name
                else:
                    cur_name = self.generate_random_name()

                image = glance_wrap.create_image(
                    image_container, image_url, image_type,
                    name=cur_name, **kwargs)
                current_images.append(image.id)

            self.context["tenants"][tenant_id]["images"] = current_images

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Images`"))
    def cleanup(self):
        # Delete every image created in setup and wait until glance
        # reports the deletion as finished.
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            clients = osclients.Clients(
                user["credential"],
                api_info=self.context["config"].get("api_versions"))
            glance_wrap = glance_wrapper.wrap(clients.glance, self)
            for image in self.context["tenants"][tenant_id].get("images", []):
                clients.glance().images.delete(image)
                utils.wait_for_status(
                    clients.glance().images.get(image),
                    ["deleted", "pending_delete"],
                    check_deletion=True,
                    update_resource=glance_wrap.get_image,
                    timeout=CONF.benchmark.glance_image_delete_timeout,
                    check_interval=CONF.benchmark.
                    glance_image_delete_poll_interval)
from six import moves

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import exceptions
from rally.plugins.openstack.scenarios.ceilometer import utils as ceilo_utils
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="ceilometer", order=450)
class CeilometerSampleGenerator(context.Context):
    """Context for creating samples and collecting resources for benchmarks."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "counter_name": {"type": "string"},
            "counter_type": {"type": "string"},
            "counter_unit": {"type": "string"},
            "counter_volume": {"type": "number", "minimum": 0},
            "resources_per_tenant": {"type": "integer", "minimum": 1},
            "samples_per_resource": {"type": "integer", "minimum": 1},
            "timestamp_interval": {"type": "integer", "minimum": 1},
            "metadata_list": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "status": {"type": "string"},
                        "name": {"type": "string"},
                        "deleted": {"type": "string"},
                        "created_at": {"type": "string"}
                    }
                }
            },
            "batch_size": {"type": "integer", "minimum": 1},
            "batches_allow_lose": {"type": "integer", "minimum": 0}
        },
        "required": ["counter_name", "counter_type", "counter_unit",
                     "counter_volume"],
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "resources_per_tenant": 5,
        "samples_per_resource": 5,
        "timestamp_interval": 60
    }

    def _store_batch_samples(self, scenario, batches, batches_allow_lose):
        """Store sample batches, tolerating a limited number of failures.

        :param scenario: CeilometerScenario used to create the samples
        :param batches: iterable of sample batches to store
        :param batches_allow_lose: how many batches may fail to store
                                   before setup is aborted (None == 0)
        :returns: the samples of the last successfully stored batch
        :raises exceptions.ContextSetupFailure: if more batches failed
            than allowed, or if no batch was stored at all
        """
        batches_allow_lose = batches_allow_lose or 0
        unsuccess = 0
        # Bug fix: "samples" was previously left unbound when every batch
        # failed within the allowance, so "return samples" raised a
        # NameError instead of a meaningful context-setup error.
        samples = None
        for i, batch in enumerate(batches, start=1):
            try:
                samples = scenario._create_samples(batch)
            except Exception:
                unsuccess += 1
                LOG.warning(_("Failed to store batch %d of Ceilometer samples"
                              " during context creation") % i)
                if unsuccess > batches_allow_lose:
                    raise exceptions.ContextSetupFailure(
                        ctx_name=self.get_name(),
                        msg=_("Context failed to store too many batches of "
                              "samples"))
        if samples is None:
            raise exceptions.ContextSetupFailure(
                ctx_name=self.get_name(),
                msg=_("Context failed to store any batch of samples"))
        return samples

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Ceilometer`"))
    def setup(self):
        """Create samples per tenant and record samples/resources lists."""
        # Counter fields shared by every generated sample.
        new_sample = {
            "counter_name": self.config["counter_name"],
            "counter_type": self.config["counter_type"],
            "counter_unit": self.config["counter_unit"],
            "counter_volume": self.config["counter_volume"],
        }
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            self.context["tenants"][tenant_id]["samples"] = []
            self.context["tenants"][tenant_id]["resources"] = []
            scenario = ceilo_utils.CeilometerScenario(
                context={"user": user, "task": self.context["task"]}
            )
            for i in moves.xrange(self.config["resources_per_tenant"]):
                samples_to_create = scenario._make_samples(
                    count=self.config["samples_per_resource"],
                    interval=self.config["timestamp_interval"],
                    metadata_list=self.config.get("metadata_list"),
                    batch_size=self.config.get("batch_size"),
                    **new_sample)
                samples = self._store_batch_samples(
                    scenario, samples_to_create,
                    self.config.get("batches_allow_lose")
                )
                for sample in samples:
                    self.context["tenants"][tenant_id]["samples"].append(
                        sample.to_dict())
                # All samples in a batch share one resource id.
                self.context["tenants"][tenant_id]["resources"].append(
                    samples[0].resource_id)

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Ceilometer`"))
    def cleanup(self):
        # We don't have API for removal of samples and resources
        pass
import random

from rally.common.i18n import _, _LE
from rally import consts
from rally import exceptions
from rally import osclients
from rally.task import context


@context.configure(name="api_versions", order=150)
class OpenStackAPIVersions(context.Context):
    """Context for specifying OpenStack clients versions and service types.

    Some OpenStack services support several API versions. To recognize
    the endpoints of each version, separate service types are provided in
    Keystone service catalog.

    Rally has the map of default service names - service types. But since
    service type is an entity, which can be configured manually by admin(
    via keystone api) without relation to service name, such map can be
    insufficient.

    Also, Keystone service catalog does not provide a map types to name
    (this statement is true for keystone < 3.3 ).

    This context was designed for not-default service types and not-default
    API versions usage.

    An example of specifying API version:

    .. code-block:: json

        # In this example we will launch NovaKeypair.create_and_list_keypairs
        # scenario on 2.2 api version.
        {
            "NovaKeypair.create_and_list_keypairs": [
                {
                    "args": {
                        "key_type": "x509"
                    },
                    "runner": {
                        "type": "constant",
                        "times": 10,
                        "concurrency": 2
                    },
                    "context": {
                        "users": {
                            "tenants": 3,
                            "users_per_tenant": 2
                        },
                        "api_versions": {
                            "nova": {
                                "version": 2.2
                            }
                        }
                    }
                }
            ]
        }

    An example of specifying API version along with service type:

    .. code-block:: json

        # In this example we will launch CinderVolumes.create_and_attach_volume
        # scenario on Cinder V2
        {
            "CinderVolumes.create_and_attach_volume": [
                {
                    "args": {
                        "size": 10,
                        "image": {
                            "name": "^cirros.*-disk$"
                        },
                        "flavor": {
                            "name": "m1.tiny"
                        },
                        "create_volume_params": {
                            "availability_zone": "nova"
                        }
                    },
                    "runner": {
                        "type": "constant",
                        "times": 5,
                        "concurrency": 1
                    },
                    "context": {
                        "users": {
                            "tenants": 2,
                            "users_per_tenant": 2
                        },
                        "api_versions": {
                            "cinder": {
                                "version": 2,
                                "service_type": "volumev2"
                            }
                        }
                    }
                }
            ]
        }

    Also, it possible to use service name as an identifier of service
    endpoint, but an admin user is required (Keystone can return map of
    service names - types, but such API is permitted only for admin).
    An example:

    .. code-block:: json

        # Similar to the previous example, but `service_name` argument is
        # used instead of `service_type`
        {
            "CinderVolumes.create_and_attach_volume": [
                {
                    "args": {
                        "size": 10,
                        "image": {
                            "name": "^cirros.*-disk$"
                        },
                        "flavor": {
                            "name": "m1.tiny"
                        },
                        "create_volume_params": {
                            "availability_zone": "nova"
                        }
                    },
                    "runner": {
                        "type": "constant",
                        "times": 5,
                        "concurrency": 1
                    },
                    "context": {
                        "users": {
                            "tenants": 2,
                            "users_per_tenant": 2
                        },
                        "api_versions": {
                            "cinder": {
                                "version": 2,
                                "service_name": "cinderv2"
                            }
                        }
                    }
                }
            ]
        }
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "patternProperties": {
            "^[a-z]+$": {
                "type": "object",
                "properties": {
                    "version": {
                        "anyOf": [{"type": "string",
                                   "description": "a string-like version."},
                                  {"type": "number",
                                   "description": "a number-like version."}]
                    },
                    "service_name": {
                        "type": "string"
                    },
                    "service_type": {
                        "type": "string"
                    }
                },
                "additionalProperties": False
            }
        },
        "additionalProperties": False
    }

    def setup(self):
        # FIXME(andreykurilin): move all checks to validate method.

        # A regular user is enough to inspect the service catalog; the
        # admin client is needed only to resolve `service_name` to a type.
        clients = osclients.Clients(random.choice(
            self.context["users"])["credential"])
        services = clients.keystone.service_catalog.get_endpoints()
        services_from_admin = None
        for client_name, conf in self.config.items():
            if "service_type" in conf and conf["service_type"] not in services:
                raise exceptions.ValidationError(_(
                    "There is no service with '%s' type in your environment.")
                    % conf["service_type"])
            elif "service_name" in conf:
                if not self.context.get("admin", {}).get("credential"):
                    raise exceptions.BenchmarkSetupFailure(_(
                        "Setting 'service_name' is allowed only for 'admin' "
                        "user."))
                if services_from_admin is None:
                    # FIX: construct the admin client lazily, only when a
                    # `service_name` lookup actually needs admin access.
                    # Previously a Clients object was built unconditionally
                    # from a possibly-None admin credential.
                    admin_clients = osclients.Clients(
                        self.context["admin"]["credential"])
                    services_from_admin = dict(
                        (s.name, s.type)
                        for s in admin_clients.keystone().services.list())
                if conf["service_name"] not in services_from_admin:
                    raise exceptions.ValidationError(
                        _("There is no '%s' service in your environment")
                        % conf["service_name"])
                self.context["config"]["api_versions"][client_name][
                    "service_type"] = services_from_admin[conf["service_name"]]

    def cleanup(self):
        # nothing to do here
        pass

    @classmethod
    def validate(cls, config):
        super(OpenStackAPIVersions, cls).validate(config)
        for client in config:
            client_cls = osclients.OSClient.get(client)
            if ("service_type" in config[client] and
                    "service_name" in config[client]):
                raise exceptions.ValidationError(_LE(
                    "Setting both 'service_type' and 'service_name' "
                    "properties is restricted."))
            try:
                if ("service_type" in config[client] or
                        "service_name" in config[client]):
                    client_cls.is_service_type_configurable()
                if "version" in config[client]:
                    client_cls.validate_version(config[client]["version"])
            except exceptions.RallyException as e:
                raise exceptions.ValidationError(
                    _LE("Invalid settings for '%(client)s': %(error)s") % {
                        "client": client,
                        "error": e.format_message()})
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally.plugins.openstack.scenarios.senlin import utils as senlin_utils
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="profiles", order=190)
class ProfilesGenerator(context.Context):
    """Context creates a temporary profile for Senlin test."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "type": {
                "type": "string",
            },
            "version": {
                "type": "string",
            },
            "properties": {
                "type": "object",
            }
        },
        "additionalProperties": False,
        "required": ["type", "version", "properties"]
    }

    def _senlin_scenario(self, user):
        # Build a per-user scenario object, honoring any configured
        # API versions from the context config.
        return senlin_utils.SenlinScenario({
            "user": user,
            "task": self.context["task"],
            "config": {
                "api_versions": self.context["config"].get(
                    "api_versions", [])}
        })

    @logging.log_task_wrapper(LOG.info, _("Enter context: `Senlin profiles`"))
    def setup(self):
        """Create test profiles."""
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            profile = self._senlin_scenario(user)._create_profile(self.config)
            self.context["tenants"][tenant_id]["profile"] = profile.id

    @logging.log_task_wrapper(LOG.info, _("Exit context: `Senlin profiles`"))
    def cleanup(self):
        """Delete created test profiles."""
        for user, tenant_id in rutils.iterate_per_tenants(
                self.context["users"]):
            self._senlin_scenario(user)._delete_profile(
                self.context["tenants"][tenant_id]["profile"])
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six import moves from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally.plugins.openstack.scenarios.monasca import utils as monasca_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="monasca_metrics", order=510) class MonascaMetricGenerator(context.Context): """Context for creating metrics for benchmarks.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "name": { "type": "string" }, "dimensions": { "type": "object", "properties": { "region": { "type": "string" }, "service": { "type": "string" }, "hostname": { "type": "string" }, "url": { "type": "string" } } }, "metrics_per_tenant": { "type": "integer", "minimum": 1 }, "value_meta": { "type": "array", "items": { "type": "object", "properties": { "value_meta_key": { "type": "string" }, "value_meta_value": { "type": "string" } } } } }, "additionalProperties": False } DEFAULT_CONFIG = { "metrics_per_tenant": 2 } @logging.log_task_wrapper(LOG.info, _("Enter context: `Monasca`")) def setup(self): new_metric = {} if "dimensions" in self.config: new_metric = { "dimensions": self.config["dimensions"] } for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): scenario = monasca_utils.MonascaScenario( context={"user": user, "task": self.context["task"]} ) for i in moves.xrange(self.config["metrics_per_tenant"]): scenario._create_metrics(**new_metric) rutils.interruptable_sleep(0.001) rutils.interruptable_sleep( monasca_utils.CONF.benchmark.monasca_metric_create_prepoll_delay, atomic_delay=1) @logging.log_task_wrapper(LOG.info, _("Exit context: `Monasca`")) def cleanup(self): # We don't have API for removal of metrics pass rally-0.9.1/rally/plugins/openstack/context/dataplane/0000775000567000056710000000000013073420067024217 5ustar 
# Copyright 2016: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import pkgutil

from rally.common.i18n import _
from rally.common import logging
from rally.common import utils as rutils
from rally import consts
from rally import exceptions
from rally import osclients
from rally.plugins.openstack.cleanup import manager as resource_manager
from rally.plugins.openstack.scenarios.heat import utils as heat_utils
from rally.task import context

LOG = logging.getLogger(__name__)


def get_data(filename_or_resource):
    """Return raw contents of a template file or package resource.

    :param filename_or_resource: either a plain filename or a
        ``[package, resource]`` pair suitable for ``pkgutil.get_data``
    :returns: the file/resource contents
    """
    if isinstance(filename_or_resource, list):
        return pkgutil.get_data(*filename_or_resource)
    # FIX: use a context manager so the file handle is closed promptly
    # instead of being leaked until garbage collection.
    with open(filename_or_resource) as f:
        return f.read()


@context.configure(name="heat_dataplane", order=435)
class HeatDataplane(context.Context):
    """Context class for create stack by given template.

    This context will create stacks by given template for each tenant and
    add details to context. Following details will be added:

    * id of stack;
    * template file contents;
    * files dictionary;
    * stack parameters;

    Heat template should define a "gate" node which will interact with Rally
    by ssh and workload nodes by any protocol. To make this possible heat
    template should accept the following parameters:

    * network_id: id of public network
    * router_id: id of external router to connect "gate" node
    * key_name: name of nova ssh keypair to use for "gate" node
    """

    FILE_SCHEMA = {
        "description": "",
        "type": "string",
    }

    RESOURCE_SCHEMA = {
        "description": "",
        "type": "array",
        "minItems": 2,
        "maxItems": 2,
        "items": {"type": "string"}
    }

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "stacks_per_tenant": {
                "type": "integer",
                "minimum": 1
            },
            "template": {
                "oneOf": [FILE_SCHEMA, RESOURCE_SCHEMA],
            },
            "files": {
                "type": "object",
            },
            "parameters": {
                "type": "object",
            },
            "context_parameters": {
                "type": "object",
            },
        },
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "stacks_per_tenant": 1,
    }

    def _get_context_parameter(self, user, tenant_id, path):
        """Resolve a dotted *path* against the user/tenant context data.

        :param user: user dict for the current tenant iteration
        :param tenant_id: id of the tenant whose context is inspected
        :param path: dot-separated lookup path, e.g. ``tenant.networks.0``
        :returns: the value found at *path*
        :raises RallyException: if a key along the path is missing
        """
        value = {"user": user, "tenant": self.context["tenants"][tenant_id]}
        for key in path.split("."):
            try:
                # try to cast string to int in order to support integer keys
                # e.g 'spam.1.eggs' will be translated to ["spam"][1]["eggs"]
                key = int(key)
            except ValueError:
                pass
            try:
                value = value[key]
            except KeyError:
                raise exceptions.RallyException(
                    "There is no key %s in context" % path)
        return value

    def _get_public_network_id(self):
        """Return id of the first external (public) Neutron network."""
        nc = osclients.Clients(self.context["admin"]["credential"]).neutron()
        networks = nc.list_networks(**{"router:external": True})["networks"]
        return networks[0]["id"]

    @logging.log_task_wrapper(LOG.info, _("Enter context: `HeatDataplane`"))
    def setup(self):
        template = get_data(self.config["template"])
        files = {}
        for key, filename in self.config.get("files", {}).items():
            files[key] = get_data(filename)
        parameters = self.config.get("parameters", rutils.LockedDict())
        with parameters.unlocked():
            if "network_id" not in parameters:
                parameters["network_id"] = self._get_public_network_id()
            for user, tenant_id in rutils.iterate_per_tenants(
                    self.context["users"]):
                for name, path in self.config.get("context_parameters",
                                                  {}).items():
                    parameters[name] = self._get_context_parameter(user,
                                                                   tenant_id,
                                                                   path)
                if "router_id" not in parameters:
                    networks = self.context["tenants"][tenant_id]["networks"]
                    parameters["router_id"] = networks[0]["router_id"]
                if "key_name" not in parameters:
                    parameters["key_name"] = user["keypair"]["name"]
                heat_scenario = heat_utils.HeatScenario(
                    {"user": user, "task": self.context["task"]})
                self.context["tenants"][tenant_id]["stack_dataplane"] = []
                for i in range(self.config["stacks_per_tenant"]):
                    stack = heat_scenario._create_stack(template, files=files,
                                                        parameters=parameters)
                    tenant_data = self.context["tenants"][tenant_id]
                    tenant_data["stack_dataplane"].append([stack.id, template,
                                                           files, parameters])

    @logging.log_task_wrapper(LOG.info, _("Exit context: `HeatDataplane`"))
    def cleanup(self):
        resource_manager.cleanup(names=["heat.stacks"],
                                 users=self.context.get("users", []))
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.designate import utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="zones", order=600) class ZoneGenerator(context.Context): """Context to add `zones_per_tenant` zones for each tenant.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "zones_per_tenant": { "type": "integer", "minimum": 1 }, }, "additionalProperties": False } DEFAULT_CONFIG = { "zones_per_tenant": 1 } @logging.log_task_wrapper(LOG.info, _("Enter context: `Zones`")) def setup(self): for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): self.context["tenants"][tenant_id].setdefault("zones", []) designate_util = utils.DesignateScenario( {"user": user, "task": self.context["task"]}) for i in range(self.config["zones_per_tenant"]): zone = designate_util._create_zone() self.context["tenants"][tenant_id]["zones"].append(zone) @logging.log_task_wrapper(LOG.info, _("Exit context: `Zones`")) def cleanup(self): resource_manager.cleanup(names=["designate.zones"], users=self.context.get("users", []), superclass=utils.DesignateScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/swift/0000775000567000056710000000000013073420067023422 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/swift/__init__.py0000664000567000056710000000000013073417716025530 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/swift/objects.py0000664000567000056710000001010413073417716025430 0ustar jenkinsjenkins00000000000000# Copyright 2015: Cisco Systems, Inc. # All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common.i18n import _
from rally.common import logging
from rally import consts
from rally import exceptions
from rally.plugins.openstack.context.swift import utils as swift_utils
from rally.task import context

LOG = logging.getLogger(__name__)


@context.configure(name="swift_objects", order=360)
class SwiftObjectGenerator(swift_utils.SwiftObjectMixin, context.Context):
    """Context that pre-populates Swift with containers and objects."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "containers_per_tenant": {
                "type": "integer",
                "minimum": 1
            },
            "objects_per_container": {
                "type": "integer",
                "minimum": 1
            },
            "object_size": {
                "type": "integer",
                "minimum": 1
            },
            "resource_management_workers": {
                "type": "integer",
                "minimum": 1
            }
        },
        "additionalProperties": False
    }

    DEFAULT_CONFIG = {
        "containers_per_tenant": 1,
        "objects_per_container": 1,
        "object_size": 1024,
        "resource_management_workers": 30
    }

    @logging.log_task_wrapper(LOG.info, _("Enter context: `swift_objects`"))
    def setup(self):
        """Create containers and objects, using the broker pattern."""
        workers = self.config["resource_management_workers"]

        per_tenant = self.config["containers_per_tenant"]
        expected_containers = len(self.context["tenants"]) * per_tenant
        LOG.debug("Creating %d containers using %d threads."
                  % (expected_containers, workers))
        created_containers = len(self._create_containers(
            self.context, per_tenant, workers))
        if created_containers != expected_containers:
            raise exceptions.ContextSetupFailure(
                ctx_name=self.get_name(),
                msg=_("Failed to create the requested number of containers, "
                      "expected %(expected)s but got %(actual)s.")
                % {"expected": expected_containers,
                   "actual": created_containers})

        per_container = self.config["objects_per_container"]
        expected_objects = expected_containers * per_container
        LOG.debug("Creating %d objects using %d threads."
                  % (expected_objects, workers))
        created_objects = len(self._create_objects(
            self.context, per_container, self.config["object_size"],
            workers))
        if created_objects != expected_objects:
            raise exceptions.ContextSetupFailure(
                ctx_name=self.get_name(),
                msg=_("Failed to create the requested number of objects, "
                      "expected %(expected)s but got %(actual)s.")
                % {"expected": expected_objects,
                   "actual": created_objects})

    @logging.log_task_wrapper(LOG.info, _("Exit context: `swift_objects`"))
    def cleanup(self):
        """Delete containers and objects, using the broker pattern."""
        workers = self.config["resource_management_workers"]

        self._delete_objects(self.context, workers)
        self._delete_containers(self.context, workers)
import tempfile

from rally.common import broker
from rally.common import utils as rutils
from rally.plugins.openstack.scenarios.swift import utils as swift_utils


class SwiftObjectMixin(object):
    """Mix-in method for Swift Object Context."""

    def _scenario_for(self, cache, user, context):
        # Scenario objects are cached per user id so each broker worker
        # reuses the same client session for a given user.
        scenario = cache.get(user["id"])
        if scenario is None:
            scenario = swift_utils.SwiftScenario(
                {"user": user, "task": context.get("task", {})})
            cache[user["id"]] = scenario
        return scenario

    def _create_containers(self, context, containers_per_tenant, threads):
        """Create containers and store results in Rally context.

        :param context: dict, Rally context environment
        :param containers_per_tenant: int, number of containers to create
                                      per tenant
        :param threads: int, number of threads to use for broker pattern
        :returns: list of tuples containing (account, container)
        """
        created = []

        def publish(queue):
            # One queue entry per container to create, carrying the user
            # and the per-tenant list the result must be recorded in.
            for user, tenant_id in rutils.iterate_per_tenants(
                    context.get("users", [])):
                context["tenants"][tenant_id]["containers"] = []
                for _ in range(containers_per_tenant):
                    queue.append(
                        (user, context["tenants"][tenant_id]["containers"]))

        def consume(cache, args):
            user, tenant_containers = args
            scenario = self._scenario_for(cache, user, context)
            container_name = scenario._create_container()
            tenant_containers.append({"user": user,
                                      "container": container_name,
                                      "objects": []})
            created.append((user["tenant_id"], container_name))

        broker.run(publish, consume, threads)
        return created

    def _create_objects(self, context, objects_per_container, object_size,
                        threads):
        """Create objects and store results in Rally context.

        :param context: dict, Rally context environment
        :param objects_per_container: int, number of objects to create per
                                      container
        :param object_size: int, size of created swift objects in byte
        :param threads: int, number of threads to use for broker pattern
        :returns: list of tuples containing (account, container, object)
        """
        created = []

        with tempfile.TemporaryFile() as dummy_file:
            # set dummy file to specified object size
            dummy_file.truncate(object_size)

            def publish(queue):
                for tenant_id in context["tenants"]:
                    for container in (
                            context["tenants"][tenant_id]["containers"]):
                        for _ in range(objects_per_container):
                            queue.append(container)

            def consume(cache, container):
                user = container["user"]
                scenario = self._scenario_for(cache, user, context)
                dummy_file.seek(0)
                object_name = scenario._upload_object(container["container"],
                                                      dummy_file)[1]
                container["objects"].append(object_name)
                created.append((user["tenant_id"], container["container"],
                                object_name))

            broker.run(publish, consume, threads)

        return created

    def _delete_containers(self, context, threads):
        """Delete containers created by Swift context and update Rally context.

        :param context: dict, Rally context environment
        :param threads: int, number of threads to use for broker pattern
        """
        def publish(queue):
            for tenant_id in context["tenants"]:
                containers = context["tenants"][tenant_id]["containers"]
                # iterate over a copy: consumers mutate the original list
                for container in containers[:]:
                    queue.append((container, containers))

        def consume(cache, args):
            container, tenant_containers = args
            user = container["user"]
            scenario = self._scenario_for(cache, user, context)
            scenario._delete_container(container["container"])
            tenant_containers.remove(container)

        broker.run(publish, consume, threads)

    def _delete_objects(self, context, threads):
        """Delete objects created by Swift context and update Rally context.

        :param context: dict, Rally context environment
        :param threads: int, number of threads to use for broker pattern
        """
        def publish(queue):
            for tenant_id in context["tenants"]:
                for container in context["tenants"][tenant_id]["containers"]:
                    # iterate over a copy: consumers mutate the object list
                    for object_name in container["objects"][:]:
                        queue.append((object_name, container))

        def consume(cache, args):
            object_name, container = args
            user = container["user"]
            scenario = self._scenario_for(cache, user, context)
            scenario._delete_object(container["container"], object_name)
            container["objects"].remove(object_name)

        broker.run(publish, consume, threads)
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import osclients from rally.plugins.openstack.wrappers import network from rally.task import context LOG = logging.getLogger(__name__) def _prepare_open_secgroup(credential, secgroup_name): """Generate secgroup allowing all tcp/udp/icmp access. In order to run tests on instances it is necessary to have SSH access. This function generates a secgroup which allows all tcp/udp/icmp access. 
:param credential: clients credential :param secgroup_name: security group name :returns: dict with security group details """ nova = osclients.Clients(credential).nova() if secgroup_name not in [sg.name for sg in nova.security_groups.list()]: descr = "Allow ssh access to VMs created by Rally for benchmarking" rally_open = nova.security_groups.create(secgroup_name, descr) rally_open = nova.security_groups.find(name=secgroup_name) rules_to_add = [ { "ip_protocol": "tcp", "to_port": 65535, "from_port": 1, "ip_range": {"cidr": "0.0.0.0/0"} }, { "ip_protocol": "udp", "to_port": 65535, "from_port": 1, "ip_range": {"cidr": "0.0.0.0/0"} }, { "ip_protocol": "icmp", "to_port": -1, "from_port": -1, "ip_range": {"cidr": "0.0.0.0/0"} } ] def rule_match(criteria, existing_rule): return all(existing_rule[key] == value for key, value in criteria.items()) for new_rule in rules_to_add: if not any(rule_match(new_rule, existing_rule) for existing_rule in rally_open.rules): nova.security_group_rules.create( rally_open.id, from_port=new_rule["from_port"], to_port=new_rule["to_port"], ip_protocol=new_rule["ip_protocol"], cidr=new_rule["ip_range"]["cidr"]) return rally_open.to_dict() @context.configure(name="allow_ssh", order=320) class AllowSSH(context.Context): """Sets up security groups for all users to access VM via SSH.""" @logging.log_task_wrapper(LOG.info, _("Enter context: `allow_ssh`")) def setup(self): admin_or_user = (self.context.get("admin") or self.context.get("users")[0]) net_wrapper = network.wrap( osclients.Clients(admin_or_user["credential"]), self, config=self.config) use_sg, msg = net_wrapper.supports_extension("security-group") if not use_sg: LOG.info(_("Security group context is disabled: %s") % msg) return secgroup_name = self.generate_random_name() for user in self.context["users"]: user["secgroup"] = _prepare_open_secgroup(user["credential"], secgroup_name) @logging.log_task_wrapper(LOG.info, _("Exit context: `allow_ssh`")) def cleanup(self): for user, tenant_id 
in utils.iterate_per_tenants( self.context["users"]): with logging.ExceptionLogger( LOG, _("Unable to delete secgroup: %s.") % user["secgroup"]["name"]): clients = osclients.Clients(user["credential"]) clients.nova().security_groups.get( user["secgroup"]["id"]).delete() rally-0.9.1/rally/plugins/openstack/context/network/existing_network.py0000664000567000056710000000350113073417716027742 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally import osclients from rally.plugins.openstack.wrappers import network as network_wrapper from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="existing_network", order=349) class ExistingNetwork(context.Context): """This context supports using existing networks in Rally. This context should be used on a deployment with existing users. 
""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "additionalProperties": False } @logging.log_task_wrapper(LOG.info, _("Enter context: `existing_network`")) def setup(self): for user, tenant_id in utils.iterate_per_tenants( self.context.get("users", [])): net_wrapper = network_wrapper.wrap( osclients.Clients(user["credential"]), self, config=self.config) self.context["tenants"][tenant_id]["networks"] = ( net_wrapper.list_networks()) @logging.log_task_wrapper(LOG.info, _("Exit context: `existing_network`")) def cleanup(self): """Networks were not created by Rally, so nothing to do.""" rally-0.9.1/rally/plugins/openstack/context/network/networks.py0000664000567000056710000001020513073417716026212 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally import osclients from rally.plugins.openstack.wrappers import network as network_wrapper from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="network", order=350) class Network(context.Context): """Create networking resources. This creates networks for all tenants, and optionally creates another resources like subnets and routers. 
""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "start_cidr": { "type": "string" }, "networks_per_tenant": { "type": "integer", "minimum": 1 }, "subnets_per_network": { "type": "integer", "minimum": 1 }, "network_create_args": { "type": "object", "additionalProperties": True }, "dns_nameservers": { "type": "array", "items": {"type": "string"}, "uniqueItems": True } }, "additionalProperties": False } DEFAULT_CONFIG = { "start_cidr": "10.2.0.0/24", "networks_per_tenant": 1, "subnets_per_network": 1, "network_create_args": {}, "dns_nameservers": None } @logging.log_task_wrapper(LOG.info, _("Enter context: `network`")) def setup(self): # NOTE(rkiran): Some clients are not thread-safe. Thus during # multithreading/multiprocessing, it is likely the # sockets are left open. This problem is eliminated by # creating a connection in setup and cleanup separately. net_wrapper = network_wrapper.wrap( osclients.Clients(self.context["admin"]["credential"]), self, config=self.config) kwargs = {} if self.config["dns_nameservers"] is not None: kwargs["dns_nameservers"] = self.config["dns_nameservers"] for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): self.context["tenants"][tenant_id]["networks"] = [] for i in range(self.config["networks_per_tenant"]): # NOTE(amaretskiy): add_router and subnets_num take effect # for Neutron only. 
network_create_args = self.config["network_create_args"].copy() network = net_wrapper.create_network( tenant_id, add_router=True, subnets_num=self.config["subnets_per_network"], network_create_args=network_create_args, **kwargs) self.context["tenants"][tenant_id]["networks"].append(network) @logging.log_task_wrapper(LOG.info, _("Exit context: `network`")) def cleanup(self): net_wrapper = network_wrapper.wrap( osclients.Clients(self.context["admin"]["credential"]), self, config=self.config) for tenant_id, tenant_ctx in self.context["tenants"].items(): for network in tenant_ctx.get("networks", []): with logging.ExceptionLogger( LOG, _("Failed to delete network for tenant %s") % tenant_id): net_wrapper.delete_network(network) rally-0.9.1/rally/plugins/openstack/context/murano/0000775000567000056710000000000013073420067023567 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/murano/__init__.py0000664000567000056710000000000013073417716025675 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/murano/murano_packages.py0000664000567000056710000000614313073417720027306 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import zipfile from rally.common import fileutils from rally.common.i18n import _ from rally.common.i18n import _LE from rally.common import logging from rally.common import utils from rally import consts from rally import exceptions from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.murano import utils as murano_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="murano_packages", order=401) class PackageGenerator(context.Context): """Context class for uploading applications for murano.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "app_package": { "type": "string", } }, "required": ["app_package"], "additionalProperties": False } @logging.log_task_wrapper(LOG.info, _("Enter context: `Murano packages`")) def setup(self): is_config_app_dir = False pckg_path = os.path.expanduser(self.config["app_package"]) if zipfile.is_zipfile(pckg_path): zip_name = pckg_path elif os.path.isdir(pckg_path): is_config_app_dir = True zip_name = fileutils.pack_dir(pckg_path) else: msg = (_LE("There is no zip archive or directory by this path:" " %s") % pckg_path) raise exceptions.ContextSetupFailure(msg=msg, ctx_name=self.get_name()) for user, tenant_id in utils.iterate_per_tenants( self.context["users"]): clients = osclients.Clients(user["credential"]) self.context["tenants"][tenant_id]["packages"] = [] if is_config_app_dir: self.context["tenants"][tenant_id]["murano_ctx"] = zip_name package = clients.murano().packages.create( {"categories": ["Web"], "tags": ["tag"]}, {"file": open(zip_name)}) self.context["tenants"][tenant_id]["packages"].append(package) @logging.log_task_wrapper(LOG.info, _("Exit context: `Murano packages`")) def cleanup(self): resource_manager.cleanup(names=["murano.packages"], users=self.context.get("users", []), superclass=murano_utils.MuranoScenario, 
task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/murano/murano_environments.py0000664000567000056710000000460713073417720030262 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.murano import utils as murano_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="murano_environments", order=402) class EnvironmentGenerator(context.Context): """Context class for creating murano environments.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "environments_per_tenant": { "type": "integer", "minimum": 1 }, }, "required": ["environments_per_tenant"], "additionalProperties": False } @logging.log_task_wrapper(LOG.info, _("Enter context: `Murano environments`")) def setup(self): for user, tenant_id in utils.iterate_per_tenants( self.context["users"]): self.context["tenants"][tenant_id]["environments"] = [] for i in range(self.config["environments_per_tenant"]): murano_util = murano_utils.MuranoScenario( {"user": user, "task": self.context["task"], "config": self.context["config"]}) env = murano_util._create_environment() 
self.context["tenants"][tenant_id]["environments"].append(env) @logging.log_task_wrapper(LOG.info, _("Exit context: `Murano environments`")) def cleanup(self): resource_manager.cleanup(names=["murano.environments"], users=self.context.get("users", [])) rally-0.9.1/rally/plugins/openstack/context/heat/0000775000567000056710000000000013073420067023207 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/heat/__init__.py0000664000567000056710000000000013073417716025315 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/heat/stacks.py0000664000567000056710000000656513073417720025067 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.heat import utils as heat_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="stacks", order=435) class StackGenerator(context.Context): """Context class for create temporary stacks with resources. Stack generator allows to generate arbitrary number of stacks for each tenant before test scenarios. In addition, it allows to define number of resources (namely OS::Heat::RandomString) that will be created inside each stack. 
After test execution the stacks will be automatically removed from heat. """ # The schema of the context configuration format CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "stacks_per_tenant": { "type": "integer", "minimum": 1 }, "resources_per_stack": { "type": "integer", "minimum": 1 } }, "additionalProperties": False } DEFAULT_CONFIG = { "stacks_per_tenant": 2, "resources_per_stack": 10 } @staticmethod def _prepare_stack_template(res_num): template = { "heat_template_version": "2014-10-16", "description": "Test template for rally", "resources": {} } rand_string = {"type": "OS::Heat::RandomString"} for i in range(res_num): template["resources"]["TestResource%d" % i] = rand_string return template @logging.log_task_wrapper(LOG.info, _("Enter context: `Stacks`")) def setup(self): template = self._prepare_stack_template( self.config["resources_per_stack"]) for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): heat_scenario = heat_utils.HeatScenario( {"user": user, "task": self.context["task"]}) self.context["tenants"][tenant_id]["stacks"] = [] for i in range(self.config["stacks_per_tenant"]): stack = heat_scenario._create_stack(template) self.context["tenants"][tenant_id]["stacks"].append(stack.id) @logging.log_task_wrapper(LOG.info, _("Exit context: `Stacks`")) def cleanup(self): resource_manager.cleanup(names=["heat.stacks"], users=self.context.get("users", []), superclass=heat_utils.HeatScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/sahara/0000775000567000056710000000000013073420067023525 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/sahara/sahara_output_data_sources.py0000664000567000056710000001015013073417720031511 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.sahara import utils from rally.plugins.openstack.scenarios.swift import utils as swift_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="sahara_output_data_sources", order=444) class SaharaOutputDataSources(context.Context): """Context class for setting up Output Data Sources for an EDP job.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "output_type": { "enum": ["swift", "hdfs"], }, "output_url_prefix": { "type": "string", } }, "additionalProperties": False, "required": ["output_type", "output_url_prefix"] } @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Output Data Sources`")) def setup(self): utils.init_sahara_context(self) for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): clients = osclients.Clients(user["credential"]) sahara = clients.sahara() if self.config["output_type"] == "swift": swift = swift_utils.SwiftScenario(clients=clients, context=self.context) container_name = self.generate_random_name() self.context["tenants"][tenant_id]["sahara"]["container"] = { "name": swift._create_container( container_name=container_name), 
"output_swift_objects": [] } self.setup_outputs_swift(swift, sahara, tenant_id, container_name, user["credential"].username, user["credential"].password) else: self.setup_outputs_hdfs(sahara, tenant_id, self.config["output_url_prefix"]) def setup_outputs_hdfs(self, sahara, tenant_id, output_url): output_ds = sahara.data_sources.create( name=self.generate_random_name(), description="", data_source_type="hdfs", url=output_url) self.context["tenants"][tenant_id]["sahara"]["output"] = output_ds.id def setup_outputs_swift(self, swift, sahara, tenant_id, container_name, username, password): output_ds_swift = sahara.data_sources.create( name=self.generate_random_name(), description="", data_source_type="swift", url="swift://" + container_name + ".sahara/", credential_user=username, credential_pass=password) self.context["tenants"][tenant_id]["sahara"]["output"] = ( output_ds_swift.id ) @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Output Data Sources`")) def cleanup(self): resources = ["data_sources"] resource_manager.cleanup( names=["sahara.%s" % res for res in resources], users=self.context.get("users", []), superclass=utils.SaharaScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/sahara/__init__.py0000664000567000056710000000000013073417716025633 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/sahara/sahara_input_data_sources.py0000664000567000056710000001257313073417720031323 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import requests from six.moves.urllib import parse from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.cleanup import resources as res_cleanup from rally.plugins.openstack.scenarios.sahara import utils from rally.plugins.openstack.scenarios.swift import utils as swift_utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="sahara_input_data_sources", order=443) class SaharaInputDataSources(context.Context): """Context class for setting up Input Data Sources for an EDP job.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "input_type": { "enum": ["swift", "hdfs"], }, "input_url": { "type": "string", }, "swift_files": { "type": "array", "items": { "type": "object", "properties": { "name": { "type": "string" }, "download_url": { "type": "string" } }, "additionalProperties": False, "required": ["name", "download_url"] } } }, "additionalProperties": False, "required": ["input_type", "input_url"] } @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Input Data Sources`")) def setup(self): utils.init_sahara_context(self) self.context["sahara"]["swift_objects"] = [] self.context["sahara"]["container_name"] = None for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): clients = osclients.Clients(user["credential"]) if self.config["input_type"] == "swift": 
self.setup_inputs_swift(clients, tenant_id, self.config["input_url"], self.config["swift_files"], user["credential"].username, user["credential"].password) else: self.setup_inputs(clients, tenant_id, self.config["input_type"], self.config["input_url"]) def setup_inputs(self, clients, tenant_id, input_type, input_url): input_ds = clients.sahara().data_sources.create( name=self.generate_random_name(), description="", data_source_type=input_type, url=input_url) self.context["tenants"][tenant_id]["sahara"]["input"] = input_ds.id def setup_inputs_swift(self, clients, tenant_id, input_url, swift_files, username, password): swift_scenario = swift_utils.SwiftScenario(clients=clients, context=self.context) container_name = "rally_" + parse.urlparse(input_url).netloc.rstrip( ".sahara") self.context["sahara"]["container_name"] = ( swift_scenario._create_container(container_name=container_name)) for swift_file in swift_files: content = requests.get(swift_file["download_url"]).content self.context["sahara"]["swift_objects"].append( swift_scenario._upload_object( self.context["sahara"]["container_name"], content, object_name=swift_file["name"])) input_ds_swift = clients.sahara().data_sources.create( name=self.generate_random_name(), description="", data_source_type="swift", url=input_url, credential_user=username, credential_pass=password) self.context["tenants"][tenant_id]["sahara"]["input"] = ( input_ds_swift.id) @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Input Data" "Sources`")) def cleanup(self): resources = ["data_sources"] for swift_object in self.context["sahara"]["swift_objects"]: res_cleanup.SwiftObject(resource=swift_object[1]) res_cleanup.SwiftContainer( resource=self.context["sahara"]["container_name"]) resource_manager.cleanup( names=["sahara.%s" % res for res in resources], users=self.context.get("users", []), superclass=utils.SaharaScenario, task_id=self.context["task"]["uuid"]) 
rally-0.9.1/rally/plugins/openstack/context/sahara/sahara_job_binaries.py0000664000567000056710000001261213073417716030055 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import requests from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import exceptions from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.sahara import utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="sahara_job_binaries", order=442) class SaharaJobBinaries(context.Context): """Context class for setting up Job Binaries for an EDP job.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "mains": { "type": "array", "items": { "type": "object", "properties": { "name": { "type": "string" }, "download_url": { "type": "string" } }, "additionalProperties": False, "required": ["name", "download_url"] } }, "libs": { "type": "array", "items": { "type": "object", "properties": { "name": { "type": "string" }, "download_url": { "type": "string" } }, "additionalProperties": False, "required": ["name", "download_url"] } } }, "additionalProperties": False } # This cache will hold the downloaded libs content to prevent repeated # downloads for each tenant lib_cache = {} 
@logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Job Binaries`")) def setup(self): utils.init_sahara_context(self) for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): clients = osclients.Clients(user["credential"]) sahara = clients.sahara() self.context["tenants"][tenant_id]["sahara"]["mains"] = [] self.context["tenants"][tenant_id]["sahara"]["libs"] = [] for main in self.config.get("mains", []): self.download_and_save_lib( sahara=sahara, lib_type="mains", name=main["name"], download_url=main["download_url"], tenant_id=tenant_id) for lib in self.config.get("libs", []): self.download_and_save_lib( sahara=sahara, lib_type="libs", name=lib["name"], download_url=lib["download_url"], tenant_id=tenant_id) def setup_inputs(self, sahara, tenant_id, input_type, input_url): if input_type == "swift": raise exceptions.RallyException( _("Swift Data Sources are not implemented yet")) # Todo(nkonovalov): Add swift credentials parameters and data upload input_ds = sahara.data_sources.create( name=self.generate_random_name(), description="", data_source_type=input_type, url=input_url) self.context["tenants"][tenant_id]["sahara"]["input"] = input_ds.id def download_and_save_lib(self, sahara, lib_type, name, download_url, tenant_id): if download_url not in self.lib_cache: lib_data = requests.get(download_url).content self.lib_cache[download_url] = lib_data else: lib_data = self.lib_cache[download_url] job_binary_internal = sahara.job_binary_internals.create( name=name, data=lib_data) url = "internal-db://%s" % job_binary_internal.id job_binary = sahara.job_binaries.create(name=name, url=url, description="", extra={}) self.context["tenants"][tenant_id]["sahara"][lib_type].append( job_binary.id) @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Job Binaries`")) def cleanup(self): resources = ["job_binary_internals", "job_binaries"] resource_manager.cleanup( names=["sahara.%s" % res for res in resources], users=self.context.get("users", 
[]), superclass=utils.SaharaScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/sahara/sahara_cluster.py0000664000567000056710000001462413073417720027110 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import exceptions from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.sahara import utils from rally.task import context from rally.task import utils as bench_utils CONF = cfg.CONF LOG = logging.getLogger(__name__) @context.configure(name="sahara_cluster", order=441) class SaharaCluster(context.Context): """Context class for setting up the Cluster an EDP job.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "plugin_name": { "type": "string" }, "hadoop_version": { "type": "string", }, "workers_count": { "type": "integer", "minimum": 1 }, "flavor_id": { "type": "string", }, "master_flavor_id": { "type": "string", }, "worker_flavor_id": { "type": "string", }, "floating_ip_pool": { "type": "string", }, "volumes_per_node": { "type": "integer", "minimum": 1 }, "volumes_size": { "type": "integer", "minimum": 1 }, "auto_security_group": { "type": "boolean", }, "security_groups": { "type": "array", 
"items": { "type": "string" } }, "node_configs": { "type": "object" }, "cluster_configs": { "type": "object" }, "enable_anti_affinity": { "type": "boolean" }, "enable_proxy": { "type": "boolean" }, "use_autoconfig": { "type": "boolean" }, }, "additionalProperties": False, "required": ["plugin_name", "hadoop_version", "workers_count", "master_flavor_id", "worker_flavor_id"] } @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Cluster`")) def setup(self): utils.init_sahara_context(self) self.context["sahara"]["clusters"] = {} wait_dict = {} for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): image_id = self.context["tenants"][tenant_id]["sahara"]["image"] floating_ip_pool = self.config.get("floating_ip_pool") temporary_context = { "user": user, "tenant": self.context["tenants"][tenant_id], "task": self.context["task"] } scenario = utils.SaharaScenario(context=temporary_context) cluster = scenario._launch_cluster( plugin_name=self.config["plugin_name"], hadoop_version=self.config["hadoop_version"], flavor_id=self.config.get("flavor_id"), master_flavor_id=self.config["master_flavor_id"], worker_flavor_id=self.config["worker_flavor_id"], workers_count=self.config["workers_count"], image_id=image_id, floating_ip_pool=floating_ip_pool, volumes_per_node=self.config.get("volumes_per_node"), volumes_size=self.config.get("volumes_size", 1), auto_security_group=self.config.get("auto_security_group", True), security_groups=self.config.get("security_groups"), node_configs=self.config.get("node_configs"), cluster_configs=self.config.get("cluster_configs"), enable_anti_affinity=self.config.get("enable_anti_affinity", False), enable_proxy=self.config.get("enable_proxy", False), wait_active=False, use_autoconfig=self.config.get("use_autoconfig", True) ) self.context["tenants"][tenant_id]["sahara"]["cluster"] = ( cluster.id) # Need to save the client instance to poll for active status wait_dict[cluster] = scenario.clients("sahara") 
bench_utils.wait_for( resource=wait_dict, update_resource=self.update_clusters_dict, is_ready=self.all_clusters_active, timeout=CONF.benchmark.sahara_cluster_create_timeout, check_interval=CONF.benchmark.sahara_cluster_check_interval) def update_clusters_dict(self, dct): new_dct = {} for cluster, client in dct.items(): new_cl = client.clusters.get(cluster.id) new_dct[new_cl] = client return new_dct def all_clusters_active(self, dct): for cluster, client in dct.items(): cluster_status = cluster.status.lower() if cluster_status == "error": raise exceptions.SaharaClusterFailure( name=cluster.name, action="start", reason=cluster.status_description) elif cluster_status != "active": return False return True @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Cluster`")) def cleanup(self): resource_manager.cleanup(names=["sahara.clusters"], users=self.context.get("users", []), superclass=utils.SaharaScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/sahara/sahara_image.py0000664000567000056710000001200413073417720026477 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import exceptions from rally import osclients from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.scenarios.glance import utils as glance_utils from rally.plugins.openstack.scenarios.sahara import utils from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="sahara_image", order=440) class SaharaImage(context.Context): """Context class for adding and tagging Sahara images.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "image_uuid": { "type": "string" }, "image_url": { "type": "string", }, "username": { "type": "string" }, "plugin_name": { "type": "string", }, "hadoop_version": { "type": "string", } }, "oneOf": [ {"description": "Create an image.", "required": ["image_url", "username", "plugin_name", "hadoop_version"]}, {"description": "Use an existing image.", "required": ["image_uuid"]} ], "additionalProperties": False } def _create_image(self, hadoop_version, image_url, plugin_name, user, user_name): scenario = glance_utils.GlanceScenario( {"user": user, "task": self.context["task"]}) image_name = self.generate_random_name() image = scenario._create_image(name=image_name, container_format="bare", image_location=image_url, disk_format="qcow2") scenario.clients("sahara").images.update_image( image_id=image.id, user_name=user_name, desc="") scenario.clients("sahara").images.update_tags( image_id=image.id, new_tags=[plugin_name, hadoop_version]) return image.id @logging.log_task_wrapper(LOG.info, _("Enter context: `Sahara Image`")) def setup(self): utils.init_sahara_context(self) self.context["sahara"]["images"] = {} # The user may want to use the existing image. In this case he should # make sure that the image is public and has all required metadata. 
image_uuid = self.config.get("image_uuid") self.context["sahara"]["need_image_cleanup"] = not image_uuid if image_uuid: # Using the first user to check the existing image. user = self.context["users"][0] clients = osclients.Clients(user["credential"]) image = clients.glance().images.get(image_uuid) visibility = None if hasattr(image, "is_public"): visibility = "public" if image.is_public else "private" else: visibility = image["visibility"] if visibility != "public": raise exceptions.BenchmarkSetupFailure( "Image provided in the Sahara context should be public.") image_id = image_uuid for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): self.context["tenants"][tenant_id]["sahara"]["image"] = ( image_id) else: for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): image_id = self._create_image( hadoop_version=self.config["hadoop_version"], image_url=self.config["image_url"], plugin_name=self.config["plugin_name"], user=user, user_name=self.config["username"]) self.context["tenants"][tenant_id]["sahara"]["image"] = ( image_id) @logging.log_task_wrapper(LOG.info, _("Exit context: `Sahara Image`")) def cleanup(self): if self.context["sahara"]["need_image_cleanup"]: resource_manager.cleanup(names=["glance.images"], users=self.context.get("users", []), superclass=glance_utils.GlanceScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/vm/0000775000567000056710000000000013073420067022710 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/vm/__init__.py0000664000567000056710000000000013073417716025016 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/vm/custom_image.py0000664000567000056710000002120313073417716025743 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import six from rally.common import broker from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally import osclients from rally.plugins.openstack.scenarios.nova import utils as nova_utils from rally.plugins.openstack.scenarios.vm import vmtasks from rally.plugins.openstack import types from rally.plugins.openstack.wrappers import glance as glance_wrapper from rally.task import context LOG = logging.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) @context.configure(name="custom_image", order=500, hidden=True) class BaseCustomImageGenerator(context.Context): """Base class for the contexts providing customized image with. Every context class for the specific customization must implement the method `_customize_image` that is able to connect to the server using SSH and e.g. install applications inside it. This is used e.g. to install the benchmark application using SSH access. This base context class provides a way to prepare an image with custom preinstalled applications. Basically, this code boots a VM, calls the `_customize_image` and then snapshots the VM disk, removing the VM afterwards. The image UUID is stored in the user["custom_image"]["id"] and can be used afterwards by scenario. 
""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "image": { "type": "object", "properties": { "name": { "type": "string" } } }, "flavor": { "type": "object", "properties": { "name": { "type": "string" } } }, "username": { "type": "string" }, "password": { "type": "string" }, "floating_network": { "type": "string" }, "internal_network": { "type": "string" }, "port": { "type": "integer", "minimum": 1, "maximum": 65535 }, "userdata": { "type": "string" }, "workers": { "type": "integer", "minimum": 1, } }, "required": ["image", "flavor"], "additionalProperties": False } DEFAULT_CONFIG = { "username": "root", "port": 22, "workers": 1 } @logging.log_task_wrapper(LOG.info, _("Enter context: `custom_image`")) def setup(self): """Creates custom image(s) with preinstalled applications. When admin is present creates one public image that is usable from all the tenants and users. Otherwise create one image per user and tenant. """ if "admin" in self.context: # NOTE(pboldin): Create by first user and make it public by # the admin user = self.context["users"][0] tenant = self.context["tenants"][user["tenant_id"]] nics = None if "networks" in tenant: nics = [{"net-id": tenant["networks"][0]["id"]}] custom_image = self.create_one_image(user, nics=nics) for tenant in self.context["tenants"].values(): tenant["custom_image"] = custom_image else: def publish(queue): users = self.context.get("users", []) for user, tenant_id in utils.iterate_per_tenants(users): queue.append((user, tenant_id)) def consume(cache, args): user, tenant_id = args tenant = self.context["tenants"][tenant_id] tenant["custom_image"] = self.create_one_image(user) broker.run(publish, consume, self.config["workers"]) def create_one_image(self, user, **kwargs): """Create one image for the user.""" clients = osclients.Clients(user["credential"]) admin_clients = osclients.Clients(self.context["admin"]["credential"]) image_id = types.GlanceImage.transform( clients=clients, 
resource_config=self.config["image"]) flavor_id = types.Flavor.transform( clients=clients, resource_config=self.config["flavor"]) vm_scenario = vmtasks.BootRuncommandDelete(self.context, clients=clients) glance_wrap = glance_wrapper.wrap(admin_clients.glance, self) server, fip = vm_scenario._boot_server_with_fip( image=image_id, flavor=flavor_id, floating_network=self.config.get("floating_network"), userdata=self.config.get("userdata"), key_name=user["keypair"]["name"], security_groups=[user["secgroup"]["name"]], **kwargs) try: LOG.debug("Installing benchmark on %r %s", server, fip["ip"]) self.customize_image(server, fip, user) LOG.debug("Stopping server %r", server) vm_scenario._stop_server(server) LOG.debug("Creating snapshot for %r", server) custom_image = vm_scenario._create_image(server) glance_wrap.set_visibility(custom_image) finally: vm_scenario._delete_server_with_fip(server, fip) if hasattr(custom_image, "to_dict"): # NOTE(stpierre): Glance v1 images are objects that can be # converted to dicts; Glance v2 images are already # dict-like custom_image = custom_image.to_dict() return custom_image @logging.log_task_wrapper(LOG.info, _("Exit context: `custom_image`")) def cleanup(self): """Delete created custom image(s).""" if "admin" in self.context: user = self.context["users"][0] tenant = self.context["tenants"][user["tenant_id"]] if "custom_image" in tenant: self.delete_one_image(user, tenant["custom_image"]) tenant.pop("custom_image") else: def publish(queue): users = self.context.get("users", []) for user, tenant_id in utils.iterate_per_tenants(users): queue.append((user, tenant_id)) def consume(cache, args): user, tenant_id = args tenant = self.context["tenants"][tenant_id] if "custom_image" in tenant: self.delete_one_image(user, tenant["custom_image"]) tenant.pop("custom_image") broker.run(publish, consume, self.config["workers"]) def delete_one_image(self, user, custom_image): """Delete the image created for the user and tenant.""" clients = 
osclients.Clients(user["credential"]) nova_scenario = nova_utils.NovaScenario( context=self.context, clients=clients) with logging.ExceptionLogger( LOG, _("Unable to delete image %s") % custom_image["id"]): custom_image = nova_scenario.clients("nova").images.get( custom_image["id"]) nova_scenario._delete_image(custom_image) @logging.log_task_wrapper(LOG.info, _("Custom image context: customizing")) def customize_image(self, server, ip, user): return self._customize_image(server, ip, user) @abc.abstractmethod def _customize_image(self, server, ip, user): """Override this method with one that customizes image. Basically, code can simply call `VMScenario._run_command` function specifying an installation script and interpreter. This script will be then executed using SSH. :param server: nova.Server instance :param ip: dict with server IP details :param user: user who started a VM instance. Used to extract keypair """ pass rally-0.9.1/rally/plugins/openstack/context/vm/image_command_customizer.py0000664000567000056710000000773113073417716030345 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from rally import exceptions from rally.plugins.openstack.context.vm import custom_image from rally.plugins.openstack.scenarios.vm import utils as vm_utils import rally.task.context as context @context.configure(name="image_command_customizer", order=501) class ImageCommandCustomizerContext(custom_image.BaseCustomImageGenerator): """Context class for generating image customized by a command execution. Run a command specified by configuration to prepare image. Use this script e.g. to download and install something. """ CONFIG_SCHEMA = copy.deepcopy( custom_image.BaseCustomImageGenerator.CONFIG_SCHEMA) CONFIG_SCHEMA["definitions"] = { "stringOrStringList": { "anyOf": [ {"type": "string", "description": "just a string"}, { "type": "array", "description": "just a list of strings", "items": {"type": "string"} } ] }, "scriptFile": { "type": "object", "properties": { "script_file": {"$ref": "#/definitions/stringOrStringList"}, "interpreter": {"$ref": "#/definitions/stringOrStringList"}, "command_args": {"$ref": "#/definitions/stringOrStringList"} }, "required": ["script_file", "interpreter"], "additionalProperties": False, }, "scriptInline": { "type": "object", "properties": { "script_inline": {"type": "string"}, "interpreter": {"$ref": "#/definitions/stringOrStringList"}, "command_args": {"$ref": "#/definitions/stringOrStringList"} }, "required": ["script_inline", "interpreter"], "additionalProperties": False, }, "commandPath": { "type": "object", "properties": { "remote_path": {"$ref": "#/definitions/stringOrStringList"}, "local_path": {"type": "string"}, "command_args": {"$ref": "#/definitions/stringOrStringList"} }, "required": ["remote_path"], "additionalProperties": False, }, "commandDict": { "oneOf": [ {"$ref": "#/definitions/scriptFile"}, {"$ref": "#/definitions/scriptInline"}, {"$ref": "#/definitions/commandPath"}, ], } } CONFIG_SCHEMA["properties"]["command"] = { "$ref": "#/definitions/commandDict" } def _customize_image(self, server, fip, user): 
code, out, err = vm_utils.VMScenario(self.context)._run_command( fip["ip"], self.config["port"], self.config["username"], self.config.get("password"), command=self.config["command"], pkey=user["keypair"]["private"]) if code: raise exceptions.ScriptError( message="Command `%(command)s' execution failed," " code %(code)d:\n" "STDOUT:\n============================\n" "%(out)s\n" "STDERR:\n============================\n" "%(err)s\n" "============================\n" % {"command": self.config["command"], "code": code, "out": out, "err": err}) return code, out, err rally-0.9.1/rally/plugins/openstack/context/manila/0000775000567000056710000000000013073420067023527 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/manila/__init__.py0000664000567000056710000000000013073417716025635 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/manila/manila_share_networks.py0000664000567000056710000002761413073417716030501 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts as rally_consts from rally import exceptions from rally.plugins.openstack.context.manila import consts from rally.plugins.openstack.scenarios.manila import utils as manila_utils from rally.task import context from rally.task import utils as bench_utils CONF = cfg.CONF LOG = logging.getLogger(__name__) CONTEXT_NAME = consts.SHARE_NETWORKS_CONTEXT_NAME @context.configure(name=CONTEXT_NAME, order=450) class ShareNetworks(context.Context): """This context creates share networks for Manila project.""" CONFIG_SCHEMA = { "type": "object", "$schema": rally_consts.JSON_SCHEMA, "properties": { # NOTE(vponomaryov): specifies whether manila should use # share networks for share creation or not. "use_share_networks": {"type": "boolean"}, # NOTE(vponomaryov): this context arg will be used only when # context arg "use_share_networks" is set to True. # If context arg 'share_networks' has values # then they will be used else share networks will be autocreated - # one for each tenant network. If networks do not exist then will # be created one share network for each tenant without network # data. # Expected value is dict of lists where tenant Name or ID is key # and list of share_network Names or IDs is value. Example: # "context": { # "manila_share_networks": { # "use_share_networks": true, # "share_networks": { # "tenant_1_name_or_id": ["share_network_1_name_or_id", # "share_network_2_name_or_id"], # "tenant_2_name_or_id": ["share_network_3_name_or_id"] # } # } # } # Also, make sure that all 'existing users' in appropriate # registered deployment have share networks if its usage is # enabled, else Rally will randomly take users that does not # satisfy criteria. 
"share_networks": {"type": "object"}, }, "additionalProperties": False } DEFAULT_CONFIG = { "use_share_networks": False, "share_networks": {}, } def _setup_for_existing_users(self): if (self.config["use_share_networks"] and not self.config["share_networks"]): msg = _("Usage of share networks was enabled but for deployment " "with existing users share networks also should be " "specified via arg 'share_networks'") raise exceptions.ContextSetupFailure( ctx_name=self.get_name(), msg=msg) # Set flag that says we will not delete/cleanup share networks self.context[CONTEXT_NAME]["delete_share_networks"] = False for tenant_name_or_id, share_networks in self.config[ "share_networks"].items(): # Verify project existence for tenant in self.context["tenants"].values(): if tenant_name_or_id in (tenant["id"], tenant["name"]): tenant_id = tenant["id"] existing_user = None for user in self.context["users"]: if user["tenant_id"] == tenant_id: existing_user = user break break else: msg = _("Provided tenant Name or ID '%s' was not found in " "existing tenants.") % tenant_name_or_id raise exceptions.ContextSetupFailure( ctx_name=self.get_name(), msg=msg) self.context["tenants"][tenant_id][CONTEXT_NAME] = {} self.context["tenants"][tenant_id][CONTEXT_NAME][ "share_networks"] = [] manila_scenario = manila_utils.ManilaScenario({ "user": existing_user, "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) existing_sns = manila_scenario._list_share_networks( detailed=False, search_opts={"project_id": tenant_id}) for sn_name_or_id in share_networks: # Verify share network existence for sn in existing_sns: if sn_name_or_id in (sn.id, sn.name): break else: msg = _("Specified share network '%(sn)s' does not " "exist for tenant '%(tenant_id)s'") % { "sn": sn_name_or_id, "tenant_id": tenant_id} raise exceptions.ContextSetupFailure( ctx_name=self.get_name(), msg=msg) # Set share network for project self.context["tenants"][tenant_id][CONTEXT_NAME][ 
"share_networks"].append(sn.to_dict()) def _setup_for_autocreated_users(self): # Create share network for each network of tenant for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): networks = self.context["tenants"][tenant_id].get("networks") manila_scenario = manila_utils.ManilaScenario({ "task": self.task, "user": user, "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) self.context["tenants"][tenant_id][CONTEXT_NAME] = { "share_networks": []} data = {} def _setup_share_network(tenant_id, data): share_network = manila_scenario._create_share_network( **data).to_dict() self.context["tenants"][tenant_id][CONTEXT_NAME][ "share_networks"].append(share_network) for ss in self.context["tenants"][tenant_id].get( consts.SECURITY_SERVICES_CONTEXT_NAME, {}).get( "security_services", []): manila_scenario._add_security_service_to_share_network( share_network["id"], ss["id"]) if networks: for network in networks: if network.get("cidr"): data["nova_net_id"] = network["id"] elif network.get("subnets"): data["neutron_net_id"] = network["id"] data["neutron_subnet_id"] = network["subnets"][0] else: LOG.warning(_( "Can not determine network service provider. " "Share network will have no data.")) _setup_share_network(tenant_id, data) else: _setup_share_network(tenant_id, data) @logging.log_task_wrapper(LOG.info, _("Enter context: `%s`") % CONTEXT_NAME) def setup(self): self.context[CONTEXT_NAME] = {} if not self.config["use_share_networks"]: self.context[CONTEXT_NAME]["delete_share_networks"] = False elif self.context["config"].get("existing_users"): self._setup_for_existing_users() else: self._setup_for_autocreated_users() def _cleanup_tenant_resources(self, resources_plural_name, resources_singular_name): """Cleans up tenant resources. :param resources_plural_name: plural name for resources :param resources_singular_name: singular name for resource. 
Expected to be part of resource deletion method name (obj._delete_%s) """ for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): manila_scenario = manila_utils.ManilaScenario({ "user": user, "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) resources = self.context["tenants"][tenant_id][CONTEXT_NAME].get( resources_plural_name, []) for resource in resources: logger = logging.ExceptionLogger( LOG, _("Failed to delete %(name)s %(id)s for tenant %(t)s.") % { "id": resource, "t": tenant_id, "name": resources_singular_name}) with logger: delete_func = getattr( manila_scenario, "_delete_%s" % resources_singular_name) delete_func(resource) def _wait_for_cleanup_of_share_networks(self): """Waits for deletion of Manila service resources.""" for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): self._wait_for_resources_deletion( self.context["tenants"][tenant_id][CONTEXT_NAME].get("shares")) manila_scenario = manila_utils.ManilaScenario({ "user": user, "admin": self.context["admin"], "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) for sn in self.context["tenants"][tenant_id][CONTEXT_NAME][ "share_networks"]: share_servers = manila_scenario._list_share_servers( search_opts={"share_network": sn["id"]}) self._wait_for_resources_deletion(share_servers) def _wait_for_resources_deletion(self, resources): """Waiter for resources deletion. 
:param resources: resource or list of resources for deletion verification """ if not resources: return if not isinstance(resources, list): resources = [resources] for resource in resources: bench_utils.wait_for_status( resource, ready_statuses=["deleted"], check_deletion=True, update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.manila_share_delete_timeout, check_interval=( CONF.benchmark.manila_share_delete_poll_interval)) @logging.log_task_wrapper(LOG.info, _("Exit context: `%s`") % CONTEXT_NAME) def cleanup(self): if self.context[CONTEXT_NAME].get("delete_share_networks", True): # NOTE(vponomaryov): Schedule 'share networks' deletion. self._cleanup_tenant_resources("share_networks", "share_network") # NOTE(vponomaryov): Share network deletion schedules deletion of # share servers. So, we should wait for its deletion too to avoid # further failures of network resources release. # Use separate cycle to make share servers be deleted in parallel. self._wait_for_cleanup_of_share_networks() else: # NOTE(vponomaryov): assume that share networks were not created # by test run. return rally-0.9.1/rally/plugins/openstack/context/manila/manila_shares.py0000664000567000056710000000717213073417720026720 0ustar jenkinsjenkins00000000000000# Copyright 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts as rally_consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.context.manila import consts from rally.plugins.openstack.scenarios.manila import utils as manila_utils from rally.task import context CONF = cfg.CONF LOG = logging.getLogger(__name__) CONTEXT_NAME = consts.SHARES_CONTEXT_NAME @context.configure(name=CONTEXT_NAME, order=455) class Shares(context.Context): """This context creates shares for Manila project.""" CONFIG_SCHEMA = { "type": "object", "$schema": rally_consts.JSON_SCHEMA, "properties": { "shares_per_tenant": { "type": "integer", "minimum": 1, }, "size": { "type": "integer", "minimum": 1 }, "share_proto": { "type": "string", }, "share_type": { "type": "string", }, }, "additionalProperties": False } DEFAULT_CONFIG = { "shares_per_tenant": 1, "size": 1, "share_proto": "NFS", "share_type": None, } def _create_shares(self, manila_scenario, tenant_id, share_proto, size=1, share_type=None): tenant_ctxt = self.context["tenants"][tenant_id] tenant_ctxt.setdefault("shares", []) for i in range(self.config["shares_per_tenant"]): kwargs = {"share_proto": share_proto, "size": size} if share_type: kwargs["share_type"] = share_type share_networks = tenant_ctxt.get("manila_share_networks", {}).get( "share_networks", []) if share_networks: kwargs["share_network"] = share_networks[ i % len(share_networks)]["id"] share = manila_scenario._create_share(**kwargs) tenant_ctxt["shares"].append(share.to_dict()) @logging.log_task_wrapper( LOG.info, _("Enter context: `%s`") % CONTEXT_NAME) def setup(self): for user, tenant_id in ( utils.iterate_per_tenants(self.context.get("users", []))): manila_scenario = manila_utils.ManilaScenario({ "task": self.task, "user": user, "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) self._create_shares( 
manila_scenario, tenant_id, self.config["share_proto"], self.config["size"], self.config["share_type"], ) @logging.log_task_wrapper(LOG.info, _("Exit context: `%s`") % CONTEXT_NAME) def cleanup(self): resource_manager.cleanup( names=["manila.shares"], users=self.context.get("users", []), ) rally-0.9.1/rally/plugins/openstack/context/manila/manila_security_services.py0000664000567000056710000000726213073417720031205 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts as rally_consts from rally.plugins.openstack.cleanup import manager as resource_manager from rally.plugins.openstack.context.manila import consts from rally.plugins.openstack.scenarios.manila import utils as manila_utils from rally.task import context CONF = cfg.CONF LOG = logging.getLogger(__name__) CONTEXT_NAME = consts.SECURITY_SERVICES_CONTEXT_NAME @context.configure(name=CONTEXT_NAME, order=445) class SecurityServices(context.Context): """This context creates 'security services' for Manila project.""" CONFIG_SCHEMA = { "type": "object", "$schema": rally_consts.JSON_SCHEMA, "properties": { "security_services": { "type": "array", "description": "It is expected to be list of dicts with data for creation" " of security services.", "items": { "type": "object", "properties": {"type": {"enum": ["active_directory", "kerberos", "ldap"]}}, "required": ["type"], "additionalProperties": True, "description": "Data for creation of security services. \n " "Example:\n\n" " .. 
code-block:: json\n\n" " {'type': 'LDAP', 'dns_ip': 'foo_ip', \n" " 'server': 'bar_ip', 'domain': 'quuz_domain',\n" " 'user': 'ololo', 'password': 'fake_password'}\n" } }, }, "additionalProperties": False } DEFAULT_CONFIG = { "security_services": [], } @logging.log_task_wrapper( LOG.info, _("Enter context: `%s`") % CONTEXT_NAME) def setup(self): for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): self.context["tenants"][tenant_id][CONTEXT_NAME] = { "security_services": [], } if self.config["security_services"]: manila_scenario = manila_utils.ManilaScenario({ "task": self.task, "user": user, "config": { "api_versions": self.context["config"].get( "api_versions", [])} }) for ss in self.config["security_services"]: inst = manila_scenario._create_security_service( **ss).to_dict() self.context["tenants"][tenant_id][CONTEXT_NAME][ "security_services"].append(inst) @logging.log_task_wrapper(LOG.info, _("Exit context: `%s`") % CONTEXT_NAME) def cleanup(self): resource_manager.cleanup( names=["manila.security_services"], users=self.context.get("users", []), ) rally-0.9.1/rally/plugins/openstack/context/manila/consts.py0000664000567000056710000000141513073417716025422 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
SHARES_CONTEXT_NAME = "manila_shares" SHARE_NETWORKS_CONTEXT_NAME = "manila_share_networks" SECURITY_SERVICES_CONTEXT_NAME = "manila_security_services" rally-0.9.1/rally/plugins/openstack/context/quotas/0000775000567000056710000000000013073420067023602 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/quotas/__init__.py0000664000567000056710000000000013073417716025710 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/quotas/cinder_quotas.py0000664000567000056710000000341013073417716027021 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
class CinderQuotas(object): """Management of Cinder quotas.""" QUOTAS_SCHEMA = { "type": "object", "additionalProperties": False, "properties": { "gigabytes": { "type": "integer", "minimum": -1 }, "snapshots": { "type": "integer", "minimum": -1 }, "volumes": { "type": "integer", "minimum": -1 }, "backups": { "type": "integer", "minimum": -1 }, "backup_gigabytes": { "type": "integer", "minimum": -1 } } } def __init__(self, clients): self.clients = clients def update(self, tenant_id, **kwargs): self.clients.cinder().quotas.update(tenant_id, **kwargs) def delete(self, tenant_id): self.clients.cinder().quotas.delete(tenant_id) def get(self, tenant_id): response = self.clients.cinder().quotas.get(tenant_id) return dict([(k, getattr(response, k)) for k in self.QUOTAS_SCHEMA["properties"]]) rally-0.9.1/rally/plugins/openstack/context/quotas/manila_quotas.py0000664000567000056710000000345713073417716027031 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
class ManilaQuotas(object): """Management of Manila quotas.""" QUOTAS_SCHEMA = { "type": "object", "additionalProperties": False, "properties": { "shares": { "type": "integer", "minimum": -1 }, "gigabytes": { "type": "integer", "minimum": -1 }, "snapshots": { "type": "integer", "minimum": -1 }, "snapshot_gigabytes": { "type": "integer", "minimum": -1 }, "share_networks": { "type": "integer", "minimum": -1 } } } def __init__(self, clients): self.clients = clients def update(self, tenant_id, **kwargs): self.clients.manila().quotas.update(tenant_id, **kwargs) def delete(self, tenant_id): self.clients.manila().quotas.delete(tenant_id) def get(self, tenant_id): response = self.clients.manila().quotas.get(tenant_id) return dict([(k, getattr(response, k)) for k in self.QUOTAS_SCHEMA["properties"]]) rally-0.9.1/rally/plugins/openstack/context/quotas/nova_quotas.py0000664000567000056710000000551113073417716026524 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
class NovaQuotas(object): """Management of Nova quotas.""" QUOTAS_SCHEMA = { "type": "object", "additionalProperties": False, "properties": { "instances": { "type": "integer", "minimum": -1 }, "cores": { "type": "integer", "minimum": -1 }, "ram": { "type": "integer", "minimum": -1 }, "floating_ips": { "type": "integer", "minimum": -1 }, "fixed_ips": { "type": "integer", "minimum": -1 }, "metadata_items": { "type": "integer", "minimum": -1 }, "injected_files": { "type": "integer", "minimum": -1 }, "injected_file_content_bytes": { "type": "integer", "minimum": -1 }, "injected_file_path_bytes": { "type": "integer", "minimum": -1 }, "key_pairs": { "type": "integer", "minimum": -1 }, "security_groups": { "type": "integer", "minimum": -1 }, "security_group_rules": { "type": "integer", "minimum": -1 }, "server_groups": { "type": "integer", "minimum": -1 }, "server_group_members": { "type": "integer", "minimum": -1 } } } def __init__(self, clients): self.clients = clients def update(self, tenant_id, **kwargs): self.clients.nova().quotas.update(tenant_id, **kwargs) def delete(self, tenant_id): # Reset quotas to defaults and tag database objects as deleted self.clients.nova().quotas.delete(tenant_id) def get(self, tenant_id): response = self.clients.nova().quotas.get(tenant_id) return dict([(k, getattr(response, k)) for k in self.QUOTAS_SCHEMA["properties"]]) rally-0.9.1/rally/plugins/openstack/context/quotas/quotas.py0000664000567000056710000001106413073417716025501 0ustar jenkinsjenkins00000000000000# Copyright 2014: Dassault Systemes # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally import consts from rally import osclients from rally.plugins.openstack.context.quotas import cinder_quotas from rally.plugins.openstack.context.quotas import designate_quotas from rally.plugins.openstack.context.quotas import manila_quotas from rally.plugins.openstack.context.quotas import neutron_quotas from rally.plugins.openstack.context.quotas import nova_quotas from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="quotas", order=300) class Quotas(context.Context): """Context class for updating benchmarks' tenants quotas.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "additionalProperties": False, "properties": { "nova": nova_quotas.NovaQuotas.QUOTAS_SCHEMA, "cinder": cinder_quotas.CinderQuotas.QUOTAS_SCHEMA, "manila": manila_quotas.ManilaQuotas.QUOTAS_SCHEMA, "designate": designate_quotas.DesignateQuotas.QUOTAS_SCHEMA, "neutron": neutron_quotas.NeutronQuotas.QUOTAS_SCHEMA } } def __init__(self, ctx): super(Quotas, self).__init__(ctx) self.clients = osclients.Clients( self.context["admin"]["credential"], api_info=self.context["config"].get("api_versions")) self.manager = { "nova": nova_quotas.NovaQuotas(self.clients), "cinder": cinder_quotas.CinderQuotas(self.clients), "manila": manila_quotas.ManilaQuotas(self.clients), "designate": designate_quotas.DesignateQuotas(self.clients), "neutron": neutron_quotas.NeutronQuotas(self.clients) } self.original_quotas = [] def _service_has_quotas(self, service): return 
len(self.config.get(service, {})) > 0 @logging.log_task_wrapper(LOG.info, _("Enter context: `quotas`")) def setup(self): for tenant_id in self.context["tenants"]: for service in self.manager: if self._service_has_quotas(service): # NOTE(andreykurilin): in case of existing users it is # required to restore original quotas instead of reset # to default ones. if "existing_users" in self.context: self.original_quotas.append( (service, tenant_id, self.manager[service].get(tenant_id))) self.manager[service].update(tenant_id, **self.config[service]) def _restore_quotas(self): for service, tenant_id, quotas in self.original_quotas: try: self.manager[service].update(tenant_id, **quotas) except Exception as e: LOG.warning("Failed to restore quotas for tenant %(tenant_id)s" " in service %(service)s \n reason: %(exc)s" % {"tenant_id": tenant_id, "service": service, "exc": e}) def _delete_quotas(self): for service in self.manager: if self._service_has_quotas(service): for tenant_id in self.context["tenants"]: try: self.manager[service].delete(tenant_id) except Exception as e: LOG.warning("Failed to remove quotas for tenant " "%(tenant_id)s in service %(service)s " "\n reason: %(exc)s" % {"tenant_id": tenant_id, "service": service, "exc": e}) @logging.log_task_wrapper(LOG.info, _("Exit context: `quotas`")) def cleanup(self): if self.original_quotas: # existing users self._restore_quotas() else: self._delete_quotas() rally-0.9.1/rally/plugins/openstack/context/quotas/neutron_quotas.py0000664000567000056710000000443213073417716027254 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. class NeutronQuotas(object): """Management of Neutron quotas.""" QUOTAS_SCHEMA = { "type": "object", "additionalProperties": False, "properties": { "network": { "type": "integer", "minimum": -1 }, "subnet": { "type": "integer", "minimum": -1 }, "port": { "type": "integer", "minimum": -1 }, "router": { "type": "integer", "minimum": -1 }, "floatingip": { "type": "integer", "minimum": -1 }, "security_group": { "type": "integer", "minimum": -1 }, "security_group_rule": { "type": "integer", "minimum": -1 }, "pool": { "type": "integer", "minimum": -1 }, "vip": { "type": "integer", "minimum": -1 }, "health_monitor": { "type": "integer", "minimum": -1 } } } def __init__(self, clients): self.clients = clients def update(self, tenant_id, **kwargs): body = {"quota": kwargs} self.clients.neutron().update_quota(tenant_id, body=body) def delete(self, tenant_id): # Reset quotas to defaults and tag database objects as deleted self.clients.neutron().delete_quota(tenant_id) def get(self, tenant_id): return self.clients.neutron().show_quota(tenant_id)["quota"] rally-0.9.1/rally/plugins/openstack/context/quotas/designate_quotas.py0000664000567000056710000000345113073417716027525 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. class DesignateQuotas(object): """Management of Designate quotas.""" QUOTAS_SCHEMA = { "type": "object", "additionalProperties": False, "properties": { "domains": { "type": "integer", "minimum": 1 }, "domain_recordsets": { "type": "integer", "minimum": 1 }, "domain_records": { "type": "integer", "minimum": 1 }, "recordset_records": { "type": "integer", "minimum": 1 }, } } def __init__(self, clients): self.clients = clients def update(self, tenant_id, **kwargs): self.clients.designate().quotas.update(tenant_id, kwargs) def delete(self, tenant_id): self.clients.designate().quotas.reset(tenant_id) def get(self, tenant_id): # NOTE(andreykurilin): we have broken designate jobs, so I can't check # that this method is right :( response = self.clients.designate().quotas.get(tenant_id) return dict([(k, response.get(k)) for k in self.QUOTAS_SCHEMA["properties"]]) rally-0.9.1/rally/plugins/openstack/context/neutron/0000775000567000056710000000000013073420067023760 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/neutron/__init__.py0000664000567000056710000000000013073417716026066 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/neutron/lbaas.py0000664000567000056710000000671313073417716025432 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import utils from rally import consts from rally import osclients from rally.plugins.openstack.wrappers import network as network_wrapper from rally.task import context LOG = logging.getLogger(__name__) @context.configure(name="lbaas", order=360) class Lbaas(context.Context): CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "pool": { "type": "object" }, "lbaas_version": { "type": "integer", "minimum": 1 } }, "additionalProperties": False } DEFAULT_CONFIG = { "pool": { "lb_method": "ROUND_ROBIN", "protocol": "HTTP" }, "lbaas_version": 1 } @logging.log_task_wrapper(LOG.info, _("Enter context: `lbaas`")) def setup(self): net_wrapper = network_wrapper.wrap( osclients.Clients(self.context["admin"]["credential"]), self, config=self.config) use_lb, msg = net_wrapper.supports_extension("lbaas") if not use_lb: LOG.info(msg) return # Creates a lb-pool for every subnet created in network context. for user, tenant_id in (utils.iterate_per_tenants( self.context.get("users", []))): for network in self.context["tenants"][tenant_id]["networks"]: for subnet in network.get("subnets", []): if self.config["lbaas_version"] == 1: network.setdefault("lb_pools", []).append( net_wrapper.create_v1_pool( tenant_id, subnet, **self.config["pool"])) else: raise NotImplementedError( "Context for LBaaS version %s not implemented." 
% self.config["lbaas_version"]) @logging.log_task_wrapper(LOG.info, _("Exit context: `lbaas`")) def cleanup(self): net_wrapper = network_wrapper.wrap( osclients.Clients(self.context["admin"]["credential"]), self, config=self.config) for tenant_id, tenant_ctx in self.context["tenants"].items(): for network in tenant_ctx.get("networks", []): for pool in network.get("lb_pools", []): with logging.ExceptionLogger( LOG, _("Failed to delete pool %(pool)s for tenant " "%(tenant)s") % {"pool": pool["pool"]["id"], "tenant": tenant_id}): if self.config["lbaas_version"] == 1: net_wrapper.delete_v1_pool(pool["pool"]["id"]) rally-0.9.1/rally/plugins/openstack/context/keystone/0000775000567000056710000000000013073420067024127 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/keystone/__init__.py0000664000567000056710000000000013073417716026235 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/keystone/roles.py0000664000567000056710000001043013073417720025625 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.common import broker from rally.common.i18n import _ from rally.common import logging from rally import consts from rally import exceptions from rally import osclients from rally.plugins.openstack.services.identity import identity from rally.task import context LOG = logging.getLogger(__name__) ROLES_CONTEXT_OPTS = [ cfg.IntOpt("resource_management_workers", default=30, help="How many concurrent threads to use for serving roles " "context"), ] CONF = cfg.CONF CONF.register_opts(ROLES_CONTEXT_OPTS, group=cfg.OptGroup(name="roles_context", title="benchmark context options")) @context.configure(name="roles", order=330) class RoleGenerator(context.Context): """Context class for assigning roles for users.""" CONFIG_SCHEMA = { "type": "array", "$schema": consts.JSON_SCHEMA, "items": { "type": "string", "description": "The name of role to assign to user" } } def __init__(self, ctx): super(RoleGenerator, self).__init__(ctx) self.credential = self.context["admin"]["credential"] self.workers = cfg.CONF.roles_context.resource_management_workers def _get_role_object(self, context_role): """Check if role exists. :param context_role: name of existing role. 
""" keystone = identity.Identity(osclients.Clients(self.credential)) default_roles = keystone.list_roles() for def_role in default_roles: if str(def_role.name) == context_role: return def_role else: raise exceptions.NoSuchRole(role=context_role) def _get_consumer(self, func_name): def consume(cache, args): role_id, user_id, project_id = args if "client" not in cache: clients = osclients.Clients(self.credential) cache["client"] = identity.Identity(clients) getattr(cache["client"], func_name)(role_id=role_id, user_id=user_id, project_id=project_id) return consume @logging.log_task_wrapper(LOG.info, _("Enter context: `roles`")) def setup(self): """Add all roles to users.""" threads = self.workers roles_dict = {} def publish(queue): for context_role in self.config: role = self._get_role_object(context_role) roles_dict[role.id] = role.name LOG.debug("Adding role %(role_name)s having ID %(role_id)s " "to all users using %(threads)s threads" % {"role_name": role.name, "role_id": role.id, "threads": threads}) for user in self.context["users"]: args = (role.id, user["id"], user["tenant_id"]) queue.append(args) broker.run(publish, self._get_consumer("add_role"), threads) self.context["roles"] = roles_dict @logging.log_task_wrapper(LOG.info, _("Exit context: `roles`")) def cleanup(self): """Remove all roles from users.""" threads = self.workers def publish(queue): for role_id in self.context["roles"]: LOG.debug("Removing role %s from all users" % role_id) for user in self.context["users"]: args = (role_id, user["id"], user["tenant_id"]) queue.append(args) broker.run(publish, self._get_consumer("revoke_role"), threads) rally-0.9.1/rally/plugins/openstack/context/keystone/users.py0000664000567000056710000003013313073417720025644 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import uuid from oslo_config import cfg from rally.common import broker from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally.common import utils as rutils from rally import consts from rally import exceptions from rally import osclients from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.wrappers import network from rally.task import context from rally.task import utils LOG = logging.getLogger(__name__) RESOURCE_MANAGEMENT_WORKERS_DESCR = ("The number of concurrent threads to use " "for serving users context.") PROJECT_DOMAIN_DESCR = "ID of domain in which projects will be created." USER_DOMAIN_DESCR = "ID of domain in which users will be created." 
USER_CONTEXT_OPTS = [ cfg.IntOpt("resource_management_workers", default=20, help=RESOURCE_MANAGEMENT_WORKERS_DESCR), cfg.StrOpt("project_domain", default="default", help=PROJECT_DOMAIN_DESCR), cfg.StrOpt("user_domain", default="default", help=USER_DOMAIN_DESCR), cfg.StrOpt("keystone_default_role", default="member", help="The default role name of the keystone to assign to " "users."), ] CONF = cfg.CONF CONF.register_opts(USER_CONTEXT_OPTS, group=cfg.OptGroup(name="users_context", title="benchmark context options")) @context.configure(name="users", namespace="openstack", order=100) class UserGenerator(context.Context): """Context class for generating temporary users/tenants for benchmarks.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "tenants": { "type": "integer", "minimum": 1, "description": "The number of tenants to create." }, "users_per_tenant": { "type": "integer", "minimum": 1, "description": "The number of users to create per one tenant." }, "resource_management_workers": { "type": "integer", "minimum": 1, "description": RESOURCE_MANAGEMENT_WORKERS_DESCR, }, "project_domain": { "type": "string", "description": PROJECT_DOMAIN_DESCR }, "user_domain": { "type": "string", "description": USER_DOMAIN_DESCR }, "user_choice_method": { "enum": ["random", "round_robin"], "description": "The mode of balancing usage of users between " "scenario iterations." 
}, }, "additionalProperties": False } DEFAULT_CONFIG = { "tenants": 1, "users_per_tenant": 1, "resource_management_workers": cfg.CONF.users_context.resource_management_workers, "user_choice_method": "random", } def __init__(self, context): self.credential = context["admin"]["credential"] project_domain = (self.credential.project_domain_name or cfg.CONF.users_context.project_domain) user_domain = (self.credential.user_domain_name or cfg.CONF.users_context.user_domain) self.DEFAULT_CONFIG["project_domain"] = project_domain self.DEFAULT_CONFIG["user_domain"] = user_domain super(UserGenerator, self).__init__(context) def _remove_default_security_group(self): """Delete default security group for tenants.""" clients = osclients.Clients(self.credential) if consts.Service.NEUTRON not in clients.services().values(): return use_sg, msg = network.wrap(clients, self).supports_extension( "security-group") if not use_sg: LOG.debug("Security group context is disabled: %s" % msg) return for user, tenant_id in rutils.iterate_per_tenants( self.context["users"]): with logging.ExceptionLogger( LOG, _("Unable to delete default security group")): uclients = osclients.Clients(user["credential"]) sg = uclients.nova().security_groups.find(name="default") clients.neutron().delete_security_group(sg.id) def _remove_associated_networks(self): """Delete associated Nova networks from tenants.""" # NOTE(rmk): Ugly hack to deal with the fact that Nova Network # networks can only be disassociated in an admin context. Discussed # with boris-42 before taking this approach [LP-Bug #1350517]. 
clients = osclients.Clients(self.credential) if consts.Service.NOVA not in clients.services().values(): return nova_admin = clients.nova() if not utils.check_service_status(nova_admin, "nova-network"): return for net in nova_admin.networks.list(): network_tenant_id = nova_admin.networks.get(net).project_id if network_tenant_id in self.context["tenants"]: try: nova_admin.networks.disassociate(net) except Exception as ex: LOG.warning("Failed disassociate net: %(tenant_id)s. " "Exception: %(ex)s" % {"tenant_id": network_tenant_id, "ex": ex}) def _create_tenants(self): threads = self.config["resource_management_workers"] tenants = collections.deque() def publish(queue): for i in range(self.config["tenants"]): args = (self.config["project_domain"], self.task["uuid"], i) queue.append(args) def consume(cache, args): domain, task_id, i = args if "client" not in cache: clients = osclients.Clients(self.credential) cache["client"] = identity.Identity( clients, name_generator=self.generate_random_name) tenant = cache["client"].create_project(domain_name=domain) tenant_dict = {"id": tenant.id, "name": tenant.name, "users": []} tenants.append(tenant_dict) # NOTE(msdubov): consume() will fill the tenants list in the closure. broker.run(publish, consume, threads) tenants_dict = {} for t in tenants: tenants_dict[t["id"]] = t return tenants_dict def _create_users(self): # NOTE(msdubov): This should be called after _create_tenants(). 
threads = self.config["resource_management_workers"] users_per_tenant = self.config["users_per_tenant"] default_role = cfg.CONF.users_context.keystone_default_role users = collections.deque() def publish(queue): for tenant_id in self.context["tenants"]: for user_id in range(users_per_tenant): username = self.generate_random_name() password = str(uuid.uuid4()) args = (username, password, self.config["project_domain"], self.config["user_domain"], tenant_id) queue.append(args) def consume(cache, args): username, password, project_dom, user_dom, tenant_id = args if "client" not in cache: clients = osclients.Clients(self.credential) cache["client"] = identity.Identity( clients, name_generator=self.generate_random_name) client = cache["client"] user = client.create_user(username, password=password, project_id=tenant_id, domain_name=user_dom, default_role=default_role) user_credential = objects.Credential( self.credential.auth_url, user.name, password, self.context["tenants"][tenant_id]["name"], consts.EndpointPermission.USER, project_domain_name=project_dom, user_domain_name=user_dom, endpoint_type=self.credential.endpoint_type, https_insecure=self.credential.insecure, https_cacert=self.credential.cacert, region_name=self.credential.region_name) users.append({"id": user.id, "credential": user_credential, "tenant_id": tenant_id}) # NOTE(msdubov): consume() will fill the users list in the closure. 
broker.run(publish, consume, threads) return list(users) def _get_consumer_for_deletion(self, func_name): def consume(cache, resource_id): if "client" not in cache: clients = osclients.Clients(self.credential) cache["client"] = identity.Identity(clients) getattr(cache["client"], func_name)(resource_id) return consume def _delete_tenants(self): threads = self.config["resource_management_workers"] self._remove_associated_networks() def publish(queue): for tenant_id in self.context["tenants"]: queue.append(tenant_id) broker.run(publish, self._get_consumer_for_deletion("delete_project"), threads) self.context["tenants"] = {} def _delete_users(self): threads = self.config["resource_management_workers"] def publish(queue): for user in self.context["users"]: queue.append(user["id"]) broker.run(publish, self._get_consumer_for_deletion("delete_user"), threads) self.context["users"] = [] @logging.log_task_wrapper(LOG.info, _("Enter context: `users`")) def setup(self): """Create tenants and users, using the broker pattern.""" super(UserGenerator, self).setup() self.context["users"] = [] self.context["tenants"] = {} self.context["user_choice_method"] = self.config["user_choice_method"] threads = self.config["resource_management_workers"] LOG.debug("Creating %(tenants)d tenants using %(threads)s threads" % {"tenants": self.config["tenants"], "threads": threads}) self.context["tenants"] = self._create_tenants() if len(self.context["tenants"]) < self.config["tenants"]: raise exceptions.ContextSetupFailure( ctx_name=self.get_name(), msg=_("Failed to create the requested number of tenants.")) users_num = self.config["users_per_tenant"] * self.config["tenants"] LOG.debug("Creating %(users)d users using %(threads)s threads" % {"users": users_num, "threads": threads}) self.context["users"] = self._create_users() for user in self.context["users"]: self.context["tenants"][user["tenant_id"]]["users"].append(user) if len(self.context["users"]) < users_num: raise 
exceptions.ContextSetupFailure( ctx_name=self.get_name(), msg=_("Failed to create the requested number of users.")) @logging.log_task_wrapper(LOG.info, _("Exit context: `users`")) def cleanup(self): """Delete tenants and users, using the broker pattern.""" self._remove_default_security_group() self._delete_users() self._delete_tenants() rally-0.9.1/rally/plugins/openstack/context/keystone/existing_users.py0000664000567000056710000000476413073417720027571 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally import osclients from rally.task import context LOG = logging.getLogger(__name__) # NOTE(boris-42): This context should be hidden for now and used only by # benchmark engine. In future during various refactoring of # validation system and rally CI testing we will make it public @context.configure(name="existing_users", namespace="openstack", order=99, hidden=True) class ExistingUsers(context.Context): """This context supports using existing users in Rally. It uses information about deployment to properly initialize context["users"] and context["tenants"] So there won't be big difference between usage of "users" and "existing_users" context. 
""" @logging.log_task_wrapper(LOG.info, _("Enter context: `existing_users`")) def setup(self): super(ExistingUsers, self).setup() self.context["users"] = [] self.context["tenants"] = {} self.context["user_choice_method"] = "random" for user in self.config: user_credential = objects.Credential(**user) user_clients = osclients.Clients(user_credential) user_id = user_clients.keystone.auth_ref.user_id tenant_id = user_clients.keystone.auth_ref.project_id if tenant_id not in self.context["tenants"]: self.context["tenants"][tenant_id] = { "id": tenant_id, "name": user_credential.tenant_name } self.context["users"].append({ "credential": user_credential, "id": user_id, "tenant_id": tenant_id }) @logging.log_task_wrapper(LOG.info, _("Exit context: `existing_users`")) def cleanup(self): """These users are not managed by Rally, so don't touch them.""" rally-0.9.1/rally/plugins/openstack/context/cleanup/0000775000567000056710000000000013073420067023715 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/cleanup/__init__.py0000664000567000056710000000000013073417716026023 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/context/cleanup/base.py0000664000567000056710000000204213073417720025201 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common.i18n import _ from rally import consts from rally import exceptions class NoSuchCleanupResources(exceptions.RallyException): msg_fmt = _("Missing cleanup resource managers: %(message)s") class CleanupMixin(object): CONFIG_SCHEMA = { "type": "array", "$schema": consts.JSON_SCHEMA, "items": { "type": "string", } } def setup(self): pass rally-0.9.1/rally/plugins/openstack/context/cleanup/admin.py0000664000567000056710000000376713073417720025376 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys from rally.common.i18n import _ from rally.common import logging from rally.plugins.openstack.cleanup import manager from rally.plugins.openstack.context.cleanup import base from rally.plugins.openstack import scenario from rally.task import context LOG = logging.getLogger(__name__) # NOTE(amaretskiy): Set order to run this just before UserCleanup @context.configure(name="admin_cleanup", order=(sys.maxsize - 1), hidden=True) class AdminCleanup(base.CleanupMixin, context.Context): """Context class for admin resources cleanup.""" @classmethod def validate(cls, config): super(AdminCleanup, cls).validate(config) missing = set(config) missing -= manager.list_resource_names(admin_required=True) missing = ", ".join(missing) if missing: LOG.info(_("Couldn't find cleanup resource managers: %s") % missing) raise base.NoSuchCleanupResources(missing) @logging.log_task_wrapper(LOG.info, _("admin resources cleanup")) def cleanup(self): manager.cleanup( names=self.config, admin_required=True, admin=self.context["admin"], users=self.context.get("users", []), api_versions=self.context["config"].get("api_versions"), superclass=scenario.OpenStackScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/rally/plugins/openstack/context/cleanup/user.py0000664000567000056710000000367613073417720025263 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys

from rally.common.i18n import _
from rally.common import logging
from rally.plugins.openstack.cleanup import manager
from rally.plugins.openstack.context.cleanup import base
from rally.plugins.openstack import scenario
from rally.task import context

LOG = logging.getLogger(__name__)


# NOTE(amaretskiy): Set maximum order to run this last
@context.configure(name="cleanup", order=sys.maxsize, hidden=True)
class UserCleanup(base.CleanupMixin, context.Context):
    """Context class for user resources cleanup."""

    @classmethod
    def validate(cls, config):
        """Check every configured name against the user resource managers.

        :param config: list of resource-manager names to clean up
        :raises NoSuchCleanupResources: if any configured name is unknown
        """
        super(UserCleanup, cls).validate(config)

        known = manager.list_resource_names(admin_required=False)
        unknown = ", ".join(set(config) - known)
        if unknown:
            LOG.info(_("Couldn't find cleanup resource managers: %s")
                     % unknown)
            raise base.NoSuchCleanupResources(unknown)

    @logging.log_task_wrapper(LOG.info, _("user resources cleanup"))
    def cleanup(self):
        """Purge user-owned resources left behind by the scenarios."""
        manager.cleanup(
            names=self.config,
            admin_required=False,
            users=self.context.get("users", []),
            api_versions=self.context["config"].get("api_versions"),
            superclass=scenario.OpenStackScenario,
            task_id=self.context["task"]["uuid"])
from oslo_config import cfg

from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils

CONF = cfg.CONF

WATCHER_BENCHMARK_OPTS = [
    cfg.FloatOpt("watcher_audit_launch_poll_interval", default=2.0,
                 help="Watcher audit launch interval"),
    cfg.IntOpt("watcher_audit_launch_timeout", default=300,
               help="Watcher audit launch timeout")
]

benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(WATCHER_BENCHMARK_OPTS, group=benchmark_group)


class WatcherScenario(scenario.OpenStackScenario):
    """Base class for Watcher scenarios with basic atomic actions."""

    @atomic.action_timer("watcher.create_audit_template")
    def _create_audit_template(self, goal_id, strategy_id):
        """Create Audit Template in DB

        :param goal_id: UUID Goal
        :param strategy_id: UUID Strategy
        :return: Audit Template object
        """
        watcher = self.admin_clients("watcher")
        return watcher.audit_template.create(
            goal=goal_id,
            strategy=strategy_id,
            name=self.generate_random_name())

    @atomic.action_timer("watcher.delete_audit_template")
    def _delete_audit_template(self, audit_template):
        """Delete Audit Template from DB

        :param audit_template: Audit Template object
        """
        self.admin_clients("watcher").audit_template.delete(audit_template)

    @atomic.action_timer("watcher.list_audit_templates")
    def _list_audit_templates(self, name=None, goal=None, strategy=None,
                              limit=None, sort_key=None, sort_dir=None,
                              detail=False):
        """List audit templates, optionally filtered and sorted."""
        filters = {"name": name, "goal": goal, "strategy": strategy,
                   "limit": limit, "sort_key": sort_key,
                   "sort_dir": sort_dir, "detail": detail}
        return self.admin_clients("watcher").audit_template.list(**filters)

    @atomic.action_timer("watcher.create_audit")
    def _create_audit(self, audit_template_uuid):
        """Launch a ONESHOT audit and wait until it succeeds or fails."""
        watcher = self.admin_clients("watcher")
        audit = watcher.audit.create(
            audit_template_uuid=audit_template_uuid,
            audit_type="ONESHOT")
        # Poll the audit "state" attribute until a terminal status is hit.
        utils.wait_for_status(
            audit,
            ready_statuses=["SUCCEEDED"],
            failure_statuses=["FAILED"],
            status_attr="state",
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.watcher_audit_launch_timeout,
            check_interval=CONF.benchmark.watcher_audit_launch_poll_interval,
            id_attr="uuid"
        )
        return audit

    @atomic.action_timer("watcher.delete_audit")
    def _delete_audit(self, audit):
        """Delete an audit, addressed by its UUID."""
        self.admin_clients("watcher").audit.delete(audit.uuid)
from rally.common import logging
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.watcher import utils
from rally.task import types
from rally.task import validation


"""Scenarios for Watcher servers."""


@types.convert(strategy={"type": "watcher_strategy"},
               goal={"type": "watcher_goal"})
@validation.required_services(consts.Service.WATCHER)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["watcher"]},
                    name="Watcher.create_audit_template_and_delete")
class CreateAuditTemplateAndDelete(utils.WatcherScenario):

    @logging.log_deprecated_args("Extra field has been removed "
                                 "since it isn't used.", "0.8.0", ["extra"],
                                 once=True)
    def run(self, goal, strategy):
        """Create audit template and delete it.

        :param goal: The goal audit template is based on
        :param strategy: The strategy used to provide resource optimization
                         algorithm
        """
        # Round-trip check: create a template, then remove it by UUID.
        template = self._create_audit_template(goal, strategy)
        self._delete_audit_template(template.uuid)


@validation.required_services(consts.Service.WATCHER)
@scenario.configure(name="Watcher.list_audit_templates")
class ListAuditTemplates(utils.WatcherScenario):

    def run(self, name=None, goal=None, strategy=None, limit=None,
            sort_key=None, sort_dir=None, detail=False):
        """List existing audit templates.

        Audit templates are being created by Audit Template Context.

        :param name: Name of the audit template
        :param goal: Name of the goal
        :param strategy: Name of the strategy
        :param limit: The maximum number of results to return per request,
            if:

            1) limit > 0, the maximum number of audit templates to return.
            2) limit == 0, return the entire list of audit_templates.
            3) limit param is NOT specified (None), the number of items
               returned respect the maximum imposed by the Watcher API
               (see Watcher's api.max_limit option).
        :param sort_key: Optional, field used for sorting.
        :param sort_dir: Optional, direction of sorting, either 'asc' (the
            default) or 'desc'.
        :param detail: Optional, boolean whether to return detailed
            information about audit_templates.
        """
        query = dict(name=name, goal=goal, strategy=strategy, limit=limit,
                     sort_key=sort_key, sort_dir=sort_dir, detail=detail)
        self._list_audit_templates(**query)


@validation.required_services(consts.Service.WATCHER)
@validation.required_contexts("audit_templates")
@scenario.configure(context={"admin_cleanup": ["watcher"]},
                    name="Watcher.create_audit_and_delete")
class CreateAuditAndDelete(utils.WatcherScenario):

    def run(self):
        """Create and delete audit.

        Create Audit, wait until whether Audit is in SUCCEEDED state or in
        FAILED and delete audit.
        """
        # The audit-template context guarantees at least one template UUID.
        template_uuid = self.context["audit_templates"][0]
        audit = self._create_audit(template_uuid)
        self._delete_audit(audit)
from oslo_config import cfg

from rally.common import utils as common_utils
from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils

MAGNUM_BENCHMARK_OPTS = [
    cfg.FloatOpt("magnum_cluster_create_prepoll_delay",
                 default=5.0,
                 help="Time(in sec) to sleep after creating a resource before "
                      "polling for the status."),
    cfg.FloatOpt("magnum_cluster_create_timeout",
                 default=1200.0,
                 help="Time(in sec) to wait for magnum cluster to be "
                      "created."),
    cfg.FloatOpt("magnum_cluster_create_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "cluster creation."),
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(MAGNUM_BENCHMARK_OPTS, group=benchmark_group)


class MagnumScenario(scenario.OpenStackScenario):
    """Base class for Magnum scenarios with basic atomic actions."""

    @atomic.action_timer("magnum.list_cluster_templates")
    def _list_cluster_templates(self, **kwargs):
        """Return list of cluster_templates.

        :param limit: (Optional) The maximum number of results to return
                      per request, if:

            1) limit > 0, the maximum number of cluster_templates to return.
            2) limit param is NOT specified (None), the number of items
               returned respect the maximum imposed by the Magnum API
               (see Magnum's api.max_limit option).
        :param kwargs: Optional additional arguments for cluster_templates
                       listing

        :returns: cluster_templates list
        """
        return self.clients("magnum").cluster_templates.list(**kwargs)

    @atomic.action_timer("magnum.create_cluster_template")
    def _create_cluster_template(self, **kwargs):
        """Create a cluster_template

        :param kwargs: optional additional arguments for cluster_template
                       creation
        :returns: magnum cluster_template
        """
        # Name is always randomized so the cleanup machinery can find it.
        kwargs["name"] = self.generate_random_name()
        return self.clients("magnum").cluster_templates.create(**kwargs)

    @atomic.action_timer("magnum.list_clusters")
    def _list_clusters(self, limit=None, **kwargs):
        """Return list of clusters.

        :param limit: (Optional) the maximum number of results to return
                      per request, if:

            1) limit > 0, the maximum number of clusters to return.
            2) limit param is NOT specified (None), the number of items
               returned respect the maximum imposed by the Magnum API
               (see Magnum's api.max_limit option).
        :param kwargs: Optional additional arguments for clusters listing

        :returns: clusters list
        """
        return self.clients("magnum").clusters.list(limit=limit, **kwargs)

    @atomic.action_timer("magnum.create_cluster")
    def _create_cluster(self, cluster_template, node_count, **kwargs):
        """Create a cluster

        :param cluster_template: cluster_template for the cluster
        :param node_count: the cluster node count
        :param kwargs: optional additional arguments for cluster creation
        :returns: magnum cluster
        """
        magnum = self.clients("magnum")
        cluster = magnum.clusters.create(
            name=self.generate_random_name(),
            cluster_template_id=cluster_template,
            node_count=node_count, **kwargs)

        # Give the API a moment before the first status poll.
        common_utils.interruptable_sleep(
            CONF.benchmark.magnum_cluster_create_prepoll_delay)
        return utils.wait_for_status(
            cluster,
            ready_statuses=["CREATE_COMPLETE"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.magnum_cluster_create_timeout,
            check_interval=CONF.benchmark.magnum_cluster_create_poll_interval,
            id_attr="uuid"
        )
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.magnum import utils
from rally.task import validation


"""Scenarios for Magnum cluster_templates."""


@validation.required_services(consts.Service.MAGNUM)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["magnum"]},
                    name="MagnumClusterTemplates.list_cluster_templates")
class ListClusterTemplates(utils.MagnumScenario):

    def run(self, **kwargs):
        """List all cluster_templates.

        Measure the "magnum cluster_template-list" command performance.

        :param limit: (Optional) The maximum number of results to return
                      per request, if:

            1) limit > 0, the maximum number of cluster_templates to return.
            2) limit param is NOT specified (None), the number of items
               returned respect the maximum imposed by the Magnum API
               (see Magnum's api.max_limit option).
        :param kwargs: optional additional arguments for cluster_templates
                       listing
        """
        # All filtering/paging options pass straight through to the client.
        self._list_cluster_templates(**kwargs)
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.magnum import utils
from rally.task import validation


"""Scenarios for Magnum clusters."""


@validation.required_services(consts.Service.MAGNUM)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["magnum.clusters"]},
                    name="MagnumClusters.list_clusters")
class ListClusters(utils.MagnumScenario):

    def run(self, **kwargs):
        """List all clusters.

        Measure the "magnum clusters-list" command performance.

        :param limit: (Optional) The maximum number of results to return
                      per request, if:

            1) limit > 0, the maximum number of clusters to return.
            2) limit param is NOT specified (None), the number of items
               returned respect the maximum imposed by the Magnum API
               (see Magnum's api.max_limit option).
        :param kwargs: optional additional arguments for clusters listing
        """
        self._list_clusters(**kwargs)


@validation.required_services(consts.Service.MAGNUM)
@validation.required_openstack(users=True)
@validation.required_contexts("cluster_templates")
@scenario.configure(context={"cleanup": ["magnum.clusters"]},
                    name="MagnumClusters.create_and_list_clusters")
class CreateAndListClusters(utils.MagnumScenario):

    def run(self, node_count, **kwargs):
        """create cluster and then list all clusters.

        :param node_count: the cluster node count.
        :param cluster_template_uuid: optional, if user want to use an
                                      existing cluster_template
        :param kwargs: optional additional arguments for cluster creation
        """
        # FIX: pop (not get) the template UUID so it is not forwarded a
        # second time through **kwargs to _create_cluster()/_list_clusters(),
        # where it would reach the magnum client as a stray keyword that
        # conflicts with the explicit cluster_template_id argument.
        cluster_template_uuid = kwargs.pop("cluster_template_uuid", None)
        if cluster_template_uuid is None:
            # Fall back to the template created by the context.
            cluster_template_uuid = self.context["tenant"]["cluster_template"]

        cluster = self._create_cluster(cluster_template_uuid, node_count,
                                       **kwargs)
        self.assertTrue(cluster)
        list_clusters = self._list_clusters(**kwargs)
        self.assertIn(cluster, list_clusters)
import random from rally.common import logging from rally import consts from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils from rally.plugins.openstack.scenarios.glance import utils as glance_utils from rally.plugins.openstack.scenarios.nova import utils as nova_utils from rally.task import atomic from rally.task import types from rally.task import validation LOG = logging.getLogger(__name__) """Scenarios for Cinder Volumes.""" @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_list_volume") class CreateAndListVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, detailed=True, image=None, **kwargs): """Create a volume and list all volumes. Measure the "cinder volume-list" command performance. If you have only 1 user in your context, you will add 1 volume on every iteration. So you will have more and more volumes and will be able to measure the performance of the "cinder volume-list" command depending on the number of images owned by users. :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. 
:param detailed: determines whether the volume listing should contain detailed information about all of them :param image: image to be used to create volume :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image self._create_volume(size, **kwargs) self._list_volumes(detailed) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_get_volume") class CreateAndGetVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, image=None, **kwargs): """Create a volume and get the volume. Measure the "cinder show" command performance. :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param image: image to be used to create volume :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image volume = self._create_volume(size, **kwargs) self._get_volume(volume.id) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.list_volumes") class ListVolumes(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, detailed=True): """List all volumes. This simple scenario tests the cinder list command by listing all the volumes. 
:param detailed: True if detailed information about volumes should be listed """ self._list_volumes(detailed) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(name="CinderVolumes.list_types") class ListTypes(cinder_utils.CinderScenario): def run(self, search_opts=None, is_public=None): """List all volume types. This simple scenario tests the cinder type-list command by listing all the volume types. :param search_opts: Options used when search for volume types :param is_public: If query public volume type """ self._list_types(search_opts, is_public) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(name="CinderVolumes.list_transfers") class ListTransfers(cinder_utils.CinderScenario): def run(self, detailed=True, search_opts=None): """List all transfers. This simple scenario tests the "cinder transfer-list" command by listing all the volume transfers. :param detailed: If True, detailed information about volume transfer should be listed :param search_opts: Search options to filter out volume transfers. """ self._list_transfers(detailed, search_opts) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_update_volume") class CreateAndUpdateVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, image=None, create_volume_kwargs=None, update_volume_kwargs=None): """Create a volume and update its name and description. 
:param size: volume size (integer, in GB) :param image: image to be used to create volume :param create_volume_kwargs: dict, to be used to create volume :param update_volume_kwargs: dict, to be used to update volume """ create_volume_kwargs = create_volume_kwargs or {} update_volume_kwargs = update_volume_kwargs or {} if image: create_volume_kwargs["imageRef"] = image volume = self._create_volume(size, **create_volume_kwargs) self._update_volume(volume, **update_volume_kwargs) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_delete_volume") class CreateAndDeleteVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, image=None, min_sleep=0, max_sleep=0, **kwargs): """Create and then delete a volume. Good for testing a maximal bandwidth of cloud. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between volume creation and deletion (of random duration from [min_sleep, max_sleep]). :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. 
:param image: image to be used to create volume :param min_sleep: minimum sleep time between volume creation and deletion (in seconds) :param max_sleep: maximum sleep time between volume creation and deletion (in seconds) :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image volume = self._create_volume(size, **kwargs) self.sleep_between(min_sleep, max_sleep) self._delete_volume(volume) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_volume") class CreateVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, image=None, **kwargs): """Create a volume. Good test to check how influence amount of active volumes on performance of creating new. :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param image: image to be used to create volume :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image self._create_volume(size, **kwargs) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @validation.required_contexts("volumes") @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.modify_volume_metadata") class ModifyVolumeMetadata(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, sets=10, set_size=3, deletes=5, delete_size=3): """Modify a volume's metadata. This requires a volume to be created with the volumes context. Additionally, ``sets * set_size`` must be greater than or equal to ``deletes * delete_size``. 
:param sets: how many set_metadata operations to perform :param set_size: number of metadata keys to set in each set_metadata operation :param deletes: how many delete_metadata operations to perform :param delete_size: number of metadata keys to delete in each delete_metadata operation """ if sets * set_size < deletes * delete_size: raise exceptions.InvalidArgumentsException( "Not enough metadata keys will be created: " "Setting %(num_keys)s keys, but deleting %(num_deletes)s" % {"num_keys": sets * set_size, "num_deletes": deletes * delete_size}) volume = random.choice(self.context["tenant"]["volumes"]) keys = self._set_metadata(volume["id"], sets, set_size) self._delete_metadata(volume["id"], keys, deletes, delete_size) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_extend_volume") class CreateAndExtendVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, new_size, min_sleep=0, max_sleep=0, **kwargs): """Create and extend a volume and then delete it. :param size: volume size (in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param new_size: volume new size (in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. to extend. 
Notice: should be bigger volume size :param min_sleep: minimum sleep time between volume extension and deletion (in seconds) :param max_sleep: maximum sleep time between volume extension and deletion (in seconds) :param kwargs: optional args to extend the volume """ volume = self._create_volume(size, **kwargs) self._extend_volume(volume, new_size) self.sleep_between(min_sleep, max_sleep) self._delete_volume(volume) @validation.required_services(consts.Service.CINDER) @validation.required_contexts("volumes") @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_from_volume_and_delete_volume") class CreateFromVolumeAndDeleteVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, min_sleep=0, max_sleep=0, **kwargs): """Create volume from volume and then delete it. Scenario for testing volume clone.Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between volume creation and deletion (of random duration from [min_sleep, max_sleep]). :param size: volume size (in GB), or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. 
Should be equal or bigger source volume size :param min_sleep: minimum sleep time between volume creation and deletion (in seconds) :param max_sleep: maximum sleep time between volume creation and deletion (in seconds) :param kwargs: optional args to create a volume """ source_vol = random.choice(self.context["tenant"]["volumes"]) volume = self._create_volume(size, source_volid=source_vol["id"], **kwargs) self.sleep_between(min_sleep, max_sleep) self._delete_volume(volume) @validation.required_services(consts.Service.CINDER) @validation.required_contexts("volumes") @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_delete_snapshot") class CreateAndDeleteSnapshot(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, force=False, min_sleep=0, max_sleep=0, **kwargs): """Create and then delete a volume-snapshot. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between snapshot creation and deletion (of random duration from [min_sleep, max_sleep]). 
:param force: when set to True, allows snapshot of a volume when the volume is attached to an instance :param min_sleep: minimum sleep time between snapshot creation and deletion (in seconds) :param max_sleep: maximum sleep time between snapshot creation and deletion (in seconds) :param kwargs: optional args to create a snapshot """ volume = random.choice(self.context["tenant"]["volumes"]) snapshot = self._create_snapshot(volume["id"], force=force, **kwargs) self.sleep_between(min_sleep, max_sleep) self._delete_snapshot(snapshot) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder", "nova"]}, name="CinderVolumes.create_and_attach_volume") class CreateAndAttachVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): @logging.log_deprecated_args( "Use 'create_vm_params' for additional instance parameters.", "0.2.0", ["kwargs"], once=True) def run(self, size, image, flavor, create_volume_params=None, create_vm_params=None, **kwargs): """Create a VM and attach a volume to it. Simple test to create a VM and attach a volume, then detach the volume and delete volume/VM. :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param image: Glance image name to use for the VM :param flavor: VM flavor name :param create_volume_params: optional arguments for volume creation :param create_vm_params: optional arguments for VM creation :param kwargs: (deprecated) optional arguments for VM creation """ create_volume_params = create_volume_params or {} if kwargs and create_vm_params: raise ValueError("You can not set both 'kwargs'" "and 'create_vm_params' attributes." 
"Please use 'create_vm_params'.") create_vm_params = create_vm_params or kwargs or {} server = self._boot_server(image, flavor, **create_vm_params) volume = self._create_volume(size, **create_volume_params) attachment = self._attach_volume(server, volume) self._detach_volume(server, volume, attachment) self._delete_volume(volume) self._delete_server(server) @validation.volume_type_exists("volume_type") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder", "nova"]}, name="CinderVolumes.create_snapshot_and_attach_volume") class CreateSnapshotAndAttachVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, volume_type=False, size=None, **kwargs): """Create volume, snapshot and attach/detach volume. :param volume_type: Name of volume type to use :param size: Volume size - dictionary, contains two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. default values: {"min": 1, "max": 5} :param kwargs: Optional parameters used during volume snapshot creation. 
""" if size is None: size = {"min": 1, "max": 5} if isinstance(volume_type, bool): LOG.warning("Selecting a random volume type is deprecated" "as of Rally 0.7.0") volume_types = [None] volume_types_list = self.clients("cinder").volume_types.list() for s in volume_types_list: volume_types.append(s.name) volume_type = random.choice(volume_types) volume = self._create_volume(size, volume_type=volume_type) snapshot = self._create_snapshot(volume.id, False, **kwargs) server = self.get_random_server() attachment = self._attach_volume(server, volume) self._detach_volume(server, volume, attachment) self._delete_snapshot(snapshot) self._delete_volume(volume) @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder", "nova"]}, name="CinderVolumes.create_nested_snapshots" "_and_attach_volume") class CreateNestedSnapshotsAndAttachVolume(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): @logging.log_deprecated_args( "Use 'create_snapshot_kwargs' for additional snapshot kwargs.", "0.4.1", ["kwargs"], once=True) def run(self, size=None, nested_level=1, create_volume_kwargs=None, create_snapshot_kwargs=None, **kwargs): """Create a volume from snapshot and attach/detach the volume This scenario create volume, create it's snapshot, attach volume, then create new volume from existing snapshot and so on, with defined nested level, after all detach and delete them. volume->snapshot->volume->snapshot->volume ... :param size: Volume size - dictionary, contains two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. default values: {"min": 1, "max": 5} :param nested_level: amount of nested levels :param create_volume_kwargs: optional args to create a volume :param create_snapshot_kwargs: optional args to create a snapshot :param kwargs: Optional parameters used during volume snapshot creation. 
""" if size is None: size = {"min": 1, "max": 5} # NOTE: Volume size cannot be smaller than the snapshot size, so # volume with specified size should be created to avoid # size mismatching between volume and snapshot due random # size in _create_volume method. size = random.randint(size["min"], size["max"]) create_volume_kwargs = create_volume_kwargs or {} create_snapshot_kwargs = create_snapshot_kwargs or kwargs or {} server = self.get_random_server() source_vol = self._create_volume(size, **create_volume_kwargs) snapshot = self._create_snapshot(source_vol.id, False, **create_snapshot_kwargs) attachment = self._attach_volume(server, source_vol) nes_objs = [(server, source_vol, snapshot, attachment)] for i in range(nested_level - 1): volume = self._create_volume(size, snapshot_id=snapshot.id) snapshot = self._create_snapshot(volume.id, False, **create_snapshot_kwargs) server = self.get_random_server() attachment = self._attach_volume(server, volume) nes_objs.append((server, volume, snapshot, attachment)) nes_objs.reverse() for server, volume, snapshot, attachment in nes_objs: self._detach_volume(server, volume, attachment) self._delete_snapshot(snapshot) self._delete_volume(volume) @validation.required_services(consts.Service.CINDER) @validation.required_contexts("volumes") @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_list_snapshots") class CreateAndListSnapshots(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, force=False, detailed=True, **kwargs): """Create and then list a volume-snapshot. 
:param force: when set to True, allows snapshot of a volume when the volume is attached to an instance :param detailed: True if detailed information about snapshots should be listed :param kwargs: optional args to create a snapshot """ volume = random.choice(self.context["tenant"]["volumes"]) self._create_snapshot(volume["id"], force=force, **kwargs) self._list_snapshots(detailed) @types.convert(image={"type": "glance_image"}) @validation.required_services(consts.Service.CINDER, consts.Service.GLANCE) @validation.required_openstack(users=True) @validation.required_parameters("size") @scenario.configure(context={"cleanup": ["cinder", "glance"]}, name="CinderVolumes.create_and_upload_volume_to_image") class CreateAndUploadVolumeToImage(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, image=None, force=False, container_format="bare", disk_format="raw", do_delete=True, **kwargs): """Create and upload a volume to image. :param size: volume size (integers, in GB), or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param image: image to be used to create volume. 
:param force: when set to True volume that is attached to an instance could be uploaded to image :param container_format: image container format :param disk_format: disk format for image :param do_delete: deletes image and volume after uploading if True :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image volume = self._create_volume(size, **kwargs) image = self._upload_volume_to_image(volume, force, container_format, disk_format) if do_delete: self._delete_volume(volume) self._delete_image(image) @validation.required_cinder_services("cinder-backup") @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_volume_backup") class CreateVolumeBackup(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, do_delete=True, create_volume_kwargs=None, create_backup_kwargs=None): """Create a volume backup. :param size: volume size in GB :param do_delete: if True, a volume and a volume backup will be deleted after creation. 
:param create_volume_kwargs: optional args to create a volume :param create_backup_kwargs: optional args to create a volume backup """ create_volume_kwargs = create_volume_kwargs or {} create_backup_kwargs = create_backup_kwargs or {} volume = self._create_volume(size, **create_volume_kwargs) backup = self._create_backup(volume.id, **create_backup_kwargs) if do_delete: self._delete_volume(volume) self._delete_backup(backup) @validation.required_cinder_services("cinder-backup") @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_restore_volume_backup") class CreateAndRestoreVolumeBackup(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, do_delete=True, create_volume_kwargs=None, create_backup_kwargs=None): """Restore volume backup. :param size: volume size in GB :param do_delete: if True, the volume and the volume backup will be deleted after creation. 
:param create_volume_kwargs: optional args to create a volume :param create_backup_kwargs: optional args to create a volume backup """ create_volume_kwargs = create_volume_kwargs or {} create_backup_kwargs = create_backup_kwargs or {} volume = self._create_volume(size, **create_volume_kwargs) backup = self._create_backup(volume.id, **create_backup_kwargs) self._restore_backup(backup.id) if do_delete: self._delete_volume(volume) self._delete_backup(backup) @validation.required_cinder_services("cinder-backup") @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_list_volume_backups") class CreateAndListVolumeBackups(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, size, detailed=True, do_delete=True, create_volume_kwargs=None, create_backup_kwargs=None): """Create and then list a volume backup. :param size: volume size in GB :param detailed: True if detailed information about backup should be listed :param do_delete: if True, a volume backup will be deleted :param create_volume_kwargs: optional args to create a volume :param create_backup_kwargs: optional args to create a volume backup """ create_volume_kwargs = create_volume_kwargs or {} create_backup_kwargs = create_backup_kwargs or {} volume = self._create_volume(size, **create_volume_kwargs) backup = self._create_backup(volume.id, **create_backup_kwargs) self._list_backups(detailed) if do_delete: self._delete_volume(volume) self._delete_backup(backup) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_volume_and_clone") class CreateVolumeAndClone(cinder_utils.CinderScenario, nova_utils.NovaScenario, 
glance_utils.GlanceScenario): def run(self, size, image=None, nested_level=1, **kwargs): """Create a volume, then clone it to another volume. This creates a volume, then clone it to anothor volume, and then clone the new volume to next volume... 1. create source volume (from image) 2. clone source volume to volume1 3. clone volume1 to volume2 4. clone volume2 to volume3 5. ... :param size: volume size (integer, in GB) or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. :param image: image to be used to create initial volume :param nested_level: amount of nested levels :param kwargs: optional args to create volumes """ if image: kwargs["imageRef"] = image source_vol = self._create_volume(size, **kwargs) kwargs.pop("imageRef", None) for i in range(nested_level): with atomic.ActionTimer(self, "cinder.clone_volume"): source_vol = self._create_volume(source_vol.size, source_volid=source_vol.id, atomic_action=False, **kwargs) @validation.required_services(consts.Service.CINDER) @validation.required_contexts("volumes") @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_volume_from_snapshot") class CreateVolumeFromSnapshot(cinder_utils.CinderScenario, nova_utils.NovaScenario, glance_utils.GlanceScenario): def run(self, do_delete=True, create_snapshot_kwargs=None, **kwargs): """Create a volume-snapshot, then create a volume from this snapshot. :param do_delete: if True, a snapshot and a volume will be deleted after creation. 
:param create_snapshot_kwargs: optional args to create a snapshot :param kwargs: optional args to create a volume """ create_snapshot_kwargs = create_snapshot_kwargs or {} src_volume = random.choice(self.context["tenant"]["volumes"]) snapshot = self._create_snapshot(src_volume["id"], **create_snapshot_kwargs) volume = self._create_volume(src_volume["size"], snapshot_id=snapshot.id, **kwargs) if do_delete: self._delete_snapshot(snapshot) self._delete_volume(volume) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_volume_" "and_update_readonly_flag") class CreateVolumeAndUpdateReadonlyFlag(cinder_utils.CinderScenario, glance_utils.GlanceScenario): def run(self, size, image=None, read_only=True, **kwargs): """Create a volume and then update its readonly flag. :param size: volume size (integer, in GB) :param image: image to be used to create volume :param read_only: The value to indicate whether to update volume to read-only access mode :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image volume = self._create_volume(size, **kwargs) self._update_readonly_flag(volume.id, read_only) @types.convert(image={"type": "glance_image"}) @validation.image_exists("image", nullable=True) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumes.create_and_accept_transfer") class CreateAndAcceptTransfer(cinder_utils.CinderScenario, glance_utils.GlanceScenario): def run(self, size, image=None, **kwargs): """Create a volume transfer, then accept it Measure the "cinder transfer-create" and "cinder transfer-accept" command performace. 
:param size: volume size (integer, in GB) :param image: image to be used to create initial volume :param kwargs: optional args to create a volume """ if image: kwargs["imageRef"] = image volume = self._create_volume(size, **kwargs) transfer = self._transfer_create(volume.id) self._transfer_accept(transfer.id, transfer.auth_key) rally-0.9.1/rally/plugins/openstack/scenarios/cinder/volume_types.py0000664000567000056710000001132313073417720027143 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import random from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils from rally.task import validation """Scenarios for Cinder Volume Type.""" @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["cinder"]}, name="CinderVolumeTypes.create_and_delete_volume_type") class CreateAndDeleteVolumeType(cinder_utils.CinderScenario): def run(self, **kwargs): """Create and delete a volume Type. :param kwargs: Optional parameters used during volume type creation. 
""" volume_type = self._create_volume_type(**kwargs) self._delete_volume_type(volume_type) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["cinder"]}, name="CinderVolumeTypes.create_volume_type" "_and_encryption_type") class CreateVolumeTypeAndEncryptionType(cinder_utils.CinderScenario): def run(self, specs, **kwargs): """Create encryption type This scenario first creates a volume type, then creates an encryption type for the volume type. :param specs: the encryption type specifications to add :param kwargs: Optional parameters used during volume type creation. """ volume_type = self._create_volume_type(**kwargs) self._create_encryption_type(volume_type, specs) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["cinder"]}, name="CinderVolumeTypes.create_and_list_" "encryption_type") class CreateAndListEncryptionType(cinder_utils.CinderScenario): def run(self, specs, search_opts=None, **kwargs): """Create and list encryption type This scenario firstly creates a volume type, secondly creates an encryption type for the volume type, thirdly lists all encryption types. :param specs: the encryption type specifications to add :param search_opts: Options used when search for encryption types :param kwargs: Optional parameters used during volume type creation. """ volume_type = self._create_volume_type(**kwargs) self._create_encryption_type(volume_type, specs) self._list_encryption_type(search_opts) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["cinder"]}, name="CinderVolumeTypes.create_and_set_volume_type_keys") class CreateAndSetVolumeTypeKeys(cinder_utils.CinderScenario): def run(self, volume_type_key, **kwargs): """Create and set a volume type's extra specs. 
:param volume_type_key: A dict of key/value pairs to be set :param kwargs: Optional parameters used during volume type creation. """ volume_type = self._create_volume_type(**kwargs) self._set_volume_type_keys(volume_type, volume_type_key) @validation.required_services(consts.Service.CINDER) @validation.required_contexts("volume_types") @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["cinder"]}, name="CinderVolumeTypes.create_and_delete_" "encryption_type") class CreateAndDeleteEncryptionType(cinder_utils.CinderScenario): def run(self, create_specs): """Create and delete encryption type This scenario firstly creates an encryption type for a given volume type, then deletes the created encryption type. :param create_specs: the encryption type specifications to add """ volume_type = random.choice(self.context["volume_types"]) self._create_encryption_type(volume_type["id"], create_specs) self._delete_encryption_type(volume_type["id"]) rally-0.9.1/rally/plugins/openstack/scenarios/cinder/volume_backups.py0000664000567000056710000000446513073417720027440 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils from rally.task import validation """Scenarios for Cinder Volume Backup.""" @validation.number("size", minval=1, integer_only=True) @validation.required_cinder_services("cinder-backup") @validation.required_services(consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder"]}, name="CinderVolumeBackups." "create_incremental_volume_backup") class CreateIncrementalVolumeBackup(cinder_utils.CinderScenario): def run(self, size, do_delete=True, create_volume_kwargs=None, create_backup_kwargs=None): """Create a incremental volume backup. The scenario first create a volume, the create a backup, the backup is full backup. Because Incremental backup must be based on the full backup. finally create a incremental backup. :param size: volume size in GB :param do_delete: deletes backup and volume after creating if True :param create_volume_kwargs: optional args to create a volume :param create_backup_kwargs: optional args to create a volume backup """ create_volume_kwargs = create_volume_kwargs or {} create_backup_kwargs = create_backup_kwargs or {} volume = self._create_volume(size, **create_volume_kwargs) backup1 = self._create_backup(volume.id, **create_backup_kwargs) backup2 = self._create_backup(volume.id, incremental=True) if do_delete: self._delete_backup(backup2) self._delete_backup(backup1) self._delete_volume(volume) rally-0.9.1/rally/plugins/openstack/scenarios/cinder/utils.py0000664000567000056710000005031413073417720025553 0ustar jenkinsjenkins00000000000000# Copyright 2013 Huawei Technologies Co.,LTD. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import random

from oslo_config import cfg

from rally import exceptions
from rally.plugins.openstack import scenario
from rally.plugins.openstack.wrappers import cinder as cinder_wrapper
from rally.plugins.openstack.wrappers import glance as glance_wrapper
from rally.task import atomic
from rally.task import utils as bench_utils

CINDER_BENCHMARK_OPTS = [
    cfg.FloatOpt("cinder_volume_create_prepoll_delay",
                 default=2.0,
                 help="Time to sleep after creating a resource before"
                      " polling for it status"),
    cfg.FloatOpt("cinder_volume_create_timeout",
                 default=600.0,
                 help="Time to wait for cinder volume to be created."),
    cfg.FloatOpt("cinder_volume_create_poll_interval",
                 default=2.0,
                 help="Interval between checks when waiting for volume"
                      " creation."),
    cfg.FloatOpt("cinder_volume_delete_timeout",
                 default=600.0,
                 help="Time to wait for cinder volume to be deleted."),
    cfg.FloatOpt("cinder_volume_delete_poll_interval",
                 default=2.0,
                 help="Interval between checks when waiting for volume"
                      " deletion."),
    cfg.FloatOpt("cinder_backup_restore_timeout",
                 default=600.0,
                 help="Time to wait for cinder backup to be restored."),
    cfg.FloatOpt("cinder_backup_restore_poll_interval",
                 default=2.0,
                 help="Interval between checks when waiting for backup"
                      " restoring."),
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(CINDER_BENCHMARK_OPTS, group=benchmark_group)


class CinderScenario(scenario.OpenStackScenario):
    """Base class for Cinder scenarios with basic atomic actions."""

    @atomic.action_timer("cinder.list_volumes")
    def _list_volumes(self, detailed=True):
        """Returns user volumes list."""
        return self.clients("cinder").volumes.list(detailed)

    @atomic.action_timer("cinder.get_volume")
    def _get_volume(self, volume_id):
        """get volume detailed information.

        :param volume_id: id of volume
        :returns: class:`Volume`
        """
        return self.clients("cinder").volumes.get(volume_id)

    @atomic.action_timer("cinder.list_snapshots")
    def _list_snapshots(self, detailed=True):
        """Returns user snapshots list."""
        return self.clients("cinder").volume_snapshots.list(detailed)

    @atomic.action_timer("cinder.list_types")
    def _list_types(self, search_opts=None, is_public=None):
        """Lists all volume types.

        :param search_opts: Options used when search for volume types
        :param is_public: If query public volume type
        :returns: A list of volume types
        """
        return self.clients("cinder").volume_types.list(search_opts,
                                                        is_public)

    def _set_metadata(self, volume, sets=10, set_size=3):
        """Set volume metadata.

        :param volume: The volume to set metadata on
        :param sets: how many operations to perform
        :param set_size: number of metadata keys to set in each operation
        :returns: A list of keys that were set
        """
        key = "cinder.set_%s_metadatas_%s_times" % (set_size, sets)
        with atomic.ActionTimer(self, key):
            keys = []
            for i in range(sets):
                metadata = {}
                for j in range(set_size):
                    key = self.generate_random_name()
                    keys.append(key)
                    metadata[key] = self.generate_random_name()

                self.clients("cinder").volumes.set_metadata(volume, metadata)
            return keys

    def _delete_metadata(self, volume, keys, deletes=10, delete_size=3):
        """Delete volume metadata keys.

        Note that ``len(keys)`` must be greater than or equal to
        ``deletes * delete_size``.

        :param volume: The volume to delete metadata from
        :param deletes: how many operations to perform
        :param delete_size: number of metadata keys to delete in each
                            operation
        :param keys: a list of keys to choose deletion candidates from
        """
        if len(keys) < deletes * delete_size:
            raise exceptions.InvalidArgumentsException(
                "Not enough metadata keys to delete: "
                "%(num_keys)s keys, but asked to delete %(num_deletes)s" %
                {"num_keys": len(keys),
                 "num_deletes": deletes * delete_size})
        # make a shallow copy of the list of keys so that, when we pop
        # from it later, we don't modify the original list.
        keys = list(keys)
        random.shuffle(keys)
        action_name = "cinder.delete_%s_metadatas_%s_times" % (delete_size,
                                                               deletes)
        with atomic.ActionTimer(self, action_name):
            for i in range(deletes):
                to_del = keys[i * delete_size:(i + 1) * delete_size]
                self.clients("cinder").volumes.delete_metadata(volume, to_del)

    @atomic.optional_action_timer("cinder.create_volume")
    def _create_volume(self, size, **kwargs):
        """Create one volume.

        Returns when the volume is actually created and is in the "Available"
        state.

        :param size: int be size of volume in GB, or
                     dictionary, must contain two values:
                         min - minimum size volumes will be created as;
                         max - maximum size volumes will be created as.
        :param atomic_action: True if this is an atomic action. added
                              and handled by the optional_action_timer()
                              decorator
        :param kwargs: Other optional parameters to initialize the volume
        :returns: Created volume object
        """
        if isinstance(size, dict):
            size = random.randint(size["min"], size["max"])

        client = cinder_wrapper.wrap(self._clients.cinder, self)
        volume = client.create_volume(size, **kwargs)

        # NOTE(msdubov): It is reasonable to wait 5 secs before starting to
        #                check whether the volume is ready => less API calls.
self.sleep_between(CONF.benchmark.cinder_volume_create_prepoll_delay) volume = bench_utils.wait_for( volume, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) return volume @atomic.action_timer("cinder.update_volume") def _update_volume(self, volume, **update_volume_args): """Update name and description for this volume This atomic function updates volume information. The volume display name is always changed, and additional update arguments may also be specified. :param volume: volume object :param update_volume_args: dict, contains values to be updated. """ client = cinder_wrapper.wrap(self._clients.cinder, self) client.update_volume(volume, **update_volume_args) @atomic.action_timer("cinder.update_readonly_flag") def _update_readonly_flag(self, volume, read_only): """Update the read-only access mode flag of the specified volume. :param volume: The UUID of the volume to update. :param read_only: The value to indicate whether to update volume to read-only access mode. :returns: A tuple of http Response and body """ return self.clients("cinder").volumes.update_readonly_flag( volume, read_only) @atomic.action_timer("cinder.delete_volume") def _delete_volume(self, volume): """Delete the given volume. Returns when the volume is actually deleted. :param volume: volume object """ volume.delete() bench_utils.wait_for_status( volume, ready_statuses=["deleted"], check_deletion=True, update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_delete_timeout, check_interval=CONF.benchmark.cinder_volume_delete_poll_interval ) @atomic.action_timer("cinder.extend_volume") def _extend_volume(self, volume, new_size): """Extend the given volume. Returns when the volume is actually extended. 
:param volume: volume object :param new_size: new volume size in GB, or dictionary, must contain two values: min - minimum size volumes will be created as; max - maximum size volumes will be created as. Notice: should be bigger volume size """ if isinstance(new_size, dict): new_size = random.randint(new_size["min"], new_size["max"]) volume.extend(volume, new_size) volume = bench_utils.wait_for( volume, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) @atomic.action_timer("cinder.upload_volume_to_image") def _upload_volume_to_image(self, volume, force=False, container_format="bare", disk_format="raw"): """Upload the given volume to image. Returns created image. :param volume: volume object :param force: flag to indicate whether to snapshot a volume even if it's attached to an instance :param container_format: container format of image. Acceptable formats: ami, ari, aki, bare, and ovf :param disk_format: disk format of image. Acceptable formats: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi and iso :returns: Returns created image object """ resp, img = volume.upload_to_image(force, self.generate_random_name(), container_format, disk_format) # NOTE (e0ne): upload_to_image changes volume status to uploading so # we need to wait until it will be available. 
volume = bench_utils.wait_for( volume, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) image_id = img["os-volume_upload_image"]["image_id"] image = self.clients("glance").images.get(image_id) wrapper = glance_wrapper.wrap(self._clients.glance, self) image = bench_utils.wait_for( image, ready_statuses=["active"], update_resource=wrapper.get_image, timeout=CONF.benchmark.glance_image_create_timeout, check_interval=CONF.benchmark.glance_image_create_poll_interval ) return image @atomic.action_timer("cinder.create_snapshot") def _create_snapshot(self, volume_id, force=False, **kwargs): """Create one snapshot. Returns when the snapshot is actually created and is in the "Available" state. :param volume_id: volume uuid for creating snapshot :param force: flag to indicate whether to snapshot a volume even if it's attached to an instance :param kwargs: Other optional parameters to initialize the volume :returns: Created snapshot object """ kwargs["force"] = force client = cinder_wrapper.wrap(self._clients.cinder, self) snapshot = client.create_snapshot(volume_id, **kwargs) self.sleep_between(CONF.benchmark.cinder_volume_create_prepoll_delay) snapshot = bench_utils.wait_for( snapshot, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) return snapshot @atomic.action_timer("cinder.delete_snapshot") def _delete_snapshot(self, snapshot): """Delete the given snapshot. Returns when the snapshot is actually deleted. 
:param snapshot: snapshot object """ snapshot.delete() bench_utils.wait_for_status( snapshot, ready_statuses=["deleted"], check_deletion=True, update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_delete_timeout, check_interval=CONF.benchmark.cinder_volume_delete_poll_interval ) @atomic.action_timer("cinder.create_backup") def _create_backup(self, volume_id, **kwargs): """Create a volume backup of the given volume. :param volume_id: The ID of the volume to backup. :param kwargs: Other optional parameters """ backup = self.clients("cinder").backups.create(volume_id, **kwargs) return bench_utils.wait_for( backup, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) @atomic.action_timer("cinder.delete_backup") def _delete_backup(self, backup): """Delete the given backup. Returns when the backup is actually deleted. :param backup: backup instance """ backup.delete() bench_utils.wait_for_status( backup, ready_statuses=["deleted"], check_deletion=True, update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_delete_timeout, check_interval=CONF.benchmark.cinder_volume_delete_poll_interval ) @atomic.action_timer("cinder.restore_backup") def _restore_backup(self, backup_id, volume_id=None): """Restore the given backup. :param backup_id: The ID of the backup to restore. :param volume_id: The ID of the volume to restore the backup to. 
""" restore = self.clients("cinder").restores.restore(backup_id, volume_id) restored_volume = self.clients("cinder").volumes.get(restore.volume_id) backup_for_restore = self.clients("cinder").backups.get(backup_id) bench_utils.wait_for( backup_for_restore, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_backup_restore_timeout, check_interval=CONF.benchmark.cinder_backup_restore_poll_interval ) return bench_utils.wait_for( restored_volume, ready_statuses=["available"], update_resource=bench_utils.get_from_manager(), timeout=CONF.benchmark.cinder_volume_create_timeout, check_interval=CONF.benchmark.cinder_volume_create_poll_interval ) @atomic.action_timer("cinder.list_backups") def _list_backups(self, detailed=True): """Return user volume backups list. :param detailed: True if detailed information about backup should be listed """ return self.clients("cinder").backups.list(detailed) def get_random_server(self): server_id = random.choice(self.context["tenant"]["servers"]) return self.clients("nova").servers.get(server_id) @atomic.action_timer("cinder.list_transfers") def _list_transfers(self, detailed=True, search_opts=None): """Get a list of all volume transfers. :param detailed: If True, detailed information about transfer should be listed :param search_opts: Search options to filter out volume transfers :returns: list of :class:`VolumeTransfer` """ return self.clients("cinder").transfers.list(detailed, search_opts) @atomic.action_timer("cinder.create_volume_type") def _create_volume_type(self, **kwargs): """create volume type. :param kwargs: Optional additional arguments for volume type creation :returns: VolumeType object """ kwargs["name"] = self.generate_random_name() return self.admin_clients("cinder").volume_types.create(**kwargs) @atomic.action_timer("cinder.delete_volume_type") def _delete_volume_type(self, volume_type): """delete a volume type. 
:param volume_type: Name or Id of the volume type :returns: base on client response return True if the request has been accepted or not """ tuple_res = self.admin_clients("cinder").volume_types.delete( volume_type) return (tuple_res[0].status_code == 202) @atomic.action_timer("cinder.set_volume_type_keys") def _set_volume_type_keys(self, volume_type, metadata): """Set extra specs on a volume type. :param volume_type: The :class:`VolumeType` to set extra spec on :param metadata: A dict of key/value pairs to be set :returns: extra_specs if the request has been accepted """ return volume_type.set_keys(metadata) @atomic.action_timer("cinder.transfer_create") def _transfer_create(self, volume_id): """Create a volume transfer. :param volume_id: The ID of the volume to transfer :rtype: VolumeTransfer """ name = self.generate_random_name() return self.clients("cinder").transfers.create(volume_id, name) @atomic.action_timer("cinder.transfer_accept") def _transfer_accept(self, transfer_id, auth_key): """Accept a volume transfer. :param transfer_id: The ID of the transfer to accept. :param auth_key: The auth_key of the transfer. :rtype: VolumeTransfer """ return self.clients("cinder").transfers.accept(transfer_id, auth_key) @atomic.action_timer("cinder.create_encryption_type") def _create_encryption_type(self, volume_type, specs): """Create encryption type for a volume type. Default: admin only. :param volume_type: the volume type on which to add an encryption type :param specs: the encryption type specifications to add :return: an instance of :class: VolumeEncryptionType """ return self.admin_clients("cinder").volume_encryption_types.create( volume_type, specs) @atomic.action_timer("cinder.list_encryption_type") def _list_encryption_type(self, search_opts=None): """List all volume encryption types. 
:param search_opts: Options used when search for encryption types :return: a list of :class: VolumeEncryptionType instances """ return self.admin_clients("cinder").volume_encryption_types.list( search_opts) @atomic.action_timer("cinder.delete_encryption_type") def _delete_encryption_type(self, volume_type): """Delete the encryption type information for the specified volume type. :param volume_type: the volume type whose encryption type information must be deleted """ resp = self.admin_clients("cinder").volume_encryption_types.delete( volume_type) if (resp[0].status_code != 202): raise exceptions.EncryptionTypeDeleteException() rally-0.9.1/rally/plugins/openstack/scenarios/ec2/0000775000567000056710000000000013073420067023241 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ec2/__init__.py0000664000567000056710000000000013073417716025347 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ec2/utils.py0000664000567000056710000000567013073417720024765 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.plugins.openstack import scenario from rally.task import atomic from rally.task import utils EC2_BENCHMARK_OPTS = [ cfg.FloatOpt( "ec2_server_boot_prepoll_delay", default=1.0, help="Time to sleep after boot before polling for status" ), cfg.FloatOpt( "ec2_server_boot_timeout", default=300.0, help="Server boot timeout" ), cfg.FloatOpt( "ec2_server_boot_poll_interval", default=1.0, help="Server boot poll interval" ) ] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(EC2_BENCHMARK_OPTS, group=benchmark_group) class EC2Scenario(scenario.OpenStackScenario): """Base class for EC2 scenarios with basic atomic actions.""" @atomic.action_timer("ec2.list_servers") def _list_servers(self): """Returns user servers list.""" return self.clients("ec2").get_only_instances() @atomic.action_timer("ec2.boot_servers") def _boot_servers(self, image_id, flavor_name, instance_num=1, **kwargs): """Boot multiple servers. Returns when all the servers are actually booted and are in the "Running" state. 
:param image_id: ID of the image to be used for server creation :param flavor_name: Name of the flavor to be used for server creation :param instance_num: Number of instances to boot :param kwargs: Other optional parameters to boot servers :returns: List of created server objects """ reservation = self.clients("ec2").run_instances( image_id=image_id, instance_type=flavor_name, min_count=instance_num, max_count=instance_num, **kwargs) servers = [instance for instance in reservation.instances] self.sleep_between(CONF.benchmark.ec2_server_boot_prepoll_delay) servers = [utils.wait_for( server, ready_statuses=["RUNNING"], update_resource=self._update_resource, timeout=CONF.benchmark.ec2_server_boot_timeout, check_interval=CONF.benchmark.ec2_server_boot_poll_interval ) for server in servers] return servers def _update_resource(self, resource): resource.update() return resource rally-0.9.1/rally/plugins/openstack/scenarios/ec2/servers.py0000664000567000056710000000366513073417720025320 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ec2 import utils from rally.task import types from rally.task import validation """Scenarios for servers using EC2.""" @validation.required_services(consts.Service.EC2) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ec2"]}, name="EC2Servers.list_servers") class ListServers(utils.EC2Scenario): def run(self): """List all servers. This simple scenario tests the EC2 API list function by listing all the servers. """ self._list_servers() @types.convert(image={"type": "ec2_image"}, flavor={"type": "ec2_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.EC2) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ec2"]}, name="EC2Servers.boot_server") class BootServer(utils.EC2Scenario): def run(self, image, flavor, **kwargs): """Boot a server. Assumes that cleanup is done elsewhere. :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param kwargs: optional additional arguments for server creation """ self._boot_servers(image, flavor, **kwargs) rally-0.9.1/rally/plugins/openstack/scenarios/nova/0000775000567000056710000000000013073420067023533 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/nova/__init__.py0000664000567000056710000000000013073417716025641 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/nova/images.py0000664000567000056710000000263513073417720025362 0ustar jenkinsjenkins00000000000000# Copyright 2015: Workday, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova images.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaImages.list_images") class ListImages(utils.NovaScenario): def run(self, detailed=True, **kwargs): """List all images. Measure the "nova image-list" command performance. :param detailed: True if the image listing should contain detailed information :param kwargs: Optional additional arguments for image listing """ self._list_images(detailed, **kwargs) rally-0.9.1/rally/plugins/openstack/scenarios/nova/aggregates.py0000664000567000056710000001667713073417720026241 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import types from rally.task import validation """Scenarios for Nova aggregates.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaAggregates.list_aggregates") class ListAggregates(utils.NovaScenario): def run(self): """List all nova aggregates. Measure the "nova aggregate-list" command performance. """ self._list_aggregates() @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaAggregates.create_and_list_aggregates") class CreateAndListAggregates(utils.NovaScenario): """scenario for create and list aggregate.""" def run(self, availability_zone): """Create a aggregate and then list all aggregates. This scenario creates a aggregate and then lists all aggregates. :param availability_zone: The availability zone of the aggregate """ aggregate = self._create_aggregate(availability_zone) msg = "Aggregate isn't created" self.assertTrue(aggregate, err_msg=msg) all_aggregates = self._list_aggregates() msg = ("Created aggregate is not in the" " list of all available aggregates") self.assertIn(aggregate, all_aggregates, err_msg=msg) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaAggregates.create_and_delete_aggregate") class CreateAndDeleteAggregate(utils.NovaScenario): """Scenario for create and delete aggregate.""" def run(self, availability_zone): """Create an aggregate and then delete it. This scenario first creates an aggregate and then delete it. 
:param availability_zone: The availability zone of the aggregate """ aggregate = self._create_aggregate(availability_zone) self._delete_aggregate(aggregate) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaAggregates.create_and_update_aggregate") class CreateAndUpdateAggregate(utils.NovaScenario): """Scenario for create and update aggregate.""" def run(self, availability_zone): """Create an aggregate and then update its name and availability_zone This scenario first creates an aggregate and then update its name and availability_zone :param availability_zone: The availability zone of the aggregate """ aggregate = self._create_aggregate(availability_zone) self._update_aggregate(aggregate) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaAggregates.create_aggregate_add_and_remove_host") class CreateAggregateAddAndRemoveHost(utils.NovaScenario): """Scenario for add a host to and remove the host from an aggregate.""" def run(self, availability_zone): """Create an aggregate, add a host to and remove the host from it Measure "nova aggregate-add-host" and "nova aggregate-remove-host" command performance. 
:param availability_zone: The availability zone of the aggregate """ aggregate = self._create_aggregate(availability_zone) hosts = self._list_hypervisors() host_name = hosts[0].service["host"] self._aggregate_add_host(aggregate, host_name) self._aggregate_remove_host(aggregate, host_name) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaAggregates.create_and_get_aggregate_details") class CreateAndGetAggregateDetails(utils.NovaScenario): """Scenario for create and get aggregate details.""" def run(self, availability_zone): """Create an aggregate and then get its details. This scenario first creates an aggregate and then get details of it. :param availability_zone: The availability zone of the aggregate """ aggregate = self._create_aggregate(availability_zone) self._get_aggregate_details(aggregate) @types.convert(image={"type": "glance_image"}) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["nova"], "cleanup": ["nova"]}, name="NovaAggregates." "create_aggregate_add_host_and_boot_server") class CreateAggregateAddHostAndBootServer(utils.NovaScenario): """Scenario to verify an aggregate.""" def run(self, image, metadata, availability_zone=None, ram=512, vcpus=1, disk=1, boot_server_kwargs=None): """Scenario to create and verify an aggregate This scenario creates an aggregate, adds a compute host and metadata to the aggregate, adds the same metadata to the flavor and creates an instance. Verifies that instance host is one of the hosts in the aggregate. 
:param image: The image ID to boot from :param metadata: The metadata to be set as flavor extra specs :param availability_zone: The availability zone of the aggregate :param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param boot_server_kwargs: Optional additional arguments to verify host aggregates :raises RallyException: if instance and aggregate hosts do not match """ boot_server_kwargs = boot_server_kwargs or {} aggregate = self._create_aggregate(availability_zone) hosts = self._list_hypervisors() host_name = hosts[0].service["host"] self._aggregate_set_metadata(aggregate, metadata) self._aggregate_add_host(aggregate, host_name) flavor = self._create_flavor(ram, vcpus, disk) flavor.set_keys(metadata) server = self._boot_server(image, flavor.id, **boot_server_kwargs) # NOTE: we need to get server object by admin user to obtain # "hypervisor_hostname" attribute server = self.admin_clients("nova").servers.get(server.id) instance_hostname = getattr(server, "OS-EXT-SRV-ATTR:hypervisor_hostname") if instance_hostname != host_name: raise exceptions.RallyException("Instance host and aggregate " "host are different") rally-0.9.1/rally/plugins/openstack/scenarios/nova/security_group.py0000664000567000056710000002100113073417720027164 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import six from rally.common.i18n import _ from rally import consts from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import atomic from rally.task import types from rally.task import validation """Scenarios for Nova security groups.""" class NovaSecurityGroupException(exceptions.RallyException): msg_fmt = _("%(message)s") @validation.required_parameters("security_group_count", "rules_per_security_group") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaSecGroup.create_and_delete_secgroups") class CreateAndDeleteSecgroups(utils.NovaScenario): def run(self, security_group_count, rules_per_security_group): """Create and delete security groups. This scenario creates N security groups with M rules per group and then deletes them. :param security_group_count: Number of security groups :param rules_per_security_group: Number of rules per security group """ security_groups = self._create_security_groups( security_group_count) self._create_rules_for_security_group(security_groups, rules_per_security_group) self._delete_security_groups(security_groups) @validation.required_parameters("security_group_count", "rules_per_security_group") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaSecGroup.create_and_list_secgroups") class CreateAndListSecgroups(utils.NovaScenario): def run(self, security_group_count, rules_per_security_group): """Create and list security groups. This scenario creates N security groups with M rules per group and then lists them. 
:param security_group_count: Number of security groups :param rules_per_security_group: Number of rules per security group """ security_groups = self._create_security_groups( security_group_count) self.assertTrue(security_groups) self._create_rules_for_security_group(security_groups, rules_per_security_group) pool_groups = self._list_security_groups() self.assertLessEqual(len(security_groups), len(pool_groups)) self.assertIsSubset([i.id for i in security_groups], [i.id for i in pool_groups]) @validation.required_parameters("security_group_count") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaSecGroup.create_and_update_secgroups") class CreateAndUpdateSecgroups(utils.NovaScenario): def run(self, security_group_count): """Create and update security groups. This scenario creates 'security_group_count' security groups then updates their name and description. :param security_group_count: Number of security groups """ security_groups = self._create_security_groups( security_group_count) self._update_security_groups(security_groups) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_parameters("security_group_count", "rules_per_security_group") @validation.required_contexts("network") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaSecGroup.boot_and_delete_server_with_secgroups") class BootAndDeleteServerWithSecgroups(utils.NovaScenario): def run(self, image, flavor, security_group_count, rules_per_security_group, **kwargs): """Boot and delete server with security groups attached. 
Plan of this scenario: - create N security groups with M rules per group vm with security groups) - boot a VM with created security groups - get list of attached security groups to server - delete server - delete all security groups - check that all groups were attached to server :param image: ID of the image to be used for server creation :param flavor: ID of the flavor to be used for server creation :param security_group_count: Number of security groups :param rules_per_security_group: Number of rules per security group :param **kwargs: Optional arguments for booting the instance """ security_groups = self._create_security_groups( security_group_count) self._create_rules_for_security_group(security_groups, rules_per_security_group) secgroups_names = [sg.name for sg in security_groups] server = self._boot_server(image, flavor, security_groups=secgroups_names, **kwargs) action_name = "nova.get_attached_security_groups" with atomic.ActionTimer(self, action_name): attached_security_groups = server.list_security_group() self._delete_server(server) try: self._delete_security_groups(security_groups) except Exception as e: if hasattr(e, "http_status") and e.http_status == 400: raise NovaSecurityGroupException(six.text_type(e)) raise error_message = ("Expected number of attached security groups to " " server %(server)s is '%(all)s', but actual number " "is '%(attached)s'." 
% { "attached": len(attached_security_groups), "all": len(security_groups), "server": server}) self.assertEqual(sorted([sg.id for sg in security_groups]), sorted([sg.id for sg in attached_security_groups]), error_message) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @validation.required_contexts("network") @scenario.configure(context={"cleanup": ["nova"]}, name="NovaSecGroup.boot_server_and_add_secgroups") class BootServerAndAddSecgroups(utils.NovaScenario): def run(self, image, flavor, security_group_count=1, rules_per_security_group=1, **kwargs): """Boot a server and add a security group to it. Plan of this scenario: - create N security groups with M rules per group - boot a VM - add security groups to VM :param image: ID of the image to be used for server creation :param flavor: ID of the flavor to be used for server creation :param security_group_count: Number of security groups :param rules_per_security_group: Number of rules per security group :param **kwargs: Optional arguments for booting the instance """ server = self._boot_server(image, flavor, **kwargs) security_groups = self._create_security_groups(security_group_count) self._create_rules_for_security_group(security_groups, rules_per_security_group) with atomic.ActionTimer(self, "nova.add_server_secgroups"): for sg in security_groups: self._add_server_secgroups(server, sg.name, atomic_action=False) rally-0.9.1/rally/plugins/openstack/scenarios/nova/hypervisors.py0000664000567000056710000001104613073417720026506 0ustar jenkinsjenkins00000000000000# Copyright 2015 Cisco Systems Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import atomic from rally.task import validation """Scenarios for Nova hypervisors.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaHypervisors.list_hypervisors") class ListHypervisors(utils.NovaScenario): def run(self, detailed=True): """List hypervisors. Measure the "nova hypervisor-list" command performance. :param detailed: True if the hypervisor listing should contain detailed information about all of them """ self._list_hypervisors(detailed) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaHypervisors.list_and_get_hypervisors") class ListAndGetHypervisors(utils.NovaScenario): """Benchmark scenario for Nova hypervisors.""" def run(self, detailed=True): """List and Get hypervisors. The scenario first lists all hypervisors, then get detailed information of the listed hypervisors in turn. Measure the "nova hypervisor-show" command performance. 
:param detailed: True if the hypervisor listing should contain detailed information about all of them """ hypervisors = self._list_hypervisors(detailed) with atomic.ActionTimer(self, "nova.get_hypervisor"): for hypervisor in hypervisors: self._get_hypervisor(hypervisor, atomic_action=False) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaHypervisors.statistics_hypervisors") class StatisticsHypervisors(utils.NovaScenario): def run(self): """Get hypervisor statistics over all compute nodes. Measure the "nova hypervisor-stats" command performance. """ self._statistics_hypervisors() @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaHypervisors.list_and_get_uptime_hypervisors") class ListAndGetUptimeHypervisors(utils.NovaScenario): def run(self, detailed=True): """List hypervisors,then display the uptime of it. The scenario first list all hypervisors,then display the uptime of the listed hypervisors in turn. Measure the "nova hypervisor-uptime" command performance. :param detailed: True if the hypervisor listing should contain detailed information about all of them """ hypervisors = self._list_hypervisors(detailed) with atomic.ActionTimer(self, "nova.uptime_hypervisor"): for hypervisor in hypervisors: self._uptime_hypervisor(hypervisor, atomic_action=False) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaHypervisors.list_and_search_hypervisors") class ListAndSearchHypervisors(utils.NovaScenario): def run(self, detailed=True): """List all servers belonging to specific hypervisor. The scenario first list all hypervisors,then find its hostname, then list all servers belonging to the hypervisor Measure the "nova hypervisor-servers " command performance. 
:param detailed: True if the hypervisor listing should contain detailed information about all of them """ hypervisors = self._list_hypervisors(detailed) with atomic.ActionTimer(self, "nova.search_%s_hypervisors" % len(hypervisors) ): for hypervisor in hypervisors: self._search_hypervisors(hypervisor.hypervisor_hostname, atomic_action=False) rally-0.9.1/rally/plugins/openstack/scenarios/nova/services.py0000664000567000056710000000245013073417720025733 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova agents.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaServices.list_services") class ListServices(utils.NovaScenario): def run(self, host=None, binary=None): """List all nova services. Measure the "nova service-list" command performance. :param host: List nova services on host :param binary: List nova services matching given binary """ self._list_services(host, binary) rally-0.9.1/rally/plugins/openstack/scenarios/nova/keypairs.py0000664000567000056710000001051513073417720025740 0ustar jenkinsjenkins00000000000000# Copyright 2015: Hewlett-Packard Development Company, L.P. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import types from rally.task import validation """Scenarios for Nova keypairs.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaKeypair.create_and_list_keypairs") class CreateAndListKeypairs(utils.NovaScenario): def run(self, **kwargs): """Create a keypair with random name and list keypairs. This scenario creates a keypair and then lists all keypairs. :param kwargs: Optional additional arguments for keypair creation """ keypair_name = self._create_keypair(**kwargs) self.assertTrue(keypair_name, "Keypair isn't created") list_keypairs = self._list_keypairs() self.assertIn(keypair_name, [i.id for i in list_keypairs]) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaKeypair.create_and_delete_keypair") class CreateAndDeleteKeypair(utils.NovaScenario): def run(self, **kwargs): """Create a keypair with random name and delete keypair. This scenario creates a keypair and then delete that keypair. 
:param kwargs: Optional additional arguments for keypair creation """ keypair = self._create_keypair(**kwargs) self._delete_keypair(keypair) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaKeypair.boot_and_delete_server_with_keypair") class BootAndDeleteServerWithKeypair(utils.NovaScenario): @logging.log_deprecated_args( "'server_kwargs' has been renamed 'boot_server_kwargs'", "0.3.2", ["server_kwargs"], once=True) def run(self, image, flavor, boot_server_kwargs=None, server_kwargs=None, **kwargs): """Boot and delete server with keypair. Plan of this scenario: - create a keypair - boot a VM with created keypair - delete server - delete keypair :param image: ID of the image to be used for server creation :param flavor: ID of the flavor to be used for server creation :param boot_server_kwargs: Optional additional arguments for VM creation :param server_kwargs: Deprecated alias for boot_server_kwargs :param kwargs: Optional additional arguments for keypair creation """ boot_server_kwargs = boot_server_kwargs or server_kwargs or {} keypair = self._create_keypair(**kwargs) server = self._boot_server(image, flavor, key_name=keypair, **boot_server_kwargs) self._delete_server(server) self._delete_keypair(keypair) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaKeypair.create_and_get_keypair") class CreateAndGetKeypair(utils.NovaScenario): def run(self, **kwargs): """Create a keypair and get the keypair details. 
:param kwargs: Optional additional arguments for keypair creation """ keypair = self._create_keypair(**kwargs) self._get_keypair(keypair) rally-0.9.1/rally/plugins/openstack/scenarios/nova/availability_zones.py0000664000567000056710000000257213073417720030005 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova availability-zones.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(name="NovaAvailabilityZones.list_availability_zones") class ListAvailabilityZones(utils.NovaScenario): def run(self, detailed=True): """List all availability zones. Measure the "nova availability-zone-list" command performance. :param detailed: True if the availability-zone listing should contain detailed information about all of them """ self._list_availability_zones(detailed) rally-0.9.1/rally/plugins/openstack/scenarios/nova/networks.py0000664000567000056710000000511513073417720025765 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova networks.""" @validation.restricted_parameters("label") @validation.required_parameters("start_cidr") @validation.required_services(consts.Service.NOVA, consts.Service.NOVA_NET) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova.networks"]}, name="NovaNetworks.create_and_list_networks") class CreateAndListNetworks(utils.NovaScenario): def run(self, start_cidr, **kwargs): """Create nova network and list all networks. :param start_cidr: IP range :param kwargs: Optional additional arguments for network creation """ network = self._create_network(start_cidr, **kwargs) msg = ("Network isn't created") self.assertTrue(network, err_msg=msg) list_networks = self._list_networks() msg = ("New network not in the list of existed networks.\n" "New network UUID: {}\n" "List of available networks: {}").format(network, list_networks) self.assertIn(network, list_networks, err_msg=msg) @validation.restricted_parameters("label") @validation.required_parameters("start_cidr") @validation.required_services(consts.Service.NOVA, consts.Service.NOVA_NET) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova.networks"]}, name="NovaNetworks.create_and_delete_network") class CreateAndDeleteNetwork(utils.NovaScenario): def run(self, start_cidr, **kwargs): """Create nova network and delete it. 
:param start_cidr: IP range :param kwargs: Optional additional arguments for network creation """ net_id = self._create_network(start_cidr, **kwargs) self._delete_network(net_id) rally-0.9.1/rally/plugins/openstack/scenarios/nova/utils.py0000775000567000056710000014570213073417720025263 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import random from oslo_config import cfg from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.wrappers import glance as glance_wrapper from rally.plugins.openstack.wrappers import network as network_wrapper from rally.task import atomic from rally.task import utils NOVA_BENCHMARK_OPTS = [] option_names_and_defaults = [ # action, prepoll delay, timeout, poll interval ("start", 0, 300, 1), ("stop", 0, 300, 2), ("boot", 1, 300, 1), ("delete", 2, 300, 2), ("reboot", 2, 300, 2), ("rebuild", 1, 300, 1), ("rescue", 2, 300, 2), ("unrescue", 2, 300, 2), ("suspend", 2, 300, 2), ("resume", 2, 300, 2), ("pause", 2, 300, 2), ("unpause", 2, 300, 2), ("shelve", 2, 300, 2), ("unshelve", 2, 300, 2), ("image_create", 0, 300, 2), ("image_delete", 0, 300, 2), ("resize", 2, 400, 5), ("resize_confirm", 0, 200, 2), ("resize_revert", 0, 200, 2), ("live_migrate", 1, 400, 2), ("migrate", 1, 400, 2), ] for action, prepoll, timeout, poll in option_names_and_defaults: NOVA_BENCHMARK_OPTS.extend([ cfg.FloatOpt( "nova_server_%s_prepoll_delay" 
% action, default=float(prepoll), help="Time to sleep after %s before polling for status" % action ), cfg.FloatOpt( "nova_server_%s_timeout" % action, default=float(timeout), help="Server %s timeout" % action ), cfg.FloatOpt( "nova_server_%s_poll_interval" % action, default=float(poll), help="Server %s poll interval" % action ) ]) NOVA_BENCHMARK_OPTS.extend([ cfg.FloatOpt( "nova_detach_volume_timeout", default=float(200), help="Nova volume detach timeout"), cfg.FloatOpt( "nova_detach_volume_poll_interval", default=float(2), help="Nova volume detach poll interval") ]) CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_group(benchmark_group) CONF.register_opts(NOVA_BENCHMARK_OPTS, group=benchmark_group) class NovaScenario(scenario.OpenStackScenario): """Base class for Nova scenarios with basic atomic actions.""" @atomic.action_timer("nova.list_servers") def _list_servers(self, detailed=True): """Returns user servers list.""" return self.clients("nova").servers.list(detailed) def _pick_random_nic(self): """Choose one network from existing ones.""" ctxt = self.context nets = [net["id"] for net in ctxt.get("tenant", {}).get("networks", [])] if nets: # NOTE(amaretskiy): Balance servers among networks. net_idx = self.context["iteration"] % len(nets) return [{"net-id": nets[net_idx]}] @atomic.action_timer("nova.boot_server") def _boot_server(self, image_id, flavor_id, auto_assign_nic=False, **kwargs): """Boot a server. Returns when the server is actually booted and in "ACTIVE" state. If multiple networks created by Network context are present, the first network found that isn't associated with a floating IP pool is used. 
:param image_id: int, image ID for server creation :param flavor_id: int, flavor ID for server creation :param auto_assign_nic: bool, whether or not to auto assign NICs :param kwargs: other optional parameters to initialize the server :returns: nova Server instance """ server_name = self.generate_random_name() secgroup = self.context.get("user", {}).get("secgroup") if secgroup: if "security_groups" not in kwargs: kwargs["security_groups"] = [secgroup["name"]] elif secgroup["name"] not in kwargs["security_groups"]: kwargs["security_groups"].append(secgroup["name"]) if auto_assign_nic and not kwargs.get("nics", False): nic = self._pick_random_nic() if nic: kwargs["nics"] = nic server = self.clients("nova").servers.create( server_name, image_id, flavor_id, **kwargs) self.sleep_between(CONF.benchmark.nova_server_boot_prepoll_delay) server = utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_boot_timeout, check_interval=CONF.benchmark.nova_server_boot_poll_interval ) return server def _do_server_reboot(self, server, reboottype): server.reboot(reboot_type=reboottype) self.sleep_between(CONF.benchmark.nova_server_pause_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_reboot_timeout, check_interval=CONF.benchmark.nova_server_reboot_poll_interval ) @atomic.action_timer("nova.soft_reboot_server") def _soft_reboot_server(self, server): """Reboot a server with soft reboot. A soft reboot will be issued on the given server upon which time this method will wait for the server to become active. :param server: The server to reboot. """ self._do_server_reboot(server, "SOFT") @atomic.action_timer("nova.show_server") def _show_server(self, server): """Show server details. :param server: The server to get details for. 
:returns: Server details """ return self.clients("nova").servers.get(server) @atomic.action_timer("nova.get_console_output_server") def _get_server_console_output(self, server, length=None): """Get text of a console log output from a server. :param server: The server whose console output to retrieve :param length: The number of tail log lines you would like to retrieve. :returns: Text console output from server """ return self.clients("nova").servers.get_console_output(server, length=length) @atomic.action_timer("nova.reboot_server") def _reboot_server(self, server): """Reboot a server with hard reboot. A reboot will be issued on the given server upon which time this method will wait for the server to become active. :param server: The server to reboot. """ self._do_server_reboot(server, "HARD") @atomic.action_timer("nova.rebuild_server") def _rebuild_server(self, server, image, **kwargs): """Rebuild a server with a new image. :param server: The server to rebuild. :param image: The new image to rebuild the server with. :param kwargs: Optional additional arguments to pass to the rebuild """ server.rebuild(image, **kwargs) self.sleep_between(CONF.benchmark.nova_server_rebuild_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_rebuild_timeout, check_interval=CONF.benchmark.nova_server_rebuild_poll_interval ) @atomic.action_timer("nova.start_server") def _start_server(self, server): """Start the given server. A start will be issued for the given server upon which time this method will wait for it to become ACTIVE. :param server: The server to start and wait to become ACTIVE. 
""" server.start() utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_start_timeout, check_interval=CONF.benchmark.nova_server_start_poll_interval ) @atomic.action_timer("nova.stop_server") def _stop_server(self, server): """Stop the given server. Issues a stop on the given server and waits for the server to become SHUTOFF. :param server: The server to stop. """ server.stop() utils.wait_for_status( server, ready_statuses=["SHUTOFF"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_stop_timeout, check_interval=CONF.benchmark.nova_server_stop_poll_interval ) @atomic.action_timer("nova.rescue_server") def _rescue_server(self, server): """Rescue the given server. Returns when the server is actually rescue and is in the "Rescue" state. :param server: Server object """ server.rescue() self.sleep_between(CONF.benchmark.nova_server_rescue_prepoll_delay) utils.wait_for_status( server, ready_statuses=["RESCUE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_rescue_timeout, check_interval=CONF.benchmark.nova_server_rescue_poll_interval ) @atomic.action_timer("nova.unrescue_server") def _unrescue_server(self, server): """Unrescue the given server. Returns when the server is unrescue and waits to become ACTIVE :param server: Server object """ server.unrescue() self.sleep_between(CONF.benchmark.nova_server_unrescue_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_unrescue_timeout, check_interval=CONF.benchmark.nova_server_unrescue_poll_interval ) @atomic.action_timer("nova.suspend_server") def _suspend_server(self, server): """Suspends the given server. Returns when the server is actually suspended and is in the "Suspended" state. 
:param server: Server object """ server.suspend() self.sleep_between(CONF.benchmark.nova_server_suspend_prepoll_delay) utils.wait_for_status( server, ready_statuses=["SUSPENDED"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_suspend_timeout, check_interval=CONF.benchmark.nova_server_suspend_poll_interval ) @atomic.action_timer("nova.resume_server") def _resume_server(self, server): """Resumes the suspended server. Returns when the server is actually resumed and is in the "ACTIVE" state. :param server: Server object """ server.resume() self.sleep_between(CONF.benchmark.nova_server_resume_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_resume_timeout, check_interval=CONF.benchmark.nova_server_resume_poll_interval ) @atomic.action_timer("nova.pause_server") def _pause_server(self, server): """Pause the live server. Returns when the server is actually paused and is in the "PAUSED" state. :param server: Server object """ server.pause() self.sleep_between(CONF.benchmark.nova_server_pause_prepoll_delay) utils.wait_for_status( server, ready_statuses=["PAUSED"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_pause_timeout, check_interval=CONF.benchmark.nova_server_pause_poll_interval ) @atomic.action_timer("nova.unpause_server") def _unpause_server(self, server): """Unpause the paused server. Returns when the server is actually unpaused and is in the "ACTIVE" state. :param server: Server object """ server.unpause() self.sleep_between(CONF.benchmark.nova_server_pause_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_unpause_timeout, check_interval=CONF.benchmark.nova_server_unpause_poll_interval ) @atomic.action_timer("nova.shelve_server") def _shelve_server(self, server): """Shelve the given server. 
Returns when the server is actually shelved and is in the "SHELVED_OFFLOADED" state. :param server: Server object """ server.shelve() self.sleep_between(CONF.benchmark.nova_server_pause_prepoll_delay) utils.wait_for_status( server, ready_statuses=["SHELVED_OFFLOADED"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_shelve_timeout, check_interval=CONF.benchmark.nova_server_shelve_poll_interval ) @atomic.action_timer("nova.unshelve_server") def _unshelve_server(self, server): """Unshelve the given server. Returns when the server is unshelved and is in the "ACTIVE" state. :param server: Server object """ server.unshelve() self.sleep_between(CONF.benchmark. nova_server_unshelve_prepoll_delay) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_unshelve_timeout, check_interval=CONF.benchmark.nova_server_unshelve_poll_interval ) def _delete_server(self, server, force=False): """Delete the given server. Returns when the server is actually deleted. :param server: Server object :param force: If True, force_delete will be used instead of delete. """ atomic_name = ("nova.%sdelete_server") % (force and "force_" or "") with atomic.ActionTimer(self, atomic_name): if force: server.force_delete() else: server.delete() utils.wait_for_status( server, ready_statuses=["deleted"], check_deletion=True, update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_delete_timeout, check_interval=CONF.benchmark.nova_server_delete_poll_interval ) def _delete_servers(self, servers, force=False): """Delete multiple servers. :param servers: A list of servers to delete :param force: If True, force_delete will be used instead of delete. 
""" atomic_name = ("nova.%sdelete_servers") % (force and "force_" or "") with atomic.ActionTimer(self, atomic_name): for server in servers: if force: server.force_delete() else: server.delete() for server in servers: utils.wait_for_status( server, ready_statuses=["deleted"], check_deletion=True, update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_delete_timeout, check_interval=CONF. benchmark.nova_server_delete_poll_interval ) @atomic.action_timer("nova.create_server_group") def _create_server_group(self, **kwargs): """Create (allocate) a server group. :param kwargs: Server group name and policy :returns: Nova server group """ return self.clients("nova").server_groups.create(**kwargs) @atomic.action_timer("nova.list_server_groups") def _list_server_groups(self, all_projects=False): """Get a list of all server groups. :param all_projects: If True, display server groups from all projects(Admin only) :rtype: list of :class:`ServerGroup`. """ if all_projects: return self.admin_clients("nova").server_groups.list(all_projects) else: return self.clients("nova").server_groups.list(all_projects) @atomic.action_timer("nova.delete_image") def _delete_image(self, image): """Delete the given image. Returns when the image is actually deleted. :param image: Image object """ self.clients("glance").images.delete(image.id) wrapper = glance_wrapper.wrap(self._clients.glance, self) check_interval = CONF.benchmark.nova_server_image_delete_poll_interval utils.wait_for_status( image, ready_statuses=["deleted", "pending_delete"], check_deletion=True, update_resource=wrapper.get_image, timeout=CONF.benchmark.nova_server_image_delete_timeout, check_interval=check_interval ) @atomic.action_timer("nova.create_image") def _create_image(self, server): """Create an image from the given server Uses the server name to name the created image. Returns when the image is actually created and is in the "Active" state. 
:param server: Server object for which the image will be created :returns: Created image object """ image_uuid = self.clients("nova").servers.create_image(server, server.name) image = self.clients("nova").images.get(image_uuid) check_interval = CONF.benchmark.nova_server_image_create_poll_interval image = utils.wait_for_status( image, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_image_create_timeout, check_interval=check_interval ) return image @atomic.action_timer("nova.list_images") def _list_images(self, detailed=False, **kwargs): """List all images. :param detailed: True if the image listing should contain detailed information :param kwargs: Optional additional arguments for image listing :returns: Image list """ return self.clients("nova").images.list(detailed, **kwargs) @atomic.action_timer("nova.get_keypair") def _get_keypair(self, keypair): """Get a keypair. :param keypair: The ID of the keypair to get. :rtype: :class:`Keypair` """ return self.clients("nova").keypairs.get(keypair) @atomic.action_timer("nova.create_keypair") def _create_keypair(self, **kwargs): """Create a keypair :returns: Created keypair name """ keypair_name = self.generate_random_name() keypair = self.clients("nova").keypairs.create(keypair_name, **kwargs) return keypair.name @atomic.action_timer("nova.list_keypairs") def _list_keypairs(self): """Return user keypairs list.""" return self.clients("nova").keypairs.list() @atomic.action_timer("nova.delete_keypair") def _delete_keypair(self, keypair_name): """Delete keypair :param keypair_name: The keypair name to delete. """ self.clients("nova").keypairs.delete(keypair_name) @atomic.action_timer("nova.boot_servers") def _boot_servers(self, image_id, flavor_id, requests, instances_amount=1, auto_assign_nic=False, **kwargs): """Boot multiple servers. Returns when all the servers are actually booted and are in the "Active" state. 
:param image_id: ID of the image to be used for server creation :param flavor_id: ID of the flavor to be used for server creation :param requests: Number of booting requests to perform :param instances_amount: Number of instances to boot per each request :param auto_assign_nic: bool, whether or not to auto assign NICs :param kwargs: other optional parameters to initialize the servers :returns: List of created server objects """ if auto_assign_nic and not kwargs.get("nics", False): nic = self._pick_random_nic() if nic: kwargs["nics"] = nic name_prefix = self.generate_random_name() for i in range(requests): self.clients("nova").servers.create("%s_%d" % (name_prefix, i), image_id, flavor_id, min_count=instances_amount, max_count=instances_amount, **kwargs) # NOTE(msdubov): Nova python client returns only one server even when # min_count > 1, so we have to rediscover all the # created servers manually. servers = [s for s in self.clients("nova").servers.list() if s.name.startswith(name_prefix)] self.sleep_between(CONF.benchmark.nova_server_boot_prepoll_delay) servers = [utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils. get_from_manager(), timeout=CONF.benchmark.nova_server_boot_timeout, check_interval=CONF.benchmark.nova_server_boot_poll_interval ) for server in servers] return servers @atomic.optional_action_timer("nova.associate_floating_ip") def _associate_floating_ip(self, server, address, fixed_address=None): """Add floating IP to an instance :param server: The :class:`Server` to add an IP to. :param address: The ip address or FloatingIP to add to the instance :param fixed_address: The fixedIP address the FloatingIP is to be associated with (optional) :param atomic_action: True if this is an atomic action. 
added and handled by the optional_action_timer() decorator """ server.add_floating_ip(address, fixed_address=fixed_address) utils.wait_for(server, is_ready=self.check_ip_address(address), update_resource=utils.get_from_manager()) # Update server data server.addresses = server.manager.get(server.id).addresses @atomic.optional_action_timer("nova.dissociate_floating_ip") def _dissociate_floating_ip(self, server, address): """Remove floating IP from an instance :param server: The :class:`Server` to add an IP to. :param address: The ip address or FloatingIP to remove :param atomic_action: True if this is an atomic action. added and handled by the optional_action_timer() decorator """ server.remove_floating_ip(address) utils.wait_for( server, is_ready=self.check_ip_address(address, must_exist=False), update_resource=utils.get_from_manager() ) # Update server data server.addresses = server.manager.get(server.id).addresses @staticmethod def check_ip_address(address, must_exist=True): ip_to_check = getattr(address, "ip", address) def _check_addr(resource): for network, addr_list in resource.addresses.items(): for addr in addr_list: if ip_to_check == addr["addr"]: return must_exist return not must_exist return _check_addr @atomic.action_timer("nova.list_networks") def _list_networks(self): """Return user networks list.""" return self.clients("nova").networks.list() @atomic.action_timer("nova.resize") def _resize(self, server, flavor): server.resize(flavor) utils.wait_for_status( server, ready_statuses=["VERIFY_RESIZE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_resize_timeout, check_interval=CONF.benchmark.nova_server_resize_poll_interval ) @atomic.action_timer("nova.resize_confirm") def _resize_confirm(self, server, status="ACTIVE"): server.confirm_resize() utils.wait_for_status( server, ready_statuses=[status], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_resize_confirm_timeout, check_interval=( 
CONF.benchmark.nova_server_resize_confirm_poll_interval) ) @atomic.action_timer("nova.resize_revert") def _resize_revert(self, server, status="ACTIVE"): server.revert_resize() utils.wait_for_status( server, ready_statuses=[status], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_resize_revert_timeout, check_interval=( CONF.benchmark.nova_server_resize_revert_poll_interval) ) @atomic.action_timer("nova.attach_volume") def _attach_volume(self, server, volume, device=None): server_id = server.id volume_id = volume.id attachment = self.clients("nova").volumes.create_server_volume( server_id, volume_id, device) utils.wait_for_status( volume, ready_statuses=["in-use"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_resize_revert_timeout, check_interval=( CONF.benchmark.nova_server_resize_revert_poll_interval) ) return attachment @atomic.action_timer("nova.detach_volume") def _detach_volume(self, server, volume, attachment=None): server_id = server.id # NOTE(chenhb): Recommend the use of attachment.The use of # volume.id is retained mainly for backwoard compatible. attachment_id = attachment.id if attachment else volume.id self.clients("nova").volumes.delete_server_volume(server_id, attachment_id) utils.wait_for_status( volume, ready_statuses=["available"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_detach_volume_timeout, check_interval=CONF.benchmark.nova_detach_volume_poll_interval ) @atomic.action_timer("nova.live_migrate") def _live_migrate(self, server, target_host, block_migration=False, disk_over_commit=False, skip_host_check=False): """Run live migration of the given server. 
:param server: Server object :param target_host: Specifies the target compute node to migrate :param block_migration: Specifies the migration type :param disk_over_commit: Specifies whether to overcommit migrated instance or not :param skip_host_check: Specifies whether to verify the targeted host availability """ server_admin = self.admin_clients("nova").servers.get(server.id) host_pre_migrate = getattr(server_admin, "OS-EXT-SRV-ATTR:host") server_admin.live_migrate(target_host, block_migration=block_migration, disk_over_commit=disk_over_commit) utils.wait_for_status( server, ready_statuses=["ACTIVE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_live_migrate_timeout, check_interval=( CONF.benchmark.nova_server_live_migrate_poll_interval) ) server_admin = self.admin_clients("nova").servers.get(server.id) if (host_pre_migrate == getattr(server_admin, "OS-EXT-SRV-ATTR:host") and not skip_host_check): raise exceptions.LiveMigrateException( "Migration complete but instance did not change host: %s" % host_pre_migrate) @atomic.action_timer("nova.find_host_to_migrate") def _find_host_to_migrate(self, server): """Find a compute node for live migration. :param server: Server object """ server_admin = self.admin_clients("nova").servers.get(server.id) host = getattr(server_admin, "OS-EXT-SRV-ATTR:host") az_name = getattr(server_admin, "OS-EXT-AZ:availability_zone") az = None for a in self.admin_clients("nova").availability_zones.list(): if az_name == a.zoneName: az = a break try: new_host = random.choice( [key for key, value in az.hosts.items() if key != host and value.get("nova-compute", {}).get("available", False)]) return new_host except IndexError: raise exceptions.InvalidHostException( "No valid host found to migrate") @atomic.action_timer("nova.migrate") def _migrate(self, server, skip_host_check=False): """Run migration of the given server. 
:param server: Server object :param skip_host_check: Specifies whether to verify the targeted host availability """ server_admin = self.admin_clients("nova").servers.get(server.id) host_pre_migrate = getattr(server_admin, "OS-EXT-SRV-ATTR:host") server_admin.migrate() utils.wait_for_status( server, ready_statuses=["VERIFY_RESIZE"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.nova_server_migrate_timeout, check_interval=( CONF.benchmark.nova_server_migrate_poll_interval) ) if not skip_host_check: server_admin = self.admin_clients("nova").servers.get(server.id) host_after_migrate = getattr(server_admin, "OS-EXT-SRV-ATTR:host") if host_pre_migrate == host_after_migrate: raise exceptions.MigrateException( "Migration complete but instance did not change host: %s" % host_pre_migrate) def _create_security_groups(self, security_group_count): security_groups = [] with atomic.ActionTimer(self, "nova.create_%s_security_groups" % security_group_count): for i in range(security_group_count): sg_name = self.generate_random_name() sg = self.clients("nova").security_groups.create(sg_name, sg_name) security_groups.append(sg) return security_groups def _create_rules_for_security_group(self, security_groups, rules_per_security_group, ip_protocol="tcp", cidr="0.0.0.0/0"): action_name = ("nova.create_%s_rules" % (rules_per_security_group * len(security_groups))) creation_status = True with atomic.ActionTimer(self, action_name): for i, security_group in enumerate(security_groups): result = True for j in range(rules_per_security_group): result = self.clients("nova").security_group_rules.create( security_group.id, from_port=(i * rules_per_security_group + j + 1), to_port=(i * rules_per_security_group + j + 1), ip_protocol=ip_protocol, cidr=cidr) if not result: creation_status = False return creation_status def _update_security_groups(self, security_groups): """Update a list of security groups :param security_groups: list, security_groups that are to be updated """ with 
atomic.ActionTimer(self, "nova.update_%s_security_groups" %
                           len(security_groups)):
            # Rename/re-describe every group with fresh random values so
            # repeated iterations never collide on names.
            for sec_group in security_groups:
                sg_new_name = self.generate_random_name()
                sg_new_desc = self.generate_random_name()
                self.clients("nova").security_groups.update(sec_group.id,
                                                            sg_new_name,
                                                            sg_new_desc)

    def _delete_security_groups(self, security_group):
        """Delete the given security groups one by one.

        :param security_group: list of security group objects to delete
        """
        with atomic.ActionTimer(self, "nova.delete_%s_security_groups"
                                % len(security_group)):
            for sg in security_group:
                self.clients("nova").security_groups.delete(sg.id)

    def _list_security_groups(self):
        """Return security groups list."""
        with atomic.ActionTimer(self, "nova.list_security_groups"):
            return self.clients("nova").security_groups.list()

    @atomic.optional_action_timer("nova.add_server_secgroups")
    def _add_server_secgroups(self, server, security_group,
                              atomic_action=False):
        """add security group to a server.

        :param server: Server object
        :param security_group: The name of security group to add.
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator.
        :returns: An instance of novaclient.base.DictWithMeta
        """
        return self.clients("nova").servers.add_security_group(server,
                                                               security_group)

    @atomic.action_timer("nova.list_floating_ips_bulk")
    def _list_floating_ips_bulk(self):
        """List all floating IPs."""
        return self.admin_clients("nova").floating_ips_bulk.list()

    @atomic.action_timer("nova.create_floating_ips_bulk")
    def _create_floating_ips_bulk(self, ip_range, **kwargs):
        """Create floating IPs by range."""
        # Pick a free CIDR starting from the requested one, then create the
        # whole range under a randomly named pool.
        ip_range = network_wrapper.generate_cidr(start_cidr=ip_range)
        pool_name = self.generate_random_name()
        return self.admin_clients("nova").floating_ips_bulk.create(
            ip_range=ip_range, pool=pool_name, **kwargs)

    @atomic.action_timer("nova.delete_floating_ips_bulk")
    def _delete_floating_ips_bulk(self, ip_range):
        """Delete floating IPs by range."""
        return self.admin_clients("nova").floating_ips_bulk.delete(ip_range)

    @atomic.action_timer("nova.list_hypervisors")
    def _list_hypervisors(self, detailed=True):
        """List hypervisors."""
        return self.admin_clients("nova").hypervisors.list(detailed)

    @atomic.action_timer("nova.statistics_hypervisors")
    def _statistics_hypervisors(self):
        """Get hypervisor statistics over all compute nodes.

        :returns: Hypervisor statistics
        """
        return self.admin_clients("nova").hypervisors.statistics()

    @atomic.optional_action_timer("nova.get_hypervisor")
    def _get_hypervisor(self, hypervisor):
        """Get a specific hypervisor.

        :param hypervisor: Hypervisor to get.
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator.
        :returns: Hypervisor object
        """
        return self.admin_clients("nova").hypervisors.get(hypervisor)

    @atomic.optional_action_timer("nova.search_hypervisors")
    def _search_hypervisors(self, hypervisor_match, servers=False):
        """List all servers belonging to specific hypervisor.

        :param hypervisor_match: Hypervisor's host name.
        :param servers: If True, server information is also retrieved.
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator.
        :returns: Hypervisor object
        """
        return self.admin_clients("nova").hypervisors.search(hypervisor_match,
                                                             servers=servers)

    @atomic.action_timer("nova.lock_server")
    def _lock_server(self, server):
        """Lock the given server.

        :param server: Server to lock
        """
        server.lock()

    @atomic.optional_action_timer("nova.uptime_hypervisor")
    def _uptime_hypervisor(self, hypervisor, atomic_action=False):
        """Display the uptime of the specified hypervisor.

        :param hypervisor: Hypervisor to get.
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator.
        :returns: Hypervisor object
        """
        return self.admin_clients("nova").hypervisors.uptime(hypervisor)

    @atomic.action_timer("nova.unlock_server")
    def _unlock_server(self, server):
        """Unlock the given server.

        :param server: Server to unlock
        """
        server.unlock()

    @atomic.action_timer("nova.create_network")
    def _create_network(self, ip_range, **kwargs):
        """Create nova network.

        :param ip_range: IP range in CIDR notation to create
        """
        net_label = self.generate_random_name()
        ip_range = network_wrapper.generate_cidr(start_cidr=ip_range)
        return self.admin_clients("nova").networks.create(
            label=net_label, cidr=ip_range, **kwargs)

    @atomic.action_timer("nova.delete_network")
    def _delete_network(self, net_id):
        """Delete nova network.

        :param net_id: The nova-network ID to delete
        """
        return self.admin_clients("nova").networks.delete(net_id)

    @atomic.action_timer("nova.list_flavors")
    def _list_flavors(self, detailed=True, **kwargs):
        """List all flavors.

        :param kwargs: Optional additional arguments for flavor listing
        :param detailed: True if the flavor listing
                         should contain detailed information
        :returns: flavors list
        """
        return self.clients("nova").flavors.list(detailed, **kwargs)

    @atomic.action_timer("nova.set_flavor_keys")
    def _set_flavor_keys(self, flavor, extra_specs):
        """set flavor keys

        :param flavor: flavor to set keys
        :param extra_specs: additional arguments for flavor set keys
        """
        return flavor.set_keys(extra_specs)

    @atomic.action_timer("nova.list_agents")
    def _list_agents(self, hypervisor=None):
        """List all nova-agent builds.

        :param hypervisor: The nova-hypervisor ID on which we need to list
                           all the builds
        :returns: Nova-agent build list
        """
        return self.admin_clients("nova").agents.list(hypervisor)

    @atomic.action_timer("nova.list_aggregates")
    def _list_aggregates(self):
        """Returns list of all os-aggregates."""
        return self.admin_clients("nova").aggregates.list()

    @atomic.action_timer("nova.list_availability_zones")
    def _list_availability_zones(self, detailed=True):
        """List availability-zones.

        :param detailed: True if the availability-zone listing should
                         contain detailed information
        :returns: Availability-zone list
        """
        return self.admin_clients("nova").availability_zones.list(detailed)

    @atomic.action_timer("nova.list_hosts")
    def _list_hosts(self, zone=None, service=None):
        """List nova hosts.

        :param zone: List all hosts in the given nova availability-zone ID
        :param service: Name of service type to filter
        :returns: Nova host list
        """
        hosts = self.admin_clients("nova").hosts.list(zone)
        # The hosts API has no server-side service filter; filter locally.
        if service:
            hosts = [host for host in hosts if host.service == service]
        return hosts

    @atomic.optional_action_timer("nova.get_host")
    def _get_host(self, host_name, atomic_action=None):
        """Describe a specific host.

        :param host_name: host name to get.
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator.
        :returns: host object
        """
        # NOTE(review): sibling optional_action_timer methods default
        # atomic_action to False, not None — both are falsy so behavior
        # matches, but confirm before unifying the default.
        return self.admin_clients("nova").hosts.get(host_name)

    @atomic.action_timer("nova.list_services")
    def _list_services(self, host=None, binary=None):
        """return all nova service details

        :param host: List all nova services on host
        :param binary: List all nova services matching given binary
        """
        return self.admin_clients("nova").services.list(host, binary)

    @atomic.action_timer("nova.create_flavor")
    def _create_flavor(self, ram, vcpus, disk, **kwargs):
        """Create a flavor

        :param ram: Memory in MB for the flavor
        :param vcpus: Number of VCPUs for the flavor
        :param disk: Size of local disk in GB
        :param kwargs: Optional additional arguments for flavor creation
        """
        name = self.generate_random_name()
        return self.admin_clients("nova").flavors.create(name, ram, vcpus,
                                                         disk, **kwargs)

    @atomic.action_timer("nova.delete_flavor")
    def _delete_flavor(self, flavor):
        """Delete a flavor

        :param flavor: The ID of the :class:`Flavor`
        :returns: An instance of novaclient.base.TupleWithMeta
        """
        return self.admin_clients("nova").flavors.delete(flavor)

    @atomic.action_timer("nova.list_flavor_access")
    def _list_flavor_access(self, flavor):
        """List access-rules for non-public flavor.

        :param flavor: List access rules for flavor instance or flavor ID
        """
        return self.admin_clients("nova").flavor_access.list(flavor=flavor)

    @atomic.action_timer("nova.add_tenant_access")
    def _add_tenant_access(self, flavor, tenant):
        """Add a tenant to the given flavor access list.

        :param flavor: name or id of the object flavor
        :param tenant: id of the object tenant
        :returns: access rules for flavor instance or flavor ID
        """
        return self.admin_clients("nova").flavor_access.add_tenant_access(
            flavor, tenant)

    @atomic.action_timer("nova.update_server")
    def _update_server(self, server, description=None):
        """update the server's name and description.

        :param server: Server object
        :param description: update the server description
        :returns: The updated server
        """
        new_name = self.generate_random_name()
        if description:
            return server.update(name=new_name, description=description)
        else:
            return server.update(name=new_name)

    @atomic.action_timer("nova.get_flavor")
    def _get_flavor(self, flavor_id):
        """Show a flavor

        :param flavor_id: The flavor ID to get
        """
        return self.admin_clients("nova").flavors.get(flavor_id)

    @atomic.action_timer("nova.create_aggregate")
    def _create_aggregate(self, availability_zone):
        """Create a new aggregate.

        :param availability_zone: The availability zone of the aggregate
        :returns: The created aggregate
        """
        aggregate_name = self.generate_random_name()
        return self.admin_clients("nova").aggregates.create(
            aggregate_name, availability_zone)

    @atomic.action_timer("nova.get_aggregate_details")
    def _get_aggregate_details(self, aggregate):
        """Get details of the specified aggregate.

        :param aggregate: The aggregate to get details
        :returns: Detailed information of aggregate
        """
        return self.admin_clients("nova").aggregates.get_details(aggregate)

    @atomic.action_timer("nova.delete_aggregate")
    def _delete_aggregate(self, aggregate):
        """Delete the specified aggregate.

        :param aggregate: The aggregate to delete
        :returns: An instance of novaclient.base.TupleWithMeta
        """
        return self.admin_clients("nova").aggregates.delete(aggregate)

    @atomic.action_timer("nova.bind_actions")
    def _bind_actions(self):
        """Build the ActionBuilder mapping action names to bound methods.

        :returns: utils.ActionBuilder with all boot-and-bounce actions bound
        """
        actions = ["hard_reboot", "soft_reboot", "stop_start",
                   "rescue_unrescue", "pause_unpause", "suspend_resume",
                   "lock_unlock", "shelve_unshelve"]
        action_builder = utils.ActionBuilder(actions)
        action_builder.bind_action("hard_reboot", self._reboot_server)
        action_builder.bind_action("soft_reboot", self._soft_reboot_server)
        action_builder.bind_action("stop_start", self._stop_and_start_server)
        action_builder.bind_action("rescue_unrescue",
                                   self._rescue_and_unrescue_server)
        action_builder.bind_action("pause_unpause",
                                   self._pause_and_unpause_server)
        action_builder.bind_action("suspend_resume",
                                   self._suspend_and_resume_server)
        action_builder.bind_action("lock_unlock",
                                   self._lock_and_unlock_server)
        action_builder.bind_action("shelve_unshelve",
                                   self._shelve_and_unshelve_server)
        return action_builder

    @atomic.action_timer("nova.stop_and_start_server")
    def _stop_and_start_server(self, server):
        """Stop and then start the given server.

        A stop will be issued on the given server upon which time
        this method will wait for the server to become 'SHUTOFF'.
        Once the server is SHUTOFF a start will be issued and this
        method will wait for the server to become 'ACTIVE' again.

        :param server: The server to stop and then start.
        """
        self._stop_server(server)
        self._start_server(server)

    @atomic.action_timer("nova.rescue_and_unrescue_server")
    def _rescue_and_unrescue_server(self, server):
        """Rescue and then unrescue the given server.

        A rescue will be issued on the given server upon which time
        this method will wait for the server to become 'RESCUE'.
        Once the server is RESCUE an unrescue will be issued and
        this method will wait for the server to become 'ACTIVE' again.

        :param server: The server to rescue and then unrescue.
        """
        self._rescue_server(server)
        self._unrescue_server(server)

    @atomic.action_timer("nova.pause_and_unpause_server")
    def _pause_and_unpause_server(self, server):
        """Pause and then unpause the given server.

        A pause will be issued on the given server upon which time
        this method will wait for the server to become 'PAUSED'.
        Once the server is PAUSED an unpause will be issued and
        this method will wait for the server to become 'ACTIVE' again.

        :param server: The server to pause and then unpause.
        """
        self._pause_server(server)
        self._unpause_server(server)

    @atomic.action_timer("nova.suspend_and_resume_server")
    def _suspend_and_resume_server(self, server):
        """Suspend and then resume the given server.

        A suspend will be issued on the given server upon which time
        this method will wait for the server to become 'SUSPENDED'.
        Once the server is SUSPENDED a resume will be issued and
        this method will wait for the server to become 'ACTIVE' again.

        :param server: The server to suspend and then resume.
        """
        self._suspend_server(server)
        self._resume_server(server)

    @atomic.action_timer("nova.lock_and_unlock_server")
    def _lock_and_unlock_server(self, server):
        """Lock and then unlock the given server.

        A lock will be issued on the given server upon which time
        this method will wait for the server to become 'locked'.
        Once the server is locked an unlock will be issued.

        :param server: The server to lock and then unlock.
        """
        self._lock_server(server)
        self._unlock_server(server)

    @atomic.action_timer("nova.shelve_and_unshelve_server")
    def _shelve_and_unshelve_server(self, server):
        """Shelve and then unshelve the given server.

        A shelve will be issued on the given server upon which time
        this method will wait for the server to become 'SHELVED'.
        Once the server is SHELVED an unshelve will be issued and
        this method will wait for the server to become 'ACTIVE' again.

        :param server: The server to shelve and then unshelve.
        """
        self._shelve_server(server)
        self._unshelve_server(server)

    @atomic.action_timer("nova.update_aggregate")
    def _update_aggregate(self, aggregate):
        """Update the aggregate's name and availability_zone.

        :param aggregate: The aggregate to update
        :return: The updated aggregate
        """
        aggregate_name = self.generate_random_name()
        availability_zone = self.generate_random_name()
        values = {"name": aggregate_name,
                  "availability_zone": availability_zone}
        return self.admin_clients("nova").aggregates.update(aggregate,
                                                            values)

    @atomic.action_timer("nova.aggregate_add_host")
    def _aggregate_add_host(self, aggregate, host):
        """Add a host into the Host Aggregate.

        :param aggregate: The aggregate add host to
        :param host: The host add to aggregate
        :returns: The aggregate that has been added host to
        """
        return self.admin_clients("nova").aggregates.add_host(aggregate,
                                                              host)

    @atomic.action_timer("nova.aggregate_remove_host")
    def _aggregate_remove_host(self, aggregate, host):
        """Remove a host from an aggregate.

        :param aggregate: The aggregate remove host from
        :param host: The host to remove
        :returns: The aggregate that has been removed host from
        """
        return self.admin_clients("nova").aggregates.remove_host(aggregate,
                                                                 host)

    @atomic.action_timer("nova.aggregate_set_metadata")
    def _aggregate_set_metadata(self, aggregate, metadata):
        """Set metadata to an aggregate

        :param aggregate: The aggregate to set metadata to
        :param metadata: The metadata to be set
        :return: The aggregate that has the set metadata
        """
        return self.admin_clients("nova").aggregates.set_metadata(aggregate,
                                                                  metadata)
rally-0.9.1/rally/plugins/openstack/scenarios/nova/hosts.py0000664000567000056710000000414313073417720025251 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.nova import utils
from rally.task import atomic
from rally.task import validation


"""Scenarios for Nova hosts."""


@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True)
@scenario.configure(name="NovaHosts.list_hosts")
class ListHosts(utils.NovaScenario):

    def run(self, zone=None):
        """List all nova hosts.

        Measure the "nova host-list" command performance.

        :param zone: List nova hosts in an availability-zone.
                     None (default value) means list hosts in all
                     availability-zones
        """
        self._list_hosts(zone)


@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True)
@scenario.configure(name="NovaHosts.list_and_get_hosts")
class ListAndGetHosts(utils.NovaScenario):

    def run(self, zone=None):
        """List all nova hosts, and get detailed information of the hosts.

        Measure the "nova host-describe" command performance.

        :param zone: List nova hosts in an availability-zone.
                     None (default value) means list hosts in all
                     availability-zones
        """
        hosts = self._list_hosts(zone, service="compute")
        # One aggregate timer covers the whole loop; the per-call timer is
        # suppressed with atomic_action=False.
        with atomic.ActionTimer(self, "nova.get_%s_hosts" % len(hosts)):
            for host in hosts:
                self._get_host(host.host_name, atomic_action=False)
rally-0.9.1/rally/plugins/openstack/scenarios/nova/floating_ips_bulk.py0000664000567000056710000000546113073417720027610 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.nova import utils
from rally.task import validation


"""Scenarios for create nova floating IP by range."""


@validation.restricted_parameters("pool")
@validation.required_parameters("start_cidr")
@validation.required_services(consts.Service.NOVA, consts.Service.NOVA_NET)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["nova"]},
                    name=("NovaFloatingIpsBulk"
                          ".create_and_list_floating_ips_bulk"))
class CreateAndListFloatingIpsBulk(utils.NovaScenario):

    def run(self, start_cidr, **kwargs):
        """Create nova floating IP by range and list it.

        This scenario creates a floating IP by range and then lists all.

        :param start_cidr: Floating IP range
        :param kwargs: Optional additional arguments for range IP creation
        """
        ips_bulk = self._create_floating_ips_bulk(start_cidr, **kwargs)
        msg = ("Bulk of IPS isn't created")
        self.assertTrue(ips_bulk, err_msg=msg)
        list_ips = self._list_floating_ips_bulk()
        # The listing may contain pre-existing IPs, hence subset checks
        # rather than equality.
        self.assertLessEqual(len(ips_bulk), len(list_ips))
        self.assertIsSubset(ips_bulk, list_ips)


@validation.restricted_parameters("pool")
@validation.required_parameters("start_cidr")
@validation.required_services(consts.Service.NOVA, consts.Service.NOVA_NET)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["nova"]},
                    name=("NovaFloatingIpsBulk"
                          ".create_and_delete_floating_ips_bulk"))
class CreateAndDeleteFloatingIpsBulk(utils.NovaScenario):

    def run(self, start_cidr, **kwargs):
        """Create nova floating IP by range and delete it.

        This scenario creates a floating IP by range and then delete it.

        :param start_cidr: Floating IP range
        :param kwargs: Optional additional arguments for range IP creation
        """
        floating_ips_bulk = self._create_floating_ips_bulk(start_cidr,
                                                           **kwargs)
        self._delete_floating_ips_bulk(floating_ips_bulk.ip_range)
rally-0.9.1/rally/plugins/openstack/scenarios/nova/server_groups.py0000775000567000056710000000404713073417720027024 0ustar jenkinsjenkins00000000000000# Copyright 2017: Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova Group servers.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServerGroups.create_and_list_server_groups") class CreateAndListServerGroups(utils.NovaScenario): def run(self, all_projects=False, kwargs=None): """Create a server group, then list all server groups. Measure the "nova server-group-create" and "nova server-group-list" command performance. :param all_projects: If True, display server groups from all projects(Admin only) :param kwargs: Server group name and policy """ kwargs["name"] = self.generate_random_name() server_group = self._create_server_group(**kwargs) msg = ("Server Groups isn't created") self.assertTrue(server_group, err_msg=msg) server_groups_list = self._list_server_groups(all_projects) msg = ("Server Group not included into list of server groups\n" "Created server group: {}\n" "list of server groups: {}").format(server_group, server_groups_list) self.assertIn(server_group, server_groups_list, err_msg=msg) rally-0.9.1/rally/plugins/openstack/scenarios/nova/agents.py0000664000567000056710000000251713073417720025375 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.nova import utils
from rally.task import validation


"""Scenarios for Nova agents."""


@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True)
@scenario.configure(name="NovaAgents.list_agents")
class ListAgents(utils.NovaScenario):

    def run(self, hypervisor=None):
        """List all builds.

        Measure the "nova agent-list" command performance.

        :param hypervisor: List agent builds on a specific hypervisor.
                           None (default value) means list for all
                           hypervisors
        """
        self._list_agents(hypervisor)
rally-0.9.1/rally/plugins/openstack/scenarios/nova/servers.py0000775000567000056710000012620513073417720025605 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonschema

from rally.common import logging
from rally import consts
from rally import exceptions as rally_exceptions
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils
from rally.plugins.openstack.scenarios.nova import utils
from rally.plugins.openstack.wrappers import network as network_wrapper
from rally.task import types
from rally.task import validation


"""Scenarios for Nova servers."""


LOG = logging.getLogger(__name__)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_and_list_server")
class BootAndListServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, detailed=True, **kwargs):
        """Boot a server from an image and then list all servers.

        Measure the "nova list" command performance.

        If you have only 1 user in your context, you will
        add 1 server on every iteration. So you will have more
        and more servers and will be able to measure the
        performance of the "nova list" command depending on
        the number of servers owned by users.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param detailed: True if the server listing should contain
                         detailed information about all of them
        :param kwargs: Optional additional arguments for server creation
        """
        server = self._boot_server(image, flavor, **kwargs)
        msg = ("Servers isn't created")
        self.assertTrue(server, err_msg=msg)
        pool_list = self._list_servers(detailed)
        msg = ("Server not included into list of available servers\n"
               "Booted server: {}\n"
               "Pool of servers: {}").format(server, pool_list)
        self.assertIn(server, pool_list, err_msg=msg)


@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.list_servers")
class ListServers(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, detailed=True):
        """List all servers.

        This simple scenario test the nova list command by listing
        all the servers.

        :param detailed: True if detailed information about servers
                         should be listed
        """
        self._list_servers(detailed)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_and_delete_server")
class BootAndDeleteServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, min_sleep=0, max_sleep=0,
            force_delete=False, **kwargs):
        """Boot and delete a server.

        Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
        to simulate a pause between volume creation and deletion
        (of random duration from [min_sleep, max_sleep]).

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param min_sleep: Minimum sleep time in seconds (non-negative)
        :param max_sleep: Maximum sleep time in seconds (non-negative)
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for server creation
        """
        server = self._boot_server(image, flavor, **kwargs)
        self.sleep_between(min_sleep, max_sleep)
        self._delete_server(server, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_and_delete_multiple_servers")
class BootAndDeleteMultipleServers(utils.NovaScenario,
                                   cinder_utils.CinderScenario):

    def run(self, image, flavor, count=2, min_sleep=0,
            max_sleep=0, force_delete=False, **kwargs):
        """Boot multiple servers in a single request and delete them.

        Deletion is done in parallel with one request per server, not
        with a single request for all servers.

        :param image: The image to boot from
        :param flavor: Flavor used to boot instance
        :param count: Number of instances to boot
        :param min_sleep: Minimum sleep time in seconds (non-negative)
        :param max_sleep: Maximum sleep time in seconds (non-negative)
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for instance creation
        """
        servers = self._boot_servers(image, flavor, 1, instances_amount=count,
                                     **kwargs)
        self.sleep_between(min_sleep, max_sleep)
        self._delete_servers(servers, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image", validate_disk=False)
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"]},
                    name="NovaServers.boot_server_from_volume_and_delete")
class BootServerFromVolumeAndDelete(utils.NovaScenario,
                                    cinder_utils.CinderScenario):

    def run(self, image, flavor, volume_size, volume_type=None,
            min_sleep=0, max_sleep=0, force_delete=False, **kwargs):
        """Boot a server from volume and then delete it.

        The scenario first creates a volume and then a server.
        Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
        to simulate a pause between volume creation and deletion
        (of random duration from [min_sleep, max_sleep]).

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param volume_size: volume size (in GB)
        :param volume_type: specifies volume type when there are
                            multiple backends
        :param min_sleep: Minimum sleep time in seconds (non-negative)
        :param max_sleep: Maximum sleep time in seconds (non-negative)
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for server creation
        """
        volume = self._create_volume(volume_size, imageRef=image,
                                     volume_type=volume_type)
        # Boot from the volume: image=None plus a vda block-device mapping
        # that deletes the volume on instance termination (":::1").
        block_device_mapping = {"vda": "%s:::1" % volume.id}
        server = self._boot_server(None, flavor,
                                   block_device_mapping=block_device_mapping,
                                   **kwargs)
        self.sleep_between(min_sleep, max_sleep)
        self._delete_server(server, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_and_bounce_server")
class BootAndBounceServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, force_delete=False, actions=None, **kwargs):
        """Boot a server and run specified actions against it.

        Actions should be passed into the actions parameter. Available
        actions are 'hard_reboot', 'soft_reboot', 'stop_start',
        'rescue_unrescue', 'pause_unpause', 'suspend_resume',
        'lock_unlock' and 'shelve_unshelve'.
        Delete server after all actions were completed.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param force_delete: True if force_delete should be used
        :param actions: list of action dictionaries, where each action
                        dictionary speicifes an action to be performed
                        in the following format:
                        {"action_name": <no_of_iterations>}
        :param kwargs: Optional additional arguments for server creation
        """
        action_builder = self._bind_actions()
        actions = actions or []
        # Validate the action list against the builder's JSON schema before
        # booting anything, so bad configs fail fast.
        try:
            action_builder.validate(actions)
        except jsonschema.exceptions.ValidationError as error:
            raise rally_exceptions.InvalidConfigException(
                "Invalid server actions configuration \'%(actions)s\' due to: "
                "%(error)s" % {"actions": str(actions), "error": str(error)})
        server = self._boot_server(image, flavor, **kwargs)
        for action in action_builder.build_actions(actions, server):
            action()
        self._delete_server(server, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_lock_unlock_and_delete")
class BootLockUnlockAndDelete(utils.NovaScenario,
                              cinder_utils.CinderScenario):

    def run(self, image, flavor, min_sleep=0,
            max_sleep=0, force_delete=False, **kwargs):
        """Boot a server, lock it, then unlock and delete it.

        Optional 'min_sleep' and 'max_sleep' parameters allow the
        scenario to simulate a pause between locking and unlocking the
        server (of random duration from min_sleep to max_sleep).

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param min_sleep: Minimum sleep time between locking and unlocking
                          in seconds
        :param max_sleep: Maximum sleep time between locking and unlocking
                          in seconds
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for server creation
        """
        server = self._boot_server(image, flavor, **kwargs)
        self._lock_server(server)
        self.sleep_between(min_sleep, max_sleep)
        self._unlock_server(server)
        self._delete_server(server, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.GLANCE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "glance"]},
                    name="NovaServers.snapshot_server")
class SnapshotServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, force_delete=False, **kwargs):
        """Boot a server, make its snapshot and delete both.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for server creation
        """
        server = self._boot_server(image, flavor, **kwargs)
        # Snapshot the booted server, then prove the snapshot is bootable
        # by starting a second server from it.
        image = self._create_image(server)
        self._delete_server(server, force=force_delete)
        server = self._boot_server(image.id, flavor, **kwargs)
        self._delete_server(server, force=force_delete)
        self._delete_image(image)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.boot_server")
class BootServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, auto_assign_nic=False, **kwargs):
        """Boot a server.

        Assumes that cleanup is done elsewhere.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param auto_assign_nic: True if NICs should be assigned
        :param kwargs: Optional additional arguments for server creation
        """
        self._boot_server(image, flavor,
                          auto_assign_nic=auto_assign_nic, **kwargs)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image", validate_disk=False)
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"]},
                    name="NovaServers.boot_server_from_volume")
class BootServerFromVolume(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, volume_size,
            volume_type=None, auto_assign_nic=False, **kwargs):
        """Boot a server from volume.

        The scenario first creates a volume and then a server.
        Assumes that cleanup is done elsewhere.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param volume_size: volume size (in GB)
        :param volume_type: specifies volume type when there are
                            multiple backends
        :param auto_assign_nic: True if NICs should be assigned
        :param kwargs: Optional additional arguments for server creation
        """
        volume = self._create_volume(volume_size, imageRef=image,
                                     volume_type=volume_type)
        block_device_mapping = {"vda": "%s:::1" % volume.id}
        self._boot_server(None, flavor, auto_assign_nic=auto_assign_nic,
                          block_device_mapping=block_device_mapping,
                          **kwargs)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"},
               to_flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.resize_server")
class ResizeServer(utils.NovaScenario, cinder_utils.CinderScenario):

    def run(self, image, flavor, to_flavor, force_delete=False, **kwargs):
        """Boot a server, then resize and delete it.

        This test will confirm the resize by default, or revert the
        resize if confirm is set to false.

        :param image: image to be used to boot an instance
        :param flavor: flavor to be used to boot an instance
        :param to_flavor: flavor to be used to resize the booted instance
        :param force_delete: True if force_delete should be used
        :param kwargs: Optional additional arguments for server creation
        """
        server = self._boot_server(image, flavor, **kwargs)
        self._resize(server, to_flavor)
        # by default we confirm
        # NOTE(review): "confirm" is read from kwargs, which is also
        # forwarded verbatim to _boot_server above — confirm that the boot
        # call tolerates the extra key.
        confirm = kwargs.get("confirm", True)
        if confirm:
            self._resize_confirm(server)
        else:
            self._resize_revert(server)
        self._delete_server(server, force=force_delete)


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"},
               to_flavor={"type": "nova_flavor"})
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]},
                    name="NovaServers.resize_shutoff_server")
class ResizeShutoffServer(utils.NovaScenario):

    def run(self, image, flavor, to_flavor, confirm=True,
            force_delete=False, **kwargs):
        """Boot a server and stop it, then resize and delete it.

        This test will confirm the resize by default, or revert the
        resize if confirm is set to false.
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param to_flavor: flavor to be used to resize the booted instance :param confirm: True if need to confirm resize else revert resize :param force_delete: True if force_delete should be used :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._stop_server(server) self._resize(server, to_flavor) if confirm: self._resize_confirm(server, "SHUTOFF") else: self._resize_revert(server, "SHUTOFF") self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}, to_flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["cinder", "nova"]}, name=("NovaServers.boot_server" "_attach_created_volume_and_resize")) class BootServerAttachCreatedVolumeAndResize(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, to_flavor, volume_size, min_sleep=0, max_sleep=0, force_delete=False, confirm=True, do_delete=True, boot_server_kwargs=None, create_volume_kwargs=None): """Create a VM from image, attach a volume to it and resize. Simple test to create a VM and attach a volume, then resize the VM, detach the volume then delete volume and VM. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between attaching a volume and running resize (of random duration from range [min_sleep, max_sleep]). 
:param image: Glance image name to use for the VM :param flavor: VM flavor name :param to_flavor: flavor to be used to resize the booted instance :param volume_size: volume size (in GB) :param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) :param force_delete: True if force_delete should be used :param confirm: True if need to confirm resize else revert resize :param do_delete: True if resources needs to be deleted explicitly else use rally cleanup to remove resources :param boot_server_kwargs: optional arguments for VM creation :param create_volume_kwargs: optional arguments for volume creation """ boot_server_kwargs = boot_server_kwargs or {} create_volume_kwargs = create_volume_kwargs or {} server = self._boot_server(image, flavor, **boot_server_kwargs) volume = self._create_volume(volume_size, **create_volume_kwargs) attachment = self._attach_volume(server, volume) self.sleep_between(min_sleep, max_sleep) self._resize(server, to_flavor) if confirm: self._resize_confirm(server) else: self._resize_revert(server) if do_delete: self._detach_volume(server, volume, attachment) self._delete_volume(volume) self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}, to_flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image", validate_disk=False) @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova", "cinder"]}, name="NovaServers.boot_server_from_volume_and_resize") class BootServerFromVolumeAndResize(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, to_flavor, volume_size, min_sleep=0, max_sleep=0, force_delete=False, confirm=True, do_delete=True, boot_server_kwargs=None, create_volume_kwargs=None): """Boot a server from volume, then resize and delete it. 
The scenario first creates a volume and then a server. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between volume creation and deletion (of random duration from [min_sleep, max_sleep]). This test will confirm the resize by default, or revert the resize if confirm is set to false. :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param to_flavor: flavor to be used to resize the booted instance :param volume_size: volume size (in GB) :param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) :param force_delete: True if force_delete should be used :param confirm: True if need to confirm resize else revert resize :param do_delete: True if resources needs to be deleted explicitly else use rally cleanup to remove resources :param boot_server_kwargs: optional arguments for VM creation :param create_volume_kwargs: optional arguments for volume creation """ boot_server_kwargs = boot_server_kwargs or {} create_volume_kwargs = create_volume_kwargs or {} if boot_server_kwargs.get("block_device_mapping"): LOG.warning("Using already existing volume is not permitted.") volume = self._create_volume(volume_size, imageRef=image, **create_volume_kwargs) boot_server_kwargs["block_device_mapping"] = { "vda": "%s:::1" % volume.id} server = self._boot_server(None, flavor, **boot_server_kwargs) self.sleep_between(min_sleep, max_sleep) self._resize(server, to_flavor) if confirm: self._resize_confirm(server) else: self._resize_revert(server) if do_delete: self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.suspend_and_resume_server") class 
SuspendAndResumeServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, force_delete=False, **kwargs): """Create a server, suspend, resume and then delete it :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param force_delete: True if force_delete should be used :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._suspend_server(server) self._resume_server(server) self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.pause_and_unpause_server") class PauseAndUnpauseServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, force_delete=False, **kwargs): """Create a server, pause, unpause and then delete it :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param force_delete: True if force_delete should be used :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._pause_server(server) self._unpause_server(server) self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.shelve_and_unshelve_server") class ShelveAndUnshelveServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, force_delete=False, **kwargs): """Create a server, shelve, unshelve and then delete it 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param force_delete: True if force_delete should be used :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._shelve_server(server) self._unshelve_server(server) self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_live_migrate_server") class BootAndLiveMigrateServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, block_migration=False, disk_over_commit=False, min_sleep=0, max_sleep=0, **kwargs): """Live Migrate a server. This scenario launches a VM on a compute node available in the availability zone and then migrates the VM to another compute node on the same availability zone. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between VM booting and running live migration (of random duration from range [min_sleep, max_sleep]). 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param block_migration: Specifies the migration type :param disk_over_commit: Specifies whether to allow overcommit on migrated instance or not :param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self.sleep_between(min_sleep, max_sleep) new_host = self._find_host_to_migrate(server) self._live_migrate(server, new_host, block_migration, disk_over_commit) self._delete_server(server) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image", validate_disk=False) @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"cleanup": ["nova", "cinder"]}, name=("NovaServers.boot_server_from_volume" "_and_live_migrate")) class BootServerFromVolumeAndLiveMigrate(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, volume_size, volume_type=None, block_migration=False, disk_over_commit=False, force_delete=False, min_sleep=0, max_sleep=0, **kwargs): """Boot a server from volume and then migrate it. The scenario first creates a volume and a server booted from the volume on a compute node available in the availability zone and then migrates the VM to another compute node on the same availability zone. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between VM booting and running live migration (of random duration from range [min_sleep, max_sleep]). 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param volume_size: volume size (in GB) :param volume_type: specifies volume type when there are multiple backends :param block_migration: Specifies the migration type :param disk_over_commit: Specifies whether to allow overcommit on migrated instance or not :param force_delete: True if force_delete should be used :param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) :param kwargs: Optional additional arguments for server creation """ volume = self._create_volume(volume_size, imageRef=image, volume_type=volume_type) block_device_mapping = {"vda": "%s:::1" % volume.id} server = self._boot_server(None, flavor, block_device_mapping=block_device_mapping, **kwargs) self.sleep_between(min_sleep, max_sleep) new_host = self._find_host_to_migrate(server) self._live_migrate(server, new_host, block_migration, disk_over_commit) self._delete_server(server, force=force_delete) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"cleanup": ["cinder", "nova"]}, name=("NovaServers.boot_server_attach_created_volume" "_and_live_migrate")) class BootServerAttachCreatedVolumeAndLiveMigrate(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, size, block_migration=False, disk_over_commit=False, boot_server_kwargs=None, create_volume_kwargs=None, min_sleep=0, max_sleep=0): """Create a VM, attach a volume to it and live migrate. Simple test to create a VM and attach a volume, then migrate the VM, detach the volume and delete volume/VM. 
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between attaching a volume and running live migration (of random duration from range [min_sleep, max_sleep]). :param image: Glance image name to use for the VM :param flavor: VM flavor name :param size: volume size (in GB) :param block_migration: Specifies the migration type :param disk_over_commit: Specifies whether to allow overcommit on migrated instance or not :param boot_server_kwargs: optional arguments for VM creation :param create_volume_kwargs: optional arguments for volume creation :param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) """ if boot_server_kwargs is None: boot_server_kwargs = {} if create_volume_kwargs is None: create_volume_kwargs = {} server = self._boot_server(image, flavor, **boot_server_kwargs) volume = self._create_volume(size, **create_volume_kwargs) attachment = self._attach_volume(server, volume) self.sleep_between(min_sleep, max_sleep) new_host = self._find_host_to_migrate(server) self._live_migrate(server, new_host, block_migration, disk_over_commit) self._detach_volume(server, volume, attachment) self._delete_volume(volume) self._delete_server(server) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_migrate_server") class BootAndMigrateServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, **kwargs): """Migrate a server. This scenario launches a VM on a compute node available in the availability zone, and then migrates the VM to another compute node on the same availability zone. 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._migrate(server) # NOTE(wtakase): This is required because cold migration and resize # share same code path. confirm = kwargs.get("confirm", True) if confirm: self._resize_confirm(server, status="ACTIVE") else: self._resize_revert(server, status="ACTIVE") self._delete_server(server) @types.convert(from_image={"type": "glance_image"}, to_image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "from_image") @validation.image_valid_on_flavor("flavor", "to_image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_rebuild_server") class BootAndRebuildServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, from_image, to_image, flavor, **kwargs): """Rebuild a server. This scenario launches a VM, then rebuilds that VM with a different image. 
:param from_image: image to be used to boot an instance :param to_image: image to be used to rebuild the instance :param flavor: flavor to be used to boot an instance :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(from_image, flavor, **kwargs) self._rebuild_server(server, to_image) self._delete_server(server) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @validation.required_contexts("network") @scenario.configure(context={"cleanup": ["nova", "neutron.floatingip"]}, name="NovaServers.boot_and_associate_floating_ip") class BootAndAssociateFloatingIp(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, **kwargs): """Boot a server and associate a floating IP to it. :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) address = network_wrapper.wrap(self.clients, self).create_floating_ip( tenant_id=server.tenant_id) self._associate_floating_ip(server, address["ip"]) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_show_server") class BootAndShowServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, **kwargs): """Show server details. This simple scenario tests the nova show command by retrieving the server details. 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param kwargs: Optional additional arguments for server creation :returns: Server details """ server = self._boot_server(image, flavor, **kwargs) self._show_server(server) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_get_console_output") class BootAndGetConsoleOutput(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, length=None, **kwargs): """Get text console output from server. This simple scenario tests the nova console-log command by retrieving the text console log output. :param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param length: The number of tail log lines you would like to retrieve. None (default value) or -1 means unlimited length. :param kwargs: Optional additional arguments for server creation :returns: Text console log output for server """ server = self._boot_server(image, flavor, **kwargs) self._get_server_console_output(server, length) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova"]}, name="NovaServers.boot_and_update_server") class BootAndUpdateServer(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, description=None, **kwargs): """Boot a server, then update its name and description. The scenario first creates a server, then update it. Assumes that cleanup is done elsewhere. 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param description: update the server description :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) self._update_server(server, description) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova", "cinder"]}, name="NovaServers.boot_server_from_volume_snapshot") class BootServerFromVolumeSnapshot(utils.NovaScenario, cinder_utils.CinderScenario): def run(self, image, flavor, volume_size, volume_type=None, auto_assign_nic=False, **kwargs): """Boot a server from a snapshot. The scenario first creates a volume and creates a snapshot from this volume, then boots a server from the created snapshot. Assumes that cleanup is done elsewhere. 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param volume_size: volume size (in GB) :param volume_type: specifies volume type when there are multiple backends :param auto_assign_nic: True if NICs should be assigned :param kwargs: Optional additional arguments for server creation """ volume = self._create_volume(volume_size, imageRef=image, volume_type=volume_type) snapshot = self._create_snapshot(volume.id, False) block_device_mapping = {"vda": "%s:snap::1" % snapshot.id} self._boot_server(None, flavor, auto_assign_nic=auto_assign_nic, block_device_mapping=block_device_mapping, **kwargs) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @validation.required_contexts("network") @scenario.configure(context={"cleanup": ["nova", "neutron.floatingip"]}, name="NovaServers.boot_server_associate_and" "_dissociate_floating_ip") class BootServerAssociateAndDissociateFloatingIP(utils.NovaScenario): """"Benchmark scenarios for Nova FloatingIp API.""" def run(self, image, flavor, **kwargs): """Boot a server associate and dissociate a floating IP from it. The scenario first boot a server and create a floating IP. then associate the floating IP to the server.Finally dissociate the floating IP. 
:param image: image to be used to boot an instance :param flavor: flavor to be used to boot an instance :param kwargs: Optional additional arguments for server creation """ server = self._boot_server(image, flavor, **kwargs) address = network_wrapper.wrap(self.clients, self).create_floating_ip( tenant_id=server.tenant_id) self._associate_floating_ip(server, address["ip"]) self._dissociate_floating_ip(server, address["ip"]) rally-0.9.1/rally/plugins/openstack/scenarios/nova/flavors.py0000664000567000056710000001437213073417720025572 0ustar jenkinsjenkins00000000000000# Copyright 2015: Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.i18n import _LW from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.nova import utils from rally.task import validation """Scenarios for Nova flavors.""" LOG = logging.getLogger(__name__) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(name="NovaFlavors.list_flavors") class ListFlavors(utils.NovaScenario): def run(self, detailed=True, **kwargs): """List all flavors. Measure the "nova flavor-list" command performance. 
:param detailed: True if the flavor listing should contain detailed information :param kwargs: Optional additional arguments for flavor listing """ self._list_flavors(detailed, **kwargs) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_and_list_flavor_access") class CreateAndListFlavorAccess(utils.NovaScenario): def run(self, ram, vcpus, disk, **kwargs): """Create a non-public flavor and list its access rules :param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param kwargs: Optional additional arguments for flavor creation """ # NOTE(pirsriva): access rules can be listed # only for non-public flavors if kwargs.get("is_public", False): LOG.warning(_LW("is_public cannot be set to True for listing " "flavor access rules. Setting is_public to False")) kwargs["is_public"] = False flavor = self._create_flavor(ram, vcpus, disk, **kwargs) self.assertTrue(flavor) self._list_flavor_access(flavor.id) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_flavor_and_add_tenant_access") class CreateFlavorAndAddTenantAccess(utils.NovaScenario): def run(self, ram, vcpus, disk, **kwargs): """Create a flavor and Add flavor access for the given tenant. 
:param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param kwargs: Optional additional arguments for flavor creation """ flavor = self._create_flavor(ram, vcpus, disk, **kwargs) self.assertTrue(flavor) self._add_tenant_access(flavor.id, self.context["tenant"]["id"]) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_flavor") class CreateFlavor(utils.NovaScenario): def run(self, ram, vcpus, disk, **kwargs): """Create a flavor. :param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param kwargs: Optional additional arguments for flavor creation """ self._create_flavor(ram, vcpus, disk, **kwargs) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_and_get_flavor") class CreateAndGetFlavor(utils.NovaScenario): """Scenario for create and get flavor.""" def run(self, ram, vcpus, disk, **kwargs): """Create flavor and get detailed information of the flavor. :param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param kwargs: Optional additional arguments for flavor creation """ flavor = self._create_flavor(ram, vcpus, disk, **kwargs) self._get_flavor(flavor.id) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_and_delete_flavor") class CreateAndDeleteFlavor(utils.NovaScenario): def run(self, ram, vcpus, disk, **kwargs): """Create flavor and delete the flavor. 
:param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param kwargs: Optional additional arguments for flavor creation """ flavor = self._create_flavor(ram, vcpus, disk, **kwargs) self._delete_flavor(flavor.id) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["nova"]}, name="NovaFlavors.create_flavor_and_set_keys") class CreateFlavorAndSetKeys(utils.NovaScenario): def run(self, ram, vcpus, disk, extra_specs, **kwargs): """Create flavor and set keys to the flavor. Measure the "nova flavor-key" command performance. the scenario first create a flavor,then add the extra specs to it. :param ram: Memory in MB for the flavor :param vcpus: Number of VCPUs for the flavor :param disk: Size of local disk in GB :param extra_specs: additional arguments for flavor set keys :param kwargs: Optional additional arguments for flavor creation """ flavor = self._create_flavor(ram, vcpus, disk, **kwargs) self._set_flavor_keys(flavor, extra_specs) rally-0.9.1/rally/plugins/openstack/scenarios/glance/0000775000567000056710000000000013073420067024021 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/glance/__init__.py0000664000567000056710000000000013073417716026127 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/glance/images.py0000664000567000056710000001467213073417720025654 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.glance import utils from rally.plugins.openstack.scenarios.nova import utils as nova_utils from rally.task import types from rally.task import validation LOG = logging.getLogger(__name__) """Scenarios for Glance images.""" @types.convert(image_location={"type": "path_or_url"}, kwargs={"type": "glance_image_args"}) @validation.required_services(consts.Service.GLANCE) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["glance"]}, name="GlanceImages.create_and_list_image") class CreateAndListImage(utils.GlanceScenario, nova_utils.NovaScenario): def run(self, container_format, image_location, disk_format, **kwargs): """Create an image and then list all images. Measure the "glance image-list" command performance. If you have only 1 user in your context, you will add 1 image on every iteration. So you will have more and more images and will be able to measure the performance of the "glance image-list" command depending on the number of images owned by users. :param container_format: container format of image. Acceptable formats: ami, ari, aki, bare, and ovf :param image_location: image file location :param disk_format: disk format of image. 
Acceptable formats: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi, and iso :param kwargs: optional parameters to create image """ image = self._create_image(container_format, image_location, disk_format, **kwargs) self.assertTrue(image) image_list = self._list_images() self.assertIn(image.id, [i.id for i in image_list]) @validation.required_services(consts.Service.GLANCE) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["glance"]}, name="GlanceImages.list_images") class ListImages(utils.GlanceScenario, nova_utils.NovaScenario): def run(self): """List all images. This simple scenario tests the glance image-list command by listing all the images. Suppose if we have 2 users in context and each has 2 images uploaded for them we will be able to test the performance of glance image-list command in this case. """ self._list_images() @types.convert(image_location={"type": "path_or_url"}, kwargs={"type": "glance_image_args"}) @validation.required_services(consts.Service.GLANCE) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["glance"]}, name="GlanceImages.create_and_delete_image") class CreateAndDeleteImage(utils.GlanceScenario, nova_utils.NovaScenario): def run(self, container_format, image_location, disk_format, **kwargs): """Create and then delete an image. :param container_format: container format of image. Acceptable formats: ami, ari, aki, bare, and ovf :param image_location: image file location :param disk_format: disk format of image. 
Acceptable formats: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi, and iso :param kwargs: optional parameters to create image """ image = self._create_image(container_format, image_location, disk_format, **kwargs) self._delete_image(image) @types.convert(flavor={"type": "nova_flavor"}, image_location={"type": "path_or_url"}, kwargs={"type": "glance_image_args"}) @validation.flavor_exists("flavor") @validation.required_services(consts.Service.GLANCE, consts.Service.NOVA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["glance", "nova"]}, name="GlanceImages.create_image_and_boot_instances") class CreateImageAndBootInstances(utils.GlanceScenario, nova_utils.NovaScenario): def run(self, container_format, image_location, disk_format, flavor, number_instances, create_image_kwargs=None, boot_server_kwargs=None, **kwargs): """Create an image and boot several instances from it. :param container_format: container format of image. Acceptable formats: ami, ari, aki, bare, and ovf :param image_location: image file location :param disk_format: disk format of image. 
Acceptable formats: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi, and iso :param flavor: Nova flavor to be used to launch an instance :param number_instances: number of Nova servers to boot :param create_image_kwargs: optional parameters to create image :param boot_server_kwargs: optional parameters to boot server :param kwargs: optional parameters to create server (deprecated) """ create_image_kwargs = create_image_kwargs or {} boot_server_kwargs = boot_server_kwargs or kwargs or {} if kwargs: LOG.warning("'kwargs' is deprecated in Rally v0.8.0: Use " "'boot_server_kwargs' for additional parameters when " "booting servers.") image = self._create_image(container_format, image_location, disk_format, **create_image_kwargs) self._boot_servers(image.id, flavor, number_instances, **boot_server_kwargs) rally-0.9.1/rally/plugins/openstack/scenarios/glance/utils.py0000664000567000056710000000622013073417720025535 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from rally.plugins.openstack import scenario from rally.plugins.openstack.wrappers import glance as glance_wrapper from rally.task import atomic from rally.task import utils GLANCE_BENCHMARK_OPTS = [ cfg.FloatOpt("glance_image_delete_timeout", default=120.0, help="Time to wait for glance image to be deleted."), cfg.FloatOpt("glance_image_delete_poll_interval", default=1.0, help="Interval between checks when waiting for image " "deletion.") ] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(GLANCE_BENCHMARK_OPTS, group=benchmark_group) class GlanceScenario(scenario.OpenStackScenario): """Base class for Glance scenarios with basic atomic actions.""" @atomic.action_timer("glance.list_images") def _list_images(self): """Returns user images list.""" return list(self.clients("glance").images.list()) @atomic.action_timer("glance.create_image") def _create_image(self, container_format, image_location, disk_format, **kwargs): """Create a new image. :param container_format: container format of image. Acceptable formats: ami, ari, aki, bare, and ovf :param image_location: image file location :param disk_format: disk format of image. Acceptable formats: ami, ari, aki, vhd, vmdk, raw, qcow2, vdi, and iso :param kwargs: optional parameters to create image :returns: image object """ if not kwargs.get("name"): kwargs["name"] = self.generate_random_name() client = glance_wrapper.wrap(self._clients.glance, self) return client.create_image(container_format, image_location, disk_format, **kwargs) @atomic.action_timer("glance.delete_image") def _delete_image(self, image): """Deletes given image. Returns when the image is actually deleted. 
:param image: Image object """ self.clients("glance").images.delete(image.id) wrapper = glance_wrapper.wrap(self._clients.glance, self) utils.wait_for_status( image, ["deleted", "pending_delete"], check_deletion=True, update_resource=wrapper.get_image, timeout=CONF.benchmark.glance_image_delete_timeout, check_interval=CONF.benchmark.glance_image_delete_poll_interval) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/0000775000567000056710000000000013073420067024720 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/__init__.py0000664000567000056710000000000013073417716027026 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/events.py0000664000567000056710000000752413073417720026610 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Scenarios for Ceilometer Events API. """ from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as cutils from rally.plugins.openstack.scenarios.keystone import basic as kbasic from rally.task import validation # NOTE(idegtiarov): to work with event we need to create it, there are # no other way except emit suitable notification from one of services, # for example create new user in keystone. 
@validation.required_services(consts.Service.CEILOMETER, consts.Service.KEYSTONE) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"], "cleanup": ["ceilometer"]}, name="CeilometerEvents.create_user_and_list_events") class CeilometerEventsCreateUserAndListEvents(cutils.CeilometerScenario, kbasic.KeystoneBasic): def run(self): """Create user and fetch all events. This scenario creates user to store new event and fetches list of all events using GET /v2/events. """ self.admin_keystone.create_user() events = self._list_events() msg = ("Events list is empty, but it should include at least one " "event about user creation") self.assertTrue(events, msg) @validation.required_services(consts.Service.CEILOMETER, consts.Service.KEYSTONE) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"], "cleanup": ["ceilometer"]}, name="CeilometerEvents.create_user_and_list_event_types") class CeilometerEventsCreateUserAndListEventTypes(cutils.CeilometerScenario, kbasic.KeystoneBasic): def run(self): """Create user and fetch all event types. This scenario creates user to store new event and fetches list of all events types using GET /v2/event_types. """ self.admin_keystone.create_user() event_types = self._list_event_types() msg = ("Event types list is empty, but it should include at least one" " type about user creation") self.assertTrue(event_types, msg) @validation.required_services(consts.Service.CEILOMETER, consts.Service.KEYSTONE) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"], "cleanup": ["ceilometer"]}, name="CeilometerEvents.create_user_and_get_event") class CeilometerEventsCreateUserAndGetEvent(cutils.CeilometerScenario, kbasic.KeystoneBasic): def run(self): """Create user and gets event. This scenario creates user to store new event and fetches one event using GET /v2/events/. 
""" self.admin_keystone.create_user() events = self._list_events() msg = ("Events list is empty, but it should include at least one " "event about user creation") self.assertTrue(events, msg) self._get_event(event_id=events[0].message_id) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/queries.py0000664000567000056710000001114713073417720026755 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils from rally.task import validation """Scenarios for Ceilometer Queries API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerQueries.create_and_query_alarms") class CeilometerQueriesCreateAndQueryAlarms(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, filter=None, orderby=None, limit=None, **kwargs): """Create an alarm and then query it with specific parameters. This scenario tests POST /v2/query/alarms An alarm is first created and then fetched using the input query. 
:param meter_name: specifies meter name of alarm :param threshold: specifies alarm threshold :param filter: optional filter query dictionary :param orderby: optional param for specifying ordering of results :param limit: optional param for maximum number of results returned :param kwargs: optional parameters for alarm creation """ if filter: filter = json.dumps(filter) self._create_alarm(meter_name, threshold, kwargs) self._query_alarms(filter, orderby, limit) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerQueries.create_and_query_alarm_history") class CeilometerQueriesCreateAndQueryAlarmHistory(ceiloutils .CeilometerScenario): def run(self, meter_name, threshold, orderby=None, limit=None, **kwargs): """Create an alarm and then query for its history. This scenario tests POST /v2/query/alarms/history An alarm is first created and then its alarm_id is used to fetch the history of that specific alarm. :param meter_name: specifies meter name of alarm :param threshold: specifies alarm threshold :param orderby: optional param for specifying ordering of results :param limit: optional param for maximum number of results returned :param kwargs: optional parameters for alarm creation """ alarm = self._create_alarm(meter_name, threshold, kwargs) alarm_filter = json.dumps({"=": {"alarm_id": alarm.alarm_id}}) self._query_alarm_history(alarm_filter, orderby, limit) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerQueries.create_and_query_samples") class CeilometerQueriesCreateAndQuerySamples(ceiloutils.CeilometerScenario): def run(self, counter_name, counter_type, counter_unit, counter_volume, resource_id, filter=None, orderby=None, limit=None, **kwargs): """Create a sample and then query it with specific parameters. 
This scenario tests POST /v2/query/samples A sample is first created and then fetched using the input query. :param counter_name: specifies name of the counter :param counter_type: specifies type of the counter :param counter_unit: specifies unit of the counter :param counter_volume: specifies volume of the counter :param resource_id: specifies resource id for the sample created :param filter: optional filter query dictionary :param orderby: optional param for specifying ordering of results :param limit: optional param for maximum number of results returned :param kwargs: parameters for sample creation """ self._create_sample(counter_name, counter_type, counter_unit, counter_volume, resource_id, **kwargs) if filter: filter = json.dumps(filter) self._query_samples(filter, orderby, limit) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/stats.py0000664000567000056710000000577713073417720026452 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils from rally.task import validation """Scenarios for Ceilometer Stats API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(name="CeilometerStats.create_meter_and_get_stats") class CreateMeterAndGetStats(utils.CeilometerScenario): @logging.log_deprecated("Use 'get_stats' method, now samples are created" "in context", "0.1.2") def run(self, **kwargs): """Create a meter and fetch its statistics. Meter is first created and then statistics is fetched for the same using GET /v2/meters/(meter_name)/statistics. :param kwargs: contains optional arguments to create a meter """ meter = self._create_meter(**kwargs) self._get_stats(meter.counter_name) @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerStats.get_stats") class GetStats(utils.CeilometerScenario): def run(self, meter_name, filter_by_user_id=False, filter_by_project_id=False, filter_by_resource_id=False, metadata_query=None, period=None, groupby=None, aggregates=None): """Fetch statistics for certain meter. Statistics is fetched for the using GET /v2/meters/(meter_name)/statistics. 
:param meter_name: meter to take statistic for :param filter_by_user_id: flag for query by user_id :param filter_by_project_id: flag for query by project_id :param filter_by_resource_id: flag for query by resource_id :param metadata_query: dict with metadata fields and values for query :param period: the length of the time range covered by these stats :param groupby: the fields used to group the samples :param aggregates: name of function for samples aggregation :returns: list of statistics data """ query = self._make_general_query(filter_by_project_id, filter_by_user_id, filter_by_resource_id, metadata_query) self._get_stats(meter_name, query, period, groupby, aggregates) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/meters.py0000664000567000056710000000531113073417720026573 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils from rally.task import validation """Scenarios for Ceilometer Meters API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerMeters.list_meters") class ListMeters(ceiloutils.CeilometerScenario): def run(self, metadata_query=None, limit=None): """Check all available queries for list resource request. 
:param metadata_query: dict with metadata fields and values :param limit: limit of meters in response """ scenario = ListMatchedMeters(self.context) scenario.run(filter_by_project_id=True) scenario.run(filter_by_user_id=True) scenario.run(filter_by_resource_id=True) if metadata_query: scenario.run(metadata_query=metadata_query) if limit: scenario.run(limit=limit) @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerMeters.list_matched_meters") class ListMatchedMeters(ceiloutils.CeilometerScenario): def run(self, filter_by_user_id=False, filter_by_project_id=False, filter_by_resource_id=False, metadata_query=None, limit=None): """Get meters that matched fields from context and args. :param filter_by_user_id: flag for query by user_id :param filter_by_project_id: flag for query by project_id :param filter_by_resource_id: flag for query by resource_id :param metadata_query: dict with metadata fields and values for query :param limit: count of resources in response """ query = self._make_general_query(filter_by_project_id, filter_by_user_id, filter_by_resource_id, metadata_query) self._list_meters(query, limit) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/alarms.py0000664000567000056710000001755713073417720026572 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils from rally.task import validation """Benchmark scenarios for Ceilometer Alarms API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_alarm") class CreateAlarm(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, **kwargs): """Create an alarm. This scenarios test POST /v2/alarms. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc that may be passed while creating an alarm. :param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: specifies optional arguments for alarm creation. """ self._create_alarm(meter_name, threshold, kwargs) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(name="CeilometerAlarms.list_alarms") class ListAlarms(ceiloutils.CeilometerScenario): def run(self): """Fetch all alarms. This scenario fetches list of all alarms using GET /v2/alarms. """ self._list_alarms() @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_and_list_alarm") class CreateAndListAlarm(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, **kwargs): """Create and get the newly created alarm. This scenarios test GET /v2/alarms/(alarm_id) Initially alarm is created and then the created alarm is fetched using its alarm_id. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc. that may be passed while creating an alarm. 
:param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: specifies optional arguments for alarm creation. """ alarm = self._create_alarm(meter_name, threshold, kwargs) self._list_alarms(alarm.alarm_id) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_and_get_alarm") class CreateAndGetAlarm(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, **kwargs): """Create and get the newly created alarm. These scenarios test GET /v2/alarms/(alarm_id) Initially an alarm is created and then its detailed information is fetched using its alarm_id. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc. that may be passed while creating an alarm. :param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: specifies optional arguments for alarm creation. """ alarm = self._create_alarm(meter_name, threshold, kwargs) self._get_alarm(alarm.alarm_id) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_and_update_alarm") class CreateAndUpdateAlarm(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, **kwargs): """Create and update the newly created alarm. This scenarios test PUT /v2/alarms/(alarm_id) Initially alarm is created and then the created alarm is updated using its alarm_id. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc that may be passed while alarm creation. 
:param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: specifies optional arguments for alarm creation. """ alarm = self._create_alarm(meter_name, threshold, kwargs) alarm_dict_diff = {"description": "Changed Test Description"} self._update_alarm(alarm.alarm_id, alarm_dict_diff) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_and_delete_alarm") class CreateAndDeleteAlarm(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, **kwargs): """Create and delete the newly created alarm. This scenarios test DELETE /v2/alarms/(alarm_id) Initially alarm is created and then the created alarm is deleted using its alarm_id. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc that may be passed while alarm creation. :param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: specifies optional arguments for alarm creation. """ alarm = self._create_alarm(meter_name, threshold, kwargs) self._delete_alarm(alarm.alarm_id) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["ceilometer"]}, name="CeilometerAlarms.create_alarm_and_get_history") class CreateAlarmAndGetHistory(ceiloutils.CeilometerScenario): def run(self, meter_name, threshold, state, timeout=60, **kwargs): """Create an alarm, get and set the state and get the alarm history. This scenario makes following queries: GET /v2/alarms/{alarm_id}/history GET /v2/alarms/{alarm_id}/state PUT /v2/alarms/{alarm_id}/state Initially alarm is created and then get the state of the created alarm using its alarm_id. Then get the history of the alarm. 
And finally the state of the alarm is updated using given state. meter_name and threshold are required parameters for alarm creation. kwargs stores other optional parameters like 'ok_actions', 'project_id' etc that may be passed while alarm creation. :param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param state: an alarm state to be set :param timeout: The number of seconds for which to attempt a successful check of the alarm state :param kwargs: specifies optional arguments for alarm creation. """ alarm = self._create_alarm(meter_name, threshold, kwargs) self._get_alarm_state(alarm.alarm_id) self._get_alarm_history(alarm.alarm_id) self._set_alarm_state(alarm, state, timeout) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/utils.py0000664000567000056710000004425013073417716026446 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import datetime as dt import six from rally import exceptions from rally.plugins.openstack import scenario from rally.task import atomic from rally.task import utils as bench_utils class CeilometerScenario(scenario.OpenStackScenario): """Base class for Ceilometer scenarios with basic atomic actions.""" def _make_samples(self, count=1, interval=0, counter_name="cpu_util", counter_type="gauge", counter_unit="%", counter_volume=1, project_id=None, user_id=None, source=None, timestamp=None, metadata_list=None, batch_size=None): """Prepare and return a list of samples. :param count: specifies number of samples in array :param interval: specifies interval between timestamps of near-by samples :param counter_name: specifies name of the counter :param counter_type: specifies type of the counter :param counter_unit: specifies unit of the counter :param counter_volume: specifies volume of the counter :param project_id: specifies project id for samples :param user_id: specifies user id for samples :param source: specifies source for samples :param timestamp: specifies timestamp for samples :param metadata_list: specifies list of resource metadata :param batch_size: specifies number of samples to store in one query :returns: generator that produces lists of samples """ batch_size = batch_size or count sample = { "counter_name": counter_name, "counter_type": counter_type, "counter_unit": counter_unit, "counter_volume": counter_volume, "resource_id": self.generate_random_name() } opt_fields = { "project_id": project_id, "user_id": user_id, "source": source, "timestamp": timestamp, } for k, v in opt_fields.items(): if v: sample.update({k: v}) len_meta = len(metadata_list) if metadata_list else 0 now = timestamp or dt.datetime.utcnow() samples = [] for i in six.moves.xrange(count): if i and not (i % batch_size): yield samples samples = [] sample_item = dict(sample) sample_item["timestamp"] = ( now - dt.timedelta(seconds=(interval * i)) ).isoformat() if metadata_list: # 
NOTE(idegtiarov): Adding more than one template of metadata # required it's proportional distribution among whole samples. sample_item["resource_metadata"] = metadata_list[ i * len_meta // count ] samples.append(sample_item) yield samples def _make_query_item(self, field, op="eq", value=None): """Create a SimpleQuery item for requests. :param field: filtered field :param op: operator for filtering :param value: matched value :returns: dict with field, op and value keys for query """ return {"field": field, "op": op, "value": value} def _make_general_query(self, filter_by_project_id=None, filter_by_user_id=None, filter_by_resource_id=None, metadata_query=None): """Create a SimpleQuery for the list benchmarks. :param filter_by_project_id: add a project id to query :param filter_by_user_id: add a user id to query :param filter_by_resource_id: add a resource id to query :param metadata_query: metadata dict that will add to query :returns: SimpleQuery with specified items """ query = [] metadata_query = metadata_query or {} if filter_by_user_id: query.append(self._make_query_item("user_id", "eq", self.context["user"]["id"])) if filter_by_project_id: query.append(self._make_query_item( "project_id", "eq", self.context["tenant"]["id"])) if filter_by_resource_id: query.append(self._make_query_item( "resource_id", "eq", self.context["tenant"]["resources"][0])) for key, value in metadata_query.items(): query.append(self._make_query_item("metadata.%s" % key, value=value)) return query def _make_timestamp_query(self, start_time=None, end_time=None): """Create ceilometer query for timestamp range. 
:param start_time: start datetime in isoformat :param end_time: end datetime in isoformat :returns: query with timestamp range """ query = [] if end_time and start_time and end_time < start_time: msg = "End time should be great or equal than start time" raise exceptions.InvalidArgumentsException(msg) if start_time: query.append(self._make_query_item("timestamp", ">=", start_time)) if end_time: query.append(self._make_query_item("timestamp", "<=", end_time)) return query def _make_profiler_key(self, method, query=None, limit=None): """Create key for profiling method with query. :param method: Original profiler tag for method :param query: ceilometer query which fields will be added to key :param limit: if it exists `limit` will be added to key :returns: profiler key that includes method and queried fields """ query = query or [] limit_line = limit and "limit" or "" fields_line = "&".join("%s" % a["field"] for a in query) key_identifiers = "&".join(x for x in (limit_line, fields_line) if x) key = ":".join(x for x in (method, key_identifiers) if x) return key def _get_alarm_dict(self, **kwargs): """Prepare and return an alarm dict for creating an alarm. :param kwargs: optional parameters to create alarm :returns: alarm dictionary used to create an alarm """ alarm_id = self.generate_random_name() alarm = {"alarm_id": alarm_id, "name": alarm_id, "description": "Test Alarm"} alarm.update(kwargs) return alarm @atomic.action_timer("ceilometer.list_alarms") def _list_alarms(self, alarm_id=None): """List alarms. List alarm matching alarm_id. It fetches all alarms if alarm_id is None. :param alarm_id: specifies id of the alarm :returns: list of alarms """ if alarm_id: return self.clients("ceilometer").alarms.get(alarm_id) else: return self.clients("ceilometer").alarms.list() @atomic.action_timer("ceilometer.get_alarm") def _get_alarm(self, alarm_id): """Get detailed information of an alarm. 
:param alarm_id: Specifies id of the alarm :returns: If alarm_id is existed and correct, returns detailed information of an alarm, else returns None """ return self.clients("ceilometer").alarms.get(alarm_id) @atomic.action_timer("ceilometer.create_alarm") def _create_alarm(self, meter_name, threshold, kwargs): """Create an alarm. :param meter_name: specifies meter name of the alarm :param threshold: specifies alarm threshold :param kwargs: contains optional features of alarm to be created :returns: alarm """ alarm_dict = self._get_alarm_dict(**kwargs) alarm_dict.update({"meter_name": meter_name, "threshold": threshold}) alarm = self.clients("ceilometer").alarms.create(**alarm_dict) return alarm @atomic.action_timer("ceilometer.delete_alarm") def _delete_alarm(self, alarm_id): """Delete an alarm. :param alarm_id: specifies id of the alarm """ self.clients("ceilometer").alarms.delete(alarm_id) @atomic.action_timer("ceilometer.update_alarm") def _update_alarm(self, alarm_id, alarm_dict_delta): """Update an alarm. :param alarm_id: specifies id of the alarm :param alarm_dict_delta: features of alarm to be updated """ self.clients("ceilometer").alarms.update(alarm_id, **alarm_dict_delta) @atomic.action_timer("ceilometer.get_alarm_history") def _get_alarm_history(self, alarm_id): """Assemble the alarm history requested. :param alarm_id: specifies id of the alarm :returns: list of alarm changes """ return self.clients("ceilometer").alarms.get_history(alarm_id) @atomic.action_timer("ceilometer.get_alarm_state") def _get_alarm_state(self, alarm_id): """Get the state of the alarm. :param alarm_id: specifies id of the alarm :returns: state of the alarm """ return self.clients("ceilometer").alarms.get_state(alarm_id) @atomic.action_timer("ceilometer.set_alarm_state") def _set_alarm_state(self, alarm, state, timeout): """Set the state of the alarm. 
:param alarm: alarm instance :param state: an alarm state to be set :param timeout: The number of seconds for which to attempt a successful check of the alarm state. :returns: alarm in the set state """ self.clients("ceilometer").alarms.set_state(alarm.alarm_id, state) return bench_utils.wait_for(alarm, ready_statuses=[state], update_resource=bench_utils .get_from_manager(), timeout=timeout, check_interval=1) @atomic.action_timer("ceilometer.list_events") def _list_events(self): """Get list of user's events. It fetches all events. :returns: list of events """ return self.admin_clients("ceilometer").events.list() @atomic.action_timer("ceilometer.get_event") def _get_event(self, event_id): """Get event with specific id. Get event matching event_id. :param event_id: specifies id of the event :returns: event """ return self.admin_clients("ceilometer").events.get(event_id) @atomic.action_timer("ceilometer.list_event_types") def _list_event_types(self): """Get list of all event types. :returns: list of event types """ return self.admin_clients("ceilometer").event_types.list() @atomic.action_timer("ceilometer.list_event_traits") def _list_event_traits(self, event_type, trait_name): """Get list of event traits. :param event_type: specifies the type of event :param trait_name: specifies trait name :returns: list of event traits """ return self.admin_clients("ceilometer").traits.list(event_type, trait_name) @atomic.action_timer("ceilometer.list_event_trait_descriptions") def _list_event_trait_descriptions(self, event_type): """Get list of event trait descriptions. :param event_type: specifies the type of event :returns: list of event trait descriptions """ return self.admin_clients("ceilometer").trait_descriptions.list( event_type) def _list_samples(self, query=None, limit=None): """List all Samples. 
:param query: optional param that specify query :param limit: optional param for maximum number of samples returned :returns: list of samples """ key = self._make_profiler_key("ceilometer.list_samples", query, limit) with atomic.ActionTimer(self, key): return self.clients("ceilometer").new_samples.list(q=query, limit=limit) @atomic.action_timer("ceilometer.get_resource") def _get_resource(self, resource_id): """Retrieve details about one resource.""" return self.clients("ceilometer").resources.get(resource_id) @atomic.action_timer("ceilometer.get_stats") def _get_stats(self, meter_name, query=None, period=None, groupby=None, aggregates=None): """Get stats for a specific meter. :param meter_name: Name of ceilometer meter :param query: list of queries :param period: the length of the time range covered by these stats :param groupby: the fields used to group the samples :param aggregates: function for samples aggregation :returns: list of statistics data """ return self.clients("ceilometer").statistics.list(meter_name, q=query, period=period, groupby=groupby, aggregates=aggregates ) @atomic.action_timer("ceilometer.create_meter") def _create_meter(self, **kwargs): """Create a new meter. :param kwargs: Contains the optional attributes for meter creation :returns: Newly created meter """ name = self.generate_random_name() samples = self.clients("ceilometer").samples.create( counter_name=name, **kwargs) return samples[0] @atomic.action_timer("ceilometer.query_alarms") def _query_alarms(self, filter, orderby, limit): """Query alarms with specific parameters. If no input params are provided, it returns all the results in the database. 
:param limit: optional param for maximum number of results returned :param orderby: optional param for specifying ordering of results :param filter: optional filter query :returns: queried alarms """ return self.clients("ceilometer").query_alarms.query( filter, orderby, limit) @atomic.action_timer("ceilometer.query_alarm_history") def _query_alarm_history(self, filter, orderby, limit): """Query history of an alarm. If no input params are provided, it returns all the results in the database. :param limit: optional param for maximum number of results returned :param orderby: optional param for specifying ordering of results :param filter: optional filter query :returns: alarm history """ return self.clients("ceilometer").query_alarm_history.query( filter, orderby, limit) @atomic.action_timer("ceilometer.create_sample") def _create_sample(self, counter_name, counter_type, counter_unit, counter_volume, resource_id=None, **kwargs): """Create a Sample with specified parameters. :param counter_name: specifies name of the counter :param counter_type: specifies type of the counter :param counter_unit: specifies unit of the counter :param counter_volume: specifies volume of the counter :param resource_id: specifies resource id for the sample created :param kwargs: contains optional parameters for creating a sample :returns: created sample """ kwargs.update({"counter_name": counter_name, "counter_type": counter_type, "counter_unit": counter_unit, "counter_volume": counter_volume, "resource_id": resource_id if resource_id else self.generate_random_name()}) return self.clients("ceilometer").samples.create(**kwargs) @atomic.action_timer("ceilometer.create_samples") def _create_samples(self, samples): """Create Samples with specified parameters. 
:param samples: a list of samples to create :returns: created list samples """ return self.clients("ceilometer").samples.create_list(samples) @atomic.action_timer("ceilometer.query_samples") def _query_samples(self, filter, orderby, limit): """Query samples with specified parameters. If no input params are provided, it returns all the results in the database. :param limit: optional param for maximum number of results returned :param orderby: optional param for specifying ordering of results :param filter: optional filter query :returns: queried samples """ return self.clients("ceilometer").query_samples.query( filter, orderby, limit) def _list_resources(self, query=None, limit=None): """List all resources. :param query: query list for Ceilometer api :param limit: count of returned resources :returns: list of all resources """ key = self._make_profiler_key("ceilometer.list_resources", query, limit) with atomic.ActionTimer(self, key): return self.clients("ceilometer").resources.list(q=query, limit=limit) def _list_meters(self, query=None, limit=None): """Get list of user's meters. :param query: query list for Ceilometer api :param limit: count of returned meters :returns: list of all meters """ key = self._make_profiler_key("ceilometer.list_meters", query, limit) with atomic.ActionTimer(self, key): return self.clients("ceilometer").meters.list(q=query, limit=limit) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/resources.py0000664000567000056710000001023513073417720027307 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils from rally.task import validation """Scenarios for Ceilometer Resource API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerResource.list_resources") class ListResources(ceiloutils.CeilometerScenario): def run(self, metadata_query=None, start_time=None, end_time=None, limit=None): """Check all available queries for list resource request. This scenario fetches list of all resources using GET /v2/resources. 
:param metadata_query: dict with metadata fields and values for query :param start_time: lower bound of resource timestamp in isoformat :param end_time: upper bound of resource timestamp in isoformat :param limit: count of resources in response """ scenario = ListMatchedResources(self.context) scenario.run(filter_by_project_id=True) scenario.run(filter_by_user_id=True) scenario.run(filter_by_resource_id=True) if metadata_query: scenario.run(metadata_query=metadata_query) if start_time: scenario.run(start_time=start_time) if end_time: scenario.run(end_time=end_time) if start_time and end_time: scenario.run(start_time=start_time, end_time=end_time) if limit: scenario.run(limit=limit) @validation.required_services(consts.Service.CEILOMETER) @validation.required_openstack(users=True) @scenario.configure(name="CeilometerResource.get_tenant_resources") class GetTenantResources(ceiloutils.CeilometerScenario): def run(self): """Get all tenant resources. This scenario retrieves information about tenant resources using GET /v2/resources/(resource_id) """ resources = self.context["tenant"].get("resources", []) msg = ("No resources found for tenant: %s" % self.context["tenant"].get("name")) self.assertTrue(resources, msg) for res_id in resources: self._get_resource(res_id) @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerResource.list_matched_resources") class ListMatchedResources(ceiloutils.CeilometerScenario): def run(self, filter_by_user_id=False, filter_by_project_id=False, filter_by_resource_id=False, metadata_query=None, start_time=None, end_time=None, limit=None): """Get resources that matched fields from context and args. 
:param filter_by_user_id: flag for query by user_id :param filter_by_project_id: flag for query by project_id :param filter_by_resource_id: flag for query by resource_id :param metadata_query: dict with metadata fields and values for query :param start_time: lower bound of resource timestamp in isoformat :param end_time: upper bound of resource timestamp in isoformat :param limit: count of resources in response """ query = self._make_general_query(filter_by_project_id, filter_by_user_id, filter_by_resource_id, metadata_query) query += self._make_timestamp_query(start_time, end_time) self._list_resources(query, limit) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/traits.py0000664000567000056710000000563513073417720026613 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as cutils from rally.plugins.openstack.scenarios.keystone import basic as kbasic from rally.task import validation """Scenarios for Ceilometer Events API.""" # NOTE(idegtiarov): to work with traits we need to create event firstly, # there are no other way except emit suitable notification from one of # services, for example create new user in keystone. 
@validation.required_services(consts.Service.CEILOMETER, consts.Service.KEYSTONE) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"], "cleanup": ["ceilometer"]}, name="CeilometerTraits.create_user_and_list_traits") class CreateUserAndListTraits(cutils.CeilometerScenario, kbasic.KeystoneBasic): def run(self): """Create user and fetch all event traits. This scenario creates user to store new event and fetches list of all traits for certain event type and trait name using GET /v2/event_types//traits/. """ self.admin_keystone.create_user() event = self._list_events()[0] trait_name = event.traits[0]["name"] self._list_event_traits(event_type=event.event_type, trait_name=trait_name) @validation.required_services(consts.Service.CEILOMETER, consts.Service.KEYSTONE) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"], "cleanup": ["ceilometer"]}, name="CeilometerTraits.create_user_and" "_list_trait_descriptions") class CreateUserAndListTraitDescriptions( cutils.CeilometerScenario, kbasic.KeystoneBasic): def run(self): """Create user and fetch all trait descriptions. This scenario creates user to store new event and fetches list of all traits for certain event type using GET /v2/event_types//traits. """ self.admin_keystone.create_user() event = self._list_events()[0] self._list_event_trait_descriptions(event_type=event.event_type) rally-0.9.1/rally/plugins/openstack/scenarios/ceilometer/samples.py0000664000567000056710000000534113073417720026743 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.ceilometer import utils as ceiloutils from rally.task import validation """Scenarios for Ceilometer Samples API.""" @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerSamples.list_matched_samples") class ListMatchedSamples(ceiloutils.CeilometerScenario): def run(self, filter_by_resource_id=False, filter_by_project_id=False, filter_by_user_id=False, metadata_query=None, limit=None): """Get list of samples that matched fields from context and args. :param filter_by_user_id: flag for query by user_id :param filter_by_project_id: flag for query by project_id :param filter_by_resource_id: flag for query by resource_id :param metadata_query: dict with metadata fields and values for query :param limit: count of samples in response """ query = self._make_general_query(filter_by_project_id, filter_by_user_id, filter_by_resource_id, metadata_query) self._list_samples(query, limit) @validation.required_services(consts.Service.CEILOMETER) @validation.required_contexts("ceilometer") @validation.required_openstack(users=True) @scenario.configure(name="CeilometerSamples.list_samples") class ListSamples(ceiloutils.CeilometerScenario): def run(self, metadata_query=None, limit=None): """Fetch all available queries for list sample request. 
:param metadata_query: dict with metadata fields and values for query :param limit: count of samples in response """ scenario = ListMatchedSamples(self.context) scenario.run(filter_by_project_id=True) scenario.run(filter_by_user_id=True) scenario.run(filter_by_resource_id=True) if metadata_query: scenario.run(metadata_query=metadata_query) if limit: scenario.run(limit=limit) rally-0.9.1/rally/plugins/openstack/scenarios/fuel/0000775000567000056710000000000013073420067023523 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/fuel/__init__.py0000664000567000056710000000000013073417716025631 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/fuel/environments.py0000664000567000056710000000564413073417720026637 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.fuel import utils from rally.task import validation """Scenarios for Fuel environments.""" @validation.required_clients("fuel", admin=True) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["fuel"]}, name="FuelEnvironments.create_and_delete_environment") class CreateAndDeleteEnvironment(utils.FuelScenario): def run(self, release_id=1, network_provider="neutron", deployment_mode="ha_compact", net_segment_type="vlan", delete_retries=5): """Create and delete Fuel environments. 
:param release_id: release id (default 1) :param network_provider: network provider (default 'neutron') :param deployment_mode: deployment mode (default 'ha_compact') :param net_segment_type: net segment type (default 'vlan') :param delete_retries: retries count on delete operations (default 5) """ env_id = self._create_environment(release_id=release_id, network_provider=network_provider, deployment_mode=deployment_mode, net_segment_type=net_segment_type) self._delete_environment(env_id, delete_retries) @validation.required_clients("fuel", admin=True) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["fuel"]}, name="FuelEnvironments.create_and_list_environments") class CreateAndListEnvironments(utils.FuelScenario): def run(self, release_id=1, network_provider="neutron", deployment_mode="ha_compact", net_segment_type="vlan"): """Create and list Fuel environments. :param release_id: release id (default 1) :param network_provider: network provider (default 'neutron') :param deployment_mode: deployment mode (default 'ha_compact') :param net_segment_type: net segment type (default 'vlan') """ self._create_environment(release_id=release_id, network_provider=network_provider, deployment_mode=deployment_mode, net_segment_type=net_segment_type) self._list_environments() rally-0.9.1/rally/plugins/openstack/scenarios/fuel/utils.py0000664000567000056710000001555713073417716025261 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import os import random import six from rally.common.i18n import _ from rally.common import utils as rutils from rally import osclients from rally.plugins.openstack import scenario from rally.task import atomic class FuelEnvManager(object): def __init__(self, client): self.client = client def get(self, env_id): try: return self.client.get_by_id(env_id) except BaseException: return None def list(self): """List Fuel environments.""" try: return self.client.get_all() except SystemExit: raise RuntimeError(_("Can't list environments. " "Please check server availability.")) def create(self, name, release_id=1, network_provider="neutron", deployment_mode="ha_compact", net_segment_type="vlan"): try: env = self.client.create(name, release_id, network_provider, deployment_mode, net_segment_type) except SystemExit: raise RuntimeError(_("Something went wrong while creating an " "environment. This can happen when " "environment with name %s already exists.") % name) if env: return env raise RuntimeError(_("Environment was not created or was " "created but not returned by server.")) def delete(self, env_id, retries=5, retry_pause=0.5): env = self.get(env_id) retry_number = 0 while env: if retry_number > retries: raise RuntimeError(_("Can't delete environment " "id: %s ") % env_id) try: self.client.delete_by_id(env_id) except BaseException: rutils.interruptable_sleep(retry_pause) env = self.get(env_id) retry_number += 1 class FuelClient(object): """Thin facade over `fuelclient.get_client'.""" def __init__(self, version, server_address, server_port, username, password): # NOTE(amaretskiy): For now, there are only 2 ways how to # configure fuelclient connection: # * configuration file - this is not convenient to create # separate file for each benchmark # * env variables - this approach is preferable os.environ["SERVER_ADDRESS"] = server_address os.environ["LISTEN_PORT"] = 
str(server_port) os.environ["KEYSTONE_USER"] = username os.environ["KEYSTONE_PASS"] = password import fuelclient FuelClient.fuelclient_module = fuelclient get_client = fuelclient.get_client self.environment = FuelEnvManager(get_client( "environment", version=version)) self.node = get_client("node", version=version) self.task = get_client("task", version=version) @osclients.configure("fuel", default_version="v1") class Fuel(osclients.OSClient): """FuelClient factory for osclients.Clients.""" def create_client(self, *args, **kwargs): auth_url = six.moves.urllib.parse.urlparse(self.credential.auth_url) return FuelClient(version=self.choose_version(), server_address=auth_url.hostname, server_port=8000, username=self.credential.username, password=self.credential.password) class FuelScenario(scenario.OpenStackScenario): """Base class for Fuel scenarios.""" @atomic.action_timer("fuel.list_environments") def _list_environments(self): return self.admin_clients("fuel").environment.list() @atomic.action_timer("fuel.create_environment") def _create_environment(self, release_id=1, network_provider="neutron", deployment_mode="ha_compact", net_segment_type="vlan"): name = self.generate_random_name() env = self.admin_clients("fuel").environment.create( name, release_id, network_provider, deployment_mode, net_segment_type) return env["id"] @atomic.action_timer("fuel.delete_environment") def _delete_environment(self, env_id, retries=5): self.admin_clients("fuel").environment.delete(env_id, retries) @atomic.action_timer("fuel.add_node") def _add_node(self, env_id, node_ids, node_roles=None): """Add node to environment :param env_id: environment id :param node_ids: list of node ids :param node_roles: list of roles """ node_roles = node_roles or ["compute"] try: self.admin_clients("fuel").environment.client.add_nodes( env_id, node_ids, node_roles) except BaseException as e: raise RuntimeError( "Unable to add node(s) to environment. 
Fuel client exited " "with error %s" % e) @atomic.action_timer("fuel.delete_node") def _remove_node(self, env_id, node_id): env = FuelClient.fuelclient_module.objects.environment.Environment( env_id) try: env.unassign([node_id]) except BaseException as e: raise RuntimeError( "Unable to add node(s) to environment. Fuel client exited " "with error %s" % e) @atomic.action_timer("fuel.list_nodes") def _list_node_ids(self, env_id=None): result = self.admin_clients("fuel").node.get_all( environment_id=env_id) return [x["id"] for x in result] def _node_is_assigned(self, node_id): try: node = self.admin_clients("fuel").node.get_by_id(node_id) return bool(node["cluster"]) except BaseException as e: raise RuntimeError( "Unable to add node(s) to environment. Fuel client exited " "with error %s" % e) def _get_free_node_id(self): node_ids = self._list_node_ids() random.shuffle(node_ids) for node_id in node_ids: if not self._node_is_assigned(node_id): return node_id else: raise RuntimeError("Can not found free node.") rally-0.9.1/rally/plugins/openstack/scenarios/fuel/nodes.py0000664000567000056710000000263013073417720025210 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import random

from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.fuel import utils
from rally.task import validation


"""Scenarios for Fuel nodes."""


@validation.required_clients("fuel", admin=True)
@validation.required_openstack(admin=True)
@validation.required_contexts("fuel_environments")
@scenario.configure(name="FuelNodes.add_and_remove_node")
class AddAndRemoveNode(utils.FuelScenario):

    def run(self, node_roles=None):
        """Add node to environment and remove.

        :param node_roles: list. Roles, which node should be assigned to
                           env with
        """
        # Pick a random pre-created environment (from the
        # "fuel_environments" context) and a node that is not yet
        # assigned to any environment.
        env_id = random.choice(self.context["fuel"]["environments"])
        node_id = self._get_free_node_id()
        self._add_node(env_id, [node_id], node_roles)
        self._remove_node(env_id, node_id)
rally-0.9.1/rally/plugins/openstack/scenarios/senlin/0000775000567000056710000000000013073420067024060 5ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/senlin/__init__.py0000664000567000056710000000000013073417716026166 0ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/senlin/utils.py0000664000567000056710000001352613073417720025603 0ustar  jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg

from rally import exceptions
from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils


SENLIN_BENCHMARK_OPTS = [
    cfg.FloatOpt("senlin_action_timeout",
                 default=3600,
                 help="Time in seconds to wait for senlin action to finish."),
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(SENLIN_BENCHMARK_OPTS, group=benchmark_group)


class SenlinScenario(scenario.OpenStackScenario):
    """Base class for Senlin scenarios with basic atomic actions."""

    @atomic.action_timer("senlin.list_clusters")
    def _list_clusters(self, **queries):
        """Return user cluster list.

        :param kwargs \*\*queries: Optional query parameters to be sent to
            restrict the clusters to be returned. Available parameters
            include:

            * name: The name of a cluster.
            * status: The current status of a cluster.
            * sort: A list of sorting keys separated by commas. Each sorting
              key can optionally be attached with a sorting direction
              modifier which can be ``asc`` or ``desc``.
            * limit: Requests a specified size of returned items from the
              query. Returns a number of items up to the specified limit
              value.
            * marker: Specifies the ID of the last-seen item. Use the limit
              parameter to make an initial limited request and use the ID of
              the last-seen item from the response as the marker parameter
              value in a subsequent limited request.
            * global_project: A boolean value indicating whether clusters
              from all projects will be returned.

        :returns: list of clusters according to query.
        """
        # The client returns a generator; materialize it inside the timer.
        return list(self.admin_clients("senlin").clusters(**queries))

    @atomic.action_timer("senlin.create_cluster")
    def _create_cluster(self, profile_id, desired_capacity=0, min_size=0,
                        max_size=-1, timeout=60, metadata=None):
        """Create a new cluster from attributes.

        :param profile_id: ID of profile used to create cluster
        :param desired_capacity: The capacity or initial number of nodes
            owned by the cluster
        :param min_size: The minimum number of nodes owned by the cluster
        :param max_size: The maximum number of nodes owned by the cluster.
            -1 means no limit
        :param timeout: The timeout value in minutes for cluster creation
        :param metadata: A set of key value pairs to associate with the
            cluster

        :returns: object of cluster created.
        """
        attrs = {
            "profile_id": profile_id,
            "name": self.generate_random_name(),
            "desired_capacity": desired_capacity,
            "min_size": min_size,
            "max_size": max_size,
            "metadata": metadata,
            "timeout": timeout
        }

        cluster = self.admin_clients("senlin").create_cluster(**attrs)
        # Block until the cluster reaches ACTIVE (or fail on ERROR).
        cluster = utils.wait_for_status(
            cluster,
            ready_statuses=["ACTIVE"],
            failure_statuses=["ERROR"],
            update_resource=self._get_cluster,
            timeout=CONF.benchmark.senlin_action_timeout)

        return cluster

    def _get_cluster(self, cluster):
        """Get cluster details.

        :param cluster: cluster to get

        :returns: object of cluster
        """
        try:
            return self.admin_clients("senlin").get_cluster(cluster.id)
        except Exception as e:
            # Different SDK versions expose the HTTP status under
            # different attribute names; default to 400 if neither exists.
            if getattr(e, "code", getattr(e, "http_status", 400)) == 404:
                raise exceptions.GetResourceNotFound(resource=cluster.id)
            raise exceptions.GetResourceFailure(resource=cluster.id, err=e)

    @atomic.action_timer("senlin.delete_cluster")
    def _delete_cluster(self, cluster):
        """Delete given cluster.

        Returns after the cluster is successfully deleted.

        :param cluster: cluster object to delete
        """
        self.admin_clients("senlin").delete_cluster(cluster)
        utils.wait_for_status(
            cluster,
            ready_statuses=["DELETED"],
            failure_statuses=["ERROR"],
            check_deletion=True,
            update_resource=self._get_cluster,
            timeout=CONF.benchmark.senlin_action_timeout)

    @atomic.action_timer("senlin.create_profile")
    def _create_profile(self, spec, metadata=None):
        """Create a new profile from attributes.

        :param spec: spec dictionary used to create profile
        :param metadata: A set of key value pairs to associate with the
            profile

        :returns: object of profile created
        """
        attrs = {}
        attrs["spec"] = spec
        attrs["name"] = self.generate_random_name()
        if metadata:
            attrs["metadata"] = metadata

        return self.clients("senlin").create_profile(**attrs)

    @atomic.action_timer("senlin.delete_profile")
    def _delete_profile(self, profile):
        """Delete given profile.

        Returns after the profile is successfully deleted.

        :param profile: profile object to be deleted
        """
        self.clients("senlin").delete_profile(profile)
rally-0.9.1/rally/plugins/openstack/scenarios/senlin/clusters.py0000664000567000056710000000403713073417720026304 0ustar  jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.senlin import utils
from rally.task import validation


"""Scenarios for Senlin clusters."""


@validation.required_openstack(admin=True)
@validation.required_services(consts.Service.SENLIN)
@validation.required_contexts("profiles")
@scenario.configure(context={"admin_cleanup": ["senlin"]},
                    name="SenlinClusters.create_and_delete_cluster")
class CreateAndDeleteCluster(utils.SenlinScenario):

    def run(self, desired_capacity=0, min_size=0, max_size=-1,
            timeout=3600, metadata=None):
        """Create a cluster and then delete it.

        Measure the "senlin cluster-create" and "senlin cluster-delete"
        commands performance.

        :param desired_capacity: The capacity or initial number of nodes
            owned by the cluster
        :param min_size: The minimum number of nodes owned by the cluster
        :param max_size: The maximum number of nodes owned by the cluster.
            -1 means no limit
        :param timeout: The timeout value in seconds for cluster creation
        :param metadata: A set of key value pairs to associate with the
            cluster
        """
        # Profile comes from the "profiles" context (one per tenant).
        profile_id = self.context["tenant"]["profile"]
        cluster = self._create_cluster(profile_id, desired_capacity,
                                       min_size, max_size, timeout, metadata)
        self._delete_cluster(cluster)
rally-0.9.1/rally/plugins/openstack/scenarios/monasca/0000775000567000056710000000000013073420067024211 5ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/monasca/__init__.py0000664000567000056710000000000013073417716026317 0ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/monasca/metrics.py0000664000567000056710000000240513073417720026234 0ustar  jenkinsjenkins00000000000000# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.monasca import utils as monascautils
from rally.task import validation


"""Scenarios for monasca Metrics API."""


@validation.required_clients("monasca")
@validation.required_services(consts.Service.MONASCA)
@validation.required_openstack(users=True)
@scenario.configure(name="MonascaMetrics.list_metrics")
class ListMetrics(monascautils.MonascaScenario):

    def run(self, **kwargs):
        """Fetch user's metrics.

        :param kwargs: optional arguments for list query:
                       name, dimensions, start_time, etc
        """
        self._list_metrics(**kwargs)
rally-0.9.1/rally/plugins/openstack/scenarios/monasca/utils.py0000664000567000056710000000424413073417720025731 0ustar  jenkinsjenkins00000000000000# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import time
import uuid

from oslo_config import cfg

from rally.plugins.openstack import scenario
from rally.task import atomic


MONASCA_BENCHMARK_OPTS = [
    cfg.FloatOpt(
        "monasca_metric_create_prepoll_delay",
        default=15.0,
        help="Delay between creating Monasca metrics and polling for "
             "its elements.")
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(MONASCA_BENCHMARK_OPTS, group=benchmark_group)


class MonascaScenario(scenario.OpenStackScenario):
    """Base class for Monasca scenarios with basic atomic actions."""

    @atomic.action_timer("monasca.list_metrics")
    def _list_metrics(self, **kwargs):
        """Get list of user's metrics.

        :param kwargs: optional arguments for list query:
                       name, dimensions, start_time, etc
        :returns list of monasca metrics
        """
        return self.clients("monasca").metrics.list(**kwargs)

    @atomic.action_timer("monasca.create_metrics")
    def _create_metrics(self, **kwargs):
        """Create user metrics.

        :param kwargs: attributes for metric creation:
                       name, dimension, timestamp, value, etc
        """
        # Monasca expects the timestamp in milliseconds since epoch.
        timestamp = int(time.time() * 1000)
        kwargs.update({"name": self.generate_random_name(),
                       "timestamp": timestamp,
                       "value": random.random(),
                       "value_meta": {
                           "key": str(uuid.uuid4())[:10]}})
        self.clients("monasca").metrics.create(**kwargs)
rally-0.9.1/rally/plugins/openstack/scenarios/designate/0000775000567000056710000000000013073420067024533 5ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/designate/__init__.py0000664000567000056710000000000013073417716026641 0ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/designate/utils.py0000664000567000056710000002020113073417716026247 0ustar  jenkinsjenkins00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.plugins.openstack import scenario
from rally.task import atomic


class DesignateScenario(scenario.OpenStackScenario):
    """Base class for Designate scenarios with basic atomic actions."""

    @atomic.action_timer("designate.create_domain")
    def _create_domain(self, domain=None):
        """Create domain.

        :param domain: dict, POST /v1/domains request options
        :returns: designate domain dict
        """
        domain = domain or {}

        domain.setdefault("email", "root@random.name")
        domain["name"] = "%s.name." % self.generate_random_name()
        return self.clients("designate").domains.create(domain)

    @atomic.action_timer("designate.list_domains")
    def _list_domains(self):
        """Return user domain list."""
        return self.clients("designate").domains.list()

    @atomic.action_timer("designate.delete_domain")
    def _delete_domain(self, domain_id):
        """Delete designate zone.

        :param domain_id: domain ID
        """
        self.clients("designate").domains.delete(domain_id)

    @atomic.action_timer("designate.update_domain")
    def _update_domain(self, domain):
        """Update designate domain.

        :param domain: designate domain
        :returns: designate updated domain dict
        """
        domain["description"] = "updated domain"
        domain["email"] = "updated@random.name"
        return self.clients("designate").domains.update(domain)

    @atomic.optional_action_timer("designate.create_record")
    def _create_record(self, domain, record=None):
        """Create a record in a domain.

        :param domain: domain dict
        :param record: record dict
        :param atomic_action: True if the record creation should be tracked
                              as an atomic action. added and handled by the
                              optional_action_timer() decorator
        :returns: Designate record dict
        """
        record = record or {}
        record.setdefault("type", "A")
        record["name"] = "%s.%s" % (self.generate_random_name(),
                                    domain["name"])
        record.setdefault("data", "10.0.0.1")

        client = self.clients("designate")
        return client.records.create(domain["id"], record)

    @atomic.action_timer("designate.list_records")
    def _list_records(self, domain_id):
        """List domain records.

        :param domain_id: domain ID
        :returns: domain records list
        """
        return self.clients("designate").records.list(domain_id)

    @atomic.optional_action_timer("designate.delete_record")
    def _delete_record(self, domain_id, record_id):
        """Delete a domain record.

        :param domain_id: domain ID
        :param record_id: record ID
        :param atomic_action: True if the record creation should be tracked
                              as an atomic action. added and handled by the
                              optional_action_timer() decorator
        """
        self.clients("designate").records.delete(domain_id, record_id)

    @atomic.action_timer("designate.create_server")
    def _create_server(self, server=None):
        """Create server.

        :param server: dict, POST /v1/servers request options
        :returns: designate server dict
        """
        server = server or {}

        server["name"] = "name.%s." % self.generate_random_name()
        return self.admin_clients("designate").servers.create(server)

    @atomic.action_timer("designate.list_servers")
    def _list_servers(self):
        """Return user server list."""
        return self.admin_clients("designate").servers.list()

    @atomic.action_timer("designate.delete_server")
    def _delete_server(self, server_id):
        """Delete Server.

        :param server_id: unicode server ID
        """
        self.admin_clients("designate").servers.delete(server_id)

    # NOTE: API V2
    @atomic.action_timer("designate.create_zone")
    def _create_zone(self, name=None, type_=None, email=None,
                     description=None, ttl=None):
        """Create zone.

        :param name: Zone name
        :param type_: Zone type, PRIMARY or SECONDARY
        :param email: Zone owner email
        :param description: Zone description
        :param ttl: Zone ttl - Time to live in seconds
        :returns: designate zone dict
        """
        type_ = type_ or "PRIMARY"

        if type_ == "PRIMARY":
            email = email or "root@random.name"
            # Name is only useful to be random for PRIMARY
            name = name or "%s.name." % self.generate_random_name()

        return self.clients("designate", version="2").zones.create(
            name=name,
            type_=type_,
            email=email,
            description=description,
            ttl=ttl
        )

    @atomic.action_timer("designate.list_zones")
    def _list_zones(self, criterion=None, marker=None, limit=None):
        """Return user zone list.

        :param criterion: API Criterion to filter by
        :param marker: UUID marker of the item to start the page from
        :param limit: How many items to return in the page.
        :returns: list of designate zones
        """
        # NOTE(review): criterion/marker/limit are accepted but not
        # forwarded to the client call here - confirm whether intended.
        return self.clients("designate", version="2").zones.list()

    @atomic.action_timer("designate.delete_zone")
    def _delete_zone(self, zone_id):
        """Delete designate zone.

        :param zone_id: Zone ID
        """
        self.clients("designate", version="2").zones.delete(zone_id)

    @atomic.action_timer("designate.list_recordsets")
    def _list_recordsets(self, zone_id, criterion=None, marker=None,
                         limit=None):
        """List zone recordsets.

        :param zone_id: Zone ID
        :param criterion: API Criterion to filter by
        :param marker: UUID marker of the item to start the page from
        :param limit: How many items to return in the page.
        :returns: zone recordsets list
        """
        return self.clients("designate", version="2").recordsets.list(
            zone_id, criterion=criterion, marker=marker, limit=limit)

    @atomic.optional_action_timer("designate.create_recordset")
    def _create_recordset(self, zone, recordset=None):
        """Create a recordset in a zone.

        :param zone: zone dict
        :param recordset: recordset dict
        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :returns: Designate recordset dict
        """
        recordset = recordset or {}
        # The v2 client expects "type_"; accept "type" for convenience.
        recordset.setdefault("type_", recordset.pop("type", "A"))
        if "name" not in recordset:
            recordset["name"] = "%s.%s" % (self.generate_random_name(),
                                           zone["name"])
        if "records" not in recordset:
            recordset["records"] = ["10.0.0.1"]

        return self.clients("designate", version="2").recordsets.create(
            zone["id"], **recordset)

    @atomic.optional_action_timer("designate.delete_recordset")
    def _delete_recordset(self, zone_id, recordset_id):
        """Delete a zone recordset.

        :param zone_id: Zone ID
        :param recordset_id: Recordset ID
        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        """
        self.clients("designate", version="2").recordsets.delete(
            zone_id, recordset_id)
rally-0.9.1/rally/plugins/openstack/scenarios/designate/basic.py0000664000567000056710000003144413073417720026176 0ustar  jenkinsjenkins00000000000000# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.designate import utils
from rally.task import atomic
from rally.task import validation


"""Basic scenarios for Designate."""


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_list_domains")
class CreateAndListDomains(utils.DesignateScenario):

    def run(self):
        """Create a domain and list all domains.

        Measure the "designate domain-list" command performance.

        If you have only 1 user in your context, you will
        add 1 domain on every iteration. So you will have more
        and more domain and will be able to measure the
        performance of the "designate domain-list" command depending on
        the number of domains owned by users.
        """
        domain = self._create_domain()
        msg = "Domain isn't created"
        self.assertTrue(domain, msg)
        list_domains = self._list_domains()
        self.assertIn(domain, list_domains)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.list_domains")
class ListDomains(utils.DesignateScenario):

    def run(self):
        """List Designate domains.

        This simple scenario tests the designate domain-list command by
        listing all the domains.

        Suppose if we have 2 users in context and each has 2 domains
        uploaded for them we will be able to test the performance of
        designate domain-list command in this case.
        """
        self._list_domains()


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_delete_domain")
class CreateAndDeleteDomain(utils.DesignateScenario):

    def run(self):
        """Create and then delete a domain.

        Measure the performance of creating and deleting domains
        with different level of load.
        """
        domain = self._create_domain()
        self._delete_domain(domain["id"])


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_update_domain")
class CreateAndUpdateDomain(utils.DesignateScenario):

    def run(self):
        """Create and then update a domain.

        Measure the performance of creating and updating domains
        with different level of load.
        """
        domain = self._create_domain()
        self._update_domain(domain)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_delete_records")
class CreateAndDeleteRecords(utils.DesignateScenario):

    def run(self, records_per_domain=5):
        """Create and then delete records.

        Measure the performance of creating and deleting records
        with different level of load.

        :param records_per_domain: Records to create pr domain.
        """
        domain = self._create_domain()

        records = []

        # Bulk-create under a single aggregate atomic action; per-record
        # timing is disabled via atomic_action=False.
        key = "designate.create_%s_records" % records_per_domain
        with atomic.ActionTimer(self, key):
            for i in range(records_per_domain):
                record = self._create_record(domain, atomic_action=False)
                records.append(record)

        key = "designate.delete_%s_records" % records_per_domain
        with atomic.ActionTimer(self, key):
            for record in records:
                self._delete_record(
                    domain["id"], record["id"], atomic_action=False)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.list_records")
class ListRecords(utils.DesignateScenario):

    def run(self, domain_id):
        """List Designate records.

        This simple scenario tests the designate record-list command by
        listing all the records in a domain.

        Suppose if we have 2 users in context and each has 2 domains
        uploaded for them we will be able to test the performance of
        designate record-list command in this case.

        :param domain_id: Domain ID
        """
        self._list_records(domain_id)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_list_records")
class CreateAndListRecords(utils.DesignateScenario):

    def run(self, records_per_domain=5):
        """Create and then list records.

        If you have only 1 user in your context, you will
        add 1 record on every iteration. So you will have more
        and more records and will be able to measure the
        performance of the "designate record-list" command depending on
        the number of domains/records owned by users.

        :param records_per_domain: Records to create pr domain.
        """
        domain = self._create_domain()

        key = "designate.create_%s_records" % records_per_domain
        records = []
        with atomic.ActionTimer(self, key):
            for i in range(records_per_domain):
                records.append(
                    self._create_record(domain, atomic_action=False))

        list_records = self._list_records(domain["id"])
        self.assertEqual(records, list_records)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["designate"]},
                    name="DesignateBasic.create_and_list_servers")
class CreateAndListServers(utils.DesignateScenario):

    def run(self):
        """Create a Designate server and list all servers.

        If you have only 1 user in your context, you will
        add 1 server on every iteration. So you will have more
        and more server and will be able to measure the
        performance of the "designate server-list" command depending on
        the number of servers owned by users.
        """
        server = self._create_server()
        self.assertTrue(server)
        list_servers = self._list_servers()
        self.assertIn(server, list_servers)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["designate"]},
                    name="DesignateBasic.create_and_delete_server")
class CreateAndDeleteServer(utils.DesignateScenario):

    def run(self):
        """Create and then delete a server.

        Measure the performance of creating and deleting servers
        with different level of load.
        """
        server = self._create_server()
        self._delete_server(server["id"])


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(name="DesignateBasic.list_servers")
class ListServers(utils.DesignateScenario):

    def run(self):
        """List Designate servers.

        This simple scenario tests the designate server-list command by
        listing all the servers.
        """
        self._list_servers()


# NOTE: API V2
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_list_zones")
class CreateAndListZones(utils.DesignateScenario):

    def run(self):
        """Create a zone and list all zones.

        Measure the "openstack zone list" command performance.

        If you have only 1 user in your context, you will
        add 1 zone on every iteration. So you will have more
        and more zone and will be able to measure the
        performance of the "openstack zone list" command depending on
        the number of zones owned by users.
        """
        zone = self._create_zone()
        self.assertTrue(zone)
        list_zones = self._list_zones()
        self.assertIn(zone, list_zones)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.list_zones")
class ListZones(utils.DesignateScenario):

    def run(self):
        """List Designate zones.

        This simple scenario tests the openstack zone list command by
        listing all the zones.
        """
        self._list_zones()


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_delete_zone")
class CreateAndDeleteZone(utils.DesignateScenario):

    def run(self):
        """Create and then delete a zone.

        Measure the performance of creating and deleting zones
        with different level of load.
        """
        zone = self._create_zone()
        self._delete_zone(zone["id"])


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.list_recordsets")
class ListRecordsets(utils.DesignateScenario):

    def run(self, zone_id):
        """List Designate recordsets.

        This simple scenario tests the openstack recordset list command by
        listing all the recordsets in a zone.

        :param zone_id: Zone ID
        """
        self._list_recordsets(zone_id)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@validation.required_contexts("zones")
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_delete_recordsets")
class CreateAndDeleteRecordsets(utils.DesignateScenario):

    def run(self, recordsets_per_zone=5):
        """Create and then delete recordsets.

        Measure the performance of creating and deleting recordsets
        with different level of load.

        :param recordsets_per_zone: recordsets to create pr zone.
        """
        # Zones are pre-created by the "zones" context.
        zone = random.choice(self.context["tenant"]["zones"])

        recordsets = []

        key = "designate.create_%s_recordsets" % recordsets_per_zone
        with atomic.ActionTimer(self, key):
            for i in range(recordsets_per_zone):
                recordset = self._create_recordset(zone, atomic_action=False)
                recordsets.append(recordset)

        key = "designate.delete_%s_recordsets" % recordsets_per_zone
        with atomic.ActionTimer(self, key):
            for recordset in recordsets:
                self._delete_recordset(
                    zone["id"], recordset["id"], atomic_action=False)


@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@validation.required_contexts("zones")
@scenario.configure(context={"cleanup": ["designate"]},
                    name="DesignateBasic.create_and_list_recordsets")
class CreateAndListRecordsets(utils.DesignateScenario):

    def run(self, recordsets_per_zone=5):
        """Create and then list recordsets.

        If you have only 1 user in your context, you will
        add 1 recordset on every iteration. So you will have more
        and more recordsets and will be able to measure the
        performance of the "openstack recordset list" command depending on
        the number of zones/recordsets owned by users.

        :param recordsets_per_zone: recordsets to create pr zone.
        """
        zone = random.choice(self.context["tenant"]["zones"])

        key = "designate.create_%s_recordsets" % recordsets_per_zone
        with atomic.ActionTimer(self, key):
            for i in range(recordsets_per_zone):
                self._create_recordset(zone, atomic_action=False)

        self._list_recordsets(zone["id"])
rally-0.9.1/rally/plugins/openstack/scenarios/ironic/0000775000567000056710000000000013073420067024053 5ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ironic/__init__.py0000664000567000056710000000000013073417716026161 0ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/ironic/utils.py0000664000567000056710000001226413073417720025574 0ustar  jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import string

from oslo_config import cfg

from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils


IRONIC_BENCHMARK_OPTS = [
    cfg.FloatOpt("ironic_node_create_poll_interval",
                 default=1.0,
                 help="Interval(in sec) between checks when waiting for node "
                      "creation."),
    cfg.FloatOpt("ironic_node_create_timeout",
                 default=300,
                 help="Ironic node create timeout"),
    cfg.FloatOpt("ironic_node_poll_interval",
                 default=1.0,
                 help="Ironic node poll interval"),
    cfg.FloatOpt("ironic_node_delete_timeout",
                 default=300,
                 help="Ironic node create timeout")
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(IRONIC_BENCHMARK_OPTS, group=benchmark_group)


class IronicScenario(scenario.OpenStackScenario):
    """Base class for Ironic scenarios with basic atomic actions."""

    # NOTE(stpierre): Ironic has two name checkers. The new-style
    # checker, in API v1.10+, is quite relaxed and will Just Work with
    # the default random name pattern. (See
    # https://bugs.launchpad.net/ironic/+bug/1434376.) The old-style
    # checker *claims* to implement RFCs 952 and 1123, but it doesn't
    # actually. (See https://bugs.launchpad.net/ironic/+bug/1468508
    # for details.) The default RESOURCE_NAME_FORMAT works fine for
    # the new-style checker, but the old-style checker only allows
    # underscores after the first dot, for reasons that I'm sure are
    # entirely obvious, so we have to supply a bespoke format for
    # Ironic names.
    RESOURCE_NAME_FORMAT = "s-rally-XXXXXXXX-XXXXXXXX"
    RESOURCE_NAME_ALLOWED_CHARACTERS = string.ascii_lowercase + string.digits

    @atomic.action_timer("ironic.create_node")
    def _create_node(self, driver, **kwargs):
        """Create node immediately.

        :param driver: The name of the driver used to manage this Node.
        :param kwargs: optional parameters to create image
        :returns: node object
        """
        kwargs["name"] = self.generate_random_name()
        node = self.admin_clients("ironic").node.create(driver=driver,
                                                        **kwargs)

        self.sleep_between(CONF.benchmark.ironic_node_create_poll_interval)
        # Ironic exposes the state via "provision_state" and the id via
        # "uuid", hence the non-default attr names below.
        node = utils.wait_for_status(
            node,
            ready_statuses=["AVAILABLE"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.ironic_node_create_timeout,
            check_interval=CONF.benchmark.ironic_node_poll_interval,
            id_attr="uuid", status_attr="provision_state"
        )

        return node

    @atomic.action_timer("ironic.list_nodes")
    def _list_nodes(self, associated=None, maintenance=None, detail=False,
                    sort_dir=None):
        """Return list of nodes.

        :param associated: Optional. Either a Boolean or a string
                           representation of a Boolean that indicates whether
                           to return a list of associated (True or "True") or
                           unassociated (False or "False") nodes.
        :param maintenance: Optional. Either a Boolean or a string
                            representation of a Boolean that indicates whether
                            to return nodes in maintenance mode (True or
                            "True"), or not in maintenance mode (False or
                            "False").
        :param detail: Optional, boolean whether to return detailed
                       information about nodes.
        :param sort_dir: Optional, direction of sorting, either 'asc' (the
                         default) or 'desc'.
        :returns: A list of nodes.
        """
        return self.admin_clients("ironic").node.list(
            associated=associated, maintenance=maintenance, detail=detail,
            sort_dir=sort_dir)

    @atomic.action_timer("ironic.delete_node")
    def _delete_node(self, node):
        """Delete the node with specific id.

        :param node: Ironic node object
        """
        self.admin_clients("ironic").node.delete(node.uuid)

        utils.wait_for_status(
            node,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.ironic_node_delete_timeout,
            check_interval=CONF.benchmark.ironic_node_poll_interval,
            id_attr="uuid", status_attr="provision_state"
        )
rally-0.9.1/rally/plugins/openstack/scenarios/ironic/nodes.py0000664000567000056710000000665213073417720025544 0ustar  jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally.common import logging
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.ironic import utils
from rally.task import validation


"""Scenarios for ironic nodes."""


@logging.log_deprecated_args("Useless arguments detected", "0.10.0",
                             ("marker", "limit", "sort_key"), once=True)
@validation.restricted_parameters(["name"])
@validation.required_services(consts.Service.IRONIC)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["ironic"]},
                    name="IronicNodes.create_and_list_node")
class CreateAndListNode(utils.IronicScenario):

    def run(self, driver, associated=None, maintenance=None, detail=False,
            sort_dir=None, marker=None, limit=None, sort_key=None, **kwargs):
        """Create and list nodes.

        :param driver: The name of the driver used to manage this Node.
        :param associated: Optional argument of list request. Either a
                           Boolean or a string representation of a Boolean
                           that indicates whether to return a list of
                           associated (True or "True") or unassociated
                           (False or "False") nodes.
        :param maintenance: Optional argument of list request. Either a
                            Boolean or a string representation of a Boolean
                            that indicates whether to return nodes in
                            maintenance mode (True or "True"), or not in
                            maintenance mode (False or "False").
        :param detail: Optional, boolean whether to return detailed
                       information about nodes.
        :param sort_dir: Optional, direction of sorting, either 'asc' (the
                         default) or 'desc'.
        :param marker: DEPRECATED since Rally 0.10.0
        :param limit: DEPRECATED since Rally 0.10.0
        :param sort_key: DEPRECATED since Rally 0.10.0
        :param kwargs: Optional additional arguments for node creation
        """
        node = self._create_node(driver, **kwargs)
        list_nodes = self._list_nodes(
            associated=associated, maintenance=maintenance, detail=detail,
            sort_dir=sort_dir)
        self.assertIn(node.name, [n.name for n in list_nodes])


@validation.restricted_parameters(["name"])
@validation.required_services(consts.Service.IRONIC)
@validation.required_openstack(admin=True)
@scenario.configure(context={"admin_cleanup": ["ironic"]},
                    name="IronicNodes.create_and_delete_node")
class CreateAndDeleteNode(utils.IronicScenario):

    def run(self, driver, **kwargs):
        """Create and delete node.

        :param driver: The name of the driver used to manage this Node.
        :param kwargs: Optional additional arguments for node creation
        """
        node = self._create_node(driver, **kwargs)
        self._delete_node(node)
rally-0.9.1/rally/plugins/openstack/scenarios/swift/0000775000567000056710000000000013073420067023724 5ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/swift/__init__.py0000664000567000056710000000000013073417716026032 0ustar  jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/swift/objects.py0000664000567000056710000001761713073417720025741 0ustar  jenkinsjenkins00000000000000# Copyright 2015: Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.swift import utils from rally.task import atomic from rally.task import validation """Scenarios for Swift Objects.""" @validation.required_services(consts.Service.SWIFT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["swift"]}, name="SwiftObjects.create_container" "_and_object_then_list_objects") class CreateContainerAndObjectThenListObjects(utils.SwiftScenario): def run(self, objects_per_container=1, object_size=1024, **kwargs): """Create container and objects then list all objects. :param objects_per_container: int, number of objects to upload :param object_size: int, temporary local object size :param kwargs: dict, optional parameters to create container """ key_suffix = "object" if objects_per_container > 1: key_suffix = "%i_objects" % objects_per_container container_name = None with tempfile.TemporaryFile() as dummy_file: # set dummy file to specified object size dummy_file.truncate(object_size) container_name = self._create_container(**kwargs) with atomic.ActionTimer(self, "swift.create_%s" % key_suffix): for i in range(objects_per_container): dummy_file.seek(0) self._upload_object(container_name, dummy_file, atomic_action=False) self._list_objects(container_name) @validation.required_services(consts.Service.SWIFT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["swift"]}, name="SwiftObjects.create_container" "_and_object_then_delete_all") class CreateContainerAndObjectThenDeleteAll(utils.SwiftScenario): def run(self, objects_per_container=1, object_size=1024, **kwargs): """Create container and objects then delete everything created. 
:param objects_per_container: int, number of objects to upload :param object_size: int, temporary local object size :param kwargs: dict, optional parameters to create container """ key_suffix = "object" if objects_per_container > 1: key_suffix = "%i_objects" % objects_per_container container_name = None objects_list = [] with tempfile.TemporaryFile() as dummy_file: # set dummy file to specified object size dummy_file.truncate(object_size) container_name = self._create_container(**kwargs) with atomic.ActionTimer(self, "swift.create_%s" % key_suffix): for i in range(objects_per_container): dummy_file.seek(0) object_name = self._upload_object(container_name, dummy_file, atomic_action=False)[1] objects_list.append(object_name) with atomic.ActionTimer(self, "swift.delete_%s" % key_suffix): for object_name in objects_list: self._delete_object(container_name, object_name, atomic_action=False) self._delete_container(container_name) @validation.required_services(consts.Service.SWIFT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["swift"]}, name="SwiftObjects.create_container" "_and_object_then_download_object") class CreateContainerAndObjectThenDownloadObject(utils.SwiftScenario): def run(self, objects_per_container=1, object_size=1024, **kwargs): """Create container and objects then download all objects. 
:param objects_per_container: int, number of objects to upload :param object_size: int, temporary local object size :param kwargs: dict, optional parameters to create container """ key_suffix = "object" if objects_per_container > 1: key_suffix = "%i_objects" % objects_per_container container_name = None objects_list = [] with tempfile.TemporaryFile() as dummy_file: # set dummy file to specified object size dummy_file.truncate(object_size) container_name = self._create_container(**kwargs) with atomic.ActionTimer(self, "swift.create_%s" % key_suffix): for i in range(objects_per_container): dummy_file.seek(0) object_name = self._upload_object(container_name, dummy_file, atomic_action=False)[1] objects_list.append(object_name) with atomic.ActionTimer(self, "swift.download_%s" % key_suffix): for object_name in objects_list: self._download_object(container_name, object_name, atomic_action=False) @validation.required_services(consts.Service.SWIFT) @validation.required_openstack(users=True) @scenario.configure(context={"swift_objects": {}}, name="SwiftObjects.list_objects_in_containers") class ListObjectsInContainers(utils.SwiftScenario): def run(self): """List objects in all containers.""" containers = self._list_containers()[1] key_suffix = "container" if len(containers) > 1: key_suffix = "%i_containers" % len(containers) with atomic.ActionTimer(self, "swift.list_objects_in_%s" % key_suffix): for container in containers: self._list_objects(container["name"], atomic_action=False) @validation.required_services(consts.Service.SWIFT) @validation.required_openstack(users=True) @scenario.configure(context={"swift_objects": {}}, name="SwiftObjects.list_and_" "download_objects_in_containers") class ListAndDownloadObjectsInContainers(utils.SwiftScenario): def run(self): """List and download objects in all containers.""" containers = self._list_containers()[1] list_key_suffix = "container" if len(containers) > 1: list_key_suffix = "%i_containers" % len(containers) objects_dict = 
{} with atomic.ActionTimer(self, "swift.list_objects_in_%s" % list_key_suffix): for container in containers: container_name = container["name"] objects_dict[container_name] = self._list_objects( container_name, atomic_action=False)[1] objects_total = sum(map(len, objects_dict.values())) download_key_suffix = "object" if objects_total > 1: download_key_suffix = "%i_objects" % objects_total with atomic.ActionTimer(self, "swift.download_%s" % download_key_suffix): for container_name, objects in objects_dict.items(): for obj in objects: self._download_object(container_name, obj["name"], atomic_action=False) rally-0.9.1/rally/plugins/openstack/scenarios/swift/utils.py0000664000567000056710000001424413073417716025452 0ustar jenkinsjenkins00000000000000# Copyright 2015: Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.plugins.openstack import scenario from rally.task import atomic class SwiftScenario(scenario.OpenStackScenario): """Base class for Swift scenarios with basic atomic actions.""" @atomic.action_timer("swift.list_containers") def _list_containers(self, full_listing=True, **kwargs): """Return list of containers. 
:param full_listing: bool, enable unlimit number of listing returned :param kwargs: dict, other optional parameters to get_account :returns: tuple, (dict of response headers, a list of containers) """ return self.clients("swift").get_account(full_listing=full_listing, **kwargs) @atomic.optional_action_timer("swift.create_container") def _create_container(self, public=False, **kwargs): """Create a new container. :param public: bool, set container as public :param atomic_action: bool, enable create container to be tracked as an atomic action. added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to put_container :returns: container name """ if public: kwargs.setdefault("headers", {}) kwargs["headers"].setdefault("X-Container-Read", ".r:*,.rlistings") container_name = self.generate_random_name() self.clients("swift").put_container(container_name, **kwargs) return container_name @atomic.optional_action_timer("swift.delete_container") def _delete_container(self, container_name, **kwargs): """Delete a container with given name. :param container_name: str, name of the container to delete :param atomic_action: bool, enable delete container to be tracked as an atomic action. added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to delete_container """ self.clients("swift").delete_container(container_name, **kwargs) @atomic.optional_action_timer("swift.list_objects") def _list_objects(self, container_name, full_listing=True, **kwargs): """Return objects inside container. :param container_name: str, name of the container to make the list objects operation against :param full_listing: bool, enable unlimit number of listing returned :param atomic_action: bool, enable list objects to be tracked as an atomic action. 
added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to get_container :returns: tuple, (dict of response headers, a list of objects) """ return self.clients("swift").get_container(container_name, full_listing=full_listing, **kwargs) @atomic.optional_action_timer("swift.upload_object") def _upload_object(self, container_name, content, **kwargs): """Upload content to a given container. :param container_name: str, name of the container to upload object to :param content: file stream, content to upload :param atomic_action: bool, enable upload object to be tracked as an atomic action. added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to put_object :returns: tuple, (etag and object name) """ object_name = self.generate_random_name() return (self.clients("swift").put_object(container_name, object_name, content, **kwargs), object_name) @atomic.optional_action_timer("swift.download_object") def _download_object(self, container_name, object_name, **kwargs): """Download object from container. :param container_name: str, name of the container to download object from :param object_name: str, name of the object to download :param atomic_action: bool, enable download object to be tracked as an atomic action. added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to get_object :returns: tuple, (dict of response headers, the object's contents) """ return self.clients("swift").get_object(container_name, object_name, **kwargs) @atomic.optional_action_timer("swift.delete_object") def _delete_object(self, container_name, object_name, **kwargs): """Delete object from container. :param container_name: str, name of the container to delete object from :param object_name: str, name of the object to delete :param atomic_action: bool, enable delete object to be tracked as an atomic action. 
added and handled by the optional_action_timer() decorator :param kwargs: dict, other optional parameters to delete_object """ self.clients("swift").delete_object(container_name, object_name, **kwargs) rally-0.9.1/rally/plugins/openstack/scenarios/zaqar/0000775000567000056710000000000013073420067023706 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/zaqar/__init__.py0000664000567000056710000000000013073417716026014 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/zaqar/utils.py0000664000567000056710000000416213073417716025432 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.plugins.openstack import scenario from rally.task import atomic class ZaqarScenario(scenario.OpenStackScenario): """Base class for Zaqar scenarios with basic atomic actions.""" @atomic.action_timer("zaqar.create_queue") def _queue_create(self, **kwargs): """Create a Zaqar queue with random name. :param kwargs: other optional parameters to create queues like "metadata" :returns: Zaqar queue instance """ name = self.generate_random_name() return self.clients("zaqar").queue(name, **kwargs) @atomic.action_timer("zaqar.delete_queue") def _queue_delete(self, queue): """Removes a Zaqar queue. :param queue: queue to remove """ queue.delete() def _messages_post(self, queue, messages, min_msg_count, max_msg_count): """Post a list of messages to a given Zaqar queue. 
:param queue: post the messages to queue :param messages: messages to post :param min_msg_count: minimum number of messages :param max_msg_count: maximum number of messages """ with atomic.ActionTimer(self, "zaqar.post_between_%s_and_%s_messages" % (min_msg_count, max_msg_count)): queue.post(messages) @atomic.action_timer("zaqar.list_messages") def _messages_list(self, queue): """Gets messages from a given Zaqar queue. :param queue: get messages from queue :returns: messages iterator """ return queue.messages() rally-0.9.1/rally/plugins/openstack/scenarios/zaqar/basic.py0000664000567000056710000000470713073417716025360 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import random from rally.common import logging from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.zaqar import utils as zutils """Scenarios for Zaqar.""" @scenario.configure(context={"cleanup": ["zaqar"]}, name="ZaqarBasic.create_queue") class CreateQueue(zutils.ZaqarScenario): @logging.log_deprecated_args( "The 'name_length' argument to create_queue is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=None, **kwargs): """Create a Zaqar queue with a random name. 
:param kwargs: other optional parameters to create queues like "metadata" """ self._queue_create(**kwargs) @scenario.configure(context={"cleanup": ["zaqar"]}, name="ZaqarBasic.producer_consumer") class ProducerConsumer(zutils.ZaqarScenario): @logging.log_deprecated_args( "The 'name_length' argument to producer_consumer is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=None, min_msg_count=50, max_msg_count=200, **kwargs): """Serial message producer/consumer. Creates a Zaqar queue with random name, sends a set of messages and then retrieves an iterator containing those. :param min_msg_count: min number of messages to be posted :param max_msg_count: max number of messages to be posted :param kwargs: other optional parameters to create queues like "metadata" """ queue = self._queue_create(**kwargs) msg_count = random.randint(min_msg_count, max_msg_count) messages = [{"body": {"id": idx}, "ttl": 360} for idx in range(msg_count)] self._messages_post(queue, messages, min_msg_count, max_msg_count) self._messages_list(queue) self._queue_delete(queue) rally-0.9.1/rally/plugins/openstack/scenarios/mistral/0000775000567000056710000000000013073420067024243 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/mistral/__init__.py0000664000567000056710000000000013073417716026351 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/mistral/workbooks.py0000664000567000056710000000455013073417720026643 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.mistral import utils from rally.task import types from rally.task import validation """Scenarios for Mistral workbook.""" @validation.required_clients("mistral") @validation.required_openstack(users=True) @validation.required_services(consts.Service.MISTRAL) @scenario.configure(name="MistralWorkbooks.list_workbooks") class ListWorkbooks(utils.MistralScenario): def run(self): """Scenario test mistral workbook-list command. This simple scenario tests the Mistral workbook-list command by listing all the workbooks. """ self._list_workbooks() @validation.required_parameters("definition") @validation.file_exists("definition") @types.convert(definition={"type": "file"}) @validation.required_clients("mistral") @validation.required_openstack(users=True) @validation.required_services(consts.Service.MISTRAL) @scenario.configure(context={"cleanup": ["mistral"]}, name="MistralWorkbooks.create_workbook") class CreateWorkbook(utils.MistralScenario): def run(self, definition, do_delete=False): """Scenario tests workbook creation and deletion. This scenario is a very useful tool to measure the "mistral workbook-create" and "mistral workbook-delete" commands performance. :param definition: string (yaml string) representation of given file content (Mistral workbook definition) :param do_delete: if False than it allows to check performance in "create only" mode. 
""" wb = self._create_workbook(definition) if do_delete: self._delete_workbook(wb.name) rally-0.9.1/rally/plugins/openstack/scenarios/mistral/executions.py0000664000567000056710000001022613073417720027006 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import six import yaml from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.mistral import utils from rally.task import types from rally.task import validation """Scenarios for Mistral execution.""" @validation.required_clients("mistral") @validation.required_openstack(users=True) @validation.required_services(consts.Service.MISTRAL) @scenario.configure(name="MistralExecutions.list_executions", context={"cleanup": ["mistral"]}) class ListExecutions(utils.MistralScenario): def run(self, marker="", limit=None, sort_keys="", sort_dirs=""): """Scenario test mistral execution-list command. This simple scenario tests the Mistral execution-list command by listing all the executions. :param marker: The last execution uuid of the previous page, displays list of executions after "marker". :param limit: number Maximum number of executions to return in a single result. :param sort_keys: id,description :param sort_dirs: [SORT_DIRS] Comma-separated list of sort directions. Default: asc. 
""" self._list_executions(marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs) @validation.required_parameters("definition") @validation.file_exists("definition") @types.convert(definition={"type": "file"}) @types.convert(params={"type": "file"}) @types.convert(wf_input={"type": "file"}) @validation.required_clients("mistral") @validation.required_openstack(users=True) @validation.required_services(consts.Service.MISTRAL) @validation.workbook_contains_workflow("definition", "workflow_name") @scenario.configure( name="MistralExecutions.create_execution_from_workbook", context={"cleanup": ["mistral"]}) class CreateExecutionFromWorkbook(utils.MistralScenario): def run(self, definition, workflow_name=None, wf_input=None, params=None, do_delete=False): """Scenario tests execution creation and deletion. This scenario is a very useful tool to measure the "mistral execution-create" and "mistral execution-delete" commands performance. :param definition: string (yaml string) representation of given file content (Mistral workbook definition) :param workflow_name: string the workflow name to execute. Should be one of the to workflows in the definition. If no workflow_name is passed, one of the workflows in the definition will be taken. :param wf_input: file containing a json string of mistral workflow input :param params: file containing a json string of mistral params (the string is the place to pass the environment) :param do_delete: if False than it allows to check performance in "create only" mode. 
""" wb = self._create_workbook(definition) wb_def = yaml.safe_load(wb.definition) if not workflow_name: workflow_name = six.next(six.iterkeys(wb_def["workflows"])) workflow_identifier = ".".join([wb.name, workflow_name]) if not params: params = {} else: params = json.loads(params) ex = self._create_execution(workflow_identifier, wf_input, **params) if do_delete: self._delete_workbook(wb.name) self._delete_execution(ex) rally-0.9.1/rally/plugins/openstack/scenarios/mistral/utils.py0000664000567000056710000000723613073417720025767 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg import yaml from rally.plugins.openstack import scenario from rally.task import atomic from rally.task import utils MISTRAL_BENCHMARK_OPTS = [ cfg.IntOpt( "mistral_execution_timeout", default=200, help="mistral execution timeout") ] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(MISTRAL_BENCHMARK_OPTS, group=benchmark_group) class MistralScenario(scenario.OpenStackScenario): """Base class for Mistral scenarios with basic atomic actions.""" @atomic.action_timer("mistral.list_workbooks") def _list_workbooks(self): """Gets list of existing workbooks. 
:returns: workbook list """ return self.clients("mistral").workbooks.list() @atomic.action_timer("mistral.create_workbook") def _create_workbook(self, definition): """Create a new workbook. :param definition: workbook description in string (yaml string) format :returns: workbook object """ definition = yaml.safe_load(definition) definition["name"] = self.generate_random_name() definition = yaml.safe_dump(definition) return self.clients("mistral").workbooks.create(definition) @atomic.action_timer("mistral.delete_workbook") def _delete_workbook(self, wb_name): """Delete the given workbook. :param wb_name: the name of workbook that would be deleted. """ self.clients("mistral").workbooks.delete(wb_name) @atomic.action_timer("mistral.list_executions") def _list_executions(self, marker="", limit=None, sort_keys="", sort_dirs=""): """Gets list of existing executions. :returns: execution list """ return self.clients("mistral").executions.list( marker=marker, limit=limit, sort_keys=sort_keys, sort_dirs=sort_dirs) @atomic.action_timer("mistral.create_execution") def _create_execution(self, workflow_identifier, wf_input=None, **params): """Create a new execution. :param workflow_identifier: name or id of the workflow to execute :param input_: json string of mistral workflow input :param params: optional mistral params (this is the place to pass environment). :returns: executions object """ execution = self.clients("mistral").executions.create( workflow_identifier, workflow_input=wf_input, **params) execution = utils.wait_for_status( execution, ready_statuses=["SUCCESS"], failure_statuses=["ERROR"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.mistral_execution_timeout) return execution @atomic.action_timer("mistral.delete_execution") def _delete_execution(self, execution): """Delete the given execution. :param ex: the execution that would be deleted. 
""" self.clients("mistral").executions.delete(execution.id) rally-0.9.1/rally/plugins/openstack/scenarios/murano/0000775000567000056710000000000013073420067024071 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/murano/__init__.py0000664000567000056710000000000013073417716026177 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/murano/packages.py0000664000567000056710000001462413073417720026232 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from rally import consts from rally.plugins.openstack.scenarios.murano import utils from rally.task import scenario from rally.task import validation """Scenarios for Murano packages.""" @validation.required_parameters("package") @validation.file_exists(param_name="package", mode=os.F_OK) @validation.required_clients("murano") @validation.required_services(consts.Service.MURANO) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["murano.packages"]}, name="MuranoPackages.import_and_list_packages") class ImportAndListPackages(utils.MuranoScenario): def run(self, package, include_disabled=False): """Import Murano package and get list of packages. Measure the "murano import-package" and "murano package-list" commands performance. 
@validation.required_parameters("package")
@validation.file_exists(param_name="package", mode=os.F_OK)
@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["murano.packages"]},
                    name="MuranoPackages.import_and_delete_package")
class ImportAndDeletePackage(utils.MuranoScenario):

    def run(self, package):
        """Import Murano package and then delete it.

        Measure the "murano import-package" and "murano package-delete"
        commands performance. It imports Murano package from "package"
        (if it is not a zip archive then zip archive will be prepared)
        and deletes it.

        :param package: path to zip archive that represents Murano
                        application package or absolute path to folder
                        with package components
        """
        archive_path = self._zip_package(package)
        try:
            imported = self._import_package(archive_path)
            self._delete_package(imported)
        finally:
            # The prepared zip archive is a temporary artifact; always
            # remove it, even when import/delete fails.
            os.remove(archive_path)


@validation.required_parameters("package", "body")
@validation.file_exists(param_name="package", mode=os.F_OK)
@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["murano.packages"]},
                    name="MuranoPackages.package_lifecycle")
class PackageLifecycle(utils.MuranoScenario):

    def run(self, package, body, operation="replace"):
        """Import Murano package, modify it and then delete it.

        Measure the Murano import, update and delete package commands
        performance. It imports Murano package from "package" (if it is
        not a zip archive then zip archive will be prepared), modifies it
        (using data from "body") and deletes.

        :param package: path to zip archive that represents Murano
                        application package or absolute path to folder
                        with package components
        :param body: dict object that defines what package property will
                     be updated, e.g {"tags": ["tag"]} or
                     {"enabled": "true"}
        :param operation: string object that defines the way of how
                          package property will be updated, allowed
                          operations are "add", "replace" or "delete".
                          Default value is "replace".
        """
        archive_path = self._zip_package(package)
        try:
            imported = self._import_package(archive_path)
            self._update_package(imported, body, operation)
            self._delete_package(imported)
        finally:
            os.remove(archive_path)


@validation.required_parameters("package", "filter_query")
@validation.file_exists(param_name="package", mode=os.F_OK)
@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["murano.packages"]},
                    name="MuranoPackages.import_and_filter_applications")
class ImportAndFilterApplications(utils.MuranoScenario):

    def run(self, package, filter_query):
        """Import Murano package and then filter packages by some criteria.

        Measure the performance of package import and package filtering
        commands. It imports Murano package from "package" (if it is not
        a zip archive then zip archive will be prepared) and filters
        packages by some criteria.

        :param package: path to zip archive that represents Murano
                        application package or absolute path to folder
                        with package components
        :param filter_query: dict that contains filter criteria, lately
                             it will be passed as **kwargs to filter
                             method e.g. {"category": "Web"}
        """
        archive_path = self._zip_package(package)
        try:
            self._import_package(archive_path)
            self._filter_applications(filter_query)
        finally:
            os.remove(archive_path)
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.murano import utils
from rally.task import atomic
from rally.task import validation


"""Scenarios for Murano environments."""


@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@scenario.configure(context={"cleanup": ["murano.environments"]},
                    name="MuranoEnvironments.list_environments")
class ListEnvironments(utils.MuranoScenario):

    def run(self):
        """List the murano environments.

        Run murano environment-list for listing all environments.
        """
        self._list_environments()


@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@scenario.configure(context={"cleanup": ["murano.environments"]},
                    name="MuranoEnvironments.create_and_delete_environment")
class CreateAndDeleteEnvironment(utils.MuranoScenario):

    def run(self):
        """Create environment, session and delete environment."""
        env = self._create_environment()
        # A configuration session must exist before the environment can
        # be modified or deleted cleanly.
        self._create_session(env.id)
        self._delete_environment(env)


@validation.required_clients("murano")
@validation.required_services(consts.Service.MURANO)
@validation.required_contexts("murano_packages")
@scenario.configure(context={"cleanup": ["murano"], "roles": ["admin"]},
                    name="MuranoEnvironments.create_and_deploy_environment")
class CreateAndDeployEnvironment(utils.MuranoScenario):

    def run(self, packages_per_env=1):
        """Create environment, session and deploy environment.

        Create environment, create session, add app to environment
        packages_per_env times, send environment to deploy.

        :param packages_per_env: number of packages per environment
        """
        env = self._create_environment()
        sess = self._create_session(env.id)
        pkg = self.context["tenant"]["packages"][0]
        # Time all service creations as a single atomic action; the
        # per-call timers are disabled via atomic_action=False.
        with atomic.ActionTimer(self, "murano.create_services"):
            for _ in range(packages_per_env):
                self._create_service(env, sess,
                                     pkg.fully_qualified_name,
                                     atomic_action=False)
        self._deploy_environment(env, sess)
import os
import shutil
import tempfile
import uuid
import zipfile

from oslo_config import cfg
import yaml

from rally.common import fileutils
from rally.common import utils as common_utils
from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils

CONF = cfg.CONF

MURANO_BENCHMARK_OPTS = [
    cfg.IntOpt("murano_deploy_environment_timeout", default=1200,
               deprecated_name="deploy_environment_timeout",
               help="A timeout in seconds for an environment deploy"),
    cfg.IntOpt("murano_deploy_environment_check_interval", default=5,
               deprecated_name="deploy_environment_check_interval",
               help="Deploy environment check interval in seconds"),
]

benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(MURANO_BENCHMARK_OPTS, group=benchmark_group)


class MuranoScenario(scenario.OpenStackScenario):
    """Base class for Murano scenarios with basic atomic actions."""

    @atomic.action_timer("murano.list_environments")
    def _list_environments(self):
        """Return environments list."""
        return self.clients("murano").environments.list()

    @atomic.action_timer("murano.create_environment")
    def _create_environment(self):
        """Create environment with a generated random name.

        :returns: Environment instance
        """
        env_name = self.generate_random_name()
        return self.clients("murano").environments.create({"name": env_name})

    @atomic.action_timer("murano.delete_environment")
    def _delete_environment(self, environment):
        """Delete given environment.

        Return when the environment is actually deleted.

        :param environment: Environment instance
        """
        self.clients("murano").environments.delete(environment.id)

    @atomic.action_timer("murano.create_session")
    def _create_session(self, environment_id):
        """Create session for environment with specific id

        :param environment_id: Environment id
        :returns: Session instance
        """
        return self.clients("murano").sessions.configure(environment_id)

    @atomic.optional_action_timer("murano.create_service")
    def _create_service(self, environment, session, full_package_name,
                        image_name=None, flavor_name=None):
        """Create Murano service.

        :param environment: Environment instance
        :param session: Session instance
        :param full_package_name: full name of the Murano package
        :param image_name: Image name
        :param flavor_name: Flavor name
        :param atomic_action: True if this is atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :returns: Service instance
        """
        app_id = str(uuid.uuid4())
        data = {"?": {"id": app_id, "type": full_package_name},
                "name": self.generate_random_name()}
        return self.clients("murano").services.post(
            environment_id=environment.id, path="/", data=data,
            session_id=session.id)

    @atomic.action_timer("murano.deploy_environment")
    def _deploy_environment(self, environment, session):
        """Deploy environment.

        Blocks until the environment reaches READY state or the deploy
        times out; "DEPLOY FAILURE" is treated as a terminal failure.

        :param environment: Environment instance
        :param session: Session instance
        """
        self.clients("murano").sessions.deploy(environment.id,
                                               session.id)
        config = CONF.benchmark
        utils.wait_for(
            environment,
            ready_statuses=["READY"],
            update_resource=utils.get_from_manager(["DEPLOY FAILURE"]),
            timeout=config.murano_deploy_environment_timeout,
            check_interval=config.murano_deploy_environment_check_interval
        )

    @atomic.action_timer("murano.list_packages")
    def _list_packages(self, include_disabled=False):
        """Returns packages list.

        :param include_disabled: if "True" then disabled packages will be
                                 included in a the result.
                                 Default value is False.
        :returns: list of imported packages
        """
        return self.clients("murano").packages.list(
            include_disabled=include_disabled)

    @atomic.action_timer("murano.import_package")
    def _import_package(self, package):
        """Import package to the Murano.

        :param package: path to zip archive with Murano application
        :returns: imported package
        """
        # Open the archive in binary mode (zip data is binary) and make
        # sure the handle is closed after upload; the previous
        # implementation used the default text mode and leaked the file
        # object.
        with open(package, "rb") as package_file:
            return self.clients("murano").packages.create(
                {}, {"file": package_file})

    @atomic.action_timer("murano.delete_package")
    def _delete_package(self, package):
        """Delete specified package.

        :param package: package that will be deleted
        """
        self.clients("murano").packages.delete(package.id)

    @atomic.action_timer("murano.update_package")
    def _update_package(self, package, body, operation="replace"):
        """Update specified package.

        :param package: package that will be updated
        :param body: dict object that defines what package property will
                     be updated, e.g {"tags": ["tag"]} or
                     {"enabled": "true"}
        :param operation: string object that defines the way of how
                          package property will be updated, allowed
                          operations are "add", "replace" or "delete".
                          Default value is "replace".
        :returns: updated package
        """
        return self.clients("murano").packages.update(
            package.id, body, operation)

    @atomic.action_timer("murano.filter_applications")
    def _filter_applications(self, filter_query):
        """Filter list of uploaded application by specified criteria.

        :param filter_query: dict that contains filter criteria, it
                             will be passed as **kwargs to filter method
                             e.g. {"category": "Web"}
        :returns: filtered list of packages
        """
        return self.clients("murano").packages.filter(**filter_query)

    def _zip_package(self, package_path):
        """Call _prepare_package method that returns path to zip archive."""
        return MuranoPackageManager(self.task)._prepare_package(package_path)


class MuranoPackageManager(common_utils.RandomNameGeneratorMixin):
    """Prepares Murano application packages for import.

    Generates unique application names so several packages can be
    imported into one tenant without name conflicts.
    """

    RESOURCE_NAME_FORMAT = "app.rally_XXXXXXXX_XXXXXXXX"

    def __init__(self, task):
        self.task = task

    @staticmethod
    def _read_from_file(filename):
        """Load YAML data from the given file."""
        with open(filename, "r") as f:
            read_data = f.read()
        return yaml.safe_load(read_data)

    @staticmethod
    def _write_to_file(data, filename):
        """Dump YAML data to the given file."""
        with open(filename, "w") as f:
            yaml.safe_dump(data, f)

    def _change_app_fullname(self, app_dir):
        """Change application full name.

        To avoid name conflict error during package import (when user
        tries to import a few packages into the same tenant) need to
        change the application name. For doing this need to replace
        following parts in manifest.yaml

        from
            ...
            FullName: app.name
            ...
            Classes:
                app.name: app_class.yaml
        to:
            ...
            FullName: <new_name>
            ...
            Classes:
                <new_name>: app_class.yaml

        :param app_dir: path to directory with Murano application context
        """
        new_fullname = self.generate_random_name()

        manifest_file = os.path.join(app_dir, "manifest.yaml")
        manifest = self._read_from_file(manifest_file)

        class_file_name = manifest["Classes"][manifest["FullName"]]

        # update manifest.yaml file: re-key Classes under the new name
        del manifest["Classes"][manifest["FullName"]]
        manifest["FullName"] = new_fullname
        manifest["Classes"][new_fullname] = class_file_name
        self._write_to_file(manifest, manifest_file)

    def _prepare_package(self, package_path):
        """Check whether the package path is path to zip archive or not.

        If package_path is not a path to zip archive but path to Murano
        application folder, than method prepares zip archive with Murano
        application. It copies directory with Murano app files to
        temporary folder, changes manifest.yaml (to avoid '409 Conflict'
        errors in Murano) and prepares zip package.

        :param package_path: path to zip archive or directory with
                             package components
        :returns: path to zip archive with Murano application
        """
        if not zipfile.is_zipfile(package_path):
            tmp_dir = tempfile.mkdtemp()
            pkg_dir = os.path.join(tmp_dir, "package/")
            try:
                shutil.copytree(package_path, pkg_dir)
                self._change_app_fullname(pkg_dir)
                package_path = fileutils.pack_dir(pkg_dir)
            finally:
                # Remove the working copy even if packing failed.
                shutil.rmtree(tmp_dir)

        return package_path
from oslo_config import cfg
import requests

from rally.common import logging
from rally import exceptions
from rally.plugins.openstack import scenario
from rally.task import atomic
from rally.task import utils

LOG = logging.getLogger(__name__)

HEAT_BENCHMARK_OPTS = [
    cfg.FloatOpt("heat_stack_create_prepoll_delay",
                 default=2.0,
                 help="Time(in sec) to sleep after creating a resource before "
                      "polling for it status."),
    cfg.FloatOpt("heat_stack_create_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for heat stack to be created."),
    cfg.FloatOpt("heat_stack_create_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack creation."),
    cfg.FloatOpt("heat_stack_delete_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for heat stack to be deleted."),
    cfg.FloatOpt("heat_stack_delete_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack deletion."),
    cfg.FloatOpt("heat_stack_check_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack to be checked."),
    cfg.FloatOpt("heat_stack_check_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack checking."),
    cfg.FloatOpt("heat_stack_update_prepoll_delay",
                 default=2.0,
                 help="Time(in sec) to sleep after updating a resource before "
                      "polling for it status."),
    cfg.FloatOpt("heat_stack_update_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack to be updated."),
    cfg.FloatOpt("heat_stack_update_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack update."),
    cfg.FloatOpt("heat_stack_suspend_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack to be suspended."),
    cfg.FloatOpt("heat_stack_suspend_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack suspend."),
    cfg.FloatOpt("heat_stack_resume_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack to be resumed."),
    cfg.FloatOpt("heat_stack_resume_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack resume."),
    cfg.FloatOpt("heat_stack_snapshot_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack snapshot to "
                      "be created."),
    cfg.FloatOpt("heat_stack_snapshot_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack snapshot to be created."),
    cfg.FloatOpt("heat_stack_restore_timeout",
                 default=3600.0,
                 help="Time(in sec) to wait for stack to be restored from "
                      "snapshot."),
    cfg.FloatOpt("heat_stack_restore_poll_interval",
                 default=1.0,
                 help="Time interval(in sec) between checks when waiting for "
                      "stack to be restored."),
    cfg.FloatOpt("heat_stack_scale_timeout",
                 default=3600.0,
                 help="Time (in sec) to wait for stack to scale up or down."),
    cfg.FloatOpt("heat_stack_scale_poll_interval",
                 default=1.0,
                 help="Time interval (in sec) between checks when waiting for "
                      "a stack to scale up or down."),
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_opts(HEAT_BENCHMARK_OPTS, group=benchmark_group)


class HeatScenario(scenario.OpenStackScenario):
    """Base class for Heat scenarios with basic atomic actions."""

    @atomic.action_timer("heat.list_stacks")
    def _list_stacks(self):
        """Return user stack list."""
        return list(self.clients("heat").stacks.list())

    @atomic.action_timer("heat.create_stack")
    def _create_stack(self, template, parameters=None,
                      files=None, environment=None):
        """Create a new stack and wait until it is CREATE_COMPLETE.

        :param template: template with stack description.
        :param parameters: template parameters used during stack creation
        :param files: additional files used in template
        :param environment: stack environment definition
        :returns: object of stack
        """
        stack_params = {
            "stack_name": self.generate_random_name(),
            "disable_rollback": True,
            "parameters": parameters or {},
            "template": template,
            "files": files or {},
            "environment": environment or {}
        }

        # heat client returns body instead manager object, so we should
        # get manager object using stack_id
        created_id = self.clients("heat").stacks.create(
            **stack_params)["stack"]["id"]
        new_stack = self.clients("heat").stacks.get(created_id)

        self.sleep_between(CONF.benchmark.heat_stack_create_prepoll_delay)

        return utils.wait_for(
            new_stack,
            ready_statuses=["CREATE_COMPLETE"],
            failure_statuses=["CREATE_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_create_timeout,
            check_interval=CONF.benchmark.heat_stack_create_poll_interval)

    @atomic.action_timer("heat.update_stack")
    def _update_stack(self, stack, template, parameters=None,
                      files=None, environment=None):
        """Update an existing stack

        :param stack: stack that need to be updated
        :param template: Updated template
        :param parameters: template parameters for stack update
        :param files: additional files used in template
        :param environment: stack environment definition
        :returns: object of updated stack
        """
        update_params = {
            "stack_name": stack.stack_name,
            "disable_rollback": True,
            "parameters": parameters or {},
            "template": template,
            "files": files or {},
            "environment": environment or {}
        }

        self.clients("heat").stacks.update(stack.id, **update_params)

        self.sleep_between(CONF.benchmark.heat_stack_update_prepoll_delay)

        return utils.wait_for(
            stack,
            ready_statuses=["UPDATE_COMPLETE"],
            failure_statuses=["UPDATE_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_update_timeout,
            check_interval=CONF.benchmark.heat_stack_update_poll_interval)

    @atomic.action_timer("heat.check_stack")
    def _check_stack(self, stack):
        """Check given stack.

        Check the stack and stack resources.

        :param stack: stack that needs to be checked
        """
        self.clients("heat").actions.check(stack.id)
        utils.wait_for(
            stack,
            ready_statuses=["CHECK_COMPLETE"],
            failure_statuses=["CHECK_FAILED"],
            update_resource=utils.get_from_manager(["CHECK_FAILED"]),
            timeout=CONF.benchmark.heat_stack_check_timeout,
            check_interval=CONF.benchmark.heat_stack_check_poll_interval)

    @atomic.action_timer("heat.delete_stack")
    def _delete_stack(self, stack):
        """Delete given stack.

        Returns when the stack is actually deleted.

        :param stack: stack object
        """
        stack.delete()
        utils.wait_for_status(
            stack,
            ready_statuses=["DELETE_COMPLETE"],
            failure_statuses=["DELETE_FAILED"],
            check_deletion=True,
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_delete_timeout,
            check_interval=CONF.benchmark.heat_stack_delete_poll_interval)

    @atomic.action_timer("heat.suspend_stack")
    def _suspend_stack(self, stack):
        """Suspend given stack.

        :param stack: stack that needs to be suspended
        """
        self.clients("heat").actions.suspend(stack.id)
        utils.wait_for(
            stack,
            ready_statuses=["SUSPEND_COMPLETE"],
            failure_statuses=["SUSPEND_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_suspend_timeout,
            check_interval=CONF.benchmark.heat_stack_suspend_poll_interval)

    @atomic.action_timer("heat.resume_stack")
    def _resume_stack(self, stack):
        """Resume given stack.

        :param stack: stack that needs to be resumed
        """
        self.clients("heat").actions.resume(stack.id)
        utils.wait_for(
            stack,
            ready_statuses=["RESUME_COMPLETE"],
            failure_statuses=["RESUME_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_resume_timeout,
            check_interval=CONF.benchmark.heat_stack_resume_poll_interval)

    @atomic.action_timer("heat.snapshot_stack")
    def _snapshot_stack(self, stack):
        """Creates a snapshot for given stack.

        :param stack: stack that will be used as base for snapshot
        :returns: snapshot created for given stack
        """
        snapshot_data = self.clients("heat").stacks.snapshot(stack.id)
        utils.wait_for(
            stack,
            ready_statuses=["SNAPSHOT_COMPLETE"],
            failure_statuses=["SNAPSHOT_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_snapshot_timeout,
            check_interval=CONF.benchmark.heat_stack_snapshot_poll_interval)
        return snapshot_data

    @atomic.action_timer("heat.restore_stack")
    def _restore_stack(self, stack, snapshot_id):
        """Restores stack from given snapshot.

        :param stack: stack that will be restored from snapshot
        :param snapshot_id: id of given snapshot
        """
        self.clients("heat").stacks.restore(stack.id, snapshot_id)
        utils.wait_for(
            stack,
            ready_statuses=["RESTORE_COMPLETE"],
            failure_statuses=["RESTORE_FAILED"],
            update_resource=utils.get_from_manager(),
            timeout=CONF.benchmark.heat_stack_restore_timeout,
            check_interval=CONF.benchmark.heat_stack_restore_poll_interval
        )

    @atomic.action_timer("heat.show_output")
    def _stack_show_output(self, stack, output_key):
        """Execute output_show for specified "output_key".

        This method uses new output API call.

        :param stack: stack with output_key output.
        :param output_key: The name of the output.
        """
        return self.clients("heat").stacks.output_show(stack.id, output_key)

    @atomic.action_timer("heat.show_output_via_API")
    def _stack_show_output_via_API(self, stack, output_key):
        """Execute output_show for specified "output_key".

        This method uses old way for getting output value.
        It gets whole stack object and then finds necessary "output_key".

        :param stack: stack with output_key output.
        :param output_key: The name of the output.
        """
        # this code copy-pasted and adopted for rally from old client version
        # https://github.com/openstack/python-heatclient/blob/0.8.0/heatclient/
        # v1/shell.py#L682-L699
        fresh_stack = self.clients("heat").stacks.get(stack_id=stack.id)
        outputs = fresh_stack.to_dict().get("outputs", [])
        # None when the key is absent, matching the old behaviour.
        return next((o for o in outputs
                     if o["output_key"] == output_key), None)

    @atomic.action_timer("heat.list_output")
    def _stack_list_output(self, stack):
        """Execute output_list for specified "stack".

        This method uses new output API call.

        :param stack: stack to call output-list.
        """
        return self.clients("heat").stacks.output_list(stack.id)

    @atomic.action_timer("heat.list_output_via_API")
    def _stack_list_output_via_API(self, stack):
        """Execute output_list for specified "stack".

        This method uses old way for getting output value.
        It gets whole stack object and then prints all outputs
        belongs this stack.

        :param stack: stack to call output-list.
        """
        # this code copy-pasted and adopted for rally from old client version
        # https://github.com/openstack/python-heatclient/blob/0.8.0/heatclient/
        # v1/shell.py#L649-L663
        fresh_stack = self.clients("heat").stacks.get(stack_id=stack.id)
        return fresh_stack.to_dict()["outputs"]

    def _count_instances(self, stack):
        """Count instances in a Heat stack.

        :param stack: stack to count instances in.
        """
        resources = self.clients("heat").resources.list(stack.id,
                                                        nested_depth=1)
        return sum(1 for r in resources
                   if r.resource_type == "OS::Nova::Server")

    def _scale_stack(self, stack, output_key, delta):
        """Scale a stack up or down.

        Calls the webhook given in the output value identified by
        'output_key', and waits for the stack size to change by 'delta'.

        :param stack: stack to scale up or down
        :param output_key: The name of the output to get the URL from
        :param delta: The expected change in number of instances in
                      the stack (signed int)
        """
        current_count = self._count_instances(stack)
        target_count = current_count + delta
        LOG.debug("Scaling stack %s from %s to %s instances with %s" %
                  (stack.id, current_count, target_count, output_key))
        with atomic.ActionTimer(self, "heat.scale_with_%s" % output_key):
            self._stack_webhook(stack, output_key)
            utils.wait_for(
                stack,
                is_ready=lambda s: (
                    self._count_instances(s) == target_count),
                failure_statuses=["UPDATE_FAILED"],
                update_resource=utils.get_from_manager(),
                timeout=CONF.benchmark.heat_stack_scale_timeout,
                check_interval=CONF.benchmark.heat_stack_scale_poll_interval)

    def _stack_webhook(self, stack, output_key):
        """POST to the URL given in the output value identified by output_key.

        This can be used to scale stacks up and down, for instance.

        :param stack: stack to call a webhook on
        :param output_key: The name of the output to get the URL from
        :raises InvalidConfigException: if the output key is not found
        """
        url = next((o["output_value"] for o in stack.outputs
                    if o["output_key"] == output_key), None)
        if url is None:
            raise exceptions.InvalidConfigException(
                "No output key %(key)s found in stack %(id)s" %
                {"key": output_key, "id": stack.id})

        with atomic.ActionTimer(self, "heat.%s_webhook" % output_key):
            requests.post(url).raise_for_status()
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.heat import utils
from rally.task import atomic
from rally.task import types
from rally.task import validation


"""Scenarios for Heat stacks."""


@types.convert(template_path={"type": "file"}, files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_and_list_stack")
class CreateAndListStack(utils.HeatScenario):

    def run(self, template_path, parameters=None,
            files=None, environment=None):
        """Create a stack and then list all stacks.

        Measure the "heat stack-create" and "heat stack-list" commands
        performance.

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        created = self._create_stack(template_path, parameters,
                                     files, environment)
        self.assertTrue(created)
        stacks = self._list_stacks()
        self.assertIn(created.id, [s.id for s in stacks])


@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(name="HeatStacks.list_stacks_and_resources")
class ListStacksAndResources(utils.HeatScenario):

    def run(self):
        """List all resources from tenant stacks."""
        stacks = self._list_stacks()
        with atomic.ActionTimer(
                self, "heat.list_resources_of_%s_stacks" % len(stacks)):
            for stack in stacks:
                self.clients("heat").resources.list(stack.id)


@types.convert(template_path={"type": "file"}, files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_and_delete_stack")
class CreateAndDeleteStack(utils.HeatScenario):

    def run(self, template_path, parameters=None,
            files=None, environment=None):
        """Create and then delete a stack.

        Measure the "heat stack-create" and "heat stack-delete" commands
        performance.

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        created = self._create_stack(template_path, parameters,
                                     files, environment)
        self._delete_stack(created)


@types.convert(template_path={"type": "file"}, files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_check_delete_stack")
class CreateCheckDeleteStack(utils.HeatScenario):

    def run(self, template_path, parameters=None,
            files=None, environment=None):
        """Create, check and delete a stack.

        Measure the performance of the following commands:
        - heat stack-create
        - heat action-check
        - heat stack-delete

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        created = self._create_stack(template_path, parameters,
                                     files, environment)
        self._check_stack(created)
        self._delete_stack(created)


@types.convert(template_path={"type": "file"},
               updated_template_path={"type": "file"},
               files={"type": "file_dict"},
               updated_files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_update_delete_stack")
class CreateUpdateDeleteStack(utils.HeatScenario):

    def run(self, template_path, updated_template_path,
            parameters=None, updated_parameters=None,
            files=None, updated_files=None,
            environment=None, updated_environment=None):
        """Create, update and then delete a stack.

        Measure the "heat stack-create", "heat stack-update" and
        "heat stack-delete" commands performance.

        :param template_path: path to stack template file
        :param updated_template_path: path to updated stack template file
        :param parameters: parameters to use in heat template
        :param updated_parameters: parameters to use in updated heat template
                                   If not specified then parameters will be
                                   used instead
        :param files: files used in template
        :param updated_files: files used in updated template. If not specified
                              files value will be used instead
        :param environment: stack environment definition
        :param updated_environment: environment definition for updated stack
        """
        created = self._create_stack(template_path, parameters,
                                     files, environment)
        # Any "updated_*" argument left unset falls back to its
        # original counterpart.
        self._update_stack(created, updated_template_path,
                           updated_parameters or parameters,
                           updated_files or files,
                           updated_environment or environment)
        self._delete_stack(created)


@types.convert(template_path={"type": "file"}, files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_stack_and_scale")
class CreateStackAndScale(utils.HeatScenario):

    def run(self, template_path, output_key, delta,
            parameters=None, files=None, environment=None):
        """Create an autoscaling stack and invoke a scaling policy.

        Measure the performance of autoscaling webhooks.

        :param template_path: path to template file that includes an
                              OS::Heat::AutoScalingGroup resource
        :param output_key: the stack output key that corresponds to
                           the scaling webhook
        :param delta: the number of instances the stack is expected to
                      change by.
        :param parameters: parameters to use in heat template
        :param files: files used in template (dict of file name to
                      file path)
        :param environment: stack environment definition (dict)
        """
        # TODO(stpierre): Kilo Heat is *much* better than Juno for the
        # requirements of this scenario, so once Juno goes out of
        # support we should update this scenario to suck less. Namely:
        #
        # * Kilo Heat can supply alarm_url attributes without needing
        #   an output key, so instead of getting the output key from
        #   the user, just get the name of the ScalingPolicy to apply.
        # * Kilo Heat changes the status of a stack while scaling it,
        #   so _scale_stack() can check for the stack to have changed
        #   size and for it to be in UPDATE_COMPLETE state, so the
        #   user no longer needs to specify the expected delta.
        created = self._create_stack(template_path, parameters,
                                     files, environment)
        self._scale_stack(created, output_key, delta)


@types.convert(template_path={"type": "file"}, files={"type": "file_dict"})
@validation.validate_heat_template("template_path")
@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["heat"]},
                    name="HeatStacks.create_suspend_resume_delete_stack")
class CreateSuspendResumeDeleteStack(utils.HeatScenario):

    def run(self, template_path, parameters=None,
            files=None, environment=None):
        """Create, suspend-resume and then delete a stack.

        Measure performance of the following commands:
        heat stack-create
        heat action-suspend
        heat action-resume
        heat stack-delete

        :param template_path: path to stack template file
        :param parameters: parameters to use in heat template
        :param files: files used in template
        :param environment: stack environment definition
        """
        stack = self._create_stack(template_path, parameters,
                                   files, environment)
        self._suspend_stack(stack)
        self._resume_stack(stack)
        self._delete_stack(stack)


@validation.required_services(consts.Service.HEAT)
@validation.required_openstack(users=True)
@scenario.configure(name="HeatStacks.list_stacks_and_events")
class ListStacksAndEvents(utils.HeatScenario):

    def run(self):
        """List events from tenant stacks."""
        stacks = self._list_stacks()
        with atomic.ActionTimer(
                self, "heat.list_events_of_%s_stacks" % len(stacks)):
            for stack in stacks:
                self.clients("heat").events.list(stack.id)
Measure performance of the following commands: heat stack-create heat stack-snapshot heat stack-restore heat stack-delete :param template_path: path to stack template file :param parameters: parameters to use in heat template :param files: files used in template :param environment: stack environment definition """ stack = self._create_stack( template_path, parameters, files, environment) snapshot = self._snapshot_stack(stack) self._restore_stack(stack, snapshot["id"]) self._delete_stack(stack) @types.convert(template_path={"type": "file"}, files={"type": "file_dict"}) @validation.required_services(consts.Service.HEAT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["heat"]}, name="HeatStacks.create_stack_and_show_output_via_API") class CreateStackAndShowOutputViaAPI(utils.HeatScenario): def run(self, template_path, output_key, parameters=None, files=None, environment=None): """Create stack and show output by using old algorithm. Measure performance of the following commands: heat stack-create heat output-show :param template_path: path to stack template file :param output_key: the stack output key that corresponds to the scaling webhook :param parameters: parameters to use in heat template :param files: files used in template :param environment: stack environment definition """ stack = self._create_stack( template_path, parameters, files, environment) self._stack_show_output_via_API(stack, output_key) @types.convert(template_path={"type": "file"}, files={"type": "file_dict"}) @validation.required_services(consts.Service.HEAT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["heat"]}, name="HeatStacks.create_stack_and_show_output") class CreateStackAndShowOutput(utils.HeatScenario): def run(self, template_path, output_key, parameters=None, files=None, environment=None): """Create stack and show output by using new algorithm. 
Measure performance of the following commands: heat stack-create heat output-show :param template_path: path to stack template file :param output_key: the stack output key that corresponds to the scaling webhook :param parameters: parameters to use in heat template :param files: files used in template :param environment: stack environment definition """ stack = self._create_stack( template_path, parameters, files, environment) self._stack_show_output(stack, output_key) @types.convert(template_path={"type": "file"}, files={"type": "file_dict"}) @validation.required_services(consts.Service.HEAT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["heat"]}, name="HeatStacks.create_stack_and_list_output_via_API") class CreateStackAndListOutputViaAPI(utils.HeatScenario): def run(self, template_path, parameters=None, files=None, environment=None): """Create stack and list outputs by using old algorithm. Measure performance of the following commands: heat stack-create heat output-list :param template_path: path to stack template file :param parameters: parameters to use in heat template :param files: files used in template :param environment: stack environment definition """ stack = self._create_stack( template_path, parameters, files, environment) self._stack_list_output_via_API(stack) @types.convert(template_path={"type": "file"}, files={"type": "file_dict"}) @validation.required_services(consts.Service.HEAT) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["heat"]}, name="HeatStacks.create_stack_and_list_output") class CreateStackAndListOutput(utils.HeatScenario): def run(self, template_path, parameters=None, files=None, environment=None): """Create stack and list outputs by using new algorithm. 
Measure performance of the following commands: heat stack-create heat output-list :param template_path: path to stack template file :param parameters: parameters to use in heat template :param files: files used in template :param environment: stack environment definition """ stack = self._create_stack( template_path, parameters, files, environment) self._stack_list_output(stack) rally-0.9.1/rally/plugins/openstack/scenarios/sahara/0000775000567000056710000000000013073420067024027 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/sahara/__init__.py0000664000567000056710000000000013073417716026135 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/sahara/jobs.py0000664000567000056710000001253113073417716025347 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.sahara import utils from rally.task import validation LOG = logging.getLogger(__name__) """Benchmark scenarios for Sahara jobs.""" @validation.required_services(consts.Service.SAHARA) @validation.required_contexts("users", "sahara_image", "sahara_job_binaries", "sahara_cluster") @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaJob.create_launch_job") class CreateLaunchJob(utils.SaharaScenario): def run(self, job_type, configs, job_idx=0): """Create and execute a Sahara EDP Job. This scenario Creates a Job entity and launches an execution on a Cluster. :param job_type: type of the Data Processing Job :param configs: config dict that will be passed to a Job Execution :param job_idx: index of a job in a sequence. This index will be used to create different atomic actions for each job in a sequence """ mains = self.context["tenant"]["sahara"]["mains"] libs = self.context["tenant"]["sahara"]["libs"] name = self.generate_random_name() job = self.clients("sahara").jobs.create(name=name, type=job_type, description="", mains=mains, libs=libs) cluster_id = self.context["tenant"]["sahara"]["cluster"] if job_type.lower() == "java": input_id = None output_id = None else: input_id = self.context["tenant"]["sahara"]["input"] output_id = self._create_output_ds().id self._run_job_execution(job_id=job.id, cluster_id=cluster_id, input_id=input_id, output_id=output_id, configs=configs, job_idx=job_idx) @validation.required_services(consts.Service.SAHARA) @validation.required_contexts("users", "sahara_image", "sahara_job_binaries", "sahara_cluster") @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaJob.create_launch_job_sequence") class CreateLaunchJobSequence(utils.SaharaScenario): def run(self, jobs): """Create and execute a sequence of the Sahara EDP Jobs. 
This scenario Creates a Job entity and launches an execution on a Cluster for every job object provided. :param jobs: list of jobs that should be executed in one context """ launch_job = CreateLaunchJob(self.context) for idx, job in enumerate(jobs): LOG.debug("Launching Job. Sequence #%d" % idx) launch_job.run(job["job_type"], job["configs"], idx) @validation.required_services(consts.Service.SAHARA) @validation.required_contexts("users", "sahara_image", "sahara_job_binaries", "sahara_cluster") @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaJob.create_launch_job_sequence_with_scaling") class CreateLaunchJobSequenceWithScaling(utils.SaharaScenario,): def run(self, jobs, deltas): """Create and execute Sahara EDP Jobs on a scaling Cluster. This scenario Creates a Job entity and launches an execution on a Cluster for every job object provided. The Cluster is scaled according to the deltas values and the sequence is launched again. :param jobs: list of jobs that should be executed in one context :param deltas: list of integers which will be used to add or remove worker nodes from the cluster """ cluster_id = self.context["tenant"]["sahara"]["cluster"] launch_job_sequence = CreateLaunchJobSequence(self.context) launch_job_sequence.run(jobs) for delta in deltas: # The Cluster is fetched every time so that its node groups have # correct 'count' values. cluster = self.clients("sahara").clusters.get(cluster_id) LOG.debug("Scaling cluster %s with delta %d" % (cluster.name, delta)) if delta == 0: # Zero scaling makes no sense. continue elif delta > 0: self._scale_cluster_up(cluster, delta) elif delta < 0: self._scale_cluster_down(cluster, delta) LOG.debug("Starting Job sequence") launch_job_sequence.run(jobs) rally-0.9.1/rally/plugins/openstack/scenarios/sahara/node_group_templates.py0000664000567000056710000001166413073417720030632 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.sahara import utils from rally.task import types from rally.task import validation """Scenarios for Sahara node group templates.""" @types.convert(flavor={"type": "nova_flavor"}) @validation.flavor_exists("flavor") @validation.required_services(consts.Service.SAHARA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaNodeGroupTemplates" ".create_and_list_node_group_templates") class CreateAndListNodeGroupTemplates(utils.SaharaScenario): def run(self, flavor, plugin_name="vanilla", hadoop_version="1.2.1", use_autoconfig=True): """Create and list Sahara Node Group Templates. This scenario creates two Node Group Templates with different set of node processes. The master Node Group Template contains Hadoop's management processes. The worker Node Group Template contains Hadoop's worker processes. By default the templates are created for the vanilla Hadoop provisioning plugin using the version 1.2.1 After the templates are created the list operation is called. :param flavor: Nova flavor that will be for nodes in the created node groups :param plugin_name: name of a provisioning plugin :param hadoop_version: version of Hadoop distribution supported by the specified plugin. 
:param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. If False, the configuration values should be specify manually """ self._create_master_node_group_template(flavor_id=flavor, plugin_name=plugin_name, hadoop_version=hadoop_version, use_autoconfig=use_autoconfig) self._create_worker_node_group_template(flavor_id=flavor, plugin_name=plugin_name, hadoop_version=hadoop_version, use_autoconfig=use_autoconfig) self._list_node_group_templates() @types.convert(flavor={"type": "nova_flavor"}) @validation.flavor_exists("flavor") @validation.required_services(consts.Service.SAHARA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaNodeGroupTemplates" ".create_delete_node_group_templates") class CreateDeleteNodeGroupTemplates(utils.SaharaScenario): def run(self, flavor, plugin_name="vanilla", hadoop_version="1.2.1", use_autoconfig=True): """Create and delete Sahara Node Group Templates. This scenario creates and deletes two most common types of Node Group Templates. By default the templates are created for the vanilla Hadoop provisioning plugin using the version 1.2.1 :param flavor: Nova flavor that will be for nodes in the created node groups :param plugin_name: name of a provisioning plugin :param hadoop_version: version of Hadoop distribution supported by the specified plugin. :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. 
If False, the configuration values should be specify manually """ master_ngt = self._create_master_node_group_template( flavor_id=flavor, plugin_name=plugin_name, hadoop_version=hadoop_version, use_autoconfig=use_autoconfig) worker_ngt = self._create_worker_node_group_template( flavor_id=flavor, plugin_name=plugin_name, hadoop_version=hadoop_version, use_autoconfig=use_autoconfig) self._delete_node_group_template(master_ngt) self._delete_node_group_template(worker_ngt) rally-0.9.1/rally/plugins/openstack/scenarios/sahara/utils.py0000664000567000056710000006233213073417720025551 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import random from oslo_config import cfg from oslo_utils import uuidutils from saharaclient.api import base as sahara_base from rally.common.i18n import _ from rally.common import logging from rally.common import utils as rutils from rally import consts from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.sahara import consts as sahara_consts from rally.task import atomic from rally.task import utils LOG = logging.getLogger(__name__) CONF = cfg.CONF SAHARA_BENCHMARK_OPTS = [ cfg.IntOpt("sahara_cluster_create_timeout", default=1800, deprecated_name="cluster_create_timeout", help="A timeout in seconds for a cluster create operation"), cfg.IntOpt("sahara_cluster_delete_timeout", default=900, deprecated_name="cluster_delete_timeout", help="A timeout in seconds for a cluster delete operation"), cfg.IntOpt("sahara_cluster_check_interval", default=5, deprecated_name="cluster_check_interval", help="Cluster status polling interval in seconds"), cfg.IntOpt("sahara_job_execution_timeout", default=600, deprecated_name="job_execution_timeout", help="A timeout in seconds for a Job Execution to complete"), cfg.IntOpt("sahara_job_check_interval", default=5, deprecated_name="job_check_interval", help="Job Execution status polling interval in seconds"), cfg.IntOpt("sahara_workers_per_proxy", default=20, help="Amount of workers one proxy should serve to.") ] benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(SAHARA_BENCHMARK_OPTS, group=benchmark_group) class SaharaScenario(scenario.OpenStackScenario): """Base class for Sahara scenarios with basic atomic actions.""" # NOTE(sskripnick): Some sahara resource names are validated as hostnames. # Since underscores are not allowed in hostnames we should not use them. 
RESOURCE_NAME_FORMAT = "rally-sahara-XXXXXX-XXXXXXXXXXXXXXXX" @atomic.action_timer("sahara.list_node_group_templates") def _list_node_group_templates(self): """Return user Node Group Templates list.""" return self.clients("sahara").node_group_templates.list() @atomic.action_timer("sahara.create_master_node_group_template") def _create_master_node_group_template(self, flavor_id, plugin_name, hadoop_version, use_autoconfig=True): """Create a master Node Group Template with a random name. :param flavor_id: The required argument for the Template :param plugin_name: Sahara provisioning plugin name :param hadoop_version: The version of Hadoop distribution supported by the plugin :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. If False, the configuration values should be specify manually :returns: The created Template """ name = self.generate_random_name() return self.clients("sahara").node_group_templates.create( name=name, plugin_name=plugin_name, hadoop_version=hadoop_version, flavor_id=flavor_id, node_processes=sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["master"], use_autoconfig=use_autoconfig) @atomic.action_timer("sahara.create_worker_node_group_template") def _create_worker_node_group_template(self, flavor_id, plugin_name, hadoop_version, use_autoconfig): """Create a worker Node Group Template with a random name. :param flavor_id: The required argument for the Template :param plugin_name: Sahara provisioning plugin name :param hadoop_version: The version of Hadoop distribution supported by the plugin :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. 
If False, the configuration values should be specify manually :returns: The created Template """ name = self.generate_random_name() return self.clients("sahara").node_group_templates.create( name=name, plugin_name=plugin_name, hadoop_version=hadoop_version, flavor_id=flavor_id, node_processes=sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["worker"], use_autoconfig=use_autoconfig) @atomic.action_timer("sahara.delete_node_group_template") def _delete_node_group_template(self, node_group): """Delete a Node Group Template by id. :param node_group: The Node Group Template to be deleted """ self.clients("sahara").node_group_templates.delete(node_group.id) def _wait_active(self, cluster_object): utils.wait_for( resource=cluster_object, ready_statuses=["active"], failure_statuses=["error"], update_resource=self._update_cluster, timeout=CONF.benchmark.sahara_cluster_create_timeout, check_interval=CONF.benchmark.sahara_cluster_check_interval) def _setup_neutron_floating_ip_pool(self, name_or_id): if name_or_id: if uuidutils.is_uuid_like(name_or_id): # Looks like an id is provided Return as is. return name_or_id else: # It's a name. Changing to id. for net in self.clients("neutron").list_networks()["networks"]: if net["name"] == name_or_id: return net["id"] # If the name is not found in the list. Exit with error. raise exceptions.BenchmarkSetupFailure( "Could not resolve Floating IP Pool name %s to id" % name_or_id) else: # Pool is not provided. Using the one set as GW for current router. net = self.context["tenant"]["networks"][0] router_id = net["router_id"] router = self.clients("neutron").show_router(router_id)["router"] net_id = router["external_gateway_info"]["network_id"] return net_id def _setup_nova_floating_ip_pool(self, name): if name: # The name is provided returning it as is. return name else: # The name is not provided. Discovering LOG.debug("No Floating Ip Pool provided. 
Taking random.") pools = self.clients("nova").floating_ip_pools.list() if pools: return random.choice(pools).name else: LOG.warning("No Floating Ip Pools found. This may cause " "instances to be unreachable.") return None def _setup_floating_ip_pool(self, node_groups, floating_ip_pool, enable_proxy): if consts.Service.NEUTRON in self.clients("services").values(): LOG.debug("Neutron detected as networking backend.") floating_ip_pool_value = self._setup_neutron_floating_ip_pool( floating_ip_pool) else: LOG.debug("Nova Network detected as networking backend.") floating_ip_pool_value = self._setup_nova_floating_ip_pool( floating_ip_pool) if floating_ip_pool_value: LOG.debug("Using floating ip pool %s." % floating_ip_pool_value) # If the pool is set by any means assign it to all node groups. # If the proxy node feature is enabled, Master Node Group and # Proxy Workers should have a floating ip pool set up if enable_proxy: proxy_groups = [x for x in node_groups if x["name"] in ("master-ng", "proxy-ng")] for ng in proxy_groups: ng["is_proxy_gateway"] = True ng["floating_ip_pool"] = floating_ip_pool_value else: for ng in node_groups: ng["floating_ip_pool"] = floating_ip_pool_value return node_groups def _setup_volumes(self, node_groups, volumes_per_node, volumes_size): if volumes_per_node: LOG.debug("Adding volumes config to Node Groups") for ng in node_groups: ng_name = ng["name"] if "worker" in ng_name or "proxy" in ng_name: # NOTE: Volume storage is used only by HDFS Datanode # process which runs on workers and proxies. ng["volumes_per_node"] = volumes_per_node ng["volumes_size"] = volumes_size return node_groups def _setup_security_groups(self, node_groups, auto_security_group, security_groups): if auto_security_group: LOG.debug("Auto security group enabled. 
Adding to Node Groups.") if security_groups: LOG.debug("Adding provided Security Groups to Node Groups.") for ng in node_groups: if auto_security_group: ng["auto_security_group"] = auto_security_group if security_groups: ng["security_groups"] = security_groups return node_groups def _setup_node_configs(self, node_groups, node_configs): if node_configs: LOG.debug("Adding Hadoop configs to Node Groups") for ng in node_groups: ng["node_configs"] = node_configs return node_groups def _setup_node_autoconfig(self, node_groups, node_autoconfig): LOG.debug("Adding auto-config par to Node Groups") for ng in node_groups: ng["use_autoconfig"] = node_autoconfig return node_groups def _setup_replication_config(self, hadoop_version, workers_count, plugin_name): replication_value = min(workers_count, 3) # 3 is a default Hadoop replication conf = sahara_consts.REPLICATION_CONFIGS[plugin_name][hadoop_version] LOG.debug("Using replication factor: %s" % replication_value) replication_config = { conf["target"]: { conf["config_name"]: replication_value } } return replication_config @logging.log_deprecated_args("`flavor_id` argument is deprecated. Use " "`master_flavor_id` and `worker_flavor_id` " "parameters.", rally_version="2.0", deprecated_args=["flavor_id"]) @atomic.action_timer("sahara.launch_cluster") def _launch_cluster(self, plugin_name, hadoop_version, master_flavor_id, worker_flavor_id, image_id, workers_count, flavor_id=None, floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, wait_active=True, use_autoconfig=True): """Create a cluster and wait until it becomes Active. The cluster is created with two node groups. The master Node Group is created with one instance. The worker node group contains node_count - 1 instances. 
:param plugin_name: provisioning plugin name :param hadoop_version: Hadoop version supported by the plugin :param master_flavor_id: flavor which will be used to create master instance :param worker_flavor_id: flavor which will be used to create workers :param image_id: image id that will be used to boot instances :param workers_count: number of worker instances. All plugins will also add one Master instance and some plugins add a Manager instance. :param floating_ip_pool: floating ip pool name from which Floating IPs will be allocated :param volumes_per_node: number of Cinder volumes that will be attached to every cluster node :param volumes_size: size of each Cinder volume in GB :param auto_security_group: boolean value. If set to True Sahara will create a Security Group for each Node Group in the Cluster automatically. :param security_groups: list of security groups that will be used while creating VMs. If auto_security_group is set to True, this list can be left empty. :param node_configs: configs dict that will be passed to each Node Group :param cluster_configs: configs dict that will be passed to the Cluster :param enable_anti_affinity: If set to true the vms will be scheduled one per compute node. :param enable_proxy: Use Master Node of a Cluster as a Proxy node and do not assign floating ips to workers. :param wait_active: Wait until a Cluster gets int "Active" state :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. If False, the configuration values should be specify manually :returns: created cluster """ if enable_proxy: proxies_count = int( workers_count / CONF.benchmark.sahara_workers_per_proxy) else: proxies_count = 0 if flavor_id: # Note: the deprecated argument is used. Falling back to single # flavor behavior. 
master_flavor_id = flavor_id worker_flavor_id = flavor_id node_groups = [ { "name": "master-ng", "flavor_id": master_flavor_id, "node_processes": sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["master"], "count": 1 }, { "name": "worker-ng", "flavor_id": worker_flavor_id, "node_processes": sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["worker"], "count": workers_count - proxies_count } ] if proxies_count: node_groups.append({ "name": "proxy-ng", "flavor_id": worker_flavor_id, "node_processes": sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["worker"], "count": proxies_count }) if "manager" in (sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]): # Adding manager group separately as it is supported only in # specific configurations. node_groups.append({ "name": "manager-ng", "flavor_id": master_flavor_id, "node_processes": sahara_consts.NODE_PROCESSES[plugin_name] [hadoop_version]["manager"], "count": 1 }) node_groups = self._setup_floating_ip_pool(node_groups, floating_ip_pool, enable_proxy) neutron_net_id = self._get_neutron_net_id() node_groups = self._setup_volumes(node_groups, volumes_per_node, volumes_size) node_groups = self._setup_security_groups(node_groups, auto_security_group, security_groups) node_groups = self._setup_node_configs(node_groups, node_configs) node_groups = self._setup_node_autoconfig(node_groups, use_autoconfig) replication_config = self._setup_replication_config(hadoop_version, workers_count, plugin_name) # The replication factor should be set for small clusters. 
However the # cluster_configs parameter can override it merged_cluster_configs = self._merge_configs(replication_config, cluster_configs) aa_processes = None if enable_anti_affinity: aa_processes = (sahara_consts.ANTI_AFFINITY_PROCESSES[plugin_name] [hadoop_version]) name = self.generate_random_name() cluster_object = self.clients("sahara").clusters.create( name=name, plugin_name=plugin_name, hadoop_version=hadoop_version, node_groups=node_groups, default_image_id=image_id, net_id=neutron_net_id, cluster_configs=merged_cluster_configs, anti_affinity=aa_processes, use_autoconfig=use_autoconfig ) if wait_active: LOG.debug("Starting cluster `%s`" % name) self._wait_active(cluster_object) return self.clients("sahara").clusters.get(cluster_object.id) def _update_cluster(self, cluster): return self.clients("sahara").clusters.get(cluster.id) def _scale_cluster(self, cluster, delta): """The scaling helper. This method finds the worker node group in a cluster, builds a scale_object required by Sahara API and waits for the scaling to complete. NOTE: This method is not meant to be called directly in benchmarks. There two specific scaling methods of up and down scaling which have different atomic timers. """ worker_node_group = [g for g in cluster.node_groups if "worker" in g["name"]][0] scale_object = { "resize_node_groups": [ { "name": worker_node_group["name"], "count": worker_node_group["count"] + delta } ] } self.clients("sahara").clusters.scale(cluster.id, scale_object) self._wait_active(cluster) @atomic.action_timer("sahara.scale_up") def _scale_cluster_up(self, cluster, delta): """Add a given number of worker nodes to the cluster. :param cluster: The cluster to be scaled :param delta: The number of workers to be added. (A positive number is expected here) """ self._scale_cluster(cluster, delta) @atomic.action_timer("sahara.scale_down") def _scale_cluster_down(self, cluster, delta): """Remove a given number of worker nodes from the cluster. 
:param cluster: The cluster to be scaled :param delta: The number of workers to be removed. (A negative number is expected here) """ self._scale_cluster(cluster, delta) @atomic.action_timer("sahara.delete_cluster") def _delete_cluster(self, cluster): """Delete cluster. :param cluster: cluster to delete """ LOG.debug("Deleting cluster `%s`" % cluster.name) self.clients("sahara").clusters.delete(cluster.id) utils.wait_for( resource=cluster, timeout=CONF.benchmark.sahara_cluster_delete_timeout, check_interval=CONF.benchmark.sahara_cluster_check_interval, is_ready=self._is_cluster_deleted) def _is_cluster_deleted(self, cluster): LOG.debug("Checking cluster `%s` to be deleted. Status: `%s`" % (cluster.name, cluster.status)) try: self.clients("sahara").clusters.get(cluster.id) return False except sahara_base.APIException: return True def _create_output_ds(self): """Create an output Data Source based on EDP context :returns: The created Data Source """ ds_type = self.context["sahara"]["output_conf"]["output_type"] url_prefix = self.context["sahara"]["output_conf"]["output_url_prefix"] if ds_type == "swift": raise exceptions.RallyException( _("Swift Data Sources are not implemented yet")) url = url_prefix.rstrip("/") + "/%s" % self.generate_random_name() return self.clients("sahara").data_sources.create( name=self.generate_random_name(), description="", data_source_type=ds_type, url=url) def _run_job_execution(self, job_id, cluster_id, input_id, output_id, configs, job_idx): """Run a Job Execution and wait until it completes or fails. The Job Execution is accepted as successful when Oozie reports "success" or "succeeded" status. The failure statuses are "failed" and "killed". The timeout and the polling interval may be configured through "sahara_job_execution_timeout" and "sahara_job_check_interval" parameters under the "benchmark" section. 
:param job_id: The Job id that will be executed :param cluster_id: The Cluster id which will execute the Job :param input_id: The input Data Source id :param output_id: The output Data Source id :param configs: The config dict that will be passed as Job Execution's parameters. :param job_idx: The index of a job in a sequence """ @atomic.action_timer("sahara.job_execution_%s" % job_idx) def run(self): job_execution = self.clients("sahara").job_executions.create( job_id=job_id, cluster_id=cluster_id, input_id=input_id, output_id=output_id, configs=configs) utils.wait_for( resource=job_execution.id, is_ready=self._job_execution_is_finished, timeout=CONF.benchmark.sahara_job_execution_timeout, check_interval=CONF.benchmark.sahara_job_check_interval) run(self) def _job_execution_is_finished(self, je_id): status = self.clients("sahara").job_executions.get(je_id).info[ "status"].lower() LOG.debug("Checking for Job Execution %s to complete. Status: %s" % (je_id, status)) if status in ("success", "succeeded"): return True elif status in ("failed", "killed"): raise exceptions.RallyException( "Job execution %s has failed" % je_id) return False def _merge_configs(self, *configs): """Merge configs in special format. It supports merging of configs in the following format: applicable_target -> config_name -> config_value """ result = {} for config_dict in configs: if config_dict: for a_target in config_dict: if a_target not in result or not result[a_target]: result[a_target] = {} result[a_target].update(config_dict[a_target]) return result def _get_neutron_net_id(self): """Get the Neutron Network id from context. If Nova Network is used as networking backend, None is returned. :returns: Network id for Neutron or None for Nova Networking. """ if consts.Service.NEUTRON not in self.clients("services").values(): return None # Taking net id from context. net = self.context["tenant"]["networks"][0] neutron_net_id = net["id"] LOG.debug("Using neutron network %s." 
% neutron_net_id) LOG.debug("Using neutron router %s." % net["router_id"]) return neutron_net_id def init_sahara_context(context_instance): context_instance.context["sahara"] = context_instance.context.get("sahara", {}) for user, tenant_id in rutils.iterate_per_tenants( context_instance.context["users"]): context_instance.context["tenants"][tenant_id]["sahara"] = ( context_instance.context["tenants"][tenant_id].get("sahara", {})) rally-0.9.1/rally/plugins/openstack/scenarios/sahara/consts.py0000664000567000056710000001771513073417716025734 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
NODE_PROCESSES = { "vanilla": { "1.2.1": { "master": ["namenode", "jobtracker", "oozie"], "worker": ["datanode", "tasktracker"] }, "2.3.0": { "master": ["namenode", "resourcemanager", "historyserver", "oozie"], "worker": ["datanode", "nodemanager"] }, "2.4.1": { "master": ["namenode", "resourcemanager", "historyserver", "oozie"], "worker": ["datanode", "nodemanager"] }, "2.6.0": { "master": ["namenode", "resourcemanager", "historyserver", "oozie"], "worker": ["datanode", "nodemanager"] }, "2.7.1": { "master": ["namenode", "resourcemanager", "historyserver", "oozie"], "worker": ["datanode", "nodemanager"] } }, "hdp": { "1.3.2": { "master": ["JOBTRACKER", "NAMENODE", "SECONDARY_NAMENODE", "GANGLIA_SERVER", "NAGIOS_SERVER", "AMBARI_SERVER", "OOZIE_SERVER"], "worker": ["TASKTRACKER", "DATANODE", "HDFS_CLIENT", "MAPREDUCE_CLIENT", "OOZIE_CLIENT", "PIG"] }, "2.0.6": { "manager": ["AMBARI_SERVER", "GANGLIA_SERVER", "NAGIOS_SERVER"], "master": ["NAMENODE", "SECONDARY_NAMENODE", "ZOOKEEPER_SERVER", "ZOOKEEPER_CLIENT", "HISTORYSERVER", "RESOURCEMANAGER", "OOZIE_SERVER"], "worker": ["DATANODE", "HDFS_CLIENT", "ZOOKEEPER_CLIENT", "PIG", "MAPREDUCE2_CLIENT", "YARN_CLIENT", "NODEMANAGER", "OOZIE_CLIENT"] }, "2.2": { "manager": ["AMBARI_SERVER", "GANGLIA_SERVER", "NAGIOS_SERVER"], "master": ["NAMENODE", "SECONDARY_NAMENODE", "ZOOKEEPER_SERVER", "ZOOKEEPER_CLIENT", "HISTORYSERVER", "RESOURCEMANAGER", "OOZIE_SERVER"], "worker": ["DATANODE", "HDFS_CLIENT", "ZOOKEEPER_CLIENT", "PIG", "MAPREDUCE2_CLIENT", "YARN_CLIENT", "NODEMANAGER", "OOZIE_CLIENT", "TEZ_CLIENT"] } }, "cdh": { "5": { "manager": ["CLOUDERA_MANAGER"], "master": ["HDFS_NAMENODE", "YARN_RESOURCEMANAGER", "OOZIE_SERVER", "YARN_JOBHISTORY", "HDFS_SECONDARYNAMENODE", "HIVE_METASTORE", "HIVE_SERVER2"], "worker": ["YARN_NODEMANAGER", "HDFS_DATANODE"] }, "5.4.0": { "manager": ["CLOUDERA_MANAGER"], "master": ["HDFS_NAMENODE", "YARN_RESOURCEMANAGER", "OOZIE_SERVER", "YARN_JOBHISTORY", "HDFS_SECONDARYNAMENODE", "HIVE_METASTORE", 
"HIVE_SERVER2"], "worker": ["YARN_NODEMANAGER", "HDFS_DATANODE"] }, "5.5.0": { "manager": ["CLOUDERA_MANAGER"], "master": ["HDFS_NAMENODE", "YARN_RESOURCEMANAGER", "OOZIE_SERVER", "YARN_JOBHISTORY", "HDFS_SECONDARYNAMENODE", "HIVE_METASTORE", "HIVE_SERVER2"], "worker": ["YARN_NODEMANAGER", "HDFS_DATANODE"] } }, "spark": { "1.3.1": { "master": ["namenode", "master"], "worker": ["datanode", "slave"] }, "1.6.0": { "master": ["namenode", "master"], "worker": ["datanode", "slave"] } }, "ambari": { "2.3": { "master-edp": ["Hive Metastore", "HiveServer", "Oozie"], "master": ["Ambari", "MapReduce History Server", "Spark History Server", "NameNode", "ResourceManager", "SecondaryNameNode", "YARN Timeline Server", "ZooKeeper"], "worker": ["DataNode", "NodeManager"] } }, "mapr": { "5.0.0.mrv2": { "master": ["Metrics", "Webserver", "Zookeeper", "HTTPFS", "Oozie", "FileServer", "CLDB", "Flume", "Hue", "NodeManager", "HistoryServer", "ResourseManager", "HiveServer2", "HiveMetastore", "Sqoop2-Client", "Sqoop2-Server"], "worker": ["NodeManager", "FileServer"] }, "5.1.0.mrv2": { "master": ["Metrics", "Webserver", "Zookeeper", "HTTPFS", "Oozie", "FileServer", "CLDB", "Flume", "Hue", "NodeManager", "HistoryServer", "ResourseManager", "HiveServer2", "HiveMetastore", "Sqoop2-Client", "Sqoop2-Server"], "worker": ["NodeManager", "FileServer"] } } } REPLICATION_CONFIGS = { "vanilla": { "1.2.1": { "target": "HDFS", "config_name": "dfs.replication" }, "2.3.0": { "target": "HDFS", "config_name": "dfs.replication" }, "2.4.1": { "target": "HDFS", "config_name": "dfs.replication" }, "2.6.0": { "target": "HDFS", "config_name": "dfs.replication" }, "2.7.1": { "target": "HDFS", "config_name": "dfs.replication" } }, "hdp": { "1.3.2": { "target": "HDFS", "config_name": "dfs.replication" }, "2.0.6": { "target": "HDFS", "config_name": "dfs.replication" }, "2.2": { "target": "HDFS", "config_name": "dfs.replication" } }, "cdh": { "5": { "target": "HDFS", "config_name": "dfs_replication" }, "5.4.0": { 
"target": "HDFS", "config_name": "dfs_replication" }, "5.5.0": { "target": "HDFS", "config_name": "dfs_replication" } }, "spark": { "1.3.1": { "target": "HDFS", "config_name": "dfs_replication" }, "1.6.0": { "target": "HDFS", "config_name": "dfs_replication" } }, "ambari": { "2.3": { "target": "HDFS", "config_name": "dfs_replication" } }, "mapr": { "5.0.0.mrv2": { "target": "HDFS", "config_name": "dfs.replication" }, "5.1.0.mrv2": { "target": "HDFS", "config_name": "dfs.replication" } } } ANTI_AFFINITY_PROCESSES = { "vanilla": { "1.2.1": ["datanode"], "2.3.0": ["datanode"], "2.4.1": ["datanode"], "2.6.0": ["datanode"], "2.7.1": ["datanode"] }, "hdp": { "1.3.2": ["DATANODE"], "2.0.6": ["DATANODE"], "2.2": ["DATANODE"] }, "cdh": { "5": ["HDFS_DATANODE"], "5.4.0": ["HDFS_DATANODE"], "5.5.0": ["HDFS_DATANODE"] }, "spark": { "1.3.1": ["datanode"], "1.6.0": ["datanode"] }, "ambari": { "2.3": ["DataNode"], }, "mapr": { "5.0.0.mrv2": ["FileServer"], "5.1.0.mrv2": ["FileServer"], } } rally-0.9.1/rally/plugins/openstack/scenarios/sahara/clusters.py0000664000567000056710000002606113073417720026254 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common import logging from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.sahara import utils from rally.task import types from rally.task import validation LOG = logging.getLogger(__name__) """Scenarios for Sahara clusters.""" @types.convert(flavor={"type": "nova_flavor"}, master_flavor={"type": "nova_flavor"}, worker_flavor={"type": "nova_flavor"}, neutron_net={"type": "neutron_network"}, floating_ip_pool={"type": "neutron_network"}) @validation.flavor_exists("master_flavor") @validation.flavor_exists("worker_flavor") @validation.required_contexts("users", "sahara_image") @validation.number("workers_count", minval=1, integer_only=True) @validation.required_services(consts.Service.SAHARA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaClusters.create_and_delete_cluster") class CreateAndDeleteCluster(utils.SaharaScenario): def run(self, workers_count, plugin_name, hadoop_version, master_flavor=None, worker_flavor=None, flavor=None, floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, use_autoconfig=True): """Launch and delete a Sahara Cluster. This scenario launches a Hadoop cluster, waits until it becomes 'Active' and deletes it. :param flavor: Nova flavor that will be for nodes in the created node groups. Deprecated. :param master_flavor: Nova flavor that will be used for the master instance of the cluster :param worker_flavor: Nova flavor that will be used for the workers of the cluster :param workers_count: number of worker instances in a cluster :param plugin_name: name of a provisioning plugin :param hadoop_version: version of Hadoop distribution supported by the specified plugin. :param floating_ip_pool: floating ip pool name from which Floating IPs will be allocated. 
Sahara will determine automatically how to treat this depending on its own configurations. Defaults to None because in some cases Sahara may work w/o Floating IPs. :param volumes_per_node: number of Cinder volumes that will be attached to every cluster node :param volumes_size: size of each Cinder volume in GB :param auto_security_group: boolean value. If set to True Sahara will create a Security Group for each Node Group in the Cluster automatically. :param security_groups: list of security groups that will be used while creating VMs. If auto_security_group is set to True, this list can be left empty. :param node_configs: config dict that will be passed to each Node Group :param cluster_configs: config dict that will be passed to the Cluster :param enable_anti_affinity: If set to true the vms will be scheduled one per compute node. :param enable_proxy: Use Master Node of a Cluster as a Proxy node and do not assign floating ips to workers. :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. 
If False, the configuration values should be specify manually """ image_id = self.context["tenant"]["sahara"]["image"] LOG.debug("Using Image: %s" % image_id) cluster = self._launch_cluster( flavor_id=flavor, master_flavor_id=master_flavor, worker_flavor_id=worker_flavor, image_id=image_id, workers_count=workers_count, plugin_name=plugin_name, hadoop_version=hadoop_version, floating_ip_pool=floating_ip_pool, volumes_per_node=volumes_per_node, volumes_size=volumes_size, auto_security_group=auto_security_group, security_groups=security_groups, node_configs=node_configs, cluster_configs=cluster_configs, enable_anti_affinity=enable_anti_affinity, enable_proxy=enable_proxy, use_autoconfig=use_autoconfig) self._delete_cluster(cluster) @types.convert(flavor={"type": "nova_flavor"}, master_flavor={"type": "nova_flavor"}, worker_flavor={"type": "nova_flavor"}) @validation.flavor_exists("master_flavor") @validation.flavor_exists("worker_flavor") @validation.required_services(consts.Service.SAHARA) @validation.required_contexts("users", "sahara_image") @validation.number("workers_count", minval=1, integer_only=True) @scenario.configure(context={"cleanup": ["sahara"]}, name="SaharaClusters.create_scale_delete_cluster") class CreateScaleDeleteCluster(utils.SaharaScenario): def run(self, master_flavor, worker_flavor, workers_count, plugin_name, hadoop_version, deltas, flavor=None, floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, use_autoconfig=True): """Launch, scale and delete a Sahara Cluster. This scenario launches a Hadoop cluster, waits until it becomes 'Active'. Then a series of scale operations is applied. The scaling happens according to numbers listed in :param deltas. Ex. if deltas is set to [2, -2] it means that the first scaling operation will add 2 worker nodes to the cluster and the second will remove two. 
:param flavor: Nova flavor that will be for nodes in the created node groups. Deprecated. :param master_flavor: Nova flavor that will be used for the master instance of the cluster :param worker_flavor: Nova flavor that will be used for the workers of the cluster :param workers_count: number of worker instances in a cluster :param plugin_name: name of a provisioning plugin :param hadoop_version: version of Hadoop distribution supported by the specified plugin. :param deltas: list of integers which will be used to add or remove worker nodes from the cluster :param floating_ip_pool: floating ip pool name from which Floating IPs will be allocated. Sahara will determine automatically how to treat this depending on its own configurations. Defaults to None because in some cases Sahara may work w/o Floating IPs. :param neutron_net_id: id of a Neutron network that will be used for fixed IPs. This parameter is ignored when Nova Network is set up. :param volumes_per_node: number of Cinder volumes that will be attached to every cluster node :param volumes_size: size of each Cinder volume in GB :param auto_security_group: boolean value. If set to True Sahara will create a Security Group for each Node Group in the Cluster automatically. :param security_groups: list of security groups that will be used while creating VMs. If auto_security_group is set to True this list can be left empty. :param node_configs: configs dict that will be passed to each Node Group :param cluster_configs: configs dict that will be passed to the Cluster :param enable_anti_affinity: If set to true the vms will be scheduled one per compute node. :param enable_proxy: Use Master Node of a Cluster as a Proxy node and do not assign floating ips to workers. :param use_autoconfig: If True, instances of the node group will be automatically configured during cluster creation. 
If False, the configuration values should be specify manually """ image_id = self.context["tenant"]["sahara"]["image"] LOG.debug("Using Image: %s" % image_id) cluster = self._launch_cluster( flavor_id=flavor, master_flavor_id=master_flavor, worker_flavor_id=worker_flavor, image_id=image_id, workers_count=workers_count, plugin_name=plugin_name, hadoop_version=hadoop_version, floating_ip_pool=floating_ip_pool, volumes_per_node=volumes_per_node, volumes_size=volumes_size, auto_security_group=auto_security_group, security_groups=security_groups, node_configs=node_configs, cluster_configs=cluster_configs, enable_anti_affinity=enable_anti_affinity, enable_proxy=enable_proxy, use_autoconfig=use_autoconfig) for delta in deltas: # The Cluster is fetched every time so that its node groups have # correct 'count' values. cluster = self.clients("sahara").clusters.get(cluster.id) if delta == 0: # Zero scaling makes no sense. continue elif delta > 0: self._scale_cluster_up(cluster, delta) elif delta < 0: self._scale_cluster_down(cluster, delta) self._delete_cluster(cluster) rally-0.9.1/rally/plugins/openstack/scenarios/vm/0000775000567000056710000000000013073420067023212 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/vm/__init__.py0000664000567000056710000000000013073417716025320 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/vm/vmtasks.py0000664000567000056710000003642113073417720025264 0ustar jenkinsjenkins00000000000000# Copyright 2014: Rackspace UK # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import json import pkgutil from rally.common import logging from rally.common import sshutils from rally import consts from rally import exceptions from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.vm import utils as vm_utils from rally.plugins.openstack.services import heat from rally.task import atomic from rally.task import types from rally.task import validation """Scenarios that are to be run inside VM instances.""" LOG = logging.getLogger(__name__) @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image", fail_on_404_image=False) @validation.valid_command("command") @validation.number("port", minval=1, maxval=65535, nullable=True, integer_only=True) @validation.external_network_exists("floating_network") @validation.required_param_or_context(arg_name="image", ctx_name="image_command_customizer") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova", "cinder"], "keypair": {}, "allow_ssh": None}, name="VMTasks.boot_runcommand_delete") class BootRuncommandDelete(vm_utils.VMScenario): def run(self, flavor, username, password=None, image=None, command=None, volume_args=None, floating_network=None, port=22, use_floating_ip=True, force_delete=False, wait_for_ping=True, max_log_length=None, **kwargs): """Boot a server, run script specified in command and delete server. :param image: glance image name to use for the vm. 
Optional in case of specified "image_command_customizer" context :param flavor: VM flavor name :param username: ssh username on server, str :param password: Password on SSH authentication :param command: Command-specifying dictionary that either specifies remote command path via `remote_path' (can be uploaded from a local file specified by `local_path`), an inline script via `script_inline' or a local script file path using `script_file'. Both `script_file' and `local_path' are checked to be accessible by the `file_exists' validator code. The `script_inline' and `script_file' both require an `interpreter' value to specify the interpreter script should be run with. Note that any of `interpreter' and `remote_path' can be an array prefixed with environment variables and suffixed with args for the `interpreter' command. `remote_path's last component must be a path to a command to execute (also upload destination if a `local_path' is given). Uploading an interpreter is possible but requires that `remote_path' and `interpreter' path do match. Examples: .. 
code-block:: python # Run a `local_script.pl' file sending it to a remote # Perl interpreter command = { "script_file": "local_script.pl", "interpreter": "/usr/bin/perl" } # Run an inline script sending it to a remote interpreter command = { "script_inline": "echo 'Hello, World!'", "interpreter": "/bin/sh" } # Run a remote command command = { "remote_path": "/bin/false" } # Copy a local command and run it command = { "remote_path": "/usr/local/bin/fio", "local_path": "/home/foobar/myfiodir/bin/fio" } # Copy a local command and run it with environment variable command = { "remote_path": ["HOME=/root", "/usr/local/bin/fio"], "local_path": "/home/foobar/myfiodir/bin/fio" } # Run an inline script sending it to a remote interpreter command = { "script_inline": "echo \"Hello, ${NAME:-World}\"", "interpreter": ["NAME=Earth", "/bin/sh"] } # Run an inline script sending it to an uploaded remote # interpreter command = { "script_inline": "echo \"Hello, ${NAME:-World}\"", "interpreter": ["NAME=Earth", "/tmp/sh"], "remote_path": "/tmp/sh", "local_path": "/home/user/work/cve/sh-1.0/bin/sh" } :param volume_args: volume args for booting server from volume :param floating_network: external network name, for floating ip :param port: ssh port for SSH connection :param use_floating_ip: bool, floating or fixed IP for SSH connection :param force_delete: whether to use force_delete for servers :param wait_for_ping: whether to check connectivity on server creation :param **kwargs: extra arguments for booting the server :param max_log_length: The number of tail nova console-log lines user would like to retrieve :returns: dictionary with keys `data' and `errors': data: dict, JSON output from the script errors: str, raw data from the script's stderr stream """ if volume_args: volume = self._create_volume(volume_args["size"], imageRef=None) kwargs["block_device_mapping"] = {"vdrally": "%s:::1" % volume.id} if not image: image = self.context["tenant"]["custom_image"]["id"] server, fip = 
self._boot_server_with_fip( image, flavor, use_floating_ip=use_floating_ip, floating_network=floating_network, key_name=self.context["user"]["keypair"]["name"], **kwargs) try: if wait_for_ping: self._wait_for_ping(fip["ip"]) code, out, err = self._run_command( fip["ip"], port, username, password, command=command) text_area_output = ["StdErr: %s" % (err or "(none)"), "StdOut:"] if code: raise exceptions.ScriptError( "Error running command %(command)s. " "Error %(code)s: %(error)s" % { "command": command, "code": code, "error": err}) # Let's try to load output data try: data = json.loads(out) # 'echo 42' produces very json-compatible result # - check it here if not isinstance(data, dict): raise ValueError except ValueError: # It's not a JSON, probably it's 'script_inline' result data = [] except (exceptions.TimeoutException, exceptions.SSHTimeout): console_logs = self._get_server_console_output(server, max_log_length) LOG.debug("VM console logs:\n%s", console_logs) raise finally: self._delete_server_with_fip(server, fip, force_delete=force_delete) if isinstance(data, dict) and set(data) == {"additive", "complete"}: for chart_type, charts in data.items(): for chart in charts: self.add_output(**{chart_type: chart}) else: # it's a dict with several unknown lines text_area_output.extend(out.split("\n")) self.add_output(complete={"title": "Script Output", "chart_plugin": "TextArea", "data": text_area_output}) @scenario.configure(context={"cleanup": ["nova", "heat"], "keypair": {}, "network": {}}, name="VMTasks.runcommand_heat") class RuncommandHeat(vm_utils.VMScenario): def run(self, workload, template, files, parameters): """Run workload on stack deployed by heat. Workload can be either file or resource: .. code-block:: json {"file": "/path/to/file.sh"} {"resource": ["package.module", "workload.py"]} Also it should contain "username" key. Given file will be uploaded to `gate_node` and started. This script should print `key` `value` pairs separated by colon. 
These pairs will be presented in results. Gate node should be accessible via ssh with keypair `key_name`, so heat template should accept parameter `key_name`. :param workload: workload to run :param template: path to heat template file :param files: additional template files :param parameters: parameters for heat template """ keypair = self.context["user"]["keypair"] parameters["key_name"] = keypair["name"] network = self.context["tenant"]["networks"][0] parameters["router_id"] = network["router_id"] self.stack = heat.main.Stack(self, self.task, template, files=files, parameters=parameters) self.stack.create() for output in self.stack.stack.outputs: if output["output_key"] == "gate_node": ip = output["output_value"] break ssh = sshutils.SSH(workload["username"], ip, pkey=keypair["private"]) ssh.wait() script = workload.get("resource") if script: script = pkgutil.get_data(*script) else: script = open(workload["file"]).read() ssh.execute("cat > /tmp/.rally-workload", stdin=script) ssh.execute("chmod +x /tmp/.rally-workload") with atomic.ActionTimer(self, "runcommand_heat.workload"): status, out, err = ssh.execute( "/tmp/.rally-workload", stdin=json.dumps(self.stack.stack.outputs)) rows = [] for line in out.splitlines(): row = line.split(":") if len(row) != 2: raise exceptions.ScriptError("Invalid data '%s'" % line) rows.append(row) if not rows: raise exceptions.ScriptError("No data returned. Original error " "message is %s" % err) self.add_output( complete={"title": "Workload summary", "description": "Data generated by workload", "chart_plugin": "Table", "data": { "cols": ["key", "value"], "rows": rows}} ) BASH_DD_LOAD_TEST = """ #!/bin/sh # Load server and output JSON results ready to be processed # by Rally scenario for ex in awk top grep free tr df dc dd gzip do if ! 
type ${ex} >/dev/null then echo "Executable is required by script but not available\ on a server: ${ex}" >&2 return 1 fi done get_used_cpu_percent() { echo 100\ $(top -b -n 1 | grep -i CPU | head -n 1 | awk '{print $8}' | tr -d %)\ - p | dc } get_used_ram_percent() { local total=$(free | grep Mem: | awk '{print $2}') local used=$(free | grep -- -/+\ buffers | awk '{print $3}') echo ${used} 100 \* ${total} / p | dc } get_used_disk_percent() { df -P / | grep -v Filesystem | awk '{print $5}' | tr -d % } get_seconds() { (time -p ${1}) 2>&1 | awk '/real/{print $2}' } complete_load() { local script_file=${LOAD_SCRIPT_FILE:-/tmp/load.sh} local stop_file=${LOAD_STOP_FILE:-/tmp/load.stop} local processes_num=${LOAD_PROCESSES_COUNT:-20} local size=${LOAD_SIZE_MB:-5} cat << EOF > ${script_file} until test -e ${stop_file} do dd if=/dev/urandom bs=1M count=${size} 2>/dev/null | gzip >/dev/null ; done EOF local sep local cpu local ram local dis rm -f ${stop_file} for i in $(seq ${processes_num}) do i=$((i-1)) sh ${script_file} & cpu="${cpu}${sep}[${i}, $(get_used_cpu_percent)]" ram="${ram}${sep}[${i}, $(get_used_ram_percent)]" dis="${dis}${sep}[${i}, $(get_used_disk_percent)]" sep=", " done > ${stop_file} cat << EOF { "title": "Generate load by spawning processes", "description": "Each process runs gzip for ${size}M urandom data\ in a loop", "chart_plugin": "Lines", "axis_label": "Number of processes", "label": "Usage, %", "data": [ ["CPU", [${cpu}]], ["Memory", [${ram}]], ["Disk", [${dis}]]] } EOF } additive_dd() { local c=${1:-50} # Megabytes local file=/tmp/dd_test.img local write=$(get_seconds "dd if=/dev/urandom of=${file} bs=1M count=${c}") local read=$(get_seconds "dd if=${file} of=/dev/null bs=1M count=${c}") local gzip=$(get_seconds "gzip ${file}") rm ${file}.gz cat << EOF { "title": "Write, read and gzip file", "description": "Using file '${file}', size ${c}Mb.", "chart_plugin": "StackedArea", "data": [ ["write_${c}M", ${write}], ["read_${c}M", ${read}], ["gzip_${c}M", 
${gzip}]] }, { "title": "Statistics for write/read/gzip", "chart_plugin": "StatsTable", "data": [ ["write_${c}M", ${write}], ["read_${c}M", ${read}], ["gzip_${c}M", ${gzip}]] } EOF } cat << EOF { "additive": [$(additive_dd)], "complete": [$(complete_load)] } EOF """ @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) @validation.image_valid_on_flavor("flavor", "image") @validation.valid_command("command") @validation.number("port", minval=1, maxval=65535, nullable=True, integer_only=True) @validation.external_network_exists("floating_network") @validation.required_services(consts.Service.NOVA, consts.Service.CINDER) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["nova", "cinder"], "keypair": {}, "allow_ssh": None}, name="VMTasks.dd_load_test") class DDLoadTest(BootRuncommandDelete): def run(self, command, **kwargs): """Boot a server from a custom image, run a command that outputs JSON. Example Script in rally-jobs/extra/install_benchmark.sh :param command: default parameter from scenario """ command["script_inline"] = BASH_DD_LOAD_TEST return super(DDLoadTest, self).run(command=command, **kwargs) rally-0.9.1/rally/plugins/openstack/scenarios/vm/utils.py0000664000567000056710000002212413073417720024727 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os.path import subprocess import sys import netaddr from oslo_config import cfg import six from rally.common.i18n import _ from rally.common import logging from rally.common import sshutils from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils from rally.plugins.openstack.scenarios.nova import utils as nova_utils from rally.plugins.openstack.wrappers import network as network_wrapper from rally.task import atomic from rally.task import utils LOG = logging.getLogger(__name__) VM_BENCHMARK_OPTS = [ cfg.FloatOpt("vm_ping_poll_interval", default=1.0, help="Interval between checks when waiting for a VM to " "become pingable"), cfg.FloatOpt("vm_ping_timeout", default=120.0, help="Time to wait for a VM to become pingable")] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(VM_BENCHMARK_OPTS, group=benchmark_group) class Host(object): ICMP_UP_STATUS = "ICMP UP" ICMP_DOWN_STATUS = "ICMP DOWN" name = "ip" def __init__(self, ip): self.ip = netaddr.IPAddress(ip) self.status = self.ICMP_DOWN_STATUS @property def id(self): return self.ip.format() @classmethod def update_status(cls, server): """Check ip address is pingable and update status.""" ping = "ping" if server.ip.version == 4 else "ping6" if sys.platform.startswith("linux"): cmd = [ping, "-c1", "-w1", server.ip.format()] else: cmd = [ping, "-c1", server.ip.format()] proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) proc.wait() LOG.debug("Host %s is ICMP %s" % (server.ip.format(), proc.returncode and "down" or "up")) if proc.returncode == 0: server.status = cls.ICMP_UP_STATUS else: server.status = cls.ICMP_DOWN_STATUS return server def __eq__(self, other): if not isinstance(other, Host): raise TypeError("%s should be an instance of %s" % ( other, Host.__class__.__name__)) return self.ip == other.ip and self.status == other.status def __ne__(self, other): return not self.__eq__(other) class 
VMScenario(nova_utils.NovaScenario, cinder_utils.CinderScenario): """Base class for VM scenarios with basic atomic actions. VM scenarios are scenarios executed inside some launched VM instance. """ USER_RWX_OTHERS_RX_ACCESS_MODE = 0o755 RESOURCE_NAME_PREFIX = "rally_vm_" @atomic.action_timer("vm.run_command_over_ssh") def _run_command_over_ssh(self, ssh, command): """Run command inside an instance. This is a separate function so that only script execution is timed. :param ssh: A SSHClient instance. :param command: Dictionary specifying command to execute. See `rally info find VMTasks.boot_runcommand_delete' parameter `command' docstring for explanation. :returns: tuple (exit_status, stdout, stderr) """ cmd, stdin = [], None interpreter = command.get("interpreter") or [] if interpreter: if isinstance(interpreter, six.string_types): interpreter = [interpreter] elif type(interpreter) != list: raise ValueError("command 'interpreter' value must be str " "or list type") cmd.extend(interpreter) remote_path = command.get("remote_path") or [] if remote_path: if isinstance(remote_path, six.string_types): remote_path = [remote_path] elif type(remote_path) != list: raise ValueError("command 'remote_path' value must be str " "or list type") cmd.extend(remote_path) if command.get("local_path"): ssh.put_file(command["local_path"], remote_path[-1], mode=self.USER_RWX_OTHERS_RX_ACCESS_MODE) if command.get("script_file"): stdin = open(os.path.expanduser(command["script_file"]), "rb") elif command.get("script_inline"): stdin = six.moves.StringIO(command["script_inline"]) cmd.extend(command.get("command_args") or []) return ssh.execute(cmd, stdin=stdin) def _boot_server_with_fip(self, image, flavor, use_floating_ip=True, floating_network=None, **kwargs): """Boot server prepared for SSH actions.""" kwargs["auto_assign_nic"] = True server = self._boot_server(image, flavor, **kwargs) if not server.networks: raise RuntimeError( "Server `%s' is not connected to any network. 
" "Use network context for auto-assigning networks " "or provide `nics' argument with specific net-id." % server.name) if use_floating_ip: fip = self._attach_floating_ip(server, floating_network) else: internal_network = list(server.networks)[0] fip = {"ip": server.addresses[internal_network][0]["addr"]} return server, {"ip": fip.get("ip"), "id": fip.get("id"), "is_floating": use_floating_ip} @atomic.action_timer("vm.attach_floating_ip") def _attach_floating_ip(self, server, floating_network): internal_network = list(server.networks)[0] fixed_ip = server.addresses[internal_network][0]["addr"] fip = network_wrapper.wrap(self.clients, self).create_floating_ip( ext_network=floating_network, tenant_id=server.tenant_id, fixed_ip=fixed_ip) self._associate_floating_ip(server, fip["ip"], fixed_address=fixed_ip, atomic_action=False) return fip @atomic.action_timer("vm.delete_floating_ip") def _delete_floating_ip(self, server, fip): with logging.ExceptionLogger( LOG, _("Unable to delete IP: %s") % fip["ip"]): if self.check_ip_address(fip["ip"])(server): self._dissociate_floating_ip(server, fip["ip"], atomic_action=False) network_wrapper.wrap(self.clients, self).delete_floating_ip( fip["id"], wait=True) def _delete_server_with_fip(self, server, fip, force_delete=False): if fip["is_floating"]: self._delete_floating_ip(server, fip) return self._delete_server(server, force=force_delete) @atomic.action_timer("vm.wait_for_ssh") def _wait_for_ssh(self, ssh, timeout=120, interval=1): ssh.wait(timeout, interval) @atomic.action_timer("vm.wait_for_ping") def _wait_for_ping(self, server_ip): server = Host(server_ip) utils.wait_for_status( server, ready_statuses=[Host.ICMP_UP_STATUS], update_resource=Host.update_status, timeout=CONF.benchmark.vm_ping_timeout, check_interval=CONF.benchmark.vm_ping_poll_interval ) def _run_command(self, server_ip, port, username, password, command, pkey=None, timeout=120, interval=1): """Run command via SSH on server. 
Create SSH connection for server, wait for server to become available (there is a delay between server being set to ACTIVE and sshd being available). Then call run_command_over_ssh to actually execute the command. :param server_ip: server ip address :param port: ssh port for SSH connection :param username: str. ssh username for server :param password: Password for SSH authentication :param command: Dictionary specifying command to execute. See `rally info find VMTasks.boot_runcommand_delete' parameter `command' docstring for explanation. :param pkey: key for SSH authentication :param timeout: wait for ssh timeout. Default is 120 seconds :param interval: ssh retry interval. Default is 1 second :returns: tuple (exit_status, stdout, stderr) """ pkey = pkey if pkey else self.context["user"]["keypair"]["private"] ssh = sshutils.SSH(username, server_ip, port=port, pkey=pkey, password=password) self._wait_for_ssh(ssh, timeout, interval) return self._run_command_over_ssh(ssh, command) rally-0.9.1/rally/plugins/openstack/scenarios/authenticate/0000775000567000056710000000000013073420067025246 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/authenticate/__init__.py0000664000567000056710000000000013073417716027354 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/authenticate/authenticate.py0000664000567000056710000001512613073417720030305 0ustar jenkinsjenkins00000000000000# Copyright 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.task import atomic from rally.task import validation """Scenarios for Authentication mechanism.""" @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.keystone") class Keystone(scenario.OpenStackScenario): @atomic.action_timer("authenticate.keystone") def run(self): """Check Keystone Client.""" self.clients("keystone") @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_glance") class ValidateGlance(scenario.OpenStackScenario): def run(self, repetitions): """Check Glance Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. In following we are checking for non-existent image. :param repetitions: number of times to validate """ glance_client = self.clients("glance") image_name = "__intentionally_non_existent_image___" with atomic.ActionTimer( self, "authenticate.validate_glance_%s_times" % repetitions): for i in range(repetitions): list(glance_client.images.list(name=image_name)) @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_nova") class ValidateNova(scenario.OpenStackScenario): def run(self, repetitions): """Check Nova Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. 
:param repetitions: number of times to validate """ nova_client = self.clients("nova") with atomic.ActionTimer( self, "authenticate.validate_nova_%s_times" % repetitions): for i in range(repetitions): nova_client.flavors.list() @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_ceilometer") class ValidateCeilometer(scenario.OpenStackScenario): def run(self, repetitions): """Check Ceilometer Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. :param repetitions: number of times to validate """ ceilometer_client = self.clients("ceilometer") with atomic.ActionTimer( self, "authenticate.validate_ceilometer_%s_times" % repetitions): for i in range(repetitions): ceilometer_client.meters.list() @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_cinder") class ValidateCinder(scenario.OpenStackScenario): def run(self, repetitions): """Check Cinder Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. :param repetitions: number of times to validate """ cinder_client = self.clients("cinder") with atomic.ActionTimer( self, "authenticate.validate_cinder_%s_times" % repetitions): for i in range(repetitions): cinder_client.volume_types.list() @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_neutron") class ValidateNeutron(scenario.OpenStackScenario): def run(self, repetitions): """Check Neutron Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. 
:param repetitions: number of times to validate """ neutron_client = self.clients("neutron") with atomic.ActionTimer( self, "authenticate.validate_neutron_%s_times" % repetitions): for i in range(repetitions): neutron_client.list_networks() @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @scenario.configure(name="Authenticate.validate_heat") class ValidateHeat(scenario.OpenStackScenario): def run(self, repetitions): """Check Heat Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. :param repetitions: number of times to validate """ heat_client = self.clients("heat") with atomic.ActionTimer( self, "authenticate.validate_heat_%s_times" % repetitions): for i in range(repetitions): list(heat_client.stacks.list(limit=0)) @validation.number("repetitions", minval=1) @validation.required_openstack(users=True) @validation.required_services(consts.Service.MONASCA) @scenario.configure(name="Authenticate.validate_monasca") class ValidateMonasca(scenario.OpenStackScenario): def run(self, repetitions): """Check Monasca Client to ensure validation of token. Creation of the client does not ensure validation of the token. We have to do some minimal operation to make sure token gets validated. 
:param repetitions: number of times to validate """ monasca_client = self.clients("monasca") with atomic.ActionTimer( self, "authenticate.validate_monasca_%s_times" % repetitions): for i in range(repetitions): list(monasca_client.metrics.list(limit=0)) rally-0.9.1/rally/plugins/openstack/scenarios/manila/0000775000567000056710000000000013073420067024031 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/manila/__init__.py0000664000567000056710000000000013073417716026137 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/manila/utils.py0000664000567000056710000003110413073417720025544 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import random from oslo_config import cfg from rally import exceptions from rally.plugins.openstack.context.manila import consts from rally.plugins.openstack import scenario from rally.task import atomic from rally.task import utils MANILA_BENCHMARK_OPTS = [ cfg.FloatOpt( "manila_share_create_prepoll_delay", default=2.0, help="Delay between creating Manila share and polling for its " "status."), cfg.FloatOpt( "manila_share_create_timeout", default=300.0, help="Timeout for Manila share creation."), cfg.FloatOpt( "manila_share_create_poll_interval", default=3.0, help="Interval between checks when waiting for Manila share " "creation."), cfg.FloatOpt( "manila_share_delete_timeout", default=180.0, help="Timeout for Manila share deletion."), cfg.FloatOpt( "manila_share_delete_poll_interval", default=2.0, help="Interval between checks when waiting for Manila share " "deletion."), ] CONF = cfg.CONF benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options") CONF.register_opts(MANILA_BENCHMARK_OPTS, group=benchmark_group) class ManilaScenario(scenario.OpenStackScenario): """Base class for Manila scenarios with basic atomic actions.""" @atomic.action_timer("manila.create_share") def _create_share(self, share_proto, size=1, **kwargs): """Create a share. :param share_proto: share protocol for new share, available values are NFS, CIFS, GlusterFS and HDFS. :param size: size of a share in GB :param snapshot_id: ID of the snapshot :param name: name of new share :param description: description of a share :param metadata: optional metadata to set on share creation :param share_network: either instance of ShareNetwork or str with ID :param share_type: either instance of ShareType or str with ID :param is_public: defines whether to set share as public or not. 
:returns: instance of :class:`Share` """ if self.context: share_networks = self.context.get("tenant", {}).get( consts.SHARE_NETWORKS_CONTEXT_NAME, {}).get( "share_networks", []) if share_networks and not kwargs.get("share_network"): kwargs["share_network"] = share_networks[ self.context["iteration"] % len(share_networks)]["id"] if not kwargs.get("name"): kwargs["name"] = self.generate_random_name() share = self.clients("manila").shares.create( share_proto, size, **kwargs) self.sleep_between(CONF.benchmark.manila_share_create_prepoll_delay) share = utils.wait_for( share, ready_statuses=["available"], update_resource=utils.get_from_manager(), timeout=CONF.benchmark.manila_share_create_timeout, check_interval=CONF.benchmark.manila_share_create_poll_interval, ) return share @atomic.action_timer("manila.delete_share") def _delete_share(self, share): """Delete the given share. :param share: :class:`Share` """ share.delete() error_statuses = ("error_deleting", ) utils.wait_for_status( share, ready_statuses=["deleted"], check_deletion=True, update_resource=utils.get_from_manager(error_statuses), timeout=CONF.benchmark.manila_share_delete_timeout, check_interval=CONF.benchmark.manila_share_delete_poll_interval) @atomic.action_timer("manila.list_shares") def _list_shares(self, detailed=True, search_opts=None): """Returns user shares list. :param detailed: defines either to return detailed list of objects or not. :param search_opts: container of search opts such as "name", "host", "share_type", etc. """ return self.clients("manila").shares.list( detailed=detailed, search_opts=search_opts) @atomic.action_timer("manila.create_share_network") def _create_share_network(self, neutron_net_id=None, neutron_subnet_id=None, nova_net_id=None, description=None): """Create share network. 
:param neutron_net_id: ID of Neutron network :param neutron_subnet_id: ID of Neutron subnet :param nova_net_id: ID of Nova network :param description: share network description :returns: instance of :class:`ShareNetwork` """ share_network = self.clients("manila").share_networks.create( neutron_net_id=neutron_net_id, neutron_subnet_id=neutron_subnet_id, nova_net_id=nova_net_id, name=self.generate_random_name(), description=description) return share_network @atomic.action_timer("manila.delete_share_network") def _delete_share_network(self, share_network): """Delete share network. :param share_network: instance of :class:`ShareNetwork`. """ share_network.delete() utils.wait_for_status( share_network, ready_statuses=["deleted"], check_deletion=True, update_resource=utils.get_from_manager(), timeout=CONF.benchmark.manila_share_delete_timeout, check_interval=CONF.benchmark.manila_share_delete_poll_interval) @atomic.action_timer("manila.list_share_networks") def _list_share_networks(self, detailed=True, search_opts=None): """List share networks. :param detailed: defines either to return detailed list of objects or not. :param search_opts: container of search opts such as "project_id" and "name". :returns: list of instances of :class:`ShareNetwork` """ share_networks = self.clients("manila").share_networks.list( detailed=detailed, search_opts=search_opts) return share_networks @atomic.action_timer("manila.list_share_servers") def _list_share_servers(self, search_opts=None): """List share servers. Admin only. :param search_opts: set of key-value pairs to filter share servers by. 
Example: {"share_network": "share_network_name_or_id"} :returns: list of instances of :class:`ShareServer` """ share_servers = self.admin_clients("manila").share_servers.list( search_opts=search_opts) return share_servers @atomic.action_timer("manila.create_security_service") def _create_security_service(self, security_service_type, dns_ip=None, server=None, domain=None, user=None, password=None, description=None): """Create security service. 'Security service' is data container in Manila that stores info about auth services 'Active Directory', 'Kerberos' and catalog service 'LDAP' that should be used for shares. :param security_service_type: security service type, permitted values are 'ldap', 'kerberos' or 'active_directory'. :param dns_ip: dns ip address used inside tenant's network :param server: security service server ip address or hostname :param domain: security service domain :param user: security identifier used by tenant :param password: password used by user :param description: security service description :returns: instance of :class:`SecurityService` """ security_service = self.clients("manila").security_services.create( type=security_service_type, dns_ip=dns_ip, server=server, domain=domain, user=user, password=password, name=self.generate_random_name(), description=description) return security_service @atomic.action_timer("manila.delete_security_service") def _delete_security_service(self, security_service): """Delete security service. :param security_service: instance of :class:`SecurityService`. 
""" security_service.delete() utils.wait_for_status( security_service, ready_statuses=["deleted"], check_deletion=True, update_resource=utils.get_from_manager(), timeout=CONF.benchmark.manila_share_delete_timeout, check_interval=CONF.benchmark.manila_share_delete_poll_interval) @atomic.action_timer("manila.add_security_service_to_share_network") def _add_security_service_to_share_network(self, share_network, security_service): """Associate given security service with a share network. :param share_network: ID or instance of :class:`ShareNetwork`. :param security_service: ID or instance of :class:`SecurityService`. :returns: instance of :class:`ShareNetwork`. """ share_network = self.clients( "manila").share_networks.add_security_service( share_network, security_service) return share_network @atomic.action_timer("manila.set_metadata") def _set_metadata(self, share, sets=1, set_size=1, key_min_length=1, key_max_length=256, value_min_length=1, value_max_length=1024): """Sets share metadata. :param share: the share to set metadata on :param sets: how many operations to perform :param set_size: number of metadata keys to set in each operation :param key_min_length: minimal size of metadata key to set :param key_max_length: maximum size of metadata key to set :param value_min_length: minimal size of metadata value to set :param value_max_length: maximum size of metadata value to set :returns: A list of keys that were set :raises exceptions.InvalidArgumentsException: if invalid arguments were provided. 
""" if not (key_min_length <= key_max_length and value_min_length <= value_max_length): raise exceptions.InvalidArgumentsException( "Min length for keys and values of metadata can not be bigger " "than maximum length.") keys = [] for i in range(sets): metadata = {} for j in range(set_size): if key_min_length == key_max_length: key_length = key_min_length else: key_length = random.choice( range(key_min_length, key_max_length)) if value_min_length == value_max_length: value_length = value_min_length else: value_length = random.choice( range(value_min_length, value_max_length)) key = self._generate_random_part(length=key_length) keys.append(key) metadata[key] = self._generate_random_part(length=value_length) self.clients("manila").shares.set_metadata(share["id"], metadata) return keys @atomic.action_timer("manila.delete_metadata") def _delete_metadata(self, share, keys, delete_size=3): """Deletes share metadata. :param share: The share to delete metadata from. :param delete_size: number of metadata keys to delete using one single call. :param keys: a list or tuple of keys to choose deletion candidates from :raises exceptions.InvalidArgumentsException: if invalid arguments were provided. """ if not (isinstance(keys, list) and keys): raise exceptions.InvalidArgumentsException( "Param 'keys' should be non-empty 'list'. keys = '%s'" % keys) for i in range(0, len(keys), delete_size): self.clients("manila").shares.delete_metadata( share["id"], keys[i:i + delete_size]) rally-0.9.1/rally/plugins/openstack/scenarios/manila/shares.py0000664000567000056710000002636613073417720025707 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import logging from rally import consts from rally.plugins.openstack.context.manila import consts as manila_consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.manila import utils from rally.task import validation """Scenarios for Manila shares.""" @validation.validate_share_proto() @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["manila"]}, name="ManilaShares.create_and_delete_share") class CreateAndDeleteShare(utils.ManilaScenario): def run(self, share_proto, size=1, min_sleep=0, max_sleep=0, **kwargs): """Create and delete a share. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between share creation and deletion (of random duration from [min_sleep, max_sleep]). 
:param share_proto: share protocol, valid values are NFS, CIFS, GlusterFS and HDFS :param size: share size in GB, should be greater than 0 :param min_sleep: minimum sleep time in seconds (non-negative) :param max_sleep: maximum sleep time in seconds (non-negative) :param kwargs: optional args to create a share """ share = self._create_share( share_proto=share_proto, size=size, **kwargs) self.sleep_between(min_sleep, max_sleep) self._delete_share(share) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(name="ManilaShares.list_shares") class ListShares(utils.ManilaScenario): def run(self, detailed=True, search_opts=None): """Basic scenario for 'share list' operation. :param detailed: defines either to return detailed list of objects or not. :param search_opts: container of search opts such as "name", "host", "share_type", etc. """ self._list_shares(detailed=detailed, search_opts=search_opts) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["manila"]}, name="ManilaShares.create_share_network_and_delete") class CreateShareNetworkAndDelete(utils.ManilaScenario): @logging.log_deprecated_args( "The 'name' argument to create_and_delete_service will be ignored", "1.1.2", ["name"], once=True) def run(self, neutron_net_id=None, neutron_subnet_id=None, nova_net_id=None, name=None, description=None): """Creates share network and then deletes. 
:param neutron_net_id: ID of Neutron network :param neutron_subnet_id: ID of Neutron subnet :param nova_net_id: ID of Nova network :param description: share network description """ share_network = self._create_share_network( neutron_net_id=neutron_net_id, neutron_subnet_id=neutron_subnet_id, nova_net_id=nova_net_id, description=description, ) self._delete_share_network(share_network) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["manila"]}, name="ManilaShares.create_share_network_and_list") class CreateShareNetworkAndList(utils.ManilaScenario): @logging.log_deprecated_args( "The 'name' argument to create_and_delete_service will be ignored", "1.1.2", ["name"], once=True) def run(self, neutron_net_id=None, neutron_subnet_id=None, nova_net_id=None, name=None, description=None, detailed=True, search_opts=None): """Creates share network and then lists it. :param neutron_net_id: ID of Neutron network :param neutron_subnet_id: ID of Neutron subnet :param nova_net_id: ID of Nova network :param description: share network description :param detailed: defines either to return detailed list of objects or not. :param search_opts: container of search opts such as "name", "nova_net_id", "neutron_net_id", etc. """ self._create_share_network( neutron_net_id=neutron_net_id, neutron_subnet_id=neutron_subnet_id, nova_net_id=nova_net_id, description=description, ) self._list_share_networks( detailed=detailed, search_opts=search_opts, ) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(admin=True) @scenario.configure(name="ManilaShares.list_share_servers") class ListShareServers(utils.ManilaScenario): def run(self, search_opts=None): """Lists share servers. Requires admin creds. :param search_opts: container of following search opts: "host", "status", "share_network" and "project_id". 
""" self._list_share_servers(search_opts=search_opts) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["manila"]}, name="ManilaShares.create_security_service_and_delete") class CreateSecurityServiceAndDelete(utils.ManilaScenario): @logging.log_deprecated_args( "The 'name' argument to create_and_delete_service will be ignored", "1.1.2", ["name"], once=True) def run(self, security_service_type, dns_ip=None, server=None, domain=None, user=None, password=None, name=None, description=None): """Creates security service and then deletes. :param security_service_type: security service type, permitted values are 'ldap', 'kerberos' or 'active_directory'. :param dns_ip: dns ip address used inside tenant's network :param server: security service server ip address or hostname :param domain: security service domain :param user: security identifier used by tenant :param password: password used by user :param description: security service description """ security_service = self._create_security_service( security_service_type=security_service_type, dns_ip=dns_ip, server=server, domain=domain, user=user, password=password, description=description, ) self._delete_security_service(security_service) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["manila"]}, name=("ManilaShares." "attach_security_service_to_share_network")) class AttachSecurityServiceToShareNetwork(utils.ManilaScenario): def run(self, security_service_type="ldap"): """Attaches security service to share network. :param security_service_type: type of security service to use. Should be one of following: 'ldap', 'kerberos' or 'active_directory'. 
""" sn = self._create_share_network() ss = self._create_security_service( security_service_type=security_service_type) self._add_security_service_to_share_network(sn, ss) @validation.validate_share_proto() @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @scenario.configure( context={"cleanup": ["manila"]}, name=("ManilaShares.create_and_list_share")) class CreateAndListShare(utils.ManilaScenario): def run(self, share_proto, size=1, min_sleep=0, max_sleep=0, detailed=True, **kwargs): """Create a share and list all shares. Optional 'min_sleep' and 'max_sleep' parameters allow the scenario to simulate a pause between share creation and list (of random duration from [min_sleep, max_sleep]). :param share_proto: share protocol, valid values are NFS, CIFS, GlusterFS and HDFS :param size: share size in GB, should be greater than 0 :param min_sleep: minimum sleep time in seconds (non-negative) :param max_sleep: maximum sleep time in seconds (non-negative) :param detailed: defines whether to get detailed list of shares or not :param kwargs: optional args to create a share """ self._create_share(share_proto=share_proto, size=size, **kwargs) self.sleep_between(min_sleep, max_sleep) self._list_shares(detailed=detailed) @validation.number("sets", minval=1, integer_only=True) @validation.number("set_size", minval=1, integer_only=True) @validation.number("key_min_length", minval=1, maxval=256, integer_only=True) @validation.number("key_max_length", minval=1, maxval=256, integer_only=True) @validation.number( "value_min_length", minval=1, maxval=1024, integer_only=True) @validation.number( "value_max_length", minval=1, maxval=1024, integer_only=True) @validation.required_services(consts.Service.MANILA) @validation.required_openstack(users=True) @validation.required_contexts(manila_consts.SHARES_CONTEXT_NAME) @scenario.configure( context={"cleanup": ["manila"]}, name="ManilaShares.set_and_delete_metadata") class 
SetAndDeleteMetadata(utils.ManilaScenario): def run(self, sets=10, set_size=3, delete_size=3, key_min_length=1, key_max_length=256, value_min_length=1, value_max_length=1024): """Sets and deletes share metadata. This requires a share to be created with the shares context. Additionally, ``sets * set_size`` must be greater than or equal to ``deletes * delete_size``. :param sets: how many set_metadata operations to perform :param set_size: number of metadata keys to set in each set_metadata operation :param delete_size: number of metadata keys to delete in each delete_metadata operation :param key_min_length: minimal size of metadata key to set :param key_max_length: maximum size of metadata key to set :param value_min_length: minimal size of metadata value to set :param value_max_length: maximum size of metadata value to set """ shares = self.context.get("tenant", {}).get("shares", []) share = shares[self.context["iteration"] % len(shares)] keys = self._set_metadata( share=share, sets=sets, set_size=set_size, key_min_length=key_min_length, key_max_length=key_max_length, value_min_length=value_min_length, value_max_length=value_max_length) self._delete_metadata(share=share, keys=keys, delete_size=delete_size) rally-0.9.1/rally/plugins/openstack/scenarios/quotas/0000775000567000056710000000000013073420067024104 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/quotas/__init__.py0000664000567000056710000000000013073417716026212 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/quotas/utils.py0000664000567000056710000000707513073417716025636 0ustar jenkinsjenkins00000000000000# Copyright 2014: Kylin Cloud # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import random from rally.plugins.openstack import scenario from rally.task import atomic class QuotasScenario(scenario.OpenStackScenario): """Base class for quotas scenarios with basic atomic actions.""" @atomic.action_timer("quotas.update_quotas") def _update_quotas(self, component, tenant_id, max_quota=1024, quota_update_fn=None): """Updates quotas. :param component: Component for the quotas. :param tenant_id: The project_id for the quotas to be updated. :param max_quota: Max value to be updated for quota. :param quota_update_fn: Client quota update function. Standard OpenStack clients use quotas.update(). Use `quota_update_fn` to override for non-standard clients. :returns: Updated quotas dictionary. """ quotas = self._generate_quota_values(max_quota, component) if quota_update_fn: return quota_update_fn(tenant_id, **quotas) return self.admin_clients(component).quotas.update(tenant_id, **quotas) @atomic.action_timer("quotas.delete_quotas") def _delete_quotas(self, component, tenant_id): """Delete quotas. :param component: Component for the quotas. :param tenant_id: The project_id for the quotas to be updated. 
""" self.admin_clients(component).quotas.delete(tenant_id) def _generate_quota_values(self, max_quota, component): quotas = {} if component == "nova": quotas = { "metadata_items": random.randint(-1, max_quota), "key_pairs": random.randint(-1, max_quota), "injected_file_content_bytes": random.randint(-1, max_quota), "injected_file_path_bytes": random.randint(-1, max_quota), "ram": random.randint(-1, max_quota), "instances": random.randint(-1, max_quota), "injected_files": random.randint(-1, max_quota), "cores": random.randint(-1, max_quota) } elif component == "cinder": quotas = { "volumes": random.randint(-1, max_quota), "snapshots": random.randint(-1, max_quota), "gigabytes": random.randint(-1, max_quota), } elif component == "neutron": quota = {} for key in ["network", "subnet", "port", "router", "floatingip", "security_group", "security_group_rule"]: quota[key] = random.randint(-1, max_quota) quotas = {"body": {"quota": quota}} return quotas @atomic.action_timer("quotas.get_quotas") def _get_quotas(self, component, tenant_id): """Get quotas for a project. :param component: Openstack component for the quotas. :param tenant_id: The project_id for the quotas to show. :return: Get quotas for a project. """ return self.admin_clients(component).quotas.get(tenant_id) rally-0.9.1/rally/plugins/openstack/scenarios/quotas/quotas.py0000664000567000056710000001064713073417720026004 0ustar jenkinsjenkins00000000000000# Copyright 2014: Kylin Cloud # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.quotas import utils from rally.task import validation """Scenarios for quotas.""" @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["nova.quotas"]}, name="Quotas.nova_update") class NovaUpdate(utils.QuotasScenario): def run(self, max_quota=1024): """Update quotas for Nova. :param max_quota: Max value to be updated for quota. """ self._update_quotas("nova", self.context["tenant"]["id"], max_quota) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["nova.quotas"]}, name="Quotas.nova_update_and_delete") class NovaUpdateAndDelete(utils.QuotasScenario): def run(self, max_quota=1024): """Update and delete quotas for Nova. :param max_quota: Max value to be updated for quota. """ self._update_quotas("nova", self.context["tenant"]["id"], max_quota) self._delete_quotas("nova", self.context["tenant"]["id"]) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["cinder.quotas"]}, name="Quotas.cinder_update") class CinderUpdate(utils.QuotasScenario): def run(self, max_quota=1024): """Update quotas for Cinder. :param max_quota: Max value to be updated for quota. """ self._update_quotas("cinder", self.context["tenant"]["id"], max_quota) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["cinder.quotas"]}, name="Quotas.cinder_get") class CinderGet(utils.QuotasScenario): def run(self): """Get quotas for Cinder. 
Measure the "cinder quota-show" command performance """ self._get_quotas("cinder", self.context["tenant"]["id"]) @validation.required_services(consts.Service.CINDER) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["cinder.quotas"]}, name="Quotas.cinder_update_and_delete") class CinderUpdateAndDelete(utils.QuotasScenario): def run(self, max_quota=1024): """Update and Delete quotas for Cinder. :param max_quota: Max value to be updated for quota. """ self._update_quotas("cinder", self.context["tenant"]["id"], max_quota) self._delete_quotas("cinder", self.context["tenant"]["id"]) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["neutron.quota"]}, name="Quotas.neutron_update") class NeutronUpdate(utils.QuotasScenario): def run(self, max_quota=1024): """Update quotas for neutron. :param max_quota: Max value to be updated for quota. """ quota_update_fn = self.admin_clients("neutron").update_quota self._update_quotas("neutron", self.context["tenant"]["id"], max_quota, quota_update_fn) @validation.required_services(consts.Service.NOVA) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["nova.quotas"]}, name="Quotas.nova_get") class NovaGet(utils.QuotasScenario): def run(self): """Get quotas for nova.""" self._get_quotas("nova", self.context["tenant"]["id"]) rally-0.9.1/rally/plugins/openstack/scenarios/neutron/0000775000567000056710000000000013073420067024262 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/neutron/__init__.py0000664000567000056710000000000013073417716026370 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/neutron/network.py0000664000567000056710000004233113073417720026332 0ustar jenkinsjenkins00000000000000# Copyright 2014: Intel Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.neutron import utils from rally.task import validation """Scenarios for Neutron.""" @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_list_networks") class CreateAndListNetworks(utils.NeutronScenario): def run(self, network_create_args=None): """Create a network and then list all networks. Measure the "neutron net-list" command performance. If you have only 1 user in your context, you will add 1 network on every iteration. So you will have more and more networks and will be able to measure the performance of the "neutron net-list" command depending on the number of networks owned by users. :param network_create_args: dict, POST /v2.0/networks request options """ self._create_network(network_create_args or {}) self._list_networks() @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_show_network") class CreateAndShowNetwork(utils.NeutronScenario): def run(self, network_create_args=None): """Create a network and show network details. Measure the "neutron net-show" command performance. 
:param network_create_args: dict, POST /v2.0/networks request options """ network = self._create_network(network_create_args or {}) self._show_network(network) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_update_networks") class CreateAndUpdateNetworks(utils.NeutronScenario): def run(self, network_update_args, network_create_args=None): """Create and update a network. Measure the "neutron net-create and net-update" command performance. :param network_update_args: dict, PUT /v2.0/networks update request :param network_create_args: dict, POST /v2.0/networks request options """ network = self._create_network(network_create_args or {}) self._update_network(network, network_update_args) @validation.required_services(consts.Service.NEUTRON) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_delete_networks") class CreateAndDeleteNetworks(utils.NeutronScenario): def run(self, network_create_args=None): """Create and delete a network. Measure the "neutron net-create" and "net-delete" command performance. :param network_create_args: dict, POST /v2.0/networks request options """ network = self._create_network(network_create_args or {}) self._delete_network(network["network"]) @validation.number("subnets_per_network", minval=1, integer_only=True) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_list_subnets") class CreateAndListSubnets(utils.NeutronScenario): def run(self, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None): """Create and a given number of subnets and list all subnets. The scenario creates a network, a given number of subnets and then lists subnets. 
:param network_create_args: dict, POST /v2.0/networks request options. Deprecated :param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network """ network = self._create_network(network_create_args or {}) self._create_subnets(network, subnet_create_args, subnet_cidr_start, subnets_per_network) self._list_subnets() @validation.number("subnets_per_network", minval=1, integer_only=True) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_update_subnets") class CreateAndUpdateSubnets(utils.NeutronScenario): def run(self, subnet_update_args, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None): """Create and update a subnet. The scenario creates a network, a given number of subnets and then updates the subnet. This scenario measures the "neutron subnet-update" command performance. :param subnet_update_args: dict, PUT /v2.0/subnets update options :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. 
:param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network """ network = self._create_network(network_create_args or {}) subnets = self._create_subnets(network, subnet_create_args, subnet_cidr_start, subnets_per_network) for subnet in subnets: self._update_subnet(subnet, subnet_update_args) @validation.required_parameters("subnets_per_network") @validation.required_services(consts.Service.NEUTRON) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_delete_subnets") class CreateAndDeleteSubnets(utils.NeutronScenario): def run(self, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None): """Create and delete a given number of subnets. The scenario creates a network, a given number of subnets and then deletes subnets. :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. :param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network """ network = self._get_or_create_network(network_create_args) subnets = self._create_subnets(network, subnet_create_args, subnet_cidr_start, subnets_per_network) for subnet in subnets: self._delete_subnet(subnet) @validation.number("subnets_per_network", minval=1, integer_only=True) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_list_routers") class CreateAndListRouters(utils.NeutronScenario): def run(self, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None, router_create_args=None): """Create and a given number of routers and list all routers. 
Create a network, a given number of subnets and routers and then list all routers. :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. :param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network :param router_create_args: dict, POST /v2.0/routers request options """ self._create_network_structure(network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) self._list_routers() @validation.number("subnets_per_network", minval=1, integer_only=True) @validation.required_parameters("subnets_per_network") @validation.required_services(consts.Service.NEUTRON) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_update_routers") class CreateAndUpdateRouters(utils.NeutronScenario): def run(self, router_update_args, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None, router_create_args=None): """Create and update a given number of routers. Create a network, a given number of subnets and routers and then updating all routers. :param router_update_args: dict, PUT /v2.0/routers update options :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. 
:param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network :param router_create_args: dict, POST /v2.0/routers request options """ network, subnets, routers = self._create_network_structure( network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) for router in routers: self._update_router(router, router_update_args) @validation.required_parameters("subnets_per_network") @validation.required_services(consts.Service.NEUTRON) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_delete_routers") class CreateAndDeleteRouters(utils.NeutronScenario): def run(self, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None, router_create_args=None): """Create and delete a given number of routers. Create a network, a given number of subnets and routers and then delete all routers. :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. 
:param subnet_create_args: dict, POST /v2.0/subnets request options :param subnet_cidr_start: str, start value for subnets CIDR :param subnets_per_network: int, number of subnets for one network :param router_create_args: dict, POST /v2.0/routers request options """ network, subnets, routers = self._create_network_structure( network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) for e in range(subnets_per_network): router = routers[e] subnet = subnets[e] self._remove_interface_router(subnet["subnet"], router["router"]) self._delete_router(router) @validation.number("ports_per_network", minval=1, integer_only=True) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_list_ports") class CreateAndListPorts(utils.NeutronScenario): def run(self, network_create_args=None, port_create_args=None, ports_per_network=None): """Create and a given number of ports and list all ports. :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. :param port_create_args: dict, POST /v2.0/ports request options :param ports_per_network: int, number of ports for one network """ network = self._get_or_create_network(network_create_args) for i in range(ports_per_network): self._create_port(network, port_create_args or {}) self._list_ports() @validation.number("ports_per_network", minval=1, integer_only=True) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_update_ports") class CreateAndUpdatePorts(utils.NeutronScenario): def run(self, port_update_args, network_create_args=None, port_create_args=None, ports_per_network=None): """Create and update a given number of ports. Measure the "neutron port-create" and "neutron port-update" commands performance. 
:param port_update_args: dict, PUT /v2.0/ports update request options :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. :param port_create_args: dict, POST /v2.0/ports request options :param ports_per_network: int, number of ports for one network """ network = self._get_or_create_network(network_create_args) for i in range(ports_per_network): port = self._create_port(network, port_create_args) self._update_port(port, port_update_args) @validation.required_parameters("ports_per_network") @validation.required_services(consts.Service.NEUTRON) @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_delete_ports") class CreateAndDeletePorts(utils.NeutronScenario): def run(self, network_create_args=None, port_create_args=None, ports_per_network=None): """Create and delete a port. Measure the "neutron port-create" and "neutron port-delete" commands performance. :param network_create_args: dict, POST /v2.0/networks request options. Deprecated. :param port_create_args: dict, POST /v2.0/ports request options :param ports_per_network: int, number of ports for one network """ network = self._get_or_create_network(network_create_args) for i in range(ports_per_network): port = self._create_port(network, port_create_args) self._delete_port(port) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @validation.external_network_exists("floating_network") @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_list_floating_ips") class CreateAndListFloatingIps(utils.NeutronScenario): def run(self, floating_network=None, floating_ip_args=None): """Create and list floating IPs. Measure the "neutron floating-ip-create" and "neutron floating-ip-list" commands performance. 
:param floating_network: str, external network for floating IP creation :param floating_ip_args: dict, POST /floatingips request options """ floating_ip_args = floating_ip_args or {} self._create_floatingip(floating_network, **floating_ip_args) self._list_floating_ips() @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @validation.external_network_exists("floating_network") @scenario.configure(context={"cleanup": ["neutron"]}, name="NeutronNetworks.create_and_delete_floating_ips") class CreateAndDeleteFloatingIps(utils.NeutronScenario): def run(self, floating_network=None, floating_ip_args=None): """Create and delete floating IPs. Measure the "neutron floating-ip-create" and "neutron floating-ip-delete" commands performance. :param floating_network: str, external network for floating IP creation :param floating_ip_args: dict, POST /floatingips request options """ floating_ip_args = floating_ip_args or {} floating_ip = self._create_floatingip(floating_network, **floating_ip_args) self._delete_floating_ip(floating_ip["floatingip"]) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(name="NeutronNetworks.list_agents") class ListAgents(utils.NeutronScenario): def run(self, agent_args=None): """List all neutron agents. This simple scenario tests the "neutron agent-list" command by listing all the neutron agents. :param agent_args: dict, POST /v2.0/agents request options """ agent_args = agent_args or {} self._list_agents(**agent_args) rally-0.9.1/rally/plugins/openstack/scenarios/neutron/security_groups.py0000664000567000056710000000732713073417720030115 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import consts from rally.plugins.openstack import scenario from rally.plugins.openstack.scenarios.neutron import utils from rally.task import validation """Scenarios for Neutron Security Groups.""" @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name=("NeutronSecurityGroup" ".create_and_list_security_groups")) class CreateAndListSecurityGroups(utils.NeutronScenario): def run(self, security_group_create_args=None): """Create and list Neutron security-groups. Measure the "neutron security-group-create" and "neutron security-group-list" command performance. :param security_group_create_args: dict, POST /v2.0/security-groups request options """ security_group_create_args = security_group_create_args or {} self._create_security_group(**security_group_create_args) self._list_security_groups() @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name=("NeutronSecurityGroup" ".create_and_delete_security_groups")) class CreateAndDeleteSecurityGroups(utils.NeutronScenario): def run(self, security_group_create_args=None): """Create and delete Neutron security-groups. Measure the "neutron security-group-create" and "neutron security-group-delete" command performance. 
:param security_group_create_args: dict, POST /v2.0/security-groups request options """ security_group_create_args = security_group_create_args or {} security_group = self._create_security_group( **security_group_create_args) self._delete_security_group(security_group) @validation.required_services(consts.Service.NEUTRON) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["neutron"]}, name=("NeutronSecurityGroup" ".create_and_update_security_groups")) class CreateAndUpdateSecurityGroups(utils.NeutronScenario): def run(self, security_group_create_args=None, security_group_update_args=None): """Create and update Neutron security-groups. Measure the "neutron security-group-create" and "neutron security-group-update" command performance. :param security_group_create_args: dict, POST /v2.0/security-groups request options :param security_group_update_args: dict, PUT /v2.0/security-groups update options """ security_group_create_args = security_group_create_args or {} security_group_update_args = security_group_update_args or {} security_group = self._create_security_group( **security_group_create_args) self._update_security_group(security_group, **security_group_update_args) rally-0.9.1/rally/plugins/openstack/scenarios/neutron/utils.py0000775000567000056710000006342313073417720026011 0ustar jenkinsjenkins00000000000000# Copyright 2014: Intel Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import random

from oslo_config import cfg

from rally.common.i18n import _
from rally.common import logging
from rally import exceptions
from rally.plugins.openstack import scenario
from rally.plugins.openstack.wrappers import network as network_wrapper
from rally.task import atomic
from rally.task import utils


# Tunables for LBaaS v2 loadbalancer readiness polling; registered under
# the "benchmark" option group below.
NEUTRON_BENCHMARK_OPTS = [
    cfg.FloatOpt(
        "neutron_create_loadbalancer_timeout",
        default=float(500),
        help="Neutron create loadbalancer timeout"),
    cfg.FloatOpt(
        "neutron_create_loadbalancer_poll_interval",
        default=float(2),
        help="Neutron create loadbalancer poll interval")
]

CONF = cfg.CONF
benchmark_group = cfg.OptGroup(name="benchmark", title="benchmark options")
CONF.register_group(benchmark_group)
CONF.register_opts(NEUTRON_BENCHMARK_OPTS, group=benchmark_group)

LOG = logging.getLogger(__name__)


class NeutronScenario(scenario.OpenStackScenario):
    """Base class for Neutron scenarios with basic atomic actions."""

    SUBNET_IP_VERSION = 4
    # TODO(rkiran): modify in case LBaaS-v2 requires
    LB_METHOD = "ROUND_ROBIN"
    LB_PROTOCOL = "HTTP"
    LB_PROTOCOL_PORT = 80
    HM_TYPE = "PING"
    HM_MAX_RETRIES = 3
    HM_DELAY = 20
    HM_TIMEOUT = 10

    def _get_network_id(self, network, **kwargs):
        """Get Neutron network ID for the network name.

        param network: str, network name/id
        param kwargs: dict, network options
        returns: str, Neutron network-id
        """
        # Matches either name or id; listing is done without an atomic
        # timer so the lookup does not pollute scenario results.
        networks = self._list_networks(atomic_action=False)
        for net in networks:
            if (net["name"] == network) or (net["id"] == network):
                return net["id"]
        msg = (_("Network %s not found.") % network)
        raise exceptions.NotFoundException(message=msg)

    @atomic.action_timer("neutron.create_network")
    def _create_network(self, network_create_args):
        """Create neutron network.

        :param network_create_args: dict, POST /v2.0/networks request options
        :returns: neutron network dict
        """
        # NOTE(review): mutates the caller-supplied dict by injecting a
        # random "name" — callers pass throwaway dicts.
        network_create_args["name"] = self.generate_random_name()
        return self.clients("neutron").create_network(
            {"network": network_create_args})

    @atomic.optional_action_timer("neutron.list_networks")
    def _list_networks(self, **kwargs):
        """Return user networks list.

        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :param kwargs: network list options
        """
        return self.clients("neutron").list_networks(**kwargs)["networks"]

    @atomic.action_timer("neutron.list_agents")
    def _list_agents(self, **kwargs):
        """Fetches agents.

        :param kwargs: neutron agent list options
        :returns: user agents list
        """
        return self.clients("neutron").list_agents(**kwargs)["agents"]

    @atomic.action_timer("neutron.update_network")
    def _update_network(self, network, network_update_args):
        """Update the network.

        This atomic function updates the network with network_update_args.

        :param network: Network object
        :param network_update_args: dict, POST /v2.0/networks update options
        :returns: updated neutron network dict
        """
        # A fresh random name is always set so the update is observable.
        network_update_args["name"] = self.generate_random_name()
        body = {"network": network_update_args}
        return self.clients("neutron").update_network(
            network["network"]["id"], body)

    @atomic.action_timer("neutron.show_network")
    def _show_network(self, network, **kwargs):
        """show network details.

        :param network: Network object
        :param kwargs: dict, POST /v2.0/networks show options
        :returns: details of the network
        """
        return self.clients("neutron").show_network(
            network["network"]["id"], **kwargs)

    @atomic.action_timer("neutron.delete_network")
    def _delete_network(self, network):
        """Delete neutron network.

        :param network: Network object
        """
        # NOTE(review): unlike _update_network, this expects the inner
        # network dict (keyed by "id"), not the {"network": ...} wrapper.
        self.clients("neutron").delete_network(network["id"])

    @atomic.action_timer("neutron.create_subnet")
    def _create_subnet(self, network, subnet_create_args, start_cidr=None):
        """Create neutron subnet.

        :param network: neutron network dict
        :param subnet_create_args: POST /v2.0/subnets request options
        :returns: neutron subnet dict
        """
        network_id = network["network"]["id"]

        # Auto-generate a CIDR only when the caller did not supply one.
        if not subnet_create_args.get("cidr"):
            start_cidr = start_cidr or "10.2.0.0/24"
            subnet_create_args["cidr"] = (
                network_wrapper.generate_cidr(start_cidr=start_cidr))

        subnet_create_args["network_id"] = network_id
        subnet_create_args["name"] = self.generate_random_name()
        subnet_create_args.setdefault("ip_version", self.SUBNET_IP_VERSION)

        return self.clients("neutron").create_subnet(
            {"subnet": subnet_create_args})

    @atomic.action_timer("neutron.list_subnets")
    def _list_subnets(self):
        """Returns user subnetworks list."""
        return self.clients("neutron").list_subnets()["subnets"]

    @atomic.action_timer("neutron.update_subnet")
    def _update_subnet(self, subnet, subnet_update_args):
        """Update the neutron subnet.

        This atomic function updates the subnet with subnet_update_args.

        :param subnet: Subnet object
        :param subnet_update_args: dict, PUT /v2.0/subnets update options
        :returns: updated neutron subnet dict
        """
        subnet_update_args["name"] = self.generate_random_name()
        body = {"subnet": subnet_update_args}
        return self.clients("neutron").update_subnet(
            subnet["subnet"]["id"], body)

    @atomic.action_timer("neutron.delete_subnet")
    def _delete_subnet(self, subnet):
        """Delete neutron subnet

        :param subnet: Subnet object
        """
        self.clients("neutron").delete_subnet(subnet["subnet"]["id"])

    @atomic.action_timer("neutron.create_router")
    def _create_router(self, router_create_args, external_gw=False):
        """Create neutron router.

        :param router_create_args: POST /v2.0/routers request options
        :returns: neutron router dict
        """
        router_create_args["name"] = self.generate_random_name()

        if external_gw:
            # NOTE(review): iterates all networks; because setdefault is
            # used, the FIRST external network found wins — later external
            # networks do not override the gateway info.
            for network in self._list_networks():
                if network.get("router:external"):
                    external_network = network
                    gw_info = {"network_id": external_network["id"],
                               "enable_snat": True}
                    router_create_args.setdefault("external_gateway_info",
                                                  gw_info)

        return self.clients("neutron").create_router(
            {"router": router_create_args})

    @atomic.action_timer("neutron.list_routers")
    def _list_routers(self):
        """Returns user routers list."""
        return self.clients("neutron").list_routers()["routers"]

    @atomic.action_timer("neutron.delete_router")
    def _delete_router(self, router):
        """Delete neutron router

        :param router: Router object
        """
        self.clients("neutron").delete_router(router["router"]["id"])

    @atomic.action_timer("neutron.update_router")
    def _update_router(self, router, router_update_args):
        """Update the neutron router.

        This atomic function updates the router with router_update_args.

        :param router: dict, neutron router
        :param router_update_args: dict, PUT /v2.0/routers update options
        :returns: updated neutron router dict
        """
        router_update_args["name"] = self.generate_random_name()
        body = {"router": router_update_args}
        return self.clients("neutron").update_router(
            router["router"]["id"], body)

    @atomic.action_timer("neutron.create_port")
    def _create_port(self, network, port_create_args):
        """Create neutron port.

        :param network: neutron network dict
        :param port_create_args: POST /v2.0/ports request options
        :returns: neutron port dict
        """
        port_create_args["network_id"] = network["network"]["id"]
        port_create_args["name"] = self.generate_random_name()
        return self.clients("neutron").create_port({"port": port_create_args})

    @atomic.action_timer("neutron.list_ports")
    def _list_ports(self):
        """Return user ports list."""
        return self.clients("neutron").list_ports()["ports"]

    @atomic.action_timer("neutron.update_port")
    def _update_port(self, port, port_update_args):
        """Update the neutron port.

        This atomic function updates port with port_update_args.

        :param port: dict, neutron port
        :param port_update_args: dict, PUT /v2.0/ports update options
        :returns: updated neutron port dict
        """
        port_update_args["name"] = self.generate_random_name()
        body = {"port": port_update_args}
        return self.clients("neutron").update_port(port["port"]["id"], body)

    @atomic.action_timer("neutron.delete_port")
    def _delete_port(self, port):
        """Delete neutron port.

        :param port: Port object
        """
        self.clients("neutron").delete_port(port["port"]["id"])

    @logging.log_deprecated_args(_("network_create_args is deprecated; "
                                   "use the network context instead"),
                                 "0.1.0", "network_create_args")
    def _get_or_create_network(self, network_create_args=None):
        """Get a network from context, or create a new one.

        This lets users either create networks with the 'network' context,
        provide existing networks with the 'existing_network' context, or
        let the scenario create a default network for them. Running this
        without one of the network contexts is deprecated.

        :param network_create_args: Deprecated way to provide network
                                    creation args; use the network context
                                    instead.
        :returns: Network dict
        """
        if "networks" in self.context["tenant"]:
            # Pick a random pre-created network from the context.
            return {"network":
                    random.choice(self.context["tenant"]["networks"])}
        else:
            LOG.warning(_("Running this scenario without either the 'network' "
                          "or 'existing_network' context is deprecated"))
            return self._create_network(network_create_args or {})

    def _create_subnets(self, network,
                        subnet_create_args=None,
                        subnet_cidr_start=None,
                        subnets_per_network=1):
        """Create new subnets in the given network.

        :param network: network to create subnets in
        :param subnet_create_args: dict, POST /v2.0/subnets request options
        :param subnet_cidr_start: str, start value for subnets CIDR
        :param subnets_per_network: int, number of subnets for one network
        :returns: List of subnet dicts
        """
        return [self._create_subnet(network, subnet_create_args or {},
                                    subnet_cidr_start)
                for i in range(subnets_per_network)]

    def _create_network_and_subnets(self,
                                    network_create_args=None,
                                    subnet_create_args=None,
                                    subnets_per_network=1,
                                    subnet_cidr_start="1.0.0.0/24"):
        """Create network and subnets.

        :parm network_create_args: dict, POST /v2.0/networks request options
        :parm subnet_create_args: dict, POST /v2.0/subnets request options
        :parm subnets_per_network: int, number of subnets for one network
        :parm subnet_cidr_start: str, start value for subnets CIDR
        :returns: tuple of result network and subnets list
        """
        network = self._create_network(network_create_args or {})
        subnets = self._create_subnets(network, subnet_create_args,
                                       subnet_cidr_start, subnets_per_network)
        return network, subnets

    def _create_network_structure(self, network_create_args=None,
                                  subnet_create_args=None,
                                  subnet_cidr_start=None,
                                  subnets_per_network=None,
                                  router_create_args=None):
        """Create a network and a given number of subnets and routers.

        :param network_create_args: dict, POST /v2.0/networks request options
        :param subnet_create_args: dict, POST /v2.0/subnets request options
        :param subnet_cidr_start: str, start value for subnets CIDR
        :param subnets_per_network: int, number of subnets for one network
        :param router_create_args: dict, POST /v2.0/routers request options
        :returns: tuple of (network, subnets, routers)
        """
        network = self._create_network(network_create_args or {})
        subnets = self._create_subnets(network, subnet_create_args,
                                       subnet_cidr_start,
                                       subnets_per_network)
        # One router per subnet, each attached to its subnet.
        routers = []
        for subnet in subnets:
            router = self._create_router(router_create_args or {})
            self._add_interface_router(subnet["subnet"],
                                       router["router"])
            routers.append(router)
        return (network, subnets, routers)

    @atomic.action_timer("neutron.add_interface_router")
    def _add_interface_router(self, subnet, router):
        """Connect subnet to router.

        :param subnet: dict, neutron subnet
        :param router: dict, neutron router
        """
        self.clients("neutron").add_interface_router(
            router["id"], {"subnet_id": subnet["id"]})

    @atomic.action_timer("neutron.remove_interface_router")
    def _remove_interface_router(self, subnet, router):
        """Remove subnet from router

        :param subnet: dict, neutron subnet
        :param router: dict, neutron router
        """
        self.clients("neutron").remove_interface_router(
            router["id"], {"subnet_id": subnet["id"]})

    @atomic.optional_action_timer("neutron.create_pool")
    def _create_lb_pool(self, subnet_id, **pool_create_args):
        """Create LB pool(v1)

        :param subnet_id: str, neutron subnet-id
        :param pool_create_args: dict, POST /lb/pools request options
        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :returns: dict, neutron lb pool
        """
        args = {"lb_method": self.LB_METHOD,
                "protocol": self.LB_PROTOCOL,
                "name": self.generate_random_name(),
                "subnet_id": subnet_id}
        args.update(pool_create_args)
        return self.clients("neutron").create_pool({"pool": args})

    def _create_v1_pools(self, networks, **pool_create_args):
        """Create LB pools(v1)

        :param networks: list, neutron networks
        :param pool_create_args: dict, POST /lb/pools request options
        :returns: list, neutron lb pools
        """
        subnets = []
        pools = []
        for net in networks:
            subnets.extend(net.get("subnets", []))
        # All per-subnet creations are timed as one aggregate atomic
        # action whose name embeds the subnet count.
        with atomic.ActionTimer(self, "neutron.create_%s_pools" %
                                len(subnets)):
            for subnet_id in subnets:
                pools.append(self._create_lb_pool(
                    subnet_id, atomic_action=False, **pool_create_args))
        return pools

    @atomic.action_timer("neutron.list_pools")
    def _list_v1_pools(self, **kwargs):
        """Return user lb pool list(v1)."""
        return self.clients("neutron").list_pools(**kwargs)

    @atomic.action_timer("neutron.delete_pool")
    def _delete_v1_pool(self, pool):
        """Delete neutron pool.

        :param pool: Pool object
        """
        self.clients("neutron").delete_pool(pool["id"])

    @atomic.action_timer("neutron.update_pool")
    def _update_v1_pool(self, pool, **pool_update_args):
        """Update pool.

        This atomic function updates the pool with pool_update_args.

        :param pool: Pool object
        :param pool_update_args: dict, POST /lb/pools update options
        :returns: updated neutron pool dict
        """
        pool_update_args["name"] = self.generate_random_name()
        body = {"pool": pool_update_args}
        return self.clients("neutron").update_pool(pool["pool"]["id"], body)

    # NOTE(review): deliberately not decorated with an atomic timer —
    # callers time VIP creation in aggregate via atomic.ActionTimer.
    def _create_v1_vip(self, pool, **vip_create_args):
        """Create VIP(v1)

        :parm pool: dict, neutron lb-pool
        :parm vip_create_args: dict, POST /lb/vips request options
        :returns: dict, neutron lb vip
        """
        args = {"protocol": self.LB_PROTOCOL,
                "protocol_port": self.LB_PROTOCOL_PORT,
                "name": self.generate_random_name(),
                "pool_id": pool["pool"]["id"],
                "subnet_id": pool["pool"]["subnet_id"]}
        args.update(vip_create_args)
        return self.clients("neutron").create_vip({"vip": args})

    @atomic.action_timer("neutron.list_vips")
    def _list_v1_vips(self, **kwargs):
        """Return user lb vip list(v1)."""
        return self.clients("neutron").list_vips(**kwargs)

    @atomic.action_timer("neutron.delete_vip")
    def _delete_v1_vip(self, vip):
        """Delete neutron vip.

        :param vip: neutron Virtual IP object
        """
        self.clients("neutron").delete_vip(vip["id"])

    @atomic.action_timer("neutron.update_vip")
    def _update_v1_vip(self, vip, **vip_update_args):
        """Updates vip.

        This atomic function updates vip name and admin state

        :param vip: Vip object
        :param vip_update_args: dict, POST /lb/vips update options
        :returns: updated neutron vip dict
        """
        vip_update_args["name"] = self.generate_random_name()
        body = {"vip": vip_update_args}
        return self.clients("neutron").update_vip(vip["vip"]["id"], body)

    @atomic.action_timer("neutron.create_floating_ip")
    def _create_floatingip(self, floating_network, **floating_ip_args):
        """Create floating IP with floating_network.

        param: floating_network: str, external network to create floating IP
        param: floating_ip_args: dict, POST /floatingips create options
        returns: dict, neutron floating IP
        """
        floating_network_id = self._get_network_id(
            floating_network)
        args = {"floating_network_id": floating_network_id}
        args.update(floating_ip_args)
        return self.clients("neutron").create_floatingip({"floatingip": args})

    @atomic.action_timer("neutron.list_floating_ips")
    def _list_floating_ips(self, **kwargs):
        """Return floating IPs list."""
        return self.clients("neutron").list_floatingips(**kwargs)

    @atomic.action_timer("neutron.delete_floating_ip")
    def _delete_floating_ip(self, floating_ip):
        """Delete floating IP.

        :param: dict, floating IP object
        """
        return self.clients("neutron").delete_floatingip(floating_ip["id"])

    @atomic.optional_action_timer("neutron.create_healthmonitor")
    def _create_v1_healthmonitor(self, **healthmonitor_create_args):
        """Create LB healthmonitor.

        This atomic function creates healthmonitor with the provided
        healthmonitor_create_args.

        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :param healthmonitor_create_args: dict, POST /lb/healthmonitors
        :returns: neutron healthmonitor dict
        """
        args = {"type": self.HM_TYPE,
                "delay": self.HM_DELAY,
                "max_retries": self.HM_MAX_RETRIES,
                "timeout": self.HM_TIMEOUT}
        args.update(healthmonitor_create_args)
        return self.clients("neutron").create_health_monitor(
            {"health_monitor": args})

    @atomic.action_timer("neutron.list_healthmonitors")
    def _list_v1_healthmonitors(self, **kwargs):
        """List LB healthmonitors.

        This atomic function lists all helthmonitors.

        :param kwargs: optional parameters
        :returns: neutron lb healthmonitor list
        """
        return self.clients("neutron").list_health_monitors(**kwargs)

    @atomic.action_timer("neutron.delete_healthmonitor")
    def _delete_v1_healthmonitor(self, healthmonitor):
        """Delete neutron healthmonitor.

        :param healthmonitor: neutron healthmonitor dict
        """
        self.clients("neutron").delete_health_monitor(healthmonitor["id"])

    @atomic.action_timer("neutron.update_healthmonitor")
    def _update_v1_healthmonitor(self, healthmonitor,
                                 **healthmonitor_update_args):
        """Update neutron healthmonitor.

        :param healthmonitor: neutron lb healthmonitor dict
        :param healthmonitor_update_args: POST /lb/healthmonitors
        update options
        :returns: updated neutron lb healthmonitor dict
        """
        # NOTE(review): unlike the other updaters, no random "name" is
        # injected here — healthmonitors have no name attribute.
        body = {"health_monitor": healthmonitor_update_args}
        return self.clients("neutron").update_health_monitor(
            healthmonitor["health_monitor"]["id"], body)

    @atomic.action_timer("neutron.create_security_group")
    def _create_security_group(self, **security_group_create_args):
        """Create Neutron security-group.

        param: security_group_create_args: dict, POST /v2.0/security-groups
                                          request options
        return: dict, neutron security-group
        """
        security_group_create_args["name"] = self.generate_random_name()
        return self.clients("neutron").create_security_group(
            {"security_group": security_group_create_args})

    @atomic.action_timer("neutron.delete_security_group")
    def _delete_security_group(self, security_group):
        """Delete Neutron security group.

        param: security_group: dict, neutron security_group
        """
        return self.clients("neutron").delete_security_group(
            security_group["security_group"]["id"])

    @atomic.action_timer("neutron.list_security_groups")
    def _list_security_groups(self, **kwargs):
        """Return list of Neutron security groups."""
        return self.clients("neutron").list_security_groups(**kwargs)

    @atomic.action_timer("neutron.update_security_group")
    def _update_security_group(self, security_group,
                               **security_group_update_args):
        """Update Neutron security-group.

        param: security_group: dict, neutron security_group
        param: security_group_update_args: dict, POST /v2.0/security-groups
                                           update options
        return: dict, updated neutron security-group
        """
        security_group_update_args["name"] = self.generate_random_name()
        body = {"security_group": security_group_update_args}
        return self.clients("neutron").update_security_group(
            security_group["security_group"]["id"], body)

    def update_loadbalancer_resource(self, lb):
        # Resource-refresh callback used by utils.wait_for_status below;
        # translates a 404 into GetResourceNotFound, everything else into
        # GetResourceFailure.
        try:
            new_lb = self.clients("neutron").show_loadbalancer(lb["id"])
        except Exception as e:
            if getattr(e, "status_code", 400) == 404:
                raise exceptions.GetResourceNotFound(resource=lb)
            raise exceptions.GetResourceFailure(resource=lb, err=e)
        return new_lb["loadbalancer"]

    @atomic.optional_action_timer("neutron.create_lbaasv2_loadbalancer")
    def _create_lbaasv2_loadbalancer(self, subnet_id, **lb_create_args):
        """Create LB loadbalancer(v2)

        :param subnet_id: str, neutron subnet-id
        :param lb_create_args: dict, POST /lbaas/loadbalancers request options
        :param atomic_action: True if this is an atomic action. added and
                              handled by the optional_action_timer()
                              decorator
        :returns: dict, neutron lb
        """
        args = {"name": self.generate_random_name(),
                "vip_subnet_id": subnet_id}
        args.update(lb_create_args)
        neutronclient = self.clients("neutron")
        lb = neutronclient.create_loadbalancer({"loadbalancer": args})
        lb = lb["loadbalancer"]
        # Block until the LB reaches ACTIVE provisioning status, bounded
        # by the benchmark-group timeout/poll-interval config options.
        lb = utils.wait_for_status(
            lb,
            ready_statuses=["ACTIVE"],
            status_attr="provisioning_status",
            update_resource=self.update_loadbalancer_resource,
            timeout=CONF.benchmark.neutron_create_loadbalancer_timeout,
            check_interval=(
                CONF.benchmark.neutron_create_loadbalancer_poll_interval)
        )
        return lb

    @atomic.action_timer("neutron.list_lbaasv2_loadbalancers")
    def _list_lbaasv2_loadbalancers(self, retrieve_all=True, **lb_list_args):
        """List LB loadbalancers(v2)

        :param lb_list_args: dict, POST /lbaas/loadbalancers request options
        :returns: dict, neutron lb loadbalancers(v2)
        """
        return self.clients("neutron").list_loadbalancers(retrieve_all,
                                                          **lb_list_args)
rally-0.9.1/rally/plugins/openstack/scenarios/neutron/loadbalancer_v1.py0000664000567000056710000002715413073417720027664 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.neutron import utils
from rally.task import atomic
from rally.task import validation


"""Scenarios for Neutron Loadbalancer v1."""


@validation.restricted_parameters("subnet_id", subdict="pool_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_list_pools")
class CreateAndListPools(utils.NeutronScenario):

    def run(self, pool_create_args=None):
        """Create a pool(v1) and then list pools(v1).

        Measure the "neutron lb-pool-list" command performance. The scenario
        creates a pool for every subnet and then lists pools.

        :param pool_create_args: dict, POST /lb/pools request options
        """
        pool_create_args = pool_create_args or {}
        # Networks come from the required "network" context.
        networks = self.context.get("tenant", {}).get("networks", [])
        self._create_v1_pools(networks, **pool_create_args)
        self._list_v1_pools()


@validation.restricted_parameters("subnet_id", subdict="pool_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_delete_pools")
class CreateAndDeletePools(utils.NeutronScenario):

    def run(self, pool_create_args=None):
        """Create pools(v1) and delete pools(v1).

        Measure the "neutron lb-pool-create" and "neutron lb-pool-delete"
        command performance. The scenario creates a pool for every subnet
        and then deletes those pools.

        :param pool_create_args: dict, POST /lb/pools request options
        """
        pool_create_args = pool_create_args or {}
        networks = self.context.get("tenant", {}).get("networks", [])
        pools = self._create_v1_pools(networks, **pool_create_args)
        for pool in pools:
            # _delete_v1_pool expects the inner pool dict.
            self._delete_v1_pool(pool["pool"])


@validation.restricted_parameters("subnet_id", subdict="pool_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_update_pools")
class CreateAndUpdatePools(utils.NeutronScenario):

    def run(self, pool_update_args=None, pool_create_args=None):
        """Create pools(v1) and update pools(v1).

        Measure the "neutron lb-pool-create" and "neutron lb-pool-update"
        command performance. The scenario creates a pool for every subnet
        and then update those pools.

        :param pool_create_args: dict, POST /lb/pools request options
        :param pool_update_args: dict, POST /lb/pools update options
        """
        pool_create_args = pool_create_args or {}
        pool_update_args = pool_update_args or {}
        networks = self.context.get("tenant", {}).get("networks", [])
        pools = self._create_v1_pools(networks, **pool_create_args)
        for pool in pools:
            self._update_v1_pool(pool, **pool_update_args)


@validation.restricted_parameters(["pool_id", "subnet_id"],
                                  subdict="vip_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_list_vips")
class CreateAndListVips(utils.NeutronScenario):

    def run(self, pool_create_args=None, vip_create_args=None):
        """Create a vip(v1) and then list vips(v1).

        Measure the "neutron lb-vip-create" and "neutron lb-vip-list" command
        performance. The scenario creates a vip for every pool created and
        then lists vips.

        :param vip_create_args: dict, POST /lb/vips request options
        :param pool_create_args: dict, POST /lb/pools request options
        """
        vip_create_args = vip_create_args or {}
        pool_create_args = pool_create_args or {}
        networks = self.context.get("tenant", {}).get("networks", [])
        pools = self._create_v1_pools(networks, **pool_create_args)
        # VIP creations are timed in aggregate; the atomic action name
        # embeds the pool count.
        with atomic.ActionTimer(self, "neutron.create_%s_vips" % len(pools)):
            for pool in pools:
                self._create_v1_vip(pool, **vip_create_args)
        self._list_v1_vips()


@validation.restricted_parameters(["pool_id", "subnet_id"],
                                  subdict="vip_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_delete_vips")
class CreateAndDeleteVips(utils.NeutronScenario):

    def run(self, pool_create_args=None, vip_create_args=None):
        """Create a vip(v1) and then delete vips(v1).

        Measure the "neutron lb-vip-create" and "neutron lb-vip-delete"
        command performance. The scenario creates a vip for pool and
        then deletes those vips.

        :param pool_create_args: dict, POST /lb/pools request options
        :param vip_create_args: dict, POST /lb/vips request options
        """
        vips = []
        pool_create_args = pool_create_args or {}
        vip_create_args = vip_create_args or {}
        networks = self.context.get("tenant", {}).get("networks", [])
        pools = self._create_v1_pools(networks, **pool_create_args)
        with atomic.ActionTimer(self, "neutron.create_%s_vips" % len(pools)):
            for pool in pools:
                vips.append(self._create_v1_vip(pool, **vip_create_args))
        for vip in vips:
            self._delete_v1_vip(vip["vip"])


@validation.restricted_parameters(["pool_id", "subnet_id"],
                                  subdict="vip_create_args")
@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV1.create_and_update_vips")
class CreateAndUpdateVips(utils.NeutronScenario):

    def run(self, pool_create_args=None,
            vip_update_args=None, vip_create_args=None):
        """Create vips(v1) and update vips(v1).

        Measure the "neutron lb-vip-create" and "neutron lb-vip-update"
        command performance. The scenario creates a pool for every subnet
        and then update those pools.

        :param pool_create_args: dict, POST /lb/pools request options
        :param vip_create_args: dict, POST /lb/vips request options
        :param vip_update_args: dict, POST /lb/vips update options
        """
        vips = []
        pool_create_args = pool_create_args or {}
        vip_create_args = vip_create_args or {}
        vip_update_args = vip_update_args or {}
        networks = self.context.get("tenant", {}).get("networks", [])
        pools = self._create_v1_pools(networks, **pool_create_args)
        with atomic.ActionTimer(self, "neutron.create_%s_vips" % len(pools)):
            for pool in pools:
                vips.append(self._create_v1_vip(pool, **vip_create_args))
        for vip in vips:
            self._update_v1_vip(vip, **vip_update_args)


@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["neutron"]},
                    name=("NeutronLoadbalancerV1"
                          ".create_and_list_healthmonitors"))
class CreateAndListHealthmonitors(utils.NeutronScenario):

    def run(self, healthmonitor_create_args=None):
        """Create healthmonitors(v1) and list healthmonitors(v1).

        Measure the "neutron lb-healthmonitor-list" command performance. This
        scenario creates healthmonitors and lists them.

        :param healthmonitor_create_args: dict, POST /lb/healthmonitors
        request options
        """
        healthmonitor_create_args = healthmonitor_create_args or {}
        self._create_v1_healthmonitor(**healthmonitor_create_args)
        self._list_v1_healthmonitors()


@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["neutron"]},
                    name=("NeutronLoadbalancerV1"
                          ".create_and_delete_healthmonitors"))
class CreateAndDeleteHealthmonitors(utils.NeutronScenario):

    def run(self, healthmonitor_create_args=None):
        """Create a healthmonitor(v1) and delete healthmonitors(v1).

        Measure the "neutron lb-healthmonitor-create" and "neutron
        lb-healthmonitor-delete" command performance. The scenario creates
        healthmonitors and deletes those healthmonitors.

        :param healthmonitor_create_args: dict, POST /lb/healthmonitors
        request options
        """
        healthmonitor_create_args = healthmonitor_create_args or {}
        healthmonitor = self._create_v1_healthmonitor(
            **healthmonitor_create_args)
        self._delete_v1_healthmonitor(healthmonitor["health_monitor"])


@validation.required_neutron_extensions("lbaas")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["neutron"]},
                    name=("NeutronLoadbalancerV1"
                          ".create_and_update_healthmonitors"))
class CreateAndUpdateHealthmonitors(utils.NeutronScenario):

    def run(self, healthmonitor_create_args=None,
            healthmonitor_update_args=None):
        """Create a healthmonitor(v1) and update healthmonitors(v1).

        Measure the "neutron lb-healthmonitor-create" and "neutron
        lb-healthmonitor-update" command performance. The scenario creates
        healthmonitors and then updates them.

        :param healthmonitor_create_args: dict, POST /lb/healthmonitors
        request options
        :param healthmonitor_update_args: dict, POST /lb/healthmonitors
        update options
        """
        healthmonitor_create_args = healthmonitor_create_args or {}
        # Default update: randomize max_retries in [1, 9].
        healthmonitor_update_args = healthmonitor_update_args or {
            "max_retries": random.choice(range(1, 10))}
        healthmonitor = self._create_v1_healthmonitor(
            **healthmonitor_create_args)
        self._update_v1_healthmonitor(healthmonitor,
                                      **healthmonitor_update_args)
rally-0.9.1/rally/plugins/openstack/scenarios/neutron/loadbalancer_v2.py0000775000567000056710000000364013073417720027662 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.neutron import utils
from rally.task import validation


"""Scenarios for Neutron Loadbalancer v2."""


@validation.required_neutron_extensions("lbaasv2")
@validation.required_services(consts.Service.NEUTRON)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["neutron"]},
                    name="NeutronLoadbalancerV2.create_and_list_loadbalancers")
class CreateAndListLoadbalancers(utils.NeutronScenario):

    def run(self, lb_create_args=None):
        """Create a loadbalancer(v2) and then list loadbalancers(v2).

        Measure the "neutron lbaas-loadbalancer-list" command performance.
        The scenario creates a loadbalancer for every subnet and then lists
        loadbalancers.

        :param lb_create_args: dict, POST /lbaas/loadbalancers request
        options
        """
        lb_create_args = lb_create_args or {}
        subnets = []
        # Collect every subnet from the tenant networks supplied by the
        # required "network" context.
        networks = self.context.get("tenant", {}).get("networks", [])
        for network in networks:
            subnets.extend(network.get("subnets", []))
        for subnet_id in subnets:
            self._create_lbaasv2_loadbalancer(subnet_id, **lb_create_args)
        self._list_lbaasv2_loadbalancers()
rally-0.9.1/rally/plugins/openstack/scenarios/keystone/0000775000567000056710000000000013073420067024431 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/keystone/__init__.py0000664000567000056710000000000013073417716026537 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/scenarios/keystone/utils.py0000664000567000056710000002636313073417716026164 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid from rally.common.i18n import _LW from rally.common import logging from rally.plugins.openstack import scenario from rally.plugins.openstack.wrappers import keystone as keystone_wrapper from rally.task import atomic LOG = logging.getLogger(__name__) class KeystoneScenario(scenario.OpenStackScenario): """Base class for Keystone scenarios with basic atomic actions.""" def __init__(self, context=None, admin_clients=None, clients=None): super(KeystoneScenario, self).__init__(context, admin_clients, clients) LOG.warning(_LW( "Class %s is deprecated since Rally 0.8.0 and will be removed " "soon. Use " "rally.plugins.openstack.services.identity.identity.Identity " "instead.") % self.__class__) @atomic.action_timer("keystone.create_user") def _user_create(self, email=None, **kwargs): """Creates keystone user with random name. :param kwargs: Other optional parameters to create users like "tenant_id", "enabled". :returns: keystone user instance """ name = self.generate_random_name() # NOTE(boris-42): password and email parameters are required by # keystone client v2.0. This should be cleanuped # when we switch to v3. password = kwargs.pop("password", str(uuid.uuid4())) email = email or (name + "@rally.me") return self.admin_clients("keystone").users.create( name, password=password, email=email, **kwargs) @atomic.action_timer("keystone.update_user_enabled") def _update_user_enabled(self, user, enabled): """Enable or disable a user. :param user: The user to enable or disable :param enabled: Boolean indicating if the user should be enabled (True) or disabled (False) """ self.admin_clients("keystone").users.update_enabled(user, enabled) @atomic.action_timer("keystone.validate_token") def _token_validate(self, token): """Validate a token for a user. 
:param token: The token to validate """ self.admin_clients("keystone").tokens.validate(token) @atomic.optional_action_timer("keystone.token_authenticate") def _authenticate_token(self, name, password, tenant_id, tenant): """Authenticate user token. :param name: The user username :param password: User password for authentication :param tenant_id: Tenant id for authentication :param tenant: Tenant on which authentication will take place :param atomic_action: bool, enable user authentication to be tracked as an atomic action. added and handled by the optional_action_timer() decorator """ return self.admin_clients("keystone").tokens.authenticate(name, tenant_id, tenant, password) def _resource_delete(self, resource): """"Delete keystone resource.""" r = "keystone.delete_%s" % resource.__class__.__name__.lower() with atomic.ActionTimer(self, r): resource.delete() @atomic.action_timer("keystone.create_tenant") def _tenant_create(self, **kwargs): """Creates keystone tenant with random name. :param kwargs: Other optional parameters :returns: keystone tenant instance """ name = self.generate_random_name() return self.admin_clients("keystone").tenants.create(name, **kwargs) @atomic.action_timer("keystone.create_service") def _service_create(self, service_type=None, description=None): """Creates keystone service with random name. :param service_type: type of the service :param description: description of the service :returns: keystone service instance """ service_type = service_type or "rally_test_type" description = description or self.generate_random_name() return self.admin_clients("keystone").services.create( self.generate_random_name(), service_type, description=description) @atomic.action_timer("keystone.create_users") def _users_create(self, tenant, users_per_tenant): """Adds users to a tenant. 
:param tenant: tenant object :param users_per_tenant: number of users in per tenant """ for i in range(users_per_tenant): name = self.generate_random_name() password = name email = name + "@rally.me" self.admin_clients("keystone").users.create( name, password=password, email=email, tenant_id=tenant.id) @atomic.action_timer("keystone.create_role") def _role_create(self, **kwargs): """Creates keystone user role with random name. :param **kwargs: Optional additional arguments for roles creation :returns: keystone user role """ admin_clients = keystone_wrapper.wrap(self.admin_clients("keystone")) role = admin_clients.create_role( self.generate_random_name(), **kwargs) return role @atomic.action_timer("keystone.role_delete") def _role_delete(self, role_id): """Creates keystone user role with random name. :param user_id: id of the role """ admin_clients = keystone_wrapper.wrap(self.admin_clients("keystone")) admin_clients.delete_role(role_id) @atomic.action_timer("keystone.list_users") def _list_users(self): """List users.""" return self.admin_clients("keystone").users.list() @atomic.action_timer("keystone.list_tenants") def _list_tenants(self): """List tenants.""" return self.admin_clients("keystone").tenants.list() @atomic.action_timer("keystone.service_list") def _list_services(self): """List services.""" return self.admin_clients("keystone").services.list() @atomic.action_timer("keystone.list_roles") def _list_roles_for_user(self, user, tenant): """List user roles. :param user: user for whom roles will be listed :param tenant: tenant on which user have roles """ return self.admin_clients("keystone").roles.roles_for_user( user, tenant) @atomic.action_timer("keystone.add_role") def _role_add(self, user, role, tenant): """Add role to a given user on a tenant. 
:param user: user to be assigned the role to :param role: user role to assign with :param tenant: tenant on which assignation will take place """ self.admin_clients("keystone").roles.add_user_role(user, role, tenant) @atomic.action_timer("keystone.remove_role") def _role_remove(self, user, role, tenant): """Dissociate user with role. :param user: user to be stripped with role :param role: role to be dissociated with user :param tenant: tenant on which assignation took place """ self.admin_clients("keystone").roles.remove_user_role(user, role, tenant) @atomic.action_timer("keystone.get_tenant") def _get_tenant(self, tenant_id): """Get given tenant. :param tenant_id: tenant object """ return self.admin_clients("keystone").tenants.get(tenant_id) @atomic.action_timer("keystone.get_user") def _get_user(self, user_id): """Get given user. :param user_id: user object """ return self.admin_clients("keystone").users.get(user_id) @atomic.action_timer("keystone.get_role") def _get_role(self, role_id): """Get given user role. :param role_id: user role object """ return self.admin_clients("keystone").roles.get(role_id) @atomic.action_timer("keystone.get_service") def _get_service(self, service_id): """Get service with given service id. :param service_id: id for service object """ return self.admin_clients("keystone").services.get(service_id) def _get_service_by_name(self, name): for i in self._list_services(): if i.name == name: return i @atomic.action_timer("keystone.delete_service") def _delete_service(self, service_id): """Delete service. :param service_id: service to be deleted """ self.admin_clients("keystone").services.delete(service_id) @atomic.action_timer("keystone.update_tenant") def _update_tenant(self, tenant, description=None): """Update tenant name and description. 
:param tenant: tenant to be updated :param description: tenant description to be set """ name = self.generate_random_name() description = description or self.generate_random_name() self.admin_clients("keystone").tenants.update(tenant.id, name, description) @atomic.action_timer("keystone.update_user_password") def _update_user_password(self, user_id, password): """Update user password. :param user_id: id of the user :param password: new password """ admin_clients = self.admin_clients("keystone") if admin_clients.version in ["v3"]: admin_clients.users.update(user_id, password=password) else: admin_clients.users.update_password(user_id, password) @atomic.action_timer("keystone.create_ec2creds") def _create_ec2credentials(self, user_id, tenant_id): """Create ec2credentials. :param user_id: User ID for which to create credentials :param tenant_id: Tenant ID for which to create credentials :returns: Created ec2-credentials object """ return self.clients("keystone").ec2.create(user_id, tenant_id) @atomic.action_timer("keystone.list_ec2creds") def _list_ec2credentials(self, user_id): """List of access/secret pairs for a user_id. :param user_id: List all ec2-credentials for User ID :returns: Return ec2-credentials list """ return self.clients("keystone").ec2.list(user_id) @atomic.action_timer("keystone.delete_ec2creds") def _delete_ec2credential(self, user_id, access): """Delete ec2credential. :param user_id: User ID for which to delete credential :param access: access key for ec2credential to delete """ self.clients("keystone").ec2.delete(user_id, access) rally-0.9.1/rally/plugins/openstack/scenarios/keystone/basic.py0000775000567000056710000004115613073417720026100 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Benchmark scenarios for Keystone. """ from rally.common import logging from rally.plugins.openstack import scenario from rally.plugins.openstack.services.identity import identity from rally.task import validation class KeystoneBasic(scenario.OpenStackScenario): """Base class for Keystone scenarios with initialized service object.""" def __init__(self, context=None, admin_clients=None, clients=None): super(KeystoneBasic, self).__init__(context, admin_clients, clients) if hasattr(self, "_admin_clients"): self.admin_keystone = identity.Identity( self._admin_clients, name_generator=self.generate_random_name, atomic_inst=self.atomic_actions()) if hasattr(self, "_clients"): self.keystone = identity.Identity( self._clients, name_generator=self.generate_random_name, atomic_inst=self.atomic_actions()) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_user") class CreateUser(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_user is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=10, **kwargs): """Create a keystone user with random name. :param kwargs: Other optional parameters to create users like "tenant_id", "enabled". 
""" self.admin_keystone.create_user(**kwargs) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_delete_user") class CreateDeleteUser(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_delete_user is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=10, **kwargs): """Create a keystone user with random name and then delete it. :param kwargs: Other optional parameters to create users like "tenant_id", "enabled". """ user = self.admin_keystone.create_user(**kwargs) self.admin_keystone.delete_user(user.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_user_set_enabled_and_delete") class CreateUserSetEnabledAndDelete(KeystoneBasic): def run(self, enabled=True, **kwargs): """Create a keystone user, enable or disable it, and delete it. :param enabled: Initial state of user 'enabled' flag. The user will be created with 'enabled' set to this value, and then it will be toggled. :param kwargs: Other optional parameters to create user. """ user = self.admin_keystone.create_user(enabled=enabled, **kwargs) self.admin_keystone.update_user(user.id, enabled=(not enabled)) self.admin_keystone.delete_user(user.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_tenant") class CreateTenant(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_tenant is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=10, **kwargs): """Create a keystone tenant with random name. 
:param kwargs: Other optional parameters """ self.admin_keystone.create_project(**kwargs) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.authenticate_user_and_validate_token") class AuthenticateUserAndValidateToken(KeystoneBasic): def run(self): """Authenticate and validate a keystone token.""" token = self.admin_keystone.fetch_token() self.admin_keystone.validate_token(token) @validation.number("users_per_tenant", minval=1) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_tenant_with_users") class CreateTenantWithUsers(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_tenant_with_users is ignored", "0.1.2", ["name_length"], once=True) def run(self, users_per_tenant, name_length=10, **kwargs): """Create a keystone tenant and several users belonging to it. :param users_per_tenant: number of users to create for the tenant :param kwargs: Other optional parameters for tenant creation :returns: keystone tenant instance """ tenant = self.admin_keystone.create_project(**kwargs) self.admin_keystone.create_users(tenant.id, number_of_users=users_per_tenant) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_list_users") class CreateAndListUsers(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_and_list_users is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=10, **kwargs): """Create a keystone user with random name and list all users. :param kwargs: Other optional parameters to create users like "tenant_id", "enabled". 
""" kwargs.pop("name", None) self.admin_keystone.create_user(**kwargs) self.admin_keystone.list_users() @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_list_tenants") class CreateAndListTenants(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_and_list_tenants is ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=10, **kwargs): """Create a keystone tenant with random name and list all tenants. :param kwargs: Other optional parameters """ self.admin_keystone.create_project(**kwargs) self.admin_keystone.list_projects() @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.add_and_remove_user_role") class AddAndRemoveUserRole(KeystoneBasic): def run(self): """Create a user role add to a user and disassociate.""" tenant_id = self.context["tenant"]["id"] user_id = self.context["user"]["id"] role = self.admin_keystone.create_role() self.admin_keystone.add_role(role_id=role.id, user_id=user_id, project_id=tenant_id) self.admin_keystone.revoke_role(role.id, user_id=user_id, project_id=tenant_id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_delete_role") class CreateAndDeleteRole(KeystoneBasic): def run(self): """Create a user role and delete it.""" role = self.admin_keystone.create_role() self.admin_keystone.delete_role(role.id) @validation.required_openstack(admin=True, users=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_add_and_list_user_roles") class CreateAddAndListUserRoles(KeystoneBasic): def run(self): """Create user role, add it and list user roles for given user.""" tenant_id = self.context["tenant"]["id"] user_id = self.context["user"]["id"] role = self.admin_keystone.create_role() 
self.admin_keystone.add_role(user_id=user_id, role_id=role.id, project_id=tenant_id) self.admin_keystone.list_roles(user_id=user_id, project_id=tenant_id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.get_entities") class GetEntities(KeystoneBasic): def run(self, service_name="keystone"): """Get instance of a tenant, user, role and service by id's. An ephemeral tenant, user, and role are each created. By default, fetches the 'keystone' service. This can be overridden (for instance, to get the 'Identity Service' service on older OpenStack), or None can be passed explicitly to service_name to create a new service and then query it by ID. :param service_name: The name of the service to get by ID; or None, to create an ephemeral service and get it by ID. """ project = self.admin_keystone.create_project() user = self.admin_keystone.create_user(project_id=project.id) role = self.admin_keystone.create_role() self.admin_keystone.get_project(project.id) self.admin_keystone.get_user(user.id) self.admin_keystone.get_role(role.id) if service_name is None: service = self.admin_keystone.create_service() else: service = self.admin_keystone.get_service_by_name(service_name) self.admin_keystone.get_service(service.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_delete_service") class CreateAndDeleteService(KeystoneBasic): @logging.log_deprecated_args( "The 'name' argument to create_and_delete_service will be ignored", "0.0.5", ["name"]) def run(self, name=None, service_type=None, description=None): """Create and delete service. 
:param service_type: type of the service :param description: description of the service """ service = self.admin_keystone.create_service(service_type=service_type, description=description) self.admin_keystone.delete_service(service.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_update_and_delete_tenant") class CreateUpdateAndDeleteTenant(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' argument to create_update_and_delete_tenant is " "ignored", "0.1.2", ["name_length"], once=True) def run(self, name_length=None, **kwargs): """Create, update and delete tenant. :param kwargs: Other optional parameters for tenant creation """ project = self.admin_keystone.create_project(**kwargs) new_name = self.generate_random_name() new_description = self.generate_random_name() self.admin_keystone.update_project(project.id, name=new_name, description=new_description) self.admin_keystone.delete_project(project.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_user_update_password") class CreateUserUpdatePassword(KeystoneBasic): @logging.log_deprecated_args( "The 'name_length' and 'password_length' arguments to " "create_user_update_password are ignored", "0.1.2", ["name_length", "password_length"], once=True) def run(self, name_length=None, password_length=None): """Create user and update password for that user.""" user = self.admin_keystone.create_user() password = self.generate_random_name() self.admin_keystone.update_user(user.id, password=password) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_list_services") class CreateAndListServices(KeystoneBasic): @logging.log_deprecated_args( "The 'name' argument to create_and_list_services will be ignored", "0.0.5", ["name"]) def run(self, name=None, 
service_type=None, description=None): """Create and list services. :param service_type: type of the service :param description: description of the service """ self.admin_keystone.create_service(service_type=service_type, description=description) self.admin_keystone.list_services() @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["keystone"]}, name="KeystoneBasic.create_and_list_ec2credentials") class CreateAndListEc2Credentials(KeystoneBasic): def run(self): """Create and List all keystone ec2-credentials.""" self.keystone.create_ec2credentials( self.context["user"]["id"], project_id=self.context["tenant"]["id"]) self.keystone.list_ec2credentials(self.context["user"]["id"]) @validation.required_openstack(users=True) @scenario.configure(context={"cleanup": ["keystone"]}, name="KeystoneBasic.create_and_delete_ec2credential") class CreateAndDeleteEc2Credential(KeystoneBasic): def run(self): """Create and delete keystone ec2-credential.""" creds = self.keystone.create_ec2credentials( self.context["user"]["id"], project_id=self.context["tenant"]["id"]) self.keystone.delete_ec2credential( self.context["user"]["id"], access=creds.access) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_get_role") class CreateAndGetRole(KeystoneBasic): def run(self, **kwargs): """Create a user role and get it detailed information. :param kwargs: Optional additional arguments for roles creation """ role = self.admin_keystone.create_role(**kwargs) self.admin_keystone.get_role(role.id) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_list_roles") class CreateAddListRoles(KeystoneBasic): def run(self, create_role_kwargs=None, list_role_kwargs=None): """Create a role, then list all roles. 
:param create_role_kwargs: Optional additional arguments for roles create :param list_role_kwargs: Optional additional arguments for roles list """ create_role_kwargs = create_role_kwargs or {} list_role_kwargs = list_role_kwargs or {} role = self.admin_keystone.create_role(**create_role_kwargs) msg = "Role isn't created" self.assertTrue(role, err_msg=msg) all_roles = self.admin_keystone.list_roles(**list_role_kwargs) msg = ("Created role is not in the" " list of all available roles") self.assertIn(role, all_roles, err_msg=msg) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}, name="KeystoneBasic.create_and_update_user") class CreateAndUpdateUser(KeystoneBasic): def run(self, create_user_kwargs=None, update_user_kwargs=None): """Create user and update the user. :param create_user_kwargs: Optional additional arguments for user creation :param update_user_kwargs: Optional additional arguments for user updation """ create_user_kwargs = create_user_kwargs or {} user = self.admin_keystone.create_user(**create_user_kwargs) self.admin_keystone.update_user(user.id, **update_user_kwargs) user_data = self.admin_clients("keystone").users.get(user.id) for args in update_user_kwargs: msg = ("%s isn't updated" % args) self.assertEqual(getattr(user_data, str(args)), update_user_kwargs[args], err_msg=msg) rally-0.9.1/rally/plugins/openstack/cleanup/0000775000567000056710000000000013073420067022231 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/cleanup/__init__.py0000664000567000056710000000000013073417716024337 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/openstack/cleanup/base.py0000664000567000056710000001237113073417720023523 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from rally.task import utils CONF = cfg.CONF CLEANUP_OPTS = [ cfg.IntOpt("resource_deletion_timeout", default=600, help="A timeout in seconds for deleting resources"), cfg.IntOpt("cleanup_threads", default=20, help="Number of cleanup threads to run") ] cleanup_group = cfg.OptGroup(name="cleanup", title="Cleanup Options") CONF.register_group(cleanup_group) CONF.register_opts(CLEANUP_OPTS, cleanup_group) # NOTE(andreykurilin): There are cases when there is no way to use any kind # of "name" for resource as an identifier of alignment resource to the # particular task run and even to Rally itself. Previously, we used empty # strings as a workaround for name matching specific templates, but # theoretically such behaviour can hide other cases when resource should have # a name property, but it is missed. # Let's use instances of specific class to return as a name of resources # which do not have names at all. class NoName(object): def __init__(self, resource_type): self.resource_type = resource_type def __repr__(self): return "" % self.resource_type def resource(service, resource, order=0, admin_required=False, perform_for_admin_only=False, tenant_resource=False, max_attempts=3, timeout=CONF.cleanup.resource_deletion_timeout, interval=1, threads=CONF.cleanup.cleanup_threads): """Decorator that overrides resource specification. Just put it on top of your resource class and specify arguments that you need. :param service: It is equal to client name for corresponding service. E.g. 
"nova", "cinder" or "zaqar" :param resource: Client manager name for resource. E.g. in case of nova.servers you should write here "servers" :param order: Used to adjust priority of cleanup for different resource types :param admin_required: Admin user is required :param perform_for_admin_only: Perform cleanup for admin user only :param tenant_resource: Perform deletion only 1 time per tenant :param max_attempts: Max amount of attempts to delete single resource :param timeout: Max duration of deletion in seconds :param interval: Resource status pooling interval :param threads: Amount of threads (workers) that are deleting resources simultaneously """ def inner(cls): # TODO(boris-42): This can be written better I believe =) cls._service = service cls._resource = resource cls._order = order cls._admin_required = admin_required cls._perform_for_admin_only = perform_for_admin_only cls._max_attempts = max_attempts cls._timeout = timeout cls._interval = interval cls._threads = threads cls._tenant_resource = tenant_resource return cls return inner @resource(service=None, resource=None) class ResourceManager(object): """Base class for cleanup plugins for specific resources. You should use @resource decorator to specify major configuration of resource manager. Usually you should specify: service, resource and order. If project python client is very specific, you can override delete(), list() and is_deleted() methods to make them fit to your case. """ def __init__(self, resource=None, admin=None, user=None, tenant_uuid=None): self.admin = admin self.user = user self.raw_resource = resource self.tenant_uuid = tenant_uuid def _manager(self): client = self._admin_required and self.admin or self.user return getattr(getattr(client, self._service)(), self._resource) def id(self): """Returns id of resource.""" return self.raw_resource.id def name(self): """Returns name of resource.""" return self.raw_resource.name def is_deleted(self): """Checks if the resource is deleted. 
Fetch resource by id from service and check it status. In case of NotFound or status is DELETED or DELETE_COMPLETE returns True, otherwise False. """ try: resource = self._manager().get(self.id()) except Exception as e: return getattr(e, "code", getattr(e, "http_status", 400)) == 404 return utils.get_status(resource) in ("DELETED", "DELETE_COMPLETE") def delete(self): """Delete resource that corresponds to instance of this class.""" self._manager().delete(self.id()) def list(self): """List all resources specific for admin or user.""" return self._manager().list() rally-0.9.1/rally/plugins/openstack/cleanup/manager.py0000664000567000056710000002654013073417720024226 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time from rally.common import broker from rally.common.i18n import _ from rally.common import logging from rally.common.plugin import discover from rally.common.plugin import plugin from rally.common import utils as rutils from rally import osclients from rally.plugins.openstack.cleanup import base LOG = logging.getLogger(__name__) class SeekAndDestroy(object): cache = {} def __init__(self, manager_cls, admin, users, api_versions=None, resource_classes=None, task_id=None): """Resource deletion class. This class contains method exterminate() that finds and deletes all resources created by Rally. 
:param manager_cls: subclass of base.ResourceManager :param admin: admin credential like in context["admin"] :param users: users credentials like in context["users"] :param api_versions: dict of client API versions :param resource_classes: Resource classes to match resource names against :param task_id: The UUID of task to match resource names against """ self.manager_cls = manager_cls self.admin = admin self.users = users or [] self.api_versions = api_versions self.resource_classes = resource_classes or [ rutils.RandomNameGeneratorMixin] self.task_id = task_id def _get_cached_client(self, user): """Simplifies initialization and caching OpenStack clients.""" if not user: return None if self.api_versions: key = str((user["credential"], sorted(self.api_versions.items()))) else: key = user["credential"] if key not in self.cache: self.cache[key] = osclients.Clients( user["credential"], api_info=self.api_versions) return self.cache[key] def _delete_single_resource(self, resource): """Safe resource deletion with retries and timeouts. Send request to delete resource, in case of failures repeat it few times. After that pull status of resource until it's deleted. Writes in LOG warning with UUID of resource that wasn't deleted :param resource: instance of resource manager initiated with resource that should be deleted. """ msg_kw = { "uuid": resource.id(), "name": resource.name() or "", "service": resource._service, "resource": resource._resource } LOG.debug( "Deleting %(service)s %(resource)s object %(name)s (%(uuid)s)" % msg_kw) try: rutils.retry(resource._max_attempts, resource.delete) except Exception as e: msg_kw["reason"] = e LOG.warning( _("Resource deletion failed, max retries exceeded for " "%(service)s.%(resource)s: %(uuid)s. 
Reason: %(reason)s") % msg_kw) if logging.is_debug(): LOG.exception(e) else: started = time.time() failures_count = 0 while time.time() - started < resource._timeout: try: if resource.is_deleted(): return except Exception as e: LOG.warning( _("Seems like %s.%s.is_deleted(self) method is broken " "It shouldn't raise any exceptions.") % (resource.__module__, type(resource).__name__)) LOG.exception(e) # NOTE(boris-42): Avoid LOG spamming in case of bad # is_deleted() method failures_count += 1 if failures_count > resource._max_attempts: break finally: rutils.interruptable_sleep(resource._interval) LOG.warning(_("Resource deletion failed, timeout occurred for " "%(service)s.%(resource)s: %(uuid)s.") % msg_kw) def _publisher(self, queue): """Publisher for deletion jobs. This method iterates over all users, lists all resources (using manager_cls) and puts jobs for deletion. Every deletion job contains tuple with two values: user and resource uuid that should be deleted. In case of tenant based resource, uuids are fetched only from one user per tenant. """ def _publish(admin, user, manager): try: for raw_resource in rutils.retry(3, manager.list): queue.append((admin, user, raw_resource)) except Exception as e: LOG.warning( _("Seems like %s.%s.list(self) method is broken. 
" "It shouldn't raise any exceptions.") % (manager.__module__, type(manager).__name__)) LOG.exception(e) if self.admin and (not self.users or self.manager_cls._perform_for_admin_only): manager = self.manager_cls( admin=self._get_cached_client(self.admin)) _publish(self.admin, None, manager) else: visited_tenants = set() admin_client = self._get_cached_client(self.admin) for user in self.users: if (self.manager_cls._tenant_resource and user["tenant_id"] in visited_tenants): continue visited_tenants.add(user["tenant_id"]) manager = self.manager_cls( admin=admin_client, user=self._get_cached_client(user), tenant_uuid=user["tenant_id"]) _publish(self.admin, user, manager) def _consumer(self, cache, args): """Method that consumes single deletion job.""" admin, user, raw_resource = args manager = self.manager_cls( resource=raw_resource, admin=self._get_cached_client(admin), user=self._get_cached_client(user), tenant_uuid=user and user["tenant_id"]) if (isinstance(manager.name(), base.NoName) or rutils.name_matches_object( manager.name(), *self.resource_classes, task_id=self.task_id, exact=False)): self._delete_single_resource(manager) def exterminate(self): """Delete all resources for passed users, admin and resource_mgr.""" broker.run(self._publisher, self._consumer, consumers_count=self.manager_cls._threads) def list_resource_names(admin_required=None): """List all resource managers names. Returns all service names and all combination of service.resource names. 
:param admin_required: None -> returns all ResourceManagers True -> returns only admin ResourceManagers False -> returns only non admin ResourceManagers """ res_mgrs = discover.itersubclasses(base.ResourceManager) if admin_required is not None: res_mgrs = filter(lambda cls: cls._admin_required == admin_required, res_mgrs) names = set() for cls in res_mgrs: names.add(cls._service) names.add("%s.%s" % (cls._service, cls._resource)) return names def find_resource_managers(names=None, admin_required=None): """Returns resource managers. :param names: List of names in format or . that is used for filtering resource manager classes :param admin_required: None -> returns all ResourceManagers True -> returns only admin ResourceManagers False -> returns only non admin ResourceManagers """ names = set(names or []) resource_managers = [] for manager in discover.itersubclasses(base.ResourceManager): if admin_required is not None: if admin_required != manager._admin_required: continue if (manager._service in names or "%s.%s" % (manager._service, manager._resource) in names): resource_managers.append(manager) resource_managers.sort(key=lambda x: x._order) found_names = set() for mgr in resource_managers: found_names.add(mgr._service) found_names.add("%s.%s" % (mgr._service, mgr._resource)) missing = names - found_names if missing: LOG.warning("Missing resource managers: %s" % ", ".join(missing)) return resource_managers def cleanup(names=None, admin_required=None, admin=None, users=None, api_versions=None, superclass=plugin.Plugin, task_id=None): """Generic cleaner. This method goes through all plugins. Filter those and left only plugins with _service from services or _resource from resources. Then goes through all passed users and using cleaners cleans all related resources. :param names: Use only resource managers that have names in this list. 
There are in as _service or (%s.%s % (_service, _resource)) from :param admin_required: If None -> return all plugins If True -> return only admin plugins If False -> return only non admin plugins :param admin: rally.common.objects.Credential that corresponds to OpenStack admin. :param users: List of OpenStack users that was used during benchmarking. Every user has next structure: { "id": , "tenant_id": , "credential": } :param superclass: The plugin superclass to perform cleanup for. E.g., this could be ``rally.task.scenario.Scenario`` to cleanup all Scenario resources. :param task_id: The UUID of task """ resource_classes = [cls for cls in discover.itersubclasses(superclass) if issubclass(cls, rutils.RandomNameGeneratorMixin)] if not resource_classes and issubclass(superclass, rutils.RandomNameGeneratorMixin): resource_classes.append(superclass) for manager in find_resource_managers(names, admin_required): LOG.debug("Cleaning up %(service)s %(resource)s objects" % {"service": manager._service, "resource": manager._resource}) SeekAndDestroy(manager, admin, users, api_versions=api_versions, resource_classes=resource_classes, task_id=task_id).exterminate() rally-0.9.1/rally/plugins/openstack/cleanup/resources.py0000775000567000056710000007474113073417720024637 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from boto import exception as boto_exception
from neutronclient.common import exceptions as neutron_exceptions
from novaclient import exceptions as nova_exc
from oslo_config import cfg
from saharaclient.api import base as saharaclient_base

from rally.common import logging
from rally import consts
from rally.plugins.openstack.cleanup import base
from rally.plugins.openstack.services.identity import identity
from rally.plugins.openstack.wrappers import glance as glance_wrapper
from rally.task import utils as task_utils

CONF = cfg.CONF

CONF.import_opt("glance_image_delete_timeout",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")
CONF.import_opt("glance_image_delete_poll_interval",
                "rally.plugins.openstack.scenarios.glance.utils",
                "benchmark")

LOG = logging.getLogger(__name__)


def get_order(start):
    # Each service gets a window of 99 order slots; next() hands out the
    # next slot for every resource class registered in that service section.
    return iter(range(start, start + 99))


class SynchronizedDeletion(object):
    """Mixin for resources whose delete() call is synchronous."""

    def is_deleted(self):
        return True


class QuotaMixin(SynchronizedDeletion, base.ResourceManager):
    # NOTE(andreykurilin): Quotas resources are quite complex in terms of
    #   cleanup. First of all, they do not have name, id fields at all. There
    #   is only one identifier - reference to Keystone Project/Tenant. Also,
    #   we should remove them in case of existing users case... To cover both
    #   cases we should use project name as name field (it will allow to pass
    #   existing users case) and project id as id of resource

    def list(self):
        if not self.tenant_uuid:
            return []
        client = self._admin_required and self.admin or self.user
        project = identity.Identity(client).get_project(self.tenant_uuid)
        return [project]


# MAGNUM

_magnum_order = get_order(80)


@base.resource(service=None, resource=None)
class MagnumMixin(base.ResourceManager):

    def id(self):
        """Returns id of resource."""
        return self.raw_resource.uuid

    def list(self):
        # Magnum paginates with a marker; keep fetching until an empty page.
        result = []
        marker = None
        while True:
            resources = self._manager().list(marker=marker)
            if not resources:
                break
            result.extend(resources)
            marker = resources[-1].uuid
        return result


@base.resource("magnum", "clusters", order=next(_magnum_order),
               tenant_resource=True)
class MagnumCluster(MagnumMixin):
    """Resource class for Magnum cluster."""


@base.resource("magnum", "cluster_templates", order=next(_magnum_order),
               tenant_resource=True)
class MagnumClusterTemplate(MagnumMixin):
    """Resource class for Magnum cluster_template."""


# HEAT

@base.resource("heat", "stacks", order=100, tenant_resource=True)
class HeatStack(base.ResourceManager):
    def name(self):
        return self.raw_resource.stack_name


# SENLIN

_senlin_order = get_order(150)


@base.resource(service=None, resource=None, admin_required=True)
class SenlinMixin(base.ResourceManager):

    def id(self):
        return self.raw_resource["id"]

    def _manager(self):
        client = self._admin_required and self.admin or self.user
        return getattr(client, self._service)()

    def list(self):
        return getattr(self._manager(), self._resource)()

    def delete(self):
        # make singular form of resource name from plural form
        res_name = self._resource[:-1]
        return getattr(self._manager(), "delete_%s" % res_name)(self.id())


@base.resource("senlin", "clusters", admin_required=True,
               order=next(_senlin_order))
class SenlinCluster(SenlinMixin):
    """Resource class for Senlin Cluster."""


@base.resource("senlin", "profiles", order=next(_senlin_order),
               admin_required=False, tenant_resource=True)
class SenlinProfile(SenlinMixin):
    """Resource class for Senlin Profile."""


# NOVA

_nova_order = get_order(200)


@base.resource("nova", "servers", order=next(_nova_order),
               tenant_resource=True)
class NovaServer(base.ResourceManager):
    def list(self):
        """List all servers."""
        return self._manager().list(limit=-1)

    def delete(self):
        # A locked server cannot be deleted; unlock first if needed.
        if getattr(self.raw_resource, "OS-EXT-STS:locked", False):
            self.raw_resource.unlock()
        super(NovaServer, self).delete()


@base.resource("nova", "server_groups", order=next(_nova_order),
               tenant_resource=True)
class NovaServerGroups(base.ResourceManager):
    pass


@base.resource("nova", "floating_ips", order=next(_nova_order))
class NovaFloatingIPs(SynchronizedDeletion, base.ResourceManager):

    def name(self):
        return self.raw_resource.pool


@base.resource("nova", "keypairs", order=next(_nova_order))
class NovaKeypair(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("nova", "security_groups", order=next(_nova_order),
               tenant_resource=True)
class NovaSecurityGroup(SynchronizedDeletion, base.ResourceManager):

    def list(self):
        # The "default" group cannot be deleted, so never list it.
        return filter(lambda x: x.name != "default",
                      super(NovaSecurityGroup, self).list())


@base.resource("nova", "quotas", order=next(_nova_order),
               admin_required=True, tenant_resource=True)
class NovaQuotas(QuotaMixin):
    pass


@base.resource("nova", "flavors", order=next(_nova_order),
               admin_required=True, perform_for_admin_only=True)
class NovaFlavors(base.ResourceManager):
    pass

    def is_deleted(self):
        try:
            self._manager().get(self.name())
        except nova_exc.NotFound:
            return True

        return False


@base.resource("nova", "floating_ips_bulk", order=next(_nova_order),
               admin_required=True)
class NovaFloatingIpsBulk(SynchronizedDeletion, base.ResourceManager):

    def id(self):
        return self.raw_resource.address

    def name(self):
        return None


@base.resource("nova", "networks", order=next(_nova_order),
               admin_required=True, tenant_resource=True)
class NovaNetworks(SynchronizedDeletion, base.ResourceManager):

    def name(self):
        return self.raw_resource.label


@base.resource("nova", "aggregates", order=next(_nova_order),
               admin_required=True, perform_for_admin_only=True)
class NovaAggregate(SynchronizedDeletion, base.ResourceManager):

    def delete(self):
        # An aggregate with hosts cannot be removed; evacuate it first.
        for host in self.raw_resource.hosts:
            self.raw_resource.remove_host(host)
        super(NovaAggregate, self).delete()


# EC2

_ec2_order = get_order(250)


class EC2Mixin(object):

    def _manager(self):
        return getattr(self.user, self._service)()


@base.resource("ec2", "servers", order=next(_ec2_order))
class EC2Server(EC2Mixin, base.ResourceManager):

    def is_deleted(self):
        try:
            instances = self._manager().get_only_instances(
                instance_ids=[self.id()])
        except boto_exception.EC2ResponseError as e:
            # NOTE(wtakase): Nova EC2 API returns 'InvalidInstanceID.NotFound'
            #                if instance not found. In this case, we consider
            #                instance has already been deleted.
            return getattr(e, "error_code") == "InvalidInstanceID.NotFound"

        # NOTE(wtakase): After instance deletion, instance can be 'terminated'
        #                state. If all instance states are 'terminated', this
        #                returns True. And if get_only_instances() returns an
        #                empty list, this also returns True because we consider
        #                instance has already been deleted.
        return all(map(lambda i: i.state == "terminated", instances))

    def delete(self):
        self._manager().terminate_instances(instance_ids=[self.id()])

    def list(self):
        return self._manager().get_only_instances()


# NEUTRON

_neutron_order = get_order(300)


@base.resource(service=None, resource=None, admin_required=True)
class NeutronMixin(SynchronizedDeletion, base.ResourceManager):
    # Neutron has the best client ever, so we need to override everything

    def supports_extension(self, extension):
        exts = self._manager().list_extensions().get("extensions", [])
        if any(ext.get("alias") == extension for ext in exts):
            return True
        return False

    def _manager(self):
        client = self._admin_required and self.admin or self.user
        return getattr(client, self._service)()

    def id(self):
        return self.raw_resource["id"]

    def name(self):
        return self.raw_resource["name"]

    def delete(self):
        delete_method = getattr(self._manager(), "delete_%s" % self._resource)
        delete_method(self.id())

    def list(self):
        resources = self._resource + "s"
        list_method = getattr(self._manager(), "list_%s" % resources)

        return filter(lambda r: r["tenant_id"] == self.tenant_uuid,
                      list_method(tenant_id=self.tenant_uuid)[resources])


class NeutronLbaasV1Mixin(NeutronMixin):

    def list(self):
        if self.supports_extension("lbaas"):
            return super(NeutronLbaasV1Mixin, self).list()
        return []


@base.resource("neutron", "vip", order=next(_neutron_order),
               tenant_resource=True)
class NeutronV1Vip(NeutronLbaasV1Mixin):
    pass


@base.resource("neutron", "health_monitor", order=next(_neutron_order),
               tenant_resource=True)
class NeutronV1Healthmonitor(NeutronLbaasV1Mixin):
    pass


@base.resource("neutron", "pool", order=next(_neutron_order),
               tenant_resource=True)
class NeutronV1Pool(NeutronLbaasV1Mixin):
    pass


class NeutronLbaasV2Mixin(NeutronMixin):

    def list(self):
        if self.supports_extension("lbaasv2"):
            return super(NeutronLbaasV2Mixin, self).list()
        return []


@base.resource("neutron", "loadbalancer", order=next(_neutron_order),
               tenant_resource=True)
class NeutronV2Loadbalancer(NeutronLbaasV2Mixin):

    def is_deleted(self):
        try:
            self._manager().show_loadbalancer(self.id())
        except Exception as e:
            return getattr(e, "status_code", 400) == 404

        return False


# NOTE(andreykurilin): There are scenarios which uses unified way for creating
#   and associating floating ips. They do not care about nova-net and neutron.
#   We should clean floating IPs for them, but hardcoding "neutron.floatingip"
#   cleanup resource should not work in case of Nova-Net.
#   Since we are planning to abandon support of Nova-Network in next rally
#   release, let's apply dirty workaround to handle all resources.
@base.resource("neutron", "floatingip", order=next(_neutron_order),
               tenant_resource=True)
class NeutronFloatingIP(NeutronMixin):
    def name(self):
        return base.NoName(self._resource)

    def list(self):
        if consts.ServiceType.NETWORK not in self.user.services():
            return []
        return super(NeutronFloatingIP, self).list()


@base.resource("neutron", "port", order=next(_neutron_order),
               tenant_resource=True)
class NeutronPort(NeutronMixin):
    # NOTE(andreykurilin): port is the kind of resource that can be created
    #   automatically. In this case it doesn't have name field which matches
    #   our resource name templates. But we still need to identify such
    #   resources, so let's do it by using parent resources.

    ROUTER_INTERFACE_OWNERS = ("network:router_interface",
                               "network:router_interface_distributed",
                               "network:ha_router_replicated_interface")

    ROUTER_GATEWAY_OWNER = "network:router_gateway"

    def __init__(self, *args, **kwargs):
        super(NeutronPort, self).__init__(*args, **kwargs)
        self._cache = {}

    def _get_resources(self, resource):
        # Per-instance cache: list each neutron collection at most once.
        if resource not in self._cache:
            resources = getattr(self._manager(), "list_%s" % resource)()
            self._cache[resource] = [r for r in resources[resource]
                                     if r["tenant_id"] == self.tenant_uuid]
        return self._cache[resource]

    def list(self):
        ports = self._get_resources("ports")
        for port in ports:
            if not port.get("name"):
                parent_name = None
                if (port["device_owner"] in self.ROUTER_INTERFACE_OWNERS or
                        port["device_owner"] == self.ROUTER_GATEWAY_OWNER):
                    # first case is a port created while adding an interface
                    #   to the subnet
                    # second case is a port created while adding gateway for
                    #   the network
                    port_router = [r for r in self._get_resources("routers")
                                   if r["id"] == port["device_id"]]
                    if port_router:
                        parent_name = port_router[0]["name"]
                # NOTE(andreykurilin): in case of existing network usage,
                #   there is no way to identify ports that was created
                #   automatically.
                # FIXME(andreykurilin): find the way to filter ports created
                #   by rally
                # elif port["device_owner"] == "network:dhcp":
                #     # port created while attaching a floating-ip to the VM
                #     if port.get("fixed_ips"):
                #         port_subnets = []
                #         for fixedip in port["fixed_ips"]:
                #             port_subnets.extend(
                #                 [sn for sn in self._get_resources("subnets")
                #                  if sn["id"] == fixedip["subnet_id"]])
                #         if port_subnets:
                #             parent_name = port_subnets[0]["name"]
                # NOTE(andreykurilin): the same case as for floating ips
                # if not parent_name:
                #     port_net = [net for net
                #                 in self._get_resources("networks")
                #                 if net["id"] == port["network_id"]]
                #     if port_net:
                #         parent_name = port_net[0]["name"]
                if parent_name:
                    port["parent_name"] = parent_name
        return ports

    def name(self):
        name = self.raw_resource.get("parent_name",
                                     self.raw_resource.get("name", ""))
        return name or base.NoName(self._resource)

    def delete(self):
        device_owner = self.raw_resource["device_owner"]
        if (device_owner in self.ROUTER_INTERFACE_OWNERS or
                device_owner == self.ROUTER_GATEWAY_OWNER):
            # Router-owned ports are removed via the router API, not
            # delete_port.
            if device_owner == self.ROUTER_GATEWAY_OWNER:
                self._manager().remove_gateway_router(
                    self.raw_resource["device_id"])

            self._manager().remove_interface_router(
                self.raw_resource["device_id"], {"port_id": self.id()})
        else:
            try:
                self._manager().delete_port(self.id())
            except neutron_exceptions.PortNotFoundClient:
                # Port can be already auto-deleted, skip silently
                LOG.debug("Port %s was not deleted. Skip silently because "
                          "port can be already auto-deleted." % self.id())


@base.resource("neutron", "subnet", order=next(_neutron_order),
               tenant_resource=True)
class NeutronSubnet(NeutronMixin):
    pass


@base.resource("neutron", "network", order=next(_neutron_order),
               tenant_resource=True)
class NeutronNetwork(NeutronMixin):
    pass


@base.resource("neutron", "router", order=next(_neutron_order),
               tenant_resource=True)
class NeutronRouter(NeutronMixin):
    pass


@base.resource("neutron", "security_group", order=next(_neutron_order),
               tenant_resource=True)
class NeutronSecurityGroup(NeutronMixin):
    def list(self):
        tenant_sgs = super(NeutronSecurityGroup, self).list()
        # NOTE(pirsriva): Filter out "default" security group deletion
        #   by non-admin role user
        return filter(lambda r: r["name"] != "default",
                      tenant_sgs)


@base.resource("neutron", "quota", order=next(_neutron_order),
               admin_required=True, tenant_resource=True)
class NeutronQuota(QuotaMixin):

    def delete(self):
        self.admin.neutron().delete_quota(self.tenant_uuid)


# CINDER

_cinder_order = get_order(400)


@base.resource("cinder", "backups", order=next(_cinder_order),
               tenant_resource=True)
class CinderVolumeBackup(base.ResourceManager):
    pass


@base.resource("cinder", "volume_types", order=next(_cinder_order),
               admin_required=True, perform_for_admin_only=True)
class CinderVolumeType(base.ResourceManager):
    pass


@base.resource("cinder", "volume_snapshots", order=next(_cinder_order),
               tenant_resource=True)
class CinderVolumeSnapshot(base.ResourceManager):
    pass


@base.resource("cinder", "transfers", order=next(_cinder_order),
               tenant_resource=True)
class CinderVolumeTransfer(base.ResourceManager):
    pass


@base.resource("cinder", "volumes", order=next(_cinder_order),
               tenant_resource=True)
class CinderVolume(base.ResourceManager):
    pass


@base.resource("cinder", "quotas", order=next(_cinder_order),
               admin_required=True, tenant_resource=True)
class CinderQuotas(QuotaMixin, base.ResourceManager):
    pass


# MANILA

_manila_order = get_order(450)


@base.resource("manila", "shares", order=next(_manila_order),
               tenant_resource=True)
class ManilaShare(base.ResourceManager):
    pass


@base.resource("manila", "share_networks", order=next(_manila_order),
               tenant_resource=True)
class ManilaShareNetwork(base.ResourceManager):
    pass


@base.resource("manila", "security_services", order=next(_manila_order),
               tenant_resource=True)
class ManilaSecurityService(base.ResourceManager):
    pass


# GLANCE

@base.resource("glance", "images", order=500, tenant_resource=True)
class GlanceImage(base.ResourceManager):

    def _client(self):
        return getattr(self.admin or self.user, self._service)

    def _wrapper(self):
        return glance_wrapper.wrap(self._client(), self)

    def list(self):
        return self._wrapper().list_images(owner=self.tenant_uuid)

    def delete(self):
        client = self._client()
        client().images.delete(self.raw_resource.id)
        task_utils.wait_for_status(
            self.raw_resource, ["deleted"],
            check_deletion=True,
            update_resource=self._wrapper().get_image,
            timeout=CONF.benchmark.glance_image_delete_timeout,
            check_interval=CONF.benchmark.glance_image_delete_poll_interval)


# SAHARA

_sahara_order = get_order(600)


@base.resource("sahara", "job_executions", order=next(_sahara_order),
               tenant_resource=True)
class SaharaJobExecution(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "jobs", order=next(_sahara_order),
               tenant_resource=True)
class SaharaJob(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "job_binary_internals", order=next(_sahara_order),
               tenant_resource=True)
class SaharaJobBinaryInternals(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "job_binaries", order=next(_sahara_order),
               tenant_resource=True)
class SaharaJobBinary(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "data_sources", order=next(_sahara_order),
               tenant_resource=True)
class SaharaDataSource(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "clusters", order=next(_sahara_order),
               tenant_resource=True)
class SaharaCluster(base.ResourceManager):

    # Need special treatment for Sahara Cluster because of the way the
    # exceptions are described in:
    # https://github.com/openstack/python-saharaclient/blob/master/
    # saharaclient/api/base.py#L145

    def is_deleted(self):
        try:
            self._manager().get(self.id())
            return False
        except saharaclient_base.APIException as e:
            return e.error_code == 404


@base.resource("sahara", "cluster_templates", order=next(_sahara_order),
               tenant_resource=True)
class SaharaClusterTemplate(SynchronizedDeletion, base.ResourceManager):
    pass


@base.resource("sahara", "node_group_templates", order=next(_sahara_order),
               tenant_resource=True)
class SaharaNodeGroup(SynchronizedDeletion, base.ResourceManager):
    pass


# CEILOMETER

@base.resource("ceilometer", "alarms", order=700, tenant_resource=True)
class CeilometerAlarms(SynchronizedDeletion, base.ResourceManager):

    def id(self):
        return self.raw_resource.alarm_id

    def list(self):
        query = [{
            "field": "project_id",
            "op": "eq",
            "value": self.tenant_uuid
        }]
        return self._manager().list(q=query)


# ZAQAR

@base.resource("zaqar", "queues", order=800)
class ZaqarQueues(SynchronizedDeletion, base.ResourceManager):

    def list(self):
        return self.user.zaqar().queues()


# DESIGNATE

_designate_order = get_order(900)


class DesignateResource(SynchronizedDeletion, base.ResourceManager):

    def _manager(self):
        # Map resource names to api / client version
        resource_versions = {
            "domains": "1",
            "servers": "1",
            "recordsets": 2,
            "zones": "2"
        }
        version = resource_versions[self._resource]
        return getattr(getattr(self.user, self._service)(version),
                       self._resource)

    def _walk_pages(self, func, *args, **kwargs):
        """Generator that keeps fetching pages until there's none left."""
        marker = None

        while True:
            items = func(marker=marker, limit=100, *args, **kwargs)
            if not items:
                break
            for item in items:
                yield item
            marker = items[-1]["id"]


@base.resource("designate", "domains", order=next(_designate_order))
class DesignateDomain(DesignateResource):
    pass
@base.resource("designate", "servers", order=next(_designate_order), admin_required=True, perform_for_admin_only=True) class DesignateServer(DesignateResource): pass @base.resource("designate", "recordsets", order=next(_designate_order), tenant_resource=True) class DesignateRecordSets(DesignateResource): def _client(self): # Map resource names to api / client version resource_versions = { "domains": "1", "servers": "1", "recordsets": 2, "zones": "2" } version = resource_versions[self._resource] return getattr(self.user, self._service)(version) def list(self): criterion = {"name": "s_rally_*"} for zone in self._walk_pages(self._client().zones.list, criterion=criterion): for recordset in self._walk_pages(self._client().recordsets.list, zone["id"]): yield recordset @base.resource("designate", "zones", order=next(_designate_order), tenant_resource=True) class DesignateZones(DesignateResource): def list(self): criterion = {"name": "s_rally_*"} return self._walk_pages(self._manager().list, criterion=criterion) # SWIFT _swift_order = get_order(1000) class SwiftMixin(SynchronizedDeletion, base.ResourceManager): def _manager(self): client = self._admin_required and self.admin or self.user return getattr(client, self._service)() def id(self): return self.raw_resource def name(self): # NOTE(stpierre): raw_resource is a list of either [container # name, object name] (as in SwiftObject) or just [container # name] (as in SwiftContainer). return self.raw_resource[-1] def delete(self): delete_method = getattr(self._manager(), "delete_%s" % self._resource) # NOTE(weiwu): *self.raw_resource is required because for deleting # container we are passing only container name, to delete object we # should pass as first argument container and second is object name. 
delete_method(*self.raw_resource) @base.resource("swift", "object", order=next(_swift_order), tenant_resource=True) class SwiftObject(SwiftMixin): def list(self): object_list = [] containers = self._manager().get_account(full_listing=True)[1] for con in containers: objects = self._manager().get_container(con["name"], full_listing=True)[1] for obj in objects: raw_resource = [con["name"], obj["name"]] object_list.append(raw_resource) return object_list @base.resource("swift", "container", order=next(_swift_order), tenant_resource=True) class SwiftContainer(SwiftMixin): def list(self): containers = self._manager().get_account(full_listing=True)[1] return [[con["name"]] for con in containers] # MISTRAL _mistral_order = get_order(1100) class MistralMixin(SynchronizedDeletion, base.ResourceManager): def delete(self): self._manager().delete(self.raw_resource["id"]) @base.resource("mistral", "workbooks", order=next(_mistral_order), tenant_resource=True) class MistralWorkbooks(MistralMixin): def delete(self): self._manager().delete(self.raw_resource["name"]) @base.resource("mistral", "workflows", order=next(_mistral_order), tenant_resource=True) class MistralWorkflows(MistralMixin): pass @base.resource("mistral", "executions", order=next(_mistral_order), tenant_resource=True) class MistralExecutions(MistralMixin): pass # MURANO _murano_order = get_order(1200) @base.resource("murano", "environments", tenant_resource=True, order=next(_murano_order)) class MuranoEnvironments(SynchronizedDeletion, base.ResourceManager): pass @base.resource("murano", "packages", tenant_resource=True, order=next(_murano_order)) class MuranoPackages(base.ResourceManager): def list(self): return filter(lambda x: x.name != "Core library", super(MuranoPackages, self).list()) # IRONIC _ironic_order = get_order(1300) @base.resource("ironic", "node", admin_required=True, order=next(_ironic_order), perform_for_admin_only=True) class IronicNodes(base.ResourceManager): def id(self): return 
self.raw_resource.uuid # FUEL @base.resource("fuel", "environment", order=1400, admin_required=True, perform_for_admin_only=True) class FuelEnvironment(base.ResourceManager): """Fuel environment. That is the only resource that can be deleted by fuelclient explicitly. """ def id(self): return self.raw_resource["id"] def name(self): return self.raw_resource["name"] def is_deleted(self): return not self._manager().get(self.id()) # WATCHER _watcher_order = get_order(1500) class WatcherMixin(SynchronizedDeletion, base.ResourceManager): def id(self): return self.raw_resource.uuid def list(self): return self._manager().list(limit=0) def is_deleted(self): from watcherclient.common.apiclient import exceptions try: self._manager().get(self.id()) return False except exceptions.NotFound: return True @base.resource("watcher", "audit_template", order=next(_watcher_order), admin_required=True, perform_for_admin_only=True) class WatcherTemplate(WatcherMixin): pass @base.resource("watcher", "action_plan", order=next(_watcher_order), admin_required=True, perform_for_admin_only=True) class WatcherActionPlan(WatcherMixin): def name(self): return self.raw_resource.uuid @base.resource("watcher", "audit", order=next(_watcher_order), admin_required=True, perform_for_admin_only=True) class WatcherAudit(WatcherMixin): def name(self): return self.raw_resource.uuid # KEYSTONE _keystone_order = get_order(9000) class KeystoneMixin(SynchronizedDeletion): def _manager(self): return identity.Identity(self.admin) def delete(self): delete_method = getattr(self._manager(), "delete_%s" % self._resource) delete_method(self.id()) def list(self): resources = self._resource + "s" return getattr(self._manager(), "list_%s" % resources)() @base.resource("keystone", "user", order=next(_keystone_order), admin_required=True, perform_for_admin_only=True) class KeystoneUser(KeystoneMixin, base.ResourceManager): pass @base.resource("keystone", "project", order=next(_keystone_order), admin_required=True, 
perform_for_admin_only=True) class KeystoneProject(KeystoneMixin, base.ResourceManager): pass @base.resource("keystone", "service", order=next(_keystone_order), admin_required=True, perform_for_admin_only=True) class KeystoneService(KeystoneMixin, base.ResourceManager): pass @base.resource("keystone", "role", order=next(_keystone_order), admin_required=True, perform_for_admin_only=True) class KeystoneRole(KeystoneMixin, base.ResourceManager): pass # NOTE(andreykurilin): unfortunately, ec2 credentials doesn't have name # and id fields. It makes impossible to identify resources belonging to # particular task. @base.resource("keystone", "ec2", tenant_resource=True, order=next(_keystone_order)) class KeystoneEc2(SynchronizedDeletion, base.ResourceManager): def _manager(self): return identity.Identity(self.user) def id(self): return "n/a" def name(self): return base.NoName(self._resource) @property def user_id(self): return self.user.keystone.auth_ref.user_id def list(self): return self._manager().list_ec2credentials(self.user_id) def delete(self): self._manager().delete_ec2credential( self.user_id, access=self.raw_resource.access) rally-0.9.1/rally/plugins/openstack/types.py0000664000567000056710000002031613073417716022331 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from rally.common.plugin import plugin from rally import exceptions from rally.task import types @plugin.configure(name="nova_flavor") class Flavor(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. :param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if not resource_id: novaclient = clients.nova() resource_id = types._id_from_name( resource_config=resource_config, resources=novaclient.flavors.list(), typename="flavor") return resource_id @plugin.configure(name="ec2_flavor") class EC2Flavor(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to name. In the case of using EC2 API, flavor name is used for launching servers. :param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: name matching resource """ resource_name = resource_config.get("name") if not resource_name: # NOTE(wtakase): gets resource name from OpenStack id novaclient = clients.nova() resource_name = types._name_from_id( resource_config=resource_config, resources=novaclient.flavors.list(), typename="flavor") return resource_name @plugin.configure(name="glance_image") class GlanceImage(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. 
:param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if not resource_id: glanceclient = clients.glance() resource_id = types._id_from_name( resource_config=resource_config, resources=list(glanceclient.images.list()), typename="image") return resource_id @plugin.configure(name="glance_image_args") class GlanceImageArguments(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. :param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_config = copy.deepcopy(resource_config) if "is_public" in resource_config: if "visibility" in resource_config: resource_config.pop("is_public") else: visibility = ("public" if resource_config.pop("is_public") else "private") resource_config["visibility"] = visibility return resource_config @plugin.configure(name="ec2_image") class EC2Image(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to EC2 id. If OpenStack resource id is given, this function gets resource name from the id and then gets EC2 resource id from the name. 
:param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: EC2 id matching resource """ if "name" not in resource_config and "regex" not in resource_config: # NOTE(wtakase): gets resource name from OpenStack id glanceclient = clients.glance() resource_name = types._name_from_id( resource_config=resource_config, resources=list(glanceclient.images.list()), typename="image") resource_config["name"] = resource_name # NOTE(wtakase): gets EC2 resource id from name or regex ec2client = clients.ec2() resource_ec2_id = types._id_from_name( resource_config=resource_config, resources=list(ec2client.get_all_images()), typename="ec2_image") return resource_ec2_id @plugin.configure(name="cinder_volume_type") class VolumeType(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. :param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if not resource_id: cinderclient = clients.cinder() resource_id = types._id_from_name(resource_config=resource_config, resources=cinderclient. volume_types.list(), typename="volume_type") return resource_id @plugin.configure(name="neutron_network") class NeutronNetwork(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. 
:param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if resource_id: return resource_id else: neutronclient = clients.neutron() for net in neutronclient.list_networks()["networks"]: if net["name"] == resource_config.get("name"): return net["id"] raise exceptions.InvalidScenarioArgument( "Neutron network with name '{name}' not found".format( name=resource_config.get("name"))) @plugin.configure(name="watcher_strategy") class WatcherStrategy(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. :param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if not resource_id: watcherclient = clients.watcher() resource_id = types._id_from_name( resource_config=resource_config, resources=[watcherclient.strategy.get( resource_config.get("name"))], typename="strategy", id_attr="uuid") return resource_id @plugin.configure(name="watcher_goal") class WatcherGoal(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Transform the resource config to id. 
:param clients: openstack admin client handles :param resource_config: scenario config with `id`, `name` or `regex` :returns: id matching resource """ resource_id = resource_config.get("id") if not resource_id: watcherclient = clients.watcher() resource_id = types._id_from_name( resource_config=resource_config, resources=[watcherclient.goal.get( resource_config.get("name"))], typename="goal", id_attr="uuid") return resource_id rally-0.9.1/rally/plugins/common/0000775000567000056710000000000013073420067020103 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/__init__.py0000664000567000056710000000000013073417716022211 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/trigger/0000775000567000056710000000000013073420067021546 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/trigger/__init__.py0000664000567000056710000000000013073417716023654 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/trigger/event.py0000664000567000056710000000502313073417716023250 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally.task import trigger @trigger.configure(name="event") class EventTrigger(trigger.Trigger): """Triggers hook on specified event and list of values.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "oneOf": [ { "description": "Triage hook based on specified seconds after " "start of workload.", "properties": { "unit": {"enum": ["time"]}, "at": { "type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "integer", "minimum": 0, } }, }, "required": ["unit", "at"], "additionalProperties": False, }, { "description": "Triage hook based on specific iterations.", "properties": { "unit": {"enum": ["iteration"]}, "at": { "type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "integer", "minimum": 1, } }, }, "required": ["unit", "at"], "additionalProperties": False, }, ] } def get_listening_event(self): return self.config["unit"] def on_event(self, event_type, value=None): if not (event_type == self.get_listening_event() and value in self.config["at"]): # do nothing return super(EventTrigger, self).on_event(event_type, value) rally-0.9.1/rally/plugins/common/trigger/periodic.py0000664000567000056710000000522313073417716023727 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally import consts from rally.task import trigger @trigger.configure(name="periodic") class PeriodicTrigger(trigger.Trigger): """Periodically triggers hook with specified range and step.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "oneOf": [ { "description": "Periodically triage hook based on elapsed time" " after start of workload.", "properties": { "unit": {"enum": ["time"]}, "start": {"type": "integer", "minimum": 0}, "end": {"type": "integer", "minimum": 1}, "step": {"type": "integer", "minimum": 1}, }, "required": ["unit", "step"], "additionalProperties": False, }, { "description": "Periodically triage hook based on iterations.", "properties": { "unit": {"enum": ["iteration"]}, "start": {"type": "integer", "minimum": 1}, "end": {"type": "integer", "minimum": 1}, "step": {"type": "integer", "minimum": 1}, }, "required": ["unit", "step"], "additionalProperties": False, }, ] } def __init__(self, context, task, hook_cls): super(PeriodicTrigger, self).__init__(context, task, hook_cls) self.config.setdefault( "start", 0 if self.config["unit"] == "time" else 1) self.config.setdefault("end", float("Inf")) def get_listening_event(self): return self.config["unit"] def on_event(self, event_type, value=None): if not (event_type == self.get_listening_event() and self.config["start"] <= value <= self.config["end"] and (value - self.config["start"]) % self.config["step"] == 0): # do nothing return super(PeriodicTrigger, self).on_event(event_type, value) rally-0.9.1/rally/plugins/common/hook/0000775000567000056710000000000013073420067021043 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/hook/sys_call.py0000664000567000056710000000477613073417716023253 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import shlex import subprocess from rally.common import logging from rally import consts from rally import exceptions from rally.task import hook LOG = logging.getLogger(__name__) @hook.configure(name="sys_call") class SysCallHook(hook.Hook): """Performs system call.""" CONFIG_SCHEMA = { "$schema": consts.JSON_SCHEMA, "type": "string", "description": "Command to execute." } def run(self): LOG.debug("sys_call hook: Running command %s", self.config) proc = subprocess.Popen(shlex.split(self.config), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) out, err = proc.communicate() LOG.debug("sys_call hook: Command %s returned %s", self.config, proc.returncode) if proc.returncode: self.set_error( exception_name="n/a", # no exception class description="Subprocess returned {}".format(proc.returncode), details=(err or "stdout: %s" % out)) # NOTE(amaretskiy): Try to load JSON for charts, # otherwise save output as-is try: output = json.loads(out) for arg in ("additive", "complete"): for out_ in output.get(arg, []): self.add_output(**{arg: out_}) except (TypeError, ValueError, exceptions.RallyException): self.add_output( complete={"title": "System call", "chart_plugin": "TextArea", "description": "Args: %s" % self.config, "data": ["RetCode: %i" % proc.returncode, "StdOut: %s" % (out or "(empty)"), "StdErr: %s" % (err or "(empty)")]}) rally-0.9.1/rally/plugins/common/hook/__init__.py0000664000567000056710000000000013073417716023151 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/verification/0000775000567000056710000000000013073420067022565 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/verification/testr.py0000664000567000056710000001202713073417716024311 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re import shutil import subprocess from rally.common.i18n import _LE from rally.common.io import subunit_v2 from rally.common import logging from rally.common import utils as common_utils from rally import exceptions from rally.verification import context from rally.verification import manager from rally.verification import utils LOG = logging.getLogger(__name__) TEST_NAME_RE = re.compile(r"^[a-zA-Z_.0-9]+(\[[a-zA-Z-_,=0-9]*\])?$") @context.configure("testr", order=999) class TestrContext(context.VerifierContext): """Context to transform 'run_args' into CLI arguments for testr.""" def __init__(self, ctx): super(TestrContext, self).__init__(ctx) self._tmp_files = [] def setup(self): self.context["testr_cmd"] = ["testr", "run", "--subunit"] run_args = self.verifier.manager.prepare_run_args( self.context.get("run_args", {})) concurrency = run_args.get("concurrency", 0) if concurrency == 0 or concurrency > 1: self.context["testr_cmd"].append("--parallel") if concurrency >= 1: self.context["testr_cmd"].extend( ["--concurrency", str(concurrency)]) load_list = run_args.get("load_list") skip_list = 
run_args.get("skip_list") if skip_list: if not load_list: load_list = self.verifier.manager.list_tests() load_list = set(load_list) - set(skip_list) if load_list: load_list_file = common_utils.generate_random_path() with open(load_list_file, "w") as f: f.write("\n".join(load_list)) self._tmp_files.append(load_list_file) self.context["testr_cmd"].extend(["--load-list", load_list_file]) if run_args.get("failed"): self.context["testr_cmd"].append("--failing") if run_args.get("pattern"): self.context["testr_cmd"].append(run_args.get("pattern")) def cleanup(self): for f in self._tmp_files: if os.path.exists(f): os.remove(f) class TestrLauncher(manager.VerifierManager): """Testr wrapper.""" @property def run_environ(self): return self.environ def _init_testr(self): """Initialize testr.""" test_repository_dir = os.path.join(self.base_dir, ".testrepository") # NOTE(andreykurilin): Is there any possibility that .testrepository # presents in clear repo?! if not os.path.isdir(test_repository_dir): LOG.debug("Initializing testr.") try: utils.check_output(["testr", "init"], cwd=self.repo_dir, env=self.environ) except (subprocess.CalledProcessError, OSError): if os.path.exists(test_repository_dir): shutil.rmtree(test_repository_dir) raise exceptions.RallyException( _LE("Failed to initialize testr.")) def install(self): super(TestrLauncher, self).install() self._init_testr() def list_tests(self, pattern=""): """List all tests.""" output = utils.check_output(["testr", "list-tests", pattern], cwd=self.repo_dir, env=self.environ, debug_output=False) return [t for t in output.split("\n") if TEST_NAME_RE.match(t)] def run(self, context): """Run tests.""" testr_cmd = context["testr_cmd"] run_args = context.get("run_args", {}) LOG.debug("Test(s) started by the command: '%s'.", " ".join(testr_cmd)) stream = subprocess.Popen(testr_cmd, env=self.run_environ, cwd=self.repo_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) xfail_list = run_args.get("xfail_list") skip_list = 
run_args.get("skip_list") results = subunit_v2.parse(stream.stdout, live=True, expected_failures=xfail_list, skipped_tests=skip_list, logger_name=self.verifier.name) stream.wait() return results def prepare_run_args(self, run_args): """Prepare 'run_args' for testr context. This method is called by TestrContext before transforming 'run_args' into CLI arguments for testr. """ return run_args rally-0.9.1/rally/plugins/common/verification/__init__.py0000664000567000056710000000000013073417716024673 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/verification/reporters.py0000664000567000056710000005037013073417720025173 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import datetime as dt import json import re import xml.etree.ElementTree as ET from rally.common import version from rally.ui import utils from rally.verification import reporter SKIP_RE = re.compile("Skipped until Bug: ?(?P\d+) is resolved.") LP_BUG_LINK = "https://launchpad.net/bugs/%s" TIME_FORMAT_ISO8601 = "%Y-%m-%dT%H:%M:%S%z" @reporter.configure("json") class JSONReporter(reporter.VerificationReporter): """Generates verification report in JSON format. An example of the report (All dates, numbers, names appearing in this example are fictitious. Any resemblance to real things is purely coincidental): .. 
code-block:: json {"verifications": { "verification-uuid-1": { "status": "finished", "skipped": 1, "started_at": "2001-01-01T00:00:00", "finished_at": "2001-01-01T00:05:00", "tests_duration": 5, "run_args": { "pattern": "set=smoke", "xfail_list": {"some.test.TestCase.test_xfail": "Some reason why it is expected."}, "skip_list": {"some.test.TestCase.test_skipped": "This test was skipped intentionally"}, }, "success": 1, "expected_failures": 1, "tests_count": 3, "failures": 0, "unexpected_success": 0 }, "verification-uuid-2": { "status": "finished", "skipped": 1, "started_at": "2002-01-01T00:00:00", "finished_at": "2002-01-01T00:05:00", "tests_duration": 5, "run_args": { "pattern": "set=smoke", "xfail_list": {"some.test.TestCase.test_xfail": "Some reason why it is expected."}, "skip_list": {"some.test.TestCase.test_skipped": "This test was skipped intentionally"}, }, "success": 1, "expected_failures": 1, "tests_count": 3, "failures": 1, "unexpected_success": 0 } }, "tests": { "some.test.TestCase.test_foo[tag1,tag2]": { "name": "some.test.TestCase.test_foo", "tags": ["tag1","tag2"], "by_verification": { "verification-uuid-1": { "status": "success", "duration": "1.111" }, "verification-uuid-2": { "status": "success", "duration": "22.222" } } }, "some.test.TestCase.test_skipped[tag1]": { "name": "some.test.TestCase.test_skipped", "tags": ["tag1"], "by_verification": { "verification-uuid-1": { "status": "skipped", "duration": "0", "details": "Skipped until Bug: 666 is resolved." }, "verification-uuid-2": { "status": "skipped", "duration": "0", "details": "Skipped until Bug: 666 is resolved." 
} } }, "some.test.TestCase.test_xfail": { "name": "some.test.TestCase.test_xfail", "tags": [], "by_verification": { "verification-uuid-1": { "status": "xfail", "duration": "3", "details": "Some reason why it is expected.\\n\\n" "Traceback (most recent call last): \\n" " File "fake.py", line 13, in \\n" " yyy()\\n" " File "fake.py", line 11, in yyy\\n" " xxx()\\n" " File "fake.py", line 8, in xxx\\n" " bar()\\n" " File "fake.py", line 5, in bar\\n" " foo()\\n" " File "fake.py", line 2, in foo\\n" " raise Exception()\\n" "Exception" }, "verification-uuid-2": { "status": "xfail", "duration": "3", "details": "Some reason why it is expected.\\n\\n" "Traceback (most recent call last): \\n" " File "fake.py", line 13, in \\n" " yyy()\\n" " File "fake.py", line 11, in yyy\\n" " xxx()\\n" " File "fake.py", line 8, in xxx\\n" " bar()\\n" " File "fake.py", line 5, in bar\\n" " foo()\\n" " File "fake.py", line 2, in foo\\n" " raise Exception()\\n" "Exception" } } }, "some.test.TestCase.test_failed": { "name": "some.test.TestCase.test_failed", "tags": [], "by_verification": { "verification-uuid-2": { "status": "fail", "duration": "4", "details": "Some reason why it is expected.\\n\\n" "Traceback (most recent call last): \\n" " File "fake.py", line 13, in \\n" " yyy()\\n" " File "fake.py", line 11, in yyy\\n" " xxx()\\n" " File "fake.py", line 8, in xxx\\n" " bar()\\n" " File "fake.py", line 5, in bar\\n" " foo()\\n" " File "fake.py", line 2, in foo\\n" " raise Exception()\\n" "Exception" } } } } } """ TIME_FORMAT = TIME_FORMAT_ISO8601 @classmethod def validate(cls, output_destination): """Validate destination of report. 
:param output_destination: Destination of report """ # nothing to check :) pass def _generate(self): """Prepare raw report.""" verifications = collections.OrderedDict() tests = {} for v in self.verifications: verifications[v.uuid] = { "started_at": v.created_at.strftime(self.TIME_FORMAT), "finished_at": v.updated_at.strftime(self.TIME_FORMAT), "status": v.status, "run_args": v.run_args, "tests_count": v.tests_count, "tests_duration": v.tests_duration, "skipped": v.skipped, "success": v.success, "expected_failures": v.expected_failures, "unexpected_success": v.unexpected_success, "failures": v.failures, } for test_id, result in v.tests.items(): if test_id not in tests: # NOTE(ylobankov): It is more convenient to see test ID # at the first place in the report. tags = sorted(result.get("tags", []), reverse=True, key=lambda tag: tag.startswith("id-")) tests[test_id] = {"tags": tags, "name": result["name"], "by_verification": {}} tests[test_id]["by_verification"][v.uuid] = { "status": result["status"], "duration": result["duration"] } reason = result.get("reason", "") if reason: match = SKIP_RE.match(reason) if match: link = LP_BUG_LINK % match.group("bug_number") reason = re.sub(match.group("bug_number"), link, reason) traceback = result.get("traceback", "") sep = "\n\n" if reason and traceback else "" d = (reason + sep + traceback.strip()) or None if d: tests[test_id]["by_verification"][v.uuid]["details"] = d return {"verifications": verifications, "tests": tests} def generate(self): raw_report = json.dumps(self._generate(), indent=4) if self.output_destination: return {"files": {self.output_destination: raw_report}, "open": self.output_destination} else: return {"print": raw_report} @reporter.configure("html") class HTMLReporter(JSONReporter): """Generates verification report in HTML format.""" INCLUDE_LIBS = False # "T" separator of ISO 8601 is not user-friendly enough. 
TIME_FORMAT = "%Y-%m-%d %H:%M:%S" def generate(self): report = self._generate() uuids = report["verifications"].keys() show_comparison_note = False for test in report["tests"].values(): # make as much as possible processing here to reduce processing # at JS side test["has_details"] = False for test_info in test["by_verification"].values(): if "details" not in test_info: test_info["details"] = None elif not test["has_details"]: test["has_details"] = True durations = [] # iter by uuids to store right order for comparison for uuid in uuids: if uuid in test["by_verification"]: durations.append(test["by_verification"][uuid]["duration"]) if float(durations[-1]) < 0.001: durations[-1] = "0" # not to display such little duration in the report test["by_verification"][uuid]["duration"] = "" if len(durations) > 1 and not ( durations[0] == "0" and durations[-1] == "0"): # compare result with result of the first verification diff = float(durations[-1]) - float(durations[0]) result = "%s (" % durations[-1] if diff >= 0: result += "+" result += "%s)" % diff test["by_verification"][uuid]["duration"] = result if not show_comparison_note and len(durations) > 2: # NOTE(andreykurilin): only in case of comparison of more # than 2 results of the same test we should display a note # about the comparison strategy show_comparison_note = True template = utils.get_template("verification/report.html") context = {"uuids": uuids, "verifications": report["verifications"], "tests": report["tests"], "show_comparison_note": show_comparison_note} raw_report = template.render(data=json.dumps(context), include_libs=self.INCLUDE_LIBS) # in future we will support html_static and will need to save more # files if self.output_destination: return {"files": {self.output_destination: raw_report}, "open": self.output_destination} else: return {"print": raw_report} @reporter.configure("html-static") class HTMLStaticReporter(HTMLReporter): """Generates verification report in HTML format with embedded JS/CSS.""" 
INCLUDE_LIBS = True @reporter.configure("junit-xml") class JUnitXMLReporter(reporter.VerificationReporter): """Generates verification report in JUnit-XML format. An example of the report (All dates, numbers, names appearing in this example are fictitious. Any resemblance to real things is purely coincidental): .. code-block:: xml Skipped until Bug: 666 is resolved. It is an unexpected success. The test should fail due to: It should fail, I said! HEEEEEEELP Skipped until Bug: 666 is resolved. """ @classmethod def validate(cls, output_destination): pass def _prettify_xml(self, elem, level=0): """Adds indents. Code of this method was copied from http://effbot.org/zone/element-lib.htm#prettyprint """ i = "\n" + level * " " if len(elem): if not elem.text or not elem.text.strip(): elem.text = i + " " if not elem.tail or not elem.tail.strip(): elem.tail = i for elem in elem: self._prettify_xml(elem, level + 1) if not elem.tail or not elem.tail.strip(): elem.tail = i else: if level and (not elem.tail or not elem.tail.strip()): elem.tail = i def generate(self): root = ET.Element("testsuites") root.append(ET.Comment("Report is generated by Rally %s at %s" % ( version.version_string(), dt.datetime.utcnow().strftime(TIME_FORMAT_ISO8601)))) for v in self.verifications: verification = ET.SubElement(root, "testsuite", { "id": v.uuid, "time": str(v.tests_duration), "tests": str(v.tests_count), "errors": "0", "skipped": str(v.skipped), "failures": str(v.failures + v.unexpected_success), "timestamp": v.created_at.strftime(TIME_FORMAT_ISO8601) }) tests = sorted(v.tests.values(), key=lambda t: (t.get("timestamp", ""), t["name"])) for result in tests: class_name, name = result["name"].rsplit(".", 1) test_case = { "time": result["duration"], "name": name, "classname": class_name } test_id = [tag[3:] for tag in result.get("tags", []) if tag.startswith("id-")] if test_id: test_case["id"] = test_id[0] if "timestamp" in result: test_case["timestamp"] = result["timestamp"] test_case_element 
= ET.SubElement(verification, "testcase", test_case) if result["status"] == "success": # nothing to add pass elif result["status"] == "uxsuccess": # NOTE(andreykurilin): junit doesn't support uxsuccess # status, so let's display it like "fail" with proper # comment. failure = ET.SubElement(test_case_element, "failure") failure.text = ("It is an unexpected success. The test " "should fail due to: %s" % result.get("reason", "Unknown reason")) elif result["status"] == "fail": failure = ET.SubElement(test_case_element, "failure") failure.text = result.get("traceback", None) elif result["status"] == "xfail": # NOTE(andreykurilin): junit doesn't support xfail status, # so let's display it like "success" with proper comment test_case_element.append(ET.Comment( "It is an expected failure due to: %s" % result.get("reason", "Unknown reason"))) trace = result.get("traceback", None) if trace: test_case_element.append(ET.Comment( "Traceback:\n%s" % trace)) elif result["status"] == "skip": skipped = ET.SubElement(test_case_element, "skipped") skipped.text = result.get("reason", "Unknown reason") else: # wtf is it?! we should add validation of results... pass self._prettify_xml(root) raw_report = ET.tostring(root, encoding="utf-8").decode("utf-8") if self.output_destination: return {"files": {self.output_destination: raw_report}, "open": self.output_destination} else: return {"print": raw_report} rally-0.9.1/rally/plugins/common/sla/0000775000567000056710000000000013073420067020662 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/sla/__init__.py0000664000567000056710000000000013073417716022770 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/sla/iteration_time.py0000664000567000056710000000357613073417716024272 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ SLA (Service-level agreement) is set of details for determining compliance with contracted values such as maximum error rate or minimum response time. """ from rally.common.i18n import _ from rally.task import sla @sla.configure(name="max_seconds_per_iteration") class IterationTime(sla.SLA): """Maximum time for one iteration in seconds.""" CONFIG_SCHEMA = {"type": "number", "minimum": 0.0, "exclusiveMinimum": True} def __init__(self, criterion_value): super(IterationTime, self).__init__(criterion_value) self.max_iteration_time = 0.0 def add_iteration(self, iteration): if iteration["duration"] > self.max_iteration_time: self.max_iteration_time = iteration["duration"] self.success = self.max_iteration_time <= self.criterion_value return self.success def merge(self, other): if other.max_iteration_time > self.max_iteration_time: self.max_iteration_time = other.max_iteration_time self.success = self.max_iteration_time <= self.criterion_value return self.success def details(self): return (_("Maximum seconds per iteration %.2fs <= %.2fs - %s") % (self.max_iteration_time, self.criterion_value, self.status())) rally-0.9.1/rally/plugins/common/sla/outliers.py0000664000567000056710000001066213073417720023111 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""

from rally.common.i18n import _
from rally.common import streaming_algorithms
from rally import consts
from rally.task import sla


@sla.configure(name="outliers")
class Outliers(sla.SLA):
    """Limit the number of outliers (iterations that take too much time).

    The outliers are detected automatically using the computation of the mean
    and standard deviation (std) of the data.
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "max": {"type": "integer", "minimum": 0},
            "min_iterations": {"type": "integer", "minimum": 3},
            "sigmas": {"type": "number", "minimum": 0.0,
                       "exclusiveMinimum": True}
        }
    }

    def __init__(self, criterion_value):
        super(Outliers, self).__init__(criterion_value)
        self.max_outliers = self.criterion_value.get("max", 0)
        # NOTE(msdubov): Having 3 as default is reasonable (need enough data).
        self.min_iterations = self.criterion_value.get("min_iterations", 3)
        self.sigmas = self.criterion_value.get("sigmas", 3.0)
        self.iterations = 0
        self.outliers = 0
        self.threshold = None
        self.mean_comp = streaming_algorithms.MeanComputation()
        self.std_comp = streaming_algorithms.StdDevComputation()

    def add_iteration(self, iteration):
        # NOTE(ikhudoshyn): Only a rough approximation is possible here:
        # folding a new sample in shifts mean and std (and therefore the
        # threshold), but past durations are not stored, so previously
        # accounted iterations cannot be re-checked against the new
        # threshold.
        if not iteration.get("error"):
            elapsed = iteration["duration"]
            self.iterations += 1
            # Compare against the threshold derived from the data seen so
            # far *before* folding the new sample into the statistics.
            enough_data = self.iterations >= self.min_iterations
            if enough_data and self.threshold and elapsed > self.threshold:
                self.outliers += 1
            self.mean_comp.add(elapsed)
            self.std_comp.add(elapsed)
            if self.iterations >= 2:
                self.threshold = (self.mean_comp.result()
                                  + self.sigmas * self.std_comp.result())
        self.success = self.outliers <= self.max_outliers
        return self.success

    def merge(self, other):
        # NOTE(ikhudoshyn): Like add_iteration(), this is approximate - the
        # merged threshold cannot be re-applied to durations already
        # accounted by either side, since durations are not stored.
        self.iterations += other.iterations
        self.outliers += other.outliers
        self.mean_comp.merge(other.mean_comp)
        self.std_comp.merge(other.std_comp)
        if self.iterations >= 2:
            self.threshold = (self.mean_comp.result()
                              + self.sigmas * self.std_comp.result())
        self.success = self.outliers <= self.max_outliers
        return self.success

    def details(self):
        """Return a human-readable status line for reports."""
        return (_("Maximum number of outliers %i <= %i - %s")
                % (self.outliers, self.max_outliers, self.status()))
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""

from __future__ import division

from rally.common.i18n import _
from rally.common import streaming_algorithms
from rally.common import utils
from rally import consts
from rally.task import sla


@sla.configure(name="performance_degradation")
class PerformanceDegradation(sla.SLA):
    """Calculates performance degradation based on iteration time

    This SLA plugin finds minimum and maximum duration of
    iterations completed without errors during Rally task execution.
    Assuming that minimum duration is 100%, it calculates
    performance degradation against maximum duration.
    """

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "max_degradation": {
                "type": "number",
                "minimum": 0.0,
            },
        },
        "required": [
            "max_degradation",
        ],
        "additionalProperties": False,
    }

    def __init__(self, criterion_value):
        super(PerformanceDegradation, self).__init__(criterion_value)
        self.max_degradation = self.criterion_value["max_degradation"]
        # Streaming min/max tracker; result() is the degradation percent.
        self.degradation = streaming_algorithms.DegradationComputation()

    def _update_success(self):
        # Shared criterion evaluation for add_iteration() and merge().
        self.success = self.degradation.result() <= self.max_degradation
        return self.success

    def add_iteration(self, iteration):
        """Account one iteration; failed iterations are skipped.

        :param iteration: iteration result dict
        :returns: bool - whether the SLA still holds
        """
        if not iteration.get("error"):
            self.degradation.add(iteration["duration"])
        return self._update_success()

    def merge(self, other):
        """Fold another instance's degradation statistics in.

        :param other: another PerformanceDegradation instance
        :returns: bool - whether the merged SLA still holds
        """
        self.degradation.merge(other.degradation)
        return self._update_success()

    def details(self):
        """Return a human-readable status line for reports."""
        value = utils.format_float_to_str(self.degradation.result() or 0.0)
        return _("Current degradation: %s%% - %s") % (value, self.status())
""" from rally.common.i18n import _ from rally.common import streaming_algorithms from rally.task import sla @sla.configure(name="max_avg_duration") class MaxAverageDuration(sla.SLA): """Maximum average duration of one iteration in seconds.""" CONFIG_SCHEMA = {"type": "number", "minimum": 0.0, "exclusiveMinimum": True} def __init__(self, criterion_value): super(MaxAverageDuration, self).__init__(criterion_value) self.avg = 0.0 self.avg_comp = streaming_algorithms.MeanComputation() def add_iteration(self, iteration): if not iteration.get("error"): self.avg_comp.add(iteration["duration"]) self.avg = self.avg_comp.result() self.success = self.avg <= self.criterion_value return self.success def merge(self, other): self.avg_comp.merge(other.avg_comp) self.avg = self.avg_comp.result() or 0.0 self.success = self.avg <= self.criterion_value return self.success def details(self): return (_("Average duration of one iteration %.2fs <= %.2fs - %s") % (self.avg, self.criterion_value, self.status())) rally-0.9.1/rally/plugins/common/sla/max_average_duration_per_atomic.py0000664000567000056710000000577213073417720027637 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ SLA (Service-level agreement) is set of details for determining compliance with contracted values such as maximum error rate or minimum response time. 
""" import collections from rally.common.i18n import _ from rally.common import streaming_algorithms from rally import consts from rally.task import sla @sla.configure(name="max_avg_duration_per_atomic") class MaxAverageDurationPerAtomic(sla.SLA): """Maximum average duration of one iterations atomic actions in seconds.""" CONFIG_SCHEMA = {"type": "object", "$schema": consts.JSON_SCHEMA, "patternProperties": {".*": { "type": "number", "description": "The name of atomic action."}}, "additionalProperties": False} def __init__(self, criterion_value): super(MaxAverageDurationPerAtomic, self).__init__(criterion_value) self.avg_by_action = collections.defaultdict(float) self.avg_comp_by_action = collections.defaultdict( streaming_algorithms.MeanComputation) self.criterion_items = self.criterion_value.items() def add_iteration(self, iteration): if not iteration.get("error"): for action, value in iteration["atomic_actions"].items(): self.avg_comp_by_action[action].add(value) result = self.avg_comp_by_action[action].result() self.avg_by_action[action] = result self.success = all(self.avg_by_action[atom] <= val for atom, val in self.criterion_items) return self.success def merge(self, other): for atom, comp in self.avg_comp_by_action.items(): if atom in other.avg_comp_by_action: comp.merge(other.avg_comp_by_action[atom]) self.avg_by_action = {a: comp.result() or 0.0 for a, comp in self.avg_comp_by_action.items()} self.success = all(self.avg_by_action[atom] <= val for atom, val in self.criterion_items) return self.success def details(self): strs = [_("Action: '%s'. %.2fs <= %.2fs") % (atom, self.avg_by_action[atom], val) for atom, val in self.criterion_items] head = _("Average duration of one iteration for atomic actions:") end = _("Status: %s") % self.status() return "\n".join([head] + strs + [end]) rally-0.9.1/rally/plugins/common/sla/failure_rate.py0000664000567000056710000000447013073417720023705 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. 
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""

from rally.common.i18n import _
from rally import consts
from rally.task import sla


@sla.configure(name="failure_rate")
class FailureRate(sla.SLA):
    """Failure rate minimum and maximum in percents."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "min": {"type": "number", "minimum": 0.0, "maximum": 100.0},
            "max": {"type": "number", "minimum": 0.0, "maximum": 100.0}
        }
    }

    def __init__(self, criterion_value):
        super(FailureRate, self).__init__(criterion_value)
        self.min_percent = self.criterion_value.get("min", 0)
        self.max_percent = self.criterion_value.get("max", 100)
        self.errors = 0
        self.total = 0
        self.error_rate = 0.0

    def _evaluate(self):
        # The criterion holds while the rate stays inside [min, max].
        self.success = (self.min_percent <= self.error_rate
                        <= self.max_percent)
        return self.success

    def add_iteration(self, iteration):
        """Account one iteration and re-evaluate the criterion.

        :param iteration: iteration result dict with an "error" key
        :returns: bool - whether the SLA still holds
        """
        self.total += 1
        self.errors += 1 if iteration["error"] else 0
        self.error_rate = self.errors * 100.0 / self.total
        return self._evaluate()

    def merge(self, other):
        """Fold another instance's counters in and recompute the rate.

        :param other: another FailureRate instance
        :returns: bool - whether the merged SLA still holds
        """
        self.total += other.total
        self.errors += other.errors
        # Guard against division by zero when neither side saw iterations.
        if self.total:
            self.error_rate = self.errors * 100.0 / self.total
        return self._evaluate()

    def details(self):
        """Return a human-readable status line for reports."""
        return (_("Failure rate criteria %.2f%% <= %.2f%% <= %.2f%% - %s")
                % (self.min_percent, self.error_rate, self.max_percent,
                   self.status()))
from rally import consts
from rally import exceptions
from rally.task import context


@context.configure(name="dummy_context", order=750)
class DummyContext(context.Context):
    """Dummy context."""

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "fail_setup": {"type": "boolean"},
            "fail_cleanup": {"type": "boolean"}
        },
    }

    def setup(self):
        # Deliberately break context setup when the config asks for it;
        # used to exercise Rally's failure handling paths.
        if self.config.get("fail_setup", False):
            raise exceptions.RallyException("Oops...setup is failed")

    def cleanup(self):
        # Deliberately break context cleanup when the config asks for it.
        if self.config.get("fail_cleanup", False):
            raise exceptions.RallyException("Oops...cleanup is failed")
import random

from rally.plugins.common.scenarios.requests import utils
from rally.task import scenario


"""Scenarios for HTTP requests."""


@scenario.configure(name="HttpRequests.check_request")
class HttpRequestsCheckRequest(utils.RequestScenario):

    def run(self, url, method, status_code, **kwargs):
        """Standard way to benchmark web services.

        This benchmark is used to make request and check it with expected
        Response.

        :param url: url for the Request object
        :param method: method for the Request object
        :param status_code: expected response code
        :param kwargs: optional additional request parameters
        """
        self._check_request(url, method, status_code, **kwargs)


@scenario.configure(name="HttpRequests.check_random_request")
class HttpRequestsCheckRandomRequest(utils.RequestScenario):

    def run(self, requests, status_code):
        """Benchmark the list of requests

        This scenario takes random url from list of requests, and raises
        exception if the response is not the expected response.

        :param requests: List of request dicts
        :param status_code: Expected Response Code it will
                            be used only if we doesn't specified it in request
                            proper
        """
        # Pick one request at random; fall back to the scenario-level
        # status_code only when the request dict has none of its own.
        picked = random.choice(requests)
        picked.setdefault("status_code", status_code)
        self._check_request(**picked)
import requests

from rally.common.i18n import _
from rally.task import atomic
from rally.task import scenario


class RequestScenario(scenario.Scenario):
    """Base class for Request scenarios with basic atomic actions."""

    @atomic.action_timer("requests.check_request")
    def _check_request(self, url, method, status_code, **kwargs):
        """Compare request status code with specified code

        :param status_code: Expected status code of request
        :param url: Uniform resource locator
        :param method: Type of request method (GET | POST ..)
        :param kwargs: Optional additional request parameters
        :raises ValueError: if return http status code
                            not equal to expected status code
        """
        response = requests.request(method, url, **kwargs)
        if response.status_code != status_code:
            raise ValueError(
                _("Expected HTTP request code is `%s` actual `%s`")
                % (status_code, response.status_code))
import random

from rally.common.i18n import _
from rally.common import utils
from rally import exceptions
from rally.task import atomic
from rally.task import scenario
from rally.task import validation


"""Dummy scenarios for testing Rally engine at scale."""


class DummyScenarioException(exceptions.RallyException):
    # Exception deliberately raised by the dummy scenarios below.
    msg_fmt = _("Dummy scenario expected exception: '%(message)s'")


@scenario.configure(name="Dummy.failure")
class DummyFailure(scenario.Scenario):

    def run(self, sleep=0.1, from_iteration=0, to_iteration=0, each=1):
        """Raise errors in some iterations.

        :param sleep: float iteration sleep time in seconds
        :param from_iteration: int iteration number which starts range
                               of failed iterations
        :param to_iteration: int iteration number which ends range of
                             failed iterations
        :param each: int cyclic number of iteration which actually raises
                     an error in selected range. For example, each=3 will
                     raise error in each 3rd iteration.
        """
        utils.interruptable_sleep(sleep)
        # Fail only inside the configured iteration window, and only on
        # every ``each``-th iteration within it.
        if from_iteration <= self.context["iteration"] <= to_iteration:
            if each and not self.context["iteration"] % each:
                raise DummyScenarioException(_("Expected failure"))


@scenario.configure(name="Dummy.dummy")
class Dummy(scenario.Scenario):

    def run(self, sleep=0):
        """Do nothing and sleep for the given number of seconds (0 by default).

        Dummy.dummy can be used for testing performance of different
        ScenarioRunners and of the ability of rally to store a large
        amount of results.

        :param sleep: idle time of method (in seconds).
        """
        utils.interruptable_sleep(sleep)


@validation.number("size_of_message",
                   minval=1, integer_only=True, nullable=True)
@scenario.configure(name="Dummy.dummy_exception")
class DummyException(scenario.Scenario):

    def run(self, size_of_message=1, sleep=1, message=""):
        """Throw an exception.

        Dummy.dummy_exception can be used for test if exceptions are
        processed properly by ScenarioRunners and benchmark and analyze
        rally results storing process.

        :param size_of_message: int size of the exception message
        :param sleep: idle time of method (in seconds).
        :param message: message of the exception
        :raises DummyScenarioException: raise exception for test
        """
        utils.interruptable_sleep(sleep)
        # An explicit message wins; otherwise build one of the given size.
        message = message or "M" * size_of_message
        raise DummyScenarioException(message)


@validation.number("exception_probability",
                   minval=0, maxval=1, integer_only=False, nullable=True)
@scenario.configure(name="Dummy.dummy_exception_probability")
class DummyExceptionProbability(scenario.Scenario):

    def run(self, exception_probability=0.5):
        """Throw an exception with given probability.

        Dummy.dummy_exception_probability can be used to test if exceptions
        are processed properly by ScenarioRunners. This scenario will throw
        an exception sometimes, depending on the given exception probability.

        :param exception_probability: Sets how likely it is that an exception
                                      will be thrown. Float between 0 and 1
                                      0=never 1=always.
        """
        if random.random() < exception_probability:
            raise DummyScenarioException(
                "Dummy Scenario Exception: Probability: %s"
                % exception_probability)


@scenario.configure(name="Dummy.dummy_output")
class DummyOutput(scenario.Scenario):

    def run(self, random_range=25):
        """Generate dummy output.

        This scenario generates example of output data.

        :param random_range: max int limit for generated random values
        """
        # rand("name") -> ["name", <random int>] pair used as chart data.
        rand = lambda n: [n, random.randint(1, random_range)]
        desc = "This is a description text for %s"
        self.add_output(additive={"title": "Additive StatsTable",
                                  "description": desc % "Additive StatsTable",
                                  "chart_plugin": "StatsTable",
                                  "data": [rand("foo stat"), rand("bar stat"),
                                           rand("spam stat")]})
        self.add_output(additive={"title": ("Additive StackedArea "
                                            "(no description)"),
                                  "chart_plugin": "StackedArea",
                                  "data": [rand("foo %d" % i)
                                           for i in range(1, 7)],
                                  "label": "Measure this in Foo units"})
        self.add_output(additive={"title": "Additive Lines",
                                  "description": (desc % "Additive Lines"),
                                  "chart_plugin": "Lines",
                                  "data": [rand("bar %d" % i)
                                           for i in range(1, 4)],
                                  "label": "Measure this in Bar units"})
        # A single add_output() call may carry both an additive and a
        # complete chart at once.
        self.add_output(additive={"title": "Additive Pie",
                                  "description": desc % "Additive Pie",
                                  "chart_plugin": "Pie",
                                  "data": [rand("spam %d" % i)
                                           for i in range(1, 4)]},
                        complete={"title": "Complete Lines",
                                  "description": desc % "Complete Lines",
                                  "chart_plugin": "Lines",
                                  "data": [
                                      [name, [rand(i) for i in range(1, 8)]]
                                      for name in ("Foo", "Bar", "Spam")],
                                  "label": "Measure this is some units",
                                  "axis_label": ("This is a custom "
                                                 "X-axis label")})
        self.add_output(complete={"title": "Complete StackedArea",
                                  "description": desc % "Complete StackedArea",
                                  "chart_plugin": "StackedArea",
                                  "data": [
                                      [name, [rand(i) for i in range(50)]]
                                      for name in ("alpha", "beta", "gamma")],
                                  "label": "Yet another measurement units",
                                  "axis_label": ("This is a custom "
                                                 "X-axis label")})
        self.add_output(
            complete={"title": "Arbitrary Text",
                      "chart_plugin": "TextArea",
                      "data": ["Lorem ipsum dolor sit amet, consectetur "
                               "adipiscing elit, sed do eiusmod tempor "
                               "incididunt ut labore et dolore magna "
                               "aliqua." * 2] * 4})
        self.add_output(
            complete={"title": "Complete Pie (no description)",
                      "chart_plugin": "Pie",
                      "data": [rand("delta"), rand("epsilon"), rand("zeta"),
                               rand("theta"), rand("lambda"), rand("omega")]})
        data = {"cols": ["mu column", "xi column", "pi column",
                         "tau column", "chi column"],
                "rows": [([name + " row"] + [rand(i)[1] for i in range(4)])
                         for name in ("iota", "nu", "rho", "phi", "psi")]}
        self.add_output(complete={"title": "Complete Table",
                                  "description": desc % "Complete Table",
                                  "chart_plugin": "Table",
                                  "data": data})


@scenario.configure(name="Dummy.dummy_random_fail_in_atomic")
class DummyRandomFailInAtomic(scenario.Scenario):
    """Randomly throw exceptions in atomic actions."""

    @atomic.action_timer("dummy_fail_test")
    def _random_fail_emitter(self, exception_probability):
        """Throw an exception with given probability.

        :raises KeyError: when exception_probability is bigger
        """
        if random.random() < exception_probability:
            raise KeyError("Dummy test exception")

    def run(self, exception_probability=0.5):
        """Dummy.dummy_random_fail_in_atomic in dummy actions.

        Can be used to test atomic actions
        failures processing.

        :param exception_probability: Probability with which atomic actions
                                      fail in this dummy scenario (0 <= p <= 1)
        """
        # Two timed atomic actions per iteration, each failing independently.
        self._random_fail_emitter(exception_probability)
        self._random_fail_emitter(exception_probability)


@scenario.configure(name="Dummy.dummy_random_action")
class DummyRandomAction(scenario.Scenario):

    def run(self, actions_num=5, sleep_min=0, sleep_max=2):
        """Sleep random time in dummy actions.

        :param actions_num: int number of actions to generate
        :param sleep_min: minimal time to sleep, numeric seconds
        :param sleep_max: maximum time to sleep, numeric seconds
        """
        for idx in range(actions_num):
            duration = random.uniform(sleep_min, sleep_max)
            with atomic.ActionTimer(self, "action_%d" % idx):
                utils.interruptable_sleep(duration)


@scenario.configure(name="Dummy.dummy_timed_atomic_actions")
class DummyTimedAtomicAction(scenario.Scenario):

    def run(self, number_of_actions=5, sleep_factor=1):
        """Run some sleepy atomic actions for SLA atomic action tests.

        :param number_of_actions: int number of atomic actions to create
        :param sleep_factor: int multiplier for number of seconds to sleep
        """
        # action_0 sleeps 0s, action_1 sleeps sleep_factor seconds, etc.,
        # giving a predictable spread of atomic action durations.
        for sleeptime in range(number_of_actions):
            with atomic.ActionTimer(self, "action_%d" % sleeptime):
                utils.interruptable_sleep(sleeptime * sleep_factor)
from rally.common import utils as rutils from rally import consts from rally.task import runner @runner.configure(name="serial") class SerialScenarioRunner(runner.ScenarioRunner): """Scenario runner that executes benchmark scenarios serially. Unlike scenario runners that execute in parallel, the serial scenario runner executes scenarios one-by-one in the same python interpreter process as Rally. This allows you to benchmark your scenario without introducing any concurrent operations as well as interactively debug the scenario from the same command that you use to start Rally. """ # NOTE(mmorais): additionalProperties is set True to allow switching # between parallel and serial runners by modifying only *type* property CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string" }, "times": { "type": "integer", "minimum": 1 } }, "additionalProperties": True } def _run_scenario(self, cls, method_name, context, args): """Runs the specified benchmark scenario with given arguments. The scenario iterations are executed one-by-one in the same python interpreter process as Rally. This allows you to benchmark your scenario without introducing any concurrent operations as well as interactively debug the scenario from the same command that you use to start Rally. :param cls: The Scenario class where the scenario is implemented :param method_name: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. 
:param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ times = self.config.get("times", 1) event_queue = rutils.DequeAsQueue(self.event_queue) for i in range(times): if self.aborted.is_set(): break result = runner._run_scenario_once( cls, method_name, runner._get_scenario_context(i, context), args, event_queue) self._send_result(result) self._flush_results() rally-0.9.1/rally/plugins/common/runners/constant.py0000664000567000056710000003327613073417720024017 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import multiprocessing import threading import time from six.moves import queue as Queue from rally.common import utils from rally import consts from rally import exceptions from rally.task import runner from rally.task import utils as butils def _worker_process(queue, iteration_gen, timeout, concurrency, times, context, cls, method_name, args, event_queue, aborted, info): """Start the scenario within threads. Spawn threads to support scenario execution for a fixed number of times. This generates a constant load on the cloud under test by executing each scenario iteration without pausing between iterations. Each thread runs the scenario method once with passed scenario arguments and context. After execution the result is appended to the queue. 
:param queue: queue object to append results :param iteration_gen: next iteration number generator :param timeout: operation's timeout :param concurrency: number of concurrently running scenario iterations :param times: total number of scenario iterations to be run :param context: scenario context object :param cls: scenario class :param method_name: scenario method name :param args: scenario args :param event_queue: queue object to append events :param aborted: multiprocessing.Event that aborts load generation if the flag is set :param info: info about all processes count and counter of launched process """ pool = collections.deque() alive_threads_in_pool = 0 finished_threads_in_pool = 0 runner._log_worker_info(times=times, concurrency=concurrency, timeout=timeout, cls=cls, method_name=method_name, args=args) if timeout: timeout_queue = Queue.Queue() collector_thr_by_timeout = threading.Thread( target=utils.timeout_thread, args=(timeout_queue, ) ) collector_thr_by_timeout.start() iteration = next(iteration_gen) while iteration < times and not aborted.is_set(): scenario_context = runner._get_scenario_context(iteration, context) worker_args = ( queue, cls, method_name, scenario_context, args, event_queue) thread = threading.Thread(target=runner._worker_thread, args=worker_args) thread.start() if timeout: timeout_queue.put((thread, time.time() + timeout)) pool.append(thread) alive_threads_in_pool += 1 while alive_threads_in_pool == concurrency: prev_finished_threads_in_pool = finished_threads_in_pool finished_threads_in_pool = 0 for t in pool: if not t.isAlive(): finished_threads_in_pool += 1 alive_threads_in_pool -= finished_threads_in_pool alive_threads_in_pool += prev_finished_threads_in_pool if alive_threads_in_pool < concurrency: # NOTE(boris-42): cleanup pool array. 
This is required because # in other case array length will be equal to times which # is unlimited big while pool and not pool[0].isAlive(): pool.popleft().join() finished_threads_in_pool -= 1 break # we should wait to not create big noise with these checks time.sleep(0.001) iteration = next(iteration_gen) # Wait until all threads are done while pool: pool.popleft().join() if timeout: timeout_queue.put((None, None,)) collector_thr_by_timeout.join() @runner.configure(name="constant") class ConstantScenarioRunner(runner.ScenarioRunner): """Creates constant load executing a scenario a specified number of times. This runner will place a constant load on the cloud under test by executing each scenario iteration without pausing between iterations up to the number of times specified in the scenario config. The concurrency parameter of the scenario config controls the number of concurrent iterations which execute during a single scenario in order to simulate the activities of multiple users placing load on the cloud under test. """ CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string", "description": "Type of Runner." }, "concurrency": { "type": "integer", "minimum": 1, "description": "The number of parallel iteration executions." }, "times": { "type": "integer", "minimum": 1, "description": "Total number of iteration executions." }, "timeout": { "type": "number", "description": "Operation's timeout." }, "max_cpu_count": { "type": "integer", "minimum": 1, "description": "The maximum number of processes to create load" " from." } }, "required": ["type"], "additionalProperties": False } @classmethod def validate(cls, config): """Validates runner's part of task config.""" super(ConstantScenarioRunner, cls).validate(config) if config.get("concurrency", 1) > config.get("times", 1): raise exceptions.ValidationError( "Parameter 'concurrency' means a number of parallel executions" "of iterations. 
Parameter 'times' means total number of " "iteration executions. It is redundant (and restricted) to " "have number of parallel iterations bigger then total number " "of iterations.") def _run_scenario(self, cls, method_name, context, args): """Runs the specified benchmark scenario with given arguments. This method generates a constant load on the cloud under test by executing each scenario iteration using a pool of processes without pausing between iterations up to the number of times specified in the scenario config. :param cls: The Scenario class where the scenario is implemented :param method_name: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. :param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ timeout = self.config.get("timeout", 0) # 0 means no timeout times = self.config.get("times", 1) concurrency = self.config.get("concurrency", 1) iteration_gen = utils.RAMInt() cpu_count = multiprocessing.cpu_count() max_cpu_used = min(cpu_count, self.config.get("max_cpu_count", cpu_count)) processes_to_start = min(max_cpu_used, times, concurrency) concurrency_per_worker, concurrency_overhead = divmod( concurrency, processes_to_start) self._log_debug_info(times=times, concurrency=concurrency, timeout=timeout, max_cpu_used=max_cpu_used, processes_to_start=processes_to_start, concurrency_per_worker=concurrency_per_worker, concurrency_overhead=concurrency_overhead) result_queue = multiprocessing.Queue() event_queue = multiprocessing.Queue() def worker_args_gen(concurrency_overhead): while True: yield (result_queue, iteration_gen, timeout, concurrency_per_worker + (concurrency_overhead and 1), times, context, cls, method_name, args, event_queue, self.aborted) if concurrency_overhead: concurrency_overhead -= 1 process_pool = 
self._create_process_pool( processes_to_start, _worker_process, worker_args_gen(concurrency_overhead)) self._join_processes(process_pool, result_queue, event_queue) def _run_scenario_once_with_unpack_args(args): # NOTE(andreykurilin): `pool.imap` is used in # ConstantForDurationScenarioRunner. It does not want to work with # instance-methods, class-methods and static-methods. Also, it can't # transmit positional or keyword arguments to destination function. # While original `rally.task.runner._run_scenario_once` accepts # multiple arguments instead of one big tuple with all arguments, we # need to hardcode unpacking here(all other runners are able to # transmit arguments in proper way). return runner._run_scenario_once(*args) @runner.configure(name="constant_for_duration") class ConstantForDurationScenarioRunner(runner.ScenarioRunner): """Creates constant load executing a scenario for an interval of time. This runner will place a constant load on the cloud under test by executing each scenario iteration without pausing between iterations until a specified interval of time has elapsed. The concurrency parameter of the scenario config controls the number of concurrent iterations which execute during a single sceanario in order to simulate the activities of multiple users placing load on the cloud under test. """ CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string", "description": "Type of Runner." }, "concurrency": { "type": "integer", "minimum": 1, "description": "The number of parallel iteration executions." }, "duration": { "type": "number", "minimum": 0.0, "description": "The number of seconds during which to generate" " a load." }, "timeout": { "type": "number", "minimum": 1, "description": "Operation's timeout." 
} }, "required": ["type", "duration"], "additionalProperties": False } @staticmethod def _iter_scenario_args(cls, method, ctx, args, event_queue, aborted): def _scenario_args(i): if aborted.is_set(): raise StopIteration() return (cls, method, runner._get_scenario_context(i, ctx), args, event_queue) return _scenario_args def _run_scenario(self, cls, method, context, args): """Runs the specified benchmark scenario with given arguments. :param cls: The Scenario class where the scenario is implemented :param method: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. :param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ timeout = self.config.get("timeout", 600) concurrency = self.config.get("concurrency", 1) duration = self.config.get("duration") # FIXME(andreykurilin): unify `_worker_process`, use it here and remove # usage of `multiprocessing.Pool`(usage of separate process for # each concurrent iteration is redundant). 
pool = multiprocessing.Pool(concurrency) manager = multiprocessing.Manager() event_queue = manager.Queue() stop_event_listener = threading.Event() def event_listener(): while not stop_event_listener.isSet(): while not event_queue.empty(): self.send_event(**event_queue.get()) else: time.sleep(0.01) event_listener_thread = threading.Thread(target=event_listener) event_listener_thread.start() run_args = butils.infinite_run_args_generator( self._iter_scenario_args( cls, method, context, args, event_queue, self.aborted)) iter_result = pool.imap(_run_scenario_once_with_unpack_args, run_args) start = time.time() while True: try: result = iter_result.next(timeout) except multiprocessing.TimeoutError as e: result = runner.format_result_on_timeout(e, timeout) except StopIteration: break self._send_result(result) if time.time() - start > duration: break stop_event_listener.set() event_listener_thread.join() pool.terminate() pool.join() self._flush_results() rally-0.9.1/rally/plugins/common/runners/rps.py0000664000567000056710000002662513073417720022772 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections import multiprocessing import threading import time from six.moves import queue as Queue from rally.common import logging from rally.common import utils from rally import consts from rally import exceptions from rally.task import runner LOG = logging.getLogger(__name__) def _worker_process(queue, iteration_gen, timeout, times, max_concurrent, context, cls, method_name, args, event_queue, aborted, runs_per_second, rps_cfg, processes_to_start, info): """Start scenario within threads. Spawn N threads per second. Each thread runs the scenario once, and appends result to queue. A maximum of max_concurrent threads will be ran concurrently. :param queue: queue object to append results :param iteration_gen: next iteration number generator :param timeout: operation's timeout :param times: total number of scenario iterations to be run :param max_concurrent: maximum worker concurrency :param context: scenario context object :param cls: scenario class :param method_name: scenario method name :param args: scenario args :param aborted: multiprocessing.Event that aborts load generation if the flag is set :param runs_per_second: function that should return desired rps value :param rps_cfg: rps section from task config :param processes_to_start: int, number of started processes for scenario execution :param info: info about all processes count and counter of runned process """ pool = collections.deque() if isinstance(rps_cfg, dict): rps = rps_cfg["start"] else: rps = rps_cfg sleep = 1.0 / rps runner._log_worker_info(times=times, rps=rps, timeout=timeout, cls=cls, method_name=method_name, args=args) time.sleep( (sleep * info["processes_counter"]) / info["processes_to_start"]) start = time.time() timeout_queue = Queue.Queue() if timeout: collector_thr_by_timeout = threading.Thread( target=utils.timeout_thread, args=(timeout_queue, ) ) collector_thr_by_timeout.start() i = 0 while i < times and not aborted.is_set(): scenario_context = 
runner._get_scenario_context(next(iteration_gen), context) worker_args = ( queue, cls, method_name, scenario_context, args, event_queue) thread = threading.Thread(target=runner._worker_thread, args=worker_args) i += 1 thread.start() if timeout: timeout_queue.put((thread, time.time() + timeout)) pool.append(thread) time_gap = time.time() - start real_rps = i / time_gap if time_gap else "Infinity" LOG.debug("Worker: %s rps: %s (requested rps: %s)" % (i, real_rps, runs_per_second( rps_cfg, start, processes_to_start))) # try to join latest thread(s) until it finished, or until time to # start new thread (if we have concurrent slots available) while i / (time.time() - start) > runs_per_second( rps_cfg, start, processes_to_start) or ( len(pool) >= max_concurrent): if pool: pool[0].join(0.001) if not pool[0].isAlive(): pool.popleft() else: time.sleep(0.001) while pool: pool.popleft().join() if timeout: timeout_queue.put((None, None,)) collector_thr_by_timeout.join() @runner.configure(name="rps") class RPSScenarioRunner(runner.ScenarioRunner): """Scenario runner that does the job with specified frequency. Every single benchmark scenario iteration is executed with specified frequency (runs per second) in a pool of processes. The scenario will be launched for a fixed number of times in total (specified in the config). An example of a rps scenario is booting 1 VM per second. This execution type is thus very helpful in understanding the maximal load that a certain cloud can handle. 
""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string" }, "times": { "type": "integer", "minimum": 1 }, "rps": { "anyOf": [ { "description": "Generate constant requests per second " "during the whole workload.", "type": "number", "exclusiveMinimum": True, "minimum": 0 }, { "type": "object", "description": "Increase requests per second for " "specified value each time after a " "certain number of seconds.", "properties": { "start": { "type": "number", "minimum": 1 }, "end": { "type": "number", "minimum": 1 }, "step": { "type": "number", "minimum": 1 }, "duration": { "type": "number", "minimum": 1 } }, "required": ["start", "end", "step"] } ], }, "timeout": { "type": "number", }, "max_concurrency": { "type": "integer", "minimum": 1 }, "max_cpu_count": { "type": "integer", "minimum": 1 } }, "required": ["type", "times", "rps"], "additionalProperties": False } @staticmethod def validate(config): """Validates runner's part of task config.""" super(RPSScenarioRunner, RPSScenarioRunner).validate(config) if isinstance(config["rps"], dict): if config["rps"]["end"] < config["rps"]["start"]: msg = "rps end value must not be less than rps start value." raise exceptions.InvalidTaskException(msg) def _run_scenario(self, cls, method_name, context, args): """Runs the specified benchmark scenario with given arguments. Every single benchmark scenario iteration is executed with specified frequency (runs per second) in a pool of processes. The scenario will be launched for a fixed number of times in total (specified in the config). :param cls: The Scenario class where the scenario is implemented :param method_name: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. 
:param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ times = self.config["times"] timeout = self.config.get("timeout", 0) # 0 means no timeout iteration_gen = utils.RAMInt() cpu_count = multiprocessing.cpu_count() max_cpu_used = min(cpu_count, self.config.get("max_cpu_count", cpu_count)) def runs_per_second(rps_cfg, start_timer, number_of_processes): """At the given second return desired rps.""" if not isinstance(rps_cfg, dict): return float(rps_cfg) / number_of_processes stage_order = (time.time() - start_timer) / rps_cfg.get( "duration", 1) - 1 rps = (float(rps_cfg["start"] + rps_cfg["step"] * stage_order) / number_of_processes) return min(rps, float(rps_cfg["end"])) processes_to_start = min(max_cpu_used, times, self.config.get("max_concurrency", times)) times_per_worker, times_overhead = divmod(times, processes_to_start) # Determine concurrency per worker concurrency_per_worker, concurrency_overhead = divmod( self.config.get("max_concurrency", times), processes_to_start) self._log_debug_info(times=times, timeout=timeout, max_cpu_used=max_cpu_used, processes_to_start=processes_to_start, times_per_worker=times_per_worker, times_overhead=times_overhead, concurrency_per_worker=concurrency_per_worker, concurrency_overhead=concurrency_overhead) result_queue = multiprocessing.Queue() event_queue = multiprocessing.Queue() def worker_args_gen(times_overhead, concurrency_overhead): """Generate arguments for process worker. Remainder of threads per process division is distributed to process workers equally - one thread per each process worker until the remainder equals zero. The same logic is applied to concurrency overhead. 
:param times_overhead: remaining number of threads to be distributed to workers :param concurrency_overhead: remaining number of maximum concurrent threads to be distributed to workers """ while True: yield ( result_queue, iteration_gen, timeout, times_per_worker + (times_overhead and 1), concurrency_per_worker + (concurrency_overhead and 1), context, cls, method_name, args, event_queue, self.aborted, runs_per_second, self.config["rps"], processes_to_start ) if times_overhead: times_overhead -= 1 if concurrency_overhead: concurrency_overhead -= 1 process_pool = self._create_process_pool( processes_to_start, _worker_process, worker_args_gen(times_overhead, concurrency_overhead)) self._join_processes(process_pool, result_queue, event_queue) rally-0.9.1/rally/plugins/common/types.py0000664000567000056710000000514713073417716021637 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import requests from rally.common.plugin import plugin from rally import exceptions from rally.task import types @plugin.configure(name="path_or_url") class PathOrUrl(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Check whether file exists or url available. 
:param clients: openstack admin client handles :param resource_config: path or url :returns: url or expanded file path """ path = os.path.expanduser(resource_config) if os.path.isfile(path): return path try: head = requests.head(path) if head.status_code == 200: return path raise exceptions.InvalidScenarioArgument( "Url %s unavailable (code %s)" % (path, head.status_code)) except Exception as ex: raise exceptions.InvalidScenarioArgument( "Url error %s (%s)" % (path, ex)) @plugin.configure(name="file") class FileType(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Return content of the file by its path. :param clients: openstack admin client handles :param resource_config: path to file :returns: content of the file """ with open(os.path.expanduser(resource_config), "r") as f: return f.read() @plugin.configure(name="file_dict") class FileTypeDict(types.ResourceType): @classmethod def transform(cls, clients, resource_config): """Return the dictionary of items with file path and file content. :param clients: openstack admin client handles :param resource_config: list of file paths :returns: dictionary {file_path: file_content, ...} """ file_type_dict = {} for file_path in resource_config: file_path = os.path.expanduser(file_path) with open(file_path, "r") as f: file_type_dict[file_path] = f.read() return file_type_dict rally-0.9.1/rally/plugins/common/exporter/0000775000567000056710000000000013073420067021753 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/exporter/__init__.py0000664000567000056710000000000013073417716024061 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/common/exporter/file_system.py0000664000567000056710000000745713073417716024674 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import os from six.moves.urllib import parse as urlparse from rally import api from rally.common import logging from rally import exceptions from rally.task import exporter LOG = logging.getLogger(__name__) @exporter.configure(name="file") class FileExporter(exporter.Exporter): def validate(self): """Validate connection string. The format of connection string in file plugin is file:///. """ parse_obj = urlparse.urlparse(self.connection_string) available_formats = ("json",) available_formats_str = ", ".join(available_formats) if self.connection_string is None or parse_obj.path == "": raise exceptions.InvalidConnectionString( "It should be `file:///.`.") if self.type not in available_formats: raise exceptions.InvalidConnectionString( "Type of the exported task is not available. The available " "formats are %s." % available_formats_str) def __init__(self, connection_string): super(FileExporter, self).__init__(connection_string) self.path = os.path.expanduser(urlparse.urlparse( connection_string).path[1:]) self.type = connection_string.split(".")[-1] self.validate() def export(self, uuid): """Export results of the task to the file. :param uuid: uuid of the task object """ task = api.Task.get(uuid) LOG.debug("Got the task object by it's uuid %s. 
" % uuid) task_results = [{"key": x["key"], "result": x["data"]["raw"], "sla": x["data"]["sla"], "hooks": x["data"].get("hooks"), "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"]} for x in task.get_results()] if self.type == "json": if task_results: res = json.dumps(task_results, sort_keys=False, indent=4, separators=(",", ": ")) LOG.debug("Got the task %s results." % uuid) else: msg = ("Task %s results would be available when it will " "finish." % uuid) raise exceptions.RallyException(msg) if os.path.dirname(self.path) and (not os.path.exists(os.path.dirname( self.path))): raise IOError("There is no such directory: %s" % os.path.dirname(self.path)) with open(self.path, "w") as f: LOG.debug("Writing task %s results to the %s." % ( uuid, self.connection_string)) f.write(res) LOG.debug("Task %s results was written to the %s." % ( uuid, self.connection_string)) @exporter.configure(name="file-exporter") class DeprecatedFileExporter(FileExporter): """DEPRECATED.""" def __init__(self, connection_string): super(DeprecatedFileExporter, self).__init__(connection_string) LOG.warning("'file-exporter' plugin is deprecated. Use 'file' " "instead.") rally-0.9.1/rally/plugins/workload/0000775000567000056710000000000013073420067020435 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/workload/__init__.py0000664000567000056710000000000013073417716022543 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/plugins/workload/siege.py0000664000567000056710000000332613073417720022111 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Run HTTP benchmark by runcommand_heat scenario.""" import json import re import subprocess import sys import tempfile SIEGE_RE = re.compile(r"^(Throughput|Transaction rate):\s+(\d+\.\d+)\s+.*") def get_instances(): outputs = json.load(sys.stdin) for output in outputs: if output["output_key"] == "wp_nodes": for node in output["output_value"].values(): yield node["wordpress-network"][0] def generate_urls_list(instances): urls = tempfile.NamedTemporaryFile(delete=False) with urls: for inst in instances: for i in range(1, 1000): urls.write("http://%s/wordpress/index.php/%d/\n" % (inst, i)) return urls.name def run(): instances = list(get_instances()) urls = generate_urls_list(instances) out = subprocess.check_output("siege -q -t 60S -b -f %s" % urls, shell=True, stderr=subprocess.STDOUT) for line in out.splitlines(): m = SIEGE_RE.match(line) if m: sys.stdout.write("%s:%s\n" % m.groups()) if __name__ == "__main__": sys.exit(run()) rally-0.9.1/rally/ui/0000775000567000056710000000000013073420067015547 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/__init__.py0000664000567000056710000000000013073417716017655 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/0000775000567000056710000000000013073420067017545 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/base.html0000664000567000056710000000531713073417716021362 0ustar jenkinsjenkins00000000000000 {%- if version %} {%- endif %} Rally | {% block title_text %}{% endblock %} {% block libs %}{% endblock %}
{% block content %}{% endblock %}
rally-0.9.1/rally/ui/templates/ci/0000775000567000056710000000000013073420067020140 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/ci/index_verify.html0000664000567000056710000001650213073417716023534 0ustar jenkinsjenkins00000000000000{% extends "/base.html" %} {% block title_text %}Rally Verification Job Results{% endblock %} {% block css %} li { margin:2px 0 } {% if verifications|length == 1 %} ol {padding: 0; list-style-type: none;} {% endif %} a, a:visited { color:#039 } code { padding:0 15px; color:#888; display: block } .columns li { position:relative } .columns li > :first-child { display:block } .columns li > :nth-child(2) { display:block; position:static; left:165px; top:0; white-space:nowrap } .fail {color: red} .success {color: green} {% endblock %} {% block css_content_wrap %}margin:0 auto; padding:0 5px{% endblock %} {% block media_queries %} @media only screen and (min-width: 320px) { .content-wrap { width:400px } } @media only screen and (min-width: 520px) { .content-wrap { width:500px } } @media only screen and (min-width: 620px) { .content-wrap { width:90% } .columns li > :nth-child(2) { position:absolute } } @media only screen and (min-width: 720px) { .content-wrap { width:70% } } {% endblock %} {% block header_text %}Verify Job Results{% endblock %} {% block content %}

Logs and Results Files

Steps

[{{ list_plugins.status }}] List plugins for verifiers management $ {{ list_plugins.cmd }}
[{{ create_verifier.status }}] Create a verifier $ {{ create_verifier.cmd }} [{{ show_verifier.status }}] Show the verifier $ {{ show_verifier.cmd }} [{{ list_verifiers.status }}] List verifiers $ {{ list_verifiers.cmd }} [{{ update_verifier.status }}] Switch the verifier to the penultimate version $ {{ update_verifier.cmd }} [{{ configure_verifier.status }}] Generate and show the verifier config file $ {{ configure_verifier.cmd }}
[{{ add_verifier_ext.status }}] Add a verifier extension $ {{ add_verifier_ext.cmd }} [{{ list_verifier_exts.status }}] List verifier extensions $ {{ list_verifier_exts.cmd }}
[{{ list_verifier_tests.status }}] List verifier tests $ {{ list_verifier_tests.cmd }}
{% for i in range(verifications|length) %} {% if verifications|length > 1 %} Verification # {{ i + 1}}

{% endif %} [{{ verifications[i].status }}] Start verification $ {{ verifications[i].cmd }}
[{{ verifications[i].show.status }}] Show verification results $ {{ verifications[i].show.cmd }} [{{ verifications[i].show_detailed.status }}] Show verification results with details $ {{ verifications[i].show_detailed.cmd }}
[{{ verifications[i].json.status }}] Generate the verification report in JSON format [Output from CLI] $ {{ verifications[i].json.cmd }} [{{ verifications[i].junit_xml.status }}] Generate the verification report in JUnit-XML format [Output from CLI] $ {{ verifications[i].junit_xml.cmd }} [{{ verifications[i].html.status }}] Generate the verification report in HTML format [Output from CLI] $ {{ verifications[i].html.cmd }}
{% if verifications|length > 1 %}
{% endif %} {% endfor %} {% if compare %} [{{ compare.json.status }}] Generate the trends report for two verifications in JSON format [Output from CLI] $ {{ compare.json.cmd }} [{{ compare.junit_xml.status }}] Generate the trends report for two verifications in JUnit-XML format [Output from CLI] $ {{ compare.junit_xml.cmd }} [{{ compare.html.status }}] Generate the trends report for two verifications in HTML format [Output from CLI] $ {{ compare.html.cmd }}
{% endif %} [{{ list.status }}] List verifications $ {{ list.cmd }}
[{{ delete_verifier_ext.status }}] Delete the verifier extension $ {{ delete_verifier_ext.cmd }} [{{ delete_verifier.status }}] Delete the verifier and all verifications $ {{ delete_verifier.cmd }}

About Rally

Rally is benchmarking and verification system for OpenStack:

{% endblock %} rally-0.9.1/rally/ui/templates/ci/index.html0000664000567000056710000000704313073417716022150 0ustar jenkinsjenkins00000000000000{% extends "/base.html" %} {% block title_text %}Performance job results{% endblock %} {% block js_after %} function checkLink (elem) { var request = new XMLHttpRequest(); request.open('GET', elem.href, true); request.onreadystatechange = function(){ if (request.readyState === 4){ if (request.status === 404) { elem.href = elem.href + ".gz" } } }; request.send(); } var elems = document.getElementsByClassName("check-gz"); for(var i=0; i :first-child { display:block } .columns li > :nth-child(2) { display:block; position:static; left:165px; top:0; white-space:nowrap } {% endblock %} {% block media_queries %} @media only screen and (min-width: 320px) { .content-wrap { width:400px } } @media only screen and (min-width: 520px) { .content-wrap { width:500px } } @media only screen and (min-width: 620px) { .content-wrap { width:90% } .columns li > :nth-child(2) { position:absolute } } @media only screen and (min-width: 720px) { .content-wrap { width:70% } } {% endblock %} {% block header_text %}performance job results{% endblock %} {% block content %}

Logs and files

Job results, in different formats

About Rally

Rally is benchmark system for OpenStack:

Steps to repeat locally

  1. Fetch rally task from here
  2. Fetch rally plugins from here
  3. Install OpenStack and Rally using this instruction
  4. Unzip plugins and put to .rally/plugins/ directory
  5. Run rally task: $ rally task start task.txt
{% endblock %} rally-0.9.1/rally/ui/templates/base.mako0000664000567000056710000000476513073417716021353 0ustar jenkinsjenkins00000000000000 > Rally | <%block name="title_text"/> <%block name="libs"/> >
Rally  <%block name="header_text"/>
<%block name="content"/>
rally-0.9.1/rally/ui/templates/libs/0000775000567000056710000000000013073420067020476 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/libs/d3.3.4.13.min.js0000664000567000056710000044110713073417716022667 0ustar jenkinsjenkins00000000000000/* Copyright (c) 2010-2015, Michael Bostock All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The name Michael Bostock may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
https://github.com/mbostock/d3 */ !function(){function n(n,t){return t>n?-1:n>t?1:n>=t?0:0/0}function t(n){return null===n?0/0:+n}function e(n){return!isNaN(n)}function r(n){return{left:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)<0?r=i+1:u=i}return r},right:function(t,e,r,u){for(arguments.length<3&&(r=0),arguments.length<4&&(u=t.length);u>r;){var i=r+u>>>1;n(t[i],e)>0?u=i:r=i+1}return r}}}function u(n){return n.length}function i(n){for(var t=1;n*t%1;)t*=10;return t}function o(n,t){for(var e in t)Object.defineProperty(n.prototype,e,{value:t[e],enumerable:!1})}function a(){this._=Object.create(null)}function c(n){return(n+="")===la||n[0]===sa?sa+n:n}function l(n){return(n+="")[0]===sa?n.slice(1):n}function s(n){return c(n)in this._}function f(n){return(n=c(n))in this._&&delete this._[n]}function h(){var n=[];for(var t in this._)n.push(l(t));return n}function g(){var n=0;for(var t in this._)++n;return n}function p(){for(var n in this._)return!1;return!0}function v(){this._=Object.create(null)}function d(n,t,e){return function(){var r=e.apply(t,arguments);return r===t?n:r}}function m(n,t){if(t in n)return t;t=t.charAt(0).toUpperCase()+t.slice(1);for(var e=0,r=fa.length;r>e;++e){var u=fa[e]+t;if(u in n)return u}}function y(){}function x(){}function M(n){function t(){for(var t,r=e,u=-1,i=r.length;++ue;e++)for(var u,i=n[e],o=0,a=i.length;a>o;o++)(u=i[o])&&t(u,o,e);return n}function H(n){return ga(n,Ma),n}function O(n){var t,e;return function(r,u,i){var o,a=n[i].update,c=a.length;for(i!=e&&(e=i,t=0),u>=t&&(t=u+1);!(o=a[t])&&++t0&&(n=n.slice(0,a));var l=ba.get(n);return l&&(n=l,c=V),a?t?u:r:t?y:i}function Z(n,t){return function(e){var r=Bo.event;Bo.event=e,t[0]=this.__data__;try{n.apply(this,t)}finally{Bo.event=r}}}function V(n,t){var e=Z(n,t);return function(n){var t=this,r=n.relatedTarget;r&&(r===t||8&r.compareDocumentPosition(t))||e.call(t,n)}}function X(){var n=".dragsuppress-"+ 
++Sa,t="click"+n,e=Bo.select(Qo).on("touchmove"+n,_).on("dragstart"+n,_).on("selectstart"+n,_);if(wa){var r=Ko.style,u=r[wa];r[wa]="none"}return function(i){function o(){e.on(t,null)}e.on(n,null),wa&&(r[wa]=u),i&&(e.on(t,function(){_(),o()},!0),setTimeout(o,0))}}function $(n,t){t.changedTouches&&(t=t.changedTouches[0]);var e=n.ownerSVGElement||n;if(e.createSVGPoint){var r=e.createSVGPoint();if(0>ka&&(Qo.scrollX||Qo.scrollY)){e=Bo.select("body").append("svg").style({position:"absolute",top:0,left:0,margin:0,padding:0,border:"none"},"important");var u=e[0][0].getScreenCTM();ka=!(u.f||u.e),e.remove()}return ka?(r.x=t.pageX,r.y=t.pageY):(r.x=t.clientX,r.y=t.clientY),r=r.matrixTransform(n.getScreenCTM().inverse()),[r.x,r.y]}var i=n.getBoundingClientRect();return[t.clientX-i.left-n.clientLeft,t.clientY-i.top-n.clientTop]}function B(){return Bo.event.changedTouches[0].identifier}function W(){return Bo.event.target}function J(){return Qo}function G(n){return n>0?1:0>n?-1:0}function K(n,t,e){return(t[0]-n[0])*(e[1]-n[1])-(t[1]-n[1])*(e[0]-n[0])}function Q(n){return n>1?0:-1>n?Ea:Math.acos(n)}function nt(n){return n>1?Ca:-1>n?-Ca:Math.asin(n)}function tt(n){return((n=Math.exp(n))-1/n)/2}function et(n){return((n=Math.exp(n))+1/n)/2}function rt(n){return((n=Math.exp(2*n))-1)/(n+1)}function ut(n){return(n=Math.sin(n/2))*n}function it(){}function ot(n,t,e){return this instanceof ot?(this.h=+n,this.s=+t,void(this.l=+e)):arguments.length<2?n instanceof ot?new ot(n.h,n.s,n.l):Mt(""+n,_t,ot):new ot(n,t,e)}function at(n,t,e){function r(n){return n>360?n-=360:0>n&&(n+=360),60>n?i+(o-i)*n/60:180>n?o:240>n?i+(o-i)*(240-n)/60:i}function u(n){return Math.round(255*r(n))}var i,o;return n=isNaN(n)?0:(n%=360)<0?n+360:n,t=isNaN(t)?0:0>t?0:t>1?1:t,e=0>e?0:e>1?1:e,o=.5>=e?e*(1+t):e+t-e*t,i=2*e-o,new dt(u(n+120),u(n),u(n-120))}function ct(n,t,e){return this instanceof ct?(this.h=+n,this.c=+t,void(this.l=+e)):arguments.length<2?n instanceof ct?new ct(n.h,n.c,n.l):n instanceof 
st?ht(n.l,n.a,n.b):ht((n=bt((n=Bo.rgb(n)).r,n.g,n.b)).l,n.a,n.b):new ct(n,t,e)}function lt(n,t,e){return isNaN(n)&&(n=0),isNaN(t)&&(t=0),new st(e,Math.cos(n*=La)*t,Math.sin(n)*t)}function st(n,t,e){return this instanceof st?(this.l=+n,this.a=+t,void(this.b=+e)):arguments.length<2?n instanceof st?new st(n.l,n.a,n.b):n instanceof ct?lt(n.h,n.c,n.l):bt((n=dt(n)).r,n.g,n.b):new st(n,t,e)}function ft(n,t,e){var r=(n+16)/116,u=r+t/500,i=r-e/200;return u=gt(u)*Ya,r=gt(r)*Ia,i=gt(i)*Za,new dt(vt(3.2404542*u-1.5371385*r-.4985314*i),vt(-.969266*u+1.8760108*r+.041556*i),vt(.0556434*u-.2040259*r+1.0572252*i))}function ht(n,t,e){return n>0?new ct(Math.atan2(e,t)*Ta,Math.sqrt(t*t+e*e),n):new ct(0/0,0/0,n)}function gt(n){return n>.206893034?n*n*n:(n-4/29)/7.787037}function pt(n){return n>.008856?Math.pow(n,1/3):7.787037*n+4/29}function vt(n){return Math.round(255*(.00304>=n?12.92*n:1.055*Math.pow(n,1/2.4)-.055))}function dt(n,t,e){return this instanceof dt?(this.r=~~n,this.g=~~t,void(this.b=~~e)):arguments.length<2?n instanceof dt?new dt(n.r,n.g,n.b):Mt(""+n,dt,at):new dt(n,t,e)}function mt(n){return new dt(n>>16,255&n>>8,255&n)}function yt(n){return mt(n)+""}function xt(n){return 16>n?"0"+Math.max(0,n).toString(16):Math.min(255,n).toString(16)}function Mt(n,t,e){var r,u,i,o=0,a=0,c=0;if(r=/([a-z]+)\((.*)\)/i.exec(n))switch(u=r[2].split(","),r[1]){case"hsl":return e(parseFloat(u[0]),parseFloat(u[1])/100,parseFloat(u[2])/100);case"rgb":return t(St(u[0]),St(u[1]),St(u[2]))}return(i=$a.get(n))?t(i.r,i.g,i.b):(null==n||"#"!==n.charAt(0)||isNaN(i=parseInt(n.slice(1),16))||(4===n.length?(o=(3840&i)>>4,o=o>>4|o,a=240&i,a=a>>4|a,c=15&i,c=c<<4|c):7===n.length&&(o=(16711680&i)>>16,a=(65280&i)>>8,c=255&i)),t(o,a,c))}function _t(n,t,e){var r,u,i=Math.min(n/=255,t/=255,e/=255),o=Math.max(n,t,e),a=o-i,c=(o+i)/2;return a?(u=.5>c?a/(o+i):a/(2-o-i),r=n==o?(t-e)/a+(e>t?6:0):t==o?(e-n)/a+2:(n-t)/a+4,r*=60):(r=0/0,u=c>0&&1>c?0:r),new ot(r,u,c)}function bt(n,t,e){n=wt(n),t=wt(t),e=wt(e);var 
r=pt((.4124564*n+.3575761*t+.1804375*e)/Ya),u=pt((.2126729*n+.7151522*t+.072175*e)/Ia),i=pt((.0193339*n+.119192*t+.9503041*e)/Za);return st(116*u-16,500*(r-u),200*(u-i))}function wt(n){return(n/=255)<=.04045?n/12.92:Math.pow((n+.055)/1.055,2.4)}function St(n){var t=parseFloat(n);return"%"===n.charAt(n.length-1)?Math.round(2.55*t):t}function kt(n){return"function"==typeof n?n:function(){return n}}function Et(n){return n}function At(n){return function(t,e,r){return 2===arguments.length&&"function"==typeof e&&(r=e,e=null),Ct(t,e,n,r)}}function Ct(n,t,e,r){function u(){var n,t=c.status;if(!t&&zt(c)||t>=200&&300>t||304===t){try{n=e.call(i,c)}catch(r){return o.error.call(i,r),void 0}o.load.call(i,n)}else o.error.call(i,c)}var i={},o=Bo.dispatch("beforesend","progress","load","error"),a={},c=new XMLHttpRequest,l=null;return!Qo.XDomainRequest||"withCredentials"in c||!/^(http(s)?:)?\/\//.test(n)||(c=new XDomainRequest),"onload"in c?c.onload=c.onerror=u:c.onreadystatechange=function(){c.readyState>3&&u()},c.onprogress=function(n){var t=Bo.event;Bo.event=n;try{o.progress.call(i,c)}finally{Bo.event=t}},i.header=function(n,t){return n=(n+"").toLowerCase(),arguments.length<2?a[n]:(null==t?delete a[n]:a[n]=t+"",i)},i.mimeType=function(n){return arguments.length?(t=null==n?null:n+"",i):t},i.responseType=function(n){return arguments.length?(l=n,i):l},i.response=function(n){return e=n,i},["get","post"].forEach(function(n){i[n]=function(){return i.send.apply(i,[n].concat(Jo(arguments)))}}),i.send=function(e,r,u){if(2===arguments.length&&"function"==typeof r&&(u=r,r=null),c.open(e,n,!0),null==t||"accept"in a||(a.accept=t+",*/*"),c.setRequestHeader)for(var s in a)c.setRequestHeader(s,a[s]);return null!=t&&c.overrideMimeType&&c.overrideMimeType(t),null!=l&&(c.responseType=l),null!=u&&i.on("error",u).on("load",function(n){u(null,n)}),o.beforesend.call(i,c),c.send(null==r?null:r),i},i.abort=function(){return c.abort(),i},Bo.rebind(i,o,"on"),null==r?i:i.get(Nt(r))}function Nt(n){return 
1===n.length?function(t,e){n(null==t?e:null)}:n}function zt(n){var t=n.responseType;return t&&"text"!==t?n.response:n.responseText}function Lt(){var n=Tt(),t=qt()-n;t>24?(isFinite(t)&&(clearTimeout(Ga),Ga=setTimeout(Lt,t)),Ja=0):(Ja=1,Qa(Lt))}function Tt(){var n=Date.now();for(Ka=Ba;Ka;)n>=Ka.t&&(Ka.f=Ka.c(n-Ka.t)),Ka=Ka.n;return n}function qt(){for(var n,t=Ba,e=1/0;t;)t.f?t=n?n.n=t.n:Ba=t.n:(t.t8?function(n){return n/e}:function(n){return n*e},symbol:n}}function Pt(n){var t=n.decimal,e=n.thousands,r=n.grouping,u=n.currency,i=r&&e?function(n,t){for(var u=n.length,i=[],o=0,a=r[0],c=0;u>0&&a>0&&(c+a+1>t&&(a=Math.max(1,t-c)),i.push(n.substring(u-=a,u+a)),!((c+=a+1)>t));)a=r[o=(o+1)%r.length];return i.reverse().join(e)}:Et;return function(n){var e=tc.exec(n),r=e[1]||" ",o=e[2]||">",a=e[3]||"-",c=e[4]||"",l=e[5],s=+e[6],f=e[7],h=e[8],g=e[9],p=1,v="",d="",m=!1,y=!0;switch(h&&(h=+h.substring(1)),(l||"0"===r&&"="===o)&&(l=r="0",o="="),g){case"n":f=!0,g="g";break;case"%":p=100,d="%",g="f";break;case"p":p=100,d="%",g="r";break;case"b":case"o":case"x":case"X":"#"===c&&(v="0"+g.toLowerCase());case"c":y=!1;case"d":m=!0,h=0;break;case"s":p=-1,g="r"}"$"===c&&(v=u[0],d=u[1]),"r"!=g||h||(g="g"),null!=h&&("g"==g?h=Math.max(1,Math.min(21,h)):("e"==g||"f"==g)&&(h=Math.max(0,Math.min(20,h)))),g=ec.get(g)||Ut;var x=l&&f;return function(n){var e=d;if(m&&n%1)return"";var u=0>n||0===n&&0>1/n?(n=-n,"-"):"-"===a?"":a;if(0>p){var c=Bo.formatPrefix(n,h);n=c.scale(n),e=c.symbol+d}else n*=p;n=g(n,h);var M,_,b=n.lastIndexOf(".");if(0>b){var w=y?n.lastIndexOf("e"):-1;0>w?(M=n,_=""):(M=n.substring(0,w),_=n.substring(w))}else M=n.substring(0,b),_=t+n.substring(b+1);!l&&f&&(M=i(M,1/0));var S=v.length+M.length+_.length+(x?0:u.length),k=s>S?new Array(S=s-S+1).join(r):"";return x&&(M=i(k+M,k.length?s-_.length:1/0)),u+=v,n=M+_,("<"===o?u+n+k:">"===o?k+u+n:"^"===o?k.substring(0,S>>=1)+u+n+k.substring(S):u+(x?n:k+n))+e}}}function Ut(n){return n+""}function jt(){this._=new 
Date(arguments.length>1?Date.UTC.apply(this,arguments):arguments[0])}function Ft(n,t,e){function r(t){var e=n(t),r=i(e,1);return r-t>t-e?e:r}function u(e){return t(e=n(new uc(e-1)),1),e}function i(n,e){return t(n=new uc(+n),e),n}function o(n,r,i){var o=u(n),a=[];if(i>1)for(;r>o;)e(o)%i||a.push(new Date(+o)),t(o,1);else for(;r>o;)a.push(new Date(+o)),t(o,1);return a}function a(n,t,e){try{uc=jt;var r=new jt;return r._=n,o(r,t,e)}finally{uc=Date}}n.floor=n,n.round=r,n.ceil=u,n.offset=i,n.range=o;var c=n.utc=Ht(n);return c.floor=c,c.round=Ht(r),c.ceil=Ht(u),c.offset=Ht(i),c.range=a,n}function Ht(n){return function(t,e){try{uc=jt;var r=new jt;return r._=t,n(r,e)._}finally{uc=Date}}}function Ot(n){function t(n){function t(t){for(var e,u,i,o=[],a=-1,c=0;++aa;){if(r>=l)return-1;if(u=t.charCodeAt(a++),37===u){if(o=t.charAt(a++),i=N[o in oc?t.charAt(a++):o],!i||(r=i(n,e,r))<0)return-1}else if(u!=e.charCodeAt(r++))return-1}return r}function r(n,t,e){b.lastIndex=0;var r=b.exec(t.slice(e));return r?(n.w=w.get(r[0].toLowerCase()),e+r[0].length):-1}function u(n,t,e){M.lastIndex=0;var r=M.exec(t.slice(e));return r?(n.w=_.get(r[0].toLowerCase()),e+r[0].length):-1}function i(n,t,e){E.lastIndex=0;var r=E.exec(t.slice(e));return r?(n.m=A.get(r[0].toLowerCase()),e+r[0].length):-1}function o(n,t,e){S.lastIndex=0;var r=S.exec(t.slice(e));return r?(n.m=k.get(r[0].toLowerCase()),e+r[0].length):-1}function a(n,t,r){return e(n,C.c.toString(),t,r)}function c(n,t,r){return e(n,C.x.toString(),t,r)}function l(n,t,r){return e(n,C.X.toString(),t,r)}function s(n,t,e){var r=x.get(t.slice(e,e+=2).toLowerCase());return null==r?-1:(n.p=r,e)}var f=n.dateTime,h=n.date,g=n.time,p=n.periods,v=n.days,d=n.shortDays,m=n.months,y=n.shortMonths;t.utc=function(n){function e(n){try{uc=jt;var t=new uc;return t._=n,r(t)}finally{uc=Date}}var r=t(n);return e.parse=function(n){try{uc=jt;var t=r.parse(n);return t&&t._}finally{uc=Date}},e.toString=r.toString,e},t.multi=t.utc.multi=ae;var 
x=Bo.map(),M=It(v),_=Zt(v),b=It(d),w=Zt(d),S=It(m),k=Zt(m),E=It(y),A=Zt(y);p.forEach(function(n,t){x.set(n.toLowerCase(),t)});var C={a:function(n){return d[n.getDay()]},A:function(n){return v[n.getDay()]},b:function(n){return y[n.getMonth()]},B:function(n){return m[n.getMonth()]},c:t(f),d:function(n,t){return Yt(n.getDate(),t,2)},e:function(n,t){return Yt(n.getDate(),t,2)},H:function(n,t){return Yt(n.getHours(),t,2)},I:function(n,t){return Yt(n.getHours()%12||12,t,2)},j:function(n,t){return Yt(1+rc.dayOfYear(n),t,3)},L:function(n,t){return Yt(n.getMilliseconds(),t,3)},m:function(n,t){return Yt(n.getMonth()+1,t,2)},M:function(n,t){return Yt(n.getMinutes(),t,2)},p:function(n){return p[+(n.getHours()>=12)]},S:function(n,t){return Yt(n.getSeconds(),t,2)},U:function(n,t){return Yt(rc.sundayOfYear(n),t,2)},w:function(n){return n.getDay()},W:function(n,t){return Yt(rc.mondayOfYear(n),t,2)},x:t(h),X:t(g),y:function(n,t){return Yt(n.getFullYear()%100,t,2)},Y:function(n,t){return Yt(n.getFullYear()%1e4,t,4)},Z:ie,"%":function(){return"%"}},N={a:r,A:u,b:i,B:o,c:a,d:Qt,e:Qt,H:te,I:te,j:ne,L:ue,m:Kt,M:ee,p:s,S:re,U:Xt,w:Vt,W:$t,x:c,X:l,y:Wt,Y:Bt,Z:Jt,"%":oe};return t}function Yt(n,t,e){var r=0>n?"-":"",u=(r?-n:n)+"",i=u.length;return r+(e>i?new Array(e-i+1).join(t)+u:u)}function It(n){return new RegExp("^(?:"+n.map(Bo.requote).join("|")+")","i")}function Zt(n){for(var t=new a,e=-1,r=n.length;++e68?1900:2e3)}function Kt(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.m=r[0]-1,e+r[0].length):-1}function Qt(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.d=+r[0],e+r[0].length):-1}function ne(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+3));return r?(n.j=+r[0],e+r[0].length):-1}function te(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.H=+r[0],e+r[0].length):-1}function ee(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return r?(n.M=+r[0],e+r[0].length):-1}function re(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+2));return 
r?(n.S=+r[0],e+r[0].length):-1}function ue(n,t,e){ac.lastIndex=0;var r=ac.exec(t.slice(e,e+3));return r?(n.L=+r[0],e+r[0].length):-1}function ie(n){var t=n.getTimezoneOffset(),e=t>0?"-":"+",r=0|ca(t)/60,u=ca(t)%60;return e+Yt(r,"0",2)+Yt(u,"0",2)}function oe(n,t,e){cc.lastIndex=0;var r=cc.exec(t.slice(e,e+1));return r?e+r[0].length:-1}function ae(n){for(var t=n.length,e=-1;++e=0?1:-1,a=o*e,c=Math.cos(t),l=Math.sin(t),s=i*l,f=u*c+s*Math.cos(a),h=s*o*Math.sin(a);pc.add(Math.atan2(h,f)),r=n,u=c,i=l}var t,e,r,u,i;vc.point=function(o,a){vc.point=n,r=(t=o)*La,u=Math.cos(a=(e=a)*La/2+Ea/4),i=Math.sin(a)},vc.lineEnd=function(){n(t,e)}}function pe(n){var t=n[0],e=n[1],r=Math.cos(e);return[r*Math.cos(t),r*Math.sin(t),Math.sin(e)]}function ve(n,t){return n[0]*t[0]+n[1]*t[1]+n[2]*t[2]}function de(n,t){return[n[1]*t[2]-n[2]*t[1],n[2]*t[0]-n[0]*t[2],n[0]*t[1]-n[1]*t[0]]}function me(n,t){n[0]+=t[0],n[1]+=t[1],n[2]+=t[2]}function ye(n,t){return[n[0]*t,n[1]*t,n[2]*t]}function xe(n){var t=Math.sqrt(n[0]*n[0]+n[1]*n[1]+n[2]*n[2]);n[0]/=t,n[1]/=t,n[2]/=t}function Me(n){return[Math.atan2(n[1],n[0]),nt(n[2])]}function _e(n,t){return ca(n[0]-t[0])a;++a)u.point((e=n[a])[0],e[1]);return u.lineEnd(),void 0}var c=new ze(e,n,null,!0),l=new ze(e,null,c,!1);c.o=l,i.push(c),o.push(l),c=new ze(r,n,null,!1),l=new ze(r,null,c,!0),c.o=l,i.push(c),o.push(l)}}),o.sort(t),Ne(i),Ne(o),i.length){for(var a=0,c=e,l=o.length;l>a;++a)o[a].e=c=!c;for(var s,f,h=i[0];;){for(var g=h,p=!0;g.v;)if((g=g.n)===h)return;s=g.z,u.lineStart();do{if(g.v=g.o.v=!0,g.e){if(p)for(var a=0,l=s.length;l>a;++a)u.point((f=s[a])[0],f[1]);else r(g.x,g.n.x,1,u);g=g.n}else{if(p){s=g.p.z;for(var a=s.length-1;a>=0;--a)u.point((f=s[a])[0],f[1])}else r(g.x,g.p.x,-1,u);g=g.p}g=g.o,s=g.z,p=!p}while(!g.v);u.lineEnd()}}}function Ne(n){if(t=n.length){for(var t,e,r=0,u=n[0];++r0){for(_||(i.polygonStart(),_=!0),i.lineStart();++o1&&2&t&&e.push(e.pop().concat(e.shift())),g.push(e.filter(Te))}var 
g,p,v,d=t(i),m=u.invert(r[0],r[1]),y={point:o,lineStart:c,lineEnd:l,polygonStart:function(){y.point=s,y.lineStart=f,y.lineEnd=h,g=[],p=[]},polygonEnd:function(){y.point=o,y.lineStart=c,y.lineEnd=l,g=Bo.merge(g);var n=je(m,p);g.length?(_||(i.polygonStart(),_=!0),Ce(g,Re,n,e,i)):n&&(_||(i.polygonStart(),_=!0),i.lineStart(),e(null,null,1,i),i.lineEnd()),_&&(i.polygonEnd(),_=!1),g=p=null},sphere:function(){i.polygonStart(),i.lineStart(),e(null,null,1,i),i.lineEnd(),i.polygonEnd()}},x=qe(),M=t(x),_=!1;return y}}function Te(n){return n.length>1}function qe(){var n,t=[];return{lineStart:function(){t.push(n=[])},point:function(t,e){n.push([t,e])},lineEnd:y,buffer:function(){var e=t;return t=[],n=null,e},rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))}}}function Re(n,t){return((n=n.x)[0]<0?n[1]-Ca-Na:Ca-n[1])-((t=t.x)[0]<0?t[1]-Ca-Na:Ca-t[1])}function De(n){var t,e=0/0,r=0/0,u=0/0;return{lineStart:function(){n.lineStart(),t=1},point:function(i,o){var a=i>0?Ea:-Ea,c=ca(i-e);ca(c-Ea)0?Ca:-Ca),n.point(u,r),n.lineEnd(),n.lineStart(),n.point(a,r),n.point(i,r),t=0):u!==a&&c>=Ea&&(ca(e-u)Na?Math.atan((Math.sin(t)*(i=Math.cos(r))*Math.sin(e)-Math.sin(r)*(u=Math.cos(t))*Math.sin(n))/(u*i*o)):(t+r)/2}function Ue(n,t,e,r){var u;if(null==n)u=e*Ca,r.point(-Ea,u),r.point(0,u),r.point(Ea,u),r.point(Ea,0),r.point(Ea,-u),r.point(0,-u),r.point(-Ea,-u),r.point(-Ea,0),r.point(-Ea,u);else if(ca(n[0]-t[0])>Na){var i=n[0]a;++a){var l=t[a],s=l.length;if(s)for(var f=l[0],h=f[0],g=f[1]/2+Ea/4,p=Math.sin(g),v=Math.cos(g),d=1;;){d===s&&(d=0),n=l[d];var m=n[0],y=n[1]/2+Ea/4,x=Math.sin(y),M=Math.cos(y),_=m-h,b=_>=0?1:-1,w=b*_,S=w>Ea,k=p*x;if(pc.add(Math.atan2(k*b*Math.sin(w),v*M+k*Math.cos(w))),i+=S?_+b*Aa:_,S^h>=e^m>=e){var E=de(pe(f),pe(n));xe(E);var A=de(u,E);xe(A);var C=(S^_>=0?-1:1)*nt(A[2]);(r>C||r===C&&(E[0]||E[1]))&&(o+=S^_>=0?1:-1)}if(!d++)break;h=m,p=x,v=M,f=n}}return(-Na>i||Na>i&&0>pc)^1&o}function Fe(n){function t(n,t){return Math.cos(n)*Math.cos(t)>i}function e(n){var 
e,i,c,l,s;return{lineStart:function(){l=c=!1,s=1},point:function(f,h){var g,p=[f,h],v=t(f,h),d=o?v?0:u(f,h):v?u(f+(0>f?Ea:-Ea),h):0;if(!e&&(l=c=v)&&n.lineStart(),v!==c&&(g=r(e,p),(_e(e,g)||_e(p,g))&&(p[0]+=Na,p[1]+=Na,v=t(p[0],p[1]))),v!==c)s=0,v?(n.lineStart(),g=r(p,e),n.point(g[0],g[1])):(g=r(e,p),n.point(g[0],g[1]),n.lineEnd()),e=g;else if(a&&e&&o^v){var m;d&i||!(m=r(p,e,!0))||(s=0,o?(n.lineStart(),n.point(m[0][0],m[0][1]),n.point(m[1][0],m[1][1]),n.lineEnd()):(n.point(m[1][0],m[1][1]),n.lineEnd(),n.lineStart(),n.point(m[0][0],m[0][1])))}!v||e&&_e(e,p)||n.point(p[0],p[1]),e=p,c=v,i=d},lineEnd:function(){c&&n.lineEnd(),e=null},clean:function(){return s|(l&&c)<<1}}}function r(n,t,e){var r=pe(n),u=pe(t),o=[1,0,0],a=de(r,u),c=ve(a,a),l=a[0],s=c-l*l;if(!s)return!e&&n;var f=i*c/s,h=-i*l/s,g=de(o,a),p=ye(o,f),v=ye(a,h);me(p,v);var d=g,m=ve(p,d),y=ve(d,d),x=m*m-y*(ve(p,p)-1);if(!(0>x)){var M=Math.sqrt(x),_=ye(d,(-m-M)/y);if(me(_,p),_=Me(_),!e)return _;var b,w=n[0],S=t[0],k=n[1],E=t[1];w>S&&(b=w,w=S,S=b);var A=S-w,C=ca(A-Ea)A;if(!C&&k>E&&(b=k,k=E,E=b),N?C?k+E>0^_[1]<(ca(_[0]-w)Ea^(w<=_[0]&&_[0]<=S)){var z=ye(d,(-m+M)/y);return me(z,p),[_,Me(z)]}}}function u(t,e){var r=o?n:Ea-n,u=0;return-r>t?u|=1:t>r&&(u|=2),-r>e?u|=4:e>r&&(u|=8),u}var i=Math.cos(n),o=i>0,a=ca(i)>Na,c=gr(n,6*La);return Le(t,e,c,o?[0,-n]:[-Ea,n-Ea])}function He(n,t,e,r){return function(u){var i,o=u.a,a=u.b,c=o.x,l=o.y,s=a.x,f=a.y,h=0,g=1,p=s-c,v=f-l;if(i=n-c,p||!(i>0)){if(i/=p,0>p){if(h>i)return;g>i&&(g=i)}else if(p>0){if(i>g)return;i>h&&(h=i)}if(i=e-c,p||!(0>i)){if(i/=p,0>p){if(i>g)return;i>h&&(h=i)}else if(p>0){if(h>i)return;g>i&&(g=i)}if(i=t-l,v||!(i>0)){if(i/=v,0>v){if(h>i)return;g>i&&(g=i)}else if(v>0){if(i>g)return;i>h&&(h=i)}if(i=r-l,v||!(0>i)){if(i/=v,0>v){if(i>g)return;i>h&&(h=i)}else if(v>0){if(h>i)return;g>i&&(g=i)}return h>0&&(u.a={x:c+h*p,y:l+h*v}),1>g&&(u.b={x:c+g*p,y:l+g*v}),u}}}}}}function Oe(n,t,e,r){function u(r,u){return ca(r[0]-n)0?0:3:ca(r[0]-e)0?2:1:ca(r[1]-t)0?1:0:u>0?3:2}function 
i(n,t){return o(n.x,t.x)}function o(n,t){var e=u(n,1),r=u(t,1);return e!==r?e-r:0===e?t[1]-n[1]:1===e?n[0]-t[0]:2===e?n[1]-t[1]:t[0]-n[0]}return function(a){function c(n){for(var t=0,e=d.length,r=n[1],u=0;e>u;++u)for(var i,o=1,a=d[u],c=a.length,l=a[0];c>o;++o)i=a[o],l[1]<=r?i[1]>r&&K(l,i,n)>0&&++t:i[1]<=r&&K(l,i,n)<0&&--t,l=i;return 0!==t}function l(i,a,c,l){var s=0,f=0;if(null==i||(s=u(i,c))!==(f=u(a,c))||o(i,a)<0^c>0){do l.point(0===s||3===s?n:e,s>1?r:t);while((s=(s+c+4)%4)!==f)}else l.point(a[0],a[1])}function s(u,i){return u>=n&&e>=u&&i>=t&&r>=i}function f(n,t){s(n,t)&&a.point(n,t)}function h(){N.point=p,d&&d.push(m=[]),S=!0,w=!1,_=b=0/0}function g(){v&&(p(y,x),M&&w&&A.rejoin(),v.push(A.buffer())),N.point=f,w&&a.lineEnd()}function p(n,t){n=Math.max(-Nc,Math.min(Nc,n)),t=Math.max(-Nc,Math.min(Nc,t));var e=s(n,t);if(d&&m.push([n,t]),S)y=n,x=t,M=e,S=!1,e&&(a.lineStart(),a.point(n,t));else if(e&&w)a.point(n,t);else{var r={a:{x:_,y:b},b:{x:n,y:t}};C(r)?(w||(a.lineStart(),a.point(r.a.x,r.a.y)),a.point(r.b.x,r.b.y),e||a.lineEnd(),k=!1):e&&(a.lineStart(),a.point(n,t),k=!1)}_=n,b=t,w=e}var v,d,m,y,x,M,_,b,w,S,k,E=a,A=qe(),C=He(n,t,e,r),N={point:f,lineStart:h,lineEnd:g,polygonStart:function(){a=A,v=[],d=[],k=!0},polygonEnd:function(){a=E,v=Bo.merge(v);var t=c([n,r]),e=k&&t,u=v.length;(e||u)&&(a.polygonStart(),e&&(a.lineStart(),l(null,null,1,a),a.lineEnd()),u&&Ce(v,i,t,l,a),a.polygonEnd()),v=d=m=null}};return N}}function Ye(n,t){function e(e,r){return e=n(e,r),t(e[0],e[1])}return n.invert&&t.invert&&(e.invert=function(e,r){return e=t.invert(e,r),e&&n.invert(e[0],e[1])}),e}function Ie(n){var t=0,e=Ea/3,r=ir(n),u=r(t,e);return u.parallels=function(n){return arguments.length?r(t=n[0]*Ea/180,e=n[1]*Ea/180):[180*(t/Ea),180*(e/Ea)]},u}function Ze(n,t){function e(n,t){var e=Math.sqrt(i-2*u*Math.sin(t))/u;return[e*Math.sin(n*=u),o-e*Math.cos(n)]}var r=Math.sin(n),u=(r+Math.sin(t))/2,i=1+r*(2*u-r),o=Math.sqrt(i)/u;return e.invert=function(n,t){var 
e=o-t;return[Math.atan2(n,e)/u,nt((i-(n*n+e*e)*u*u)/(2*u))]},e}function Ve(){function n(n,t){Lc+=u*n-r*t,r=n,u=t}var t,e,r,u;Pc.point=function(i,o){Pc.point=n,t=r=i,e=u=o},Pc.lineEnd=function(){n(t,e)}}function Xe(n,t){Tc>n&&(Tc=n),n>Rc&&(Rc=n),qc>t&&(qc=t),t>Dc&&(Dc=t)}function $e(){function n(n,t){o.push("M",n,",",t,i)}function t(n,t){o.push("M",n,",",t),a.point=e}function e(n,t){o.push("L",n,",",t)}function r(){a.point=n}function u(){o.push("Z")}var i=Be(4.5),o=[],a={point:n,lineStart:function(){a.point=t},lineEnd:r,polygonStart:function(){a.lineEnd=u},polygonEnd:function(){a.lineEnd=r,a.point=n},pointRadius:function(n){return i=Be(n),a},result:function(){if(o.length){var n=o.join("");return o=[],n}}};return a}function Be(n){return"m0,"+n+"a"+n+","+n+" 0 1,1 0,"+-2*n+"a"+n+","+n+" 0 1,1 0,"+2*n+"z"}function We(n,t){yc+=n,xc+=t,++Mc}function Je(){function n(n,r){var u=n-t,i=r-e,o=Math.sqrt(u*u+i*i);_c+=o*(t+n)/2,bc+=o*(e+r)/2,wc+=o,We(t=n,e=r)}var t,e;jc.point=function(r,u){jc.point=n,We(t=r,e=u)}}function Ge(){jc.point=We}function Ke(){function n(n,t){var e=n-r,i=t-u,o=Math.sqrt(e*e+i*i);_c+=o*(r+n)/2,bc+=o*(u+t)/2,wc+=o,o=u*n-r*t,Sc+=o*(r+n),kc+=o*(u+t),Ec+=3*o,We(r=n,u=t)}var t,e,r,u;jc.point=function(i,o){jc.point=n,We(t=r=i,e=u=o)},jc.lineEnd=function(){n(t,e)}}function Qe(n){function t(t,e){n.moveTo(t,e),n.arc(t,e,o,0,Aa)}function e(t,e){n.moveTo(t,e),a.point=r}function r(t,e){n.lineTo(t,e)}function u(){a.point=t}function i(){n.closePath()}var o=4.5,a={point:t,lineStart:function(){a.point=e},lineEnd:u,polygonStart:function(){a.lineEnd=i},polygonEnd:function(){a.lineEnd=u,a.point=t},pointRadius:function(n){return o=n,a},result:y};return a}function nr(n){function t(n){return(a?r:e)(n)}function e(t){return rr(t,function(e,r){e=n(e,r),t.point(e[0],e[1])})}function r(t){function e(e,r){e=n(e,r),t.point(e[0],e[1])}function r(){x=0/0,S.point=i,t.lineStart()}function i(e,r){var 
i=pe([e,r]),o=n(e,r);u(x,M,y,_,b,w,x=o[0],M=o[1],y=e,_=i[0],b=i[1],w=i[2],a,t),t.point(x,M)}function o(){S.point=e,t.lineEnd()}function c(){r(),S.point=l,S.lineEnd=s}function l(n,t){i(f=n,h=t),g=x,p=M,v=_,d=b,m=w,S.point=i}function s(){u(x,M,y,_,b,w,g,p,f,v,d,m,a,t),S.lineEnd=o,o()}var f,h,g,p,v,d,m,y,x,M,_,b,w,S={point:e,lineStart:r,lineEnd:o,polygonStart:function(){t.polygonStart(),S.lineStart=c},polygonEnd:function(){t.polygonEnd(),S.lineStart=r}};return S}function u(t,e,r,a,c,l,s,f,h,g,p,v,d,m){var y=s-t,x=f-e,M=y*y+x*x;if(M>4*i&&d--){var _=a+g,b=c+p,w=l+v,S=Math.sqrt(_*_+b*b+w*w),k=Math.asin(w/=S),E=ca(ca(w)-1)i||ca((y*z+x*L)/M-.5)>.3||o>a*g+c*p+l*v)&&(u(t,e,r,a,c,l,C,N,E,_/=S,b/=S,w,d,m),m.point(C,N),u(C,N,E,_,b,w,s,f,h,g,p,v,d,m))}}var i=.5,o=Math.cos(30*La),a=16;return t.precision=function(n){return arguments.length?(a=(i=n*n)>0&&16,t):Math.sqrt(i)},t}function tr(n){var t=nr(function(t,e){return n([t*Ta,e*Ta])});return function(n){return or(t(n))}}function er(n){this.stream=n}function rr(n,t){return{point:t,sphere:function(){n.sphere()},lineStart:function(){n.lineStart()},lineEnd:function(){n.lineEnd()},polygonStart:function(){n.polygonStart()},polygonEnd:function(){n.polygonEnd()}}}function ur(n){return ir(function(){return n})()}function ir(n){function t(n){return n=a(n[0]*La,n[1]*La),[n[0]*h+c,l-n[1]*h]}function e(n){return n=a.invert((n[0]-c)/h,(l-n[1])/h),n&&[n[0]*Ta,n[1]*Ta]}function r(){a=Ye(o=lr(m,y,x),i);var n=i(v,d);return c=g-n[0]*h,l=p+n[1]*h,u()}function u(){return s&&(s.valid=!1,s=null),t}var i,o,a,c,l,s,f=nr(function(n,t){return n=i(n,t),[n[0]*h+c,l-n[1]*h]}),h=150,g=480,p=250,v=0,d=0,m=0,y=0,x=0,M=Cc,_=Et,b=null,w=null;return t.stream=function(n){return s&&(s.valid=!1),s=or(M(o,f(_(n)))),s.valid=!0,s},t.clipAngle=function(n){return arguments.length?(M=null==n?(b=n,Cc):Fe((b=+n)*La),u()):b},t.clipExtent=function(n){return arguments.length?(w=n,_=n?Oe(n[0][0],n[0][1],n[1][0],n[1][1]):Et,u()):w},t.scale=function(n){return 
arguments.length?(h=+n,r()):h},t.translate=function(n){return arguments.length?(g=+n[0],p=+n[1],r()):[g,p]},t.center=function(n){return arguments.length?(v=n[0]%360*La,d=n[1]%360*La,r()):[v*Ta,d*Ta]},t.rotate=function(n){return arguments.length?(m=n[0]%360*La,y=n[1]%360*La,x=n.length>2?n[2]%360*La:0,r()):[m*Ta,y*Ta,x*Ta]},Bo.rebind(t,f,"precision"),function(){return i=n.apply(this,arguments),t.invert=i.invert&&e,r()}}function or(n){return rr(n,function(t,e){n.point(t*La,e*La)})}function ar(n,t){return[n,t]}function cr(n,t){return[n>Ea?n-Aa:-Ea>n?n+Aa:n,t]}function lr(n,t,e){return n?t||e?Ye(fr(n),hr(t,e)):fr(n):t||e?hr(t,e):cr}function sr(n){return function(t,e){return t+=n,[t>Ea?t-Aa:-Ea>t?t+Aa:t,e]}}function fr(n){var t=sr(n);return t.invert=sr(-n),t}function hr(n,t){function e(n,t){var e=Math.cos(t),a=Math.cos(n)*e,c=Math.sin(n)*e,l=Math.sin(t),s=l*r+a*u;return[Math.atan2(c*i-s*o,a*r-l*u),nt(s*i+c*o)]}var r=Math.cos(n),u=Math.sin(n),i=Math.cos(t),o=Math.sin(t);return e.invert=function(n,t){var e=Math.cos(t),a=Math.cos(n)*e,c=Math.sin(n)*e,l=Math.sin(t),s=l*i-c*o;return[Math.atan2(c*i+l*o,a*r+s*u),nt(s*r-a*u)]},e}function gr(n,t){var e=Math.cos(n),r=Math.sin(n);return function(u,i,o,a){var c=o*t;null!=u?(u=pr(e,u),i=pr(e,i),(o>0?i>u:u>i)&&(u+=o*Aa)):(u=n+o*Aa,i=n-.5*c);for(var l,s=u;o>0?s>i:i>s;s-=c)a.point((l=Me([e,-r*Math.cos(s),-r*Math.sin(s)]))[0],l[1])}}function pr(n,t){var e=pe(t);e[0]-=n,xe(e);var r=Q(-e[1]);return((-e[2]<0?-r:r)+2*Math.PI-Na)%(2*Math.PI)}function vr(n,t,e){var r=Bo.range(n,t-Na,e).concat(t);return function(n){return r.map(function(t){return[n,t]})}}function dr(n,t,e){var r=Bo.range(n,t-Na,e).concat(t);return function(n){return r.map(function(t){return[t,n]})}}function mr(n){return n.source}function yr(n){return n.target}function xr(n,t,e,r){var 
u=Math.cos(t),i=Math.sin(t),o=Math.cos(r),a=Math.sin(r),c=u*Math.cos(n),l=u*Math.sin(n),s=o*Math.cos(e),f=o*Math.sin(e),h=2*Math.asin(Math.sqrt(ut(r-t)+u*o*ut(e-n))),g=1/Math.sin(h),p=h?function(n){var t=Math.sin(n*=h)*g,e=Math.sin(h-n)*g,r=e*c+t*s,u=e*l+t*f,o=e*i+t*a;return[Math.atan2(u,r)*Ta,Math.atan2(o,Math.sqrt(r*r+u*u))*Ta]}:function(){return[n*Ta,t*Ta]};return p.distance=h,p}function Mr(){function n(n,u){var i=Math.sin(u*=La),o=Math.cos(u),a=ca((n*=La)-t),c=Math.cos(a);Fc+=Math.atan2(Math.sqrt((a=o*Math.sin(a))*a+(a=r*i-e*o*c)*a),e*i+r*o*c),t=n,e=i,r=o}var t,e,r;Hc.point=function(u,i){t=u*La,e=Math.sin(i*=La),r=Math.cos(i),Hc.point=n},Hc.lineEnd=function(){Hc.point=Hc.lineEnd=y}}function _r(n,t){function e(t,e){var r=Math.cos(t),u=Math.cos(e),i=n(r*u);return[i*u*Math.sin(t),i*Math.sin(e)]}return e.invert=function(n,e){var r=Math.sqrt(n*n+e*e),u=t(r),i=Math.sin(u),o=Math.cos(u);return[Math.atan2(n*i,r*o),Math.asin(r&&e*i/r)]},e}function br(n,t){function e(n,t){o>0?-Ca+Na>t&&(t=-Ca+Na):t>Ca-Na&&(t=Ca-Na);var e=o/Math.pow(u(t),i);return[e*Math.sin(i*n),o-e*Math.cos(i*n)]}var r=Math.cos(n),u=function(n){return Math.tan(Ea/4+n/2)},i=n===t?Math.sin(n):Math.log(r/Math.cos(t))/Math.log(u(t)/u(n)),o=r*Math.pow(u(n),i)/i;return i?(e.invert=function(n,t){var e=o-t,r=G(i)*Math.sqrt(n*n+e*e);return[Math.atan2(n,e)/i,2*Math.atan(Math.pow(o/r,1/i))-Ca]},e):Sr}function wr(n,t){function e(n,t){var e=i-t;return[e*Math.sin(u*n),i-e*Math.cos(u*n)]}var r=Math.cos(n),u=n===t?Math.sin(n):(r-Math.cos(t))/(t-n),i=r/u+n;return ca(u)u;u++){for(;r>1&&K(n[e[r-2]],n[e[r-1]],n[u])<=0;)--r;e[r++]=u}return e.slice(0,r)}function zr(n,t){return n[0]-t[0]||n[1]-t[1]}function Lr(n,t,e){return(e[0]-t[0])*(n[1]-t[1])<(e[1]-t[1])*(n[0]-t[0])}function Tr(n,t,e,r){var u=n[0],i=e[0],o=t[0]-u,a=r[0]-i,c=n[1],l=e[1],s=t[1]-c,f=r[1]-l,h=(a*(c-l)-f*(u-i))/(f*o-a*s);return[u+h*o,c+h*s]}function qr(n){var t=n[0],e=n[n.length-1];return!(t[0]-e[0]||t[1]-e[1])}function 
Rr(){tu(this),this.edge=this.site=this.circle=null}function Dr(n){var t=Kc.pop()||new Rr;return t.site=n,t}function Pr(n){Xr(n),Wc.remove(n),Kc.push(n),tu(n)}function Ur(n){var t=n.circle,e=t.x,r=t.cy,u={x:e,y:r},i=n.P,o=n.N,a=[n];Pr(n);for(var c=i;c.circle&&ca(e-c.circle.x)s;++s)l=a[s],c=a[s-1],Kr(l.edge,c.site,l.site,u);c=a[0],l=a[f-1],l.edge=Jr(c.site,l.site,null,u),Vr(c),Vr(l)}function jr(n){for(var t,e,r,u,i=n.x,o=n.y,a=Wc._;a;)if(r=Fr(a,o)-i,r>Na)a=a.L;else{if(u=i-Hr(a,o),!(u>Na)){r>-Na?(t=a.P,e=a):u>-Na?(t=a,e=a.N):t=e=a;break}if(!a.R){t=a;break}a=a.R}var c=Dr(n);if(Wc.insert(t,c),t||e){if(t===e)return Xr(t),e=Dr(t.site),Wc.insert(c,e),c.edge=e.edge=Jr(t.site,c.site),Vr(t),Vr(e),void 0;if(!e)return c.edge=Jr(t.site,c.site),void 0;Xr(t),Xr(e);var l=t.site,s=l.x,f=l.y,h=n.x-s,g=n.y-f,p=e.site,v=p.x-s,d=p.y-f,m=2*(h*d-g*v),y=h*h+g*g,x=v*v+d*d,M={x:(d*y-g*x)/m+s,y:(h*x-v*y)/m+f};Kr(e.edge,l,p,M),c.edge=Jr(l,n,null,M),e.edge=Jr(n,p,null,M),Vr(t),Vr(e)}}function Fr(n,t){var e=n.site,r=e.x,u=e.y,i=u-t;if(!i)return r;var o=n.P;if(!o)return-1/0;e=o.site;var a=e.x,c=e.y,l=c-t;if(!l)return a;var s=a-r,f=1/i-1/l,h=s/l;return f?(-h+Math.sqrt(h*h-2*f*(s*s/(-2*l)-c+l/2+u-i/2)))/f+r:(r+a)/2}function Hr(n,t){var e=n.N;if(e)return Fr(e,t);var r=n.site;return r.y===t?r.x:1/0}function Or(n){this.site=n,this.edges=[]}function Yr(n){for(var t,e,r,u,i,o,a,c,l,s,f=n[0][0],h=n[1][0],g=n[0][1],p=n[1][1],v=Bc,d=v.length;d--;)if(i=v[d],i&&i.prepare())for(a=i.edges,c=a.length,o=0;c>o;)s=a[o].end(),r=s.x,u=s.y,l=a[++o%c].start(),t=l.x,e=l.y,(ca(r-t)>Na||ca(u-e)>Na)&&(a.splice(o,0,new Qr(Gr(i.site,s,ca(r-f)Na?{x:f,y:ca(t-f)Na?{x:ca(e-p)Na?{x:h,y:ca(t-h)Na?{x:ca(e-g)=-za)){var g=c*c+l*l,p=s*s+f*f,v=(f*g-l*p)/h,d=(c*p-s*g)/h,f=d+a,m=Qc.pop()||new Zr;m.arc=n,m.site=u,m.x=v+o,m.y=f+Math.sqrt(v*v+d*d),m.cy=f,n.circle=m;for(var y=null,x=Gc._;x;)if(m.yd||d>=a)return;if(h>p){if(i){if(i.y>=l)return}else i={x:d,y:c};e={x:d,y:l}}else{if(i){if(i.yr||r>1)if(h>p){if(i){if(i.y>=l)return}else 
i={x:(c-u)/r,y:c};e={x:(l-u)/r,y:l}}else{if(i){if(i.yg){if(i){if(i.x>=a)return}else i={x:o,y:r*o+u};e={x:a,y:r*a+u}}else{if(i){if(i.xi&&(u=t.slice(i,u),a[o]?a[o]+=u:a[++o]=u),(e=e[0])===(r=r[0])?a[o]?a[o]+=r:a[++o]=r:(a[++o]=null,c.push({i:o,x:pu(e,r)})),i=el.lastIndex;return ir;++r)a[(e=c[r]).i]=e.x(n);return a.join("")})}function du(n,t){for(var e,r=Bo.interpolators.length;--r>=0&&!(e=Bo.interpolators[r](n,t)););return e}function mu(n,t){var e,r=[],u=[],i=n.length,o=t.length,a=Math.min(n.length,t.length);for(e=0;a>e;++e)r.push(du(n[e],t[e]));for(;i>e;++e)u[e]=n[e];for(;o>e;++e)u[e]=t[e];return function(n){for(e=0;a>e;++e)u[e]=r[e](n);return u}}function yu(n){return function(t){return 0>=t?0:t>=1?1:n(t)}}function xu(n){return function(t){return 1-n(1-t)}}function Mu(n){return function(t){return.5*(.5>t?n(2*t):2-n(2-2*t))}}function _u(n){return n*n}function bu(n){return n*n*n}function wu(n){if(0>=n)return 0;if(n>=1)return 1;var t=n*n,e=t*n;return 4*(.5>n?e:3*(n-t)+e-.75)}function Su(n){return function(t){return Math.pow(t,n)}}function ku(n){return 1-Math.cos(n*Ca)}function Eu(n){return Math.pow(2,10*(n-1))}function Au(n){return 1-Math.sqrt(1-n*n)}function Cu(n,t){var e;return arguments.length<2&&(t=.45),arguments.length?e=t/Aa*Math.asin(1/n):(n=1,e=t/4),function(r){return 1+n*Math.pow(2,-10*r)*Math.sin((r-e)*Aa/t)}}function Nu(n){return n||(n=1.70158),function(t){return t*t*((n+1)*t-n)}}function zu(n){return 1/2.75>n?7.5625*n*n:2/2.75>n?7.5625*(n-=1.5/2.75)*n+.75:2.5/2.75>n?7.5625*(n-=2.25/2.75)*n+.9375:7.5625*(n-=2.625/2.75)*n+.984375}function Lu(n,t){n=Bo.hcl(n),t=Bo.hcl(t);var e=n.h,r=n.c,u=n.l,i=t.h-e,o=t.c-r,a=t.l-u;return isNaN(o)&&(o=0,r=isNaN(r)?t.c:r),isNaN(i)?(i=0,e=isNaN(e)?t.h:e):i>180?i-=360:-180>i&&(i+=360),function(n){return lt(e+i*n,r+o*n,u+a*n)+""}}function Tu(n,t){n=Bo.hsl(n),t=Bo.hsl(t);var e=n.h,r=n.s,u=n.l,i=t.h-e,o=t.s-r,a=t.l-u;return 
isNaN(o)&&(o=0,r=isNaN(r)?t.s:r),isNaN(i)?(i=0,e=isNaN(e)?t.h:e):i>180?i-=360:-180>i&&(i+=360),function(n){return at(e+i*n,r+o*n,u+a*n)+""}}function qu(n,t){n=Bo.lab(n),t=Bo.lab(t);var e=n.l,r=n.a,u=n.b,i=t.l-e,o=t.a-r,a=t.b-u;return function(n){return ft(e+i*n,r+o*n,u+a*n)+""}}function Ru(n,t){return t-=n,function(e){return Math.round(n+t*e)}}function Du(n){var t=[n.a,n.b],e=[n.c,n.d],r=Uu(t),u=Pu(t,e),i=Uu(ju(e,t,-u))||0;t[0]*e[1]180?s+=360:s-l>180&&(l+=360),u.push({i:r.push(r.pop()+"rotate(",null,")")-2,x:pu(l,s)})):s&&r.push(r.pop()+"rotate("+s+")"),f!=h?u.push({i:r.push(r.pop()+"skewX(",null,")")-2,x:pu(f,h)}):h&&r.push(r.pop()+"skewX("+h+")"),g[0]!=p[0]||g[1]!=p[1]?(e=r.push(r.pop()+"scale(",null,",",null,")"),u.push({i:e-4,x:pu(g[0],p[0])},{i:e-2,x:pu(g[1],p[1])})):(1!=p[0]||1!=p[1])&&r.push(r.pop()+"scale("+p+")"),e=u.length,function(n){for(var t,i=-1;++i=0;)e.push(u[r])}function Ku(n,t){for(var e=[n],r=[];null!=(n=e.pop());)if(r.push(n),(i=n.children)&&(u=i.length))for(var u,i,o=-1;++oe;++e)(t=n[e][1])>u&&(r=e,u=t);return r}function li(n){return n.reduce(si,0)}function si(n,t){return n+t[1]}function fi(n,t){return hi(n,Math.ceil(Math.log(t.length)/Math.LN2+1))}function hi(n,t){for(var e=-1,r=+n[0],u=(n[1]-r)/t,i=[];++e<=t;)i[e]=u*e+r;return i}function gi(n){return[Bo.min(n),Bo.max(n)]}function pi(n,t){return n.value-t.value}function vi(n,t){var e=n._pack_next;n._pack_next=t,t._pack_prev=n,t._pack_next=e,e._pack_prev=t}function di(n,t){n._pack_next=t,t._pack_prev=n}function mi(n,t){var e=t.x-n.x,r=t.y-n.y,u=n.r+t.r;return.999*u*u>e*e+r*r}function yi(n){function t(n){s=Math.min(n.x-n.r,s),f=Math.max(n.x+n.r,f),h=Math.min(n.y-n.r,h),g=Math.max(n.y+n.r,g)}if((e=n.children)&&(l=e.length)){var e,r,u,i,o,a,c,l,s=1/0,f=-1/0,h=1/0,g=-1/0;if(e.forEach(xi),r=e[0],r.x=-r.r,r.y=0,t(r),l>1&&(u=e[1],u.x=u.r,u.y=0,t(u),l>2))for(i=e[2],bi(r,u,i),t(i),vi(r,i),r._pack_prev=i,vi(i,u),u=r._pack_next,o=3;l>o;o++){bi(r,u,i=e[o]);var 
p=0,v=1,d=1;for(a=u._pack_next;a!==u;a=a._pack_next,v++)if(mi(a,i)){p=1;break}if(1==p)for(c=r._pack_prev;c!==a._pack_prev&&!mi(c,i);c=c._pack_prev,d++);p?(d>v||v==d&&u.ro;o++)i=e[o],i.x-=m,i.y-=y,x=Math.max(x,i.r+Math.sqrt(i.x*i.x+i.y*i.y));n.r=x,e.forEach(Mi)}}function xi(n){n._pack_next=n._pack_prev=n}function Mi(n){delete n._pack_next,delete n._pack_prev}function _i(n,t,e,r){var u=n.children;if(n.x=t+=r*n.x,n.y=e+=r*n.y,n.r*=r,u)for(var i=-1,o=u.length;++i=0;)t=u[i],t.z+=e,t.m+=e,e+=t.s+(r+=t.c)}function Ci(n,t,e){return n.a.parent===t.parent?n.a:e}function Ni(n){return 1+Bo.max(n,function(n){return n.y})}function zi(n){return n.reduce(function(n,t){return n+t.x},0)/n.length}function Li(n){var t=n.children;return t&&t.length?Li(t[0]):n}function Ti(n){var t,e=n.children;return e&&(t=e.length)?Ti(e[t-1]):n}function qi(n){return{x:n.x,y:n.y,dx:n.dx,dy:n.dy}}function Ri(n,t){var e=n.x+t[3],r=n.y+t[0],u=n.dx-t[1]-t[3],i=n.dy-t[0]-t[2];return 0>u&&(e+=u/2,u=0),0>i&&(r+=i/2,i=0),{x:e,y:r,dx:u,dy:i}}function Di(n){var t=n[0],e=n[n.length-1];return e>t?[t,e]:[e,t]}function Pi(n){return n.rangeExtent?n.rangeExtent():Di(n.range())}function Ui(n,t,e,r){var u=e(n[0],n[1]),i=r(t[0],t[1]);return function(n){return i(u(n))}}function ji(n,t){var e,r=0,u=n.length-1,i=n[r],o=n[u];return i>o&&(e=r,r=u,u=e,e=i,i=o,o=e),n[r]=t.floor(i),n[u]=t.ceil(o),n}function Fi(n){return n?{floor:function(t){return Math.floor(t/n)*n},ceil:function(t){return Math.ceil(t/n)*n}}:gl}function Hi(n,t,e,r){var u=[],i=[],o=0,a=Math.min(n.length,t.length)-1;for(n[a]2?Hi:Ui,c=r?Ou:Hu;return o=u(n,t,c,e),a=u(t,n,c,du),i}function i(n){return o(n)}var o,a;return i.invert=function(n){return a(n)},i.domain=function(t){return arguments.length?(n=t.map(Number),u()):n},i.range=function(n){return arguments.length?(t=n,u()):t},i.rangeRound=function(n){return i.range(n).interpolate(Ru)},i.clamp=function(n){return arguments.length?(r=n,u()):r},i.interpolate=function(n){return 
arguments.length?(e=n,u()):e},i.ticks=function(t){return Vi(n,t)},i.tickFormat=function(t,e){return Xi(n,t,e)},i.nice=function(t){return Ii(n,t),u()},i.copy=function(){return Oi(n,t,e,r)},u()}function Yi(n,t){return Bo.rebind(n,t,"range","rangeRound","interpolate","clamp")}function Ii(n,t){return ji(n,Fi(Zi(n,t)[2]))}function Zi(n,t){null==t&&(t=10);var e=Di(n),r=e[1]-e[0],u=Math.pow(10,Math.floor(Math.log(r/t)/Math.LN10)),i=t/r*u;return.15>=i?u*=10:.35>=i?u*=5:.75>=i&&(u*=2),e[0]=Math.ceil(e[0]/u)*u,e[1]=Math.floor(e[1]/u)*u+.5*u,e[2]=u,e}function Vi(n,t){return Bo.range.apply(Bo,Zi(n,t))}function Xi(n,t,e){var r=Zi(n,t);if(e){var u=tc.exec(e);if(u.shift(),"s"===u[8]){var i=Bo.formatPrefix(Math.max(ca(r[0]),ca(r[1])));return u[7]||(u[7]="."+$i(i.scale(r[2]))),u[8]="f",e=Bo.format(u.join("")),function(n){return e(i.scale(n))+i.symbol}}u[7]||(u[7]="."+Bi(u[8],r)),e=u.join("")}else e=",."+$i(r[2])+"f";return Bo.format(e)}function $i(n){return-Math.floor(Math.log(n)/Math.LN10+.01)}function Bi(n,t){var e=$i(t[2]);return n in pl?Math.abs(e-$i(Math.max(ca(t[0]),ca(t[1]))))+ +("e"!==n):e-2*("%"===n)}function Wi(n,t,e,r){function u(n){return(e?Math.log(0>n?0:n):-Math.log(n>0?0:-n))/Math.log(t)}function i(n){return e?Math.pow(t,n):-Math.pow(t,-n)}function o(t){return n(u(t))}return o.invert=function(t){return i(n.invert(t))},o.domain=function(t){return arguments.length?(e=t[0]>=0,n.domain((r=t.map(Number)).map(u)),o):r},o.base=function(e){return arguments.length?(t=+e,n.domain(r.map(u)),o):t},o.nice=function(){var t=ji(r.map(u),e?Math:dl);return n.domain(t),r=t.map(i),o},o.ticks=function(){var n=Di(r),o=[],a=n[0],c=n[1],l=Math.floor(u(a)),s=Math.ceil(u(c)),f=t%1?2:t;if(isFinite(s-l)){if(e){for(;s>l;l++)for(var h=1;f>h;h++)o.push(i(l)*h);o.push(i(l))}else for(o.push(i(l));l++0;h--)o.push(i(l)*h);for(l=0;o[l]c;s--);o=o.slice(l,s)}return o},o.tickFormat=function(n,t){if(!arguments.length)return vl;arguments.length<2?t=vl:"function"!=typeof t&&(t=Bo.format(t));var 
r,a=Math.max(.1,n/o.ticks().length),c=e?(r=1e-12,Math.ceil):(r=-1e-12,Math.floor);return function(n){return n/i(c(u(n)+r))<=a?t(n):""}},o.copy=function(){return Wi(n.copy(),t,e,r)},Yi(o,n)}function Ji(n,t,e){function r(t){return n(u(t))}var u=Gi(t),i=Gi(1/t);return r.invert=function(t){return i(n.invert(t))},r.domain=function(t){return arguments.length?(n.domain((e=t.map(Number)).map(u)),r):e},r.ticks=function(n){return Vi(e,n)},r.tickFormat=function(n,t){return Xi(e,n,t)},r.nice=function(n){return r.domain(Ii(e,n))},r.exponent=function(o){return arguments.length?(u=Gi(t=o),i=Gi(1/t),n.domain(e.map(u)),r):t},r.copy=function(){return Ji(n.copy(),t,e)},Yi(r,n)}function Gi(n){return function(t){return 0>t?-Math.pow(-t,n):Math.pow(t,n)}}function Ki(n,t){function e(e){return i[((u.get(e)||("range"===t.t?u.set(e,n.push(e)):0/0))-1)%i.length]}function r(t,e){return Bo.range(n.length).map(function(n){return t+e*n})}var u,i,o;return e.domain=function(r){if(!arguments.length)return n;n=[],u=new a;for(var i,o=-1,c=r.length;++on?[0/0,0/0]:[n>0?a[n-1]:r[0],nt?0/0:t/i+n,[t,t+1/i]},r.copy=function(){return no(n,t,e)},u()}function to(n,t){function e(e){return e>=e?t[Bo.bisect(n,e)]:void 0}return e.domain=function(t){return arguments.length?(n=t,e):n},e.range=function(n){return arguments.length?(t=n,e):t},e.invertExtent=function(e){return e=t.indexOf(e),[n[e-1],n[e]]},e.copy=function(){return to(n,t)},e}function eo(n){function t(n){return+n}return t.invert=t,t.domain=t.range=function(e){return arguments.length?(n=e.map(t),t):n},t.ticks=function(t){return Vi(n,t)},t.tickFormat=function(t,e){return Xi(n,t,e)},t.copy=function(){return eo(n)},t}function ro(n){return n.innerRadius}function uo(n){return n.outerRadius}function io(n){return n.startAngle}function oo(n){return n.endAngle}function ao(n){function t(t){function o(){l.push("M",i(n(s),a))}for(var c,l=[],s=[],f=-1,h=t.length,g=kt(e),p=kt(r);++f1&&u.push("H",r[0]),u.join("")}function fo(n){for(var 
t=0,e=n.length,r=n[0],u=[r[0],",",r[1]];++t1){a=t[1],i=n[c],c++,r+="C"+(u[0]+o[0])+","+(u[1]+o[1])+","+(i[0]-a[0])+","+(i[1]-a[1])+","+i[0]+","+i[1];for(var l=2;l9&&(u=3*t/Math.sqrt(u),o[a]=u*e,o[a+1]=u*r));for(a=-1;++a<=c;)u=(n[Math.min(c,a+1)][0]-n[Math.max(0,a-1)][0])/(6*(1+o[a]*o[a])),i.push([u||0,o[a]*u||0]);return i}function Co(n){return n.length<3?co(n):n[0]+mo(n,Ao(n))}function No(n){for(var t,e,r,u=-1,i=n.length;++ue?l():(u.active=e,i.event&&i.event.start.call(n,s,t),i.tween.forEach(function(e,r){(r=r.call(n,s,t))&&v.push(r) }),Bo.timer(function(){return p.c=c(r||1)?Ae:c,1},0,o),void 0)}function c(r){if(u.active!==e)return l();for(var o=r/g,a=f(o),c=v.length;c>0;)v[--c].call(n,a);return o>=1?(i.event&&i.event.end.call(n,s,t),l()):void 0}function l(){return--u.count?delete u[e]:delete n.__transition__,1}var s=n.__data__,f=i.ease,h=i.delay,g=i.duration,p=Ka,v=[];return p.t=h+o,r>=h?a(r-h):(p.c=a,void 0)},0,o)}}function Oo(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate("+(isFinite(r)?r:e(n))+",0)"})}function Yo(n,t,e){n.attr("transform",function(n){var r=t(n);return"translate(0,"+(isFinite(r)?r:e(n))+")"})}function Io(n){return n.toISOString()}function Zo(n,t,e){function r(t){return n(t)}function u(n,e){var r=n[1]-n[0],u=r/e,i=Bo.bisect(Ol,u);return i==Ol.length?[t.year,Zi(n.map(function(n){return n/31536e6}),e)[2]]:i?t[u/Ol[i-1]1?{floor:function(t){for(;e(t=n.floor(t));)t=Vo(t-1);return t},ceil:function(t){for(;e(t=n.ceil(t));)t=Vo(+t+1);return t}}:n))},r.ticks=function(n,t){var e=Di(r.domain()),i=null==n?u(e,10):"number"==typeof n?u(e,n):!n.range&&[{range:n},t];return i&&(n=i[0],t=i[1]),n.range(e[0],Vo(+e[1]+1),1>t?1:t)},r.tickFormat=function(){return e},r.copy=function(){return Zo(n.copy(),t,e)},Yi(r,n)}function Vo(n){return new Date(n)}function Xo(n){return JSON.parse(n.responseText)}function $o(n){var t=Go.createRange();return t.selectNode(Go.body),t.createContextualFragment(n.responseText)}var 
Bo={version:"3.4.13"};Date.now||(Date.now=function(){return+new Date});var Wo=[].slice,Jo=function(n){return Wo.call(n)},Go=document,Ko=Go.documentElement,Qo=window;try{Jo(Ko.childNodes)[0].nodeType}catch(na){Jo=function(n){for(var t=n.length,e=new Array(t);t--;)e[t]=n[t];return e}}try{Go.createElement("div").style.setProperty("opacity",0,"")}catch(ta){var ea=Qo.Element.prototype,ra=ea.setAttribute,ua=ea.setAttributeNS,ia=Qo.CSSStyleDeclaration.prototype,oa=ia.setProperty;ea.setAttribute=function(n,t){ra.call(this,n,t+"")},ea.setAttributeNS=function(n,t,e){ua.call(this,n,t,e+"")},ia.setProperty=function(n,t,e){oa.call(this,n,t+"",e)}}Bo.ascending=n,Bo.descending=function(n,t){return n>t?-1:t>n?1:t>=n?0:0/0},Bo.min=function(n,t){var e,r,u=-1,i=n.length;if(1===arguments.length){for(;++u=e);)e=void 0;for(;++ur&&(e=r)}else{for(;++u=e);)e=void 0;for(;++ur&&(e=r)}return e},Bo.max=function(n,t){var e,r,u=-1,i=n.length;if(1===arguments.length){for(;++u=e);)e=void 0;for(;++ue&&(e=r)}else{for(;++u=e);)e=void 0;for(;++ue&&(e=r)}return e},Bo.extent=function(n,t){var e,r,u,i=-1,o=n.length;if(1===arguments.length){for(;++i=e);)e=u=void 0;for(;++ir&&(e=r),r>u&&(u=r))}else{for(;++i=e);)e=void 0;for(;++ir&&(e=r),r>u&&(u=r))}return[e,u]},Bo.sum=function(n,t){var r,u=0,i=n.length,o=-1;if(1===arguments.length)for(;++or?0:r);r>e;)i[e]=[t=u,u=n[++e]];return i},Bo.zip=function(){if(!(r=arguments.length))return[];for(var n=-1,t=Bo.min(arguments,u),e=new Array(t);++n=0;)for(r=n[u],t=r.length;--t>=0;)e[--o]=r[t];return e};var ca=Math.abs;Bo.range=function(n,t,e){if(arguments.length<3&&(e=1,arguments.length<2&&(t=n,n=0)),1/0===(t-n)/e)throw new Error("infinite range");var r,u=[],o=i(ca(e)),a=-1;if(n*=o,t*=o,e*=o,0>e)for(;(r=n+e*++a)>t;)u.push(r/o);else for(;(r=n+e*++a)=i.length)return r?r.call(u,o):e?o.sort(e):o;for(var l,s,f,h,g=-1,p=o.length,v=i[c++],d=new a;++g=i.length)return n;var r=[],u=o[e++];return n.forEach(function(n,u){r.push({key:n,values:t(u,e)})}),u?r.sort(function(n,t){return 
u(n.key,t.key)}):r}var e,r,u={},i=[],o=[];return u.map=function(t,e){return n(e,t,0)},u.entries=function(e){return t(n(Bo.map,e,0),0)},u.key=function(n){return i.push(n),u},u.sortKeys=function(n){return o[i.length-1]=n,u},u.sortValues=function(n){return e=n,u},u.rollup=function(n){return r=n,u},u},Bo.set=function(n){var t=new v;if(n)for(var e=0,r=n.length;r>e;++e)t.add(n[e]);return t},o(v,{has:s,add:function(n){return this._[c(n+="")]=!0,n},remove:f,values:h,size:g,empty:p,forEach:function(n){for(var t in this._)n.call(this,l(t))}}),Bo.behavior={},Bo.rebind=function(n,t){for(var e,r=1,u=arguments.length;++r=0&&(r=n.slice(e+1),n=n.slice(0,e)),n)return arguments.length<2?this[n].on(r):this[n].on(r,t);if(2===arguments.length){if(null==t)for(n in this)this.hasOwnProperty(n)&&this[n].on(r,null);return this}},Bo.event=null,Bo.requote=function(n){return n.replace(ha,"\\$&")};var ha=/[\\\^\$\*\+\?\|\[\]\(\)\.\{\}]/g,ga={}.__proto__?function(n,t){n.__proto__=t}:function(n,t){for(var e in t)n[e]=t[e]},pa=function(n,t){return t.querySelector(n)},va=function(n,t){return t.querySelectorAll(n)},da=Ko.matches||Ko[m(Ko,"matchesSelector")],ma=function(n,t){return da.call(n,t)};"function"==typeof Sizzle&&(pa=function(n,t){return Sizzle(n,t)[0]||null},va=Sizzle,ma=Sizzle.matchesSelector),Bo.selection=function(){return _a};var ya=Bo.selection.prototype=[];ya.select=function(n){var t,e,r,u,i=[];n=k(n);for(var o=-1,a=this.length;++o=0&&(e=n.slice(0,t),n=n.slice(t+1)),xa.hasOwnProperty(e)?{space:xa[e],local:n}:n}},ya.attr=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node();return n=Bo.ns.qualify(n),n.local?e.getAttributeNS(n.space,n.local):e.getAttribute(n)}for(t in n)this.each(A(t,n[t]));return this}return this.each(A(n,t))},ya.classed=function(n,t){if(arguments.length<2){if("string"==typeof n){var e=this.node(),r=(n=z(n)).length,u=-1;if(t=e.classList){for(;++ur){if("string"!=typeof n){2>r&&(t="");for(e in n)this.each(q(e,n[e],t));return this}if(2>r)return 
Qo.getComputedStyle(this.node(),null).getPropertyValue(n);e=""}return this.each(q(n,t,e))},ya.property=function(n,t){if(arguments.length<2){if("string"==typeof n)return this.node()[n];for(t in n)this.each(R(t,n[t]));return this}return this.each(R(n,t))},ya.text=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.textContent=null==t?"":t}:null==n?function(){this.textContent=""}:function(){this.textContent=n}):this.node().textContent},ya.html=function(n){return arguments.length?this.each("function"==typeof n?function(){var t=n.apply(this,arguments);this.innerHTML=null==t?"":t}:null==n?function(){this.innerHTML=""}:function(){this.innerHTML=n}):this.node().innerHTML},ya.append=function(n){return n=D(n),this.select(function(){return this.appendChild(n.apply(this,arguments))})},ya.insert=function(n,t){return n=D(n),t=k(t),this.select(function(){return this.insertBefore(n.apply(this,arguments),t.apply(this,arguments)||null)})},ya.remove=function(){return this.each(function(){var n=this.parentNode;n&&n.removeChild(this)})},ya.data=function(n,t){function e(n,e){var r,u,i,o=n.length,f=e.length,h=Math.min(o,f),g=new Array(f),p=new Array(f),v=new Array(o);if(t){var d,m=new a,y=new Array(o);for(r=-1;++rr;++r)p[r]=P(e[r]);for(;o>r;++r)v[r]=n[r]}p.update=g,p.parentNode=g.parentNode=v.parentNode=n.parentNode,c.push(p),l.push(g),s.push(v)}var r,u,i=-1,o=this.length;if(!arguments.length){for(n=new Array(o=(r=this[0]).length);++ii;i++){u.push(t=[]),t.parentNode=(e=this[i]).parentNode;for(var a=0,c=e.length;c>a;a++)(r=e[a])&&n.call(r,r.__data__,a,i)&&t.push(r)}return S(u)},ya.order=function(){for(var n=-1,t=this.length;++n=0;)(e=r[u])&&(i&&i!==e.nextSibling&&i.parentNode.insertBefore(e,i),i=e);return this},ya.sort=function(n){n=j.apply(this,arguments);for(var t=-1,e=this.length;++tn;n++)for(var e=this[n],r=0,u=e.length;u>r;r++){var i=e[r];if(i)return i}return null},ya.size=function(){var n=0;return 
F(this,function(){++n}),n};var Ma=[];Bo.selection.enter=H,Bo.selection.enter.prototype=Ma,Ma.append=ya.append,Ma.empty=ya.empty,Ma.node=ya.node,Ma.call=ya.call,Ma.size=ya.size,Ma.select=function(n){for(var t,e,r,u,i,o=[],a=-1,c=this.length;++ar){if("string"!=typeof n){2>r&&(t=!1);for(e in n)this.each(I(e,n[e],t));return this}if(2>r)return(r=this.node()["__on"+n])&&r._;e=!1}return this.each(I(n,t,e))};var ba=Bo.map({mouseenter:"mouseover",mouseleave:"mouseout"});ba.forEach(function(n){"on"+n in Go&&ba.remove(n)});var wa="onselectstart"in Go?null:m(Ko.style,"userSelect"),Sa=0;Bo.mouse=function(n){return $(n,b())};var ka=/WebKit/.test(Qo.navigator.userAgent)?-1:0;Bo.touch=function(n,t,e){if(arguments.length<3&&(e=t,t=b().changedTouches),t)for(var r,u=0,i=t.length;i>u;++u)if((r=t[u]).identifier===e)return $(n,r)},Bo.behavior.drag=function(){function n(){this.on("mousedown.drag",u).on("touchstart.drag",i)}function t(n,t,u,i,o){return function(){function a(){var n,e,r=t(h,v);r&&(n=r[0]-x[0],e=r[1]-x[1],p|=n|e,x=r,g({type:"drag",x:r[0]+l[0],y:r[1]+l[1],dx:n,dy:e}))}function c(){t(h,v)&&(m.on(i+d,null).on(o+d,null),y(p&&Bo.event.target===f),g({type:"dragend"}))}var l,s=this,f=Bo.event.target,h=s.parentNode,g=e.of(s,arguments),p=0,v=n(),d=".drag"+(null==v?"":"-"+v),m=Bo.select(u()).on(i+d,a).on(o+d,c),y=X(),x=t(h,v);r?(l=r.apply(s,arguments),l=[l.x-x[0],l.y-x[1]]):l=[0,0],g({type:"dragstart"})}}var e=w(n,"drag","dragstart","dragend"),r=null,u=t(y,Bo.mouse,J,"mousemove","mouseup"),i=t(B,Bo.touch,W,"touchmove","touchend");return n.origin=function(t){return arguments.length?(r=t,n):r},Bo.rebind(n,e,"on")},Bo.touches=function(n,t){return arguments.length<2&&(t=b().touches),t?Jo(t).map(function(t){var e=$(n,t);return e.identifier=t.identifier,e}):[]};var Ea=Math.PI,Aa=2*Ea,Ca=Ea/2,Na=1e-6,za=Na*Na,La=Ea/180,Ta=180/Ea,qa=Math.SQRT2,Ra=2,Da=4;Bo.interpolateZoom=function(n,t){function e(n){var t=n*y;if(m){var 
e=et(v),o=i/(Ra*h)*(e*rt(qa*t+v)-tt(v));return[r+o*l,u+o*s,i*e/et(qa*t+v)]}return[r+n*l,u+n*s,i*Math.exp(qa*t)]}var r=n[0],u=n[1],i=n[2],o=t[0],a=t[1],c=t[2],l=o-r,s=a-u,f=l*l+s*s,h=Math.sqrt(f),g=(c*c-i*i+Da*f)/(2*i*Ra*h),p=(c*c-i*i-Da*f)/(2*c*Ra*h),v=Math.log(Math.sqrt(g*g+1)-g),d=Math.log(Math.sqrt(p*p+1)-p),m=d-v,y=(m||Math.log(c/i))/qa;return e.duration=1e3*y,e},Bo.behavior.zoom=function(){function n(n){n.on(A,l).on(ja+".zoom",f).on("dblclick.zoom",h).on(z,s)}function t(n){return[(n[0]-S.x)/S.k,(n[1]-S.y)/S.k]}function e(n){return[n[0]*S.k+S.x,n[1]*S.k+S.y]}function r(n){S.k=Math.max(E[0],Math.min(E[1],n))}function u(n,t){t=e(t),S.x+=n[0]-t[0],S.y+=n[1]-t[1]}function i(){x&&x.domain(y.range().map(function(n){return(n-S.x)/S.k}).map(y.invert)),b&&b.domain(M.range().map(function(n){return(n-S.y)/S.k}).map(M.invert))}function o(n){n({type:"zoomstart"})}function a(n){i(),n({type:"zoom",scale:S.k,translate:[S.x,S.y]})}function c(n){n({type:"zoomend"})}function l(){function n(){s=1,u(Bo.mouse(r),h),a(l)}function e(){f.on(C,null).on(N,null),g(s&&Bo.event.target===i),c(l)}var r=this,i=Bo.event.target,l=L.of(r,arguments),s=0,f=Bo.select(Qo).on(C,n).on(N,e),h=t(Bo.mouse(r)),g=X();Y.call(r),o(l)}function s(){function n(){var n=Bo.touches(g);return h=S.k,n.forEach(function(n){n.identifier in v&&(v[n.identifier]=t(n))}),n}function e(){var t=Bo.event.target;Bo.select(t).on(x,i).on(M,f),b.push(t);for(var e=Bo.event.changedTouches,o=0,c=e.length;c>o;++o)v[e[o].identifier]=null;var l=n(),s=Date.now();if(1===l.length){if(500>s-m){var h=l[0],g=v[h.identifier];r(2*S.k),u(h,g),_(),a(p)}m=s}else if(l.length>1){var h=l[0],y=l[1],w=h[0]-y[0],k=h[1]-y[1];d=w*w+k*k}}function i(){for(var n,t,e,i,o=Bo.touches(g),c=0,l=o.length;l>c;++c,i=null)if(e=o[c],i=v[e.identifier]){if(t)break;n=e,t=i}if(i){var s=(s=e[0]-n[0])*s+(s=e[1]-n[1])*s,f=d&&Math.sqrt(s/d);n=[(n[0]+e[0])/2,(n[1]+e[1])/2],t=[(t[0]+i[0])/2,(t[1]+i[1])/2],r(f*h)}m=null,u(n,t),a(p)}function f(){if(Bo.event.touches.length){for(var 
t=Bo.event.changedTouches,e=0,r=t.length;r>e;++e)delete v[t[e].identifier];for(var u in v)return void n()}Bo.selectAll(b).on(y,null),w.on(A,l).on(z,s),k(),c(p)}var h,g=this,p=L.of(g,arguments),v={},d=0,y=".zoom-"+Bo.event.changedTouches[0].identifier,x="touchmove"+y,M="touchend"+y,b=[],w=Bo.select(g),k=X();Y.call(g),e(),o(p),w.on(A,null).on(z,e)}function f(){var n=L.of(this,arguments);d?clearTimeout(d):(g=t(p=v||Bo.mouse(this)),Y.call(this),o(n)),d=setTimeout(function(){d=null,c(n)},50),_(),r(Math.pow(2,.002*Pa())*S.k),u(p,g),a(n)}function h(){var n=L.of(this,arguments),e=Bo.mouse(this),i=t(e),l=Math.log(S.k)/Math.LN2;o(n),r(Math.pow(2,Bo.event.shiftKey?Math.ceil(l)-1:Math.floor(l)+1)),u(e,i),a(n),c(n)}var g,p,v,d,m,y,x,M,b,S={x:0,y:0,k:1},k=[960,500],E=Ua,A="mousedown.zoom",C="mousemove.zoom",N="mouseup.zoom",z="touchstart.zoom",L=w(n,"zoomstart","zoom","zoomend");return n.event=function(n){n.each(function(){var n=L.of(this,arguments),t=S;Cl?Bo.select(this).transition().each("start.zoom",function(){S=this.__chart__||{x:0,y:0,k:1},o(n)}).tween("zoom:zoom",function(){var e=k[0],r=k[1],u=e/2,i=r/2,o=Bo.interpolateZoom([(u-S.x)/S.k,(i-S.y)/S.k,e/S.k],[(u-t.x)/t.k,(i-t.y)/t.k,e/t.k]);return function(t){var r=o(t),c=e/r[2];this.__chart__=S={x:u-r[0]*c,y:i-r[1]*c,k:c},a(n)}}).each("end.zoom",function(){c(n)}):(this.__chart__=S,o(n),a(n),c(n))})},n.translate=function(t){return arguments.length?(S={x:+t[0],y:+t[1],k:S.k},i(),n):[S.x,S.y]},n.scale=function(t){return arguments.length?(S={x:S.x,y:S.y,k:+t},i(),n):S.k},n.scaleExtent=function(t){return arguments.length?(E=null==t?Ua:[+t[0],+t[1]],n):E},n.center=function(t){return arguments.length?(v=t&&[+t[0],+t[1]],n):v},n.size=function(t){return arguments.length?(k=t&&[+t[0],+t[1]],n):k},n.x=function(t){return arguments.length?(x=t,y=t.copy(),S={x:0,y:0,k:1},n):x},n.y=function(t){return arguments.length?(b=t,M=t.copy(),S={x:0,y:0,k:1},n):b},Bo.rebind(n,L,"on")};var Pa,Ua=[0,1/0],ja="onwheel"in 
Go?(Pa=function(){return-Bo.event.deltaY*(Bo.event.deltaMode?120:1)},"wheel"):"onmousewheel"in Go?(Pa=function(){return Bo.event.wheelDelta},"mousewheel"):(Pa=function(){return-Bo.event.detail},"MozMousePixelScroll");Bo.color=it,it.prototype.toString=function(){return this.rgb()+""},Bo.hsl=ot;var Fa=ot.prototype=new it;Fa.brighter=function(n){return n=Math.pow(.7,arguments.length?n:1),new ot(this.h,this.s,this.l/n)},Fa.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new ot(this.h,this.s,n*this.l)},Fa.rgb=function(){return at(this.h,this.s,this.l)},Bo.hcl=ct;var Ha=ct.prototype=new it;Ha.brighter=function(n){return new ct(this.h,this.c,Math.min(100,this.l+Oa*(arguments.length?n:1)))},Ha.darker=function(n){return new ct(this.h,this.c,Math.max(0,this.l-Oa*(arguments.length?n:1)))},Ha.rgb=function(){return lt(this.h,this.c,this.l).rgb()},Bo.lab=st;var Oa=18,Ya=.95047,Ia=1,Za=1.08883,Va=st.prototype=new it;Va.brighter=function(n){return new st(Math.min(100,this.l+Oa*(arguments.length?n:1)),this.a,this.b)},Va.darker=function(n){return new st(Math.max(0,this.l-Oa*(arguments.length?n:1)),this.a,this.b)},Va.rgb=function(){return ft(this.l,this.a,this.b)},Bo.rgb=dt;var Xa=dt.prototype=new it;Xa.brighter=function(n){n=Math.pow(.7,arguments.length?n:1);var t=this.r,e=this.g,r=this.b,u=30;return t||e||r?(t&&u>t&&(t=u),e&&u>e&&(e=u),r&&u>r&&(r=u),new dt(Math.min(255,t/n),Math.min(255,e/n),Math.min(255,r/n))):new dt(u,u,u)},Xa.darker=function(n){return n=Math.pow(.7,arguments.length?n:1),new dt(n*this.r,n*this.g,n*this.b)},Xa.hsl=function(){return _t(this.r,this.g,this.b)},Xa.toString=function(){return"#"+xt(this.r)+xt(this.g)+xt(this.b)};var 
$a=Bo.map({aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:16752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:
14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074});$a.forEach(function(n,t){$a.set(n,mt(t))}),Bo.functor=kt,Bo.xhr=At(Et),Bo.dsv=function(n,t){function e(n,e,i){arguments.length<3&&(i=e,e=null);var o=Ct(n,t,null==e?r:u(e),i);return o.row=function(n){return arguments.length?o.response(null==(e=n)?r:u(n)):e},o}function r(n){return e.parse(n.responseText)}function u(n){return function(t){return e.parse(t.responseText,n)}}function i(t){return t.map(o).join(n)}function o(n){return a.test(n)?'"'+n.replace(/\"/g,'""')+'"':n}var a=new RegExp('["'+n+"\n]"),c=n.charCodeAt(0);return e.parse=function(n,t){var r;return e.parseRows(n,function(n,e){if(r)return r(n,e-1);var u=new Function("d","return {"+n.map(function(n,t){return JSON.stringify(n)+": d["+t+"]"}).join(",")+"}");r=t?function(n,e){return t(u(n),e)}:u})},e.parseRows=function(n,t){function e(){if(s>=l)return o;if(u)return u=!1,i;var t=s;if(34===n.charCodeAt(t)){for(var e=t;e++s;){var r=n.charCodeAt(s++),a=1;if(10===r)u=!0;else if(13===r)u=!0,10===n.charCodeAt(s)&&(++s,++a);else if(r!==c)continue;return n.slice(t,s-a)}return n.slice(t)}for(var r,u,i={},o={},a=[],l=n.length,s=0,f=0;(r=e())!==o;){for(var h=[];r!==i&&r!==o;)h.push(r),r=e();t&&null==(h=t(h,f++))||a.push(h)}return a},e.format=function(t){if(Array.isArray(t[0]))return e.formatRows(t);var r=new v,u=[];return t.forEach(function(n){for(var 
t in n)r.has(t)||u.push(r.add(t))}),[u.map(o).join(n)].concat(t.map(function(t){return u.map(function(n){return o(t[n])}).join(n)})).join("\n")},e.formatRows=function(n){return n.map(i).join("\n")},e},Bo.csv=Bo.dsv(",","text/csv"),Bo.tsv=Bo.dsv(" ","text/tab-separated-values");var Ba,Wa,Ja,Ga,Ka,Qa=Qo[m(Qo,"requestAnimationFrame")]||function(n){setTimeout(n,17)};Bo.timer=function(n,t,e){var r=arguments.length;2>r&&(t=0),3>r&&(e=Date.now());var u=e+t,i={c:n,t:u,f:!1,n:null};Wa?Wa.n=i:Ba=i,Wa=i,Ja||(Ga=clearTimeout(Ga),Ja=1,Qa(Lt))},Bo.timer.flush=function(){Tt(),qt()},Bo.round=function(n,t){return t?Math.round(n*(t=Math.pow(10,t)))/t:Math.round(n)};var nc=["y","z","a","f","p","n","\xb5","m","","k","M","G","T","P","E","Z","Y"].map(Dt);Bo.formatPrefix=function(n,t){var e=0;return n&&(0>n&&(n*=-1),t&&(n=Bo.round(n,Rt(n,t))),e=1+Math.floor(1e-12+Math.log(n)/Math.LN10),e=Math.max(-24,Math.min(24,3*Math.floor((e-1)/3)))),nc[8+e/3]};var tc=/(?:([^{])?([<>=^]))?([+\- ])?([$#])?(0)?(\d+)?(,)?(\.-?\d+)?([a-z%])?/i,ec=Bo.map({b:function(n){return n.toString(2)},c:function(n){return String.fromCharCode(n)},o:function(n){return n.toString(8)},x:function(n){return n.toString(16)},X:function(n){return n.toString(16).toUpperCase()},g:function(n,t){return n.toPrecision(t)},e:function(n,t){return n.toExponential(t)},f:function(n,t){return n.toFixed(t)},r:function(n,t){return(n=Bo.round(n,Rt(n,t))).toFixed(Math.max(0,Math.min(20,Rt(n*(1+1e-15),t))))}}),rc=Bo.time={},uc=Date;jt.prototype={getDate:function(){return this._.getUTCDate()},getDay:function(){return this._.getUTCDay()},getFullYear:function(){return this._.getUTCFullYear()},getHours:function(){return this._.getUTCHours()},getMilliseconds:function(){return this._.getUTCMilliseconds()},getMinutes:function(){return this._.getUTCMinutes()},getMonth:function(){return this._.getUTCMonth()},getSeconds:function(){return this._.getUTCSeconds()},getTime:function(){return this._.getTime()},getTimezoneOffset:function(){return 
0},valueOf:function(){return this._.valueOf()},setDate:function(){ic.setUTCDate.apply(this._,arguments)},setDay:function(){ic.setUTCDay.apply(this._,arguments)},setFullYear:function(){ic.setUTCFullYear.apply(this._,arguments)},setHours:function(){ic.setUTCHours.apply(this._,arguments)},setMilliseconds:function(){ic.setUTCMilliseconds.apply(this._,arguments)},setMinutes:function(){ic.setUTCMinutes.apply(this._,arguments)},setMonth:function(){ic.setUTCMonth.apply(this._,arguments)},setSeconds:function(){ic.setUTCSeconds.apply(this._,arguments)},setTime:function(){ic.setTime.apply(this._,arguments)}};var ic=Date.prototype;rc.year=Ft(function(n){return n=rc.day(n),n.setMonth(0,1),n},function(n,t){n.setFullYear(n.getFullYear()+t)},function(n){return n.getFullYear()}),rc.years=rc.year.range,rc.years.utc=rc.year.utc.range,rc.day=Ft(function(n){var t=new uc(2e3,0);return t.setFullYear(n.getFullYear(),n.getMonth(),n.getDate()),t},function(n,t){n.setDate(n.getDate()+t)},function(n){return n.getDate()-1}),rc.days=rc.day.range,rc.days.utc=rc.day.utc.range,rc.dayOfYear=function(n){var t=rc.year(n);return Math.floor((n-t-6e4*(n.getTimezoneOffset()-t.getTimezoneOffset()))/864e5)},["sunday","monday","tuesday","wednesday","thursday","friday","saturday"].forEach(function(n,t){t=7-t;var e=rc[n]=Ft(function(n){return(n=rc.day(n)).setDate(n.getDate()-(n.getDay()+t)%7),n},function(n,t){n.setDate(n.getDate()+7*Math.floor(t))},function(n){var e=rc.year(n).getDay();return Math.floor((rc.dayOfYear(n)+(e+t)%7)/7)-(e!==t)});rc[n+"s"]=e.range,rc[n+"s"].utc=e.utc.range,rc[n+"OfYear"]=function(n){var e=rc.year(n).getDay();return Math.floor((rc.dayOfYear(n)+(e+t)%7)/7)}}),rc.week=rc.sunday,rc.weeks=rc.sunday.range,rc.weeks.utc=rc.sunday.utc.range,rc.weekOfYear=rc.sundayOfYear;var oc={"-":"",_:" ",0:"0"},ac=/^\s*\d+/,cc=/^%/;Bo.locale=function(n){return{numberFormat:Pt(n),timeFormat:Ot(n)}};var lc=Bo.locale({decimal:".",thousands:",",grouping:[3],currency:["$",""],dateTime:"%a %b %e %X 
%Y",date:"%m/%d/%Y",time:"%H:%M:%S",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});Bo.format=lc.numberFormat,Bo.geo={},ce.prototype={s:0,t:0,add:function(n){le(n,this.t,sc),le(sc.s,this.s,this),this.s?this.t+=sc.t:this.s=sc.t},reset:function(){this.s=this.t=0},valueOf:function(){return this.s}};var sc=new ce;Bo.geo.stream=function(n,t){n&&fc.hasOwnProperty(n.type)?fc[n.type](n,t):se(n,t)};var fc={Feature:function(n,t){se(n.geometry,t)},FeatureCollection:function(n,t){for(var e=n.features,r=-1,u=e.length;++rn?4*Ea+n:n,vc.lineStart=vc.lineEnd=vc.point=y}};Bo.geo.bounds=function(){function n(n,t){x.push(M=[s=n,h=n]),f>t&&(f=t),t>g&&(g=t)}function t(t,e){var r=pe([t*La,e*La]);if(m){var u=de(m,r),i=[u[1],-u[0],0],o=de(i,u);xe(o),o=Me(o);var c=t-p,l=c>0?1:-1,v=o[0]*Ta*l,d=ca(c)>180;if(d^(v>l*p&&l*t>v)){var y=o[1]*Ta;y>g&&(g=y)}else if(v=(v+360)%360-180,d^(v>l*p&&l*t>v)){var y=-o[1]*Ta;f>y&&(f=y)}else f>e&&(f=e),e>g&&(g=e);d?p>t?a(s,t)>a(s,h)&&(h=t):a(t,h)>a(s,h)&&(s=t):h>=s?(s>t&&(s=t),t>h&&(h=t)):t>p?a(s,t)>a(s,h)&&(h=t):a(t,h)>a(s,h)&&(s=t)}else n(t,e);m=r,p=t}function e(){_.point=t}function r(){M[0]=s,M[1]=h,_.point=n,m=null}function u(n,e){if(m){var r=n-p;y+=ca(r)>180?r+(r>0?360:-360):r}else v=n,d=e;vc.point(n,e),t(n,e)}function i(){vc.lineStart()}function o(){u(v,d),vc.lineEnd(),ca(y)>Na&&(s=-(h=180)),M[0]=s,M[1]=h,m=null}function a(n,t){return(t-=n)<0?t+360:t}function c(n,t){return n[0]-t[0]}function l(n,t){return t[0]<=t[1]?t[0]<=n&&n<=t[1]:npc?(s=-(h=180),f=-(g=90)):y>Na?g=90:-Na>y&&(f=-90),M[0]=s,M[1]=h}};return function(n){g=h=-(s=f=1/0),x=[],Bo.geo.stream(n,_); var t=x.length;if(t){x.sort(c);for(var 
e,r=1,u=x[0],i=[u];t>r;++r)e=x[r],l(e[0],u)||l(e[1],u)?(a(u[0],e[1])>a(u[0],u[1])&&(u[1]=e[1]),a(e[0],u[1])>a(u[0],u[1])&&(u[0]=e[0])):i.push(u=e);for(var o,e,p=-1/0,t=i.length-1,r=0,u=i[t];t>=r;u=e,++r)e=i[r],(o=a(u[1],e[0]))>p&&(p=o,s=e[0],h=u[1])}return x=M=null,1/0===s||1/0===f?[[0/0,0/0],[0/0,0/0]]:[[s,f],[h,g]]}}(),Bo.geo.centroid=function(n){dc=mc=yc=xc=Mc=_c=bc=wc=Sc=kc=Ec=0,Bo.geo.stream(n,Ac);var t=Sc,e=kc,r=Ec,u=t*t+e*e+r*r;return za>u&&(t=_c,e=bc,r=wc,Na>mc&&(t=yc,e=xc,r=Mc),u=t*t+e*e+r*r,za>u)?[0/0,0/0]:[Math.atan2(e,t)*Ta,nt(r/Math.sqrt(u))*Ta]};var dc,mc,yc,xc,Mc,_c,bc,wc,Sc,kc,Ec,Ac={sphere:y,point:be,lineStart:Se,lineEnd:ke,polygonStart:function(){Ac.lineStart=Ee},polygonEnd:function(){Ac.lineStart=Se}},Cc=Le(Ae,De,Ue,[-Ea,-Ea/2]),Nc=1e9;Bo.geo.clipExtent=function(){var n,t,e,r,u,i,o={stream:function(n){return u&&(u.valid=!1),u=i(n),u.valid=!0,u},extent:function(a){return arguments.length?(i=Oe(n=+a[0][0],t=+a[0][1],e=+a[1][0],r=+a[1][1]),u&&(u.valid=!1,u=null),o):[[n,t],[e,r]]}};return o.extent([[0,0],[960,500]])},(Bo.geo.conicEqualArea=function(){return Ie(Ze)}).raw=Ze,Bo.geo.albers=function(){return Bo.geo.conicEqualArea().rotate([96,0]).center([-.6,38.7]).parallels([29.5,45.5]).scale(1070)},Bo.geo.albersUsa=function(){function n(n){var i=n[0],o=n[1];return t=null,e(i,o),t||(r(i,o),t)||u(i,o),t}var t,e,r,u,i=Bo.geo.albers(),o=Bo.geo.conicEqualArea().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a=Bo.geo.conicEqualArea().rotate([157,0]).center([-3,19.9]).parallels([8,18]),c={point:function(n,e){t=[n,e]}};return n.invert=function(n){var t=i.scale(),e=i.translate(),r=(n[0]-e[0])/t,u=(n[1]-e[1])/t;return(u>=.12&&.234>u&&r>=-.425&&-.214>r?o:u>=.166&&.234>u&&r>=-.214&&-.115>r?a:i).invert(n)},n.stream=function(n){var 
t=i.stream(n),e=o.stream(n),r=a.stream(n);return{point:function(n,u){t.point(n,u),e.point(n,u),r.point(n,u)},sphere:function(){t.sphere(),e.sphere(),r.sphere()},lineStart:function(){t.lineStart(),e.lineStart(),r.lineStart()},lineEnd:function(){t.lineEnd(),e.lineEnd(),r.lineEnd()},polygonStart:function(){t.polygonStart(),e.polygonStart(),r.polygonStart()},polygonEnd:function(){t.polygonEnd(),e.polygonEnd(),r.polygonEnd()}}},n.precision=function(t){return arguments.length?(i.precision(t),o.precision(t),a.precision(t),n):i.precision()},n.scale=function(t){return arguments.length?(i.scale(t),o.scale(.35*t),a.scale(t),n.translate(i.translate())):i.scale()},n.translate=function(t){if(!arguments.length)return i.translate();var l=i.scale(),s=+t[0],f=+t[1];return e=i.translate(t).clipExtent([[s-.455*l,f-.238*l],[s+.455*l,f+.238*l]]).stream(c).point,r=o.translate([s-.307*l,f+.201*l]).clipExtent([[s-.425*l+Na,f+.12*l+Na],[s-.214*l-Na,f+.234*l-Na]]).stream(c).point,u=a.translate([s-.205*l,f+.212*l]).clipExtent([[s-.214*l+Na,f+.166*l+Na],[s-.115*l-Na,f+.234*l-Na]]).stream(c).point,n},n.scale(1070)};var zc,Lc,Tc,qc,Rc,Dc,Pc={point:y,lineStart:y,lineEnd:y,polygonStart:function(){Lc=0,Pc.lineStart=Ve},polygonEnd:function(){Pc.lineStart=Pc.lineEnd=Pc.point=y,zc+=ca(Lc/2)}},Uc={point:Xe,lineStart:y,lineEnd:y,polygonStart:y,polygonEnd:y},jc={point:We,lineStart:Je,lineEnd:Ge,polygonStart:function(){jc.lineStart=Ke},polygonEnd:function(){jc.point=We,jc.lineStart=Je,jc.lineEnd=Ge}};Bo.geo.path=function(){function n(n){return n&&("function"==typeof a&&i.pointRadius(+a.apply(this,arguments)),o&&o.valid||(o=u(i)),Bo.geo.stream(n,o)),i.result()}function t(){return o=null,n}var e,r,u,i,o,a=4.5;return n.area=function(n){return zc=0,Bo.geo.stream(n,u(Pc)),zc},n.centroid=function(n){return yc=xc=Mc=_c=bc=wc=Sc=kc=Ec=0,Bo.geo.stream(n,u(jc)),Ec?[Sc/Ec,kc/Ec]:wc?[_c/wc,bc/wc]:Mc?[yc/Mc,xc/Mc]:[0/0,0/0]},n.bounds=function(n){return 
Rc=Dc=-(Tc=qc=1/0),Bo.geo.stream(n,u(Uc)),[[Tc,qc],[Rc,Dc]]},n.projection=function(n){return arguments.length?(u=(e=n)?n.stream||tr(n):Et,t()):e},n.context=function(n){return arguments.length?(i=null==(r=n)?new $e:new Qe(n),"function"!=typeof a&&i.pointRadius(a),t()):r},n.pointRadius=function(t){return arguments.length?(a="function"==typeof t?t:(i.pointRadius(+t),+t),n):a},n.projection(Bo.geo.albersUsa()).context(null)},Bo.geo.transform=function(n){return{stream:function(t){var e=new er(t);for(var r in n)e[r]=n[r];return e}}},er.prototype={point:function(n,t){this.stream.point(n,t)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}},Bo.geo.projection=ur,Bo.geo.projectionMutator=ir,(Bo.geo.equirectangular=function(){return ur(ar)}).raw=ar.invert=ar,Bo.geo.rotation=function(n){function t(t){return t=n(t[0]*La,t[1]*La),t[0]*=Ta,t[1]*=Ta,t}return n=lr(n[0]%360*La,n[1]*La,n.length>2?n[2]*La:0),t.invert=function(t){return t=n.invert(t[0]*La,t[1]*La),t[0]*=Ta,t[1]*=Ta,t},t},cr.invert=ar,Bo.geo.circle=function(){function n(){var n="function"==typeof r?r.apply(this,arguments):r,t=lr(-n[0]*La,-n[1]*La,0).invert,u=[];return e(null,null,1,{point:function(n,e){u.push(n=t(n,e)),n[0]*=Ta,n[1]*=Ta}}),{type:"Polygon",coordinates:[u]}}var t,e,r=[0,0],u=6;return n.origin=function(t){return arguments.length?(r=t,n):r},n.angle=function(r){return arguments.length?(e=gr((t=+r)*La,u*La),n):t},n.precision=function(r){return arguments.length?(e=gr(t*La,(u=+r)*La),n):u},n.angle(90)},Bo.geo.distance=function(n,t){var e,r=(t[0]-n[0])*La,u=n[1]*La,i=t[1]*La,o=Math.sin(r),a=Math.cos(r),c=Math.sin(u),l=Math.cos(u),s=Math.sin(i),f=Math.cos(i);return Math.atan2(Math.sqrt((e=f*o)*e+(e=l*s-c*f*a)*e),c*s+l*f*a)},Bo.geo.graticule=function(){function n(){return{type:"MultiLineString",coordinates:t()}}function t(){return 
Bo.range(Math.ceil(i/d)*d,u,d).map(h).concat(Bo.range(Math.ceil(l/m)*m,c,m).map(g)).concat(Bo.range(Math.ceil(r/p)*p,e,p).filter(function(n){return ca(n%d)>Na}).map(s)).concat(Bo.range(Math.ceil(a/v)*v,o,v).filter(function(n){return ca(n%m)>Na}).map(f))}var e,r,u,i,o,a,c,l,s,f,h,g,p=10,v=p,d=90,m=360,y=2.5;return n.lines=function(){return t().map(function(n){return{type:"LineString",coordinates:n}})},n.outline=function(){return{type:"Polygon",coordinates:[h(i).concat(g(c).slice(1),h(u).reverse().slice(1),g(l).reverse().slice(1))]}},n.extent=function(t){return arguments.length?n.majorExtent(t).minorExtent(t):n.minorExtent()},n.majorExtent=function(t){return arguments.length?(i=+t[0][0],u=+t[1][0],l=+t[0][1],c=+t[1][1],i>u&&(t=i,i=u,u=t),l>c&&(t=l,l=c,c=t),n.precision(y)):[[i,l],[u,c]]},n.minorExtent=function(t){return arguments.length?(r=+t[0][0],e=+t[1][0],a=+t[0][1],o=+t[1][1],r>e&&(t=r,r=e,e=t),a>o&&(t=a,a=o,o=t),n.precision(y)):[[r,a],[e,o]]},n.step=function(t){return arguments.length?n.majorStep(t).minorStep(t):n.minorStep()},n.majorStep=function(t){return arguments.length?(d=+t[0],m=+t[1],n):[d,m]},n.minorStep=function(t){return arguments.length?(p=+t[0],v=+t[1],n):[p,v]},n.precision=function(t){return arguments.length?(y=+t,s=vr(a,o,90),f=dr(r,e,y),h=vr(l,c,90),g=dr(i,u,y),n):y},n.majorExtent([[-180,-90+Na],[180,90-Na]]).minorExtent([[-180,-80-Na],[180,80+Na]])},Bo.geo.greatArc=function(){function n(){return{type:"LineString",coordinates:[t||r.apply(this,arguments),e||u.apply(this,arguments)]}}var t,e,r=mr,u=yr;return n.distance=function(){return Bo.geo.distance(t||r.apply(this,arguments),e||u.apply(this,arguments))},n.source=function(e){return arguments.length?(r=e,t="function"==typeof e?null:e,n):r},n.target=function(t){return arguments.length?(u=t,e="function"==typeof t?null:t,n):u},n.precision=function(){return arguments.length?n:0},n},Bo.geo.interpolate=function(n,t){return xr(n[0]*La,n[1]*La,t[0]*La,t[1]*La)},Bo.geo.length=function(n){return 
Fc=0,Bo.geo.stream(n,Hc),Fc};var Fc,Hc={sphere:y,point:y,lineStart:Mr,lineEnd:y,polygonStart:y,polygonEnd:y},Oc=_r(function(n){return Math.sqrt(2/(1+n))},function(n){return 2*Math.asin(n/2)});(Bo.geo.azimuthalEqualArea=function(){return ur(Oc)}).raw=Oc;var Yc=_r(function(n){var t=Math.acos(n);return t&&t/Math.sin(t)},Et);(Bo.geo.azimuthalEquidistant=function(){return ur(Yc)}).raw=Yc,(Bo.geo.conicConformal=function(){return Ie(br)}).raw=br,(Bo.geo.conicEquidistant=function(){return Ie(wr)}).raw=wr;var Ic=_r(function(n){return 1/n},Math.atan);(Bo.geo.gnomonic=function(){return ur(Ic)}).raw=Ic,Sr.invert=function(n,t){return[n,2*Math.atan(Math.exp(t))-Ca]},(Bo.geo.mercator=function(){return kr(Sr)}).raw=Sr;var Zc=_r(function(){return 1},Math.asin);(Bo.geo.orthographic=function(){return ur(Zc)}).raw=Zc;var Vc=_r(function(n){return 1/(1+n)},function(n){return 2*Math.atan(n)});(Bo.geo.stereographic=function(){return ur(Vc)}).raw=Vc,Er.invert=function(n,t){return[-t,2*Math.atan(Math.exp(n))-Ca]},(Bo.geo.transverseMercator=function(){var n=kr(Er),t=n.center,e=n.rotate;return n.center=function(n){return n?t([-n[1],n[0]]):(n=t(),[n[1],-n[0]])},n.rotate=function(n){return n?e([n[0],n[1],n.length>2?n[2]+90:90]):(n=e(),[n[0],n[1],n[2]-90])},e([0,0,90])}).raw=Er,Bo.geom={},Bo.geom.hull=function(n){function t(n){if(n.length<3)return[];var t,u=kt(e),i=kt(r),o=n.length,a=[],c=[];for(t=0;o>t;t++)a.push([+u.call(this,n[t],t),+i.call(this,n[t],t),t]);for(a.sort(zr),t=0;o>t;t++)c.push([a[t][0],-a[t][1]]);var l=Nr(a),s=Nr(c),f=s[0]===l[0],h=s[s.length-1]===l[l.length-1],g=[];for(t=l.length-1;t>=0;--t)g.push(n[a[l[t]][2]]);for(t=+f;t=r&&l.x<=i&&l.y>=u&&l.y<=o?[[r,o],[i,o],[i,u],[r,u]]:[];s.point=n[a]}),t}function e(n){return n.map(function(n,t){return{x:Math.round(i(n,t)/Na)*Na,y:Math.round(o(n,t)/Na)*Na,i:t}})}var r=Ar,u=Cr,i=r,o=u,a=nl;return n?t(n):(t.links=function(n){return iu(e(n)).edges.filter(function(n){return 
n.l&&n.r}).map(function(t){return{source:n[t.l.i],target:n[t.r.i]}})},t.triangles=function(n){var t=[];return iu(e(n)).cells.forEach(function(e,r){for(var u,i,o=e.site,a=e.edges.sort(Ir),c=-1,l=a.length,s=a[l-1].edge,f=s.l===o?s.r:s.l;++c=l,h=r>=s,g=(h<<1)+f;n.leaf=!1,n=n.nodes[g]||(n.nodes[g]=su()),f?u=l:a=l,h?o=s:c=s,i(n,t,e,r,u,o,a,c)}var s,f,h,g,p,v,d,m,y,x=kt(a),M=kt(c);if(null!=t)v=t,d=e,m=r,y=u;else if(m=y=-(v=d=1/0),f=[],h=[],p=n.length,o)for(g=0;p>g;++g)s=n[g],s.xm&&(m=s.x),s.y>y&&(y=s.y),f.push(s.x),h.push(s.y);else for(g=0;p>g;++g){var _=+x(s=n[g],g),b=+M(s,g);v>_&&(v=_),d>b&&(d=b),_>m&&(m=_),b>y&&(y=b),f.push(_),h.push(b)}var w=m-v,S=y-d;w>S?y=d+w:m=v+S;var k=su();if(k.add=function(n){i(k,n,+x(n,++g),+M(n,g),v,d,m,y)},k.visit=function(n){fu(n,k,v,d,m,y)},g=-1,null==t){for(;++g=0?n.slice(0,t):n,r=t>=0?n.slice(t+1):"in";return e=ul.get(e)||rl,r=il.get(r)||Et,yu(r(e.apply(null,Wo.call(arguments,1))))},Bo.interpolateHcl=Lu,Bo.interpolateHsl=Tu,Bo.interpolateLab=qu,Bo.interpolateRound=Ru,Bo.transform=function(n){var t=Go.createElementNS(Bo.ns.prefix.svg,"g");return(Bo.transform=function(n){if(null!=n){t.setAttribute("transform",n);var e=t.transform.baseVal.consolidate()}return new Du(e?e.matrix:ol)})(n)},Du.prototype.toString=function(){return"translate("+this.translate+")rotate("+this.rotate+")skewX("+this.skew+")scale("+this.scale+")"};var ol={a:1,b:0,c:0,d:1,e:0,f:0};Bo.interpolateTransform=Fu,Bo.layout={},Bo.layout.bundle=function(){return function(n){for(var t=[],e=-1,r=n.length;++ea*a/d){if(p>c){var l=t.charge/c;n.px-=i*l,n.py-=o*l}return!0}if(t.point&&c&&p>c){var l=t.pointCharge/c;n.px-=i*l,n.py-=o*l}}return!t.charge}}function t(n){n.px=Bo.event.x,n.py=Bo.event.y,a.resume()}var e,r,u,i,o,a={},c=Bo.dispatch("start","tick","end"),l=[1,1],s=.9,f=al,h=cl,g=-30,p=ll,v=.1,d=.64,m=[],y=[];return a.tick=function(){if((r*=.99)<.005)return c.end({type:"end",alpha:r=0}),!0;var 
t,e,a,f,h,p,d,x,M,_=m.length,b=y.length;for(e=0;b>e;++e)a=y[e],f=a.source,h=a.target,x=h.x-f.x,M=h.y-f.y,(p=x*x+M*M)&&(p=r*i[e]*((p=Math.sqrt(p))-u[e])/p,x*=p,M*=p,h.x-=x*(d=f.weight/(h.weight+f.weight)),h.y-=M*d,f.x+=x*(d=1-d),f.y+=M*d);if((d=r*v)&&(x=l[0]/2,M=l[1]/2,e=-1,d))for(;++e<_;)a=m[e],a.x+=(x-a.x)*d,a.y+=(M-a.y)*d;if(g)for(Wu(t=Bo.geom.quadtree(m),r,o),e=-1;++e<_;)(a=m[e]).fixed||t.visit(n(a));for(e=-1;++e<_;)a=m[e],a.fixed?(a.x=a.px,a.y=a.py):(a.x-=(a.px-(a.px=a.x))*s,a.y-=(a.py-(a.py=a.y))*s);c.tick({type:"tick",alpha:r})},a.nodes=function(n){return arguments.length?(m=n,a):m},a.links=function(n){return arguments.length?(y=n,a):y},a.size=function(n){return arguments.length?(l=n,a):l},a.linkDistance=function(n){return arguments.length?(f="function"==typeof n?n:+n,a):f},a.distance=a.linkDistance,a.linkStrength=function(n){return arguments.length?(h="function"==typeof n?n:+n,a):h},a.friction=function(n){return arguments.length?(s=+n,a):s},a.charge=function(n){return arguments.length?(g="function"==typeof n?n:+n,a):g},a.chargeDistance=function(n){return arguments.length?(p=n*n,a):Math.sqrt(p)},a.gravity=function(n){return arguments.length?(v=+n,a):v},a.theta=function(n){return arguments.length?(d=n*n,a):Math.sqrt(d)},a.alpha=function(n){return arguments.length?(n=+n,r?r=n>0?n:0:n>0&&(c.start({type:"start",alpha:r=n}),Bo.timer(a.tick)),a):r},a.start=function(){function n(n,r){if(!e){for(e=new Array(c),a=0;c>a;++a)e[a]=[];for(a=0;l>a;++a){var u=y[a];e[u.source.index].push(u.target),e[u.target.index].push(u.source)}}for(var i,o=e[t],a=-1,l=o.length;++at;++t)(r=m[t]).index=t,r.weight=0;for(t=0;s>t;++t)r=y[t],"number"==typeof r.source&&(r.source=m[r.source]),"number"==typeof r.target&&(r.target=m[r.target]),++r.source.weight,++r.target.weight;for(t=0;c>t;++t)r=m[t],isNaN(r.x)&&(r.x=n("x",p)),isNaN(r.y)&&(r.y=n("y",v)),isNaN(r.px)&&(r.px=r.x),isNaN(r.py)&&(r.py=r.y);if(u=[],"function"==typeof f)for(t=0;s>t;++t)u[t]=+f.call(this,y[t],t);else 
for(t=0;s>t;++t)u[t]=f;if(i=[],"function"==typeof h)for(t=0;s>t;++t)i[t]=+h.call(this,y[t],t);else for(t=0;s>t;++t)i[t]=h;if(o=[],"function"==typeof g)for(t=0;c>t;++t)o[t]=+g.call(this,m[t],t);else for(t=0;c>t;++t)o[t]=g;return a.resume()},a.resume=function(){return a.alpha(.1)},a.stop=function(){return a.alpha(0)},a.drag=function(){return e||(e=Bo.behavior.drag().origin(Et).on("dragstart.force",Vu).on("drag.force",t).on("dragend.force",Xu)),arguments.length?(this.on("mouseover.force",$u).on("mouseout.force",Bu).call(e),void 0):e},Bo.rebind(a,c,"on")};var al=20,cl=1,ll=1/0;Bo.layout.hierarchy=function(){function n(u){var i,o=[u],a=[];for(u.depth=0;null!=(i=o.pop());)if(a.push(i),(l=e.call(n,i,i.depth))&&(c=l.length)){for(var c,l,s;--c>=0;)o.push(s=l[c]),s.parent=i,s.depth=i.depth+1;r&&(i.value=0),i.children=l}else r&&(i.value=+r.call(n,i,i.depth)||0),delete i.children;return Ku(u,function(n){var e,u;t&&(e=n.children)&&e.sort(t),r&&(u=n.parent)&&(u.value+=n.value)}),a}var t=ti,e=Qu,r=ni;return n.sort=function(e){return arguments.length?(t=e,n):t},n.children=function(t){return arguments.length?(e=t,n):e},n.value=function(t){return arguments.length?(r=t,n):r},n.revalue=function(t){return r&&(Gu(t,function(n){n.children&&(n.value=0)}),Ku(t,function(t){var e;t.children||(t.value=+r.call(n,t,t.depth)||0),(e=t.parent)&&(e.value+=t.value)})),t},n},Bo.layout.partition=function(){function n(t,e,r,u){var i=t.children;if(t.x=e,t.y=t.depth*u,t.dx=r,t.dy=u,i&&(o=i.length)){var o,a,c,l=-1;for(r=t.value?r/t.value:0;++lp;++p)for(u.call(n,l[0][p],v=d[p],s[0][p][1]),g=1;h>g;++g)u.call(n,l[g][p],v+=s[g-1][p][1],s[g][p][1]);return a}var t=Et,e=oi,r=ai,u=ii,i=ri,o=ui;return n.values=function(e){return arguments.length?(t=e,n):t},n.order=function(t){return arguments.length?(e="function"==typeof t?t:fl.get(t)||oi,n):e},n.offset=function(t){return arguments.length?(r="function"==typeof t?t:hl.get(t)||ai,n):r},n.x=function(t){return arguments.length?(i=t,n):i},n.y=function(t){return 
arguments.length?(o=t,n):o},n.out=function(t){return arguments.length?(u=t,n):u},n};var fl=Bo.map({"inside-out":function(n){var t,e,r=n.length,u=n.map(ci),i=n.map(li),o=Bo.range(r).sort(function(n,t){return u[n]-u[t]}),a=0,c=0,l=[],s=[];for(t=0;r>t;++t)e=o[t],c>a?(a+=i[e],l.push(e)):(c+=i[e],s.push(e));return s.reverse().concat(l)},reverse:function(n){return Bo.range(n.length).reverse()},"default":oi}),hl=Bo.map({silhouette:function(n){var t,e,r,u=n.length,i=n[0].length,o=[],a=0,c=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];r>a&&(a=r),o.push(r)}for(e=0;i>e;++e)c[e]=(a-o[e])/2;return c},wiggle:function(n){var t,e,r,u,i,o,a,c,l,s=n.length,f=n[0],h=f.length,g=[];for(g[0]=c=l=0,e=1;h>e;++e){for(t=0,u=0;s>t;++t)u+=n[t][e][1];for(t=0,i=0,a=f[e][0]-f[e-1][0];s>t;++t){for(r=0,o=(n[t][e][1]-n[t][e-1][1])/(2*a);t>r;++r)o+=(n[r][e][1]-n[r][e-1][1])/a;i+=o*n[t][e][1]}g[e]=c-=u?i/u*a:0,l>c&&(l=c)}for(e=0;h>e;++e)g[e]-=l;return g},expand:function(n){var t,e,r,u=n.length,i=n[0].length,o=1/u,a=[];for(e=0;i>e;++e){for(t=0,r=0;u>t;t++)r+=n[t][e][1];if(r)for(t=0;u>t;t++)n[t][e][1]/=r;else for(t=0;u>t;t++)n[t][e][1]=o}for(e=0;i>e;++e)a[e]=0;return a},zero:ai});Bo.layout.histogram=function(){function n(n,i){for(var o,a,c=[],l=n.map(e,this),s=r.call(this,l,i),f=u.call(this,s,l,i),i=-1,h=l.length,g=f.length-1,p=t?1:1/h;++i0)for(i=-1;++i=s[0]&&a<=s[1]&&(o=c[Bo.bisect(f,a,1,g)-1],o.y+=p,o.push(n[i]));return c}var t=!0,e=Number,r=gi,u=fi;return n.value=function(t){return arguments.length?(e=t,n):e},n.range=function(t){return arguments.length?(r=kt(t),n):r},n.bins=function(t){return arguments.length?(u="number"==typeof t?function(n){return hi(n,t)}:kt(t),n):u},n.frequency=function(e){return arguments.length?(t=!!e,n):t},n},Bo.layout.pack=function(){function n(n,i){var o=e.call(this,n,i),a=o[0],c=u[0],l=u[1],s=null==t?Math.sqrt:"function"==typeof t?t:function(){return t};if(a.x=a.y=0,Ku(a,function(n){n.r=+s(n.value)}),Ku(a,yi),r){var 
f=r*(t?1:Math.max(2*a.r/c,2*a.r/l))/2;Ku(a,function(n){n.r+=f}),Ku(a,yi),Ku(a,function(n){n.r-=f})}return _i(a,c/2,l/2,t?1:1/Math.max(2*a.r/c,2*a.r/l)),o}var t,e=Bo.layout.hierarchy().sort(pi),r=0,u=[1,1];return n.size=function(t){return arguments.length?(u=t,n):u},n.radius=function(e){return arguments.length?(t=null==e||"function"==typeof e?e:+e,n):t},n.padding=function(t){return arguments.length?(r=+t,n):r},Ju(n,e)},Bo.layout.tree=function(){function n(n,u){var s=o.call(this,n,u),f=s[0],h=t(f);if(Ku(h,e),h.parent.m=-h.z,Gu(h,r),l)Gu(f,i);else{var g=f,p=f,v=f;Gu(f,function(n){n.xp.x&&(p=n),n.depth>v.depth&&(v=n)});var d=a(g,p)/2-g.x,m=c[0]/(p.x+a(p,g)/2+d),y=c[1]/(v.depth||1);Gu(f,function(n){n.x=(n.x+d)*m,n.y=n.depth*y})}return s}function t(n){for(var t,e={A:null,children:[n]},r=[e];null!=(t=r.pop());)for(var u,i=t.children,o=0,a=i.length;a>o;++o)r.push((i[o]=u={_:i[o],parent:t,children:(u=i[o].children)&&u.slice()||[],A:null,a:null,z:0,m:0,c:0,s:0,t:null,i:o}).a=u);return e.children[0]}function e(n){var t=n.children,e=n.parent.children,r=n.i?e[n.i-1]:null;if(t.length){Ai(n);var i=(t[0].z+t[t.length-1].z)/2;r?(n.z=r.z+a(n._,r._),n.m=n.z-i):n.z=i}else r&&(n.z=r.z+a(n._,r._));n.parent.A=u(n,r,n.parent.A||e[0])}function r(n){n._.x=n.z+n.parent.m,n.m+=n.parent.m}function u(n,t,e){if(t){for(var r,u=n,i=n,o=t,c=u.parent.children[0],l=u.m,s=i.m,f=o.m,h=c.m;o=ki(o),u=Si(u),o&&u;)c=Si(c),i=ki(i),i.a=n,r=o.z+f-u.z-l+a(o._,u._),r>0&&(Ei(Ci(o,n,e),n,r),l+=r,s+=r),f+=o.m,l+=u.m,h+=c.m,s+=i.m;o&&!ki(i)&&(i.t=o,i.m+=f-s),u&&!Si(c)&&(c.t=u,c.m+=l-h,e=n)}return e}function i(n){n.x*=c[0],n.y=n.depth*c[1]}var o=Bo.layout.hierarchy().sort(null).value(null),a=wi,c=[1,1],l=null;return n.separation=function(t){return arguments.length?(a=t,n):a},n.size=function(t){return arguments.length?(l=null==(c=t)?i:null,n):l?null:c},n.nodeSize=function(t){return arguments.length?(l=null==(c=t)?null:i,n):l?c:null},Ju(n,o)},Bo.layout.cluster=function(){function n(n,i){var 
o,a=t.call(this,n,i),c=a[0],l=0;Ku(c,function(n){var t=n.children;t&&t.length?(n.x=zi(t),n.y=Ni(t)):(n.x=o?l+=e(n,o):0,n.y=0,o=n)});var s=Li(c),f=Ti(c),h=s.x-e(s,f)/2,g=f.x+e(f,s)/2;return Ku(c,u?function(n){n.x=(n.x-c.x)*r[0],n.y=(c.y-n.y)*r[1]}:function(n){n.x=(n.x-h)/(g-h)*r[0],n.y=(1-(c.y?n.y/c.y:1))*r[1]}),a}var t=Bo.layout.hierarchy().sort(null).value(null),e=wi,r=[1,1],u=!1;return n.separation=function(t){return arguments.length?(e=t,n):e},n.size=function(t){return arguments.length?(u=null==(r=t),n):u?null:r},n.nodeSize=function(t){return arguments.length?(u=null!=(r=t),n):u?r:null},Ju(n,t)},Bo.layout.treemap=function(){function n(n,t){for(var e,r,u=-1,i=n.length;++ut?0:t),e.area=isNaN(r)||0>=r?0:r}function t(e){var i=e.children;if(i&&i.length){var o,a,c,l=f(e),s=[],h=i.slice(),p=1/0,v="slice"===g?l.dx:"dice"===g?l.dy:"slice-dice"===g?1&e.depth?l.dy:l.dx:Math.min(l.dx,l.dy);for(n(h,l.dx*l.dy/e.value),s.area=0;(c=h.length)>0;)s.push(o=h[c-1]),s.area+=o.area,"squarify"!==g||(a=r(s,v))<=p?(h.pop(),p=a):(s.area-=s.pop().area,u(s,v,l,!1),v=Math.min(l.dx,l.dy),s.length=s.area=0,p=1/0);s.length&&(u(s,v,l,!0),s.length=s.area=0),i.forEach(t)}}function e(t){var r=t.children;if(r&&r.length){var i,o=f(t),a=r.slice(),c=[];for(n(a,o.dx*o.dy/t.value),c.area=0;i=a.pop();)c.push(i),c.area+=i.area,null!=i.z&&(u(c,i.z?o.dx:o.dy,o,!a.length),c.length=c.area=0);r.forEach(e)}}function r(n,t){for(var e,r=n.area,u=0,i=1/0,o=-1,a=n.length;++oe&&(i=e),e>u&&(u=e));return r*=r,t*=t,r?Math.max(t*u*p/r,r/(t*i*p)):1/0}function u(n,t,e,r){var u,i=-1,o=n.length,a=e.x,l=e.y,s=t?c(n.area/t):0;if(t==e.dx){for((r||s>e.dy)&&(s=e.dy);++ie.dx)&&(s=e.dx);++ie&&(t=1),1>e&&(n=0),function(){var e,r,u;do e=2*Math.random()-1,r=2*Math.random()-1,u=e*e+r*r;while(!u||u>1);return n+t*e*Math.sqrt(-2*Math.log(u)/u)}},logNormal:function(){var n=Bo.random.normal.apply(Bo,arguments);return function(){return Math.exp(n())}},bates:function(n){var t=Bo.random.irwinHall(n);return function(){return 
t()/n}},irwinHall:function(n){return function(){for(var t=0,e=0;n>e;e++)t+=Math.random();return t}}},Bo.scale={};var gl={floor:Et,ceil:Et};Bo.scale.linear=function(){return Oi([0,1],[0,1],du,!1)};var pl={s:1,g:1,p:1,r:1,e:1};Bo.scale.log=function(){return Wi(Bo.scale.linear().domain([0,1]),10,!0,[1,10])};var vl=Bo.format(".0e"),dl={floor:function(n){return-Math.ceil(-n)},ceil:function(n){return-Math.floor(-n)}};Bo.scale.pow=function(){return Ji(Bo.scale.linear(),1,[0,1])},Bo.scale.sqrt=function(){return Bo.scale.pow().exponent(.5)},Bo.scale.ordinal=function(){return Ki([],{t:"range",a:[[]]})},Bo.scale.category10=function(){return Bo.scale.ordinal().range(ml)},Bo.scale.category20=function(){return Bo.scale.ordinal().range(yl)},Bo.scale.category20b=function(){return Bo.scale.ordinal().range(xl)},Bo.scale.category20c=function(){return Bo.scale.ordinal().range(Ml)};var ml=[2062260,16744206,2924588,14034728,9725885,9197131,14907330,8355711,12369186,1556175].map(yt),yl=[2062260,11454440,16744206,16759672,2924588,10018698,14034728,16750742,9725885,12955861,9197131,12885140,14907330,16234194,8355711,13092807,12369186,14408589,1556175,10410725].map(yt),xl=[3750777,5395619,7040719,10264286,6519097,9216594,11915115,13556636,9202993,12426809,15186514,15190932,8666169,11356490,14049643,15177372,8077683,10834324,13528509,14589654].map(yt),Ml=[3244733,7057110,10406625,13032431,15095053,16616764,16625259,16634018,3253076,7652470,10607003,13101504,7695281,10394312,12369372,14342891,6513507,9868950,12434877,14277081].map(yt);Bo.scale.quantile=function(){return Qi([],[]) },Bo.scale.quantize=function(){return no(0,1,[0,1])},Bo.scale.threshold=function(){return to([.5],[0,1])},Bo.scale.identity=function(){return eo([0,1])},Bo.svg={},Bo.svg.arc=function(){function n(){var n=t.apply(this,arguments),i=e.apply(this,arguments),o=r.apply(this,arguments)+_l,a=u.apply(this,arguments)+_l,c=(o>a&&(c=o,o=a,a=c),a-o),l=Ea>c?"0":"1",s=Math.cos(o),f=Math.sin(o),h=Math.cos(a),g=Math.sin(a);return 
c>=bl?n?"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"M0,"+n+"A"+n+","+n+" 0 1,0 0,"+-n+"A"+n+","+n+" 0 1,0 0,"+n+"Z":"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"Z":n?"M"+i*s+","+i*f+"A"+i+","+i+" 0 "+l+",1 "+i*h+","+i*g+"L"+n*h+","+n*g+"A"+n+","+n+" 0 "+l+",0 "+n*s+","+n*f+"Z":"M"+i*s+","+i*f+"A"+i+","+i+" 0 "+l+",1 "+i*h+","+i*g+"L0,0"+"Z"}var t=ro,e=uo,r=io,u=oo;return n.innerRadius=function(e){return arguments.length?(t=kt(e),n):t},n.outerRadius=function(t){return arguments.length?(e=kt(t),n):e},n.startAngle=function(t){return arguments.length?(r=kt(t),n):r},n.endAngle=function(t){return arguments.length?(u=kt(t),n):u},n.centroid=function(){var n=(t.apply(this,arguments)+e.apply(this,arguments))/2,i=(r.apply(this,arguments)+u.apply(this,arguments))/2+_l;return[Math.cos(i)*n,Math.sin(i)*n]},n};var _l=-Ca,bl=Aa-Na;Bo.svg.line=function(){return ao(Et)};var wl=Bo.map({linear:co,"linear-closed":lo,step:so,"step-before":fo,"step-after":ho,basis:xo,"basis-open":Mo,"basis-closed":_o,bundle:bo,cardinal:vo,"cardinal-open":go,"cardinal-closed":po,monotone:Co});wl.forEach(function(n,t){t.key=n,t.closed=/-closed$/.test(n)});var Sl=[0,2/3,1/3,0],kl=[0,1/3,2/3,0],El=[0,1/6,2/3,1/6];Bo.svg.line.radial=function(){var n=ao(No);return n.radius=n.x,delete n.x,n.angle=n.y,delete n.y,n},fo.reverse=ho,ho.reverse=fo,Bo.svg.area=function(){return zo(Et)},Bo.svg.area.radial=function(){var n=zo(No);return n.radius=n.x,delete n.x,n.innerRadius=n.x0,delete n.x0,n.outerRadius=n.x1,delete n.x1,n.angle=n.y,delete n.y,n.startAngle=n.y0,delete n.y0,n.endAngle=n.y1,delete n.y1,n},Bo.svg.chord=function(){function n(n,a){var c=t(this,i,n,a),l=t(this,o,n,a);return"M"+c.p0+r(c.r,c.p1,c.a1-c.a0)+(e(c,l)?u(c.r,c.p1,c.r,c.p0):u(c.r,c.p1,l.r,l.p0)+r(l.r,l.p1,l.a1-l.a0)+u(l.r,l.p1,c.r,c.p0))+"Z"}function t(n,t,e,r){var 
u=t.call(n,e,r),i=a.call(n,u,r),o=c.call(n,u,r)+_l,s=l.call(n,u,r)+_l;return{r:i,a0:o,a1:s,p0:[i*Math.cos(o),i*Math.sin(o)],p1:[i*Math.cos(s),i*Math.sin(s)]}}function e(n,t){return n.a0==t.a0&&n.a1==t.a1}function r(n,t,e){return"A"+n+","+n+" 0 "+ +(e>Ea)+",1 "+t}function u(n,t,e,r){return"Q 0,0 "+r}var i=mr,o=yr,a=Lo,c=io,l=oo;return n.radius=function(t){return arguments.length?(a=kt(t),n):a},n.source=function(t){return arguments.length?(i=kt(t),n):i},n.target=function(t){return arguments.length?(o=kt(t),n):o},n.startAngle=function(t){return arguments.length?(c=kt(t),n):c},n.endAngle=function(t){return arguments.length?(l=kt(t),n):l},n},Bo.svg.diagonal=function(){function n(n,u){var i=t.call(this,n,u),o=e.call(this,n,u),a=(i.y+o.y)/2,c=[i,{x:i.x,y:a},{x:o.x,y:a},o];return c=c.map(r),"M"+c[0]+"C"+c[1]+" "+c[2]+" "+c[3]}var t=mr,e=yr,r=To;return n.source=function(e){return arguments.length?(t=kt(e),n):t},n.target=function(t){return arguments.length?(e=kt(t),n):e},n.projection=function(t){return arguments.length?(r=t,n):r},n},Bo.svg.diagonal.radial=function(){var n=Bo.svg.diagonal(),t=To,e=n.projection;return n.projection=function(n){return arguments.length?e(qo(t=n)):t},n},Bo.svg.symbol=function(){function n(n,r){return(Al.get(t.call(this,n,r))||Po)(e.call(this,n,r))}var t=Do,e=Ro;return n.type=function(e){return arguments.length?(t=kt(e),n):t},n.size=function(t){return arguments.length?(e=kt(t),n):e},n};var Al=Bo.map({circle:Po,cross:function(n){var t=Math.sqrt(n/5)/2;return"M"+-3*t+","+-t+"H"+-t+"V"+-3*t+"H"+t+"V"+-t+"H"+3*t+"V"+t+"H"+t+"V"+3*t+"H"+-t+"V"+t+"H"+-3*t+"Z"},diamond:function(n){var t=Math.sqrt(n/(2*Ll)),e=t*Ll;return"M0,"+-t+"L"+e+",0"+" 0,"+t+" "+-e+",0"+"Z"},square:function(n){var t=Math.sqrt(n)/2;return"M"+-t+","+-t+"L"+t+","+-t+" "+t+","+t+" "+-t+","+t+"Z"},"triangle-down":function(n){var t=Math.sqrt(n/zl),e=t*zl/2;return"M0,"+e+"L"+t+","+-e+" "+-t+","+-e+"Z"},"triangle-up":function(n){var t=Math.sqrt(n/zl),e=t*zl/2;return"M0,"+-e+"L"+t+","+e+" 
"+-t+","+e+"Z"}});Bo.svg.symbolTypes=Al.keys();var Cl,Nl,zl=Math.sqrt(3),Ll=Math.tan(30*La),Tl=[],ql=0;Tl.call=ya.call,Tl.empty=ya.empty,Tl.node=ya.node,Tl.size=ya.size,Bo.transition=function(n){return arguments.length?Cl?n.transition():n:_a.transition()},Bo.transition.prototype=Tl,Tl.select=function(n){var t,e,r,u=this.id,i=[];n=k(n);for(var o=-1,a=this.length;++oi;i++){u.push(t=[]);for(var e=this[i],a=0,c=e.length;c>a;a++)(r=e[a])&&n.call(r,r.__data__,a,i)&&t.push(r)}return Uo(u,this.id)},Tl.tween=function(n,t){var e=this.id;return arguments.length<2?this.node().__transition__[e].tween.get(n):F(this,null==t?function(t){t.__transition__[e].tween.remove(n)}:function(r){r.__transition__[e].tween.set(n,t)})},Tl.attr=function(n,t){function e(){this.removeAttribute(a)}function r(){this.removeAttributeNS(a.space,a.local)}function u(n){return null==n?e:(n+="",function(){var t,e=this.getAttribute(a);return e!==n&&(t=o(e,n),function(n){this.setAttribute(a,t(n))})})}function i(n){return null==n?r:(n+="",function(){var t,e=this.getAttributeNS(a.space,a.local);return e!==n&&(t=o(e,n),function(n){this.setAttributeNS(a.space,a.local,t(n))})})}if(arguments.length<2){for(t in n)this.attr(t,n[t]);return this}var o="transform"==n?Fu:du,a=Bo.ns.qualify(n);return jo(this,"attr."+n,t,a.local?i:u)},Tl.attrTween=function(n,t){function e(n,e){var r=t.call(this,n,e,this.getAttribute(u));return r&&function(n){this.setAttribute(u,r(n))}}function r(n,e){var r=t.call(this,n,e,this.getAttributeNS(u.space,u.local));return r&&function(n){this.setAttributeNS(u.space,u.local,r(n))}}var u=Bo.ns.qualify(n);return this.tween("attr."+n,u.local?r:e)},Tl.style=function(n,t,e){function r(){this.style.removeProperty(n)}function u(t){return null==t?r:(t+="",function(){var r,u=Qo.getComputedStyle(this,null).getPropertyValue(n);return u!==t&&(r=du(u,t),function(t){this.style.setProperty(n,r(t),e)})})}var i=arguments.length;if(3>i){if("string"!=typeof n){2>i&&(t="");for(e in n)this.style(e,n[e],t);return 
this}e=""}return jo(this,"style."+n,t,u)},Tl.styleTween=function(n,t,e){function r(r,u){var i=t.call(this,r,u,Qo.getComputedStyle(this,null).getPropertyValue(n));return i&&function(t){this.style.setProperty(n,i(t),e)}}return arguments.length<3&&(e=""),this.tween("style."+n,r)},Tl.text=function(n){return jo(this,"text",n,Fo)},Tl.remove=function(){return this.each("end.transition",function(){var n;this.__transition__.count<2&&(n=this.parentNode)&&n.removeChild(this)})},Tl.ease=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].ease:("function"!=typeof n&&(n=Bo.ease.apply(Bo,arguments)),F(this,function(e){e.__transition__[t].ease=n}))},Tl.delay=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].delay:F(this,"function"==typeof n?function(e,r,u){e.__transition__[t].delay=+n.call(e,e.__data__,r,u)}:(n=+n,function(e){e.__transition__[t].delay=n}))},Tl.duration=function(n){var t=this.id;return arguments.length<1?this.node().__transition__[t].duration:F(this,"function"==typeof n?function(e,r,u){e.__transition__[t].duration=Math.max(1,n.call(e,e.__data__,r,u))}:(n=Math.max(1,n),function(e){e.__transition__[t].duration=n}))},Tl.each=function(n,t){var e=this.id;if(arguments.length<2){var r=Nl,u=Cl;Cl=e,F(this,function(t,r,u){Nl=t.__transition__[e],n.call(t,t.__data__,r,u)}),Nl=r,Cl=u}else F(this,function(r){var u=r.__transition__[e];(u.event||(u.event=Bo.dispatch("start","end"))).on(n,t)});return this},Tl.transition=function(){for(var n,t,e,r,u=this.id,i=++ql,o=[],a=0,c=this.length;c>a;a++){o.push(n=[]);for(var t=this[a],l=0,s=t.length;s>l;l++)(e=t[l])&&(r=Object.create(e.__transition__[u]),r.delay+=r.duration,Ho(e,l,i,r)),n.push(e)}return Uo(o,i)},Bo.svg.axis=function(){function n(n){n.each(function(){var 
n,l=Bo.select(this),s=this.__chart__||e,f=this.__chart__=e.copy(),h=null==c?f.ticks?f.ticks.apply(f,a):f.domain():c,g=null==t?f.tickFormat?f.tickFormat.apply(f,a):Et:t,p=l.selectAll(".tick").data(h,f),v=p.enter().insert("g",".domain").attr("class","tick").style("opacity",Na),d=Bo.transition(p.exit()).style("opacity",Na).remove(),m=Bo.transition(p.order()).style("opacity",1),y=Math.max(u,0)+o,x=Pi(f),M=l.selectAll(".domain").data([0]),_=(M.enter().append("path").attr("class","domain"),Bo.transition(M));v.append("line"),v.append("text");var b,w,S,k,E=v.select("line"),A=m.select("line"),C=p.select("text").text(g),N=v.select("text"),z=m.select("text"),L="top"===r||"left"===r?-1:1;if("bottom"===r||"top"===r?(n=Oo,b="x",S="y",w="x2",k="y2",C.attr("dy",0>L?"0em":".71em").style("text-anchor","middle"),_.attr("d","M"+x[0]+","+L*i+"V0H"+x[1]+"V"+L*i)):(n=Yo,b="y",S="x",w="y2",k="x2",C.attr("dy",".32em").style("text-anchor",0>L?"end":"start"),_.attr("d","M"+L*i+","+x[0]+"H0V"+x[1]+"H"+L*i)),E.attr(k,L*u),N.attr(S,L*y),A.attr(w,0).attr(k,L*u),z.attr(b,0).attr(S,L*y),f.rangeBand){var T=f,q=T.rangeBand()/2;s=f=function(n){return T(n)+q}}else s.rangeBand?s=f:d.call(n,f,s);v.call(n,s,f),m.call(n,f,f)})}var t,e=Bo.scale.linear(),r=Rl,u=6,i=6,o=3,a=[10],c=null;return n.scale=function(t){return arguments.length?(e=t,n):e},n.orient=function(t){return arguments.length?(r=t in Dl?t+"":Rl,n):r},n.ticks=function(){return arguments.length?(a=arguments,n):a},n.tickValues=function(t){return arguments.length?(c=t,n):c},n.tickFormat=function(e){return arguments.length?(t=e,n):t},n.tickSize=function(t){var e=arguments.length;return e?(u=+t,i=+arguments[e-1],n):u},n.innerTickSize=function(t){return arguments.length?(u=+t,n):u},n.outerTickSize=function(t){return arguments.length?(i=+t,n):i},n.tickPadding=function(t){return arguments.length?(o=+t,n):o},n.tickSubdivide=function(){return arguments.length&&n},n};var Rl="bottom",Dl={top:1,right:1,bottom:1,left:1};Bo.svg.brush=function(){function 
n(i){i.each(function(){var i=Bo.select(this).style("pointer-events","all").style("-webkit-tap-highlight-color","rgba(0,0,0,0)").on("mousedown.brush",u).on("touchstart.brush",u),o=i.selectAll(".background").data([0]);o.enter().append("rect").attr("class","background").style("visibility","hidden").style("cursor","crosshair"),i.selectAll(".extent").data([0]).enter().append("rect").attr("class","extent").style("cursor","move");var a=i.selectAll(".resize").data(p,Et);a.exit().remove(),a.enter().append("g").attr("class",function(n){return"resize "+n}).style("cursor",function(n){return Pl[n]}).append("rect").attr("x",function(n){return/[ew]$/.test(n)?-3:null}).attr("y",function(n){return/^[ns]/.test(n)?-3:null}).attr("width",6).attr("height",6).style("visibility","hidden"),a.style("display",n.empty()?"none":null);var s,f=Bo.transition(i),h=Bo.transition(o);c&&(s=Pi(c),h.attr("x",s[0]).attr("width",s[1]-s[0]),e(f)),l&&(s=Pi(l),h.attr("y",s[0]).attr("height",s[1]-s[0]),r(f)),t(f)})}function t(n){n.selectAll(".resize").attr("transform",function(n){return"translate("+s[+/e$/.test(n)]+","+f[+/^s/.test(n)]+")"})}function e(n){n.select(".extent").attr("x",s[0]),n.selectAll(".extent,.n>rect,.s>rect").attr("width",s[1]-s[0])}function r(n){n.select(".extent").attr("y",f[0]),n.selectAll(".extent,.e>rect,.w>rect").attr("height",f[1]-f[0])}function u(){function u(){32==Bo.event.keyCode&&(C||(y=null,z[0]-=s[1],z[1]-=f[1],C=2),_())}function p(){32==Bo.event.keyCode&&2==C&&(z[0]+=s[1],z[1]+=f[1],C=0,_())}function v(){var n=Bo.mouse(M),u=!1;x&&(n[0]+=x[0],n[1]+=x[1]),C||(Bo.event.altKey?(y||(y=[(s[0]+s[1])/2,(f[0]+f[1])/2]),z[0]=s[+(n[0]p?(u=r,r=p):u=p),v[0]!=r||v[1]!=u?(e?o=null:i=null,v[0]=r,v[1]=u,!0):void 0}function 
m(){v(),S.style("pointer-events","all").selectAll(".resize").style("display",n.empty()?"none":null),Bo.select("body").style("cursor",null),L.on("mousemove.brush",null).on("mouseup.brush",null).on("touchmove.brush",null).on("touchend.brush",null).on("keydown.brush",null).on("keyup.brush",null),N(),w({type:"brushend"})}var y,x,M=this,b=Bo.select(Bo.event.target),w=a.of(M,arguments),S=Bo.select(M),k=b.datum(),E=!/^(n|s)$/.test(k)&&c,A=!/^(e|w)$/.test(k)&&l,C=b.classed("extent"),N=X(),z=Bo.mouse(M),L=Bo.select(Qo).on("keydown.brush",u).on("keyup.brush",p);if(Bo.event.changedTouches?L.on("touchmove.brush",v).on("touchend.brush",m):L.on("mousemove.brush",v).on("mouseup.brush",m),S.interrupt().selectAll("*").interrupt(),C)z[0]=s[0]-z[0],z[1]=f[0]-z[1];else if(k){var T=+/w$/.test(k),q=+/^n/.test(k);x=[s[1-T]-z[0],f[1-q]-z[1]],z[0]=s[T],z[1]=f[q]}else Bo.event.altKey&&(y=z.slice());S.style("pointer-events","none").selectAll(".resize").style("display",null),Bo.select("body").style("cursor",b.style("cursor")),w({type:"brushstart"}),v()}var i,o,a=w(n,"brushstart","brush","brushend"),c=null,l=null,s=[0,0],f=[0,0],h=!0,g=!0,p=Ul[0];return n.event=function(n){n.each(function(){var n=a.of(this,arguments),t={x:s,y:f,i:i,j:o},e=this.__chart__||t;this.__chart__=t,Cl?Bo.select(this).transition().each("start.brush",function(){i=e.i,o=e.j,s=e.x,f=e.y,n({type:"brushstart"})}).tween("brush:brush",function(){var e=mu(s,t.x),r=mu(f,t.y);return i=o=null,function(u){s=t.x=e(u),f=t.y=r(u),n({type:"brush",mode:"resize"})}}).each("end.brush",function(){i=t.i,o=t.j,n({type:"brush",mode:"resize"}),n({type:"brushend"})}):(n({type:"brushstart"}),n({type:"brush",mode:"resize"}),n({type:"brushend"}))})},n.x=function(t){return arguments.length?(c=t,p=Ul[!c<<1|!l],n):c},n.y=function(t){return arguments.length?(l=t,p=Ul[!c<<1|!l],n):l},n.clamp=function(t){return arguments.length?(c&&l?(h=!!t[0],g=!!t[1]):c?h=!!t:l&&(g=!!t),n):c&&l?[h,g]:c?h:l?g:null},n.extent=function(t){var e,r,u,a,h;return 
arguments.length?(c&&(e=t[0],r=t[1],l&&(e=e[0],r=r[0]),i=[e,r],c.invert&&(e=c(e),r=c(r)),e>r&&(h=e,e=r,r=h),(e!=s[0]||r!=s[1])&&(s=[e,r])),l&&(u=t[0],a=t[1],c&&(u=u[1],a=a[1]),o=[u,a],l.invert&&(u=l(u),a=l(a)),u>a&&(h=u,u=a,a=h),(u!=f[0]||a!=f[1])&&(f=[u,a])),n):(c&&(i?(e=i[0],r=i[1]):(e=s[0],r=s[1],c.invert&&(e=c.invert(e),r=c.invert(r)),e>r&&(h=e,e=r,r=h))),l&&(o?(u=o[0],a=o[1]):(u=f[0],a=f[1],l.invert&&(u=l.invert(u),a=l.invert(a)),u>a&&(h=u,u=a,a=h))),c&&l?[[e,u],[r,a]]:c?[e,r]:l&&[u,a])},n.clear=function(){return n.empty()||(s=[0,0],f=[0,0],i=o=null),n},n.empty=function(){return!!c&&s[0]==s[1]||!!l&&f[0]==f[1]},Bo.rebind(n,a,"on")};var Pl={n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},Ul=[["n","e","s","w","nw","ne","se","sw"],["e","w"],["n","s"],[]],jl=rc.format=lc.timeFormat,Fl=jl.utc,Hl=Fl("%Y-%m-%dT%H:%M:%S.%LZ");jl.iso=Date.prototype.toISOString&&+new Date("2000-01-01T00:00:00.000Z")?Io:Hl,Io.parse=function(n){var t=new Date(n);return isNaN(t)?null:t},Io.toString=Hl.toString,rc.second=Ft(function(n){return new uc(1e3*Math.floor(n/1e3))},function(n,t){n.setTime(n.getTime()+1e3*Math.floor(t))},function(n){return n.getSeconds()}),rc.seconds=rc.second.range,rc.seconds.utc=rc.second.utc.range,rc.minute=Ft(function(n){return new uc(6e4*Math.floor(n/6e4))},function(n,t){n.setTime(n.getTime()+6e4*Math.floor(t))},function(n){return n.getMinutes()}),rc.minutes=rc.minute.range,rc.minutes.utc=rc.minute.utc.range,rc.hour=Ft(function(n){var t=n.getTimezoneOffset()/60;return new uc(36e5*(Math.floor(n/36e5-t)+t))},function(n,t){n.setTime(n.getTime()+36e5*Math.floor(t))},function(n){return n.getHours()}),rc.hours=rc.hour.range,rc.hours.utc=rc.hour.utc.range,rc.month=Ft(function(n){return n=rc.day(n),n.setDate(1),n},function(n,t){n.setMonth(n.getMonth()+t)},function(n){return n.getMonth()}),rc.months=rc.month.range,rc.months.utc=rc.month.utc.range;var 
Ol=[1e3,5e3,15e3,3e4,6e4,3e5,9e5,18e5,36e5,108e5,216e5,432e5,864e5,1728e5,6048e5,2592e6,7776e6,31536e6],Yl=[[rc.second,1],[rc.second,5],[rc.second,15],[rc.second,30],[rc.minute,1],[rc.minute,5],[rc.minute,15],[rc.minute,30],[rc.hour,1],[rc.hour,3],[rc.hour,6],[rc.hour,12],[rc.day,1],[rc.day,2],[rc.week,1],[rc.month,1],[rc.month,3],[rc.year,1]],Il=jl.multi([[".%L",function(n){return n.getMilliseconds()}],[":%S",function(n){return n.getSeconds()}],["%I:%M",function(n){return n.getMinutes()}],["%I %p",function(n){return n.getHours()}],["%a %d",function(n){return n.getDay()&&1!=n.getDate()}],["%b %d",function(n){return 1!=n.getDate()}],["%B",function(n){return n.getMonth()}],["%Y",Ae]]),Zl={range:function(n,t,e){return Bo.range(Math.ceil(n/e)*e,+t,e).map(Vo)},floor:Et,ceil:Et};Yl.year=rc.year,rc.scale=function(){return Zo(Bo.scale.linear(),Yl,Il)};var Vl=Yl.map(function(n){return[n[0].utc,n[1]]}),Xl=Fl.multi([[".%L",function(n){return n.getUTCMilliseconds()}],[":%S",function(n){return n.getUTCSeconds()}],["%I:%M",function(n){return n.getUTCMinutes()}],["%I %p",function(n){return n.getUTCHours()}],["%a %d",function(n){return n.getUTCDay()&&1!=n.getUTCDate()}],["%b %d",function(n){return 1!=n.getUTCDate()}],["%B",function(n){return n.getUTCMonth()}],["%Y",Ae]]);Vl.year=rc.year.utc,rc.scale.utc=function(){return Zo(Bo.scale.linear(),Vl,Xl)},Bo.text=At(function(n){return n.responseText}),Bo.json=function(n,t){return Ct(n,"application/json",Xo,t)},Bo.html=function(n,t){return Ct(n,"text/html",$o,t)},Bo.xml=At(function(n){return n.responseXML}),"function"==typeof define&&define.amd?define(Bo):"object"==typeof module&&module.exports&&(module.exports=Bo),this.d3=Bo}();rally-0.9.1/rally/ui/templates/libs/nv.d3.1.1.15-beta.min.css0000664000567000056710000002113413073417716024365 0ustar jenkinsjenkins00000000000000/* https://github.com/novus/nvd3 Copyright (c) 2011-2014 Novus Partners, Inc. 
http://www.apache.org/licenses/LICENSE-2.0 */ .chartWrap{margin:0;padding:0;overflow:hidden}.nvtooltip.with-3d-shadow,.with-3d-shadow .nvtooltip{-moz-box-shadow:0 5px 10px rgba(0,0,0,.2);-webkit-box-shadow:0 5px 10px rgba(0,0,0,.2);box-shadow:0 5px 10px rgba(0,0,0,.2);-webkit-border-radius:6px;-moz-border-radius:6px;border-radius:6px}.nvtooltip{position:absolute;background-color:rgba(255,255,255,1);padding:1px;border:1px solid rgba(0,0,0,.2);z-index:10000;font-family:Arial;font-size:13px;text-align:left;pointer-events:none;white-space:nowrap;-webkit-touch-callout:none;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none}.nvtooltip.with-transitions,.with-transitions .nvtooltip{transition:opacity 250ms linear;-moz-transition:opacity 250ms linear;-webkit-transition:opacity 250ms linear;transition-delay:250ms;-moz-transition-delay:250ms;-webkit-transition-delay:250ms}.nvtooltip.x-nvtooltip,.nvtooltip.y-nvtooltip{padding:8px}.nvtooltip h3{margin:0;padding:4px 14px;line-height:18px;font-weight:400;background-color:rgba(247,247,247,.75);text-align:center;border-bottom:1px solid #ebebeb;-webkit-border-radius:5px 5px 0 0;-moz-border-radius:5px 5px 0 0;border-radius:5px 5px 0 0}.nvtooltip p{margin:0;padding:5px 14px;text-align:center}.nvtooltip span{display:inline-block;margin:2px 0}.nvtooltip table{margin:6px;border-spacing:0}.nvtooltip table td{padding:2px 9px 2px 0;vertical-align:middle}.nvtooltip table td.key{font-weight:400}.nvtooltip table td.value{text-align:right;font-weight:700}.nvtooltip table tr.highlight td{padding:1px 9px 1px 0;border-bottom-style:solid;border-bottom-width:1px;border-top-style:solid;border-top-width:1px}.nvtooltip table td.legend-color-guide div{width:8px;height:8px;vertical-align:middle}.nvtooltip 
.footer{padding:3px;text-align:center}.nvtooltip-pending-removal{position:absolute;pointer-events:none}svg{-webkit-touch-callout:none;-webkit-user-select:none;-khtml-user-select:none;-moz-user-select:none;-ms-user-select:none;user-select:none;display:block;width:100%;height:100%}svg text{font:400 12px Arial}svg .title{font:700 14px Arial}.nvd3 .nv-background{fill:#fff;fill-opacity:0}.nvd3.nv-noData{font-size:18px;font-weight:700}.nv-brush .extent{fill-opacity:.125;shape-rendering:crispEdges}.nvd3 .nv-legend .nv-series{cursor:pointer}.nvd3 .nv-legend .disabled circle{fill-opacity:0}.nvd3 .nv-axis{pointer-events:none}.nvd3 .nv-axis path{fill:none;stroke:#000;stroke-opacity:.75;shape-rendering:crispEdges}.nvd3 .nv-axis path.domain{stroke-opacity:.75}.nvd3 .nv-axis.nv-x path.domain{stroke-opacity:0}.nvd3 .nv-axis line{fill:none;stroke:#e5e5e5;shape-rendering:crispEdges}.nvd3 .nv-axis .zero line,.nvd3 .nv-axis line.zero{stroke-opacity:.75}.nvd3 .nv-axis .nv-axisMaxMin text{font-weight:700}.nvd3 .x .nv-axis .nv-axisMaxMin text,.nvd3 .x2 .nv-axis .nv-axisMaxMin text,.nvd3 .x3 .nv-axis .nv-axisMaxMin text{text-anchor:middle}.nv-brush .resize path{fill:#eee;stroke:#666}.nvd3 .nv-bars .negative rect{zfill:brown}.nvd3 .nv-bars rect{zfill:#4682b4;fill-opacity:.75;transition:fill-opacity 250ms linear;-moz-transition:fill-opacity 250ms linear;-webkit-transition:fill-opacity 250ms linear}.nvd3 .nv-bars rect.hover{fill-opacity:1}.nvd3 .nv-bars .hover rect{fill:#add8e6}.nvd3 .nv-bars text{fill:rgba(0,0,0,0)}.nvd3 .nv-bars .hover text{fill:rgba(0,0,0,1)}.nvd3 .nv-multibar .nv-groups rect,.nvd3 .nv-multibarHorizontal .nv-groups rect,.nvd3 .nv-discretebar .nv-groups rect{stroke-opacity:0;transition:fill-opacity 250ms linear;-moz-transition:fill-opacity 250ms linear;-webkit-transition:fill-opacity 250ms linear}.nvd3 .nv-multibar .nv-groups rect:hover,.nvd3 .nv-multibarHorizontal .nv-groups rect:hover,.nvd3 .nv-discretebar .nv-groups rect:hover{fill-opacity:1}.nvd3 .nv-discretebar 
.nv-groups text,.nvd3 .nv-multibarHorizontal .nv-groups text{font-weight:700;fill:rgba(0,0,0,1);stroke:rgba(0,0,0,0)}.nvd3.nv-pie path{stroke-opacity:0;transition:fill-opacity 250ms linear,stroke-width 250ms linear,stroke-opacity 250ms linear;-moz-transition:fill-opacity 250ms linear,stroke-width 250ms linear,stroke-opacity 250ms linear;-webkit-transition:fill-opacity 250ms linear,stroke-width 250ms linear,stroke-opacity 250ms linear}.nvd3.nv-pie .nv-slice text{stroke:#000;stroke-width:0}.nvd3.nv-pie path{stroke:#fff;stroke-width:1px;stroke-opacity:1}.nvd3.nv-pie .hover path{fill-opacity:.7}.nvd3.nv-pie .nv-label{pointer-events:none}.nvd3.nv-pie .nv-label rect{fill-opacity:0;stroke-opacity:0}.nvd3 .nv-groups path.nv-line{fill:none;stroke-width:1.5px}.nvd3 .nv-groups path.nv-line.nv-thin-line{stroke-width:1px}.nvd3 .nv-groups path.nv-area{stroke:none}.nvd3 .nv-line.hover path{stroke-width:6px}.nvd3.nv-line .nvd3.nv-scatter .nv-groups .nv-point{fill-opacity:0;stroke-opacity:0}.nvd3.nv-scatter.nv-single-point .nv-groups .nv-point{fill-opacity:.5!important;stroke-opacity:.5!important}.with-transitions .nvd3 .nv-groups .nv-point{transition:stroke-width 250ms linear,stroke-opacity 250ms linear;-moz-transition:stroke-width 250ms linear,stroke-opacity 250ms linear;-webkit-transition:stroke-width 250ms linear,stroke-opacity 250ms linear}.nvd3.nv-scatter .nv-groups .nv-point.hover,.nvd3 .nv-groups .nv-point.hover{stroke-width:7px;fill-opacity:.95!important;stroke-opacity:.95!important}.nvd3 .nv-point-paths path{stroke:#aaa;stroke-opacity:0;fill:#eee;fill-opacity:0}.nvd3 .nv-indexLine{cursor:ew-resize}.nvd3 .nv-distribution{pointer-events:none}.nvd3 .nv-groups .nv-point.hover{stroke-width:20px;stroke-opacity:.5}.nvd3 .nv-scatter .nv-point.hover{fill-opacity:1}.nvd3.nv-stackedarea path.nv-area{fill-opacity:.7;stroke-opacity:0;transition:fill-opacity 250ms linear,stroke-opacity 250ms linear;-moz-transition:fill-opacity 250ms linear,stroke-opacity 250ms 
linear;-webkit-transition:fill-opacity 250ms linear,stroke-opacity 250ms linear}.nvd3.nv-stackedarea path.nv-area.hover{fill-opacity:.9}.nvd3.nv-stackedarea .nv-groups .nv-point{stroke-opacity:0;fill-opacity:0}.nvd3.nv-linePlusBar .nv-bar rect{fill-opacity:.75}.nvd3.nv-linePlusBar .nv-bar rect:hover{fill-opacity:1}.nvd3.nv-bullet{font:10px sans-serif}.nvd3.nv-bullet .nv-measure{fill-opacity:.8}.nvd3.nv-bullet .nv-measure:hover{fill-opacity:1}.nvd3.nv-bullet .nv-marker{stroke:#000;stroke-width:2px}.nvd3.nv-bullet .nv-markerTriangle{stroke:#000;fill:#fff;stroke-width:1.5px}.nvd3.nv-bullet .nv-tick line{stroke:#666;stroke-width:.5px}.nvd3.nv-bullet .nv-range.nv-s0{fill:#eee}.nvd3.nv-bullet .nv-range.nv-s1{fill:#ddd}.nvd3.nv-bullet .nv-range.nv-s2{fill:#ccc}.nvd3.nv-bullet .nv-title{font-size:14px;font-weight:700}.nvd3.nv-bullet .nv-subtitle{fill:#999}.nvd3.nv-bullet .nv-range{fill:#bababa;fill-opacity:.4}.nvd3.nv-bullet .nv-range:hover{fill-opacity:.7}.nvd3.nv-sparkline path{fill:none}.nvd3.nv-sparklineplus g.nv-hoverValue{pointer-events:none}.nvd3.nv-sparklineplus .nv-hoverValue line{stroke:#333;stroke-width:1.5px}.nvd3.nv-sparklineplus,.nvd3.nv-sparklineplus g{pointer-events:all}.nvd3 .nv-hoverArea{fill-opacity:0;stroke-opacity:0}.nvd3.nv-sparklineplus .nv-xValue,.nvd3.nv-sparklineplus .nv-yValue{stroke-width:0;font-size:.9em;font-weight:400}.nvd3.nv-sparklineplus .nv-yValue{stroke:#f66}.nvd3.nv-sparklineplus .nv-maxValue{stroke:#2ca02c;fill:#2ca02c}.nvd3.nv-sparklineplus .nv-minValue{stroke:#d62728;fill:#d62728}.nvd3.nv-sparklineplus .nv-currentValue{font-weight:700;font-size:1.1em}.nvd3.nv-ohlcBar .nv-ticks .nv-tick{stroke-width:2px}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.hover{stroke-width:4px}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.positive{stroke:#2ca02c}.nvd3.nv-ohlcBar .nv-ticks .nv-tick.negative{stroke:#d62728}.nvd3.nv-historicalStockChart .nv-axis .nv-axislabel{font-weight:700}.nvd3.nv-historicalStockChart .nv-dragTarget{fill-opacity:0;stroke:none;cursor:move}.nvd3 
.nv-brush .extent{fill-opacity:0!important}.nvd3 .nv-brushBackground rect{stroke:#000;stroke-width:.4;fill:#fff;fill-opacity:.7}.nvd3.nv-indentedtree .name{margin-left:5px}.nvd3.nv-indentedtree .clickable{color:#08C;cursor:pointer}.nvd3.nv-indentedtree span.clickable:hover{color:#005580;text-decoration:underline}.nvd3.nv-indentedtree .nv-childrenCount{display:inline-block;margin-left:5px}.nvd3.nv-indentedtree .nv-treeicon{cursor:pointer}.nvd3.nv-indentedtree .nv-treeicon.nv-folded{cursor:pointer}.nvd3 .background path{fill:none;stroke:#ccc;stroke-opacity:.4;shape-rendering:crispEdges}.nvd3 .foreground path{fill:none;stroke:#4682b4;stroke-opacity:.7}.nvd3 .brush .extent{fill-opacity:.3;stroke:#fff;shape-rendering:crispEdges}.nvd3 .axis line,.axis path{fill:none;stroke:#000;shape-rendering:crispEdges}.nvd3 .axis text{text-shadow:0 1px 0 #fff}.nvd3 .nv-interactiveGuideLine{pointer-events:none}.nvd3 line.nv-guideline{stroke:#ccc}rally-0.9.1/rally/ui/templates/libs/nv.d3.1.1.15-beta.min.js0000664000567000056710000056002413073417716024217 0ustar jenkinsjenkins00000000000000/* https://github.com/novus/nvd3 Copyright (c) 2011-2014 Novus Partners, Inc. 
http://www.apache.org/licenses/LICENSE-2.0 */ (function(){function t(e,t){return(new Date(t,e+1,0)).getDate()}function n(e,t,n){return function(r,i,s){var o=e(r),u=[];o1)while(op||r>d||d3.event.relatedTarget&&d3.event.relatedTarget.ownerSVGElement===undefined||a){if(l&&d3.event.relatedTarget&&d3.event.relatedTarget.ownerSVGElement===undefined&&d3.event.relatedTarget.className.match(t.nvPointerEventsClass))return;u.elementMouseout({mouseX:n,mouseY:r}),c.renderGuideLine(null);return}var f=s.invert(n);u.elementMousemove({mouseX:n,mouseY:r,pointXValue:f}),d3.event.type==="dblclick"&&u.elementDblclick({mouseX:n,mouseY:r,pointXValue:f})}var h=d3.select(this),p=n||960,d=r||400,v=h.selectAll("g.nv-wrap.nv-interactiveLineLayer").data([o]),m=v.enter().append("g").attr("class"," nv-wrap nv-interactiveLineLayer");m.append("g").attr("class","nv-interactiveGuideLine");if(!f)return;f.on("mousemove",g,!0).on("mouseout",g,!0).on("dblclick",g),c.renderGuideLine=function(t){if(!a)return;var n=v.select(".nv-interactiveGuideLine").selectAll("line").data(t!=null?[e.utils.NaNtoZero(t)]:[],String);n.enter().append("line").attr("class","nv-guideline").attr("x1",function(e){return e}).attr("x2",function(e){return e}).attr("y1",d).attr("y2",0),n.exit().remove()}})}var t=e.models.tooltip(),n=null,r=null,i={left:0,top:0},s=d3.scale.linear(),o=d3.scale.linear(),u=d3.dispatch("elementMousemove","elementMouseout","elementDblclick"),a=!0,f=null,l=navigator.userAgent.indexOf("MSIE")!==-1;return c.dispatch=u,c.tooltip=t,c.margin=function(e){return arguments.length?(i.top=typeof e.top!="undefined"?e.top:i.top,i.left=typeof e.left!="undefined"?e.left:i.left,c):i},c.width=function(e){return arguments.length?(n=e,c):n},c.height=function(e){return arguments.length?(r=e,c):r},c.xScale=function(e){return arguments.length?(s=e,c):s},c.showGuideLine=function(e){return arguments.length?(a=e,c):a},c.svgContainer=function(e){return arguments.length?(f=e,c):f},c},e.interactiveBisect=function(e,t,n){"use 
strict";if(!e instanceof Array)return null;typeof n!="function"&&(n=function(e,t){return e.x});var r=d3.bisector(n).left,i=d3.max([0,r(e,t)-1]),s=n(e[i],i);typeof s=="undefined"&&(s=i);if(s===t)return i;var o=d3.min([i+1,e.length-1]),u=n(e[o],o);return typeof u=="undefined"&&(u=o),Math.abs(u-t)>=Math.abs(s-t)?i:o},e.nearestValueIndex=function(e,t,n){"use strict";var r=Infinity,i=null;return e.forEach(function(e,s){var o=Math.abs(t-e);o<=r&&oT.height?0:x}v.top=Math.abs(x-S.top),v.left=Math.abs(E.left-S.left)}t+=a.offsetLeft+v.left-2*a.scrollLeft,u+=a.offsetTop+v.top-2*a.scrollTop}return s&&s>0&&(u=Math.floor(u/s)*s),e.tooltip.calcTooltipPosition([t,u],r,i,h),w}var t=null,n=null,r="w",i=50,s=25,o=null,u=null,a=null,f=null,l={left:null,top:null},c=!0,h="nvtooltip-"+Math.floor(Math.random()*1e5),p="nv-pointer-events-none",d=function(e,t){return e},v=function(e){return e},m=function(e){if(t!=null)return t;if(e==null)return"";var n=d3.select(document.createElement("table")),r=n.selectAll("thead").data([e]).enter().append("thead");r.append("tr").append("td").attr("colspan",3).append("strong").classed("x-value",!0).html(v(e.value));var i=n.selectAll("tbody").data([e]).enter().append("tbody"),s=i.selectAll("tr").data(function(e){return e.series}).enter().append("tr").classed("highlight",function(e){return e.highlight});s.append("td").classed("legend-color-guide",!0).append("div").style("background-color",function(e){return e.color}),s.append("td").classed("key",!0).html(function(e){return e.key}),s.append("td").classed("value",!0).html(function(e,t){return d(e.value,t)}),s.selectAll("td").each(function(e){if(e.highlight){var t=d3.scale.linear().domain([0,1]).range(["#fff",e.color]),n=.6;d3.select(this).style("border-bottom-color",t(n)).style("border-top-color",t(n))}});var o=n.node().outerHTML;return e.footer!==undefined&&(o+=""),o},g=function(e){return e&&e.series&&e.series.length>0?!0:!1};return w.nvPointerEventsClass=p,w.content=function(e){return 
arguments.length?(t=e,w):t},w.tooltipElem=function(){return f},w.contentGenerator=function(e){return arguments.length?(typeof e=="function"&&(m=e),w):m},w.data=function(e){return arguments.length?(n=e,w):n},w.gravity=function(e){return arguments.length?(r=e,w):r},w.distance=function(e){return arguments.length?(i=e,w):i},w.snapDistance=function(e){return arguments.length?(s=e,w):s},w.classes=function(e){return arguments.length?(u=e,w):u},w.chartContainer=function(e){return arguments.length?(a=e,w):a},w.position=function(e){return arguments.length?(l.left=typeof e.left!="undefined"?e.left:l.left,l.top=typeof e.top!="undefined"?e.top:l.top,w):l},w.fixedTop=function(e){return arguments.length?(o=e,w):o},w.enabled=function(e){return arguments.length?(c=e,w):c},w.valueFormatter=function(e){return arguments.length?(typeof e=="function"&&(d=e),w):d},w.headerFormatter=function(e){return arguments.length?(typeof e=="function"&&(v=e),w):v},w.id=function(){return h},w},e.tooltip.show=function(t,n,r,i,s,o){var u=document.createElement("div");u.className="nvtooltip "+(o?o:"xy-tooltip");var a=s;if(!s||s.tagName.match(/g|svg/i))a=document.getElementsByTagName("body")[0];u.style.left=0,u.style.top=0,u.style.opacity=0,u.innerHTML=n,a.appendChild(u),s&&(t[0]=t[0]-s.scrollLeft,t[1]=t[1]-s.scrollTop),e.tooltip.calcTooltipPosition(t,r,i,u)},e.tooltip.findFirstNonSVGParent=function(e){while(e.tagName.match(/^g|svg$/i)!==null)e=e.parentNode;return e},e.tooltip.findTotalOffsetTop=function(e,t){var n=t;do isNaN(e.offsetTop)||(n+=e.offsetTop);while(e=e.offsetParent);return n},e.tooltip.findTotalOffsetLeft=function(e,t){var n=t;do isNaN(e.offsetLeft)||(n+=e.offsetLeft);while(e=e.offsetParent);return n},e.tooltip.calcTooltipPosition=function(t,n,r,i){var 
s=parseInt(i.offsetHeight),o=parseInt(i.offsetWidth),u=e.utils.windowSize().width,a=e.utils.windowSize().height,f=window.pageYOffset,l=window.pageXOffset,c,h;a=window.innerWidth>=document.body.scrollWidth?a:a-16,u=window.innerHeight>=document.body.scrollHeight?u:u-16,n=n||"s",r=r||20;var p=function(t){return e.tooltip.findTotalOffsetTop(t,h)},d=function(t){return e.tooltip.findTotalOffsetLeft(t,c)};switch(n){case"e":c=t[0]-o-r,h=t[1]-s/2;var v=d(i),m=p(i);vl?t[0]+r:l-v+c),mf+a&&(h=f+a-m+h-s);break;case"w":c=t[0]+r,h=t[1]-s/2;var v=d(i),m=p(i);v+o>u&&(c=t[0]-o-r),mf+a&&(h=f+a-m+h-s);break;case"n":c=t[0]-o/2-5,h=t[1]+r;var v=d(i),m=p(i);vu&&(c=c-o/2+5),m+s>f+a&&(h=f+a-m+h-s);break;case"s":c=t[0]-o/2,h=t[1]-s-r;var v=d(i),m=p(i);vu&&(c=c-o/2+5),f>m&&(h=f);break;case"none":c=t[0],h=t[1]-r;var v=d(i),m=p(i)}return i.style.left=c+"px",i.style.top=h+"px",i.style.opacity=1,i.style.position="absolute",i},e.tooltip.cleanup=function(){var e=document.getElementsByClassName("nvtooltip"),t=[];while(e.length)t.push(e[0]),e[0].style.transitionDelay="0 !important",e[0].style.opacity=0,e[0].className="nvtooltip-pending-removal";setTimeout(function(){while(t.length){var e=t.pop();e.parentNode.removeChild(e)}},500)}}(),e.utils.windowSize=function(){var e={width:640,height:480};return document.body&&document.body.offsetWidth&&(e.width=document.body.offsetWidth,e.height=document.body.offsetHeight),document.compatMode=="CSS1Compat"&&document.documentElement&&document.documentElement.offsetWidth&&(e.width=document.documentElement.offsetWidth,e.height=document.documentElement.offsetHeight),window.innerWidth&&window.innerHeight&&(e.width=window.innerWidth,e.height=window.innerHeight),e},e.utils.windowResize=function(e){if(e===undefined)return;var t=window.onresize;window.onresize=function(n){typeof t=="function"&&t(n),e(n)}},e.utils.getColor=function(t){return arguments.length?Object.prototype.toString.call(t)==="[object Array]"?function(e,n){return 
e.color||t[n%t.length]}:t:e.utils.defaultColor()},e.utils.defaultColor=function(){var e=d3.scale.category20().range();return function(t,n){return t.color||e[n%e.length]}},e.utils.customTheme=function(e,t,n){t=t||function(e){return e.key},n=n||d3.scale.category20().range();var r=n.length;return function(i,s){var o=t(i);return r||(r=n.length),typeof e[o]!="undefined"?typeof e[o]=="function"?e[o]():e[o]:n[--r]}},e.utils.pjax=function(t,n){function r(r){d3.html(r,function(r){var i=d3.select(n).node();i.parentNode.replaceChild(d3.select(r).select(n).node(),i),e.utils.pjax(t,n)})}d3.selectAll(t).on("click",function(){history.pushState(this.href,this.textContent,this.href),r(this.href),d3.event.preventDefault()}),d3.select(window).on("popstate",function(){d3.event.state&&r(d3.event.state)})},e.utils.calcApproxTextWidth=function(e){if(typeof e.style=="function"&&typeof e.text=="function"){var t=parseInt(e.style("font-size").replace("px","")),n=e.text().length;return n*t*.5}return 0},e.utils.NaNtoZero=function(e){return typeof e!="number"||isNaN(e)||e===null||e===Infinity?0:e},e.utils.optionsFunc=function(e){return e&&d3.map(e).forEach(function(e,t){typeof this[e]=="function"&&this[e](t)}.bind(this)),this},e.models.axis=function(){"use strict";function m(e){return e.each(function(e){var i=d3.select(this),m=i.selectAll("g.nv-wrap.nv-axis").data([e]),g=m.enter().append("g").attr("class","nvd3 nv-wrap nv-axis"),y=g.append("g"),b=m.select("g");p!==null?t.ticks(p):(t.orient()=="top"||t.orient()=="bottom")&&t.ticks(Math.abs(s.range()[1]-s.range()[0])/100),b.transition().call(t),v=v||t.scale();var w=t.tickFormat();w==null&&(w=v.tickFormat());var E=b.selectAll("text.nv-axislabel").data([o||null]);E.exit().remove();switch(t.orient()){case"top":E.enter().append("text").attr("class","nv-axislabel");var S=s.range().length==2?s.range()[1]:s.range()[s.range().length-1]+(s.range()[1]-s.range()[0]);E.attr("text-anchor","middle").attr("y",0).attr("x",S/2);if(u){var 
x=m.selectAll("g.nv-axisMaxMin").data(s.domain());x.enter().append("g").attr("class","nv-axisMaxMin").append("text"),x.exit().remove(),x.attr("transform",function(e,t){return"translate("+s(e)+",0)"}).select("text").attr("dy","-0.5em").attr("y",-t.tickPadding()).attr("text-anchor","middle").text(function(e,t){var n=w(e);return(""+n).match("NaN")?"":n}),x.transition().attr("transform",function(e,t){return"translate("+s.range()[t]+",0)"})}break;case"bottom":var T=36,N=30,C=b.selectAll("g").select("text");if(f%360){C.each(function(e,t){var n=this.getBBox().width;n>N&&(N=n)});var k=Math.abs(Math.sin(f*Math.PI/180)),T=(k?k*N:N)+30;C.attr("transform",function(e,t,n){return"rotate("+f+" 0,0)"}).style("text-anchor",f%360>0?"start":"end")}E.enter().append("text").attr("class","nv-axislabel");var S=s.range().length==2?s.range()[1]:s.range()[s.range().length-1]+(s.range()[1]-s.range()[0]);E.attr("text-anchor","middle").attr("y",T).attr("x",S/2);if(u){var x=m.selectAll("g.nv-axisMaxMin").data([s.domain()[0],s.domain()[s.domain().length-1]]);x.enter().append("g").attr("class","nv-axisMaxMin").append("text"),x.exit().remove(),x.attr("transform",function(e,t){return"translate("+(s(e)+(h?s.rangeBand()/2:0))+",0)"}).select("text").attr("dy",".71em").attr("y",t.tickPadding()).attr("transform",function(e,t,n){return"rotate("+f+" 0,0)"}).style("text-anchor",f?f%360>0?"start":"end":"middle").text(function(e,t){var n=w(e);return(""+n).match("NaN")?"":n}),x.transition().attr("transform",function(e,t){return"translate("+(s(e)+(h?s.rangeBand()/2:0))+",0)"})}c&&C.attr("transform",function(e,t){return"translate(0,"+(t%2==0?"0":"12")+")"});break;case"right":E.enter().append("text").attr("class","nv-axislabel"),E.style("text-anchor",l?"middle":"begin").attr("transform",l?"rotate(90)":"").attr("y",l?-Math.max(n.right,r)+12:-10).attr("x",l?s.range()[0]/2:t.tickPadding());if(u){var 
x=m.selectAll("g.nv-axisMaxMin").data(s.domain());x.enter().append("g").attr("class","nv-axisMaxMin").append("text").style("opacity",0),x.exit().remove(),x.attr("transform",function(e,t){return"translate(0,"+s(e)+")"}).select("text").attr("dy",".32em").attr("y",0).attr("x",t.tickPadding()).style("text-anchor","start").text(function(e,t){var n=w(e);return(""+n).match("NaN")?"":n}),x.transition().attr("transform",function(e,t){return"translate(0,"+s.range()[t]+")"}).select("text").style("opacity",1)}break;case"left":E.enter().append("text").attr("class","nv-axislabel"),E.style("text-anchor",l?"middle":"end").attr("transform",l?"rotate(-90)":"").attr("y",l?-Math.max(n.left,r)+d:-10).attr("x",l?-s.range()[0]/2:-t.tickPadding());if(u){var x=m.selectAll("g.nv-axisMaxMin").data(s.domain());x.enter().append("g").attr("class","nv-axisMaxMin").append("text").style("opacity",0),x.exit().remove(),x.attr("transform",function(e,t){return"translate(0,"+v(e)+")"}).select("text").attr("dy",".32em").attr("y",0).attr("x",-t.tickPadding()).attr("text-anchor","end").text(function(e,t){var n=w(e);return(""+n).match("NaN")?"":n}),x.transition().attr("transform",function(e,t){return"translate(0,"+s.range()[t]+")"}).select("text").style("opacity",1)}}E.text(function(e){return e}),u&&(t.orient()==="left"||t.orient()==="right")&&(b.selectAll("g").each(function(e,t){d3.select(this).select("text").attr("opacity",1);if(s(e)s.range()[0]-10)(e>1e-10||e<-1e-10)&&d3.select(this).attr("opacity",0),d3.select(this).select("text").attr("opacity",0)}),s.domain()[0]==s.domain()[1]&&s.domain()[0]==0&&m.selectAll("g.nv-axisMaxMin").style("opacity",function(e,t){return t?0:1}));if(u&&(t.orient()==="top"||t.orient()==="bottom")){var 
L=[];m.selectAll("g.nv-axisMaxMin").each(function(e,t){try{t?L.push(s(e)-this.getBBox().width-4):L.push(s(e)+this.getBBox().width+4)}catch(n){t?L.push(s(e)-4):L.push(s(e)+4)}}),b.selectAll("g").each(function(e,t){if(s(e)L[1])e>1e-10||e<-1e-10?d3.select(this).remove():d3.select(this).select("text").remove()})}a&&b.selectAll(".tick").filter(function(e){return!parseFloat(Math.round(e.__data__*1e5)/1e6)&&e.__data__!==undefined}).classed("zero",!0),v=s.copy()}),m}var t=d3.svg.axis(),n={top:0,right:0,bottom:0,left:0},r=75,i=60,s=d3.scale.linear(),o=null,u=!0,a=!0,f=0,l=!0,c=!1,h=!1,p=null,d=12;t.scale(s).orient("bottom").tickFormat(function(e){return e});var v;return m.axis=t,d3.rebind(m,t,"orient","tickValues","tickSubdivide","tickSize","tickPadding","tickFormat"),d3.rebind(m,s,"domain","range","rangeBand","rangeBands"),m.options=e.utils.optionsFunc.bind(m),m.margin=function(e){return arguments.length?(n.top=typeof e.top!="undefined"?e.top:n.top,n.right=typeof e.right!="undefined"?e.right:n.right,n.bottom=typeof e.bottom!="undefined"?e.bottom:n.bottom,n.left=typeof e.left!="undefined"?e.left:n.left,m):n},m.width=function(e){return arguments.length?(r=e,m):r},m.ticks=function(e){return arguments.length?(p=e,m):p},m.height=function(e){return arguments.length?(i=e,m):i},m.axisLabel=function(e){return arguments.length?(o=e,m):o},m.showMaxMin=function(e){return arguments.length?(u=e,m):u},m.highlightZero=function(e){return arguments.length?(a=e,m):a},m.scale=function(e){return arguments.length?(s=e,t.scale(s),h=typeof s.rangeBands=="function",d3.rebind(m,s,"domain","range","rangeBand","rangeBands"),m):s},m.rotateYLabel=function(e){return arguments.length?(l=e,m):l},m.rotateLabels=function(e){return arguments.length?(f=e,m):f},m.staggerLabels=function(e){return arguments.length?(c=e,m):c},m.axisLabelDistance=function(e){return arguments.length?(d=e,m):d},m},e.models.bullet=function(){"use strict";function m(e){return e.each(function(e,n){var 
p=c-t.left-t.right,m=h-t.top-t.bottom,g=d3.select(this),y=i.call(this,e,n).slice().sort(d3.descending),b=s.call(this,e,n).slice().sort(d3.descending),w=o.call(this,e,n).slice().sort(d3.descending),E=u.call(this,e,n).slice(),S=a.call(this,e,n).slice(),x=f.call(this,e,n).slice(),T=d3.scale.linear().domain(d3.extent(d3.merge([l,y]))).range(r?[p,0]:[0,p]),N=this.__chart__||d3.scale.linear().domain([0,Infinity]).range(T.range());this.__chart__=T;var C=d3.min(y),k=d3.max(y),L=y[1],A=g.selectAll("g.nv-wrap.nv-bullet").data([e]),O=A.enter().append("g").attr("class","nvd3 nv-wrap nv-bullet"),M=O.append("g"),_=A.select("g");M.append("rect").attr("class","nv-range nv-rangeMax"),M.append("rect").attr("class","nv-range nv-rangeAvg"),M.append("rect").attr("class","nv-range nv-rangeMin"),M.append("rect").attr("class","nv-measure"),M.append("path").attr("class","nv-markerTriangle"),A.attr("transform","translate("+t.left+","+t.top+")");var D=function(e){return Math.abs(N(e)-N(0))},P=function(e){return Math.abs(T(e)-T(0))},H=function(e){return e<0?N(e):N(0)},B=function(e){return e<0?T(e):T(0)};_.select("rect.nv-rangeMax").attr("height",m).attr("width",P(k>0?k:C)).attr("x",B(k>0?k:C)).datum(k>0?k:C),_.select("rect.nv-rangeAvg").attr("height",m).attr("width",P(L)).attr("x",B(L)).datum(L),_.select("rect.nv-rangeMin").attr("height",m).attr("width",P(k)).attr("x",B(k)).attr("width",P(k>0?C:k)).attr("x",B(k>0?C:k)).datum(k>0?C:k),_.select("rect.nv-measure").style("fill",d).attr("height",m/3).attr("y",m/3).attr("width",w<0?T(0)-T(w[0]):T(w[0])-T(0)).attr("x",B(w)).on("mouseover",function(){v.elementMouseover({value:w[0],label:x[0]||"Current",pos:[T(w[0]),m/2]})}).on("mouseout",function(){v.elementMouseout({value:w[0],label:x[0]||"Current"})});var j=m/6;b[0]?_.selectAll("path.nv-markerTriangle").attr("transform",function(e){return"translate("+T(b[0])+","+m/2+")"}).attr("d","M0,"+j+"L"+j+","+ -j+" "+ -j+","+ 
/* Tail of e.models.bullet's render loop (the bullet model's definition begins
   before this chunk, so it is shown only partially here): draws min/avg/max
   range rects, the measure bar and the previous-value marker triangle, and
   wires elementMouseover/elementMouseout dispatch events.
   NOTE(review): this file is minified nvd3 (nv.d3.min.js) bundled with the
   rally HTML report; the line breaks in this dump look like extraction
   artifacts rather than real source lines. */
-j+"Z").on("mouseover",function(){v.elementMouseover({value:b[0],label:S[0]||"Previous",pos:[T(b[0]),m/2]})}).on("mouseout",function(){v.elementMouseout({value:b[0],label:S[0]||"Previous"})}):_.selectAll("path.nv-markerTriangle").remove(),A.selectAll(".nv-range").on("mouseover",function(e,t){var n=E[t]||(t?t==1?"Mean":"Minimum":"Maximum");v.elementMouseover({value:e,label:n,pos:[T(e),m/2]})}).on("mouseout",function(e,t){var n=E[t]||(t?t==1?"Mean":"Minimum":"Maximum");v.elementMouseout({value:e,label:n})})}),m}var t={top:0,right:0,bottom:0,left:0},n="left",r=!1,i=function(e){return e.ranges},s=function(e){return e.markers},o=function(e){return e.measures},u=function(e){return e.rangeLabels?e.rangeLabels:[]},a=function(e){return e.markerLabels?e.markerLabels:[]},f=function(e){return e.measureLabels?e.measureLabels:[]},l=[0],c=380,h=30,p=null,d=e.utils.getColor(["#1f77b4"]),v=d3.dispatch("elementMouseover","elementMouseout");return m.dispatch=v,m.options=e.utils.optionsFunc.bind(m),m.orient=function(e){return arguments.length?(n=e,r=n=="right"||n=="bottom",m):n},m.ranges=function(e){return arguments.length?(i=e,m):i},m.markers=function(e){return arguments.length?(s=e,m):s},m.measures=function(e){return arguments.length?(o=e,m):o},m.forceX=function(e){return arguments.length?(l=e,m):l},m.width=function(e){return arguments.length?(c=e,m):c},m.height=function(e){return arguments.length?(h=e,m):h},m.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,m):t},m.tickFormat=function(e){return arguments.length?(p=e,m):p},m.color=function(t){return arguments.length?(d=e.utils.getColor(t),m):d},m},e.models.bulletChart=function(){"use strict";function m(e){return e.each(function(n,h){var 
/* e.models.bulletChart: chart wrapper around e.models.bullet — adds a title/
   subtitle block, axis ticks, a "No Data Available." fallback message and
   tooltip show/hide dispatch; exposes the usual chainable option setters. */
g=d3.select(this),y=(a||parseInt(g.style("width"))||960)-i.left-i.right,b=f-i.top-i.bottom,w=this;m.update=function(){m(e)},m.container=this;if(!n||!s.call(this,n,h)){var E=g.selectAll(".nv-noData").data([p]);return E.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),E.attr("x",i.left+y/2).attr("y",18+i.top+b/2).text(function(e){return e}),m}g.selectAll(".nv-noData").remove();var S=s.call(this,n,h).slice().sort(d3.descending),x=o.call(this,n,h).slice().sort(d3.descending),T=u.call(this,n,h).slice().sort(d3.descending),N=g.selectAll("g.nv-wrap.nv-bulletChart").data([n]),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-bulletChart"),k=C.append("g"),L=N.select("g");k.append("g").attr("class","nv-bulletWrap"),k.append("g").attr("class","nv-titles"),N.attr("transform","translate("+i.left+","+i.top+")");var A=d3.scale.linear().domain([0,Math.max(S[0],x[0],T[0])]).range(r?[y,0]:[0,y]),O=this.__chart__||d3.scale.linear().domain([0,Infinity]).range(A.range());this.__chart__=A;var M=function(e){return Math.abs(O(e)-O(0))},_=function(e){return Math.abs(A(e)-A(0))},D=k.select(".nv-titles").append("g").attr("text-anchor","end").attr("transform","translate(-6,"+(f-i.top-i.bottom)/2+")");D.append("text").attr("class","nv-title").text(function(e){return e.title}),D.append("text").attr("class","nv-subtitle").attr("dy","1em").text(function(e){return e.subtitle}),t.width(y).height(b);var P=L.select(".nv-bulletWrap");d3.transition(P).call(t);var H=l||A.tickFormat(y/100),B=L.selectAll("g.nv-tick").data(A.ticks(y/50),function(e){return this.textContent||H(e)}),j=B.enter().append("g").attr("class","nv-tick").attr("transform",function(e){return"translate("+O(e)+",0)"}).style("opacity",1e-6);j.append("line").attr("y1",b).attr("y2",b*7/6),j.append("text").attr("text-anchor","middle").attr("dy","1em").attr("y",b*7/6).text(H);var 
/* Tick enter/update/exit transitions and tooltipShow wiring, then the
   bulletChart configuration state and chainable option methods.
   NOTE(review): the tooltip-content template below
   (h=function(e,t,n,r,i){return"...") appears truncated by extraction —
   upstream nvd3 wraps the label/value in <h3>/<p> markup at this point, which
   would explain the unterminated string split across the next lines; verify
   against the original nv.d3.min.js before editing. */
F=d3.transition(B).attr("transform",function(e){return"translate("+A(e)+",0)"}).style("opacity",1);F.select("line").attr("y1",b).attr("y2",b*7/6),F.select("text").attr("y",b*7/6),d3.transition(B.exit()).attr("transform",function(e){return"translate("+A(e)+",0)"}).style("opacity",1e-6).remove(),d.on("tooltipShow",function(e){e.key=n.title,c&&v(e,w.parentNode)})}),d3.timer.flush(),m}var t=e.models.bullet(),n="left",r=!1,i={top:5,right:40,bottom:20,left:120},s=function(e){return e.ranges},o=function(e){return e.markers},u=function(e){return e.measures},a=null,f=55,l=null,c=!0,h=function(e,t,n,r,i){return"

"+t+"

"+"

"+n+"

"},p="No Data Available.",d=d3.dispatch("tooltipShow","tooltipHide"),v=function(t,n){var r=t.pos[0]+(n.offsetLeft||0)+i.left,s=t.pos[1]+(n.offsetTop||0)+i.top,o=h(t.key,t.label,t.value,t,m);e.tooltip.show([r,s],o,t.value<0?"e":"w",null,n)};return t.dispatch.on("elementMouseover.tooltip",function(e){d.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){d.tooltipHide(e)}),d.on("tooltipHide",function(){c&&e.tooltip.cleanup()}),m.dispatch=d,m.bullet=t,d3.rebind(m,t,"color"),m.options=e.utils.optionsFunc.bind(m),m.orient=function(e){return arguments.length?(n=e,r=n=="right"||n=="bottom",m):n},m.ranges=function(e){return arguments.length?(s=e,m):s},m.markers=function(e){return arguments.length?(o=e,m):o},m.measures=function(e){return arguments.length?(u=e,m):u},m.width=function(e){return arguments.length?(a=e,m):a},m.height=function(e){return arguments.length?(f=e,m):f},m.margin=function(e){return arguments.length?(i.top=typeof e.top!="undefined"?e.top:i.top,i.right=typeof e.right!="undefined"?e.right:i.right,i.bottom=typeof e.bottom!="undefined"?e.bottom:i.bottom,i.left=typeof e.left!="undefined"?e.left:i.left,m):i},m.tickFormat=function(e){return arguments.length?(l=e,m):l},m.tooltips=function(e){return arguments.length?(c=e,m):c},m.tooltipContent=function(e){return arguments.length?(h=e,m):h},m.noData=function(e){return arguments.length?(p=e,m):p},m},e.models.cumulativeLineChart=function(){"use strict";function D(b){return b.each(function(b){function q(e,t){d3.select(D.container).style("cursor","ew-resize")}function R(e,t){M.x=d3.event.x,M.i=Math.round(O.invert(M.x)),rt()}function U(e,t){d3.select(D.container).style("cursor","auto"),x.index=M.i,k.stateChange(x)}function rt(){nt.data([M]);var e=D.transitionDuration();D.transitionDuration(0),D.update(),D.transitionDuration(e)}var 
/* e.models.cumulativeLineChart render body: re-indexes every series relative
   to a draggable index point (drag handlers q/R/U above), draws lines,
   legend, "Re-scale y-axis" control, dashed average lines and axes, and
   shows a no-data message when the series list is empty. */
A=d3.select(this).classed("nv-chart-"+S,!0),H=this,B=(f||parseInt(A.style("width"))||960)-u.left-u.right,j=(l||parseInt(A.style("height"))||400)-u.top-u.bottom;D.update=function(){A.transition().duration(L).call(D)},D.container=this,x.disabled=b.map(function(e){return!!e.disabled});if(!T){var F;T={};for(F in x)x[F]instanceof Array?T[F]=x[F].slice(0):T[F]=x[F]}var I=d3.behavior.drag().on("dragstart",q).on("drag",R).on("dragend",U);if(!b||!b.length||!b.filter(function(e){return e.values.length}).length){var z=A.selectAll(".nv-noData").data([N]);return z.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),z.attr("x",u.left+B/2).attr("y",u.top+j/2).text(function(e){return e}),D}A.selectAll(".nv-noData").remove(),w=t.xScale(),E=t.yScale();if(!y){var W=b.filter(function(e){return!e.disabled}).map(function(e,n){var r=d3.extent(e.values,t.y());return r[0]<-0.95&&(r[0]=-0.95),[(r[0]-r[1])/(1+r[1]),(r[1]-r[0])/(1+r[0])]}),X=[d3.min(W,function(e){return e[0]}),d3.max(W,function(e){return e[1]})];t.yDomain(X)}else t.yDomain(null);O.domain([0,b[0].values.length-1]).range([0,B]).clamp(!0);var b=P(M.i,b),V=g?"none":"all",$=A.selectAll("g.nv-wrap.nv-cumulativeLine").data([b]),J=$.enter().append("g").attr("class","nvd3 nv-wrap nv-cumulativeLine").append("g"),K=$.select("g");J.append("g").attr("class","nv-interactive"),J.append("g").attr("class","nv-x nv-axis").style("pointer-events","none"),J.append("g").attr("class","nv-y nv-axis"),J.append("g").attr("class","nv-background"),J.append("g").attr("class","nv-linesWrap").style("pointer-events",V),J.append("g").attr("class","nv-avgLinesWrap").style("pointer-events","none"),J.append("g").attr("class","nv-legendWrap"),J.append("g").attr("class","nv-controlsWrap"),c&&(i.width(B),K.select(".nv-legendWrap").datum(b).call(i),u.top!=i.height()&&(u.top=i.height(),j=(l||parseInt(A.style("height"))||400)-u.top-u.bottom),K.select(".nv-legendWrap").attr("transform","translate(0,"+ 
/* Controls ("Re-scale y-axis") layout, tempDisabled notice for series that
   cannot be re-based at the current index, interactive guideline setup, and
   the dashed per-series average reference lines. */
-u.top+")"));if(m){var Q=[{key:"Re-scale y-axis",disabled:!y}];s.width(140).color(["#444","#444","#444"]).rightAlign(!1).margin({top:5,right:0,bottom:5,left:20}),K.select(".nv-controlsWrap").datum(Q).attr("transform","translate(0,"+ -u.top+")").call(s)}$.attr("transform","translate("+u.left+","+u.top+")"),d&&K.select(".nv-y.nv-axis").attr("transform","translate("+B+",0)");var G=b.filter(function(e){return e.tempDisabled});$.select(".tempDisabled").remove(),G.length&&$.append("text").attr("class","tempDisabled").attr("x",B/2).attr("y","-.71em").style("text-anchor","end").text(G.map(function(e){return e.key}).join(", ")+" values cannot be calculated for this time period."),g&&(o.width(B).height(j).margin({left:u.left,top:u.top}).svgContainer(A).xScale(w),$.select(".nv-interactive").call(o)),J.select(".nv-background").append("rect"),K.select(".nv-background rect").attr("width",B).attr("height",j),t.y(function(e){return e.display.y}).width(B).height(j).color(b.map(function(e,t){return e.color||a(e,t)}).filter(function(e,t){return!b[t].disabled&&!b[t].tempDisabled}));var Y=K.select(".nv-linesWrap").datum(b.filter(function(e){return!e.disabled&&!e.tempDisabled}));Y.call(t),b.forEach(function(e,t){e.seriesIndex=t});var Z=b.filter(function(e){return!e.disabled&&!!C(e)}),et=K.select(".nv-avgLinesWrap").selectAll("line").data(Z,function(e){return e.key}),tt=function(e){var t=E(C(e));return t<0?0:t>j?j:t};et.enter().append("line").style("stroke-width",2).style("stroke-dasharray","10,10").style("stroke",function(e,n){return t.color()(e,e.seriesIndex)}).attr("x1",0).attr("x2",B).attr("y1",tt).attr("y2",tt),et.style("stroke-opacity",function(e){var t=E(C(e));return t<0||t>j?0:1}).attr("x1",0).attr("x2",B).attr("y1",tt).attr("y2",tt),et.exit().remove();var 
/* Draggable red index line, axis rendering, and event wiring for background
   clicks, legend/controls state changes and the interactive guideline;
   helper P(index,data) (below) re-bases each series to the indexed value and
   marks series tempDisabled when re-basing is impossible.
   NOTE(review): the tooltip template a few lines below
   (b=function(e,t,n,r,i){return"...) appears truncated by extraction —
   upstream nvd3 emits <h3>/<p> markup here; verify against the original
   nv.d3.min.js before editing. */
nt=Y.selectAll(".nv-indexLine").data([M]);nt.enter().append("rect").attr("class","nv-indexLine").attr("width",3).attr("x",-2).attr("fill","red").attr("fill-opacity",.5).style("pointer-events","all").call(I),nt.attr("transform",function(e){return"translate("+O(e.i)+",0)"}).attr("height",j),h&&(n.scale(w).ticks(Math.min(b[0].values.length,B/70)).tickSize(-j,0),K.select(".nv-x.nv-axis").attr("transform","translate(0,"+E.range()[0]+")"),d3.transition(K.select(".nv-x.nv-axis")).call(n)),p&&(r.scale(E).ticks(j/36).tickSize(-B,0),d3.transition(K.select(".nv-y.nv-axis")).call(r)),K.select(".nv-background rect").on("click",function(){M.x=d3.mouse(this)[0],M.i=Math.round(O.invert(M.x)),x.index=M.i,k.stateChange(x),rt()}),t.dispatch.on("elementClick",function(e){M.i=e.pointIndex,M.x=O(M.i),x.index=M.i,k.stateChange(x),rt()}),s.dispatch.on("legendClick",function(e,t){e.disabled=!e.disabled,y=!e.disabled,x.rescaleY=y,k.stateChange(x),D.update()}),i.dispatch.on("stateChange",function(e){x.disabled=e.disabled,k.stateChange(x),D.update()}),o.dispatch.on("elementMousemove",function(i){t.clearHighlights();var s,f,l,c=[];b.filter(function(e,t){return e.seriesIndex=t,!e.disabled}).forEach(function(n,r){f=e.interactiveBisect(n.values,i.pointXValue,D.x()),t.highlightPoint(r,f,!0);var o=n.values[f];if(typeof o=="undefined")return;typeof s=="undefined"&&(s=o),typeof l=="undefined"&&(l=D.xScale()(D.x()(o,f))),c.push({key:n.key,value:D.y()(o,f),color:a(n,n.seriesIndex)})});if(c.length>2){var h=D.yScale().invert(i.mouseY),p=Math.abs(D.yScale().domain()[0]-D.yScale().domain()[1]),d=.03*p,m=e.nearestValueIndex(c.map(function(e){return e.value}),h,d);m!==null&&(c[m].highlight=!0)}var g=n.tickFormat()(D.x()(s,f),f);o.tooltip.position({left:l+u.left,top:i.mouseY+u.top}).chartContainer(H.parentNode).enabled(v).valueFormatter(function(e,t){return 
r.tickFormat()(e)}).data({value:g,series:c})(),o.renderGuideLine(l)}),o.dispatch.on("elementMouseout",function(e){k.tooltipHide(),t.clearHighlights()}),k.on("tooltipShow",function(e){v&&_(e,H.parentNode)}),k.on("changeState",function(e){typeof e.disabled!="undefined"&&(b.forEach(function(t,n){t.disabled=e.disabled[n]}),x.disabled=e.disabled),typeof e.index!="undefined"&&(M.i=e.index,M.x=O(M.i),x.index=e.index,nt.data([M])),typeof e.rescaleY!="undefined"&&(y=e.rescaleY),D.update()})}),D}function P(e,n){return n.map(function(n,r){if(!n.values)return n;var i=t.y()(n.values[e],e);return i<-0.95&&!A?(n.tempDisabled=!0,n):(n.tempDisabled=!1,n.values= n.values.map(function(e,n){return e.display={y:(t.y()(e,n)-i)/(1+i)},e}),n)})}var t=e.models.line(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.models.legend(),o=e.interactiveGuideline(),u={top:30,right:30,bottom:50,left:60},a=e.utils.defaultColor(),f=null,l=null,c=!0,h=!0,p=!0,d=!1,v=!0,m=!0,g=!1,y=!0,b=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},w,E,S=t.id(),x={index:0,rescaleY:y},T=null,N="No Data Available.",C=function(e){return e.average},k=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),L=250,A=!1;n.orient("bottom").tickPadding(7),r.orient(d?"right":"left"),s.updateState(!1);var O=d3.scale.linear(),M={i:0,x:0},_=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=b(i.series.key,a,f,i,D);e.tooltip.show([o,u],l,null,null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+u.left,e.pos[1]+u.top],k.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){k.tooltipHide(e)}),k.on("tooltipHide",function(){v&&e.tooltip.cleanup()}),D.dispatch=k,D.lines=t,D.legend=i,D.xAxis=n,D.yAxis=r,D.interactiveLayer=o,d3.rebind(D,t,"defined","isArea","x","y","xScale","yScale","size","xDomain","yDomain","xRange","yRange","forceX","forceY","interactive","clipEdge","clipVoronoi","useVoronoi","id"),D.options=e.utils.optionsFunc.bind(D),D.margin=function(e){return arguments.length?(u.top=typeof e.top!="undefined"?e.top:u.top,u.right=typeof e.right!="undefined"?e.right:u.right,u.bottom=typeof e.bottom!="undefined"?e.bottom:u.bottom,u.left=typeof e.left!="undefined"?e.left:u.left,D):u},D.width=function(e){return arguments.length?(f=e,D):f},D.height=function(e){return arguments.length?(l=e,D):l},D.color=function(t){return arguments.length?(a=e.utils.getColor(t),i.color(a),D):a},D.rescaleY=function(e){return arguments.length?(y=e,D):y},D.showControls=function(e){return arguments.length?(m=e,D):m},D.useInteractiveGuideline=function(e){return arguments.length?(g=e,e===!0&&(D.interactive(!1),D.useVoronoi(!1)),D):g},D.showLegend=function(e){return arguments.length?(c=e,D):c},D.showXAxis=function(e){return arguments.length?(h=e,D):h},D.showYAxis=function(e){return arguments.length?(p=e,D):p},D.rightAlignYAxis=function(e){return 
arguments.length?(d=e,r.orient(e?"right":"left"),D):d},D.tooltips=function(e){return arguments.length?(v=e,D):v},D.tooltipContent=function(e){return arguments.length?(b=e,D):b},D.state=function(e){return arguments.length?(x=e,D):x},D.defaultState=function(e){return arguments.length?(T=e,D):T},D.noData=function(e){return arguments.length?(N=e,D):N},D.average=function(e){return arguments.length?(C=e,D):C},D.transitionDuration=function(e){return arguments.length?(L=e,D):L},D.noErrorCheck=function(e){return arguments.length?(A=e,D):A},D},e.models.discreteBar=function(){"use strict";function E(e){return e.each(function(e){var i=n-t.left-t.right,E=r-t.top-t.bottom,S=d3.select(this);e.forEach(function(e,t){e.values.forEach(function(e){e.series=t})});var T=p&&d?[]:e.map(function(e){return e.values.map(function(e,t){return{x:u(e,t),y:a(e,t),y0:e.y0}})});s.domain(p||d3.merge(T).map(function(e){return e.x})).rangeBands(v||[0,i],.1),o.domain(d||d3.extent(d3.merge(T).map(function(e){return e.y}).concat(f))),c?o.range(m||[E-(o.domain()[0]<0?12:0),o.domain()[1]>0?12:0]):o.range(m||[E,0]),b=b||s,w=w||o.copy().range([o(0),o(0)]);var N=S.selectAll("g.nv-wrap.nv-discretebar").data([e]),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-discretebar"),k=C.append("g"),L=N.select("g");k.append("g").attr("class","nv-groups"),N.attr("transform","translate("+t.left+","+t.top+")");var A=N.select(".nv-groups").selectAll(".nv-group").data(function(e){return e},function(e){return e.key});A.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),A.exit().transition().style("stroke-opacity",1e-6).style("fill-opacity",1e-6).remove(),A.attr("class",function(e,t){return"nv-group nv-series-"+t}).classed("hover",function(e){return e.hover}),A.transition().style("stroke-opacity",1).style("fill-opacity",.75);var O=A.selectAll("g.nv-bar").data(function(e){return e.values});O.exit().remove();var 
M=O.enter().append("g").attr("transform",function(e,t,n){return"translate("+(s(u(e,t))+s.rangeBand()*.05)+", "+o(0)+")"}).on("mouseover",function(t,n){d3.select(this).classed("hover",!0),g.elementMouseover({value:a(t,n),point:t,series:e[t.series],pos:[s(u(t,n))+s.rangeBand()*(t.series+.5)/e.length,o(a(t,n))],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("mouseout",function(t,n){d3.select(this).classed("hover",!1),g.elementMouseout({value:a(t,n),point:t,series:e[t.series],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("click",function(t,n){g.elementClick({value:a(t,n),point:t,series:e[t.series],pos:[s(u(t,n))+s.rangeBand()*(t.series+.5)/e.length,o(a(t,n))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(t,n){g.elementDblClick({value:a(t,n),point:t,series:e[t.series],pos:[s(u(t,n))+s.rangeBand()*(t.series+.5)/e.length,o(a(t,n))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()});M.append("rect").attr("height",0).attr("width",s.rangeBand()*.9/e.length),c?(M.append("text").attr("text-anchor","middle"),O.select("text").text(function(e,t){return h(a(e,t))}).transition().attr("x",s.rangeBand()*.9/2).attr("y",function(e,t){return a(e,t)<0?o(a(e,t))-o(0)+12:-4})):O.selectAll("text").remove(),O.attr("class",function(e,t){return a(e,t)<0?"nv-bar negative":"nv-bar positive"}).style("fill",function(e,t){return e.color||l(e,t)}).style("stroke",function(e,t){return e.color||l(e,t)}).select("rect").attr("class",y).transition().attr("width",s.rangeBand()*.9/e.length),O.transition().attr("transform",function(e,t){var n=s(u(e,t))+s.rangeBand()*.05,r=a(e,t)<0?o(0):o(0)-o(a(e,t))<1?o(0)-1:o(a(e,t));return"translate("+n+", "+r+")"}).select("rect").attr("height",function(e,t){return Math.max(Math.abs(o(a(e,t))-o(d&&d[0]||0))||1)}),b=s.copy(),w=o.copy()}),E}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=Math.floor(Math.random()*1e4),s=d3.scale.ordinal(),o=d3.scale.linear(),u=function(e){return 
e.x},a=function(e){return e.y},f=[0],l=e.utils.defaultColor(),c=!1,h=d3.format(",.2f"),p,d,v,m,g=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout"),y="discreteBar",b,w;return E.dispatch=g,E.options=e.utils.optionsFunc.bind(E),E.x=function(e){return arguments.length?(u=e,E):u},E.y=function(e){return arguments.length?(a=e,E):a},E.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,E):t},E.width=function(e){return arguments.length?(n=e,E):n},E.height=function(e){return arguments.length?(r=e,E):r},E.xScale=function(e){return arguments.length?(s=e,E):s},E.yScale=function(e){return arguments.length?(o=e,E):o},E.xDomain=function(e){return arguments.length?(p=e,E):p},E.yDomain=function(e){return arguments.length?(d=e,E):d},E.xRange=function(e){return arguments.length?(v=e,E):v},E.yRange=function(e){return arguments.length?(m=e,E):m},E.forceY=function(e){return arguments.length?(f=e,E):f},E.color=function(t){return arguments.length?(l=e.utils.getColor(t),E):l},E.id=function(e){return arguments.length?(i=e,E):i},E.showValues=function(e){return arguments.length?(c=e,E):c},E.valueFormat=function(e){return arguments.length?(h=e,E):h},E.rectClass=function(e){return arguments.length?(y=e,E):y},E},e.models.discreteBarChart=function(){"use strict";function w(e){return e.each(function(e){var u=d3.select(this),p=this,E=(s||parseInt(u.style("width"))||960)-i.left-i.right,S=(o||parseInt(u.style("height"))||400)-i.top-i.bottom;w.update=function(){g.beforeUpdate(),u.transition().duration(y).call(w)},w.container=this;if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var T=u.selectAll(".nv-noData").data([m]);return T.enter().append("text").attr("class","nvd3 
nv-noData").attr("dy","-.7em").style("text-anchor","middle"),T.attr("x",i.left+E/2).attr("y",i.top+S/2).text(function(e){return e}),w}u.selectAll(".nv-noData").remove(),d=t.xScale(),v=t.yScale().clamp(!0);var N=u.selectAll("g.nv-wrap.nv-discreteBarWithAxes").data([e]),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-discreteBarWithAxes").append("g"),k=C.append("defs"),L=N.select("g");C.append("g").attr("class","nv-x nv-axis"),C.append("g").attr("class","nv-y nv-axis").append("g").attr("class","nv-zeroLine").append("line"),C.append("g").attr("class","nv-barsWrap"),L.attr("transform","translate("+i.left+","+i.top+")"),l&&L.select(".nv-y.nv-axis").attr("transform","translate("+E+",0)"),t.width(E).height(S);var A=L.select(".nv-barsWrap").datum(e.filter(function(e){return!e.disabled}));A.transition().call(t),k.append("clipPath").attr("id","nv-x-label-clip-"+t.id()).append("rect"),L.select("#nv-x-label-clip-"+t.id()+" rect").attr("width",d.rangeBand()*(c?2:1)).attr("height",16).attr("x",-d.rangeBand()/(c?1:2));if(a){n.scale(d).ticks(E/100).tickSize(-S,0),L.select(".nv-x.nv-axis").attr("transform","translate(0,"+(v.range()[0]+(t.showValues()&&v.domain()[0]<0?16:0))+")"),L.select(".nv-x.nv-axis").transition().call(n);var O=L.select(".nv-x.nv-axis").selectAll("g");c&&O.selectAll("text").attr("transform",function(e,t,n){return"translate(0,"+(n%2==0?"5":"17")+")"})}f&&(r.scale(v).ticks(S/36).tickSize(-E,0),L.select(".nv-y.nv-axis").transition().call(r)),L.select(".nv-zeroLine line").attr("x1",0).attr("x2",E).attr("y1",v(0)).attr("y2",v(0)),g.on("tooltipShow",function(e){h&&b(e,p.parentNode)})}),w}var t=e.models.discreteBar(),n=e.models.axis(),r=e.models.axis(),i={top:15,right:10,bottom:50,left:60},s=null,o=null,u=e.utils.getColor(),a=!0,f=!0,l=!1,c=!1,h=!0,p=function(e,t,n,r,i){return"

"+t+"

"+"

"+n+"

"},d,v,m="No Data Available.",g=d3.dispatch("tooltipShow","tooltipHide","beforeUpdate"),y=250;n.orient("bottom").highlightZero(!1).showMaxMin(!1).tickFormat(function(e){return e}),r.orient(l?"right":"left").tickFormat(d3.format(",.1f"));var b=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=p(i.series.key,a,f,i,w);e.tooltip.show([o,u],l,i.value<0?"n":"s",null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+i.left,e.pos[1]+i.top],g.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){g.tooltipHide(e)}),g.on("tooltipHide",function(){h&&e.tooltip.cleanup()}),w.dispatch=g,w.discretebar=t,w.xAxis=n,w.yAxis=r,d3.rebind(w,t,"x","y","xDomain","yDomain","xRange","yRange","forceX","forceY","id","showValues","valueFormat"),w.options=e.utils.optionsFunc.bind(w),w.margin=function(e){return arguments.length?(i.top=typeof e.top!="undefined"?e.top:i.top,i.right=typeof e.right!="undefined"?e.right:i.right,i.bottom=typeof e.bottom!="undefined"?e.bottom:i.bottom,i.left=typeof e.left!="undefined"?e.left:i.left,w):i},w.width=function(e){return arguments.length?(s=e,w):s},w.height=function(e){return arguments.length?(o=e,w):o},w.color=function(n){return arguments.length?(u=e.utils.getColor(n),t.color(u),w):u},w.showXAxis=function(e){return arguments.length?(a=e,w):a},w.showYAxis=function(e){return arguments.length?(f=e,w):f},w.rightAlignYAxis=function(e){return arguments.length?(l=e,r.orient(e?"right":"left"),w):l},w.staggerLabels=function(e){return arguments.length?(c=e,w):c},w.tooltips=function(e){return arguments.length?(h=e,w):h},w.tooltipContent=function(e){return arguments.length?(p=e,w):p},w.noData=function(e){return arguments.length?(m=e,w):m},w.transitionDuration=function(e){return arguments.length?(y=e,w):y},w},e.models.distribution=function(){"use strict";function l(e){return e.each(function(e){var 
/* This span covers: the tail of cumulativeLineChart's option setters,
   e.models.discreteBar (ordinal-scale bar series with optional value labels
   and click/hover dispatch), e.models.discreteBarChart (axes + zero line +
   tooltip wrapper around discreteBar), and the start of e.models.distribution
   (axis "rug" tick marks showing where data points fall along one axis).
   NOTE(review): the discreteBarChart tooltip template above
   (p=function(e,t,n,r,i){return"...) looks extraction-truncated (missing
   <h3>/<p> markup), like the other tooltip templates in this minified
   bundle — verify against the original nv.d3.min.js before editing. */
a=n-(i==="x"?t.left+t.right:t.top+t.bottom),l=i=="x"?"y":"x",c=d3.select(this);f=f||u;var h=c.selectAll("g.nv-distribution").data([e]),p=h.enter().append("g").attr("class","nvd3 nv-distribution"),d=p.append("g"),v=h.select("g");h.attr("transform","translate("+t.left+","+t.top+")");var m=v.selectAll("g.nv-dist").data(function(e){return e},function(e){return e.key});m.enter().append("g"),m.attr("class",function(e,t){return"nv-dist nv-series-"+t}).style("stroke",function(e,t){return o(e,t)});var g=m.selectAll("line.nv-dist"+i).data(function(e){return e.values});g.enter().append("line").attr(i+"1",function(e,t){return f(s(e,t))}).attr(i+"2",function(e,t){return f(s(e,t))}),m.exit().selectAll("line.nv-dist"+i).transition().attr(i+"1",function(e,t){return u(s(e,t))}).attr(i+"2",function(e,t){return u(s(e,t))}).style("stroke-opacity",0).remove(),g.attr("class",function(e,t){return"nv-dist"+i+" nv-dist"+i+"-"+t}).attr(l+"1",0).attr(l+"2",r),g.transition().attr(i+"1",function(e,t){return u(s(e,t))}).attr(i+"2",function(e,t){return u(s(e,t))}),f=u.copy()}),l}var t={top:0,right:0,bottom:0,left:0},n=400,r=8,i="x",s=function(e){return e[i]},o=e.utils.defaultColor(),u=d3.scale.linear(),a,f;return l.options=e.utils.optionsFunc.bind(l),l.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,l):t},l.width=function(e){return arguments.length?(n=e,l):n},l.axis=function(e){return arguments.length?(i=e,l):i},l.size=function(e){return arguments.length?(r=e,l):r},l.getData=function(e){return arguments.length?(s=d3.functor(e),l):s},l.scale=function(e){return arguments.length?(u=e,l):u},l.color=function(t){return arguments.length?(o=e.utils.getColor(t),l):o},l},e.models.historicalBar=function(){"use strict";function w(E){return E.each(function(w){var 
/* e.models.historicalBar render body (its "function w(E)" opens on the
   preceding line): computes x/y linear scales with padData support and
   degenerate-domain widening, draws clipped bars for the single series
   w[0], and wires chartClick/elementClick/elementDblClick/elementMouseover/
   elementMouseout dispatch events; ends with the model's configuration state
   and chainable option setters.
   NOTE(review): the w.interactive setter near the end of this span assigns
   b=!1 regardless of its argument, so interactivity can be turned off but
   never back on through this setter — looks like an upstream nvd3 defect;
   verify against upstream before relying on it. */
E=n-t.left-t.right,S=r-t.top-t.bottom,T=d3.select(this);s.domain(d||d3.extent(w[0].values.map(u).concat(f))),c?s.range(m||[E*.5/w[0].values.length,E*(w[0].values.length-.5)/w[0].values.length]):s.range(m||[0,E]),o.domain(v||d3.extent(w[0].values.map(a).concat(l))).range(g||[S,0]),s.domain()[0]===s.domain()[1]&&(s.domain()[0]?s.domain([s.domain()[0]-s.domain()[0]*.01,s.domain()[1]+s.domain()[1]*.01]):s.domain([-1,1])),o.domain()[0]===o.domain()[1]&&(o.domain()[0]?o.domain([o.domain()[0]+o.domain()[0]*.01,o.domain()[1]-o.domain()[1]*.01]):o.domain([-1,1]));var N=T.selectAll("g.nv-wrap.nv-historicalBar-"+i).data([w[0].values]),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-historicalBar-"+i),k=C.append("defs"),L=C.append("g"),A=N.select("g");L.append("g").attr("class","nv-bars"),N.attr("transform","translate("+t.left+","+t.top+")"),T.on("click",function(e,t){y.chartClick({data:e,index:t,pos:d3.event,id:i})}),k.append("clipPath").attr("id","nv-chart-clip-path-"+i).append("rect"),N.select("#nv-chart-clip-path-"+i+" rect").attr("width",E).attr("height",S),A.attr("clip-path",h?"url(#nv-chart-clip-path-"+i+")":"");var O=N.select(".nv-bars").selectAll(".nv-bar").data(function(e){return e},function(e,t){return u(e,t)});O.exit().remove();var M=O.enter().append("rect").attr("x",0).attr("y",function(t,n){return e.utils.NaNtoZero(o(Math.max(0,a(t,n))))}).attr("height",function(t,n){return 
e.utils.NaNtoZero(Math.abs(o(a(t,n))-o(0)))}).attr("transform",function(e,t){return"translate("+(s(u(e,t))-E/w[0].values.length*.45)+",0)"}).on("mouseover",function(e,t){if(!b)return;d3.select(this).classed("hover",!0),y.elementMouseover({point:e,series:w[0],pos:[s(u(e,t)),o(a(e,t))],pointIndex:t,seriesIndex:0,e:d3.event})}).on("mouseout",function(e,t){if(!b)return;d3.select(this).classed("hover",!1),y.elementMouseout({point:e,series:w[0],pointIndex:t,seriesIndex:0,e:d3.event})}).on("click",function(e,t){if(!b)return;y.elementClick({value:a(e,t),data:e,index:t,pos:[s(u(e,t)),o(a(e,t))],e:d3.event,id:i}),d3.event.stopPropagation()}).on("dblclick",function(e,t){if(!b)return;y.elementDblClick({value:a(e,t),data:e,index:t,pos:[s(u(e,t)),o(a(e,t))],e:d3.event,id:i}),d3.event.stopPropagation()});O.attr("fill",function(e,t){return p(e,t)}).attr("class",function(e,t,n){return(a(e,t)<0?"nv-bar negative":"nv-bar positive")+" nv-bar-"+n+"-"+t}).transition().attr("transform",function(e,t){return"translate("+(s(u(e,t))-E/w[0].values.length*.45)+",0)"}).attr("width",E/w[0].values.length*.9),O.transition().attr("y",function(t,n){var r=a(t,n)<0?o(0):o(0)-o(a(t,n))<1?o(0)-1:o(a(t,n));return e.utils.NaNtoZero(r)}).attr("height",function(t,n){return e.utils.NaNtoZero(Math.max(Math.abs(o(a(t,n))-o(0)),1))})}),w}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=Math.floor(Math.random()*1e4),s=d3.scale.linear(),o=d3.scale.linear(),u=function(e){return e.x},a=function(e){return e.y},f=[],l=[0],c=!1,h=!0,p=e.utils.defaultColor(),d,v,m,g,y=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout"),b=!0;return w.highlightPoint=function(e,t){d3.select(".nv-historicalBar-"+i).select(".nv-bars .nv-bar-0-"+e).classed("hover",t)},w.clearHighlights=function(){d3.select(".nv-historicalBar-"+i).select(".nv-bars .nv-bar.hover").classed("hover",!1)},w.dispatch=y,w.options=e.utils.optionsFunc.bind(w),w.x=function(e){return 
arguments.length?(u=e,w):u},w.y=function(e){return arguments.length?(a=e,w):a},w.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,w):t},w.width=function(e){return arguments.length?(n=e,w):n},w.height=function(e){return arguments.length?(r=e,w):r},w.xScale=function(e){return arguments.length?(s=e,w):s},w.yScale=function(e){return arguments.length?(o=e,w):o},w.xDomain=function(e){return arguments.length?(d=e,w):d},w.yDomain=function(e){return arguments.length?(v=e,w):v},w.xRange=function(e){return arguments.length?(m=e,w):m},w.yRange=function(e){return arguments.length?(g=e,w):g},w.forceX=function(e){return arguments.length?(f=e,w):f},w.forceY=function(e){return arguments.length?(l=e,w):l},w.padData=function(e){return arguments.length?(c=e,w):c},w.clipEdge=function(e){return arguments.length?(h=e,w):h},w.color=function(t){return arguments.length?(p=e.utils.getColor(t),w):p},w.id=function(e){return arguments.length?(i=e,w):i},w.interactive=function(e){return arguments.length?(b=!1,w):b},w},e.models.historicalBarChart=function(){"use strict";function x(e){return e.each(function(d){var T=d3.select(this),N=this,C=(u||parseInt(T.style("width"))||960)-s.left-s.right,k=(a||parseInt(T.style("height"))||400)-s.top-s.bottom;x.update=function(){T.transition().duration(E).call(x)},x.container=this,g.disabled=d.map(function(e){return!!e.disabled});if(!y){var L;y={};for(L in g)g[L]instanceof Array?y[L]=g[L].slice(0):y[L]=g[L]}if(!d||!d.length||!d.filter(function(e){return e.values.length}).length){var A=T.selectAll(".nv-noData").data([b]);return A.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),A.attr("x",s.left+C/2).attr("y",s.top+k/2).text(function(e){return e}),x}T.selectAll(".nv-noData").remove(),v=t.xScale(),m=t.yScale();var 
/* e.models.historicalBarChart render (continued from the preceding line):
   composes a historicalBar with x/y axes and a legend, handles the noData
   fallback, legend click/dblclick toggling of series, and external state
   changes via the changeState dispatch. */
O=T.selectAll("g.nv-wrap.nv-historicalBarChart").data([d]),M=O.enter().append("g").attr("class","nvd3 nv-wrap nv-historicalBarChart").append("g"),_=O.select("g");M.append("g").attr("class","nv-x nv-axis"),M.append("g").attr("class","nv-y nv-axis"),M.append("g").attr("class","nv-barsWrap"),M.append("g").attr("class","nv-legendWrap"),f&&(i.width(C),_.select(".nv-legendWrap").datum(d).call(i),s.top!=i.height()&&(s.top=i.height(),k=(a||parseInt(T.style("height"))||400)-s.top-s.bottom),O.select(".nv-legendWrap").attr("transform","translate(0,"+ -s.top+")")),O.attr("transform","translate("+s.left+","+s.top+")"),h&&_.select(".nv-y.nv-axis").attr("transform","translate("+C+",0)"),t.width(C).height(k).color(d.map(function(e,t){return e.color||o(e,t)}).filter(function(e,t){return!d[t].disabled}));var D=_.select(".nv-barsWrap").datum(d.filter(function(e){return!e.disabled}));D.transition().call(t),l&&(n.scale(v).tickSize(-k,0),_.select(".nv-x.nv-axis").attr("transform","translate(0,"+m.range()[0]+")"),_.select(".nv-x.nv-axis").transition().call(n)),c&&(r.scale(m).ticks(k/36).tickSize(-C,0),_.select(".nv-y.nv-axis").transition().call(r)),i.dispatch.on("legendClick",function(t,n){t.disabled=!t.disabled,d.filter(function(e){return!e.disabled}).length||d.map(function(e){return e.disabled=!1,O.selectAll(".nv-series").classed("disabled",!1),e}),g.disabled=d.map(function(e){return!!e.disabled}),w.stateChange(g),e.transition().call(x)}),i.dispatch.on("legendDblclick",function(e){d.forEach(function(e){e.disabled=!0}),e.disabled=!1,g.disabled=d.map(function(e){return!!e.disabled}),w.stateChange(g),x.update()}),w.on("tooltipShow",function(e){p&&S(e,N.parentNode)}),w.on("changeState",function(e){typeof e.disabled!="undefined"&&(d.forEach(function(t,n){t.disabled=e.disabled[n]}),g.disabled=e.disabled),x.update()})}),x}var 
/* historicalBarChart configuration state, tooltip helper (which rescales
   event coordinates when the svg has a viewBox) and chainable option setters.
   NOTE(review): the tooltip template below (d=function(e,t,n,r,i){return"...)
   appears extraction-truncated (missing <h3>/<p> markup), as elsewhere in
   this minified bundle — verify against the original nv.d3.min.js. */
t=e.models.historicalBar(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s={top:30,right:90,bottom:50,left:90},o=e.utils.defaultColor(),u=null,a=null,f=!1,l=!0,c=!0,h=!1,p=!0,d=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},v,m,g={},y=null,b="No Data Available.",w=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),E=250;n.orient("bottom").tickPadding(7),r.orient(h?"right":"left");var S=function(i,s){if(s){var o=d3.select(s).select("svg"),u=o.node()?o.attr("viewBox"):null;if(u){u=u.split(" ");var a=parseInt(o.style("width"))/u[2];i.pos[0]=i.pos[0]*a,i.pos[1]=i.pos[1]*a}}var f=i.pos[0]+(s.offsetLeft||0),l=i.pos[1]+(s.offsetTop||0),c=n.tickFormat()(t.x()(i.point,i.pointIndex)),h=r.tickFormat()(t.y()(i.point,i.pointIndex)),p=d(i.series.key,c,h,i,x);e.tooltip.show([f,l],p,null,null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+s.left,e.pos[1]+s.top],w.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){w.tooltipHide(e)}),w.on("tooltipHide",function(){p&&e.tooltip.cleanup()}),x.dispatch=w,x.bars=t,x.legend=i,x.xAxis=n,x.yAxis=r,d3.rebind(x,t,"defined","isArea","x","y","size","xScale","yScale","xDomain","yDomain","xRange","yRange","forceX","forceY","interactive","clipEdge","clipVoronoi","id","interpolate","highlightPoint","clearHighlights","interactive"),x.options=e.utils.optionsFunc.bind(x),x.margin=function(e){return arguments.length?(s.top=typeof e.top!="undefined"?e.top:s.top,s.right=typeof e.right!="undefined"?e.right:s.right,s.bottom=typeof e.bottom!="undefined"?e.bottom:s.bottom,s.left=typeof e.left!="undefined"?e.left:s.left,x):s},x.width=function(e){return arguments.length?(u=e,x):u},x.height=function(e){return arguments.length?(a=e,x):a},x.color=function(t){return arguments.length?(o=e.utils.getColor(t),i.color(o),x):o},x.showLegend=function(e){return arguments.length?(f=e,x):f},x.showXAxis=function(e){return arguments.length?(l=e,x):l},x.showYAxis=function(e){return arguments.length?(c=e,x):c},x.rightAlignYAxis=function(e){return arguments.length?(h=e,r.orient(e?"right":"left"),x):h},x.tooltips=function(e){return arguments.length?(p=e,x):p},x.tooltipContent=function(e){return 
arguments.length?(d=e,x):d},x.state=function(e){return arguments.length?(g=e,x):g},x.defaultState=function(e){return arguments.length?(y=e,x):y},x.noData=function(e){return arguments.length?(b=e,x):b},x.transitionDuration=function(e){return arguments.length?(E=e,x):E},x},e.models.indentedTree=function(){"use strict";function g(e){return e.each(function(e){function k(e,t,n){d3.event.stopPropagation();if(d3.event.shiftKey&&!n)return d3.event.shiftKey=!1,e.values&&e.values.forEach(function(e){(e.values||e._values)&&k(e,0,!0)}),!0;if(!O(e))return!0;e.values?(e._values=e.values,e.values=null):(e.values=e._values,e._values=null),g.update()}function L(e){return e._values&&e._values.length?h:e.values&&e.values.length?p:""}function A(e){return e._values&&e._values.length}function O(e){var t=e.values||e._values;return t&&t.length}var t=1,n=d3.select(this),i=d3.layout.tree().children(function(e){return e.values}).size([r,f]);g.update=function(){n.transition().duration(600).call(g)},e[0]||(e[0]={key:a});var s=i.nodes(e[0]),y=d3.select(this).selectAll("div").data([[s]]),b=y.enter().append("div").attr("class","nvd3 nv-wrap nv-indentedtree"),w=b.append("table"),E=y.select("table").attr("width","100%").attr("class",c);if(o){var S=w.append("thead"),x=S.append("tr");l.forEach(function(e){x.append("th").attr("width",e.width?e.width:"10%").style("text-align",e.type=="numeric"?"right":"left").append("span").text(e.label)})}var T=E.selectAll("tbody").data(function(e){return e});T.enter().append("tbody"),t=d3.max(s,function(e){return e.depth}),i.size([r,t*f]);var N=T.selectAll("tr").data(function(e){return e.filter(function(e){return u&&!e.children?u(e):!0})},function(e,t){return e.id||e.id||++m});N.exit().remove(),N.select("img.nv-treeicon").attr("src",L).classed("folded",A);var C=N.enter().append("tr");l.forEach(function(e,t){var 
/* Start of e.models.indentedTree (definition continues past this chunk):
   renders a collapsible table-tree; rows fold/unfold via the k() toggle
   above, columns are built from the l (columns) configuration, and
   elementClick/elementDblclick/elementMouseover/elementMouseout events are
   dispatched per row. */
n=C.append("td").style("padding-left",function(e){return(t?0:e.depth*f+12+(L(e)?0:16))+"px"},"important").style("text-align",e.type=="numeric"?"right":"left");t==0&&n.append("img").classed("nv-treeicon",!0).classed("nv-folded",A).attr("src",L).style("width","14px").style("height","14px").style("padding","0 1px").style("display",function(e){return L(e)?"inline-block":"none"}).on("click",k),n.each(function(n){!t&&v(n)?d3.select(this).append("a").attr("href",v).attr("class",d3.functor(e.classes)).append("span"):d3.select(this).append("span"),d3.select(this).select("span").attr("class",d3.functor(e.classes)).text(function(t){return e.format?e.format(t):t[e.key]||"-"})}),e.showCount&&(n.append("span").attr("class","nv-childrenCount"),N.selectAll("span.nv-childrenCount").text(function(e){return e.values&&e.values.length||e._values&&e._values.length?"("+(e.values&&e.values.filter(function(e){return u?u(e):!0}).length||e._values&&e._values.filter(function(e){return u?u(e):!0}).length||0)+")":""}))}),N.order().on("click",function(e){d.elementClick({row:this,data:e,pos:[e.x,e.y]})}).on("dblclick",function(e){d.elementDblclick({row:this,data:e,pos:[e.x,e.y]})}).on("mouseover",function(e){d.elementMouseover({row:this,data:e,pos:[e.x,e.y]})}).on("mouseout",function(e){d.elementMouseout({row:this,data:e,pos:[e.x,e.y]})})}),g}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=e.utils.defaultColor(),s=Math.floor(Math.random()*1e4),o=!0,u=!1,a="No Data Available.",f=20,l=[{key:"key",label:"Name",type:"text"}],c=null,h="images/grey-plus.png",p="images/grey-minus.png",d=d3.dispatch("elementClick","elementDblclick","elementMouseover","elementMouseout"),v=function(e){return e.url},m=0;return g.options=e.utils.optionsFunc.bind(g),g.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof 
e.left!="undefined"?e.left:t.left,g):t},g.width=function(e){return arguments.length?(n=e,g):n},g.height=function(e){return arguments.length?(r=e,g):r},g.color=function(t){return arguments.length?(i=e.utils.getColor(t),scatter.color(i),g):i},g.id=function(e){return arguments.length?(s=e,g):s},g.header=function(e){return arguments.length?(o=e,g):o},g.noData=function(e){return arguments.length?(a=e,g):a},g.filterZero=function(e){return arguments.length?(u=e,g):u},g.columns=function(e){return arguments.length?(l=e,g):l},g.tableClass=function(e){return arguments.length?(c=e,g):c},g.iconOpen=function(e){return arguments.length?(h=e,g):h},g.iconClose=function(e){return arguments.length?(p=e,g):p},g.getUrl=function(e){return arguments.length?(v=e,g):v},g},e.models.legend=function(){"use strict";function c(h){return h.each(function(c){var h=n-t.left-t.right,p=d3.select(this),d=p.selectAll("g.nv-legend").data([c]),v=d.enter().append("g").attr("class","nvd3 nv-legend").append("g"),m=d.select("g");d.attr("transform","translate("+t.left+","+t.top+")");var g=m.selectAll(".nv-series").data(function(e){return e}),y=g.enter().append("g").attr("class","nv-series").on("mouseover",function(e,t){l.legendMouseover(e,t)}).on("mouseout",function(e,t){l.legendMouseout(e,t)}).on("click",function(e,t){l.legendClick(e,t),a&&(f?(c.forEach(function(e){e.disabled=!0}),e.disabled=!1):(e.disabled=!e.disabled,c.every(function(e){return e.disabled})&&c.forEach(function(e){e.disabled=!1})),l.stateChange({disabled:c.map(function(e){return!!e.disabled})}))}).on("dblclick",function(e,t){l.legendDblclick(e,t),a&&(c.forEach(function(e){e.disabled=!0}),e.disabled=!1,l.stateChange({disabled:c.map(function(e){return!!e.disabled})}))});y.append("circle").style("stroke-width",2).attr("class","nv-legend-symbol").attr("r",5),y.append("text").attr("text-anchor","start").attr("class","nv-legend-text").attr("dy",".32em").attr("dx","8"),g.classed("disabled",function(e){return 
e.disabled}),g.exit().remove(),g.select("circle").style("fill",function(e,t){return e.color||s(e,t)}).style("stroke",function(e,t){return e.color||s(e,t)}),g.select("text").text(i);if(o){var b=[];g.each(function(t,n){var r=d3.select(this).select("text"),i;try{i=r.getComputedTextLength();if(i<=0)throw Error()}catch(s){i=e.utils.calcApproxTextWidth(r)}b.push(i+28)});var w=0,E=0,S=[];while(Eh&&w>1){S=[],w--;for(var x=0;x(S[x%w]||0)&&(S[x%w]=b[x]);E=S.reduce(function(e,t,n,r){return e+t})}var T=[];for(var N=0,C=0;NA&&(A=L),"translate("+O+","+k+")"}),m.attr("transform","translate("+(n-t.right-A)+","+t.top+")"),r=t.top+t.bottom+k+15}}),c}var t={top:5,right:0,bottom:5,left:0},n=400,r=20,i=function(e){return e.key},s=e.utils.defaultColor(),o=!0,u=!0,a=!0,f=!1,l=d3.dispatch("legendClick","legendDblclick","legendMouseover","legendMouseout","stateChange");return c.dispatch=l,c.options=e.utils.optionsFunc.bind(c),c.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,c):t},c.width=function(e){return arguments.length?(n=e,c):n},c.height=function(e){return arguments.length?(r=e,c):r},c.key=function(e){return arguments.length?(i=e,c):i},c.color=function(t){return arguments.length?(s=e.utils.getColor(t),c):s},c.align=function(e){return arguments.length?(o=e,c):o},c.rightAlign=function(e){return arguments.length?(u=e,c):u},c.updateState=function(e){return arguments.length?(a=e,c):a},c.radioButtonMode=function(e){return arguments.length?(f=e,c):f},c},e.models.line=function(){"use strict";function m(g){return g.each(function(m){var g=r-n.left-n.right,b=i-n.top-n.bottom,w=d3.select(this);c=t.xScale(),h=t.yScale(),d=d||c,v=v||h;var E=w.selectAll("g.nv-wrap.nv-line").data([m]),S=E.enter().append("g").attr("class","nvd3 nv-wrap 
nv-line"),T=S.append("defs"),N=S.append("g"),C=E.select("g");N.append("g").attr("class","nv-groups"),N.append("g").attr("class","nv-scatterWrap"),E.attr("transform","translate("+n.left+","+n.top+")"),t.width(g).height(b);var k=E.select(".nv-scatterWrap");k.transition().call(t),T.append("clipPath").attr("id","nv-edge-clip-"+t.id()).append("rect"),E.select("#nv-edge-clip-"+t.id()+" rect").attr("width",g).attr("height",b),C.attr("clip-path",l?"url(#nv-edge-clip-"+t.id()+")":""),k.attr("clip-path",l?"url(#nv-edge-clip-"+t.id()+")":"");var L=E.select(".nv-groups").selectAll(".nv-group").data(function(e){return e},function(e){return e.key});L.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),L.exit().remove(),L.attr("class",function(e,t){return"nv-group nv-series-"+t}).classed("hover",function(e){return e.hover}).style("fill",function(e,t){return s(e,t)}).style("stroke",function(e,t){return s(e,t)}),L.transition().style("stroke-opacity",1).style("fill-opacity",.5);var A=L.selectAll("path.nv-area").data(function(e){return f(e)?[e]:[]});A.enter().append("path").attr("class","nv-area").attr("d",function(t){return d3.svg.area().interpolate(p).defined(a).x(function(t,n){return e. 
utils.NaNtoZero(d(o(t,n)))}).y0(function(t,n){return e.utils.NaNtoZero(v(u(t,n)))}).y1(function(e,t){return v(h.domain()[0]<=0?h.domain()[1]>=0?0:h.domain()[1]:h.domain()[0])}).apply(this,[t.values])}),L.exit().selectAll("path.nv-area").remove(),A.transition().attr("d",function(t){return d3.svg.area().interpolate(p).defined(a).x(function(t,n){return e.utils.NaNtoZero(c(o(t,n)))}).y0(function(t,n){return e.utils.NaNtoZero(h(u(t,n)))}).y1(function(e,t){return h(h.domain()[0]<=0?h.domain()[1]>=0?0:h.domain()[1]:h.domain()[0])}).apply(this,[t.values])});var O=L.selectAll("path.nv-line").data(function(e){return[e.values]});O.enter().append("path").attr("class","nv-line").attr("d",d3.svg.line().interpolate(p).defined(a).x(function(t,n){return e.utils.NaNtoZero(d(o(t,n)))}).y(function(t,n){return e.utils.NaNtoZero(v(u(t,n)))})),O.transition().attr("d",d3.svg.line().interpolate(p).defined(a).x(function(t,n){return e.utils.NaNtoZero(c(o(t,n)))}).y(function(t,n){return e.utils.NaNtoZero(h(u(t,n)))})),d=c.copy(),v=h.copy()}),m}var t=e.models.scatter(),n={top:0,right:0,bottom:0,left:0},r=960,i=500,s=e.utils.defaultColor(),o=function(e){return e.x},u=function(e){return e.y},a=function(e,t){return!isNaN(u(e,t))&&u(e,t)!==null},f=function(e){return e.area},l=!1,c,h,p="linear";t.size(16).sizeDomain([16,256]);var d,v;return m.dispatch=t.dispatch,m.scatter=t,d3.rebind(m,t,"id","interactive","size","xScale","yScale","zScale","xDomain","yDomain","xRange","yRange","sizeDomain","forceX","forceY","forceSize","clipVoronoi","useVoronoi","clipRadius","padData","highlightPoint","clearHighlights"),m.options=e.utils.optionsFunc.bind(m),m.margin=function(e){return arguments.length?(n.top=typeof e.top!="undefined"?e.top:n.top,n.right=typeof e.right!="undefined"?e.right:n.right,n.bottom=typeof e.bottom!="undefined"?e.bottom:n.bottom,n.left=typeof e.left!="undefined"?e.left:n.left,m):n},m.width=function(e){return arguments.length?(r=e,m):r},m.height=function(e){return 
arguments.length?(i=e,m):i},m.x=function(e){return arguments.length?(o=e,t.x(e),m):o},m.y=function(e){return arguments.length?(u=e,t.y(e),m):u},m.clipEdge=function(e){return arguments.length?(l=e,m):l},m.color=function(n){return arguments.length?(s=e.utils.getColor(n),t.color(s),m):s},m.interpolate=function(e){return arguments.length?(p=e,m):p},m.defined=function(e){return arguments.length?(a=e,m):a},m.isArea=function(e){return arguments.length?(f=d3.functor(e),m):f},m},e.models.lineChart=function(){"use strict";function N(m){return m.each(function(m){var C=d3.select(this),k=this,L=(a||parseInt(C.style("width"))||960)-o.left-o.right,A=(f||parseInt(C.style("height"))||400)-o.top-o.bottom;N.update=function(){C.transition().duration(x).call(N)},N.container=this,b.disabled=m.map(function(e){return!!e.disabled});if(!w){var O;w={};for(O in b)b[O]instanceof Array?w[O]=b[O].slice(0):w[O]=b[O]}if(!m||!m.length||!m.filter(function(e){return e.values.length}).length){var M=C.selectAll(".nv-noData").data([E]);return M.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),M.attr("x",o.left+L/2).attr("y",o.top+A/2).text(function(e){return e}),N}C.selectAll(".nv-noData").remove(),g=t.xScale(),y=t.yScale();var _=C.selectAll("g.nv-wrap.nv-lineChart").data([m]),D=_.enter().append("g").attr("class","nvd3 nv-wrap nv-lineChart").append("g"),P=_.select("g");D.append("rect").style("opacity",0),D.append("g").attr("class","nv-x nv-axis"),D.append("g").attr("class","nv-y nv-axis"),D.append("g").attr("class","nv-linesWrap"),D.append("g").attr("class","nv-legendWrap"),D.append("g").attr("class","nv-interactive"),P.select("rect").attr("width",L).attr("height",A>0?A:0),l&&(i.width(L),P.select(".nv-legendWrap").datum(m).call(i),o.top!=i.height()&&(o.top=i.height(),A=(f||parseInt(C.style("height"))||400)-o.top-o.bottom),_.select(".nv-legendWrap").attr("transform","translate(0,"+ 
-o.top+")")),_.attr("transform","translate("+o.left+","+o.top+")"),p&&P.select(".nv-y.nv-axis").attr("transform","translate("+L+",0)"),d&&(s.width(L).height(A).margin({left:o.left,top:o.top}).svgContainer(C).xScale(g),_.select(".nv-interactive").call(s)),t.width(L).height(A).color(m.map(function(e,t){return e.color||u(e,t)}).filter(function(e,t){return!m[t].disabled}));var H=P.select(".nv-linesWrap").datum(m.filter(function(e){return!e.disabled}));H.transition().call(t),c&&(n.scale(g).ticks(L/100).tickSize(-A,0),P.select(".nv-x.nv-axis").attr("transform","translate(0,"+y.range()[0]+")"),P.select(".nv-x.nv-axis").transition().call(n)),h&&(r.scale(y).ticks(A/36).tickSize(-L,0),P.select(".nv-y.nv-axis").transition().call(r)),i.dispatch.on("stateChange",function(e){b=e,S.stateChange(b),N.update()}),s.dispatch.on("elementMousemove",function(i){t.clearHighlights();var a,f,l,c=[];m.filter(function(e,t){return e.seriesIndex=t,!e.disabled}).forEach(function(n,r){f=e.interactiveBisect(n.values,i.pointXValue,N.x()),t.highlightPoint(r,f,!0);var s=n.values[f];if(typeof s=="undefined")return;typeof a=="undefined"&&(a=s),typeof l=="undefined"&&(l=N.xScale()(N.x()(s,f))),c.push({key:n.key,value:N.y()(s,f),color:u(n,n.seriesIndex)})});if(c.length>2){var h=N.yScale().invert(i.mouseY),p=Math.abs(N.yScale().domain()[0]-N.yScale().domain()[1]),d=.03*p,g=e.nearestValueIndex(c.map(function(e){return e.value}),h,d);g!==null&&(c[g].highlight=!0)}var y=n.tickFormat()(N.x()(a,f));s.tooltip.position({left:l+o.left,top:i.mouseY+o.top}).chartContainer(k.parentNode).enabled(v).valueFormatter(function(e,t){return r.tickFormat()(e)}).data({value:y,series:c})(),s.renderGuideLine(l)}),s.dispatch.on("elementMouseout",function(e){S.tooltipHide(),t.clearHighlights()}),S.on("tooltipShow",function(e){v&&T(e,k.parentNode)}),S.on("changeState",function(e){typeof 
e.disabled!="undefined"&&m.length===e.disabled.length&&(m.forEach(function(t,n){t.disabled=e.disabled[n]}),b.disabled=e.disabled),N.update()})}),N}var t=e.models.line(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.interactiveGuideline(),o={top:30,right:20,bottom:50,left:60},u=e.utils.defaultColor(),a=null,f=null,l=!0,c=!0,h=!0,p=!1,d=!1,v=!0,m=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},g,y,b={},w=null,E="No Data Available.",S=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),x=250;n.orient("bottom").tickPadding(7),r.orient(p?"right":"left");var T=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=m(i.series.key,a,f,i,N);e.tooltip.show([o,u],l,null,null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+o.left,e.pos[1]+o.top],S.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){S.tooltipHide(e)}),S.on("tooltipHide",function(){v&&e.tooltip.cleanup()}),N.dispatch=S,N.lines=t,N.legend=i,N.xAxis=n,N.yAxis=r,N.interactiveLayer=s,d3.rebind(N,t,"defined","isArea","x","y","size","xScale","yScale","xDomain","yDomain","xRange","yRange","forceX","forceY","interactive","clipEdge","clipVoronoi","useVoronoi","id","interpolate"),N.options=e.utils.optionsFunc.bind(N),N.margin=function(e){return arguments.length?(o.top=typeof e.top!="undefined"?e.top:o.top,o.right=typeof e.right!="undefined"?e.right:o.right,o.bottom=typeof e.bottom!="undefined"?e.bottom:o.bottom,o.left=typeof e.left!="undefined"?e.left:o.left,N):o},N.width=function(e){return arguments.length?(a=e,N):a},N.height=function(e){return arguments.length?(f=e,N):f},N.color=function(t){return arguments.length?(u=e.utils.getColor(t),i.color(u),N):u},N.showLegend=function(e){return arguments.length?(l=e,N):l},N.showXAxis=function(e){return arguments.length?(c=e,N):c},N.showYAxis=function(e){return arguments.length?(h=e,N):h},N.rightAlignYAxis=function(e){return arguments.length?(p=e,r.orient(e?"right":"left"),N):p},N.useInteractiveGuideline=function(e){return arguments.length?(d=e,e===!0&&(N.interactive(!1),N.useVoronoi(!1)),N):d},N.tooltips=function(e){return arguments.length?(v=e,N):v},N.tooltipContent=function(e){return arguments.length?(m=e,N):m},N.state=function(e){return 
arguments.length?(b=e,N):b},N.defaultState=function(e){return arguments.length?(w=e,N):w},N.noData=function(e){return arguments.length?(E=e,N):E},N.transitionDuration=function(e){return arguments.length?(x=e,N):x},N},e.models.linePlusBarChart=function(){"use strict";function T(e){return e.each(function(e){var l=d3.select(this),c=this,v=(a||parseInt(l.style("width"))||960)-u.left-u.right,N=(f||parseInt(l.style("height"))||400)-u.top-u.bottom;T.update=function(){l.transition().call(T)},b.disabled=e.map(function(e){return!!e.disabled});if(!w){var C;w={};for(C in b)b[C]instanceof Array?w[C]=b[C].slice(0):w[C]=b[C]}if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var k=l.selectAll(".nv-noData").data([E]);return k.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),k.attr("x",u.left+v/2).attr("y",u.top+N/2).text(function(e){return e}),T}l.selectAll(".nv-noData").remove();var L=e.filter(function(e){return!e.disabled&&e.bar}),A=e.filter(function(e){return!e.bar});m=A.filter(function(e){return!e.disabled}).length&&A.filter(function(e){return!e.disabled})[0].values.length?t.xScale():n.xScale(),g=n.yScale(),y=t.yScale();var O=d3.select(this).selectAll("g.nv-wrap.nv-linePlusBar").data([e]),M=O.enter().append("g").attr("class","nvd3 nv-wrap nv-linePlusBar").append("g"),_=O.select("g");M.append("g").attr("class","nv-x nv-axis"),M.append("g").attr("class","nv-y1 nv-axis"),M.append("g").attr("class","nv-y2 nv-axis"),M.append("g").attr("class","nv-barsWrap"),M.append("g").attr("class","nv-linesWrap"),M.append("g").attr("class","nv-legendWrap"),p&&(o.width(v/2),_.select(".nv-legendWrap").datum(e.map(function(e){return e.originalKey=e.originalKey===undefined?e.key:e.originalKey,e.key=e.originalKey+(e.bar?" 
(left axis)":" (right axis)"),e})).call(o),u.top!=o.height()&&(u.top=o.height(),N=(f||parseInt(l.style("height"))||400)-u.top-u.bottom),_.select(".nv-legendWrap").attr("transform","translate("+v/2+","+ -u.top+")")),O.attr("transform","translate("+u.left+","+u.top+")"),t.width(v).height(N).color(e.map(function(e,t){return e.color||h(e,t)}).filter(function(t,n){return!e[n].disabled&&!e[n].bar})),n.width(v).height(N).color(e.map(function(e,t){return e.color||h(e,t)}).filter(function(t,n){return!e[n].disabled&&e[n].bar}));var D=_.select(".nv-barsWrap").datum(L.length?L:[{values:[]}]),P=_.select(".nv-linesWrap").datum(A[0]&&!A[0].disabled?A:[{values:[]}]);d3.transition(D).call(n),d3.transition(P).call(t),r.scale(m).ticks(v/100).tickSize(-N,0),_.select(".nv-x.nv-axis").attr("transform","translate(0,"+g.range()[0]+")"),d3.transition(_.select(".nv-x.nv-axis")).call(r),i.scale(g).ticks(N/36).tickSize(-v,0),d3.transition(_.select(".nv-y1.nv-axis")).style("opacity",L.length?1:0).call(i),s.scale(y).ticks(N/36).tickSize(L.length?0:-v,0),_.select(".nv-y2.nv-axis").style("opacity",A.length?1:0).attr("transform","translate("+v+",0)"),d3.transition(_.select(".nv-y2.nv-axis")).call(s),o.dispatch.on("stateChange",function(e){b=e,S.stateChange(b),T.update()}),S.on("tooltipShow",function(e){d&&x(e,c.parentNode)}),S.on("changeState",function(t){typeof t.disabled!="undefined"&&(e.forEach(function(e,n){e.disabled=t.disabled[n]}),b.disabled=t.disabled),T.update()})}),T}var t=e.models.line(),n=e.models.historicalBar(),r=e.models.axis(),i=e.models.axis(),s=e.models.axis(),o=e.models.legend(),u={top:30,right:60,bottom:50,left:60},a=null,f=null,l=function(e){return e.x},c=function(e){return e.y},h=e.utils.defaultColor(),p=!0,d=!0,v=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},m,g,y,b={},w=null,E="No Data Available.",S=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState");n.padData(!0),t.clipEdge(!1).padData(!0),r.orient("bottom").tickPadding(7).highlightZero(!1),i.orient("left"),s.orient("right");var x=function(n,o){var u=n.pos[0]+(o.offsetLeft||0),a=n.pos[1]+(o.offsetTop||0),f=r.tickFormat()(t.x()(n.point,n.pointIndex)),l=(n.series.bar?i:s).tickFormat()(t.y()(n.point,n.pointIndex)),c=v(n.series.key,f,l,n,T);e.tooltip.show([u,a],c,n.value<0?"n":"s",null,o)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+u.left,e.pos[1]+u.top],S.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){S.tooltipHide(e)}),n.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+u.left,e.pos[1]+u.top],S.tooltipShow(e)}),n.dispatch.on("elementMouseout.tooltip",function(e){S.tooltipHide(e)}),S.on("tooltipHide",function(){d&&e.tooltip.cleanup()}),T.dispatch=S,T.legend=o,T.lines=t,T.bars=n,T.xAxis=r,T.y1Axis=i,T.y2Axis=s,d3.rebind(T,t,"defined","size","clipVoronoi","interpolate"),T.options=e.utils.optionsFunc.bind(T),T.x=function(e){return arguments.length?(l=e,t.x(e),n.x(e),T):l},T.y=function(e){return arguments.length?(c=e,t.y(e),n.y(e),T):c},T.margin=function(e){return arguments.length?(u.top=typeof e.top!="undefined"?e.top:u.top,u.right=typeof e.right!="undefined"?e.right:u.right,u.bottom=typeof e.bottom!="undefined"?e.bottom:u.bottom,u.left=typeof e.left!="undefined"?e.left:u.left,T):u},T.width=function(e){return arguments.length?(a=e,T):a},T.height=function(e){return arguments.length?(f=e,T):f},T.color=function(t){return arguments.length?(h=e.utils.getColor(t),o.color(h),T):h},T.showLegend=function(e){return arguments.length?(p=e,T):p},T.tooltips=function(e){return arguments.length?(d=e,T):d},T.tooltipContent=function(e){return arguments.length?(v=e,T):v},T.state=function(e){return arguments.length?(b=e,T):b},T.defaultState=function(e){return 
arguments.length?(w=e,T):w},T.noData=function(e){return arguments.length?(E=e,T):E},T},e.models.lineWithFocusChart=function(){"use strict";function k(e){return e.each(function(e){function U(e){var t=+(e=="e"),n=t?1:-1,r=M/3;return"M"+.5*n+","+r+"A6,6 0 0 "+t+" "+6.5*n+","+(r+6)+"V"+(2*r-6)+"A6,6 0 0 "+t+" "+.5*n+","+2*r+"Z"+"M"+2.5*n+","+(r+8)+"V"+(2*r-8)+"M"+4.5*n+","+(r+8)+"V"+(2*r-8)}function z(){a.empty()||a.extent(w),I.data([a.empty()?g.domain():w]).each(function(e,t){var n=g(e[0])-v.range()[0],r=v.range()[1]-g(e[1]);d3.select(this).select(".left").attr("width",n<0?0:n),d3.select(this).select(".right").attr("x",g(e[1])).attr("width",r<0?0:r)})}function W(){w=a.empty()?null:a.extent();var n=a.empty()?g.domain():a.extent();if(Math.abs(n[0]-n[1])<=1)return;T.brush({extent:n,brush:a}),z();var s=H.select(".nv-focus .nv-linesWrap").datum(e.filter(function(e){return!e.disabled}).map(function(e,r){return{key:e.key,values:e.values.filter(function(e,r){return t.x()(e,r)>=n[0]&&t.x()(e,r)<=n[1]})}}));s.transition().duration(N).call(t),H.select(".nv-focus .nv-x.nv-axis").transition().duration(N).call(r),H.select(".nv-focus .nv-y.nv-axis").transition().duration(N).call(i)}var S=d3.select(this),L=this,A=(h||parseInt(S.style("width"))||960)-f.left-f.right,O=(p||parseInt(S.style("height"))||400)-f.top-f.bottom-d,M=d-l.top-l.bottom;k.update=function(){S.transition().duration(N).call(k)},k.container=this;if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var _=S.selectAll(".nv-noData").data([x]);return _.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),_.attr("x",f.left+A/2).attr("y",f.top+O/2).text(function(e){return e}),k}S.selectAll(".nv-noData").remove(),v=t.xScale(),m=t.yScale(),g=n.xScale(),y=n.yScale();var D=S.selectAll("g.nv-wrap.nv-lineWithFocusChart").data([e]),P=D.enter().append("g").attr("class","nvd3 nv-wrap 
nv-lineWithFocusChart").append("g"),H=D.select("g");P.append("g").attr("class","nv-legendWrap");var B=P.append("g").attr("class","nv-focus");B.append("g").attr("class","nv-x nv-axis"),B.append("g").attr("class","nv-y nv-axis"),B.append("g").attr("class","nv-linesWrap");var j=P.append("g").attr("class","nv-context");j.append("g").attr("class","nv-x nv-axis"),j.append("g").attr("class","nv-y nv-axis"),j.append("g").attr("class","nv-linesWrap"),j.append("g").attr("class","nv-brushBackground"),j.append("g").attr("class","nv-x nv-brush"),b&&(u.width(A),H.select(".nv-legendWrap").datum(e).call(u),f.top!=u.height()&&(f.top=u.height(),O=(p||parseInt(S.style("height"))||400)-f.top-f.bottom-d),H.select(".nv-legendWrap").attr("transform","translate(0,"+ -f.top+")")),D.attr("transform","translate("+f.left+","+f.top+")"),t.width(A).height(O).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),n.defined(t.defined()).width(A).height(M).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),H.select(".nv-context").attr("transform","translate(0,"+(O+f.bottom+l.top)+")");var F=H.select(".nv-context .nv-linesWrap").datum(e.filter(function(e){return!e.disabled}));d3.transition(F).call(n),r.scale(v).ticks(A/100).tickSize(-O,0),i.scale(m).ticks(O/36).tickSize(-A,0),H.select(".nv-focus .nv-x.nv-axis").attr("transform","translate(0,"+O+")"),a.x(g).on("brush",function(){var e=k.transitionDuration();k.transitionDuration(0),W(),k.transitionDuration(e)}),w&&a.extent(w);var I=H.select(".nv-brushBackground").selectAll("g").data([w||a.extent()]),q=I.enter().append("g");q.append("rect").attr("class","left").attr("x",0).attr("y",0).attr("height",M),q.append("rect").attr("class","right").attr("x",0).attr("y",0).attr("height",M);var 
R=H.select(".nv-x.nv-brush").call(a);R.selectAll("rect").attr("height",M),R.selectAll(".resize").append("path").attr("d",U),W(),s.scale(g).ticks(A/100).tickSize(-M,0),H.select(".nv-context .nv-x.nv-axis").attr("transform","translate(0,"+y.range()[0]+")"),d3.transition(H.select(".nv-context .nv-x.nv-axis")).call(s),o.scale(y).ticks(M/36).tickSize(-A,0),d3.transition(H.select(".nv-context .nv-y.nv-axis")).call(o),H.select(".nv-context .nv-x.nv-axis").attr("transform","translate(0,"+y.range()[0]+")"),u.dispatch.on("stateChange",function(e){k.update()}),T.on("tooltipShow",function(e){E&&C(e,L.parentNode)})}),k}var t=e.models.line(),n=e.models.line(),r=e.models.axis(),i=e.models.axis(),s=e.models.axis(),o=e.models.axis(),u=e.models.legend(),a=d3.svg.brush(),f={top:30,right:30,bottom:30,left:60},l={top:0,right:30,bottom:20,left:60},c=e.utils.defaultColor(),h=null,p=null,d=100,v,m,g,y,b=!0,w=null,E=!0,S=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},x="No Data Available.",T=d3.dispatch("tooltipShow","tooltipHide","brush"),N=250;t.clipEdge(!0),n.interactive(!1),r.orient("bottom").tickPadding(5),i.orient("left"),s.orient("bottom").tickPadding(5),o.orient("left");var C=function(n,s){var o=n.pos[0]+(s.offsetLeft||0),u=n.pos[1]+(s.offsetTop||0),a=r.tickFormat()(t.x()(n.point,n.pointIndex)),f=i.tickFormat()(t.y()(n.point,n.pointIndex)),l=S(n.series.key,a,f,n,k);e.tooltip.show([o,u],l,null,null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+f.left,e.pos[1]+f.top],T.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),T.on("tooltipHide",function(){E&&e.tooltip.cleanup()}),k.dispatch=T,k.legend=u,k.lines=t,k.lines2=n,k.xAxis=r,k.yAxis=i,k.x2Axis=s,k.y2Axis=o,d3.rebind(k,t,"defined","isArea","size","xDomain","yDomain","xRange","yRange","forceX","forceY","interactive","clipEdge","clipVoronoi","id"),k.options=e.utils.optionsFunc.bind(k),k.x=function(e){return arguments.length?(t.x(e),n.x(e),k):t.x},k.y=function(e){return arguments.length?(t.y(e),n.y(e),k):t.y},k.margin=function(e){return arguments.length?(f.top=typeof e.top!="undefined"?e.top:f.top,f.right=typeof e.right!="undefined"?e.right:f.right,f.bottom=typeof e.bottom!="undefined"?e.bottom:f.bottom,f.left=typeof e.left!="undefined"?e.left:f.left,k):f},k.margin2=function(e){return arguments.length?(l=e,k):l},k.width=function(e){return arguments.length?(h=e,k):h},k.height=function(e){return arguments.length?(p=e,k):p},k.height2=function(e){return arguments.length?(d=e,k):d},k.color=function(t){return arguments.length?(c=e.utils.getColor(t),u.color(c),k):c},k.showLegend=function(e){return arguments.length?(b=e,k):b},k.tooltips=function(e){return arguments.length?(E=e,k):E},k.tooltipContent=function(e){return arguments.length?(S=e,k):S},k.interpolate=function(e){return arguments.length?(t.interpolate(e),n.interpolate(e),k):t.interpolate()},k.noData=function(e){return 
arguments.length?(x=e,k):x},k.xTickFormat=function(e){return arguments.length?(r.tickFormat(e),s.tickFormat(e),k):r.tickFormat()},k.yTickFormat=function(e){return arguments.length?(i.tickFormat(e),o.tickFormat(e),k):i.tickFormat()},k.brushExtent=function(e){return arguments.length?(w=e,k):w},k.transitionDuration=function(e){return arguments.length?(N=e,k):N},k},e.models.linePlusBarWithFocusChart=function(){"use strict";function B(e){return e.each(function(e){function nt(e){var t=+(e=="e"),n=t?1:-1,r=q/3;return"M"+.5*n+","+r+"A6,6 0 0 "+t+" "+6.5*n+","+(r+6)+"V"+(2*r-6)+"A6,6 0 0 "+t+" "+.5*n+","+2*r+"Z"+"M"+2.5*n+","+(r+8)+"V"+(2*r-8)+"M"+4.5*n+","+(r+8)+"V"+(2*r-8)}function rt(){h.empty()||h.extent(x),Z.data([h.empty()?k.domain():x]).each(function(e,t){var n=k(e[0])-k.range()[0],r=k.range()[1]-k(e[1]);d3.select(this).select(".left").attr("width",n<0?0:n),d3.select(this).select(".right").attr("x",k(e[1])).attr("width",r<0?0:r)})}function it(){x=h.empty()?null:h.extent(),S=h.empty()?k.domain():h.extent(),D.brush({extent:S,brush:h}),rt(),r.width(F).height(I).color(e.map(function(e,t){return e.color||w(e,t)}).filter(function(t,n){return!e[n].disabled&&e[n].bar})),t.width(F).height(I).color(e.map(function(e,t){return e.color||w(e,t)}).filter(function(t,n){return!e[n].disabled&&!e[n].bar}));var n=J.select(".nv-focus .nv-barsWrap").datum(U.length?U.map(function(e,t){return{key:e.key,values:e.values.filter(function(e,t){return r.x()(e,t)>=S[0]&&r.x()(e,t)<=S[1]})}}):[{values:[]}]),i=J.select(".nv-focus .nv-linesWrap").datum(z[0].disabled?[{values:[]}]:z.map(function(e,n){return{key:e.key,values:e.values.filter(function(e,n){return t.x()(e,n)>=S[0]&&t.x()(e,n)<=S[1]})}}));U.length?C=r.xScale():C=t.xScale(),s.scale(C).ticks(F/100).tickSize(-I,0),s.domain([Math.ceil(S[0]),Math.floor(S[1])]),J.select(".nv-x.nv-axis").transition().duration(P).call(s),n.transition().duration(P).call(r),i.transition().duration(P).call(t),J.select(".nv-focus 
.nv-x.nv-axis").attr("transform","translate(0,"+L.range()[0]+")"),u.scale(L).ticks(I/36).tickSize(-F,0),J.select(".nv-focus .nv-y1.nv-axis").style("opacity",U.length?1:0),a.scale(A).ticks(I/36).tickSize(U.length?0:-F,0),J.select(".nv-focus .nv-y2.nv-axis").style("opacity",z.length?1:0).attr("transform","translate("+C.range()[1]+",0)"),J.select(".nv-focus .nv-y1.nv-axis").transition().duration(P).call(u),J.select(".nv-focus .nv-y2.nv-axis").transition().duration(P).call(a)}var N=d3.select(this),j=this,F=(v||parseInt(N.style("width"))||960)-p.left-p.right,I=(m||parseInt(N.style("height"))||400)-p.top-p.bottom-g,q=g-d.top-d.bottom;B.update=function(){N.transition().duration(P).call(B)},B.container=this;if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var R=N.selectAll(".nv-noData").data([_]);return R.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),R.attr("x",p.left+F/2).attr("y",p.top+I/2).text(function(e){return e}),B}N.selectAll(".nv-noData").remove();var U=e.filter(function(e){return!e.disabled&&e.bar}),z=e.filter(function(e){return!e.bar});C=r.xScale(),k=o.scale(),L=r.yScale(),A=t.yScale(),O=i.yScale(),M=n.yScale();var W=e.filter(function(e){return!e.disabled&&e.bar}).map(function(e){return e.values.map(function(e,t){return{x:y(e,t),y:b(e,t)}})}),X=e.filter(function(e){return!e.disabled&&!e.bar}).map(function(e){return e.values.map(function(e,t){return{x:y(e,t),y:b(e,t)}})});C.range([0,F]),k.domain(d3.extent(d3.merge(W.concat(X)),function(e){return e.x})).range([0,F]);var V=N.selectAll("g.nv-wrap.nv-linePlusBar").data([e]),$=V.enter().append("g").attr("class","nvd3 nv-wrap nv-linePlusBar").append("g"),J=V.select("g");$.append("g").attr("class","nv-legendWrap");var K=$.append("g").attr("class","nv-focus");K.append("g").attr("class","nv-x nv-axis"),K.append("g").attr("class","nv-y1 nv-axis"),K.append("g").attr("class","nv-y2 
nv-axis"),K.append("g").attr("class","nv-barsWrap"),K.append("g").attr("class","nv-linesWrap");var Q=$.append("g").attr("class","nv-context");Q.append("g").attr("class","nv-x nv-axis"),Q.append("g").attr("class","nv-y1 nv-axis"),Q.append("g").attr("class","nv-y2 nv-axis"),Q.append("g").attr("class","nv-barsWrap"),Q.append("g").attr("class","nv-linesWrap"),Q.append("g").attr("class","nv-brushBackground"),Q.append("g").attr("class","nv-x nv-brush"),E&&(c.width(F/2),J.select(".nv-legendWrap").datum(e.map(function(e){return e.originalKey=e.originalKey===undefined?e.key:e.originalKey,e.key=e.originalKey+(e.bar?" (left axis)":" (right axis)"),e})).call(c),p.top!=c.height()&&(p.top=c.height(),I=(m||parseInt(N.style("height"))||400)-p.top-p.bottom-g),J.select(".nv-legendWrap").attr("transform","translate("+F/2+","+ -p.top+")")),V.attr("transform","translate("+p.left+","+p.top+")"),i.width(F).height(q).color(e.map(function(e,t){return e.color||w(e,t)}).filter(function(t,n){return!e[n].disabled&&e[n].bar})),n.width(F).height(q).color(e.map(function(e,t){return e.color||w(e,t)}).filter(function(t,n){return!e[n].disabled&&!e[n].bar}));var G=J.select(".nv-context .nv-barsWrap").datum(U.length?U:[{values:[]}]),Y=J.select(".nv-context .nv-linesWrap").datum(z[0].disabled?[{values:[]}]:z);J.select(".nv-context").attr("transform","translate(0,"+(I+p.bottom+d.top)+")"),G.transition().call(i),Y.transition().call(n),h.x(k).on("brush",it),x&&h.extent(x);var Z=J.select(".nv-brushBackground").selectAll("g").data([x||h.extent()]),et=Z.enter().append("g");et.append("rect").attr("class","left").attr("x",0).attr("y",0).attr("height",q),et.append("rect").attr("class","right").attr("x",0).attr("y",0).attr("height",q);var tt=J.select(".nv-x.nv-brush").call(h);tt.selectAll("rect").attr("height",q),tt.selectAll(".resize").append("path").attr("d",nt),o.ticks(F/100).tickSize(-q,0),J.select(".nv-context .nv-x.nv-axis").attr("transform","translate(0,"+O.range()[0]+")"),J.select(".nv-context 
.nv-x.nv-axis").transition().call(o),f.scale(O).ticks(q/36).tickSize(-F,0),J.select(".nv-context .nv-y1.nv-axis").style("opacity",U.length?1:0).attr("transform","translate(0,"+k.range()[0]+")"),J.select(".nv-context .nv-y1.nv-axis").transition().call(f),l.scale(M).ticks(q/36).tickSize(U.length?0:-F,0),J.select(".nv-context .nv-y2.nv-axis").style("opacity",z.length?1:0).attr("transform","translate("+k.range()[1]+",0)"),J.select(".nv-context .nv-y2.nv-axis").transition().call(l),c.dispatch.on("stateChange",function(e){B.update()}),D.on("tooltipShow",function(e){T&&H(e,j.parentNode)}),it()}),B}var t=e.models.line(),n=e.models.line(),r=e.models.historicalBar(),i=e.models.historicalBar(),s=e.models.axis(),o=e.models.axis(),u=e.models.axis(),a=e.models.axis(),f=e.models.axis(),l=e.models.axis(),c=e.models.legend(),h=d3.svg.brush(),p={top:30,right:30,bottom:30,left:60},d={top:0,right:30,bottom:20,left:60},v=null,m=null,g=100,y=function(e){return e.x},b=function(e){return e.y},w=e.utils.defaultColor(),E=!0,S,x=null,T=!0,N=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},C,k,L,A,O,M,_="No Data Available.",D=d3.dispatch("tooltipShow","tooltipHide","brush"),P=0;t.clipEdge(!0),n.interactive(!1),s.orient("bottom").tickPadding(5),u.orient("left"),a.orient("right"),o.orient("bottom").tickPadding(5),f.orient("left"),l.orient("right");var H=function(n,r){S&&(n.pointIndex+=Math.ceil(S[0]));var i=n.pos[0]+(r.offsetLeft||0),o=n.pos[1]+(r.offsetTop||0),f=s.tickFormat()(t.x()(n.point,n.pointIndex)),l=(n.series.bar?u:a).tickFormat()(t.y()(n.point,n.pointIndex)),c=N(n.series.key,f,l,n,B);e.tooltip.show([i,o],c,n.value<0?"n":"s",null,r)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+p.left,e.pos[1]+p.top],D.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){D.tooltipHide(e)}),r.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+p.left,e.pos[1]+p.top],D.tooltipShow(e)}),r.dispatch.on("elementMouseout.tooltip",function(e){D.tooltipHide(e)}),D.on("tooltipHide",function(){T&&e.tooltip.cleanup()}),B.dispatch=D,B.legend=c,B.lines=t,B.lines2=n,B.bars=r,B.bars2=i,B.xAxis=s,B.x2Axis=o,B.y1Axis=u,B.y2Axis=a,B.y3Axis=f,B.y4Axis=l,d3.rebind(B,t,"defined","size","clipVoronoi","interpolate"),B.options=e.utils.optionsFunc.bind(B),B.x=function(e){return arguments.length?(y=e,t.x(e),r.x(e),B):y},B.y=function(e){return arguments.length?(b=e,t.y(e),r.y(e),B):b},B.margin=function(e){return arguments.length?(p.top=typeof e.top!="undefined"?e.top:p.top,p.right=typeof e.right!="undefined"?e.right:p.right,p.bottom=typeof e.bottom!="undefined"?e.bottom:p.bottom,p.left=typeof e.left!="undefined"?e.left:p.left,B):p},B.width=function(e){return arguments.length?(v=e,B):v},B.height=function(e){return arguments.length?(m=e,B):m},B.color=function(t){return arguments.length?(w=e.utils.getColor(t),c.color(w),B):w},B.showLegend=function(e){return arguments.length?(E=e,B):E},B.tooltips=function(e){return arguments.length?(T=e,B):T},B.tooltipContent=function(e){return 
arguments.length?(N=e,B):N},B.noData=function(e){return arguments.length?(_=e,B):_},B.brushExtent=function(e){return arguments.length?(x=e,B):x},B},e.models.multiBar=function(){"use strict";function C(e){return e.each(function(e){var C=n-t.left-t.right,k=r-t.top-t.bottom,L=d3.select(this);d&&e.length&&(d=[{values:e[0].values.map(function(e){return{x:e.x,y:0,series:e.series,size:.01}})}]),c&&(e=d3.layout.stack().offset(h).values(function(e){return e.values}).y(a)(!e.length&&d?d:e)),e.forEach(function(e,t){e.values.forEach(function(e){e.series=t})}),c&&e[0].values.map(function(t,n){var r=0,i=0;e.map(function(e){var t=e.values[n];t.size=Math.abs(t.y),t.y<0?(t.y1=i,i-=t.size):(t.y1=t.size+r,r+=t.size)})});var A=y&&b?[]:e.map(function(e){return e.values.map(function(e,t){return{x:u(e,t),y:a(e,t),y0:e.y0,y1:e.y1}})});i.domain(y||d3.merge(A).map(function(e){return e.x})).rangeBands(w||[0,C],S),s.domain(b||d3.extent(d3.merge(A).map(function(e){return c?e.y>0?e.y1:e.y1+e.y:e.y}).concat(f))).range(E||[k,0]),i.domain()[0]===i.domain()[1]&&(i.domain()[0]?i.domain([i.domain()[0]-i.domain()[0]*.01,i.domain()[1]+i.domain()[1]*.01]):i.domain([-1,1])),s.domain()[0]===s.domain()[1]&&(s.domain()[0]?s.domain([s.domain()[0]+s.domain()[0]*.01,s.domain()[1]-s.domain()[1]*.01]):s.domain([-1,1])),T=T||i,N=N||s;var O=L.selectAll("g.nv-wrap.nv-multibar").data([e]),M=O.enter().append("g").attr("class","nvd3 nv-wrap nv-multibar"),_=M.append("defs"),D=M.append("g"),P=O.select("g");D.append("g").attr("class","nv-groups"),O.attr("transform","translate("+t.left+","+t.top+")"),_.append("clipPath").attr("id","nv-edge-clip-"+o).append("rect"),O.select("#nv-edge-clip-"+o+" rect").attr("width",C).attr("height",k),P.attr("clip-path",l?"url(#nv-edge-clip-"+o+")":"");var H=O.select(".nv-groups").selectAll(".nv-group").data(function(e){return e},function(e,t){return 
t});H.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),H.exit().transition().selectAll("rect.nv-bar").delay(function(t,n){return n*g/e[0].values.length}).attr("y",function(e){return c?N(e.y0):N(0)}).attr("height",0).remove(),H.attr("class",function(e,t){return"nv-group nv-series-"+t}).classed("hover",function(e){return e.hover}).style("fill",function(e,t){return p(e,t)}).style("stroke",function(e,t){return p(e,t)}),H.transition().style("stroke-opacity",1).style("fill-opacity",.75);var B=H.selectAll("rect.nv-bar").data(function(t){return d&&!e.length?d.values:t.values});B.exit().remove();var j=B.enter().append("rect").attr("class",function(e,t){return a(e,t)<0?"nv-bar negative":"nv-bar positive"}).attr("x",function(t,n,r){return c?0:r*i.rangeBand()/e.length}).attr("y",function(e){return N(c?e.y0:0)}).attr("height",0).attr("width",i.rangeBand()/(c?1:e.length)).attr("transform",function(e,t){return"translate("+i(u(e,t))+",0)"});B.style("fill",function(e,t,n){return p(e,n,t)}).style("stroke",function(e,t,n){return p(e,n,t)}).on("mouseover",function(t,n){d3.select(this).classed("hover",!0),x.elementMouseover({value:a(t,n),point:t,series:e[t.series],pos:[i(u(t,n))+i.rangeBand()*(c?e.length/2:t.series+.5)/e.length,s(a(t,n)+(c?t.y0:0))],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("mouseout",function(t,n){d3.select(this).classed("hover",!1),x.elementMouseout({value:a(t,n),point:t,series:e[t.series],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("click",function(t,n){x.elementClick({value:a(t,n),point:t,series:e[t.series],pos:[i(u(t,n))+i.rangeBand()*(c?e.length/2:t.series+.5)/e.length 
,s(a(t,n)+(c?t.y0:0))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(t,n){x.elementDblClick({value:a(t,n),point:t,series:e[t.series],pos:[i(u(t,n))+i.rangeBand()*(c?e.length/2:t.series+.5)/e.length,s(a(t,n)+(c?t.y0:0))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()}),B.attr("class",function(e,t){return a(e,t)<0?"nv-bar negative":"nv-bar positive"}).transition().attr("transform",function(e,t){return"translate("+i(u(e,t))+",0)"}),v&&(m||(m=e.map(function(){return!0})),B.style("fill",function(e,t,n){return d3.rgb(v(e,t)).darker(m.map(function(e,t){return t}).filter(function(e,t){return!m[t]})[n]).toString()}).style("stroke",function(e,t,n){return d3.rgb(v(e,t)).darker(m.map(function(e,t){return t}).filter(function(e,t){return!m[t]})[n]).toString()})),c?B.transition().delay(function(t,n){return n*g/e[0].values.length}).attr("y",function(e,t){return s(c?e.y1:0)}).attr("height",function(e,t){return Math.max(Math.abs(s(e.y+(c?e.y0:0))-s(c?e.y0:0)),1)}).attr("x",function(t,n){return c?0:t.series*i.rangeBand()/e.length}).attr("width",i.rangeBand()/(c?1:e.length)):B.transition().delay(function(t,n){return n*g/e[0].values.length}).attr("x",function(t,n){return t.series*i.rangeBand()/e.length}).attr("width",i.rangeBand()/e.length).attr("y",function(e,t){return a(e,t)<0?s(0):s(0)-s(a(e,t))<1?s(0)-1:s(a(e,t))||0}).attr("height",function(e,t){return Math.max(Math.abs(s(a(e,t))-s(0)),1)||0}),T=i.copy(),N=s.copy()}),C}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=d3.scale.ordinal(),s=d3.scale.linear(),o=Math.floor(Math.random()*1e4),u=function(e){return e.x},a=function(e){return e.y},f=[0],l=!0,c=!1,h="zero",p=e.utils.defaultColor(),d=!1,v=null,m,g=1200,y,b,w,E,S=.1,x=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout"),T,N;return C.dispatch=x,C.options=e.utils.optionsFunc.bind(C),C.x=function(e){return arguments.length?(u=e,C):u},C.y=function(e){return 
arguments.length?(a=e,C):a},C.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,C):t},C.width=function(e){return arguments.length?(n=e,C):n},C.height=function(e){return arguments.length?(r=e,C):r},C.xScale=function(e){return arguments.length?(i=e,C):i},C.yScale=function(e){return arguments.length?(s=e,C):s},C.xDomain=function(e){return arguments.length?(y=e,C):y},C.yDomain=function(e){return arguments.length?(b=e,C):b},C.xRange=function(e){return arguments.length?(w=e,C):w},C.yRange=function(e){return arguments.length?(E=e,C):E},C.forceY=function(e){return arguments.length?(f=e,C):f},C.stacked=function(e){return arguments.length?(c=e,C):c},C.stackOffset=function(e){return arguments.length?(h=e,C):h},C.clipEdge=function(e){return arguments.length?(l=e,C):l},C.color=function(t){return arguments.length?(p=e.utils.getColor(t),C):p},C.barColor=function(t){return arguments.length?(v=e.utils.getColor(t),C):v},C.disabled=function(e){return arguments.length?(m=e,C):m},C.id=function(e){return arguments.length?(o=e,C):o},C.hideable=function(e){return arguments.length?(d=e,C):d},C.delay=function(e){return arguments.length?(g=e,C):g},C.groupSpacing=function(e){return arguments.length?(S=e,C):S},C},e.models.multiBarChart=function(){"use strict";function A(e){return e.each(function(e){var b=d3.select(this),O=this,M=(u||parseInt(b.style("width"))||960)-o.left-o.right,_=(a||parseInt(b.style("height"))||400)-o.top-o.bottom;A.update=function(){b.transition().duration(k).call(A)},A.container=this,S.disabled=e.map(function(e){return!!e.disabled});if(!x){var D;x={};for(D in S)S[D]instanceof Array?x[D]=S[D].slice(0):x[D]=S[D]}if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var P=b.selectAll(".nv-noData").data([T]);return P.enter().append("text").attr("class","nvd3 
nv-noData").attr("dy","-.7em").style("text-anchor","middle"),P.attr("x",o.left+M/2).attr("y",o.top+_/2).text(function(e){return e}),A}b.selectAll(".nv-noData").remove(),w=t.xScale(),E=t.yScale();var H=b.selectAll("g.nv-wrap.nv-multiBarWithLegend").data([e]),B=H.enter().append("g").attr("class","nvd3 nv-wrap nv-multiBarWithLegend").append("g"),j=H.select("g");B.append("g").attr("class","nv-x nv-axis"),B.append("g").attr("class","nv-y nv-axis"),B.append("g").attr("class","nv-barsWrap"),B.append("g").attr("class","nv-legendWrap"),B.append("g").attr("class","nv-controlsWrap"),c&&(i.width(M-C()),t.barColor()&&e.forEach(function(e,t){e.color=d3.rgb("#ccc").darker(t*1.5).toString()}),j.select(".nv-legendWrap").datum(e).call(i),o.top!=i.height()&&(o.top=i.height(),_=(a||parseInt(b.style("height"))||400)-o.top-o.bottom),j.select(".nv-legendWrap").attr("transform","translate("+C()+","+ -o.top+")"));if(l){var F=[{key:"Grouped",disabled:t.stacked()},{key:"Stacked",disabled:!t.stacked()}];s.width(C()).color(["#444","#444","#444"]),j.select(".nv-controlsWrap").datum(F).attr("transform","translate(0,"+ -o.top+")").call(s)}H.attr("transform","translate("+o.left+","+o.top+")"),d&&j.select(".nv-y.nv-axis").attr("transform","translate("+M+",0)"),t.disabled(e.map(function(e){return e.disabled})).width(M).height(_).color(e.map(function(e,t){return e.color||f(e,t)}).filter(function(t,n){return!e[n].disabled}));var I=j.select(".nv-barsWrap").datum(e.filter(function(e){return!e.disabled}));I.transition().call(t);if(h){n.scale(w).ticks(M/100).tickSize(-_,0),j.select(".nv-x.nv-axis").attr("transform","translate(0,"+E.range()[0]+")"),j.select(".nv-x.nv-axis").transition().call(n);var q=j.select(".nv-x.nv-axis > g").selectAll("g");q.selectAll("line, text").style("opacity",1);if(m){var R=function(e,t){return"translate("+e+","+t+")"},U=5,z=17;q.selectAll("text").attr("transform",function(e,t,n){return R(0,n%2==0?U:z)});var W=d3.selectAll(".nv-x.nv-axis .nv-wrap g g 
text")[0].length;j.selectAll(".nv-x.nv-axis .nv-axisMaxMin text").attr("transform",function(e,t){return R(0,t===0||W%2!==0?z:U)})}v&&q.filter(function(t,n){return n%Math.ceil(e[0].values.length/(M/100))!==0}).selectAll("text, line").style("opacity",0),g&&q.selectAll(".tick text").attr("transform","rotate("+g+" 0,0)").style("text-anchor",g>0?"start":"end"),j.select(".nv-x.nv-axis").selectAll("g.nv-axisMaxMin text").style("opacity",1)}p&&(r.scale(E).ticks(_/36).tickSize(-M,0),j.select(".nv-y.nv-axis").transition().call(r)),i.dispatch.on("stateChange",function(e){S=e,N.stateChange(S),A.update()}),s.dispatch.on("legendClick",function(e,n){if(!e.disabled)return;F=F.map(function(e){return e.disabled=!0,e}),e.disabled=!1;switch(e.key){case"Grouped":t.stacked(!1);break;case"Stacked":t.stacked(!0)}S.stacked=t.stacked(),N.stateChange(S),A.update()}),N.on("tooltipShow",function(e){y&&L(e,O.parentNode)}),N.on("changeState",function(n){typeof n.disabled!="undefined"&&(e.forEach(function(e,t){e.disabled=n.disabled[t]}),S.disabled=n.disabled),typeof n.stacked!="undefined"&&(t.stacked(n.stacked),S.stacked=n.stacked),A.update()})}),A}var t=e.models.multiBar(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.models.legend(),o={top:30,right:20,bottom:50,left:60},u=null,a=null,f=e.utils.defaultColor(),l=!0,c=!0,h=!0,p=!0,d=!1,v=!0,m=!1,g=0,y=!0,b=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" on "+t+"

"},w,E,S={stacked:!1},x=null,T="No Data Available.",N=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),C=function(){return l?180:0},k=250;t.stacked(!1),n.orient("bottom").tickPadding(7).highlightZero(!0).showMaxMin(!1).tickFormat(function(e){return e}),r.orient(d?"right":"left").tickFormat(d3.format(",.1f")),s.updateState(!1);var L=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=b(i.series.key,a,f,i,A);e.tooltip.show([o,u],l,i.value<0?"n":"s",null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+o.left,e.pos[1]+o.top],N.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){N.tooltipHide(e)}),N.on("tooltipHide",function(){y&&e.tooltip.cleanup()}),A.dispatch=N,A.multibar=t,A.legend=i,A.xAxis=n,A.yAxis=r,d3.rebind(A,t,"x","y","xDomain","yDomain","xRange","yRange","forceX","forceY","clipEdge","id","stacked","stackOffset","delay","barColor","groupSpacing"),A.options=e.utils.optionsFunc.bind(A),A.margin=function(e){return arguments.length?(o.top=typeof e.top!="undefined"?e.top:o.top,o.right=typeof e.right!="undefined"?e.right:o.right,o.bottom=typeof e.bottom!="undefined"?e.bottom:o.bottom,o.left=typeof e.left!="undefined"?e.left:o.left,A):o},A.width=function(e){return arguments.length?(u=e,A):u},A.height=function(e){return arguments.length?(a=e,A):a},A.color=function(t){return arguments.length?(f=e.utils.getColor(t),i.color(f),A):f},A.showControls=function(e){return arguments.length?(l=e,A):l},A.showLegend=function(e){return arguments.length?(c=e,A):c},A.showXAxis=function(e){return arguments.length?(h=e,A):h},A.showYAxis=function(e){return arguments.length?(p=e,A):p},A.rightAlignYAxis=function(e){return arguments.length?(d=e,r.orient(e?"right":"left"),A):d},A.reduceXTicks=function(e){return arguments.length?(v=e,A):v},A.rotateLabels=function(e){return 
arguments.length?(g=e,A):g},A.staggerLabels=function(e){return arguments.length?(m=e,A):m},A.tooltip=function(e){return arguments.length?(b=e,A):b},A.tooltips=function(e){return arguments.length?(y=e,A):y},A.tooltipContent=function(e){return arguments.length?(b=e,A):b},A.state=function(e){return arguments.length?(S=e,A):S},A.defaultState=function(e){return arguments.length?(x=e,A):x},A.noData=function(e){return arguments.length?(T=e,A):T},A.transitionDuration=function(e){return arguments.length?(k=e,A):k},A},e.models.multiBarHorizontal=function(){"use strict";function C(e){return e.each(function(e){var i=n-t.left-t.right,y=r-t.top-t.bottom,C=d3.select(this);p&&(e=d3.layout.stack().offset("zero").values(function(e){return e.values}).y(a)(e)),e.forEach(function(e,t){e.values.forEach(function(e){e.series=t})}),p&&e[0].values.map(function(t,n){var r=0,i=0;e.map(function(e){var t=e.values[n];t.size=Math.abs(t.y),t.y<0?(t.y1=i-t.size,i-=t.size):(t.y1=r,r+=t.size)})});var k=b&&w?[]:e.map(function(e){return e.values.map(function(e,t){return{x:u(e,t),y:a(e,t),y0:e.y0,y1:e.y1}})});s.domain(b||d3.merge(k).map(function(e){return e.x})).rangeBands(E||[0,y],.1),o.domain(w||d3.extent(d3.merge(k).map(function(e){return p?e.y>0?e.y1+e.y:e.y1:e.y}).concat(f))),d&&!p?o.range(S||[o.domain()[0]<0?m:0,i-(o.domain()[1]>0?m:0)]):o.range(S||[0,i]),T=T||s,N=N||d3.scale.linear().domain(o.domain()).range([o(0),o(0)]);var L=d3.select(this).selectAll("g.nv-wrap.nv-multibarHorizontal").data([e]),A=L.enter().append("g").attr("class","nvd3 nv-wrap nv-multibarHorizontal"),O=A.append("defs"),M=A.append("g"),_=L.select("g");M.append("g").attr("class","nv-groups"),L.attr("transform","translate("+t.left+","+t.top+")");var D=L.select(".nv-groups").selectAll(".nv-group").data(function(e){return e},function(e,t){return 
t});D.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),D.exit().transition().style("stroke-opacity",1e-6).style("fill-opacity",1e-6).remove(),D.attr("class",function(e,t){return"nv-group nv-series-"+t}).classed("hover",function(e){return e.hover}).style("fill",function(e,t){return l(e,t)}).style("stroke",function(e,t){return l(e,t)}),D.transition().style("stroke-opacity",1).style("fill-opacity",.75);var P=D.selectAll("g.nv-bar").data(function(e){return e.values});P.exit().remove();var H=P.enter().append("g").attr("transform",function(t,n,r){return"translate("+N(p?t.y0:0)+","+(p?0:r*s.rangeBand()/e.length+s(u(t,n)))+")"});H.append("rect").attr("width",0).attr("height",s.rangeBand()/(p?1:e.length)),P.on("mouseover",function(t,n){d3.select(this).classed("hover",!0),x.elementMouseover({value:a(t,n),point:t,series:e[t.series],pos:[o(a(t,n)+(p?t.y0:0)),s(u(t,n))+s.rangeBand()*(p?e.length/2:t.series+.5)/e.length],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("mouseout",function(t,n){d3.select(this).classed("hover",!1),x.elementMouseout({value:a(t,n),point:t,series:e[t.series],pointIndex:n,seriesIndex:t.series,e:d3.event})}).on("click",function(t,n){x.elementClick({value:a(t,n),point:t,series:e[t.series],pos:[s(u(t,n))+s.rangeBand()*(p?e.length/2:t.series+.5)/e.length,o(a(t,n)+(p?t.y0:0))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()}).on("dblclick",function(t,n){x.elementDblClick({value:a(t,n),point:t,series:e[t.series],pos:[s(u(t,n))+s.rangeBand()*(p?e.length/2:t.series+.5)/e.length,o(a(t,n)+(p?t.y0:0))],pointIndex:n,seriesIndex:t.series,e:d3.event}),d3.event.stopPropagation()}),H.append("text"),d&&!p?(P.select("text").attr("text-anchor",function(e,t){return a(e,t)<0?"end":"start"}).attr("y",s.rangeBand()/(e.length*2)).attr("dy",".32em").text(function(e,t){return g(a(e,t))}),P.transition().select("text").attr("x",function(e,t){return 
a(e,t)<0?-4:o(a(e,t))-o(0)+4})):P.selectAll("text").text(""),v&&!p?(H.append("text").classed("nv-bar-label",!0),P.select("text.nv-bar-label").attr("text-anchor",function(e,t){return a(e,t)<0?"start":"end"}).attr("y",s.rangeBand()/(e.length*2)).attr("dy",".32em").text(function(e,t){return u(e,t)}),P.transition().select("text.nv-bar-label").attr("x",function(e,t){return a(e,t)<0?o(0)-o(a(e,t))+4:-4})):P.selectAll("text.nv-bar-label").text(""),P.attr("class",function(e,t){return a(e,t)<0?"nv-bar negative":"nv-bar positive"}),c&&(h||(h=e.map(function(){return!0})),P.style("fill",function(e,t,n){return d3.rgb(c(e,t)).darker(h.map(function(e,t){return t}).filter(function(e,t){return!h[t]})[n]).toString()}).style("stroke",function(e,t,n){return d3.rgb(c(e,t)).darker(h.map(function(e,t){return t}).filter(function(e,t){return!h[t]})[n]).toString()})),p?P.transition().attr("transform",function(e,t){return"translate("+o(e.y1)+","+s(u(e,t))+")"}).select("rect").attr("width",function(e,t){return Math.abs(o(a(e,t)+e.y0)-o(e.y0))}).attr("height",s.rangeBand()):P.transition().attr("transform",function(t,n){return"translate("+(a(t,n)<0?o(a(t,n)):o(0))+","+(t.series*s.rangeBand()/e.length+s(u(t,n)))+")"}).select("rect").attr("height",s.rangeBand()/e.length).attr("width",function(e,t){return Math.max(Math.abs(o(a(e,t))-o(0)),1)}),T=s.copy(),N=o.copy()}),C}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=Math.floor(Math.random()*1e4),s=d3.scale.ordinal(),o=d3.scale.linear(),u=function(e){return e.x},a=function(e){return e.y},f=[0],l=e.utils.defaultColor(),c=null,h,p=!1,d=!1,v=!1,m=60,g=d3.format(",.2f"),y=1200,b,w,E,S,x=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout"),T,N;return C.dispatch=x,C.options=e.utils.optionsFunc.bind(C),C.x=function(e){return arguments.length?(u=e,C):u},C.y=function(e){return arguments.length?(a=e,C):a},C.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof 
e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,C):t},C.width=function(e){return arguments.length?(n=e,C):n},C.height=function(e){return arguments.length?(r=e,C):r},C.xScale=function(e){return arguments.length?(s=e,C):s},C.yScale=function(e){return arguments.length?(o=e,C):o},C.xDomain=function(e){return arguments.length?(b=e,C):b},C.yDomain=function(e){return arguments.length?(w=e,C):w},C.xRange=function(e){return arguments.length?(E=e,C):E},C.yRange=function(e){return arguments.length?(S=e,C):S},C.forceY=function(e){return arguments.length?(f=e,C):f},C.stacked=function(e){return arguments.length?(p=e,C):p},C.color=function(t){return arguments.length?(l=e.utils.getColor(t),C):l},C.barColor=function(t){return arguments.length?(c=e.utils.getColor(t),C):c},C.disabled=function(e){return arguments.length?(h=e,C):h},C.id=function(e){return arguments.length?(i=e,C):i},C.delay=function(e){return arguments.length?(y=e,C):y},C.showValues=function(e){return arguments.length?(d=e,C):d},C.showBarLabels=function(e){return arguments.length?(v=e,C):v},C.valueFormat=function(e){return arguments.length?(g=e,C):g},C.valuePadding=function(e){return arguments.length?(m=e,C):m},C},e.models.multiBarHorizontalChart=function(){"use strict";function C(e){return e.each(function(e){var d=d3.select(this),m=this,k=(u||parseInt(d.style("width"))||960)-o.left-o.right,L=(a||parseInt(d.style("height"))||400)-o.top-o.bottom;C.update=function(){d.transition().duration(T).call(C)},C.container=this,b.disabled=e.map(function(e){return!!e.disabled});if(!w){var A;w={};for(A in b)b[A]instanceof Array?w[A]=b[A].slice(0):w[A]=b[A]}if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var O=d.selectAll(".nv-noData").data([E]);return O.enter().append("text").attr("class","nvd3 
nv-noData").attr("dy","-.7em").style("text-anchor","middle"),O.attr("x",o.left+k/2).attr("y",o.top+L/2).text(function(e){return e}),C}d.selectAll(".nv-noData").remove(),g=t.xScale(),y=t.yScale();var M=d.selectAll("g.nv-wrap.nv-multiBarHorizontalChart").data([e]),_=M.enter().append("g").attr("class","nvd3 nv-wrap nv-multiBarHorizontalChart").append("g"),D=M.select("g");_.append("g").attr("class","nv-x nv-axis"),_.append("g").attr("class","nv-y nv-axis").append("g").attr("class","nv-zeroLine").append("line"),_.append("g").attr("class","nv-barsWrap"),_.append("g").attr("class","nv-legendWrap"),_.append("g").attr("class","nv-controlsWrap"),c&&(i.width(k-x()),t.barColor()&&e.forEach(function(e,t){e.color=d3.rgb("#ccc").darker(t*1.5).toString()}),D.select(".nv-legendWrap").datum(e).call(i),o.top!=i.height()&&(o.top=i.height(),L=(a||parseInt(d.style("height"))||400)-o.top-o.bottom),D.select(".nv-legendWrap").attr("transform","translate("+x()+","+ -o.top+")"));if(l){var P=[{key:"Grouped",disabled:t.stacked()},{key:"Stacked",disabled:!t.stacked()}];s.width(x()).color(["#444","#444","#444"]),D.select(".nv-controlsWrap").datum(P).attr("transform","translate(0,"+ -o.top+")").call(s)}M.attr("transform","translate("+o.left+","+o.top+")"),t.disabled(e.map(function(e){return e.disabled})).width(k).height(L).color(e.map(function(e,t){return e.color||f(e,t)}).filter(function(t,n){return!e[n].disabled}));var H=D.select(".nv-barsWrap").datum(e.filter(function(e){return!e.disabled}));H.transition().call(t);if(h){n.scale(g).ticks(L/24).tickSize(-k,0),D.select(".nv-x.nv-axis").transition().call(n);var B=D.select(".nv-x.nv-axis").selectAll("g");B.selectAll("line, text")}p&&(r.scale(y).ticks(k/100).tickSize(-L,0),D.select(".nv-y.nv-axis").attr("transform","translate(0,"+L+")"),D.select(".nv-y.nv-axis").transition().call(r)),D.select(".nv-zeroLine 
line").attr("x1",y(0)).attr("x2",y(0)).attr("y1",0).attr("y2",-L),i.dispatch.on("stateChange",function(e){b=e,S.stateChange(b),C.update()}),s.dispatch.on("legendClick",function(e,n){if(!e.disabled)return;P=P.map(function(e){return e.disabled=!0,e}),e.disabled=!1;switch(e.key){case"Grouped":t.stacked(!1);break;case"Stacked":t.stacked(!0)}b.stacked=t.stacked(),S.stateChange(b),C.update()}),S.on("tooltipShow",function(e){v&&N(e,m.parentNode)}),S.on("changeState",function(n){typeof n.disabled!="undefined"&&(e.forEach(function(e,t){e.disabled=n.disabled[t]}),b.disabled=n.disabled),typeof n.stacked!="undefined"&&(t.stacked(n.stacked),b.stacked=n.stacked),C.update()})}),C}var t=e.models.multiBarHorizontal(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend().height(30),s=e.models.legend().height(30),o={top:30,right:20,bottom:50,left:60},u=null,a=null,f=e.utils.defaultColor(),l=!0,c=!0,h=!0,p=!0,d=!1,v=!0,m=function(e,t,n,r,i){return"

"+e+" - "+t+"

"+"

"+n+"

"},g,y,b={stacked:d},w=null,E="No Data Available.",S=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),x=function(){return l?180:0},T=250;t.stacked(d),n.orient("left").tickPadding(5).highlightZero(!1).showMaxMin(!1).tickFormat(function(e){return e}),r.orient("bottom").tickFormat(d3.format(",.1f")),s.updateState(!1);var N=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=m(i.series.key,a,f,i,C);e.tooltip.show([o,u],l,i.value<0?"e":"w",null,s)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+o.left,e.pos[1]+o.top],S.tooltipShow(e)}),t.dispatch.on("elementMouseout.tooltip",function(e){S.tooltipHide(e)}),S.on("tooltipHide",function(){v&&e.tooltip.cleanup()}),C.dispatch=S,C.multibar=t,C.legend=i,C.xAxis=n,C.yAxis=r,d3.rebind(C,t,"x","y","xDomain","yDomain","xRange","yRange","forceX","forceY","clipEdge","id","delay","showValues","showBarLabels","valueFormat","stacked","barColor"),C.options=e.utils.optionsFunc.bind(C),C.margin=function(e){return arguments.length?(o.top=typeof e.top!="undefined"?e.top:o.top,o.right=typeof e.right!="undefined"?e.right:o.right,o.bottom=typeof e.bottom!="undefined"?e.bottom:o.bottom,o.left=typeof e.left!="undefined"?e.left:o.left,C):o},C.width=function(e){return arguments.length?(u=e,C):u},C.height=function(e){return arguments.length?(a=e,C):a},C.color=function(t){return arguments.length?(f=e.utils.getColor(t),i.color(f),C):f},C.showControls=function(e){return arguments.length?(l=e,C):l},C.showLegend=function(e){return arguments.length?(c=e,C):c},C.showXAxis=function(e){return arguments.length?(h=e,C):h},C.showYAxis=function(e){return arguments.length?(p=e,C):p},C.tooltip=function(e){return arguments.length?(m=e,C):m},C.tooltips=function(e){return arguments.length?(v=e,C):v},C.tooltipContent=function(e){return arguments.length?(m=e,C):m},C.state=function(e){return 
arguments.length?(b=e,C):b},C.defaultState=function(e){return arguments.length?(w=e,C):w},C.noData=function(e){return arguments.length?(E=e,C):E},C.transitionDuration=function(e){return arguments.length?(T=e,C):T},C},e.models.multiChart=function(){"use strict";function C(e){return e.each(function(e){var u=d3.select(this),f=this;C.update=function(){u.transition().call(C)},C.container=this;var k=(r||parseInt(u.style("width"))||960)-t.left-t.right,L=(i||parseInt(u.style("height"))||400)-t.top-t.bottom,A=e.filter(function(e){return!e.disabled&&e.type=="line"&&e.yAxis==1}),O=e.filter(function(e){return!e.disabled&&e.type=="line"&&e.yAxis==2}),M=e.filter(function(e){return!e.disabled&&e.type=="bar"&&e.yAxis==1}),_=e.filter(function(e){return!e.disabled&&e.type=="bar"&&e.yAxis==2}),D=e.filter(function(e){return!e.disabled&&e.type=="area"&&e.yAxis==1}),P=e.filter(function(e){return!e.disabled&&e.type=="area"&&e.yAxis==2}),H=e.filter(function(e){return!e.disabled&&e.yAxis==1}).map(function(e){return e.values.map(function(e,t){return{x:e.x,y:e.y}})}),B=e.filter(function(e){return!e.disabled&&e.yAxis==2}).map(function(e){return e.values.map(function(e,t){return{x:e.x,y:e.y}})});a.domain(d3.extent(d3.merge(H.concat(B)),function(e){return e.x})).range([0,k]);var j=u.selectAll("g.wrap.multiChart").data([e]),F=j.enter().append("g").attr("class","wrap nvd3 multiChart").append("g");F.append("g").attr("class","x axis"),F.append("g").attr("class","y1 axis"),F.append("g").attr("class","y2 axis"),F.append("g").attr("class","lines1Wrap"),F.append("g").attr("class","lines2Wrap"),F.append("g").attr("class","bars1Wrap"),F.append("g").attr("class","bars2Wrap"),F.append("g").attr("class","stack1Wrap"),F.append("g").attr("class","stack2Wrap"),F.append("g").attr("class","legendWrap");var I=j.select("g");s&&(x.width(k/2),I.select(".legendWrap").datum(e.map(function(e){return e.originalKey=e.originalKey===undefined?e.key:e.originalKey,e.key=e.originalKey+(e.yAxis==1?"":" (right 
axis)"),e})).call(x),t.top!=x.height()&&(t.top=x.height(),L=(i||parseInt(u.style("height"))||400)-t.top-t.bottom),I.select(".legendWrap").attr("transform","translate("+k/2+","+ -t.top+")")),d.width(k).height(L).interpolate("monotone").color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==1&&e[n].type=="line"})),v.width(k).height(L).interpolate("monotone").color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==2&&e[n].type=="line"})),m.width(k).height(L).color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==1&&e[n].type=="bar"})),g.width(k).height(L).color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==2&&e[n].type=="bar"})),y.width(k).height(L).color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==1&&e[n].type=="area"})),b.width(k).height(L).color(e.map(function(e,t){return e.color||n[t%n.length]}).filter(function(t,n){return!e[n].disabled&&e[n].yAxis==2&&e[n].type=="area"})),I.attr("transform","translate("+t.left+","+t.top+")");var q=I.select(".lines1Wrap").datum(A),R=I.select(".bars1Wrap").datum(M),U=I.select(".stack1Wrap").datum(D),z=I.select(".lines2Wrap").datum(O),W=I.select(".bars2Wrap").datum(_),X=I.select(".stack2Wrap").datum(P),V=D.length?D.map(function(e){return e.values}).reduce(function(e,t){return e.map(function(e,n){return{x:e.x,y:e.y+t[n].y}})}).concat([{x:0,y:0}]):[],$=P.length?P.map(function(e){return e.values}).reduce(function(e,t){return e.map(function(e,n){return{x:e.x,y:e.y+t[n].y}})}).concat([{x:0,y:0}]):[];h.domain(l||d3.extent(d3.merge(H).concat(V),function(e){return e.y})).range([0,L]),p.domain(c||d3.extent(d3.merge(B).concat($),function(e){return 
e.y})).range([0,L]),d.yDomain(h.domain()),m.yDomain(h.domain()),y.yDomain(h.domain()),v.yDomain(p.domain()),g.yDomain(p.domain()),b.yDomain(p.domain()),D.length&&d3.transition(U).call(y),P.length&&d3.transition(X).call(b),M.length&&d3.transition(R).call(m),_.length&&d3.transition(W).call(g),A.length&&d3.transition(q).call(d),O.length&&d3.transition(z).call(v),w.ticks(k/100).tickSize(-L,0),I.select(".x.axis").attr("transform","translate(0,"+L+")"),d3.transition(I.select(".x.axis")).call(w),E.ticks(L/36).tickSize(-k,0),d3.transition(I.select(".y1.axis")).call(E),S.ticks(L/36).tickSize(-k,0),d3.transition(I.select(".y2.axis")).call(S),I.select(".y2.axis").style("opacity",B.length?1:0).attr("transform","translate("+a.range()[1]+",0)"),x.dispatch.on("stateChange",function(e){C.update()}),T.on("tooltipShow",function(e){o&&N(e,f.parentNode)})}),C}var t={top:30,right:20,bottom:50,left:60},n=d3.scale.category20().range(),r=null,i=null,s=!0,o=!0,u=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" at "+t+"

"},a,f,l,c,a=d3.scale.linear(),h=d3.scale.linear(),p=d3.scale.linear(),d=e.models.line().yScale(h),v=e.models.line().yScale(p),m=e.models.multiBar().stacked(!1).yScale(h),g=e.models.multiBar().stacked(!1).yScale(p),y=e.models.stackedArea().yScale(h),b=e.models.stackedArea().yScale(p),w=e.models.axis().scale(a).orient("bottom").tickPadding(5),E=e.models.axis().scale(h).orient("left"),S=e.models.axis().scale(p).orient("right"),x=e.models.legend().height(30),T=d3.dispatch("tooltipShow","tooltipHide"),N=function(t,n){var r=t.pos[0]+(n.offsetLeft||0),i=t.pos[1]+(n.offsetTop||0),s=w.tickFormat()(d.x()(t.point,t.pointIndex)),o=(t.series.yAxis==2?S:E).tickFormat()(d.y()(t.point,t.pointIndex)),a=u(t.series.key,s,o,t,C);e.tooltip.show([r,i],a,undefined,undefined,n.offsetParent)};return d.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),d.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),v.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),v.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),m.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),m.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),g.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),g.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),y.dispatch.on("tooltipShow",function(e){if(!Math.round(y.y()(e.point)*100))return setTimeout(function(){d3.selectAll(".point.hover").classed("hover",!1)},0),!1;e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),y.dispatch.on("tooltipHide",function(e){T.tooltipHide(e)}),b.dispatch.on("tooltipShow",function(e){if(!Math.round(b.y()(e.point)*100))return 
setTimeout(function(){d3.selectAll(".point.hover").classed("hover",!1)},0),!1;e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),b.dispatch.on("tooltipHide",function(e){T.tooltipHide(e)}),d.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),d.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),v.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+t.left,e.pos[1]+t.top],T.tooltipShow(e)}),v.dispatch.on("elementMouseout.tooltip",function(e){T.tooltipHide(e)}),T.on("tooltipHide",function(){o&&e.tooltip.cleanup()}),C.dispatch=T,C.lines1=d,C.lines2=v,C.bars1=m,C.bars2=g,C.stack1=y,C.stack2=b,C.xAxis=w,C.yAxis1=E,C.yAxis2=S,C.options=e.utils.optionsFunc.bind(C),C.x=function(e){return arguments.length?(getX=e,d.x(e),m.x(e),C):getX},C.y=function(e){return arguments.length?(getY=e,d.y(e),m.y(e),C):getY},C.yDomain1=function(e){return arguments.length?(l=e,C):l},C.yDomain2=function(e){return arguments.length?(c=e,C):c},C.margin=function(e){return arguments.length?(t=e,C):t},C.width=function(e){return arguments.length?(r=e,C):r},C.height=function(e){return arguments.length?(i=e,C):i},C.color=function(e){return arguments.length?(n=e,x.color(e),C):n},C.showLegend=function(e){return arguments.length?(s=e,C):s},C.tooltips=function(e){return arguments.length?(o=e,C):o},C.tooltipContent=function(e){return arguments.length?(u=e,C):u},C},e.models.ohlcBar=function(){"use strict";function x(e){return e.each(function(e){var 
g=n-t.left-t.right,x=r-t.top-t.bottom,T=d3.select(this);s.domain(y||d3.extent(e[0].values.map(u).concat(p))),v?s.range(w||[g*.5/e[0].values.length,g*(e[0].values.length-.5)/e[0].values.length]):s.range(w||[0,g]),o.domain(b||[d3.min(e[0].values.map(h).concat(d)),d3.max(e[0].values.map(c).concat(d))]).range(E||[x,0]),s.domain()[0]===s.domain()[1]&&(s.domain()[0]?s.domain([s.domain()[0]-s.domain()[0]*.01,s.domain()[1]+s.domain()[1]*.01]):s.domain([-1,1])),o.domain()[0]===o.domain()[1]&&(o.domain()[0]?o.domain([o.domain()[0]+o.domain()[0]*.01,o.domain()[1]-o.domain()[1]*.01]):o.domain([-1,1]));var N=d3.select(this).selectAll("g.nv-wrap.nv-ohlcBar").data([e[0].values]),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-ohlcBar"),k=C.append("defs"),L=C.append("g"),A=N.select("g");L.append("g").attr("class","nv-ticks"),N.attr("transform","translate("+t.left+","+t.top+")"),T.on("click",function(e,t){S.chartClick({data:e,index:t,pos:d3.event,id:i})}),k.append("clipPath").attr("id","nv-chart-clip-path-"+i).append("rect"),N.select("#nv-chart-clip-path-"+i+" rect").attr("width",g).attr("height",x),A.attr("clip-path",m?"url(#nv-chart-clip-path-"+i+")":"");var O=N.select(".nv-ticks").selectAll(".nv-tick").data(function(e){return e});O.exit().remove();var M=O.enter().append("path").attr("class",function(e,t,n){return(f(e,t)>l(e,t)?"nv-tick negative":"nv-tick positive")+" nv-tick-"+n+"-"+t}).attr("d",function(t,n){var r=g/e[0].values.length*.9;return"m0,0l0,"+(o(f(t,n))-o(c(t,n)))+"l"+ -r/2+",0l"+r/2+",0l0,"+(o(h(t,n))-o(f(t,n)))+"l0,"+(o(l(t,n))-o(h(t,n)))+"l"+r/2+",0l"+ 
-r/2+",0z"}).attr("transform",function(e,t){return"translate("+s(u(e,t))+","+o(c(e,t))+")"}).on("mouseover",function(t,n){d3.select(this).classed("hover",!0),S.elementMouseover({point:t,series:e[0],pos:[s(u(t,n)),o(a(t,n))],pointIndex:n,seriesIndex:0,e:d3.event})}).on("mouseout",function(t,n){d3.select(this).classed("hover",!1),S.elementMouseout({point:t,series:e[0],pointIndex:n,seriesIndex:0,e:d3.event})}).on("click",function(e,t){S.elementClick({value:a(e,t),data:e,index:t,pos:[s(u(e,t)),o(a(e,t))],e:d3.event,id:i}),d3.event.stopPropagation()}).on("dblclick",function(e,t){S.elementDblClick({value:a(e,t),data:e,index:t,pos:[s(u(e,t)),o(a(e,t))],e:d3.event,id:i}),d3.event.stopPropagation()});O.attr("class",function(e,t,n){return(f(e,t)>l(e,t)?"nv-tick negative":"nv-tick positive")+" nv-tick-"+n+"-"+t}),d3.transition(O).attr("transform",function(e,t){return"translate("+s(u(e,t))+","+o(c(e,t))+")"}).attr("d",function(t,n){var r=g/e[0].values.length*.9;return"m0,0l0,"+(o(f(t,n))-o(c(t,n)))+"l"+ -r/2+",0l"+r/2+",0l0,"+(o(h(t,n))-o(f(t,n)))+"l0,"+(o(l(t,n))-o(h(t,n)))+"l"+r/2+",0l"+ -r/2+",0z"})}),x}var t={top:0 ,right:0,bottom:0,left:0},n=960,r=500,i=Math.floor(Math.random()*1e4),s=d3.scale.linear(),o=d3.scale.linear(),u=function(e){return e.x},a=function(e){return e.y},f=function(e){return e.open},l=function(e){return e.close},c=function(e){return e.high},h=function(e){return e.low},p=[],d=[],v=!1,m=!0,g=e.utils.defaultColor(),y,b,w,E,S=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout");return x.dispatch=S,x.options=e.utils.optionsFunc.bind(x),x.x=function(e){return arguments.length?(u=e,x):u},x.y=function(e){return arguments.length?(a=e,x):a},x.open=function(e){return arguments.length?(f=e,x):f},x.close=function(e){return arguments.length?(l=e,x):l},x.high=function(e){return arguments.length?(c=e,x):c},x.low=function(e){return arguments.length?(h=e,x):h},x.margin=function(e){return arguments.length?(t.top=typeof 
e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,x):t},x.width=function(e){return arguments.length?(n=e,x):n},x.height=function(e){return arguments.length?(r=e,x):r},x.xScale=function(e){return arguments.length?(s=e,x):s},x.yScale=function(e){return arguments.length?(o=e,x):o},x.xDomain=function(e){return arguments.length?(y=e,x):y},x.yDomain=function(e){return arguments.length?(b=e,x):b},x.xRange=function(e){return arguments.length?(w=e,x):w},x.yRange=function(e){return arguments.length?(E=e,x):E},x.forceX=function(e){return arguments.length?(p=e,x):p},x.forceY=function(e){return arguments.length?(d=e,x):d},x.padData=function(e){return arguments.length?(v=e,x):v},x.clipEdge=function(e){return arguments.length?(m=e,x):m},x.color=function(t){return arguments.length?(g=e.utils.getColor(t),x):g},x.id=function(e){return arguments.length?(i=e,x):i},x},e.models.pie=function(){"use strict";function S(e){return e.each(function(e){function q(e){var t=(e.startAngle+e.endAngle)*90/Math.PI-90;return t>90?t-180:t}function R(e){e.endAngle=isNaN(e.endAngle)?0:e.endAngle,e.startAngle=isNaN(e.startAngle)?0:e.startAngle,m||(e.innerRadius=0);var t=d3.interpolate(this._current,e);return this._current=t(0),function(e){return A(t(e))}}function U(e){e.innerRadius=0;var t=d3.interpolate({startAngle:0,endAngle:0},e);return function(e){return A(t(e))}}var o=n-t.left-t.right,f=r-t.top-t.bottom,S=Math.min(o,f)/2,x=S-S/5,T=d3.select(this),N=T.selectAll(".nv-wrap.nv-pie").data(e),C=N.enter().append("g").attr("class","nvd3 nv-wrap nv-pie 
nv-chart-"+u),k=C.append("g"),L=N.select("g");k.append("g").attr("class","nv-pie"),k.append("g").attr("class","nv-pieLabels"),N.attr("transform","translate("+t.left+","+t.top+")"),L.select(".nv-pie").attr("transform","translate("+o/2+","+f/2+")"),L.select(".nv-pieLabels").attr("transform","translate("+o/2+","+f/2+")"),T.on("click",function(e,t){E.chartClick({data:e,index:t,pos:d3.event,id:u})});var A=d3.svg.arc().outerRadius(x);y&&A.startAngle(y),b&&A.endAngle(b),m&&A.innerRadius(S*w);var O=d3.layout.pie().sort(null).value(function(e){return e.disabled?0:s(e)}),M=N.select(".nv-pie").selectAll(".nv-slice").data(O),_=N.select(".nv-pieLabels").selectAll(".nv-label").data(O);M.exit().remove(),_.exit().remove();var D=M.enter().append("g").attr("class","nv-slice").on("mouseover",function(e,t){d3.select(this).classed("hover",!0),E.elementMouseover({label:i(e.data),value:s(e.data),point:e.data,pointIndex:t,pos:[d3.event.pageX,d3.event.pageY],id:u})}).on("mouseout",function(e,t){d3.select(this).classed("hover",!1),E.elementMouseout({label:i(e.data),value:s(e.data),point:e.data,index:t,id:u})}).on("click",function(e,t){E.elementClick({label:i(e.data),value:s(e.data),point:e.data,index:t,pos:d3.event,id:u}),d3.event.stopPropagation()}).on("dblclick",function(e,t){E.elementDblClick({label:i(e.data),value:s(e.data),point:e.data,index:t,pos:d3.event,id:u}),d3.event.stopPropagation()});M.attr("fill",function(e,t){return a(e,t)}).attr("stroke",function(e,t){return a(e,t)});var P=D.append("path").each(function(e){this._current=e});M.select("path").transition().attr("d",A).attrTween("d",R);if(l){var H=d3.svg.arc().innerRadius(0);c&&(H=A),h&&(H=d3.svg.arc().outerRadius(A.outerRadius())),_.enter().append("g").classed("nv-label",!0).each(function(e,t){var n=d3.select(this);n.attr("transform",function(e){if(g){e.outerRadius=x+10,e.innerRadius=x+15;var t=(e.startAngle+e.endAngle)/2*(180/Math.PI);return(e.startAngle+e.endAngle)/2v?r[p]:""})}}),S}var 
t={top:0,right:0,bottom:0,left:0},n=500,r=500,i=function(e){return e.x},s=function(e){return e.y},o=function(e){return e.description},u=Math.floor(Math.random()*1e4),a=e.utils.defaultColor(),f=d3.format(",.2f"),l=!0,c=!0,h=!1,p="key",v=.02,m=!1,g=!1,y=!1,b=!1,w=.5,E=d3.dispatch("chartClick","elementClick","elementDblClick","elementMouseover","elementMouseout");return S.dispatch=E,S.options=e.utils.optionsFunc.bind(S),S.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,S):t},S.width=function(e){return arguments.length?(n=e,S):n},S.height=function(e){return arguments.length?(r=e,S):r},S.values=function(t){return e.log("pie.values() is no longer supported."),S},S.x=function(e){return arguments.length?(i=e,S):i},S.y=function(e){return arguments.length?(s=d3.functor(e),S):s},S.description=function(e){return arguments.length?(o=e,S):o},S.showLabels=function(e){return arguments.length?(l=e,S):l},S.labelSunbeamLayout=function(e){return arguments.length?(g=e,S):g},S.donutLabelsOutside=function(e){return arguments.length?(h=e,S):h},S.pieLabelsOutside=function(e){return arguments.length?(c=e,S):c},S.labelType=function(e){return arguments.length?(p=e,p=p||"key",S):p},S.donut=function(e){return arguments.length?(m=e,S):m},S.donutRatio=function(e){return arguments.length?(w=e,S):w},S.startAngle=function(e){return arguments.length?(y=e,S):y},S.endAngle=function(e){return arguments.length?(b=e,S):b},S.id=function(e){return arguments.length?(u=e,S):u},S.color=function(t){return arguments.length?(a=e.utils.getColor(t),S):a},S.valueFormat=function(e){return arguments.length?(f=e,S):f},S.labelThreshold=function(e){return arguments.length?(v=e,S):v},S},e.models.pieChart=function(){"use strict";function v(e){return e.each(function(e){var 
u=d3.select(this),a=this,f=(i||parseInt(u.style("width"))||960)-r.left-r.right,d=(s||parseInt(u.style("height"))||400)-r.top-r.bottom;v.update=function(){u.transition().call(v)},v.container=this,l.disabled=e.map(function(e){return!!e.disabled});if(!c){var m;c={};for(m in l)l[m]instanceof Array?c[m]=l[m].slice(0):c[m]=l[m]}if(!e||!e.length){var g=u.selectAll(".nv-noData").data([h]);return g.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),g.attr("x",r.left+f/2).attr("y",r.top+d/2).text(function(e){return e}),v}u.selectAll(".nv-noData").remove();var y=u.selectAll("g.nv-wrap.nv-pieChart").data([e]),b=y.enter().append("g").attr("class","nvd3 nv-wrap nv-pieChart").append("g"),w=y.select("g");b.append("g").attr("class","nv-pieWrap"),b.append("g").attr("class","nv-legendWrap"),o&&(n.width(f).key(t.x()),y.select(".nv-legendWrap").datum(e).call(n),r.top!=n.height()&&(r.top=n.height(),d=(s||parseInt(u.style("height"))||400)-r.top-r.bottom),y.select(".nv-legendWrap").attr("transform","translate(0,"+ -r.top+")")),y.attr("transform","translate("+r.left+","+r.top+")"),t.width(f).height(d);var E=w.select(".nv-pieWrap").datum([e]);d3.transition(E).call(t),n.dispatch.on("stateChange",function(e){l=e,p.stateChange(l),v.update()}),t.dispatch.on("elementMouseout.tooltip",function(e){p.tooltipHide(e)}),p.on("changeState",function(t){typeof t.disabled!="undefined"&&(e.forEach(function(e,n){e.disabled=t.disabled[n]}),l.disabled=t.disabled),v.update()})}),v}var t=e.models.pie(),n=e.models.legend(),r={top:30,right:20,bottom:20,left:20},i=null,s=null,o=!0,u=e.utils.defaultColor(),a=!0,f=function(e,t,n,r){return"

"+e+"

"+"

"+t+"

"},l={},c=null,h="No Data Available.",p=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),d=function(n,r){var i=t.description()(n.point)||t.x()(n.point),s=n.pos[0]+(r&&r.offsetLeft||0),o=n.pos[1]+(r&&r.offsetTop||0),u=t.valueFormat()(t.y()(n.point)),a=f(i,u,n,v);e.tooltip.show([s,o],a,n.value<0?"n":"s",null,r)};return t.dispatch.on("elementMouseover.tooltip",function(e){e.pos=[e.pos[0]+r.left,e.pos[1]+r.top],p.tooltipShow(e)}),p.on("tooltipShow",function(e){a&&d(e)}),p.on("tooltipHide",function(){a&&e.tooltip.cleanup()}),v.legend=n,v.dispatch=p,v.pie=t,d3.rebind(v,t,"valueFormat","values","x","y","description","id","showLabels","donutLabelsOutside","pieLabelsOutside","labelType","donut","donutRatio","labelThreshold"),v.options=e.utils.optionsFunc.bind(v),v.margin=function(e){return arguments.length?(r.top=typeof e.top!="undefined"?e.top:r.top,r.right=typeof e.right!="undefined"?e.right:r.right,r.bottom=typeof e.bottom!="undefined"?e.bottom:r.bottom,r.left=typeof e.left!="undefined"?e.left:r.left,v):r},v.width=function(e){return arguments.length?(i=e,v):i},v.height=function(e){return arguments.length?(s=e,v):s},v.color=function(r){return arguments.length?(u=e.utils.getColor(r),n.color(u),t.color(u),v):u},v.showLegend=function(e){return arguments.length?(o=e,v):o},v.tooltips=function(e){return arguments.length?(a=e,v):a},v.tooltipContent=function(e){return arguments.length?(f=e,v):f},v.state=function(e){return arguments.length?(l=e,v):l},v.defaultState=function(e){return arguments.length?(c=e,v):c},v.noData=function(e){return arguments.length?(h=e,v):h},v},e.models.scatter=function(){"use strict";function I(q){return q.each(function(I){function Q(){if(!g)return!1;var e,i=d3.merge(I.map(function(e,t){return e.values.map(function(e,n){var r=f(e,n),i=l(e,n);return[o(r)+Math.random()*1e-7,u(i)+Math.random()*1e-7,t,n,e]}).filter(function(e,t){return b(e[4],t)})}));if(D===!0){if(x){var 
a=X.select("defs").selectAll(".nv-point-clips").data([s]).enter();a.append("clipPath").attr("class","nv-point-clips").attr("id","nv-points-clip-"+s);var c=X.select("#nv-points-clip-"+s).selectAll("circle").data(i);c.enter().append("circle").attr("r",T),c.exit().remove(),c.attr("cx",function(e){return e[0]}).attr("cy",function(e){return e[1]}),X.select(".nv-point-paths").attr("clip-path","url(#nv-points-clip-"+s+")")}i.length&&(i.push([o.range()[0]-20,u.range()[0]-20,null,null]),i.push([o.range()[1]+20,u.range()[1]+20,null,null]),i.push([o.range()[0]-20,u.range()[0]+20,null,null]),i.push([o.range()[1]+20,u.range()[1]-20,null,null]));var h=d3.geom.polygon([[-10,-10],[-10,r+10],[n+10,r+10],[n+10,-10]]),p=d3.geom.voronoi(i).map(function(e,t){return{data:h.clip(e),series:i[t][2],point:i[t][3]}}),d=X.select(".nv-point-paths").selectAll("path").data(p);d.enter().append("path").attr("class",function(e,t){return"nv-path-"+t}),d.exit().remove(),d.attr("d",function(e){return e.data.length===0?"M 0 0":"M"+e.data.join("L")+"Z"});var v=function(e,n){if(F)return 0;var r=I[e.series];if(typeof r=="undefined")return;var i=r.values[e.point];n({point:i,series:r,pos:[o(f(i,e.point))+t.left,u(l(i,e.point))+t.top],seriesIndex:e.series,pointIndex:e.point})};d.on("click",function(e){v(e,_.elementClick)}).on("mouseover",function(e){v(e,_.elementMouseover)}).on("mouseout",function(e,t){v(e,_.elementMouseout)})}else X.select(".nv-groups").selectAll(".nv-group").selectAll(".nv-point").on("click",function(e,n){if(F||!I[e.series])return 0;var r=I[e.series],i=r.values[n];_.elementClick({point:i,series:r,pos:[o(f(i,n))+t.left,u(l(i,n))+t.top],seriesIndex:e.series,pointIndex:n})}).on("mouseover",function(e,n){if(F||!I[e.series])return 0;var r=I[e.series],i=r.values[n];_.elementMouseover({point:i,series:r,pos:[o(f(i,n))+t.left,u(l(i,n))+t.top],seriesIndex:e.series,pointIndex:n})}).on("mouseout",function(e,t){if(F||!I[e.series])return 0;var 
n=I[e.series],r=n.values[t];_.elementMouseout({point:r,series:n,seriesIndex:e.series,pointIndex:t})});F=!1}var q=n-t.left-t.right,R=r-t.top-t.bottom,U=d3.select(this);I.forEach(function(e,t){e.values.forEach(function(e){e.series=t})});var W=N&&C&&A?[]:d3.merge(I.map(function(e){return e.values.map(function(e,t){return{x:f(e,t),y:l(e,t),size:c(e,t)}})}));o.domain(N||d3.extent(W.map(function(e){return e.x}).concat(d))),w&&I[0]?o.range(k||[(q*E+q)/(2*I[0].values.length),q-q*(1+E)/(2*I[0].values.length)]):o.range(k||[0,q]),u.domain(C||d3.extent(W.map(function(e){return e.y}).concat(v))).range(L||[R,0]),a.domain(A||d3.extent(W.map(function(e){return e.size}).concat(m))).range(O||[16,256]);if(o.domain()[0]===o.domain()[1]||u.domain()[0]===u.domain()[1])M=!0;o.domain()[0]===o.domain()[1]&&(o.domain()[0]?o.domain([o.domain()[0]-o.domain()[0]*.01,o.domain()[1]+o.domain()[1]*.01]):o.domain([-1,1])),u.domain()[0]===u.domain()[1]&&(u.domain()[0]?u.domain([u.domain()[0]-u.domain()[0]*.01,u.domain()[1]+u.domain()[1]*.01]):u.domain([-1,1])),isNaN(o.domain()[0])&&o.domain([-1,1]),isNaN(u.domain()[0])&&u.domain([-1,1]),P=P||o,H=H||u,B=B||a;var X=U.selectAll("g.nv-wrap.nv-scatter").data([I]),V=X.enter().append("g").attr("class","nvd3 nv-wrap nv-scatter nv-chart-"+s+(M?" 
nv-single-point":"")),$=V.append("defs"),J=V.append("g"),K=X.select("g");J.append("g").attr("class","nv-groups"),J.append("g").attr("class","nv-point-paths"),X.attr("transform","translate("+t.left+","+t.top+")"),$.append("clipPath").attr("id","nv-edge-clip-"+s).append("rect"),X.select("#nv-edge-clip-"+s+" rect").attr("width",q).attr("height",R>0?R:0),K.attr("clip-path",S?"url(#nv-edge-clip-"+s+")":""),F=!0;var G=X.select(".nv-groups").selectAll(".nv-group").data(function(e){return e},function(e){return e.key});G.enter().append("g").style("stroke-opacity",1e-6).style("fill-opacity",1e-6),G.exit().remove(),G.attr("class",function(e,t){return"nv-group nv-series-"+t}).classed("hover",function(e){return e.hover}),G.transition().style("fill",function(e,t){return i(e,t)}).style("stroke",function(e,t){return i(e,t)}).style("stroke-opacity",1).style("fill-opacity",.5);if(p){var Y=G.selectAll("circle.nv-point").data(function(e){return e.values},y);Y.enter().append("circle").style("fill",function(e,t){return e.color}).style("stroke",function(e,t){return e.color}).attr("cx",function(t,n){return e.utils.NaNtoZero(P(f(t,n)))}).attr("cy",function(t,n){return e.utils.NaNtoZero(H(l(t,n)))}).attr("r",function(e,t){return Math.sqrt(a(c(e,t))/Math.PI)}),Y.exit().remove(),G.exit().selectAll("path.nv-point").transition().attr("cx",function(t,n){return e.utils.NaNtoZero(o(f(t,n)))}).attr("cy",function(t,n){return e.utils.NaNtoZero(u(l(t,n)))}).remove(),Y.each(function(e,t){d3.select(this).classed("nv-point",!0).classed("nv-point-"+t,!0).classed("hover",!1)}),Y.transition().attr("cx",function(t,n){return e.utils.NaNtoZero(o(f(t,n)))}).attr("cy",function(t,n){return e.utils.NaNtoZero(u(l(t,n)))}).attr("r",function(e,t){return Math.sqrt(a(c(e,t))/Math.PI)})}else{var Y=G.selectAll("path.nv-point").data(function(e){return e.values});Y.enter().append("path").style("fill",function(e,t){return e.color}).style("stroke",function(e,t){return 
e.color}).attr("transform",function(e,t){return"translate("+P(f(e,t))+","+H(l(e,t))+")"}).attr("d",d3.svg.symbol().type(h).size(function(e,t){return a(c(e,t))})),Y.exit().remove(),G.exit().selectAll("path.nv-point").transition().attr("transform",function(e,t){return"translate("+o(f(e,t))+","+u(l(e,t))+")"}).remove(),Y.each(function(e,t){d3.select(this).classed("nv-point",!0).classed("nv-point-"+t,!0).classed("hover",!1)}),Y.transition().attr("transform",function(e,t){return"translate("+o(f(e,t))+","+u(l(e,t))+")"}).attr("d",d3.svg.symbol().type(h).size(function(e,t){return a(c(e,t))}))}clearTimeout(j),j=setTimeout(Q,300),P=o.copy(),H=u.copy(),B=a.copy()}),I}var t={top:0,right:0,bottom:0,left:0},n=960,r=500,i=e.utils.defaultColor(),s=Math.floor(Math.random()*1e5),o=d3.scale.linear(),u=d3.scale.linear(),a=d3.scale.linear(),f=function(e){return e.x},l=function(e){return e.y},c=function(e){return e.size||1},h=function(e){return e.shape||"circle"},p=!0,d=[],v=[],m=[],g=!0,y=null,b=function(e){return!e.notActive},w=!1,E=.1,S=!1,x=!0,T=function(){return 25},N=null,C=null,k=null,L=null,A=null,O=null,M=!1,_=d3.dispatch("elementClick","elementMouseover","elementMouseout"),D=!0,P,H,B,j,F=!1;return I.clearHighlights=function(){d3.selectAll(".nv-chart-"+s+" .nv-point.hover").classed("hover",!1)},I.highlightPoint=function(e,t,n){d3.select(".nv-chart-"+s+" .nv-series-"+e+" .nv-point-"+t).classed("hover",n)},_.on("elementMouseover.point",function(e){g&&I.highlightPoint(e.seriesIndex,e.pointIndex,!0)}),_.on("elementMouseout.point",function(e){g&&I.highlightPoint(e.seriesIndex,e.pointIndex,!1)}),I.dispatch=_,I.options=e.utils.optionsFunc.bind(I),I.x=function(e){return arguments.length?(f=d3.functor(e),I):f},I.y=function(e){return arguments.length?(l=d3.functor(e),I):l},I.size=function(e){return arguments.length?(c=d3.functor(e),I):c},I.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof 
e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,I):t},I.width=function(e){return arguments.length?(n=e,I):n},I.height=function(e){return arguments.length?(r=e,I):r},I.xScale=function(e){return arguments.length?(o=e,I):o},I.yScale=function(e){return arguments.length?(u=e,I):u},I.zScale=function(e){return arguments.length?(a=e,I):a},I.xDomain=function(e){return arguments.length?(N=e,I):N},I.yDomain=function(e){return arguments.length?(C=e,I):C},I.sizeDomain=function(e){return arguments.length?(A=e,I):A},I.xRange=function(e){return arguments.length?(k=e,I):k},I.yRange=function(e){return arguments.length?(L=e,I):L},I.sizeRange=function(e){return arguments.length?(O=e,I):O},I.forceX=function(e){return arguments.length?(d=e,I):d},I.forceY=function(e){return arguments.length?(v=e,I):v},I.forceSize=function(e){return arguments.length?(m=e,I):m},I.interactive=function(e){return arguments.length?(g=e,I):g},I.pointKey=function(e){return arguments.length?(y=e,I):y},I.pointActive=function(e){return arguments.length?(b=e,I):b},I.padData=function(e){return arguments.length?(w=e,I):w},I.padDataOuter=function(e){return arguments.length?(E=e,I):E},I.clipEdge=function(e){return arguments.length?(S=e,I):S},I.clipVoronoi=function(e){return arguments.length?(x=e,I):x},I.useVoronoi=function(e){return arguments.length?(D=e,D===!1&&(x=!1),I):D},I.clipRadius=function(e){return arguments.length?(T=e,I):T},I.color=function(t){return arguments.length?(i=e.utils.getColor(t),I):i},I.shape=function(e){return arguments.length?(h=e,I):h},I.onlyCircles=function(e){return arguments.length?(p=e,I):p},I.id=function(e){return arguments.length?(s=e,I):s},I.singlePoint=function(e){return arguments.length?(M=e,I):M},I},e.models.scatterChart=function(){"use strict";function F(e){return e.each(function(e){function K(){if(T)return 
X.select(".nv-point-paths").style("pointer-events","all"),!1;X.select(".nv-point-paths").style("pointer-events","none");var i=d3.mouse(this);h.distortion(x).focus(i[0]),p.distortion(x).focus(i[1]),X.select(".nv-scatterWrap").call(t),b&&X.select(".nv-x.nv-axis").call(n),w&&X.select(".nv-y.nv-axis").call(r),X.select(".nv-distributionX").datum(e.filter(function(e){return!e.disabled})).call(o),X.select(".nv-distributionY").datum(e.filter(function(e){return!e.disabled})).call(u)}var C=d3.select(this),k=this,L=(f||parseInt(C.style("width"))||960)-a.left-a.right,I=(l||parseInt(C.style("height"))||400)-a.top-a.bottom;F.update=function(){C.transition().duration(D).call(F)},F.container=this,A.disabled=e.map(function(e){return!!e.disabled});if(!O){var q;O={};for(q in A)A[q]instanceof Array?O[q]=A[q].slice(0):O[q]=A[q]}if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var R=C.selectAll(".nv-noData").data([_]);return R.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),R.attr("x",a.left+L/2).attr("y",a.top+I/2).text(function(e){return e}),F}C.selectAll(".nv-noData").remove(),P=P||h,H=H||p;var U=C.selectAll("g.nv-wrap.nv-scatterChart").data([e]),z=U.enter().append("g").attr("class","nvd3 nv-wrap nv-scatterChart nv-chart-"+t.id()),W=z.append("g"),X=U.select("g");W.append("rect").attr("class","nvd3 nv-background"),W.append("g").attr("class","nv-x nv-axis"),W.append("g").attr("class","nv-y nv-axis"),W.append("g").attr("class","nv-scatterWrap"),W.append("g").attr("class","nv-distWrap"),W.append("g").attr("class","nv-legendWrap"),W.append("g").attr("class","nv-controlsWrap");if(y){var V=S?L/2:L;i.width(V),U.select(".nv-legendWrap").datum(e).call(i),a.top!=i.height()&&(a.top=i.height(),I=(l||parseInt(C.style("height"))||400)-a.top-a.bottom),U.select(".nv-legendWrap").attr("transform","translate("+(L-V)+","+ 
-a.top+")")}S&&(s.width(180).color(["#444"]),X.select(".nv-controlsWrap").datum(j).attr("transform","translate(0,"+ -a.top+")").call(s)),U.attr("transform","translate("+a.left+","+a.top+")"),E&&X.select(".nv-y.nv-axis").attr("transform","translate("+L+",0)"),t.width(L).height(I).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),d!==0&&t.xDomain(null),v!==0&&t.yDomain(null),U.select(".nv-scatterWrap").datum(e.filter(function(e){return!e.disabled})).call(t);if(d!==0){var $=h.domain()[1]-h.domain()[0];t.xDomain([h.domain()[0]-d*$,h.domain()[1]+d*$])}if(v!==0){var J=p.domain()[1]-p.domain()[0];t.yDomain([p.domain()[0]-v*J,p.domain()[1]+v*J])}(v!==0||d!==0)&&U.select(".nv-scatterWrap").datum(e.filter(function(e){return!e.disabled})).call(t),b&&(n.scale(h).ticks(n.ticks()&&n.ticks().length?n.ticks():L/100).tickSize(-I,0),X.select(".nv-x.nv-axis").attr("transform","translate(0,"+p.range()[0]+")").call(n)),w&&(r.scale(p).ticks(r.ticks()&&r.ticks().length?r.ticks():I/36).tickSize(-L,0),X.select(".nv-y.nv-axis").call(r)),m&&(o.getData(t.x()).scale(h).width(L).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),W.select(".nv-distWrap").append("g").attr("class","nv-distributionX"),X.select(".nv-distributionX").attr("transform","translate(0,"+p.range()[0]+")").datum(e.filter(function(e){return!e.disabled})).call(o)),g&&(u.getData(t.y()).scale(p).width(I).color(e.map(function(e,t){return 
e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),W.select(".nv-distWrap").append("g").attr("class","nv-distributionY"),X.select(".nv-distributionY").attr("transform","translate("+(E?L:-u.size())+",0)").datum(e.filter(function(e){return!e.disabled})).call(u)),d3.fisheye&&(X.select(".nv-background").attr("width",L).attr("height",I),X.select(".nv-background").on("mousemove",K),X.select(".nv-background").on("click",function(){T=!T}),t.dispatch.on("elementClick.freezeFisheye",function(){T=!T})),s.dispatch.on("legendClick",function(e,i){e.disabled=!e.disabled,x=e.disabled?0:2.5,X.select(".nv-background").style("pointer-events",e.disabled?"none":"all"),X.select(".nv-point-paths").style("pointer-events",e.disabled?"all":"none"),e.disabled?(h.distortion(x).focus(0),p.distortion(x).focus(0),X.select(".nv-scatterWrap").call(t),X.select(".nv-x.nv-axis").call(n),X.select(".nv-y.nv-axis").call(r)):T=!1,F.update()}),i.dispatch.on("stateChange",function(e){A.disabled=e.disabled,M.stateChange(A),F.update()}),t.dispatch.on("elementMouseover.tooltip",function(e){d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-distx-"+e.pointIndex).attr("y1",function(t,n){return e.pos[1]-I}),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-disty-"+e.pointIndex).attr("x2",e.pos[0]+o.size()),e.pos=[e.pos[0]+a.left,e.pos[1]+a.top],M.tooltipShow(e)}),M.on("tooltipShow",function(e){N&&B(e,k.parentNode)}),M.on("changeState",function(t){typeof t.disabled!="undefined"&&(e.forEach(function(e,n){e.disabled=t.disabled[n]}),A.disabled=t.disabled),F.update()}),P=h.copy(),H=p.copy()}),F}var 
t=e.models.scatter(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.models.legend(),o=e.models.distribution(),u=e.models.distribution(),a={top:30,right:20,bottom:50,left:75},f=null,l=null,c=e.utils.defaultColor(),h=d3.fisheye?d3.fisheye.scale(d3.scale.linear).distortion(0):t.xScale(),p=d3.fisheye?d3.fisheye.scale(d3.scale.linear).distortion(0):t.yScale(),d=0,v=0,m=!1,g=!1,y=!0,b=!0,w=!0,E=!1,S=!!d3.fisheye,x=0,T=!1,N=!0,C=function(e,t,n){return""+t+""},k=function(e,t,n){return""+n+""},L=null,A={},O=null,M=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),_="No Data Available.",D=250;t.xScale(h).yScale(p),n.orient("bottom").tickPadding(10),r.orient(E?"right":"left").tickPadding(10),o.axis("x"),u.axis("y"),s.updateState(!1);var P,H,B=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),f=i.pos[0]+(s.offsetLeft||0),l=p.range()[0]+a.top+(s.offsetTop||0),c=h.range()[0]+a.left+(s.offsetLeft||0),d=i.pos[1]+(s.offsetTop||0),v=n.tickFormat()(t.x()(i.point,i.pointIndex)),m=r.tickFormat()(t.y()(i.point,i.pointIndex));C!=null&&e.tooltip.show([f,l],C(i.series.key,v,m,i,F),"n",1,s,"x-nvtooltip"),k!=null&&e.tooltip.show([c,d],k(i.series.key,v,m,i,F),"e",1,s,"y-nvtooltip"),L!=null&&e.tooltip.show([o,u],L(i.series.key,v,m,i,F),i.value<0?"n":"s",null,s)},j=[{key:"Magnify",disabled:!0}];return t.dispatch.on("elementMouseout.tooltip",function(e){M.tooltipHide(e),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-distx-"+e.pointIndex).attr("y1",0),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" 
.nv-disty-"+e.pointIndex).attr("x2",u.size())}),M.on("tooltipHide",function(){N&&e.tooltip.cleanup()}),F.dispatch=M,F.scatter=t,F.legend=i,F.controls=s,F.xAxis=n,F.yAxis=r,F.distX=o,F.distY=u,d3.rebind(F,t,"id","interactive","pointActive","x","y","shape","size","xScale","yScale","zScale","xDomain","yDomain","xRange","yRange","sizeDomain","sizeRange","forceX","forceY","forceSize","clipVoronoi","clipRadius","useVoronoi"),F.options=e.utils.optionsFunc.bind(F),F.margin=function(e){return arguments.length?(a.top=typeof e.top!="undefined"?e.top:a.top,a.right=typeof e.right!="undefined"?e.right:a.right,a.bottom=typeof e.bottom!="undefined"?e.bottom:a.bottom,a.left=typeof e.left!="undefined"?e.left:a.left,F):a},F.width=function(e){return arguments.length?(f=e,F):f},F.height=function(e){return arguments.length?(l=e,F):l},F.color=function(t){return arguments.length?(c=e.utils.getColor(t),i.color(c),o.color(c),u.color(c),F):c},F.showDistX=function(e){return arguments.length?(m=e,F):m},F.showDistY=function(e){return arguments.length?(g=e,F):g},F.showControls=function(e){return arguments.length?(S=e,F):S},F.showLegend=function(e){return arguments.length?(y=e,F):y},F.showXAxis=function(e){return arguments.length?(b=e,F):b},F.showYAxis=function(e){return arguments.length?(w=e,F):w},F.rightAlignYAxis=function(e){return arguments.length?(E=e,r.orient(e?"right":"left"),F):E},F.fisheye=function(e){return arguments.length?(x=e,F):x},F.xPadding=function(e){return arguments.length?(d=e,F):d},F.yPadding=function(e){return arguments.length?(v=e,F):v},F.tooltips=function(e){return arguments.length?(N=e,F):N},F.tooltipContent=function(e){return arguments.length?(L=e,F):L},F.tooltipXContent=function(e){return arguments.length?(C=e,F):C},F.tooltipYContent=function(e){return arguments.length?(k=e,F):k},F.state=function(e){return arguments.length?(A=e,F):A},F.defaultState=function(e){return arguments.length?(O=e,F):O},F.noData=function(e){return 
arguments.length?(_=e,F):_},F.transitionDuration=function(e){return arguments.length?(D=e,F):D},F},e.models.scatterPlusLineChart=function(){"use strict";function B(e){return e.each(function(e){function $(){if(S)return z.select(".nv-point-paths").style("pointer-events","all"),!1;z.select(".nv-point-paths").style("pointer-events","none");var i=d3.mouse(this);h.distortion(E).focus(i[0]),p.distortion(E).focus(i[1]),z.select(".nv-scatterWrap").datum(e.filter(function(e){return!e.disabled})).call(t),g&&z.select(".nv-x.nv-axis").call(n),y&&z.select(".nv-y.nv-axis").call(r),z.select(".nv-distributionX").datum(e.filter(function(e){return!e.disabled})).call(o),z.select(".nv-distributionY").datum(e.filter(function(e){return!e.disabled})).call(u)}var T=d3.select(this),N=this,C=(f||parseInt(T.style("width"))||960)-a.left-a.right,j=(l||parseInt(T.style("height"))||400)-a.top-a.bottom;B.update=function(){T.transition().duration(M).call(B)},B.container=this,k.disabled=e.map(function(e){return!!e.disabled});if(!L){var F;L={};for(F in k)k[F]instanceof Array?L[F]=k[F].slice(0):L[F]=k[F]}if(!e||!e.length||!e.filter(function(e){return e.values.length}).length){var I=T.selectAll(".nv-noData").data([O]);return I.enter().append("text").attr("class","nvd3 nv-noData").attr("dy","-.7em").style("text-anchor","middle"),I.attr("x",a.left+C/2).attr("y",a.top+j/2).text(function(e){return e}),B}T.selectAll(".nv-noData").remove(),h=t.xScale(),p=t.yScale(),_=_||h,D=D||p;var q=T.selectAll("g.nv-wrap.nv-scatterChart").data([e]),R=q.enter().append("g").attr("class","nvd3 nv-wrap nv-scatterChart nv-chart-"+t.id()),U=R.append("g"),z=q.select("g");U.append("rect").attr("class","nvd3 nv-background").style("pointer-events","none"),U.append("g").attr("class","nv-x nv-axis"),U.append("g").attr("class","nv-y 
nv-axis"),U.append("g").attr("class","nv-scatterWrap"),U.append("g").attr("class","nv-regressionLinesWrap"),U.append("g").attr("class","nv-distWrap"),U.append("g").attr("class","nv-legendWrap"),U.append("g").attr("class","nv-controlsWrap"),q.attr("transform","translate("+a.left+","+a.top+")"),b&&z.select(".nv-y.nv-axis").attr("transform","translate("+C+",0)"),m&&(i.width(C/2),q.select(".nv-legendWrap").datum(e).call(i),a.top!=i.height()&&(a.top=i.height(),j=(l||parseInt(T.style("height"))||400)-a.top-a.bottom),q.select(".nv-legendWrap").attr("transform","translate("+C/2+","+ -a.top+")")),w&&(s.width(180).color(["#444"]),z.select(".nv-controlsWrap").datum(H).attr("transform","translate(0,"+ -a.top+")").call(s)),t.width(C).height(j).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),q.select(".nv-scatterWrap").datum(e.filter(function(e){return!e.disabled})).call(t),q.select(".nv-regressionLinesWrap").attr("clip-path","url(#nv-edge-clip-"+t.id()+")");var W=q.select(".nv-regressionLinesWrap").selectAll(".nv-regLines").data(function(e){return e});W.enter().append("g").attr("class","nv-regLines");var X=W.selectAll(".nv-regLine").data(function(e){return[e]}),V=X.enter().append("line").attr("class","nv-regLine").style("stroke-opacity",0);X.transition().attr("x1",h.range()[0]).attr("x2",h.range()[1]).attr("y1",function(e,t){return p(h.domain()[0]*e.slope+e.intercept)}).attr("y2",function(e,t){return p(h.domain()[1]*e.slope+e.intercept)}).style("stroke",function(e,t,n){return c(e,n)}).style("stroke-opacity",function(e,t){return e.disabled||typeof e.slope=="undefined"||typeof e.intercept=="undefined"?0:1}),g&&(n.scale(h).ticks(n.ticks()?n.ticks():C/100).tickSize(-j,0),z.select(".nv-x.nv-axis").attr("transform","translate(0,"+p.range()[0]+")").call(n)),y&&(r.scale(p).ticks(r.ticks()?r.ticks():j/36).tickSize(-C,0),z.select(".nv-y.nv-axis").call(r)),d&&(o.getData(t.x()).scale(h).width(C).color(e.map(function(e,t){return 
e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),U.select(".nv-distWrap").append("g").attr("class","nv-distributionX"),z.select(".nv-distributionX").attr("transform","translate(0,"+p.range()[0]+")").datum(e.filter(function(e){return!e.disabled})).call(o)),v&&(u.getData(t.y()).scale(p).width( j).color(e.map(function(e,t){return e.color||c(e,t)}).filter(function(t,n){return!e[n].disabled})),U.select(".nv-distWrap").append("g").attr("class","nv-distributionY"),z.select(".nv-distributionY").attr("transform","translate("+(b?C:-u.size())+",0)").datum(e.filter(function(e){return!e.disabled})).call(u)),d3.fisheye&&(z.select(".nv-background").attr("width",C).attr("height",j),z.select(".nv-background").on("mousemove",$),z.select(".nv-background").on("click",function(){S=!S}),t.dispatch.on("elementClick.freezeFisheye",function(){S=!S})),s.dispatch.on("legendClick",function(e,i){e.disabled=!e.disabled,E=e.disabled?0:2.5,z.select(".nv-background").style("pointer-events",e.disabled?"none":"all"),z.select(".nv-point-paths").style("pointer-events",e.disabled?"all":"none"),e.disabled?(h.distortion(E).focus(0),p.distortion(E).focus(0),z.select(".nv-scatterWrap").call(t),z.select(".nv-x.nv-axis").call(n),z.select(".nv-y.nv-axis").call(r)):S=!1,B.update()}),i.dispatch.on("stateChange",function(e){k=e,A.stateChange(k),B.update()}),t.dispatch.on("elementMouseover.tooltip",function(e){d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-distx-"+e.pointIndex).attr("y1",e.pos[1]-j),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-disty-"+e.pointIndex).attr("x2",e.pos[0]+o.size()),e.pos=[e.pos[0]+a.left,e.pos[1]+a.top],A.tooltipShow(e)}),A.on("tooltipShow",function(e){x&&P(e,N.parentNode)}),A.on("changeState",function(t){typeof t.disabled!="undefined"&&(e.forEach(function(e,n){e.disabled=t.disabled[n]}),k.disabled=t.disabled),B.update()}),_=h.copy(),D=p.copy()}),B}var 
t=e.models.scatter(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.models.legend(),o=e.models.distribution(),u=e.models.distribution(),a={top:30,right:20,bottom:50,left:75},f=null,l=null,c=e.utils.defaultColor(),h=d3.fisheye?d3.fisheye.scale(d3.scale.linear).distortion(0):t.xScale(),p=d3.fisheye?d3.fisheye.scale(d3.scale.linear).distortion(0):t.yScale(),d=!1,v=!1,m=!0,g=!0,y=!0,b=!1,w=!!d3.fisheye,E=0,S=!1,x=!0,T=function(e,t,n){return""+t+""},N=function(e,t,n){return""+n+""},C=function(e,t,n,r){return"

"+e+"

"+"

"+r+"

"},k={},L=null,A=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),O="No Data Available.",M=250;t.xScale(h).yScale(p),n.orient("bottom").tickPadding(10),r.orient(b?"right":"left").tickPadding(10),o.axis("x"),u.axis("y"),s.updateState(!1);var _,D,P=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),f=i.pos[0]+(s.offsetLeft||0),l=p.range()[0]+a.top+(s.offsetTop||0),c=h.range()[0]+a.left+(s.offsetLeft||0),d=i.pos[1]+(s.offsetTop||0),v=n.tickFormat()(t.x()(i.point,i.pointIndex)),m=r.tickFormat()(t.y()(i.point,i.pointIndex));T!=null&&e.tooltip.show([f,l],T(i.series.key,v,m,i,B),"n",1,s,"x-nvtooltip"),N!=null&&e.tooltip.show([c,d],N(i.series.key,v,m,i,B),"e",1,s,"y-nvtooltip"),C!=null&&e.tooltip.show([o,u],C(i.series.key,v,m,i.point.tooltip,i,B),i.value<0?"n":"s",null,s)},H=[{key:"Magnify",disabled:!0}];return t.dispatch.on("elementMouseout.tooltip",function(e){A.tooltipHide(e),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-distx-"+e.pointIndex).attr("y1",0),d3.select(".nv-chart-"+t.id()+" .nv-series-"+e.seriesIndex+" .nv-disty-"+e.pointIndex).attr("x2",u.size())}),A.on("tooltipHide",function(){x&&e.tooltip.cleanup()}),B.dispatch=A,B.scatter=t,B.legend=i,B.controls=s,B.xAxis=n,B.yAxis=r,B.distX=o,B.distY=u,d3.rebind(B,t,"id","interactive","pointActive","x","y","shape","size","xScale","yScale","zScale","xDomain","yDomain","xRange","yRange","sizeDomain","sizeRange","forceX","forceY","forceSize","clipVoronoi","clipRadius","useVoronoi"),B.options=e.utils.optionsFunc.bind(B),B.margin=function(e){return arguments.length?(a.top=typeof e.top!="undefined"?e.top:a.top,a.right=typeof e.right!="undefined"?e.right:a.right,a.bottom=typeof e.bottom!="undefined"?e.bottom:a.bottom,a.left=typeof e.left!="undefined"?e.left:a.left,B):a},B.width=function(e){return arguments.length?(f=e,B):f},B.height=function(e){return arguments.length?(l=e,B):l},B.color=function(t){return 
arguments.length?(c=e.utils.getColor(t),i.color(c),o.color(c),u.color(c),B):c},B.showDistX=function(e){return arguments.length?(d=e,B):d},B.showDistY=function(e){return arguments.length?(v=e,B):v},B.showControls=function(e){return arguments.length?(w=e,B):w},B.showLegend=function(e){return arguments.length?(m=e,B):m},B.showXAxis=function(e){return arguments.length?(g=e,B):g},B.showYAxis=function(e){return arguments.length?(y=e,B):y},B.rightAlignYAxis=function(e){return arguments.length?(b=e,r.orient(e?"right":"left"),B):b},B.fisheye=function(e){return arguments.length?(E=e,B):E},B.tooltips=function(e){return arguments.length?(x=e,B):x},B.tooltipContent=function(e){return arguments.length?(C=e,B):C},B.tooltipXContent=function(e){return arguments.length?(T=e,B):T},B.tooltipYContent=function(e){return arguments.length?(N=e,B):N},B.state=function(e){return arguments.length?(k=e,B):k},B.defaultState=function(e){return arguments.length?(L=e,B):L},B.noData=function(e){return arguments.length?(O=e,B):O},B.transitionDuration=function(e){return arguments.length?(M=e,B):M},B},e.models.sparkline=function(){"use strict";function d(e){return e.each(function(e){var i=n-t.left-t.right,d=r-t.top-t.bottom,v=d3.select(this);s.domain(l||d3.extent(e,u)).range(h||[0,i]),o.domain(c||d3.extent(e,a)).range(p||[d,0]);var m=v.selectAll("g.nv-wrap.nv-sparkline").data([e]),g=m.enter().append("g").attr("class","nvd3 nv-wrap nv-sparkline"),b=g.append("g"),w=m.select("g");m.attr("transform","translate("+t.left+","+t.top+")");var E=m.selectAll("path").data(function(e){return[e]});E.enter().append("path"),E.exit().remove(),E.style("stroke",function(e,t){return e.color||f(e,t)}).attr("d",d3.svg.line().x(function(e,t){return s(u(e,t))}).y(function(e,t){return o(a(e,t))}));var S=m.selectAll("circle.nv-point").data(function(e){function n(t){if(t!=-1){var n=e[t];return n.pointIndex=t,n}return null}var t=e.map(function(e,t){return 
a(e,t)}),r=n(t.lastIndexOf(o.domain()[1])),i=n(t.indexOf(o.domain()[0])),s=n(t.length-1);return[i,r,s].filter(function(e){return e!=null})});S.enter().append("circle"),S.exit().remove(),S.attr("cx",function(e,t){return s(u(e,e.pointIndex))}).attr("cy",function(e,t){return o(a(e,e.pointIndex))}).attr("r",2).attr("class",function(e,t){return u(e,e.pointIndex)==s.domain()[1]?"nv-point nv-currentValue":a(e,e.pointIndex)==o.domain()[0]?"nv-point nv-minValue":"nv-point nv-maxValue"})}),d}var t={top:2,right:0,bottom:2,left:0},n=400,r=32,i=!0,s=d3.scale.linear(),o=d3.scale.linear(),u=function(e){return e.x},a=function(e){return e.y},f=e.utils.getColor(["#000"]),l,c,h,p;return d.options=e.utils.optionsFunc.bind(d),d.margin=function(e){return arguments.length?(t.top=typeof e.top!="undefined"?e.top:t.top,t.right=typeof e.right!="undefined"?e.right:t.right,t.bottom=typeof e.bottom!="undefined"?e.bottom:t.bottom,t.left=typeof e.left!="undefined"?e.left:t.left,d):t},d.width=function(e){return arguments.length?(n=e,d):n},d.height=function(e){return arguments.length?(r=e,d):r},d.x=function(e){return arguments.length?(u=d3.functor(e),d):u},d.y=function(e){return arguments.length?(a=d3.functor(e),d):a},d.xScale=function(e){return arguments.length?(s=e,d):s},d.yScale=function(e){return arguments.length?(o=e,d):o},d.xDomain=function(e){return arguments.length?(l=e,d):l},d.yDomain=function(e){return arguments.length?(c=e,d):c},d.xRange=function(e){return arguments.length?(h=e,d):h},d.yRange=function(e){return arguments.length?(p=e,d):p},d.animate=function(e){return arguments.length?(i=e,d):i},d.color=function(t){return arguments.length?(f=e.utils.getColor(t),d):f},d},e.models.sparklinePlus=function(){"use strict";function v(e){return e.each(function(c){function O(){if(a)return;var 
e=C.selectAll(".nv-hoverValue").data(u),r=e.enter().append("g").attr("class","nv-hoverValue").style("stroke-opacity",0).style("fill-opacity",0);e.exit().transition().duration(250).style("stroke-opacity",0).style("fill-opacity",0).remove(),e.attr("transform",function(e){return"translate("+s(t.x()(c[e],e))+",0)"}).transition().duration(250).style("stroke-opacity",1).style("fill-opacity",1);if(!u.length)return;r.append("line").attr("x1",0).attr("y1",-n.top).attr("x2",0).attr("y2",b),r.append("text").attr("class","nv-xValue").attr("x",-6).attr("y",-n.top).attr("text-anchor","end").attr("dy",".9em"),C.select(".nv-hoverValue .nv-xValue").text(f(t.x()(c[u[0]],u[0]))),r.append("text").attr("class","nv-yValue").attr("x",6).attr("y",-n.top).attr("text-anchor","start").attr("dy",".9em"),C.select(".nv-hoverValue .nv-yValue").text(l(t.y()(c[u[0]],u[0])))}function M(){function r(e,n){var r=Math.abs(t.x()(e[0],0)-n),i=0;for(var s=0;s2){var h=M.yScale().invert(i.mouseY),p=Infinity,d=null;c.forEach(function(e,t){h=Math.abs(h);var n=Math.abs(e.stackedValue.y0),r=Math.abs(e.stackedValue.y);if(h>=n&&h<=r+n){d=t;return}}),d!=null&&(c[d].highlight=!0)}var v=n.tickFormat()(M.x()(s,a)),m=t.style()=="expand"?function(e,t){return d3.format(".1%")(e)}:function(e,t){return r.tickFormat()(e)};o.tooltip.position({left:f+u.left,top:i.mouseY+u.top}).chartContainer(D.parentNode).enabled(g).valueFormatter(m).data({value:v,series:c})(),o.renderGuideLine(f)}),o.dispatch.on("elementMouseout",function(e){N.tooltipHide(),t.clearHighlights()}),N.on("tooltipShow",function(e){g&&O(e,D.parentNode)}),N.on("changeState",function(e){typeof e.disabled!="undefined"&&y.length===e.disabled.length&&(y.forEach(function(t,n){t.disabled=e.disabled[n]}),S.disabled=e.disabled),typeof e.style!="undefined"&&t.style(e.style),M.update()})}),M}var 
t=e.models.stackedArea(),n=e.models.axis(),r=e.models.axis(),i=e.models.legend(),s=e.models.legend(),o=e.interactiveGuideline(),u={top:30,right:25,bottom:50,left:60},a=null,f=null,l=e.utils.defaultColor(),c=!0,h=!0,p=!0,d=!0,v=!1,m=!1,g=!0,y=function(e,t,n,r,i){return"

"+e+"

"+"

"+n+" on "+t+"

"},b,w,E=d3.format(",.2f"),S={style:t.style()},x=null,T="No Data Available.",N=d3.dispatch("tooltipShow","tooltipHide","stateChange","changeState"),C=250,k=["Stacked","Stream","Expanded"],L={},A=250;n.orient("bottom").tickPadding(7),r.orient(v?"right":"left"),s.updateState(!1);var O=function(i,s){var o=i.pos[0]+(s.offsetLeft||0),u=i.pos[1]+(s.offsetTop||0),a=n.tickFormat()(t.x()(i.point,i.pointIndex)),f=r.tickFormat()(t.y()(i.point,i.pointIndex)),l=y(i.series.key,a,f,i,M);e.tooltip.show([o,u],l,i.value<0?"n":"s",null,s)};return t.dispatch.on("tooltipShow",function(e){e.pos=[e.pos[0]+u.left,e.pos[1]+u.top],N.tooltipShow(e)}),t.dispatch.on("tooltipHide",function(e){N.tooltipHide(e)}),N.on("tooltipHide",function(){g&&e.tooltip.cleanup()}),M.dispatch=N,M.stacked=t,M.legend=i,M.controls=s,M.xAxis=n,M.yAxis=r,M.interactiveLayer=o,d3.rebind(M,t,"x","y","size","xScale","yScale","xDomain","yDomain","xRange","yRange","sizeDomain","interactive","useVoronoi","offset","order","style","clipEdge","forceX","forceY","forceSize","interpolate"),M.options=e.utils.optionsFunc.bind(M),M.margin=function(e){return arguments.length?(u.top=typeof e.top!="undefined"?e.top:u.top,u.right=typeof e.right!="undefined"?e.right:u.right,u.bottom=typeof e.bottom!="undefined"?e.bottom:u.bottom,u.left=typeof e.left!="undefined"?e.left:u.left,M):u},M.width=function(e){return arguments.length?(a=e,M):a},M.height=function(e){return arguments.length?(f=e,M):f},M.color=function(n){return arguments.length?(l=e.utils.getColor(n),i.color(l),t.color(l),M):l},M.showControls=function(e){return arguments.length?(c=e,M):c},M.showLegend=function(e){return arguments.length?(h=e,M):h},M.showXAxis=function(e){return arguments.length?(p=e,M):p},M.showYAxis=function(e){return arguments.length?(d=e,M):d},M.rightAlignYAxis=function(e){return arguments.length?(v=e,r.orient(e?"right":"left"),M):v},M.useInteractiveGuideline=function(e){return 
arguments.length?(m=e,e===!0&&(M.interactive(!1),M.useVoronoi(!1)),M):m},M.tooltip=function(e){return arguments.length?(y=e,M):y},M.tooltips=function(e){return arguments.length?(g=e,M):g},M.tooltipContent=function(e){return arguments.length?(y=e,M):y},M.state=function(e){return arguments.length?(S=e,M):S},M.defaultState=function(e){return arguments.length?(x=e,M):x},M.noData=function(e){return arguments.length?(T=e,M):T},M.transitionDuration=function(e){return arguments.length?(A=e,M):A},M.controlsData=function(e){return arguments.length?(k=e,M):k},M.controlLabels=function(e){return arguments.length?typeof e!="object"?L:(L=e,M):L},r.setTickFormat=r.tickFormat,r.tickFormat=function(e){return arguments.length?(E=e,r):E},M}})();rally-0.9.1/rally/ui/templates/libs/README.rst0000664000567000056710000000030213073417716022167 0ustar jenkinsjenkins00000000000000=============================== Third-party files for templates =============================== This directory includes third-party files (JavaScript, CSS) that can be embedded into templates. rally-0.9.1/rally/ui/templates/libs/angular.1.3.3.min.js0000664000567000056710000036250513073417716023732 0ustar jenkinsjenkins00000000000000/* AngularJS v1.3.3 (c) 2010-2014 Google, Inc. 
http://angularjs.org License: MIT */ (function(T,U,t){'use strict';function v(b){return function(){var a=arguments[0],c;c="["+(b?b+":":"")+a+"] http://errors.angularjs.org/1.3.3/"+(b?b+"/":"")+a;for(a=1;a").append(b).html();try{return b[0].nodeType===mb?Q(c):c.match(/^(<[^>]+>)/)[1].replace(/^<([\w\-]+)/,function(a,b){return"<"+Q(b)})}catch(d){return Q(c)}}function qc(b){try{return decodeURIComponent(b)}catch(a){}} function rc(b){var a={},c,d;r((b||"").split("&"),function(b){b&&(c=b.replace(/\+/g,"%20").split("="),d=qc(c[0]),A(d)&&(b=A(c[1])?qc(c[1]):!0,Jb.call(a,d)?G(a[d])?a[d].push(b):a[d]=[a[d],b]:a[d]=b))});return a}function Kb(b){var a=[];r(b,function(b,d){G(b)?r(b,function(b){a.push(Da(d,!0)+(!0===b?"":"="+Da(b,!0)))}):a.push(Da(d,!0)+(!0===b?"":"="+Da(b,!0)))});return a.length?a.join("&"):""}function nb(b){return Da(b,!0).replace(/%26/gi,"&").replace(/%3D/gi,"=").replace(/%2B/gi,"+")}function Da(b,a){return encodeURIComponent(b).replace(/%40/gi, "@").replace(/%3A/gi,":").replace(/%24/g,"$").replace(/%2C/gi,",").replace(/%3B/gi,";").replace(/%20/g,a?"%20":"+")}function Gd(b,a){var c,d,e=ob.length;b=y(b);for(d=0;d/,">"));}a=a||[];a.unshift(["$provide",function(a){a.value("$rootElement",b)}]);c.debugInfoEnabled&&a.push(["$compileProvider",function(a){a.debugInfoEnabled(!0)}]);a.unshift("ng");d=Lb(a,c.strictDi);d.invoke(["$rootScope","$rootElement","$compile","$injector",function(a,b,c,d){a.$apply(function(){b.data("$injector", d);c(b)(a)})}]);return d},e=/^NG_ENABLE_DEBUG_INFO!/,f=/^NG_DEFER_BOOTSTRAP!/;T&&e.test(T.name)&&(c.debugInfoEnabled=!0,T.name=T.name.replace(e,""));if(T&&!f.test(T.name))return d();T.name=T.name.replace(f,"");va.resumeBootstrap=function(b){r(b,function(b){a.push(b)});d()}}function Id(){T.name="NG_ENABLE_DEBUG_INFO!"+T.name;T.location.reload()}function Jd(b){return va.element(b).injector().get("$$testability")}function Mb(b,a){a=a||"_";return b.replace(Kd,function(b,d){return(d?a:"")+b.toLowerCase()})} function Ld(){var 
b;tc||((oa=T.jQuery)&&oa.fn.on?(y=oa,H(oa.fn,{scope:Ka.scope,isolateScope:Ka.isolateScope,controller:Ka.controller,injector:Ka.injector,inheritedData:Ka.inheritedData}),b=oa.cleanData,oa.cleanData=function(a){var c;if(Nb)Nb=!1;else for(var d=0,e;null!=(e=a[d]);d++)(c=oa._data(e,"events"))&&c.$destroy&&oa(e).triggerHandler("$destroy");b(a)}):y=R,va.element=y,tc=!0)}function Ob(b,a,c){if(!b)throw Wa("areq",a||"?",c||"required");return b}function pb(b,a,c){c&&G(b)&&(b=b[b.length-1]); Ob(u(b),a,"not a function, got "+(b&&"object"===typeof b?b.constructor.name||"Object":typeof b));return b}function La(b,a){if("hasOwnProperty"===b)throw Wa("badname",a);}function uc(b,a,c){if(!a)return b;a=a.split(".");for(var d,e=b,f=a.length,g=0;g")+d[2];for(d=d[0];d--;)c=c.lastChild;f=Xa(f,c.childNodes);c=e.firstChild;c.textContent=""}else f.push(a.createTextNode(b));e.textContent="";e.innerHTML="";r(f,function(a){e.appendChild(a)});return e}function R(b){if(b instanceof R)return b;var a; I(b)&&(b=P(b),a=!0);if(!(this instanceof R)){if(a&&"<"!=b.charAt(0))throw Qb("nosel");return new R(b)}if(a){a=U;var c;b=(c=df.exec(b))?[a.createElement(c[1])]:(c=Ec(b,a))?c.childNodes:[]}Fc(this,b)}function Rb(b){return b.cloneNode(!0)}function tb(b,a){a||ub(b);if(b.querySelectorAll)for(var c=b.querySelectorAll("*"),d=0,e=c.length;d 4096 bytes)!"));else{if(p.cookie!==A)for(A=p.cookie,d=A.split("; "),da={},f=0;fl&&this.remove(q.key),b},get:function(a){if(l").parent()[0])});var f=ca(a,b,a,c,d,e);C.$$addScopeClass(a);var g=null;return function(b, c,d){Ob(b,"scope");d=d||{};var e=d.parentBoundTranscludeFn,h=d.transcludeControllers;d=d.futureParentElement;e&&e.$$boundTransclude&&(e=e.$$boundTransclude);g||(g=(d=d&&d[0])?"foreignobject"!==sa(d)&&d.toString().match(/SVG/)?"svg":"html":"html");d="html"!==g?y(T(g,y("
").append(a).html())):c?Ka.clone.call(a):a;if(h)for(var k in h)d.data("$"+k+"Controller",h[k].instance);C.$$addScopeInfo(d,b);c&&c(d,b);f&&f(b,d,d,e);return d}}function ca(a,b,c,d,e,f){function g(a,c,d,e){var f,k,l,q,s,p,B;if(n)for(B= Array(c.length),q=0;qK.priority)break;if(v=K.scope)K.templateUrl||(L(v)?(ya("new/isolated scope",M||N,K,Y),M=K):ya("new/isolated scope",M,K,Y)),N=N||K;ga=K.name;!K.templateUrl&&K.controller&&(v=K.controller,z=z||{},ya("'"+ga+"' controller",z[ga],K,Y),z[ga]=K);if(v=K.transclude)w=!0,K.$$tlb||(ya("transclusion",fa,K,Y),fa=K),"element"==v?(H=!0,x=K.priority,v=Y,Y=e.$$element=y(U.createComment(" "+ga+ ": "+e[ga]+" ")),d=Y[0],Ab(g,Ya.call(v,0),d),Ga=C(v,f,x,k&&k.name,{nonTlbTranscludeDirective:fa})):(v=y(Rb(d)).contents(),Y.empty(),Ga=C(v,f));if(K.template)if(Na=!0,ya("template",da,K,Y),da=K,v=u(K.template)?K.template(Y,e):K.template,v=Qc(v),K.replace){k=K;v=Pb.test(v)?Rc(T(K.templateNamespace,P(v))):[];d=v[0];if(1!=v.length||d.nodeType!==la)throw ia("tplrt",ga,"");Ab(g,Y,d);za={$attr:{}};v=V(d,[],za);var of=a.splice(R+1,a.length-(R+1));M&&D(v);a=a.concat(v).concat(of);Pc(e,za);za=a.length}else Y.html(v); if(K.templateUrl)Na=!0,ya("template",da,K,Y),da=K,K.replace&&(k=K),F=nf(a.splice(R,a.length-R),Y,e,g,w&&Ga,l,s,{controllerDirectives:z,newIsolateScopeDirective:M,templateDirective:da,nonTlbTranscludeDirective:fa}),za=a.length;else if(K.compile)try{Q=K.compile(Y,e,Ga),u(Q)?B(null,Q,zb,aa):Q&&B(Q.pre,Q.post,zb,aa)}catch(ba){c(ba,ua(Y))}K.terminal&&(F.terminal=!0,x=Math.max(x,K.priority))}F.scope=N&&!0===N.scope;F.transcludeOnThisElement=w;F.elementTranscludeOnThisElement=H;F.templateOnThisElement=Na; F.transclude=Ga;p.hasElementTranscludeDirective=H;return F}function D(a){for(var b=0,c=a.length;bq.priority)&&-1!=q.restrict.indexOf(f)&&(k&&(q=mc(q,{$$start:k,$$end:l})),b.push(q),h=q)}catch(O){c(O)}}return h}function Pc(a,b){var c=b.$attr,d=a.$attr,e=a.$$element;r(a,function(d,e){"$"!=e.charAt(0)&& (b[e]&&b[e]!==d&&(d+=("style"===e?";":" 
")+b[e]),a.$set(e,d,!0,c[e]))});r(b,function(b,f){"class"==f?(N(e,b),a["class"]=(a["class"]?a["class"]+" ":"")+b):"style"==f?(e.attr("style",e.attr("style")+";"+b),a.style=(a.style?a.style+";":"")+b):"$"==f.charAt(0)||a.hasOwnProperty(f)||(a[f]=b,d[f]=c[f])})}function nf(a,b,c,d,e,f,g,h){var k=[],l,q,n=b[0],p=a.shift(),B=H({},p,{templateUrl:null,transclude:null,replace:null,$$originalDirective:p}),O=u(p.templateUrl)?p.templateUrl(b,c):p.templateUrl,E=p.templateNamespace; b.empty();s(J.getTrustedResourceUrl(O)).then(function(s){var F,J;s=Qc(s);if(p.replace){s=Pb.test(s)?Rc(T(E,P(s))):[];F=s[0];if(1!=s.length||F.nodeType!==la)throw ia("tplrt",p.name,O);s={$attr:{}};Ab(d,b,F);var x=V(F,[],s);L(p.scope)&&D(x);a=x.concat(a);Pc(c,s)}else F=n,b.html(s);a.unshift(B);l=A(a,F,c,e,b,p,f,g,h);r(d,function(a,c){a==F&&(d[c]=b[0])});for(q=ca(b[0].childNodes,e);k.length;){s=k.shift();J=k.shift();var z=k.shift(),C=k.shift(),x=b[0];if(!s.$$destroyed){if(J!==n){var S=J.className;h.hasElementTranscludeDirective&& p.replace||(x=Rb(F));Ab(z,y(J),x);N(y(x),S)}J=l.transcludeOnThisElement?M(s,l.transclude,C):C;l(q,s,x,d,J)}}k=null});return function(a,b,c,d,e){a=e;b.$$destroyed||(k?(k.push(b),k.push(c),k.push(d),k.push(a)):(l.transcludeOnThisElement&&(a=M(b,l.transclude,e)),l(q,b,c,d,a)))}}function v(a,b){var c=b.priority-a.priority;return 0!==c?c:a.name!==b.name?a.name"+b+"";return c.childNodes[0].childNodes;default:return b}}function Ga(a,b){if("srcdoc"==b)return J.HTML;var c=sa(a);if("xlinkHref"==b||"form"==c&&"action"==b|| "img"!=c&&("src"==b||"ngSrc"==b))return J.RESOURCE_URL}function R(a,c,d,e,f){var h=b(d,!0);if(h){if("multiple"===e&&"select"===sa(a))throw ia("selmulti",ua(a));c.push({priority:100,compile:function(){return{pre:function(c,d,l){d=l.$$observers||(l.$$observers={});if(k.test(e))throw 
ia("nodomevents");l[e]&&(h=b(l[e],!0,Ga(a,e),g[e]||f))&&(l[e]=h(c),(d[e]||(d[e]=[])).$$inter=!0,(l.$$observers&&l.$$observers[e].$$scope||c).$watch(h,function(a,b){"class"===e&&a!=b?l.$updateClass(a,b):l.$set(e,a)}))}}}})}} function Ab(a,b,c){var d=b[0],e=b.length,f=d.parentNode,g,h;if(a)for(g=0,h=a.length;g=a)return b;for(;a--;)8===b[a].nodeType&&qf.call(b,a,1);return b}function De(){var b={},a=!1,c=/^(\S+)(\s+as\s+(\w+))?$/;this.register=function(a,c){La(a,"controller");L(a)?H(b,a):b[a]=c};this.allowGlobals=function(){a=!0};this.$get=["$injector","$window",function(d,e){function f(a,b,c,d){if(!a||!L(a.$scope))throw v("$controller")("noscp",d, b);a.$scope[b]=c}return function(g,h,k,l){var m,p,q;k=!0===k;l&&I(l)&&(q=l);I(g)&&(l=g.match(c),p=l[1],q=q||l[3],g=b.hasOwnProperty(p)?b[p]:uc(h.$scope,p,!0)||(a?uc(e,p,!0):t),pb(g,p,!0));if(k)return k=function(){},k.prototype=(G(g)?g[g.length-1]:g).prototype,m=new k,q&&f(h,q,m,p||g.name),H(function(){d.invoke(g,m,h,p);return m},{instance:m,identifier:q});m=d.instantiate(g,h,p);q&&f(h,q,m,p||g.name);return m}}]}function Ee(){this.$get=["$window",function(b){return y(b.document)}]}function Fe(){this.$get= ["$log",function(b){return function(a,c){b.error.apply(b,arguments)}}]}function Yb(b,a){if(I(b)){b=b.replace(rf,"");var c=a("Content-Type");if(c&&0===c.indexOf(Tc)&&b.trim()||sf.test(b)&&tf.test(b))b=pc(b)}return b}function Uc(b){var a={},c,d,e;if(!b)return a;r(b.split("\n"),function(b){e=b.indexOf(":");c=Q(P(b.substr(0,e)));d=P(b.substr(e+1));c&&(a[c]=a[c]?a[c]+", "+d:d)});return a}function Vc(b){var a=L(b)?b:t;return function(c){a||(a=Uc(b));return c?a[Q(c)]||null:a}}function Wc(b,a,c){if(u(c))return c(b, a);r(c,function(c){b=c(b,a)});return b}function Ie(){var b=this.defaults={transformResponse:[Yb],transformRequest:[function(a){return L(a)&&"[object File]"!==Ja.call(a)&&"[object Blob]"!==Ja.call(a)?Za(a):a}],headers:{common:{Accept:"application/json, text/plain, 
*/*"},post:ta(Zb),put:ta(Zb),patch:ta(Zb)},xsrfCookieName:"XSRF-TOKEN",xsrfHeaderName:"X-XSRF-TOKEN"},a=!1;this.useApplyAsync=function(b){return A(b)?(a=!!b,this):a};var c=this.interceptors=[];this.$get=["$httpBackend","$browser","$cacheFactory", "$rootScope","$q","$injector",function(d,e,f,g,h,k){function l(a){function c(a){var b=H({},a);b.data=a.data?Wc(a.data,a.headers,d.transformResponse):a.data;a=a.status;return 200<=a&&300>a?b:h.reject(b)}var d={method:"get",transformRequest:b.transformRequest,transformResponse:b.transformResponse},e=function(a){var c=b.headers,d=H({},a.headers),e,f,c=H({},c.common,c[Q(a.method)]);a:for(e in c){a=Q(e);for(f in d)if(Q(f)===a)continue a;d[e]=c[e]}(function(a){var b;r(a,function(c,d){u(c)&&(b=c(),null!= b?a[d]=b:delete a[d])})})(d);return d}(a);H(d,a);d.headers=e;d.method=rb(d.method);var f=[function(a){e=a.headers;var d=Wc(a.data,Vc(e),a.transformRequest);D(d)&&r(e,function(a,b){"content-type"===Q(b)&&delete e[b]});D(a.withCredentials)&&!D(b.withCredentials)&&(a.withCredentials=b.withCredentials);return m(a,d,e).then(c,c)},t],g=h.when(d);for(r(n,function(a){(a.request||a.requestError)&&f.unshift(a.request,a.requestError);(a.response||a.responseError)&&f.push(a.response,a.responseError)});f.length;){a= f.shift();var k=f.shift(),g=g.then(a,k)}g.success=function(a){g.then(function(b){a(b.data,b.status,b.headers,d)});return g};g.error=function(a){g.then(null,function(b){a(b.data,b.status,b.headers,d)});return g};return g}function m(c,f,k){function n(b,c,d,e){function f(){m(c,b,d,e)}N&&(200<=b&&300>b?N.put(r,[b,c,Uc(d),e]):N.remove(r));a?g.$applyAsync(f):(f(),g.$$phase||g.$apply())}function m(a,b,d,e){b=Math.max(b,0);(200<=b&&300>b?z.resolve:z.reject)({data:a,status:b,headers:Vc(d),config:c,statusText:e})} function J(){var a=l.pendingRequests.indexOf(c);-1!==a&&l.pendingRequests.splice(a,1)}var 
z=h.defer(),F=z.promise,N,C,r=p(c.url,c.params);l.pendingRequests.push(c);F.then(J,J);!c.cache&&!b.cache||!1===c.cache||"GET"!==c.method&&"JSONP"!==c.method||(N=L(c.cache)?c.cache:L(b.cache)?b.cache:q);if(N)if(C=N.get(r),A(C)){if(C&&u(C.then))return C.then(J,J),C;G(C)?m(C[1],C[0],ta(C[2]),C[3]):m(C,200,{},"OK")}else N.put(r,F);D(C)&&((C=Xc(c.url)?e.cookies()[c.xsrfCookieName||b.xsrfCookieName]:t)&&(k[c.xsrfHeaderName|| b.xsrfHeaderName]=C),d(c.method,r,f,n,k,c.timeout,c.withCredentials,c.responseType));return F}function p(a,b){if(!b)return a;var c=[];Cd(b,function(a,b){null===a||D(a)||(G(a)||(a=[a]),r(a,function(a){L(a)&&(a=ea(a)?a.toISOString():Za(a));c.push(Da(b)+"="+Da(a))}))});0=k&&(s.resolve(q),p(O.$$intervalId),delete f[O.$$intervalId]);n||b.$apply()},h);f[O.$$intervalId]=s;return O}var f={}; e.cancel=function(b){return b&&b.$$intervalId in f?(f[b.$$intervalId].reject("canceled"),a.clearInterval(b.$$intervalId),delete f[b.$$intervalId],!0):!1};return e}]}function Pd(){this.$get=function(){return{id:"en-us",NUMBER_FORMATS:{DECIMAL_SEP:".",GROUP_SEP:",",PATTERNS:[{minInt:1,minFrac:0,maxFrac:3,posPre:"",posSuf:"",negPre:"-",negSuf:"",gSize:3,lgSize:3},{minInt:1,minFrac:2,maxFrac:2,posPre:"\u00a4",posSuf:"",negPre:"(\u00a4",negSuf:")",gSize:3,lgSize:3}],CURRENCY_SYM:"$"},DATETIME_FORMATS:{MONTH:"January February March April May June July August September October November December".split(" "), SHORTMONTH:"Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split(" "),DAY:"Sunday Monday Tuesday Wednesday Thursday Friday Saturday".split(" "),SHORTDAY:"Sun Mon Tue Wed Thu Fri Sat".split(" "),AMPMS:["AM","PM"],medium:"MMM d, y h:mm:ss a","short":"M/d/yy h:mm a",fullDate:"EEEE, MMMM d, y",longDate:"MMMM d, y",mediumDate:"MMM d, y",shortDate:"M/d/yy",mediumTime:"h:mm:ss a",shortTime:"h:mm a"},pluralCat:function(b){return 1===b?"one":"other"}}}}function ac(b){b=b.split("/");for(var a=b.length;a--;)b[a]= nb(b[a]);return b.join("/")}function Yc(b,a){var 
c=Aa(b);a.$$protocol=c.protocol;a.$$host=c.hostname;a.$$port=aa(c.port)||wf[c.protocol]||null}function Zc(b,a){var c="/"!==b.charAt(0);c&&(b="/"+b);var d=Aa(b);a.$$path=decodeURIComponent(c&&"/"===d.pathname.charAt(0)?d.pathname.substring(1):d.pathname);a.$$search=rc(d.search);a.$$hash=decodeURIComponent(d.hash);a.$$path&&"/"!=a.$$path.charAt(0)&&(a.$$path="/"+a.$$path)}function xa(b,a){if(0===a.indexOf(b))return a.substr(b.length)}function Fa(b){var a= b.indexOf("#");return-1==a?b:b.substr(0,a)}function bc(b){return b.substr(0,Fa(b).lastIndexOf("/")+1)}function cc(b,a){this.$$html5=!0;a=a||"";var c=bc(b);Yc(b,this);this.$$parse=function(a){var b=xa(c,a);if(!I(b))throw eb("ipthprfx",a,c);Zc(b,this);this.$$path||(this.$$path="/");this.$$compose()};this.$$compose=function(){var a=Kb(this.$$search),b=this.$$hash?"#"+nb(this.$$hash):"";this.$$url=ac(this.$$path)+(a?"?"+a:"")+b;this.$$absUrl=c+this.$$url.substr(1)};this.$$parseLinkUrl=function(d,e){if(e&& "#"===e[0])return this.hash(e.slice(1)),!0;var f,g;(f=xa(b,d))!==t?(g=f,g=(f=xa(a,f))!==t?c+(xa("/",f)||f):b+g):(f=xa(c,d))!==t?g=c+f:c==d+"/"&&(g=c);g&&this.$$parse(g);return!!g}}function dc(b,a){var c=bc(b);Yc(b,this);this.$$parse=function(d){var e=xa(b,d)||xa(c,d),e="#"==e.charAt(0)?xa(a,e):this.$$html5?e:"";if(!I(e))throw eb("ihshprfx",d,a);Zc(e,this);d=this.$$path;var f=/^\/[A-Z]:(\/.*)/;0===e.indexOf(b)&&(e=e.replace(b,""));f.exec(e)||(d=(e=f.exec(d))?e[1]:d);this.$$path=d;this.$$compose()}; this.$$compose=function(){var c=Kb(this.$$search),e=this.$$hash?"#"+nb(this.$$hash):"";this.$$url=ac(this.$$path)+(c?"?"+c:"")+e;this.$$absUrl=b+(this.$$url?a+this.$$url:"")};this.$$parseLinkUrl=function(a,c){return Fa(b)==Fa(a)?(this.$$parse(a),!0):!1}}function $c(b,a){this.$$html5=!0;dc.apply(this,arguments);var c=bc(b);this.$$parseLinkUrl=function(d,e){if(e&&"#"===e[0])return this.hash(e.slice(1)),!0;var f,g;b==Fa(d)?f=d:(g=xa(c,d))?f=b+a+g:c===d+"/"&&(f=c);f&&this.$$parse(f);return!!f};this.$$compose= function(){var 
c=Kb(this.$$search),e=this.$$hash?"#"+nb(this.$$hash):"";this.$$url=ac(this.$$path)+(c?"?"+c:"")+e;this.$$absUrl=b+a+this.$$url}}function Bb(b){return function(){return this[b]}}function ad(b,a){return function(c){if(D(c))return this[b];this[b]=a(c);this.$$compose();return this}}function Ke(){var b="",a={enabled:!1,requireBase:!0,rewriteLinks:!0};this.hashPrefix=function(a){return A(a)?(b=a,this):b};this.html5Mode=function(b){return Ua(b)?(a.enabled=b,this):L(b)?(Ua(b.enabled)&&(a.enabled= b.enabled),Ua(b.requireBase)&&(a.requireBase=b.requireBase),Ua(b.rewriteLinks)&&(a.rewriteLinks=b.rewriteLinks),this):a};this.$get=["$rootScope","$browser","$sniffer","$rootElement",function(c,d,e,f){function g(a,b,c){var e=k.url(),f=k.$$state;try{d.url(a,b,c),k.$$state=d.state()}catch(g){throw k.url(e),k.$$state=f,g;}}function h(a,b){c.$broadcast("$locationChangeSuccess",k.absUrl(),a,k.$$state,b)}var k,l;l=d.baseHref();var m=d.url(),p;if(a.enabled){if(!l&&a.requireBase)throw eb("nobase");p=m.substring(0, m.indexOf("/",m.indexOf("//")+2))+(l||"/");l=e.history?cc:$c}else p=Fa(m),l=dc;k=new l(p,"#"+b);k.$$parseLinkUrl(m,m);k.$$state=d.state();var q=/^\s*(javascript|mailto):/i;f.on("click",function(b){if(a.rewriteLinks&&!b.ctrlKey&&!b.metaKey&&2!=b.which){for(var e=y(b.target);"a"!==sa(e[0]);)if(e[0]===f[0]||!(e=e.parent())[0])return;var g=e.prop("href"),h=e.attr("href")||e.attr("xlink:href");L(g)&&"[object SVGAnimatedString]"===g.toString()&&(g=Aa(g.animVal).href);q.test(g)||!g||e.attr("target")||b.isDefaultPrevented()|| !k.$$parseLinkUrl(g,h)||(b.preventDefault(),k.absUrl()!=d.url()&&(c.$apply(),T.angular["ff-684208-preventDefault"]=!0))}});k.absUrl()!=m&&d.url(k.absUrl(),!0);var n=!0;d.onUrlChange(function(a,b){c.$evalAsync(function(){var d=k.absUrl(),e=k.$$state,f;k.$$parse(a);k.$$state=b;f=c.$broadcast("$locationChangeStart",a,d,b,e).defaultPrevented;k.absUrl()===a&&(f?(k.$$parse(d),k.$$state=e,g(d,!1,e)):(n=!1,h(d,e)))});c.$$phase||c.$digest()});c.$watch(function(){var 
a=d.url(),b=d.state(),f=k.$$replace,l=a!== k.absUrl()||k.$$html5&&e.history&&b!==k.$$state;if(n||l)n=!1,c.$evalAsync(function(){var d=k.absUrl(),e=c.$broadcast("$locationChangeStart",d,a,k.$$state,b).defaultPrevented;k.absUrl()===d&&(e?(k.$$parse(a),k.$$state=b):(l&&g(d,f,b===k.$$state?null:k.$$state),h(a,b)))});k.$$replace=!1});return k}]}function Le(){var b=!0,a=this;this.debugEnabled=function(a){return A(a)?(b=a,this):b};this.$get=["$window",function(c){function d(a){a instanceof Error&&(a.stack?a=a.message&&-1===a.stack.indexOf(a.message)? "Error: "+a.message+"\n"+a.stack:a.stack:a.sourceURL&&(a=a.message+"\n"+a.sourceURL+":"+a.line));return a}function e(a){var b=c.console||{},e=b[a]||b.log||w;a=!1;try{a=!!e.apply}catch(k){}return a?function(){var a=[];r(arguments,function(b){a.push(d(b))});return e.apply(b,a)}:function(a,b){e(a,null==b?"":b)}}return{log:e("log"),info:e("info"),warn:e("warn"),error:e("error"),debug:function(){var c=e("debug");return function(){b&&c.apply(a,arguments)}}()}}]}function qa(b,a){if("__defineGetter__"=== b||"__defineSetter__"===b||"__lookupGetter__"===b||"__lookupSetter__"===b||"__proto__"===b)throw ja("isecfld",a);return b}function ra(b,a){if(b){if(b.constructor===b)throw ja("isecfn",a);if(b.window===b)throw ja("isecwindow",a);if(b.children&&(b.nodeName||b.prop&&b.attr&&b.find))throw ja("isecdom",a);if(b===Object)throw ja("isecobj",a);}return b}function ec(b){return b.constant}function Oa(b,a,c,d){ra(b,d);a=a.split(".");for(var e,f=0;1h?bd(g[0],g[1],g[2],g[3],g[4],c,d):function(a,b){var e=0,f;do f=bd(g[e++],g[e++],g[e++],g[e++],g[e++],c,d)(a,b),b=t,a=f;while(e=this.promise.$$state.status&&d&&d.length&&b(function(){for(var b,e,f=0,g=d.length;fa)for(b in l++,f)e.hasOwnProperty(b)||(s--,delete f[b])}else f!==e&&(f=e,l++);return l}}c.$stateful=!0;var d=this,e,f,h,k=1< b.length,l=0,p=g(a,c),m=[],q={},n=!0,s=0;return this.$watch(p,function(){n?(n=!1,b(e,e,d)):b(e,h,d);if(k)if(L(e))if(Ra(e)){h=Array(e.length);for(var 
a=0;ax&&(X=4-x,t[X]||(t[X]=[]),t[X].push({msg:u(e.exp)?"fn: "+(e.exp.name||e.exp.toString()):e.exp,newVal:g,oldVal:k}));else if(e===c){r=!1;break a}}catch(v){f(v)}if(!(m=M.$$childHead||M!==this&&M.$$nextSibling))for(;M!==this&&!(m=M.$$nextSibling);)M=M.$parent}while(M=m);if((r||O.length)&&!x--)throw s.$$phase=null,a("infdig",b,t);}while(r|| O.length);for(s.$$phase=null;E.length;)try{E.shift()()}catch(y){f(y)}},$destroy:function(){if(!this.$$destroyed){var a=this.$parent;this.$broadcast("$destroy");this.$$destroyed=!0;if(this!==s){for(var b in this.$$listenerCount)m(this,this.$$listenerCount[b],b);a.$$childHead==this&&(a.$$childHead=this.$$nextSibling);a.$$childTail==this&&(a.$$childTail=this.$$prevSibling);this.$$prevSibling&&(this.$$prevSibling.$$nextSibling=this.$$nextSibling);this.$$nextSibling&&(this.$$nextSibling.$$prevSibling= this.$$prevSibling);this.$destroy=this.$digest=this.$apply=this.$evalAsync=this.$applyAsync=w;this.$on=this.$watch=this.$watchGroup=function(){return w};this.$$listeners={};this.$parent=this.$$nextSibling=this.$$prevSibling=this.$$childHead=this.$$childTail=this.$root=this.$$watchers=null}}},$eval:function(a,b){return g(a)(this,b)},$evalAsync:function(a){s.$$phase||O.length||h.defer(function(){O.length&&s.$digest()});O.push({scope:this,expression:a})},$$postDigest:function(a){E.push(a)},$apply:function(a){try{return l("$apply"), this.$eval(a)}catch(b){f(b)}finally{s.$$phase=null;try{s.$digest()}catch(c){throw f(c),c;}}},$applyAsync:function(a){function b(){c.$eval(a)}var c=this;a&&x.push(b);n()},$on:function(a,b){var c=this.$$listeners[a];c||(this.$$listeners[a]=c=[]);c.push(b);var d=this;do d.$$listenerCount[a]||(d.$$listenerCount[a]=0),d.$$listenerCount[a]++;while(d=d.$parent);var e=this;return function(){var d=c.indexOf(b);-1!==d&&(c[d]=null,m(e,1,a))}},$emit:function(a,b){var c=[],d,e=this,g=!1,h={name:a,targetScope:e, 
stopPropagation:function(){g=!0},preventDefault:function(){h.defaultPrevented=!0},defaultPrevented:!1},k=Xa([h],arguments,1),l,m;do{d=e.$$listeners[a]||c;h.currentScope=e;l=0;for(m=d.length;lHa)throw Ba("iequirks");var d=ta(ka);d.isEnabled=function(){return b};d.trustAs=c.trustAs;d.getTrusted=c.getTrusted;d.valueOf=c.valueOf;b||(d.trustAs=d.getTrusted=function(a,b){return b},d.valueOf=ma);d.parseAs=function(b,c){var e=a(c);return e.literal&&e.constant?e:a(c,function(a){return d.getTrusted(b,a)})};var e=d.parseAs,f=d.getTrusted,g=d.trustAs;r(ka,function(a,b){var c=Q(b);d[bb("parse_as_"+c)]=function(b){return e(a, b)};d[bb("get_trusted_"+c)]=function(b){return f(a,b)};d[bb("trust_as_"+c)]=function(b){return g(a,b)}});return d}]}function Se(){this.$get=["$window","$document",function(b,a){var c={},d=aa((/android (\d+)/.exec(Q((b.navigator||{}).userAgent))||[])[1]),e=/Boxee/i.test((b.navigator||{}).userAgent),f=a[0]||{},g,h=/^(Moz|webkit|ms)(?=[A-Z])/,k=f.body&&f.body.style,l=!1,m=!1;if(k){for(var p in k)if(l=h.exec(p)){g=l[0];g=g.substr(0,1).toUpperCase()+g.substr(1);break}g||(g="WebkitOpacity"in k&&"webkit"); l=!!("transition"in k||g+"Transition"in k);m=!!("animation"in k||g+"Animation"in k);!d||l&&m||(l=I(f.body.style.webkitTransition),m=I(f.body.style.webkitAnimation))}return{history:!(!b.history||!b.history.pushState||4>d||e),hasEvent:function(a){if("input"==a&&9==Ha)return!1;if(D(c[a])){var b=f.createElement("div");c[a]="on"+a in b}return c[a]},csp:$a(),vendorPrefix:g,transitions:l,animations:m,android:d}}]}function Ue(){this.$get=["$templateCache","$http","$q",function(b,a,c){function d(e,f){d.totalPendingRequests++; var g=a.defaults&&a.defaults.transformResponse;if(G(g))for(var h=g,g=[],k=0;kb;b=Math.abs(b);var g=b+"",h="",k=[],l=!1;if(-1!==g.indexOf("e")){var m=g.match(/([\d\.]+)e(-?)(\d+)/);m&&"-"==m[2]&&m[3]>e+1?(g="0",b=0):(h=g,l=!0)}if(l)0b&&(h=b.toFixed(e));else{g=(g.split(md)[1]||"").length;D(e)&&(e=Math.min(Math.max(a.minFrac, 
g),a.maxFrac));b=+(Math.round(+(b.toString()+"e"+e)).toString()+"e"+-e);0===b&&(f=!1);b=(""+b).split(md);g=b[0];b=b[1]||"";var m=0,p=a.lgSize,q=a.gSize;if(g.length>=p+q)for(m=g.length-p,l=0;lb&&(d="-",b=-b);for(b= ""+b;b.length-c)e+=c;0===e&&-12==c&&(e=12);return Cb(e,a,d)}}function Db(b,a){return function(c,d){var e=c["get"+b](),f=rb(a?"SHORT"+b:b);return d[f][e]}}function nd(b){var a=(new Date(b,0,1)).getDay();return new Date(b,0,(4>=a?5:12)-a)}function od(b){return function(a){var c=nd(a.getFullYear());a=+new Date(a.getFullYear(),a.getMonth(),a.getDate()+(4-a.getDay()))-+c;a=1+Math.round(a/ 6048E5);return Cb(a,b)}}function id(b){function a(a){var b;if(b=a.match(c)){a=new Date(0);var f=0,g=0,h=b[8]?a.setUTCFullYear:a.setFullYear,k=b[8]?a.setUTCHours:a.setHours;b[9]&&(f=aa(b[9]+b[10]),g=aa(b[9]+b[11]));h.call(a,aa(b[1]),aa(b[2])-1,aa(b[3]));f=aa(b[4]||0)-f;g=aa(b[5]||0)-g;h=aa(b[6]||0);b=Math.round(1E3*parseFloat("0."+(b[7]||0)));k.call(a,f,g,h,b)}return a}var c=/^(\d{4})-?(\d\d)-?(\d\d)(?:T(\d\d)(?::?(\d\d)(?::?(\d\d)(?:\.(\d+))?)?)?(Z|([+-])(\d\d):?(\d\d))?)?$/;return function(c,e,f){var g= "",h=[],k,l;e=e||"mediumDate";e=b.DATETIME_FORMATS[e]||e;I(c)&&(c=Hf.test(c)?aa(c):a(c));W(c)&&(c=new Date(c));if(!ea(c))return c;for(;e;)(l=If.exec(e))?(h=Xa(h,l,1),e=h.pop()):(h.push(e),e=null);f&&"UTC"===f&&(c=new Date(c.getTime()),c.setMinutes(c.getMinutes()+c.getTimezoneOffset()));r(h,function(a){k=Jf[a];g+=k?k(c,b.DATETIME_FORMATS):a.replace(/(^'|'$)/g,"").replace(/''/g,"'")});return g}}function Df(){return function(b){return Za(b,!0)}}function Ef(){return function(b,a){W(b)&&(b=b.toString()); if(!G(b)&&!I(b))return b;a=Infinity===Math.abs(Number(a))?Number(a):aa(a);if(I(b))return a?0<=a?b.slice(0,a):b.slice(a,b.length):"";var c=[],d,e;a>b.length?a=b.length:a<-b.length&&(a=-b.length);0b||37<=b&&40>=b||q(a)});if(e.hasEvent("paste"))a.on("paste cut",q)}a.on("change",m);d.$render=function(){a.val(d.$isEmpty(d.$modelValue)?"":d.$viewValue)}}function Gb(b,a){return 
function(c,d){var e,f;if(ea(c))return c;if(I(c)){'"'==c.charAt(0)&&'"'==c.charAt(c.length-1)&&(c=c.substring(1,c.length-1));if(Kf.test(c))return new Date(c);b.lastIndex=0;if(e=b.exec(c))return e.shift(),f=d?{yyyy:d.getFullYear(),MM:d.getMonth()+1,dd:d.getDate(), HH:d.getHours(),mm:d.getMinutes(),ss:d.getSeconds(),sss:d.getMilliseconds()/1E3}:{yyyy:1970,MM:1,dd:1,HH:0,mm:0,ss:0,sss:0},r(e,function(b,c){c=s};g.$observe("min",function(a){s=p(a);h.$validate()})}if(A(g.max)||g.ngMax){var r;h.$validators.max=function(a){return h.$isEmpty(a)||D(r)||c(a)<= r};g.$observe("max",function(a){r=p(a);h.$validate()})}h.$isEmpty=function(a){return!a||a.getTime&&a.getTime()!==a.getTime()}}}function rd(b,a,c,d){(d.$$hasNativeValidators=L(a[0].validity))&&d.$parsers.push(function(b){var c=a.prop("validity")||{};return c.badInput&&!c.typeMismatch?t:b})}function sd(b,a,c,d,e){if(A(d)){b=b(d);if(!b.constant)throw v("ngModel")("constexpr",c,d);return b(a)}return e}function qd(b){function a(a,b){b&&!f[a]?(l.addClass(e,a),f[a]=!0):!b&&f[a]&&(l.removeClass(e,a),f[a]= !1)}function c(b,c){b=b?"-"+Mb(b,"-"):"";a(ib+b,!0===c);a(td+b,!1===c)}var d=b.ctrl,e=b.$element,f={},g=b.set,h=b.unset,k=b.parentForm,l=b.$animate;f[td]=!(f[ib]=e.hasClass(ib));d.$setValidity=function(b,e,f){e===t?(d.$pending||(d.$pending={}),g(d.$pending,b,f)):(d.$pending&&h(d.$pending,b,f),ud(d.$pending)&&(d.$pending=t));Ua(e)?e?(h(d.$error,b,f),g(d.$$success,b,f)):(g(d.$error,b,f),h(d.$$success,b,f)):(h(d.$error,b,f),h(d.$$success,b,f));d.$pending?(a(vd,!0),d.$valid=d.$invalid=t,c("",null)):(a(vd, !1),d.$valid=ud(d.$error),d.$invalid=!d.$valid,c("",d.$valid));e=d.$pending&&d.$pending[b]?t:d.$error[b]?!1:d.$$success[b]?!0:null;c(b,e);k.$setValidity(b,e,d)}}function ud(b){if(b)for(var a in b)return!1;return!0}function ic(b,a){b="ngClass"+b;return["$animate",function(c){function d(a,b){var 
c=[],d=0;a:for(;d(?:<\/\1>|)$/,Pb=/<|&#?\w+;/,bf=/<([\w:]+)/,cf=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/gi,ha={option:[1,'"],thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"], td:[3,"","
"],_default:[0,"",""]};ha.optgroup=ha.option;ha.tbody=ha.tfoot=ha.colgroup=ha.caption=ha.thead;ha.th=ha.td;var Ka=R.prototype={ready:function(b){function a(){c||(c=!0,b())}var c=!1;"complete"===U.readyState?setTimeout(a):(this.on("DOMContentLoaded",a),R(T).on("load",a))},toString:function(){var b=[];r(this,function(a){b.push(""+a)});return"["+b.join(", ")+"]"},eq:function(b){return 0<=b?y(this[b]):y(this[this.length+b])},length:0,push:Mf,sort:[].sort, splice:[].splice},yb={};r("multiple selected checked disabled readOnly required open".split(" "),function(b){yb[Q(b)]=b});var Lc={};r("input select option textarea button form details".split(" "),function(b){Lc[b]=!0});var Mc={ngMinlength:"minlength",ngMaxlength:"maxlength",ngMin:"min",ngMax:"max",ngPattern:"pattern"};r({data:Sb,removeData:ub},function(b,a){R[a]=b});r({data:Sb,inheritedData:xb,scope:function(b){return y.data(b,"$scope")||xb(b.parentNode||b,["$isolateScope","$scope"])},isolateScope:function(b){return y.data(b, "$isolateScope")||y.data(b,"$isolateScopeNoTemplate")},controller:Hc,injector:function(b){return xb(b,"$injector")},removeAttr:function(b,a){b.removeAttribute(a)},hasClass:Tb,css:function(b,a,c){a=bb(a);if(A(c))b.style[a]=c;else return b.style[a]},attr:function(b,a,c){var d=Q(a);if(yb[d])if(A(c))c?(b[a]=!0,b.setAttribute(a,d)):(b[a]=!1,b.removeAttribute(d));else return b[a]||(b.attributes.getNamedItem(a)||w).specified?d:t;else if(A(c))b.setAttribute(a,c);else if(b.getAttribute)return b=b.getAttribute(a, 2),null===b?t:b},prop:function(b,a,c){if(A(c))b[a]=c;else return b[a]},text:function(){function b(a,b){if(D(b)){var d=a.nodeType;return d===la||d===mb?a.textContent:""}a.textContent=b}b.$dv="";return b}(),val:function(b,a){if(D(a)){if(b.multiple&&"select"===sa(b)){var c=[];r(b.options,function(a){a.selected&&c.push(a.value||a.text)});return 0===c.length?null:c}return b.value}b.value=a},html:function(b,a){if(D(a))return 
b.innerHTML;tb(b,!0);b.innerHTML=a},empty:Ic},function(b,a){R.prototype[a]=function(a, d){var e,f,g=this.length;if(b!==Ic&&(2==b.length&&b!==Tb&&b!==Hc?a:d)===t){if(L(a)){for(e=0;e":function(a,c,d,e){return d(a,c)>e(a,c)},"<=":function(a,c,d,e){return d(a,c)<=e(a,c)},">=":function(a,c,d,e){return d(a,c)>=e(a,c)},"&&":function(a, c,d,e){return d(a,c)&&e(a,c)},"||":function(a,c,d,e){return d(a,c)||e(a,c)},"!":function(a,c,d){return!d(a,c)},"=":!0,"|":!0}),Uf={n:"\n",f:"\f",r:"\r",t:"\t",v:"\v","'":"'",'"':'"'},gc=function(a){this.options=a};gc.prototype={constructor:gc,lex:function(a){this.text=a;this.index=0;for(this.tokens=[];this.index=a&&"string"===typeof a},isWhitespace:function(a){return" "===a||"\r"===a||"\t"===a||"\n"===a||"\v"===a||"\u00a0"===a},isIdent:function(a){return"a"<=a&&"z">=a||"A"<=a&&"Z">=a||"_"===a||"$"===a},isExpOperator:function(a){return"-"===a||"+"===a||this.isNumber(a)},throwError:function(a,c,d){d=d||this.index;c=A(c)?"s "+c+"-"+this.index+" ["+this.text.substring(c,d)+"]":" "+d;throw ja("lexerr",a,c,this.text);},readNumber:function(){for(var a="",c=this.index;this.index< this.text.length;){var d=Q(this.text.charAt(this.index));if("."==d||this.isNumber(d))a+=d;else{var e=this.peek();if("e"==d&&this.isExpOperator(e))a+=d;else if(this.isExpOperator(d)&&e&&this.isNumber(e)&&"e"==a.charAt(a.length-1))a+=d;else if(!this.isExpOperator(d)||e&&this.isNumber(e)||"e"!=a.charAt(a.length-1))break;else this.throwError("Invalid exponent")}this.index++}this.tokens.push({index:c,text:a,constant:!0,value:Number(a)})},readIdent:function(){for(var a=this.index;this.indexa){a=this.tokens[a];var g=a.text;if(g===c||g===d||g===e||g===f||!(c||d||e||f))return a}return!1},expect:function(a,c,d,e){return(a= this.peek(a,c,d,e))?(this.tokens.shift(),a):!1},consume:function(a){if(0===this.tokens.length)throw ja("ueoe",this.text);var c=this.expect(a);c||this.throwError("is unexpected, expecting ["+a+"]",this.peek());return c},unaryFn:function(a,c){var d=jb[a];return 
H(function(a,f){return d(a,f,c)},{constant:c.constant,inputs:[c]})},binaryFn:function(a,c,d,e){var f=jb[c];return H(function(c,e){return f(c,e,a,d)},{constant:a.constant&&d.constant,inputs:!e&&[a,d]})},identifier:function(){for(var a=this.consume().text;this.peek(".")&& this.peekAhead(1).identifier&&!this.peekAhead(2,"(");)a+=this.consume().text+this.consume().text;return Ib[a]||cd(a,this.options,this.text)},constant:function(){var a=this.consume().value;return H(function(){return a},{constant:!0,literal:!0})},statements:function(){for(var a=[];;)if(0","<=",">="))a=this.binaryFn(a,c.text,this.relational());return a},additive:function(){for(var a= this.multiplicative(),c;c=this.expect("+","-");)a=this.binaryFn(a,c.text,this.multiplicative());return a},multiplicative:function(){for(var a=this.unary(),c;c=this.expect("*","/","%");)a=this.binaryFn(a,c.text,this.unary());return a},unary:function(){var a;return this.expect("+")?this.primary():(a=this.expect("-"))?this.binaryFn(fb.ZERO,a.text,this.unary()):(a=this.expect("!"))?this.unaryFn(a.text,this.unary()):this.primary()},fieldAccess:function(a){var c=this.text,d=this.consume().text,e=cd(d,this.options, c);return H(function(c,d,h){return e(h||a(c,d))},{assign:function(e,g,h){(h=a(e,h))||a.assign(e,h={});return Oa(h,d,g,c)}})},objectIndex:function(a){var c=this.text,d=this.expression();this.consume("]");return H(function(e,f){var g=a(e,f),h=d(e,f);qa(h,c);return g?ra(g[h],c):t},{assign:function(e,f,g){var h=qa(d(e,g),c);(g=ra(a(e,g),c))||a.assign(e,g={});return g[h]=f}})},functionCall:function(a,c){var d=[];if(")"!==this.peekToken().text){do d.push(this.expression());while(this.expect(","))}this.consume(")"); var e=this.text,f=d.length?[]:null;return function(g,h){var k=c?c(g,h):g,l=a(g,h,k)||w;if(f)for(var m=d.length;m--;)f[m]=ra(d[m](g,h),e);ra(k,e);if(l){if(l.constructor===l)throw ja("isecfn",e);if(l===Rf||l===Sf||l===Tf)throw ja("isecff",e);}k=l.apply?l.apply(k,f):l(f[0],f[1],f[2],f[3],f[4]);return 
ra(k,e)}},arrayDeclaration:function(){var a=[];if("]"!==this.peekToken().text){do{if(this.peek("]"))break;a.push(this.expression())}while(this.expect(","))}this.consume("]");return H(function(c,d){for(var e= [],f=0,g=a.length;fa.getHours()?c.AMPMS[0]:c.AMPMS[1]},Z:function(a){a=-1*a.getTimezoneOffset();return a=(0<=a?"+":"")+(Cb(Math[0=h};d.$observe("min",function(a){A(a)&&!W(a)&&(a=parseFloat(a, 10));h=W(a)&&!isNaN(a)?a:t;e.$validate()})}if(d.max||d.ngMax){var k;e.$validators.max=function(a){return e.$isEmpty(a)||D(k)||a<=k};d.$observe("max",function(a){A(a)&&!W(a)&&(a=parseFloat(a,10));k=W(a)&&!isNaN(a)?a:t;e.$validate()})}},url:function(a,c,d,e,f,g){gb(a,c,d,e,f,g);hc(e);e.$$parserName="url";e.$validators.url=function(a){return e.$isEmpty(a)||Vf.test(a)}},email:function(a,c,d,e,f,g){gb(a,c,d,e,f,g);hc(e);e.$$parserName="email";e.$validators.email=function(a){return e.$isEmpty(a)||Wf.test(a)}}, radio:function(a,c,d,e){D(d.name)&&c.attr("name",++kb);c.on("click",function(a){c[0].checked&&e.$setViewValue(d.value,a&&a.type)});e.$render=function(){c[0].checked=d.value==e.$viewValue};d.$observe("value",e.$render)},checkbox:function(a,c,d,e,f,g,h,k){var l=sd(k,a,"ngTrueValue",d.ngTrueValue,!0),m=sd(k,a,"ngFalseValue",d.ngFalseValue,!1);c.on("click",function(a){e.$setViewValue(c[0].checked,a&&a.type)});e.$render=function(){c[0].checked=e.$viewValue};e.$isEmpty=function(a){return a!==l};e.$formatters.push(function(a){return na(a, l)});e.$parsers.push(function(a){return a?l:m})},hidden:w,button:w,submit:w,reset:w,file:w},wc=["$browser","$sniffer","$filter","$parse",function(a,c,d,e){return{restrict:"E",require:["?ngModel"],link:{pre:function(f,g,h,k){k[0]&&(Bd[Q(h.type)]||Bd.text)(f,g,h,k[0],c,a,d,e)}}}}],ib="ng-valid",td="ng-invalid",Qa="ng-pristine",Fb="ng-dirty",vd="ng-pending",Zf=["$scope","$exceptionHandler","$attrs","$element","$parse","$animate","$timeout","$rootScope","$q","$interpolate",function(a,c,d,e,f,g,h,k,l, 
m){this.$modelValue=this.$viewValue=Number.NaN;this.$validators={};this.$asyncValidators={};this.$parsers=[];this.$formatters=[];this.$viewChangeListeners=[];this.$untouched=!0;this.$touched=!1;this.$pristine=!0;this.$dirty=!1;this.$valid=!0;this.$invalid=!1;this.$error={};this.$$success={};this.$pending=t;this.$name=m(d.name||"",!1)(a);var p=f(d.ngModel),q=null,n=this,s=function(){var c=p(a);n.$options&&n.$options.getterSetter&&u(c)&&(c=c());return c},O=function(c){var d;n.$options&&n.$options.getterSetter&& u(d=p(a))?d(n.$modelValue):p.assign(a,n.$modelValue)};this.$$setOptions=function(a){n.$options=a;if(!(p.assign||a&&a.getterSetter))throw Hb("nonassign",d.ngModel,ua(e));};this.$render=w;this.$isEmpty=function(a){return D(a)||""===a||null===a||a!==a};var E=e.inheritedData("$formController")||Eb,x=0;qd({ctrl:this,$element:e,set:function(a,c){a[c]=!0},unset:function(a,c){delete a[c]},parentForm:E,$animate:g});this.$setPristine=function(){n.$dirty=!1;n.$pristine=!0;g.removeClass(e,Fb);g.addClass(e,Qa)}; this.$setUntouched=function(){n.$touched=!1;n.$untouched=!0;g.setClass(e,"ng-untouched","ng-touched")};this.$setTouched=function(){n.$touched=!0;n.$untouched=!1;g.setClass(e,"ng-touched","ng-untouched")};this.$rollbackViewValue=function(){h.cancel(q);n.$viewValue=n.$$lastCommittedViewValue;n.$render()};this.$validate=function(){W(n.$modelValue)&&isNaN(n.$modelValue)||this.$$parseAndValidate()};this.$$runValidators=function(a,c,d,e){function f(){var a=!0;r(n.$validators,function(e,f){var g=e(c,d); a=a&&g;h(f,g)});return a?!0:(r(n.$asyncValidators,function(a,c){h(c,null)}),!1)}function g(){var a=[],e=!0;r(n.$asyncValidators,function(f,g){var k=f(c,d);if(!k||!u(k.then))throw Hb("$asyncValidators",k);h(g,t);a.push(k.then(function(){h(g,!0)},function(a){e=!1;h(g,!1)}))});a.length?l.all(a).then(function(){k(e)},w):k(!0)}function h(a,c){m===x&&n.$setValidity(a,c)}function k(a){m===x&&e(a)}x++;var m=x;(function(a){var c=n.$$parserName||"parse";if(a===t)h(c,null);else 
if(h(c,a),!a)return r(n.$validators, function(a,c){h(c,null)}),r(n.$asyncValidators,function(a,c){h(c,null)}),!1;return!0})(a)?f()?g():k(!1):k(!1)};this.$commitViewValue=function(){var a=n.$viewValue;h.cancel(q);if(n.$$lastCommittedViewValue!==a||""===a&&n.$$hasNativeValidators)n.$$lastCommittedViewValue=a,n.$pristine&&(n.$dirty=!0,n.$pristine=!1,g.removeClass(e,Qa),g.addClass(e,Fb),E.$setDirty()),this.$$parseAndValidate()};this.$$parseAndValidate=function(){var a=n.$$lastCommittedViewValue,c=a,d=D(c)?t:!0;if(d)for(var e=0;e=f}}}}},ue=function(){return{restrict:"A",priority:100,require:"ngModel",link:function(a,c,d,e){var f=c.attr(d.$attr.ngList)||", ",g="false"!==d.ngTrim,h=g?P(f):f;e.$parsers.push(function(a){if(!D(a)){var c=[];a&&r(a.split(h),function(a){a&&c.push(g?P(a):a)});return c}}); e.$formatters.push(function(a){return G(a)?a.join(f):t});e.$isEmpty=function(a){return!a||!a.length}}}},$f=/^(true|false|\d+)$/,we=function(){return{restrict:"A",priority:100,compile:function(a,c){return $f.test(c.ngValue)?function(a,c,f){f.$set("value",a.$eval(f.ngValue))}:function(a,c,f){a.$watch(f.ngValue,function(a){f.$set("value",a)})}}}},xe=function(){return{restrict:"A",controller:["$scope","$attrs",function(a,c){var d=this;this.$options=a.$eval(c.ngModelOptions);this.$options.updateOn!==t? 
(this.$options.updateOnDefault=!1,this.$options.updateOn=P(this.$options.updateOn.replace(Yf,function(){d.$options.updateOnDefault=!0;return" "}))):this.$options.updateOnDefault=!0}]}},Xd=["$compile",function(a){return{restrict:"AC",compile:function(c){a.$$addBindingClass(c);return function(c,e,f){a.$$addBindingInfo(e,f.ngBind);e=e[0];c.$watch(f.ngBind,function(a){e.textContent=a===t?"":a})}}}}],Zd=["$interpolate","$compile",function(a,c){return{compile:function(d){c.$$addBindingClass(d);return function(d, f,g){d=a(f.attr(g.$attr.ngBindTemplate));c.$$addBindingInfo(f,d.expressions);f=f[0];g.$observe("ngBindTemplate",function(a){f.textContent=a===t?"":a})}}}}],Yd=["$sce","$parse","$compile",function(a,c,d){return{restrict:"A",compile:function(e,f){var g=c(f.ngBindHtml),h=c(f.ngBindHtml,function(a){return(a||"").toString()});d.$$addBindingClass(e);return function(c,e,f){d.$$addBindingInfo(e,f.ngBindHtml);c.$watch(h,function(){e.html(a.getTrustedHtml(g(c))||"")})}}}}],$d=ic("",!0),be=ic("Odd",0),ae=ic("Even", 1),ce=Ia({compile:function(a,c){c.$set("ngCloak",t);a.removeClass("ng-cloak")}}),de=[function(){return{restrict:"A",scope:!0,controller:"@",priority:500}}],Bc={},ag={blur:!0,focus:!0};r("click dblclick mousedown mouseup mouseover mouseout mousemove mouseenter mouseleave keydown keyup keypress submit focus blur copy cut paste".split(" "),function(a){var c=wa("ng-"+a);Bc[c]=["$parse","$rootScope",function(d,e){return{restrict:"A",compile:function(f,g){var h=d(g[c],null,!0);return function(c,d){d.on(a, function(d){var f=function(){h(c,{$event:d})};ag[a]&&e.$$phase?c.$evalAsync(f):c.$apply(f)})}}}}]});var ge=["$animate",function(a){return{multiElement:!0,transclude:"element",priority:600,terminal:!0,restrict:"A",$$tlb:!0,link:function(c,d,e,f,g){var h,k,l;c.$watch(e.ngIf,function(c){c?k||g(function(c,f){k=f;c[c.length++]=U.createComment(" end ngIf: "+e.ngIf+" 
");h={clone:c};a.enter(c,d.parent(),d)}):(l&&(l.remove(),l=null),k&&(k.$destroy(),k=null),h&&(l=qb(h.clone),a.leave(l).then(function(){l=null}), h=null))})}}}],he=["$templateRequest","$anchorScroll","$animate","$sce",function(a,c,d,e){return{restrict:"ECA",priority:400,terminal:!0,transclude:"element",controller:va.noop,compile:function(f,g){var h=g.ngInclude||g.src,k=g.onload||"",l=g.autoscroll;return function(f,g,q,n,r){var t=0,E,x,B,v=function(){x&&(x.remove(),x=null);E&&(E.$destroy(),E=null);B&&(d.leave(B).then(function(){x=null}),x=B,B=null)};f.$watch(e.parseAsResourceUrl(h),function(e){var h=function(){!A(l)||l&&!f.$eval(l)||c()},q= ++t;e?(a(e,!0).then(function(a){if(q===t){var c=f.$new();n.template=a;a=r(c,function(a){v();d.enter(a,null,g).then(h)});E=c;B=a;E.$emit("$includeContentLoaded",e);f.$eval(k)}},function(){q===t&&(v(),f.$emit("$includeContentError",e))}),f.$emit("$includeContentRequested",e)):(v(),n.template=null)})}}}}],ye=["$compile",function(a){return{restrict:"ECA",priority:-400,require:"ngInclude",link:function(c,d,e,f){/SVG/.test(d[0].toString())?(d.empty(),a(Ec(f.template,U).childNodes)(c,function(a){d.append(a)}, {futureParentElement:d})):(d.html(f.template),a(d.contents())(c))}}}],ie=Ia({priority:450,compile:function(){return{pre:function(a,c,d){a.$eval(d.ngInit)}}}}),je=Ia({terminal:!0,priority:1E3}),ke=["$locale","$interpolate",function(a,c){var d=/{}/g;return{restrict:"EA",link:function(e,f,g){var h=g.count,k=g.$attr.when&&f.attr(g.$attr.when),l=g.offset||0,m=e.$eval(k)||{},p={},q=c.startSymbol(),n=c.endSymbol(),s=/^when(Minus)?(.+)$/;r(g,function(a,c){s.test(c)&&(m[Q(c.replace("when","").replace("Minus", "-"))]=f.attr(g.$attr[c]))});r(m,function(a,e){p[e]=c(a.replace(d,q+h+"-"+l+n))});e.$watch(function(){var c=parseFloat(e.$eval(h));if(isNaN(c))return"";c in m||(c=a.pluralCat(c-l));return p[c](e)},function(a){f.text(a)})}}}],le=["$parse","$animate",function(a,c){var 
d=v("ngRepeat"),e=function(a,c,d,e,l,m,p){a[d]=e;l&&(a[l]=m);a.$index=c;a.$first=0===c;a.$last=c===p-1;a.$middle=!(a.$first||a.$last);a.$odd=!(a.$even=0===(c&1))};return{restrict:"A",multiElement:!0,transclude:"element",priority:1E3,terminal:!0, $$tlb:!0,compile:function(f,g){var h=g.ngRepeat,k=U.createComment(" end ngRepeat: "+h+" "),l=h.match(/^\s*([\s\S]+?)\s+in\s+([\s\S]+?)(?:\s+as\s+([\s\S]+?))?(?:\s+track\s+by\s+([\s\S]+?))?\s*$/);if(!l)throw d("iexp",h);var m=l[1],p=l[2],q=l[3],n=l[4],l=m.match(/^(?:([\$\w]+)|\(([\$\w]+)\s*,\s*([\$\w]+)\))$/);if(!l)throw d("iidexp",m);var s=l[3]||l[1],A=l[2];if(q&&(!/^[$a-zA-Z_][$a-zA-Z0-9_]*$/.test(q)||/^(null|undefined|this|\$index|\$first|\$middle|\$last|\$even|\$odd|\$parent)$/.test(q)))throw d("badident", q);var v,x,B,J,z={$id:Ma};n?v=a(n):(B=function(a,c){return Ma(c)},J=function(a){return a});return function(a,f,g,l,n){v&&(x=function(c,d,e){A&&(z[A]=c);z[s]=d;z.$index=e;return v(a,z)});var m=pa();a.$watchCollection(p,function(g){var l,p,C=f[0],v,z=pa(),E,H,w,D,G,u,I;q&&(a[q]=g);if(Ra(g))G=g,p=x||B;else{p=x||J;G=[];for(I in g)g.hasOwnProperty(I)&&"$"!=I.charAt(0)&&G.push(I);G.sort()}E=G.length;I=Array(E);for(l=0;lE;)d=t.pop(),m(S,d.label,!1),d.element.remove(); r(S,function(a,c){0a&&q.removeOption(c)})}for(;Q.length>w;)Q.pop()[0].element.remove()}var u;if(!(u=s.match(d)))throw bg("iexp",s,ua(f));var D=c(u[2]||u[1]),z=u[4]||u[6],y=/ as /.test(u[0])&&u[1],w=y?c(y):null,F=u[5],I=c(u[3]||""),E=c(u[2]?u[1]:z),N=c(u[7]),L=u[8]?c(u[8]):null,R={},Q=[[{element:f,label:""}]],T={};v&&(a(v)(e),v.removeClass("ng-scope"),v.remove());f.empty();f.on("change",function(){e.$apply(function(){var a=N(e)||[],c;if(n)c=[],r(f.val(),function(d){d=L?R[d]:d;c.push("?"=== d?t:""===d?null:h(w?w:E,d,a[d]))});else{var d=L?R[f.val()]:f.val();c="?"===d?t:""===d?null:h(w?w:E,d,a[d])}g.$setViewValue(c);p()})});g.$render=p;e.$watchCollection(N,l);e.$watchCollection(function(){var a=N(e),c;if(a&&G(a)){c=Array(a.length);for(var d=0,f=a.length;d@charset 
"UTF-8";[ng\\:cloak],[ng-cloak],[data-ng-cloak],[x-ng-cloak],.ng-cloak,.x-ng-cloak,.ng-hide:not(.ng-hide-animate){display:none !important;}ng\\:form{display:block;}'); //# sourceMappingURL=angular.min.js.map rally-0.9.1/rally/ui/templates/verification/0000775000567000056710000000000013073420067022227 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/verification/report.html0000664000567000056710000002770513073417716024452 0ustar jenkinsjenkins00000000000000{% extends "/base.html" %} {% block html_attr %} ng-app="App" ng-controller="Controller" id="page-html"{% endblock %} {% block title_text %}{% raw %}{{title}}{% endraw %}{% endblock %} {% block libs %} {% if include_libs %} {% else %} {% endif %} {% endblock %} {% block js_before %} "use strict"; {{ include_raw_file("/task/directive_widget.js") }} var controllerFunction = function($scope, $location) { $scope.data = {{ data }}; /* Calculate columns width in percent */ var td_ctr_width = 4; var td_result_width = Math.round(1 / ($scope.data.uuids.length+3) * 100); $scope.td_width_ = { counter: td_ctr_width, test_name: (100 - td_ctr_width - (td_result_width * $scope.data.uuids.length)), test_result: td_result_width } $scope.td_width = (function(vers_num) { var uuid_w = Math.round(1 / (vers_num+3) * 100); return {test: 100 - (uuid_w * vers_num), uuid: uuid_w} })($scope.data.uuids.length) var bitmask = {"success": 1, "skip": 2, "xfail": 4, "uxsuccess": 8, "fail": 16}; for (var i in $scope.data.tests) { var t = $scope.data.tests[i]; var bits = 0; for (var uuid in t.by_verification) { var status = t.by_verification[uuid].status; if (status in bitmask) { bits |= bitmask[status] } } $scope.data.tests[i].filter = bits; } $scope.set_filter = function(status) { if (status in $scope.state) { $scope.state[status] = !$scope.state[status]; $scope.filter_bits ^= bitmask[status] } } $scope.state = {"success": true, "skip": true, "xfail": true, "uxsuccess": true, "fail": true}; $scope.filter_by_status = 
function(test, index, arr) { return test.filter & $scope.filter_bits } $scope.filter_bits = (function(filter){ var bits = 0; for (var status in $scope.state){ if ($scope.state[status]) { bits ^= bitmask[status] } } return bits })(); $scope.toggle_filters_flag = true; $scope.toggle_filters = function() { if ($scope.toggle_filters_flag) { $scope.toggle_filters_flag = false; $scope.state = {"success": false, "skip": false, "xfail": false, "uxsuccess": false, "fail": false}; $scope.filter_bits = 0 } else { $scope.toggle_filters_flag = true $scope.state = {"success": true, "skip": true, "xfail": true, "uxsuccess": true, "fail": true}; $scope.filter_bits = 31 } } var title = "verification result"; if ($scope.data.uuids.length > 1) { title = "verifications results" } $scope.title = title; $scope.srt_dir = false; $scope.get_tests_count = function() { var ctr = 0; for (var i in $scope.data.tests) { if ($scope.data.tests[i].filter & $scope.filter_bits) { ctr++ } } return ctr } var title = angular.element(document.getElementById("page-header")); var header = angular.element(document.getElementById("content-header")); var tests = angular.element(document.getElementById("tests")); var sync_positions = function() { var title_h = title[0].offsetHeight; var header_h = header[0].offsetHeight; header.css({top:title_h+"px"}) tests.css({"margin-top": (title_h+header_h)+"px"}); } /* Make page head sticky */ window.onload = function() { title.css({position:"fixed", top:0, width:"100%"}); header.css({position:"fixed", width:"100%", background:"#fff"}); sync_positions(); window.onresize = sync_positions; var goup = document.getElementById("button-goup"); goup.onclick = function () { scrollTo(0, 0) }; window.onscroll = function() { if (window.scrollY > 50) { goup.style.display = "block"; } else { goup.style.display = "none"; } } } $scope.show_header = true; $scope.toggle_header = (function(e) { return function() { $scope.show_header = (e.style.display === "none"); e.style.display = 
$scope.show_header ? "table" : "none"; sync_positions() } })(document.getElementById("verifications")) }; if (typeof angular === "object") { angular.module("App", []) .controller("Controller", ["$scope", "$location", controllerFunction]) .directive("widget", widgetDirective) } {% endblock %} {% block css %} div.header {margin:0 !important} div.header .content-wrap { padding-left:10px } .status.status-success { background: #cfc; color: #333 } .status.status-uxsuccess { background: #ffd7af; color: #333 } .status.status-fail { background: #fbb; color: #333 } .status.status-xfail { background: #ffb; color: #333 } .status.status-skip { background: #ccf5ff; color: #333 } .status.checkbox { font-size:18px; text-align:center; cursor:pointer; padding:0 } .column { display:block; float:left; padding:4px 0 4px 8px; box-sizing:border-box; background:#fff; font-size:12px; font-weight:bold; border:#ccc solid; border-width:0 0 1px } .button { margin:0 5px; padding:0 8px 1px; background:#47a; color:#fff; cursor:pointer; border:1px #036 solid; border-radius:11px; font-size:12px; font-weight:normal; opacity:.8} .button:hover { opacity:1 } #button-goup { padding:3px 10px 5px; text-align:center; cursor:pointer; background:#fff; color:#036; line-height:14px; font-size:14px; position:fixed; bottom:0; right:10px; border:#ccc solid; border-width:1px 1px 0; border-radius:15px 15px 0 0} {% endblock %} {% block css_content_wrap %}width:100%; padding:0{% endblock %} {% block body_attr %} id="page-body" style="position:relative"{% endblock %} {% block header_text %}{% raw %}{{title}}{% endraw %}{% endblock %} {% block content %} {% raw %}

processing ...

Verification UUID Status Started at Finished at Tests count Tests duration, sec success skipped expected failures unexpected success failures
{{uuid}} {{data.verifications[uuid].status}} {{data.verifications[uuid].started_at}} {{data.verifications[uuid].finished_at}} {{data.verifications[uuid].tests_count}} {{data.verifications[uuid].tests_duration}} {{data.verifications[uuid].success}} {{data.verifications[uuid].skipped}} {{data.verifications[uuid].expected_failures}} {{data.verifications[uuid].unexpected_success}} {{data.verifications[uuid].failures}}
Filter tests by status:
Toggle Header Toggle Tags Toggle All Filters
Test name (shown {{get_tests_count()}})
{{uuid}}
{{t.name}}
{{tag}}
{{t.by_verification[uuid].status}} {{t.by_verification[uuid].duration}}
{{uuid}}
{{t.by_verification[uuid].details}}
{% endraw %} {% endblock %} rally-0.9.1/rally/ui/templates/task/0000775000567000056710000000000013073420067020507 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/ui/templates/task/trends.html0000664000567000056710000004252213073417716022710 0ustar jenkinsjenkins00000000000000{% extends "/base.html" %} {% block html_attr %} ng-app="App"{% endblock %} {% block title_text %}Rally Tasks Trends{% endblock %} {% block libs %} {% if include_libs %} {% else %} {% endif %} {% endblock %} {% block js_before %} "use strict"; {{ include_raw_file("/task/directive_widget.js") }} var controllerFunction = function($scope, $location) { $scope.data = {{ data }}; {% raw %} $scope.location = { /* #/path/hash/sub/div */ normalize: function(str) { /* Remove unwanted characters from string */ if (typeof str !== "string") { return "" } return str.replace(/[^\w\-\.]/g, "") }, uri: function(obj) { /* Getter/Setter */ if (! obj) { var uri = {path: "", hash: "", sub: "", div: ""}; var arr = ["div", "sub", "hash", "path"]; angular.forEach($location.url().split("/"), function(value){ var v = $scope.location.normalize(value); if (v) { var k = arr.pop(); if (k) { this[k] = v }} }, uri); return uri } var arr = [obj.path, obj.hash, obj.sub, obj.div], res = []; for (var i in arr) { if (! arr[i]) { break }; res.push(arr[i]) } return $location.url("/" + res.join("/")) }, path: function(path, hash) { /* Getter/Setter */ if (path === "") { return this.uri({}) } path = this.normalize(path); var uri = this.uri(); if (! path) { return uri.path } uri.path = path; var _hash = this.normalize(hash); if (_hash || hash === "") { uri.hash = _hash } return this.uri(uri) }, hash: function(hash) { /* Getter/Setter */ if (hash) { this.uri({path:this.uri().path, hash:hash}) } return this.uri().hash } } /* Dispatch */ $scope.route = function(uri) { if (! 
$scope.wload_map) { return } if (uri.path in $scope.wload_map) { $scope.view = {is_wload:true}; $scope.wload = $scope.wload_map[uri.path]; $scope.nav_idx = $scope.nav_map[uri.path]; $scope.showTab(uri); } else { $scope.wload = null; $scope.view = {is_main:true} } } $scope.$on("$locationChangeSuccess", function (event, newUrl, oldUrl) { $scope.route($scope.location.uri()) }); $scope.showNav = function(nav_idx) { $scope.nav_idx = nav_idx } /* Tabs */ $scope.tabs = [ { id: "total", name: "Total", visible: function(){ return true } }, { id: "actions", name: "Atomic actions", visible: function(){ return ($scope.wload.length !== 1) && $scope.wload.actions.length } }, { id: "config", name: "Configuration", visible: function(){ return !! $scope.wload.config.length } } ]; $scope.tabs_map = {}; angular.forEach($scope.tabs, function(tab){ this[tab.id] = tab }, $scope.tabs_map); $scope.showTab = function(uri) { $scope.tab = uri.hash in $scope.tabs_map ? uri.hash : "total" } for (var i in $scope.tabs) { $scope.tabs[i].isVisible = function() { if ($scope.wload) { if (this.visible()) { return true } /* If tab should be hidden but is selected - show another one */ if (this.id === $scope.location.hash()) { for (var i in $scope.tabs) { var tab = $scope.tabs[i]; if (tab.id != this.id && tab.visible()) { $scope.tab = tab.id; return false } } } } return false } } /* Other helpers */ $scope.showError = function(message) { return (function (e) { e.style.display = "block"; e.textContent = message })(document.getElementById("page-error")) } /* Initialization */ angular.element(document).ready(function(){ if (! $scope.data.length) { return $scope.showError("No data...") } /* Compose data mapping */ $scope.nav = []; $scope.nav_map = {}; $scope.wload_map = {}; var prev_cls, prev_met, met = [], itr = 0, cls_idx = 0; for (var idx in $scope.data) { var w = $scope.data[idx]; if (! 
prev_cls) { prev_cls = w.cls } else if (prev_cls !== w.cls) { $scope.nav.push({name:prev_cls, met:met, idx:cls_idx}); prev_cls = w.cls; met = []; itr = 1; cls_idx += 1 } if (prev_met !== w.met) { itr = 1 }; w.ref = $scope.location.normalize(w.cls+"."+w.met+(itr > 1 ? "-"+itr : "")); w.order_idx = itr > 1 ? " ["+itr+"]" : "" $scope.wload_map[w.ref] = w; $scope.nav_map[w.ref] = cls_idx; met.push({name:w.met, itr:itr, idx:idx, order_idx:w.order_idx, ref:w.ref}); prev_met = w.met; itr += 1; } if (met.length) { $scope.nav.push({name:prev_cls, met:met, idx:cls_idx}) } /* Start */ $scope.route($scope.location.uri()); $scope.$digest() }); }; if (typeof angular === "object") { angular.module("App", []) .controller("Controller", ["$scope", "$location", controllerFunction]) .directive("widget", widgetDirective) } {% endraw %} {% endblock %} {% block css %} .aside { margin:0 20px 0 0; display:block; width:255px; float:left } .aside > div { margin-bottom: 15px } .aside > div div:first-child { border-top-left-radius:4px; border-top-right-radius:4px } .aside > div div:last-child { border-bottom-left-radius:4px; border-bottom-right-radius:4px } .navcls { color:#678; background:#eee; border:1px solid #ddd; margin-bottom:-1px; display:block; padding:8px 9px; font-weight:bold; text-align:left; overflow:hidden; text-overflow:ellipsis; white-space:nowrap; cursor:pointer } .navcls.expanded { color:#469 } .navcls.active { background:#428bca; background-image:linear-gradient(to bottom, #428bca 0px, #3278b3 100%); border-color:#3278b3; color:#fff } .navmet { color:#555; background:#fff; border:1px solid #ddd; font-size:12px; display:block; margin-bottom:-1px; padding:8px 10px; text-align:left; text-overflow:ellipsis; white-space:nowrap; overflow:hidden; cursor:pointer } .navmet:hover { background:#f8f8f8 } .navmet.active, .navmet.active:hover { background:#428bca; background-image:linear-gradient(to bottom, #428bca 0px, #3278b3 100%); border-color:#3278b3; color:#fff } .navmet.single, 
.single, .single td { color:#999 } .navmet.active.single { color:#ccc } .tabs { list-style:outside none none; margin:0 0 5px; padding:0; border-bottom:1px solid #ddd } .tabs:after { clear:both } .tabs li { float:left; margin-bottom:-1px; display:block; position:relative } .tabs li div { border:1px solid transparent; border-radius:4px 4px 0 0; line-height:20px; margin-right:2px; padding:10px 15px; color:#428bca } .tabs li div:hover { border-color:#eee #eee #ddd; background:#eee; cursor:pointer; } .tabs li.active div { background:#fff; border-color:#ddd #ddd transparent; border-style:solid; border-width:1px; color:#555; cursor:default } .failure-mesg { color:#900 } .failure-trace { color:#333; white-space:pre; overflow:auto } .link { color:#428BCA; padding:5px 15px 5px 5px; text-decoration:underline; cursor:pointer } .link.active { color:#333; text-decoration:none } .chart { padding:0; margin:0; width:890px } .chart svg { height:300px; padding:0; margin:0; overflow:visible; float:right } .chart.lower svg { height:180px } .chart-label-y { font-size:12px; position:relative; top:5px; padding:0; margin:0 } .clearfix { clear:both } .sortable > .arrow { display:inline-block; width:12px; height:inherit; color:#c90 } .content-main { margin:0 5px; display:block; float:left } .content-wrap { width:900px } .chart-title { color:#f60; font-size:20px; padding:8px 0 3px } {% endblock %} {% block media_queries %} @media only screen and (min-width: 320px) { .content-wrap { width:900px } .content-main { width:600px } } @media only screen and (min-width: 900px) { .content-wrap { width:880px } .content-main { width:590px } } @media only screen and (min-width: 1000px) { .content-wrap { width:980px } .content-main { width:690px } } @media only screen and (min-width: 1100px) { .content-wrap { width:1080px } .content-main { width:790px } } @media only screen and (min-width: 1200px) { .content-wrap { width:1180px } .content-main { width:890px } } {% endblock %} {% block body_attr %} 
ng-controller="Controller"{% endblock %} {% block header_text %}tasks trends report{% endblock %} {% block content %} {% raw %}

Trends overview

Scenario Number of runs Min duration Max duration Avg duration SLA
{{w.ref}} {{w.length}} - {{w.stat.min | number:4}} - {{w.stat.max | number:4}} - {{w.stat.avg | number:4}}
Compare workload runs

{{wload.cls}}.{{wload.met}}{{wload.order_idx}}

  • {{t.name}}
{% endraw %} {% endblock %} {% block js_after %} if (! window.angular) {(function(f){ f(document.getElementById("content-nav"), "none"); f(document.getElementById("content-main"), "none"); f(document.getElementById("page-error"), "block").textContent = "Failed to load AngularJS framework" })(function(e, s){e.style.display = s; return e})} {% endblock %} rally-0.9.1/rally/ui/templates/task/report.html0000664000567000056710000007463513073417720022731 0ustar jenkinsjenkins00000000000000{% extends "/base.html" %} {% block html_attr %} ng-app="App"{% endblock %} {% block title_text %}Rally Task Report{% endblock %} {% block libs %} {% if include_libs %} {% else %} {% endif %} {% endblock %} {% block js_before %} "use strict"; {{ include_raw_file("/task/directive_widget.js") }} var controllerFunction = function($scope, $location) { $scope.source = {{ source }}; $scope.scenarios = {{ data }}; {% raw %} $scope.location = { /* #/path/hash/sub/div */ normalize: function(str) { /* Remove unwanted characters from string */ if (typeof str !== "string") { return "" } return str.replace(/[^\w\-\.]/g, "") }, uri: function(obj) { /* Getter/Setter */ if (! obj) { var uri = {path: "", hash: "", sub: "", div: ""}; var arr = ["div", "sub", "hash", "path"]; angular.forEach($location.url().split("/"), function(value){ var v = $scope.location.normalize(value); if (v) { var k = arr.pop(); if (k) { this[k] = v }} }, uri); return uri } var arr = [obj.path, obj.hash, obj.sub, obj.div], res = []; for (var i in arr) { if (! arr[i]) { break }; res.push(arr[i]) } return $location.url("/" + res.join("/")) }, path: function(path, hash) { /* Getter/Setter */ if (path === "") { return this.uri({}) } path = this.normalize(path); var uri = this.uri(); if (! 
path) { return uri.path } uri.path = path; var _hash = this.normalize(hash); if (_hash || hash === "") { uri.hash = _hash } return this.uri(uri) }, hash: function(hash) { /* Getter/Setter */ if (hash) { this.uri({path:this.uri().path, hash:hash}) } return this.uri().hash } } /* Dispatch */ $scope.route = function(uri) { if (! $scope.scenarios_map) { return } // Expand menu if there is only one menu group if ($scope.nav.length === 1) { $scope.nav_idx = $scope.nav[0].idx; } if (uri.path in $scope.scenarios_map) { $scope.view = {is_scenario:true}; $scope.scenario = $scope.scenarios_map[uri.path]; $scope.nav_idx = $scope.nav_map[uri.path]; if ($scope.scenario.iterations.histogram.views.length) { $scope.mainHistogram = $scope.scenario.iterations.histogram.views[0] } if ($scope.scenario.atomic.histogram.views.length) { $scope.atomicHistogram = $scope.scenario.atomic.histogram.views[0] } $scope.outputIteration = 0; $scope.showTab(uri); } else { $scope.scenario = null; if (uri.path === "source") { $scope.view = {is_source:true} } else { $scope.view = {is_main:true} } } } $scope.$on("$locationChangeSuccess", function (event, newUrl, oldUrl) { $scope.route($scope.location.uri()) }); $scope.showNav = function(nav_idx) { $scope.nav_idx = nav_idx } /* Tabs */ $scope.tabs = [ { id: "overview", name: "Overview", visible: function(){ return !! $scope.scenario.iterations.pie.length } },{ id: "details", name: "Details", visible: function(){ return !! $scope.scenario.atomic.pie.length } },{ id: "output", name: "Scenario Data", visible: function(){ return $scope.scenario.has_output } },{ id: "hooks", name: "Hooks", visible: function(){ return $scope.scenario.hooks.length } },{ id: "failures", name: "Failures", visible: function(){ return !! $scope.scenario.errors.length } },{ id: "task", name: "Input task", visible: function(){ return !! 
$scope.scenario.config } } ]; $scope.tabs_map = {}; angular.forEach($scope.tabs, function(tab){ this[tab.id] = tab }, $scope.tabs_map); $scope.showTab = function(uri) { $scope.tab = uri.hash in $scope.tabs_map ? uri.hash : "overview"; if (uri.hash === "output") { if (typeof $scope.scenario.output === "undefined") { var has_additive = !! $scope.scenario.additive_output.length; var has_complete = !! ($scope.scenario.complete_output.length && $scope.scenario.complete_output[0].length); $scope.scenario.output = { has_additive: has_additive, has_complete: has_complete, length: has_additive + has_complete, active: has_additive ? "additive" : (has_complete ? "complete" : "") } } if (uri.sub && $scope.scenario.output["has_" + uri.sub]) { $scope.scenario.output.active = uri.sub } } else if (uri.hash === "hooks") { if ($scope.scenario.hooks.length) { var hook_idx = parseInt(uri.sub); if (isNaN(hook_idx) || ($scope.scenario.hooks.length - hook_idx) <= 0) { hook_idx = 0 } if ($scope.scenario.hook_idx === hook_idx) { return } $scope.scenario.hooks.cur = $scope.scenario.hooks[hook_idx]; $scope.scenario.hook_idx = hook_idx; if (typeof $scope.scenario.hooks.cur.active === "undefined") { if ($scope.scenario.hooks.cur.additive.length) { $scope.scenario.hooks.cur.active = "additive" } if ($scope.scenario.hooks.cur.complete.length) { if (typeof $scope.scenario.hooks.cur.active === "undefined") { $scope.scenario.hooks.cur.active = "complete" } $scope.set_hook_run() } } } } } for (var i in $scope.tabs) { if ($scope.tabs[i].id === $scope.location.hash()) { $scope.tab = $scope.tabs[i].id } $scope.tabs[i].isVisible = function() { if ($scope.scenario) { if (this.visible()) { return true } /* If tab should be hidden but is selected - show another one */ if (this.id === $scope.location.hash()) { for (var i in $scope.tabs) { var tab = $scope.tabs[i]; if (tab.id != this.id && tab.visible()) { $scope.tab = tab.id; return false } } } } return false } } $scope.set_hook_run = function(idx) { if 
(typeof idx !== "undefined") { $scope.scenario.hooks.cur.run_idx = idx } else if (typeof $scope.scenario.hooks.cur.run_idx === "undefined") { $scope.scenario.hooks.cur.run_idx = 0 } idx = $scope.scenario.hooks.cur.run_idx; if (($scope.scenario.hooks.cur.complete.length - idx) > 0) { $scope.scenario.hooks.cur.run = $scope.scenario.hooks.cur.complete[idx] } } $scope.complete_hooks_as_dropdown = function() { return $scope.scenario.hooks.cur.complete.length > 10 } /* Other helpers */ $scope.showError = function(message) { return (function (e) { e.style.display = "block"; e.textContent = message })(document.getElementById("page-error")) } $scope.compact_atomics = function() { return ($scope.scenario && $scope.scenario.atomic.iter.length < 9) } /* Initialization */ angular.element(document).ready(function(){ if (! $scope.scenarios.length) { return $scope.showError("No data...") } /* Compose data mapping */ $scope.nav = []; $scope.nav_map = {}; $scope.scenarios_map = {}; var met = [], itr = 0, cls_idx = 0; var prev_cls, prev_met; for (var idx in $scope.scenarios) { var sc = $scope.scenarios[idx]; if (! prev_cls) { prev_cls = sc.cls } else if (prev_cls !== sc.cls) { $scope.nav.push({cls:prev_cls, met:met, idx:cls_idx}); prev_cls = sc.cls; met = []; itr = 1; cls_idx += 1 } if (prev_met !== sc.met) { itr = 1 }; sc.ref = $scope.location.normalize(sc.cls+"."+sc.met+(itr > 1 ? 
"-"+itr : "")); $scope.scenarios_map[sc.ref] = sc; $scope.nav_map[sc.ref] = cls_idx; met.push({name:sc.name, itr:itr, idx:idx, ref:sc.ref}); prev_met = sc.met; itr += 1; } if (met.length) { $scope.nav.push({cls:prev_cls, met:met, idx:cls_idx}) } /* Start */ var uri = $scope.location.uri(); uri.path = $scope.location.path(); $scope.route(uri); $scope.$digest() }) }; if (typeof angular === "object") { angular.module("App", []) .controller("Controller", ["$scope", "$location", controllerFunction]) .directive("widget", widgetDirective) } {% endraw %} {% endblock %} {% block css %} .aside { margin:0 20px 0 0; display:block; width:255px; float:left } .aside > div { margin-bottom: 15px } .aside > div div:first-child { border-top-left-radius:4px; border-top-right-radius:4px } .aside > div div:last-child { border-bottom-left-radius:4px; border-bottom-right-radius:4px } .navcls { color:#678; background:#eee; border:1px solid #ddd; margin-bottom:-1px; display:block; padding:8px 9px; font-weight:bold; text-align:left; overflow:hidden; text-overflow:ellipsis; white-space:nowrap; cursor:pointer } .navcls.expanded { color:#469 } .navcls.active { background:#428bca; background-image:linear-gradient(to bottom, #428bca 0px, #3278b3 100%); border-color:#3278b3; color:#fff } .navmet { color:#555; background:#fff; border:1px solid #ddd; font-size:12px; display:block; margin-bottom:-1px; padding:8px 10px; text-align:left; text-overflow:ellipsis; white-space:nowrap; overflow:hidden; cursor:pointer } .navmet:hover { background:#f8f8f8 } .navmet.active, .navmet.active:hover { background:#428bca; background-image:linear-gradient(to bottom, #428bca 0px, #3278b3 100%); border-color:#3278b3; color:#fff } .buttn { color:#555; background:#fff; border:1px solid #ddd; border-radius:5px; font-size:12px; margin-bottom:-1px; padding:5px 7px; text-align:left; text-overflow:ellipsis; white-space:nowrap; overflow:hidden; cursor:pointer } .buttn:hover { background:#f8f8f8 } .buttn.active, 
.bttn.active:hover { background:#428bca; background-image:linear-gradient(to bottom, #428bca 0px, #3278b3 100%); border-color:#3278b3; color:#fff; cursor:default } .tabs { list-style:outside none none; margin:0 0 5px; padding:0; border-bottom:1px solid #ddd } .tabs:after { clear:both } .tabs li { float:left; margin-bottom:-1px; display:block; position:relative } .tabs li div { border:1px solid transparent; border-radius:4px 4px 0 0; line-height:20px; margin-right:2px; padding:10px 15px; color:#428bca } .tabs li div:hover { border-color:#eee #eee #ddd; background:#eee; cursor:pointer; } .tabs li.active div { background:#fff; border-color:#ddd #ddd transparent; border-style:solid; border-width:1px; color:#555; cursor:default } .failure-mesg { color:#900 } .failure-trace { color:#333; white-space:pre; overflow:auto } .link { color:#428BCA; padding:5px 15px 5px 5px; text-decoration:underline; cursor:pointer } .link.active { color:#333; text-decoration:none; cursor:default } .chart { padding:0; margin:0; width:890px } .chart svg { height:300px; padding:0; margin:0; overflow:visible; float:right } .chart.lower svg { height:180px } .chart-label-y { font-size:12px; position:relative; top:5px; padding:0; margin:0 } .expandable { cursor:pointer } .clearfix { clear:both } .sortable > .arrow { display:inline-block; width:12px; height:inherit; color:#c90 } .content-main { margin:0 5px; display:block; float:left } {% endblock %} {% block media_queries %} @media only screen and (min-width: 320px) { .content-wrap { width:900px } .content-main { width:600px } } @media only screen and (min-width: 900px) { .content-wrap { width:880px } .content-main { width:590px } } @media only screen and (min-width: 1000px) { .content-wrap { width:980px } .content-main { width:690px } } @media only screen and (min-width: 1100px) { .content-wrap { width:1080px } .content-main { width:790px } } @media only screen and (min-width: 1200px) { .content-wrap { width:1180px } .content-main { width:890px } } 
{% endblock %} {% block body_attr %} ng-controller="Controller"{% endblock %} {% block header_text %}task results{% endblock %} {% block content %} {% raw %}

Task overview

Scenario Load duration (s) Full duration (s) Iterations Runner Errors Hooks Success (SLA)
{{sc.ref}} {{sc.load_duration | number:3}} {{sc.full_duration | number:3}} {{sc.iterations_count}} {{sc.runner}} {{sc.errors.length}} {{sc.hooks.length}}

Input file

{{source}}

{{scenario.cls}}.{{scenario.name}} ({{scenario.full_duration | number:3}}s)

  • {{t.name}}
{% endraw %} {% endblock %} {% block js_after %} if (! window.angular) {(function(f){ f(document.getElementById("content-nav"), "none"); f(document.getElementById("content-main"), "none"); f(document.getElementById("page-error"), "block").textContent = "Failed to load AngularJS framework" })(function(e, s){e.style.display = s; return e})} {% endblock %} rally-0.9.1/rally/ui/templates/task/directive_widget.js0000664000567000056710000002054613073417716024404 0ustar jenkinsjenkins00000000000000var widgetDirective = function($compile) { var Chart = { _render: function(node, data, chart, do_after){ nv.addGraph(function() { d3.select(node) .datum(data).transition().duration(0) .call(chart); if (typeof do_after === "function") { do_after(node, chart) } nv.utils.windowResize(chart.update); }) }, _widgets: { Pie: "pie", StackedArea: "stack", Lines: "lines", Histogram: "histogram" }, get_chart: function(widget) { if (widget in this._widgets) { var name = this._widgets[widget]; return Chart[name] } return function() { console.log("Error: unexpected widget:", widget) } }, pie: function(node, data, opts, do_after) { var chart = nv.models.pieChart() .x(function(d) { return d.key }) .y(function(d) { return d.values }) .showLabels(true) .labelType("percent") .donut(true) .donutRatio(0.25) .donutLabelsOutside(true) .color(function(d){ if (d.data && d.data.color) { return d.data.color } }); var colorizer = new Chart.colorizer("errors"), data_ = []; for (var i in data) { data_.push({key:data[i][0], values:data[i][1], color:colorizer.get_color(data[i][0])}) } Chart._render(node, data_, chart) }, colorizer: function(failure_key, failure_color) { this.failure_key = failure_key || "failed_duration"; this.failure_color = failure_color || "#d62728"; // red this.color_idx = -1; /* NOTE(amaretskiy): this is actually a result of d3.scale.category20().range(), excluding red color (#d62728) which is reserved for errors */ this.colors = ["#1f77b4", "#aec7e8", "#ff7f0e", "#ffbb78", "#2ca02c", 
"#98df8a", "#ff9896", "#9467bd", "#c5b0d5", "#8c564b", "#c49c94", "#e377c2", "#f7b6d2", "#7f7f7f", "#c7c7c7", "#bcbd22", "#dbdb8d", "#17becf", "#9edae5"]; this.get_color = function(key) { if (key === this.failure_key) { return this.failure_color } if (this.color_idx > (this.colors.length - 2)) { this.color_idx = 0 } else { this.color_idx++ } return this.colors[this.color_idx] } }, stack: function(node, data, opts, do_after) { var chart = nv.models.stackedAreaChart() .x(function(d) { return d[0] }) .y(function(d) { return d[1] }) .useInteractiveGuideline(opts.guide) .showControls(opts.controls) .clipEdge(true); chart.xAxis .axisLabel(opts.xname) .tickFormat(opts.xformat) .showMaxMin(opts.showmaxmin); chart.yAxis .orient("left") .tickFormat(d3.format(opts.yformat || ",.3f")); var colorizer = new Chart.colorizer(), data_ = []; for (var i in data) { data_.push({key:data[i][0], values:data[i][1], color:colorizer.get_color(data[i][0])}) } Chart._render(node, data_, chart, do_after); }, lines: function(node, data, opts, do_after) { var chart = nv.models.lineChart() .x(function(d) { return d[0] }) .y(function(d) { return d[1] }) .useInteractiveGuideline(opts.guide) .clipEdge(true); chart.xAxis .axisLabel(opts.xname) .tickFormat(opts.xformat) .rotateLabels(opts.xrotate) .showMaxMin(opts.showmaxmin); chart.yAxis .orient("left") .tickFormat(d3.format(opts.yformat || ",.3f")); var colorizer = new Chart.colorizer(), data_ = []; for (var i in data) { data_.push({key:data[i][0], values:data[i][1], color:colorizer.get_color(data[i][0])}) } Chart._render(node, data_, chart, do_after) }, histogram: function(node, data, opts) { var chart = nv.models.multiBarChart() .reduceXTicks(true) .showControls(false) .transitionDuration(0) .groupSpacing(0.05); chart .legend.radioButtonMode(true); chart.xAxis .axisLabel("Duration (seconds)") .tickFormat(d3.format(",.2f")); chart.yAxis .axisLabel("Iterations (frequency)") .tickFormat(d3.format("d")); Chart._render(node, data, chart) } }; return { 
restrict: "A", scope: { data: "=" }, link: function(scope, element, attrs) { scope.$watch("data", function(data) { if (! data) { return console.log("Chart has no data to render!") } if (attrs.widget === "Table") { var ng_class = attrs.lastrowClass ? " ng-class='{"+attrs.lastrowClass+":$last}'" : ""; var template = "" + "" + "" + "" + "" + "
{{i}}
{{i}}" + "
"; var el = element.empty().append($compile(template)(scope)).children()[0] } else if (attrs.widget === "TextArea") { var template = "
{{str}}
"; var el = element.empty().append($compile(template)(scope)).children()[0] } else { var el_chart = element.addClass("chart").css({display:"block"}); var el = el_chart.html("").children()[0]; var do_after = null; if (attrs.widget in {StackedArea:0, Lines:0}) { /* Hide widget if not enough data */ if ((! data.length) || (data[0].length < 1) || (data[0][1].length < 2)) { return element.empty().css({display:"none"}) } /* NOTE(amaretskiy): Dirty fix for changing chart width in case if there are too long Y values that overlaps chart box. */ var do_after = function(node, chart){ var g_box = angular.element(el_chart[0].querySelector(".nv-y.nv-axis")); if (g_box && g_box[0] && g_box[0].getBBox) { try { // 30 is padding aroung graphs var width = g_box[0].getBBox().width + 30; } catch (err) { // This happens sometimes, just skip silently return } // 890 is chart width (set by CSS) if (typeof width === "number" && width > 890) { width = (890 * 2) - width; if (width > 0) { angular.element(node).css({width:width+"px"}); chart.update() } } } } } else if (attrs.widget === "Pie") { if (! data.length) { return element.empty().css({display:"none"}) } } var opts = { xname: attrs.nameX || "", xrotate: attrs.rotateX || 0, yformat: attrs.formatY || ",.3f", controls: attrs.controls === "true", guide: attrs.guide === "true", showmaxmin: attrs.showmaxmin === "true" }; if (attrs.formatDateX) { opts.xformat = function(d) { return d3.time.format(attrs.formatDateX)(new Date(d)) } } else { opts.xformat = d3.format(attrs.formatX || "d") } Chart.get_chart(attrs.widget)(el, data, opts, do_after); } if (attrs.nameY) { /* NOTE(amaretskiy): Dirty fix for displaying Y-axis label correctly. I believe sometimes NVD3 will allow doing this in normal way */ var label_y = angular.element("
").addClass("chart-label-y").text(attrs.nameY); angular.element(el).parent().prepend(label_y) } if (attrs.description) { var desc_el = angular.element("
").addClass(attrs.descriptionClass || "h3").text(attrs.description); angular.element(el).parent().prepend(desc_el) } if (attrs.title) { var title_el = angular.element("
").addClass(attrs.titleClass || "h2").text(attrs.title); angular.element(el).parent().prepend(title_el) } angular.element(el).parent().append(angular.element("
")) }); } } }; rally-0.9.1/rally/ui/utils.py0000664000567000056710000000221713073417716017272 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jinja2 def get_template(template): def include_raw_file(file_name): try: return jinja2.Markup(loader.get_source(env, file_name)[0]) except jinja2.TemplateNotFound: # NOTE(amaretskiy): re-raise error to make its message clear raise IOError("File not found: %s" % file_name) loader = jinja2.PackageLoader("rally.ui", "templates") env = jinja2.Environment(loader=loader) env.globals["include_raw_file"] = include_raw_file return env.get_template(template) rally-0.9.1/rally/verification/0000775000567000056710000000000013073420067017614 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/verification/__init__.py0000664000567000056710000000000013073417716021722 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/verification/reporter.py0000775000567000056710000000575713073417716022060 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Reporter - its the mechanism for exporting rally verification into specified system or formats. """ import abc import jsonschema import six from rally.common.plugin import plugin from rally import consts configure = plugin.configure REPORT_RESPONSE_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "files": { "type": "object", "patternProperties": { ".{1,}": {"type": "string"} } }, "open": { "type": "string", }, "print": { "type": "string" } }, "additionalProperties": False } @plugin.base() @six.add_metaclass(abc.ABCMeta) class VerificationReporter(plugin.Plugin): """Base class for all reporters for verifications.""" def __init__(self, verifications, output_destination): """Init reporter :param verifications: list of results to generate report for :param output_destination: destination of report """ super(VerificationReporter, self).__init__() self.verifications = verifications self.output_destination = output_destination @classmethod @abc.abstractmethod def validate(cls, output_destination): """Validate destination of report. :param output_destination: Destination of report """ @abc.abstractmethod def generate(self): """Generate report :returns: a dict with 3 optional elements: - key "files" with a dictionary of files to save on disk. keys are paths, values are contents; - key "print" - data to print at CLI level - key "open" - path to file which should be open in case of --open flag """ @staticmethod def make(reporter_cls, verifications, output_destination): """Initialize reporter, generate and validate report. 
It is a base method which is called from API layer. It cannot be overridden. Do not even try! :) :param reporter_cls: class of VerificationReporter to be used :param verifications: list of results to generate report for :param output_destination: destination of report """ report = reporter_cls(verifications, output_destination).generate() jsonschema.validate(report, REPORT_RESPONSE_SCHEMA) return report rally-0.9.1/rally/verification/manager.py0000664000567000056710000004042013073417716021607 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import inspect import os import re import shutil import sys import six from rally.common.i18n import _LE, _LI from rally.common.io import subunit_v2 from rally.common import logging from rally.common.plugin import plugin from rally import exceptions from rally.verification import context from rally.verification import utils LOG = logging.getLogger(__name__) URL_RE = re.compile( r"^(?:http|ftp)s?://" # http:// or https:// r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+" # domain r"(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain r"localhost|" # localhost r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # IP r"(?::\d+)?" 
# optional port r"(?:/?|[/?]\S+)$", re.IGNORECASE) class VerifierSetupFailure(exceptions.RallyException): msg_fmt = "Failed to set up verifier '%(verifier)s': %(message)s" def configure(name, namespace="default", default_repo=None, default_version=None, context=None): """Decorator to configure plugin's attributes. :param name: Plugin name that is used for searching purpose :param namespace: Plugin namespace :param default_repo: Default repository to clone :param default_version: Default version to checkout :param context: List of contexts that should be executed for verification """ def decorator(plugin): plugin._configure(name, namespace) plugin._meta_set("default_repo", default_repo) plugin._meta_set("default_version", default_version) plugin._meta_set("context", context or {}) return plugin return decorator @plugin.base() @six.add_metaclass(abc.ABCMeta) class VerifierManager(plugin.Plugin): """Verifier base class. This class provides an interface for operating specific tool. """ # These dicts will be used for building docs. PS: we should find a better # place for them RUN_ARGS = {"pattern": "a regular expression of tests to launch.", "concurrency": "Number of processes to be used for launching " "tests. 
In case of 0 value, number of processes" " will be equal to number of CPU cores.", "load_list": "a list of tests to launch.", "skip_list": "a list of tests to skip (actually, it is a dict " "where keys are names of tests, values are " "reasons).", "xfail_list": "a list of tests that are expected to fail " "(actually, it is a dict where keys are names " "of tests, values are reasons)."} @classmethod def _get_doc(cls): run_args = {} for parent in inspect.getmro(cls): if hasattr(parent, "RUN_ARGS"): for k, v in parent.RUN_ARGS.items(): run_args.setdefault(k, v) doc = cls.__doc__ or "" doc += "\n**Running arguments**:\n%s" % "\n".join( sorted([" * *%s*: %s" % (k, v) for k, v in run_args.items()])) doc += "\n**Installation arguments**:\n" doc += (" * *system_wide*: Whether or not to use the system-wide " "environment for verifier instead of a virtual environment. " "Defaults to False.\n" " * *source*: Path or URL to the repo to clone verifier from." " Defaults to %(default_source)s\n" " * *version*: Branch, tag or commit ID to checkout before " "verifier installation. Defaults to '%(default_version)s'." % {"default_source": cls._meta_get("default_repo"), "default_version": cls._meta_get( "default_version") or "master"}) return doc def __init__(self, verifier): """Init a verifier manager. 
:param verifier: `rally.common.objects.verifier.Verifier` instance """ self.verifier = verifier @property def base_dir(self): return os.path.expanduser( "~/.rally/verification/verifier-%s" % self.verifier.uuid) @property def home_dir(self): return os.path.join(self.base_dir, "for-deployment-%s" % self.verifier.deployment["uuid"]) @property def repo_dir(self): return os.path.join(self.base_dir, "repo") @property def venv_dir(self): return os.path.join(self.base_dir, ".venv") @property def environ(self): env = os.environ.copy() if not self.verifier.system_wide: # activate virtual environment env["VIRTUAL_ENV"] = self.venv_dir env["PATH"] = "%s:%s" % ( os.path.join(self.venv_dir, "bin"), env["PATH"]) return env def validate_args(self, args): """Validate given arguments to be used for running verification. :param args: A dict of arguments with values """ # NOTE(andreykurilin): By default we do not use jsonschema here. # So it cannot be extended by inheritors => requires duplication. if "pattern" in args: if not isinstance(args["pattern"], six.string_types): raise exceptions.ValidationError( "'pattern' argument should be a string.") if "concurrency" in args: if (not isinstance(args["concurrency"], int) or args["concurrency"] < 0): raise exceptions.ValidationError( "'concurrency' argument should be a positive integer or " "zero.") if "load_list" in args: if not isinstance(args["load_list"], list): raise exceptions.ValidationError( "'load_list' argument should be a list of tests.") if "skip_list" in args: if not isinstance(args["skip_list"], dict): raise exceptions.ValidationError( "'skip_list' argument should be a dict of tests " "where keys are test names and values are reasons.") if "xfail_list" in args: if not isinstance(args["xfail_list"], dict): raise exceptions.ValidationError( "'xfail_list' argument should be a dict of tests " "where keys are test names and values are reasons.") def validate(self, run_args): """Validate a verifier context and run arguments.""" 
context.ContextManager.validate(self._meta_get("context")) self.validate_args(run_args) def _clone(self): """Clone a repo and switch to a certain version.""" source = self.verifier.source or self._meta_get("default_repo") if not URL_RE.match(source) and not os.path.exists(source): raise exceptions.RallyException("Source path '%s' is not valid." % source) if logging.is_debug(): LOG.debug("Cloning verifier repo from %s into %s.", source, self.repo_dir) else: LOG.info("Cloning verifier repo from %s.", source) cmd = ["git", "clone", source, self.repo_dir] default_version = self._meta_get("default_version") if default_version and default_version != "master": cmd.extend(["-b", default_version]) utils.check_output(cmd) version = self.verifier.version if version: LOG.info("Switching verifier repo to the '%s' version." % version) utils.check_output(["git", "checkout", version], cwd=self.repo_dir) else: output = utils.check_output(["git", "describe", "--all"], cwd=self.repo_dir).strip() if output.startswith("heads/"): # it is a branch version = output[6:] else: head = utils.check_output(["git", "rev-parse", "HEAD"], cwd=self.repo_dir).strip() if output.endswith(head[:7]): # it is a commit ID version = head else: # it is a tag version = output self.verifier.update_properties(version=version) def install(self): """Clone and install a verifier.""" utils.create_dir(self.base_dir) self._clone() if self.verifier.system_wide: self.check_system_wide() else: self.install_venv() def uninstall(self, full=False): """Uninstall a verifier. :param full: If False (default behaviour), only deployment-specific data will be removed """ path = self.base_dir if full else self.home_dir if os.path.exists(path): shutil.rmtree(path) def install_venv(self): """Install a virtual environment for a verifier.""" if os.path.exists(self.venv_dir): # NOTE(andreykurilin): It is necessary to remove the old env while # performing update action. 
LOG.info("Deleting old virtual environment.") shutil.rmtree(self.venv_dir) LOG.info("Creating virtual environment. It may take a few minutes.") LOG.debug("Initializing virtual environment in %s directory.", self.venv_dir) utils.check_output(["virtualenv", "-p", sys.executable, self.venv_dir], cwd=self.repo_dir, msg_on_err="Failed to initialize virtual env " "in %s directory." % self.venv_dir) LOG.debug("Installing verifier in virtual environment.") # NOTE(ylobankov): Use 'develop mode' installation to provide an # ability to advanced users to change tests or # develop new ones in verifier repo on the fly. utils.check_output(["pip", "install", "-e", "./"], cwd=self.repo_dir, env=self.environ) def check_system_wide(self, reqs_file_path=None): """Check that all required verifier packages are installed.""" LOG.debug("Checking system-wide packages for verifier.") import pip reqs_file_path = reqs_file_path or os.path.join(self.repo_dir, "requirements.txt") required_packages = set( [r.name.lower() for r in pip.req.parse_requirements( reqs_file_path, session=False)]) installed_packages = set( [r.key for r in pip.get_installed_distributions()]) missed_packages = required_packages - installed_packages if missed_packages: raise VerifierSetupFailure( "Missed package(s) for system-wide installation found. " "Please install '%s'." % "', '".join(sorted(missed_packages)), verifier=self.verifier.name) def checkout(self, version): """Switch a verifier repo.""" LOG.info("Switching verifier repo to the '%s' version.", version) utils.check_output(["git", "checkout", "master"], cwd=self.repo_dir) utils.check_output(["git", "remote", "update"], cwd=self.repo_dir) utils.check_output(["git", "pull"], cwd=self.repo_dir) utils.check_output(["git", "checkout", version], cwd=self.repo_dir) def configure(self, extra_options=None): """Configure a verifier. :param extra_options: a dictionary with external verifier specific options for configuration. 
:raises NotImplementedError: This feature is verifier-specific, so you should override this method in your plugin if it supports configuration """ raise NotImplementedError( _LI("'%s' verifiers don't support configuration at all.") % self.get_name()) def is_configured(self): """Check whether a verifier is configured or not.""" return True def get_configuration(self): """Get verifier configuration (e.g., the config file content).""" return "" def override_configuration(self, new_configuration): """Override verifier configuration. :param new_configuration: Content which should be used while overriding existing configuration :raises NotImplementedError: This feature is verifier-specific, so you should override this method in your plugin if it supports configuration """ raise NotImplementedError( _LE("'%s' verifiers don't support configuration at all.") % self.get_name()) def extend_configuration(self, extra_options): """Extend verifier configuration with new options. :param extra_options: Options to be used for extending configuration :raises NotImplementedError: This feature is verifier-specific, so you should override this method in your plugin if it supports configuration """ raise NotImplementedError( _LE("'%s' verifiers don't support configuration at all.") % self.get_name()) def install_extension(self, source, version=None, extra_settings=None): """Install a verifier extension. 
:param source: Path or URL to the repo to clone verifier extension from :param version: Branch, tag or commit ID to checkout before verifier extension installation :param extra_settings: Extra installation settings for verifier extension :raises NotImplementedError: This feature is verifier-specific, so you should override this method in your plugin if it supports extensions """ raise NotImplementedError( _LE("'%s' verifiers don't support extensions.") % self.get_name()) def list_extensions(self): """List all verifier extensions.""" return [] def uninstall_extension(self, name): """Uninstall a verifier extension. :param name: Name of extension to uninstall :raises NotImplementedError: This feature is verifier-specific, so you should override this method in your plugin if it supports extensions """ raise NotImplementedError( _LE("'%s' verifiers don't support extensions.") % self.get_name()) @abc.abstractmethod def list_tests(self, pattern=""): """List all verifier tests. :param pattern: Filter tests by given pattern """ def parse_results(self, results_data): """Parse subunit results data of a test run.""" # TODO(andreykurilin): Support more formats. return subunit_v2.parse(six.StringIO(results_data)) @abc.abstractmethod def run(self, context): """Run verifier tests. Verification Component API expects that this method should return an object. There is no special class, you do it as you want, but it should have the following properties: .. code-block:: none .totals = { "tests_count": , "tests_duration": , "failures": , "skipped": , "success": , "unexpected_success": , "expected_failures": } .tests = { : { "status": , "name": , "duration": , "reason": , # optional "traceback": # optional }, ... } """ rally-0.9.1/rally/verification/utils.py0000664000567000056710000000565013073417716021343 0ustar jenkinsjenkins00000000000000# All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import subprocess from oslo_utils import encodeutils import six from six.moves import configparser from rally.common import logging LOG = logging.getLogger(__name__) def check_output(*args, **kwargs): """Run command with arguments and return its output. If the exit code was non-zero it raises a CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute and output in the output attribute. The difference between check_output from subprocess package and this function: * Additional arguments: - "msg_on_err" argument. It is a message that should be written in case of error. Reduces a number of try...except blocks - "debug_output" argument(Defaults to True). 
Print or not output to LOG.debug * stderr is hardcoded to stdout * In case of error, prints failed command and output to LOG.error * Prints output to LOG.debug """ msg_on_err = kwargs.pop("msg_on_err", None) debug_output = kwargs.pop("debug_output", True) kwargs["stderr"] = subprocess.STDOUT try: output = subprocess.check_output(*args, **kwargs) except subprocess.CalledProcessError as exc: if msg_on_err: LOG.error(msg_on_err) LOG.error("Failed cmd: '%s'" % exc.cmd) LOG.error("Error output: '%s'" % encodeutils.safe_decode(exc.output)) raise if output and debug_output: LOG.debug("Subprocess output: '%s'" % encodeutils.safe_decode(output)) return output def create_dir(dir_path): if not os.path.isdir(dir_path): os.makedirs(dir_path) return dir_path def extend_configfile(extra_options, conf_path): conf_object = configparser.ConfigParser() conf_object.read(conf_path) conf_object = add_extra_options(extra_options, conf_object) with open(conf_path, "w") as configfile: conf_object.write(configfile) raw_conf = six.StringIO() conf_object.write(raw_conf) return raw_conf.getvalue() def add_extra_options(extra_options, conf_object): for section in extra_options: if section not in (conf_object.sections() + ["DEFAULT"]): conf_object.add_section(section) for option, value in extra_options[section].items(): conf_object.set(section, option, value) return conf_object rally-0.9.1/rally/verification/context.py0000664000567000056710000000314013073417716021657 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import functools from rally.common.plugin import plugin from rally.task import context # all VerifierContexts should be always hidden configure = functools.partial(context.configure, hidden=True) @plugin.base() class VerifierContext(context.BaseContext): """Verifier context that will be run before starting a verification.""" def __init__(self, ctx): super(VerifierContext, self).__init__(ctx) self.verification = self.context.get("verification", {}) self.verifier = self.context["verifier"] @classmethod def validate(cls, config): # do not validate jsonschema. pass class ContextManager(context.ContextManager): @staticmethod def validate(ctx): for name, config in ctx.items(): VerifierContext.get(name, allow_hidden=True).validate(config) def _get_sorted_context_lst(self): return sorted([ VerifierContext.get(name, allow_hidden=True)(self.context_obj) for name in self.context_obj["config"].keys()]) rally-0.9.1/rally/osclients.py0000664000567000056710000007104613073417720017521 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import abc import os from oslo_config import cfg from six.moves.urllib import parse from rally.cli import envutils from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally.common.plugin import plugin from rally import consts from rally import exceptions LOG = logging.getLogger(__name__) CONF = cfg.CONF OSCLIENTS_OPTS = [ cfg.FloatOpt("openstack_client_http_timeout", default=180.0, help="HTTP timeout for any of OpenStack service in seconds") ] CONF.register_opts(OSCLIENTS_OPTS) _NAMESPACE = "openstack" def configure(name, default_version=None, default_service_type=None, supported_versions=None): """OpenStack client class wrapper. Each client class has to be wrapped by configure() wrapper. It sets essential configuration of client classes. :param name: Name of the client :param default_version: Default version for client :param default_service_type: Default service type of endpoint(If this variable is not specified, validation will assume that your client doesn't allow to specify service type. :param supported_versions: List of supported versions(If this variable is not specified, `OSClients.validate_version` method will raise an exception that client doesn't support setting any versions. If this logic is wrong for your client, you should override `validate_version` in client object) """ def wrapper(cls): cls = plugin.configure(name=name, namespace=_NAMESPACE)(cls) cls._meta_set("default_version", default_version) cls._meta_set("default_service_type", default_service_type) cls._meta_set("supported_versions", supported_versions or []) return cls return wrapper @plugin.base() class OSClient(plugin.Plugin): def __init__(self, credential, api_info, cache_obj): self.credential = credential self.api_info = api_info self.cache = cache_obj def choose_version(self, version=None): """Return version string. 
Choose version between transmitted(preferable value if present), version from api_info(configured from a context) and default. """ # NOTE(andreykurilin): The result of choose is converted to string, # since most of clients contain map for versioned modules, where a key # is a string value of version. Example of map and its usage: # # from oslo_utils import importutils # ... # version_map = {"1": "someclient.v1.client.Client", # "2": "someclient.v2.client.Client"} # # def Client(version, *args, **kwargs): # cls = importutils.import_class(version_map[version]) # return cls(*args, **kwargs) # # That is why type of version so important and we should ensure that # version is a string object. # For those clients which doesn't accept string value(for example # zaqarclient), this method should be overridden. version = (version or self.api_info.get(self.get_name(), {}).get("version") or self._meta_get("default_version")) if version is not None: version = str(version) return version @classmethod def get_supported_versions(cls): return cls._meta_get("supported_versions") @classmethod def validate_version(cls, version): supported_versions = cls.get_supported_versions() if supported_versions: if str(version) not in supported_versions: raise exceptions.ValidationError(_( "'%(vers)s' is not supported. Should be one of " "'%(supported)s'") % {"vers": version, "supported": supported_versions}) else: raise exceptions.RallyException( _("Setting version is not supported.")) try: float(version) except ValueError: raise exceptions.ValidationError(_( "'%s' is invalid. Should be numeric value.") % version) def choose_service_type(self, service_type=None): """Return service_type string. Choose service type between transmitted(preferable value if present), service type from api_info(configured from a context) and default. 
""" return (service_type or self.api_info.get(self.get_name(), {}).get("service_type") or self._meta_get("default_service_type")) @classmethod def is_service_type_configurable(cls): """Just checks that client supports setting service type.""" if cls._meta_get("default_service_type") is None: raise exceptions.RallyException(_( "Setting service type is not supported.")) @property def keystone(self): return OSClient.get("keystone")(self.credential, self.api_info, self.cache) def _get_session(self, auth_url=None, version=None): LOG.warning( "Method `rally.osclient.OSClient._get_session` is deprecated since" " Rally 0.6.0. Use " "`rally.osclient.OSClient.keystone.get_session` instead.") return self.keystone.get_session(version) def _get_endpoint(self, service_type=None): kw = {"service_type": self.choose_service_type(service_type), "region_name": self.credential.region_name} if self.credential.endpoint_type: kw["interface"] = self.credential.endpoint_type api_url = self.keystone.service_catalog.url_for(**kw) return api_url def _get_auth_info(self, user_key="username", password_key="password", auth_url_key="auth_url", project_name_key="project_id", domain_name_key="domain_name", user_domain_name_key="user_domain_name", project_domain_name_key="project_domain_name", cacert_key="cacert", endpoint_type="endpoint_type", ): kw = { user_key: self.credential.username, password_key: self.credential.password, auth_url_key: self.credential.auth_url, cacert_key: self.credential.cacert, } if project_name_key: kw.update({project_name_key: self.credential.tenant_name}) if "v2.0" not in self.credential.auth_url: kw.update({ domain_name_key: self.credential.domain_name}) kw.update({ user_domain_name_key: self.credential.user_domain_name or "Default"}) kw.update({ project_domain_name_key: self.credential.project_domain_name or "Default"}) if self.credential.endpoint_type: kw[endpoint_type] = self.credential.endpoint_type return kw @abc.abstractmethod def create_client(self, *args, 
**kwargs): """Create new instance of client.""" def __call__(self, *args, **kwargs): """Return initialized client instance.""" key = "{0}{1}{2}".format(self.get_name(), str(args) if args else "", str(kwargs) if kwargs else "") if key not in self.cache: self.cache[key] = self.create_client(*args, **kwargs) return self.cache[key] @classmethod def get(cls, name, namespace=_NAMESPACE): return super(OSClient, cls).get(name, namespace) @configure("keystone", supported_versions=("2", "3")) class Keystone(OSClient): @property def keystone(self): raise exceptions.RallyException(_("Method 'keystone' is restricted " "for keystoneclient. :)")) @property def service_catalog(self): return self.auth_ref.service_catalog @property def auth_ref(self): if "keystone_auth_ref" not in self.cache: sess, plugin = self.get_session() self.cache["keystone_auth_ref"] = plugin.get_access(sess) return self.cache["keystone_auth_ref"] def get_session(self, version=None): key = "keystone_session_and_plugin_%s" % version if key not in self.cache: from keystoneauth1 import discover from keystoneauth1 import identity from keystoneauth1 import session version = self.choose_version(version) auth_url = self.credential.auth_url if version is not None: auth_url = self._remove_url_version() password_args = { "auth_url": auth_url, "username": self.credential.username, "password": self.credential.password, "tenant_name": self.credential.tenant_name } if version is None: # NOTE(rvasilets): If version not specified than we discover # available version with the smallest number. 
To be able to # discover versions we need session temp_session = session.Session( verify=(self.credential.cacert or not self.credential.insecure), timeout=CONF.openstack_client_http_timeout) version = str(discover.Discover( temp_session, password_args["auth_url"]).version_data()[0]["version"][0]) if "v2.0" not in password_args["auth_url"] and ( version != "2"): password_args.update({ "user_domain_name": self.credential.user_domain_name, "domain_name": self.credential.domain_name, "project_domain_name": self.credential.project_domain_name, }) identity_plugin = identity.Password(**password_args) sess = session.Session( auth=identity_plugin, verify=( self.credential.cacert or not self.credential.insecure), timeout=CONF.openstack_client_http_timeout) self.cache[key] = (sess, identity_plugin) return self.cache[key] def _remove_url_version(self): """Remove any version from the auth_url. The keystone Client code requires that auth_url be the root url if a version override is used. """ url = parse.urlparse(self.credential.auth_url) path = os.path.join(*os.path.split(url.path)[:-1]) parts = (url.scheme, url.netloc, path, url.params, url.query, url.fragment) return parse.urlunparse(parts) def create_client(self, version=None): """Return a keystone client. :param version: Keystone API version, can be one of: ("2", "3") If this object was constructed with a version in the api_info then that will be used unless the version parameter is passed. """ import keystoneclient from keystoneclient import client # Use the version in the api_info if provided, otherwise fall # back to the passed version (which may be None, in which case # keystoneclient chooses). version = self.choose_version(version) sess = self.get_session(version=version)[0] kw = {"version": version, "session": sess, "timeout": CONF.openstack_client_http_timeout} if keystoneclient.__version__[0] == "1": # NOTE(andreykurilin): let's leave this hack for envs which uses # old(<2.0.0) keystoneclient version. 
Upstream fix: # https://github.com/openstack/python-keystoneclient/commit/d9031c252848d89270a543b67109a46f9c505c86 from keystoneauth1 import plugin kw["auth_url"] = sess.get_endpoint(interface=plugin.AUTH_INTERFACE) if self.credential.endpoint_type: kw["interface"] = self.credential.endpoint_type return client.Client(**kw) @configure("nova", default_version="2", default_service_type="compute") class Nova(OSClient): @classmethod def validate_version(cls, version): from novaclient import api_versions from novaclient import exceptions as nova_exc try: api_versions.get_api_version(version) except nova_exc.UnsupportedVersion: raise exceptions.RallyException( "Version string '%s' is unsupported." % version) def create_client(self, version=None, service_type=None): """Return nova client.""" from novaclient import client as nova client = nova.Client( session=self.keystone.get_session()[0], version=self.choose_version(version), endpoint_override=self._get_endpoint(service_type)) return client @configure("neutron", default_version="2.0", default_service_type="network", supported_versions=["2.0"]) class Neutron(OSClient): def create_client(self, version=None, service_type=None): """Return neutron client.""" from neutronclient.neutron import client as neutron kw_args = {} if self.credential.endpoint_type: kw_args["endpoint_type"] = self.credential.endpoint_type client = neutron.Client( self.choose_version(version), session=self.keystone.get_session()[0], endpoint_override=self._get_endpoint(service_type), **kw_args) return client @configure("glance", default_version="2", default_service_type="image", supported_versions=["1", "2"]) class Glance(OSClient): def create_client(self, version=None, service_type=None): """Return glance client.""" import glanceclient as glance session = self.keystone.get_session()[0] client = glance.Client( version=self.choose_version(version), endpoint_override=self._get_endpoint(service_type), session=session) return client @configure("heat", 
default_version="1", default_service_type="orchestration", supported_versions=["1"]) class Heat(OSClient): def create_client(self, version=None, service_type=None): """Return heat client.""" from heatclient import client as heat # ToDo: Remove explicit endpoint_type or interface initialization # when heatclient no longer uses it. kw_args = {} if self.credential.endpoint_type: kw_args["endpoint_type"] = self.credential.endpoint_type kw_args["interface"] = self.credential.endpoint_type client = heat.Client( self.choose_version(version), session=self.keystone.get_session()[0], # Remove endpoint once requirement is python-heatclient>=1.6 endpoint=self._get_endpoint(service_type), endpoint_override=self._get_endpoint(service_type), **kw_args) return client @configure("cinder", default_version="2", default_service_type="volumev2", supported_versions=["1", "2"]) class Cinder(OSClient): def create_client(self, version=None, service_type=None): """Return cinder client.""" from cinderclient import client as cinder client = cinder.Client( self.choose_version(version), session=self.keystone.get_session()[0], endpoint_override=self._get_endpoint(service_type)) return client @configure("manila", default_version="1", default_service_type="share", supported_versions=["1", "2"]) class Manila(OSClient): def create_client(self, version=None, service_type=None): """Return manila client.""" from manilaclient import client as manila manila_client = manila.Client( self.choose_version(version), session=self.keystone.get_session()[0], service_catalog_url=self._get_endpoint(service_type)) return manila_client @configure("ceilometer", default_version="2", default_service_type="metering", supported_versions=["1", "2"]) class Ceilometer(OSClient): def create_client(self, version=None, service_type=None): """Return ceilometer client.""" from ceilometerclient import client as ceilometer client = ceilometer.get_client( self.choose_version(version), session=self.keystone.get_session()[0], 
endpoint_override=self._get_endpoint(service_type)) return client @configure("gnocchi", default_service_type="metric", default_version="1", supported_versions=["1"]) class Gnocchi(OSClient): def create_client(self, version=None, service_type=None): """Return gnocchi client.""" # NOTE(sumantmurke): gnocchiclient requires keystoneauth1 for # authenticating and creating a session. from gnocchiclient import client as gnocchi service_type = self.choose_service_type(service_type) sess = self.keystone.get_session()[0] gclient = gnocchi.Client(version=self.choose_version( version), session=sess, service_type=service_type) return gclient @configure("ironic", default_version="1", default_service_type="baremetal", supported_versions=["1"]) class Ironic(OSClient): def create_client(self, version=None, service_type=None): """Return Ironic client.""" from ironicclient import client as ironic client = ironic.get_client( self.choose_version(version), session=self.keystone.get_session()[0], endpoint=self._get_endpoint(service_type)) return client @configure("sahara", default_version="1.1", supported_versions=["1.0", "1.1"], default_service_type="data-processing") class Sahara(OSClient): # NOTE(andreykurilin): saharaclient supports "1.0" version and doesn't # support "1". `choose_version` and `validate_version` methods are written # as a hack to covert 1 -> 1.0, which can simplify setting saharaclient # for end-users. 
def choose_version(self, version=None): return float(super(Sahara, self).choose_version(version)) @classmethod def validate_version(cls, version): super(Sahara, cls).validate_version(float(version)) def create_client(self, version=None, service_type=None): """Return Sahara client.""" from saharaclient import client as sahara client = sahara.Client( self.choose_version(version), session=self.keystone.get_session()[0], sahara_url=self._get_endpoint(service_type)) return client @configure("zaqar", default_version="1.1", default_service_type="messaging", supported_versions=["1", "1.1"]) class Zaqar(OSClient): def choose_version(self, version=None): # zaqarclient accepts only int or float obj as version return float(super(Zaqar, self).choose_version(version)) def create_client(self, version=None, service_type=None): """Return Zaqar client.""" from zaqarclient.queues import client as zaqar client = zaqar.Client(url=self._get_endpoint(), version=self.choose_version(version), session=self.keystone.get_session()[0]) return client @configure("murano", default_version="1", default_service_type="application-catalog", supported_versions=["1"]) class Murano(OSClient): def create_client(self, version=None, service_type=None): """Return Murano client.""" from muranoclient import client as murano client = murano.Client(self.choose_version(version), endpoint=self._get_endpoint(service_type), token=self.keystone.auth_ref.auth_token) return client @configure("designate", default_version="1", default_service_type="dns", supported_versions=["1", "2"]) class Designate(OSClient): def create_client(self, version=None, service_type=None): """Return designate client.""" from designateclient import client version = self.choose_version(version) api_url = self._get_endpoint(service_type) api_url += "/v%s" % version session = self.keystone.get_session()[0] if version == "2": return client.Client(version, session=session, endpoint_override=api_url) return client.Client(version, session=session, 
endpoint=api_url) @configure("trove", default_version="1.0", supported_versions=["1.0"], default_service_type="database") class Trove(OSClient): def create_client(self, version=None, service_type=None): """Returns trove client.""" from troveclient import client as trove client = trove.Client(self.choose_version(version), session=self.keystone.get_session()[0], endpoint=self._get_endpoint(service_type)) return client @configure("mistral", default_service_type="workflowv2") class Mistral(OSClient): def create_client(self, service_type=None): """Return Mistral client.""" from mistralclient.api import client as mistral client = mistral.client( mistral_url=self._get_endpoint(service_type), service_type=self.choose_service_type(service_type), auth_token=self.keystone.auth_ref.auth_token) return client @configure("swift", default_service_type="object-store") class Swift(OSClient): def create_client(self, service_type=None): """Return swift client.""" from swiftclient import client as swift auth_token = self.keystone.auth_ref.auth_token client = swift.Connection(retries=1, preauthurl=self._get_endpoint(service_type), preauthtoken=auth_token, insecure=self.credential.insecure, cacert=self.credential.cacert, user=self.credential.username, tenant_name=self.credential.tenant_name, ) return client @configure("ec2") class EC2(OSClient): def create_client(self): """Return ec2 client.""" import boto kc = self.keystone() if kc.version != "v2.0": raise exceptions.RallyException( _("Rally EC2 benchmark currently supports only" "Keystone version 2")) ec2_credential = kc.ec2.create(user_id=kc.auth_user_id, tenant_id=kc.auth_tenant_id) client = boto.connect_ec2_endpoint( url=self._get_endpoint(), aws_access_key_id=ec2_credential.access, aws_secret_access_key=ec2_credential.secret, is_secure=self.credential.insecure) return client @configure("monasca", default_version="2_0", default_service_type="monitoring", supported_versions=["2_0"]) class Monasca(OSClient): def create_client(self, 
version=None, service_type=None): """Return monasca client.""" from monascaclient import client as monasca # Change this to use session once it's supported by monascaclient client = monasca.Client( self.choose_version(version), self._get_endpoint(service_type), token=self.keystone.auth_ref.auth_token, timeout=CONF.openstack_client_http_timeout, insecure=self.credential.insecure, **self._get_auth_info(project_name_key="tenant_name")) return client @configure("senlin", default_version="1", default_service_type="clustering", supported_versions=["1"]) class Senlin(OSClient): def create_client(self, version=None, service_type=None): """Return senlin client.""" from senlinclient import client as senlin return senlin.Client( self.choose_version(version), **self._get_auth_info(project_name_key="project_name", cacert_key="cert", endpoint_type="interface")) @configure("magnum", default_version="1", supported_versions=["1"], default_service_type="container-infra",) class Magnum(OSClient): def create_client(self, version=None, service_type=None): """Return magnum client.""" from magnumclient import client as magnum api_url = self._get_endpoint(service_type) session = self.keystone.get_session()[0] return magnum.Client( session=session, interface=self.credential.endpoint_type, magnum_url=api_url) @configure("watcher", default_version="1", default_service_type="infra-optim", supported_versions=["1"]) class Watcher(OSClient): def create_client(self, version=None, service_type=None): """Return watcher client.""" from watcherclient import client as watcher_client watcher_api_url = self._get_endpoint( self.choose_service_type(service_type)) client = watcher_client.Client( self.choose_version(version), endpoint=watcher_api_url, session=self.keystone.get_session()[0]) return client class Clients(object): """This class simplify and unify work with OpenStack python clients.""" def __init__(self, credential, api_info=None): self.credential = credential self.api_info = api_info or {} 
self.cache = {} def __getattr__(self, client_name): """Lazy load of clients.""" return OSClient.get(client_name)(self.credential, self.api_info, self.cache) @classmethod def create_from_env(cls): creds = envutils.get_creds_from_env_vars() return cls( objects.Credential( creds["auth_url"], creds["admin"]["username"], creds["admin"]["password"], creds["admin"]["tenant_name"], endpoint_type=creds["endpoint_type"], user_domain_name=creds["admin"].get("user_domain_name"), project_domain_name=creds["admin"].get("project_domain_name"), endpoint=creds["endpoint"], region_name=creds["region_name"], https_cacert=creds["https_cacert"], https_insecure=creds["https_insecure"] )) def clear(self): """Remove all cached client handles.""" self.cache = {} def verified_keystone(self): """Ensure keystone endpoints are valid and then authenticate :returns: Keystone Client """ from keystoneclient import exceptions as keystone_exceptions try: # Ensure that user is admin if "admin" not in [role.lower() for role in self.keystone.auth_ref.role_names]: raise exceptions.InvalidAdminException( username=self.credential.username) except keystone_exceptions.Unauthorized: raise exceptions.InvalidEndpointsException() except keystone_exceptions.AuthorizationFailure: raise exceptions.HostUnreachableException( url=self.credential.auth_url) return self.keystone() def services(self): """Return available services names and types. :returns: dict, {"service_type": "service_name", ...} """ if "services_data" not in self.cache: services_data = {} available_services = self.keystone.service_catalog.get_endpoints() for stype in available_services.keys(): if stype in consts.ServiceType: services_data[stype] = consts.ServiceType[stype] else: services_data[stype] = "__unknown__" self.cache["services_data"] = services_data return self.cache["services_data"] rally-0.9.1/rally/exceptions.py0000664000567000056710000001702013073417720017667 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from rally.common.i18n import _ class RallyException(Exception): """Base Rally Exception To correctly use this class, inherit from it and define a "msg_fmt" property. That msg_fmt will get printf'd with the keyword arguments provided to the constructor. """ msg_fmt = _("%(message)s") def __init__(self, message=None, **kwargs): self.kwargs = kwargs if "%(message)s" in self.msg_fmt: kwargs.update({"message": message}) super(RallyException, self).__init__(self.msg_fmt % kwargs) def format_message(self): return six.text_type(self) class ImmutableException(RallyException): msg_fmt = _("This object is immutable.") class InvalidArgumentsException(RallyException): msg_fmt = _("Invalid arguments: '%(message)s'") class InvalidConfigException(RallyException): msg_fmt = _("This config has invalid schema: `%(message)s`") class InvalidRunnerResult(RallyException): msg_fmt = _("Type of result of `%(name)s` runner should be" " `base.ScenarioRunnerResult`. 
Got: `%(results_type)s`") class InvalidTaskException(InvalidConfigException): msg_fmt = _("Task config is invalid: `%(message)s`") class NotFoundScenarios(InvalidTaskException): msg_fmt = _("There are no benchmark scenarios with names: `%(names)s`.") class InvalidTaskConfig(InvalidTaskException): msg_fmt = _("Input task is invalid!\n\n" "Subtask %(name)s[%(pos)s] has wrong configuration" "\Subtask configuration:\n%(config)s\n" "\nReason:\n %(reason)s") class NotFoundException(RallyException): msg_fmt = _("The resource can not be found: %(message)s") class ThreadTimeoutException(RallyException): msg_fmt = _("Iteration interrupted due to timeout.") class PluginNotFound(NotFoundException): msg_fmt = _("There is no plugin with name: `%(name)s` in " "%(namespace)s namespace.") class PluginWithSuchNameExists(RallyException): msg_fmt = _("Plugin with such name: %(name)s already exists in " "%(namespace)s namespace. It's module allocates at " "%(existing_path)s. You are trying to add plugin whose module " "allocates at %(new_path)s.") class NoSuchConfigField(NotFoundException): msg_fmt = _("There is no field in the task config with name `%(name)s`.") class NoSuchRole(NotFoundException): msg_fmt = _("There is no role with name `%(role)s`.") class TaskNotFound(NotFoundException): msg_fmt = _("Task with uuid=%(uuid)s not found.") class DeploymentNotFound(NotFoundException): msg_fmt = _("Deployment %(deployment)s not found.") class DeploymentNameExists(RallyException): msg_fmt = _("Deployment name '%(deployment)s' already registered.") class DeploymentNotFinishedStatus(RallyException): msg_fmt = _("Deployment '%(name)s' (UUID=%(uuid)s) is in" " '%(status)s' status.") class DeploymentIsBusy(RallyException): msg_fmt = _("There are allocated resources for the deployment with " "uuid=%(uuid)s.") class RallyAssertionError(RallyException): msg_fmt = _("Assertion error: %(message)s") class ResourceNotFound(NotFoundException): msg_fmt = _("Resource with id=%(id)s not found.") class 
TimeoutException(RallyException): msg_fmt = _("Rally tired waiting for %(resource_type)s %(resource_name)s:" "%(resource_id)s to become %(desired_status)s current " "status %(resource_status)s") class GetResourceFailure(RallyException): msg_fmt = _("Failed to get the resource %(resource)s: %(err)s") class GetResourceNotFound(GetResourceFailure): msg_fmt = _("Resource %(resource)s is not found.") class GetResourceErrorStatus(GetResourceFailure): msg_fmt = _("Resource %(resource)s has %(status)s status.\n" "Fault: %(fault)s") class ScriptError(RallyException): msg_fmt = _("Script execution failed: %(message)s") class TaskInvalidStatus(RallyException): msg_fmt = _("Task `%(uuid)s` in `%(actual)s` status but `%(require)s` is " "required.") class ChecksumMismatch(RallyException): msg_fmt = _("Checksum mismatch for image: %(url)s") class InvalidAdminException(InvalidArgumentsException): msg_fmt = _("user '%(username)s' doesn't have 'admin' role") class InvalidEndpointsException(InvalidArgumentsException): msg_fmt = _("wrong keystone credentials specified in your endpoint" " properties. 
(HTTP 401)") class HostUnreachableException(InvalidArgumentsException): msg_fmt = _("unable to establish connection to the remote host: %(url)s") class InvalidScenarioArgument(RallyException): msg_fmt = _("Invalid scenario argument: '%(message)s'") class BenchmarkSetupFailure(RallyException): msg_fmt = _("Unable to setup benchmark: '%(message)s'") class ContextSetupFailure(RallyException): msg_fmt = _("Unable to setup context '%(ctx_name)s': '%(msg)s'") class ValidationError(RallyException): msg_fmt = _("Validation error: %(message)s") class NoNodesFound(RallyException): msg_fmt = _("There is no nodes matching filters: %(filters)r") class UnknownRelease(RallyException): msg_fmt = _("Unknown release '%(release)s'") class CleanUpException(RallyException): msg_fmt = _("Cleanup failed.") class ImageCleanUpException(CleanUpException): msg_fmt = _("Image Deletion Failed") class EncryptionTypeDeleteException(CleanUpException): msg_fmt = _("EncryptionType Deletion Failed") class IncompatiblePythonVersion(RallyException): msg_fmt = _("Incompatible python version found '%(version)s', " "required '%(required_version)s'") class WorkerNotFound(NotFoundException): msg_fmt = _("Worker %(worker)s could not be found") class WorkerAlreadyRegistered(RallyException): msg_fmt = _("Worker %(worker)s already registered") class SaharaClusterFailure(RallyException): msg_fmt = _("Sahara cluster %(name)s has failed to %(action)s. " "Reason: '%(reason)s'") class LiveMigrateException(RallyException): msg_fmt = _("Live Migration failed: %(message)s") class MigrateException(RallyException): msg_fmt = _("Migration failed: %(message)s") class InvalidHostException(RallyException): msg_fmt = _("Live Migration failed: %(message)s") class MultipleMatchesFound(RallyException): msg_fmt = _("Found multiple %(needle)s: %(haystack)s") def __init__(self, **kwargs): if "hint" in kwargs: self.msg_fmt += ". 
Hint: %(hint)s" super(MultipleMatchesFound, self).__init__(**kwargs) class SSHTimeout(RallyException): pass class SSHError(RallyException): pass class InvalidConnectionString(RallyException): msg_fmt = _("The connection string is not valid: %(message)s. Please " "check your connection string.") class DowngradeNotSupported(RallyException): msg_fmt = _("Database schema downgrade is not supported.") rally-0.9.1/rally/deployment/0000775000567000056710000000000013073420067017312 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/engines/0000775000567000056710000000000013073420067020742 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/engines/multihost.py0000664000567000056710000000662513073417716023364 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six from six.moves.urllib import parse import rally from rally.common import db from rally.common import objects from rally import consts from rally.deployment import engine @engine.configure(name="MultihostEngine") class MultihostEngine(engine.Engine): """Deploy multihost cloud with existing engines. Sample configuration: .. 
code-block:: json { "type": "MultihostEngine", "controller": { "type": "DevstackEngine", "provider": { "type": "DummyProvider" } }, "nodes": [ {"type": "Engine1", "config": "Config1"}, {"type": "Engine2", "config": "Config2"}, {"type": "Engine3", "config": "Config3"}, ] } If {controller_ip} is specified in configuration values, it will be replaced with controller address taken from credential returned by controller engine: .. code-block:: json ... "nodes": [ { "type": "DevstackEngine", "local_conf": { "GLANCE_HOSTPORT": "{controller_ip}:9292", ... """ def __init__(self, *args, **kwargs): super(MultihostEngine, self).__init__(*args, **kwargs) self.nodes = [] def _deploy_node(self, config): deployment = objects.Deployment(config=config, parent_uuid=self.deployment["uuid"]) deployer = engine.Engine.get_engine(config["type"], deployment) with deployer: credentials = deployer.make_deploy() return deployer, credentials def _update_controller_ip(self, obj): if isinstance(obj, dict): keyval = obj.items() elif isinstance(obj, list): keyval = enumerate(obj) for key, value in keyval: if isinstance(value, six.string_types): obj[key] = value.format(controller_ip=self.controller_ip) elif type(value) in (dict, list): self._update_controller_ip(value) def deploy(self): self.deployment.update_status(consts._DeployStatus.DEPLOY_SUBDEPLOY) self.controller, self.credentials = self._deploy_node( self.config["controller"]) credential = self.credentials[0] self.controller_ip = parse.urlparse(credential.auth_url).hostname for node_config in self.config["nodes"]: self._update_controller_ip(node_config) self.nodes.append(self._deploy_node(node_config)[0]) return self.credentials def cleanup(self): subdeploys = db.deployment_list(parent_uuid=self.deployment["uuid"]) for subdeploy in subdeploys: rally.api.Deployment.destroy(subdeploy["uuid"]) rally-0.9.1/rally/deployment/engines/lxc.py0000664000567000056710000001513613073417720022112 0ustar jenkinsjenkins00000000000000# Copyright 2013: 
Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import netaddr from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally.deployment import engine from rally.deployment.serverprovider import provider from rally.deployment.serverprovider.providers import lxc from rally import exceptions LOG = logging.getLogger(__name__) START_SCRIPT = "start.sh" def get_script_path(name): return os.path.join(os.path.abspath(os.path.dirname(__file__)), "lxc", name) @engine.configure(name="LxcEngine") class LxcEngine(engine.Engine): """Deploy with other engines in lxc containers. Sample configuration: .. code-block:: json { "type": "LxcEngine", "provider": { "type": "DummyProvider", "credentials": [{"user": "root", "host": "example.net"}] }, "distribution": "ubuntu", "release": "raring", "tunnel_to": ["10.10.10.10", "10.10.10.11"], "start_lxc_network": "10.1.1.0/24", "container_name_prefix": "devstack-node", "containers_per_host": 16, "start_script": "~/start.sh", "engine": { ... 
} } """ CONFIG_SCHEMA = { "type": "object", "properties": { "type": {"type": "string"}, "distribution": {"type": "string"}, "release": {"type": "string"}, "start_lxc_network": {"type": "string", "pattern": "^(\d+\.){3}\d+\/\d+$"}, "containers_per_host": {"type": "integer"}, "tunnel_to": {"type": "array", "items": {"type": "string", "pattern": "^(\d+\.){3}\d+$"}}, "container_name": {"type": "string"}, "provider": {"type": "object", "properties": {"type": {"type": "string"}}}, }, "required": ["type", "containers_per_host", "container_name", "provider"] } def validate(self): super(LxcEngine, self).validate() if "start_lxc_network" not in self.config: return lxc_net = netaddr.IPNetwork(self.config["start_lxc_network"]) num_containers = self.config["containers_per_host"] if lxc_net.size - 3 < num_containers: message = _("Network size is not enough for %d hosts.") raise exceptions.InvalidConfigException(message % num_containers) def _deploy_first(self, lxc_host, name, distribution, release): lxc_host.prepare() lxc_host.create_container(name, distribution, release) lxc_host.start_containers() items = lxc_host.get_server_object(name).get_credentials().items() # filter out all keys where value is None credentials = dict(filter(lambda x: x[1] is not None, items)) engine_config = self.config["engine"].copy() engine_config["provider"] = {"type": "DummyProvider", "credentials": [credentials]} deployment = objects.Deployment(config=engine_config, parent_uuid=self.deployment["uuid"]) deployer = engine.Engine.get_engine(engine_config["name"], deployment) deployer.deploy() lxc_host.stop_containers() def _get_provider(self): return provider.ProviderFactory.get_provider(self.config["provider"], self.deployment) @logging.log_deploy_wrapper(LOG.info, _("Create containers on host")) def deploy(self): name = self.config["container_name"] start_script = self.config.get("start_script", get_script_path(START_SCRIPT)) distribution = self.config["distribution"] release = 
self.config.get("release") network = self.config.get("start_lxc_network") if network: network = netaddr.IPNetwork(network) else: ip = "0" self.provider = self._get_provider() for server in self.provider.create_servers(): config = {"tunnel_to": self.config.get("tunnel_to", [])} if network: config["network"] = str(network) ip = str(network.ip).replace(".", "-") else: ip = "0" name_prefix = "%s-%s" % (name, ip) first_name = name_prefix + "-000" lxc_host = lxc.LxcHost(server, config) self._deploy_first(lxc_host, first_name, distribution, release) for i in range(1, self.config["containers_per_host"]): clone_name = "%s-%03d" % (name_prefix, i) lxc_host.create_clone(clone_name, first_name) lxc_host.start_containers() info = {"host": server.get_credentials(), "containers": lxc_host.containers, "forwarded_ports": lxc_host._port_cache.items(), "config": config} self.deployment.add_resource(provider_name="LxcEngine", info=info) for container in lxc_host.get_server_objects(): container.ssh.run("/bin/sh -e", stdin=open(start_script, "rb")) if network: network += 1 admin = objects.Credential("", "", "", "").to_dict( include_permission=True) return {"openstack": [{"admin": admin, "users": []}]} def cleanup(self): resources = self.deployment.get_resources() for resource in resources: server = provider.Server.from_credentials(resource.info["host"]) lxc_host = lxc.LxcHost(server, resource.info["config"]) lxc_host.containers = resource.info["containers"] lxc_host.destroy_containers() lxc_host.destroy_ports(resource.info["forwarded_ports"]) lxc_host.delete_tunnels() self.deployment.delete_resource(resource.id) self._get_provider().destroy_servers() rally-0.9.1/rally/deployment/engines/__init__.py0000664000567000056710000000000013073417716023050 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/engines/existing.py0000664000567000056710000001434613073417720023160 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common import objects from rally import consts from rally.deployment import engine @engine.configure(name="ExistingCloud") class ExistingCloud(engine.Engine): """Just use an existing OpenStack deployment without deploying anything. To use ExistingCloud, you should put credential information to the config: .. code-block:: json { "type": "ExistingCloud", "auth_url": "http://localhost:5000/v2.0/", "region_name": "RegionOne", "endpoint_type": "public", "admin": { "username": "admin", "password": "password", "tenant_name": "demo" }, "https_insecure": False, "https_cacert": "", } Or, using keystone v3 API endpoint: .. code-block:: json { "type": "ExistingCloud", "auth_url": "http://localhost:5000/v3/", "region_name": "RegionOne", "endpoint_type": "public", "admin": { "username": "admin", "password": "admin", "user_domain_name": "admin", "project_name": "admin", "project_domain_name": "admin", }, "https_insecure": False, "https_cacert": "", } To specify extra options use can use special "extra" parameter: .. 
code-block:: json { "type": "ExistingCloud", "auth_url": "http://localhost:5000/v2.0/", "region_name": "RegionOne", "endpoint_type": "public", "admin": { "username": "admin", "password": "password", "tenant_name": "demo" }, "https_insecure": False, "https_cacert": "", "extra": {"some_var": "some_value"} } """ CONFIG_SCHEMA = { "type": "object", "definitions": { "user": { "type": "object", "oneOf": [ { "description": "Keystone V2.0", "properties": { "username": {"type": "string"}, "password": {"type": "string"}, "tenant_name": {"type": "string"}, }, "required": ["username", "password", "tenant_name"], "additionalProperties": False }, { "description": "Keystone V3.0", "properties": { "username": {"type": "string"}, "password": {"type": "string"}, "domain_name": {"type": "string"}, "user_domain_name": {"type": "string"}, "project_name": {"type": "string"}, "project_domain_name": {"type": "string"}, }, "required": ["username", "password", "project_name"], "additionalProperties": False } ], } }, "properties": { "type": {"type": "string"}, "auth_url": {"type": "string"}, "region_name": {"type": "string"}, "endpoint": {"oneOf": [ # NOTE(andreykurilin): it looks like we do not use endpoint # var at all {"type": "string", "description": ""}, {"type": "null", "description": ""}]}, "endpoint_type": {"enum": [consts.EndpointType.ADMIN, consts.EndpointType.INTERNAL, consts.EndpointType.PUBLIC, None]}, "https_insecure": {"type": "boolean"}, "https_cacert": {"type": "string"}, "admin": {"$ref": "#/definitions/user"}, "users": { "type": "array", "items": {"$ref": "#/definitions/user"} }, "extra": {"type": "object", "additionalProperties": True} }, "required": ["type", "auth_url", "admin"], "additionalProperties": False } def _create_credential(self, common, user, permission): return objects.Credential( common["auth_url"], user["username"], user["password"], tenant_name=user.get("project_name", user.get("tenant_name")), permission=permission, region_name=common.get("region_name"), 
endpoint_type=common.get("endpoint_type"), endpoint=common.get("endpoint"), domain_name=user.get("domain_name"), user_domain_name=user.get("user_domain_name", None), project_domain_name=user.get("project_domain_name", None), https_insecure=common.get("https_insecure", False), https_cacert=common.get("https_cacert") ) def deploy(self): permissions = consts.EndpointPermission users = [self._create_credential(self.config, user, permissions.USER) for user in self.config.get("users", [])] users = [user.to_dict(include_permission=True) for user in users] admin = self._create_credential(self.config, self.config.get("admin"), permissions.ADMIN) admin = admin.to_dict(include_permission=True) return {"openstack": [{"admin": admin, "users": users}]} def cleanup(self): pass rally-0.9.1/rally/deployment/engines/devstack.py0000664000567000056710000001312513073417720023124 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import six from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally import consts from rally.deployment import engine from rally.deployment.serverprovider import provider LOG = logging.getLogger(__name__) DEVSTACK_REPO = "https://git.openstack.org/openstack-dev/devstack" DEVSTACK_BRANCH = "master" DEVSTACK_USER = "rally" def get_script(name): return open(os.path.join(os.path.abspath( os.path.dirname(__file__)), "devstack", name), "rb") def get_updated_server(server, **kwargs): credentials = server.get_credentials() credentials.update(kwargs) return provider.Server.from_credentials(credentials) @engine.configure(name="DevstackEngine") class DevstackEngine(engine.Engine): """Deploy Devstack cloud. Sample configuration: .. code-block:: json { "type": "DevstackEngine", "devstack_repo": "https://example.com/devstack/", "local_conf": { "ADMIN_PASSWORD": "secret" }, "provider": { "type": "ExistingServers", "credentials": [{"user": "root", "host": "10.2.0.8"}] } } """ CONFIG_SCHEMA = { "type": "object", "properties": { "type": {"type": "string"}, "provider": {"type": "object"}, "local_conf": {"type": "object"}, "localrc": {"type": "object"}, "devstack_repo": {"type": "string"}, "devstack_branch": {"type": "string"}, }, "required": ["type", "provider"] } def __init__(self, deployment): super(DevstackEngine, self).__init__(deployment) self.local_conf = { "DATABASE_PASSWORD": "rally", "RABBIT_PASSWORD": "rally", "SERVICE_TOKEN": "rally", "SERVICE_PASSWORD": "rally", "ADMIN_PASSWORD": "admin", "RECLONE": "yes", "SYSLOG": "yes", } if "localrc" in self.config: LOG.warning("'localrc' parameter is " "deprecated for deployment config " "since 0.1.2. 
Please use 'local_conf' instead.") if "local_conf" not in self.config: self.config["local_conf"] = self.config["localrc"] if "local_conf" in self.config: self.local_conf.update(self.config["local_conf"]) @logging.log_deploy_wrapper(LOG.info, _("Prepare server for devstack")) def prepare_server(self, server): script_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "devstack", "install.sh")) server.ssh.run("/bin/sh -e", stdin=open(script_path, "rb")) if server.password: server.ssh.run("chpasswd", stdin="rally:%s" % server.password) @logging.log_deploy_wrapper(LOG.info, _("Deploy devstack")) def deploy(self): self.servers = self.get_provider().create_servers() devstack_repo = self.config.get("devstack_repo", DEVSTACK_REPO) devstack_branch = self.config.get("devstack_branch", DEVSTACK_BRANCH) local_conf = "[[local|localrc]]\n" for k, v in self.local_conf.items(): if k.upper() == "ENABLE_PLUGIN": if isinstance(v, list): for plugin in v: local_conf += "enable_plugin %s\n" % (plugin) elif isinstance(v, six.string_types): local_conf += "enable_plugin %s\n" % (v) else: local_conf += "%s=%s\n" % (k, v) for server in self.servers: self.deployment.add_resource(provider_name="DevstackEngine", type="credentials", info=server.get_credentials()) cmd = "/bin/sh -e -s %s %s" % (devstack_repo, devstack_branch) server.ssh.run(cmd, stdin=get_script("install.sh")) devstack_server = get_updated_server(server, user=DEVSTACK_USER) devstack_server.ssh.run("cat > ~/devstack/local.conf", stdin=local_conf) devstack_server.ssh.run("~/devstack/stack.sh") admin_credential = objects.Credential( "http://%s:5000/v2.0/" % self.servers[0].host, "admin", self.local_conf["ADMIN_PASSWORD"], "admin", consts.EndpointPermission.ADMIN) return { "openstack": [ { "admin": admin_credential.to_dict(include_permission=True), "users": [] } ]} def cleanup(self): for resource in self.deployment.get_resources(type="credentials"): server = provider.Server.from_credentials(resource.info) devstack_server = 
get_updated_server(server, user=DEVSTACK_USER) devstack_server.ssh.run("~/devstack/unstack.sh") self.deployment.delete_resource(resource.id) rally-0.9.1/rally/deployment/engines/devstack/0000775000567000056710000000000013073420067022546 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/engines/devstack/install.sh0000664000567000056710000000150413073417716024557 0ustar jenkinsjenkins00000000000000#!/bin/sh DEVSTACK_REPO=$1 DEVSTACK_BRANCH=$2 if command -v apt-get then apt-get update apt-get install -y --force-yes git sudo elif command -v yum then yum install -y git sudo else echo "Unable to install git and sudo" exit 2 fi useradd rally -m || echo "Warning: user rally is already exists" >&2 mkdir -m 700 /home/rally/.ssh || true cp /root/.ssh/authorized_keys /home/rally/.ssh/ || true chown -R rally /home/rally/.ssh || true cat >> /etc/sudoers <&2 echo "export PATH=$PATH:/sbin/" >> /home/rally/.bashrc cd /home/rally if [ -d devstack ]; then cd devstack su rally -c "git pull" else su rally -c "git clone -b $DEVSTACK_BRANCH $DEVSTACK_REPO" fi rally-0.9.1/rally/deployment/engines/lxc/0000775000567000056710000000000013073420067021530 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/engines/lxc/start.sh0000664000567000056710000000066213073417716023234 0ustar jenkinsjenkins00000000000000#!/bin/sh IP=`ip -4 address show eth0 | grep inet | awk '{print $2}' | cut -d '/' -f -1` if [ -z $IP ] then echo "Error: ip address is not set" 1>&2 exit 1 fi sed -i "s/^my_ip.*/my_ip = $IP/" /etc/nova/nova.conf for SERVICE in nova-compute nova-network ; do start-stop-daemon -S -b --name $SERVICE --exec \ /usr/local/bin/$SERVICE -- \ --config-file /etc/nova/nova.conf \ --logfile /var/log/nova-$SERVICE done rally-0.9.1/rally/deployment/__init__.py0000664000567000056710000000000013073417716021420 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/0000775000567000056710000000000013073420067022373 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/__init__.py0000664000567000056710000000000013073417716024501 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/__main__.py0000664000567000056710000000123713073417716024477 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import serverprovider # noqa rally-0.9.1/rally/deployment/serverprovider/provider.py0000664000567000056710000001236013073417716024610 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import jsonschema import six from rally.common.plugin import plugin from rally.common import sshutils from rally.common import utils configure = plugin.configure class Server(utils.ImmutableMixin): """Represent information about created Server. 
Provider.create_servers should return list of instance of Server """ def __init__(self, host, user, key=None, password=None, port=22): self.host = host self.port = port self.user = user self.key = key self.password = password self.ssh = sshutils.SSH(user, host, key_filename=key, port=port, password=password) super(Server, self).__init__() def get_credentials(self): return { "host": self.host, "port": self.port, "user": self.user, "key": self.key, "password": self.password, } @classmethod def from_credentials(cls, creds): return cls(creds["host"], creds["user"], key=creds["key"], port=creds["port"], password=creds["password"]) class ResourceManager(object): """Supervise resources of a deployment. :param deployment: a dict with data on a deployment :param provider_name: a string of a name of the provider """ def __init__(self, deployment, provider_name): self.deployment = deployment self.provider_name = provider_name def create(self, info, type=None): """Create a resource. :param info: a payload of a resource :param type: a string of a resource or None :returns: a list of dicts with data on a resource """ return self.deployment.add_resource(self.provider_name, type=type, info=info) def get_all(self, type=None): """Return registered resources. :param type: a string to filter by a type, if is None, then returns all :returns: a list of dicts with data on a resource """ return self.deployment.get_resources(provider_name=self.provider_name, type=type) def delete(self, resource_id): """Delete a resource. :param resource_id: an ID of a resource """ self.deployment.delete_resource(resource_id) @plugin.base() @six.add_metaclass(abc.ABCMeta) class ProviderFactory(plugin.Plugin): """Base class of all server providers. It's a base class with self-discovery of subclasses. Each subclass has to implement create_servers() and destroy_servers() methods. By default, each server provider located as a submodule of the package rally.deployment.serverprovider.providers is auto-discovered. 
Each provider supervises its own resources using a ResourceManager. Example of usage with a simple provider: .. code-block:: python # Add new provider with __name__ == "A" class A(ProviderFactory): def __init__(self, deployment, config): # do something def create_servers(self, image_uuid, type_id, amount): # Create the requested number of servers of a given type from # the image passed as the first parameter. return [server_1, server_2, ...] def destroy_servers(self): # Destroy servers created in create_servers(). """ def __init__(self, deployment, config): self.deployment = deployment self.config = config self.resources = ResourceManager(deployment, self.__class__.__name__) self.validate() def validate(self): # TODO(miarmak): remove this checking, when config schema is done for # all available providers if hasattr(self, "CONFIG_SCHEMA"): jsonschema.validate(self.config, self.CONFIG_SCHEMA) # FIXME(boris-42): Remove this method. And explicit create provider @staticmethod def get_provider(config, deployment): """Returns instance of server provider by name.""" provider_cls = ProviderFactory.get(config["type"]) return provider_cls(deployment, config) @abc.abstractmethod def create_servers(self, image_uuid=None, type_id=None, amount=1): """Create VMs with chosen image. :param image_uuid: Identificator of image :param type_id: Vm type identificator :param amount: amount of required VMs :returns: list of Server instances. """ pass @abc.abstractmethod def destroy_servers(self): """Destroy already created vms.""" pass rally-0.9.1/rally/deployment/serverprovider/providers/0000775000567000056710000000000013073420067024410 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/providers/lxc.py0000664000567000056710000003346313073417716025570 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import re import time import netaddr from six import moves from rally.common.i18n import _ from rally.common import logging from rally.deployment.serverprovider import provider from rally import exceptions LOG = logging.getLogger(__name__) INET_ADDR_RE = re.compile(r" *inet ((\d+\.){3}\d+)\/\d+ .*") IPT_PORT_TEMPLATE = ("iptables -t nat -{action} PREROUTING -d {host_ip}" " -p tcp --syn --dport {port}" " -j DNAT --to-destination {ip}:22") def _get_script(filename): path = os.path.abspath(os.path.join(os.path.dirname(__file__), "lxc", filename)) return open(path, "rb") def _get_script_from_template(template_filename, **kwargs): template = _get_script(template_filename).read() return moves.StringIO(template.format(**kwargs)) class LxcHost(object): """Represent lxc enabled host.""" def __init__(self, server, config): """Initialize LxcHost object. 
:param server: Server object :param config: dictionary with following key/values: network ipv4 network for containers lxc_bridge bridge interface name (default lxcbr0) tunnel_to ip address for make tunnel to forward_ssh use ssh port forwarding (do not use for controller nodes) """ self.config = config if "network" in config: self.network = netaddr.IPNetwork(config["network"]) else: self.network = None self.server = server self.containers = [] self.path = "/var/lib/lxc/" self._port_cache = {} def _get_updated_server(self, **kwargs): credentials = self.server.get_credentials() credentials.update(kwargs) return provider.Server.from_credentials(credentials) @property def backingstore(self): if not hasattr(self, "_backingstore"): code = self.server.ssh.execute("df -t btrfs %s" % self.path)[0] self._backingstore = "" if code else "btrfs" return self._backingstore def prepare(self): if self.network: dhcp_start = str(self.network.network + 2) dhcp_end = str(self.network.network + self.network.size - 2) dhcp_range = ",".join([dhcp_start, dhcp_end]) values = { "USE_LXC_BRIDGE": "true", "LXC_BRIDGE": self.config.get("lxc_bridge", "lxcbr0"), "LXC_ADDR": self.network.network + 1, "LXC_NETMASK": self.network.netmask, "LXC_NETWORK": self.network, "LXC_DHCP_RANGE": dhcp_range, "LXC_DHCP_MAX": self.network.size - 3, } config = moves.StringIO() for name, value in values.items(): config.write("%(name)s=\"%(value)s\"\n" % {"name": name, "value": value}) config.seek(0) self.server.ssh.run("cat > /tmp/.lxc_default", stdin=config) self.server.ssh.run("/bin/sh", stdin=_get_script("lxc-install.sh")) self.create_local_tunnels() self.create_remote_tunnels() def create_local_tunnels(self): """Create tunnel on lxc host side.""" for tunnel_to in self.config["tunnel_to"]: script = _get_script_from_template("tunnel-local.sh", net=self.network, local=self.server.host, remote=tunnel_to) self.server.ssh.run("/bin/sh", stdin=script) def create_remote_tunnels(self): """Create tunnel on remote side.""" 
for tunnel_to in self.config["tunnel_to"]: script = _get_script_from_template("tunnel-remote.sh", net=self.network, local=tunnel_to, remote=self.server.host) server = self._get_updated_server(host=tunnel_to) server.ssh.run("/bin/sh", stdin=script) def delete_tunnels(self): for tunnel_to in self.config["tunnel_to"]: remote_server = self._get_updated_server(host=tunnel_to) remote_server.ssh.execute("ip tun del t%s" % self.network.ip) self.server.ssh.execute("ip tun del t%s" % tunnel_to) def get_ip(self, name): """Get container's ip by name.""" cmd = "lxc-attach -n %s ip addr list dev eth0" % name for attempt in range(1, 16): code, stdout = self.server.ssh.execute(cmd)[:2] if code: continue for line in stdout.splitlines(): m = INET_ADDR_RE.match(line) if m: return m.group(1) time.sleep(attempt) msg = _("Timeout waiting for ip address of container \"%s\"") % name raise exceptions.TimeoutException(msg) def get_port(self, ip): """Get forwarded ssh port for instance ip. Ssh port forwarding is used for containers access from outside. Any container is accessible by host's ip and forwarded port. E.g: 6.6.6.6:10023 -> 10.1.1.11:22 6.6.6.6:10024 -> 10.1.1.12:22 6.6.6.6:10025 -> 10.1.1.13:22 where 6.6.6.6 is host's ip. Ip->port association is stored in self._port_cache to reduce number of iptables calls. 
""" if not self._port_cache: self._port_cache = {} port_re = re.compile(r".+ tcp dpt:(\d+).*to:([\d\.]+)\:22") cmd = "iptables -n -t nat -L PREROUTING" code, out, err = self.server.ssh.execute(cmd) for l in out: m = port_re.match(l) if m: self._port_cache[m.group(2)] = int(m.group(1)) port = self._port_cache.get(ip) if port is None: if self._port_cache: port = max(self._port_cache.values()) + 1 else: port = 1222 self._port_cache[ip] = port cmd = IPT_PORT_TEMPLATE.format(host_ip=self.server.host, ip=ip, port=port, action="I") self.server.ssh.run(cmd) return port def create_container(self, name, distribution, release=None): cmd = ["lxc-create"] if self.backingstore == "btrfs": cmd += ["-B", "btrfs"] cmd += ["-n", name, "-t", distribution] if release: if distribution == "ubuntu": cmd += ["--", "-r", release] elif distribution == "debian": cmd = ["SUITE=%s" % release] + cmd self.server.ssh.run(" ".join(cmd)) self.configure_container(name) self.containers.append(name) def create_clone(self, name, source): cmd = ["lxc-clone"] if self.backingstore == "btrfs": cmd.append("--snapshot") cmd.extend(["-o", source, "-n", name]) self.server.ssh.execute(" ".join(cmd)) self.configure_container(name) self.containers.append(name) def configure_container(self, name): path = os.path.join(self.path, name, "rootfs") conf_script = _get_script("configure_container.sh") self.server.ssh.run("/bin/sh -e -s %s" % path, stdin=conf_script) def start_containers(self): for name in self.containers: self.server.ssh.run("lxc-start -d -n %s" % name) def stop_containers(self): for name in self.containers: self.server.ssh.run("lxc-stop -n %s" % name) def destroy_ports(self, ipports): script = "" for ip, port in ipports: cmd = IPT_PORT_TEMPLATE.format(action="D", port=port, ip=ip, host_ip=self.server.host) script += cmd + "\n" self.server.ssh.run("/bin/sh -e", stdin=script) def destroy_containers(self): for name in self.containers: self.server.ssh.run("lxc-stop -n %s" % name) 
self.server.ssh.run("lxc-destroy -n %s" % name) def get_server_object(self, name, wait=True): """Create Server object for container.""" ip = self.get_ip(name) if self.config.get("forward_ssh", False): server = self._get_updated_server(port=self.get_port(ip)) else: server = self._get_updated_server(host=ip) if wait: server.ssh.wait(timeout=300) return server def get_server_objects(self, wait=True): """Generate Server objects from all containers.""" for name in self.containers: yield self.get_server_object(name, wait) @provider.configure(name="LxcProvider") class LxcProvider(provider.ProviderFactory): """Provide lxc container(s) on given host. Sample configuration: .. code-block:: json { "type": "LxcProvider", "distribution": "ubuntu", "start_lxc_network": "10.1.1.0/24", "containers_per_host": 32, "tunnel_to": ["10.10.10.10"], "forward_ssh": false, "container_name_prefix": "rally-multinode-02", "host_provider": { "type": "ExistingServers", "credentials": [{"user": "root", "host": "host.net"}] } } """ CONFIG_SCHEMA = { "type": "object", "properties": { "type": {"type": "string"}, "distribution": {"type": "string"}, "release": {"type": "string"}, "start_lxc_network": {"type": "string", "pattern": "^(\d+\.){3}\d+\/\d+$"}, "containers_per_host": {"type": "integer"}, "forward_ssh": {"type": "boolean"}, "tunnel_to": {"type": "array", "items": {"type": "string", "pattern": "^(\d+\.){3}\d+$"}}, "container_name_prefix": {"type": "string"}, "host_provider": {"type": "object", "properties": {"type": {"type": "string"}}}, }, "required": ["type", "containers_per_host", "container_name_prefix", "host_provider"], } def validate(self): super(LxcProvider, self).validate() if "start_lxc_network" not in self.config: return lxc_net = netaddr.IPNetwork(self.config["start_lxc_network"]) num_containers = self.config["containers_per_host"] if lxc_net.size - 3 < num_containers: message = _("Network size is not enough for %d hosts.") raise exceptions.InvalidConfigException(message % 
num_containers) def get_host_provider(self): return provider.ProviderFactory.get_provider( self.config["host_provider"], self.deployment) @logging.log_deploy_wrapper(LOG.info, _("Create containers on host")) def create_servers(self): host_provider = self.get_host_provider() name_prefix = self.config["container_name_prefix"] hosts = [] if "start_lxc_network" in self.config: network = netaddr.IPNetwork(self.config["start_lxc_network"]) else: network = None distribution = self.config.get("distribution", "ubuntu") release = self.config.get("release") for server in host_provider.create_servers(): config = {"tunnel_to": self.config.get("tunnel_to", []), "forward_ssh": self.config.get("forward_ssh", False)} if network: config["network"] = str(network) host = LxcHost(server, config) host.prepare() ip = str(network.ip).replace(".", "-") if network else "0" first_name = "%s-000-%s" % (name_prefix, ip) host.create_container(first_name, distribution, release) for i in range(1, self.config.get("containers_per_host", 1)): name = "%s-%03d-%s" % (name_prefix, i, ip) host.create_clone(name, first_name) host.start_containers() hosts.append(host) if network: network += 1 servers = [] for host in hosts: for server in host.get_server_objects(): servers.append(server) info = {"host": host.server.get_credentials(), "config": host.config, "forwarded_ports": host._port_cache.items(), "container_names": host.containers} self.resources.create(info) return servers @logging.log_deploy_wrapper(LOG.info, _("Destroy host(s)")) def destroy_servers(self): for resource in self.resources.get_all(): server = provider.Server.from_credentials(resource["info"]["host"]) lxc_host = LxcHost(server, resource["info"]["config"]) lxc_host.containers = resource["info"]["container_names"] lxc_host.destroy_containers() lxc_host.destroy_ports(resource["info"]["forwarded_ports"]) lxc_host.delete_tunnels() self.resources.delete(resource["id"]) host_provider = self.get_host_provider() host_provider.destroy_servers() 
rally-0.9.1/rally/deployment/serverprovider/providers/__init__.py0000664000567000056710000000000013073417716026516 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/providers/openstack.py0000664000567000056710000002734713073417720026770 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import itertools import json import os import novaclient.exceptions from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally.deployment.serverprovider import provider from rally import exceptions from rally import osclients from rally.task import utils LOG = logging.getLogger(__name__) SERVER_TYPE = "server" KEYPAIR_TYPE = "keypair" def _get_address(s): if s.accessIPv4: return s.accessIPv4 if s.accessIPv6: return s.accessIPv6 for a in itertools.chain(s.addresses.get("public", []), *s.addresses.values()): return a["addr"] raise RuntimeError("No address found for %s" % s) def _cloud_init_success(s): status, stdout, stderr = s.ssh.execute( "cat /run/cloud-init/result.json") if status: LOG.debug("Failed to read result.json on %s: %s" % (s, stderr)) return False # Not finished (or no cloud-init) res = json.loads(stdout) if res["v1"]["errors"]: raise RuntimeError("cloud-init exited with errors on %s: %s" % (s, res["v1"]["errors"])) LOG.debug("cloud-init finished with no errors") return True # Success! 
@provider.configure(name="OpenStackProvider") class OpenStackProvider(provider.ProviderFactory): """Provide VMs using an existing OpenStack cloud. Sample configuration: .. code-block:: json { "type": "OpenStackProvider", "amount": 42, "user": "admin", "tenant": "admin", "password": "secret", "auth_url": "http://example.com/", "flavor_id": 2, "image": { "checksum": "75846dd06e9fcfd2b184aba7fa2b2a8d", "url": "http://example.com/disk1.img", "name": "Ubuntu Precise(added by rally)", "format": "qcow2", "userdata": "disable_root: false" }, "secgroup_name": "Rally" } """ CONFIG_SCHEMA = { "type": "object", "properties": { "type": {"type": "string"}, "deployment_name": {"type": "string"}, "amount": {"type": "integer"}, "user": {"type": "string"}, "nics": {"type": "array", "items": {"type": "object", "properties": {"net-id": {"type": "string"}}, "additionalProperties": False, "required": ["net-id"]}}, "password": {"type": "string"}, "tenant": {"type": "string"}, "auth_url": {"type": "string"}, "region": {"type": "string"}, "config_drive": {"type": "boolean"}, "flavor_id": {"type": "string"}, "wait_for_cloud_init": {"type": "boolean"}, "image": { "type": "object", "properties": { "checksum": {"type": "string"}, "name": {"type": "string"}, "format": {"type": "string"}, "userdata": {"type": "string"}, "url": {"type": "string"}, "uuid": {"type": "string"}, }, "additionalProperties": False, "anyOf": [ { "description": "Create Image", "required": ["name", "format", "url", "checksum"], }, { "description": "Existing image from checksum", "required": ["checksum"] }, { "description": "Existing image from uuid", "required": ["uuid"] } ] }, "secgroup_name": {"type": "string"}, }, "additionalProperties": False, "required": ["user", "password", "tenant", "deployment_name", "auth_url", "flavor_id", "image"] } def __init__(self, deployment, config): super(OpenStackProvider, self).__init__(deployment, config) user_credential = objects.Credential( config["auth_url"], config["user"], 
config["password"], config["tenant"], region_name=config.get("region")) clients = osclients.Clients(user_credential) self.nova = clients.nova() self.sg = None try: self.glance = clients.glance() except KeyError: self.glance = None LOG.warning(_("Glance endpoint not available in service catalog" ", only existing images can be used")) def get_image_uuid(self): """Get image uuid. Download image if necessary.""" image_uuid = self.config["image"].get("uuid") if image_uuid: return image_uuid else: if not self.glance: raise exceptions.InvalidConfigException( "If glance is not available in the service catalog" " obtained by the openstack server provider, then" " images cannot be uploaded so the uuid of an" " existing image must be specified in the" " deployment config." ) for image in self.glance.images.list(): if image.checksum == self.config["image"]["checksum"]: LOG.info(_("Found image with appropriate checksum. Using it.")) return image.id LOG.info(_("Downloading new image %s") % self.config["image"]["url"]) image = self.glance.images.create( name=self.config["image"]["name"], copy_from=self.config["image"]["url"], disk_format=self.config["image"]["format"], container_format="bare") image.get() if image.checksum != self.config["image"]["checksum"]: raise exceptions.ChecksumMismatch(url=self.config["image"]["url"]) return image.id def get_userdata(self): userdata = self.config["image"].get("userdata") return userdata def create_keypair(self): public_key_path = self.config.get( "ssh_public_key_file", os.path.expanduser("~/.ssh/id_rsa.pub")) public_key = open(public_key_path, "r").read().strip() key_name = self.config["deployment_name"] + "-key" try: key = self.nova.keypairs.find(name=key_name) self.nova.keypairs.delete(key.id) except novaclient.exceptions.NotFound: pass keypair = self.nova.keypairs.create(key_name, public_key) self.resources.create({"id": keypair.id}, type=KEYPAIR_TYPE) return keypair, public_key_path def get_nics(self): return self.config.get("nics") 
def create_security_group_and_rules(self): sec_group_name = self.config.get("secgroup_name", "rally_security_group") rule_params = { "cidr": "0.0.0.0", "from_port": 0, "to_port": 0, "ip_protocol": "tcp" } self.sg = self.nova.security_groups.create(sec_group_name, sec_group_name) self.nova.security_group_rules.create( self.sg.id, **rule_params) def create_servers(self): """Create VMs with chosen image.""" image_uuid = self.get_image_uuid() userdata = self.get_userdata() flavor = self.config["flavor_id"] nics = self.get_nics() keypair, public_key_path = self.create_keypair() self.create_security_group_and_rules() sg_args = {"security_groups": [self.sg.name]} if self.sg else {} os_servers = [] for i in range(self.config.get("amount", 1)): name = "%s-%d" % (self.config["deployment_name"], i) server = self.nova.servers.create( name, image_uuid, flavor, nics=nics, key_name=keypair.name, userdata=userdata, config_drive=self.config.get("config_drive", False), **sg_args) os_servers.append(server) self.resources.create({"id": server.id}, type=SERVER_TYPE) kwargs = { "ready_statuses": ["ACTIVE"], "update_resource": utils.get_from_manager(), "timeout": 120, "check_interval": 5 } servers = [] for os_server in os_servers: os_server = utils.wait_for(os_server, **kwargs) server = provider.Server(host=_get_address(os_server), user="root", key=public_key_path) servers.append(server) for s in servers: s.ssh.wait(timeout=120, interval=5) if self.config.get("wait_for_cloud_init", False): for s in servers: utils.wait_for(s, is_ready=_cloud_init_success) return servers def delete_security_group(self): sg_name = self.config.get("secgroup_name", "rally_security_group") sgs = self.nova.security_groups.list(serch_opts={"name": sg_name}) if sgs: for secgroup in sgs: self.nova.security_groups.delete(secgroup.id) def destroy_servers(self): for resource in self.resources.get_all(type=SERVER_TYPE): try: self.nova.servers.delete(resource["info"]["id"]) except novaclient.exceptions.NotFound: 
LOG.warning("Nova instance %s not found, so not deleting." % resource["info"]["id"]) try: self.resources.delete(resource.id) except exceptions.ResourceNotFound: LOG.warning( "Instance resource record not found in DB, not removing." " Deployment: %(deployment)s Instance ID:%(id)s" " Instance Nova UUID:%(uuid)s" % dict(deployment=resource.deployment_uuid, id=resource.id, uuid=resource["info"]["id"] ) ) for resource in self.resources.get_all(type=KEYPAIR_TYPE): try: self.nova.keypairs.delete(resource["info"]["id"]) except novaclient.exceptions.NotFound: LOG.warning("Nova keypair %s not found, so not deleting." % resource["info"]["id"]) try: self.resources.delete(resource.id) except exceptions.ResourceNotFound: LOG.warning( "Keypair resource record not found in DB, not removing." " Deployment: %(deployment)s Keypair ID:%(id)s" " Keypair Name:%(name)s" % dict(deployment=resource.deployment_uuid, id=resource.id, name=resource["info"]["id"] ) ) finally: self.delete_security_group() rally-0.9.1/rally/deployment/serverprovider/providers/virsh.py0000664000567000056710000001121113073417716026120 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import subprocess import time import uuid import netaddr from rally.deployment.serverprovider import provider @provider.configure(name="VirshProvider") class VirshProvider(provider.ProviderFactory): """Create VMs from prebuilt templates. 
Sample configuration: .. code-block:: json { "type": "VirshProvider", "connection": "alex@performance-01", "template_name": "stack-01-devstack-template", "template_user": "ubuntu", "template_password": "password" } where : * connection - ssh connection to vms host * template_name - vm image template * template_user - vm user to launch devstack * template_password - vm password to launch devstack """ CONFIG_SCHEMA = { "type": "object", "properties": { "type": { "type": "string" }, "connection": { "type": "string", "pattern": "^.+@.+$" }, "template_name": { "type": "string" }, "template_user": { "type": "string" }, "template_password": { "type": "string" } }, "required": ["connection", "template_name", "template_user"] } def create_servers(self, image_uuid=None, type_id=None, amount=1): """Create VMs with chosen image. :param image_uuid: Indetificator of image :param amount: amount of required VMs Returns list of VMs uuids. """ return [self.create_vm(str(uuid.uuid4())) for i in range(amount)] def create_vm(self, vm_name): """Clone prebuilt VM template and start it.""" virt_url = self._get_virt_connection_url(self.config["connection"]) cmd = ["virt-clone", "--connect=%s" % virt_url, "-o", self.config["template_name"], "-n", vm_name, "--auto-clone"] subprocess.check_call(cmd) cmd = ["virsh", "--connect=%s" % virt_url, "start", vm_name] subprocess.check_call(cmd) self.resources.create({"name": vm_name}) return provider.Server( self._determine_vm_ip(vm_name), self.config["template_user"], password=self.config.get("template_password"), ) def destroy_servers(self): """Destroy already created vms.""" for resource in self.resources.get_all(): self.destroy_vm(resource["info"]["name"]) self.resources.delete(resource) def destroy_vm(self, vm_name): """Destroy single vm and delete all allocated resources.""" print("Destroy VM %s" % vm_name) vconnection = self._get_virt_connection_url(self.config["connection"]) cmd = ["virsh", "--connect=%s" % vconnection, "destroy", vm_name] 
subprocess.check_call(cmd) cmd = ["virsh", "--connect=%s" % vconnection, "undefine", vm_name, "--remove-all-storage"] subprocess.check_call(cmd) return True @staticmethod def _get_virt_connection_url(connection): """Format QEMU connection string from SSH url.""" return "qemu+ssh://%s/system" % connection def _determine_vm_ip(self, vm_name): ssh_opt = "-o StrictHostKeyChecking=no" script_path = os.path.dirname(__file__) + "/virsh/get_domain_ip.sh" cmd = ["scp", ssh_opt, script_path, "%s:~/get_domain_ip.sh" % self.config["connection"]] subprocess.check_call(cmd) tries = 0 ip = None while tries < 3 and not ip: cmd = ["ssh", ssh_opt, self.config["connection"], "./get_domain_ip.sh", vm_name] out = subprocess.check_output(cmd) try: ip = netaddr.IPAddress(out) except netaddr.core.AddrFormatError: ip = None tries += 1 time.sleep(10) # TODO(akscram): In case of None this method returns result "None". return str(ip) rally-0.9.1/rally/deployment/serverprovider/providers/virsh/0000775000567000056710000000000013073420067025543 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/providers/virsh/get_domain_ip.sh0000775000567000056710000000047213073417716030712 0ustar jenkinsjenkins00000000000000#!/bin/sh # Get the MAC address of the first interface. mac=$(virsh dumpxml $1 | xml2 | awk -F= '$1 == "/domain/devices/interface/mac/@address" {print $2; exit}') # Get the ip address assigned to this MAC from dnsmasq ip=$(awk -vmac=$mac '$2 == mac {print $3}' /var/lib/libvirt/dnsmasq/default.leases ) echo $ip rally-0.9.1/rally/deployment/serverprovider/providers/existing.py0000664000567000056710000000443513073417716026631 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.deployment.serverprovider import provider @provider.configure(name="ExistingServers") class ExistingServers(provider.ProviderFactory): """Just return endpoints from its own configuration. Sample configuration: .. code-block:: json { "type": "ExistingServers", "credentials": [{"user": "root", "host": "localhost"}] } """ CREDENTIALS_SCHEMA = { "type": "object", "properties": { "host": {"type": "string"}, "port": {"type": "integer"}, "user": {"type": "string"}, "key": {"type": "string"}, "password": {"type": "string"} }, "required": ["host", "user"] } CONFIG_SCHEMA = { "type": "object", "properties": { "type": {"type": "string"}, "credentials": { "type": "array", "items": CREDENTIALS_SCHEMA }, }, "required": ["credentials"] } def __init__(self, deployment, config): super(ExistingServers, self).__init__(deployment, config) self.credentials = config["credentials"] def create_servers(self): servers = [] for credential in self.credentials: servers.append(provider.Server(host=credential["host"], user=credential["user"], key=credential.get("key"), password=credential.get("password"), port=credential.get("port", 22))) return servers def destroy_servers(self): pass rally-0.9.1/rally/deployment/serverprovider/providers/cobbler.py0000664000567000056710000001013013073417716026374 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import six.moves.xmlrpc_client as xmlrpclib from rally.common.i18n import _ from rally.deployment.serverprovider import provider @provider.configure(name="CobblerProvider") class CobblerProvider(provider.ProviderFactory): """Creates servers via PXE boot from given cobbler selector. Cobbler selector may contain a combination of fields to select a number of system. It's user responsibility to provide selector which selects something. Since cobbler stores servers password encrypted the user needs to specify it configuration. All servers selected must have the same password. Sample configuration: .. 
code-block:: json { "type": "CobblerProvider", "host": "172.29.74.8", "user": "cobbler", "password": "cobbler", "system_password": "password" "selector": {"profile": "cobbler_profile_name", "owners": "user1"} } """ COBBLER_SELECTOR_SCHEMA = { "type": "object", "properties": { "profile": {"type": "string"}, "owners": {"type": "string"} }, "additionalProperties": False, } CONFIG_SCHEMA = { "type": "object", "properties": { "host": {"type": "string"}, "user": {"type": "string"}, "password": {"type": "string"}, "system_password": {"type": "string"}, "selector": COBBLER_SELECTOR_SCHEMA, }, "required": ["host", "user", "password", "selector"] } def __init__(self, deployment, config): super(CobblerProvider, self).__init__(deployment, config) self.config = config self.cobbler = xmlrpclib.Server(uri="http://%s/cobbler_api" % config["host"]) @staticmethod def ip_for_system(rendered_system): for key, value in rendered_system.items(): if "ip_address" in key and value: return value raise RuntimeError(_("No valid ip address found for system ") + "'%s'" % rendered_system["name"]) def create_by_rebooting(self, system_name): """Ask cobbler to re-boot server which is controlled by given system. 
:param system_name: cobbler object as seen in Cobbler WebGUI :returns: rally Server """ token = self.cobbler.login(self.config["user"], self.config["password"]) handle = self.cobbler.get_system_handle(system_name, token) self.cobbler.power_system(handle, "reboot", token) rendered = self.cobbler.get_system_as_rendered(system_name) return provider.Server(host=self.ip_for_system(rendered), user=rendered["power_user"], key=rendered.get("redhat_management_key"), password=self.config.get("system_password", ""), port=22) def create_servers(self): systems = self.cobbler.find_system(dict(self.config["selector"])) if not systems: raise RuntimeError(_("No associated systems selected by ") + "%s" % self.config["selector"]) servers = [self.create_by_rebooting(system) for system in systems] return servers def destroy_servers(self): """Don't implement this. Since bare metal servers are usually getting out of operation by powering off, it's better to allow the user to decide how to do it. """ pass rally-0.9.1/rally/deployment/serverprovider/providers/lxc/0000775000567000056710000000000013073420067025176 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/deployment/serverprovider/providers/lxc/lxc-install.sh0000664000567000056710000000211013073417716027765 0ustar jenkinsjenkins00000000000000#!/bin/sh apt-get update apt-get install -yq btrfs-tools #configure networking [ grep lxctun /etc/iproute2/rt_tables ] || echo "16 lxctun" >> /etc/iproute2/rt_tables sysctl net.ipv4.conf.all.rp_filter=0 sysctl net.ipv4.conf.default.rp_filter=0 for iface in `ls /sys/class/net/ | grep -v "lo"` ; do sysctl net.ipv4.conf."$iface".rp_filter=0 > /dev/null 2> /dev/null || true done # configure btrfs storage if [ ! -d "/var/lib/lxc" ]; then mkdir /var/lib/lxc if ! df -t btrfs /var/lib/lxc > /dev/null 2>&1; then echo "Creating btrfs volume." 
SIZE=`df -h /var | awk '/[0-9]%/{print $(NF-2)}'` truncate -s $SIZE /var/rally-btrfs-volume LOOPDEV=`losetup -f` losetup $LOOPDEV /var/rally-btrfs-volume mkfs.btrfs $LOOPDEV mount $LOOPDEV /var/lib/lxc fi fi # install lxc if [ dpkg -s lxc > /dev/null 2>&1 ]; then echo "Lxc already installed" else DEBIAN_FRONTEND='noninteractive' apt-get install -yq lxc service lxc stop cat /tmp/.lxc_default >> /etc/default/lxc || true rm /tmp/.lxc_default || true service lxc start fi rally-0.9.1/rally/deployment/serverprovider/providers/lxc/configure_container.sh0000664000567000056710000000014713073417716031566 0ustar jenkinsjenkins00000000000000#!/bin/sh CONTAINER=$1 mkdir -p $CONTAINER/root/.ssh cp ~/.ssh/authorized_keys $CONTAINER/root/.ssh/ rally-0.9.1/rally/deployment/serverprovider/providers/lxc/tunnel-local.sh0000664000567000056710000000061413073417716030137 0ustar jenkinsjenkins00000000000000rule="from {net} to {remote} lookup lxctun" ip rule del $rule 2> /dev/null ip rule add $rule iptables -t nat -D POSTROUTING -s {net} -d {remote} -j ACCEPT 2> /dev/null iptables -t nat -I POSTROUTING -s {net} -d {remote} -j ACCEPT ip tun del t{remote} ip tun add t{remote} mode ipip local {local} remote {remote} ip link set t{remote} up ip route add {remote}/32 dev t{remote} table lxctun exit 0 rally-0.9.1/rally/deployment/serverprovider/providers/lxc/tunnel-remote.sh0000664000567000056710000000023713073417716030341 0ustar jenkinsjenkins00000000000000ip tun del t{net.ip} ip tun add t{net.ip} mode ipip local {local} remote {remote} ip link set t{net.ip} up ip route add {net} dev t{net.ip} src {local} exit 0 rally-0.9.1/rally/deployment/engine.py0000664000567000056710000001334213073417716021143 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import jsonschema import six from rally.common.i18n import _, _LE from rally.common import logging from rally.common.plugin import plugin from rally import consts from rally.deployment.serverprovider import provider from rally import exceptions LOG = logging.getLogger(__name__) configure = plugin.configure # FIXME(boris-42): We should make decomposition of this class. # it should be called DeploymentManager # and it should just manages server providers and engines # engines class should have own base. @plugin.base() @six.add_metaclass(abc.ABCMeta) class Engine(plugin.Plugin): """Base class of all deployment engines. It's a base class with self-discovery of subclasses. Each subclass has to implement deploy() and cleanup() methods. By default, each engine located as a submodule of the package rally.deployment.engines is auto-discovered. Example of usage with a simple engine: # Add new engine with __name__ == "A" class A(Engine): def __init__(self, deployment): # do something def deploy(self): # Make a deployment and return OpenStack credentials. # The credentials may have either admin or ordinary users # permissions (depending on how the deploy engine has been # initialized). return [credential_1, credential_2, ...] def cleanup(self): # Destroy OpenStack deployment and free resource An instance of this class used as a context manager on any unsafe operations to a deployment. Any unhandled exceptions bring a status of the deployment to the inconsistent state. 
with Engine.get_engine("A", deployment) as deploy: # deploy is an instance of the A engine # perform all usage operations on your cloud """ def __init__(self, deployment): self.deployment = deployment @property def config(self): return self.deployment["config"] def validate(self, config=None): # TODO(sskripnick): remove this checking when config schema # is done for all available engines if hasattr(self, "CONFIG_SCHEMA"): jsonschema.validate(config or self.config, self.CONFIG_SCHEMA) # FIXME(boris-42): Get rid of this method def get_provider(self): if "provider" in self.config: return provider.ProviderFactory.get_provider( self.config["provider"], self.deployment) # FIXME(boris-42): Get rid of this method @staticmethod def get_engine(name, deployment): """Returns instance of a deploy engine with corresponding name.""" try: engine_cls = Engine.get(name) return engine_cls(deployment) except exceptions.PluginNotFound as e: LOG.error(_LE("Deployment %(uuid)s: Deploy engine for %(name)s " "does not exist.") % {"uuid": deployment["uuid"], "name": name}) deployment.update_status(consts.DeployStatus.DEPLOY_FAILED) raise exceptions.PluginNotFound( namespace=e.kwargs.get("namespace"), name=name) @abc.abstractmethod def deploy(self): """Deploy OpenStack cloud and return credentials.""" @abc.abstractmethod def cleanup(self): """Cleanup OpenStack deployment.""" @logging.log_deploy_wrapper(LOG.info, _("OpenStack cloud deployment.")) def make_deploy(self): self.deployment.set_started() credentials = self.deploy() self.deployment.set_completed() return credentials @logging.log_deploy_wrapper(LOG.info, _("Destroy cloud and free " "allocated resources.")) def make_cleanup(self): self.deployment.update_status(consts.DeployStatus.CLEANUP_STARTED) self.cleanup() provider = self.get_provider() if provider: provider.destroy_servers() self.deployment.update_status(consts.DeployStatus.CLEANUP_FINISHED) def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_traceback): 
if exc_type is not None: exc_info = None if not issubclass(exc_type, exceptions.InvalidArgumentsException): exc_info = (exc_type, exc_value, exc_traceback) LOG.error(_LE("Deployment %(uuid)s: Error has occurred into " "context of the deployment"), {"uuid": self.deployment["uuid"]}, exc_info=exc_info) status = self.deployment["status"] if status in (consts.DeployStatus.DEPLOY_INIT, consts.DeployStatus.DEPLOY_STARTED): self.deployment.update_status( consts.DeployStatus.DEPLOY_FAILED) elif status == consts.DeployStatus.DEPLOY_FINISHED: self.deployment.update_status( consts.DeployStatus.DEPLOY_INCONSISTENT) elif status == consts.DeployStatus.CLEANUP_STARTED: self.deployment.update_status( consts.DeployStatus.CLEANUP_FAILED) rally-0.9.1/rally/aas/0000775000567000056710000000000013073420067015676 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/aas/__init__.py0000664000567000056710000000007213073417716020015 0ustar jenkinsjenkins00000000000000# FIXME(andreykurilin): implement Rally-as-a-Service pass rally-0.9.1/rally/api.py0000664000567000056710000013176713073417720016276 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import re import sys import time import jinja2 import jinja2.meta import jsonschema from oslo_config import cfg from requests.packages import urllib3 from rally.common.i18n import _, _LI, _LE, _LW from rally.common import logging from rally.common import objects from rally.common.plugin import discover from rally.common import utils from rally.common import version as rally_version from rally import consts from rally.deployment import engine as deploy_engine from rally import exceptions from rally import osclients from rally.task import engine from rally.verification import context as vcontext from rally.verification import manager as vmanager from rally.verification import reporter as vreporter CONF = cfg.CONF LOG = logging.getLogger(__name__) class _Deployment(object): @classmethod def create(cls, config, name): """Create a deployment. :param config: a dict with deployment configuration :param name: a str represents a name of the deployment :returns: Deployment object """ try: deployment = objects.Deployment(name=name, config=config) except exceptions.DeploymentNameExists as e: if logging.is_debug(): LOG.exception(e) raise deployer = deploy_engine.Engine.get_engine( deployment["config"]["type"], deployment) try: deployer.validate() except jsonschema.ValidationError: LOG.error(_LE("Deployment %s: Schema validation error.") % deployment["uuid"]) deployment.update_status(consts.DeployStatus.DEPLOY_FAILED) raise with deployer: credentials = deployer.make_deploy() deployment.update_credentials(credentials) return deployment @classmethod def destroy(cls, deployment): """Destroy the deployment. :param deployment: UUID or name of the deployment """ # TODO(akscram): We have to be sure that there are no running # tasks for this deployment. # TODO(akscram): Check that the deployment have got a status that # is equal to "*->finished" or "deploy->inconsistent". 
deployment = objects.Deployment.get(deployment) try: deployer = deploy_engine.Engine.get_engine( deployment["config"]["type"], deployment) with deployer: deployer.make_cleanup() except exceptions.PluginNotFound: LOG.info(_("Deployment %s will be deleted despite exception") % deployment["uuid"]) for verifier in _Verifier.list(): _Verifier.delete(verifier.name, deployment["name"], force=True) deployment.delete() @classmethod def recreate(cls, deployment, config=None): """Performs a cleanup and then makes a deployment again. :param deployment: UUID or name of the deployment :param config: an optional dict with deployment config to update before redeploy """ deployment = objects.Deployment.get(deployment) deployer = deploy_engine.Engine.get_engine( deployment["config"]["type"], deployment) if config: if deployment["config"]["type"] != config["type"]: raise exceptions.RallyException( "Can't change deployment type.") try: deployer.validate(config) except jsonschema.ValidationError: LOG.error(_LE("Config schema validation error.")) raise with deployer: deployer.make_cleanup() if config: deployment.update_config(config) credentials = deployer.make_deploy() deployment.update_credentials(credentials) @classmethod def get(cls, deployment): """Get the deployment. :param deployment: UUID or name of the deployment :returns: Deployment instance """ return objects.Deployment.get(deployment) @classmethod def service_list(cls, deployment): """Get the services list. :param deployment: Deployment object :returns: Service list """ # TODO(astudenov): put this work into Credential plugins admin = deployment.get_credentials_for("openstack")["admin"] clients = osclients.Clients(objects.Credential(**admin)) return clients.services() @staticmethod def list(status=None, parent_uuid=None, name=None): """Get the deployments list. 
:returns: Deployment list """ return objects.Deployment.list(status, parent_uuid, name) @classmethod def check(cls, deployment): """Check keystone authentication and list all available services. :returns: Service list """ # TODO(astudenov): put this work into Credential plugins services = cls.service_list(deployment) users = deployment.get_credentials_for("openstack")["users"] for endpoint_dict in users: osclients.Clients(objects.Credential(**endpoint_dict)).keystone() return services class _Task(object): TASK_RESULT_SCHEMA = objects.task.TASK_RESULT_SCHEMA @staticmethod def list(**filters): return objects.Task.list(**filters) @staticmethod def get(task_id): return objects.Task.get(task_id) @staticmethod def get_detailed(task_id, extended_results=False): """Get detailed task data. :param task_id: str task UUID :param extended_results: whether to return task data as dict with extended results :returns: rally.common.db.sqlalchemy.models.Task :returns: dict """ task = objects.Task.get_detailed(task_id) if task and extended_results: task = dict(task) task["results"] = objects.Task.extend_results(task["results"]) return task @classmethod def render_template(cls, task_template, template_dir="./", **kwargs): """Render jinja2 task template to Rally input task. :param task_template: String that contains template :param template_dir: The path of directory contain template files :param kwargs: Dict with template arguments :returns: rendered template str """ def is_really_missing(mis, task_template): # NOTE(boris-42): Removing variables that have default values from # missing. 
Construction that won't be properly # checked is {% set x = x or 1} if re.search(mis.join(["{%\s*set\s+", "\s*=\s*", "[^\w]+"]), task_template): return False # NOTE(jlk): Also check for a default filter which can show up as # a missing variable if re.search(mis + "\s*\|\s*default\(", task_template): return False return True # NOTE(boris-42): We have to import builtins to get the full list of # builtin functions (e.g. range()). Unfortunately, # __builtins__ doesn't return them (when it is not # main module) from six.moves import builtins env = jinja2.Environment( loader=jinja2.FileSystemLoader(template_dir, encoding="utf8")) env.globals.update(cls.create_template_functions()) ast = env.parse(task_template) # NOTE(Julia Varigina): # Bug in jinja2.meta.find_undeclared_variables # # The method shows inconsistent behavior: # it does not return undeclared variables that appear # in included templates only (via {%- include "some_template.yaml"-%}) # and in the same time is declared in jinja2.Environment.globals. # # This is different for undeclared variables that appear directly # in task_template. The method jinja2.meta.find_undeclared_variables # returns an undeclared variable that is used in task_template # and is set in jinja2.Environment.globals. # # Despite this bug, jinja resolves values # declared in jinja2.Environment.globals for both types of undeclared # variables and successfully renders templates in both cases. 
required_kwargs = jinja2.meta.find_undeclared_variables(ast) missing = (set(required_kwargs) - set(kwargs) - set(dir(builtins)) - set(env.globals)) real_missing = [mis for mis in missing if is_really_missing(mis, task_template)] if real_missing: multi_msg = _("Please specify next template task arguments: %s") single_msg = _("Please specify template task argument: %s") raise TypeError((len(real_missing) > 1 and multi_msg or single_msg) % ", ".join(real_missing)) return env.from_string(task_template).render(**kwargs) @classmethod def create_template_functions(cls): def template_min(int1, int2): return min(int1, int2) def template_max(int1, int2): return max(int1, int2) def template_round(float1): return int(round(float1)) def template_ceil(float1): import math return int(math.ceil(float1)) return {"min": template_min, "max": template_max, "ceil": template_ceil, "round": template_round} @classmethod def create(cls, deployment, tag): """Create a task without starting it. Task is a list of benchmarks that will be called one by one, results of execution will be stored in DB. :param deployment: UUID or name of the deployment :param tag: tag for this task :returns: Task object """ deployment = objects.Deployment.get(deployment) if deployment["status"] != consts.DeployStatus.DEPLOY_FINISHED: raise exceptions.DeploymentNotFinishedStatus( name=deployment["name"], uuid=deployment["uuid"], status=deployment["status"]) return objects.Task(deployment_uuid=deployment["uuid"], tag=tag) @classmethod def validate(cls, deployment, config, task_instance=None): """Validate a task config against specified deployment. 
:param deployment: UUID or name of the deployment :param config: a dict with a task configuration """ deployment = objects.Deployment.get(deployment) task = task_instance or objects.Task( deployment_uuid=deployment["uuid"], temporary=True) benchmark_engine = engine.TaskEngine(config, task, deployment) benchmark_engine.validate() @classmethod def start(cls, deployment, config, task=None, abort_on_sla_failure=False): """Start a task. Task is a list of benchmarks that will be called one by one, results of execution will be stored in DB. :param deployment: UUID or name of the deployment :param config: a dict with a task configuration :param task: Task object. If None, it will be created :param abort_on_sla_failure: If set to True, the task execution will stop when any SLA check for it fails """ deployment = objects.Deployment.get(deployment) task = task or objects.Task(deployment_uuid=deployment["uuid"]) if task.is_temporary: raise ValueError(_( "Unable to run a temporary task. Please check your code.")) LOG.info("Benchmark Task %s on Deployment %s" % (task["uuid"], deployment["uuid"])) benchmark_engine = engine.TaskEngine( config, task, deployment, abort_on_sla_failure=abort_on_sla_failure) try: benchmark_engine.run() except Exception: deployment.update_status(consts.DeployStatus.DEPLOY_INCONSISTENT) raise @classmethod def abort(cls, task_uuid, soft=False, async=True): """Abort running task. :param task_uuid: The UUID of the task :type task_uuid: str :param soft: If set to True, task should be aborted after execution of current scenario, otherwise as soon as possible before all the scenario iterations finish [Default: False] :type soft: bool :param async: don't wait until task became in 'running' state [Default: False] :type async: bool """ if not async: current_status = objects.Task.get_status(task_uuid) if current_status in objects.Task.NOT_IMPLEMENTED_STAGES_FOR_ABORT: LOG.info(_LI("Task status is '%s'. 
Should wait until it became" " 'running'") % current_status) while (current_status in objects.Task.NOT_IMPLEMENTED_STAGES_FOR_ABORT): time.sleep(1) current_status = objects.Task.get_status(task_uuid) objects.Task.get(task_uuid).abort(soft=soft) if not async: LOG.info(_LI("Waiting until the task stops.")) finished_stages = [consts.TaskStatus.ABORTED, consts.TaskStatus.FINISHED, consts.TaskStatus.CRASHED] while objects.Task.get_status(task_uuid) not in finished_stages: time.sleep(1) @classmethod def delete(cls, task_uuid, force=False): """Delete the task. :param task_uuid: The UUID of the task :param force: If set to True, then delete the task despite to the status :raises TaskInvalidStatus: when the status of the task is not in FINISHED, FAILED or ABORTED and the force argument is not True """ if force: objects.Task.delete_by_uuid(task_uuid, status=None) elif objects.Task.get_status(task_uuid) in ( consts.TaskStatus.ABORTED, consts.TaskStatus.FINISHED, consts.TaskStatus.CRASHED): objects.Task.delete_by_uuid(task_uuid, status=None) else: objects.Task.delete_by_uuid( task_uuid, status=consts.TaskStatus.FINISHED) class _Verifier(object): @classmethod def list_plugins(cls, namespace=None): """List all plugins for verifiers management. :param namespace: Verifier plugin namespace """ return [{"name": p.get_name(), "namespace": p.get_namespace(), "description": p.get_info()["title"], "location": "%s.%s" % (p.__module__, p.__name__)} for p in vmanager.VerifierManager.get_all(namespace=namespace)] @classmethod def create(cls, name, vtype, namespace=None, source=None, version=None, system_wide=False, extra_settings=None): """Create a verifier. :param name: Verifier name :param vtype: Verifier plugin name :param namespace: Verifier plugin namespace. 
Should be specified when there are two verifier plugins with equal names but in different namespaces :param source: Path or URL to the repo to clone verifier from :param version: Branch, tag or commit ID to checkout before verifier installation :param system_wide: Whether or not to use the system-wide environment for verifier instead of a virtual environment :param extra_settings: Extra installation settings for verifier """ # check that the specified verifier type exists vmanager.VerifierManager.get(vtype, namespace=namespace) LOG.info("Creating verifier '%s'.", name) try: verifier = cls.get(name) except exceptions.ResourceNotFound: verifier = objects.Verifier.create( name=name, source=source, system_wide=system_wide, version=version, vtype=vtype, namespace=namespace, extra_settings=extra_settings) else: raise exceptions.RallyException( "Verifier with name '%s' already exists! Please, specify " "another name for verifier and try again." % verifier.name) properties = {} default_namespace = verifier.manager._meta_get("namespace") if not namespace and default_namespace: properties["namespace"] = default_namespace default_source = verifier.manager._meta_get("default_repo") if not source and default_source: properties["source"] = default_source if properties: verifier.update_properties(**properties) verifier.update_status(consts.VerifierStatus.INSTALLING) try: verifier.manager.install() except Exception: verifier.update_status(consts.VerifierStatus.FAILED) raise verifier.update_status(consts.VerifierStatus.INSTALLED) LOG.info("Verifier %s has been successfully created!", verifier) return verifier.uuid @staticmethod def get(verifier_id): """Get a verifier. :param verifier_id: Verifier name or UUID """ return objects.Verifier.get(verifier_id) @staticmethod def list(status=None): """List all verifiers. 
:param status: Status to filter verifiers by """ return objects.Verifier.list(status) @classmethod def delete(cls, verifier_id, deployment_id=None, force=False): """Delete a verifier. :param verifier_id: Verifier name or UUID :param deployment_id: Deployment name or UUID. If specified, only the deployment-specific data will be deleted for verifier :param force: Delete all stored verifier verifications. If deployment_id specified, only verifications of this deployment will be deleted """ verifier = cls.get(verifier_id) verifications = _Verification.list(verifier_id, deployment_id) if verifications: d_msg = ((" for deployment '%s'" % deployment_id) if deployment_id else "") if force: LOG.info("Deleting all verifications created by verifier " "%s%s.", verifier, d_msg) for verification in verifications: _Verification.delete(verification.uuid) else: raise exceptions.RallyException( "Failed to delete verifier {0} because there are stored " "verifier verifications{1}! Please, make sure that they " "are not important to you. Use 'force' flag if you would " "like to delete verifications{1} as well." .format(verifier, d_msg)) if deployment_id: LOG.info("Deleting deployment-specific data for verifier %s.", verifier) verifier.set_deployment(deployment_id) verifier.manager.uninstall() LOG.info("Deployment-specific data has been successfully deleted!") else: LOG.info("Deleting verifier %s.", verifier) verifier.manager.uninstall(full=True) objects.Verifier.delete(verifier_id) LOG.info("Verifier has been successfully deleted!") @classmethod def update(cls, verifier_id, system_wide=None, version=None, update_venv=False): """Update a verifier. 
:param verifier_id: Verifier name or UUID :param system_wide: Switch to using the system-wide environment :param version: Branch, tag or commit ID to checkout :param update_venv: Update the virtual environment for verifier """ if system_wide is None and version is None and not update_venv: # nothing to update raise exceptions.RallyException( "At least one of the following parameters should be " "specified: 'system_wide', 'version', 'update_venv'.") verifier = cls.get(verifier_id) LOG.info("Updating verifier %s.", verifier) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to update verifier %s because verifier is in '%s' " "status, but should be in '%s'." % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) system_wide_in_use = (system_wide or (system_wide is None and verifier.system_wide)) if update_venv and system_wide_in_use: raise exceptions.RallyException( "It is impossible to update the virtual environment for " "verifier %s when it uses the system-wide environment." % verifier) # store original status to set it again after updating or rollback original_status = verifier.status verifier.update_status(consts.VerifierStatus.UPDATING) properties = {} # store new verifier properties to update old ones sw_is_checked = False if version: properties["version"] = version backup = utils.BackupHelper() rollback_msg = ("Failed to update verifier %s. It has been " "rollbacked to the previous state." % verifier) backup.add_rollback_action(LOG.info, rollback_msg) backup.add_rollback_action(verifier.update_status, original_status) with backup(verifier.manager.repo_dir): verifier.manager.checkout(version) if system_wide_in_use: verifier.manager.check_system_wide() sw_is_checked = True if system_wide is not None: if system_wide == verifier.system_wide: LOG.info( "Verifier %s is already switched to system_wide=%s. 
" "Nothing will be changed.", verifier, verifier.system_wide) else: properties["system_wide"] = system_wide if not system_wide: update_venv = True # we need to install a virtual env else: # NOTE(andreykurilin): should we remove previously created # virtual environment?! if not sw_is_checked: verifier.manager.check_system_wide() if update_venv: backup = utils.BackupHelper() rollback_msg = ("Failed to update the virtual environment for " "verifier %s. It has been rollbacked to the " "previous state." % verifier) backup.add_rollback_action(LOG.info, rollback_msg) backup.add_rollback_action(verifier.update_status, original_status) with backup(verifier.manager.venv_dir): verifier.manager.install_venv() properties["status"] = original_status # change verifier status back verifier.update_properties(**properties) LOG.info("Verifier %s has been successfully updated!", verifier) return verifier.uuid @classmethod def configure(cls, verifier, deployment_id, extra_options=None, reconfigure=False): """Configure a verifier. :param verifier: Verifier object or (name or UUID) :param deployment_id: Deployment name or UUID :param extra_options: Extend verifier configuration with extra options :param reconfigure: Reconfigure verifier """ if not isinstance(verifier, objects.Verifier): verifier = cls.get(verifier) verifier.set_deployment(deployment_id) LOG.info( "Configuring verifier %s for deployment '%s' (UUID=%s).", verifier, verifier.deployment["name"], verifier.deployment["uuid"]) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to configure verifier %s for deployment '%s' " "(UUID=%s) because verifier is in '%s' status, but should be " "in '%s'." % (verifier, verifier.deployment["name"], verifier.deployment["uuid"], verifier.status, consts.VerifierStatus.INSTALLED)) msg = ("Verifier %s has been successfully configured for deployment " "'%s' (UUID=%s)!" 
% (verifier, verifier.deployment["name"], verifier.deployment["uuid"])) vm = verifier.manager if vm.is_configured(): LOG.info("Verifier is already configured!") if not reconfigure: if not extra_options: return vm.get_configuration() else: # Just add extra options to the config file. if logging.is_debug(): LOG.debug("Adding the following extra options: %s " "to verifier configuration.", extra_options) else: LOG.info( "Adding extra options to verifier configuration.") vm.extend_configuration(extra_options) LOG.info(msg) return vm.get_configuration() LOG.info("Reconfiguring verifier.") raw_config = vm.configure(extra_options=extra_options) LOG.info(msg) return raw_config @classmethod def override_configuration(cls, verifier_id, deployment_id, new_configuration): """Override verifier configuration (e.g., rewrite the config file). :param verifier_id: Verifier name or UUID :param deployment_id: Deployment name or UUID :param new_configuration: New configuration for verifier """ verifier = cls.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to override verifier configuration for deployment " "'%s' (UUID=%s) because verifier %s is in '%s' status, but " "should be in '%s'." % ( verifier.deployment["name"], verifier.deployment["uuid"], verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) verifier.set_deployment(deployment_id) LOG.info("Overriding configuration of verifier %s for deployment '%s' " "(UUID=%s).", verifier, verifier.deployment["name"], verifier.deployment["uuid"]) verifier.manager.override_configuration(new_configuration) LOG.info("Configuration of verifier %s has been successfully " "overridden for deployment '%s' (UUID=%s)!", verifier, verifier.deployment["name"], verifier.deployment["uuid"]) @classmethod def list_tests(cls, verifier_id, pattern=""): """List all verifier tests. 
:param verifier_id: Verifier name or UUID :param pattern: Pattern which will be used for matching """ verifier = cls.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to list verifier tests because verifier %s is in '%s' " "status, but should be in '%s'." % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) if pattern: verifier.manager.validate_args({"pattern": pattern}) return verifier.manager.list_tests(pattern) @classmethod def add_extension(cls, verifier_id, source, version=None, extra_settings=None): """Add a verifier extension. :param verifier_id: Verifier name or UUID :param source: Path or URL to the repo to clone verifier extension from :param version: Branch, tag or commit ID to checkout before installation of the verifier extension :param extra_settings: Extra installation settings for verifier extension """ verifier = cls.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to add verifier extension because verifier %s " "is in '%s' status, but should be in '%s'." % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) LOG.info("Adding extension for verifier %s.", verifier) # store original status to rollback it after failure original_status = verifier.status verifier.update_status(consts.VerifierStatus.EXTENDING) try: verifier.manager.install_extension(source, version=version, extra_settings=extra_settings) finally: verifier.update_status(original_status) LOG.info("Extension for verifier %s has been successfully added!", verifier) @classmethod def list_extensions(cls, verifier_id): """List all verifier extensions. 
:param verifier_id: Verifier name or UUID """ verifier = cls.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to list verifier extensions because verifier %s " "is in '%s' status, but should be in '%s.'" % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) return verifier.manager.list_extensions() @classmethod def delete_extension(cls, verifier_id, name): """Delete a verifier extension. :param verifier_id: Verifier name or UUID :param name: Verifier extension name """ verifier = cls.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to delete verifier extension because verifier %s " "is in '%s' status, but should be in '%s'." % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) LOG.info("Deleting extension for verifier %s.", verifier) verifier.manager.uninstall_extension(name) LOG.info("Extension for verifier %s has been successfully deleted!", verifier) class _Verification(object): @classmethod def start(cls, verifier_id, deployment_id, tags=None, **run_args): """Start a verification. :param verifier_id: Verifier name or UUID :param deployment_id: Deployment name or UUID :param tags: List of tags to assign them to verification :param run_args: Dictionary with run arguments for verification """ # TODO(ylobankov): Add an ability to skip tests by specifying only test # names (without test IDs). Also, it would be nice to # skip the whole test suites. For example, all tests # in the class or module. 
deployment = objects.Deployment.get(deployment_id) if deployment["status"] != consts.DeployStatus.DEPLOY_FINISHED: raise exceptions.DeploymentNotFinishedStatus( name=deployment["name"], uuid=deployment["uuid"], status=deployment["status"]) verifier = _Verifier.get(verifier_id) if verifier.status != consts.VerifierStatus.INSTALLED: raise exceptions.RallyException( "Failed to start verification because verifier %s is in '%s' " "status, but should be in '%s'." % ( verifier, verifier.status, consts.VerifierStatus.INSTALLED) ) verifier.set_deployment(deployment_id) if not verifier.manager.is_configured(): _Verifier.configure(verifier, deployment_id) # TODO(andreykurilin): save validation results to db verifier.manager.validate(run_args) verification = objects.Verification.create( verifier_id=verifier_id, deployment_id=deployment_id, tags=tags, run_args=run_args) LOG.info("Starting verification (UUID=%s) for deployment '%s' " "(UUID=%s) by verifier %s.", verification.uuid, verifier.deployment["name"], verifier.deployment["uuid"], verifier) verification.update_status(consts.VerificationStatus.RUNNING) context = {"config": verifier.manager._meta_get("context"), "run_args": run_args, "verification": verification, "verifier": verifier} try: with vcontext.ContextManager(context): results = verifier.manager.run(context) except Exception as e: verification.set_error(e) raise # TODO(ylobankov): Check that verification exists in the database # because users may delete verification before tests # finish. verification.finish(results.totals, results.tests) LOG.info("Verification (UUID=%s) has been successfully finished for " "deployment '%s' (UUID=%s)!", verification.uuid, verifier.deployment["name"], verifier.deployment["uuid"]) return verification, results @classmethod def rerun(cls, verification_uuid, deployment_id=None, failed=False, tags=None, concurrency=0): """Rerun tests from a verification. 
:param verification_uuid: Verification UUID :param deployment_id: Deployment name or UUID :param failed: Rerun only failed tests :param tags: List of tags to assign them to verification :param concurrency: The number of processes to use to run verifier tests """ # TODO(ylobankov): Improve this method in the future: put some # information about re-run in run_args. run_args = {} if concurrency: run_args["concurrency"] = concurrency verification = cls.get(verification_uuid) tests = verification.tests if failed: tests = [t for t, r in tests.items() if r["status"] == "fail"] if not tests: raise exceptions.RallyException( "There are no failed tests from verification (UUID=%s)." % verification_uuid) else: tests = tests.keys() deployment = _Deployment.get(deployment_id or verification.deployment_uuid) LOG.info("Re-running %stests from verification (UUID=%s) for " "deployment '%s' (UUID=%s).", "failed " if failed else "", verification.uuid, deployment["name"], deployment["uuid"]) return cls.start( verification.verifier_uuid, deployment["uuid"], load_list=tests, tags=tags, **run_args) @staticmethod def get(verification_uuid): """Get a verification. :param verification_uuid: Verification UUID """ return objects.Verification.get(verification_uuid) @staticmethod def list(verifier_id=None, deployment_id=None, tags=None, status=None): """List all verifications. :param verifier_id: Verifier name or UUID :param deployment_id: Deployment name or UUID :param tags: Tags to filter verifications by :param status: Status to filter verifications by """ return objects.Verification.list(verifier_id, deployment_id=deployment_id, tags=tags, status=status) @classmethod def delete(cls, verification_uuid): """Delete a verification. 
:param verification_uuid: Verification UUID """ verification = cls.get(verification_uuid) LOG.info("Deleting verification (UUID=%s).", verification.uuid) verification.delete() LOG.info("Verification has been successfully deleted!") @classmethod def report(cls, uuids, output_type, output_dest=None): """Generate a report for a verification or a few verifications. :param uuids: List of verifications UUIDs :param output_type: Plugin name of verification reporter :param output_dest: Destination for verification report """ verifications = [cls.get(uuid) for uuid in uuids] reporter_cls = vreporter.VerificationReporter.get(output_type) reporter_cls.validate(output_dest) LOG.info("Building '%s' report for the following verification(s): " "'%s'.", output_type, "', '".join(uuids)) result = vreporter.VerificationReporter.make(reporter_cls, verifications, output_dest) LOG.info(_LI("The report has been successfully built.")) return result @classmethod def import_results(cls, verifier_id, deployment_id, data, **run_args): """Import results of a test run into Rally database. :param verifier_id: Verifier name or UUID :param deployment_id: Deployment name or UUID :param data: Results data of a test run to import :param run_args: Dictionary with run arguments """ # TODO(aplanas): Create an external deployment if this is missing, as # required in the blueprint [1]. 
# [1] https://blueprints.launchpad.net/rally/+spec/verification-import verifier = _Verifier.get(verifier_id) verifier.set_deployment(deployment_id) LOG.info("Importing test results into a new verification for " "deployment '%s' (UUID=%s), using verifier %s.", verifier.deployment["name"], verifier.deployment["uuid"], verifier) verifier.manager.validate_args(run_args) verification = objects.Verification.create(verifier_id, deployment_id=deployment_id, run_args=run_args) verification.update_status(consts.VerificationStatus.RUNNING) try: results = verifier.manager.parse_results(data) except Exception as e: verification.set_failed(e) raise verification.finish(results.totals, results.tests) LOG.info("Test results have been successfully imported.") return verification, results class _DeprecatedAPIClass(object): """Deprecates direct usage of api classes.""" def __init__(self, cls): self._cls = cls def __getattr__(self, attr, default=None): LOG.warning(_LW("'%s' is deprecated since Rally 0.8.0 in favor of " "'rally.api.API' class.") % self._cls.__name__[1:]) return getattr(self._cls, attr, default) Deployment = _DeprecatedAPIClass(_Deployment) Task = _DeprecatedAPIClass(_Task) class API(object): CONFIG_SEARCH_PATHS = [sys.prefix + "/etc/rally", "~/.rally", "/etc/rally"] CONFIG_FILE_NAME = "rally.conf" def __init__(self, config_file=None, config_args=None, rally_endpoint=None, plugin_paths=None, skip_db_check=False): """Initialize Rally API instance :param config_file: Path to rally configuration file. If None, default path will be selected :type config_file: str :param config_args: Arguments for initialization current configuration :type config_args: list :param rally_endpoint: [Restricted]Rally endpoint connection string. 
:type rally_endpoint: str :param plugin_paths: Additional custom plugin locations :type plugin_paths: list :param skip_db_check: Allows to skip db revision check :type skip_db_check: bool """ if rally_endpoint: raise NotImplementedError(_LE("Sorry, but Rally-as-a-Service is " "not ready yet.")) try: config_files = ([config_file] if config_file else self._default_config_file()) CONF(config_args or [], project="rally", version=rally_version.version_string(), default_config_files=config_files) logging.setup("rally") if not CONF.get("log_config_append"): # The below two lines are to disable noise from request module. # The standard way should be we make such lots of settings on # the root rally. However current oslo codes doesn't support # such interface. So I choose to use a 'hacking' way to avoid # INFO logs from request module where user didn't give specific # log configuration. And we could remove this hacking after # oslo.log has such interface. LOG.debug( "INFO logs from urllib3 and requests module are hide.") requests_log = logging.getLogger("requests").logger requests_log.setLevel(logging.WARNING) urllib3_log = logging.getLogger("urllib3").logger urllib3_log.setLevel(logging.WARNING) LOG.debug("urllib3 insecure warnings are hidden.") for warning in ("InsecurePlatformWarning", "SNIMissingWarning", "InsecureRequestWarning"): warning_cls = getattr(urllib3.exceptions, warning, None) if warning_cls is not None: urllib3.disable_warnings(warning_cls) # NOTE(wtakase): This is for suppressing boto error logging. 
LOG.debug("ERROR log from boto module is hide.") boto_log = logging.getLogger("boto").logger boto_log.setLevel(logging.CRITICAL) # Set alembic log level to ERROR alembic_log = logging.getLogger("alembic").logger alembic_log.setLevel(logging.ERROR) except cfg.ConfigFilesNotFoundError as e: cfg_files = e.config_files raise exceptions.RallyException(_LE( "Failed to read configuration file(s): %s") % cfg_files) # Check that db is upgraded to the latest revision if not skip_db_check: self.check_db_revision() # Load plugins plugin_paths = plugin_paths or [] if "plugin_paths" in CONF: plugin_paths.extend(CONF.get("plugin_paths") or []) for path in plugin_paths: discover.load_plugins(path) # NOTE(andreykurilin): There is no reason to auto-discover API's. We # have only 4 classes, so let's do it in good old way - hardcode them:) self._deployment = _Deployment self._task = _Task self._verifier = _Verifier self._verification = _Verification def _default_config_file(self): for path in self.CONFIG_SEARCH_PATHS: abspath = os.path.abspath(os.path.expanduser(path)) fpath = os.path.join(abspath, self.CONFIG_FILE_NAME) if os.path.isfile(fpath): return [fpath] def check_db_revision(self): rev = rally_version.database_revision() # Check that db exists if rev["revision"] is None: raise exceptions.RallyException(_LE( "Database is missing. Create database by command " "`rally-manage db create'")) # Check that db is updated if rev["revision"] != rev["current_head"]: raise exceptions.RallyException(_LE( "Database seems to be outdated. 
Run upgrade from " "revision %(revision)s to %(current_head)s by command " "`rally-manage db upgrade'") % rev) @property def deployment(self): return self._deployment @property def task(self): return self._task @property def verifier(self): return self._verifier @property def verification(self): return self._verification rally-0.9.1/rally/consts.py0000664000567000056710000001504713073417720017026 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ There is a lot of situations when we would like to work with Enum or Const. E.g. work around Tasks. We would like to use Enum in DB to store status of task and also in migration that creates DB and in business logic to set some status so as to avoid copy paste or direct usage of enums values we create singletons for each enum. (e.g. 
TaskStatus) """ from rally.common import utils JSON_SCHEMA = "http://json-schema.org/draft-04/schema" class _TaskStatus(utils.ImmutableMixin, utils.EnumMixin): """Consts that represents task possible states.""" INIT = "init" VALIDATING = "validating" VALIDATED = "validated" VALIDATION_FAILED = "validation_failed" RUNNING = "running" FINISHED = "finished" CRASHED = "crashed" ABORTING = "aborting" SLA_FAILED = "sla_failed" SOFT_ABORTING = "soft_aborting" ABORTED = "aborted" PAUSED = "paused" class _SubtaskStatus(utils.ImmutableMixin, utils.EnumMixin): """Consts that represents task possible states.""" INIT = "init" VALIDATING = "validating" VALIDATED = "validated" VALIDATION_FAILED = "validation_failed" RUNNING = "running" FINISHED = "finished" CRASHED = "crashed" ABORTING = "aborting" SLA_FAILED = "sla_failed" SOFT_ABORTING = "soft_aborting" ABORTED = "aborted" PAUSED = "paused" class _DeployStatus(utils.ImmutableMixin, utils.EnumMixin): DEPLOY_INIT = "deploy->init" DEPLOY_STARTED = "deploy->started" DEPLOY_SUBDEPLOY = "deploy->subdeploy" DEPLOY_FINISHED = "deploy->finished" DEPLOY_FAILED = "deploy->failed" DEPLOY_INCONSISTENT = "deploy->inconsistent" CLEANUP_STARTED = "cleanup->started" CLEANUP_FINISHED = "cleanup->finished" CLEANUP_FAILED = "cleanup->failed" class _EndpointPermission(utils.ImmutableMixin, utils.EnumMixin): ADMIN = "admin" USER = "user" class _EndpointType(utils.ImmutableMixin, utils.EnumMixin): INTERNAL = "internal" ADMIN = "admin" PUBLIC = "public" class _Service(utils.ImmutableMixin, utils.EnumMixin): """OpenStack services names, by rally convention.""" NOVA = "nova" NOVA_NET = "nova-network" CINDER = "cinder" MANILA = "manila" EC2 = "ec2" GLANCE = "glance" CLOUD = "cloud" HEAT = "heat" KEYSTONE = "keystone" NEUTRON = "neutron" DESIGNATE = "designate" CEILOMETER = "ceilometer" MONASCA = "monasca" S3 = "s3" SENLIN = "senlin" TROVE = "trove" SAHARA = "sahara" SWIFT = "swift" MISTRAL = "mistral" MURANO = "murano" IRONIC = "ironic" GNOCCHI = 
"gnocchi" MAGNUM = "magnum" WATCHER = "watcher" class _ServiceType(utils.ImmutableMixin, utils.EnumMixin): """OpenStack services types, mapped to service names.""" VOLUME = "volume" SHARE = "share" EC2 = "ec2" IMAGE = "image" CLOUD = "cloudformation" ORCHESTRATION = "orchestration" IDENTITY = "identity" CLUSTERING = "clustering" COMPUTE = "compute" NETWORK = "network" DNS = "dns" METERING = "metering" MONITORING = "monitoring" S3 = "s3" DATABASE = "database" DATA_PROCESSING = "data-processing" DATA_PROCESSING_MOS = "data_processing" OBJECT_STORE = "object-store" WORKFLOW_EXECUTION = "workflowv2" APPLICATION_CATALOG = "application-catalog" BARE_METAL = "baremetal" METRIC = "metric" CONTAINER_INFRA = "container-infra" INFRA_OPTIM = "infra-optim" def __init__(self): self.__names = { self.CLUSTERING: _Service.SENLIN, self.COMPUTE: _Service.NOVA, self.VOLUME: _Service.CINDER, self.SHARE: _Service.MANILA, self.EC2: _Service.EC2, self.IMAGE: _Service.GLANCE, self.CLOUD: _Service.CLOUD, self.ORCHESTRATION: _Service.HEAT, self.IDENTITY: _Service.KEYSTONE, self.NETWORK: _Service.NEUTRON, self.DNS: _Service.DESIGNATE, self.METERING: _Service.CEILOMETER, self.MONITORING: _Service.MONASCA, self.S3: _Service.S3, self.DATABASE: _Service.TROVE, self.DATA_PROCESSING: _Service.SAHARA, self.DATA_PROCESSING_MOS: _Service.SAHARA, self.OBJECT_STORE: _Service.SWIFT, self.WORKFLOW_EXECUTION: _Service.MISTRAL, self.APPLICATION_CATALOG: _Service.MURANO, self.BARE_METAL: _Service.IRONIC, self.METRIC: _Service.GNOCCHI, self.CONTAINER_INFRA: _Service.MAGNUM, self.INFRA_OPTIM: _Service.WATCHER, } def __getitem__(self, service_type): """Mapping protocol to service names. 
:param name: str, service name :returns: str, service type """ return self.__names[service_type] class _HookStatus(utils.ImmutableMixin, utils.EnumMixin): """Hook result statuses.""" SUCCESS = "success" FAILED = "failed" VALIDATION_FAILED = "validation_failed" class _TagType(utils.ImmutableMixin, utils.EnumMixin): TASK = "task" SUBTASK = "subtask" VERIFICATION = "verification" class _VerifierStatus(utils.ImmutableMixin, utils.EnumMixin): """Verifier statuses.""" INIT = "init" INSTALLING = "installing" INSTALLED = "installed" UPDATING = "updating" EXTENDING = "extending" FAILED = "failed" # NOTE(andreykurilin): In case of updating these statuses, please do not forget # to update doc reference too class _VerificationStatus(utils.ImmutableMixin, utils.EnumMixin): """Verification statuses.""" INIT = "init" RUNNING = "running" FINISHED = "finished" FAILED = "failed" CRASHED = "crashed" TaskStatus = _TaskStatus() SubtaskStatus = _SubtaskStatus() DeployStatus = _DeployStatus() EndpointPermission = _EndpointPermission() ServiceType = _ServiceType() Service = _Service() EndpointType = _EndpointType() HookStatus = _HookStatus() TagType = _TagType() VerifierStatus = _VerifierStatus() VerificationStatus = _VerificationStatus() rally-0.9.1/rally/task/0000775000567000056710000000000013073420067016074 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/task/exporter.py0000775000567000056710000000244213073417716020332 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """ Exporter - its the mechanism for exporting rally tasks into some specified system by connection string. """ import abc import six from rally.common.plugin import plugin configure = plugin.configure @plugin.base() @six.add_metaclass(abc.ABCMeta) class Exporter(plugin.Plugin): def __init__(self, connection_string): self.connection_string = connection_string @abc.abstractmethod def export(self, task_uuid): """Export results of the task to the task storage. :param task_uuid: uuid of task results """ @abc.abstractmethod def validate(self): """Used to validate connection string.""" TaskExporter = Exporter rally-0.9.1/rally/task/__init__.py0000664000567000056710000000000013073417716020202 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/task/service.py0000664000567000056710000003314313073417720020114 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import inspect import six from rally.common.plugin import discover from rally.common.plugin import meta from rally import exceptions from rally.task import atomic def service(service_name, service_type, version, client_name=None): """Mark class as an implementation of partial service APIs. :param service_name: name of the service (e.g. Nova) :type service_name: str :param service_type: type of the service (e.g. Compute) :type service_type: str :param version: version of service (e.g. 
2.1) :type version: str :param client_name: name of client for service. If None, service_name will be used instead. :type client_name: str """ def wrapper(cls): cls._meta_init() cls._meta_set("name", service_name.lower()) cls._meta_set("type", service_type.lower()) cls._meta_set("version", str(version)) cls._meta_set("client_name", client_name or service_name) return cls return wrapper def compat_layer(original_impl): """Set class which should be unified to common interface :param original_impl: implementation of specific service API :type original_impl: cls """ def wrapper(cls): cls._meta_init() cls._meta_set("impl", original_impl) return cls return wrapper def should_be_overridden(func): """Mark method which should be overridden by subclasses.""" func.require_impl = True return func # TODO(andreykurilin): remove _DevNullDict and _ServiceWithoutAtomic when we # start support inner atomics class _DevNullDict(dict): """Do not keep anything.""" def __setitem__(self, key, value): pass class _ServiceWithoutAtomic(object): def __init__(self, service): self._service = service self._atomic_actions = _DevNullDict() def atomic_actions(self): return self._atomic_actions def __getattr__(self, name): return getattr(self._service, name) def __str__(self): return "'%s' without atomic actions" % self._service.__name__ def __repr__(self): return "<%s>" % str(self) def method_wrapper(func): """Wraps service's methods with some magic 1) Each service method should not be called with positional arguments, since it can lead mistakes in wrong order while writing version compatible code. We had such situation in KeystoneWrapper (see https://review.openstack.org/#/c/309470/ ): .. 
class ServiceMeta(type):
    """Metaclass enforcing the Service method contract.

    Common class of a specific Service should not be hardcoded for any
    version of the API: every public method of the common class is
    expected to be overridden by each versioned implementation, and each
    public method is wrapped with ``method_wrapper`` (keyword-only
    calling convention plus optional atomic suppression).
    """

    def __new__(mcs, name, parents, dct):
        # Wrap every public callable defined directly on the new class.
        for attr in dct:
            if not attr.startswith("_") and callable(dct[attr]):
                dct[attr] = method_wrapper(dct[attr])
        return super(ServiceMeta, mcs).__new__(mcs, name, parents, dct)

    def __init__(cls, name, bases, namespaces):
        super(ServiceMeta, cls).__init__(name, bases, namespaces)
        service_bases = [c for c in cls.__bases__
                         if type(c) == ServiceMeta]
        if not service_bases:
            # cls is a root service class; there is nothing to verify.
            return

        # inspect.getmembers is used instead of `namespaces` because
        # required methods can be inherited from distant ancestors.
        missing = set()
        for member_name, member in inspect.getmembers(cls):
            if (getattr(member, "require_impl", False)
                    # presence in `namespaces` means the method was
                    # (re)defined on cls itself, i.e. it IS overridden
                    and member_name not in namespaces):
                missing.add(member_name)

        if missing:
            raise exceptions.RallyException(
                "%s has wrong implementation. Implementation of specific "
                "version of API should override all required methods of "
                "base service class. Missed method(s): %s."
                % (cls.__name__, ", ".join(missing)))


@six.add_metaclass(ServiceMeta)
class Service(meta.MetaMixin):
    """Base helper class for Cloud Services (for example OpenStack services).

    Versioned implementations subclass this and register themselves via
    the ``service`` decorator, e.g.::

        @service("keystone", service_type="identity", version="2")
        class KeystoneV2Service(Service):
            @atomic.action_timer("keystone_v2.create_tenant")
            def create_tenant(self, tenant_name):
                return self.client.tenants.create(tenant_name)
    """

    def __init__(self, clients, name_generator=None, atomic_inst=None):
        """Initialize service class.

        :param clients: an instance of rally.osclients.Clients
        :param name_generator: a method for generating random names.
            Usually it is the generate_random_name method of a
            RandomNameGeneratorMixin instance.
        :param atomic_inst: an object to store atomic actions. Usually it
            is the `_atomic_actions` property of an ActionTimerMixin
            instance.
        """
        self._clients = clients
        self._name_generator = name_generator
        # Fall back to a private timer storage when none is shared with us.
        self._atomic_actions = (
            atomic.ActionTimerMixin().atomic_actions()
            if atomic_inst is None else atomic_inst)
        self.version = None
        if self._meta_is_inited(raise_exc=False):
            self.version = self._meta_get("version")

    def generate_random_name(self):
        """Return a random name produced by the configured generator."""
        if not self._name_generator:
            raise exceptions.RallyException(
                "You cannot use `generate_random_name` method, until you "
                "initialize class with `name_generator` argument.")
        return self._name_generator()
class UnifiedService(Service):
    """Base helper class for the unification layer over Cloud Services.

    A subclass decorated with ``compat_layer`` adapts one concrete
    versioned Service to the common interface; an undecorated subclass is
    the public entry point that discovers and delegates to the proper
    adapter (see the Identity/UnifiedKeystoneV2/UnifiedKeystoneV3 example
    in the original module docs).
    """

    def __init__(self, clients, name_generator=None, atomic_inst=None):
        """Initialize service class.

        :param clients: an instance of rally.osclients.Clients
        :param name_generator: a method for generating random names.
            Usually it is the generate_random_name method of a
            RandomNameGeneratorMixin instance.
        :param atomic_inst: an object to store atomic actions. Usually it
            is the `_atomic_actions` property of an ActionTimerMixin
            instance.
        """
        super(UnifiedService, self).__init__(clients, name_generator,
                                             atomic_inst)

        if self._meta_is_inited(raise_exc=False):
            # This class is a compatibility layer for one specific Service.
            adapter_cls = self._meta_get("impl")
            self._impl = adapter_cls(self._clients, self._name_generator,
                                     self._atomic_actions)
            self.version = adapter_cls._meta_get("version")
        else:
            # This class is a public entry point; pick an implementation.
            adapter_cls, _candidates = self.discover_impl()
            if not adapter_cls:
                raise exceptions.RallyException(
                    "There is no proper implementation for %s."
                    % self.__class__.__name__)
            self._impl = adapter_cls(self._clients, self._name_generator,
                                     self._atomic_actions)
            self.version = self._impl.version

    def discover_impl(self):
        """Discover implementation for service.

        One Service can have several implementations (not only in terms
        of versioning; e.g. OpenStack networking is served by either
        Nova-network or Neutron), each of which may support several
        versions. Chooses the proper helper class based on the services
        available in the cloud and on the expected version.

        Returns a tuple of (implementation class or None, dict of all
        discovered candidates).
        """
        # Find all subclasses that declare a unified implementation.
        candidates = {cls: cls._meta_get("impl")
                      for cls in discover.itersubclasses(self.__class__)
                      if (cls._meta_is_inited(raise_exc=False)
                          and cls._meta_get("impl"))}

        names = {impl._meta_get("name") for impl in candidates.values()}
        enabled = None
        # Query the cloud only when the decision actually depends on which
        # services are deployed.
        if len(names) > 1:
            enabled = list(self._clients.services().values())

        for cls, impl in candidates.items():
            if (enabled is not None
                    and impl._meta_get("name") not in enabled):
                continue
            if cls.is_applicable(self._clients):
                return cls, candidates
        return None, candidates

    @classmethod
    def is_applicable(cls, clients):
        """Check that implementation can be used in cloud."""
        if not cls._meta_is_inited(raise_exc=False):
            return False
        impl = cls._meta_get("impl", cls)
        client = getattr(clients, impl._meta_get("client_name"))
        return client.choose_version() == impl._meta_get("version")
class FunctionalMixin(object):
    """Functional assertions.

    The Rally core team deliberately decided not to use an existing
    framework for this such a `testtools`. Using 'testtools' would
    introduce the following problems:

    - Rally production code works with testing tools code that is not
      designed to be used in production.
    - Rally code depends on a bunch of new libs introduced by testtools
      and testtools itself, which means: more code on which Rally is
      dependent, more time required to install Rally, more disk space
      required by Rally.
    - Classes like Scenario & Context inherited from testtools.TestCase
      become really hard to learn (running dir(base.Scenario) shows a ton
      of methods inside it).
    - It won't be clear for end users what exceptions are raised:
      unittest exceptions would be raised during production runs instead
      of Rally assertion exceptions.
    """

    def _concatenate_message(self, default, extended):
        # Append an optional user-supplied explanation to the standard one.
        if not extended:
            return default
        if default[-1] != ".":
            default += "."
        return default + " " + extended.capitalize()

    def _fail(self, msg, err_msg):
        # Shared raise site for all assertions. Callers build `msg` lazily
        # so no repr() work happens on the (common) successful path --
        # previously several assertions formatted repr() of possibly large
        # containers on every successful call.
        raise exceptions.RallyAssertionError(
            self._concatenate_message(msg, err_msg))

    def assertEqual(self, first, second, err_msg=None):
        if first != second:
            self._fail("%s != %s" % (repr(first), repr(second)), err_msg)

    def assertNotEqual(self, first, second, err_msg=None):
        if first == second:
            self._fail("%s == %s" % (repr(first), repr(second)), err_msg)

    def assertTrue(self, value, err_msg=None):
        if not value:
            self._fail("%s is not True" % repr(value), err_msg)

    def assertFalse(self, value, err_msg=None):
        if value:
            self._fail("%s is not False" % repr(value), err_msg)

    def assertIs(self, first, second, err_msg=None):
        if first is not second:
            self._fail("%s is not %s" % (repr(first), repr(second)), err_msg)

    def assertIsNot(self, first, second, err_msg=None):
        if first is second:
            self._fail("%s is %s" % (repr(first), repr(second)), err_msg)

    def assertIsNone(self, value, err_msg=None):
        if value is not None:
            self._fail("%s is not None" % repr(value), err_msg)

    def assertIsNotNone(self, value, err_msg=None):
        if value is None:
            self._fail("%s is None" % repr(value), err_msg)

    def assertIn(self, member, container, err_msg=None):
        if member not in container:
            self._fail("%s not found in %s" % (repr(member),
                                               repr(container)), err_msg)

    def assertNotIn(self, member, container, err_msg=None):
        if member in container:
            self._fail("%s found in %s" % (repr(member), repr(container)),
                       err_msg)

    def assertIsInstance(self, first, second, err_msg=None):
        if not isinstance(first, second):
            self._fail("%s is not instance of %s" % (repr(first),
                                                     repr(second)), err_msg)

    def assertIsSubset(self, member, container, err_msg=None):
        if set(member) - set(container):
            self._fail("%s not found in %s" % (repr(member),
                                               repr(container)), err_msg)

    def assertIsNotSubset(self, member, container, err_msg=None):
        if not (set(member) - set(container)):
            self._fail("%s found in %s" % (repr(member), repr(container)),
                       err_msg)

    def assertIsNotInstance(self, first, second, err_msg=None):
        if isinstance(first, second):
            self._fail("%s is instance of %s" % (repr(first), repr(second)),
                       err_msg)

    def assertLessEqual(self, first, second, err_msg=None):
        if first > second:
            self._fail("%s is greater than %s" % (repr(first),
                                                  repr(second)), err_msg)

    def assertLess(self, first, second, err_msg=None):
        if first >= second:
            self._fail("%s is greater or equal to %s" % (repr(first),
                                                         repr(second)),
                       err_msg)

    def assertGreaterEqual(self, first, second, err_msg=None):
        if first < second:
            self._fail("%s is less than %s" % (repr(first), repr(second)),
                       err_msg)

    def assertGreater(self, first, second, err_msg=None):
        if first <= second:
            self._fail("%s is less or equal to %s" % (repr(first),
                                                      repr(second)),
                       err_msg)
class ValidationResult(object):
    """Outcome of a single scenario validator run.

    :param is_valid: True when validation succeeded
    :param msg: human-readable reason of the failure (None on success)
    """

    def __init__(self, is_valid, msg=None):
        self.is_valid = is_valid
        self.msg = msg

    def __repr__(self):
        # Added so failed validators are readable in logs/debugger instead
        # of an opaque "<ValidationResult object at 0x...>".
        return ("ValidationResult(is_valid=%r, msg=%r)"
                % (self.is_valid, self.msg))


def validator(fn):
    """Decorator that constructs a scenario validator from given function.

    Decorated function should return ValidationResult on error.

    :param fn: function that performs validation
    :returns: rally scenario validator
    """
    def wrap_given(*args, **kwargs):
        """Dynamic validation decorator for scenario.

        :param args: the arguments of the decorator of the benchmark
            scenario, e.g. @my_decorator("arg1") -> args == ("arg1",)
        :param kwargs: the keyword arguments of the decorator of the
            scenario, e.g. @my_decorator(kwarg1="kwarg1")
            -> kwargs == {"kwarg1": "kwarg1"}
        """
        @functools.wraps(fn)
        def wrap_validator(config, clients, deployment):
            # NOTE(amaretskiy): validator is successful by default -- a
            # None return is converted into a passing result.
            return (fn(config, clients, deployment, *args, **kwargs)
                    or ValidationResult(True))

        def wrap_scenario(scenario):
            # TODO(boris-42): remove the permission attribute in future.
            wrap_validator.permission = getattr(
                fn, "permission", consts.EndpointPermission.USER)
            scenario._meta_setdefault("validators", [])
            scenario._meta_get("validators").append(wrap_validator)
            return scenario

        return wrap_scenario

    return wrap_given
@validator
def number(config, clients, deployment, param_name, minval=None, maxval=None,
           nullable=False, integer_only=False):
    """Checks that parameter is number that pass specified condition.

    Ensure a parameter is within the range [minval, maxval]. This is a
    closed interval so the end points are included.

    :param param_name: Name of parameter to validate
    :param minval: Lower endpoint of valid interval
    :param maxval: Upper endpoint of valid interval
    :param nullable: Allow parameter not specified, or parameter=None
    :param integer_only: Only accept integers
    """
    value = config.get("args", {}).get(param_name)

    cast = float
    if integer_only:
        # NOTE(boris-42): int(float_number) silently truncates instead of
        #                 raising, so floats must be rejected explicitly.
        if type(value) == float:
            return ValidationResult(
                False, "%(name)s is %(val)s which hasn't int type"
                % {"name": param_name, "val": value})
        cast = int

    # None may be valid if the scenario sets a sensible default.
    if nullable and value is None:
        return ValidationResult(True)

    try:
        number = cast(value)
        if minval is not None and number < minval:
            return ValidationResult(
                False,
                "%(name)s is %(val)s which is less than the minimum "
                "(%(min)s)" % {"name": param_name,
                               "val": number,
                               "min": minval})
        if maxval is not None and number > maxval:
            return ValidationResult(
                False,
                "%(name)s is %(val)s which is greater than the maximum "
                "(%(max)s)" % {"name": param_name,
                               "val": number,
                               "max": maxval})
        return ValidationResult(True)
    except (ValueError, TypeError):
        return ValidationResult(
            False,
            "%(name)s is %(val)s which is not a valid %(type)s"
            % {"name": param_name, "val": value, "type": cast.__name__})
def _file_access_ok(filename, mode, param_name, required=True):
    """Check that *filename* is present and accessible with *mode*.

    The path is user-expanded before the access check.
    """
    if not filename:
        # A missing optional file is fine; a missing required one is not.
        return ValidationResult(not required,
                                "Parameter %s required" % param_name)
    if os.access(os.path.expanduser(filename), mode):
        return ValidationResult(True)
    return ValidationResult(
        False, "Could not open %(filename)s with mode %(mode)s "
        "for parameter %(param_name)s"
        % {"filename": filename, "mode": mode, "param_name": param_name})


@validator
def file_exists(config, clients, deployment, param_name, mode=os.R_OK,
                required=True):
    """Validator checks parameter is proper path to file with proper mode.

    Ensure a file exists and can be accessed with the specified mode.
    Note that path to file will be expanded before access checking.

    :param param_name: Name of parameter to validate
    :param mode: Access mode to test for. This should be one of:
        * os.F_OK (file exists)
        * os.R_OK (file is readable)
        * os.W_OK (file is writable)
        * os.X_OK (file is executable)

        If multiple modes are required they can be added, eg:
        mode=os.R_OK+os.W_OK
    :param required: Boolean indicating whether this argument is required.
    """
    return _file_access_ok(config.get("args", {}).get(param_name), mode,
                           param_name, required)
""" return _file_access_ok(config.get("args", {}).get(param_name), mode, param_name, required) def check_command_dict(command): """Check command-specifying dict `command', raise ValueError on error.""" if not isinstance(command, dict): raise ValueError("Command must be a dictionary") # NOTE(pboldin): Here we check for the values not for presence of the keys # due to template-driven configuration generation that can leave keys # defined but values empty. if command.get("interpreter"): script_file = command.get("script_file") if script_file: if "script_inline" in command: raise ValueError( "Exactly one of script_inline or script_file with " "interpreter is expected: %r" % command) # User tries to upload a shell? Make sure it is same as interpreter interpreter = command.get("interpreter") interpreter = (interpreter[-1] if isinstance(interpreter, (tuple, list)) else interpreter) if (command.get("local_path") and command.get("remote_path") != interpreter): raise ValueError( "When uploading an interpreter its path should be as well" " specified as the `remote_path' string: %r" % command) elif not command.get("remote_path"): # No interpreter and no remote command to execute is given raise ValueError( "Supplied dict specifies no command to execute," " either interpreter or remote_path is required: %r" % command) unexpected_keys = set(command) - set(["script_file", "script_inline", "interpreter", "remote_path", "local_path", "command_args"]) if unexpected_keys: raise ValueError( "Unexpected command parameters: %s" % ", ".join(unexpected_keys)) @validator def valid_command(config, clients, deployment, param_name, required=True): """Checks that parameter is a proper command-specifying dictionary. Ensure that the command dictionary is a proper command-specifying dictionary described in `vmtasks.VMTasks.boot_runcommand_delete' docstring. 
@validator
def valid_command(config, clients, deployment, param_name, required=True):
    """Checks that parameter is a proper command-specifying dictionary.

    Ensure that the command dictionary is a proper command-specifying
    dictionary described in `vmtasks.VMTasks.boot_runcommand_delete'
    docstring.

    :param param_name: Name of parameter to validate
    :param required: Boolean indicating that the command dictionary is
                     required
    """
    # TODO(amaretskiy): rework this validator into ResourceType, so this
    #                   will allow to validate parameters values as well
    command = config.get("args", {}).get(param_name)
    if command is None and not required:
        return ValidationResult(True)

    try:
        check_command_dict(command)
    except ValueError as e:
        return ValidationResult(False, str(e))

    for key in "script_file", "local_path":
        if command.get(key):
            return _file_access_ok(
                filename=command[key], mode=os.R_OK,
                param_name=param_name + "." + key, required=True)

    return ValidationResult(True)


def _get_validated_image(config, clients, param_name):
    """Fetch and normalize the image referenced by an args parameter.

    :returns: tuple of (ValidationResult, image dict or None)
    """
    image_context = config.get("context", {}).get("images", {})
    image_args = config.get("args", {}).get(param_name)
    image_ctx_name = image_context.get("image_name")

    if not image_args:
        msg = _("Parameter %s is not specified.") % param_name
        return (ValidationResult(False, msg), None)

    if "image_name" in image_context:
        # NOTE(rvasilets) check string is "exactly equal to" a regex
        # or image name from context equal to image name from args
        if "regex" in image_args:
            match = re.match(image_args.get("regex"), image_ctx_name)
        if image_ctx_name == image_args.get("name") or (
                "regex" in image_args and match):
            image = {
                "size": image_context.get("min_disk", 0),
                "min_ram": image_context.get("min_ram", 0),
                "min_disk": image_context.get("min_disk", 0)
            }
            return (ValidationResult(True), image)
    try:
        image_id = openstack_types.GlanceImage.transform(
            clients=clients, resource_config=image_args)
        image = clients.glance().images.get(image_id)
        if hasattr(image, "to_dict"):
            # NOTE(stpierre): Glance v1 images are objects that can be
            #                 converted to dicts; Glance v2 images are
            #                 already dict-like
            image = image.to_dict()
        # Normalize missing/empty fields so callers can compare safely.
        if not image.get("size"):
            image["size"] = 0
        if not image.get("min_ram"):
            image["min_ram"] = 0
        if not image.get("min_disk"):
            image["min_disk"] = 0
        return (ValidationResult(True), image)
    except (glance_exc.HTTPNotFound, exceptions.InvalidScenarioArgument):
        message = _("Image '%s' not found") % image_args
        return (ValidationResult(False, message), None)


def _get_flavor_from_context(config, flavor_value):
    """Resolve a flavor from the "flavors" context.

    :raises InvalidScenarioArgument: when no flavors context is configured
    :returns: tuple of (ValidationResult, FlavorConfig)
    """
    if "flavors" not in config.get("context", {}):
        raise exceptions.InvalidScenarioArgument("No flavors context")

    flavors = [flavors_ctx.FlavorConfig(**f)
               for f in config["context"]["flavors"]]
    resource = types.obj_from_name(resource_config=flavor_value,
                                   resources=flavors, typename="flavor")
    flavor = flavors_ctx.FlavorConfig(**resource)
    # FIX: this assignment previously read `"" % flavor.name`, which
    # raises TypeError ("not all arguments converted") whenever a context
    # flavor matched; restore the synthetic id marker text.
    flavor.id = "<context flavor: %s>" % flavor.name
    return (ValidationResult(True), flavor)


def _get_validated_flavor(config, clients, param_name):
    """Fetch the flavor referenced by an args parameter.

    Falls back to the "flavors" context when Nova does not know the
    flavor.

    :returns: tuple of (ValidationResult, flavor or None)
    """
    flavor_value = config.get("args", {}).get(param_name)
    if not flavor_value:
        msg = "Parameter %s is not specified." % param_name
        return (ValidationResult(False, msg), None)
    try:
        flavor_id = openstack_types.Flavor.transform(
            clients=clients, resource_config=flavor_value)
        flavor = clients.nova().flavors.get(flavor=flavor_id)
        return (ValidationResult(True), flavor)
    except (nova_exc.NotFound, exceptions.InvalidScenarioArgument):
        try:
            return _get_flavor_from_context(config, flavor_value)
        except exceptions.InvalidScenarioArgument:
            pass
        message = _("Flavor '%s' not found") % flavor_value
        return (ValidationResult(False, message), None)


@validator
def validate_share_proto(config, clients, deployment):
    """Validates value of share protocol for creation of Manila share."""
    allowed = ("NFS", "CIFS", "GLUSTERFS", "HDFS", )
    share_proto = config.get("args", {}).get("share_proto")
    if six.text_type(share_proto).upper() not in allowed:
        message = _("Share protocol '%(sp)s' is invalid, allowed values are "
                    "%(allowed)s.") % {"sp": share_proto,
                                       "allowed": "', '".join(allowed)}
        return ValidationResult(False, message)


@validator
def image_exists(config, clients, deployment, param_name, nullable=False):
    """Returns validator for image_id

    :param param_name: defines which variable should be used
                       to get image id value.
    :param nullable: defines image id param is required
    """
    image_value = config.get("args", {}).get(param_name)
    if not image_value and nullable:
        return ValidationResult(True)
    # The helper always returns a 2-tuple; the first item is the result.
    return _get_validated_image(config, clients, param_name)[0]
@validator
def flavor_exists(config, clients, deployment, param_name):
    """Returns validator for flavor

    :param param_name: defines which variable should be used
                       to get flavor id value.
    """
    return _get_validated_flavor(config, clients, param_name)[0]


@validator
def image_valid_on_flavor(config, clients, deployment, flavor_name,
                          image_name, validate_disk=True,
                          fail_on_404_image=True):
    """Returns validator for image could be used for current flavor

    :param flavor_name: defines which variable should be used
                        to get flavor id value.
    :param image_name: defines which variable should be used
                       to get image id value.
    :param validate_disk: flag to indicate whether to validate flavor's
                          disk. Should be True if instance is booted from
                          image. Should be False if instance is booted
                          from volume. Default value is True.
    :param fail_on_404_image: flag what indicate whether to validate
                              image or not.
    """
    flavor_check, flavor = _get_validated_flavor(config, clients,
                                                 flavor_name)
    if not flavor_check.is_valid:
        return flavor_check

    image_check, image = _get_validated_image(config, clients, image_name)
    if not image and not fail_on_404_image:
        # A missing image is tolerated in this mode.
        return ValidationResult(True)
    if not image_check.is_valid:
        return image_check

    if flavor.ram < image["min_ram"]:
        return ValidationResult(
            False, _("The memory size for flavor '%s' is too small "
                     "for requested image '%s'") % (flavor.id,
                                                    image["id"]))

    if flavor.disk and validate_disk:
        if image["size"] > flavor.disk * (1024 ** 3):
            return ValidationResult(
                False, _("The disk size for flavor '%s' is too small "
                         "for requested image '%s'") % (flavor.id,
                                                        image["id"]))
        if image["min_disk"] > flavor.disk:
            return ValidationResult(
                False, _("The disk size for flavor '%s' is too small "
                         "for requested image '%s'") % (flavor.id,
                                                        image["id"]))


@validator
def network_exists(config, clients, deployment, network_name):
    """Validator checks that network with network_name exist."""
    requested = config.get("args", {}).get(network_name, "private")
    available = [net.label for net in clients.nova().networks.list()]
    if requested not in available:
        return ValidationResult(
            False, _("Network with name %(network)s not found. "
                     "Available networks: %(networks)s")
            % {"network": requested, "networks": available})
" "Available networks: %(networks)s") % { "network": ext_network, "networks": networks} return ValidationResult(False, message) @validator def required_parameters(config, clients, deployment, *required_params): """Validator for checking required parameters are specified. :param *required_params: list of required parameters """ missing = set(required_params) - set(config.get("args", {})) if missing: message = _("%s parameters are not defined in " "the benchmark config file") % ", ".join(missing) return ValidationResult(False, message) @validator def required_services(config, clients, deployment, *required_services): """Validator checks if specified OpenStack services are available. :param *required_services: list of services names """ available_services = list(clients.services().values()) if consts.Service.NOVA_NET in required_services: creds = deployment.get_credentials_for("openstack") nova = osclients.Clients( objects.Credential(**creds["admin"])).nova() for service in nova.services.list(): if (service.binary == consts.Service.NOVA_NET and service.status == "enabled"): available_services.append(consts.Service.NOVA_NET) for service in required_services: # NOTE(andreykurilin): validator should ignore services configured via # context(a proper validation should be in context) service_config = config.get("context", {}).get( "api_versions", {}).get(service, {}) if (service not in available_services and not ("service_type" in service_config or "service_name" in service_config)): return ValidationResult( False, _("'{0}' service is not available. 
@validator
def required_neutron_extensions(config, clients, deployment,
                                *required_extensions):
    """Validator checks if the specified Neutron extension is available

    :param required_extensions: list of Neutron extensions
    """
    extensions = clients.neutron().list_extensions().get("extensions", [])
    # FIX: previously `aliases = map(...)` -- on Python 3 `map` returns a
    # one-shot iterator, so the membership test below consumed it and
    # every extension after the first was falsely reported as missing.
    aliases = [ext["alias"] for ext in extensions]
    for extension in required_extensions:
        if extension not in aliases:
            msg = (_("Neutron extension %s is not configured") % extension)
            return ValidationResult(False, msg)


@validator
def required_cinder_services(config, clients, deployment, service_name):
    """Validator checks that specified Cinder service is available.

    It uses Cinder client with admin permissions to call
    'cinder service-list' call

    :param service_name: Cinder service name
    """
    creds = deployment.get_credentials_for("openstack")
    admin_client = osclients.Clients(
        objects.Credential(**creds["admin"])).cinder()

    for service in admin_client.services.list():
        if (service.binary == six.text_type(service_name)
                and service.state == six.text_type("up")):
            return ValidationResult(True)

    msg = _("%s service is not available") % service_name
    return ValidationResult(False, msg)


@validator
def required_clients(config, clients, deployment, *components, **kwargs):
    """Validator checks if specified OpenStack clients are available.

    :param *components: list of client components names
    :param **kwargs: optional parameters:
                     admin - bool, whether to use admin clients
    """
    if kwargs.get("admin", False):
        creds = deployment.get_credentials_for("openstack")
        clients = osclients.Clients(objects.Credential(**creds["admin"]))

    for client_component in components:
        try:
            getattr(clients, client_component)()
        except ImportError:
            return ValidationResult(
                False,
                _("Client for {0} is not installed. To install it run "
                  "`pip install python-{0}client`").format(client_component))


@validator
def required_contexts(config, clients, deployment, *context_names):
    """Validator checks if required benchmark contexts are specified.

    :param *context_names: list of strings and tuples with context names
                           that should be specified. Tuple represent
                           'at least one of the'.
    """
    missing_contexts = []
    context = config.get("context", {})
    for name in context_names:
        if isinstance(name, tuple):
            if not set(name) & set(context):
                # formatted string like: 'foo or bar or baz'
                missing_contexts.append("'{}'".format(" or ".join(name)))
        elif name not in context:
            missing_contexts.append(name)

    if missing_contexts:
        return ValidationResult(
            False, _("The following contexts are required but missing "
                     "from the benchmark configuration file: %s")
            % ", ".join(missing_contexts))


@validator
def required_param_or_context(config, clients, deployment,
                              arg_name, ctx_name):
    """Validator checks if required image is specified.

    :param arg_name: name of parameter
    :param ctx_name: name of context
    """
    if ctx_name in config.get("context", {}):
        return ValidationResult(True)
    if arg_name in config.get("args", {}):
        return ValidationResult(True)
    message = ("Parameter {} is required but not described into context {}"
               " or arguments of scenario").format(arg_name, ctx_name)
    return ValidationResult(False, message)


@validator
def required_openstack(config, clients, deployment, admin=False,
                       users=False):
    """Validator that requires OpenStack admin or (and) users.

    This allows us to create 4 kind of benchmarks:
    1) not OpenStack related (validator is not specified)
    2) requires OpenStack admin
    3) requires OpenStack admin + users
    4) requires OpenStack users

    :param admin: requires OpenStack admin
    :param users: requires OpenStack users
    """
    if not (admin or users):
        return ValidationResult(
            False,
            _("You should specify admin=True or users=True or both."))

    creds = deployment.get_credentials_for("openstack")
    if creds["admin"] and creds["users"]:
        return ValidationResult(True)

    if creds["admin"]:
        if users and not config.get("context", {}).get("users"):
            return ValidationResult(
                False, _("You should specify 'users' context"))
        return ValidationResult(True)

    if creds["users"] and admin:
        return ValidationResult(False, _("Admin credentials required"))
    # NOTE: falling through here (user-only credentials, users=True)
    # returns None, which the `validator` wrapper treats as success.


@validator
def required_api_versions(config, clients, deployment, component, versions):
    """Validator checks component API versions."""
    versions = [str(v) for v in versions]
    versions_str = ", ".join(versions)
    msg = _("Task was designed to be used with %(component)s "
            "V%(version)s, but V%(found_version)s is "
            "selected.")
    if component == "keystone":
        # Keystone version must be inferred from client capabilities.
        if "2.0" not in versions and hasattr(clients.keystone(),
                                             "tenants"):
            return ValidationResult(
                False, msg % {"component": component,
                              "version": versions_str,
                              "found_version": "2.0"})
        if "3" not in versions and hasattr(clients.keystone(), "projects"):
            return ValidationResult(
                False, msg % {"component": component,
                              "version": versions_str,
                              "found_version": "3"})
    else:
        used_version = config.get("context", {}).get(
            "api_versions", {}).get(component, {}).get(
            "version", getattr(clients, component).choose_version())
        if not used_version:
            return ValidationResult(
                False, _("Unable to determine the API version."))
        if str(used_version) not in versions:
            return ValidationResult(
                False, msg % {"component": component,
                              "version": versions_str,
                              "found_version": used_version})
volume types. check_types: defines variable to be used as the flag to determine if volume types should be checked for existence. """ val = config.get("args", {}).get(param_name) if val: volume_types_list = clients.cinder().volume_types.list() if not volume_types_list: message = (_("Must have at least one volume type created " "when specifying use of volume types.")) return ValidationResult(False, message) @validator def restricted_parameters(config, clients, deployment, param_names, subdict=None): """Validates that parameters is not set. :param param_names: parameter or parameters list to be validated. :param subdict: sub-dict of "config" to search for param_names. if not defined - will search in "config" """ if not isinstance(param_names, (list, tuple)): param_names = [param_names] restricted_params = [] for param_name in param_names: args = config.get("args", {}) a_dict, a_key = (args, subdict) if subdict else (config, "args") if param_name in a_dict.get(a_key, {}): restricted_params.append(param_name) if restricted_params: msg = (_("You can't specify parameters '%(params)s' in '%(a_dict)s'") % {"params": ", ".join(restricted_params), "a_dict": subdict if subdict else "args"}) return ValidationResult(False, msg) @validator def validate_heat_template(config, clients, deployment, *param_names): """Validates heat template. :param param_names: list of parameters to be validated. """ if param_names is None: return ValidationResult(False, _( "validate_heat_template validator accepts non empty arguments " "in form of `validate_heat_template(\"foo\", \"bar\")`")) for param_name in param_names: template_path = config.get("args", {}).get(param_name) if not template_path: return ValidationResult(False, _( "Path to heat template is not specified. Its needed for " "heat template validation. 
Please check the content of `%s` " "scenario argument.") % param_name) template_path = os.path.expanduser(template_path) if not os.path.exists(template_path): return ValidationResult(False, _("No file found by the given path " "%s") % template_path) with open(template_path, "r") as f: try: clients.heat().stacks.validate(template=f.read()) except Exception as e: dct = { "path": template_path, "msg": str(e), } msg = (_("Heat template validation failed on %(path)s. " "Original error message: %(msg)s.") % dct) return ValidationResult(False, msg) @validator def workbook_contains_workflow(config, clients, deployment, workbook, workflow_name): """Validate that workflow exist in workbook when workflow is passed :param workbook: parameter containing the workbook definition :param workflow_name: parameter containing the workflow name """ wf_name = config.get("args", {}).get(workflow_name) if wf_name: wb_path = config.get("args", {}).get(workbook) wb_path = os.path.expanduser(wb_path) file_result = _file_access_ok(config.get("args", {}).get(workbook), os.R_OK, workbook) if not file_result.is_valid: return file_result with open(wb_path, "r") as wb_def: wb_def = yaml.safe_load(wb_def) if wf_name not in wb_def["workflows"]: return ValidationResult( False, "workflow '{}' not found in the definition '{}'".format( wf_name, wb_def)) rally-0.9.1/rally/task/processing/0000775000567000056710000000000013073420067020250 5ustar jenkinsjenkins00000000000000rally-0.9.1/rally/task/processing/__init__.py0000664000567000056710000000000013073417716022356 0ustar jenkinsjenkins00000000000000rally-0.9.1/rally/task/processing/charts.py0000664000567000056710000006110713073417716022122 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import bisect import collections import math import six from rally.common.plugin import plugin from rally.common import streaming_algorithms as streaming from rally.task.processing import utils @plugin.base() @six.add_metaclass(abc.ABCMeta) class Chart(plugin.Plugin): """Base class for charts. This is a base for all plugins that prepare data for specific charts in HTML report. Each chart must at least declare chart widget and prepare data that is suitable for rendering by JavaScript. """ @abc.abstractproperty def widget(self): """Widget name to display this chart by JavaScript.""" def __init__(self, workload_info, zipped_size=1000): """Setup initial values. :param workload_info: dict, generalized info about iterations. The most important value is `iterations_count' that should have int value of total data size :param zipped_size: int maximum number of points on scale """ self._data = collections.OrderedDict() # Container for results self._workload_info = workload_info self.base_size = workload_info.get("iterations_count", 0) self.zipped_size = zipped_size def add_iteration(self, iteration): """Add iteration data. This method must be called for each iteration. If overridden, this method must use streaming data processing, so chart instance could process unlimited number of iterations, with low memory usage. 
""" for name, value in self._map_iteration_values(iteration): if name not in self._data: self._data[name] = utils.GraphZipper(self.base_size, self.zipped_size) self._data[name].add_point(value) def render(self): """Generate chart data ready for drawing.""" return [(name, points.get_zipped_graph()) for name, points in self._data.items()] def _fix_atomic_actions(self, iteration): """Set `0' for missed atomic actions. Since some atomic actions can absent in some iterations due to failures, this method must be used in all cases related to atomic actions processing. """ for name in self._workload_info["atomic"]: iteration["atomic_actions"].setdefault(name, 0) return iteration @abc.abstractmethod def _map_iteration_values(self, iteration): """Get values for processing, from given iteration.""" class MainStackedAreaChart(Chart): widget = "StackedArea" def _map_iteration_values(self, iteration): if iteration["error"]: result = [("duration", 0), ("idle_duration", 0)] if self._workload_info["iterations_failed"]: result.append( ("failed_duration", iteration["duration"] + iteration["idle_duration"])) else: result = [("duration", iteration["duration"]), ("idle_duration", iteration["idle_duration"])] if self._workload_info["iterations_failed"]: result.append(("failed_duration", 0)) return result class AtomicStackedAreaChart(Chart): widget = "StackedArea" def _map_iteration_values(self, iteration): iteration = self._fix_atomic_actions(iteration) atomics = list(iteration["atomic_actions"].items()) if self._workload_info["iterations_failed"]: if iteration["error"]: failed_duration = ( iteration["duration"] + iteration["idle_duration"] - sum([(a[1] or 0) for a in atomics])) else: failed_duration = 0 atomics.append(("failed_duration", failed_duration)) return atomics class AvgChart(Chart): """Base class for charts with average results.""" widget = "Pie" def add_iteration(self, iteration): for name, value in self._map_iteration_values(iteration): if name not in self._data: 
self._data[name] = streaming.MeanComputation() self._data[name].add(value or 0) def render(self): return [(k, v.result()) for k, v in self._data.items()] class AtomicAvgChart(AvgChart): def _map_iteration_values(self, iteration): iteration = self._fix_atomic_actions(iteration) return list(iteration["atomic_actions"].items()) class LoadProfileChart(Chart): """Chart for parallel durations.""" widget = "StackedArea" def __init__(self, workload_info, name="parallel iterations", scale=100): """Setup chart with graph name and scale. :workload_info: dict, generalized info about iterations :param name: str name for X axis :param scale: int number of X points """ super(LoadProfileChart, self).__init__(workload_info) self._name = name # NOTE(boris-42): Add 2 points at the end of graph so at the end of # graph there will be point with 0 running iterations. self._duration = workload_info["load_duration"] * (1 + 2.0 / scale) self._tstamp_start = workload_info["tstamp_start"] self.step = self._duration / float(scale) self._time_axis = [self.step * x for x in six.moves.range(int(scale)) if (self.step * x) < self._duration] self._time_axis.append(self._duration) self._running = [0] * len(self._time_axis) def _map_iteration_values(self, iteration): return (iteration["timestamp"], iteration["duration"]) def add_iteration(self, iteration): timestamp, duration = self._map_iteration_values(iteration) ts_start = timestamp - self._tstamp_start started_idx = bisect.bisect(self._time_axis, ts_start) ended_idx = bisect.bisect(self._time_axis, ts_start + duration) if self._time_axis[ended_idx - 1] == ts_start + duration: ended_idx -= 1 for idx in range(started_idx + 1, ended_idx): self._running[idx] += 1 if started_idx == ended_idx: self._running[ended_idx] += duration / self.step else: self._running[started_idx] += ( self._time_axis[started_idx] - ts_start) / self.step self._running[ended_idx] += ( ts_start + duration - self._time_axis[ended_idx - 1]) / self.step def render(self): return 
[(self._name, list(zip(self._time_axis, self._running)))] class HistogramChart(Chart): """Base class for chart with histograms. This chart is relatively complex, because actually it is a set of histograms, that usually can be switched by dropdown select. And each histogram has several data views. """ widget = "Histogram" def _init_views(self, min_value, max_value): """Generate initial data for each histogram view.""" if not self.base_size: return [] min_value, max_value = min_value or 0, max_value or 0 views = [] for view, bins in [ ("Square Root Choice", int(math.ceil(math.sqrt(self.base_size)))), ("Sturges Formula", int(math.ceil(math.log(self.base_size, 2) + 1))), ("Rice Rule", int(math.ceil(2 * self.base_size ** (1.0 / 3))))]: bin_width = float(max_value - min_value) / bins x_axis = [min_value + (bin_width * x) for x in range(1, bins + 1)] views.append({"view": view, "bins": bins, "x": x_axis, "y": [0] * len(x_axis)}) return views def add_iteration(self, iteration): for name, value in self._map_iteration_values(iteration): if name not in self._data: raise KeyError("Unexpected histogram name: %s" % name) for i, view in enumerate(self._data[name]["views"]): for bin_i, bin_v in enumerate(view["x"]): if (value or 0) <= bin_v: self._data[name]["views"][i]["y"][bin_i] += 1 break def render(self): data = [] for name, hist in self._data.items(): for idx, v in enumerate(hist["views"]): graph = {"key": name, "view": v["view"], "disabled": hist["disabled"], "values": [{"x": x, "y": y} for x, y in zip(v["x"], v["y"])]} try: data[idx].append(graph) except IndexError: data.append([graph]) return {"data": data, "views": [{"id": i, "name": d[0]["view"]} for i, d in enumerate(data)]} class MainHistogramChart(HistogramChart): def __init__(self, workload_info): super(MainHistogramChart, self).__init__(workload_info) views = self._init_views(self._workload_info["min_duration"], self._workload_info["max_duration"]) self._data["task"] = {"views": views, "disabled": None} def 
_map_iteration_values(self, iteration): return [("task", 0 if iteration["error"] else iteration["duration"])] class AtomicHistogramChart(HistogramChart): def __init__(self, workload_info): super(AtomicHistogramChart, self).__init__(workload_info) for i, atomic in enumerate(self._workload_info["atomic"].items()): name, value = atomic self._data[name] = { "views": self._init_views(value["min_duration"], value["max_duration"]), "disabled": i} def _map_iteration_values(self, iteration): iteration = self._fix_atomic_actions(iteration) return list(iteration["atomic_actions"].items()) @six.add_metaclass(abc.ABCMeta) class Table(Chart): """Base class for tables. Each Table subclass represents HTML table which can be easily rendered in report. Subclasses are responsible for setting up both columns and rows: columns are set simply by `columns' property (list of str columns names) and rows must be initialized in _data property, with the following format: self._data = {name: [streaming_ins, postprocess_func or None], ...} where: name - str name of table row parameter streaming_ins - instance of streaming algorithm postprocess_func - optional function that processes final result, None means usage of default self._round() This can be done in __init__() or even in add_iteration(). """ widget = "Table" @abc.abstractproperty def columns(self): """List of columns names.""" def _round(self, ins, has_result): """This is a default post-process function for table cell value. :param ins: streaming_algorithms.StreamingAlgorithm subclass instance :param has_result: bool, whether current row is effective :returns: rounded float :returns: str "n/a" """ return round(ins.result(), 3) if has_result else "n/a" def _row_has_results(self, values): """Determine whether row can be assumed as having values. :param values: row values list [(StreamingAlgorithm, function or None), ...] 
:returns: bool """ for ins, fn in values: if isinstance(ins, (streaming.MinComputation, streaming.MaxComputation, streaming.MeanComputation)): # NOTE(amaretskiy): None means this computation # has never been called return ins.result() is not None return True def get_rows(self): """Collect rows values finally, after all data is processed. :returns: [str_name, (float or str), (float or str), ...] """ rows = [] for name, values in self._data.items(): row = [name] has_result = self._row_has_results(values) for ins, fn in values: fn = fn or self._round row.append(fn(ins, has_result)) rows.append(row) return rows def render(self): return {"cols": self.columns, "rows": self.get_rows()} class MainStatsTable(Table): columns = ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg (sec)", "Success", "Count"] def __init__(self, *args, **kwargs): super(MainStatsTable, self).__init__(*args, **kwargs) iters_num = self._workload_info["iterations_count"] for name in (list(self._workload_info["atomic"].keys()) + ["total"]): self._data[name] = [ [streaming.MinComputation(), None], [streaming.PercentileComputation(0.5, iters_num), None], [streaming.PercentileComputation(0.9, iters_num), None], [streaming.PercentileComputation(0.95, iters_num), None], [streaming.MaxComputation(), None], [streaming.MeanComputation(), None], [streaming.MeanComputation(), lambda st, has_result: ("%.1f%%" % (st.result() * 100) if has_result else "n/a")], [streaming.IncrementComputation(), lambda st, has_result: st.result()]] def _map_iteration_values(self, iteration): return dict(iteration["atomic_actions"], total=iteration["duration"]) def add_iteration(self, iteration): for name, value in self._map_iteration_values(iteration).items(): self._data[name][-1][0].add() if iteration["error"]: self._data[name][-2][0].add(0) else: self._data[name][-2][0].add(1) for idx, dummy in enumerate(self._data[name][:-2]): self._data[name][idx][0].add(value) class OutputChart(Chart): 
"""Base class for charts related to scenario output.""" def __init__(self, workload_info, zipped_size=1000, title="", description="", label="", axis_label=""): super(OutputChart, self).__init__(workload_info, zipped_size) self.title = title self.description = description self.label = label self.axis_label = axis_label def _map_iteration_values(self, iteration): return iteration def render(self): return {"title": self.title, "description": self.description, "widget": self.widget, "data": super(OutputChart, self).render(), "label": self.label, "axis_label": self.axis_label} @plugin.configure(name="StackedArea") class OutputStackedAreaChart(OutputChart): """Display results as stacked area. This plugin processes additive data and displays it in HTML report as stacked area with X axis bound to iteration number. Complete output data is displayed as stacked area as well, without any processing. Keys "description", "label" and "axis_label" are optional. Examples of using this plugin in Scenario, for saving output data: .. 
code-block:: python self.add_output( additive={"title": "Additive data as stacked area", "description": "Iterations trend for foo and bar", "chart_plugin": "StackedArea", "data": [["foo", 12], ["bar", 34]]}, complete={"title": "Complete data as stacked area", "description": "Data is shown as stacked area, as-is", "chart_plugin": "StackedArea", "data": [["foo", [[0, 5], [1, 42], [2, 15], [3, 7]]], ["bar", [[0, 2], [1, 1.3], [2, 5], [3, 9]]]], "label": "Y-axis label text", "axis_label": "X-axis label text"}) """ widget = "StackedArea" def render(self): result = super(OutputStackedAreaChart, self).render() # NOTE(amaretskiy): transform to Table if there is a single iteration if result["data"] and len(result["data"][0][1]) == 1: rows = [[v[0], v[1][0][1]] for v in result["data"]] result.update({"widget": "Table", "data": {"cols": ["Name", self.label or "Value"], "rows": rows}}) return result @plugin.configure(name="Lines") class OutputLinesChart(OutputStackedAreaChart): """Display results as generic chart with lines. This plugin processes additive data and displays it in HTML report as linear chart with X axis bound to iteration number. Complete output data is displayed as linear chart as well, without any processing. Examples of using this plugin in Scenario, for saving output data: .. code-block:: python self.add_output( additive={"title": "Additive data as stacked area", "description": "Iterations trend for foo and bar", "chart_plugin": "Lines", "data": [["foo", 12], ["bar", 34]]}, complete={"title": "Complete data as stacked area", "description": "Data is shown as stacked area, as-is", "chart_plugin": "Lines", "data": [["foo", [[0, 5], [1, 42], [2, 15], [3, 7]]], ["bar", [[0, 2], [1, 1.3], [2, 5], [3, 9]]]], "label": "Y-axis label text", "axis_label": "X-axis label text"}) """ widget = "Lines" @plugin.configure(name="Pie") class OutputAvgChart(OutputChart, AvgChart): """Display results as pie, calculate average values for additive data. 
This plugin processes additive data and calculate average values. Both additive and complete data are displayed in HTML report as pie chart. Examples of using this plugin in Scenario, for saving output data: .. code-block:: python self.add_output( additive={"title": "Additive output", "description": ("Pie with average data " "from all iterations values"), "chart_plugin": "Pie", "data": [["foo", 12], ["bar", 34], ["spam", 56]]}, complete={"title": "Complete output", "description": "Displayed as a pie, as-is", "chart_plugin": "Pie", "data": [["foo", 12], ["bar", 34], ["spam", 56]]}) """ widget = "Pie" @plugin.configure(name="Table") class OutputTable(OutputChart, Table): """Display complete output as table, can not be used for additive data. Use this plugin for complete output data to display it in HTML report as table. This plugin can not be used for additive data because it does not contain any processing logic. Examples of using this plugin in Scenario, for saving output data: .. code-block:: python self.add_output( complete={"title": "Arbitrary Table", "description": "Just show columns and rows as-is", "chart_plugin": "Table", "data": {"cols": ["foo", "bar", "spam"], "rows": [["a row", 1, 2], ["b row", 3, 4], ["c row", 5, 6]]}}) """ widget = "Table" @plugin.configure(name="StatsTable") class OutputStatsTable(OutputTable): """Calculate statistics for additive data and display it as table. This plugin processes additive data and compose statistics that is displayed as table in HTML report. Examples of using this plugin in Scenario, for saving output data: .. 
code-block:: python self.add_output( additive={"title": "Statistics", "description": ("Table with statistics generated " "from all iterations values"), "chart_plugin": "StatsTable", "data": [["foo stat", 12], ["bar", 34], ["spam", 56]]}) """ columns = ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg (sec)", "Count"] def add_iteration(self, iteration): for name, value in self._map_iteration_values(iteration): if name not in self._data: iters_num = self._workload_info["iterations_count"] self._data[name] = [ [streaming.MinComputation(), None], [streaming.PercentileComputation(0.5, iters_num), None], [streaming.PercentileComputation(0.9, iters_num), None], [streaming.PercentileComputation(0.95, iters_num), None], [streaming.MaxComputation(), None], [streaming.MeanComputation(), None], [streaming.IncrementComputation(), lambda v, na: v.result()]] self._data[name][-1][0].add(None) self._data[name][-2][0].add(1) for idx, dummy in enumerate(self._data[name][:-1]): self._data[name][idx][0].add(value) @plugin.configure(name="TextArea") class OutputTextArea(OutputChart): """Arbitrary text This plugin processes complete data and displays of output in HTML report. Examples of using this plugin in Scenario, for saving output data: .. code-block:: python self.add_output( complete={"title": "Script Inline", "chart_plugin": "TextArea", "data": ["first output", "second output", "third output"]]}) """ widget = "TextArea" _OUTPUT_SCHEMA = { "key_types": { "title": six.string_types, "description": six.string_types, "chart_plugin": six.string_types, "data": (list, dict), "label": six.string_types, "axis_label": six.string_types}, "required": ["title", "chart_plugin", "data"]} def validate_output(output_type, output): # TODO(amaretskiy): this validation is simple and must be improved. 
# Maybe it is worth to add classmethod OutputChart.validate(), so # we could have flexible validation for custom chart plugins if output_type not in ("additive", "complete"): return ("unexpected output type: '%s', " "should be in ('additive', 'complete')" % output_type) if type(output) != dict: return ("%(name)s output item has wrong type '%(type)s', " "must be 'dict'" % {"name": output_type, "type": type(output).__name__}) for key in _OUTPUT_SCHEMA["required"]: if key not in output: return ("%(name)s output missing key '%(key)s'" % {"name": output_type, "key": key}) for key in output: if key not in _OUTPUT_SCHEMA["key_types"]: return ("%(name)s output has unexpected key '%(key)s'" % {"name": output_type, "key": key}) proper_type = _OUTPUT_SCHEMA["key_types"][key] if not isinstance(output[key], proper_type): if type(proper_type) == tuple: return ("Value of %(name)s output %(key)s has wrong type " "'%(actual_type)s', should be in %(types)r" % {"name": output_type, "key": key, "actual_type": type(output[key]).__name__, "types": tuple(t.__name__ for t in proper_type)}) return ("Value of %(name)s output %(key)s has wrong type " "'%(actual_type)s', should be %(proper_type)s" % {"name": output_type, "key": key, "actual_type": type(output[key]).__name__, "proper_type": proper_type.__name__}) rally-0.9.1/rally/task/processing/plot.py0000664000567000056710000003305413073417720021607 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import collections import datetime as dt import hashlib import json import six from rally.common import objects from rally.common.plugin import plugin from rally.common import version from rally.task.processing import charts from rally.ui import utils as ui_utils def _process_hooks(hooks): """Prepare hooks data for report.""" hooks_ctx = [] for hook in hooks: hook_ctx = {"name": hook["config"]["name"], "desc": hook["config"].get("description", ""), "additive": [], "complete": []} for res in hook["results"]: started_at = dt.datetime.utcfromtimestamp(res["started_at"]) finished_at = dt.datetime.utcfromtimestamp(res["finished_at"]) triggered_by = "%(event_type)s: %(value)s" % res["triggered_by"] for i, data in enumerate(res.get("output", {}).get("additive")): try: hook_ctx["additive"][i] except IndexError: chart_cls = plugin.Plugin.get(data["chart_plugin"]) hook_ctx["additive"].append([chart_cls]) hook_ctx["additive"][i].append(data) complete_charts = [] for data in res.get("output", {}).get("complete"): chart_cls = plugin.Plugin.get(data.pop("chart_plugin")) data["widget"] = chart_cls.widget complete_charts.append(data) if complete_charts: hook_ctx["complete"].append( {"triggered_by": triggered_by, "started_at": started_at.strftime("%Y-%m-%d %H:%M:%S"), "finished_at": finished_at.strftime("%Y-%m-%d %H:%M:%S"), "status": res["status"], "charts": complete_charts}) for i in range(len(hook_ctx["additive"])): chart_cls = hook_ctx["additive"][i].pop(0) iters_count = len(hook_ctx["additive"][i]) first = hook_ctx["additive"][i][0] descr = first.get("description", "") axis_label = first.get("axis_label", "") chart = chart_cls({"iterations_count": iters_count}, title=first["title"], description=descr, label=first.get("label", ""), axis_label=axis_label) for data in hook_ctx["additive"][i]: chart.add_iteration(data["data"]) hook_ctx["additive"][i] = chart.render() if 
hook_ctx["additive"] or hook_ctx["complete"]: hooks_ctx.append(hook_ctx) return hooks_ctx def _process_scenario(data, pos): main_area = charts.MainStackedAreaChart(data["info"]) main_hist = charts.MainHistogramChart(data["info"]) main_stat = charts.MainStatsTable(data["info"]) load_profile = charts.LoadProfileChart(data["info"]) atomic_pie = charts.AtomicAvgChart(data["info"]) atomic_area = charts.AtomicStackedAreaChart(data["info"]) atomic_hist = charts.AtomicHistogramChart(data["info"]) errors = [] output_errors = [] additive_output_charts = [] complete_output = [] for idx, itr in enumerate(data["iterations"], 1): if itr["error"]: typ, msg, trace = itr["error"] errors.append({"iteration": idx, "type": typ, "message": msg, "traceback": trace}) for i, additive in enumerate(itr["output"]["additive"]): try: additive_output_charts[i].add_iteration(additive["data"]) except IndexError: chart_cls = plugin.Plugin.get(additive["chart_plugin"]) chart = chart_cls( data["info"], title=additive["title"], description=additive.get("description", ""), label=additive.get("label", ""), axis_label=additive.get("axis_label", "Iteration sequence number")) chart.add_iteration(additive["data"]) additive_output_charts.append(chart) complete_charts = [] for complete in itr["output"]["complete"]: complete_chart = dict(complete) chart_cls = plugin.Plugin.get(complete_chart.pop("chart_plugin")) complete_chart["widget"] = chart_cls.widget complete_charts.append(complete_chart) complete_output.append(complete_charts) for chart in (main_area, main_hist, main_stat, load_profile, atomic_pie, atomic_area, atomic_hist): chart.add_iteration(itr) kw = data["key"]["kw"] cls, method = data["key"]["name"].split(".") additive_output = [chart.render() for chart in additive_output_charts] iterations_count = data["info"]["iterations_count"] return { "cls": cls, "met": method, "pos": str(pos), "name": method + (pos and " [%d]" % (pos + 1) or ""), "runner": kw["runner"]["type"], "config": 
json.dumps({data["key"]["name"]: [kw]}, indent=2), "hooks": _process_hooks(data["hooks"]), "iterations": { "iter": main_area.render(), "pie": [("success", (data["info"]["iterations_count"] - len(errors))), ("errors", len(errors))], "histogram": main_hist.render()}, "load_profile": load_profile.render(), "atomic": {"histogram": atomic_hist.render(), "iter": atomic_area.render(), "pie": atomic_pie.render()}, "table": main_stat.render(), "additive_output": additive_output, "complete_output": complete_output, "has_output": any(additive_output) or any(complete_output), "output_errors": output_errors, "errors": errors, "load_duration": data["info"]["load_duration"], "full_duration": data["info"]["full_duration"], "created_at": data["created_at"], "sla": data["sla"], "sla_success": all([s["success"] for s in data["sla"]]), "iterations_count": iterations_count, } def _process_tasks(tasks_results): tasks = [] source_dict = collections.defaultdict(list) position = collections.defaultdict(lambda: -1) for scenario in tasks_results: name = scenario["key"]["name"] position[name] += 1 source_dict[name].append(scenario["key"]["kw"]) tasks.append(_process_scenario(scenario, position[name])) source = json.dumps(source_dict, indent=2, sort_keys=True) return source, sorted(tasks, key=lambda r: (r["cls"], r["met"], int(r["pos"]))) def _extend_results(results): """Transform tasks results into extended format. This is a temporary workaround adapter that allows working with task results using new schema, until database refactoring actually comes. 
:param results: tasks results list in old format :returns: tasks results list in new format """ extended_results = [] for result in results: generic = {"id": None, "task_uuid": None, "key": result["key"], "data": {"sla": result["sla"], "hooks": result.get("hooks"), "raw": result["result"], "full_duration": result["full_duration"], "load_duration": result["load_duration"]}, "created_at": result.get("created_at"), "updated_at": None} extended_results.extend( objects.Task.extend_results([generic], True)) return extended_results def plot(tasks_results, include_libs=False): extended_results = _extend_results(tasks_results) template = ui_utils.get_template("task/report.html") source, data = _process_tasks(extended_results) return template.render(version=version.version_string(), source=json.dumps(source), data=json.dumps(data), include_libs=include_libs) def trends(tasks_results): trends = Trends() for i, scenario in enumerate(_extend_results(tasks_results), 1): trends.add_result(scenario) template = ui_utils.get_template("task/trends.html") return template.render(version=version.version_string(), data=json.dumps(trends.get_data())) class Trends(object): """Process workloads results and make trends data. Group workloads results by their input configuration, calculate statistics for these groups and prepare it for displaying in trends HTML report. 
    """

    def __init__(self):
        # Mapping: md5 hash of workload config -> aggregated trend data
        self._data = {}

    def _to_str(self, obj):
        """Convert object into string."""
        if obj is None:
            return "None"
        elif isinstance(obj, six.string_types + (int, float)):
            return str(obj).strip()
        elif isinstance(obj, (list, tuple)):
            # Sort so that equivalent collections hash identically
            return ",".join(sorted([self._to_str(v) for v in obj]))
        elif isinstance(obj, dict):
            return "|".join(sorted([":".join([self._to_str(k),
                                              self._to_str(v)])
                                    for k, v in obj.items()]))
        raise TypeError("Unexpected type %(type)r of object %(obj)r"
                        % {"obj": obj, "type": type(obj)})

    def _make_hash(self, obj):
        # Stable fingerprint of a workload configuration, used to group
        # results of identically configured workloads together.
        return hashlib.md5(self._to_str(obj).encode("utf8")).hexdigest()

    def add_result(self, result):
        """Aggregate a single workload result into trends data.

        :param result: workload result in extended format
        """
        key = self._make_hash(result["key"]["kw"])
        if key not in self._data:
            self._data[key] = {"actions": {}, "sla_failures": 0,
                               "name": result["key"]["name"],
                               "config": json.dumps(result["key"]["kw"],
                                                    indent=2)}

        for sla in result["sla"]:
            # bool arithmetic: count criteria that did NOT succeed
            self._data[key]["sla_failures"] += not sla["success"]

        # Re-key statistics rows by action name, with column names as keys
        stat = {row[0]: dict(zip(result["info"]["stat"]["cols"], row))
                for row in result["info"]["stat"]["rows"]}
        # Timestamp in milliseconds, as expected by NVD3 charts
        ts = int(result["info"]["tstamp_start"] * 1000)
        for action in stat:
            # NOTE(amaretskiy): some atomic actions can be missed due to
            #   failures. We can ignore that because we use NVD3 lineChart()
            #   for displaying trends, which is safe for missed points
            if action not in self._data[key]["actions"]:
                self._data[key]["actions"][action] = {
                    "durations": {"min": [], "median": [], "90%ile": [],
                                  "95%ile": [], "max": [], "avg": []},
                    "success": []}

            try:
                success = float(stat[action]["Success"].rstrip("%"))
            except ValueError:
                # Got "n/a" for some reason
                success = 0
            self._data[key]["actions"][action]["success"].append(
                (ts, success))

            for tgt, src in (("min", "Min (sec)"),
                             ("median", "Median (sec)"),
                             ("90%ile", "90%ile (sec)"),
                             ("95%ile", "95%ile (sec)"),
                             ("max", "Max (sec)"),
                             ("avg", "Avg (sec)")):
                self._data[key]["actions"][action]["durations"][tgt].append(
                    (ts, stat[action][src]))

    def get_data(self):
        """Return trends data prepared for rendering in the template."""
        trends = []
        for wload in self._data.values():
            trend = {"stat": {},
                     "name": wload["name"],
                     "cls": wload["name"].split(".")[0],
                     "met": wload["name"].split(".")[1],
                     "sla_failures": wload["sla_failures"],
                     "config": wload["config"],
                     "actions": []}
            for action, data in wload["actions"].items():
                # Sort each duration series chronologically
                action_durs = [(k, sorted(v))
                               for k, v in data["durations"].items()]
                if action == "total":
                    # "total" describes the whole workload and is kept at
                    # the top level rather than in the actions list
                    trend.update(
                        {"length": len(data["success"]),
                         "durations": action_durs,
                         "success": [("success",
                                      sorted(data["success"]))]})
                else:
                    trend["actions"].append(
                        {"name": action,
                         "durations": action_durs,
                         "success": [("success",
                                      sorted(data["success"]))]})

            for stat, comp in (("min", charts.streaming.MinComputation()),
                               ("max", charts.streaming.MaxComputation()),
                               ("avg", charts.streaming.MeanComputation())):
                for k, v in trend["durations"]:
                    for i in v:
                        # Skip non-numeric values (e.g. "n/a")
                        if isinstance(i[1], (float,) + six.integer_types):
                            comp.add(i[1])
                trend["stat"][stat] = comp.result()

            trends.append(trend)
        return sorted(trends, key=lambda i: i["name"])
rally-0.9.1/rally/task/processing/utils.py0000664000567000056710000000522713073417716021777 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


class GraphZipper(object):
    """Compress a raw series of graph points down to a fixed size.

    Raw points are streamed in via add_point(); once more points than
    zipped_size are expected, consecutive raw points are merged into
    weighted averages so the output never exceeds zipped_size points.
    """

    def __init__(self, base_size, zipped_size=1000):
        """Init graph zipper.

        :param base_size: Amount of points in raw graph
        :param zipped_size: Amount of points that should be in zipped
            graph
        """
        self.base_size = base_size
        self.zipped_size = zipped_size
        if self.base_size >= self.zipped_size:
            # How many raw points get merged into one zipped point
            self.compression_ratio = self.base_size / float(
                self.zipped_size)
        else:
            # Fewer raw points than requested output size: no compression
            self.compression_ratio = 1
        # 1-based index of the most recently added raw point
        self.point_order = 0
        # Sum of the weights accumulated toward the current zipped point
        self.cached_ratios_sum = 0
        # Accumulated [weight, value] pairs for the current zipped point
        self.ratio_value_points = []
        self.zipped_graph = []

    def _get_zipped_point(self):
        # Pick a representative x-coordinate for the compressed point:
        # clamp to the first/last raw point at the edges, otherwise use
        # the middle of the compressed interval.
        if self.point_order - self.compression_ratio <= 1:
            order = 1
        elif self.point_order == self.base_size:
            order = self.base_size
        else:
            order = self.point_order - int(self.compression_ratio / 2.0)

        # Weighted average of the accumulated raw values
        value = (
            sum(p[0] * p[1] for p in self.ratio_value_points)
            / self.compression_ratio
        )

        return [order, value]

    def add_point(self, value):
        """Feed the next raw point value into the zipper.

        :param value: numeric value of the point; non-numeric values are
            charted as zero
        :raises RuntimeError: if more than base_size points are added
        """
        self.point_order += 1

        if self.point_order > self.base_size:
            raise RuntimeError("GraphZipper is already full. "
                               "You can't add more points.")

        if not isinstance(value, (int, float)):
            value = 0

        if self.compression_ratio <= 1:  # We don't need to compress
            self.zipped_graph.append([self.point_order, value])

        elif self.cached_ratios_sum + 1 < self.compression_ratio:
            # Current zipped point still has room: accumulate full weight
            self.cached_ratios_sum += 1
            self.ratio_value_points.append([1, value])

        else:
            # This raw point completes the current zipped point; the
            # fraction of it that does not fit ("1 - rest") is carried
            # over as the first contribution to the next zipped point.
            rest = self.compression_ratio - self.cached_ratios_sum
            self.ratio_value_points.append([rest, value])
            self.zipped_graph.append(self._get_zipped_point())
            self.ratio_value_points = [[1 - rest, value]]
            self.cached_ratios_sum = self.ratio_value_points[0][0]

    def get_zipped_graph(self):
        """Return the compressed graph as a list of [order, value] pairs."""
        return self.zipped_graph
rally-0.9.1/rally/task/scenario.py0000664000567000056710000002530713073417720020260 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import inspect
import random

import six

from rally.common.i18n import _
from rally.common import logging
from rally.common.objects import task  # noqa
from rally.common.plugin import plugin
from rally.common import utils
from rally import consts
from rally import exceptions
from rally.task import atomic
from rally.task import functional
from rally.task.processing import charts


LOG = logging.getLogger(__name__)


def configure(name=None, namespace="default", context=None):
    """Configure scenario by setting proper meta data.
This can also transform plain function into scenario plugin, however this approach is deprecated - now scenarios must be represented by classes based on rally.task.scenario.Scenario. :param name: str scenario name :param namespace: str plugin namespace :param context: default task context that is created for this scenario. If there are custom user specified contexts this one will be updated by provided contexts. """ def wrapper(scen): scen.is_classbased = hasattr(scen, "run") and callable(scen.run) if not scen.is_classbased: plugin.from_func(Scenario)(scen) scen._meta_init() if name: if "." not in name.strip("."): msg = (_("Scenario name must include a dot: '%s'") % name) raise exceptions.RallyException(msg) scen._set_name_and_namespace(name, namespace) else: scen._meta_set("namespace", namespace) scen._meta_set("default_context", context or {}) return scen return wrapper class ConfigurePluginMeta(type): """Finish Scenario plugin configuration. After @scenario.configure() is performed to cls.method, method.im_class is pointing to FuncPlugin class instead of original cls. There is no way to fix this, mostly because im_class is add to method when it's called via cls, e.g. cls.method. Decorator is different case so there is no information about cls. method._plugin is pointing to FuncPlugin that has FuncPlugin pointer to method. What should be done is to set properly FuncPluing.func_ref to the cls.method This metaclass iterates over all cls methods and fix func_ref of FuncPlugin class so func_ref will be cls.method instead of FuncPlugin.method. Additionally this metaclass sets plugin names if they were not set explicit via configure(). Default name is . As well we need to keep cls_ref inside of _meta because Python3 loves us. Viva black magic and dirty hacks. 
""" def __init__(cls, name, bases, namespaces): super(ConfigurePluginMeta, cls).__init__(name, bases, namespaces) for name, field in namespaces.items(): if callable(field) and hasattr(field, "_plugin"): field._plugin._meta_set("cls_ref", cls) if not field._meta_get("name", None): field._set_name_and_namespace( "%s.%s" % (cls.__name__, field.__name__), field.get_namespace()) field._plugin.func_ref = getattr( cls, field._plugin.func_ref.__name__) @plugin.base() @six.add_metaclass(ConfigurePluginMeta) class Scenario(plugin.Plugin, atomic.ActionTimerMixin, functional.FunctionalMixin, utils.RandomNameGeneratorMixin): """This is base class for any benchmark scenario. You should create subclass of this class. And your test scenarios will be auto discoverable and you will be able to specify it in test config. """ RESOURCE_NAME_FORMAT = "s_rally_XXXXXXXX_XXXXXXXX" def __init__(self, context=None): super(Scenario, self).__init__() self.context = context or {} self.task = self.context.get("task", {}) self._idle_duration = 0.0 self._output = {"additive": [], "complete": []} @classmethod def get_default_context(cls): return cls._meta_get("default_context") @staticmethod def _validate_helper(validators, clients, config, deployment): for validator in validators: try: result = validator(config, clients=clients, deployment=deployment) except Exception as e: LOG.exception(e) raise exceptions.InvalidScenarioArgument(e) else: if not result.is_valid: raise exceptions.InvalidScenarioArgument(result.msg) @staticmethod def _validate_scenario_args(scenario, name, config): if scenario.is_classbased: # We need initialize scenario class to access instancemethods scenario = scenario().run args, _varargs, varkwargs, defaults = inspect.getargspec(scenario) hint_msg = (" Use `rally plugin show --name %s` to display " "scenario description." 
% name) # scenario always accepts an instance of scenario cls as a first arg missed_args = args[1:] if defaults: # do not require args with default values missed_args = missed_args[:-len(defaults)] if "args" in config: missed_args = set(missed_args) - set(config["args"]) if missed_args: msg = ("Argument(s) '%(args)s' should be specified in task config." "%(hint)s" % {"args": "', '".join(missed_args), "hint": hint_msg}) raise exceptions.InvalidArgumentsException(msg) if varkwargs is None and "args" in config: redundant_args = set(config["args"]) - set(args[1:]) if redundant_args: msg = ("Unexpected argument(s) found ['%(args)s'].%(hint)s" % {"args": "', '".join(redundant_args), "hint": hint_msg}) raise exceptions.InvalidArgumentsException(msg) @classmethod def validate(cls, name, config, admin=None, users=None, deployment=None): """Semantic check of benchmark arguments.""" scenario = Scenario.get(name) cls._validate_scenario_args(scenario, name, config) validators = scenario._meta_get("validators", default=[]) if not validators: return admin_validators = [v for v in validators if v.permission == consts.EndpointPermission.ADMIN] user_validators = [v for v in validators if v.permission == consts.EndpointPermission.USER] # NOTE(boris-42): Potential bug, what if we don't have "admin" client # and scenario have "admin" validators. if admin: cls._validate_helper(admin_validators, admin, config, deployment) if users: for user in users: cls._validate_helper(user_validators, user, config, deployment) def sleep_between(self, min_sleep, max_sleep=None, atomic_delay=0.1): """Call an interruptable_sleep() for a random amount of seconds. The exact time is chosen uniformly randomly from the interval [min_sleep; max_sleep). The method also updates the idle_duration variable to take into account the overall time spent on sleeping. 
:param min_sleep: Minimum sleep time in seconds (non-negative) :param max_sleep: Maximum sleep time in seconds (non-negative) :param atomic_delay: parameter with which time.sleep would be called int(sleep_time / atomic_delay) times. """ if max_sleep is None: max_sleep = min_sleep if not 0 <= min_sleep <= max_sleep: raise exceptions.InvalidArgumentsException( "0 <= min_sleep <= max_sleep") sleep_time = random.uniform(min_sleep, max_sleep) utils.interruptable_sleep(sleep_time, atomic_delay) self._idle_duration += sleep_time def idle_duration(self): """Returns duration of all sleep_between.""" return self._idle_duration def add_output(self, additive=None, complete=None): """Add iteration's custom output data. This saves custom output data to task results. The main way to get this data processed is to find it in HTML report ("Scenario Data" tab), where it is displayed by tables or various charts (StackedArea, Lines, Pie). Take a look at "Processing Output Charts" section of Rally Plugins Reference to find explanations and examples about additive and complete output types and how to display this output data by specific widgets. Here is a simple example how to add both additive and complete data and display them by StackedArea widget in HTML report: .. 
code-block:: python self.add_output( additive={"title": "Additive data in StackedArea", "description": "Iterations trend for foo and bar", "chart_plugin": "StackedArea", "data": [["foo", 12], ["bar", 34]]}, complete={"title": "Complete data as stacked area", "description": "Data is shown as-is in StackedArea", "chart_plugin": "StackedArea", "data": [["foo", [[0, 5], [1, 42], [2, 15]]], ["bar", [[0, 2], [1, 1.3], [2, 5]]]], "label": "Y-axis label text", "axis_label": "X-axis label text"}) :param additive: dict with additive output :param complete: dict with complete output :raises RallyException: if output has wrong format """ for key, value in (("additive", additive), ("complete", complete)): if value: message = charts.validate_output(key, value) if message: raise exceptions.RallyException(message) self._output[key].append(value) @classmethod def _get_doc(cls): if cls.is_classbased: return cls.run.__doc__ return cls.__doc__ rally-0.9.1/rally/task/sla.py0000775000567000056710000001433013073417720017233 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ SLA (Service-level agreement) is set of details for determining compliance with contracted values such as maximum error rate or minimum response time. 
"""

import abc

import jsonschema
import six

from rally.common.i18n import _
from rally.common.plugin import plugin

# Re-exported so SLA plugins can use @sla.configure(...)
configure = plugin.configure


def _format_result(criterion_name, success, detail):
    """Returns the SLA result dict corresponding to the current state."""
    return {"criterion": criterion_name,
            "success": success,
            "detail": detail}


class SLAChecker(object):
    """Base SLA checker class."""

    def __init__(self, config):
        self.config = config
        # Set when an unexpected exception interrupted the run
        self.unexpected_failure = None
        self.aborted_on_sla = False
        self.aborted_manually = False
        # One criterion plugin instance per entry in config's "sla" section
        self.sla_criteria = [SLA.get(name)(criterion_value)
                             for name, criterion_value
                             in config.get("sla", {}).items()]

    def add_iteration(self, iteration):
        """Process the result of a single iteration.

        The call to add_iteration() will return True if all the SLA checks
        passed, and False otherwise.

        :param iteration: iteration result object
        """
        return all([sla.add_iteration(iteration)
                    for sla in self.sla_criteria])

    def merge(self, other):
        # Merge aggregated SLA data of another checker into this one.
        # Both checkers must carry identical configs and criteria types.
        self._validate_config(other)
        self._validate_sla_types(other)

        return all([self_sla.merge(other_sla)
                    for self_sla, other_sla
                    in six.moves.zip(self.sla_criteria,
                                     other.sla_criteria)])

    def _validate_sla_types(self, other):
        # zip_longest so a criteria-count mismatch surfaces as a None
        # passed to validate_type() rather than being silently skipped
        for self_sla, other_sla in six.moves.zip_longest(
                self.sla_criteria, other.sla_criteria):
            self_sla.validate_type(other_sla)

    def _validate_config(self, other):
        self_config = self.config.get("sla", {})
        other_config = other.config.get("sla", {})
        if self_config != other_config:
            message = _(
                "Error merging SLACheckers with configs %s, %s. "
                "Only SLACheckers with the same config could be merged."
            ) % (self_config, other_config)
            raise TypeError(message)

    def results(self):
        # Per-criterion results plus synthetic entries for abnormal
        # terminations (SLA abort, manual abort, unexpected error).
        results = [sla.result() for sla in self.sla_criteria]
        if self.aborted_on_sla:
            results.append(_format_result(
                "aborted_on_sla", False,
                _("Task was aborted due to SLA failure(s).")))
        if self.aborted_manually:
            results.append(_format_result(
                "aborted_manually", False,
                _("Task was aborted due to abort signal.")))
        if self.unexpected_failure:
            results.append(_format_result(
                "something_went_wrong", False,
                _("Unexpected error: %s") % self.unexpected_failure))
        return results

    def set_aborted_on_sla(self):
        self.aborted_on_sla = True

    def set_aborted_manually(self):
        self.aborted_manually = True

    def set_unexpected_failure(self, exc):
        self.unexpected_failure = exc


@plugin.base()
@six.add_metaclass(abc.ABCMeta)
class SLA(plugin.Plugin):
    """Factory for criteria classes."""

    def __init__(self, criterion_value):
        self.criterion_value = criterion_value
        self.success = True

    @staticmethod
    def validate(config):
        # Build a schema from all registered SLA plugins so unknown
        # criterion names are rejected by additionalProperties=False.
        properties = dict([(s.get_name(), s.CONFIG_SCHEMA)
                           for s in SLA.get_all()])
        schema = {
            "type": "object",
            "properties": properties,
            "additionalProperties": False,
        }
        jsonschema.validate(config, schema)

    @abc.abstractmethod
    def add_iteration(self, iteration):
        """Process the result of a single iteration and perform a SLA check.

        The call to add_iteration() will return True if the SLA check passed,
        and False otherwise.

        :param iteration: iteration result object
        :returns: True if the SLA check passed, False otherwise
        """

    def result(self):
        """Returns the SLA result dict corresponding to the current state."""
        return _format_result(self.get_name(), self.success, self.details())

    @abc.abstractmethod
    def details(self):
        """Returns the string describing the current results of the SLA."""

    def status(self):
        """Return "Passed" or "Failed" depending on the current SLA status."""
        return "Passed" if self.success else "Failed"

    @abc.abstractmethod
    def merge(self, other):
        """Merge aggregated data from another SLA instance into self.
Process the results of several iterations aggregated in another instance of SLA together with ones stored in self so that the code sla1 = SLA() sla1.add_iteration(a) sla1.add_iteration(b) sla2 = SLA() sla2.add_iteration(c) sla2.add_iteration(d) sla1.merge(sla2) is equivalent to sla1 = SLA() sla1.add_iteration(a) sla1.add_iteration(b) sla1.add_iteration(c) sla1.add_iteration(d) The call to merge() will return True if the SLA check passed, and False otherwise. :param other: another SLA object :returns: True if the SLA check passed, False otherwise """ def validate_type(self, other): if type(self) != type(other): message = _( "Error merging SLAs of types %s, %s. " "Only SLAs of the same type could be merged." ) % (type(self), type(other)) raise TypeError(message) rally-0.9.1/rally/task/engine.py0000664000567000056710000006515413073417720017730 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections import copy import json import threading import time import traceback import jsonschema from oslo_config import cfg import six from rally.common.i18n import _ from rally.common import logging from rally.common import objects from rally.common import utils from rally import consts from rally import exceptions from rally.task import context from rally.task import hook from rally.task import runner from rally.task import scenario from rally.task import sla LOG = logging.getLogger(__name__) CONF = cfg.CONF TASK_ENGINE_OPTS = [ cfg.IntOpt("raw_result_chunk_size", default=1000, min=1, help="Size of raw result chunk in iterations"), ] CONF.register_opts(TASK_ENGINE_OPTS) class ResultConsumer(object): """ResultConsumer class stores results from ScenarioRunner, checks SLA. Also ResultConsumer listens for runner events and notifies HookExecutor about started iterations. """ def __init__(self, key, task, subtask, workload, runner, abort_on_sla_failure): """ResultConsumer constructor. 
:param key: Scenario identifier :param task: Instance of Task, task to run :param subtask: Instance of Subtask :param workload: Instance of Workload :param runner: ScenarioRunner instance that produces results to be consumed :param abort_on_sla_failure: True if the execution should be stopped when some SLA check fails """ self.key = key self.task = task self.subtask = subtask self.workload = workload self.runner = runner self.load_started_at = float("inf") self.load_finished_at = 0 self.workload_data_count = 0 self.sla_checker = sla.SLAChecker(key["kw"]) self.hook_executor = hook.HookExecutor(key["kw"], self.task) self.abort_on_sla_failure = abort_on_sla_failure self.is_done = threading.Event() self.unexpected_failure = {} self.results = [] self.thread = threading.Thread(target=self._consume_results) self.aborting_checker = threading.Thread(target=self.wait_and_abort) if "hooks" in self.key["kw"]: self.event_thread = threading.Thread(target=self._consume_events) def __enter__(self): self.thread.start() self.aborting_checker.start() if "hooks" in self.key["kw"]: self.event_thread.start() self.start = time.time() return self def _consume_results(self): task_aborted = False while True: if self.runner.result_queue: results = self.runner.result_queue.popleft() self.results.extend(results) for r in results: self.load_started_at = min(r["timestamp"], self.load_started_at) self.load_finished_at = max(r["duration"] + r["timestamp"], self.load_finished_at) success = self.sla_checker.add_iteration(r) if (self.abort_on_sla_failure and not success and not task_aborted): self.sla_checker.set_aborted_on_sla() self.runner.abort() self.task.update_status( consts.TaskStatus.SOFT_ABORTING) task_aborted = True # save results chunks chunk_size = CONF.raw_result_chunk_size while len(self.results) >= chunk_size: results_chunk = self.results[:chunk_size] self.results = self.results[chunk_size:] results_chunk.sort(key=lambda x: x["timestamp"]) 
self.workload.add_workload_data(self.workload_data_count, {"raw": results_chunk}) self.workload_data_count += 1 elif self.is_done.isSet(): break else: time.sleep(0.1) def _consume_events(self): while not self.is_done.isSet() or self.runner.event_queue: if self.runner.event_queue: event = self.runner.event_queue.popleft() self.hook_executor.on_event( event_type=event["type"], value=event["value"]) else: time.sleep(0.01) def __exit__(self, exc_type, exc_value, exc_traceback): self.finish = time.time() self.is_done.set() self.aborting_checker.join() self.thread.join() if exc_type: self.sla_checker.set_unexpected_failure(exc_value) if objects.Task.get_status( self.task["uuid"]) == consts.TaskStatus.ABORTED: self.sla_checker.set_aborted_manually() load_duration = max(self.load_finished_at - self.load_started_at, 0) LOG.info("Load duration is: %s" % utils.format_float_to_str( load_duration)) LOG.info("Full runner duration is: %s" % utils.format_float_to_str(self.runner.run_duration)) LOG.info("Full duration is: %s" % utils.format_float_to_str( self.finish - self.start)) results = { "load_duration": load_duration, "full_duration": self.finish - self.start, "sla": self.sla_checker.results(), } if "hooks" in self.key["kw"]: self.event_thread.join() results["hooks"] = self.hook_executor.results() if self.results: # NOTE(boris-42): Sort in order of starting # instead of order of ending self.results.sort(key=lambda x: x["timestamp"]) self.workload.add_workload_data(self.workload_data_count, {"raw": self.results}) self.workload.set_results(results) @staticmethod def is_task_in_aborting_status(task_uuid, check_soft=True): """Checks task is in abort stages :param task_uuid: UUID of task to check status :type task_uuid: str :param check_soft: check or not SOFT_ABORTING status :type check_soft: bool """ stages = [consts.TaskStatus.ABORTING, consts.TaskStatus.ABORTED] if check_soft: stages.append(consts.TaskStatus.SOFT_ABORTING) return objects.Task.get_status(task_uuid) in stages 
def wait_and_abort(self): """Waits until abort signal is received and aborts runner in this case. Has to be run from different thread simultaneously with the runner.run method. """ while not self.is_done.isSet(): if self.is_task_in_aborting_status(self.task["uuid"], check_soft=False): self.runner.abort() self.task.update_status(consts.TaskStatus.ABORTED) break time.sleep(2.0) class TaskEngine(object): """The Task engine class is used to execute benchmark scenarios. An instance of this class is initialized by the API with the task configuration and then is used to validate and execute all specified in config subtasks. .. note:: Typical usage: ... engine = TaskEngine(config, task, deployment) engine.validate() # to test config engine.run() # to run config """ def __init__(self, config, task, deployment, abort_on_sla_failure=False): """TaskEngine constructor. :param config: Dict with configuration of specified benchmark scenarios :param task: Instance of Task, the current task which is being performed :param deployment: Instance of Deployment, :param abort_on_sla_failure: True if the execution should be stopped when some SLA check fails """ try: self.config = TaskConfig(config) except Exception as e: task.set_failed(type(e).__name__, str(e), json.dumps(traceback.format_exc())) if logging.is_debug(): LOG.exception(e) raise exceptions.InvalidTaskException(str(e)) self.task = task self.deployment = deployment self.abort_on_sla_failure = abort_on_sla_failure @logging.log_task_wrapper(LOG.info, _("Task validation of scenarios names.")) def _validate_config_scenarios_name(self, config): available = set(s.get_name() for s in scenario.Scenario.get_all()) specified = set() for subtask in config.subtasks: for s in subtask.workloads: specified.add(s.name) if not specified.issubset(available): names = ", ".join(specified - available) raise exceptions.NotFoundScenarios(names=names) @logging.log_task_wrapper(LOG.info, _("Task validation of syntax.")) def 
_validate_config_syntax(self, config): for subtask in config.subtasks: for workload in subtask.workloads: scenario_cls = scenario.Scenario.get(workload.name) scenario_context = copy.deepcopy( scenario_cls.get_default_context()) try: runner.ScenarioRunner.validate(workload.runner) context.ContextManager.validate(workload.context) context.ContextManager.validate(scenario_context, allow_hidden=True) sla.SLA.validate(workload.sla) for hook_conf in workload.hooks: hook.Hook.validate(hook_conf) except (exceptions.RallyException, jsonschema.ValidationError) as e: kw = workload.make_exception_args(six.text_type(e)) raise exceptions.InvalidTaskConfig(**kw) def _validate_config_semantic_helper(self, admin, user_context, workloads, deployment): with user_context as ctx: ctx.setup() for workload in workloads: try: scenario_cls = scenario.Scenario.get(workload.name) scenario_cls.validate( workload.name, workload.to_dict(), admin=admin, users=ctx.context["users"], deployment=deployment) except exceptions.InvalidScenarioArgument as e: kw = workload.make_exception_args(six.text_type(e)) raise exceptions.InvalidTaskConfig(**kw) @logging.log_task_wrapper(LOG.info, _("Task validation of semantic.")) def _validate_config_semantic(self, config): # map workloads to platforms platforms = collections.defaultdict(list) for subtask in config.subtasks: for workload in subtask.workloads: # TODO(astudenov): We need to use a platform validator # in future to identify what kind of users workload # requires (regular users or admin) scenario_cls = scenario.Scenario.get(workload.name) namespace = scenario_cls.get_namespace() platforms[namespace].append(workload) # FIXME(astudenov): currently there is no credentials for # namespace 'default', thus 'opentack' is used as a workaround if "default" in platforms: default_workloads = platforms.pop("default") platforms["openstack"].extend(default_workloads) for platform, workloads in platforms.items(): creds = self.deployment.get_credentials_for(platform) 
admin = objects.Credential(**creds["admin"]) # TODO(astudenov): move this check to validator of Credential if platform == "openstack": from rally import osclients clients = osclients.Clients(admin) clients.verified_keystone() workloads_with_users = [] workloads_with_existing_users = [] for workload in workloads: if creds["users"] and "users" not in workload.context: workloads_with_existing_users.append(workload) else: workloads_with_users.append(workload) if workloads_with_users: ctx_conf = {"task": self.task, "admin": {"credential": admin}} user_context = context.Context.get( "users", namespace=platform)(ctx_conf) self._validate_config_semantic_helper( admin, user_context, workloads_with_users, self.deployment) if workloads_with_existing_users: ctx_conf = {"task": self.task, "config": {"existing_users": creds["users"]}} # NOTE(astudenov): allow_hidden=True is required # for openstack existing_users context user_context = context.Context.get( "existing_users", namespace=platform, allow_hidden=True)(ctx_conf) self._validate_config_semantic_helper( admin, user_context, workloads_with_existing_users, self.deployment) @logging.log_task_wrapper(LOG.info, _("Task validation.")) def validate(self): """Perform full task configuration validation.""" self.task.update_status(consts.TaskStatus.VALIDATING) try: self._validate_config_scenarios_name(self.config) self._validate_config_syntax(self.config) self._validate_config_semantic(self.config) except Exception as e: exception_info = json.dumps(traceback.format_exc(), indent=2, separators=(",", ": ")) self.task.set_failed(type(e).__name__, str(e), exception_info) if logging.is_debug(): LOG.exception(e) raise exceptions.InvalidTaskException(str(e)) def _get_runner(self, config): config = config or {"type": "serial"} return runner.ScenarioRunner.get(config["type"])(self.task, config) def _prepare_context(self, ctx, name): scenario_cls = scenario.Scenario.get(name) namespace = scenario_cls.get_namespace() # FIXME(astudenov): 
currently there is no credentials for # namespace 'default', thus 'opentack' is used as a workaround if namespace == "default": namespace = "openstack" creds = self.deployment.get_credentials_for(namespace) existing_users = creds["users"] # TODO(astudenov): use credential plugin in future refactoring admin = objects.Credential(**creds["admin"]) scenario_context = copy.deepcopy(scenario_cls.get_default_context()) if existing_users and "users" not in ctx: scenario_context.setdefault("existing_users", existing_users) elif "users" not in ctx: scenario_context.setdefault("users", {}) scenario_context.update(ctx) context_obj = { "task": self.task, "admin": {"credential": admin}, "scenario_name": name, "config": scenario_context } return context_obj @logging.log_task_wrapper(LOG.info, _("Benchmarking.")) def run(self): """Run the benchmark according to the test configuration. Test configuration is specified on engine initialization. :returns: List of dicts, each dict containing the results of all the corresponding benchmark test launches """ self.task.update_status(consts.TaskStatus.RUNNING) for subtask in self.config.subtasks: subtask_obj = self.task.add_subtask(**subtask.to_dict()) for workload in subtask.workloads: if ResultConsumer.is_task_in_aborting_status( self.task["uuid"]): LOG.info("Received aborting signal.") self.task.update_status(consts.TaskStatus.ABORTED) return key = workload.make_key() workload_obj = subtask_obj.add_workload(key) LOG.info("Running benchmark with key: \n%s" % json.dumps(key, indent=2)) runner_obj = self._get_runner(workload.runner) context_obj = self._prepare_context( workload.context, workload.name) try: with ResultConsumer(key, self.task, subtask_obj, workload_obj, runner_obj, self.abort_on_sla_failure): with context.ContextManager(context_obj): runner_obj.run(workload.name, context_obj, workload.args) except Exception as e: LOG.debug(traceback.format_exc()) LOG.exception(e) if objects.Task.get_status( self.task["uuid"]) != 
consts.TaskStatus.ABORTED: self.task.update_status(consts.TaskStatus.FINISHED) class TaskConfig(object): """Version-aware wrapper around task. """ HOOK_CONFIG = { "type": "object", "properties": { "name": {"type": "string"}, "description": {"type": "string"}, "args": {}, "trigger": { "type": "object", "properties": { "name": {"type": "string"}, "args": {}, }, "required": ["name", "args"], "additionalProperties": False, } }, "required": ["name", "args", "trigger"], "additionalProperties": False, } CONFIG_SCHEMA_V1 = { "type": "object", "$schema": consts.JSON_SCHEMA, "patternProperties": { ".*": { "type": "array", "items": { "type": "object", "properties": { "args": {"type": "object"}, "runner": { "type": "object", "properties": {"type": {"type": "string"}}, "required": ["type"] }, "context": {"type": "object"}, "sla": {"type": "object"}, "hooks": { "type": "array", "items": HOOK_CONFIG, } }, "additionalProperties": False } } } } CONFIG_SCHEMA_V2 = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "version": {"type": "number"}, "title": {"type": "string"}, "description": {"type": "string"}, "tags": { "type": "array", "items": {"type": "string"} }, "subtasks": { "type": "array", "minItems": 1, "items": { "type": "object", "properties": { "title": {"type": "string"}, "group": {"type": "string"}, "description": {"type": "string"}, "tags": { "type": "array", "items": {"type": "string"} }, "run_in_parallel": {"type": "boolean"}, "workloads": { "type": "array", "minItems": 1, "maxItems": 1, "items": { "type": "object", "properties": { "name": {"type": "string"}, "args": {"type": "object"}, "runner": { "type": "object", "properties": { "type": {"type": "string"} }, "required": ["type"] }, "sla": {"type": "object"}, "hooks": { "type": "array", "items": HOOK_CONFIG, }, "context": {"type": "object"} }, "additionalProperties": False, "required": ["name", "runner"] } } }, "additionalProperties": False, "required": ["title", "workloads"] } } }, 
"additionalProperties": False, "required": ["title", "subtasks"] } CONFIG_SCHEMAS = {1: CONFIG_SCHEMA_V1, 2: CONFIG_SCHEMA_V2} def __init__(self, config): """TaskConfig constructor. :param config: Dict with configuration of specified task """ if config is None: # NOTE(stpierre): This gets reraised as # InvalidTaskException. if we raise it here as # InvalidTaskException, then "Task config is invalid: " # gets prepended to the message twice. raise Exception(_("Input task is empty")) self.version = self._get_version(config) self._validate_version() self._validate_json(config) self.title = config.get("title", "Task") self.tags = config.get("tags", []) self.description = config.get("description") self.subtasks = self._make_subtasks(config) # if self.version == 1: # TODO(ikhudoshyn): Warn user about deprecated format @staticmethod def _get_version(config): return config.get("version", 1) def _validate_version(self): if self.version not in self.CONFIG_SCHEMAS: allowed = ", ".join([str(k) for k in self.CONFIG_SCHEMAS]) msg = (_("Task configuration version {0} is not supported. " "Supported versions: {1}")).format(self.version, allowed) raise exceptions.InvalidTaskException(msg) def _validate_json(self, config): try: jsonschema.validate(config, self.CONFIG_SCHEMAS[self.version]) except Exception as e: raise exceptions.InvalidTaskException(str(e)) def _make_subtasks(self, config): if self.version == 2: return [SubTask(s) for s in config["subtasks"]] elif self.version == 1: subtasks = [] for name, v1_workloads in config.items(): for v1_workload in v1_workloads: v2_workload = copy.deepcopy(v1_workload) v2_workload["name"] = name subtasks.append( SubTask({"title": name, "workloads": [v2_workload]})) return subtasks class SubTask(object): """Subtask -- unit of execution in Task """ def __init__(self, config): """Subtask constructor. 
:param config: Dict with configuration of specified subtask """ self.title = config["title"] self.tags = config.get("tags", []) self.group = config.get("group") self.description = config.get("description") self.workloads = [Workload(wconf, pos) for pos, wconf in enumerate(config["workloads"])] self.context = config.get("context", {}) def to_dict(self): return { "title": self.title, "description": self.description, "context": self.context, } class Workload(object): """Workload -- workload configuration in SubTask. """ def __init__(self, config, pos): self.name = config["name"] self.runner = config.get("runner", {}) self.sla = config.get("sla", {}) self.hooks = config.get("hooks", []) self.context = config.get("context", {}) self.args = config.get("args", {}) self.pos = pos def to_dict(self): workload = {"runner": self.runner} for prop in "sla", "args", "context", "hooks": value = getattr(self, prop) if value: workload[prop] = value return workload def to_task(self): """Make task configuration for the workload. This method returns a dict representing full configuration of the task containing a single subtask with this single workload. :return: dict containing full task configuration """ # NOTE(ikhudoshyn): Result of this method will be used # to store full task configuration in DB so that # subtask configuration in reports would be given # in the same format as it was provided by user. # Temporarily it returns to_dict() in order not # to break existing reports. It should be # properly implemented in a patch that will update reports. # return {self.name: [self.to_dict()]} return self.to_dict() def make_key(self): return {"name": self.name, "pos": self.pos, "kw": self.to_task()} def make_exception_args(self, reason): return {"name": self.name, "pos": self.pos, "config": self.to_dict(), "reason": reason} rally-0.9.1/rally/task/atomic.py0000664000567000056710000001023013073417720017720 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import functools from rally.common import utils class ActionTimerMixin(object): def __init__(self): self._atomic_actions = collections.OrderedDict() def atomic_actions(self): """Returns the content of each atomic action.""" return self._atomic_actions class ActionTimer(utils.Timer): """A class to measure the duration of atomic operations This would simplify the way measure atomic operation duration in certain cases. For example, if we want to get the duration for each operation which runs in an iteration for i in range(repetitions): with atomic.ActionTimer(instance_of_action_timer, "name_of_action"): self.clients(). """ def __init__(self, instance, name): """Create a new instance of the AtomicAction. :param instance: instance of subclass of ActionTimerMixin :param name: name of the ActionBuilder """ super(ActionTimer, self).__init__() self.instance = instance self.name = self._get_atomic_action_name(instance, name) self.instance._atomic_actions[self.name] = None @classmethod def _get_atomic_action_name(cls, instance, name): # TODO(boris-42): It was quite bad idea to store atomic actions # inside {}. 
We should refactor this in 0.2.0 release # and store them inside array, that will allow us to # store atomic actions with the same name if name not in instance._atomic_actions: return name name_template = name + " (%i)" i = 2 while name_template % i in instance._atomic_actions: i += 1 return name_template % i def __exit__(self, type_, value, tb): super(ActionTimer, self).__exit__(type_, value, tb) self.instance._atomic_actions[self.name] = self.duration() def action_timer(name): """Provide measure of execution time. Decorates methods of the Scenario class. This provides duration in seconds of each atomic action. """ def wrap(func): @functools.wraps(func) def func_atomic_actions(self, *args, **kwargs): with ActionTimer(self, name): f = func(self, *args, **kwargs) return f return func_atomic_actions return wrap def optional_action_timer(name, argument_name="atomic_action", default=True): """Optionally provide measure of execution time. Decorates methods of the Scenario class. This provides duration in seconds of each atomic action. When the decorated function is called, this inspects the kwarg named by ``argument_name`` and optionally sets an ActionTimer around the function call. The ``atomic_action`` keyword argument does not need to be added to the function; it will be popped from the kwargs dict by the wrapper. :param name: The name of the timer :param argument_name: The name of the kwarg to inspect to determine if a timer should be set. :param default: Whether or not to set a timer if ``argument_name`` is not present. """ def wrap(func): @functools.wraps(func) def func_atomic_actions(self, *args, **kwargs): if kwargs.pop(argument_name, default): with ActionTimer(self, name): f = func(self, *args, **kwargs) else: f = func(self, *args, **kwargs) return f return func_atomic_actions return wrap rally-0.9.1/rally/task/utils.py0000775000567000056710000003623713073417720017626 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import itertools import time import traceback import jsonschema from novaclient import exceptions as nova_exc import six from rally.common.i18n import _ from rally.common import logging from rally import consts from rally import exceptions LOG = logging.getLogger(__name__) def get_status(resource, status_attr="status"): """Get the status of a given resource object. The status is returned in upper case. The status is checked for the standard field names with special cases for Heat and Ceilometer. :param resource: The resource object or dict. :param status_attr: Allows to specify non-standard status fields. :return: The status or "NONE" if it is not available. 
""" for s_attr in ["stack_status", "state", status_attr]: status = getattr(resource, s_attr, None) if isinstance(status, six.string_types): return status.upper() # Dict case if ((isinstance(resource, dict) and status_attr in resource.keys() and isinstance(resource[status_attr], six.string_types))): return resource[status_attr].upper() return "NONE" class resource_is(object): def __init__(self, desired_status, status_getter=None): self.desired_status = desired_status self.status_getter = status_getter or get_status def __call__(self, resource): return self.status_getter(resource) == self.desired_status.upper() def __str__(self): return str(self.desired_status) def get_from_manager(error_statuses=None): error_statuses = error_statuses or ["ERROR"] error_statuses = map(lambda str: str.upper(), error_statuses) def _get_from_manager(resource, id_attr="id"): # catch client side errors try: res = resource.manager.get(getattr(resource, id_attr)) except Exception as e: if getattr(e, "code", getattr(e, "http_status", 400)) == 404: raise exceptions.GetResourceNotFound(resource=resource) raise exceptions.GetResourceFailure(resource=resource, err=e) # catch abnormal status, such as "no valid host" for servers status = get_status(res) if status in ("DELETED", "DELETE_COMPLETE"): raise exceptions.GetResourceNotFound(resource=res) if status in error_statuses: raise exceptions.GetResourceErrorStatus( resource=res, status=status, fault=getattr(res, "fault", "n/a")) return res return _get_from_manager def manager_list_size(sizes): def _list(mgr): return len(mgr.list()) in sizes return _list @logging.log_deprecated("Use wait_for_status instead.", "0.1.2", once=True) def wait_for(resource, is_ready=None, ready_statuses=None, failure_statuses=None, status_attr="status", update_resource=None, timeout=60, check_interval=1, id_attr="id"): """Waits for the given resource to come into the one of the given statuses. 
The method can be used to check resource for status with a `is_ready` function or with a list of expected statuses and the status attribute In case when the is_ready checker is not provided the resource should have status_attr. It may be an object attribute or a dictionary key. The value of the attribute is checked against ready statuses list and failure statuses. In case of a failure the wait exits with an exception. The resource is updated between iterations with an update_resource call. :param is_ready: A predicate that should take the resource object and return True iff it is ready to be returned :param ready_statuses: List of statuses which mean that the resource is ready :param failure_statuses: List of statuses which mean that an error has occurred while waiting for the resource :param status_attr: The name of the status attribute of the resource :param update_resource: Function that should take the resource object and return an 'updated' resource. If set to None, no result updating is performed :param timeout: Timeout in seconds after which a TimeoutException will be raised :param check_interval: Interval in seconds between the two consecutive readiness checks :returns: The "ready" resource object """ if is_ready is not None: return wait_is_ready(resource=resource, is_ready=is_ready, update_resource=update_resource, timeout=timeout, check_interval=check_interval) else: return wait_for_status(resource=resource, ready_statuses=ready_statuses, failure_statuses=failure_statuses, status_attr=status_attr, update_resource=update_resource, timeout=timeout, check_interval=check_interval, id_attr=id_attr) @logging.log_deprecated("Use wait_for_status instead.", "0.1.2", once=True) def wait_is_ready(resource, is_ready, update_resource=None, timeout=60, check_interval=1): resource_repr = getattr(resource, "name", repr(resource)) start = time.time() while True: if update_resource is not None: resource = update_resource(resource) if is_ready(resource): return resource 
time.sleep(check_interval) if time.time() - start > timeout: raise exceptions.TimeoutException( desired_status=str(is_ready), resource_name=resource_repr, resource_type=resource.__class__.__name__, resource_id=getattr(resource, "id", ""), resource_status=get_status(resource)) def wait_for_status(resource, ready_statuses, failure_statuses=None, status_attr="status", update_resource=None, timeout=60, check_interval=1, check_deletion=False, id_attr="id"): resource_repr = getattr(resource, "name", repr(resource)) if not isinstance(ready_statuses, (set, list, tuple)): raise ValueError("Ready statuses should be supplied as set, list or " "tuple") if failure_statuses and not isinstance(failure_statuses, (set, list, tuple)): raise ValueError("Failure statuses should be supplied as set, list or " "tuple") # make all statuses upper case ready_statuses = set(s.upper() for s in ready_statuses or []) failure_statuses = set(s.upper() for s in failure_statuses or []) if (ready_statuses & failure_statuses): raise ValueError( "Can't wait for resource's %s status. Ready and Failure" "statuses conflict." % resource_repr) if not ready_statuses: raise ValueError( "Can't wait for resource's %s status. No ready " "statuses provided" % resource_repr) if not update_resource: raise ValueError( "Can't wait for resource's %s status. No update method." % resource_repr) start = time.time() latest_status = get_status(resource, status_attr) latest_status_update = start while True: try: if id_attr == "id": resource = update_resource(resource) else: resource = update_resource(resource, id_attr=id_attr) except exceptions.GetResourceNotFound: if check_deletion: return else: raise status = get_status(resource, status_attr) if status != latest_status: current_time = time.time() delta = current_time - latest_status_update LOG.debug( "Waiting for resource %(resource)s. 
Status changed: " "%(latest)s => %(current)s in %(delta)s" % {"resource": resource_repr, "latest": latest_status, "current": status, "delta": delta}) latest_status = status latest_status_update = current_time if status in ready_statuses: return resource if status in failure_statuses: raise exceptions.GetResourceErrorStatus( resource=resource, status=status, fault="Status in failure list %s" % str(failure_statuses)) time.sleep(check_interval) if time.time() - start > timeout: raise exceptions.TimeoutException( desired_status="('%s')" % "', '".join(ready_statuses), resource_name=resource_repr, resource_type=resource.__class__.__name__, resource_id=getattr(resource, id_attr, ""), resource_status=get_status(resource, status_attr)) @logging.log_deprecated("Use wait_for_status instead.", "0.1.2", once=True) def wait_for_delete(resource, update_resource=None, timeout=60, check_interval=1): """Wait for the full deletion of resource. :param update_resource: Function that should take the resource object and return an 'updated' resource, or raise exception rally.exceptions.GetResourceNotFound that means that resource is deleted. 
:param timeout: Timeout in seconds after which a TimeoutException will be raised :param check_interval: Interval in seconds between the two consecutive readiness checks """ start = time.time() while True: try: resource = update_resource(resource) except exceptions.GetResourceNotFound: break time.sleep(check_interval) if time.time() - start > timeout: raise exceptions.TimeoutException( desired_status="deleted", resource_name=getattr(resource, "name", repr(resource)), resource_type=resource.__class__.__name__, resource_id=getattr(resource, "id", ""), resource_status=get_status(resource)) def format_exc(exc): return [exc.__class__.__name__, str(exc), traceback.format_exc()] def infinite_run_args_generator(args_func): for i in itertools.count(): yield args_func(i) def check_service_status(client, service_name): """Check if given openstack service is enabled and state is up.""" try: for service in client.services.list(): if service_name in str(service): if service.status == "enabled" and service.state == "up": return True except nova_exc.NotFound: LOG.warning(_("Unable to retrieve a list of available services from " "nova. Pre-Grizzly OpenStack deployment?")) return False return False class ActionBuilder(object): """Builder class for mapping and creating action objects. An action list is an array of single key/value dicts which takes the form: [{"action": times}, {"action": times}...] Here 'action' is a string which indicates an action to perform and 'times' is a non-zero positive integer which specifies how many times to run the action in sequence. This utility builder class will build and return methods which wrapper the action call the given amount of times. 
""" SCHEMA_TEMPLATE = { "type": "array", "$schema": consts.JSON_SCHEMA, "items": { "type": "object", "properties": {}, "additionalProperties": False, "minItems": 0 } } ITEM_TEMPLATE = { "type": "integer", "minimum": 0, "exclusiveMinimum": True, "optional": True } def __init__(self, action_keywords): """Create a new instance of the builder for the given action keywords. :param action_keywords: A list of strings which are the keywords this instance of the builder supports. """ self._bindings = {} self.schema = dict(ActionBuilder.SCHEMA_TEMPLATE) for kw in action_keywords: self.schema["items"]["properties"][kw] = ( ActionBuilder.ITEM_TEMPLATE) def bind_action(self, action_key, action, *args, **kwargs): """Bind an action to an action key. Static args/kwargs can be optionally binded. :param action_key: The action keyword to bind the action to. :param action: A method/function to call for the action. :param args: (optional) Static positional args to prepend to all invocations of the action. :param kwargs: (optional) Static kwargs to prepend to all invocations of the action. """ self.validate([{action_key: 1}]) self._bindings[action_key] = { "action": action, "args": args or (), "kwargs": kwargs or {} } def validate(self, actions): """Validate the list of action objects against the builder schema. :param actions: The list of action objects to validate. """ jsonschema.validate(actions, self.schema) def _build(self, func, times, *args, **kwargs): """Build the wrapper action call.""" def _f(): for i in range(times): func(*args, **kwargs) return _f def build_actions(self, actions, *args, **kwargs): """Build a list of callable actions. A list of callable actions based on the given action object list and the actions bound to this builder. :param actions: A list of action objects to build callable action for. :param args: (optional) Positional args to pass into each built action. These will be appended to any args set for the action via its binding. 
:param kwargs: (optional) Keyword args to pass into each built action. These will be appended to any kwards set for the action via its binding. """ self.validate(actions) bound_actions = [] for action in actions: action_key = list(action)[0] times = action.get(action_key) binding = self._bindings.get(action_key) dft_kwargs = dict(binding["kwargs"]) dft_kwargs.update(kwargs or {}) bound_actions.append( self._build(binding["action"], times, *(binding["args"] + args), **dft_kwargs)) return bound_actions rally-0.9.1/rally/task/hook.py0000664000567000056710000001605313073417720017415 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import abc import collections import threading import jsonschema import six from rally.common.i18n import _, _LE from rally.common import logging from rally.common.plugin import plugin from rally.common import utils as rutils from rally import consts from rally import exceptions from rally.task.processing import charts from rally.task import trigger from rally.task import utils LOG = logging.getLogger(__name__) configure = plugin.configure class HookExecutor(object): """Runs hooks and collects results from them.""" def __init__(self, config, task): self.config = config self.task = task self.triggers = collections.defaultdict(list) for hook in config.get("hooks", []): hook_cls = Hook.get(hook["name"]) trigger_obj = trigger.Trigger.get( hook["trigger"]["name"])(hook, self.task, hook_cls) event_type = trigger_obj.get_listening_event() self.triggers[event_type].append(trigger_obj) if "time" in self.triggers: self._timer_thread = threading.Thread(target=self._timer_method) self._timer_stop_event = threading.Event() def _timer_method(self): """Timer thread method. It generates events with type "time" to inform HookExecutor about how many time passed since beginning of the first iteration. """ stopwatch = rutils.Stopwatch(stop_event=self._timer_stop_event) stopwatch.start() seconds_since_start = 0 while not self._timer_stop_event.isSet(): self.on_event(event_type="time", value=seconds_since_start) seconds_since_start += 1 stopwatch.sleep(seconds_since_start) def _start_timer(self): self._timer_thread.start() def _stop_timer(self): self._timer_stop_event.set() if self._timer_thread.ident is not None: self._timer_thread.join() def on_event(self, event_type, value): """Notify about event. This method should be called to inform HookExecutor that particular event occurred. It runs hooks configured for event. 
""" if "time" in self.triggers: # start timer on first iteration if event_type == "iteration" and value == 1: self._start_timer() for trigger_obj in self.triggers[event_type]: started = trigger_obj.on_event(event_type, value) if started: LOG.info(_("Hook %s is trigged for Task %s by %s=%s") % (trigger_obj.hook_cls.__name__, self.task["uuid"], event_type, value)) def results(self): """Returns list of dicts with hook results.""" if "time" in self.triggers: self._stop_timer() results = [] for triggers_group in self.triggers.values(): for trigger_obj in triggers_group: results.append(trigger_obj.get_results()) return results @plugin.base() @six.add_metaclass(abc.ABCMeta) class Hook(plugin.Plugin): """Factory for hook classes.""" def __init__(self, task, config, triggered_by): self.task = task self.config = config self._triggered_by = triggered_by self._thread = threading.Thread(target=self._thread_method) self._started_at = 0.0 self._finished_at = 0.0 self._result = { "status": consts.HookStatus.SUCCESS, "started_at": self._started_at, "finished_at": self._finished_at, "triggered_by": self._triggered_by, } @staticmethod def validate(config): config_schema = Hook.get(config["name"]).CONFIG_SCHEMA jsonschema.validate(config["args"], config_schema) trigger.Trigger.validate(config["trigger"]) def _thread_method(self): # Run hook synchronously self.run_sync() def set_error(self, exception_name, description, details): """Set error related information to result. :param exception_name: name of exception as string :param description: short description as string :param details: any details as string """ self.set_status(consts.HookStatus.FAILED) self._result["error"] = {"etype": exception_name, "msg": description, "details": details} def set_status(self, status): """Set status to result.""" self._result["status"] = status def add_output(self, additive=None, complete=None): """Save custom output. 
:param additive: dict with additive output :param complete: dict with complete output :raises RallyException: if output has wrong format """ if "output" not in self._result: self._result["output"] = {"additive": [], "complete": []} for key, value in (("additive", additive), ("complete", complete)): if value: message = charts.validate_output(key, value) if message: raise exceptions.RallyException(message) self._result["output"][key].append(value) def run_async(self): """Run hook asynchronously.""" self._thread.start() def run_sync(self): """Run hook synchronously.""" try: with rutils.Timer() as timer: self.run() except Exception as exc: LOG.error(_LE("Hook %s failed during run."), self.get_name()) LOG.exception(exc) self.set_error(*utils.format_exc(exc)) self._started_at = timer.timestamp() self._result["started_at"] = self._started_at self._finished_at = timer.finish_timestamp() self._result["finished_at"] = self._finished_at @abc.abstractmethod def run(self): """Run method. This method should be implemented in plugin. Hook plugin should call following methods to set result: set_status - to set hook execution status Optionally the following methods should be called: set_error - to indicate that there was an error; automatically sets hook execution status to 'failed' add_output - provide data for report """ def result(self): """Wait and return result of hook.""" if self._thread.ident is not None: # hook is still running, wait for result self._thread.join() return self._result rally-0.9.1/rally/task/context.py0000664000567000056710000001422213073417720020135 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import jsonschema import six from rally.common import logging from rally.common.plugin import plugin from rally.common import utils from rally.task import functional LOG = logging.getLogger(__name__) def configure(name, order, namespace="default", hidden=False): """Context class wrapper. Each context class has to be wrapped by configure() wrapper. It sets essential configuration of context classes. Actually this wrapper just adds attributes to the class. :param name: Name of the class, used in the input task :param order: As far as we can use multiple context classes that sometimes depend on each other we have to specify order of execution. Contexts with smaller order are run first :param hidden: If it is true you won't be able to specify context via task config """ def wrapper(cls): cls = plugin.configure(name=name, namespace=namespace, hidden=hidden)(cls) cls._meta_set("order", order) return cls return wrapper # TODO(andreykurilin): move it to some common place. @six.add_metaclass(abc.ABCMeta) class BaseContext(plugin.Plugin, functional.FunctionalMixin, utils.RandomNameGeneratorMixin): """This class is a factory for context classes. 
Every context class should be a subclass of this class and implement 2 abstract methods: setup() and cleanup() It covers: 1) proper setting up of context config 2) Auto discovering & get by name 3) Validation by CONFIG_SCHEMA 4) Order of context creation """ RESOURCE_NAME_FORMAT = "c_rally_XXXXXXXX_XXXXXXXX" CONFIG_SCHEMA = {"type": "null"} def __init__(self, ctx): config = ctx.get("config", {}).get(self.get_name(), {}) # NOTE(amaretskiy): self.config is a constant data and must be # immutable or write-protected type to prevent # unexpected changes in runtime if isinstance(config, dict): if hasattr(self, "DEFAULT_CONFIG"): for key, value in self.DEFAULT_CONFIG.items(): config.setdefault(key, value) self.config = utils.LockedDict(config) elif isinstance(config, list): self.config = tuple(config) else: # NOTE(amaretskiy): It is improbable that config can be a None, # number, boolean or even string, # however we handle this self.config = config self.context = ctx def __lt__(self, other): return self.get_order() < other.get_order() def __gt__(self, other): return self.get_order() > other.get_order() def __eq__(self, other): return self.get_order() == other.get_order() def __ne__(self, other): return not self.__eq__(other) @classmethod def validate(cls, config): jsonschema.validate(config, cls.CONFIG_SCHEMA) @classmethod def get_order(cls): return cls._meta_get("order") @abc.abstractmethod def setup(self): """Prepare environment for test. This method is executed only once before load generation. self.config contains input arguments of this context self.context contains information that will be passed to scenario The goal of this method is to perform all operation to prepare environment and store information to self.context that is required by scenario. """ @abc.abstractmethod def cleanup(self): """Clean up environment after load generation. This method is run once after load generation is done to cleanup environment. 
self.config contains input arguments of this context self.context contains information that was passed to scenario """ def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_traceback): self.cleanup() @plugin.base() class Context(BaseContext): def __init__(self, ctx): super(Context, self).__init__(ctx) self.task = self.context.get("task", {}) class ContextManager(object): """Create context environment and run method inside it.""" def __init__(self, context_obj): self._visited = [] self.context_obj = context_obj @staticmethod def validate(ctx, allow_hidden=False): for name, config in ctx.items(): Context.get(name, allow_hidden=allow_hidden).validate(config) def _get_sorted_context_lst(self): return sorted([ Context.get(ctx_name, allow_hidden=True)(self.context_obj) for ctx_name in self.context_obj["config"].keys()]) def setup(self): """Creates benchmark environment from config.""" self._visited = [] for ctx in self._get_sorted_context_lst(): self._visited.append(ctx) ctx.setup() return self.context_obj def cleanup(self): """Destroys benchmark environment.""" ctxlst = self._visited or self._get_sorted_context_lst() for ctx in ctxlst[::-1]: try: ctx.cleanup() except Exception as e: LOG.error("Context %s failed during cleanup." % ctx.get_name()) LOG.exception(e) def __enter__(self): try: self.setup() except Exception: self.cleanup() raise def __exit__(self, exc_type, exc_value, exc_traceback): self.cleanup() rally-0.9.1/rally/task/runner.py0000664000567000056710000003140413073417720017763 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import collections import copy import multiprocessing import time import jsonschema import six from rally.common import logging from rally.common.plugin import plugin from rally.common import utils as rutils from rally.task.processing import charts from rally.task import scenario from rally.task import types from rally.task import utils LOG = logging.getLogger(__name__) configure = plugin.configure def format_result_on_timeout(exc, timeout): return { "duration": timeout, "idle_duration": 0, "output": {"additive": [], "complete": []}, "atomic_actions": {}, "error": utils.format_exc(exc) } def _get_scenario_context(iteration, context_obj): context_obj = copy.deepcopy(context_obj) context_obj["iteration"] = iteration + 1 # Numeration starts from `1' return context_obj def _run_scenario_once(cls, method_name, context_obj, scenario_kwargs, event_queue): iteration = context_obj["iteration"] event_queue.put({ "type": "iteration", "value": iteration, }) # provide arguments isolation between iterations scenario_kwargs = copy.deepcopy(scenario_kwargs) LOG.info("Task %(task)s | ITER: %(iteration)s START" % {"task": context_obj["task"]["uuid"], "iteration": iteration}) scenario_inst = cls(context_obj) error = [] try: with rutils.Timer() as timer: getattr(scenario_inst, method_name)(**scenario_kwargs) except Exception as e: error = utils.format_exc(e) if logging.is_debug(): LOG.exception(e) finally: status = "Error %s: %s" % tuple(error[0:2]) if error else "OK" LOG.info("Task %(task)s | ITER: %(iteration)s END: %(status)s" % {"task": context_obj["task"]["uuid"], 
"iteration": iteration, "status": status}) return {"duration": timer.duration() - scenario_inst.idle_duration(), "timestamp": timer.timestamp(), "idle_duration": scenario_inst.idle_duration(), "error": error, "output": scenario_inst._output, "atomic_actions": scenario_inst.atomic_actions()} def _worker_thread(queue, cls, method_name, context_obj, scenario_kwargs, event_queue): queue.put(_run_scenario_once(cls, method_name, context_obj, scenario_kwargs, event_queue)) def _log_worker_info(**info): """Log worker parameters for debugging. :param info: key-value pairs to be logged """ info_message = "\n\t".join(["%s: %s" % (k, v) for k, v in info.items()]) LOG.debug("Starting a worker.\n\t%s" % info_message) @plugin.base() @six.add_metaclass(abc.ABCMeta) class ScenarioRunner(plugin.Plugin): """Base class for all scenario runners. Scenario runner is an entity that implements a certain strategy of launching benchmark scenarios, e.g. running them continuously or periodically for a given number of times or seconds. These strategies should be implemented in subclasses of ScenarioRunner in the_run_scenario() method. """ CONFIG_SCHEMA = {} def __init__(self, task, config, batch_size=0): """Runner constructor. It sets task and config to local variables. Also initialize result_queue, where results will be put by _send_result method. 
:param task: Instance of objects.Task :param config: Dict with runner section from benchmark configuration """ self.task = task self.config = config self.result_queue = collections.deque() self.event_queue = collections.deque() self.aborted = multiprocessing.Event() self.run_duration = 0 self.batch_size = batch_size self.result_batch = [] @staticmethod def validate(config): """Validates runner's part of task config.""" runner = ScenarioRunner.get(config.get("type", "serial")) jsonschema.validate(config, runner.CONFIG_SCHEMA) @abc.abstractmethod def _run_scenario(self, cls, method_name, context, args): """Runs the specified benchmark scenario with given arguments. :param cls: The Scenario class where the scenario is implemented :param method_name: Name of the method that implements the scenario :param context: Benchmark context that contains users, admin & other information, that was created before benchmark started. :param args: Arguments to call the scenario method with :returns: List of results fore each single scenario iteration, where each result is a dictionary """ def run(self, name, context, args): scenario_plugin = scenario.Scenario.get(name) # NOTE(boris-42): processing @types decorators args = types.preprocess(name, context, args) if scenario_plugin.is_classbased: cls, method_name = scenario_plugin, "run" else: cls, method_name = (scenario_plugin._meta_get("cls_ref"), name.split(".", 1).pop()) with rutils.Timer() as timer: self._run_scenario(cls, method_name, context, args) self.run_duration = timer.duration() def abort(self): """Abort the execution of further benchmark scenario iterations.""" self.aborted.set() @staticmethod def _create_process_pool(processes_to_start, worker_process, worker_args_gen): """Create a pool of processes with some defined target function. 
:param processes_to_start: number of processes to create in the pool :param worker_process: target function for all processes in the pool :param worker_args_gen: generator of arguments for the target function :returns: the process pool as a deque """ process_pool = collections.deque() for i in range(processes_to_start): kwrgs = {"processes_to_start": processes_to_start, "processes_counter": i} process = multiprocessing.Process(target=worker_process, args=next(worker_args_gen), kwargs={"info": kwrgs}) process.start() process_pool.append(process) return process_pool def _join_processes(self, process_pool, result_queue, event_queue): """Join the processes in the pool and send their results to the queue. :param process_pool: pool of processes to join :param result_queue: multiprocessing.Queue that receives the results :param event_queue: multiprocessing.Queue that receives the events """ while process_pool: while process_pool and not process_pool[0].is_alive(): process_pool.popleft().join() if result_queue.empty() and event_queue.empty(): # sleep a bit to avoid 100% usage of CPU by this method time.sleep(0.01) while not event_queue.empty(): self.send_event(**event_queue.get()) while not result_queue.empty(): self._send_result(result_queue.get()) self._flush_results() result_queue.close() event_queue.close() def _flush_results(self): if self.result_batch: sorted_batch = sorted(self.result_batch) self.result_queue.append(sorted_batch) del self.result_batch[:] _RESULT_SCHEMA = { "fields": [("duration", float), ("timestamp", float), ("idle_duration", float), ("output", dict), ("atomic_actions", dict), ("error", list)] } def _result_has_valid_schema(self, result): """Check whatever result has valid schema or not.""" # NOTE(boris-42): We can't use here jsonschema, this method is called # to check every iteration result schema. And this # method works 200 times faster then jsonschema # which totally makes sense. 
for key, proper_type in self._RESULT_SCHEMA["fields"]: if key not in result: LOG.warning("'%s' is not result" % key) return False if not isinstance(result[key], proper_type): LOG.warning( "Task %(uuid)s | result['%(key)s'] has wrong type " "'%(actual_type)s', should be '%(proper_type)s'" % {"uuid": self.task["uuid"], "key": key, "actual_type": type(result[key]), "proper_type": proper_type.__name__}) return False for action, value in result["atomic_actions"].items(): if not isinstance(value, float): LOG.warning( "Task %(uuid)s | Atomic action %(action)s has wrong type " "'%(type)s', should be 'float'" % {"uuid": self.task["uuid"], "action": action, "type": type(value)}) return False for e in result["error"]: if not isinstance(e, str): LOG.warning("error value has wrong type '%s', should be 'str'" % type(e)) return False for key in ("additive", "complete"): if key not in result["output"]: LOG.warning("Task %(uuid)s | Output missing key '%(key)s'" % {"uuid": self.task["uuid"], "key": key}) return False type_ = type(result["output"][key]) if type_ != list: LOG.warning( "Task %(uuid)s | Value of result['output']['%(key)s'] " "has wrong type '%(type)s', must be 'list'" % {"uuid": self.task["uuid"], "key": key, "type": type_.__name__}) return False for key in result["output"]: for output_data in result["output"][key]: message = charts.validate_output(key, output_data) if message: LOG.warning("Task %(uuid)s | %(message)s" % {"uuid": self.task["uuid"], "message": message}) return False return True def _send_result(self, result): """Store partial result to send it to consumer later. :param result: Result dict to be sent. It should match the ScenarioRunnerResult schema, otherwise ValidationError is raised. 
""" if not self._result_has_valid_schema(result): LOG.warning( "Task %(task)s | Runner `%(runner)s` is trying to send " "results in wrong format" % {"task": self.task["uuid"], "runner": self.get_name()}) return self.result_batch.append(result) if len(self.result_batch) >= self.batch_size: sorted_batch = sorted(self.result_batch, key=lambda r: result["timestamp"]) self.result_queue.append(sorted_batch) del self.result_batch[:] def send_event(self, type, value=None): """Store event to send it to consumer later. :param type: Event type :param value: Optional event data """ self.event_queue.append({"type": type, "value": value}) def _log_debug_info(self, **info): """Log runner parameters for debugging. The method logs the runner name, the task id as well as the values passed as arguments. :param info: key-value pairs to be logged """ info_message = "\n\t".join(["%s: %s" % (k, v) for k, v in info.items()]) LOG.debug("Starting the %(name)s runner (task UUID: %(task)s)." "\n\t%(info)s" % {"name": self._meta_get("name"), "task": self.task["uuid"], "info": info_message}) rally-0.9.1/rally/task/trigger.py0000664000567000056710000000442413073417720020117 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import abc import jsonschema import six from rally.common.i18n import _ from rally.common import logging from rally.common.plugin import plugin configure = plugin.configure LOG = logging.getLogger(__name__) @plugin.base() @six.add_metaclass(abc.ABCMeta) class Trigger(plugin.Plugin): """Factory for trigger classes.""" def __init__(self, context, task, hook_cls): self.context = context self.config = self.context["trigger"]["args"] self.task = task self.hook_cls = hook_cls self._runs = [] @staticmethod def validate(config): config_schema = Trigger.get(config["name"]).CONFIG_SCHEMA jsonschema.validate(config["args"], config_schema) @abc.abstractmethod def get_listening_event(self): """Returns event type to listen.""" def on_event(self, event_type, value=None): """Launch hook on specified event.""" LOG.info(_("Hook %s is triggered for Task %s by %s=%s") % (self.hook_cls.__name__, self.task["uuid"], event_type, value)) hook = self.hook_cls(self.task, self.context.get("args", {}), {"event_type": event_type, "value": value}) hook.run_async() self._runs.append(hook) def get_results(self): results = {"config": self.context, "results": [], "summary": {}} for hook in self._runs: hook_result = hook.result() results["results"].append(hook_result) results["summary"].setdefault(hook_result["status"], 0) results["summary"][hook_result["status"]] += 1 return results rally-0.9.1/rally/task/types.py0000664000567000056710000002250513073417716017625 0ustar jenkinsjenkins00000000000000# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import copy import operator import re import six from rally.common.plugin import plugin from rally import exceptions from rally import osclients from rally.task import scenario def _get_preprocessor_loader(plugin_name): """Get a class that loads a preprocessor class. This returns a class with a single class method, ``transform``, which, when called, finds a plugin and defers to its ``transform`` class method. This is necessary because ``convert()`` is called as a decorator at import time, but we cannot be confident that the ResourceType plugins may not be loaded yet. (In fact, since ``convert()`` is used to decorate plugins, we can be confident that not all plugins are loaded when it is called.) This permits us to defer plugin searching until the moment when ``preprocess()`` calls the various preprocessors, at which point we can be certain that all plugins have been loaded and finding them by name will work. """ def transform(cls, *args, **kwargs): plug = ResourceType.get(plugin_name) return plug.transform(*args, **kwargs) return type("PluginLoader_%s" % plugin_name, (object,), {"transform": classmethod(transform)}) def convert(**kwargs): """Decorator to define resource transformation(s) on scenario parameters. The ``kwargs`` passed as arguments are used to map a key in the scenario config to the resource type plugin used to perform a transformation on the value of the key. 
For instance: @types.convert(image={"type": "glance_image"}) This would convert the ``image`` key in the scenario configuration to a Glance image by using the ``glance_image`` resource plugin. Currently ``type`` is the only recognized key, but others may be added in the future. """ preprocessors = dict([(k, _get_preprocessor_loader(v["type"])) for k, v in kwargs.items()]) def wrapper(func): func._meta_setdefault("preprocessors", {}) func._meta_get("preprocessors").update(preprocessors) return func return wrapper def preprocess(name, context, args): """Run preprocessor on scenario arguments. :param name: Plugin name :param context: dictionary object that must have admin and credential entries :param args: args section of benchmark specification in rally task file :returns processed_args: dictionary object with additional client and resource configuration """ preprocessors = scenario.Scenario.get(name)._meta_get("preprocessors", default={}) clients = osclients.Clients(context["admin"]["credential"]) processed_args = copy.deepcopy(args) for src, preprocessor in preprocessors.items(): resource_cfg = processed_args.get(src) if resource_cfg: processed_args[src] = preprocessor.transform( clients=clients, resource_config=resource_cfg) return processed_args @plugin.base() @six.add_metaclass(abc.ABCMeta) class ResourceType(plugin.Plugin): @classmethod @abc.abstractmethod def transform(cls, clients, resource_config): """Transform the resource. :param clients: openstack admin client handles :param resource_config: scenario config of resource :returns: transformed value of resource """ def obj_from_name(resource_config, resources, typename): """Return the resource whose name matches the pattern. resource_config has to contain `name`, as it is used to lookup a resource. Value of the name will be treated as regexp. An `InvalidScenarioArgument` is thrown if the pattern does not match unambiguously. 
:param resource_config: resource to be transformed :param resources: iterable containing all resources :param typename: name which describes the type of resource :returns: resource object uniquely mapped to `name` or `regex` """ if "name" in resource_config: # In a case of pattern string exactly matches resource name matching_exact = [resource for resource in resources if resource.name == resource_config["name"]] if len(matching_exact) == 1: return matching_exact[0] elif len(matching_exact) > 1: raise exceptions.InvalidScenarioArgument( "{typename} with name '{pattern}' " "is ambiguous, possible matches " "by id: {ids}".format(typename=typename.title(), pattern=resource_config["name"], ids=", ".join(map( operator.attrgetter("id"), matching_exact)))) # Else look up as regex patternstr = resource_config["name"] elif "regex" in resource_config: patternstr = resource_config["regex"] else: raise exceptions.InvalidScenarioArgument( "{typename} 'id', 'name', or 'regex' not found " "in '{resource_config}' ".format(typename=typename.title(), resource_config=resource_config)) pattern = re.compile(patternstr) matching = [resource for resource in resources if re.search(pattern, resource.name)] if not matching: raise exceptions.InvalidScenarioArgument( "{typename} with pattern '{pattern}' not found".format( typename=typename.title(), pattern=pattern.pattern)) elif len(matching) > 1: raise exceptions.InvalidScenarioArgument( "{typename} with name '{pattern}' is ambiguous, possible matches " "by id: {ids}".format(typename=typename.title(), pattern=pattern.pattern, ids=", ".join(map(operator.attrgetter("id"), matching)))) return matching[0] def obj_from_id(resource_config, resources, typename): """Return the resource whose name matches the id. resource_config has to contain `id`, as it is used to lookup a resource. 
:param resource_config: resource to be transformed :param resources: iterable containing all resources :param typename: name which describes the type of resource :returns: resource object mapped to `id` """ if "id" in resource_config: matching = [resource for resource in resources if resource.id == resource_config["id"]] if len(matching) == 1: return matching[0] elif len(matching) > 1: raise exceptions.MultipleMatchesFound( needle="{typename} with id '{id}'".format( typename=typename.title(), id=resource_config["id"]), haystack=matching) else: raise exceptions.InvalidScenarioArgument( "{typename} with id '{id}' not found".format( typename=typename.title(), id=resource_config["id"])) else: raise exceptions.InvalidScenarioArgument( "{typename} 'id' not found in '{resource_config}'".format( typename=typename.title(), resource_config=resource_config)) def _id_from_name(resource_config, resources, typename, id_attr="id"): """Return the id of the resource whose name matches the pattern. resource_config has to contain `name`, as it is used to lookup an id. Value of the name will be treated as regexp. An `InvalidScenarioArgument` is thrown if the pattern does not match unambiguously. :param resource_config: resource to be transformed :param resources: iterable containing all resources :param typename: name which describes the type of resource :param id_attr: id or uuid should be returned :returns: resource id uniquely mapped to `name` or `regex` """ try: return getattr(obj_from_name(resource_config, resources, typename), id_attr) except AttributeError: raise exceptions.RallyException( "There is no attribute {attr} in the object {type}".format( attr=id_attr, type=typename)) def _name_from_id(resource_config, resources, typename): """Return the name of the resource which has the id. resource_config has to contain `id`, as it is used to lookup a name. 
:param resource_config: resource to be transformed :param resources: iterable containing all resources :param typename: name which describes the type of resource :returns: resource name mapped to `id` """ return obj_from_id(resource_config, resources, typename).name rally-0.9.1/CONTRIBUTING.rst0000664000567000056710000001611213073417716016460 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _contribute: Contribute to Rally =================== Where to begin -------------- Please take a look `our Roadmap`_ to get information about our current work directions. In case you have questions or want to share your ideas, be sure to contact us either at `Rally-dev/Lobby`_ channel on **Gitter** messenger (or, less preferably, at the ``#openstack-rally`` IRC channel on **irc.freenode.net**). If you are going to contribute to Rally, you will probably need to grasp a better understanding of several main design concepts used throughout our project (such as **benchmark scenarios**, **contexts** etc.). To do so, please read :ref:`this article `. How to contribute ----------------- 1. You need a `Launchpad`_ account and need to be joined to the `OpenStack team`_. You can also join the `Rally team`_ if you want to. Make sure Launchpad has your SSH key, Gerrit (the code review system) uses this. 2. Sign the CLA as outlined in the `account setup`_ section of the developer guide. 3. Tell git your details: .. 
code-block:: bash git config --global user.name "Firstname Lastname" git config --global user.email "your_email@youremail.com" 4. Install git-review. This tool takes a lot of the pain out of remembering commands to push code up to Gerrit for review and to pull it back down to edit it. It is installed using: .. code-block:: bash pip install git-review Several Linux distributions (notably Fedora 16 and Ubuntu 12.04) are also starting to include git-review in their repositories so it can also be installed using the standard package manager. 5. Grab the Rally repository: .. code-block:: bash git clone git@github.com:openstack/rally.git 6. Checkout a new branch to hack on: .. code-block:: bash git checkout -b TOPIC-BRANCH 7. Start coding 8. Run the test suite locally to make sure nothing broke, e.g. (this will run py34/py27/pep8 tests): .. code-block:: bash tox **(NOTE: you should have installed tox<=1.6.1)** If you extend Rally with new functionality, make sure you have also provided unit and/or functional tests for it. 9. Commit your work using: .. code-block:: bash git commit -a Make sure you have supplied your commit with a neat commit message, containing a link to the corresponding blueprint / bug, if appropriate. 10. Push the commit up for code review using: .. code-block:: bash git review -R That is the awesome tool we installed earlier that does a lot of hard work for you. 11. Watch your email or `review site`_, it will automatically send your code for a battery of tests on our `Jenkins setup`_ and the core team for the project will review your code. If there are any changes that should be made they will let you know. 12. When all is good the review site will automatically merge your code. 
(This tutorial is based on: http://www.linuxjedi.co.uk/2012/03/real-way-to-start-hacking-on-openstack.html) Testing ------- Please, don't hesitate to write tests ;) Unit tests ^^^^^^^^^^ *Files: /tests/unit/** The goal of unit tests is to ensure that internal parts of the code work properly. All internal methods should be fully covered by unit tests with a reasonable mocks usage. About Rally unit tests: - All `unit tests`_ are located inside /tests/unit/* - Tests are written on top of: *testtools* and *mock* libs - `Tox`_ is used to run unit tests To run unit tests locally: .. code-block:: console $ pip install tox $ tox To run py34, py27 or pep8 only: .. code-block:: console $ tox -e #NOTE: is one of py34, py27 or pep8 To run a single unit test e.g. test_deployment .. code-block:: console $ tox -e -- #NOTE: is one of py34, py27 or pep8 # is the unit test case name, e.g tests.unit.test_osclients To debug issues on the unit test: - Add breakpoints on the test file using ``import pdb;`` ``pdb.set_trace()`` - Then run tox in debug mode: .. code-block:: console $ tox -e debug #NOTE: use python 2.7 #NOTE: is the unit test case name or .. code-block:: console $ tox -e debug34 #NOTE: use python 3.4 #NOTE: is the unit test case name To get test coverage: .. code-block:: console $ tox -e cover #NOTE: Results will be in /cover/index.html To generate docs: .. code-block:: console $ tox -e docs #NOTE: Documentation will be in doc/source/_build/html/index.html Functional tests ^^^^^^^^^^^^^^^^ *Files: /tests/functional/** The goal of `functional tests`_ is to check that everything works well together. Functional tests use Rally API only and check responses without touching internal parts. To run functional tests locally: .. 
code-block:: console $ source openrc $ rally deployment create --fromenv --name testing $ tox -e cli #NOTE: openrc file with OpenStack admin credentials Output of every Rally execution will be collected under some reports root in directory structure like: reports_root/ClassName/MethodName_suffix.extension This functionality implemented in tests.functional.utils.Rally.__call__ method. Use 'gen_report_path' method of 'Rally' class to get automatically generated file path and name if you need. You can use it to publish html reports, generated during tests. Reports root can be passed throw environment variable 'REPORTS_ROOT'. Default is 'rally-cli-output-files'. Rally CI scripts ^^^^^^^^^^^^^^^^ *Files: /tests/ci/** This directory contains scripts and files related to the Rally CI system. Rally Style Commandments ^^^^^^^^^^^^^^^^^^^^^^^^ *Files: /tests/hacking/* This module contains Rally specific hacking rules for checking commandments. For more information about Style Commandments, read the `OpenStack Style Commandments manual`_. .. references: .. _our Roadmap: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g/edit#gid=0 .. _Rally-dev/Lobby: https://gitter.im/rally-dev/Lobby .. _Launchpad: https://launchpad.net/ .. _OpenStack team: https://launchpad.net/openstack .. _Rally team: https://launchpad.net/rally .. _account setup: http://docs.openstack.org/infra/manual/developers.html#development-workflow .. _review site: http://review.openstack.org/ .. _Jenkins setup: http://jenkins.openstack.org/ .. _unit tests: http://en.wikipedia.org/wiki/Unit_testing .. _Tox: https://tox.readthedocs.org/en/latest/ .. _functional tests: https://en.wikipedia.org/wiki/Functional_testing .. 
_OpenStack Style Commandments manual: http://docs.openstack.org/developer/hacking/ rally-0.9.1/doc/0000775000567000056710000000000013073420067014554 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/feature_request/0000775000567000056710000000000013073420067017757 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/feature_request/multiple_attach_volume.rst0000664000567000056710000000072413073417716025271 0ustar jenkinsjenkins00000000000000====================== Multiple attach volume ====================== Use Case -------- Since multiple volume attaching support to OpenStack Mitaka, one volume can be attached to several instances or hosts, Rally should add scenarios about multiple attach volume. Problem Description ------------------- Rally lack of scenarios about multiple attach volume. Possible solution ----------------- * Add nova scenarios "multi_attach_volume" and "multi_detach_volume" rally-0.9.1/doc/feature_request/check_queue_perfdata.rst0000664000567000056710000000132513073417716024650 0ustar jenkinsjenkins00000000000000==================== Check queue perfdata ==================== Use case -------- Sometimes OpenStack services use common messaging system very prodigally. For example Neutron metering agent sending all database table data on new object creation i.e https://review.openstack.org/#/c/143672/. It cause to Neutron degradation and other obvious problems. It will be nice to have a way to track messages count and messages size in queue during tests/benchmarks. Problem description ------------------- Heavy usage of queue isn’t checked. Possible solution ----------------- * Before running tests/benchmarks start process which will connect to queue topics and measure messages count, size and other data which we need. 
rally-0.9.1/doc/feature_request/capture_task_logging.rst0000664000567000056710000000062713073417716024720 0ustar jenkinsjenkins00000000000000========================== Capture Logs from services ========================== Use case -------- A developer is executing various task and would like to capture logs as well as test results. Problem description ------------------- In case of errors it is quite hard to debug what happened. Possible solution ----------------- * Add special context that can capture the logs from tested services. rally-0.9.1/doc/feature_request/installation_script_enhancements.rst0000664000567000056710000000124013073417716027332 0ustar jenkinsjenkins00000000000000====================================================================== Enhancements to installation script: ``--version`` and ``--uninstall`` ====================================================================== Use case -------- User might wish to control which rally version is installed or even purge rally from the machine completely. Problem description ------------------- #. Installation script doesn't allow to choose version. #. No un-install support. Possible solution ----------------- #. Add ``--version`` option to installation script. #. Add ``--uninstall`` option to installation script or create an un-installation script rally-0.9.1/doc/feature_request/launch_specific_benchmark.rst0000664000567000056710000000133213073417716025650 0ustar jenkinsjenkins00000000000000============================ Launch Specific Benchmark(s) ============================ Use case -------- A developer is working on a feature that is covered by one or more specific benchmarks/scenarios. He/she would like to execute a rally task with an existing task template file (YAML or JSON) indicating exactly which benchmark(s) will be executed. 
Problem description ------------------- When executing a task with a template file in Rally, all benchmarks are executed without the ability to specify one or a set of benchmarks the user would like to execute. Possible solution ----------------- * Add optional flag to rally task start command to specify one or more benchmarks to execute as part of that test run. rally-0.9.1/doc/feature_request/explicitly_specify_existing_users_for_scenarios.rst0000664000567000056710000000171013073417716032476 0ustar jenkinsjenkins00000000000000=============================================== Explicitly specify existing users for scenarios =============================================== Use Case -------- Rally allows to reuse existing users for scenario runs. And we should be able to use only specified set of existing users for specific scenarios. Problem Description ------------------- For the moment if used `deployment` with existing users then Rally chooses user for each scenario run randomly. But there are cases when we may want to use one scenario with one user and another with different one specific user. Main reason for it is in different set of resources that each user has and those resources may be required for scenarios. Without this feature Rally user is forced to make all existing users similar and have all required resources set up for all scenarios he uses. But it is redundant. Possible solution ----------------- * Make it possible to use explicitly existing_users context rally-0.9.1/doc/feature_request/distributed_load_generation.rst0000664000567000056710000000075313073417720026254 0ustar jenkinsjenkins00000000000000=========================== Distributed load generation =========================== Use Case -------- Some OpenStack projects (Marconi, MagnetoDB) require a real huge load, like 10-100k request per second for benchmarking. To generate such huge load Rally have to create load from different servers. 
Problem Description ------------------- * Rally can't generate load from different servers * Result processing can't handle big amount of data * There is no support for chunking results rally-0.9.1/doc/feature_request/implemented/0000775000567000056710000000000013073420067022262 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/feature_request/implemented/add_possibility_to_specify_concurrency_for_tempest.rst0000664000567000056710000000105313073417716035451 0ustar jenkinsjenkins00000000000000======================================== Running Tempest using custom concurrency ======================================== Use case -------- User might want to use specific concurrency for running tests based on his deployment and available resources. Problem description ------------------- "rally verify start" command does not allow to specify concurrency for tempest tests. And they always run using concurrency equal to amount of CPU cores. Possible solution ----------------- * Add ``--concurrency`` option to "rally verify start" command. rally-0.9.1/doc/feature_request/implemented/LDAP_support.rst0000664000567000056710000000167213073417716025345 0ustar jenkinsjenkins00000000000000=============================================== Support benchmarking clouds that are using LDAP =============================================== Use Case -------- A lot of production clouds are using LDAP with read only access. It means that load can be generated only by existing in system users and there is no admin access. Problem Description ------------------- Rally is using admin access to create temporary users that will be used to produce load. 
Possible Solution ----------------- * Add some way to pass already existing users Current Solution ---------------- * Allow the user to specify existing users in the configuration of the *ExistingCloud* deployment plugin * When such an *ExistingCloud* deployment is active, and the benchmark task file does not specify the *"users"* context, use the existing users instead of creating the temporary ones. * Modify the *rally show ...* commands to list resources for each user separately. rally-0.9.1/doc/feature_request/implemented/stop_scenario_after_several_errors.rst0000664000567000056710000000447113073417716032177 0ustar jenkinsjenkins00000000000000================================== Stop scenario after several errors ================================== Use case -------- Starting long tests on the big environments. Problem description ------------------- When we start a rally scenario on the env where keystone dies we get a lot of time before timeout happens. Example ------- Times in hard tests 05:25:40 rally-scenarios.cinder 05:25:40 create-and-delete-volume [4074 iterations, 15 threads] OK 8.91 08:00:02 create-and-delete-snapshot [5238 iterations, 15 threads] OK 17.46 08:53:20 create-and-list-volume [4074 iterations, 15 threads] OK 3.18 12:04:14 create-snapshot-and-attach-volume [2619 iterations, 15 threads] FAIL 14:18:44 create-and-attach-volume [2619 iterations, 15 threads] FAIL 14:23:47 rally-scenarios.vm 14:23:47 boot_runcommand_metadata_delete [5 iterations, 5 threads] FAIL 16:30:46 rally-scenarios.nova 16:30:46 boot_and_list_server [5820 iterations, 15 threads] FAIL 19:19:30 resize_server [5820 iterations, 15 threads] FAIL 02:51:13 boot_and_delete_server_with_secgroups [5820 iterations, 60 threads] FAIL Times in light variant 00:38:25 rally-scenarios.cinder 00:38:25 create-and-delete-volume [14 iterations, 1 threads] OK 5.30 00:40:39 create-and-delete-snapshot [18 iterations, 1 threads] OK 5.65 00:41:52 create-and-list-volume [14 iterations, 1 threads] OK 2.89 
00:45:18 create-snapshot-and-attach-volume [9 iterations, 1 threads] OK 17.75 00:48:54 create-and-attach-volume [9 iterations, 1 threads] OK 20.04 00:52:29 rally-scenarios.vm 00:52:29 boot_runcommand_metadata_delete [5 iterations, 5 threads] OK 128.86 00:56:42 rally-scenarios.nova 00:56:42 boot_and_list_server [20 iterations, 1 threads] OK 6.98 01:04:48 resize_server [20 iterations, 1 threads] OK 22.90 In the hard test we have a lot of timeouts from keystone and a lot of time on test execution Possible solution ----------------- Improve SLA check functionality to work "online". And add ability to control execution process and stop load generation in case of sla check failures. rally-0.9.1/doc/feature_request/historical_performance_data.rst0000664000567000056710000000121213073417716026227 0ustar jenkinsjenkins00000000000000=========================== Historical performance data =========================== Use case -------- OpenStack is really rapidly developed. Hundreds of patches are merged daily and it's really hard to track how performance is changed during time. It will be nice to have a way to track performance of major functionality of OpenStack running periodically rally task and building graphs that represent how performance of specific method is changed during the time. Problem description ------------------- There is no way to bind tasks Possible solution ----------------- * Add grouping for tasks * Add command that creates historical graphs rally-0.9.1/doc/feature_request/comparing_results_of_2_tasks.rst0000664000567000056710000000077313073417716026401 0ustar jenkinsjenkins00000000000000======================================= Ability to compare results between task ======================================= Use case -------- During the work on performance it's essential to be able to compare results of similar task before and after change in system. 
Problem description ------------------- There is no command to compare two or more tasks and get tables and graphs. Possible solution ----------------- * Add command that accepts 2 tasks UUID and prints graphs that compares result rally-0.9.1/doc/feature_request/installing_isolated.rst0000664000567000056710000000155513073417716024556 0ustar jenkinsjenkins00000000000000================================================================================== Installation script: ``--pypi-mirror``, ``--package-mirror`` and ``--venv-mirror`` ================================================================================== Use case -------- Installation is pretty easy when there is an Internet connection available. And there is surely a number of OpenStack uses when whole environment is isolated. In this case, we need somehow specify where installation script should take required libs and packages. Problem description ------------------- #. Installation script can't work without direct Internet connection Possible solution #1 -------------------- #. Add ``--pypi-mirror`` option to installation script. #. Add ``--package-mirror`` option to installation script. #. Add ``--venv-mirror`` option to installation script. rally-0.9.1/doc/feature_request/README.rst0000664000567000056710000000065013073417716021456 0ustar jenkinsjenkins00000000000000================ Feature requests ================ To request a new feature, you should create a document similar to other feature requests. And contribute it to this directory using the next instruction_. If you don't have time to contribute your feature request via Gerrit, please contact Andrey Kurilin (andr.kurilin@gmail.com) .. 
_instruction: http://rally.readthedocs.org/en/latest/contribute.html#how-to-contribute rally-0.9.1/doc/feature_request/production_ready_cleanup.rst0000664000567000056710000000175513073417716025611 0ustar jenkinsjenkins00000000000000======================== Production read cleanups ======================== Use Case -------- Rally should delete in any case all resources that it created during benchmark. Problem Description ------------------- * (implemented) Deletion rate limit You can kill cloud by deleting too many objects simultaneously, so deletion rate limit is required * (implemented) Retry on failures There should be few attempts to delete resource in case of failures * (implemented) Log resources that failed to be deleted We should log warnings about all non deleted resources. This information should include UUID of resource, it's type and project. * (implemented) Pluggable It should be simple to add new cleanups adding just plugins somewhere. * Disaster recovery Rally should use special name patterns, to be able to delete resources in such case if something went wrong with server that is running Rally. And you have just new instance (without old Rally DB) of Rally on new server. rally-0.9.1/doc/feature_request/persistence_benchmark_env.rst0000664000567000056710000000172413073417716025732 0ustar jenkinsjenkins00000000000000================================================ Add support of persistence benchmark environment ================================================ Use Case -------- To benchmark many of operations like show, list, detailed you need to have already these resource in cloud. So it will be nice to be able to create benchmark environment once before benchmarking. So run some amount of benchmarks that are using it and at the end just delete all created resources by benchmark environment. Problem Description ------------------- Fortunately Rally has already a mechanism for creating benchmark environment, that is used to create load. 
Unfortunately it's atomic operation: (create environment, make load, delete environment). This should be split to 3 separated steps. Possible solution ----------------- * Add new CLI operations to work with benchmark environment: (show, create, delete, list) * Allow task to start against benchmark environment (instead of deployment) rally-0.9.1/doc/feature_request/multi_scenarios_load_gen.rst0000664000567000056710000000204613073417716025552 0ustar jenkinsjenkins00000000000000====================================== Using multi scenarios to generate load ====================================== Use Case -------- Rally should be able to generate real life load. Simultaneously create load on different components of OpenStack, e.g. simultaneously booting VM, uploading image and listing users. Problem Description ------------------- At the moment Rally is able to run only 1 scenario per benchmark. Scenario are quite specific (e.g. boot and delete VM for example) and can't actually generate real life load. Writing a lot of specific benchmark scenarios that will produce more real life load will produce mess and a lot of duplication of code. Possible solution ----------------- * Extend Rally task benchmark configuration in such way to support passing multiple benchmark scenarios in single benchmark context * Extend Rally task output format to support results of multiple scenarios in single benchmark separately. * Extend rally task plot2html and rally task detailed to show results separately for every scenario. rally-0.9.1/doc/ext/0000775000567000056710000000000013073420067015354 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/ext/__init__.py0000664000567000056710000000000013073417716017462 0ustar jenkinsjenkins00000000000000rally-0.9.1/doc/ext/plugin_reference.py0000664000567000056710000003460413073417720021253 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from docutils.parsers import rst import json import re from rally.common.plugin import discover from rally.common.plugin import plugin from rally import plugins from utils import category, subcategory, section, paragraph, parse_text, \ make_definitions, note JSON_SCHEMA_TYPES_MAP = {"boolean": "bool", "string": "str", "number": "float", "integer": "int", "array": "list", "object": "dict"} def process_jsonschema(schema): """Process jsonschema and make it looks like regular docstring.""" if not schema: # nothing to parse return if "type" in schema: # str if schema["type"] == "string": doc = schema.get("description", "") if "pattern" in schema: doc += ("\n\nShould follow next pattern: %s." % schema["pattern"]) return {"doc": doc, "type": "str"} # int or float elif schema["type"] in ("integer", "number"): doc = schema.get("description", "") if "minimum" in schema: doc += "\n\nMin value: %s." % schema["minimum"] if "maximum" in schema: doc += "\n\nMax value: %s." 
% schema["maximum"] return {"doc": doc, "type": JSON_SCHEMA_TYPES_MAP[schema["type"]]} # bool or null elif schema["type"] in ("boolean", "null"): return {"doc": schema.get("description", ""), "type": "bool" if schema["type"] == "boolean" else "null"} # list elif schema["type"] == "array": info = {"doc": schema.get("description", ""), "type": "list"} if "items" in schema: if info["doc"]: info["doc"] += "\n\n" info["doc"] += ("Elements of the list should follow format(s) " "described below:\n\n") items = schema["items"] if "type" in items: itype = JSON_SCHEMA_TYPES_MAP.get(items["type"], items["type"]) info["doc"] += "- Type: %s. " % itype if "description" in items: # add indention desc = items["description"].split("\n") info["doc"] += "\n ".join(desc) if itype in ("list", "dict"): new_schema = copy.copy(items) new_schema.pop("description", None) new_schema = json.dumps(new_schema, indent=4) new_schema = "\n ".join( new_schema.split("\n")) info["doc"] += ("\n Format:\n\n" " .. code-block:: json\n\n" " %s\n" % new_schema) else: info["doc"] += " - ``%s`` " % items return info elif isinstance(schema["type"], list): # it can be too complicated for parsing... 
do not do it deeply return {"doc": schema.get("description", ""), "type": "/".join(schema["type"])} # dict elif schema["type"] == "object": info = {"doc": schema.get("description", ""), "type": "dict", "parameters": []} required_parameters = schema.get("required", []) if "properties" in schema: for name in schema["properties"]: if isinstance(schema["properties"][name], str): pinfo = {"name": name, "type": schema["properties"][name], "doc": ""} else: pinfo = process_jsonschema(schema["properties"][name]) if name in required_parameters: pinfo["required"] = True pinfo["name"] = name info["parameters"].append(pinfo) elif "patternProperties" in schema: info.pop("parameters", None) info["patternProperties"] = [] for k, v in schema["patternProperties"].items(): info["patternProperties"].append(process_jsonschema(v)) info["patternProperties"][-1]["name"] = k info["patternProperties"][-1]["type"] = "str" elif (not (set(schema.keys()) - {"type", "description", "$schema", "additionalProperties"})): # it is ok, schema accepts any object. nothing to add more pass elif "oneOf" in schema: # Example: # SCHEMA = {"type": "object", "$schema": consts.JSON_SCHEMA, # "oneOf": [{"properties": {"foo": {"type": "string"}} # "required": ["foo"], # "additionalProperties": False}, # {"properties": {"bar": {"type": "string"}} # "required": ["bar"], # "additionalProperties": False}, # oneOf = copy.deepcopy(schema["oneOf"]) for item in oneOf: for k, v in schema.items(): if k not in ("oneOf", "description"): item[k] = v return {"doc": schema.get("description", ""), "type": "dict", "oneOf": [process_jsonschema(item) for item in oneOf]} else: raise Exception("Failed to parse jsonschema: %s" % schema) if "definitions" in schema: info["definitions"] = schema["definitions"] return info else: raise Exception("Failed to parse jsonschema: %s" % schema) # enum elif "enum" in schema: doc = schema.get("description", "") doc += "\nSet of expected values: '%s'." 
% ("', '".join( [e or "None" for e in schema["enum"]])) return {"doc": doc} elif "anyOf" in schema: return {"doc": schema.get("description", ""), "anyOf": [process_jsonschema(i) for i in schema["anyOf"]]} elif "oneOf" in schema: return {"doc": schema.get("description", ""), "oneOf": [process_jsonschema(i) for i in schema["oneOf"]]} elif "$ref" in schema: return {"doc": schema.get("description", "n/a"), "ref": schema["$ref"]} else: raise Exception("Failed to parse jsonschema: %s" % schema) CATEGORIES = { "Common": ["OS Client"], "Deployment": ["Engine", "Provider Factory"], "Task Component": ["Chart", "Context", "Exporter", "Hook", "Resource Type", "SLA", "Scenario", "Scenario Runner", "Trigger"], "Verification Component": ["Verifier Context", "Verification Reporter", "Verifier Manager"] } # NOTE(andreykurilin): several bases do not have docstings at all, so it is # redundant to display them IGNORED_BASES = ["Resource Type", "Task Exporter", "OS Client"] class PluginsReferenceDirective(rst.Directive): optional_arguments = 1 option_spec = {"base_cls": str} def _make_arg_items(self, items, ref_prefix, description=None, title="Parameters"): terms = [] for item in items: iname = item.get("name", "") or item.pop("type") if "type" in item: iname += " (%s)" % item["type"] terms.append((iname, [item["doc"]])) return make_definitions(title=title, ref_prefix=ref_prefix, terms=terms, descriptions=description) def _make_plugin_section(self, plugin_cls, base_name=None): section_name = plugin_cls.get_name() if base_name: section_name += " [%s]" % base_name section_obj = section(section_name) info = plugin_cls.get_info() if info["title"]: section_obj.append(paragraph(info["title"])) if info["description"]: section_obj.extend(parse_text(info["description"])) if info["namespace"]: section_obj.append(paragraph( "**Namespace**: %s" % info["namespace"])) if base_name: ref_prefix = "%s-%s-" % (base_name, plugin_cls.get_name()) else: ref_prefix = "%s-" % plugin_cls.get_name() if 
info["parameters"]: section_obj.extend(self._make_arg_items(info["parameters"], ref_prefix)) if info["returns"]: section_obj.extend(parse_text( "**Returns**:\n%s" % info["returns"])) if info["schema"]: schema = process_jsonschema(info["schema"]) if "type" in schema: if "parameters" in schema: section_obj.extend(self._make_arg_items( items=schema["parameters"], ref_prefix=ref_prefix)) elif "patternProperties" in schema: section_obj.extend(self._make_arg_items( items=schema["patternProperties"], ref_prefix=ref_prefix, description=["*Dictionary is expected. Keys should " "follow pattern(s) described bellow.*"])) elif "oneOf" in schema: section_obj.append(note("One of the following groups of " "parameters should be provided.")) for i, oneOf in enumerate(schema["oneOf"], 1): description = None if oneOf.get("doc", None): description = [oneOf["doc"]] section_obj.extend(self._make_arg_items( items=oneOf["parameters"], ref_prefix=ref_prefix, title="Option %s of parameters" % i, description=description)) else: section_obj.extend(self._make_arg_items( items=[schema], ref_prefix=ref_prefix)) else: raise Exception("Failed to display provided schema: %s" % info["schema"]) filename = info["module"].replace(".", "/") ref = "https://github.com/openstack/rally/blob/master/%s.py" % filename section_obj.extend(parse_text("**Module**:\n`%s`__\n\n__ %s" % (info["module"], ref))) return section_obj def _make_plugin_base_section(self, base_cls, base_name=None): if base_name: title = ("%ss" % base_name if base_name[-1] != "y" else "%sies" % base_name[:-1]) subcategory_obj = subcategory(title) else: subcategory_obj = [] for p in sorted(base_cls.get_all(), key=lambda o: o.get_name()): # do not display hidden contexts if p._meta_get("hidden", False): continue subcategory_obj.append(self._make_plugin_section(p, base_name)) return subcategory_obj @staticmethod def _parse_class_name(cls): name = "" for word in re.split(r'([A-Z][a-z]*)', cls.__name__): if word: if len(word) > 1 and name: name += 
" " name += word return name def _get_all_plugins_bases(self): """Return grouped and sorted all plugins bases.""" bases = [] bases_names = [] for p in discover.itersubclasses(plugin.Plugin): base_ref = getattr(p, "base_ref", None) if base_ref == p: name = self._parse_class_name(p) if name in bases_names: raise Exception("Two base classes with same name '%s' are " "detected." % name) bases_names.append(name) category_of_base = "Common" for cname, cbases in CATEGORIES.items(): if name in cbases: category_of_base = cname bases.append((category_of_base, name, p)) return sorted(bases) def run(self): plugins.load() bases = self._get_all_plugins_bases() if "base_cls" in self.options: for _category_name, base_name, base_cls in bases: if base_name == self.options["base_cls"]: return self._make_plugin_base_section(base_cls) raise Exception("Failed to generate plugins reference for '%s'" " plugin base." % self.options["base_cls"]) categories = {} for category_name, base_name, base_cls in bases: # FIXME(andreykurilin): do not ignore anything if base_name in IGNORED_BASES: continue if category_name not in categories: categories[category_name] = category(category_name) category_of_base = categories[category_name] category_of_base.append(self._make_plugin_base_section(base_cls, base_name)) return [content for _name, content in sorted(categories.items())] def setup(app): plugins.load() app.add_directive("generate_plugin_reference", PluginsReferenceDirective) rally-0.9.1/doc/ext/cli_reference.py0000664000567000056710000002025513073417720020521 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import inspect from docutils.parsers import rst from rally.cli import cliutils from rally.cli import main from rally.cli import manage from utils import (category, subcategory, hint, make_definition, note, paragraph, parse_text, warning) class Parser(object): """A simplified interface of argparse.ArgumentParser""" def __init__(self): self.parsers = {} self.subparser = None self.defaults = {} self.arguments = [] def add_parser(self, name, help=None, description=None, formatter_class=None): parser = Parser() self.parsers[name] = {"description": description, "help": help, "fclass": formatter_class, "parser": parser} return parser def set_defaults(self, command_object=None, action_fn=None, action_kwargs=None): if command_object: self.defaults["command_object"] = command_object if action_fn: self.defaults["action_fn"] = action_fn if action_kwargs: self.defaults["action_kwargs"] = action_kwargs def add_subparsers(self, dest): # NOTE(andreykurilin): there is only one expected call if self.subparser: raise ValueError("Can't add one more subparser.") self.subparser = Parser() return self.subparser def add_argument(self, *args, **kwargs): if "action_args" in args: return self.arguments.append((args, kwargs)) DEFAULT_UUIDS_CMD = { "deployment": ["rally deployment create"], "task": ["rally task start"], "verification": ["rally verify start", "rally verify import_results"] } def compose_note_about_default_uuids(argument, dest): # TODO(andreykurilin): add references to commands return note("The default value for the ``%(arg)s`` argument is taken from " "the Rally 
environment. Usually, the default value is equal to" " the UUID of the last successful run of ``%(cmd)s``, if the " "``--no-use`` argument was not used." % { "arg": argument, "cmd": "``, ``".join(DEFAULT_UUIDS_CMD[dest])}) def compose_use_cmd_hint_msg(cmd): return hint("You can set the default value by executing ``%(cmd)s ``" " (ref__).\n\n __ #%(ref)s" % {"cmd": cmd, "ref": cmd.replace(" ", "-")}) def make_arguments_section(category_name, cmd_name, arguments, defaults): elements = [paragraph("**Command arguments**:")] for args, kwargs in arguments: # for future changes... # :param args: a single command argument which can represented by # several names(for example, --uuid and --task-id) in cli. # :type args: tuple # :param kwargs: description of argument. Have next format: # {"dest": "action_kwarg_", # "help": "just a description of argument" # "metavar": "[optional] metavar of argument. Example:" # "Example: argument '--file'; metavar 'path' ", # "type": "[optional] class object of argument's type", # "required": "[optional] boolean value"} # :type kwargs: dict dest = kwargs.get("dest").replace("action_kwarg_", "") description = [] if cmd_name != "use": # lets add notes about specific default values and hint about # "use" command with reference if dest in ("deployment", "task"): description.append(compose_note_about_default_uuids( args[0], dest)) description.append( compose_use_cmd_hint_msg("rally %s use" % dest)) elif dest == "verification": description.append(compose_note_about_default_uuids( args[0], dest)) description.append( compose_use_cmd_hint_msg("rally verify use")) description.append(kwargs.get("help")) action = kwargs.get("action") if not action: arg_type = kwargs.get("type") if arg_type: description.append("**Type**: %s" % arg_type.__name__) skip_default = dest in ("deployment", "task_id", "verification") if not skip_default and dest in defaults: description.append("**Default**: %s" % defaults[dest]) metavar = kwargs.get("metavar") ref = "%s_%s_%s" % 
(category_name, cmd_name, args[0].replace("-", "")) if metavar: args = ["%s %s" % (arg, metavar) for arg in args] elements.extend(make_definition(", ".join(args), ref, description)) return elements def get_defaults(func): """Return a map of argument:default_value for specified function.""" spec = inspect.getargspec(func) if spec.defaults: return dict(zip(spec.args[-len(spec.defaults):], spec.defaults)) return {} def make_command_section(category_name, name, parser): # NOTE(andreykurilin): there is only one category in rally-manage, so # let's just hardcode it. cmd = "rally-manage" if category_name == "db" else "rally" section = subcategory("%s %s %s" % (cmd, category_name, name)) section.extend(parse_text(parser["description"])) if parser["parser"].arguments: defaults = get_defaults(parser["parser"].defaults["action_fn"]) section.extend(make_arguments_section( category_name, name, parser["parser"].arguments, defaults)) return section def make_category_section(name, parser): category_obj = category("Category: %s" % name) # NOTE(andreykurilin): we are re-using `_add_command_parsers` method from # `rally.cli.cliutils`, but, since it was designed to print help message, # generated description for categories contains specification for all # sub-commands. We don't need information about sub-commands at this point, # so let's skip "generated description" and take it directly from category # class. description = parser.defaults["command_object"].__doc__ # TODO(andreykurilin): write a decorator which will mark cli-class as # deprecated without changing its docstring. 
if description.startswith("[Deprecated"): i = description.find("]") msg = description[1:i] description = description[i+1:].strip() category_obj.append(warning(msg)) category_obj.extend(parse_text(description)) for command in sorted(parser.subparser.parsers.keys()): subparser = parser.subparser.parsers[command] category_obj.append(make_command_section(name, command, subparser)) return category_obj class CLIReferenceDirective(rst.Directive): optional_arguments = 1 option_spec = {"group": str} def run(self): parser = Parser() categories = copy.copy(main.categories) categories["db"] = manage.DBCommands if "group" in self.options: categories = {k: v for k,v in categories.items() if k == self.options["group"]} cliutils._add_command_parsers(categories, parser) content = [] for category in sorted(categories.keys()): content.append(make_category_section( category, parser.parsers[category]["parser"])) return content def setup(app): app.add_directive("make_cli_reference", CLIReferenceDirective) rally-0.9.1/doc/ext/include_vars.py0000664000567000056710000000422213073417720020406 0ustar jenkinsjenkins00000000000000# Copyright 2017: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from docutils import nodes import json from oslo_utils import importutils def include_var(name, rawtext, text, lineno, inliner, options=None, content=None): """ :param name: The local name of the interpreted role, the role name actually used in the document. 
:param rawtext: A string containing the enitre interpreted text input, including the role and markup. Return it as a problematic node linked to a system message if a problem is encountered. :param text: The interpreted text content. :param lineno: The line number where the interpreted text begins. :param inliner: The docutils.parsers.rst.states.Inliner object that called include_var. It contains the several attributes useful for error reporting and document tree access. :param options: A dictionary of directive options for customization (from the "role" directive), to be interpreted by the role function. Used for additional attributes for the generated elements and other functionality. :param content: A list of strings, the directive content for customization (from the "role" directive). To be interpreted by the role function. :return: """ obj = importutils.import_class(text) if isinstance(obj, (tuple, list)): obj = ", ".join(obj) elif isinstance(obj, dict): obj = json.dumps(dict, indent=4) else: obj = str(obj) return [nodes.Text(obj)], [] def setup(app): app.add_role("include-var", include_var) rally-0.9.1/doc/ext/utils.py0000664000567000056710000000600413073417720017070 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Docutils is awful library. 
Let's apply some hacks and aliases to simplify usage """ from docutils import frontend from docutils import nodes from docutils import utils from docutils.parsers import rst import string import six def parse_text(text): parser = rst.Parser() settings = frontend.OptionParser(components=(rst.Parser,)).get_default_values() document = utils.new_document(text, settings) parser.parse(text, document) return document.children paragraph = lambda text: parse_text(text)[0] note = lambda msg: nodes.note("", paragraph(msg)) hint = lambda msg: nodes.hint("", *parse_text(msg)) warning = lambda msg: nodes.warning("", paragraph(msg)) category = lambda title: parse_text("%s\n%s" % (title, "-" * len(title)))[0] subcategory = lambda title: parse_text("%s\n%s" % (title, "~" * len(title)))[0] section = lambda title: parse_text("%s\n%s" % (title, "\"" * len(title)))[0] def make_definition(term, ref, descriptions): """Constructs definition with reference to it.""" ref = ref.replace("_", "-").replace(" ", "-") definition = parse_text( ".. _%(ref)s:\n\n* *%(term)s* [ref__]\n\n__ #%(ref)s" % {"ref": ref, "term": term}) for descr in descriptions: if descr: if isinstance(descr, (six.text_type, six.binary_type)): if descr[0] not in string.ascii_uppercase: descr = descr.capitalize() descr = paragraph(" %s" % descr) definition.append(descr) return definition def make_definitions(title, ref_prefix, terms, descriptions=None): """Constructs a list of definitions with reference to them.""" raw_text = ["**%s**:" % title] if descriptions: for descr in descriptions: raw_text.append(descr) for term, definitions in terms: ref = ("%s%s" % (ref_prefix, term)).lower().replace( ".", "-").replace("_", "-").replace(" ", "-") raw_text.append(".. 
_%s:" % ref) raw_text.append("* *%s* [ref__]" % term) for d in definitions: d = d.strip() if d else None if d: if d[0] not in string.ascii_uppercase: # .capitalize() removes existing caps d = d[0].upper() + d[1:] d = "\n ".join(d.split("\n")) raw_text.append(" %s" % d) raw_text.append("__ #%s" % ref) return parse_text("\n\n".join(raw_text) + "\n") rally-0.9.1/doc/specs/0000775000567000056710000000000013073420067015671 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/specs/in-progress/0000775000567000056710000000000013073420067020141 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/specs/in-progress/distributed_runner.rst0000664000567000056710000001203013073417716024611 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ============================ Implement Distributed Runner ============================ We need a Distributed Runner in Rally that will run tasks on many nodes simultaneously. Problem description =================== Currently there are several runners in Rally, but they all can only run on the same host that Rally itself runs on. It limits test load that Rally can generate. In some cases required load can not be generated from one host. In current implementation Runner object runs actual subtask and generates test results while TaskEngine via ResultConsumer retrieves these results, checks them against specified SLA and stores in DB. 
There are several aspects that should be kept in mind when reasoning about distributed load generation: - Even one active runner is able to produce significant amounts of result data so that TaskEngine could barely process it in time. We assume that the single TaskEngine instance definitely will not be able to process several streams of raw test result data from several simultaneous runners. - We need test results to be checked against SLA as soon as possible so that we could stop load generation on SLA violation immediately (or close to) and protect the environment being tested. On the other hand we need results from all runners to be analysed, i.e. checking SLA on a single runner is not enough. - Since we expect long task duration we want to provide to user at least partial information about task execution as soon as possible. Proposed change =============== It is proposed to introduce two new component, RunnerAgent and a new plugin of runner type, DistributedRunner, and refactor existing components, TaskEngine, Runner and SLA, so that overall interaction will look as follows. .. image:: ../../source/images/Rally_Distributed_Runner.png :align: center 1. TaskEngine - create subtask context - create instance of Runner - run Runner.run() with context object and info about sceanario - in separated thread consume iteration result chunks & SLA from Runner - delete context 2. RunnerAgent - is executed on agent nodes - runs Runner for received task iterations with given context and args - collects iteration result chunks, stores them on local filesystem, sends them on request to DistributedRunner - aggregates SLA data and periodically sends it to DistributedRunner - stops Runner on receive of corresponding message 3. 
DistributedRunner - is a regular plugin of Runner type - communicates with remote RunnerAgents wia message queue (ZeroMQ) - provides context, args and SLA to RunnerAgents - distributes task iterations to RunnerAgents - aggregates SLA data from RunnerAgents - merges chunks of task result data It is supposed to use separate communication channels for task results and SLA data. - SLA data is sent periodically (e.g. once per second) for iterations that are already finished. - Task results are collected into chunks and stored locally by RunnerAgent and only send on request. Alternatives ------------ No way Implementation ============== Assignee(s) ----------- Primary assignee: Illia Khudoshyn Work Items ---------- - Refactor current SLA mechanism to support aggregated SLA data - Refactor current Runner base class - collect iteration results into chunks, ordered by timestamp - perform local SLA checks - aggregate SLA data - Refactor TaskEngine to reflect changes in Runner - operate chunks of ordered test results rather then stream of raw result items - apply SLA checks to aggregated SLA data - analyze SLA data and consume test results in separate threads - Develop infrastructure that will allow multi-node Rally configuration and run - Implement RunnerAgent - run Runner - cache prepared chunks of iteration results - comunicate via ZMQ with DistributedRunner(send task results and SLA on separate channels) - terminate Runner on 'stop' command from TaskEngine - Implement DistributedRunner that will - feed tasks to RunnerAgents - receive chunks of result data from RunnerAgents, merge it and provide merged data to TaskEngine - receive aggregated SLA data from RunnerAgents, merge it and provide data to TaskEngine - translate 'stop' command from TaskEngine to RunnerAgents Dependencies ============ - DB model refactoring (boris-42) - Report generation refactoring (amaretsky) rally-0.9.1/doc/specs/in-progress/pluggable-types.rst0000664000567000056710000002567513073417716024025 
0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ============================= Make Resource Types Pluggable ============================= Rally's current type resolution subsystem (``rally.task.types``) isn't easily pluggable, is tied to OpenStack, and cannot handle resources that must be resolved by the context of each iteration rather than the context of the subtask. This spec aims to solve all three problems. Problem description =================== The Rally ``types.set()`` decorator is used to convert resource names or regular expressions to resource objects. For instance, in a task file a user can specify: .. code-block:: yaml image: regex: cirros-.*-disk flavor: name: m1.tiny Rally will convert those into the matching image ID and flavor ID. As it currently exists, this process has several shortcomings and bugs: * Although it is technically pluggable, the resource type classes do not call ``rally.common.plugin.configure`` and the code is not patterned as a plugin, with code in ``rally.plugins``. Technically, a user could implement a subclass of ``rally.task.types.ResourceType`` in a plugin and use it, but this is not obvious from the code or documentation, and it would not be registered as a plugin. Moreover, OpenStack-specific resources are in the ``rally.task.types`` module instead of the OpenStack plugin. * It is tied to OpenStack. ``rally.task.types.preprocess()`` loads an OpenStack Clients object and passes it to the resource type objects. * In some cases, resources must be loaded by the client context created for each iteration, not by the admin context. For instance, when Glance images are created by the ``images`` context they are created as private images in each tenant; trying to load the image with the admin context fails. 
We need to be able to support this use case, without taking on a significant or universal performance penalty. Proposed change =============== This change is very involved and is broken into a number of distinct sections. Create ``types.convert()`` -------------------------- First, we will add a new function, ``types.convert()``, to replace ``types.set()``. ``types.convert()`` will accept arguments differently than ``types.set()``. For instance, this: .. code-block:: python @types.set(image=types.ImageResourceType, flavor=types.FlavorResourceType) ...will change to: .. code-block:: python @types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"}) This has a number of advantages: * Resource type classes can be named or removed, or the interface changed, without breaking the public API. * Users will not have to import types in the code. Currently this is only a single module, but this spec proposes to change that. * Plugins are loaded automatically, rather than users having to import them explicitly. * We can use the existing plugin deprecation mechanisms. * By passing a dict to ``types.convert()`` instead of a class, we could in theory pass arguments to the types. Nothing in this spec requires that functionality, but it is provided for the future. * ``set`` is a reserved keyword, so by renaming the function we eliminate a bit of code that is in violation of the OpenStack Style Guidelines. Convert ``ResourceType`` to plugin ---------------------------------- Next, the code will be rearranged to make it obviously pluggable, and a ``types.configure()`` call will be added to register the ``ResourceType`` subclasses as plugins. OpenStack resources will be moved into the OpenStack plugin space, and documentation will be added to make it clear that ``ResourceType`` can be subclassed by other plugins. The old resource type classes will be left in place, but deprecated. ``types.set()`` will also be deprecated at this point. 
Switch scenarios to ``types.convert()`` and new type plugins ------------------------------------------------------------ After resource type plugins are created, all existing scenarios will be changed over to ``types.convert()``. This will allow us to make the changes below that affect the type conversion API without having to make further changes to the scenarios. Change type preprocessing signature ----------------------------------- The arguments with which each preprocessor is called will be changed. Instead of: .. code-block:: python def transform(cls, clients, resource_config): ...it will be: .. code-block:: python def preprocess(self, resource_config, context=None, clients=None): Within the types subsystem proper, only ``context`` will be passed; ``clients`` will remain for compatibility with the validation subsystem, which does not have a context object yet, and remains tied to OpenStack. If ``clients`` is not passed to ``transform()``, the responsibility for creating OpenStack clients (or doing anything else with the subtask context) will lie with the ``ResourceType`` subclass itself. This entails a small performance penalty, but it's necessary to divorce the types subsystem from OpenStack. If ``clients`` is passed, then a deprecation warning will be logged. When the validation subsystem is made independent from OpenStack, the ``clients`` keyword argument should be removed. This also makes it so that ``ResourceType.transform()`` is no longer a class method, which will allow the resource classes to retain persistent information about a single decorated scenario function. ``transform`` will also be renamed to ``preprocess``, which will be more consistent with ``rally.task.types.preprocess`` and will make it easier to add a second resource type resolution hook, described below. Add ``ResourceType.map_for_scenario()`` --------------------------------------- A new hook will be added to the runners. 
In addition to ``ResourceType.preprocess()``, which is run after contexts but before the scenarios start, ``ResourceType.map_for_scenario(self, scenario_context, resource_config)`` will run before each iteration of the scenario. Together with the change to make ``types.set()`` accept objects instead of classes, this will solve the issue of resources that must be resolved per-iteration. For instance, to resolve images, ``ImageResourceType.preprocess()`` would resolve images for each set of credentials created for the subtask, as well as for the admin credentials, and cache them; ``ImageResourceType.map_for_scenario()`` would be passed the mapped scenario context and the resource configuration, and would choose the correct image ID to pass to the scenario. Note that image listing and resolution is not done by ``map_for_scenario()``; we should strive to keep the performance profile of ``map_for_scenario()`` as small as possible. In order to simplify the type resolution workflow, only ``map_for_scenario()`` will be able to rewrite arguments, but the default implementation in ``rally.task.types.ResourceType`` will rewrite it with the value cached in ``preprocess()``. For instance: .. 
code-block:: python class ResourceType(plugin.Plugin): @abc.abstractmethod def preprocess(self, context, resource_config): pass @abc.abstractmethod def map_for_scenario(self, scenario_context, resource_config): pass class FlavorResourceType(ResourceType): def preprocess(self, resource_config, context=None, clients=None): self._flavor_id = resource_config.get("id") if not self._flavor_id: novaclient = clients.nova() self._flavor_id = _id_from_name( resource_config=resource_config, resources=novaclient.flavors.list(), typename="flavor") def map_for_scenario(self, scenario_context, resource_config): return self._flavor_id class ImageResourceType(ResourceType): def preprocess(self, resource_config, context=None, clients=None): self._image_id = resource_config.get("id") if not self._image_id: self._images = {} all_images = clients.glance().images.list() for image in all_images: self._images.setdefault(image["owner"], []).append(image) def map_for_scenario(self, scenario_context, resource_config): if self._image_id: return self._image_id else: return _id_from_name( resource_config=resource_config, resources=self._images[scenario_context["user"]], typename="image") This demonstrates two different workflows. Flavors, which exist globally for all users and tenants, can be easily resolved once, at preprocessing time, and ``map_for_scenario()`` needs only to substitute the single, canonical flavor ID on each iteration. This does lead to some redundancy -- flavor arguments will be rewritten on each iteration, for instance -- but as it's only a matter of changing a few values in the argument dict, the performance penalty will be minimal. Images are more complicated, because images can exist on a per-user basis, and remain invisible to other users. In order to properly resolve image IDs, we must first find all images in ``preprocess()``, and then select the correct image for each iteration (and for the user that maps to each iteration) in ``map_for_scenario()``. 
Remove deprecated code ---------------------- Finally, in some future release we will remove the old, deprecated resource type classes and ``types.set()``. Alternatives ------------ Type resolution could be done in a single step (as opposed to the two step ``preprocess()``/``map_for_scenario()``) if we passed the results in the context object instead of rewriting scenario arguments. This is less straightforward, though; the scenario author would then need to know where to look in the context to find the resource object, even though for any given iteration there is exactly one resource object that is appropriate. Implementation ============== Assignee(s) ----------- Primary assignee: stpierre aka Chris St. Pierre Work Items ---------- * Create ``types.convert()``. * Rearrange the code into plugins and add plugin documentation. Deprecate ``types.set()`` and the old resource type classes. * Convert existing scenarios to use ``types.convert()``. * Convert ``ResourceType.transform()`` to ``ResourceType.preprocess()`` and create a new abstract intermediate subclass, ``OpenStackResourceType``, to which to offload OpenStack client creation. * Add the ``ResourceType.map_for_scenario()`` hook. * Rewrite any resource types that need to take advantage of the new ``map_for_scenario()`` hook. This will likely be limited to ``ImageResourceType`` and ``EC2ImageResourceType``. If there are obvious patterns that can be abstracted out, then add a new abstract intermediate subclass. * In the indeterminate future, remove the deprecated resource type classes and ``types.set()``. Dependencies ============ None. rally-0.9.1/doc/specs/in-progress/cleanup_refactoring.rst0000664000567000056710000001661613073417716024726 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. 
http://creativecommons.org/licenses/by/3.0/legalcode ========================= Refactoring Rally Cleanup ========================= Current generic mechanism is nice but it doesn't work enough well in real life. And in cases of existing users, persistence context and disaster cleanups it doesn't work well. This proposal should be useful for covering following use cases. Problem description =================== There are 5 use cases that require cleanup refactoring: #. Benchmarking with existing tenants. Keep existing resources instead of deleting all resources in the tenants. #. Persistence benchmark context. Create benchmark environment once before benchmarking. After that run some amount of benchmarks that are using it and at the end just delete all created resources by context cleanups. #. Disaster cleanup. Delete all resources created by Rally in such case if something went wrong with server that is running Rally. #. Isolated task It is quite important to add ability to run few instances of Rally against cloud simultanesouly (and one cleanup, won't affect the others) #. Testing that cleanups works How to ensure that Rally cleaned all resources. Proposed change =============== Use consistent resource names as described in https://review.openstack.org/201545 * Resources created by Rally are deleted after a task finishes by `UserCleanup.cleanup()`. * Resources created by contexts are deleted when the environment is not necessary by the context class `cleanup()`. Specifically, there are three cases we need to be able to handle: * Cleanup of all resources created by a single subtask run; * Cleanup of all resources created by contexts; and * Cleanup of all resources, possibly (or probably) out-of-band. In each case, this can be handled by matching resource names with a subset of plugins. For instance, to clean up scenario resources, we will do something like: .. 
code-block:: python scenarios = [cls for cls in discover.itersubclasses(scenario.Scenario) if issubclass(cls, utils.RandomNameGeneratorMixin)] for resource in resource_manager.list(): manager = resource_manager_cls(raw_resource=resource, ...) if utils.name_matches_object(resource_manager.name, scenarios, task_id=task_id): manager.delete() This is pseudocode that hides much of the complexity of our current cleanup process, but it demonstrates the basic idea: #. Generate a list of subclasses to delete resources for. In this case we use ``rally.task.scenario.Scenario``, but for context cleanup it would be ``rally.task.context.Context``, and for global cleanup it would be ``rally.common.plugin.plugin.Plugin``. In all three cases we would only delete resources for plugins that have ``rally.common.utils.RandomNameGeneratorMixin`` as a superclass; this lets us easily perform global cleanup without needing to worry about which plugin subclasses might implement ``RandomNameGeneratorMixin``. #. For each resource manager, list resources. #. If the resource name matches the list of possible patterns gleaned from the set of classes, delete it. A fair bit of functionality will need to be added to support this: * ``rally.plugins.openstack.cleanup.manager.cleanup()`` will need to accept a keyword argument specifying the type of cleanup. This should be a superclass that will be used to discover the subclasses to delete resources for. It will be passed to ``rally.plugins.openstack.cleanup.manager.SeekAndDestroy``, which will also need to accept the argument and generate the list of classes. * ``rally.plugins.openstack.cleanup.base``, ``rally.plugins.openstack.cleanup.manager`` and ``rally.plugins.openstack.cleanup.resources`` need to be moved out of the context space, since they will be used not only by the cleanup context to do scenario cleanup, but also to do out-of-band cleanup of all resources. 
* A new function, ``name()``, will need to be added to ``rally.plugins.openstack.cleanup.base.ResourceManager`` so that we can determine the name of a resource in order to match it. * A ``task_id`` keyword argument will be added to ``name_matches_object`` and ``name_matches_pattern`` in order to ensure that we only match names from the currently-running task. This will need to be passed along starting with ``rally.plugins.openstack.cleanup.manager.cleanup()``, and added as a keyword argument to every intermediate function. Additionally, a new top-level command will be added:: rally cleanup [--deployment ] [--task ] This will invoke cleanup of all resources, either for a specific task, or for any rally-created resource at all, regardless of task ID. This will not be ``rally task cleanup`` because it can be run with or without a task. Alternatives ------------ * Use OpenStack project resources cleaner (ospurge). This enables us to purge the tenants, regardless of resource naming, so we only need to keep track of Rally tenants (naming could be a solution here) and resources in admin tenant. In this case, we need to think about a case where Rally needs to cleanup some resources from a existing tenant while leaving the rest available. * Use/enhance Tempest cleanup command (tempest/cmd/cleanup.py). Compare functionality or fix the version in tempest. Maybe tempest_lib would be a better place for this, and for the cleanup code in general. In this case, we need to think about a case where a Rally scenario creates a tenant, and then deletes it but some resources are left around. And also we need to think about a case of benchmark on existing tenants. Implementation ============== Assignee(s) ----------- Primary assignee: wtakase aka Wataru Takase Other contributors: rvasilets aka Roman Vasilets stpierre aka Chris St. Pierre Work Items ---------- #. Consider how to deal resources which don't be named by generate_random_name(). 
For example, Neutron ports which are created as side-effect of other resources (routers, networks, servers) don't have resource names. In this case, ports always have an "owner" so cleanup should check port's owner's name. And what about floating IPs? (Needed by use cases 1, 2, 3, 4, 5) #. Modify ``name_matches_{object,pattern}`` to accept a task ID. #. Add ``name()`` functions to all ``ResourceManager`` subclasses. #. Move ``rally.plugins.openstack.cleanup.manager.{base,manager,resources}`` to ``rally.plugins.openstack.cleanup``. #. Modify ``rally.plugins.openstack.cleanup.manager.cleanup()`` to accept a task ID and a superclass, pass them along to ``SeekAndDestroy``, and generally Do The Right Thing with them. #. Create the ``rally cleanup`` command. #. Support negative filter which deletes unmatched resources. (Needed by use cases 3, 5) Dependencies ============ * Consistent resource names: https://review.openstack.org/201545 * Add name pattern filter for resource cleanup: https://review.openstack.org/#/c/139643/ * Finish support of benchmarking with existing users: https://review.openstack.org/#/c/168524/ * Add support of persistence benchmark environment: https://github.com/openstack/rally/blob/master/doc/feature_request/persistence_benchmark_env.rst * Production ready cleanups: https://github.com/openstack/rally/blob/master/doc/feature_request/production_ready_cleanup.rst rally-0.9.1/doc/specs/in-progress/task_and_verification_export.rst0000664000567000056710000000570013073417716026633 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. 
If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ==================================================== Export task and verifications into external services ==================================================== Currently Rally stores all information about executed tasks and verifications in its database and it is also able to provide this data in JSON format or in the form of HTML reports. There is a request for Rally to export this data into external services (like test management system or Google Docs) via its API. Problem description =================== There are many, including a lot of proprietary, test management systems in the market available as SaaS and/or On-Premises, like TestRail, TestLink, TestLodge etc, which objective is to manage, organize and track all testing efforts. Most of the systems provide an API for importing test data. The systems also possess data model somewhat similar to Rally's one. It usually includes (among others) models for project, test suite test case, test plan and test execution results. It is suggested to provide Rally users an ability to export information about testing their environments into such test management systems in order to integrate benchmarking via Rally into rest of their testing activities. Since different test management systems have alike yet different API for the purpose it is reasonable to implement this export functionality via plugins. Proposed change =============== 1. Implement a base class Exporter for an export plugin at *rally/task/exporter.py*. ..code-block:: python class Exporter(plugin.Plugin): def export(self, task, connection_string): ... 2. Implement a CLI command of the form ..code-block:: shell rally task export 3. Implement a base class VerifyExporter for an export plugin at *rally/verify/exporter.py*. 
..code-block:: python class VerifyExporter(plugin.Plugin): def export(self, verification, connection_string): ... 4. Implement a CLI command of the form ..code-block:: shell rally verify export Alternatives ------------ No way Implementation ============== Assignee(s) ----------- Primary assignee: rvasilets Work Items ---------- - Implement plugin base class - Implement CLI command - Implement plugin for TestRail Dependencies ============ None rally-0.9.1/doc/specs/in-progress/deployment_type.rst0000664000567000056710000001451013073417716024124 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ============================ Rally Deployment Unification ============================ Make Rally be able to examine any software through the API, unbound it from OpenStack. Problem description =================== Rally is able to examine only system that use Keystone as a authentication services, which limits sphere where Rally is suitable. At the moment to run Rally Task or Rally Verify you must specify OpenStack deployment which contains credentials of it. These credentials are used in Rally Task & Verify for different setups and validations. Rally is not able to store more than one credential for one deployment, so it is impossible to support multi-scenario runs related to different systems. Proposed change =============== * Modify 'Deployment' database model to be able to store credentials of many different systems, adding type of system. Now we have model Deployment with admin and users columns, which are credentials for Keystone (tight coupled with OpenStack). There is next model now: .. code-block:: python class Deployment(BASE, RallyBase): ... admin = sa.Column(types.PickleType, nullable=True) users = sa.Column(types.PickleType, default=[], nullable=False) ... 
and values of columns in DB something like that: ``admin = {admin_creds} or None`` ``users = [{user_creds1}, {user_creds2}, ...] or []`` We need to decouple deployment from OpenStack and make credentials more flexible, we describe it in one column named ``credentials``, where we can store special structure containing credentials for many different systems, including type of credentials for each. .. code-block:: python class Deployment(BASE, RallyBase): ... credentials = sa.Column(types.PickleType, default=[], nullable=False) ... So, for current OpenStack credentials we will have next data in credentials column in DB after migration: .. code-block:: python credentials = [ [ "openstack", {admin: {admin_creds} or None, users: [{user_creds1}, {user_creds2}, ...] or []} ], ] and for multi-credentials deployment: .. code-block:: python credentials = [ [ "openstack", {admin: {admin_creds} or None, users: [{user_creds1}, {user_creds2}, ...] or []} ], [ "zabbix", {"url": "example.com", "login": "admin", "password": "admin"} ] ] Future summarized schema in DB: ``credentials = [[, ], ... ]`` To implement this point we need to write db migration, tests for it and write adapters for credentials get/create/update methods, mostly for support backward compatibility in ``rally.api`` module methods. * Get rid of ``rally.common.objects.credential.Credential`` class and fix it usages mostly in ``rally.osclients`` if needed. Refactor all usages of passing ``rally.common.objects.credential.Credential`` to ``rally.osclients.OSClient``, make possible to take dict as credentials for ``rally.osclients.OSClient`` class, initialise ``rally.plugins.openstack.credentials.OpenStackCredentials`` class in ``OSClient`` ``__init__`` method. Base class for credentials will be inherited from plugins.Plugin and must implement validation method, it will be placed in ``rally.plugins.common.credentials``: .. 
code-block:: python @six.add_metaclass(abc.ABCMeta) @plugin.configure(name="base_credentials", schema="{...}") class Credentials(plugin.Plugin): def __init__(self, credentials): self.validate(credentials) super(Credentials, self).__setattr__("credentials", credentials) def __getattr__(self, item): if item in self.__dict__: return self.__dict__[item] return self.credentials[item] def __setattr__(self, key, value): self.credentials[key] = value def to_dict(self): return self.credentials.copy() def validate(self, obj): jsonschema.validate(obj, self._meta_get("schema")) and we need to add child for openstack credentials, it will be placed in ``rally.plugins.openstack.credentials``: .. code-block:: python openstack_credentials_schema = { "type": "object", "properties": { "auth_url": {"type": "string"}, "username": {"type": "string"}, "password": {"type": "string"}, }, "required": ["auth_url", "username", "password"] } @plugin.configure(name="openstack_credentials", schema=openstack_credentials_schema) class OpenStackCredentials(Credentials): pass Replace usage of ``rally.common.objects.credential.Credential`` to ``rally.plugins.openstack.credentials.OpenStackCredentials`` in ``rally.osclients`` * Update cli to show deployment type in output of 'rally deployment list'. Make possible to show deployments list in case of multi-scenario as: .. 
code-block:: shell > rally deployment list # (in case of many deployments) uuid | name | created_at | type | credential -------+--------+------------+-----------+--------------------------------- | | 21-02-2016 | openstack | {"admin": {...}, "users": [...]} | zabbix | {"login": "login", "psw": "..."} Alternatives ------------ None Implementation ============== Assignee(s) ----------- Primary assignee: rpromyshlennikov aka Rodion Promyshlennikov (rpromyshlennikov@mirantis.com) Work Items ---------- - Change Deployment db model class - Write migrations - Make adapters for credentials get/create/update methods for temporary support changed data format - Remove all usages of passing ``rally.common.objects.credential.Credential`` to ``rally.osclients.OSClient`` - Create new plugin based class for credentials - Write subclass of rally.plugins.common.credentials.Credential for OpenStack credentials with proper validation of them - Migrate to new credentials class - Remove ``rally.common.objects.credential.Credential`` class - Improve CLI-client to make possible show multi-credentials deployments. - Feature refactoring: remove adapters after "Multi Scenario support" implementation. Dependencies ============ None rally-0.9.1/doc/specs/in-progress/improve_atomic_actions_format.rst0000664000567000056710000001035113073417716027007 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. 
If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ============================================= New Atomic actions format in workload results ============================================= Currently atomic actions data in workload results is insufficient, therefore some new features can not be implemented. Problem description =================== The main problem is that current format does not support nested atomic actions. Also, atomic actions data does not include timestamps for each action start and end time. Having this data will allow us to inspect atomic actions runtime better and generate detailed reports. Since word "atomic" means something that can not be split into parts and we introduce nested atomic actions, we should use different term instead of "atomic actions". Proposed change =============== Term "atomic actions" should be renamed to just "actions". Change actions results schema from type "object" to "array" and extend it with timestamps and nested actions. Nested actions will be represented by "children" key and have unlimited nesting. With timestamps, there is no need to save durations anymore, so get rid of this value. Since this change is not backward compatible, we need to create a database migration script. The migration will use iteration start timestamp as start timestamp for first action and then calculate further timestamps based on actions order and their durations. Benefits of new format ---------------------- Nested actions will make actions measurement more detailed and flexible since we could have data what sub-actions were run during specific action runtime, without complicated changes at code. Start and end timestamps will provide us with accurate information about action runtime within the whole iteration and ability to create `Gantt charts `_. 
Schema modification ------------------- Schema location is *rally.common.objects.task.TASK_RESULT_SCHEMA ["properties"]["result"]["properties"]["atomic_actions"]* should be moved to *rally.common.objects.task.TASK_RESULT_SCHEMA ["properties"]["result"]["properties"]["actions"]* and changed: AS IS: .. code-block:: python { "type": "object" } Here keys are actions names, and values are their durations. Actions data is actually represented by collections.OrderedDict, so we have real order saved. Example: .. code-block:: python OrderedDict([("keystone.create_tenant", 0.1234), ("keystone.create_users", 1.234)]) TO BE: .. code-block:: python { "type": "array", "items": { "type": "object", "properties": { "name": {"type": "string"}, # name of action "started_at": {"type": "number"}, # float UNIX timestamp "finished_at": {"type": "number"}, # float UNIX timestamp "children": {"$ref": "#/"}, }, "required": ["name", "started_at", "finished_at", "children"], "additionalProperties": False }, "minItems": 0 } Example how this data can be represented: .. code-block:: python [{"name": "keystone.create_tenant", "started_at": 1455281370.288397, "finished_at": 1455281372.672342, "children": []}, {"name": "keystone.create_users", "started_at": 1455281372.931324, "finished_at": 1455281373.375184, "children": []}] Alternatives ------------ None Implementation ============== Assignee(s) ----------- Primary assignee: Alexander Maretskiy Work Items ---------- - Rename atomic actions into actions - Improve actions results format - Create a DB migartion that transforms results to new format Dependencies ============ None rally-0.9.1/doc/specs/in-progress/README.rst0000664000567000056710000000070213073417716021636 0ustar jenkinsjenkins00000000000000Rally Specs =========== Specs are detailed description of proposed changes in project. Usually they answer on what, why, how to change in project and who is going to work on change. 
This directory contains files with accepted by not implemented specs, 1 file is 1 spec. If you are looking for full rally road map overview go `here `_. rally-0.9.1/doc/specs/in-progress/new_rally_input_task_format.rst0000664000567000056710000002625513073417716026521 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ==================================== Make the new Rally input task format ==================================== Current Rally format is not flexible enough to cover all use cases that are required. Let's change it! Problem description =================== Why do we need such fundamental change? - Multi scenarios load generation support. This is very important, because it will allow to use Rally for more real life load generation. Like making load on different components and HA testing (where one scenario tries for example to authenticate another is disabling controller) - Ability to add require meta information like (title and descriptions) That are required to generate clear reports - Fixing UX issues. Previous format is very hard for understanding and end users have issues with understanding how it works exactly. Proposed change =============== Make a new format that address all issues. Old format JSON schema: .. 
code-block:: python { "type": "object", "$schema": "http://json-schema.org/draft-04/schema", "patternProperties": { ".*": { "type": "array", "items": { "type": "object", "properties": { "args": { "type": "object" }, "runner": { "type": "object", "properties": { "type": {"type": "string"} }, "required": ["type"] }, "context": { "type": "object" }, "sla": { "type": "object", }, }, "additionalProperties": False } } } } Old format sample: .. code-block:: yaml --- : - args: runner: context: : ... sla: : - -//- - -//- : -//- Every element of list corresponding to is separated task, that generates environment according to context, generates load using specified runner that runs multiple times with it's args. New format JSON schema: .. code-block:: python { "type": "object", "$schema": "http://json-schema.org/draft-04/schema", "properties": { "version": {"type": "number"}, "title": {"type": "string"}, "description": {"type": "string"}, "tags": { "type": "array", "items": {"type": "string"} }, "subtasks": { "type": "array", "items": { "type": "object", "properties": { "title": {"type": "string"}, "description": {"type": "string"}, "tags": { "type": "array", "items": {"type": "string"} }, "run_in_parallel": {"type": "boolean"}, "workloads": { "type": "array", "items": { "type": "object", "properties": { "name": {"type": "string"}, "args": { "type": "object" }, "runner": { "type": "object", "properties": { "type": {"type": "string"} }, "required": ["type"] }, "sla": { "type": "object" }, "context": { "type": "object" } }, "required": ["name", "runner"] } }, "context": { "type": "object" } }, "required": ["title", "workloads"] } } }, "required": ["title", "tasks"] } New format sample: .. code-block:: yaml --- # Having Dictionary on top level allows us in future to add any new keys. # Keeping the schema of format more or less same for end users. # Version of format version: 1 # Allows to set title of report. 
Which allows end users to understand # what they can find in task report. title: "New Input Task format" # Description allows us to put all required information to explain end # users what kind of results they can find in reports. description: "This task allows you to certify that your cloud works" # Explicit usage "rally task start --tag" --tag attribute tags: ["periodic", "nova", "cinder", "ha"] subtasks: # Note every task is executed serially (one by one) # # Using list for describing what benchmarks (tasks) to run is much # better idea then using Dictionary. It resolves at least 3 big issues: # # 1) Bad user experience # 1.1) Users do not realize that Rally can run N benchmarks # 1.2) Keys of Dictionary were Scenario names (reasonable question why?!) # 1.3) Users tried to put N times same k-v (to run one benchmark N times) # 2) No way to specify order of scenarios execution, especially in case # where we need to do chain like: ScenarioA -> SecnearioB -> ScenarioA # 3) No way to support multi scenario load, because we used scenario name # as a identifier of single task - # title field is required because in case of multi scenario load # we can't use scenario name for it's value. title: "First task to execute" description: "We will stress Nova" # optional # Tags are going to be used in various rally task reports for filtering # and grouping. tags: ["nova", "my_favorite_task", "do it"] # The way to execute scenarios (one by one or all in parallel) run_in_parallel: False # Single scenario load can be generated by specifying only one element # in "workloads" section. 
workloads: - # Full name of scenario plugin name: "NovaServers.boot_and_delete" # Arguments that are passed to "NovaServers.boot_and_delete" plugin args: image: name: "^cirros$" flavors: name: "m1.small" # Specification of load that will be generated runner: type: "constant" times: 100 concurrency: 10 # Benchmark success of criteria based on results sla: # Every key means SLA plugin name, values are config of plugin # Only if all criteria pass task is marked as passed failure_rate: max: 0 # Specification of context that creates env for benchmark scenarios # E.g. it creates users, tenants, sets quotas, uploads images... context: # Each key is the name of context plugin # This context creates temporary users and tenants users: # These k-v will be passed as arguments to this `users` plugin tenants: 2 users_per_tenant: 10 # This context set's quotas for created by `users` context tenants quotas: nova: cpu: -1 - title: "Second task to execute" description: "Multi Scenario load generation with common context" run_in_parallel: True # If we put 2 or more scenarios to `scenarios` section we will run # all of them simultaneously which allows us to generate more real life # load workloads: - name: "CinderVolumes.create_and_delete" args: size: 10 runner: type: "constant" times: 100 concurrency: 10 sla: failure_rate: max: 0 - name: "KeystoneBasic.create_and_delete_users" args: name_length: 20 runner: type: "rps" rps: 1 times: 1000 sla: max_seconds_per_iteration: 10 - name: "PhysicalNode.restart" args: ip: "..." user: "..." password: "..." 
runner: type: "rps" rps: 10 times: 10 sla: max_seconds_per_iteration: 100 # This scenario is called in own independent and isolated context context: {} # Global context that is used if scenario doesn't specify own context: users: tenants: 2 users_per_tenant: 10 Alternatives ------------ No way Implementation ============== Assignee(s) ----------- Primary assignee: boris-42 aka Boris Pavlovic Work Items ---------- - Implement OLD -> NEW format converter - Switch benchmark engine to use new format. This should affect only benchmark engine - Implement new DB schema format, that will allow to store multi-scenario output data - Add support for multi scenario results processing in rally task detailed|sla_check|report - Add timestamps to task, scenarios and atomics - Add support for usage multi-runner instance in single task with common context - Add support for scenario's own context - Add ability to use new format in rally task start. - Deprecate OLD format Dependencies ============ None rally-0.9.1/doc/specs/in-progress/pluggable_validators.rst0000664000567000056710000001476413073417716025110 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. 
If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ================================= Rally Task Validation Refactoring ================================= Problem description =================== * Current validator system is pluggable - but it doesn't use our plugin mechanism which creates problems (e.g. validators are imported directly and used in code, instead of using their names, which doesn't allow to rename them or move without breaking backward compatibility). * Current mechanism of validation leads to a lot of OpenStack related code in the Rally task engine. * It's hard to use the same validators for different types of plugins, current approach is used only for scenarios. Proposed change =============== To create unified validation mechanism that can be used for all types of future deployments and type of plugins in the same way. So we will be able to remove `OpenStack related code `_ from the task engine, and create a bunch of common validators (e.g. jsonschema) that can be used by any plugin. As a bonus of refactoring, it allows us to switch to common mechanism of plugins. Alternatives ------------ No way Implementation ============== Here is an example of base class for all pluggable validators. .. code-block:: python import abc import six from rally.common.plugin import plugin from rally.task import validation def configure(name, namespace="default"): return plugin.configure(name=name, namespace=namespace) @six.add_metaclass(abc.ABCMeta) @configure(name="base_validator") class Validator(plugin.Plugin): def validate(self, cache, deployment, cfg, plugin_cfg): """ Method that validates something. :param cache: this is cross validator cache where different validators could store information about environment like initialized OpenStack clients, images, etc and share it through validators. E.g. 
if your custom validators need to perform 200 OpenStack checks and each validator plugin need to initialize client, Rally will take extra 2 minutes for validation step. As well, its not efficient to fetch all image each time if we have image related validators. :param deployment: Deployment object, deployment which would be used for validation :param cfg: dict, configuration of subtask :param plugin_cfg: dict, with exact configuration of the plugin """ pass def add(name, **kwargs): """ Add validator instance to the validator plugin class meta. Get validator class by name. Initialize an instance. Add validator instance to validators list stored in the Validator meta by 'validator_v2' key. This would be used to iterate and execute through all validators used during execution of subtask. :param kwargs: dict, arguments used to initialize validator class instance :param name: str, name of the validator plugin """ validator = Validator.get(name)(**kwargs) def wrapper(p): p._meta_setdefault("validators_v2", []) p._meta_get("validators_v2").append(validator) return p return wrapper @abc.abstractmethod def validate(plugin, deployment, cfg, plugin_cfg): """ Execute all validate() method of all validators stored in meta of Validator. Iterate during all validators stored in the meta of Validator and execute proper validate() method and add validation result to the list. 
:param plugin: is plugin class instance that has validators and should be validated :param deployment: Deployment object, deployment which would be used for validation :param cfg: dict, configuration of subtask :param plugin_cfg: dict, with exact configuration of the plugin """ results = [] cache = {} for v in plugin._meta_get("validators_v2"): try: v.validate(cache, deployment, cfg, plugin_cfg) except Exception as e: results.append(validation.ValidationResult(is_valid=False, msg=e)) return results New design allows us to use the same validator and same validation mechanism for different types of plugins (context, sla, runner, scenarios) which was not possible before. For example, we could implement jsonschema validation as a plugin. .. code-block:: python import jsonschema @configure(name="jsonschema") class JsonSchemaValidator(Validator): def __init__(self, schema=None): super(JsonSchemaValidator, self).__init__() self.schema = schema or {} def validate(self, cache, deployment, cfg, plugin_cfg): jsonschema.validate(plugin_cfg, self.schema) @validator.add("jsonschema", schema="") class SomeContext(base.Context): pass class SomeScenario(base.Scenario): @validator.add("jsonschema", schema="") def some_function(self): pass Assignee(s) ----------- Primary assignee: - boris-42 - rvasilets Work Items ---------- - Create validation module with base plugin and method of adding validators - Add support to task engine of new validation mechanism - Port all old validators to new mechanism - Deprecate old validation mechanism - Remove deprecated in new release Dependencies ============ None rally-0.9.1/doc/specs/in-progress/refactor_scenario_utils.rst0000664000567000056710000002635113073417716025621 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. 
http://creativecommons.org/licenses/by/3.0/legalcode ========================================================= Refactor scenarios' utils into central os-services tree ========================================================= It's hard to reuse code from different scenario utils in areas like context. Problem description =================== * Code that wraps openstack services from different scenario utils is difficult to reuse in context plugins (or, sometimes in different scenarios plugins), which causes code duplications. * Wrappers don't fully integrate with the current structure (example: network operations need to alternate between calls to utils and calls to network wrappers). * It is impossible to do versioning of current utils which makes them hard to reuse as a base for out of tree plugins. * Is is not possible to have separated common functionality (e.g. network) and specific implementation features (nova network and neutron) Proposed change =============== Group all service related utils under a single tree accessible from all areas of the project. Also, inheritance structure in scenarios is problematic. This would be a great opportunity to move to composition. Alternatives ------------ None comes to mind. Implementation ============== Current source tree ------------------- .. code-block:: rally/ | +-- plugins/ +-- openstack/ | +-- scenarios/ | | | | | +-- nova/ | | | | | | | +-- servers.py | | | | | | | +-- utils.py | | | | | +-- ... | +-- wrappers/ | | | +-- keystone.py | | | +-- network.py keystone scenarios use plugins/openstack/scenarios/keystone/utils.py .. code-block:: python @atomic.action_timer("keystone.create_tenant") def _tenant_create(self, name_length=10, **kwargs): """Creates keystone tenant with random name. 
:param name_length: length of generated (random) part of name :param kwargs: Other optional parameters :returns: keystone tenant instance """ name = self._generate_random_name(length=name_length) return self.admin_clients("keystone").tenants.create(name, **kwargs) .. code-block:: python class KeystoneBasic(kutils.KeystoneScenario): """Basic benchmark scenarios for Keystone.""" @validation.number("name_length", minval=10) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}) def create_tenant(self, name_length=10, **kwargs): """Create a keystone tenant with random name. :param name_length: length of the random part of tenant name :param kwargs: Other optional parameters """ self._tenant_create(name_length=name_length, **kwargs) while keystone contexts use plugins/openstack/wrappers/keystone.py .. code-block:: python @six.add_metaclass(abc.ABCMeta) class KeystoneWrapper(object): def __init__(self, client): self.client = client def __getattr__(self, attr_name): return getattr(self.client, attr_name) @abc.abstractmethod def create_project(self, project_name, domain_name="Default"): """Creates new project/tenant and return project object. :param project_name: Name of project to be created. :param domain_name: Name or id of domain where to create project, for implementations that don't support domains this argument must be None or 'Default'. 
""" @abc.abstractmethod def delete_project(self, project_id): """Deletes project.""" class KeystoneV2Wrapper(KeystoneWrapper): def create_project(self, project_name, domain_name="Default"): self._check_domain(domain_name) tenant = self.client.tenants.create(project_name) return KeystoneV2Wrapper._wrap_v2_tenant(tenant) def delete_project(self, project_id): self.client.tenants.delete(project_id) class KeystoneV3Wrapper(KeystoneWrapper): def create_project(self, project_name, domain_name="Default"): domain_id = self._get_domain_id(domain_name) project = self.client.projects.create( name=project_name, domain=domain_id) return KeystoneV3Wrapper._wrap_v3_project(project) def delete_project(self, project_id): self.client.projects.delete(project_id) Users context: .. code-block:: python @context.configure(name="users", order=100) class UserGenerator(UserContextMixin, context.Context): """Context class for generating temporary users/tenants for benchmarks.""" def _create_tenants(self): cache["client"] = keystone.wrap(clients.keystone()) tenant = cache["client"].create_project( self.PATTERN_TENANT % {"task_id": task_id, "iter": i}, domain) Suggested change ---------------- .. code-block:: plugins/ | +-- openstack/ | | +-- scenarios/ | | | | | +-- neutron/ | +-- authenticate/ | +-- services/ | # Here we will store base code for openstack services. | # like wait_for, and wait_for_delete +-- base.py | +-- compute/ | | | +-- compute.py | +-- identity/ | | # Here is common service when we care to do things | | # and regardless of which API/service is used for | | # that. So we will implement here parts that can be | | # done in both. | +-- identity.py | | # Here is api for working with specific API | | # version/service Like keystone_v2/keystone_v3 or | | # nova_network/neutron. This will be used in | | # main.py for implementation. 
| +-- kestone_v2.py | | | +-- kestone_v3.py | +-- network/ | | # Here is common service when we care to do things | | # and regardless of which API/service is used for | | # that. So we will implement here parts that can be | | # done in both. | +-- network.py | | # Here is api for working with specific API | | # version/service Like nova_network/neutron. | | # This will be used in main.py for implementation. | +-- nova_network.py | | | +-- neutron.py | +-- ... Base class that allow us to use atomic actions in services is inside the rally/plugins/openstack/services/base.py: .. code-block:: python class Service(object): def __init__(self, clients, atomic_inst=None): self.clients = clients if atomic_inst: if not isinstance(atomic_inst, ActionTimerMixin): raise TypeError() # NOTE(boris-42): This allows us to use atomic actions # decorators but they will add values # to the scenario or context instance self._atomic_actions = atomic_inst._atomic_actions else: # NOTE(boris-42): If one is using this not for scenarios and # context, Service instance will store atomic # actions data. self._atomic_actions = costilus.OrderedDict() Implementation of IdentityService in services/identity/identity.py: .. code-block:: python class IdentityService(Service): """Contains only common methods for Keystone V2 and V3.""" def __init__(self, clients, atomic_inst=None, version=None): super(self).__init__(clients, atomic_inst=atomic_inst) if version: if version == "2": self.impl = KeystoneV2Service() else: self.impl = KeysotneServiceV3() else: self.impl = auto_discover_version() def project_create(self, name, **kwargs): result = self.impl.project_create(name) # handle the difference between implementations return magic(result) # ... Inside services/identity/keystone_v2.py: .. 
code-block:: python class KeystoneV2Service(KeystoneService): # NOTE(boris-42): we can use specific atomic action names # for specific implementation of service @atomic.action_timer("keystone_v2.tenant_create") def project_create(self, project_name): """Implementation.""" Inside services/identity/keystone_v3.py: .. code-block:: python class KeystoneV3Service(KeystoneService): @atomic.action_timer("keystone_v3.project_create") def project_create(self, project_name): """Implementation.""" def domain_create(self, *args, **kwargs): """Specific method for KesytoneV3.""" Both context.keystone and scenario.keystone can use now services/identity.py usage is the same in context and scenario, so it's enough to show in case of scenario. .. code-block:: python from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_v3 class KeystoneBasic(scenario.OpenStackScenario): # no more utils.py """Basic benchmark scenarios for Keystone.""" @validation.number("name_length", minval=10) @validation.required_openstack(admin=True) @scenario.configure(context={"admin_cleanup": ["keystone"]}) def create_tenant(self, name_length=10, **kwargs): """Create a keystone tenant with random name. :param name_length: length of the random part of tenant name :param kwargs: Other optional parameters """ name = self._generate_random_name(length=name_length) # NOTE(boris-42): Code above works in keystone V2 and V3 # as well it will add atomic action, and name # will be "keystone_v3.project_create" or # "keystone_v2.tenant_create" depending on used # version common.Identity(self.clients, self).create_project(name, **kwargs) # NOTE(boris-42): If you need specific operation for keystone v3 keystone_v3.KeystoneV3Service(self.clients, self).domain_create() # NOTE(boris-42): One of the nice thing is that we can move # initialization of services to __init__ method # of sceanrio. Assignee(s) ----------- - boris-42 Work Items ---------- #. 
Create a base.Service class #. Create for each project services #. Use in all scenarios and context services instead of utils #. Deprecate utils #. Remove utils Dependencies ============ none rally-0.9.1/doc/specs/template.rst0000664000567000056710000000471413073417716020253 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ======================= The title of your Spec ======================= Rally Road map: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g/edit#gid=0 Introduction paragraph -- why are we doing anything? Problem description =================== A detailed description of the problem. Proposed change =============== Here is where you cover the change you propose to make in detail. How do you propose to solve this problem? If this is one part of a larger effort make it clear where this piece ends. In other words, what's the scope of this effort? Include where in the heat tree hierarchy this will reside. Alternatives ------------ This is an optional section, where it does apply we'd just like a demonstration that some thought has been put into why the proposed approach is the best one. Implementation ============== Assignee(s) ----------- Who is leading the writing of the code? Or is this a blueprint where you're throwing it out there to see who picks it up? 
If more than one person is working on the implementation, please designate the primary author and contact. Primary assignee: Can optionally list additional ids if they intend on doing substantial implementation work on this blueprint. Work Items ---------- Work items or tasks -- break the feature up into the things that need to be done to implement it. Those parts might end up being done by different people, but we're mostly trying to understand the timeline for implementation. Dependencies ============ - Include specific references to specs and/or blueprints in heat, or in other projects, that this one either depends on or is related to. - Does this feature require any new library dependencies or code otherwise not included in OpenStack? Or does it depend on a specific version of library? rally-0.9.1/doc/specs/implemented/0000775000567000056710000000000013073420067020174 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/specs/implemented/split_plugins.rst0000664000567000056710000000321713073417716023634 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ==================== Re-organize Plugins ==================== Move all plugins under rally/plugins to simplify Rally code base Problem description =================== Rally code is coupled with Rally engine and infra as well as OpenStack specific code. This makes contribution harder as new-comers need to understand Rally code as well as many different plugins. It also makes reviewing much harder. Proposed change =============== Moving all plugins under a single directory, with "OpenStack" as its sub-directory would make everything simpler. Alternatives ------------ None comes to mind. Implementation ============== .. 
code-block:: shell rally/ | +-- plugins/ | +-- common/ | | | +-- runners/ | +-- sla/ | +-- contexts/ | +-- scenarios/ | +-- openstack/ | +-- runners/ +-- sla/ +-- contexts/ +-- scenarios/ NOTE: looking at the current code base we can see that: #. All ``runners`` and ``sla`` will go under ``common``. #. All ``contexts`` will go under ``openstack``. #. Most of ``scenarios`` (except for ``dummy``) will go under ``openstack``. Assignee(s) ----------- - yfried - boris-42 Work Items ---------- - Move all OpenStack related plugins and code under ``plugins/openstack/`` and all other plugins code under ``plugins/common/``. Dependencies ============ - Plugin unification rally-0.9.1/doc/specs/implemented/db_refactoring.rst0000664000567000056710000002512413073417716023711 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ============================== Scaling & Refactoring Rally DB ============================== There are a lot of use cases that can't be done because of DB schema that we have. This proposal describes what and why we should change in DB. Problem description =================== There are 3 use cases that requires DB refactoring: 1. scalable task engine Run benchmarks with billions iterations Generate distributed load 10k-100k RPS Generate all reports/aggregated based on that data 2. multi scenario load generation Running multiple scenarios as a part of single subtask requires changes in the way how we are storing subtask results. 3. task debugging and profiling Store complete results of validation in DB (e.g. what validators were run, what validators passed, what didn't passed and why). Store durations of all steps (validation/task) as well as other execution stats needed by CLI and to generate graphs in reports. Store statuses, duration, errors of context cleanup steps. Current schema doesn't work for those cases. 
Proposed change =============== Changes in DB ------------- Existing DB schema ~~~~~~~~~~~~~~~~~~ .. code-block:: +------------+ +-------------+ | Task | | TaskResult | +------------+ +-------------+ | | | | | id | | id | | uuid <--+----+- task_uuid | | | | | +------------+ +-------------+ * Task - stores task status, tags, validation log * TaskResult - stores all information about workloads, including configuration, conext, sla, results etc. New DB schema ~~~~~~~~~~~~~ .. code-block:: +------------+ +-------------+ +--------------+ +---------------+ | Task | | Subtask | | Workload | | WorkloadData | +------------+ +-------------+ +--------------+ +---------------+ | | | | | | | | | id | | id <----+--+ | id <-----+--+ | id | | uuid <--+----+- task_uuid | +-+- subtask_id | +-+- workload_id | | ^ | | uuid | | uuid | | uuid | +---+--------+ +---^---------+ | | | | +--------------------------------+- task_uuid | | | | | +--------------+ | | +----------------------------------------------------+- task_uuid | | | +---------------+ +-------+---------+ | +--------+ + | Tag | | +--------+ | | | | | id | | | uuid -+--+ | type | | tag | +--------+ * Task - stores information about task, when it was started/updated/finished, it's status, description, and so on. As well it used to aggregate all subtasks related to this task * SubTask - stores information about subtask, when it was started/updated/ finished, it's status, description, configuration, aggregated information about workloads. Without subtasks we won't be able to track information about task execution, and run many subtasks in single task. * Workload - aggregated information about some specific workload (required for reports) as well as information how these workloads are executed in parallel/serial and status of each workload. Without workloads table we won't be able to support multiple workloads per single subtas * WorkloadData - contains chunks of raw data for future data analyze and reporting. 
This is complete information that we don't need always, as well for getting overview of what happened. As we have multiple chunks per Workload, we won't be able to store them without creating this table. * Tag - contains tags bound to tasks and subtasks by uuid and type Task table ~~~~~~~~~~ .. code-block:: id : INT, PK uuid : UUID # Optional deployment_uuid : UUID # Full input task configuration input_task : TEXT title : String description : TEXT # Structure of verification results: # [ # { # "name": , # full validator function name, # # validator plugin name (in the future) # "input": , # smallest part of # "message": , # message with description # "success": , # did validatior pass # "duration": # duration of validation process # }, # ..... # ] validation_result : TEXT # Duration of verification can be used to tune verification process. validation_duration : FLOAT # Duration of benchmarking part of task task_duration : FLOAT # All workloads in the task are passed pass_sla : BOOL # Current status of task status : ENUM(init, validating, validation_failed, aborting, soft_aborting, aborted, crashed, validated, running, finished) Task.status diagram of states ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .. code-block:: INIT -> VALIDATING -> VALIDATION_FAILED -> ABORTING -> ABORTED -> SOFT_ABORTING -> ABORTED -> CRASHED -> VALIDATED -> RUNNING -> FINISHED -> ABORTING -> ABORTED -> SOFT_ABORTING -> ABORTED -> CRASHED Subtask table ~~~~~~~~~~~~~ .. code-block:: id : INT, PK uuid : UUID task_uuid : UUID title : String description : TEXT # Position of Subtask in Input Task position : INT # Context and SLA could be defined both Subtask-wide and per workload context : JSON sla : JSON run_in_parallel : BOOL duration : FLOAT # All workloads in the task are passed pass_sla : BOOL # Current status of task status : ENUM(running, finished, crashed) Workload table ~~~~~~~~~~~~~~ .. 
code-block:: id : INT, PK uuid : UUID subtask_id : INT task_uuid : UUID # Unlike Task's and Subtask's title which is arbitrary # Workload's name defines scenario being executed name : String # Scenario plugin docstring description : TEXT # Position of Workload in Input Task position : INT runner : JSON runner_type : String # Context and SLA could be defined both Subtask-wide and per workload context : JSON sla : JSON args : JSON # SLA structure that contains all detailed info looks like: # [ # { # "name": , # "duration": , # "success": , # "message": , # } #] # sla_results : TEXT # Context data structure (order makes sense) #[ # { # "name": string # "setup_duration": FLOAT, # "cleanup_duration": FLOAT, # "exception": LIST # exception info # "setup_extra": DICT # any custom data # "cleanup_extra": DICT # any custom data # # } #] context_execution : TEXT starttime : TIMESTAMP load_duration : FLOAT full_duration : FLOAT # Shortest and longest iteration duration min_duration : FLOAT max_duration : FLOAT total_iteration_count : INT failed_iteration_count : INT # Statictics data structure (order makes sense) # { # "": { # "min_duration": FLOAT, # "max_duration": FLOAT, # "median_duration": FLOAT, # "avg_duration": FLOAT, # "percentile90_duration": FLOAT, # "percentile95_duration": FLOAT, # "success_count": INT, # "total_count": INT # }, # ... # } statistics : JSON # Aggregated information about actions # As for SLA result pass_sla : BOOL # Profile information collected during the run of scenario # This is internal data and format of it can be changed over time # _profiling_data : Text WorkloadData ~~~~~~~~~~~~ .. 
code-block:: id : INT, PK uuid : UUID workload_id : INT task_uuid : UUID # Chunk order it's used to be able to sort output data chunk_order : INT # Amount of iterations, can be useful for some of algorithms iteration_count : INT # Number of failed iterations failed_iteration_count : INT # Full size of results in bytes chunk_size : INT # Size of zipped results in bytes zipped_chunk_size : INT started_at : TIMESTAMP finished_at : TIMESTAMP # Chunk_data structure # [ # { # "duration": FLOAT, # "idle_duration": FLOAT, # "timestamp": FLOAT, # "errors": LIST, # "output": { # "complete": LIST, # "additive": LIST, # }, # "actions": LIST # }, # ... # ] chunk_data : BLOB # compressed LIST of JSONs Tag table ~~~~~~~~~ .. code-block:: id : INT, PK uuid : UUID of task or subtask type : ENUM(task, subtask) tag : TEXT - (uuid, type, tag) is unique and indexed Open questions ~~~~~~~~~~~~~~ None. Alternatives ------------ None. Implementation ============== Assignee(s) ----------- - boris-42 (?) - ikhudoshyn Milestones ---------- Target Milestone for completion: N/A Work Items ---------- TBD Dependencies ============ - There should be smooth transition of code to work with new data structure rally-0.9.1/doc/specs/implemented/improve_scenario_output_format.rst0000664000567000056710000002613113073417716027274 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/heat/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. 
If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html ======================================= Improvements for scenario output format ======================================= Current implementation of how scenario saves output data is limited and does not meet the needs - it neither allows having more than one data set, nor saving custom data structures by each iteration. There is simply a dict with int values. This specification proposes how this can be significantly improved. Problem description =================== At first, let's clarify types of desired output. Output divides on two main types: additive and complete. *Additive output* requires processing and representation for the whole scenario. For example each iteration has duration - this additive data can be taken from each iteration and analyzed how it changes during the scenario execution. *Complete output* data is completely created by iteration and does not require extra processing. It is related to this specific iteration only. Currently scenario can just return a single dict with int values - this is an additive data only, and it is stored in iteration results according to this schema: .. code-block:: "result": { ... "scenario_output": { "type": "object", "properties": { "data": { "type": "object" }, "errors": { "type": "string" }, }, "required": ["data", "errors"] } } Here are main issues: * single data set - this does not allow to split data (if required) among different sources. For example scenario runs two (or more) third-party tools or scripts but has to put all data into single dict * output is additive only - so its representation makes sense only after putting data from all iterations together. Scenario iteration can not save its own data list that can be processed independently from another iterations. 
* there is no specific data for HTML report generation like chart title and chart type, so report uses hardcoded values. As result, HTML report can represent output by a single chart of single type: .. code-block:: .--------. | Output | -----' '----------- Scenario output -------------------- | | | SINGLE StackedArea | | | -------------------- Proposed change =============== Scenario should have ability to save arbitrary number of both additive and complete output data. This data should include titles and instructions how to be processed and displayed in HTML report. Here is proposed iterations results structure for output data: .. code-block:: "result": { ... "output": { "additive": [ # Each iteration duplicates "title", "description", "chart" and # items keys, however this seems to be less evil than keeping # aggregated metadata on upper level of task results schema. # "chart" is required by HTML report and should be a name of # existent Chart subclass that is responsible for processing # and displaying the data {"title": "How some durations changes during the scenario", "description": "Some details explained here", "chart": "OutputStackedAreaChart", "items": [[, ], ...] # Additive data }, ... # More data if required ], "complete": [ # Complete data from this specific iteration. # "widget" is required by HTML report and should be a name # of chart widget (see details below) that responsible for # displaying data. We do not need to specify "chart" here # because this data does not require processing - it is # already processed and represents a result of Chart.render() {"title": "Interesting data from specific iteration", "description": "Some details explaind here", "widget": "StackedArea", "data": [ [ , [[, ], ...] ], ... ] }, ... 
# More data if required ] } } **NOTES**: * for backward compatibility, data from deprecated "scenario_output" should be transformed into "output/data/additive[0]" on-the-fly (for example if we load task results from file) * as you can see, there is no container *output/errors* - that is because value of *errors* is not used at all and not required (there is another container for errors in iteration results) How scenario saves output data ------------------------------ Scenario should be extended with method *add_output()*: .. code-block:: class Scenario(...): def __init__(self, context=None): ... self._output = {"additive": [], "complete": []} ... def add_output(self, additive=None, complete=None): """Add iteration values for additive output. :param additive: dict with additive output :param complete: dict with complete output :raises RallyException: When additive or complete has wrong format """ for key, value in (("additive", additive), ("complete", complete)): if value: try: jsonschema.validate( value, task.OUTPUT_SCHEMA["properties"][key]["items"]) self._output[key].append(value) except jsonschema.ValidationError: raise exceptions.RallyException( "%s output has wrong format" % key.capitalize()) Here is an example how scenario can save different output: .. code-block:: class SomePlugin(Scenario): def specific_scenario(self): ... self.add_output(additive={"title": "Foo data", "description": "Some words about Foo", "chart": "OutputStackedAreaChart", "items": [["foo 1", 12], ["foo 2", 34]]}) self.add_output(additive={"title": "Bar data", "description": "Some words about Bar", "chart": "OutputAvgChart", "items": [["bar 1", 56], ["bar 2", 78]]}) self.add_output(complete={"title": "Complete data", "description": "Some details here", "widget": "StackedArea", "data": [["foo key", [ ... ]], ... ]}) self.add_output(complete={"title": "Another data", "description": "Some details here", "widget": "Pie", "data": [["bar key", [ ... ]], ... 
]}) self.add_output(complete={"title": "Yet another data", "description": "Some details here", "widget": "Table", "data": [["spam key", [ ... ]], ... ]}) Displaying scenario output in HTML report ----------------------------------------- The following changes are planned for HTML report and charts classes: * rename tab *Output* to *Scenario Data* * implement subtabs under *Scenario Data*: *Aggregated* and *Per iteration* * *Aggregated* subtab shows charts with additive data * *Per iteration* subtab shows charts with complete data, for each iteration * Both subtabs (as well as parent tab) are shown only if there is something to display * add base class OutputChart and generic charts classes for processing output data: OutputStackedAreaChart, OutputAvgChart, OutputStatsTable * add optional *title* and *description* arguments to OutputChart.__init__() so title and description - this is important for custom charts * add *WIDGET* property to each OutputChart subclass to bind it to specific chart widget (StackedArea, Pie, Table). For example, AvgChart will be bound to "Pie". This will allow defining both how to process and how to display some data simply by single class name * update return value format of OutputChart.render() with title and widget: {"title": , "description": , "widget": , "data": [...]} UI sketch for active "Aggregated" subtab: .. code-block:: .---------------. | Scenario Data | ----' '------------------- Aggregated Per iteration ------------- ---------------------------- | | | Any available chart widget | | | ---------------------------- ---------------------------- | | | Any available chart widget | | | ---------------------------- [... more charts] UI sketch for active "Per iteration" subtab, let it be iteration 5 selected by dropdown: .. code-block:: .---------------. 
| Scenario Data | ----' '------------------- Aggregated Per iteration ---------- [iteration 5] ---------------------------- | | | Any available chart widget | | | ---------------------------- ---------------------------- | | | Any available chart widget | | | ---------------------------- [... more charts] Alternatives ------------ None Implementation ============== Assignee(s) ----------- Primary assignee: * amaretskiy Work Items ---------- * Update task results schema with *output* container * Extend Scenario with method *add_output()* * Bound Chart subclasses to specific charts widgets * Add generic Charts subclasses for output data * Changes in HTML report related to *Output* tab * Add scenario with example output data Dependencies ============ None rally-0.9.1/doc/specs/implemented/verification_refactoring.rst0000664000567000056710000007321513073417716026012 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode .. This template should be in ReSTructured text. The filename in the git repository should match the launchpad URL, for example a URL of https://blueprints.launchpad.net/rally/+spec/awesome-thing should be named awesome-thing.rst . Please do not delete any of the sections in this template. If you have nothing to say for a whole section, just write: None For help with syntax, see http://sphinx-doc.org/rest.html To test out your formatting, see http://www.tele3.cz/jbar/rest/rest.html =============================== Refactor Verification Component =============================== Rally Verification was introduced long time ago as an easy way to launch Tempest. It allows to manage(install, uninstall, configure and etc), launch Tempest and process the results(store, compare, displaying in different formats). There is a lot of code related to Verification which can be used not only for Tempest. 
Since `rally verify` was implemented to launch subunit-based applications(Tempest is a such tool), our code is ready to launch whatever we want subunit-frameworks by changing only one var - path to tests. Problem description =================== Rally is a good framework for any kind of testing (performance, functional and etc), so it is pretty sad when we have a lot of hardcode and binding to specific application. * non-pluggable architecture Most of Rally components (for example Task or Deployment) are pluggable. You can easily extend Rally framework for such components. But we cannot say the same about Verification. * subunit-trace ``subunit-trace`` library is used to display the live progress and summary at user-friendly way for each launch of Verification. There are several issues across this library: 1. It is Tempest requirements. It is second time when Rally Verification component uses dependency from Tempest. ``tools/colorizer.py`` was used from Tempest repo before ``subunit-trace``. This script was removed from Tempest which led to breakage of whole Verification stuff. Also, ``rally verify install`` supports ``--source`` option for installing Tempest from non-default repos which can miss ``subunit-trace`` requirement. 2. Bad calculation(for example, skip of whole TestCase means 1 skipped test) * Code duplication To simplify usage of Tempest, it is required to check existence of images, roles, networks and other resources. While implementing these checks, we re-implemented ... "Context" class which is used in Tasks. It was called TempestResourcesContext. * Inner storage based on deployment In case of several deployments and one type of verifier(one repo), Rally creates several directories in ``~/.rally/tempest`` (``for-tempest-`` where is a UUID of deployment). Each of these directories will include same files. The difference only in config files which can be stored wherever we want. 
Also, we have one more directory with the same data - cache directory (``~/.rally/tempest/base``). * Word "Tempest" hardcoded in logging, help messages, etc. Proposed change =============== Most of subunit-based frameworks can be launched in the same way, but they can accept different arguments, different setup steps and so on. .. note:: In further text, we will apply labels "old" for code which was implemented before this spec and "new" for proposed change. Also, all references for old code will be linked to `0.3.3`__ release which is latest release at the time of writing this spec. __ http://rally.readthedocs.org/en/0.3.3/release_notes/archive/v0.3.3.html Declare base Verification entities ---------------------------------- Lets talk about all entities which represents Verification. Old model ~~~~~~~~~ Old implementation uses only one entity - results of a single verification launch. **DB Layer** * `Verification`__ It represents a summary of a single verification launch results. Also, it is linked to full results (see next entity - VerificationResult). __ https://github.com/openstack/rally/blob/0.3.3/rally/common/db/sqlalchemy/models.py#L186 * `VerificationResult`__ The full results of a single launch. Since support of migrations was added recently, not all places are cleared yet, so ``VerificationResults`` can store results in two formats(old and current format). It would be nice to fix it and support only 1 format. __ https://github.com/openstack/rally/blob/0.3.3/rally/common/db/sqlalchemy/models.py#L217 **Object layer** It is a bad practise to provide an access to db stuff directly and we don't do that. ``rally.common.objects`` layer was designed to hide all db related stuff. * `Verification`__ Just represents results. __ https://github.com/openstack/rally/blob/0.3.3/rally/common/objects/verification.py#L28 New model ~~~~~~~~~ We want to support different verifiers and want to identify them, so let's declare three entities: * **Verifier type**. 
The name of entity is a description it self. Each type should be represented by own plugin which implements interface for verification tool. For example, Tempest, Gabbi should be such types. * **Verifier**. An instance of ``verifier type``. I can be described with following options: * *source* - path to git repository of tool. * *system-wide* - whether or not to use the local env instead of virtual environment when installing verifier. * *version* - branch, tag or hash of commit to install verifier from. By default it is "master" branch. * **Verification Results**. Result of a single launch. **DB Layer** * **Verifier**. We should add one more table to store different verifiers. New migration should be added, which check existence verification launches and create "default" verifier(type="Tempest", source="n/a") and map all of launches to it. .. code-block:: class Verifier(BASE, RallyBase): """Represent a unique verifier.""" __tablename__ = "verifiers" __table_args__ = ( sa.Index("verification_uuid", "uuid", unique=True), ) id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) uuid = sa.Column(sa.String(36), default=UUID, nullable=False) deployment_uuid = sa.Column( sa.String(36), sa.ForeignKey(Deployment.uuid), nullable=False, ) name = sa.Column(sa.String(255), unique=True) description = sa.Column(sa.String(1000)) status = sa.Column(sa.Enum(*list(consts.VerifierStatus), name="enum_verifier_status"), default=consts.VerifierStatus.INIT, nullable=False) started_at = sa.Column(sa.DateTime) updated_at = sa.Column(sa.DateTime) type = sa.Column(sa.String(255), nullable=False) settings = info = sa.Column( sa_types.MutableJSONEncodedDict, default={"system-wide": False, "source": "n/a"}, nullable=False, ) * `Verification`__ It should be extended with a link to Verifier. * `VerificationResult`__ We can leave it as it is. 
Move storage from deployment depended logic to verifier ------------------------------------------------------- Old structure of ``~/.rally/tempest`` dir: .. code-block:: yaml base: tempest_base-: # Cached Tempest repository tempest: api api_schema cmd ... ... requirements.txt setup.cfg setup.py ... for-deployment-: # copy-paste of tempest_base- + files and directories listed below .venv # Directory for virtual environment: exists if user didn't # specify ``--system-wide`` argument while tempest # installation (``rally verify install`` command). tempest.conf # Only this file is unique for each deployment. It stores # Tempest configuration. subunit.stream # Temporary result-file produced by ``rally verify start``. As you can see there are a lot of copy-pasted repositories and little unique data. New structure(should be located in ``~/.rally/verifiers``): .. code-block:: yaml verifier-: # Storage for unique verifier. is a uuid of verifier. repo: # Verifier code repository. It is same for all deployments. Also one # virtual environment can be used across all deployment too. ... for-deployment-: # Folder to store unique for deployment data. is a deployment uuid # here. Currently we have only configuration file to store, but lets # reserve place to store more data. settings.conf ... Each registered verifier is a unique entity for Rally and can be used by all deployments. If there is deployment specific data(for example, configuration file) required for verifier, it should be stored separately from verifier. Command line interface ---------------------- `rally verify` commands are not so hardcoded as other parts of Verification component, but in the same time they are not flexible. Old commands: .. code-block:: none compare Compare two verification results. detailed Display results table of a verification with detailed errors. discover Show a list of discovered tests. genconfig Generate Tempest configuration file. 
import Import Tempest tests results into the Rally database. install Install Tempest. list List verification runs. reinstall Uninstall Tempest and install again. results Display results of a verification. show Display results table of a verification. showconfig Show configuration file of Tempest. start Start verification (run Tempest tests). uninstall Remove the deployment's local Tempest installation. use Set active verification. There is another problem of old CLI. Management is split across all commands and you can do the same things via different commands. Moreover, you can install Tempest in virtual environment via ``rally verify install`` and use ``--system-wide`` option in ``rally verify start``. Lets provide more strict CLI. Something like: .. code-block:: none list-types create-verifier delete-verifier list-verifiers update-verifier extend-verifier use-verifier configure discover start compare export import list show use list-types ~~~~~~~~~~ Verifiers types should be implemented on base Rally plugin mechanism. It allow to not create types manually, Rally will automatically load them and user will need only interface to list them. create-verifier ~~~~~~~~~~~~~~~ Just creates a new verifier based on type. Example: .. code-block:: bash $ rally verify create-verifier tempest-mitaka --type tempest --source "https://git.openstack.org/openstack/tempest" --version "10.0.0" --system-wide This command should process next steps: 1. Clone Tempest repository from "https://git.openstack.org/openstack/tempest"; 2. Call ``git checkout 10.0.0``; 3. Check that all requirements from requirements.txt are satisfied; 4. Put new verifier as default one Also, it would be nice to store verifier statuses like "Init", "Ready-to-use", "Failed", "Updating". delete-verifier ~~~~~~~~~~~~~~~ Deletes verifier virtual environment(if it was created), repository, deployment specific files(configuration files). Also, it will remove verification results produced by this verifier. 
list-verifiers ~~~~~~~~~~~~~~ List all available verifiers. update-verifier ~~~~~~~~~~~~~~~ This command gives ability to update git repository(``git pull`` or ``git checkout``) or start/stop using virtual environment. Also, configuration file can be update via this interface. extend-verifier ~~~~~~~~~~~~~~~ Verifier can have an interface to extend itself. For example, Tempest supports plugins. For verifiers which do not support any extend-mechanism, lets print user-friendly message. use-verifier ~~~~~~~~~~~~ Choose the default verifier. configure ~~~~~~~~~ An interface to configure verifier for an specific deployment. Usage examples: .. code-block:: bash # At this step we assume that configuration file was not created yet. # Create configuration file and show it. $ rally verify configure # Configuration file already exists, so just show it. $ rally verify configure # Recreate configuration file and show it $ rally verify configure --renew # Recreate configuration file using predefined configuration options and # show it. # via json: $ rally verify configure --renew \ > --options '{"section_name": {"some_key": "some_var"}}' # via config file, which can be json/yaml or ini format: $ rally verify configure --renew --options ~/some_file.conf # Replace configuration file by another file and show it $ rally verify configure --replace ./some_config.conf Also, we can provide ``--silent`` option to disable ``show`` action. discover ~~~~~~~~ Discover and list tests. start ~~~~~ Start verification. Basically, there is no big difference between launching different verifiers. Current arguments: ``--set``, ``--regex``, ``--tests-file``, ``xfails-file``, ``--failing``. Argument ``--set`` is specific for Tempest. Each verifier can have specific search arguments. Lets introduce new argument ``--filter-by``. In this case, set_name for Tempest can be specified like ``--filter-by set=smoke``. compare ~~~~~~~ Compare two verification results. 
export ~~~~~~ Part of `Export task and verifications into external services`__ spec __ https://github.com/openstack/rally/blob/0.3.2/doc/specs/in-progress/task_and_verification_export.rst import ~~~~~~ Import outer results in Rally database. list ~~~~ List all verifications results. show ~~~~ Show verification results in different formats. Refactor base classes --------------------- Old implementation includes several classes: * Main class **Tempest**. This class combines manage and launch logic. .. code-block:: python # Description of a public interface(all implementation details are skipped) class Tempest(object): base_repo_dir = os.path.join(os.path.expanduser("~"), ".rally/tempest/base") def __init__(self, deployment, verification=None, tempest_config=None, source=None, system_wide=False): pass @property def venv_wrapper(self): """This property returns the command for activation virtual environment. It is hardcoded on tool from Tempest repository: https://github.com/openstack/tempest/blob/10.0.0/tools/with_venv.sh We should remove this hardcode in new implementation.""" @property def env(self): """Returns a copy of environment variables with addition of pathes to tests""" def path(self, *inner_path): """Constructs a path for inner files of ~/.rally/tempest/for-deployment- """ @property def base_repo(self): """The structure of ~/.rally/tempest dir was changed several times. This method handles the difference.""" def is_configured(self): pass def generate_config_file(self, override=False): """Generate configuration file of Tempest for current deployment. 
:param override: Whether or not to override existing Tempest config file """ def is_installed(self): pass def install(self): """Creates local Tempest repo and virtualenv for deployment.""" def uninstall(self): """Removes local Tempest repo and virtualenv for deployment.""" def run(self, testr_args="", log_file=None, tempest_conf=None): """Run Tempest.""" def discover_tests(self, pattern=""): """Get a list of discovered tests. :param pattern: Test name pattern which can be used to match """ def parse_results(self, log_file=None, expected_failures=None): """Parse subunit raw log file.""" def verify(self, set_name, regex, tests_file, expected_failures, concur, failing): """Launch verification and save results in database.""" def import_results(self, set_name, log_file): """Import outer subunit-file to Rally database""" def install_plugins(self, *args, **kwargs): """Install Tempest plugin.""" * class ``TempestConfig`` was designed to obtain all required settings from OpenStack public API and generate configuration file. It has not-bad interface (just ``init`` and ``generate`` public methods), but implementation can be better(init method should not start obtaining data). * class ``TempestResourcesContext`` looks like context which we have for Task component. ``TempestConfig`` and ``TempestResourcesContext`` are help classes and in new implementation they will be optional. New implementation should looks like: * ``VerifierManager``. It is a main class which represents a type of Verifier and provide an interface for all management stuff(i.e. install, update, delete). Also, it should be an entry-point for configuration and extend-mechanism which are optional. * ``VerifierLauncher``. It takes care about deployment's task - preparation and launching verification and so on. * ``VerifierContext``. The inheritor of rally.task.context.Context class with hardcoded "hidden=True" value, since it should be inner helper class. * ``VerifierSettings``. 
Obtains required data from public APIs and constructs deployment specific configuration files for Verifiers. Proposed implementation will be described below in `Implementation`_ section. Remove dependency from external libraries and scripts ----------------------------------------------------- Currently our verification code has two redundant dependencies: * subunit-trace * /tools/with_venv.sh subunit-trace ~~~~~~~~~~~~~ It should not be a hard task to remove this dependency. With small modifications ``rally.common.io.subunit.SubunitV2StreamResult`` can print live progress. Also, we an print summary info based on parsed results. with_venv.sh script ~~~~~~~~~~~~~~~~~~~ It is tempest in-tree script. Its logic is too simple - just activate virtual environment and execute transmitted cmd in it. I suppose that we can rewrite this script in python and put it to Verification component. Alternatives ------------ Stop development of Rally Verification. Implementation ============== Implementation details ---------------------- Below you can find an example of implementation. It contains some implementation details and notes for future development. .. note:: Proposed implementation is not ideal and not finished. It should be reviewed without nits. rally.common.objects.Verifier ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Basically, it will be the same design as `rally.common.objects.Verification`__. There is no reasons to store old class. ``Verifier`` interface should be enough. __ https://github.com/openstack/rally/blob/0.3.3/rally/common/objects/verification.py#L28 VerifierManager ~~~~~~~~~~~~~~~ .. 
code-block:: python import os import shutil import subprocess from rally.common.plugin import plugin class VerifierManager(plugin.Plugin): def __init__(self, verifier): """Init manager :param verifier: `rally.common.objects.Verifier` instance """ self.verifier = self.verifier @property def home_dir(self): """Home directory of verifier""" return "~/.rally/verifier-%s" % self.verifier.id @property def repo_path(self): """Path to local repository""" return os.path.join(self.home_dir, "repo") def mkdirs(self): """Create all directories""" if not self.home_dir: os.mkdir(self.home_dir) deployment_path = os.path.join( base_path, "for-deployment-%s" % self.deployment.id)) if not deployment_path: os.mkdir(deployment_path) def _clone(self): """Clone and checkout git repo""" self.mkdirs() source = self.verifier.source or self._meta_get("default_repo") subprocess.check_call(["git", "clone", source, self.repo_path]) version = self.verifier.version or self._meta_get("default_version") if version: subprocess.check_call(["git", "checkout", version], cwd=self.repo_path) def _install_virtual_env(self): """Install virtual environment and all requirement in it.""" if os.path.exists(os.path.join(self.repo_path, ".venv")): # NOTE(andreykurilin): It is necessary to remove old env while # processing update action shutils.rmtree(os.path.join(self.repo_path, ".venv")) # TODO(andreykurilin): make next steps silent and print output only # on failure or debug subprocess.check_output(["virtualenv", ".venv"], cwd=self.repo_path) # TODO: install verifier and its requirements here. def install(self): if os.path.exists(self.home_dir): # raise a proper exception raise Exception() self._clone() if system_wide: # There are several ways to check requirements. It can be done # at least via two libraries: `pip`, `pkgutils`. The code below # bases on `pip`, but it can be changed for better solution while # implementation. 
import pip requirements = set(pip.req.parse_requirements( "%s/requirements.txt" % self.repo_path, session=False)) installed_packages = set(pip.get_installed_distributions()) missed_packages = requirements - installed_packages if missed_packages: # raise a proper exception raise Exception() else: self._install_virtual_env() def delete(self): """Remove all""" shutils.rmtree(self.home_dir) def update(self, update_repo=False, version=None, update_venv=False): """Update repository, version, virtual environment.""" pass def extend(self, *args, **kwargs): """Install verifier extensions. .. note:: It is an optional interface, so it raises UnsupportedError by-default. If specific verifier needs this interface, it should just implement it. """ raise UnsupportedAction("%s verifier is not support extensions." % self.get_name()) For example, the implementation of verifier for Tempest will need to implement only one method ``extend``: .. code-block:: python @configure("tempest_manager", default_repo="https://github.com/openstack/tempest", default_version="master", launcher="tempest_launcher") class TempestManager(VerifierManager): def extend(self, *args, **kwargs): """Install tempest-plugin.""" pass VerifierLauncher ~~~~~~~~~~~~~~~~ .. 
code-block:: python import os import subprocess from rally.common.io import subunit_v2 from rally.common.plugin import plugin class EmptyContext(object): """Just empty default context.""" def __init__(self, verifier, deployment): pass def __enter__(self): return def __exit__(self, exc_type, exc_value, exc_traceback): # do nothing return class VerifierLauncher(plugin.Plugin): def __init__(self, deployment, verifier): """Init launcher :param deployment: `rally.common.objects.Deployment` instance :param verifier: `rally.common.objects.Verifier` instance """ self.deployment = deployment self.verifier = self.verifier @property def environ(self): """Customize environment variables.""" return os.environ.copy() @property def _with_venv(self): """Returns arguments for activation virtual environment if needed""" if self.verifier.system_wide: return [] # FIXME(andreykurilin): Currently, we use "tools/with_venv.sh" script # from Tempest repository. We should remove this dependency. return ["activate-venv"] @property def context(self): ctx = self._meta_get("context") if ctx: ctx = VerifierContext.get(ctx) return ctx or EmptyContext def configure(self, override=False): # by-default, verifier doesn't support this method raise NotImplementedError def configure_if_necessary(self): """Check existence of config file and create it if necessary.""" pass def transform_kwargs(self, **kwargs): """Transform kwargs into the list of testr arguments.""" args = ["--subunit", "--parallel"] if kwargs.get("concurrency"): args.append("--concurrency") args.append(kwargs["concurrency"]) if kwargs.get("re_run_failed"): args.append("--failing") if kwargs.get("file_with_tests"): args.append("--load-list") args.append(os.path.abspath(kwargs["file_with_tests"])) if kwargs.get("regexp"): args.append(kwargs["regexp"]) return args def run(self, regexp=None, concurrency=None, re_run_failed=False, file_with_tests=None): self.configure_if_necessary() cmd = [self._with_venv, "testr", "run"] 
cmd.extend(self.transform_kwargs( regexp=regexp, concurrency=concurrency, re_run_failed=re_run_failed, file_with_tests=file_with_tests)) with self.context(self.deployment, self.verifier): verification = subprocess.Popen( cmd, env=self.environ(), cwd=self.verifier.manager.home_dir, stdout=subprocess.PIPE, stderr=subprocess.stdout) results = subunit_v2.parse(verification.stdout, live=True) verification.wait() return results An example of VerifierLauncher for Tempest: .. code-block:: python @configure("tempest_verifier") class TempestLauncher(VerifierLauncher): @property def configfile(self): return os.path.join(self.verifier.manager.home_dir, "for-deployment-%s" % self.deployment.id, "tempest.conf") @property def environ(self): """Customize environment variables.""" env = super(TempestLauncher, self).environ env["TEMPEST_CONFIG_DIR"] = os.path.dirname(self.configfile) env["TEMPEST_CONFIG"] = os.path.basename(self.configfile) env["OS_TEST_PATH"] = os.path.join(self.verifier.manager.home_dir, "tempest", "test_discover") return env def configure(self, override=False): if os.path.exists(self.configfile): if override: os.remove(self.configfile) else: raise AlreadyConfiguredException() # Configure Tempest. def configure_if_necessary(self): try: self.configure() except AlreadyConfiguredException: # nothing to do. everything is ok pass def run(self, set_name, **kwargs): if set_name == "full": pass elif set_name in consts.TempestTestsSets: kwargs["regexp"] = set_name elif set_name in consts.TempestTestsAPI: kwargs["regexp"] = "tempest.api.%s" % set_name super(TempestLauncher, self).run(**kwargs) VerifierContext ~~~~~~~~~~~~~~~ .. 
code-block:: python from rally import osclients from rally.task import context class VerifierContext(context.Context): def __init__(self, **ctx): super(VerifierContext, self).__init__(ctx) # There are no terms "task" and "scenario" in Verification del self.task del self.map_for_scenario self.clients = osclients(self.context["deployment"].credentials) @classmethod def _meta_get(cls, key, default=None): # It should be always hidden if key == "hidden": return True return super(VerifierContext, cls)._meta_get(key, default) Example of context for Tempest: .. code-block:: python @configure("tempest_verifier_ctx") class TempestContext(VerifierContext): def __init__(self, **kwargs): super(TempestContext, self).__init__(**kwargs) self.clients = osclients(self.context["deployment"].credentials) def setup(self): # create required resources and save them to self.context pass def cleanup(self): # remove created resources pass Assignee(s) ----------- Primary assignee: Andrey Kurilin Work Items ---------- 1) CLI and API related changes. Lets provide new interface as soon as possible, even if some APIs will not be implemented. As soon we deprecate old interface as soon we will be able to remove it and provide clear new one. 2) Provide base classes for Verifiers 3) Rewrite Tempest verifier based on new classes. Dependencies ============ None rally-0.9.1/doc/specs/implemented/hook_plugins.rst0000664000567000056710000001721013073417716023437 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ======================= New Plugins Type - Hook ======================= Problem description =================== Rally lacks a plugin type that would run some code on specified iteration. New plugin type is required for reliability testing of OpenStack. 
This type of plugin would give an ability to activate factors on some iteration and provide timestamps and some info about executed actions to rally report. Proposed change =============== Add a new section to task config: Schema of hook section allows to specify number of iteration and a list of hook plugins that should be executed on this iteration. .. code:: json { "KeystoneBasic.create_delete_user": [ { "args": {}, "runner": { "type": "constant", "times": 100, "concurrency": 10 }, "hook": [ # new section { "name": "example_hook", "args": { "cmd": "bash enable_factor_1" }, "trigger": { "name": "event", "args": { "unit": "time", "at": [1, 50, 100] # seconds since start } } }, { "name": "example_hook", "args": { "cmd": "bash enable_factor_2" }, "trigger": { "name": "event", "args": { "unit": "iteration", "at": [35, 40, 45] # iteration numbers } } }, { "name": "example_hook", "args": { "cmd": "bash enable_factor_3" }, "trigger": { "name": "periodic", "args": { "unit": "iteration", "step": 20, # execute hook each 20 iterations "start": 0, "end": 1000 } } }, { "name": "example_hook", "args": { "cmd": "bash enable_factor_4" }, "trigger": { "name": "periodic", "args": { "unit": "time", "step": 15, # execute hook each 15 seconds "start": 100, "end": 200 } } } ] } ] } Add a new base class for such plugins, that should: - contain common logic for schema validation - save timestamps when "run" method started/finished - provide abstract method 'run' which should be implemented in plugins this method should be called after specified iteration has been executed Add new classes for trigger plugins, that should: - contain validation schema for its configuration - contain "get_listening_event" and "on_event" methods Trigger plugin classes should: - implement "get_listening_event" methods that return events to listen - implement "on_event" methods that check event type and value; launch hook if needed Add HookExecuter class to run hook plugins, that should: - control when to run 
a hook specified in config - receive result of hook execution from hook plugin - return a full result of hook execution in the following format: .. code:: json [{ # this is config of specific hook; it should not be empty! "config": {...}, "results":[ { # value is time in seconds "triggered_by": {"event_type": "iteration", "value": 20}, "started_at": 1470331269.134323, "finished_at": 1470331319.761103, "status": "success", # same output format as in scenarios; this key can be missed # if no output was added "output": {} } ], "summary": {"success": 1} }] Modify ResultConsumer, that should: - control HookExecuter and provide info about iterations - add a full result to TaskResult Example code of base class: .. code:: python @plugin.base() @six.add_metaclass(abc.ABCMeta) class Hook(plugin.Plugin): @classmethod def validate(cls, config): # schema validation pass def __init__(self, config): self.config = config @abc.abstractmethod def run(self): pass example_hook class: .. code:: python @hook.configure(name="example_hook") class ExampleHook(hook.Hook): CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "cmd": { "type": "string" }, "required": [ "cmd", ], "additionalProperties": False, } def __init__(self, config): super(ExampleHook, self).__init__(config) self.cmd = self.config["cmd"] def run(self): # do some action rc = os.system(self.cmd) Example of hook result that goes to TaskResult (list of dicts): .. code:: python [{ # this is config of specific hook; it should not be empty! 
"config": {...}, "results":[ { "triggered_by": {"event_type": "iteration", "value": 20}, "started_at": 1470331269.134323, "finished_at": 1470331319.761103, "status": "success", # same output format as in scenarios; this key can be missed # if no output was added "output": {} }, { # value is time in seconds "triggered_by": {"event_type": "time", "value": 150.0}, "started_at": 1470331270.352342, "finished_at": 1470331333.623303, "status": "failed", "error": { "etype": "Exception", # type of exception "msg": "exception message", # additional information to help (for example, traceback) "details": "" } } ], "summary": {"success": 1, "failed": 1} }] Alternatives ------------ Use sla section for such plugins, but this looks weird Implementation ============== Assignee(s) ----------- Primary assignee: - astudenov - ylobankov - amaretskiy Work Items ---------- - Implement new section in task config - Add example of hook plugin that runs specified command as subprocess - Add trigger plugins for iterations - Add trigger plugins for time - Add hooks results into HTML report Dependencies ============ None rally-0.9.1/doc/specs/implemented/sla_pd_plugin.rst0000664000567000056710000000267213073417716023564 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ================================== SLA Performance degradation plugin ================================== Problem description =================== During density and reliability testing of OpenStack with Rally we observed test cases, during execution of which performance of OpenStack cluster has been drammatically degradated. Proposed change =============== Develop a new Rally SLA plugin: *performance_degradation* This SLA plugin should find minimum and maximum duration of iterations completed without errors during Rally task execution. 
Assuming that minimum duration is 100%, it should calculate performance degradation against maximum duration. SLA plugin results: - failure if performance degradation is more than value set in plugin's max_degradation parameter; - success if degradation is less - performance degradation value as a percentage. How to enable this plugin: .. code:: json "sla": { "performance_degradation": { "max_degradation": 50 } } Alternatives ------------ None Implementation ============== Assignee(s) ----------- Primary assignee: anevenchannyy Work Items ---------- - Implement plugin - Add non-voting job with this plugin to the most important OpenStack services Dependencies ============ None rally-0.9.1/doc/specs/implemented/README.rst0000664000567000056710000000066113073417716021675 0ustar jenkinsjenkins00000000000000Rally Specs =========== Specs are detailed description of proposed changes in project. Usually they answer on what, why, how to change in project and who is going to work on change. This directory contains files with implemented specs, 1 file is 1 spec. If you are looking for full rally road map overview go `here `_. rally-0.9.1/doc/specs/implemented/consistent_resource_names.rst0000664000567000056710000001062113073417716026220 0ustar jenkinsjenkins00000000000000.. This work is licensed under a Creative Commons Attribution 3.0 Unported License. http://creativecommons.org/licenses/by/3.0/legalcode ========================= Consistent Resource Names ========================= To facilitate better cleanup of ephemeral resources created by Rally, random resource names need to be consistently used across all scenarios and all plugins. Additionally, to support Rally's use against systems other than OpenStack, plugins need greater control over both the format and the list of characters used in generating resource names. 
Problem description =================== Currently we use a few different cleanup mechanisms, some of which (Keystone) use resource names, while most others use tenant membership. As a result, if Rally is interrupted before cleanup completes it may not be possible to know which resources were created by Rally (and thus should be cleaned up after the fact). Random names are generated from a fairly limited set of digits and ASCII letters. This should be configurable by each plugin, along with all other parts of the random name, in order to support benchmarking systems other than OpenStack, which may have different naming restrictions. Finally, each Rally task should include some consistent element in its resource names, distinct from other Rally tasks, to support multiple independent Rally runs and cleanup. Proposed change =============== Random names will consist of three components: * A random element derived from the task ID that is the same for all random names in the task; * A random element that should be different for all names in the task; and * Any amount of formatting as determined by the plugin. The format of the random name will be given by a class variable, ``RESOURCE_NAME_FORMAT``, on each scenario and context plugin. This variable is a ``mktemp(1)``-like string that describes the format; the default for scenario plugins will be:: RESOURCE_NAME_FORMAT = "s_rally_XXXXXXXX_XXXXXXXX" And for context plugins:: RESOURCE_NAME_FORMAT = "c_rally_XXXXXXXX_XXXXXXXX" The format must have two separate sets of at least three consecutive 'X's. (That is, they must match: ``^.*(? Work Items ---------- - Update task.engine and task.processing for class-based scenarios - Transform all Dummy scenarios into class-based implementations as first stage of usage class-based scenarios. 
Dependencies ============ None rally-0.9.1/doc/specs/README.rst0000664000567000056710000000101213073417716017361 0ustar jenkinsjenkins00000000000000Rally Specs =========== Specs are detailed description of proposed changes in project. Usually they answer on what, why, how to change in project and who is going to work on change. This directory contains 2 subdirectories: - in-progress - These specs are approved, but they are not implemented yet - implemented - Implemented specs archive If you are looking for full rally road map overview go `here `_. rally-0.9.1/doc/source/0000775000567000056710000000000013073420067016054 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/conf.py0000664000567000056710000002254113073417720017361 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Rally documentation build configuration file, created by # sphinx-quickstart on Fri Jan 10 23:19:18 2014. # # This file is execfile() with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # auto-generated file. # # All configuration values have a default; values that are commented out # serve to show the default. import datetime import os import subprocess import sys import rally.common.version # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.extend([ # os.path.abspath("../.."), # os.path.abspath("../"), # os.path.abspath("./") #]) sys.path.insert(0, os.path.abspath('../../')) sys.path.insert(0, os.path.abspath('../')) sys.path.insert(0, os.path.abspath('./')) # -- General configuration ---------------------------------------------------- on_rtd = os.environ.get('READTHEDOCS') == 'True' # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = "1.0" # Add any Sphinx extension module names here, as strings. # They can be extensions coming with Sphinx (named "sphinx.ext.*") or your # custom ones. extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.todo", "sphinx.ext.coverage", "sphinx.ext.ifconfig", "sphinx.ext.viewcode", "ext.cli_reference", "ext.plugin_reference", "ext.include_vars" ] todo_include_todos = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # The suffix of source filenames. source_suffix = ".rst" # The encoding of source files. # source_encoding = "utf-8-sig" # The master toctree document. master_doc = "index" # General information about the project. project = u"Rally" copyright = u"%d, OpenStack Foundation" % datetime.datetime.now().year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = rally.common.version.version_string() # The full version, including alpha/beta/rc tags. release = rally.common.version.version_string() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = "" # Else, today_fmt is used as the format for a strftime call. # today_fmt = "%B %d, %Y" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [ "feature_request/README.rst", "samples/README.rst", "**/README.rst" ] # The reST default role (used for this markup: `text`) to use for all documents # default_role = None # If true, "()" will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. if not on_rtd: html_theme = "openstackrally" else: html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. if not on_rtd: import oslosphinx theme_dir = os.path.join(os.path.dirname(oslosphinx.__file__), 'theme') html_theme_path = [theme_dir, "_templates"] else: html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". if not on_rtd: html_static_path = ["_templates/openstackrally/_static"] else: html_static_path = [] # If not "", a "Last updated on:" timestamp is inserted at every page bottom, # using the given strftime format. git_cmd = [ "git", "log", "--pretty=format:'%ad, commit %h'", "--date=local", "-n1"] html_last_updated_fmt = subprocess.Popen( git_cmd, stdout=subprocess.PIPE).communicate()[0] # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. html_use_smartypants = False # Custom sidebar templates, maps document names to template names. html_sidebars = {'**': ['searchbox.html', 'globaltoc.html']} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = "" # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = "%sdoc" % project # -- Options for LaTeX output ------------------------------------------------- latex_elements = { # The paper size ("letterpaper" or "a4paper"). #"papersize": "letterpaper", # The font size ("10pt", "11pt" or "12pt"). #"pointsize": "10pt", # Additional stuff for the LaTeX preamble. #"preamble": "", } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]) latex_documents = [ ("index", "%s.tex" % project, u"%s Documentation" % project, u"OpenStack Foundation", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output ------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). # man_pages = [ # ("index", "rally", u"Rally Documentation", # [u"Rally Team"], 1) # ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ----------------------------------------------- # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ("index", "Rally", u"Rally Documentation", u"Rally Team", "Rally", "Testing framework and tool for all kinds of tests", "Development"), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: "footnote", "no", or "inline". # texinfo_show_urls = "footnote" rally-0.9.1/doc/source/contribute.rst0000664000567000056710000001611213073417716020774 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _contribute: Contribute to Rally =================== Where to begin -------------- Please take a look `our Roadmap`_ to get information about our current work directions. In case you have questions or want to share your ideas, be sure to contact us either at `Rally-dev/Lobby`_ channel on **Gitter** messenger (or, less preferably, at the ``#openstack-rally`` IRC channel on **irc.freenode.net**). If you are going to contribute to Rally, you will probably need to grasp a better understanding of several main design concepts used throughout our project (such as **benchmark scenarios**, **contexts** etc.). To do so, please read :ref:`this article `. How to contribute ----------------- 1. You need a `Launchpad`_ account and need to be joined to the `OpenStack team`_. 
You can also join the `Rally team`_ if you want to. Make sure Launchpad has your SSH key, Gerrit (the code review system) uses this. 2. Sign the CLA as outlined in the `account setup`_ section of the developer guide. 3. Tell git your details: .. code-block:: bash git config --global user.name "Firstname Lastname" git config --global user.email "your_email@youremail.com" 4. Install git-review. This tool takes a lot of the pain out of remembering commands to push code up to Gerrit for review and to pull it back down to edit it. It is installed using: .. code-block:: bash pip install git-review Several Linux distributions (notably Fedora 16 and Ubuntu 12.04) are also starting to include git-review in their repositories so it can also be installed using the standard package manager. 5. Grab the Rally repository: .. code-block:: bash git clone git@github.com:openstack/rally.git 6. Checkout a new branch to hack on: .. code-block:: bash git checkout -b TOPIC-BRANCH 7. Start coding 8. Run the test suite locally to make sure nothing broke, e.g. (this will run py34/py27/pep8 tests): .. code-block:: bash tox **(NOTE: you should have installed tox<=1.6.1)** If you extend Rally with new functionality, make sure you have also provided unit and/or functional tests for it. 9. Commit your work using: .. code-block:: bash git commit -a Make sure you have supplied your commit with a neat commit message, containing a link to the corresponding blueprint / bug, if appropriate. 10. Push the commit up for code review using: .. code-block:: bash git review -R That is the awesome tool we installed earlier that does a lot of hard work for you. 11. Watch your email or `review site`_, it will automatically send your code for a battery of tests on our `Jenkins setup`_ and the core team for the project will review your code. If there are any changes that should be made they will let you know. 12. When all is good the review site will automatically merge your code. 
(This tutorial is based on: http://www.linuxjedi.co.uk/2012/03/real-way-to-start-hacking-on-openstack.html) Testing ------- Please, don't hesitate to write tests ;) Unit tests ^^^^^^^^^^ *Files: /tests/unit/** The goal of unit tests is to ensure that internal parts of the code work properly. All internal methods should be fully covered by unit tests with a reasonable mocks usage. About Rally unit tests: - All `unit tests`_ are located inside /tests/unit/* - Tests are written on top of: *testtools* and *mock* libs - `Tox`_ is used to run unit tests To run unit tests locally: .. code-block:: console $ pip install tox $ tox To run py34, py27 or pep8 only: .. code-block:: console $ tox -e #NOTE: is one of py34, py27 or pep8 To run a single unit test e.g. test_deployment .. code-block:: console $ tox -e -- #NOTE: is one of py34, py27 or pep8 # is the unit test case name, e.g tests.unit.test_osclients To debug issues on the unit test: - Add breakpoints on the test file using ``import pdb;`` ``pdb.set_trace()`` - Then run tox in debug mode: .. code-block:: console $ tox -e debug #NOTE: use python 2.7 #NOTE: is the unit test case name or .. code-block:: console $ tox -e debug34 #NOTE: use python 3.4 #NOTE: is the unit test case name To get test coverage: .. code-block:: console $ tox -e cover #NOTE: Results will be in /cover/index.html To generate docs: .. code-block:: console $ tox -e docs #NOTE: Documentation will be in doc/source/_build/html/index.html Functional tests ^^^^^^^^^^^^^^^^ *Files: /tests/functional/** The goal of `functional tests`_ is to check that everything works well together. Functional tests use Rally API only and check responses without touching internal parts. To run functional tests locally: .. 
code-block:: console $ source openrc $ rally deployment create --fromenv --name testing $ tox -e cli #NOTE: openrc file with OpenStack admin credentials Output of every Rally execution will be collected under some reports root in directory structure like: reports_root/ClassName/MethodName_suffix.extension This functionality implemented in tests.functional.utils.Rally.__call__ method. Use 'gen_report_path' method of 'Rally' class to get automatically generated file path and name if you need. You can use it to publish html reports, generated during tests. Reports root can be passed throw environment variable 'REPORTS_ROOT'. Default is 'rally-cli-output-files'. Rally CI scripts ^^^^^^^^^^^^^^^^ *Files: /tests/ci/** This directory contains scripts and files related to the Rally CI system. Rally Style Commandments ^^^^^^^^^^^^^^^^^^^^^^^^ *Files: /tests/hacking/* This module contains Rally specific hacking rules for checking commandments. For more information about Style Commandments, read the `OpenStack Style Commandments manual`_. .. references: .. _our Roadmap: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g/edit#gid=0 .. _Rally-dev/Lobby: https://gitter.im/rally-dev/Lobby .. _Launchpad: https://launchpad.net/ .. _OpenStack team: https://launchpad.net/openstack .. _Rally team: https://launchpad.net/rally .. _account setup: http://docs.openstack.org/infra/manual/developers.html#development-workflow .. _review site: http://review.openstack.org/ .. _Jenkins setup: http://jenkins.openstack.org/ .. _unit tests: http://en.wikipedia.org/wiki/Unit_testing .. _Tox: https://tox.readthedocs.org/en/latest/ .. _functional tests: https://en.wikipedia.org/wiki/Functional_testing .. 
_OpenStack Style Commandments manual: http://docs.openstack.org/developer/hacking/ rally-0.9.1/doc/source/install_and_upgrade/0000775000567000056710000000000013073420067022053 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/install_and_upgrade/db_migrations.rst0000664000567000056710000000526113073417716025441 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _db_migrations: Database upgrade in Rally ========================= Information for users --------------------- Rally supports DB schema versioning (schema versions are called *revisions*) and migration (upgrade to the latest revision). End user is provided with the following possibilities: - Print current revision of DB. .. code-block:: shell rally-manage db revision - Upgrade existing DB to the latest state. This is needed when previously existing Rally installation is being upgraded to a newer version. In this case user should issue command .. code-block:: shell rally-manage db upgrade **AFTER** upgrading Rally package. DB schema will get upgraded to the latest state and all existing data will be kept. **WARNING** Rally does NOT support DB schema downgrade. One should consider backing up existing database in order to be able to rollback the change. Information for developers -------------------------- DB migration in Rally is implemented via package *alembic*. 
It is highly recommended to get familiar with it's documentation available by the link_ before proceeding. If developer is about to change existing DB schema they should create a new DB revision and a migration script with the following command. .. code-block:: shell alembic --config rally/common/db/sqlalchemy/alembic.ini revision -m or .. code-block:: shell alembic --config rally/common/db/sqlalchemy/alembic.ini revision --autogenerate -m It will generate migration script -- a file named `_.py` located in `rally/common/db/sqlalchemy/migrations/versions`. Alembic with parameter ``--autogenerate`` makes some "routine" job for developer, for example it makes some SQLite compatible batch expressions for migrations. Generated script should then be checked, edited if it is needed to be and added to Rally source tree. **WARNING** Even though alembic supports schema downgrade, migration scripts provided along with Rally do not contain actual code for downgrade. .. references: .. _link: https://alembic.readthedocs.org rally-0.9.1/doc/source/install_and_upgrade/install.rst0000664000567000056710000001460113073417716024264 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _install: Installation process ==================== Automated installation ---------------------- The easiest way to install Rally is by executing its `installation script`_ .. 
code-block:: bash wget -q -O- https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh | bash # or using curl curl https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh | bash The installation script will also check if all the software required by Rally is already installed in your system; if run as **root** user and some dependency is missing it will ask you if you want to install the required packages. By default it will install Rally in a virtualenv in ``~/rally`` when run as standard user, or install system wide when run as root. You can install Rally in a **venv** by using the option ``--target``: .. code-block:: bash ./install_rally.sh --target /foo/bar You can also install Rally system wide by running script as root and without ``--target`` option: .. code-block:: bash sudo ./install_rally.sh Run ``./install_rally.sh`` with option ``--help`` to have a list of all available options: .. code-block:: console $ ./install_rally.sh --help Usage: install_rally.sh [options] This script will install rally either in the system (as root) or in a virtual environment. Options: -h, --help Print this help text -v, --verbose Verbose mode -s, --system Instead of creating a virtualenv, install as system package. -d, --target DIRECTORY Install Rally virtual environment into DIRECTORY. (Default: $HOME/rally). -f, --overwrite Remove target directory if it already exists. -y, --yes Do not ask for confirmation: assume a 'yes' reply to every question. -D, --dbtype TYPE Select the database type. TYPE can be one of 'sqlite', 'mysql', 'postgres'. Default: sqlite --db-user USER Database user to use. Only used when --dbtype is either 'mysql' or 'postgres'. --db-password PASSWORD Password of the database user. Only used when --dbtype is either 'mysql' or 'postgres'. --db-host HOST Database host. Only used when --dbtype is either 'mysql' or 'postgres' --db-name NAME Name of the database. 
Only used when --dbtype is either 'mysql' or 'postgres' -p, --python EXE The python interpreter to use. Default: /usr/bin/python. **Notes:** the script will check if all the software required by Rally is already installed in your system. If this is not the case, it will exit, suggesting you the command to issue **as root** in order to install the dependencies. You also have to set up the **Rally database** after the installation is complete: .. code-block:: bash rally-manage db recreate .. include:: ../../../devstack/README.rst Rally & Docker -------------- First you need to install Docker; Docker supplies `installation instructions for various OSes`_. You can either use the official Rally Docker image, or build your own from the Rally source. To do that, change directory to the root directory of the Rally git repository and run: .. code-block:: bash docker build -t myrally . If you build your own Docker image, substitute ``myrally`` for ``rallyforge/rally`` in the commands below. The Rally Docker image is configured to store local settings and the database in the user's home directory. For persistence of these data, you may want to keep this directory outside of the container. This may be done via the following steps: .. code-block:: bash sudo mkdir /var/lib/rally_container sudo chown 65500 /var/lib/rally_container docker run -it -v /var/lib/rally_container:/home/rally rallyforge/rally .. note:: In order for the volume to be accessible by the Rally user (uid: 65500) inside the container, it must be accessible by UID 65500 *outside* the container as well, which is why it is created in ``/var/lib/rally``. Creating it in your home directory is only likely to work if your home directory has excessively open permissions (e.g., ``0755``), which is not recommended. You can find all task samples, docs and certification tasks at /opt/rally/. Also you may want to save the last command as an alias: .. 
code-block:: bash echo 'alias dock_rally="docker run -it -v /var/lib/rally_container:/home/rally rallyforge/rally"' >> ~/.bashrc After executing ``dock_rally``, or ``docker run ...``, you will have bash running inside the container with Rally installed. You may do anything with Rally, but you need to create the database first: .. code-block:: console user@box:~/rally$ dock_rally rally@1cc98e0b5941:~$ rally-manage db recreate rally@1cc98e0b5941:~$ rally deployment list There are no deployments. To create a new deployment, use: rally deployment create rally@1cc98e0b5941:~$ In case you have SELinux enabled and Rally fails to create the database, try executing the following commands to put SELinux into Permissive Mode on the host machine .. code-block:: bash sed -i 's/SELINUX=enforcing/SELINUX=permissive/' /etc/selinux/config setenforce permissive Rally currently has no SELinux policy, which is why it must be run in Permissive mode for certain configurations. If you can help create an SELinux policy for Rally, please contribute! More about docker: https://www.docker.com/ .. references: .. _installation script: https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh .. _installation instructions for various OSes: https://docs.docker.com/installation/ rally-0.9.1/doc/source/install_and_upgrade/index.rst0000664000567000056710000000140513073417716023723 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
========================= Installation and upgrades ========================= .. toctree:: :glob: install db_migrations rally-0.9.1/doc/source/Makefile0000664000567000056710000001514613073417716017532 0ustar jenkinsjenkins00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." 
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/rally.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/rally.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/rally" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/rally" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." rally-0.9.1/doc/source/plugins/0000775000567000056710000000000013073420067017535 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/plugins/plugin_reference.rst0000664000567000056710000000021713073417716023612 0ustar jenkinsjenkins00000000000000:tocdepth: 1 .. _plugin-reference: Plugins Reference ================= .. contents:: :depth: 2 :local: .. 
generate_plugin_reference:: rally-0.9.1/doc/source/plugins/index.rst0000664000567000056710000000507413073417716021413 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _plugins: Rally Plugins ============= Rally has a plugin oriented architecture - in other words Rally team is trying to make all places of code pluggable. Such architecture leads to the big amount of plugins. :ref:`plugin-reference` contains a full list of all official Rally plugins with detailed descriptions. .. toctree:: :maxdepth: 1 plugin_reference How plugins work ---------------- Rally provides an opportunity to create and use a **custom benchmark scenario, runner, SLA, deployment or context** as a **plugin**: .. image:: ../images/Rally-Plugins.png :align: center Placement --------- Plugins can be quickly written and used, with no need to contribute them to the actual Rally code. Just place a Python module with your plugin class into the ``/opt/rally/plugins`` or ``~/.rally/plugins`` directory (or its subdirectories), and it will be automatically loaded. Additional paths can be specified with the ``--plugin-paths`` argument, or with the ``RALLY_PLUGIN_PATHS`` environment variable, both of which accept comma-delimited lists. Both ``--plugin-paths`` and ``RALLY_PLUGIN_PATHS`` can list either plugin module files, or directories containing plugins. For instance, both of these are valid: .. code-block:: bash rally --plugin-paths /rally/plugins ... 
rally --plugin-paths /rally/plugins/foo.py,/rally/plugins/bar.py ... You can also use a script ``unpack_plugins_samples.sh`` from ``samples/plugins`` which will automatically create the ``~/.rally/plugins`` directory. How to create a plugin ---------------------- To create your own plugin you need to inherit your plugin class from plugin.Plugin class or its subclasses. Also you need to decorate your class with ``rally.task.scenario.configure`` .. code-block:: python from rally.task import scenario @scenario.configure(name="my_new_plugin_name") class MyNewPlugin(plugin.Plugin): pass .. toctree:: :glob: :maxdepth: 1 implementation/** rally-0.9.1/doc/source/plugins/implementation/0000775000567000056710000000000013073420067022562 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/plugins/implementation/runner_plugin.rst0000664000567000056710000000636713073417716026226 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _plugins_runner_plugin: Scenario runner as a plugin =========================== Let's create a scenario runner plugin that runs a given benchmark scenario a random number of times (chosen at random from a given range). Creation ^^^^^^^^ Inherit a class for your plugin from the base *ScenarioRunner* class and implement its API (the *_run_scenario()* method): .. 
code-block:: python import random from rally.task import runner from rally import consts @runner.configure(name="random_times") class RandomTimesScenarioRunner(runner.ScenarioRunner): """Sample scenario runner plugin. Run scenario random number of times, which is chosen between min_times and max_times. """ CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string" }, "min_times": { "type": "integer", "minimum": 1 }, "max_times": { "type": "integer", "minimum": 1 } }, "additionalProperties": True } def _run_scenario(self, cls, method_name, context, args): # runners settings are stored in self.config min_times = self.config.get('min_times', 1) max_times = self.config.get('max_times', 1) for i in range(random.randrange(min_times, max_times)): run_args = (i, cls, method_name, runner._get_scenario_context(context), args) result = runner._run_scenario_once(run_args) # use self.send_result for result of each iteration self._send_result(result) Usage ^^^^^ You can refer to your scenario runner in the benchmark task configuration files in the same way as any other runners. Don't forget to put your runner-specific parameters in the configuration as well (*"min_times"* and *"max_times"* in our example): .. code-block:: json { "Dummy.dummy": [ { "runner": { "type": "random_times", "min_times": 10, "max_times": 20, }, "context": { "users": { "tenants": 1, "users_per_tenant": 1 } } } ] } Different plugin samples are available `here `_. rally-0.9.1/doc/source/plugins/implementation/hook_and_trigger_plugins.rst0000664000567000056710000002706013073417716030376 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _hook_and_trigger_plugins: Hooks. Hook trigger plugins =========================== Why Hooks? ---------- All Rally workloads repeat their actions as many times as it is configured by runner. Once run, there is no way to interrupt the runner to evaluate any change or restart event on the stability of the cloud under test. For example we would like to test how configuration change or cloud component restart would affect performance and stability. Task hooks were added to fill this gap and allow to use Rally for reliability and high availability testing. Generally, hooks allow to perform any actions on specified iteration or specified time since the workload has been started. Also, task html-report provides results of hook execution. They can contain graphical or textual information with timing and statistics. Hooks & Triggers Overview ------------------------- Architecture ^^^^^^^^^^^^ Rally uses runners to specify how many times the workload should be executed. Hooks do not use runners, instead they rely on trigger plugins to specify when and how many times hook should be called. Therefore hooks are isolated from workload runners and do not affect them because each hook is executed in separate thread. Sample of usage ^^^^^^^^^^^^^^^ Hooks can be added to the task configuration. Lets take a look at hook configuration: .. code-block:: json { "name": "sys_call", "args": "/bin/echo 123", "trigger": { "name": "event", "args": { "unit": "iteration", "at": [5, 50, 200, 1000] } } } It specifies hook plugin with name "sys_call". 
"args" field contains string that will be used by sys_call plugin, but in case of any other hook plugin it can contain any other Python object, that is assumed to be passed to the hook. "trigger" field specifies which trigger plugin should be used to run this hook. "trigger" contains similar fields "name" and "args" which represent trigger plugin name and arguments for trigger plugin. In this example "event" trigger is specified and configured to run the hook at 5th, 50th, 200th and 1000th iterations. Here is a full task config that contains previous hook configuraiton: .. code-block:: json { "Dummy.dummy": [ { "args": { "sleep": 0.01 }, "runner": { "type": "constant", "times": 1500, "concurrency": 1 }, "hooks": [ { "name": "sys_call", "args": "/bin/echo 123", "trigger": { "name": "event", "args": { "unit": "iteration", "at": [5, 50, 200, 1000] } } } ] } ] } .. note:: In this example, runner is configured to run workload 1500 times. So there is a limit for iterations and hook will be triggered only if certain iteration is started by runner. In other words, if trigger specifies iteration out of runner iterations scope then such trigger will not be called. Task report for this example will contain minimal information about hook execution: duration of each hook call and its status(success of failure). Let's take a look at more complicated config that can produce graphical and textual information. .. 
code-block:: yaml --- Dummy.dummy: - args: sleep: 0.75 runner: type: "constant" times: 20 concurrency: 2 hooks: - name: sys_call description: Run script args: sh rally/rally-jobs/extra/hook_example_script.sh trigger: name: event args: unit: iteration at: [2, 5, 8, 13, 17] - name: sys_call description: Show time args: date +%Y-%m-%dT%H:%M:%S trigger: name: event args: unit: time at: [0, 2, 5, 6, 9] - name: sys_call description: Show system name args: uname -a trigger: name: event args: unit: iteration at: [2, 3, 4, 5, 6, 8, 10, 12, 13, 15, 17, 18] sla: failure_rate: max: 0 hook_example_script.sh generates dummy output in JSON format. Grafical information format is the same as for workloads and the same types of charts are supported for the hooks. Here is a report that shows aggregated table and chart with hook results: .. image:: ../../images/Hook-Aggregated-Report.png Here is report that shows lines chart and pie chart for first hook on the second iteration: .. image:: ../../images/Hook-Per-Hook-Report.png Browse existing Hooks_ and Triggers_. Writing your own Hook plugin ---------------------------- Problem description ^^^^^^^^^^^^^^^^^^^ Hook plugin should implement custom action that can be done one or multiple times during the workload. Examples of such actions might be the following: - Destructive action inside cloud (`Fault Injection`_) - Getting information about current state of cloud (load/health) - Upgrading/downgrading a component of cloud - Changing configuration of cloud - etc. Plugin code ^^^^^^^^^^^ The following example shows simple hook code that performs system call. It is inherited from the base *Hook* class and contains implemented ``run()`` method: .. 
code-block:: python import shlex import subprocess from rally import consts from rally.task import hook @hook.configure(name="simple_sys_call") class SimpleSysCallHook(hook.Hook): """Performs system call.""" CONFIG_SCHEMA = { "$schema": consts.JSON_SCHEMA, "type": "string", } def run(self): proc = subprocess.Popen(shlex.split(self.config), stdout=subprocess.PIPE, stderr=subprocess.STDOUT) proc.wait() if proc.returncode: self.set_error( exception_name="n/a", # no exception class description="Subprocess returned {}".format(proc.returncode), details=proc.stdout.read(), ) Any exceptions risen during execution of ``run`` method will be caught by Hook base class and saved as a result. Although hook should manually call ``Hook.set_error()`` to indicate logical error in case if there is no exception raised. Also there is a method for saving charts data: ``Hook.add_output()``. Plugin Placement ^^^^^^^^^^^^^^^^ There are two folders for hook plugins: - `OpenStack Hooks`_ - `Common Hooks`_ Sample of task that uses Hook ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ .. code-block:: json { "Dummy.dummy": [ { "args": { "sleep": 0.01 }, "runner": { "type": "constant", "times": 10, "concurrency": 1 }, "hooks": [ { "name": "simple_sys_call", "args": "/bin/echo 123", "trigger": { "name": "event", "args": { "unit": "iteration", "at": [3, 6] } } } ] } ] } Results of task execution ^^^^^^^^^^^^^^^^^^^^^^^^^ Result of previous task example: .. image:: ../../images/Hook-Results.png Writing your own Trigger plugin ------------------------------- Problem description ^^^^^^^^^^^^^^^^^^^ Trigger plugin should implement an event processor that decides whether to start hook or not. Rally has two basic triggers that should cover most cases: - `Event Trigger`_ - `Periodic Trigger`_ Plugin code ^^^^^^^^^^^ This example shows the code of the existing Event trigger: .. 
code-block:: python from rally import consts from rally.task import trigger @trigger.configure(name="event") class EventTrigger(trigger.Trigger): """Triggers hook on specified event and list of values.""" CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "oneOf": [ { "properties": { "unit": {"enum": ["time"]}, "at": { "type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "integer", "minimum": 0, } }, }, "required": ["unit", "at"], "additionalProperties": False, }, { "properties": { "unit": {"enum": ["iteration"]}, "at": { "type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "integer", "minimum": 1, } }, }, "required": ["unit", "at"], "additionalProperties": False, }, ] } def get_listening_event(self): return self.config["unit"] def on_event(self, event_type, value=None): if not (event_type == self.get_listening_event() and value in self.config["at"]): # do nothing return super(EventTrigger, self).on_event(event_type, value) Trigger plugins must override two methods: - ``get_listening_event`` - this method should return currently configured event name. (So far Rally supports only "time" and "iteration") - ``on_event`` - this method is called each time certain events occur. It calls base method when the hook is triggered on specified event. Plugin Placement ^^^^^^^^^^^^^^^^ All trigger plugins should be placed in `Trigger folder`_. .. references: .. _Hooks: ../plugin_reference.html#task-hooks .. _Triggers: ../plugin_reference.html#task-hook-triggers .. _Fault Injection: ../plugin_reference.html#fault-injection-hook .. _Event Trigger: ../plugin_reference.html#event-hook-trigger .. _Periodic Trigger: ../plugin_reference.html#periodic-hook-trigger .. _Common Hooks: https://github.com/openstack/rally/tree/master/rally/plugins/common/hook .. _OpenStack Hooks: https://github.com/openstack/rally/tree/master/rally/plugins/openstack/hook .. 
_Trigger folder: https://github.com/openstack/rally/tree/master/rally/plugins/common/trigger rally-0.9.1/doc/source/plugins/implementation/scenario_plugin.rst0000664000567000056710000000471313073417716026511 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _plugins_scenario_plugin: Scenario as a plugin ==================== Let's create a simple scenario plugin that list flavors. Creation ^^^^^^^^ Inherit a class for your plugin from the base *Scenario* class and implement a scenario method inside it. In our scenario, we'll first list flavors as an ordinary user, and then repeat the same using admin clients: .. 
code-block:: python from rally.task import atomic from rally.task import scenario class ScenarioPlugin(scenario.Scenario): """Sample plugin which lists flavors.""" @atomic.action_timer("list_flavors") def _list_flavors(self): """Sample of usage clients - list flavors You can use self.context, self.admin_clients and self.clients which are initialized on scenario instance creation""" self.clients("nova").flavors.list() @atomic.action_timer("list_flavors_as_admin") def _list_flavors_as_admin(self): """The same with admin clients""" self.admin_clients("nova").flavors.list() @scenario.configure() def list_flavors(self): """List flavors.""" self._list_flavors() self._list_flavors_as_admin() Usage ^^^^^ You can refer to your plugin scenario in the benchmark task configuration files in the same way as any other scenarios: .. code-block:: json { "ScenarioPlugin.list_flavors": [ { "runner": { "type": "serial", "times": 5, }, "context": { "create_flavor": { "ram": 512, } } } ] } This configuration file uses the *"create_flavor"* context which we created in :ref:`plugins_context_plugin`. rally-0.9.1/doc/source/plugins/implementation/sla_plugin.rst0000664000567000056710000000566513073417716025474 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. 
_plugins_sla_plugin: SLA as a plugin =============== Let's create an SLA (success criterion) plugin that checks whether the range of the observed performance measurements does not exceed the allowed maximum value. Creation ^^^^^^^^ Inherit a class for your plugin from the base *SLA* class and implement its API (the *add_iteration(iteration)*, the *details()* method): .. code-block:: python from rally.task import sla from rally.common.i18n import _ @sla.configure(name="max_duration_range") class MaxDurationRange(sla.SLA): """Maximum allowed duration range in seconds.""" CONFIG_SCHEMA = { "type": "number", "minimum": 0.0, } def __init__(self, criterion_value): super(MaxDurationRange, self).__init__(criterion_value) self._min = 0 self._max = 0 def add_iteration(self, iteration): # Skipping failed iterations (that raised exceptions) if iteration.get("error"): return self.success # This field is defined in base class # Updating _min and _max values self._max = max(self._max, iteration["duration"]) self._min = min(self._min, iteration["duration"]) # Updating successfulness based on new max and min values self.success = self._max - self._min <= self.criterion_value return self.success def details(self): return (_("%s - Maximum allowed duration range: %.2f%% <= %.2f%%") % (self.status(), self._max - self._min, self.criterion_value)) Usage ^^^^^ You can refer to your SLA in the benchmark task configuration files in the same way as any other SLA: .. code-block:: json { "Dummy.dummy": [ { "args": { "sleep": 0.01 }, "runner": { "type": "constant", "times": 5, "concurrency": 1 }, "context": { "users": { "tenants": 1, "users_per_tenant": 1 } }, "sla": { "max_duration_range": 2.5 } } ] } rally-0.9.1/doc/source/plugins/implementation/context_plugin.rst0000664000567000056710000001142713073417716026372 0ustar jenkinsjenkins00000000000000.. Copyright 2016 Mirantis Inc. All Rights Reserved. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _plugins_context_plugin: Context as a plugin =================== So what are contexts doing? These plugins will be executed before scenario iteration starts. For example, a context plugin could create resources (e.g., download 10 images) that will be used by the scenarios. All created objects must be put into the *self.context* dict, through which they will be available in the scenarios. Let's create a simple context plugin that adds a flavor to the environment before the benchmark task starts and deletes it after it finishes. Creation ^^^^^^^^ Inherit a class for your plugin from the base *Context* class. Then, implement the Context API: the *setup()* method that creates a flavor and the *cleanup()* method that deletes it. .. code-block:: python from rally.task import context from rally.common import logging from rally import consts from rally import osclients LOG = logging.getLogger(__name__) @context.configure(name="create_flavor", order=1000) class CreateFlavorContext(context.Context): """This sample creates a flavor with specified options before task starts and deletes it after task completion. 
To create your own context plugin, inherit it from rally.task.context.Context """ CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "additionalProperties": False, "properties": { "flavor_name": { "type": "string", }, "ram": { "type": "integer", "minimum": 1 }, "vcpus": { "type": "integer", "minimum": 1 }, "disk": { "type": "integer", "minimum": 1 } } } def setup(self): """This method is called before the task starts.""" try: # use rally.osclients to get necessary client instance nova = osclients.Clients(self.context["admin"]["credential"]).nova() # and than do what you need with this client self.context["flavor"] = nova.flavors.create( # context settings are stored in self.config name=self.config.get("flavor_name", "rally_test_flavor"), ram=self.config.get("ram", 1), vcpus=self.config.get("vcpus", 1), disk=self.config.get("disk", 1)).to_dict() LOG.debug("Flavor with id '%s'" % self.context["flavor"]["id"]) except Exception as e: msg = "Can't create flavor: %s" % e.message if logging.is_debug(): LOG.exception(msg) else: LOG.warning(msg) def cleanup(self): """This method is called after the task finishes.""" try: nova = osclients.Clients(self.context["admin"]["credential"]).nova() nova.flavors.delete(self.context["flavor"]["id"]) LOG.debug("Flavor '%s' deleted" % self.context["flavor"]["id"]) except Exception as e: msg = "Can't delete flavor: %s" % e.message if logging.is_debug(): LOG.exception(msg) else: LOG.warning(msg) Usage ^^^^^ You can refer to your plugin context in the benchmark task configuration files in the same way as any other contexts: .. 
code-block:: json { "Dummy.dummy": [ { "args": { "sleep": 0.01 }, "runner": { "type": "constant", "times": 5, "concurrency": 1 }, "context": { "users": { "tenants": 1, "users_per_tenant": 1 }, "create_flavor": { "ram": 1024 } } } ] } rally-0.9.1/doc/source/images/0000775000567000056710000000000013073420067017321 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/images/Report-Overview.png0000664000567000056710000011506113073417716023121 0ustar jenkinsjenkins00000000000000PNG  IHDR-dsBIT|d IDATxw|ylG &6(QrrKby8on'7o|qb;e[rQ(j %vlA-sX4R$EQ !9sϜs6oެB!B!UeSJs!B!jӯvB!B!=B!B!Ĭ B!B1 H.B!B !B!B!B!, B!B1 H.B!Bt!B!bt[],"1}\J' C @fFH7]#K3(+/'u4;X˫XQRYN?JիDžn/e9TATlv4"1vB!⊻]sxYv 6#㑉:v U9%jWV̯X5GX!B7rGxc'a<_] vN9v͜zutF6')h %l8xzwKA!B!_߃8rfd['{neck+5BTgl4E^LoFĎW:$,{6Mժ4B!B!>.I?78#7d.#+mR 4e&GʚȕecXfd﷦avtLy~4 .2 Bj6;N2C±MK';Z` @d{_ttˌ 0-a؝x:0Pp Gb(`l25, H4Vcw8qmRf$D0R`8<`Z6jpsDBaXr:ѕ6f F6ÎeF BDVtÉnk`F#B3xyX N($\n.xDwp9lB!BЭ==n1t p%3|dx_#'?GW1ԙXm  W2ekC G.\@W)J@7{^G"<鬸>>vu'!4ɷ߾̩1_E~ I33?6x4A$OǝԢj/ʓ{Π[B.[:fv'?ƨx_f_Od%7?}ŝ\$TI[lyTV;W#nfV,rux"M!Tmqf~!Fמ5OTEϸIԧcMi2FOq~wP-y{ީq؋o~.?g[5nYiƒzaW KB! q ^22SC$J IpjpWRYDÑü28XØؿ{h\FnJqBapNXztf-=GΒ~g uW]u/{UCgPKayPO[) 9#Ap2>T[j%/rq:O[󟠱[ *H"4/'iYyG.~N=EK=oQo_ W8/7I^yl{u~\ƻ?-/=cNvɭX˭7i܃R:l/}-;y?ApG [B.׬4_\m+o-m|m[tO7 Sx'Ue>ygP< nG>~}cslڹ'w !B!,rC`xH/ZOðG9u$CXJ#4GZ: [lb*m0:Tb6~zWBJ_"BCwf 1?;=_ױ.r!Kn wOp/n#Ob7V`F9a{:]q뗊uҚ 8w'abPk7JcCCx'joEGNARX0;~;(fʒy'xc=RQ ȁ8aHh%b Xm`S7;5tOn$sVpgk9_޺v;~[Lqۚv~G:hDl#5OٴuO!97mMؙAjtTNBzZh t4sub.c欜S/B!۞vR`k>ʱQ4g" mp^BPg*3$œO\ GC;O{ og$Z4rJ}^BB4g4 (}4vtpÊ2[X0/Lǵ| [:qəx;&ah$HwK\#}~Pvv79~Xn/n:@4aR|vfo[-v0$o}"zaB&8 tr_|y+E٤gd[q?4B_ +aCVkOES#hNҳL&k>;M,$Y~-n|5 FsVӕy=M韱!Nm _sA~%:B!B4]YQ=tt .zz0orzi4%p\a磵{rJm*6g}*s)SH+cȴX|8eM F M,CGi6<m®'4-^" vt}= tCC'JĴL4tbLS+hDz@YѩX|lbcy}9-ńۚ8է(Yus2iCY7?H/^=mb>-گ:O;NdXlCqWj:Hm#w~n VO-cӵ+f\gu244q51y}TYy+׭];X_ηkB!bv} P}I߼ʲbbP`a|8L"׈m~VD:RXAz{(-$_O袡s+M7"'̎*|c=wd99"hkٸXS)G C tsB_? 
lOLn>5R`:^6c~d=_J6ݴ'AR|F$(5hOpc-?MNU~ g%y|S}ۉt3Q\lB#fO`ņ5]x#`κ/}ˮϞ!,e#.+Ssm~yꨉ.JV]Kmy;xKݺ_{~I.B!{oI'i&)H{+lawzHJIAElxJ)quihdF途RAz(]xFw3bǎ`ε7@o5)X}k mqz:9~nf>rO7 P1 u{:;yے9qS5[z]tYSHh %a'[0- 3lͣhn)Ln%qksfJ{1v9/|YϪzc1dxG%7ɻ1ʿ?+2o, 椣F`@mwy ~6]WNZ>Q:V$BԙBqJ7`}Q<9ǝY@҅?*IB!B|}:8Mgޢ4i2at(=:plTfu7Q]G]Y?Mvy`S;rچq#ڵ_< Q3*'xчq~^#:.߼0D޲M 0,mjd:_ _ ^qOo% zrRs θ0Γ̱F~';̝nqn;1ܽ4; ~aq:Gg / m )j ;436ǯ~?_^o3} ӯ@)Z|p|>3^wϹfw??I$IB!C@[hE7B#>"g$ Ĕd݊2>6L0$)AgthHf>p10HFfͬc{W34 D.;2Ez^?=XR韜o.qC[v6:߲]پlܖE|b]1B!Bc1;vHPo \ iͫ䚵Y2o+S+AY~N8~j/s䟾z=# o5/rM[ 3G8<Ƌ?w"BJ\s~ӲwKZn-u^Nޏ^JWnyB̑-3>87 ̡45\<|Q|HȽ !B|мt!eR!mfFGoaT&co, C]G[ZؽE2 guI;Ma/$8jG!\V>TGWb]4AR\/|{bnPsea;*ޑ$x!YEe B!lwYTEsnaטPQk:nqo`ba#ɥD\cz46 9i#&;D~$q7DĴPךrL%NT"<6XXaswЊN$mtMbB! qO-]KYF8@g;K4p h<‰a}-mtEx?{GjF_`\l}W9ADN3t~U5!,,03[|c߁CT׵2\f5ZZA~{(b^癱lʄfܽD,Wb:Wnj*R©U>r>FC&+Z Htǯ֡:BhKw]Zk7TOlOfGOq˽7Q7wR>@ jJfl۴P}U4a.JM^O@S 4 34JwsUs>eʵ[¬d:E!/hȸo[߇r%R.iϛ~۟~CA%+g^E'#AO8 ?b*-gJJs04 +OI4? +'Qe/fJ0fjմUTʖk(Ms@ڗv;z`xYKfzl\޳FhO}oZyM|TvS6vw %YurʋsHtPQ?'&?~c57`TS~e|[s>o-?fS>OB!ޓĽ>9>aA>o=f:lo9RC:YX?Bi*2Fô"x(^Ptuq'xix +NQ۸;n,7QAjm稯+s|s,M T̯gou7$W6h~'q}ܻq7y摝=RYh1BCldq͌Qj`OO/*99DG{h`GPb_m?gh7Yg|Ώ, Т4WⱧv2b-(aQqNb|6$m>>zMenm4S{ip 0Ѝse+3{s%Dqc'3gˋNWRo"Zhfvodu-}>%)+*c-[sҷ5޳ ;hmjįűv<<VC>Qg!?_COedz;x0q<-G]Ћ4՟`۹ n' +IyUZ{ ?mM}D2i?@x9j;(").B!+փ>j l,G= IDATŢ< LRDu+'EksOaHT "c|ncAy.ҜDGxd?LJ n6=D븝t;;aUY2*^h';Y89NSC}bfefSYCifa#ΙK]#})ˈ¤s<tm 瓕:'4?7E'y JN8©nNV8Tfh+YXҋTlwmZF7au{ _IjJ*0Oy"޳b@FNa!vUw멡w-[YAZ; (׬8E|gx0HFv"d)#|W^=_$=9$yQe~N<6M h8#c GɈ?ėP&B!.%`tMf'>nθ8eM0/ww41$Q$%- E.ZOs8'-Mc4HKen⛻c|tQ`A2I!?ٖh'vS8?Q|!2Bm x kqFŒ(xc|LnNV#z;;;Z=!O-`ScmK3[ŗnȧܥM(OFۏ1ϺoGK[KXQIũT[՛ArRnc<e0g\4a \ ?k|'s|^lC+[)"ud9xXVHש>Fdn[A\j#xg!;ę\`>Cn鳲G8 Ϝ),3J8464G2*4֬/!izHIK#ézG9޼qg?/YƭY01gKzAÉF;iO>J=kD-gZ=@gWj96Ʋ4όI`wai>17#}Kbo*<[2nܼ%i8Vbo]:nܸ4"-N_Aj!Y nj+c5JYb2Iv^}]C 25*wݍܺRʊ?ΓB!ż$qs'lNf@p8ttI6|d6{hiGH^0Z̊9.^(by~)%tqSo5Ã̻vǮvNۊY"8>8[l&[7. 
= ݘ<'Aʹ;am 7;/f$sƹX$r5l;ZOD[ ;LڼvNg%RWH(($W7XɆM%QG3(k›AA\zx-CuD;,mP9/'{ٜJ#Cn2RXk@tVi6z,`Oo+%)Nt "`xHK7vN ȵ!:;/3(j R=8 %Uh6:ǂxXQH$`A|"z.<0sr"svAR'&++􄳶I`0p=vlNұLd<28\/Z,tN;JYXiFFMNLޞ2$NtZw_6sLqpg"qD{4HwT&*8xEvJ*wt?'^T# pZNun;'l-4ELgұct5' rIE S#E|յ7 37*r8dEyQgxi(+LOi# (/C_4p-&S34pPxe'[HNϩF|$yR. e4lg|u C1C_ml0vȉa[[J9|!;v3ib=Z#=#zu;^.nYg =P }~FQT4ŮAF X4-O s]l/'H8'Θa8?#A>]2A嵆^ZCD]]'9݁cpBVFFh 37Jp槱t5'dyXRAzi KhS͕ʒۿfԾ)@iRfԚQPekJxDh;/,\̈&TSu R$:8=2MO$>bMqG"BQ|lMctDH"akQJI'ȩ1G'GNu 2  4́Q:i XXk$m /Ng?ψ˚D0U ~"zawj K4l.i9YGRh3YFL3;"C5s شǣqgO3z2 D,,@xw' N3pv ErqGi;ʞ={9QBHqxIJQ\N\/ty8h? ]J- tݎ  g)ɯHY\ɞ:iloU7X\4tg쉚"(J=nB&߰8%,+]u,NF rf=<.rYQh8ɯ`iAE*^R\6@')3̼Tfj twd 3Jm?^CEBV*skVV!Bb=蓿\&7tds|SS}k Ӹ6`#enƒ426#Rj+h0 Yhzj }ɠϚG&ad`t<+ɁǡlTvj&4Yh]7,oߚƖE=D^0&O]dHcI^';DB;ܳ%!^?blkfeR^Bp;ήR(3BC(٬ϲZÑQ 5wh(BK #Eɬ[8Naw])x ;-c8,_7L~\gza|DT#4ْXVV\dxfqNa筚c"A"Ejo+]T)QzP63gחLM\|I67Nւ|fRw9@8P\wM1w "%Phmƍ;YK֐oli{\7cO)bN\O; @'m5ܼċE Î6c¸~{X7[k9DQ7fZGNNEyk>qŕ;YY t\nGVB9x:xm[zI8mOh._ ÖAvS8,tyjf9h[͖l |G 0!nܻ(B{C?|þ(7$=JM8!Kl盻|imw} sw&ŗH*"=E[kwz:ŋJIxK^V1ɎvRZ\:5Z@)Ex/>³|_fHO ^;DJa}H+ZD{0G0ܹ,\B!Wp>>CVZgPMGZ'.rJR\h\m^@jZ7/ +%ڪyn)B2-/G 6̯uo 5{XRN 0ɷP}ڇ#m1. C;q'cg5ht\"7zXsibi$fTiK]ۏNޚٻ說sknKrIOH*M>6lcceԱm~G}"bBH%{zU"%Vւ{9g]83/#a̘MWR3Ӊ !yt5lz<3F15/rϦJ dDb}&ԦFƢw*q9䥔o?_Hr6i0m饶h!꼨II~!PIL"JGfzR,*u|L9'1"!VG%T؃atB2$0<ȰtJ@VlN 1Dq[96C8IAN?NX]7w}݅OV63`(R&^Vy4lN&!*66iq1O>՝ ggl˪2X J,M45v Mf晧22, ϯ(PhCIOvqm*ӆaP8-44 `̈f+wpdjuFf0qkKKYb2.#tJM!@bфAA\wA~q ~\NZ-{! MgĘ|nj"5޼T gHDF 6n P1c LOFB ˨²8KM^ORSQ"HΛ J;*ҭ5l I%$:06'E*VUEI!l]X*&y1-Yq]S',!멨·´c=:̄d&51h}- iGJO\$̤o\ǚ[+iHÒqBq0!d(DK`\N/)ČY(b׮ET ?D=wkʰeb"  )Jp)3"l>AA=AV2z݇;="ˌ3S6ۦ<IJAg0;5zp88ݞm0"TkT{ Pݍͧ@g4a25YÃ6`39,8R*:=}jv+VHـj~ >`tZ%|8z!I|۲NO ? {N%p9,X.Tnߦq\lM`@QsW 5U+{X,xb!Hk=?֮.~~LbQjL |pm8]|Phi>{.sڱXmxȃNir`;C/)huM=keF_Y'4ĈfoZj IDATpR9<}8mVvpÞ2^ Ӆߞ> =zΈ)ذۯKԄF'EAz(A]AAߖA5qӋ>k{HAAmI   pxAAAAatu=ÝAx!  
16 cdL \.1آ    nzV\AAAAAAAa0   px]AAA    tAAAA@mc=?4&   {<Z'MHGd;::'dܶ6+˅15:* a7V/}46    }MWW7veZهE5I奭ZLJHL >@tXNMJk[ ulz.$Mg{75?0IE ʚ0BAAAA#0ځT@}O{@ʄDRla*B0A@eB_%krkm %GP@YZ }1LA'Z   G$C555a4𭌻~qdIt4Ы6cF+hmc2` SRZhfcC1hx6Z[s[{iBH@襦!:.re[o3]11QD*m#%.1uvKXzhC#Hf&H9C*\ rw"  16]eW5D8D`,]AA q_AOH@{t|AAA!5q5   *AAAa    *@dxAAAA2]PgZw'P݆,C`m$CqC噷mhW7 Ozc{p jwnG 0$IB坣>;Lڎűi3Y x+Ti!krq!: x !!Nh3b8yI 9Ǎ&BEQWAMOֈ4BCs urr@u=y/eITC wr63vuC1JwYWI:=2XrFi`(bob}i shUNaB^"4PRT/u<)QeNb̘l5౷QZXCh6tɄJja$ Nd:[忸yxR9[hop=~) I?_ e?2oA']$|n~j},ݫ?⡻'x=¶miv| W߸k>S$h_?*9{ضmfo|~pϣ *I9Qs<"Qu(^4rރ󇜈g3@u9's7!+Yǧ>7GNyɧYԪ'&,@[}'ϣ7KT JoGB'rTq]CI}y_6oy6!e$O'0μ>4g೵ݫ11zaCvdBl0 Fs])eX#̢ ~'kQ az$:jX8 >) n#ƫa~M&_~Hº#ZUDfWOZb%8"3Kng qsmQ{XݎYny2AH4nq_VyHWvT%eS⊋NgdXAJ wSkknኩQ+e|ky%_,E;?}wZGM"s9dNNƒt#wѮsċoqPa[N|%Nlߢi [?-*& V,>%䇇/cDx/+s;sr xܚ_^/a3-`}A^]M̫jiUSvN/)S%+~!XZ pGL?_ʬQ;kMlFF\'b. >;-(ˣ'qyw"|)' /z澲Ͼ/໋4pb=='?_y Oz}3nM,.QίyLSO۷O_ykƎ/x wZwG7ry#i_1Ͽ)]>vGlMCS+IQCl:*M>5_;[8֫XGIԓ?Gӗj?/5f\W^ٙ'8;8-7^{fo]>f Wʇ0fy[Ba{x55[Υⅷ {\9]Ea9R={ < =f.2U|7̹Ϝ>6cTU-~+ֳjVo{dPt.K[86ysxP-+ace_} Fp-2'˴O~/$~ cs!^e8Ɯt1\z*ywYUcE=s.&'~|\Yh9&5B/Ѧ;ᅨR)&;ZA v?ffN#ĜRxlїlzk{4!-e5;yln9ayn,\]ϧ \}Ȳ( k|M r},m<7ˑ39jE>m,wHɢokUO~>&ގ^;g-yݺKVQguTQ·?b18*}f8aV}JL|I!j'L%or-=>3yF 9u}l\%XB15#Mǿk˖Qi/[{~ \rzWڊ&21qZV@jLɛ Oͺݥ,n1U~[%kSu;ޟi (;xxc Ȏj]>Zďk6c X6}ûUŤ !fMdѿxbs 2>ff#+ZXgV)sŋ叴ط_kb*ULI޹? 
m3Jʼn!&` W9gPv3jBʍ"qlҝy퉧YIXyוW=e= Vu6$Rҿ_54 Brr0ڷ_^KQ(;m4^VϞMxuU|TҘs E|_wN;orgrٗ'z'ݴ?VV|Zt-e7_ҙpc{KB1әgc~UkGѿ<ȷ[(\9W`qɎ Qκx3gs᥍x(Z_;T]?~6Ue$LHً\u &ɼ'/p 4Xcxo 0\몱|lo%IE㘔 oz:h5gɲb,.ƪnJ1vW<|GTT,䱇Ͽ&t™{\[׬/_{g*9sֲnc)GOfK)Ȥf"2n~k#caSjj:]H*RjP\Ҏw~` &.Od+/̒nr, ]HZ[6mjtJw%9'?ӯ w+ٴnx4`L 6{W5k>*a%q}ٚ@μ!kJUH_'|6*^FJzL{`Ӆ4 +O1rwM|x=W/L=)+`*c1946P*i %khC!;5p§bL"lv#eeeգ)N{@)*Ic'b> WΊVPc}{Ss$-JI@c&#Zf5'47ǡІ: zye~Yױդ^ ^v *Ѷ;e >+=~oߢJn" )?#|X `kGw~S wم1F/yw?v  ࢥV)`#:S,cSi,LgҩBjA 3JB&@CVRycآ9akB2vS= h2ButfdDP}ަe_QἹd[òP?s/=UB>ހ8*~ӛ9ƩĈYqI| T=8}eΠE"< CDž,N$$"89ǿGucٴ{W { ]WB M%NcRض pٗp˭g L:a?]}^{ &x Zow6r?`Jeٷr h Zbٷr,9ac8 w/r!2۞Q2^ ,@☇HFj"$1֎Dhh"\8E8E5`*=,y~n7]ɣQ^ >$I;hwZ!h m EC($ BVj[ I!iى+>xl٬x,aEIht]t-VN?< ^|`Aj% K#u--44Tm) *NZBz+TZL0bNߙ'Xۛ´6 "D-lŔKO>/Y\jnNcD{ gSywyetB9 K3rſ{?f.}2`3NRgSKS3)~9y( ~3)b)P==}i6%3]OBKBJ< i|ulV6Y#mtvu{ a!C s&Q!,p1g_kvpm,mJf̆j+xtHIIJm^e+#sQ܇yw6q %Q$SW_ẅ́bm-gƝK~,8>܄Z>BĤfN~P v8^xv>l2枫B1ĶgKo'y[TimWNB]dH`ڄ\pp䑣3l!o<FyLΰN!ۏO_MR"Iz_s5Y?Mwq^2}o'{UKRC:cF"j=QiH!i&޼DHjHӋGGWKL^Z\SPl")o?;^|9a-,C<\MZo#]8k"IC bF-xd8r$3YGiI=?TƎnKU4ad#YGiI9|ctVE^n ΚM6A$AAS&d Ը;inKhК;HO W=AR1FO ²*z"jmti^AF 3Ab]5BBumNBsjgKI) zl}N'fy׵걶i&:* ͞Hiq]q1*{$'j,CIl)h&kQ&Z7Il$Lk$77_[UgdV*:=fZK61uF6&g47SQՊ 86, sovߋ}BѺzNeDnS4l*dS`s߅:n$cՔ(BJM^ҦM!ai|V6V1bh¼n(_2i~5y3ܲF)Q#1k4ъk1Pcm0y[j\YGGHl99;jX[T!g:cB}ԔТd7!P0~>n"bJ۰{R;. 
*_'ǹcN!RwYZ䦇T\HUL $*:29\NQU?'``鬣t ]n5a*,vycuURZՊnDҥ ͌3u 0F6r$)&:}Gu qG@]VZ>1Mϰ_3:nnVETL*6gyQd)XFKMل)6:]ZzØp| 6Vt7q"JEc3?0aZ ;Ǒ롴BQjKYݴg.14U>(V֭BpXF A"JkZp'*%ܴXf֯m <;I&d[+ 6aЅXu J1#: їW_!fB-T!aTZe?nͅ Vh@VrL"YqpLFT\$up)t2~ӃhB<en,VAFZr:9M GCQsl&\6 Vb a˨z}F xl;Cu(8mX.ɠY"ر:d!F~-',abszADo AavV(uFBMF4ϫsc p FG9Bd\8$`-~6+m0=+mCzY(FrapN8|>NZV˯k th5cr?~>Pi ޔe߅E1_w$ EkiRMrF31LŶ&(`HNLPq}M/#˻'}yOo_~_5i)Qy4TWO"lPh~G G|Tvl'o#J_ /$ Imz:og99j(xY>!Zqk-v?$I2IہՅ_n.rR;%9'_ųщC}P43ZeݼBg8}$fwk0:YG7(edd*[KHYq~ E.W#?Vea\4#5Pi}&$\-r]Y{ܒNhғMz6:HY_Ճ7.1PWʖ.ހDpd :s5g鷳aCzdΖ665XUDG1!5j>l\Rf8Ei-С#7+8ʦ-4[BI|Wg#1.B#%`aS_Bk0MRT ~HJܾv]HU>{7 ҏ_BYip$٨XrxLJn&M_E1<_I3Zmm؜QWv& L>n$:g=ka@SG馴h ZYJOMH|e#.fiWCVv"( 䌟@znWه$qLndaNǔZNJ# EaUG[R6 nOSRXAOׅa$SmV=xh[Gu6XPJsdg}nKbm.eJz\aYYz*7N wiI5[@ ^zkKY[R I9LldÚBz%1z\zՃG2Av8+i-]Xc-lXBK]d*f͍ tY&L) hwDWƕ.=odZEϬ&p_eOdFvL ,[JQE(|N \wf>MϹ&9~ڒfRI 0P]L@"W"ky6KdESfމCf,"b5}& A2{ +\HGVXpR;֪#Qf K&_;䀇Xd rI]Y[l\q}ǃOGQ#L 5@sk7TR:7QP~'cq o 6Qyxݸ2&N'ZJ"J_-8`R/[†-j9U:V/_Coc 3V%gG#~׳n]%'.Qq7p}jk&|>;{?VͬX׋7fZm~|m ..㋟iloK|]3\J'pO{!I*R&ٻ uo7|7jL\{w.5,[.'2Yޝqv}űX)Q"H%DzɊ-U%v*ЕT%N%r\IUY%GM c `{ $<0~t3=3yO;a +;cVZ}|]Dw7 "^DNOw15rm//#=͡gΥE2w"F9bNzzg_-d>^>37&3~3o0?+ͱqxqSn{k\3-s}y ϶}09xy9Ώ K'at7w J_兟J GwѮ`;u+_c^Vީ]^[==ٷHbF#DFHg&z.oKO^fKKy)::Nvbs9pzCmS\|s'G <9ɲ0ܵpF#\+p/I^^x)v>Lϸa%ұ!/pq{pq(M8?{پ=o|>pԳM{ 0*_ gևyz|[_F6l^_sT0}T͔#LscG.2cwx 0}]fʝG7^l=ޓaf/ b8x=|>/1M4=^t29J|.#z?E,+N_=AoғrO=ZGvuPjqI;Fژ8q?*+M_nmK^oG;y,/Q3.du M?+"gϥ3|Gl$ .tsmKӒ&cm\xN.q%wiw1>ˑRo plNI}Cta|˅3ml{kBVFYy%O?TId1#P;W2;eƫsP~u_7fYx'wi9 aa!@ DݼsiV_[4=TyEܼr><",mRɦϭ?rN ]d#̗,l!uv i-vߏg`E iA,;]͌_l_})Z0=u}C[Y߿Μe6 Xfx,dz2ӬXZSRZEis5y9!=Qw&IGe|o ʊU|5h, H2] a#ʼgMO #ɋ_wNQʧjٮ0[L+hzog:6{rw䔖3?8˛4q H%I늙SbUMnƢj<__cyC=SSs0 ޷6!ϐ7!a)_J49xPa'Nt"ƾyôM/:F4]@*J}"zyg}LylXO{h3et:K$n}`8\T,%<©UpAP|8f.ʗn==𹝤" +f4;px8ϝ;Jωi+Vn~w-~V$aMpݴ+6pLq.-mw?$>ːQ=~ӋmǰKy~ʁESyDZs&G s<0Nn%h;Ϟ?P:G_caar8qm6'Jc6v&vR 6 00Lvҙ[qcJF89~vdֳp.tIaq81 06LD:i70 Ç=nYd) Áe4J\F.^?Ɨh)r2>6J s`k{5wFM[`e͗n=,ܸ}LkQSv30y&y,^\9|M-\[M O2ىm7Ÿ?_, c,_a gcSuB:ƴ~9W0 
)6Ҽ$m]IDAT:W?~d颅x#~._17n";w0,Ƚ3f&P%e慰;R%&N{7~ݗ( 3[8}12n]w|6>_1f|QaZC길-3t`13DSI2ذM0,NT"ÅfN܆Y}Oƙ7WyX!_H֧Xt'"ftc7 #S6 0REىp}JYlT:\D' XZ׿y>4aRPZ t a5N-- ,8$t8[L'S3M02M8ϴsH7LGBH殣|ULs[?ϺPԉ\ u9ÎFWwrjz-RW| .|;#&Zhgy}'0QAߥ Ԫo_ļǧTe[~sWwz'ቆ_nt@Xu)sj~e7ۇnyBziްb7 ~3s:4mGc}ϩؑ3ؗą`M]33&ixntyS>r\#|SKcV&߸ɥo/8AfLa5~뎎 ᣺yCx89YUah9)rx%| fK3Z<˩#KM'z:d~qBkYD iӍۘ%=XJ*sI %2sdۻxwta"t\Ds+9OxeGW]K\iO`c-^קC֯DE /Y3K~F$1._ԈɢE,y9r6-.ȕ̾\svY*x2Õ>Nt0q'ۆOظJnrF 1=ƞ=i %xtvT{Y4[Ħj7dKtGm{-.]ɜ<|ӽv%Iu]%K*]$;n"|IG荘,^ZI_%?M,"J p7Q_t*e~b+J d"} qxs*l`IYo(ךfpdiY3e5[œ.Gyx˃4PXc= F 5則 8L+CA,[ەCCc1S=WLS4YHi.h<@07nbyKXv9Q."X@ˎp`1ƢNlj ).3=c˃͔qbɆ,#wQLͰ¤F9>pJ)#㣴,ҺBkk=7+YDCM!kpjCt$ͧq>-sins鷛8[ª8h>zˑGSs.|7ְaQ Ng"}Cx+Wi =ܻa7ͥߣr̡k\SԴNqL?Mo ys )s2ȔI[yhq9ӟ- ںwOǐPq y$]D2!>jC3 &^[Q|Eqt/.."H:<;.3iܹ4V WPf>bV? %  nJk){rf¬ᱧZP\d &nV,$@%>q` QYQ˴Ӽb18}Ge,o(oO3:zZGHJ[[Mic3U! V3\ü|ܶYƣjJ |KM^Pl\\G}|Mll  fXdB;Scss|Y-|P?~fp gtNuy#O"h+={oR.^cN 4by#_XQ;:K;.r4񴉯/y!~q./6P1P7{,'|_m`u$3ܩv;M߬es"2W/̰~j@Gٱ"rd '|e]9n}}d28eee}eefzwWN9K6=O?2Qڎ˽9 df{ww5`|X EDDD> X@0Ma:杣@fqgRx>2i"Sqg3rNL'2` ]]816s8aJ215K"crٰ),ܐ#b:`z6v>޷ L$b &g,EnJ1tux{ؾI1KyqޚJ:dr:A,ɭ]x& CS^a d E>߶ݏsmߴmGJeaw3DDDDD$ |^RRQCR&add~7CDDDDD v\^:M,"q"""""""Y#-'"""""""F렋dt,.""""""EDDDDDDг{6MDDDDDD~;}=tl?pA޺6E#l@j x ?~~;λol%qg/3%EDDDDD=?]u(vH'&HYv׍J13=Ea.8̼z|&w.fDfHg,,ӁI:{VxtXºCuwieYI2HZLςEz6Nt&F2 6 :Kl!͒w&șJ:RLO͐ lN7>eeM0qy\v8 L2t6A 0$ /Lhe`syxX)"[w \>?^'"""""r|lVг<2?`H{[io P ?0oaM- $SX<'9Ş;b.~6{Ɉy~GE޿>ֹLW;_{Y^{0i't/cֻJ1q04DJRNnfv_1֯Ho*gzmjYP39Me0j+[zഒ?Us#id<>Cx ?؊KKr?Wy=[6ุcRew~nV>e8EDDDDDHAz GJx#zb?6e8c3_[6_\݇)\ƪEsXrŞ;z- M4顭l}ti.vMZ?OR7Ndl#' x4j%Fteg#^[ݰľCIѲ_.0>5B~c+s˘9*G&4,[+X\tWn! 
߼̡s׈oIER4o%֭:b6͞Hϥu\rO~2X8Jyɗ>FM:2^a!kqt`a Tk.é""""""փ~*|v;f~T?!fT:0 Sa8ãjKqe⧘JC3 E<ETV5.mD*If_S ^Ӂ kmO0O*p`U5o?MbrRIx Kδ'!c3Yy3a ofj uA6[EW0F]̺Gs"\ jκ;vfjCp(LW9Kq0QU-o0a'f˥y:\atv+oO[#gǬipu3.$H ?EN~1&}g2N0~1w9+!{u5+gc}6w pYɞL$ \d<{]A^#}D"g5uYKY| nz"7,e"3dH3jfY0cw/an5c ,hUcozx)^ NNO(.#WO>1\y`I-} ϼ=0؞H=3|Ѝ  Gkȵ.?A\7`;;\$^'-Ƕ+9;g[i.3=|Gz TS~0w%fν\%ɐVZ룫m%)tyϜVZTË`s&w>?WmΏx[y1N׳桥1amRm˼~}N~7EDDDDDq5;pCwIw;( gaeLwyw8vp2=ew|p(ARDDDDDDD["F;s@7 Ln+xaB=tN"("['pV|e0v5/],X,$9"""""y5˲H$LLLJ4TW0 ANNN~7GDDDDD=t訊HP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (dt,.""""""EDDDDDDHP@ """""""Y@]DDDDDD$ (d@*2IENDB`rally-0.9.1/doc/source/images/Report-Collage.png0000664000567000056710000056004713073417716022671 0ustar jenkinsjenkins00000000000000PNG  IHDRW4bKGDC pHYs  tIME "z1: IDATxwxM$$ޛbAv^z+6TP{(J[Jz!nN!T$lrf339m6֭[eA(""C^Qp )..#=0A$Q $''7=v&#::Z -)yG} A8L7pF:b۫ /pŸ袋x衇;4 MD "B(C!ꫂ XٔeYAh; ELAUAZzlJ-4MQB HB'b ±,BK6%a4]o*IЇ ‘(3NXc$,t0PEaAZlܣ= 9J/gI1{覍mۍdMʞ} SxӲe,F^{|%0$i $6 r(:b\u-lTUsp]^?폶 HͅDv%5"rx&\l Z_~أzF%lBj]OnTIbMO 坱YeNچ"F]oDLX =mٴ]1#xoPug4Pd~qX#+=Zdc# x( m6A'd||H;a NQDi?d8q_˦Ry۟߻'3J3 ͛)_1}ϼߤK7SIe"A*jA0: >8EAn66 Imh͢gyH+&ni.: N,N’xyn$¿F;m a_9ox_47U-wqY2['3#JGnGuCA!G]m ]'p9dVEضJTb*;i8 fAQto=]Cԁs%N´t$W"w OqI}ߨM%kX#APn(2ڶ ^}+9* Xt,*=ᚂwpHX$TT* ; Q:5('_r5;\TG Wm'ڤ 'K7Eَ7c׫,@=A}HEC}5:b4?u*mYTmW,l[SINP\+3r.NAbN$/_:.ɤD,Y+/OۂnCE@nJ;[%x0^ X] s:+XAxJ7ڷeUb}~z Ln{gz}-wh굿"eê$O8sVcTCIw[MEGGJ#Öljjܘ9UddU6:D(.]MDtLg9x"vc}~|7m~|4؎b&>dn!%[V G$VJvͶm۴P>r qI u3Qvm4d9$ Ye4Q0QTe ahaMK-Iiv!AaÆocƌaܹ(-B*|Q OTmFm0dI0 UU=r}mudE$\$BdI‚P#Ac?ƌsĶWVV駟Wfʗ_`69Y93+PGC@?wU[>~YaPQz9)WoӲ~}`5=[t o56S~u̚5Yf/I_2O?O>ͅnZkqW>@L-²mvT$2aa19 'E1fD7ˈ1,Ctኳ{ҧW'vO]8G+ƜۙD: g2.6mWTSSQn5յw_5r [ 5Xl*0w7%ߒ|tomarJ (i,[6Pה )6JA#1>&wVRXK[”}<>uv(P*?3>D,=>E]cmIG`䝯l,巌:;cܹ3]tcY?˯/SJ,#iuM>O< [~[ϼ]s3x'۱_~O4||* 
3qiO?,H:[Jx+'SؖL⬍<;-'_guA8 SR^XlْKY Ѓ%Yٜj1w ."R#7@@nՁksIF$,^˗o#=9?~oM&}BxmLW pDǷ[9㩫dkѮVS&3o~sWm  Aݠ-9XlݒYSoE>'֯]KvIl^DMpv4.J$7 cfiYCxxX+2?G.rŧbz+x deS2v[؋lݚ>Lm|&&3k߂gS~P g~kLIo͢[ Jt21w`UAd^x g'wı- a3靷-ʼno7~xn2lg{/P۸|7|&Y[,׵s&h?Idoӏ?NNN6O<0&qx{ϙ>sW`~ZSWo-';G݁L|qVnw>0zn2X>i93GDVd͗~F]U9xfXOeDw5ד+J$NC۪MbZѭ[72(zCewŔ1m4uFb;w>52gʝɝd״6-,;567똾4V" %<3sn)̧Vy+=8Lc[AUo|`X4_maGF6T \*mou'P+76pƏ1jI>FJ?M ?@&۩,ס3ތu$jytPdCRx ɗcfxj}^ND0 ~mQe:t@j<-ɑޝ E|>lܼkNFEgp.gl7 c9op8a$?#0'_0 ݫ5!1lxte\]mHV;ym(/ʥs"bȭ!>Z Ǯ{m۶\ <|%SIbb4;tFqUWskmGmY˔%"2w^;h[g$+XPo]F# ׃˥q)9* ~E:FVP$`HDX3v1Wrt,4NG:G '?גs#鏧W,oSˏmc'ХcktQ(CyA>bۇScO=qv܍K[?SMp^(#F=GׄƝϬP VQ^ARZ=WdrnQVQCezYd[uC$"Rrq&ZdnY0}+=ԯ]݀OcsVtt62O0恧yeXd+ndQ|"JR.μh$!#}1OZb$=θhڮv9Տfq҅`,qcއIM#l­>bSH_!(tA.Ӊ$;$+(!Rz ߫EėfsjhQe Y Go>-?.He?5{Cl2{v1ΆYЮ j*' .Y G J rӘg(C#u$ p ,ޘp  q271nMqĨ$ O'&2$Ͼ*充L mvxYЪjR7Rur*qʹ>|<~=<|Cj@lG.{렓Og8:!9~wH%u'iu/~j׾5SjauCUBy9.FU bR;'qe* )oȹH5嚳z^Bœ5jB6}ooAc٦?!fQF)+O™Fq r' سp4~5uSĶL # , Ա,n:覉e;8;a`iMi &_5UQ$UUUQwu%"1}u6rkY!MUiAzg`LoP IDAT6)rDлmiIl-Y헜 듊]Ot3SIvReZл}$-mi،r0u(q1W{HȆ닋Y -oGi5_O`Pﮤo+]i"{|%H5DFcP 2s ,9 O!qjmmw@?-Ќ @-9E(N )CC]5i-T 4D "Ye8Nss[\G1tmO6Wq&5UnrH -(ؘV3|.  LâƠ_(ʫHJN o%אSxc$(* pLk5˲k|gضar:p$$$P]QN6)9d4AdL 9tnl@J$,,L% [bY`ӽMiS34/VWbٔzA,\NCP>iMW.>IT|bv>ڎ*$E!:B&/k;pVaa0먦ILxyi|>QlQGA{M%"Aa z&/-u0eM>>Om.{nM" 0l1 dEEMZbccڶ-. QFq?^sA}:N* 1%|7 Zʿd?]]e =eY(0 1{&,"갶ktu(x6k?-<ˡOC^Ve15 GLDD^WTUQf.VJ?Ww=]%g \v7U%E^!$] **JD WSSC\\iKnh-l[,=t@% uMF-a|ôPYB$\0 mmyS̝g&|#-e}g\9Shۺ Znwr?$ I>AZ}>OT1hp'~-=ŕԉz?Vwck^^6\x5'to?>8a0dPwni<\|I8d;ƍõu|2 ) HUE Nx{";/ 7Ļ3pvVߕOZ1B9iݺ"[^' "+}]m[HĶt/ 8o*&2m$#aihtS{p8:MŲ%JJv9Ѓй͒ nIQ1  I"*5R0H684 v&ĮQeY'rݍan±%8|8{ЏMnaırTWWx݃%VГzrcopãYS"eo)-9P*H USS1e0}ΈmɤŐ~jLNӕMU~r 睐myz4mBXDGaЫ ~l]VcɷHDpF f1xIY!'0mdYap٭w#A$̓.pMxqX,uӏI(I*ewSSQBdL f|O'ƒg3mb|Al<~ؐI:ʶLG:Q]\Jr\"^yv CZ8WVl+CO 볠"9o3[9sJΛsH(^UJɈ%3xpgpUcY,=:S")Ѫc'j=ضj%8m?I[AeQ-D!}ol]59 7W@aἅi9QeP2{V6/H#WZ$aYNNd}f,OVq5wi 63f). 
M}{E ,X88,Bs_kAw84*j*}2>2Y[HNN#VU0MYQ'"1 ǣha,\}#cT`?ty[!ta?jEoCwSSNfv> 㧉<([pQcݳ]BS%tcW<e{vxP+dt[9Wn[/qi*Aġ)XEkt E NM,w;ZBqQ~L v-MaYL $ 4$L=4У$4Mc`Y&Ul?hTLFeL=90t=tуȪCPmڶŌiS9+ WC BeLTEưB}a[h 6 5ME׍cVi˭,pŎnsc[&Ke&nC%7a`M$:ҽ[BM%!ya1mėI'8Ad^}iRe|:β[UD39ĵFvA݁VN0!*իI՟M7[#QcB$ךTd3cfNɿ~sq:co=AL Bx$IfޢeнckbtdY6`79c@gff耎{HjۑmiFæ̣ѯ},)]iݳ3`֫1Ӎ&VdH|_z*a/ fkL /4B 9dz)1FQ4l$FTںЏƩ'k؍-AؔAJԔ8`]:ӱUKYf?v-ɹ ^8`uz6Y~ 3PK 8mA 8ypojtT۲$FiccNN:9%N G7ӡܭi I u &Xm*2b$ci`c5eȊa(2S0]dS5fnqb\v|m.2z=NMA7mFT ]Gmfh`Z&NME7LlBQ$$Yl,Ʃr(JΖ}W4 \4p.ݰP$McWV ⰅSUòm68,BtS gҬ:HƩk~O7-0w^`㯺_w!p j66;ҠރS$GUޭRzh7x{Lk\n5Vw[{l&mj ň/ G !=~qϵ @.dz_vr$*1 uL!ށ_7 EA$$)4|^;;Ii(L At[ 'p}` r@Ro2&H< xPU0PT}נ~/Nxd"cY&یQs?h4%aHM$ǯ%Aw-ݖ0H _~¨_}eo3f aJu$UAe`LJ,;;0 ?_yl ˶TnmT;BGS D4$I"P4>n~^Ie<: RRzVϬμRWOÖ=> a|s헼<'i 5 _@',<)YahZhV %{yGLi,٘vV`C`<~h7ŒdK2X<.N= 4栺z#ґ[2 %4;9 [^fi#xD`h m۸\`$IjmAK.݈8yc^p1\wL_'h430NH">~䤾$g^xO*4Cs/1" B?"<K@`ӧx;?nXGT-ŏPS]͋tJ}GUds O)oBAC"tdrσ0`y(^m\,yYwj @~g\UJC\_O9Ъ4{˨' }!:s߷OVp]7s#ѫJ~?FBL@xw&eY!q> p`ݜtx6}_FPaD}\qdz\yUHc ɃOm '=g+[AZPAhlbmʃ@XNUOVE1 2(X+ @Rf1EՁO>aRQd<>6MU)~/;co2r(vaUtuc:Nu8bW8׿öd憖A8H2Ĵv\~ID8Zȿ'^ Mq$ @hpj@ S) RSSθ?{uo>h u:7pwq'N6ofwgMMI &$!ԅ:eTF3s 6.`}^Bh3wsoA/Xghie' gO䷿1'^q'Qsqq<[wbC94vُz|] $$ƇkH;"(*-avv"Ňv77x͖UmΌ#y<X<qw0fCp?oPaGKl%o}VjyBcRIvH%{xSD{.36UšWե 7ㆥ󙑝8A5ޜKٱ|/mb vHh hk'鴡 0ޫP(fˑZ/F}~'{[]iӲ%ɕBCS+wv&cՂ n0er.To=KZ)c2ZOxpg# 2}@7hxRϚWTiVS7ML,=ܰt>y9HÝK|ctJ{"z ۫V:\6vN&F+ԺJ|E˦SW?ad{i=6 M3Ɔ+M8o<lļ^45䓭]Qftr(1%3'^\ۋ[,jaq?ʡ$2vw$3ǓȬ(#/3ܩH8у%퇽;幨}_UxDyt͋kH@qCˀvq88^N'n"+m4.joID{,J"6`ǡRk4ZD`tƠNǎg#'n8Q&%1U1v,@%ZjϪ4DtkOT4G +.Yt CNcS ; A]rJˊ8xI”Ihx,,1 pj*.A''#af\f< K S]LLL.OA(2zf"PUU%t qdU[@uN_B8f= rxhhHP5""2`ՂkX, @s1djbrrn , 3˲, %\&|kXY@,`h" i`Xr,n(HH 'ē \ISՏ>w$~.K1)D6U??$#*zt,&&k/]@Ba"X,`iC@?ZQ@5:,3|谉5j_' tɕziݗ?)I4N67/Ovz M'k&qcٽsD)ǎ$3ͯ=;L\dDS_O۷3PT덢OfE=,Amk/rտyV" +X<{ <&}!olbb=fG5 5LLLLL> n?7Z|e/aCn:Mthl9~.ηWƣB XlvWHl޸@ѱr& ?}%tϿuٽ*s*(o8շ,iQEt6  \2<E 9E/;hMn0,uH@\>=X̓`bbr^L`0x>[@n#J O]Ʒ# Q.G/O+ߝGÑD',\}>GYsS'SXƴTU԰Δ(N MMTL$-'pjJJQZJ N#2.! 
(RQt,# Ci9 ɛE$c1aPWufclv;3"Y<뉿~be+/q}Trp&&0˪A\1'SǠSۦFEj*@ʂtZG3wR2vjP\ticDK8._;c &9C):vd `0881| w4 D]CCv1ѐcZj{ܻ}$F$$^T] dq#y<־*7Z{[rת.ITo[Äw~6W!?6jd Zpo2@5^D{.Y,nvȮtqŠuȴf}l?Mg"$>\&Xț>CJa~vcOǦ?daG} 0WF>,(ohUIKgSmVOTqgSP ET"H};X.\Q1Rz^Osӣƒ:pk e-vQ1;(+-'##| %'x7YƮ(5#dL|]2lؼIc#. 22|.X4oڻih3glZB 5i%ʻسesg\w֚P\\ P]YDŁM?'1=֒c<?@]ʨ^bKYTu䉸}-?α7]ך7 t@4\ + (yZ8[qN8j@nK$Ǔ&iF8h(x&iY4Dc4 A9op:y"hNP#I"n; !˰u20F>3YK8cGS_o&&]ׯPa#?AMLLL4w69XPǎCH%e$ރ,Kp6!{FhDe(H4T ]sq0uVqҪDA݀wxҒ㐴!ZfNCюm6w9y!I`=e g]}vh)z#wRc#)!9VwQmemM:{I;7|^Zĭ_pC] {8Xůq'şIn7^FϑGH2c{7MƤXξ.&'_>x?2+7EL{UL&7QfwiT?;Ch8QBecͽ/U5wܸ_cG`)e˘:>I㏲JHʷ@|m'YCt4WRRz?^}0Zq6o-öU5S  b`)Kh+ϧbq~GvzvwR66arr^g &MƱwScd711 t@711 OrqyPymķ]a{}U{w! I10'D6TsV:a wo=&-oCt"E0t}$I(  Wf1 Mуlb9r$ ˊ3ϾdDC>!<$:m$Ig IߵRu,Νxt5[ryH@x5W]/{/%Z̿/~_z2s<$˿Oz2W2n0UJO 1KMK_|3[{6ldrvpGC{+7zU-LUbބD>pM_DōmtK8=Q70-;~)e !"{^~_*ePb3yrG}5oRsq7@3eEg_Cv:Ux$ǧIpK~HE vfa]y3ز}7-{\):lbbr-cO(z "VoWzhA+dY NDQ":)Ja?WF mtɡUDS!cDr|^VWnbbrJ,[ei.oΥ>61eT]ý= 1&0ɡvE"&1PItWtc=w.3MS[ɓVUCǢX*, (B DdBJ] ǎGePں&QB0t4UATM1.MI ȿ%[k|Б&;=Y;0$`nj5rObFvn X$q_>WLQ6#غv;3/?Xo$"F&BhW/NLT\"jo# 4WB4p/u$7_v+FvJ9sQ΀.ڷ҇>u џ#i85'x1BDwYPϮ*(02qq?ksDAD47>aHvW:İd\!]@=Qθ=>280™5QO/b6'z&IHP80 YBsRs9WP?%G+CQc;IFzU}P5 >ej zx:IIǑ~_<6o)'kIFlϖpLn(/M{~a:,2ia6t?¬܉h5QsG1} z= oǐs$MXi-ߺgzm)Q)y ;O_ny?/~S뷢4kߣbk.sx'߽k4CQXχ_qD]7K繍E@L:әnDw[r&O{M䉵ǘ8`#Ed}i ؛_¿vGjQe2v9,f'N_c!UE TUCtBFHеߺ~GtBal>i]7 j_>*ޟh/NÑs^;V\*ct ܣEnv}4;u/*v!h#hOx;ud;MG;=RuPx7fᎊ +1mAum޴tj7$IzZ -N6s`br鰪ׁ į l/qSCv:&%p-Sс(Fd<~݇h Y<Ͻ&}hu-ٺU/h55-$[>@X0S(̆z1grQnw89^|\DgU5 r=?#ohn{LNp0vU@+q)IVL73m(m{b:rYV^}c./ߪ$egWGB]"ٌl(l[y n"#6-;z6 nn d9-N|mt3ݻym棬whruUv8#߸7'7nq=}IVn6uÒniQі}<~^+r33_!Cr046!psC'w(p c 5HCkꙌ1n` oa?q611*zۉ8ԁ>H2<-dϾ'_x ujxu2;w3#;w*$L76[)E+1qy/ ݷ#Mq3OFErWO(3`~>R1:ay衇س% om$H0 $ټ>L\>Tf ~=hb2k>> $kKY:FByq 7)deg0.' %hCdȸt lvʊ? 
a``YLo'Ơ_' ASTvo\2)U|Y*&Ơ_\􀿝wlqp%io9,-{X|l{y`rz":ׯ#cR6ٳ( "9{_9J<5$5m\y#׿J{@`i,,&o,}iv,[Vz򖬤pS*Y0w: =z XEUI5bƥyf@u'doKTgҌ#^ Mel+C;W|nMOLp\(N/3fx/q\&vcqCSޟio!~dQH'bz()k6 팞4v⢓l–>,^xuN6v3k){1D&OCҘ133vLo/a֜y ptMGe1:5S,]Qj6ƍb[ɝ4caJn̎$1y.t 8Ѓ|F&&&&W)f8<\ @ B(g `|L̳m*V?6|n:!H_-||(aP5 ^+ (L(¢!,`0,+(v9QCA@QF4%DtTL ]EGDDB Ŋ(() ,cs$[%񌻪"+jEQPC!tD$B`\ zXu,g(qs[ג\|(0 !GX]RĦ=InN {57+UP(RY-(lf\R$?Vˆ#u[ΜiA hJ*ЌpM79Ki˲1&&&~} άf*ğzfR"A O@O(d)cȾm`8BBίx:OM70 0u>D-"I"PQ<W5MrKf aiᬰI, IDATH]EEQ%UU 璮g0z8+kT,[DQ03maCuj8a ޿ ]0  "JYIO'35Cgִ+ܳkLh.|@G=GEk4:ErVDЇV}te,aˢvl#-3~ϯZi 1B݁tZ4"W1uB6r]icѳ)/y'p Ps~:zyy^8;$ @9zV[z %DmpҦC> v系 ]Iu~Z=k0)g U#Ο8БJO?"yOgAaIv6g|Rw(7TJ˚bmu5(ex5L4bBW3Hs.jfT{|'FA!ıyeV&˺6C}6*'zt6ҥ2";Q,&k\Iw^{QYP$γ t%?yB6cG 8.s >:4o$NHC'lhsDcb\zZ$!_Ȋfr<{HsSuFP͡ZCu`WK~.l?˘(B>yי7_x3rBvc"00xg(舥F<9l~w-"Յ- 43`Eo~v&3 3̧,~Yr!/cK'őkdqGO{'(9#5V楿D|l~9 tD*doQ3;+@P+޲>x7mԟq(lYIk_aCl ኑtyyjJJ8YF:zoaL;@ww&:ۚ)<!n*A!Ѵb䘱8l oMyqz(:tGsr}[?ފ;>CT;LAA=3&csɚ=0Z̖ [%aǎCMI6u,_;[7L-̺EܴdTG`Xs$;eF[ᝂFOQQZ@s 'xٷxww=;qn%1Tu䉸}{]|ccW.|m)43+:KۊYi9D9,״X">{& {* GfpxzeqTeshh'iZ.۶0ZfY*n[4Mv݌M9h-UYpɘcMt i6N%kHΦfܒ̌%qNΜ H4/@C>,:{d2^v{el=XɊi`d#! b2]Qn15ǹaz*"x{gk Wn%HM]E,˦Iҟc@71∂0 fYgFӯ‡_ F26}_Oݍ|[areg׏?ͼ9Ev'":LȓIу,}þF+&eT=C oyWTãf :sxeeAg({6O~)9( NUiOw~a6^2܇*'%/Y7OOp{H^o3'?*X|` kг5߂y5>lvE#ʈA78.pGr3r9Nw@ bs ItJCC6D< p~`C `bw0X a !! D\vaMa$8ŵp= wE.Soxg2qL,55-dd$siجfة_G1Яۙ0I\oo/ ?a&J2~߇4V'0sR3Í"w f+@,84-@eOsz %IB4dY>Ȋ'| iF5GJB gx${+ ol$J̿q__y o~` N w4rí㈚p.n(_?rUMe/PXcʜ%5 “/a(Go~pIŏ\?W##s.wI31Dɂ1sS#(l/S{[M!mض J*{(=8c9zJ}aZ{hc~$""Gl]%{1V϶݇l~ˮ'?kH~MiF?ml "eosm[cFd|o=ebbja)}x_xkO ݁#@ez& |. z8y[#%kjD(&RbOZN6l+w0놕v ˇ:9M%Aog՗DѬI2 g)bRh,߇lM)}CAEβr-C,>y\VOujS͚\WWFOa|.qW librc$y"s wrX)xb5kG3:> cttA!)1 EDDAۗTӛHDPCHRSvwm#!y (7>\<8b#9A=|=U#e"$AAXJ4+A8711uῖW91m'heނ@gW7".>ۗW616;Mjb'n{>{q8= ƖNO."=џwgw71^/=X=Qܚʡ7l^V$g,m=L{3j`FM{ЏiT3%;f&OuBt' :uQh7XM< IV 3nd n v~\1a>tIV0baAGcEjtQ"3+tk3a =@wO(t~ŋ$g?z@_vvss]Ed&)g}x,!]W"vb"sLW%*^~at,'AQ=o ~(&QVG=,A6n÷Mx 2?}s DMeDaY69m;ocȜh %ݬX˪[#?]y?/H/HBKEcq ;6tĬ+_=߸T 6"݃/$%g2MNj5iF#E']E_/'8ʷIxvv. 
[h~aƶ芓{7o/[oz$lhB-Gq/@=0dʒ*w.vT76m-?乻UT* 0t ]+" Þ>!b#$PrC2=ܾ`؜?TN᱓7ooX')F"#4.`D 5 D&wq|(62.zCy"үxlheJOQU6/ށ AJzêȍF-Ȯw7pٔ״3g$ݽxf&Rz[ρ陔*3yM˳iDz3rHJN&08eq&jk|75rj,-ܚ;^coV@.&'Wl(h!L9csncz(om7;5$qI8Fʫy<ьuQZ+?=|, D^B΀4GL ݝmI[} ^rE댎C 4u $X+d; j3&1n] !&M퀓H+ޤ4ZD+ $^E=*I}= ~qg Z_P iA ~ZF%G3gb{ (B==D@@Gm"sxv3m$Ƹi̜ͅH:{хo"C"&6n")WwGЙ`.{m}&t3W: i=ÿ-RS㙹h!n#Zz0Ȱ!rtNz3m]$F53"}qFcctvtAd1vuDa v].UUztXy5c]_Yڃ鑻rذyܺ~AbAA;B qh6Kp`HVUoHKKj~ sD ʼn'5#2MJCwډMeu 7sF(m;|\}etv؅!:YhyWٶe'VW|b8m4mفd9]k]ܕ)_9,r?m>Wnʱ=tƦwJĦ}(F`G[D6x%?.B+ -'ƥav4ʔv=QL?*ުq"}$K`~.'9=gbY\-6WXY8)Wۦq`/4\"C'@9iD~dlX^~Y81a(dD"yGmN/Ǐa0"7q:ܵxÚ MML`O8NHwqTW~lת.Yݒ{6$@B˛//$@BC PC70Ud[VzZmߝc6 pǿV;39s9߼H43p3HCdX QM QaVQD$rhofr~ea" ߽fZ  AʏuR ; |Y $UcnfG0` Bl\)cs).`BfM퓦`XIYh!gN?m,Xp YIdB ,q֑X>,k䣪AcAxbYy$&" `_=DŽdрX YSVJFDE$.{,w\6\{(JD־*.I!Q15-VRFePiW4irˮycG!'<̂|ʪ#2y R1H2fANZ^тA PZL]""U]WL$eLuM]ep86xEW$O$KrxW0d3FJrg.q}p{C^ ' ?n4pk W<ǯ9߯OM!N-A. Wq|^jqSTD; |J_LO{.4*|A>%;AQ練Fj'{ͯ/:{#Wð߉_=Dw9<:::z/Q ȣt!у]rz[ۛǣ~-WPw`u+(-T[xEi?#`aR|){܃Qlso<ė's'xQRϹ18ԇ5pf?c p丫Yxy_nR5 _5b|-gQ+ %̡ETڇ|󈒝q|j3Ąˢkh,R0@ez2ݦ`L|,VCkouvaV;[[w GDL96 j`-$1+&_-JYsTm_q>;d;6]Fi#誷WpǢ0; }Y?Z}ڰ͉B c//^ vޏA44ۿt vEFd NHGPS쯢j9_bSnh932ry3Y` Ҡ;Ŝ i{qjw,ZNZ%xaS﫥7? 
Es[r" YqtY|s4R` ",Tm'֚Hb㵆]ćF#/o F珼,K*}Vݡ\]®Ʒ-0',MΊoo5|;y넆&zvZ{Jhrᨤ0G~$[6UWU4Mg_l&N#d֒>L˦FdJ]մi̮Hb>ێq#Vc حfb>ؚYYB~ԓOgĂ[QJAfͅgd2އIJJap𫧿~K]u 3dNQ1@^ 4S3l(5k`kxǹng\dABO&r&/+ڐ/@(Y[5]q&֪tcנwm{mMi) ?9DC|BC]C4A ""DG(;CFbsM.4,[G'U"8>L $lBPPYP5M b6"0`(x Q 5UUxNL>Uf4V,"ٌp"a68{5HCq~T-EsvH90wbD@DEBAow[dA;,( j2A4ф1Bq0B0'@C UIS0BFՓ}:Acȃ?x_WկЈָ-یP$t.M?va3]x.6Ƙ>DB/ wB^]Lr# &E#?khֻyix7rJBXh^ IDATq6,9F16H1 W5jiddf0lP<6D[8*ϿJGbH$''!/^j \4u;*S%rXo\rvgH}JnGm]k_}>˖y8sٝ'ۗ}qG ́0cC;ZPT sD!kU0䷐;n16wL9v'r,@@SF#ߟ=@CTO(!`!H`:؎ h}_)wQG]u x}DъXXdwр]xzi fĹC!hQ2o8[ԻvU1a$A)_G?Ȼn Յ"fnv]i>*[SO>A0bmAUhmEe23LKB6l^FS$P4 ZP`bJ|oP{#x=NRRYvZk=`>qM+Wĥm[t6U5HT4kYZ˫(/lpӥ( >BվHlb,wbS%,"/WzF]GV*xnGRvyqdgIEi7UEEͺag!+#kHeZl64c}e)8c޹6Do=N1Vmgd:ꪎ`0}֬&l>c~y@eGg7^,$DIh04h iTW",Ζ ш.?Y9h =tt>k)0H\SSR?8ucC#DrtHIKϚϦDߍ۩IOqa +=̘mXGy 3qvQ"D>}>f_ɶM[5{ ;gUK9s;N%h<\\S/9 |Mttt>y*x$I"-$$%!j#$Y")!!E}OYmd>B0%?nOH?V22nwn=#9&UU "D=e`0 CNXjƀ+@dA4 ,IgeY m.!i`p:ת0aJzZfS^ 'yE9zѹHP~:<Ćj!rXeÎ~m~WsLdP9]1*hܳ -n&dF#ű0U4*5S:QIKKPVhL=Gw7eĦT0Z-Mf ՗2e,*Z(HN&;N]蜣l dt6,BPQ1J*{j5ىGKC3ch|b$Ѐ&v bh6p  LKhu;o6.1(ąHt  ))q4tś 󓜑AGsXR- q`4|&DQ jh:2(K|>DQ|zLф;a1`J#n ̓.]: -N bTJSS4Fg0Zkˡ2)a"ջveTTcb^2m}HqE9y4Ce: i 47uͪ-GIJK kD i<:::.O7i3vf_ʣsD=ϥ( V IQIPUEEQdUy'o J *$#A{ lSuAAI146w7+g]{n9+rUٿg7bH$bmvy{z 3R_OxL)!h'{|&k,=ul+G5MF@h45XƠ|FrXGGD{9N(=߸Jl4hD)$dcR6R =B{:e1lnjojȷ0h0.;5%GX0s,{˛ID-tb )Tf"vt3>+5q] 2n: C\n7~8aDKo961ЏnD dIt|Go&"*X vkx*ωtA63VDY3}e#=Mȶp ԍQ| *ȣث ՆtЂG=x<'tAQSTTTɊh]u\6|$`_/o=Fc]%ݝ [ 3s޾1Ut^߃{RdZKtZ!WM:!v^QLbݮJrhssyE,&v:f|v:œTd}Z1pxb 1a0[p;])UK11R±67`"31bT=觓:::'l8Ce'|6=?>M[yjc-Pի2nuW"(') "7^7m{[u:J)T@[%i<Oo%z!$8ou'hJ[%3/4ę] %6ř1tGpz=ٷu5y"eΪ\GWZq @TmvDo+&O92+vnF*Bɮnᝋ}އIi'Ddr`g)y.U1+an^4}a(θ3O z{Wx\ 74OXT4(MOӃ.$o3-u)Sn!1+¥9h4ͷ\_z;<5&OÀ)GSi獧df4s@PO@j1)>+v7> ɉ3aY/霏A>9#I9+V|D{lȟ8 bsUQkHo>a.Y28 kvcEBr9& M8`1^!im4];!^z!)z[_BSrU:͛ra7(vRm9҄:HI/8b<7^}l/Mc׺b-X#L:s容tV?Iܺ)mWϾv3#}Uh qx^1EOM81zԚ0Y"39kov8&(~B6" j#FUUC44MEﲄ '2IDPU*2i;/$REIF d AxU,]t) ErXGGGW/Jk` `iz/FAt]At QifeKu\GG"Jv[l^ /^CM.ĺկ /YgG4~g$C'U:C+ 
sݽqxNN壣sn!\)'xlibFn>0+Qˉ''ދKC?6CJN(`qtU#T-0uttt>m,\L4y$9: k7hɂֳs? =⋯pMHzoYCsɸ4!ttΐGq.?6'Gq+sm6=}XkEEd&3aEtX( 0ۅKӧO0vl6-']bႋfPTTĪލ}=A[<:LYr9+ osgQ{`OŽ.,1t6Wyj$JODHeFzi%%4< =6ߏ3)}2,F7qx$"5wKUT6S]+ބT$RVq UO 2Cqq3wݛZz{4u ~ ͭOog'z--TSݸjjZ{i,}GMN>Jضn  o(l-wV%磥rߨG0~ *z]Py\Զάj5ů}V`w{m^&#Q|wY"$!EE`IN%*&0 l#66d&%%YN(aqh'+J$K7jk 8u9w4w+~ŏChuhdoxj ΣdbC̾N~Y=G|13冡Bw?2?^^΄şgcWUy$Gky?pIaz9Y~̝Zj [|c(;PK{_*CdUю$Q\ǁJ&d1" n$p M:6D]Etv~)%Yh:9.OrJuiyx;1_w cd 08ʣ6Qcm,_DZB4#!DIy;u}\~Tr 2It= EZGD :;_*.{Wc!Fس~$2r EU0lpp;C ) \H#*&-fLrgn[>O@2Õ8X,79_# /ol5UyYwy<:dc.X{gF\%PDuĂYUM{qy gDlv(kԚ!yT60>ӬQћ+p>)z Ӣǖ0k|V<YxQK~3hi) Ft 4қc\1'%ʗp [ZCqF,:}INL^)Ps9hIiI wp9ٸq.ǃ<~:OLB44WnV͠8XOhjF*NLev^2OrHVm( 7]~/~o3tt G5ӣy6G|V^UK6lgڬT*CA+(3h+g\1+(kw1'7^++Y j&tf`SHݤpsM E =vhaO@%`6!#8UWDTR3NB}g.ƾ}ƪOL[~Ș}{M+8Aӓ1즧(=$IͭdHTSFFRϼ{l<]CFA_]9eG[3a<6Qf@0372 d禣:aH=Qb/;JJhmpmn3̰7@Gg/9ER{>rkz*Jk~Jjj(;I3f;HeÎ7VEjpM `up\8VpL@Goj!ΝhO@Xh6@F*bMFN(w؁qƎ^Tw|fzcci*h}~ HL.H*Z.3AbzGjsdI4Q,|姘uu;0ЇGp; : Gp_ g϶uB[s/Q9 ߷+|u+@|'x~ӑ@djBvZUMӏ@%_}u xwNrspV'/#&Ә7!ʊrkAǩFBz.1Bf͟Kbz>LogSn˫LMo׾qϏhB;9ꗮ,I<[Ke0vqq#֣I*dt9"_#SB fԩD#w6]99~g>!1HM~rbavUp"c;B!ңH~'9Fp󐓱ӧ0oB&UȒǁCAc#hvi;vNagDVM SPҤ!3QKO*zݥ3K`( T>C}AvycYWutUOfM`JfM L5"GG/l; hDpffϜuDh^ܶ8,ª-Xh6-c( sd$GZX2 {4k>79Ř۶);RInV2@ Ē$lZ5DJxĻw})R7pFoeMɿs^sr510HuشHA:67W흕,"Mw0c P^2:.aOb,G@ȋPddݏkL23 S?z,Ҭ\Cl+fsCs%+b Ϛ$"J2{nbƼkJW;#Df ^ࣩԉcٴ ]=$GE;fX:1Z5)0HE7G55s|8ʒPE+[60ut:;/fYDAh$v*c||b1Zg/o!7;֮^ $f66͚>9obS]GGU/fFSA $4]GH.!zs\ .<::,4 áW9 q^7ȓe?-Ј,$h&dsa+Th)aazI]Fu0:-K'A4^::Ou*ywE4$''Awy^DQh<݊!2"Ef~ٌ*#HdF>9JO@H`2x0  fdžW H2dAi||^ F8p^=AJfJ[z#Quttt9ׁũ|t|r=?bU;^2a/fS{-V9^=L7-κ]V8XG!}vrʧ_"&Ԏ =CJJ8gGda?C \w+Meko?r2$̼g);u|DXT6YORj d|K羗t/~z/75ql8m2M8<;vW[qV?Ɗ*#{nWv'}ˬعYlpO'߻풏q׃?e= o6<)etSda,喥F FgsEig׾ȿ^,CL IDAT:d.҈gh"tEvxJٷ{jfTA3󮤴%nVv0;/E0 Mb1[ldI,Z)eX];c]2W= o7> Ns&ד3JQSII,sl?tz} =md$!p\Q`?}W#-me#)ͥlq;n>;%@e)J;]}CIFEج]D"HHIG 7WcxySe\wRQĄr 1 .{Ԓ5a]&L(vrUy^^.Tt8r!( ^a$ ""A}zk>ɰ۟utttttt?8, ;xXD wQ44z('MšrHH 3ʆHJEm7EyϮb>jwM `x 2~قv A?bXUT4D͆j+("jL& "*~ƍ[%Y"q|_Պ$TDQP{]Xm6\?6J0hUQQ~$و`2c2ŀ]S+ bN,m8"UMQ^ FǾoۃ$hH~B0D4Q>| 
va.5(rX%w(h^G t^>&aD6rdڣ4]MFN$M#n+tOO7?_Vt m7-cK3qLbn`xqpXS'1/}[~(nx7^5wA+R3~A!?!O_, ?>zoLN1S+1&M0[sysx(yX+|X( բѱw_Om=N8#R웍|RT~xu&_^&I$#"4 F.p焂.$=. E0 Zu 6|^XAl֯Dym3u!9Ϯ5kI760.+^Lqlfm'̠R[]I6A_( !!6#g?I+h=L oHu3. {X8(Rr-&sQt]GQU6{ߍײfbмr˧56ĭILU{uMgÊR볻=6?>I&3RYAk;Y~x W4Py=D'k=_c͞ļHo}_1tU.Qt?Z&LdD$㎛<$ZsĘWțr20=(|=k7[nNz93;j.v+ED(b5s0k,9z[_R#sU+!L l"$4'(ofCE 7_:~ON?&"98<`K-ߣ*t"JnB2DZ'8{;gR4WXziϵH;V0wR9[Z/~}/IɌ|q{+KqN6]GW0c5-?1 ~%IɖmvL[Sk%5'.p5pM} ?JEC?)a) 6|,!)SSZE>D_WI3\a2]ZSNIvf<^͗ﷲg\vYrTZpUz"bkײu^J5Ć x]g?d@Mc)*h1 w" )PI4"E>rilF) ߰qc8I !a4ua'xZe E9~\t<]p$G**m4vrQDGkf``M `$HmrX$u$h:{H'bcq!5=^9q8>bBYq^6I+ Xhrgh*lDSwb&,ˈh(J>"Q (r GbJ2(#G ĦfK0 {^BplX1\[k7U>RRŌ8he%Q0 NP 9"(#qAfۉ1}(@rH ݝdI )Wr? ߇!4k4O0]NKs)7 >S=~b`Wє}^hǛޗJ x L$czAbB?$<"gbᣯARZńUo_fSzzDk1w:~9؍;GD~blhpp} vb 8| ֐Y0|=F@*rpϘKmUc μPU蔜QE!I}aT8z!+[< ?ak/Aѧ 463 L˕Kv,;Ch^/bG53iiL#w%.Nj>I qô)c}{M>P 2x5|lYL20&{ V*’s]42)@AFiFzZ*m4 (hs>fwIQEҩ+=FSm 3oIE``@ &&*5SSq71ӵ䏜aɪ9|3kZܩl[ƍQTvmOM]E)ϥz狒DE^EުyMGWw3eI9۸0-=$D[Ɏ%9%u)LayϝXH~Oߞ@/o Nvlʢ{X$eSSY_PWQ)}lvjĴ$Fs(;C7'fYqb@+A}O$D đ3L˳~IƓP_o̊7 ̬>j*ʩ 0>.oDVSll|d--,Z0_>.ҽ0Z-%(=pw⿿ D&m>Gn\Y#Ü2ngV/6h P8mAڅy~f+9$;Y'OdeE l3ӏfVV CTP!GoCʞ\t8BcBD茏k-H\Ł bO [BC'%qn2ctiS/XH;oGLl75y˙dGyM1'Mѻr4Ɏ:kUY{^KXqz#I qp5l_###l{čsv{<Y8B8YpN${zq? IDAT YPCg~SUiԂ}O4M@@;]={ヂ}@OfЀ=C^bCq :EQl0$]~FA$m,\]K;Y9hBOmByy<`IbhY(5`i(SW;2ET9́a>0ITxusSfPx+aOMZ̩nluk)ɹCWv|ٷMΟO~'z 2ŷ'yţ2ش0 l߶\fS-=Q)l|i7xx_R6v)JYL7 E{|;U;Ib\ C' j(##]~OY ""v@MLDhjs $F3BxEQ d2 PQ\IpTf^}!AUfwKFr\Tz.dU{e_&(؊$;<.q#+*VCe:::W cG x5`'/Uh!1&!m 8}Gǡj!h nC116B +"@v#(VzYb!XLM(bBDFFzEÈOJJuGFl:$C+ѣĤ˷쎸} {( KG Q<̗OeɀkXﻣ ܊ AM#Ol|nkF F~a7)h'*>O} ;}-W?J _#|d_u]i.A{<<ut@.3'U_\@ѹ0~3yZ. 
[utt9D=ToL Ek SDdΕ?baM'!$k 7?`'aQL*c3j[_t]GG1I"`3׻/Ӥ-*Hsg;rz/zs]˻w ;qyǵxpkq-Bwϣ\x #=#NG&%Z8vtgdC;ionD񻨩'˴wtQ|5ptMj}{-$+&=uĤEs9UP%Znj"r5POk['Ek['m=:р!zƪ4vǞ-)^|@wޑn##x<8Fl=PNyއvnxe /+ol-w5glذwW!{{:m*5ؼa7^~4לa=Tcϱ ?Sk}vTּ2':xͷ)ͺ2jé}}k 1~FxevkKٱpaz8vعq-%M}S{~¥EQF;[ 8T 7saD[G줺˾3[`LCyY+Py917Ա}!4Ԩ1}iZZ)L ԡёA8 Dbbs n.;G?;\uzq8T@@x ~mްƥ7^Aro-9l\E7x稯,%/7fz[HMBnzp{<䤧^RO&)ٵq- "w8*gڎbI<ࠢx'df;1$itBN{p]d#Mb9͉F]kʨmq`<^qNV.NJjgZ QU^2q aOeGX+$AFkȣPqYac*"62a32DJUk ?hH̪w\,EQ;{ YC=޵@$'L) -0'^IN牺ڒ>h UNS4EzU Ǎ{?P-ߺUttt>tAU1|aE Xy|iܞm(\deosBxΗog8~LnH]@eM[C9R/)EDa4Y=-Ɍ!,&L̚OeNYđXmF(9Sϋ Jٵ982neFɩ$Ea ˍgą-D"B-*롲="Q4@;_|0ęFB`SkE.]Xi?1)<>Bmg׭y` n >2Ȗ&5ʋk^ꛖc ya9Cٞ7ɓo\\N| )KӤśpmVȔ[%Ԩ5沃zco05JCEI%Cʝ~(p_MS,uV-wo=8; ƳP-X3+=[nJRd5gy(q xع .~K,lۺ[D, gl]+W+bF޵ bU?\fަ t9O_P X觧JѲED@=t[klD tA(}I\GG,П;ο5u'?seXD`WY76z i3sb PߞAb\fhwwc u Y9K#_ܡwjI 1"q ً tM{Ε8 aaazPvlS.z,gx\okr;5ĸgCO\\GGZ0۟ħ>oxtqs9(u_ţ˲,usE΅ }Uv=KGo c;"+ 33F텺}}9^:::W}z rWX9+=;(̌nj3GO2Qz|&QaaA k.ƅ;i"5:ch >z^dD>~]0^ފC̚%-$$ %3 &g/>X,)3h8> ̛?Fs"^{%"ϯ-o/F5(|>?YfO/#Ъtg )\[n#3:i0g#z'h>MO_gn+SRlb vꩲr|.lv+k&[^{18F_:uo},ݻl}m8>yqܸ''r@P,4VP43Poeྥ'+*K[M'\ fq[fD]οyRutt'FQ3;tt5wYh4^/pI֡>' 4|]$Zɺ{Xtuͬ}k3ԒW8,zژ={&ф'نd2Ivn6~>h#d͘EřriTE%"3BeWuNc,N枥d n8OۗS, `N}e%Oh7259mVz$B@@PL]"vUfbZ,6K)VQmD ݕ$ IݻT2mJΤCD|ҘEۿYԼOsLq҈$-'((M`EŹΤNM&;Է91LHόR>&3GɄh0!k~'@sk?&iB:{s Pq^YFm߉8,kFh6PȆ5/SQvg8+D67d2q`6퇫'nbPY³(<>UCSv|E;`DZJ4n7_􌹾=ho[xgKiv\pҺt4Vq>O렏wRr/945lzq;.{0%u}<ͥT7v }!A|[ߥš9:p-$$e '!:D|2F H 21s\Tx>}[>rV#h55#6Y7*0DQvho"pژ{w""`YD>vbS(mq05) 4D ׏18祶KvB& @dPTp:c0^$ID ¶ukYz]]=tˡGYs>($Adkh2u:w$˗݀hc4A4 BSPճ#Mc:#L0Z$F\ϖ[*(F`u&JL$}~NdJv'>2;>e)?TZ@)3f0<4+:!"w12!kQ' +Ԯ2##$<sk> r@a@UɴiJ !&[~"%2A LHp fgq>3v9am.{=dd?Dd@SK' D'PcoDFEֵ"J^ xcŕeRc`"'1-'wN)l@4 D^Pf9ˮpRTS{4!B4Fef$si1?_v2 bȦ8w9^_d8msЙ,89YyOyuyTuR g4GښJJ%^:{hw( S r$Oߧ=@0ůO~jF1# KCRYBK.vl`)l߿i Wr4hC{p45+ atwGC*L]UwX-).#'+nMյ2;Y$ei@Pmfs)$p=` '6܈, fJ賨98TLeH =6`4ٴQ(3o5ڄ-؄f##Hd&,b!%9YiV1%NBGrinXu3˪ J /ý(B "{L4m|CӬe /QQu8%MΛŲ~O=KpAÃ$\Yut@AfM9Ovpӫ/&{Jbgh.bRpvO3ZJQaܴrU]I&"]% ْ-N@ '%V>MUw2P #Kn f1eyr[gR-sIɁ5LLݧ##\&)+|G|;7| l,#tG170u/͙&>q 
dA!f?OϽBX* R,q?3{o]x$MMieC2SÌëZHpjwȒU7eS0 Y=/R̒,ztӅ&'%6#YD<^Xն)VɈhvV  $! 01_F=<.'Δcc|~?Q@ h|V H Qϣ G@44Uh20h <HMSu+@v L&d@Ud,3GL Gga @SP)hhM"oD@Q1Hقb6f>/ |f AS+AEH(Ț0:AR5$Q`1!a IDAT f~BQ$ƼG[Pe?FMA@2~DɈ*賈D ݱ ks= tDZ 2F?=Rx _fF9𸽤FqcdRl2g]}y/U\YK@KǧsF@vZ5/]a4[Х s@:>0YAj '">_وsx[H(6 ]eE4!Ls9״ѕ z Ib9rͧ;__ a^1h_q] &>!tvQ?n4JlXxDVrl$<::W.}ZR7[H|T좩{t"=MmDS{Jonxh[ a^}D%P[VOxiYIXTa3L?y^xm"Cܲr_ync9re/$zZz9դܛVvenxyZhrI ŗbaQ2=J$ZO9bTZN$;-\ʻDRgЩ iᤵv,J^Z;#V/+ЯU&L LG>[uttW;$uttNt%>N$IdƼiT>CmU5.# IIX vz 4RvȝKR]VJei)-#AXc` $_W<&j l(+[fT/EZʎPB@/S)Ȍmlql>ATFq,&Nh;0أ?p⋛BٴZ==oR !2>WHp}W8יx<>⭣ϋ.fA׮ dA{ ї& >-i @+ZIЦwE9[d9d09^T4@ iegϘ$.h ABS4sJi=ddEFvO" Wǚ ,,FFF&]|+ Whtt&݂~[ut=R1 ?F( "(!J"(dazhI(0z8zlhl.Lj$I8tA٪a80H"$6$iL\9ث}zݳ(IDA@ g?iƱps}qш*q.%bAVq>:::: h>޳.4vc<#C<pؙ1hhM7׼_Uy陿ͻ'6:.h[47wIMֿw -+); &nfў>[8lo[ p9*:@P{e~#=M4EիWGG] UPr mM]Jj,$#3@TϿߟ$=av(OWܢ.|㧼U>BNn8D\TOrnt(ʊՍ۹"NVd&㥩7A u<4*x`?yv7?~|bM27^aټo,16=9y7fz/Ʊ(N 8T܁X'k(S {˜C_ 3|,)Je~ԛܕϟɶ!?uxK ]1yoQۅe~G'_x,QATA; ts .! 3w`4Q[]A̎n?/SmP|߿*$EnqG ]\_ș*d U踄wqQI~N:Cc.ܶ*^dтpѴ5P^Q,N3-K{i|ͧZHbG`VHH#1_@t,H>+QAt|yaݎX:MX"A&͍-_YGOUs x]nv:#zNU|EWJZZC#2xz_WlcИ!CڞfNNU}d%вE gx ־LӡC"GtE}h[Fώ(JK0Cc9UҎjGhÉ1:VaCxkg-3bzUhzkc}ؓVuQ܏z3xA#v# jm`Hs{Cپ89i1,_⪮L?PDcC=~mDRsQG^a;Vw7jA[,3o}ѹ`~A8(Ҙ/`6xt4Pnf% ōE dPVr9xriqbL<ġ2g>I+ƻ}G:::cF T}uuX&N4D\ÅOa6-_HUc?F  ̊\to9VP𺜴P^ZBc#O݉Mrb c ;q QDMu%n{сCm%("oǶ={,6"V^J^{vlDec+#Gr.(S7³dlٵ8r* ]feEl0Y>Z+R|$O NSv αzrc&ޅj &2dl?[a»%(htE7A`#!:APijf H3Hm[D8YWc1l);50"#°F`Z@dT$Q11nCcz bPHMS$h h*"&xkUU/^-xw6*|1iHĮ0[m v2׉$ Q͍K4564/- )e&ڹ'+6E8ѹ zjr ёId1mZ61фNn~Sf=JJY|aFABXս:{)hi w.b׾2GPCb J!9hlްxgSٺ(BR F,G 3lNn8̽a&Xv =b2ȄDvZ{Y 3Rp3<۾gf+v]`{BIX-˖Klcq7Sqwɑ)6`/ ;aA!B$ȹKZ9;{̜;IV9vNby$8y'ԲOy3^|'o ?jW=~ErdV޼  qav-ܲ` !O?TqPO`2h&Яg> "н^3hdzK+\XCCJ1UL."EQDF$G7@gUgffjZC:Bw5cH$.v] mf6)AO?>? 
ؓ kQCE3h}q].pr}C<޲CuzZ?G}ۿ=:Z<=x Q[{jhhhgzż+v<,caT\2pW@&"V=m?pO5l},_>?~fNٻ6,b׃!Ɋ?>Fє?Ǣ{C]ǵsO⊥u6&q9nv_KЌ5z[L%λ67-R+pEv꣔gfS Je=}?@-iD/?86lËȌfڭdLYDq7MlZ> +βkk%ϓO>ɴiө/ S*H&un@mX^ҳs.455{nfL+?oaTMv׿ΦM8q _m.t42uM?o t6Ȯ"QA:==9.+\ؾ$`~pC}ob6Jř˖9qpٌl*K[ZIOחϿڋ cavVLK;w~^}F Qx_31>d D3R }zMṸֿ̘7ի{]jOE9N8W&dOb>rO~AՌ#҇r:zm1eX7spHЅE{'qZ`]'']vW'16Hy)<,yeo؜l_"哧]3~{w쥵Ӧ\`ML%~ߏ0gWX[e߽֚ əx]ܹmvQ_#4֟ѓ ).)MNi- IidZ@ x9xT+gB$[>hOs qY]vZn݊( #f]d1'|EL_MRc'7.b2?͡C9kc lW3yKx⯯j>5XK/o}e˖˯@Aq55R}UwsZ&G߾;F|y=jNjT )¨&?$Pr CRQ^|3 s:~Х$R58=Mn!#|'S׫cR:wU'r)v~V4/ב(ݙKۂSS^E$ :Dgūk94zΠw>c4#KsSd$fRVMFzi9%PCA W1s*"*KsH%-BJc> Y6w2&DQ"5٬-P?5#՞g͞$@QX4w\efLQ^E$CM$(s=w1,F?W9 X%r̬bIz]y#ffZYn#f&bu}aKJt7}NN($FLkvrY r()꠿Z;1tX%IADEEAAo$@ŷ:F;pH=A{xjh:p{N{(ʴ#+*!ēkY2G[lٴ[v!oJ` `3/n"M= XU?>97wpgA6{q-S'/-h8TCw@KdBQUBVzII O?4~}k!7m/p=j |"v Xr}ݵ 8)@MMM,?ݵG6:< %bOtRQ^L= jn԰`4){ V=ȧ>q<:kEdrOE??'^ΜEKشB.R0!Sjׯ滖N{:d y$fS^^IIG?U \^zNqUv4aLcK>}ֲlx%\ٲ,ׇ-Łd4!tH"1EQF¬>'o2']SY>W̚# 1,q+01ՀNKc2kjAw^%&1DY!S0=+0^On"&=O?"_t?f3fgmfΙͩ{QHA)<a;Ռ3o~KY{/G=w&'7S1s!O1(j7&[5C/9޺O@Gǟ=sPD Q*44ŠWܒ V㦉4wH$'-AF ST )Su@%9Ndm|%DbUkZ8&t@ @,{s!˩ ݞI+U0ۀ znzuZtUUZih\5A>]u³UNa/~YO!DFDxi&;R7Ȳy8p>%V?[WI4-Zrא@ OKXv M_]tzS-dZz:@Ú^x|$sE)ti ב`~n/yYIdB $Ԡ`,/'}@_3ԝl_|_~_̝sf|{76@w3 zP@D@V-9'S [FS@,VL5MgJVfA Gw,Invb 􈻗yïS4&?IjtJpI80lJ Dq1\HY,+z4',F<w:he.^n hEmoX4:4/yEؓSPetUc1t:=  GfV:vZHưT y9ںU"6'֮YϢiE,>r$ h4:j"򣌦 Fdp _(7q" #1\@O0JLQaUc<N Lεm2E5=mѣ%eYOcN@8zzB8 ~)ab=j" zIXucKEIpfPwpDH=> f2|lXzfQ]_ț0jn>{}[8~D`:a| IDAT:!aza|Q~Ux_ N|tUEtx@k!Ѥc &)YF{4Ge\((n?Nͬ#ɬǬz Eo(F;H`C-nuyC# |oj /ٷzݴ܅b0[X5xMm:_+. NQd>qN`{3Oi7cn<Ƃ[Feo`DڇH,7IXxגqe:W@_If?#UjL[UEE(otvV^BLVo 'Yv3Fjr61,j[8-0S/On/! ȊJZ\d3E DAbԡDQC,_b\l1hp cA:HUT޹]&А1tEQU6`2ttvN oI 66ֶPYQIRQCn|9C g9^[&dR0?Q.6H(#W=QrX++/>ϴ%wQfC9pT}Mx#z7hR*ֽQ3{tmbl%%;{hnՈ=Gq|k6pJ6;3(rjYl)-ͧ H|$Q`3ev 6^Ũ;chi8WL4#K$McF3I!HD4à0%0(`2B:X$=q&S R5 +Mzxo~PќbJ(*tux8LA.WܩJ>>-B郏8Ee1pT\S^Nuy9G@$9wEUQkLQY(rX)tZI6cЉu]|% Fb?aoS? D?pcRxa^W^o޸4tp:PT=)F" oli@GUQ-+ȿƍ=wg^ȂG^ #8wp(FJjKUngܭp[ަ !X9 9( !]|n@4`4 Y! 
qx2:Bh ^+A* $ otȱ(C7d}&5> QbX) L]̓owH_{s$s%0 ^z *HQ#9xcm:\ ~ފq/EN+yv#u 4Wl,P_r̘3ގ}œ8>@٤6]gtO>GPUEEwb_ \.mVи}ު(pGYƞNdwq=0E͉[@8Eh^_J.և ?ƽKg }b^{qf>x|y٧rj %}O|L/q"rd6`ټbu Sdzro3ZNYɉw=r?o}SR6aP|ʯ<_C~s-B)sPӱ5:YF&ư%L!/m;o?r8]h i$ۊ`O#QTCkSww;{:(.)4p=~QqPbP.\A^_>_nwno~_^oO(,mnn] %Q@Q=۞sȊMeN>>=Q7>tBtyBxtBBtBxB w\K- pL[!u+(o ]$'NI## ^rb 3o,&%\_|g\qMnz֓S8NWR.06LJp j[CJpYuNօ}F1`oWRZ1)Jݑ@LFIBm?H%`"S y<ִڼﺋ]dZ%ZX' zz=aq 3^xh FErxdvNWNW6W>oNwPH@$t?%G}8- ݈p8hA󻌚@и@ք@Uh $ppr| F3Pь Q%#&c<^. FQU@gDRdH dD UС2PX`0Q!:Tɀ (P|S %!@0lF F/,)H tCK,VXX^gw*53uh9peUXl$Mɳa?3&o!V2sRI*`OEɵx{8"Q0ɖ]Uί@\5&qjϲK Wx|k=;DsM_k:f8+Sn2-#%ш p<)Hsb2JS Y I'DcKϧt3)FJ30ǿ]=5>HXgٻa+?ݛYt-n[Oܛi?̹w0gn?#f!~=AYp{y)GW_v &g7"eNNxM\WT(.9ja8dli*!W/W_\]. $"T9^6b~߻; "J>I!˗ή,HT-:D,nK^o@E$ eb54>DE!d!*.wEĈ|7aO#!$ddAAT9p̣ۅ51KBϏd& K2$<\FSt>"D1%x+&+C[Oe1GkG/5~e{sO^ o6·H:3SZOEE>tF#rR8'w&P (j*623%ؑj9-gm1'1//LYJb8(U 2~l¬iԩ\~ 1ݴܷ4}xͭ豛W?3rOE)gj\~4T`Mm)ӱ7c-6ex[(2ʒ12jIBWmwSoo|V6̊Wv*n]vJ(m'ffˀ!4J&DQz#$vLJ&7O/d}9smdXfg~*g.f=3oښL:?s#aRa*7Y!QZTgrp.Ja7s]ٶi߳[{AO1PO?ESo1ԏQYzfm׸PE=#)-)ߍf o#ֆ Yg *i=!%jN*j - \+7>p (hC5 aXG(Md PLVL{EfHtn n+Dv^6Fse/ Q'7]sl "';^ņFg&fD1[ԙt: NZYru;I@S%vc=DӁh}v{((+ՌŚ$ )fx0%cMǹY嘌5𫿂JƕOde6a_b|Zm N͏M7}d'wbI ?@RnNqf%Ids;~[T`'֬t^avմa7N4%o |~X8dodk>fM8aLýd%m &2c2c~U>/.¢$UA{1Z#;1a_6.?"Mk\zв_7Ԃ~8TßbGy'ion Ep ys؈))eƠfAh}4렏fۂ FՂ>#?G-ЇՎ%uX xHH&5>[&QǼ?Ǹld]Ìs9TsGD>s*Ucϭ$8ЊٖFYQ Rw=H7.,&\Arssپeʗ}F2rb +z.?B;͌bd,>A#.bи*+?;Xrǝ,iAWK l:P?㦖nh?Io )*q%%Tƈֻ=oJN#8M2ɉ:|>9ƭMzB;7`KaHc8R20ej!0Se$ٓɳ:OOd{M't+dJ<ڃ>Ntq$V_UnW7~!?WB-( Z3 1ykއ( AP444FKEZMϛLnF:d K˙8u*sUwzxO4qmƗŏ~z|>n7}\.:;'@2ZMkK'^Azz=tvtG(#wQ:L~o^~oz/<|=?޿oDmzwttMa'G~o<]Mh{|ZT;;qݸ\^zi;wpC>Q}MNZOgz@>zYCM0n+w4w3>]1#)IZzEe[8 AI3sz&WAѼv~7"b(^~ÁrrsHIw[IN0MYԵI{hb2$ W{#DU(9٭gC@]SOөT).. oQ`8( \\XW}B(죵?(hNW{mwbDٓH00 $Xl6ILLINA l3xgׯoOӯ Mݙ $ 16}(NWO8ZMN GٻzCK82OKw)nr|7GtRVRJ_fL)|1z#R(7o wgf%@ȒLe[A&ф 6_m q/?]r69IՆ5@jm= t_3`Le8LtkOEwr4eļ}l0Ƅ)Ezc4ᑳd4 hnpQV)@K4O|c!;N[(z+E2-5݊dbj Iy9 HfjecD"GDU4%ɓ'[DJJEL?B'vr*LUτ 2*^@f[2Yg " \v`׸LEy{w .uʞfJHbLϼYA/^M]Ɛdyל i7]\|[N A,ϟZMIF"? 
ӓyd#5;˲j_wuڃW=ɂ5FRJ:iiv2-FqNŖwTF*Jp$[%Zڝ:0 3fw7WQm'A%C!! { YSX>syر}DO$w: Nrio`撻MI )vmX@DǣJB5=ÂSGtߙ+Xb'%2fݙA`SŸ, JΰEATdH"9Ɉ& +43i$LDZz&HRȚd4g))0sw({oo70WSG#T +7'.YVEe(\Qx󮮊 ~t Ѯ:}6"" *FEQt44Bj6$?I3eJFbbF"45!-##~ ^e$^LAm;Wmhh]y &;HDcs`٘/op۴rj2Β8|Mɤ 8MbVc gHO E<À#tA@'3 `4"\U/Z]AƭEyq100\w+J-' `P1I¯+7`D5|鼸 iD b`L4_"tr n/t݇<_b?G^ WM~'KXq‡.nONV/JfKR$-)nAPDrh?_Eȹ 3ƙ;.Fff&5>4]tUEċ7Z[)g͘Gg\ܯsxκҌ9 BQ}e76?44444nhXgZ)Oޠ'뉄":lN$IdF( | gE' TNLC zaܚEDGO1ar%b-M:nJ,w&J*$ĭq:+_~ 較S {hkmT* ubh\C-qzFGt\ĐDY oX-]<4wuOQZNƤ~uDn$QqG&љLfr2nC$6h"1ىj@^[]KU D=zH^nu`|PPw' zCwanLNh3S4[>XA Kj=:rz}RZh2ȷhVK⭣zab$q?ʣc;'",2j xyt-D%+)Y<8'[s!-}GKC]ؓL!++.f;f[0b쥰GHIa1 zxcoϫ)3e|/ӝ3,'f03R6u1u̒"':HK;fv{wQJm㱘u#]Bd'܉]#I]bcBI:YhART^ރG1ѝ'k618GIy9YYvo]/%HI~261ukZyl9D([C{qǭXz-7/ xE̙DG[̺ja-̘9D嗜ih!\} !fS5wdS4L:v~<1i vTرd>߹Zćы%6=PFϸK٭?-]+QALρܾ`8K''+T&@qziIt!Hf^utH0Vp8j!i"J$$~?SݠjJ$! 6HWv`& bFss y +fi(ՊiD䆎8c5M=a2GEC7yK8FXDجLD033hF$rsacZwUEM}q3p2Co<ϴ^8zn.gؔ._%]E?}4\5tu<8&u{?p-w)mke.Tg. VIgazt/5P#q+nN̍&p1f?w͟ _ֺ_ Љȉː%!Όsp䋣5t8DG~|s_*t<;Wǟ^?(4D"c:bnmm)7\8}Y`Ʀ@N&J''BYb#e`_oRSNEWrX8z蛿[%@oHnJ { {\q=X~9ݼ%ױ& ىA+VɬEBA7韬mU.aUiЛRcā,]lE0yYw{?tK~A Uvh lvahZoޜn2R\qaX9!oȎy?Ocx;X,)AlKQ<HMq3d%D40BT6`hraj(Qql߱3WtɇÝ@}U hĘ,h8>!R%O)Xǟ.##)|$ϟCڈDvڥ 3rb[y[˰yP0 ;y+\õK>Ft421@Q\v.[^Y s~B߶&kHJOLȲ&/ҙ K61l-=h⤬R&&D^z<I\b /e%n"ш|$e3ݵńٚ;Yj%#Kdu% QQQR3-l|֚n{E< s<$ZC8TM ;)FŐ+o.!-=Z{)qRkPCA/uK}0lA1NŞF‚;ZHKN?'#AT+{v&`#t0pnfjm1葁5V ؾ| xKtO?O~>g5@1z,jS3cw]NΓmyJ~J 'í{iIMQ5SǐBW>gu7#aŠ _π޹8p*n3. 
;3pl:7RbOO@m@M#&!=lv:vW\2.suMĤ:|Uҽ0uT!sks5cy|=c q.< U\9/gz(tK#dGtsj231n+@<3h+{$F^FfaTr ,eہ/Wb߱ w O~tJ KVco{Yv˶+L@FAWޞUɞѤ&'?c!:tvg4.6} "*-;s~yf}4\b.Mxb\cD#A&J/BFZK+锞DuyX~#nz8]4BʷN%+&OHȞΣ^K8c&ŷA0l*hS` E0v84$6e<01 } à`0x^KʧrbZ7jߛx%7~Miy!f;wGvmy&i{ݦVЩҬ]'ӔfWULtbn)M| FK34kLcx>g_U)q6+IIfӎt؇l޼}ohG&:OxWRq4k_*V LJ4k;cb#-3^}yRҬcYSU]]EQD"fMӴvY՟;DHuc&c* ڱ |11ZL|՚['%Gf+cKčr}3z4صyYE04=4L#[K-x*dPX_(eJ {)ۂ#cu3x|o/-,EcFQU_K]pӨDt&!Q itѣ<)hSޑ=3ѶrOUW-_Kȕ⥻NI&;'[(ypL&ՓCUjhR /p@0?&Ps41BU;:9b`ӧ )S;q8e[3ov-ׂ '6~ Î{'o t5=b"6i(2]"|=a8Ϝ2@_\'Ɲ؋$bv|x;u<(~iBz%'(JN[}ƣGVG=ȉ񻲡~-qJS(;|/]QXW=E߿7#YNQPPYqڪZ y;LnBgEpH=(#H .@ƛ(3;u 2`p̺]|;U{AT%ï~}8?NUoE⠩\5<'W^9f.Yqs˜siܵw>WG?|!?r-wж4k@Mj C`T~1|W^|\͛6Q4(RI].vKm1+E36x,kgBltȢ6*u;sq?ݏ{T`qFe6|W&1xE`(^62Zpo ]/{wLhuVg,/\s$6DN 33bOh |Uehk`}f>?}avԇ(mC¶uhɶ0sgm3YVѦuOFc9vϢAcpstɥcdyk]1(nfFD>^Y$uCdY\;v ӧd Sq҉qW=z*4'u{ao: ;ȴT
+ª PW>.Kbnuդf~Nj}-d&#$Yܛy^\EKQQ0LPU9ё)aE=6;ͳp;**}4aL@3"GUH.%EUю4D;+:owE%яt:1sS&O"jNY11#XmJi(F(suYαQgqoG=c߆ vzH$"w.0/P6L[N[,['s&g9OrgPVoW|6B9JSZcaߓI$ X/ԉQg݋}٧ ќn ([Ѱ0?c*!υLLbȚ5IiTCYg=lc^~#3ypqB{DʱDyH."!-^f# 3xN#i#|J9ESuIQIρ0 iIX,̘kg!/:KItRR'e:Z[l,o)Sc[rwij=l>z{K{>AoPXwUm:h zc q6&) -'6->{5V`2Ys~6gΖZ%ѰL'űy\zOrKWo™g6@a],8s'}q'̵KoLک5+#2ѧ%)^HL^?}Q9VFŌA3Z~x/n_P;7ᖮ|m;IJHPARV6~]7_M;퟿BMT9)TýhXyߕ@(&&F*Aq^<-;McwNiYTyp+IRB$sY Vډ]<ݯ~+:0i' h>tEU1ͣ][L@Ut@ SAU @=S]|.ڟA?;ǦA*.NBIc躉N ݯ -wߥ[߿Y!Bvpi^ב:>~ۛsuPow//jl役+޿ xoH q z{vQH[=H !Ĕ8*뽤DǢ:!! !f)ƀH2_z<=ޝgo  li6t@SWInuW8a4}{{\#Еn&>oDoBwwdکwnghS^pǃ?d⏨"hÙ'wنj0]65$sƫ w;&?w- Jwl@Mv>?Htl 9nbדiwz+3_ӨojB%eswxsݤx#RQBfPK&CDO=YP@ca&Z~v/t'~<~ΊkY1ox{1L87^FWٟf_U>yū ?}+GYt/(sۋ]KƁq42̠T5)Э M|;Q9IM¢*\wDB@T8^ R⣃h\qI3K'jSNWrBuUDcINjі{ODFU>c-8ݱdu-SZ*Q$y>ѷOO<Hn% >sSZU'OOM-^{%}>V-KYr%7`އ1k>4Bֲ@% c r«o`I ngwI%M{ I}lYJ0/㟠(}qQF++TB%Sxם .V9RveESH5<9+ۥK&]Ρ@TXXOԮ/o8EԗPRR0 U=H8洱ij=tM:leW7w7ҤWxc4vz?91Tl[v8kjXg`D.9y?]qu+n/ݫGv||ߺA&ޮ65mi9x.Vf;j[wm8DgG5p9a(*ϥ21MP-6{m^s5))ф8.87G9:ydX$f.Z2]qim~jՑiŢ||rmfǠ5纂*e-Iǽ !BA"q!8={ٰf-}AR#'˨;9@$DGX756 iA9ƳD]c uQBhh/gXl噿4e|Bij |{3FH69줏79(-zz?_ک;BsjAHDG,`TTEB[J8ajEPPMpD1 &PA0};wk,(&lv'^fߜ7\ɡ) rQ$6q;n 2ZjVI{K/aql]5S[?g;+ؾjp4tS= z(H_ML|:bXl6Tבiޛ2kx,Vq X-.yz gTԻb'e{ÔT51yhOPrtE_;%U6rԞ<ߞfZLWW# ᅮGS3paK浨OJ8U]Ʒ !h4Oq;ȖŌfa? 
{!D w | ]?Hrϑ9#:Wާ05mNzyI3ӧqkeAUU.]D8uAm+\qYֱIla֭Byc,˶"sLZ{һ3ixof.[>bs=ZYf~<p=ElRޝ*=TȔؿt?^M%ۮa1l]*#^ʺ ;PG/s<܍L` >cϾ⶯`%xwe9^=7_|7"93t2^~ӱ pɻx.2byw:ƱR*7fͮc%(DB!D18gWP3=)Nf_^|GPU%7V~8G0wVɦi;46n!(^ޥQ¸FJEu*=oF~p~z#tCcf%8?wvS=EwЬ]\ue9XJ09D/*fd(*};Β.28ޝEN~IlV+ٿm'o|>5匾'~8^2]${b9LQ~'2 cҨds>M<:~[]NAS޵w=Joݨ~}{3Ȏh<\;6mv#gu*Qqv \w׿pGb+2n|upy1_gj 7 {Ya;X\ ӽ$8BAƠ q|)bEihcJ?Unx9T8V/BL09zJ-h,7UD8E5 Ғ2:u$d yٲi}@Q@7 ,,^!a`7ߧpdbdP!Å躉Iv:eiн[)o.b"UT0)#i7(]䛯=k2 ݩ-^69_\sþk}uGn_AiԼWƮX5'Uc3 gl$>JEi@:];XGH^ o֋Jnk&ݳ}aV2ܳF`@Ӣ < X1 vCnn'vGAWLà3MӤ"Hߡ]1M ΅Bqn9e2SҨFZT=F .)[ğ塻'`4J*XGet̔4]q2'tq7TpZMª] Ba(4CPrK4T[Xd&_!\nX0 SPXmFs @XЏjZ#Hw!%W$lsru-ÌE=M=S+*=Dva/惷aq\sɌ\4 eZٳ%6][>| yɭ~t+#,φU~ʘViԘɌҫ]#tkюvf$ƶ% FÁrn2 ʎ5KI= &]!__i= ~ -.ޠst|;JOgH8)ƒUN`~2l^Whe1JѷW^LLr ).܃JӤKg%YjiBwa{PS@Lڅj{I9ÈkQ?֮ZIEU 63?#5a%TUr(*L||7=ؿdpt\͓D;H. {7J`;ו Dگ?h^ &_:e m нa/_||:me&ky|/乷B!4BF$9R-i׾w:oA/(]B|4B!B!BqB!B!B!B!]!B!]!B! !B! !B!Bt!B!Bt!B!BH.B!BH.B!B ЅB!B ЅB!B!B!B!B!B!$@B!B!$@B!B!B!B!B!B!]!B! ,RB!\pHE!:l-yX$fBtlw :4n~皵kq:RB/-ZX4>}:6MjOѡYvB igAQ+20Z48 u]ΗB 8ITqiܥBtxHQ!΃]ڦB 6@?c4M TUɹ5 ~L4 6L$BSS_a,Dt멪'&0XƦܒiWiTI!8mSbBMu5i#Z(&|W0-{LSC-^[Lp8>B3pҞH107njn\*JŢaǝ4;:劢`&l"%l7ݦIs=rs9sh  (  !Z'pzbGI9z2o*6 fw(u y IDAT#̪u(C Gr8>w&]n&y~ipA$؈DF$&*:.œÓhoS@nr !Jt:6MJEB@L;* cKQ]ۈf#&Gߴ:w*!j${8P[CkotbM\QVkINJ M#@'#տ!⧶+mS!ę M㍸MUXbc+^±IUѫ[[vp齿Xg,hܻ4u8s ('o;#Sfzz%xy筼KTp:s{9ۓ>frٲBs"@? (NCm {# WB7L4Tu!p>&qn'}/ Aڵ1bH/ⲻcO8Ÿ={Vm h?ʪl ٴx"IVJϽ+1SVAqڜ'HS%:;fNu]5az2vm$0(SI`Ӽӛ3 F]wK_3|<"Ң2(>>]ͧLc QUHrkhkIqzϡ'A?j Wr0ꢞls01L$Ua'/ywM!}9ARS?ah6R*jLEuB ʷ=1Ϩ<+wnϙ8W0 ٽs b:VmU1 ]{8e+>'b(I<{+%q (vs Ͻݷ[+ej^‡}^GUއGjbA:@?HQX4kjA7 kV5IO#/˃®AÇrѴOL 3 g|*bXݝL$w}mb<ع"sOzҿ^#œ4vnX˟^~ۮ хq'A5( 4P[VIb妴[ז=P74JG.$On=d[SoYovk/[`QUINw]:_ !B=ʒ4wjcT͂Q.<\R)ٱ0hjs[v,*{迸c[}-bRYi0wL~`  _D":cTʊm%f :bia3b#wMUP-V g&=ú2cnU؜a =3ʁGS p(fC;vM)؈F4aV"a]&Bje͚5={l$EQ_MNx˓a~1W4:q8Bo>٥K\.),6Tt ~/r$ׁpf=|y4@P Hy#7qtd}]]r> 5x#5r&.͚;6>_W[<ϒNqi֎-\զ<'Y;mFc\ Bέ_2 Сw)\vKQ!zeȎZA]]CjOѡYvҍ\qAׯyiTËnr&B!B!٣J!B! 
!B!Bt!B!`*BoNh&- !ڃ444\ژ,i q^n8GCC_A111-qOqa;*{ߙ{H $@)"}]uײֺڻbGE@(v^$@ :3?^kxsg;sϜsvvzOy-^@*"!!AI]pb5] |/ 1{.B ] @p fotZY@ 8 tلl}v qLֶl޼{~z5sB~|-jZxTWF t TX>@:TW#,}`JR@eIs栮Ӎ^C.'2:ĜAy|fO?8=/Z39]U.k\92t5yya\+tZUg#vkԶ|o`Ox~{{7o||Nxxxѧ QMSAS+c00 &# ?߀9()2Y&ɈN0 XL F#JFF:^|@mnfq}Oտ;f|u/W|Gf>t.fno"%U8%Rmǟi_;b I8,Âi)l $oFƁF#޹4oMT'8:g]1nB4:Tlf T#A36/%V3ry-&6"*btF#MNNBbУ {U-b,TbvCu4VaZk'faoN6^Bh];KH) ;w&LRL8ET48 Cy(S_9"wc% Nb"B-{YN'Ȭs! uOrZ֬Ɣ s^OAnB:Vu登;/Bm~Ʈf2pԱJKvkoq1t:l1X*`wK۾}<]|Oa$f.3McAdH%7\{Q39o/ᩧ`X,]ʐXt9W-ܮ,;f$ki`{2h` cEYh鄘s0ld= ҫtYhuhhʩ':,%_}Izzّٛ_G2t( wPPV;~H>|`aR3h2_-xCG'hٯa2h+IEbK/7FūP[ش2*E!p^!-GaD0]D䐳Qr]A!Dmw_dwq:i W^a;ބ:w)>/`lܑ .\W!!Ov菉8Yٸ3U[P]Ɩie7JC^>Y7?ڊxbZ)zML.:-lY U]ŧǛyc7}>W+Wk2wbo?vnO}6.çzjv@GA_ߚ3/ヅ̙)ٻ_.՗gM1o.ϽۗPH8{W>(~ ^q ?Z O>wgOfzYf慸ރ҇B d(OQ7܂[Z MT4Ԯ6lQ5aVthf 1Lx>~s^t4hbSٜ[FEE&#{!:m8<>XWT3{+ع'^|PSOl^_O^]zv.`ޜ[|[)m5Ѓ2˯ؽVY8j?[ >^ !q P{;xX¾sU9{h:#~K!ڲe& +~:FlT(1q47cyyx홧s|5.Bˬ*ZKϳM_hUAΆ~ӑd'o Y[7_'&"LF0vK3xTݷ.,#[)|{gIbJ/؉"gU_eîmva5;nMj1^iص Gc2DxtW4Bl|<{E̢uꉟ->a$v諒/W\ysVO?[klJm_/|?o_|JzZ2Vm!;D/O>p5b hUHwr'UH]nw|rV7Kuy~li޷>"Te>ì\u?7YvhgPʒ1?q<:t}y^qΘoɩaH˦ _!56tJBL;l>TkJI1ijsr x Fu)W?{>1ڗdś,Xzdh6nBN$Ib[)}X[e;BZR<^}C2'aU9.F$amĀrwfJR2%U-v6AS)ϯg;o3f_M^yfgʓn߷;nf3wY.wZ0&)ɒͥ/c.> 8+S ɧ] ǞZO(7D$3,3_ F_4MEE`0Tkcp *(c@ <9&60u-mv () ,Z 7<#$Dјu{ư=kdafaQx[_f[ }Q* ;XC`8#q~ 6 ǝ݀Wgd0| 7 FQT z^G$ޓaLHTΡظ$%GlԸћ 4  2k0=MITWOi?>֗S ը#)>ѣ!)) MO}i}0 /zBd"dPn┞otX&! 
q| 5W\AoܩHMQɌ;L0\4?I੬ 2" MOn(O:TУT [ jMmy}Sm'2o@FWne0p䐸EɌ-TM#::߇SO5Ѱc.Ns{3Hd_.n%̤8,IԵ؝>-к6(`doM%,,UQ0 kΎ&^}y6,[^}=^܀hhh%!&p>4 Lv6T#:6*V94gϼ/yc8RO_}687IުE<˴8܀DY{<4PFq62&šF$ ,Tbcc0&sVL\P4 )>Mc4z!LxX;۠=Cj=9,o4.\CӸ3)NtZ%F:n'3 _]e=!=~SWnm]74Og9GOZϿ12Ξ<$NSC}Ua3pt7!th?m/hGSh+ -Ògi=|VyM.e\|`tz AO\l(΀ mqc;*Gj&=n뷹9l!ow'5icRm=E;}"CFbm{ZQ2o䪋ƠMn?:rS[ NAxx}~|~`*n y2> OQC4<>6UUaoٙq:Z:pkhw"oFvHvrhD8>^D0*zhpr`˗^GcS+ޮVEV3yOp˦$<ewyߵ~#P%=ե;ZhncS^읔l;UGVH<ޅh)Vy_^T |u[dhʶZg '|<9m,>/\w} N-ƴ*5p8oېSRݿN@|6JJJ{:p]_kw+=eMUy%ͳo+emٲvgѼȌ@XٿS 2ټC\7Zv}`+߶pl޼ z-Eص?^yyl9=Ǔ/B PPUӦLݯ>3FzW=.:+hX]ǔ5<~?ȭhd#I)cBfPRHpȼŃOۮGd>m&!e ,l0c1,0$ǡQTي$KDF+K)-E_GMHXǤ$3$7x8[LpH8zH:!@(@"*<<%=Y58?=i Fλ㭷B钝퍧'ڙˬM_s?^qΕ7U4"Bq l p-2XC":,qX3(d <,6[EEw;%lX»o`űuq4_͝ϝ3ODdqQ>#º #s0p덗䌙4nL l,`Zz( 6^;oΞ Q#ҫ`j16J[*Y7Ѕ;niȬF$qM̞Cpߙm9 1H'8m+f.  ~%oyLuHK s%RH 7 }њ @4gfP۹q)/}W!s Al{g.V  =3/)$WOOr\p1}RKwz-7W>Mz>sH\|x쪢l[!v-[r öiQ" %L ÓSNG+j\~ml[W/;d(Crw5xQ+oWxç0.˯oѥZ?>F uAiGpՉ}л=G6vl¯6F {~NY[FٙWɨ}(AڐQhM 5$9)`@Z9;I6G3"ʞl22c2BƠ!TִLy~.nS,Vv1zh?* g fAY %HJ*F N8w;CƠ5簴>Q^9`BhetN  L JȒά8Hƪ2Q ѹWĐl'}goLqU')c0:{?.P~4Ғ7cmޛ#GjZ5zE}n:o!7\Pܝv*~ғKm%u9,PO. bٗkt }e(of̨a=w!Cڱk8%rYvWL $ YXE!;z嗦|7kqϟho-e/en8vl>fsp$yز-M FBgF{4/#qLSS!E*^_j>.Ow&%%HK?͜w?)W\^m]dD #]p zUUՏFdCrAtV߳>g왉n?Ǟ}I櫏ޣK[C}o]~zX+3H8k;=J!N]Q zTTԯR9m_$ w, 8@D Ge1`3n0 A"I@ 8's=ȲA/Ɩr珊GtUT3.?u<~4U4LNFHЉɖ$'fPPz^ rp8>jAK*zMФ4P>0$I,* ={k&v;eOa( FW[ 8 t^cDumI}~t:4 U B w4$ MUO_@w\? |7^ C$!KS@S ڢL 8Y C,w;:O ~Mv;<)N' N~ɍ~\j qNUzIj=l4DO 2G@ y84 E #iH(GNq"G@ tIm@J[[)o|C@sap:/@ptvv jo@ ^t:#~ R}5^ N/m^pawLAwl '񉘍z|tOA~xBUMůIE&V)` ,,Ltdotz[Xc29;|Ti.}dz6DDh@zMF!"2S :4b=>&xA'5?XNیfarT>p!Q2iЀ y0Iu7?{/&L;VjS/.6g%Z[Ծ?Z`ʪ*Wm祘hhx\CT BC!j,1Q= >.6#9=D%"Ą릨E#$HOZ_7 |**Y)mb`Ro:p:ȶhbb%&! 
-e(x6BDIA/2c~Cʹ9ęvI` #] ^6p[;S4:|SPr~lFK\s~Nc]hEa5 \S)ST&Rv6(ْ[Ѳ ~ϊ%+'v)=kroΚ6XXف$QԈ44xdR,0$[3)(ʫڙ68vNm";!T(?膟۪2*PYܭ k]FXNmǣ=M(]z:[vXx}2ZkZxgKV1gsKWce`~?c5>66R3zW5%vf'˲/K 3(<+;;P%({k^ CbfRwb0W`KE"lbK'TU:ksǐVQb`j7TVHXQj_hG5H 7qp!0Za'X=`:{ r {Ŗ=xtNj$d@dPh6F&y{S=g dGbB :f@ZZ{:n[RE@ .^gzy>[ y&y},\WW1آnQp qlx>%a\tXJ5P N<$?0<(*Ǡ/ehF"v r6Cdir3:NϞ $x_k2RRՆb@E; `||$X)mScB-ۺqr<53}D"32"H(kĨӎ-RE⺫/݉_iD_r)S5 'Ɉ1T6F.QSW VDHa |C{{;}`4=l&WKx>`/aeR3CՑxf_Ʀ),;mH͐yt͌%<|~oKNOaVvw9̌bѳjocSmM RK9YebSfӳ2d }xʿ_1w0JY0tO'BC̬Fېw#s #b4bc.\tYx;0 |> 罷dLbS 1RUJaq~?>Ĉ9"4M;4ODPPP|~_>CM7vSL*λp"B)k%ꧪ),έ##FjFhBC"{+/}~􋪪?k_{髦iB~ NH4MC阴<nƯtz6]C5pb>?Ii}# 8}`gckR%]G%m\Yy/^x݉4~`4b4~9}'L4iPKF)E6S%F"kWqv)Դy44SYZHaA^FXq7YPnoPO)IX;]e恗>b\2/z:ń7 wzBBjػe9{.Ԩ^tҒiA|D(U -WTraF&E.NuȔޤ| )8U8loI. ҈vɄ%QT^ִ3e|*Ze{aom"^:de%<(};30#1+N`4 [H8}AIUXwh;u,]N$ŅHzAV2{EKk'<L|\8'D&NH||ѱDEGc2Z*s3L&59V]W>lۿ&YK.x_..JsEv7#o{r>)KpZQY^A/`کor%犗\ 8.˲Ȗ(N@pL&@ Y Ltemxd  q:( -48%>iqἵ׍zV.F`ہOlJC[ k*_¡RYOML2\M`s-9A/@p,i?^@ ৣ+ũ8 :2guAfJ"ioEZp .ˌN-1#֓4l<2.&t;D% dNfoE!F1&Hw%I?jIMӾ7Ѯ{-$<1m( K2^vLI{_D$gk~?E={zXqr=1w?`BG< ?>Af?q|̧s^̎*"28mX`Ky ?N01] _K E8 {ヌO:>ɽ^]]9vaO}߹cK(f'.GKi w4'TNUպOΊ$qϏ$IjGEGG;J~.dAxp]7`{w>r߃RZ#{I+K(ɱ|^sWKE 8Vz,$|Ȋurd 6.E%{YQ$It2廷!a4l޹/PNIס;j1C?o.3op`Ӛ)eeŪ=$غa=OJ֯L l-Ibgtc AǮ=lۚdU=IoZǁC~ǹfKOOo{.3#IL);fv_۟- wzW+7Xl)m]4፹Gob4cŲ4;FaU˗ޚoUTԷW'Rl9kz6 Su*ZVu2у~+v)8iv':_n+dҕԷw]PMc}#E%Ex:6dA`oA-48(/q}9m xs4MekA-rs7SX*v+mͧ^@O1rI`U=FEwRU\W5Wf#s>Y?r/X-2$"BӇuq|z[sѯwN{;DȮrIlsҸ/ᓦ065%陑g$$>k)٧_daogxy*k[PyOѫn>Yo/+sXl6p)7,CzfMgܜ%5^2{bL&3i2yO6W\069g/<gaj=e*dSVf4Θ[`7GGAtbp NMZ'ϻ;ܛ IDAT?f$ckk838ge 7֑UQѻYŏ ªדgcO~Z&בHKu[SV(m%s@PkNs7عОob kGOWvrc⽹w%%.aJD g;7WmICx׸ٷ|Z|>@B/A`غEp n硗>&}iLj%%!ԌH>]5&8(= #hs)gCq`_;->zIa ؽz8MeڛEV[tø`"lغ= c5_~]FŠ$~mmؑSQ[H|:y2nښ[ޗ AVΞNUqWLu;tMAi>njk-61R:8PZøHD(;U>&{bh)!?pAԃ}B׶ yV.եk|h {w^ ygG7~'1 Yr1M^2󎓢vrpbШ111=k諉- ;*׸vOy׏z;;33oh6C2 /{_! 
Uۿ?+Vȋ1]%cpsi:B2qX+<̛Cgg9%:hlEѸ䡣P<~l6j"/oM.b%UG):*NomNcO3v݁KgCc$uW)9OAo/}a0 ygI,N{O3#ˑ] ֒e7A#5-P Q5L=!g]YqrQ7{ b x y٧y?gֽӾ5^wG/f\ Fس#~w@{!@LffN.$`&YFpŏo3>He4/}%G+2nͫV 0i6G Üu>+ Vbq$@r] [Ѻ3S%7Ɏlm;u<}$:$s t5Ԑ5.rcpZMuses蔓4Х;P\.ȶ$=r(RQ5MСC%j, <ظ, l=gD!&v$e1]mA1esż\Z{|7 pRax|*JFCHX.I|>F$dG7u0g(TQC*'tD!6DDFG;Y.fbc9m^A݊ @Gֵ#WYǯP8Aw9YOo:fO^ 6s :M2]푐֮0B,2Zc?Ͽ vMUsOF^z+'%$ E>>ӊ.Gscol涫4.Ӓ˯ujZa2K(*r-0,̞Rku TM#(FmآhJOP#a \>`AB IF|l 6>V22":ZN$:7+є .S#¶.R:ClL4mm$نMJrzGk8+d{LsS)Ȁ$ECBl4m-M$`/k*= vZHKϠՇ/aEւ#. { 0-=DG3>*7)˧;]8V^bbQUB8GH' hHt4ՒQf]SQh)xVFkfCA?yttx;i Ɋ6S:S_\%%!@ M~'*===D;hξEr^= 8FUSqٺ!U3;%Hވ<8 fŪ1B!^}c5\.1?ST6nȹsFz/O4M3m4MEuɄ͜7q]%P\͘M,Әu<:}}geW:93eu \FF?^xaE3_.2fx hq|*z200tC(h=)<`)zRG,d̖0M.s.0{\6c&Wq՗c"h+`:{.-*IHk40"1B?#0r?Ow#=±r{zz0dpKl2cw8 ㉁ȉ;P#-qTϸ+@ @2RV'cng```pz<~!aG∿Êzk]oIZ, BU2*Ag Ut1LkPaDet]a[=Z[3IY^#tA Ht0QQۉO^_}}}HnGU„U|B 1ji IlQHuc/fL Aev;UmǎPI><.UՑS+$Vu~E&:aUlx+*dӇ,}Ծk@?z^P:`!}d:B?fF2%h`};"k8`6ty<!p;E0}bEP4lSdʿKJ^clp.Jk|LMZ&l"JаFQTxxM+4JgsآaHdx5 ^:,q,y"y|X!2GC^MСlټYށj%8iQW+CrXtjcdf ͭ,[IZG)u_X/-W̠3DbljcXC6u8R35-ݸ `OR߯Xs3HuZjJ*幷wۅ"sK?:*{+83hJrt|s6S )`ɢxWo" 6o,a]M/+Yǘ1X_iپ_^xi3`K+O<l{1 7.gq(RGMMM >Ծ?! V*4]C.\FGo#y+oO?4kdd ݭ;[Oڱ={w0rD?U{-ȓo$ԸgV>KNOV8UOlΥ%<__0&OwNe1qL*O<"c'O$}5rGNmlQ%?Ƀ>!,Z)i#xq" 2̼h%Ǐtxu'iÎP U=u >:)떒`+PWɰ!ʢ,ͬ YGh^X8basE}5;ٲmݼ}7e23u z<v%{-$4ek^}Cr*M7Hɟ@LV-G-G,VP(t^E^~q|<: A v{9Kxw-.?DxVVPZYOfK͚z>6m PVMV͛hd_'GItIXAzb _:v[[ Nj\7lt=R )bܳl\6)ijBH|Ξ/o!o)XUM@T$E/˫+9kMU$wퟹ躎i "$锏B;c4-ĭ7^G$֢u!ֈPA!IEs#"hhF݄SHKZ849CWBNj,+7&Q50ehm# P3m NZkeF؇5)t2nd # b``pZ+M1i$**p{} e0v&DzW* wQ?dJMV,RGgbSydM%}Asẙ%KYSj_H=lq~[p\*Dby{9:M}!ta,X~-e<~|64ph<@-;଱ v=DkD*v.e;%|ƫIn޻2=5̦8q12+wÏ{Z eʽmT֢5870ZΝHTUb£vbًv";ٴvt&VmNƎ}MoZLB'mő s|?2YnYΙuq|<9g 1#~7(]a4bxo^Μ/$ZqfD{tFe8 c=;9MSSRBba4/IC*og.+ox7RS[ҲiO#ğn-^ˌ_]{)S9C eڽA. 
?=ЩS>Ĵ )#姞Ǖ7%okLOan)-b^~ ]ŋ]!|<|Aq-cixx[N~4-Y=tshp˷ˮd}1YF}SLAY3<,rͫ_G39ct6~aeDZ,vl6n/Cbh;#RM2btEfdf&T:2PUeZaGc>"3YYY{xnl,Cy>B& k:B ݕ-bbEcZ{-|\xn!d4]%JEX0 dQJr$gA7Sǒ/DP0 +8^&CVw~۟lB4`H$ a,Xd_H#IFEƻ{pFG]o50%E$( $A=2nbZ3}]4D{"/]ӇIR@I(IFGB&Q2v9y`}H9G-):C:pGCGA *ĭzd3h\ܽ-H-eymdAŃWAt!O9c5|:^ZigŠp+ry8Dv3oO+wEfߞ=̻,RbKgdk]W~w7_?8Nl'6cE*w3嚟77p=| ظk/yc82)'FjR7\:VrVL#SosLɳ;~oHH\/D~űF )'$F~\= I>]]itw"H_=LGg/X: G㰚NlTqn/bܦ C :=&&F3\4V1(cqvu~(_' P =;)?^SjH ϻ?Q6׾g\}_<ϧ=PP LA7jD 'TU% tu<YQzi]7wVٳaa,}*} d $x  ɏGp8<0EnE9\F4IH( `x DT뺱? NStnkD8Q0i:y=ML,Jci6ÁNIQ8dTNb^(e]elkƥ27g\uNE˲Ӯ1C40z:_&Bà 맽ryHJ d0s8… 1*zпhND?˪Q~zʜSc1dJW&GUSBxu>708wMy +$ʺʸ}t:e_hv.(F0 {mYu:ݟrNR OaAz:׋&IIeNIB_^FT7!G#7fv'c;y<1-?#ՖbyIW<;#9>BHG%hl۹69IH mM8Mh:d!}JZ;q"ˑQ%vKESC2A$:XI\ba$h_Wn BKJJ5jԀys6Pc?rЫz{k,]$?/p8<`uE9%??|[ m>b},v#wW'N3J&:zq{qX$;؀%5N(ZYfa5vI IDATLB˃PDFkk;ljcA?2]M2 n;G9y=ȒNP7e6!IY;D0@ Bo[5d$ !I!"JHn,11 Ʉ d3GdIB%rD2P<(J́BsqzO#\-yTm:Xt;BX]Mtu*uwcv=$a:.;/,x%ر@A6 33K%bS衶 RVRJKĖ*R⬬]NnvڗX}лKYH^όiSyiV xcR{fq6ڂOٺn lR}MK)95w3lyw;?7s"V7/d$rnA0]./CsxkV݁FŠ%)i&X.5vzb2uE $ÔL,&:yLMػm=+ۘDH2fµE|sb%%m!6w %wVv60kĔggK<~;{Cj۸qiLrwZn=֔߀lx ^ljċ/b>ִlWҘ |'Lgמ ։h/CQNǪ}WEd33$3E//1(Q1p*MΙDze,]'H_g7,gϠABk3u .ƍNtN 5O,x/ﳫY[#gɊ7_aOb6mW}/"oĘ .`A<3( HJd˶&(?*µc-߾;u}^b/RK[.'zh]9shZ]Jey$GZhwC*'eTL1ZOOq5)a,`96gr#ASH:֘6o/"&I䠱f{DYiji'ZqXɦx^XIL8l(ndx+.o Ud6 N h?}AO{˙9mbFg'mwPWYFn;Ёr}噴t83quuF)T|Vq%XY szVØQCxH>lCF1%7tuJTŤdϏ#nA xݭK9?, 5P^ĝDFW8k6߾d2G_O?ōcM{I= dB%/m uLui몊=%j7"9sq믱T IS,:E+l(C%3!Sn$69aH38!Y$e⫯h!kQZRW 4Q‘!9>$yi*ŇzdG2]nZ$JI~S2q65V3bm\:vmNgQY^4#>s^B#95% %%0}hW޲}L?dwɡP;6@HTl!n]>v&-Nt7ȖMR 8VF\\e{fTUepn!M\2.'cHYx Gn)I葆OG~uMӎ B~?S#atB*Mnw_Z5ƳG4\9ǻ ZJtR n&QsX}?D2,7kGy^[ˍ7_Cfl0s (8FhFW;pcr_{g&YlisoI3'Ѧ.042QOWNTA\|u$kOrkmQ&]t̾'2:}OUNd^tt(h幗3;}g~AuCA?tCA7#7G^K?hPUƝZLt$Iwt8 ! 
;DѼ#_7@nZt`2``9s86q?'w|%{]>4LtOO k,c_ް O˩A78u0k4x>,s}"۽"}폕/?ÿ8* O;Aohh0ک,$%%}]p!3/Q> 耍]G'_+F'Bi/*Brr'^1d``ݓe{Ek`5v&i[i qNW^Yw184K4q^;?`̠1_Fɧ:b{'}I,}σi| ey}-ϱeq+犮1͞-!y`q1ttYOfgiҴr!@t4%ҵ;LQ\zdL@QuL& EQ1Ⱦ$H"EЎ`@tIAq+,}gbs!IP[Q1[,fS+5 $ ]F& Eӱs_q6QB'jM}adI K2h,K*L(D?uAv`]:< CW\%!RC,\\;ZO?ep{qݧv,)*k33<S I raiC G{sϛF;֪*&͡EmB#:%FgLʁ2fO}er)>PǁfKfv) cFd2uN+-™krίl~LC_槥(h30ƧDpp5R6[Sk-mvæv0}\:ni` <^QINgWC*5*-OBHVm%PYo-Vay6]Ҳǒ?i2 G I2j0aws2eׁJZ:AeO% +2|ʻ)czJQG֑H%ˁ6k-v0c&.gSxRtu+Iuyp8-:|v7xjSlmJp&]?,dBj\7e}і^xp [}s~swQʲmupL:KTBf\L+yŒLdJV+ivnC2ްj'zCdl꠻KX26'4vtHpوqbYyƌ՝h3{K$gdsvsbmI} "'IbKQ#SfN|wnO HGk7'd׈Ma$,YdduB;™G{dwo'{HvmEՃtv6m4{2([KnJv `, N ~sPm732<=Y(D(Y"su2<']4|ws ʎ,jo& 4Gy-Ace8㈵y )8DS>O'Qj#;ouNJ%60&EQ>Po,}k1-.R#F-Ed~jg"܁ }VA=aQYMأQb{wՁdw =ŐIVcw1>r?he+1rt7)g RR^K3J8>E׳k.>]ݼ k> $edcYquu˜ʌ\6._GfL֨+6\vܸxF $.jn&NdoP} ;|c.陙c|01|/UUhpYȴZն%K0 q{?Ilx܏3M?8xʉ$$*z+xxßClwď#I",[M3o 6DIBB@'Zwa=a-Y61daDA>Z{ 7ܘ\̒MP(4 #4/ojY€J7/k©tMTCHVlfxg!C#-k6rޜI@G[3F^cJ}Y_ӧQ6 誦ɣǎ=b#8ocAhOr=uU3 +>JgL<`mѤ;eMn2m#:}+4f:B0R{b*VSS 7%ɡ>EDV1>/L7PM-r3)9P1#]\(A?XaΠj㛔dO8N&wf\L[e)ڿWsX\tkɑK SbɛѬ $ Cε&QtQN O>ݰa v A1"Go~rVo+a{jf9;cԕdwY3ν%o_;YV."^m5zT,CUͪw[kYm?3. #?߻3_ E[7c9ֻg S7oKWKеFX,x?}\T&HX@ ]S EU$%IH&a$B$ )DY! A0e1} P*<'阕EUUߟǽw/[sκXY eCԉ0G9;|Κto`6q~:e(~:s zθ7#th>Z]~}uԩ)l_Q6n$-U{[LY9n䛗Eu&^ǠX'3f`[3e&yI^zrߺ&76RW_G8`R`%@Cp8UQP%KATŠƎ&z<tM%@O!+6n 蚊ր?]E;;fŠk(Jtkjd i4bomd*¡@= 3,[25BG"Ǫk(J(`=V"qJ8t]CPQA0~TUG9|@'6O )"Ib244 DB6_8"}ttR{ge+RNS}]<?r*| 6{G~} Qkl׳qk9(G 8))Ã>^UoR׫uR G?{_GugfnKew ئ@ ʾd}%&yw$MH%!y) ظe[]Vҕt{q军dd~m{gΝsyίLK'SR?6=E&tq.,|ͫ9}t2, k7Yߨ\ [_s-sQrvjfsx."ys2;"Qʫ{wlq>/zWiw4((uJ^: mDA_?ư7P0㗏|&vK :g/"m IDAT|z>/`'Nb:y8*OI*\̼x_)tETVs(Fspb,o~}B(mj!UsXo.՘ɏƏM< \s<c?zYUm4K}BIA&M5ԢfZYKsX(77#IE7 l$2l:Xǧyg6dWuS(˚ʼ'; d|}<; 0GUUTM`V5<' 4C6N$?;3 _ '>CNo'}>rUUQ4H, W j2t&/eyf|%,zcȹ֢P/9izݸzI˟3-9Y?Y@аI&QEhID_* ãSd/K1'PB %= %AeS(J 2F$TEEh'aJEfZ7pv.xO6+x}2?jh&õDA9HXV§ O@ l^*QM%]`# k2}1*#" ! 
e֕Rl U)U| xOCx3 D%MGh G|%z)?\2>oE6ߍ"7rvRWkqUEhHBғ/Q#th|V~@gO BE{hL'zoRcw" 9Ķ_pN %0]X.~7V^zm+VRwQQ q(,Z~=5o5C} cx\{Eny!,]On/P-a߶M,~5G T-_Np-Vu]۷WEGC˫2%:F͊9w;dö+͢U٘rb5MFrȊ@}c3wܲ~pbV&]+fv~fb5KpVTAx7קiЌi!7tBj(Y jz9ܽASOWQDp 28YVt(< B!L-Kә%o݆ɤIhon5<@قM#QLF*&ګ;t"1UHGH0KV(AB㷴NxDL8h4F.螷8"nx_ۗ#bq@"軲Q2egNǛ y3 aR[ QM!Mgc9 JΆCPI&\(Wcx(~MƯFTe?J!5O_)nG6=B-Q%z;[CCaM(E럓瑩/s$GZ6#PzSϋ'-䧾G\ċzA0JYfL) L8kwO=1u׮_Rl=>̭K Ebpp^n_q$p;=QbmKJ0uʛsd~^ctW{;֞:Xt.{vg׳v!#znYVuЫVcfA`VomՃ,\Ee1Aa~Xzf}QRr #Um ʱ#v1Rx޽_d(f! w_مcdڭ3K8z -?ydw 4{ZeVuťV~ǰm,V+*zy'y)Cݼp^YAh'vvA09yYFݹGE~k|ټW|J(Ir1frp F b?-Aa.ORI5ad<Ȭ P!ިj.wQAb9hjfqM ]JRie$'O ̋[ЅAvYS^L& io릢EE3$_}~/v/_M 6?דfh^oy [Y$ڷSUJ7bޑ %4݀~p& ܱf 9Vz{{Xf^},/MB%;&RV{[QT Na`؍ّF5P% Pv5"V;LzG˗V⿞>ڵKx̓y:J2uq4@΃ܲr6~Mjfr_e^Y`Pbav?JzF:K2LN::d8IK41ss q1;p$9Cg3r˚XwR{ #NYE)vX4+^7DPB:Ggs,OG9lYp6c'ȱqtXl;^{+3'쿻i (Q:y ^csii7]7ĉh=MJݼw8IKI ل,7#{rhP zH6 남DTm]4dT26tv I'aA\0Ҋ<փN@҃9)%D d]O<Cx5q-x,ţDj ^%BL>yb寶.ckhy OMY9](?4W'CKY/gpD^ p;ڒDzf :ʪ"‘x[wr-KyìX:?B8&Jr^)BpQi7!yfg3%QwV0Sh "qTZ<4 rr\h~5)§Y5 z\N zS*/{ 5_SDE89F{|5c'YgƠ+Ht!{LI nIl1KEq.C(Q91 Qdq)yJ6cGHCɠJ̒dMufEEtttW ;_,G PTl6s-XTB2 x:eVeըmbެ,w0q%t>I G(J֐Kx}fv][BH"3݂'GCMY"f>`44׉Ѡ'!vEMQ$1Y09l2(2(3j'ooy[n 0 Ѡ'a0c1dEh&ncGE/i>U4M#Mg!_gژbC*s`#= OԸݚ:=X@g%z3HdLpFp"iQ~\((!M%Fk*M&D9Î(\.ejŔ㵕(l>e%?% _~И S_+#bS:i+GДi.Wc]Ǟfy}Fb 3s)z;NW0YY{lGrD31Qh0N{xQ6YŨ;wW "<V FLT!&4wx|^UMrnFg&z X-U@')*) 3(36'9͇fJ+Xl|d8@ i8+tf;XI2#n$F( Ƃ*A1?>5W )1S%L`++*-\p ^5`M}@S_2+)453.2̏Sm1\J(ff2LW e=K߹O)in#ܿٵY iANa>bHu,G>6K;ҰM\a=+ c 03g"nevY{ZFn5 >>:|l~7]^Z떤k4'(36SБ>jV;WcoիVkv 3 a(oR2{Mܼ]{m d=mNi^ BZ ä.mJJGCKah3&C3(-/ &Ŗ@IYN日h c̚I##uC1ےʣ 4fsݢB:3olxaڮaxn^xrh"JƍK`Χ$벣>DRl~1Nv+Du0_<^d2{b,_@mn )HO@]`wŅA,)>t_~[T"X4ҍiØU͒RHg^z6Dl|]jV|JBgi&<:a?X^Bul?"foH?@idFC}+&] Yp#!,<-D+ R|x5_+kJɴރR5OxEdUF'ƻ(3qY,3er% Q k{6'?]z'=O;^BHΏ[t $@T= C23KaͧAhMkd#ʄQP4u>ﯢq ͛&Z.qK{RpLXcHFɉ!/I5,F]hd./4Vғdwr5:;7ݶM[v2jBa~)( \?i t +wi={9]1G@+{T/g BLo@0:*JuF1R]M3DNqu{lgUb1ܸמۉ7Nɬ\}l&wF|!nZY 1vn|UofGhmo[n3|b/+gDxc>>V⃚J^{h 5b )gɚ,OV{1HSaz{o+YW\Ofe[_@` /,2$[>|HY!Fdk)a`ϑv2 mmJ8y 
QVQĘ۶wr>X_C4-q!&IcnCϧo$@U~޾#GW1qQCܿ270P6'̻K83ryE'',t>4vs*_oXyz6ſ|n1/q#C^_L.]Ž=S`tOq޵;o@I(1Qk91˗8p {.%AD=,ϓwRK (ӯ8!/1MEFC@´*H;uSgο썰OرsT *$\ܯ թĠOfjlnavŬ+=,-?\)2Nwֽo5^.P1}_=~l-*G=,I)Ңw<.=_ڵ"mz9řKo\f5g]xVݰA"a灣fpn9 wW=^Oo*TǑ&oIf(4S(aTUC174nt)?~G͌'kVe>94}P4]؃HOal5_+gƠu1I&cJȲ$ (*lpn@Ԧ"v,%q>]UmĂtUgS :P8Χ.=ORɅoWS# р"K.4vASì[o1A ]{I f(WNȂ(Ŗ6'PB %= %t>gՉ YN%{7a ,+>ÖξOn ȁZ-I/O е\>d>T:"^hmt:?o?fcߘb I`KU8E &.뺈E<"4(Z L DSWcKz([AF(I dlUwf㵔=H]T,_^`+Wa4ggFE9݉K*)j꧴0gb4(QR<Ch~'V\ %i@ [&-~ s_G>yWb%5˦,A4 ubMz(hzk IDAT) 㣣S(,!HuHdvpQ5%as>D#FAzܢvj8vH64MI뾁.*{ZE&چ o5ga  б gߠ`FZ/>4:\]F7{cBb!QMpG}4 Z[ZpVӈEc(1z_r+nX*Lj߆kˣ?HazHln4D zo GqVɲ 5JYɉQÄ=Okȁ?`v/I>OXS`=R]R4lACf\J`4 肊`|%$J}0^ocd2–lKr'1ϔrM?c7'!𽟼?{w#1\]۶9{ѾzNDgҁnrqn޻ovK}6n܄;p7;YeWgnH$${ af2Lf4@+Bubccj;QeaB0whNm9M1RH/&jm'܂"b]G6/R7Xg>g}Tu^9DN>`ʆb5$1$a`H)trFռC!ʯ~U: %tgo=ݔ/^]Y^ߺ1O, @G*dD0FG6 p,KӋ'cCbӖ(-cd\Rۺ/`eu{]d>ɯ_o7cO~A &5{4^wmxA,6-AҬ] J3ޞP(lvQ ިkczNŽv68>}ZKYޮVz\VxO/8 ,Wd21F(Fs>z׍Ru_}UqH1,$3ɢ~Y'~IJ6)w5jJ!r:e): LѩnE8M 3%M7as## xtaJO<wuoq& E\r^ؚ/;",aD5!V1.Fq*,܀|? [!]57H(@_VDIGA5cHkb%D! ay"qbУ!`GR Iy=nbBrJ*g{lj4RS'7wlN҈K$.wu-$,wSEpv %tN xQf Hd&aHN1cAI((eF{qTII&fWF#J qüYi5w#w< +l3ϛɱ'O,J:8 A3pfaZIbʱN?5E3? ޽n95+qZ?MHd_S?]@k ˅`ثRZ=YPKGGǒkxz^z'>OoHO/\] Ws/q_A ?ݣDhG..Uc ϹEjlz6Gml(Xru]Sy\Xh,_Ly|j{' 5?LpsfͅZ'5"BrФ֤')MSdVKƩ$^.j1ڇ an Miq6H,v aI@0D&=ʳ-wPXE z"q$sc *,ʜR"I\B %teT%h:5AIU/0`7@ \d% ȪћBV+ NɈw܍d!!zd ,C$tNz0Whc|d~$ 8Ľ3 7(4c6h}aPu7}J-ę3 ^*l "@ @}93; zV^G@CV5t2񒪪tј(O@$J(N',c0Txl$dQIe{Пy%/wôyY q@? hf8:sT tKi7dH/}r u~s5BwtiH( m$ H@kj| =/:Fa|q4%Q/SAT,G;Qfidx٧[[ vvi5aNIf#,`9Xƪ"*rtezG2(M3S_ςbL:IfRgZ*o$T:3Z`bR̅ꤳsF.Ƕeț5}B (UJPGe$ |;GvqIECط]m>_3Bima|_Cˑ#-,[N+e͚jUTDπ<)uu_}D&ɦ'ŦmI@?cnԊL@? U6fjx.}nԓ7ixh{MCHTiZޜL((xm"]3;^W̜5 kDbԃLUD-F9{h*2.B"dMA?}- `xGF S. U5p\8d|Is96_p[/q& m?NZ{F鍃5M;|s@Ozk2kӤ Fc5!m3h4] .bN4'"hmXOJ]j6Kki;ۚi7Öeۆg 5?S6 ROsb=4ްơvoo}XOSϾ%2?)0:ԃp`.F|lye8‡^_Q{ôu{%4=fƽ!ZZ B,g|q ʒEty_o7&-O~א찃`}KpEz^óy?) 
L)X"Ǐ筗^`E` qw& kcI(S(J:qc>h0c?lU*MsǹeN֌V,C0Ì b!2tL(H}aUWȪF}w3d9~3; Ĵqutz Z]] ,X "uǎ62ldN4x942MF1&1v=͕җ)4BP1AB4ВO(JșM2^z>p ٫ϺK̅9ys+1p- x;ΜHD){a=˓sMM&j“TMo}(xZa+}m`t'b(Ds{=$gbQU9"0pOWo7226O~A" x|OĹF>Q6z.9xApZKCWZ@_] $@{d9AP9K=;^LoT" k*Ď;uu%&uO?DA'GΨ@+$!K 4Uo'ld4u 7LHj&|thGX24q}k*h5( tɲ<19 S[M<ę^4-%7/mm&(+.Scccj~Q(z_2[k^ESLCe%39t5];BdO{pTY:$4:3;rrqwy2entq?Sڼ,(WAfZnP4/ 4 C(p@dwb6@YF б w_'ӸqjICpb2[lY5THZN| QȝJ9$U3yJD)2_^hsA/.:3FbS#BLMw1Mw&b4n?e5tD! f<}#g|O#Ɂ75 q7$qӓrB8W-M5?xs"Qf?Gd_';ヘmZ)1#i ݴ/~爑`VX076w!G|;:DI",[EErXX7DJA?v:ɂ{M(ץ_Ӧ%oDthI<]<{<Z*V1ŧ_/?vO{+ X Z~p_ zI:_ϿZE?=sV_Ҷcw?pWÂ~GeU.fA?=Mnr^H*I. Ħ 4Nߥ=$qкFZl.њBC壴u/ 25vƂQSmSjpJD8ӞBLIwƩ 5?s$p۟7Dod8fY9}*hp4;b08OĮki xC4 bG0}cS;Jm8fTPͼlR݇ԩyfh43>G\*I\4gہ&2Љ1G I_x~T~lGcGyuF+=5,bP,xշ;!eSZ˯|O?t'm0oCVg!4 .B48W#q TqFzYlNkrӝwSgJulܼ>R:zHsh;1 @$#Ei$lC(Jezwʝ;wsϹR^d W3q\w6hq6ӠOj?b:kywSyS(J%,Է!:9b}1̂yH,l,%=pA?҃~GxIiy5(FcMM5-8hjp%S57;=_O~m 3sQ UVK + 'kyRʬ B(qኟJiΝ(i7d< P}_LN';>hO-B$QaڥlUhY=0+}L+{}O?3g~>BM.AE -AdY>1~kS\k ' I8A7*}}U9w\>YVP+W 厛/mH-Gam>-KxZ&_x!o_|rU~ylmWWnϒ@CChjbG=[p KRGPٷC0Xu] C7<~swŷ.?ug;vNN<ަ>.:"ize£mO[pDx.7?Y<{L!j lezGky:nbiQ`h?vbq`./m >DW#. bIѧ(L3~ιl}g%˯U3>Ybge YFFt>qq#.귆M#NiȻ清yP @D^>߁}؍!2'Hv4YJv)[JԥTe\CPpo ׳%t4jz9yC5 EpcpAHY>1Q:g{ZFwqɍFn\|R4k.߼L4U<}?k?q"样 wt6jl&HHdGY|\MiKg\ 1o5ZK%3kf Y7.  IDATB@=1NQ`BaR:::-% $QnHM#&gb6/ZT&Z:pdDKxC\3gP&_ȊM݅Kzx946tP0'x; uпf}a3emH F72|A+J_x V*嫟aj0/4s@Py#"зowt'eb jԫ$eY_bƸP <^Yۮ?z/.T_;mmNRfӹSϚ}=HK.[fn*G%1!HM$*W^;n/A8r]y&bMlwp3P1 0n2lidGcYP$ @ar &){BHl[I BD\A> 2D Pه_Q *AE/U?MC5=Zxk2vPɇ5LDiXQ,MFŐX́n %kQ؍F.h L\ rOGrB ݽNbc( $A b$("&FPְHP%N_(A ,양0#ЛKyӝ̝3 STv鸞x*Ap0trԗSN$))(@wB6F<47{^2_`WƤ8U \ ^1eL32s18{")g05>VQ|$YtovZw֒`)X|j/3"67~+/? 
"}={[Eg=,.:g~Hk ߜuT\lb SilsP9df*ѧp_ESe57a,]CY%!/]v~o+.Ĵ 4.ag LT_'{vs $|: ә,Ы]bagʪoV&9^RbB tVU'wIbe+Te5}wTQE"J]LȰ]w^= ;VQTMSC U/!+  UU U=PMdM&?WnK=ݱOo=[~=P&~KŁvHJD&,v|aAY|C]-%V(*ʚoI6\*mgwCj{Ah#hM]Hw<:Җє} 5yQQm&Mi};--~1Ź HY2h|Tʽ 8w/>_ ;ӧeYHC͚LE֭liQ 1,Q\| xcūHKOg}߁秽a?ΞzdY&zxUvlEJ-bjN!-Dgs:%Ӎ~=d'M]&2b[`0-SIRdEƦ':qduzxh2նA>Ftb<#i\:Y]TW l/BN*=T6u^g@&5- ;~JYE=ӊe}MOuk/1wld!-}CbbQO,.b_%VGOBJF* Qf̶x0~zh3M䣎(2)ٷ nBJJZ!ћmŘT$XIΞr0ht;d&?xH,f{u 8Qmítut8Phd5X/'?;ˍVO"N::'߰Tx_ `D+fuL:G`KcX.δRcº_C;5dq>jG<U/F?2ku!NԌ,^\_%GljAp!} (3E☐GRlI'N'p#rX 51ΩGwww c ;BܟX- r=<:zIIv ~kpߏOzoS Ux ~vqѰRjuxD7PerVxp:@YT&ipS.v5诽 S/8%(x2[z\ׅYjƲ07(H..j;MF vQ / 0/o--]_>,*h>bӲ MdD`ObtP]MG!;BbZ&h`/ඤ0F[쓱㣳jz- ^svT? ]8 I2cʧkQ\Krz6յToLME9٩AЏhOL[;ZsSC|jm $YzxkƲӱYuCN}Ad?"нA|T'J^bbFԱ#~]{ʽwIfI<]V5oa%FuIvhUoV{=0-|ptӆ'_SRF^ !" @Mlo,#.!.ocws/ePt{9p"LfIDH f@wlE6#]lԣϗRE$r:wvtDz;oͯ~y{}D"3L]Ce} 3Tl]ػK^ƪ?CDp7ܩ\|mLD(*I __;hRrHΧ0¼T32 1>c2OsgMb,HݵsWBj&Me=ILWs%M0# !CŠDMSq8!#柽(d7,+Ȫ'rE%4OU8?@fw "cxyI'nz|A;zTq~(i_f+"r䄄iP s.z;x]%FwΩ9QkoAP$f"?Ɏ6LOBUE`4Hv0tp7bE+ l "("D4Yn{~ qAjhE !0{R4 EP5PT^m}nZ]%15=3r|pRΦ^2`>𑼞j1~$?CC&;~, U_]Ś]TnM@팗P$1 uμGDtioB 8vՎ˜B@"|4;HOz0Sلld@GG'  O^e l, + Ta.]ty|(bD^ƘXf%b5(ܹ`T0wEx< xnlv;n͂@S将|mq<ÝO<ǂPC˚ܡy|9>' vgm 7ΣWM%jֹ7J`3? .Ȭ0 q\`na,ʆ+PvcJv4`kRI)amFvxr%LS$zgKge`c]ԔDwns//D'A_/ HN8; 63 ֹS@TZ>Z^^{%Buk_^JZ{8hq۠ɖ.a^ @[cu_FBΖv-xAD&'qgU;3rRFjyF*! T45TbcFlRǩ"B1 [z>n9TU;;z>34:1Vr5KƜ8 ޣ)g1k_~}*p`'e &so㢇v=\w,\=+F3O`Pep4&Ν>GWU r,~f :gLj\.VqMOs̼v1%"'U07;lpVpɨ[~ @ XUבDc>Gٗ.!M{IAәy=}!μ_e«7r?y3y#@:(DvX(&];db*F:|L].F7xXGb5I2;?TN^:pö݃>%? EU=Zj_4׵WKdyؓww+b̞{SκX3e8 y lr0>FfysyRn;7 ,hc͈&bĬ ^#,[s.g^g:'YSL[ ʡa'[-%=OGb+̘/lb<'JфCiQdA02P<=M<yϿ?Ml_ƹSKN0 "de^ƀUkcAȈ~`08_%4ǵ=orLkښ:{ڙj yxd խ^2?wp;r)'<UhrhdG~`Ѡ\ QKuΝ7]BAI. *i??J4dG <>5%B:~E[h>ADV!?4NvˤD[X>%G8G&]<˟;vqWYTc'U?h4-M1.\ݛ6m:Fu%V{ +7ho?ُ?>˗Vq\xXrʻ(*`qZ7l.?:x>yiݱ[v%8}P[O5. 
"Ʉ z1[x}lV#~EQ~{xeaKs25gui!)u~w.sCwOTek}U'p-9UM,$Ew~g|=1ϫGC)ew7?xJ~~yqYP}-u&i~<΍g† 95ˬ F+JC0c?۷.ǯ H nՄ(lA|- f$ɀ,5\l)W@-ύUDK͌GDDɈu܉ȁLHz7SBeFswO0A3"_>"}O)lz VbLό>+m飤/^* 86G##^/Cje䒰DZC(NIPx} }cƟ\rRU\#pͻޠ<ɝ "vHWW.Zzzͽ>ڜ>ff ?(=ge ykLɎBȺ761?? |wYo]0vhPj=,7p~(,Ƒ5h@lqc5c/X8 t LEW\νlhMU>dg  tM?=όnȡ'玧uf4 {$zSS ^p2{uWOzތKoorhJcc3F ܩZfMʣθziw^sRi$<c`08-kMXG::,cQy\oj%(zeJ{TywtZIyIv.oV 6:}D[ !$W߇z/ٹak IDAT R~rqdSJH ~ H$F1%/^үI =5LAaߺ$aLʊ2dVӊ"_Z#&m(s"P͟XL^HQĪ,rտ8(@4ttttN3zaO0sYy dĆ::'YQnpOrnYCjLdVF;VV^?u^r?O;A:_YWsbz~yU1խRbD[#OAgB&ttt.Ӿ&^\LGG'c@Cmߗ6p a8zg?H=V:W(}U|Bk0!NVy2t$WO^"0>kiB|ވ#~r^>`՚Vɱ&_ q Qصm;NEA}t9qTڛiĭY]MVk(|aYKZΖ*֭Nc{+?ZbBr̩= Gh@€$nrvu X-aZgzQ$6!Q ~E{'a[qxZ|fudN/"JcRET;D潎͡:qF?DZ<O#ƪ[aNLj܀#)#վUo%*7eEaP揬Ӌ_[pˊgs~a @] >o~|ؿ^I"zeŘ/Ʉh# >,F!iZdBzSx'[qw &"1Mn*\qí+C=$?M#FJPDK-7_ëOU3W͘ g\q;{䶛gQ`zG(jqX>$QTV<8s'= %q!mn&HѺhڹeƒ0<~_SCCռ?x(7s?%5fNJ⮣) 6Sǒ1Ԕ(}4cl_ɴOv-![Ə/LHOVs=4}GM3c?m+ܶ-:J5m s|4*}n?.k6pv"Vd-ä ^ Tw2|ً+g./i BM2't @ ~:3',*HXQ(|vO~y`}>f N<}\~wrz~;| =+z zjeA΀yUD f8z.O?*?=N.uttt>%зM,H_vKqryk\emE;7ω9rNu\5^}2 ';A:|@x}|ua.q_6qB {a+01yS1gnl @.O>G pڷ#--mPgӧUjËS)^?{G׶3HWD ( r}<|$Ռ)1Ea>Y ԵgCD^}미,(LD"`Ŷ&d1)cl| *lcbX}LI>jDId_k'$a3 %R%Cz=mwcM~W/K:@N6t r~q3 =3rnf\Iú}.Ω-u⮣ &վ+50q1/2nǧ (*F(b70E,KW٢ #0TMkOoIv:GLHb_m"%[|VWFFk*y\Q׶sއNe@ 7(D$ݸsLZ>4MqCNCE DY$ Q \DAN,LֶeE){wo<15cWe¿AK.:u=t 蘬;::::َ9)NwN~RJ;T 'O9eDňg9;Yj=ZA}=~"=mȲHrb`cy 8Ut~s{d 9Čh0bmqeXvw+nnX.xtL^7ᡷӋ'GF_kPI+%M}Tnn֨;6oE XcL͞Uic4>Y2 g9yȦsڰo6[{H0,gvݏ-m2EHNjvעOYR Ii+AIOGGGg 7R|2ͼe &BM>A.X@Ys][$]ʏ/m;2 ~&eFq,H#l3dZ?xe^= 28(3|1+0.woEf'=*i`;i⼮.n[72Ԥh;*%joɦɬ 4=H~ MrPAGGGT >.YFeLTHj(:ˮ[Ff8+v![vc2\ htș tv4#{8"&O?cgo4Nch7k8Z\F I`2a0M&$ Ɉ $ dD2P1} I΄h lzpےɄ(6FXvL&8VR:{.'0.!!."% v7s0;oQW(ʲdA aMՠ~Jy!q޸ROH{/oDn'g6kN9vuԉs :EѼddJ#/%Hw0IFԀdۣn:::\[@^F`Nn3Ņ h 'fR]8G4x>h3ڨ?{rV|RµWg9.K.YS ʿ2#ƬceG}#rj MQМx' ?:Mjtف$wۻf)8?/OޥТsix;Sr-)g_ź˯!;Z>bRSgqEQ`P&=$_t So`c 382hbϓ;~whȍaڒ5 ):Io (%'QrPh疙yQU2Ņ'=摫}]ι(hn~}ʹ†?CBH9pFl!bY5(Wgq9z⮪*,#MH_t9,OV ȎV'3Sc@TW -)VGo,>+4/}:g9-Owqq@77*Rg lRi{%| Κ?qg/R'5Ftt7Uk:&bZWg!m!Q|뙧;rDY4?`~Z߃UqܬW/fqU^_n>| 
zd30`Y܏W!&D\)ƽѫE2k@9¢1ӆ`0ẋ`0P%|Bu4 hA$0c-X-fT UUІm;*р$D TUVADW?%hM%#JPME4&("hk@ $AgBh_@M_: 119Kx\8 ah U  -^&ۈ oɊd8 ]0 헹+etAFtqďd t=];1a3^[Qv[WD"&[[[OZMJBB@ttt;a=!n4O(uttt>y:K\\\ڷErXqېn}Afgr*ҴBEI,0<́rcz'__o>Bܽ#rL'S|ŝ{dRh.@xk#Nn6#25nȇUI4tZߤ*BْMk7XcY8oVOw+nCta{x2nj`ED]d}dMËwnA言> ΐЎIӔےEք yvTqG>nz2}uG;MOWiMrz,U (*2*l${,*+n%'ɂ'99]6#w05;APU=TGg,R} iSI7غqNev~V-3 g3/>hYb9hLh P8LV;V|n$ Q1`1I<^4Q$ фd TL>FK~$@Pd4394Ľl'o}3Φn>v7\-Ў&%&f._B^؎hA2bن5nMJg/qstC["/2 o2<.=g&*2d@KIw4IBTМW B56Q;JwM% {;BPd0Hhr5 #Hg`X q-LY =I>JvUbL9toTDV]5_;HY;@eED vK]fMCGLvX|wX2kl'̽}įh qgqttrrt ge )1[۞3!knhͼ͈Yki'C2#ddV>%kV0mQT3w֔SW05͕ۈʘFgãJ{ Dc0b;q{/N^^qnp{꽬Zi?18؀ o?̝3ws)N1:'ҢB~]Pe nk_GLG­S+i`ﯡW.Iuh'(ـu#x%hQ~ljˌJ{o6yVS77aQArtUCHTg>pWοn:O0+*kAA?MTbV_;fdp33,g@ ?^\ܕJl<)zoLN77PY^Nrj*4Tג>*{g;Qtv IDAT9`q{ǻxx3+deL`V"Chl f,@_zQGk^%f# ֭~V4W&0HOKrX15?!7$I=m>C˨!삟7 = $/ /w'"oPRAx6}IxnW Dewшܘ؄_=VJ_aZB x  U3>Z!%77ү7vxxA  4uzLՆR_ރ'ZI22.IzX~~|O!Ai\}]%"s*{F>͛"l|c"̢ݏG;P _Iզk@{j[qpdFVt1eF.[=s6|$ z.jM1F/ʊf/(^qӦ#)kYl'VY<c16Ӌvbwv[t_h ݌/DȄz$/Kk'p5P#f,*̍,7c&Bj9ZMz/RhBt^^xs>xs=@D&8@]* pۈ3;}S o\COδe6`T .,A~$DU%7`L]dn<N6d|J+29;*"Ip?=Vtk]” Wn 8'=l?XܑV5㎛R}²zg{)L.$ s0{u 'Z aw7OܢG&iI|8 ;?_ PPլQՏIw={f}Jy&3aݿp۽K(l#B<{~8k/g,uvT^&6ؗ KɉjbҸX:y[ٷd89tjU?WzS;}rkZ$O /x:gbmwyg''(.[`en Sݜ3/KtM6n%TJfйK/68F$/Ơ{=n_m`szOJSkz,2222.ؒÉBRGBr@,(H[1DePXXACk;Orɟ}1Kg|)$87{GFT w~>|'lD?!!k+5a=}bGVAM.G5m3s<^ۋ = 0OxjakRAP>|9*Z<_ߺ$=avw^t[:c"ߺ{Eg,3f&'}?>wWFFV[xy<vQh'"ok)nKpID[>f9ED 9מ}NF-CRi@47rp!ħBW&{pTA4פNVE0%_u>WDd22C2Om;ބFR{}f'Z|~l>A|\0ϓ_Z}|طc]R_#S;ftEK/)cpKdCʁKsckcn7'*j˥ L2222$1ʇm&+f"πB BTNT`u̟9O]k|4i<,$IB80Sci]Lgs!1)pi1DB6ƠR!I^Li"t_-. GYzX.B\Z♐*7.cm˼+v^<2)y`8˚P\1+qq76,B taԷ'?5m?!6 %hݧ= 662"\`o Xӫ2wXdVvL01r~]č[z/Jo~q7O0'sÈ *i(bM˰,'1&2عOּFLq~"B}9ySDrUFf0bP}7ߕpEV@PRp-3Xa+4A8@Y.= 43n!gؽkK6א *Q6"G+Lˑ}Fae}wR_Fk`rQ:x45x|nM|?,,LÎג;"hzLtEɾ-8#ܣddfJCed ƭ3F _,/%A tՄ\q*~dŇ\sp!DqOçV3+',,w>쌌+~.X+~. 
QCh% |>zNxt1bh4RpYY ̚9]7>] M8Zp* ʧ0!###38_iHS'0zSRNPuw뷣VJy|g KN~18n$}f2s9q*䎍'܉10NnX >nXݸV鹌>4YzV+ ⦅5P>+V ^\^AbrHTBb mѬu\Qֵ4Pi18CܭԨ{ο,I\kk+zkǃhn+ZBϹo eĪU;x_vU\-.tk>reddqvdr;U̗j 5/ upuJ='f/{0E$T*, x!UM]$d\u3vl"h{q)} a; 9 NJgS݇ǨUlEgؼ3sq `EhP ]]^5nheܘ|܊ˆ33C ^]1w>{uJCPk\^fP(*pQddd.[FFFFFdL)l A!q9T gDNf }|H` A*BB (鳸PhJID*qD'G%`4xQ*4d@Rl|1I\k]5s)ioJ}S+aAZ|x3'8\H+?;JX:鶩P<9[ĉP۱CpvL~Ex PBv;wqiEvt-z*w~ēEj:6W#_M@g3sD~n nņZ-룧D~~xn*m!,m$k1W ~@ۛw31l6?bɄ*WbľjV$#Y}lµsUM/ MD} ʡn%qg',.foQ-ٱs'Mt!UQ߅(nUHJ̙qemqc .Na0zh MN_y+ J TbnPtIKkS@C}y17rsߞ_CJ=4 n}&*M,]g[LQ) DAbgFAADX4Oamחq(\־X$@n\(}-TW5@w5hT}$M[F^|z1|%%1KK .q)l/ E*S猥|yHbe ̐@N'+CA/:X H(j|]UuāoNWqDz(_ɮɨ(YA9bW15uCQ(lxT}u52m!7܅e]FFfHs^1膾r!TF@TT(vC(z C`(6 GD$Ɓ!k%|P-nG"Ƈh*9̺[|%22+?/l #$IA/HHON򚔗Qvh #x{FƦLYqc__=^T*%{7`D^ .abX|}4Zu<--؝DA߾=߹P>K?7wV~qbEop *O,g2n?PNÎZa૧ҏR{)7܂JGQBO!ki!!$}5]tX(xzZ[Ie1t~eSl8]dOV“:7,_1Il9PZ =-|ujh$v;ɋBJ/jgyJV  ϗC^2mD|dd.'HrO*Iלmp&BtLa*~L2z7^_yI!O߼OpD,?|6~W8qlW3g QW^ n 0 }[ih'qhY)鍄<ηZAiG1rlA^ I氘ʏw:]}(f%9K ;?#z|Qh|yЌv3ܵkޞ͆;5Y0ѣ'ik ٴVU Y4}>O}_$ FFFPύѩgN8ȼ Y=5hF,*[d g`v7f>fM"nd"jPHFFfPCn{Ęx:Z4p q9lvKj+# >b$krZ,inD# Y HR* Ccr%|5[ZXl"l#c(VCDdl@^Mh4t?:=%=7PERSMQA4 !T_jPKɍQLɊ"HH(]讛qCihea:-`R/O#?o$5.h#\EW AvR*#%ϺUpȝǏVtWd68J^<扨EB`t| ߘȔ^1jr X,k~x<_p?W5(K7n7 \V@/ (ھ%n@2)ɋBB\X,}qs/EHtKB8lVFFscLκyiW)o&ln /+yuvޜ4:{zz0Wyt.8p=`f``%s Ы< >慎3^Ig#QC ;;wna̘["mQH2vzo?qP IiTwo!5#X~sR-R`B 56}amy@ MzTP&Eנ'D FPx͂z V3:eA ll"($ǎɦ''ΆeLFʎf|^}}c`њnBљO`j3U0CNJm^:n.d<^_ <ج1_7sh>nX>uo~HFj1kRIIj0J¬p6+e G!km# "{U&!YY23 _f0v%Ž"PY/`\8 &`Q `Bj7py*(܏( [va3wCeS1Gˬ9Kq#{ rMOJ?١dp7A S/hF.Q/P(|.}N, 񂠸*s?kg]wuCoo/&hΖ IDAT' .ՕaArkKxƏEэq< եX'02)}im7)!Svj*)ۻ ȞͪͥDDFR0&@cx=.AˌwR夽UIxOđCde`5昹4Y˝7 |5vc}F5&bvt։8,=d01q>dbtU|A:}rT>`'/wxgu_8=[4){%/S#MP*;2'+}ٯŲpC~Fۑc7.2] `JaYC<zW|ifpPYRAcl&@],LΊtц -J!yTA)#u^NۃDlL"Z()#:RZqM$E asl 6%U8hԌVK@p Q`/F=U$kq(A[K6t@yiii0jǃnc͖ NL0&1!9~ituӫP*n%ĥgXHZR8]v:ۛLK˂AJc5e[k x]v[Pj5>͞BIe3QDыAA! 
)Zi#jZ跺Ö́'_*jp\D&aQNCht >jv׃VhDm=F3!9Kv{J&/CvcÆP '^|OH${7o[1 爒td+nIA^z$2@ `TitGdZlɡ!3= 5.FLCUAd#lۀJ0%l gu*~o:F]kC3r _}݄/quQRIʼnC0л;9nUHM'<؈kFA/ԍd7*yP{ۉ!RNP5rZz\FIW+痂Yǡ"_wYyF4rҌ4#GHMM+D/Jt H7> q"0W/[+ˉNN=o5$N+[HJ& V>$DJj"0CO ZCx`y]WSK'Q៕tPߩ"хKOFj4- G9~].K|#L LYt+o^+ OFFPg|eDT"aA2ƍE=d _b32_nJ??Kk?^D?Mr1"YJj"2$B?$ $R??3{+x5$DpO*|e@8l)a (8QeFi !`^QK+3Cчuw3Zc( "T?zWlz%2oJ;N>(xJ+mGy*&Io6njLL$Ns-ePLCXd>'BS2-##3hO~C~?bs{|X]WN/I~>yssnG` *Q[WՏ\HC$_-/YKBXn>:k=-@1s&\ /tIUjop}vb ~rJV>"Zvh< ===xx :4,3ԟGRAk *,_1E{03vt|4%;1=̽NDM ;i$̤@^]gqc ̶Ė-[ R;23 +VMKpw%#'Og&22gt,`ln*c[_߯]-Oދ\fbIIM%)vn~;Iy_Ӎؽ7]>rʏ[k7я(~7ՌL ?}7<{)o{^]7+rDቬ04o cǧ?]{6ɝAF*awxٴ S??k亮r8ʡ'/pF[/}X @qcYZ(;m5iLxOnZgN**(-p8ަY}I7]-T/@Ӷs-]4$M_O_w%׬;0` ΃?|ۜ zPJG $?5@}] Ňظq7)xNZ۬l޴)88U}gDW|Ґe9\\JTPo2!~iS0Tֶbo,8nvnB]Ezj: ɩ|݉ (Mòq8|-C e5r!Ά tۻJQ5Gy~e~UE A |ӗBFjA:=}Z3;}۔M+]);khrJղyxv򿘕" g6W3̑wߠ19ƖZzl1GZɱמ$p~z'ECK_p#it)bikO Aƪ4|OJg_7.>8hdvnz馥;kƎoN3(~S"27c0pu$cB&dŰibp]F?>niNwruq+6mfty5Bo» jC$Ϳ::ͼK.UV}VFf_\KljOx8o/u2O:E!9_~+Cżߓ" IFsh6Q|0f L#8ǍfOSi 5U5{D߃u?sHo$>1+y-LN`[ kK ϙ[=i*'67 -ya {(.<δ #PTl{UJc}9sgD.;+[X2*" *GC{}=Fl(QIN`Yql]T!60j\>FyX;yMqmE_rz#[fjf-CXZ  .ğ;#5xB&n_9MSn#*+8vj BrR)q\T*\nśW3brN ÉVvVB8]m6:n׃BѢv! "xH yPԸvPQ /JQDVUj$/v $$TjgTrv N8v* Ρdd.v>r都!\#bZ;&-'kwM6aft~bnK]bs3רpbٷ&.H=vd]V/o~bX99pS 6y7F]Y6^ SMEDEE(@u<^'vV]XWsOF#=]ꦻ^J%FzzzPJ >>Fdd ?5E wx׹?3MZU$$:b0Ƹ8N7/N77v8N۱qc @jXѫՔsf9s|Oy_Fe녵b7>! Lrn妫^}_9^+dÄw5-{ph_lat;I ًаGB`ڷIy=m/G?}tPSQv^ABH93?IaDNېL<;/sh+!59m^ֹ ;m3:7&bch${Bi?I $s .m+9tw4i,J4`:Gu2Xy1q\8ՁG^ޚ0׸r;鲩n$ FAeKz=W2j]|P9Э,h\C /^xdU%L(Yx\6zͅCv'3 15r۫`O 7UTrU6B*{T[ L؏͟_>p(UUq:4+eef,,D[$?{ IҀl#屫p8p toҭËD>2ެ Qy'9vwyD<ʚ oD8PMNj)ה1{K;$X]ԄY)<ЀlZX0go ME,BNl^ XʆQpM>! aWpR)0}DM?ǏݫOCCk0f&ek m($\Zh5EUiǼyl`/fgRo~fK!609>F+=qav)@Emzj?e׶`δ~F7  qK0'2\3% ySAD! 
^b =^7l:ʻy˧\Ɣ Ϗy[s ݭn#1=:Nчc}(HE."JBwW(Hz?zp9-6=YAq1o-!"Ѓ}k-EGU.=$x1,fihh]0ǓCTtE'EEDxE`/zr&S~^L~B7!MeK/MNﲹ)itw; Ɛv}@3&\.W?נ>5` w![)ݱ1':pNm0= |!ź؃`2i\Cc/===#C+ #U;FdYO=(]˽½D_/GYӴ{ d; 5FKqܗ.(OfǨ*H[Pi]z^2صk'fG̙=UU5o":;$"%_p-LvUQ%3xjtۘ0F&cEl~g^F*5 Ko1UMfLhU7Ofmܲh xCxvu|}JR<6x1t~_o"ATFE:- IDAT>JrzT& Bю0]MU))ap<,^IM๿=Ϭ?:k0dN+p԰]F2=+mɝTʊ62m2 S G fa6nϵ7,c\ˍJp-f:[ymd5+yn#o} IL,OTX0铏M0#UpMl`wS7Vb cBK9cfgQXeOlz /搗=D~lxxr]GiӁ,˨(Htj8t:=h \TL0Atzh>L A\~Ib'glvdVW;ݳgFevQ5]*JA>N];8pØUd.A:PBKYumc37)]ĤNYT4v 騦0'ZqD7P9ĤidRv,_t=zŻI#xQkv ECNYIZ=j0v\-7iUEfr A#s L%1U;ȝ>(#4x1jl6IaÈ_uƌ%)ST~~ڊ= ħ0:5NҒV]LZ2Vm'(4mwH0c 6De hNZvz2")(1~d&NH9z8ƌ23y8:_8Ip8ǚ+|H4Y)۳!kQv+ټ֭AeE9rOhkP0o17"/4ҙLшUHKM) D[nk95skCF qi>p`ҧ̢03_`ѢBn>H@VF<2ƥ(5kžR"}i*OoO72Z%-"dD[~+s_O^FRVQOlL%Ŕ%7'ۈN!R=IxLr9]fz:+$#zFՑ"s0vLą+ɫ; $geP]UenGcz$6!D,lMtiSk7̘7Mm &<.7)ACqN.j^UddJ}x2w(ֿN5:ӆ>"03ЬohsB܂]Қ2=ҝϊuiUU %#@H dEFg lbuϥcs*+*===6R.oID49ٹ(\: ,Qɒ|_t:fĠq,3M+,K=1*38bӘyB! 3҉ɧWptb,d!#ʃ[xιHN'>!~ӽW^D M%;SNJrҤo9bWxʦvN w10/(Gfdj'߸w>3I1g֎F ѣ&Taᶛ+cyϏ}wDb/--m {rYDN?%(uL3m|z5ޗӮwvɪO g Sh-E{ R(.kEҋLPhmd[HMѭ(Y76)av]#F8 (PUׂxbƁ.Zڭ.JALw翽 9р7UxSX.$˄Uo/GBA̾$@@XrS*0@rlv1lwd/ Y MvO?1&{/,~v*;[OwLviəo/q}ΆL!CH;Lr;3 }+lg!2m~ؾZ#;a"ub_z >I>.8yޥg _pB>EBE ٛiiMIiڃqX=hkkbqMysNLJ71)FЇa&0J|O%m570oGf C+twÆ_^Y 7&^m9sUR#̤G{ 9>[̌@&Op:/2v;c+?!ٟoB' |՝-LX0kK2v1,___md r8\Hn"ILB/ ph+) tT!;=+{#\Iww7^}qΕc$+" r`Wdʣ|cqTZn]vE02j!y' |Wz%J3"⫏hO˺J9(IȜH/1M,Z4KRB4 Q8m7Uv.豜GHYpމQ׏_jT w4˦_PÏ} }6Ǝw|7\!_]u r'F;7_7 oOBCcYIxz߮ ӝh֦XF"h2,k#:\3W@>/sAGn(ͻ'$>~ ~'NNeNTdt4ǖ~V8Xf험=ZhX癄Gb0xJ;O1/x(~Sb3]1ȀMK*1/,O7l`ƔWz()rĘAh\!X6eޛiά`[ ?MMG>Z^k8m̜G>IIm!Z:>rtakиrEa'H*.跣]^Sγ|D~+̞d#FTHOӺA5o=5r9]ONZ4{jtc4E)9WJCɑLŋ9y d/BAHFwk !۫xΙYk$21܃%, Q4f?͟o$5<I')6x  vf]]!9*&1ԗޮ6n&_$|<ʊ$/'Z=|غj%?Ɍbeˆ'[(~CC?]k&" B.hp͇a/a`0c;IæÁ؏Oѳ o%192mfY;Ŋӏw:0i;@~:A'cBU~_rH@W_LHT8L"̬/L/$*MyLɉOZ業dG0'0.®bS xMLs Ɛ>_& 1bvzݻwFeYɄ`򦫶$j1w{W)QIgNl/8諭a,XʷzGށ1h:z {$ԑy"(nctR*kMf?zcCT܇##x[7?Dpb.IvN&U8,!xQa.229q8 t t͎tޮdYEokG@ 4=H[͇L g/5)S&7Md8tMQTql\vMmh^FF(*G $ BEqyH|k'|ݱy3APޮ"n[qp`~32ą׆ [[=xz<|n3FN=&CDjCۉӋ477Sw6PolM%kT:+R&"ݣ}{(Ml|zW_x?C"|U^[w}-Ć 
A*ywarR(fj_ˊj-7ONb4q1,hod劕LND\͸<<0<0 8 j~:B՛.~ؽWmob)Sl},쉇hT2C7%'* w܅]%5?}[ 씱:I4sK1s) `y;*ɊCli$!%of(ʢm)ƀm[cgNA֎zvߋQQl߲&"b2JT~V}FPrGkV!*6]zZ N;Cwé2QOlesW""_0?q/K:?}TҁR4r>`sl9WUuN %tP~˽Ve@#6mMEnővtaXTΒH޷ GHɃydNز,@ֽykf{ywG'!EdeSEYSAҸ|C͝JRϣMWy21p(J-QL7RTLe_Cc ;*1%fLC6kEJMF!df\V?BBr91s:_wTˁnXzq)ǿ''0}TwrZf}t0;@wN7ݰKI'_+&`Bhln _z=(!/NOeY36JP)ƁCeu/PK VD`ay*ĆxhLC{lB=/vEGN@&/a^p0!ZӹF34)XXr7X44 ^Z6!^,> tnQb&a_\l:m7" o>+蘆˩ǯN?:|^$BT ۊi%R񍄎KK#$ >zr%Ĉ'6>6; t@bv4F<~|dXI9)#{$+c<=/; q<,Qd[Njx|bʴ/qf< +?.Jwvo㾅12!?~Zϻzmv<܁ @st;" VZ)$4{Q_~Or(\`Ң BDZGVPt~Cdb.:b#Ay1D8Fl6V4FߝLJ7P twue6*i(zJ?{]ąx2 (x_Ao-B{~k~ 1qu>PAQK!niooCQfr:bJqgQJBGMD͵p8LL\1rX;qua&a`RCCCc(0 xyyx;Bn tڈ!,;"y//˟%3B1?*wQrV??|Njl \T1+MV9ZLP?|oF??_0zzzGDQDD=mo y;X ˓x9J]E8AFaD^^; o݃fAHdyf4N~qEJ|Ѫ.7+ h}x$_<סw/L{R'g  %P:vW'2j\͈jH[[Yz3 # UwS 2.&\. .V]YѸ\ 3CG}y}x[-Kn`e:y1 `{V@aMh3P۹em,1ޮfIZj{Jp(zsI*%^#stw"kj ` /[֥Are'Lxv/44BV6I͟H,R͈f* 'df :MUU\-䦆j4444+/g;K U_?>`V(L9ܽԑ*vtlW"|H.jhß@o(;K =yH=я h(`15K&)0?t IDATU]8w຿h\C:2F3z,-AhxdD o(-kAFS4444# 2EPFL7CrRA or"pv0$̙[>=#'?ëeUE;hSPMP ~ |NAVE,~^HÆ [4K60)Oh:^~2qSaۿ'/pZ0]qwc7jeFJDBLd>ZLfx0@^;zHT2׼fG s>ICշCjIS5{QǮc Mħgq(JV;\s#,-nRviHQ)|^aMw0;CGL=PP{t®/(e0¤oÔ'X=%eOᲬsjǽ;eKEAg',.zVHU)+ȲSqCoWYq{N^KdoeoEWeUuy~$IWosRBxSFYwREX7o~U\?)^zZQKKSJVT<ϵvK~IbWCL>Drg 0$v{5m YE`ԥp(!4wB}(UPq^1 sw>Veނx-y.0>^lZt/ӑn \ wNvS8m-lBqxͺJ`1@ǠwHtii {AQd=/=sp,`yIjmWmzQBl+utUU6?Tpv6"TUfqAi_ͽn\>yp?Gҁd Yjk^tPQ 4T)cp:H:. qȲ Iҡ*v C%cINHzdxzNpiXjhh Ouu"\m3G:ceglF߃?-mm+;.Jus@fuhUؓDS0ꁑ92шb2/|Op;(vA{{ ,1 74~;۩pL"<[ҒӱUkVɏ]>^] ɤ;'n=$yWOOJCٗZw'Oiŏ$?o CWłT:53ghOkkhhhh Gs`e'|<Y^iͥǺ&b d.Y6z@]E 茐?nyf7ݧsOTDOj{UX6}qC=Ѧ'5D;oѦ20u輗Θ^EU\<1@0yd&G 9B~t'0BCy{ ī/[kI}S-EqdĐzׯ¥$$M)YzѠIՈ ;!(K::w tj8铰68[h;Rz{'#sl,4hXʀZk@:[{RxV1 >/PW?~V7o̤MnbB`ۊbήFyWBh"驟QB" ٣BKɳ#b:v֞,l bۯFU! ? )yxE"o5{E4s]w7&'h+/>z.Td ƠDx$0.Oj36Z3bAݯ_9tBXnzW{HAU}t /eKI [;(-WGB\E0zjZtxMb̸vmɓkxFƯe.Uo3++yFQ?_z=G+Z A3hظ9hSdšDah 9XI`T't7c|  A$/Jу:#"O}wϜ]\~zg͉4D=4>`dؼd|kd]cmX44^6Otjgђkq8e&ͣY8dfaNlzKLEj!+ŒKohwf ~`2-"(ڟMWxx:x{PTYFt"FT])NAk; r QQqٳy%y;*}! 
@U@EQFjs3牨Aq D9HrcUU2KFu(\Uv Ⱦx* ҧ'B;*n Q9!x@b㚜DZ*B2` -%k i{ĝ͇3 8;}*(A!|!U)*_V6G@Ow`‘d+f&^~:pnoYV=A~^x5J47QYƙ"5|_=*)dGQ(+ޏB ic̙ACTkj%v4V& bӡ6"V잁(i, #%-W3 n[J90Nz:/Sٱ]BrɮUd+ Ӻ3E]N<;Eq7U㔆r܎:UQNi':\dTTmZuEO O >!(QO 1r⠿dͿm;"[ªȌ .@{B$|amt@:0$^`fpSO  ^ҏ֯ (sCnҴ'M_@ȡg`OϽ:zt|ybS +MT2{CBf6Hݬ9s׿{[ri>{MXH&^B୏,%hc-bvNBp\g5/ػu5o]ZG-Nk}6ApSU!8/Qb&qic^r(rN6+&TABIA\;b ;B6tU_AMΨB0U-fM=fLv>XDz{/gFt_';qͼK֣~kypEvرO{`!ln\Ͳ,.yvxEuUc!|NfŒhhh a?ȇ72 ݆3Q2B0Qe"[py@9(v3~"oX߸ NDc6It7"(:gc.LMk a#r۫ICc(26m+W?g DEa^SR?_lN!6`h<}" 7#$w&}cǪ(],~]I:O | [b6t&Z Z/ij?Aԋ;/֙s: H+kBw`0 O n'oo1 `lc:k:S9 Z6MrM$>tѧ֖ntvvj}/7r}aqyEq4LEww7^^^#^7qۚj)9gPũpOݶ)#޲,#IgYkí+@: lfk=x$=1 tΌ!W|ztƾkTuxmϠ"CG|/B*<6=ackUnt sAAKKb3 `e1X.b@?UtAIWjSCBI|PN֑r͐ ų #g{!,rVЭ.ZCox;*kv'9"fh~-OƲ |cצ֖S{:}{ݥBS9о֋rCBݮZU`aO|asWZE1qBq~ubPe'N=jyFg " OeԀͅ  '> ƞ "HXIBn cw#;t"]As<1[4#gU?BzqcZu~Ǚa.$+`с v;/UwpY Ryۻ(;}H@B@@¥dP)bE*JG*ұN[X;ժS[`Tm*K .1 dCo&>3L}<}_yL4H_͹zyםP tewPy)J"H!8ˬgU,4 (lNO?0W/_EN:xO3f͞Ɨ?GN?(syL&ԅ7~>0fƍ<)4~ԛ[F/}3ZwO6YB OԬ p !]Ϛ1ee l| I>췃B.Oy?07@7|x}JHPOM̾b::401$E^^:''!19͇}l / 044e";ȻnnGj7c7V_YYyܸ<#=cu_A iBAABmG7>"k0,ȘSs *F\r-NωН h+EKek@{2"7ϞDrl.]Ze*>Z:=y -H<^ʎ{p;ux0|5l,9"1cЛS`f~$,P+ysKɞ$D8JwS{rFV:W}|;b8GCh+=UShXcU xnSͺ-Zٽű>₭RqW)jsҔ /r#1z̐Qo:X{}lHӧ9T }\50t!R5EE=>ulVdeI?$@=р3g 6̉92<"%ǽȓ? o #sU=nONHJ&c*kEk'7G)0|+;kc><&-&#=e$Fٙ$:yi:N,}̹Zע/ɞ:£g31gϬ][Vuܼl>m /HFV^m spٽi1F( n&iewSeCקzW F_y_n;xbep=t3F%FQ k8`B})Go|uv$zs߳ j` ujk%ѣ];@7C=hg0CJ.B!UYEv&ݛrn.mp !L`J;o[;6qp6QpH.]+W\(-Ȏsqm0i`6~~t8vn5e#كK>;0:Bfn6:@/c?{F/f;n:NU#Xz `TɋjgD} {+-?8 ?OzGg<18[!B!B کҲ'{ 3ΛvӶEObo~ɢ%Wo0 ֽ"'K=ul :k?r1ӱܬ`; C2 v]֑(M~u|fYXPir vjK1CrDeo]tO~˂apST`՗j݋~=uY1u A*m^^Sr$S]Ot.lƈB}U= #?wt;Wط!(u1Qvkڋ8hH+5EcLOV`hě.v< cEc>TމsYMt4+Gck߳\~JN_]=ZŻpُ^\ 4m[ W?p]_vj J.B!p8L 8! ~.*[` 0?&{ L3n0Ր6T>. ÁOb { W9ܹs" ؂7maSQ=dcÏ۹08љȂqhyC[vp\RsoT}zbn{sro+ޏ 7>q‘ρe)ter}!9 lWv#eL=\Pe*t{&Zk/~}L$h!2}!?ڒ3;3 $ѷB!B9B!B!B!B!]!B!]!B! !B! 
!B!Bt!B!Bt!B!BM\Kj.IENDB`rally-0.9.1/doc/source/images/Rally_Distributed_Runner.png0000664000567000056710000051020713073417716025021 0ustar jenkinsjenkins00000000000000PNG  IHDRP,vogAMA asRGB cHRMz&u0`:pQ<bKGD pHYsIuIDATxl[urJp*YWm:R #@4A 6Rͨ#;bmܹ:ӊ?:Vb-L&AACΰ؈"xew]b|Ȟ9Iχda>>}}˲,*jW*TA >P* @|TA >P* @dɤРd2Y{-9mFW:XfmhhXB\.< ޾F ȅhThTdR\n*m7\W~`rƍܬVܬ+NvU4TVX*R*ROOZ[[Fox/VNF}H$<ֻRh4V*;::NuuuT*voj|NVwa?l,;s>iT* .O>Q0\B)w{w&,-vر^N:T*nuttT3 iqaR)u8?;_wѻᆱ)IԔ6mtK}/hCCCOSSjW,*N. z'IzgnЦZǙL&#z[ުhvpCr9'(8!uip͂[۫X,,s=KX\CY3JgR-ۗm~#EO/*s&cZ+W>~]a}Y`}^Q;>>^݇ޮOmVqE;砿7w1\vY\N jllt.'(:e'cy핗_(6jllƍkRyUW_if/=mXudM=PNk~466]ۻ?dE=<<\Q}wvW{^wlذA7n\p sdm>Bqϵ_9XD"aI$YDbQIlӞuPȚ[Tݪf=e.$9OLLܰp]mǫ133cuS$Z~<1;;"mQ9ϱ+> è8z,WBeLOOWl~޶?WsV>377gub^T{,,,WuoD"m̂XZ;33l\}7u6u<߯Z9[i.E3j T޳쮻.t^b1MOOkffFiznڷouשEa8eV*4::޹s kSN9HR"iQ"pA%TqqVp8̌&&&󋾕y||{2 4U'맟~gmXԔo/J2 iP( eYeYy}%NY|^PHDBlVӞa-:;;ҥK?_lVxy/;_v۩S%illLijvvFGGu=2MSPHx\i*+ 9 ,yϯ?ϪݎlV :Iτs8;;빖Ǐٮ/_vb1peg޷۳geYϱ?C|b1}L|OwvvM;55|?a15gϵH$4==k> NpXSZoω{**_g=P|ݶPOnKw/)}۷8=zr5MyVOCBZ=d-˿my;>0yV W`ILt[H$ÇRBA|xm߾}ǎӮ]<_xy~O-RI׮]ӕ+W<W5{嗝1f?z衺f+w+ qW>7ZSS~_kY_/8Vk8^prXbQ`bZYbkz|۶m5׭~8vݻwz[l ڵkvڥ\.+W֕+W}vPHlV^eJ'tyϞ=Ux=V;_N׶?r,pTZg!(8_ {/Z~J$?^Ν;5S( uY[[z{{^nITVX[['xgweaMX􌭺=Lz-O0Ro^{5sv4ωۣ>uzWjyW[cz{{4M>|Tj><+/N;=~d[^,|BvdQP/W{]~Î; 9k߾}umW;4voV qOHWi+][]z-缅5tZÊF5?61:ǧG  _yS6x>>>m{dRJRowU-|) zǕ=º{,9sF ZK~8뽅: V հRs)H^bΝ/SZE /WԔ&''+ Ϲtw4::T*G?>oO>3 )J)׵o]7v{޽{=~Y:V__~fWzeeEK$$KH$.;ۅƜC-410 gzccci ֭z:ُUk˹9+ 9aMLLXV,sޛD"aZiz0 +[5669?PȚqf"HzqKn 0H$6DkkV˕_#жPdN( nTj@W-sՎ{vv1ׯ. 
}?.P333S *<;~GJV1X͛7;hn޼횚H$fΜ9h4y/T&ѯ~+0 ?O<{ǹ{``@[lYTS{L;]SO=/瞓iD"VWWs}k} W_} =ѡO>Sa:xS1\A0t#-8c2zzSN:s?ۊ~mڵȈ~魷һm='Ν;k^#vieg߱n^snzꩧDO֙3gtgȈG}T;vSO=;̅5R[i2ԉ'ߦo;#ԼF}>|XogB{L<`cto ٳG>lkIΝ;cǎyCi~k```FSB655˗K/U Y`{U`0(4\;pQ'OԥKuy}Q~ƮIeYjW=NP233*RI_Wm|WקP(,)˩U|ް}L}+ֻ[?<6J?@bQ[lqn5M[`ݻ%z# oV` r9%IOx:66Fx `]+Jrd2i @b1v\v7|PzP(xծšslVe)L/eYb'TMRڲe jW+2jWaV ѡ}嗊{|^anMLBA~m:uJ裏Vs{ .8˞y555IFFFB̙3裏d"vء}{ѻ'ڱcz{{twܡ^6bQO+ G֭[500`0`;J%-$髯ҟgݻv|G5ۤZWdtE"nݪ|PvZŢ.^X׹]I!I!˿hddijN~ ?~\W^u0 {޽{.W\ѶmcGQ0ږ6yѷm}d2|rk$W_]R6?bQ|N:SN9糫ٳ'NhfffU;YkP6$9rDy/HX333Hij}$~6?11Qs}{sss5%T[yݦܗ}ܵZpw{0EIyۗƖs >>s^K<avv~kP2MӷlfLOOWr9uvvze׫zzz媖S(eMMMIi*;]LԲo>9}OLLxMRڴiS:s P>w>t:]WݖK9xbNNN:=X%)kllLDBpYsΪUe2:b166ogg;VW\SqHٟzk|>sNˈFJ&>ρjwpZ@xpvv֊bXz z+zMis},v/=zq׹Z/<4kԷ]jZ=Pt=J*sވKjYgcccj]^fk^O.&gggܬվV/˲<+Wogj'xi^^?ˍ066Aϸ`P==h=<:{lmmmNOOIz} B:V_b`08cڎ9<7*z.~=3DQtܹKXݻyJ}vyoJ%5559D>\NKKy3g<E:x`0AMLL,x^xᅪP(G$B=ڜu$>H$d29---M)?z9]?]uC=< ^_7t >M^?eY,}[PϭCDBiwݢŏ{{] j'xڭkM P2ԗ_~)˲RuyK>y8LlΝOӪ˛L&eYK;o9֭[+]>`% |ڵkVc{cg/w?~ARk׮յ;D 5gq_Y g~G__(ZU,=%oZ!u P$pcS煸V-[8{9*X6o._~y~!vy<aGT*)hxxb7xcQyx>\1YSXtx1<<\QN:Ν;=˖0/V~#ϲ .,i_k*mݺy>00\.Iver9%IOXkj[˵) I7<z)ǕL&U,U,NkhhHx3ӧ=CCCW&Q&Q{{:;;@ZSn`駟J &''i&MMMill9|9X,5(ZG#HͲj311y~$ gY+T]111cqYaܜ577gB!:Xpx:LOOlD"Qu}Ce抝z{^ݏp8l,^ p8\deYx<^jV~]Gծzt`1 nivZp`0d2D"Qq 0 MLLh׮]Ug}VӞۦ%9N_ZHԤg*9Z;==:::51R}U-矫FÊbJ$2MSϟVϞ=DH$tYI=힨ծAMLL(8ld{'Tc&&&먣CX̷QeYjW`J%]vMt]wҽ4FbQBAMMM=F]Eg߶=uXov,t-U4u5MsYBb\Xr\uƍθo?p ?DQ544w\.焧 8:s訆aJsϭv-n!w nXɤ&&& o)u*J%]vy}mڴImmm]5*~A >P* @|TA >P* @|TA >P* @`H&u=կAdrErr|^h:źqjWSzlV`o͊D"ڼy j{5|=jddDMMM wuttv`Pf=yMMMU,뮻V~d{%JtjPP*RwwmG[СCVt{?{̏BNuQ477SVXonMyvm599[W BA?Sf.n6~[9뮻o8RxEْd_??o_1\Q{ON]BP՞nL&#IjÆ eW}>ۥ,Ûo?G]tITJ/^ԃ>Xs6_W眖d2zw>رc|}jkkvV{nqʯIR)m۶E糐H$wwhyVZ\yXUDj211aB!K xY'Z2$YDJ$Ą0 )~/Hx^OLLXHĩ][<ws.]^6,˲|p^<weYkzzlpؒdV=2MW8ve577{> ޗ_캻} ai>*X,fU\'~[]ml6kYX9~k׭Zu$kllBpZbעٌN[ˬ<6TfU PpfffƲ,˚uB ;|T;PC+wdb5;;k9r2;^:611ᄶv}v633S5u+]; Pιbl{04}.u٬6mjh^Hܖr=33c8uL|0W:v뒤,\]vɲ,o?Oβh4v ڵksjwyGtYzhii /|>sU}0TKKLT"%_4֮GP^$g򪦦&n۷osm>εs+wkg}-Azu{뭷$Io<^ŋ˅Ib`:qOy 
/T-GmpXSSS2`믿Y{z˖->L裏$Iw}g;vHJ$UNe{귿$9A-8eHҾ}444SN9!؉' 5sjhhH٣ DaR)uvv*멧RWWׂRަ7o4?h6=~n9u*Ihtuu3MSR9'I:.\g=ݞk" P(8?<ʳn@ |loB9`]z{HD[n,;*Zwmյmj?33#IU'hK.s]/{;;[ZZZdFGGU*T(do?Q}}}jnnVWWJR]MMMn;^E"'oG(XPcm5 ]ÓjmmUOON<S_ʯZl$a")KZTմw^P(x/B|{`=HŢ'|r劤ozڽJ'l{9rDwֹs駟J~ӟ.֦Ar9ٳGSSS:wsK}=}Ԕtyɪ/]iMP~ 3?koaT$ie2={$y(jhhP{{gۿ/u$]v9|>pQ\~V7TKz}TАU(4<&''i&',t -iСC>cǎ0 '. :v오ړQ=sڹsgP:L&t:;v޲^~m#T]_U.S4Uggvܩkɤ23^]}#G|C?z򇇇5449\VX,&Iя~aggs< 3vK&ڲeg=:O~9ahΝ.8VFE"Xx B$111ᬓf-IV"lH$.2ʷ_gfX,5 YxܳmLӬ̌gӖa$+X333+ Ysss rfff*o-~b1z5 fuW<Bm:==ln65Mb/o+/Vfkmk]j#z˫v-Zccc ^vjG`Y!.@5BAbbr[T˗uwVS*t5555yX{>/2ʷ+o|۷W_iow]wU}o;\.C4U*쬧\N\<իP(8,T{㻝}u]w%ivn:H&Q6xX,ŋz衇jc_P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P-'N+j|||RUTR.SPXph4h4Z׺eJ۹X,vUp5!˩A FРd2}}gJRtjG&6lؠV577;m_5KRJR74ػL&c_WggbKKZZZVVTjWᆰ,Ke/t[%40ݾVpc n9\sky[[OhyfuttxرC---O>DO<ڪX,>}z衇Ԥt:>)t:-4%I'Nx?(L:!bPP0$={T,;#ݻWXa جDBPHTJK:h4 aPodR}}}$04==i!Is.SggswvvV3|j Ŵcǎek?%[spbb³ܜŬD"aZU_/;7=0e{v6woTxDEnTw}mv@wOW{zT0^cu,>룼iqmn{Y8ZSϵ?33BU뷔彈ݞ6kwZ޿}=UnV=PM?$駟422h4;Nc{gS;#ifyOr_Zm500P+{N{{'*jɓ5Cje;>@ ֜bX3$)`0l6cǎittTT N8"yo߾br֦̾6*L**JihhZ ZK@boK/U k@@Νa ǿ yvW ī{[$+z^/qz>O-R_Ң~9|{B`KKFFF477i7ng_,A'N8='OJ$9 KR0wyڹpP-_m_*mݺU|8_x-ZTxmmmO?tͺE^k*Xܽ9yם~:::488iI!j.zڽV ?+NZ $UzG3n[ Ò~;::Y;LNRV66w(lNWV=m˖-r}YgFj?PVŎzoF ;;utLF'N4x١kPP#ѭ Ǫ.\{Lz뭺 :yR^d2NXW>N׻X,5$=ÒߥRIJ&;7 :)O&y' ᄭ{4߻]RT d2e 3Vخ *]]˞͞;&''W6r#@kF24SJ&T{{ pn?wݷm]rEwBBq_MJ&JjooАg7Q(R>oRI\N]֡Ѩ|^G$9{sѨ$I/BNMMP&Q:a Z=d4\N討;*uX4 ` 1M2 ÒT fffP(n"$YH27i}E"2==]vݲlzW[V8p8\p8lYq~me1B!kvvG"]'z>޹9+XEy~ϳ{Uۭ5W8jsڷcX:T+{׮-annnm,k#RsO?u\ܱcnܹwbO?T/^ԥKԤ{rt6o欶[>NgPП'}gڼy7*+VO&qowܡ;vT-Ǯמ={cֱm~5~[ZZ499ݻw+8jRSNO>իWc=#{|[5>ۖNqKk}~www2yOji:]tI[nÊkbj8L2`޽6-:y.]'|R=*݅g-[r˾Vq=, n4T%o.Ţ%Isss WK.SSSSE7<$̨iYq) GUPp& bY*: a,D"zֶ[t:Nu( /g}v]V*` TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|V%W_jW5'k߾}jiiYj,ZJH\N{i]<0tiT[.@moowS0tm۶mq=s?ᇫ]-XT*I2MSmmm]%֬L�$Il^nI$R_O PS IЮ]V:yڽ{$innN-;5[-ƘgΝk׮vu%TPzTA >P* @|TA >P* @ծW,U(}ZGCCqF WtR:Jy^/U4uX,:DZRmXT.[p"@]"q}ihhH*] |^L{O^FU,.]rz~ג fGGG%d=mPM.S>+za[("8uD"J$0 
Ic|tZ[lQkkZ[[{n?~|C_]7U=uaj =uꔤՌѩ{OO~VwiE"S9ӥK$͇vO?rC݊F^4PO٩|>X,&˲dYsbttt( 98qBa2^|EJ%`- $% XEdRpxF ))V}|^&֦X,&4+8[ ѣGA^iJ&2MjCy׃Nwz̙u ڵkΞ=܆ uǎ+WhjjrS&qaB!9=Yi|J&''_Bt}8HŰoك^~) E%IR)I ڪI333 BN/R{mSL&U*T,F!ԛD0ԯkI3<,boߴivޭ^xAxY^k}D"I=C}&0eӣig}VpXwƍFaG:H_~eIRkkӣ ѦMبT*P(^ 5{ב;v(H8۵k&&&_P(I###ڻwN:|;VKKŢl"Iַ)+H!Ji޽uչ\IڿlTql._^zIRSSN>LF|?ҥKs͛%B= 3Cp8H8`Yڕa Ifp )jllT(җ_~ {[ {QwoUnT,̌:::VJ XО={t 8聊'Tn%|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|TA >P* @|jW/W_իWW*ٺuۧծ b22󺭭MmmmuŢN>͛7>쳪eԡծ \X,˲V7`$Ilv*J|:ծpÄa:tH۷oW o@e||\୥ךL&'|RiVu V.Skk"d]J%mڴI|^a?\C^srS ӧSoOb>S1 C\j+_iԔWZ>% 8Wȑ#2MSDB{Q0tzU K*Nxj2 P˸S0tm۶m]rE=LӔiڳg?rEJ%'"%ɚpMLLXX,VzqO;5y6yBV$<\6}PhsssNY f-IV$|{X,fYu\u797oZknORC+;acaXl֚5گ.Vig577)2}Y'@soka4z^T,sؘS?4=ܜd_i:ef>PJ$N[ PBvn$Z$s{oV,Ͼp߾}]`U߿_ۜne.]$jiiQ0ٳgH$qO:%i~Mv/z ÊF  zꩧ$Io=>Iҹs~_J;&I :w+ܹs2CSS٬.H$"4U(<^V9-N8&0{뭷,kIcݻWXL1O7lؠ~iJ%=K(rΝ;%͏+ /<$]vm(X:{~β@ K á;So{$ݳgg={<ȯڷvHOOc\illL}}}es#ӞpႤM{gY=٬?OjooWXchkkSooΟ?IR*ZRo{NקaOD_jnVd[ֱ_/g_J&^gR' B BO~[˽[ zL&F r!>@ҢA}嗚Q8iNzJ%pIccc^a(HT< up#@uGTً] *:qAlݺuIꫯvr9iOH700{Vz$СCέpBA|^ݒ/TJyfJ%ie2g&Ŏ-o>j||y{U*i~|h4Z/Xl TX {n +ixxXi*+ {4?9{ݻwKR/~ r9E> P(hxxXԧ~*Ib֦X,QMNN/ɹO~e2%ILsN'W:裪}ɤŢ3yT6{r0&XI$+ͮvU8:9̌ǝ6dakvvY'V=H$,IV"f+뺗Wۇ$kzzڲ,˚B 9,rʷ_2;;kQqsssV($YHXD"N;fYg}:k8[=u+ʳ>Ok>N4W\>kZ˲qoP6]LkrsPX,ŋzG*&Arh=)<ۗJ%]vͳ^&ΝӶm<Ư|i6bWq?[BAӟ*}o}˷.uްg}=%(k||\/_}=d` U,u]w՜շ9}X ߣ^?XIrspFjjjȊ#ȑ#ںu%<^gzuuuv `-Xz`%V^ww3Jikk֭[544w}W??|>ɖz)uuuV*`]F7d?;t%?~\7C @^v53.-߬vW:2̲{i:f544A7n'`p!@g}!q% 166f YRi !@7|>a}FGoo>CqIҹsus\^_g2[-U^*V05N/[}I^{5gYkk+-_n.F2 CRX\zRIdR2 YsvyhTU `ڰaN3gH euww+*ijjJ'O\zzzzܬl6l6H$Q>| .(q:u,k+qAfҲչKdR===жmۜ_[o󚝝U04DL&=e/Ǧ!iF?pQ뵷K}g7n$} Nyڵkj7;Y["nnw﮺0 ;w Obs4^XiD"vgOSSSv<**>XMOѕǵet]e{.u/JRRT]]O JP-'i޽ھ}6lذXsk݊F:/פU~N8QukEܬD 7\:nI&=u# )4MiddDu,k.E"MMMUV 78?hjjRKK˒7t:^zb;ӟo3\/RjWWOO"3YG5t҂eTX\OV ~=jkk[GFFJӣ={8b1*NC\N$]paEEp tݫ:zzST@ E"Ik+JT{{F+֌F$r9]cE+*i\.h4zbQhԷFڸq۝1>>L&h4~ߺ/xIؘ̯~+I3SO=F=sڵkܭ[:=0߯f;vY/+; );wS566^ /^g!8qB ζgΜqz9rD8n߾]===|g⥩)b1Tmt:Na={bcw[۱cV{4=۷O:uSѣXv 1MSt'dz-gcǎ) 4MW]#G0F.j'7n_TGjp8\0 
yz1>iܴ,˚s6;;933[{ޓvOQmfU{mV;.{='iqz{=6 Be-ey{ױfggeYa Zr|>OkZ{+1PoRfczzZMMM2 aJyV-|G}y~5I?Y' V|L6wY~m۶M*z|o>VV>H;w$򗿬yǞ={<li?Io$ICCC4M0[oR۶m---J/z&0 CZJLq#lٲE|^pXڶm.]0MSa('_5D-oZ%Ϸ-I窖g}&ID"!-VWPP8bbHz饗ln񏌌,J -$ :c_2\x[ZZ zJ%]vM`#Y L&}9sfQ_400;Nr{^|sN잣K `m8s,bT*n', V燌Bo}[N>~>윏\./:&@%n῅LNN:2.'Pi>w^MMM)9N:V?_|!Io[|8uiIJ˹{۫p8QMNNh㷿rJ%:ujYʿ%ITg2EQ&ZE---,lGQJ}R^[YTRkk7'ԛ3/4|OSi~ i{6]tž;}T(*zr9uvv* ٳofE7ӧ{,X/XcΞ=p8NQ߿|Ar9azgX, ۫h4l뾥{1dڱcg#<"I:sgc۷K҂K /yoKmi;CtIciEE6+Ix׮]/+{nۥ_Z^ޞ={$Ugz}w&tOSSSuv{b[}=Uz޽NojCXmd+8̌F,ƣ>Z=vW]nnѣ|y ijjJl|ɲ0 mv> 9sǎ;xԩS f-NlݺU_{LЉ '<-JO%'$}駞W\e<,D"jmP@  *HH;i/³Ǘr2\Lq/,{/~; B:݆###4?+ P` {Q~Gβ}I^|Eg\.h4eܱcT*T,5003ϨT*91u{衇$IO䌩`r%'<$9rw&Q{{6nܨL&buuuyi͒'$;:5`]]˿8uTOOϢ{m[oIk=~IvܩbBഡ=L *AMMMD"2MS]]]Jjiiؘfffڪ.*J)WyZg?q9x566ԩSp EHDTJhj0Ԥx<)566:uR<_1ݫX,T*۾U~jkkS0Ąi&EQEQgy;{$ICCCڸq6nܨ~[xfvܩP(!EQߡ 5::F;czwhhHڴi?/Xz(H(G?H$4557:m{nO͎1P` D"NII:q;$g˗K/)Ν;ӫ%͇yiꭷիWuAٳ kg}988>}ZgΜĄ|ju[^m[q9>;?'D޵kLŋui'r4;;/^s=6i]z l~mشEַ{v;=zTW^Ց#G#]~}:|i,XWF'իd2Fڼy?8m811zsf`Yڕa I~⋛8rGk~|TA S(V` @pK( W{{j v` :qL\uM%護ǕW:`#@B `]{衇_j*umiOOzzzVhdR7oVGGjWe]XY ~===z饗bQ\nURm;zXJ&ZvDBht` hhhX*,J6U0t^>}Z===,ɓ컼~zGTڣ>kvvV ë]%XbRT׺---jjjr^9sfV(|vҥWoYT7`0]vyX,pjhhP4U2ƍr_J%Q6mRd2FjllTcc6nܨt:]aRIhT7nTss6lؠh4L&ٶھQ*Nեf8i.SCCU,5<ROO>uww/iM6髯R"}ݧ!;w2$I2 CBADBΊ… R8vJRڿLT<ׅ s]Pgg?*HhRsss/ef9mXTggfff41hRX IDAT1^{M[nU*ʞpܲM-C:z._SNvLؘz{{3[;ڪ^{Mvh47ߔa:{ϵa?>Cg|> >^X,AIR[[)NwnLFi*{O0 9s~|^lV---͛7S/^T[[.^(Iz_ҧ(T w ᩧoI@@}}} ~Qr9 S޹9=#fff%)Iy(*?ɤݫ+:ÿ( U Sp8yb1y:tHLF|so,:tUھ}fff422Ү\GRTaYj=R>#q۵}v)W^mۊbڶmu|M}ǵm6s=pႾK>|nB Qf|GtnTJ6GFFzeMOO/wܡm۶;t^*tٺmP(R<_jrHҧ~CU}to;W_Ν;h1eunzꩧܽ_qJWmN ւ~HHd2v# U$U,˪BeYI)@ PT Bm;g˲*mWɽP(T~(Ag[|~zr-ff`0XUX,VT*L&S}wnڲUmf֛g4L}[օhd2;??_T*Nk]v(g^>XMbQ,b6S7[,W+X"F D**dylff3 lTH$RM6ڦ)S>w5+`bYVe~~~l@ ujj.4OMM9 *[c{5 Omn؆eU-. 
ւ~1P7|>_5km<V6m  || ò,KgΜ[瞫crrRH{ʲ}gme)m3h||\o[[c]pA|-d6|!m۲m)77NxIo{nI7ǹU(+(;vm O>х l $ '͌F5>>nA8pnLZ_$iuرCDBBj<~tZ_n|>/fȸ9/_,I:s挮]pr\WP('|R=ЂcWJmd@ݦK\"˲dU(R"h[=P7BѪsرcm[HDsss:r䈺tI$IQܽ+fhڵKPhAGI?d2H$D"}K}($I_}$o_p٥ryY۳m[\NtO>6h7zn T*ĉ:qfggo>9rij'/m߇z׿t{Uo۶MX'NhАO>\YVd{w[einnNpX{6lǏkzzZz_Z znTJxjӽ{*J>MNA]xTzzz`ElvX utt(J-/^z[Vw!߯x<@ }9ImoM'u{4::p8T*l6t:SNɲ,g#s[믿Ltr9mݺ Զn*IFM'2Jw߭%~_PH\j]]]UwQ=#JfJRS(rّI7CT*  # IR]0]СCszIW{+|>"r&&&NN~_~Mh::::m8;;[׆PGGGcoVlvMCĄgYVf{.ߗ.Rui$j˔|F^+jRYS.M+L귿'˲]5?@ #GOYɤvڥaj׮]J&:v؂>v옒ɤvᬟDZZQ8iv) t6VeOò,K/g}V`Pw߭O>drFGG566`%ixxXGQ0 wY٣TV )ʶm=zTCCCR"pp߾} Um'nBýїrl6^مBAlXҋ_ ъl6fa1wq?XMp٩P( pFNl۶0;;[oS6ryUMDRER%ɴ(mG>ض])KZkL&`y6|K,TbX|&yߗ"4,׶=rΛղQ^h.L1bIX,t|>_ y BuAbeY+R J'f{>jjj>5z}BUk/ݖY`)u_zߪךo۾obx븜ߥhz/&zn2~_K+\WM\pnBuzߗaxqh,r`9uҥ[߱1)*ɨR(((HjJRr m-=זH̰8xܳ7qTfgguږLe3LM<oڋ<_(Jꖭ-W9LsܬFe3% ]r)g-P(4-i .x-ݔe1fgg={{'qzR[}5;v^Pl~˽l6 Iҹs9h8%R A^4Z^ݿkٲ[h2m*SSS]ۈW٧שb1M^ܽ8\-6ky}julںr6׼FǻQLaoF5NMMU7sl^2{LVLmZ_mN^OۍFε]kjZ)i۶?߮j"@]6J=y/hbv՗V3t\V-Rۨ~z b1'05Gnƫlݾ&Z(@u?.p,b۶Z Pb%Ts skFƍ9'315njjʩ٦eYNyMiʚYτmYٞ)Gru>{e+lqujļJ&$Ig6Tfff^oװ9۫]6LMM9k|Ylzmѝ&]|XlvVcd477''IW0ܜ3vƨ=v,˪V@@ǎS|>>Iqs{{{\;H3ٳg1;@ PoueYJ$Mk,ZòddD`P\NSwwl٢tk\.ԩS ڹs$顇$=sM}%IoFUjoؾ@@Mw9nDq>co^޽{Ƒ\]vɶ}ՖŲ,r9II>וUeUKyMqczzW<77:y#<'OP(UΜ9"D"Uy睒k׮I._,unH^}U{O 1 T7ϧ}066/JҊ_3~_J^>}IgS===[>H$Ƿmְ,###Ugtsﱱ1glХ?VNnӒT:|[?\9zxmӧuA=Uϙ+uvvuy}Wn~o+ |۪fV3~Us>SI7P>|XΝsk.KYyI7z}ai,Kpm}v-x |>.\R?Xկ~q+رc>/_V.S4uNE"MNN*z]&bcǎվF]κQPhcLFիW={/ԞqB%y$IpXկ$4b[0N/}FӣL&Sӣ_?4bz7Zb86SN9c_jNl{ .f9/~aݵk/_x^}Ն_`Q/^W,$?~m977@ P2رC@@u_\k^|m`%Å󩷷󩳳Seʕ+U˧iaz.#}sl+M岲٬*ԅ z0anȔŜB}WǏwz^fY7s bdٺ>}zaAL]keC~,-Z{ZF? 
MOO;mOx.o+0=ܲ]6oV:)JNݥ^yS{sь#\<wzƍUG\.+N;e|%}3,j[Nz)IY.!Ԛlk yguaB!% 3is=2JuXPLﰵP(hbbB[lQGG::: kÉp8E}Nv{ktttIZ?s ^PpB,|qvhu9/~aL5m /4jQUk uM/`\4Ԏeǝ^c s-ði_c9J 3<#۶[|I% '{Ǫfhz{{['&&}?P___R"P8VTR:O?/M(8;;{:ĄJRnݪ>nJɓ'7d۶B:;;n?ORxuL P"pӧOkxxV:urWnj-jzwKx<|F1f|>3ȩS <$駟V<w`ʼo9sFtZRIu>?كέ{Y s^yfУG:ɓR__߂(FF|A@ Q=nϞ=,غuݳXJ zH{+قr f@ P5L͘H`ϥa{G7:X,3Cl d2LŲ,g`3%8TbXŶJ0txu٬_,25,dmY/5 ۲,geffdr:y,SSSUۻU;[s0tfq_wܳԷ3t $ɺ:նo0J,NfFR'Nk-s~ v}cSh9037{oz>;ImY܂`2m<33SUH$✗3ٻb^ H&eYUmV|Y`m5*5NfpF띤r{&_{.k{9_qbcW]:?jUv+J^vѾjש=~^˙v1{-gvyi]fL^ƬU6sѶ4~ޔ1FcgyBs,f1rxi!^.-kګnnŴJ[oz`5CKG$b z*{sÄZݞqeU|17am `@% UBP݇۶zz6y͖մ;Vb1|öw61뷲zQ^4 J_H_ }o\/F/^1<^AB_`[VC/|ydFyyZo+ǠN^RŬl `[Gk|Pס~ BNgBގeU,˪B̌#8.f{ Qj:-ӫli+X,-P(woVp)I?ͭP7ÄHbv&Dd2U=L@ʺFy=/G>D"Ԕ󘹕}εJ' 5Rn'`j"@]RH$R BUfmp޶;,5 nR&D"Ue3A{ڰl +eU~BcEU*շZUD"_.&@5㲹o57 2G{9[Z]w=(2D"XPr+Uh4ZN J\OkZ?ZDflplfc=Ä)1jCUL4m:ṳ.ԛr~~bvI9"ȒoolP6\ [i&hzQ^+=Nf9ݪkZߏz`5adYuttTdY繁uttK}}}ڷo:;;bg˒K>I__;˼ Wwwl٢'OT*UH$W377'IqFr|>رc/533@ IMOO/K$ҥK~$ԩS* MӒ;wV=p-[,헻.֮cǎrb/te-wz@~b߯r>ٶP(L&bd2,g{{$ο@@2/d۶Ѩu=3UۊD"d2  ޽{`ѣazhՏ$r9]|۶mkǪK,R.s_,533`FR(tIRvv+SpqӻѲ,>}Z>/i?=ϧH$Iiijj?[*H襗^%IW^Uoo^(>I7w֊_|QT,z uww^Ѕ ֽ{$IΝ:𥳐%I`PǏw_r-gP(wo!۶%IXLCCC.X聺 vm|>hf:s$ŋ* l{v}˲$IO=\<WGGGU@ϧ]vISG@@sssf-[hbbs_JGQ<{_$B1pKRNF~YmtENRIZrw$IoOΐ }u3t``lSH욾~rl6mwh7+uunݶ,Kc=V^|mP922"˲4< h``@rzG=qRGGGѡp8p89rD@@=\ݺ}}}ucvtt(^T=k-3S>qr9iҕ+W.TeY:tP]?䓒AE^،466b7nOgϞ]W^]n+Vossszw/?^~eݻW;CK.Ih6Rrcɶm?z.\йstB!8JА R)k* C=T5j#~_bQ|~:߿_###U6^ww~"H}ٳGPHop8:s挢Ѩ:1{rmԩS:sLU> tllLm PD[eY)9Aі-[k׮emݮ:e+ǭh[QוѱnΡ[i[-eJR=\.W5ITĄ3 ɓ'U.떋p3u˘!_^χaglLYl٢-[(׍Mj֙ubqgțᆶlRu0׀Fy}.tn5;2>;;<бinxllLx w$+Uiݻ뮙fۦڨUu0 J%i``c*J&iwQ6e=bSh4`3RbX`XeYIH$RbH$RT B2P(T Bdj`X,#U-ߨf}כHT%:ٶ]-_n0bJ0tmT_kŴپ_ JRd2geUBPzfF̺۲J,s bXT*bXVSSSNgffQ^RbXfԔSw]j 6r/7??_u\Z-BkTws>LMM5},T'ehVfmΥfru^׹ٮܟw~eh^:Ʀ H<`ЩkqwFmcL&SUP(6*H$R2zO0:Ԯ^u-rNk{Z/&uh֣X,:2+WTfffpXžPRBJX;_5F$ۿJ0RT*7_w@.Kuk0h}Qڐ,W6 YP^̙p* Vգv,Pǽ^mެP) M宧v[lZ{L~j3BoNdYaŖ& =B{{5Æ9wh־ cwPl]240׽v3#b KUULhy%˿KqzѨ;:}GW昼{K.KoUxIR5_|YtAIқo,c{嗗Z###N|>Y%Iz'i+%IoFUk~X׸NKoo⋺ vڥ\.W7 95jǎ1|몺* S{U<ɤ"H`2 
|>?.Iڷo_q?sIҏ~?V.+r&$IGQGGGՏmےT5`0cgPؘ36aGG۷2u]U6ݵkWp!mxx oI^B!r95Kqlumw`$Iv۲];SrϯHjClü6Μ9?^XUutuuIRr)hQ%WSW˲499.Mzeعsg;v*KzےvZnhhH`Psss@ݸ^Q}mkcU6<|lH>OCCCueMOOwѿ뿶xֹp8>ЫIMNN* );$;FTZT T_}$) ƿ;jT*-fL&uw_Rz>W;!Jv$)nl<K/ӧO̙3dUص9_ٶ-˲d۶^z{b׃ Swr,u|_n/ۧkϞ=T$Ywqy۶mۜ_pAtZ^eѨ~mvmzg477j@ `s'NP:^vo8azݻWBAy w}n4UF[heۿۢez~GUAd\v$+iJ$[j^;vLǎS*oV|̗_~YJ`䤮^Z8{zzZqs\*zG4>>~iJ1Wb{WZ>VگM ̰sy-t~رcP?5$ ˗jLHv*J)kvvVl@V(fC\.+ZQ(4;;t:ݰfѱ/#lVx|Tv,ovvVpu!I{|>B!%4|Z\O~I+K:V8n^Lzu-U ٳg1w&H-l ZfhhH@@|^rsSg϶Tng6'򗿬*eYzUo3n;-z7՞;wsΟiZ9Wk1e7be=xPKG3sssUC3{ϧh49,rƶ]r, 7epllLcccU!\R~ZWn3ncq˯~ciIjTgAf \zm|>fArfKvYx+ofE^L~U3ZeYud2-Yh ?`jFF6@-zR,*333U;k4ufVL^fH{f'8olhy/}_jtwp k FnӬg]s ,^l`0Xo\3ٛ-˪b̌Sfq0B!}ԥle937;̶@%9k.kmZ^̹^;CbhSvKk>y}xWv=n+jʱsyz;e7F\BPeff233l\ۊbgMw}Ͷv)u0k?߮j"@]6Bj>mWbX,Vɤu)eYζZ W |$ض]I&UH$RW}iF·gS@ KFy=h\O2L% 9vHnX,VhUZ%24{Ϭs?睲jČs6Ir< 9 iL&\-˪T]W+@ݷYζe)c4*iOZ BL6هe2' B-^˙/5av2djmUa~yhT%T^ [ j˱L&BH$RG?Nb|soxu_ͺjYIujZo5Z{+V:LXaqkZ?ZD-&3yņA;X5C ˹Aw ;33ㄙniĄ4E̽L&JX] As5nc,vc=̄bնm˱Q^ڏ X9|PסVl2.m3VޫJŹݻv{4 joTFᥗIL YKr1jq /CophVTkk3㷮]Gk||†Q*t٪8N**KKgk׮m;vȶm uڵKDBRI;w\t}>sIҏ~T.<z{{em^p_{_|l6sҥKJ$$ŪW^qq!^Њ];΢L0|>JUa_~劖`sV BP7BѪ2:;;566IIR(Zp[`N^I7CګWJj BUXV{w7npB]v- {{{ua>|XxiǏرcV:m zgO$z7{+$9Y1iT/DcpX۶m$={V^VV۟X9===d2U?===*JNx:33xӺ=a\.T*5| \˛o)IO?= %Io֊ݲ<y.ǕJרN8Qw>J=#Ikڦ'OT6]v9qqutt8=X SoooՏ,fȶw^g_r7nܨʕ+n)˲LÅz'+׿usl$N[X?*)[ޮ .\$_\رrb1I/0LR?^K ~h7?ް'Oԑ#G!J&E n:11l61MOOKMLLTsR)J%aٶ]5c=&۶ff1ㅚġC477p8BPW4U.#<ԹT*)J9:tP-W^u)JNҟg D<{{FGFFH$եp8T*T*1 +ȶmeinnNTjz8qB4>>.MLL(NkvvVpXGeYN .]bJTr[s-:ydYn%mYRH$R5üYoNi`0Xl(qX,Vf|]YkgrovJ4ڏeY h_T*P(T>'Vٲbsm7~uoVF烤J0$ɪdy`]>3󭑍z|>_bJ2:+3\>bJ,[u*m=d3Leffr: >''K7??L&?XU/󞼘2ז[ž:nkhVSGR;ĽU:::$ݜXi~zJ%={VT.{{{f)߯l6K.ipps?rY/_Zޥ/ գ\.ںukl^u18m۶M]oZw?-- ) rJڶmv9Gggƍy.gfoXF6qgR@ \.'IJ&ε, -h49^yٶ,cYmF5>>.IG_9p=Y:YRI]]]n;c>9rDRu +H,޽[oF=I[GGBТb^v1^>XUNpo%mmz`*No3rSX,VlۮXUunіe9=ɤQZ=۶uSOWlL˹J2lثm~~g}>od2bao}۶+m7-]Wy/8=EZlYLF큚׸){mfj{ʣrڭs Ƴ?߮jb Td477 wz֎ u1uvv_o$9c꫒nNfz ?$9@@ǎSgl\.k``@]]]eYS\.+k֭V8v[r}}}S8VWW$500 ˲dY3ƴ{R uuu[ccc*J+Bszr-,ۿ$O?{n޽T*v)Iң>g}Vt+ 
SWW1r^x[n=T٩}ᇊo[IBG?Qv$9ËuUy睒q_]mkjjJbљlq||*?xFGGk.e2 n؉V/ŋFO*I:~FGGݭL&#۶ 4>>_Y4>>P(L&K/ɶmb1E%I~5d  [-K#/ ήQrYNR P#Iz\7 jrrRPH|^׃>ʺhvwAn9(ƕ,˪BJ(r3=T-n7ަ;??_[LZ>Dhj'̺fKYNReffjY˲p/~~L-SSScx<*6<>w͐$ѐڭѵQ.k]Ľ5ԩSr,KFe˖kn5{r7nTMx`&[̤g\-===|-K.9k$}z衪mV$+T*91f13'w.]Ç7,WezꩧV"m., / Izꩧ}QӞI} ۭWBv9gҺF.V*`M1s'$) rj_UMeWVW^ZuƍqAof6YU~(Iڿ3;4ONNjrrB[_#Yjw^ݻWx\BApXsss^tfy智7 f_up?٬fgg $I{crnnN*JJRθ###OO+;<ӒL$=,cn"~_XT*9AW4URQRq&&3L^ҵ~k觧G/j}S4 IRU+V 3yѣG=6ӧ'k2XN-g]>TO((˩OSwwlۖ$r9gjf5>>\-IXLCCC$ϧd2]vittT]]]d=di/?fpFe۶tv7nN᪟N}ǒnt===] Mj\r^̄hW\QoooՏWoo|>.Irz7Hy˽V_+VF{鷿mڰrY wIS,%yO&nh [݃J ^oz`K&J>wL&SbXd2JXtƭX,VbXX,6|.4|n~~dwI&J,s&:3W*7'Tҟ&$I1Q.ɞ/H$R~ 1!-~&qj,EѺ׾ن{V&2=گv,rڭR4l73Tm5!W-f]Z|u(I0t3b2t1rڶm&@l4{yLٷ IAd~BPqx^f1eiX,VM.OX,ԥپdӐuff,+n `Zoz`51"˗5==;wj{&'|T*%Ijx{zb1[z뭷__5aSq}?sW_}4٩/BtZ]Aqr?i[n?g}]jܹӳm%i޽4==˗/'?٣i~+n [RT][VَI̠^mzJY쬾+8ciYd2rY۶ Z|IVw]GQ6#˲BV[q ?XR>#=MoվՆL&5<<>IR onUy|{ׯ_ב#GtYgyf̈́CCCT*fycݪ[b۶9n ɓ* .z{{[ +[]VPVT*t1*uLaTX=P=PkLIP(kttl:`P333*. 5`SBz'CN +hddD;w7 aOX1|lV`!@^ooo`vͬ\.&PױE"N;g&m+ի.`:vPu 7|Eœ`P~EQIs=6)F֠JRiw!nYe;:$ILf]J4<<,I*æR*%IJ&jwslV}}}DT[.N˲,IkF> KAuhΝ n:<;;˘9fgg588(I ڹsg`uZ[|z"ɓ'uȑBPeݸqC}t//-:-J$UOMM. "mElBeٳk{F> EeY>}Z. pDQe=zT,K|I4͌J*$)lV P.eY'jmxj6)* @ź7u(<ݬpV*֥ZX6[Jx pb],!*)@{b!*)@bCب!*)@{bh!*)@bC(!*)@ gk*6-ذ[Jx bC[/!*)D ok*6m4ZJx bSY+!*)@M!*)AM]!*)BMV*6[OV;D%<XP^Jx !*)G TJx 15l@K Q O6TbCTShQJKRPGRZ]*I@@?$I/bㄧ*ТB1%χB!8q`!@T*魷ҋ/(I~zJ.V*rYrY>O>*!f_zU_}$/ԥK- [uwHnvm߾y}I@:T.u }JUŋ 6@ ]vI&pmݦz"TYBA\NtZ/_VPi!eiǎVSmڞW\mKv(Ν;߯@ dV+kBPЇ~7vZA-sLVf.fP({W?,*ڢP(… :w܂awmtw2>dY}~z˽tC߯ݻw,D\˗5==㔚>mܹschddD;w\=vV *VU6ӧ5>>^eYz{!,]eBA̙3 [F:x =|"N' }Y\̸jݘeV VXQBAcccUpZ_~Y=:;;]D4P*瞫 CN8A 6-TT*k3 饗^vu&wB@ w}WCCC.-GQ.uVr9 }gL26?kw1LOO;=Om&<s]$[+ѣd %Iwr"RXW_}`nܸbR? u( v~d2L 6zbYT*b`R,jw1ڊ+?\.aaev f5<<\.@ " *VSPHH$קΪT*xP*4;;)HHBlnwچ+lvnm[SWWvޭY vu+ ݻե}9aeYm[xl$RXfI٬N>u,c=~XsA*+ _gΜiس4uEy_+b\.˚r\2@@v߯@ @DBA\NtZΝŋ=|ddD###ڹs|>_͊+b[PЅ t9gM/PH~_;wԶmtwI6ׯ* -{jTYbE,7`+ F׮][04~uwo%I====*׺r7nH^J_~.]B t?x%%@*Vjl[]Ų,رnF.]ƕ+W<㽻j @ 6+T[ޙ_tU5zfƆ ~umݲ^`"@ŊX[6uon{~JZmKepsu? 
iʹ6/@r,Ivl6Kp Y k*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<bSKR Ju JEo3ͮhRԊlg:::~N<0ZCccc Kq]rEw$\.kvvVVWWx}]ٶ_}u$QT*.ֿIR&Qoo׫=  _ 9;:: ze=#eYO7N;aÇmlݺU駟j?d۶ɤ$Im[HD>lU^~e%fYa!BA݊b {eݸqC~F8V"X1ZX聊5>Oo$)/$=Sc<%;lۖ$8q.XwʔH$<$ǝ񸺻%Ip4- ֭[ק. z=q$W_]#1b|NPX(J:tRluFrF;;;^?AD<٩`0R9pfff$IhTLF===*ILFSSSp~&''WHl\X%IΝzŋ ΏeYrF:zhmާYwnn911p\K.IFFFO?]r;tvvj풤[||$iffF{Uoo>)r9yÔ^@sXLf0% 3g?nM|zo~~^dRHD|^֖-[Yzw6_$ڵk+>Ӓ[*:?wu$飏>Z~Ν+^k{Irz_vrT*T*d2ٶm[}>t }br:zs[T;{'޴+eٷo$ի+^kAJ^˒zk-++(9)ٳgIrzd2L&Sc&~wbJR:r䈤(⫯$}嗞˘fYɓf,SS9!٬lV О={VvаkN:Ą%IX T*W^yEt!LxX;e۶SfYgJ9^j4$uwwklljrT*|Ptq|iO>$^s-fЖ-[am=@5,K Q8[>HTѡaٶh4RSSzfz˲ѡp8-[O\NHY_;)IʓZ*eYJ$ JRڻwѨ{nɶm8XQT*.?e2q!Ν;߯aOI:fyI۵gϞzzirray y睪 ^=zԳfPЅ t9% Ǐz衺u ƚ/+J^OS IٻԔkpp){\o{?;wVE:eYF:v-9*Vz  u…v8V"̌J?P>ק~[X)M+innNx|EoH~ia9㓶믿.۶OO شᰂ=gggl%m[=٬9P(fz^o6ch̸Xrz|+mc͊[*x @<T@ P*x @<T@ P*x @<T@ P4U.]!@Ŋsε(XaǸ:*J݅5::*I.V@PPww$))H=P"FFF^e)JHXT*%˲$}:22"rX>Orf]4,R6U8r}]|v #@Ŋm BD"> hvvVRERY ODB d۶]D` Tt:'|RmW= `6+ ꫯjnn9˲oh+Tl6ӧOk||9˲c=C ~י3gmIF:xz{{]\5D\˗5==irevڥ_@Pu rΝ;/zȈFFFsN9A( pΝ;猷% kΝڶmNzHI6ׯ* -{njbM0q7ѵk `0(߯Aq۵}vIROOϺQY.u IիWW_/ԥKT(, t?!,*,mbYvnWUϥK~7qʕ㒶KV*;믿V: "\ذ7onu`= @ņfu[R=aHbW5Xr,IPßVT@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @<T@ P*x @JuveEOS.e-b_TG۶ %I=\ _ p+e٪[u}-m6 U=688^O?T>T*?G}K<~R_loz{{ua=,K\N}}}d2v#˿vYWF;C[߯d۶SfǯT*յscBA~~_jddD;w:kN=S{T|?N{Rl6Wjeh2.EѦd*e5l ˲*U$T-dX4md2YU'6kBe5ot<$USg9Ө` WU*mv,da}kema}{-k w{d[n6zKDBeɶmEe2E"IjỵGjrrRhTm+93mLFbݶmnF%Yv\i% AEі>eYJ&bΘKHYY|Ar9ٶL&gVmoppih4|>H$9ͭ+Jv]aɤB籛eY={Hyf2=Njz<:I&''K/5,ѣGD488Ҹl o$_v©N?~\sNJ rmنwttTo>,I|NXt'hbb V*(3߿,.;w!f({w;otu.ԔSwI 'rF:v$ihhHlٳg5 ԧ~`YSS^ٶn% 8qB~_lK5;˟8qB###^V(nN0RE(rnϗ_DB\Nli;vȶmr9;;vL׮]S"|˼n~_8jppP}}}ѣG^SOVV=PcI~pĉ -OT;Eg۶m3g0>>[zңqU.3O<Ģw;$'o+]2f?z~g .HR8ܹsQs u?wVww&''%}NulF(uu׿$=Su:=?}V*=qr9uww+ yN\LӱT*;`իW[Z?577[jddD?OC$`pQm( Z=TW\t'rm-n>a/6챹ci#O>dK5=z뭦w{+V?[; jo2Ne2꫚T"p¬`07xcUfo[6]Io:5344d2^xAssstz@F"K-y OZ[.ڶ4|`Ԏ_;=Bhx*ik7`ru?|P(8^\ooN8y%ɪ ry)[m` p2̭?я\vhhH.\PX̌rrrrEo#oDQVO[azb1U*ϟ'NHu/K꧿_SSSn/ effi{.v( 
yyw{|ұcǔL&% QizיMnUhbbX;|3ڻw*j?f>Μ9vڵ>it}\V.[gFFA9}YH 1fMgiRx I7{еP(hllLcccu 9?+O~I7^ފK\Ą%ݼptZpjvvСCFaRN.T.{cNa6D`xllLxy6/mԫx9|>Ξ=ovvI{n53)[\.kllL*`0_.5;;}Ο?}OW^ƜBA'OT.eYZ>CǕJ400(1}[r`0^z6.]xQNpifFf8AI7{R{aeYR)?pz8pYgtsYYհ Em++ͪ\.kddY~֭NXf566I+v建tI'x^vk*Hߌ411BKKt%=zi灁',f:x&''ꫯl+ض],"O ԭ+@sd2,**Pcnf;nds_Pn0F@IDAT^2g?Xf_f~b6"H ̵YUFu,bvvb +SSS _L&|> >|hd~~)_mj]7v_cqmi{/ ֢JRiw rYϟgAu]ڳg,RIҥK_5S6եKm6WjLIs ~ڶm-[()4ޔVm۶;lT*ׯkppj9S_׮]ӽޫG}Tw_]֖jMcfT*>H/=|M}?;wƍ _.=ps|͹F׮]z衆u6mt;wN{oݾktФJt`ddLRIo.]'|R<~ݹ^爤~k׮ikŴZC JJ B] U*%I |cR[}}}p/z%IPX J ,R.S0ŋ$I|^~*** my, S`1P*x @<T@ P*x @<T@ P*x @ֽx<uttff8ny{~f+^1R)vR8P[RIwlnwqL֭[U*]$IJBbemݺU\ND϶nOO|>_e Šx ƍ+.n599)I pŠ A `]JR$%I HXZ&-֥iI7{nCCCD"W^y֧I>݈٬$I Ij*u;lwzzzذ,*`M)`-!@<T@ P*xT*v"zTkwKR~ze8NeY]tI=W_}m۶ihhyO?TtwkppPnVc:H$.d۫x[T*K ǝ ' 7PgggU-8qBL'Jj6;4[oR$˲477@ )EyEQ% }*nV *#~_U?4>>@ ӧO׭ /6ދ/`0X?\.H$/BVgg~;>L`P\NzUC X\փ>(I_竟?rttZ\N>l:|z^,*c>WZ.599H$Rkմi(jأeYl6u TFGGzKn&tZGeY:~?裒˗/Wu$]reyI7ԕُ/` @uP(85%j˲,kiHln`fY}f`U:R.}pg'x~YMEj[Ҋ|dqUh,C1(IоГC[s,VDcD{%Ak(k'=n] tJg%{t4Wh-J@yǙx@$O>$O>OPhAC! "Lbpp6Mٳf^'""Z@%"""""ZAzܼy<Ԏi}YD"wL&q Y&""j<"D"x<  ۲e v8Na:kՆn _Ɔ puY׋rW ђahqgggt +̹qVΝ;KUU+ؾ}{9DDDDDv;f^rJEm)/?VF+k5:U""""** [/?VF+ RfuVb(BsssADDDD4q2 lub խۗDDr\>g۷Ozz!E'h|lNDDDD4lqZy~?p(}h>WDDDDD"\r/Ux1JDDDDDDDDDTDDDDDDDDDDE0JDDDDDDDDDTDDDDDDDDDDE0JDDDDDDDDDTDDDDDkH6-YEDDD1JDDDDTbՅ7" ~Oºur055Up H. 
֭CUUL&&"""Z@%""""p*QSS-[,xSNa֭wۍ`0UUZ'Ν;ۍ8q`pKUUŖgCв%"Ja+whv#Hpnܸ0Rv;;<.]yK&un˅]vcccp\2x ?;W?/Zkmm={2t2xJDDD+[U^z߿=ܳto&n^=xR`hϗe9^[n͛~]w ֌X,I޽y߉t@%""""Z%{%_t#}^]/puQ]]LNN~yK| ?fX,VL&(d2 ˅H$D"fesG15K2-MMM}i /r̼/U,ԄrW Q pB ./w1 Jx<hoo6n܈@ Pbuuu?555ظq#No:F0߸q}eƊJ `9`0&b֭X644@4Al޼ؼy3:;;G/wU{"X>7oرc.bAN03;wE$xLMMjOV}UCMM :;;]ꫯfxtl혜,w5Yva˖-y z裏}:::,͟;}這DFMM ^/t]fD5PBu[d]]]ظq#sN M^TUUl:44Tofl'NP<ЛfqFtuu!N#2=" UUU2eO9rPUU.'.ӯelv-xbɖ*QF F4]ed EQ F*7t]ϛ7 oa~=Jh4/O0f9_QD"fbbBWuKRĄ o^VR)#  kDQ#+bD"Q0 Jׯ4[o~/ Mӊ]Uռ{Ri0|>w+ӘAs' t:.//|a-XDB޿jfx^CQYf|v ohWN>OSU0QQM}jH$ M,?w% Y犢^7/`㉉ D7J 'F(B-۰PD5@5w 9Ms?4Pʼn|r]^9_PNPE o(b9& y,tbm411!#7DDDڦp{n=J体9Fe LUUCQ-j_asgʝ_jb9.6=OUUKPޓ6")b܃t: UU 'c&&&,EQ o y3Wl۱XDjQŁY<3E@T<2*"Y(ʫ܃x(&J]eXVSus)H+ۂ0"-B-X ^uߞiŲIJs[jz Ԋt=20rT A{[3 mBD\¼}1Al_XD5^|v܉x<-k7xΝP_bl6:::q%9= _W-}ƈN~~gΜnfˈ| XF4ž}>fPt͖%"""""ZKEi*vʕ+JUUUBMM k.(9mxx-/lmmG?`gOO,~m@uu\*YgϞ'uUl6 ]סiv;=< À48NL|u0Gp8Pe kپ6M+D߳p\sֈV5*4Z(hkkBl6477#!ƍ t:PHGG<N<_X<82744@Q@UՂehkk4W_TU2 /`0Fx^<䓸۳g%GFL+UZUUKfʦi(O$H U5 mm\.hp8ly gΜA8(H$8<x//W_x`$viOxݲ &1::Z0mss3\D"P(xlNb1(lujc%ySSShjjBoor 6Z3On}_;߽{7gE28 o߾y $|献DGGǢSz饗-XEСCfD"xg( o~<#غue=AY<Ϙ;>|0",Q]]m9}p8 i|׮]Bp:p|Eȑ#zo˖-4M޻~D#EQ|uڵk4 ?񏋦liaqLLLǏG wݼ\h4*x^YGlx뭷 ] |I("mjjなrl-B/oƂ➿. 6Mv)a^`f̘vCUUÁVh4(tP(daR)98N#J Mӌh4j0~a-߅D"!I$s77_sE4Ͳnyew2O򻪪L&n[1+XJDDDTqZ &&&lw^[eDQ#JYiwK d=˫L&cB!& Y cf[h˛vқ(ve ŜOF M,BRn!eQUU`fF_,\Ӎd2㮻tgY\t /glH&WϜ9ݻwX NQ]] it:mW'ML27nܰ[:ŋ駟b2Dnp N/ ;uEKSbU;iӃ7oBun躎}!͖**DDDDDDDDbzzt:q9ޛ6mh( N#@i}}=v**DDDDDDDDp* Nchh`0H$Rt~UU ۋt:@ g}w^ŋQWW>Df~EQp**DDDDDDDDnvQ]]N[zrzW3g@4x<TWWٳy777v`ݺuظq#֭[] Uae[xU ̘p{n޾JpY{سgv;٬|p̙3U4۷ol֭%͵kӟ=VD+R8Z@f^7N_nْTis-^eW*X6E @WW6n܈.'L&rD0>>./b1tuuahhl֒>7MWWQڲe <q AUUrha׮]CGG؈~&N# hiiA0č7frPWW19s7oӧe`&+KꫯAT"hooGgg'TU~"""" L&x*Ο?ASS-fBطol6b 4M訜VWWgYӧo].FGGS "jhh&rP(;DDDDDp0 \r2}r AUU477˛I / B4|=frϟgh`0JDDDDTDߤ. MMM`|?{,{L߿{CCTUE8ƺuԄ@ `ghbBp8pA†  Ns.&&&vey<P*(t. . W^w.]KkЀ .w5; """"P_|_%Ivjkk/65N166fI744[nY; ] EiaK+/̛O<{衇thmm>NqX|Dk[U6@KK PSS)G"w}PMMMݻef]]]D"ѣG宆GBKۗ:˅:x<$I$Itvv. 
dRD{{e\Μ9vԩS^]*Qjhh녢(hoo|>Ο?UU'O,:g}ߏ>199@=rn7UU166]0JDDK+! BUUd2?ϟG&F0DWW-W_(PU'N(*U4ODDDDTzzzݍ/ʕ+tafɾS_&9m``H~VDfq9;vLvA+xWo>]Jkǖ2l6c׮]뮻t]tvv"u q]~[U8Ͷf2DUUf崑hV0O) т8LLL?0v؁`0r"ZC ^/|I_E}6yx<3A'e-W}}=4MwttGŃ>xǃ=i BUUL,kdddM &<*u*ػ>˱*@4uP""""JR_;O>s=rhYrMӨ|><+UTʗfT*U@b20~mK IӉ_|5}^o6 <"pȑR[~K-5DDDDDW:;;( n޼YUڸq#x$>C?8&&&d uu]}6TU]iR_T""""""Zs/Iܺ+F,˽TbbVڨo8|D"f󩫫C]]TUqD"r[9*u+T""""""ZsR_ ? ?^P0U_ՠ*qb!" d677CUU躾2^/~,iV+b'DDDDD"\.VܾO5o7?#?G?i†hSᩧYŠ:EYЫtZJS0ȚLk%@%""""Z!4~1Xw}}}U\۷P0#opZ6 ΝPUa뮻 >::[M߽{7Ξ= x饗.W^AKK N:UQ}DDDDDn||ezу +j0S`Ϡ8o]8Z111'O"  Lߥo@~&[[[.wϞ=fdkUBJpbJDDDD+H9_#( ^~eٳSSS8u0TU| nl6uQ+RnLw՛urWsO O#]^t~?L}^099Yp-[`׮]TY*-Vf1==M6wS}׺Rmޮ󈈨4.^cǎɾ݋ݻw絒6bDDke>7p\hjjB8W-ϝ;`uR{{;N~=>ʦpoft-ăg}쿗hXёb-ڗjzq}ڙf1Lw92 +Xr:w8n7ٿ^ww7t⣏>*kL<7UJ?-T?ۡ*p1fEJ&iPGxTfӿ(-{K?o!TiT=S a܌7x---Plْёw!XUUӉ#੧03Rxrui2Nq)8qxW^Ass3p\r@|ue+>vra-~gkp/\7xrEXs{iy>z͙{챼uwww?~/ىH$b1Be[&-8jiRѣ㨫(hkkC[[8:: xv킢(krfz{ ?.4ӽ% o޼_i4-&wz{{;N'<(x,}7F4$Itvvĉ6W^iq.Pgf(u"xV^kmttΝ}X8q"ʥ x^}<~m98PP($ԄD"_~###Ć r / @ߏ-[鸥n‰'"xގ&8$IBC=T0P?1<9-H )if n(H$4lEQh4jO<_Y5 ]Wk_QCUU^W/V'a~|:u|>CY<pr:Fm:QenjR_kVk*\c>c۝w^T>m{{zOl?5>j66]*M MF&Yh+vlkSrs5P8iF(}oe)X, |Nֈ[e29orfJq|13 Ð* 1KE>f癩DuBAeð^l4W*2B%*.s 7_9PDZUeYq|=܉J~5r8S@]ݖ:شKRNPㄅPi!28cjPiz7>t:g rV@24r]kO(H0-gzzm۶9q41Ԕeznbbד,񪤙傪PUJ&p\r֭CKKKר._ `/S{7o>oigg'Ԅq9pB2 (~?WB*NB @ /Kݭ,51`x펈*W=Ϣi6>>]Wz}3*,q>uThɬr6,2Ѩei7)D M'>Qa{%D]7fLЈdH!iĴR⥗^*7siyH$NKv*8N'FFF|ߞrdp%all ---*PU^pgΜ/x|YcD+þ}Wd}G=ott:gyz ^xᅂomm59R_~=իtzI#ix;#DppPU*whb*Bmm-EOt:d2 ^4 |Z>hooOT)Rݸ\.޽[nh>eL&KV2dp8x7҂Fipk׮y yU BQMۍ57Rr8СCsulV.^7h:ƙ3gP__oiPՕTBs4X eX-_?j/rY}A/iRLDTݤ(Eh4L/CutZ7}>_^?LRF4_91y0T*%f^l] Rb٩TJG# UUȭJi>egQUh}RsXK.X~ s??`FUJ=jw@74uz|Pa}B!嚯}sRgu} s_\h\3:6/5+|De]Śir 7=ōnoU':k"i=|1,'nHRQM,`Q|6"n-yzUU kx^@%`9[&bbB)-R&u[nE)b9 5JBHim 9PWJ$jvq^ak.qݲlX_ tKf M<ט⭔*QŨg}1ʶed2;wǎB!۷*?WJ pݖNEbDn=Oi˵l8s qi9r`[l(sǙ3g0:: ǃd2 χw]uu\hmmE? 
ϖQ]1<<~PU>O?\[[h4gbddD*^/=jy:w{h6룭4 ,r9pE'^gYLOOcӦMy b19K,n?Nw]t1`9?j"o{-}:Bm9r;wtQlr.?tGn~dַ- ڊh4M6{mm?_{qMF ;}ȮFFFL& GTDߧO?4G.nf# L7\nNݍ|;\7055_dw뙛}֭݇[ݕWZEW{ - fkQY ^:rg_I(k]0=-y"IKb<0 8N˖&"]Vy*K*W+E@k[{uʝ.{^#ZZ*hWGs>̭E~>OUɯN>XYJ=Tc[jt:P(4kY i*݋Y|w:DYYMg4(洕TR(whtQWW!O/EA[[ۂ:wܬV.\(l63-=2 t]G"@kk+,(NL300 p.\[ttm`f>~A{{;>(twwFuNr]yAnDQl۶ %k1bx@b1111`2_XnΞ=[b7pp!Xn;f|Ljp2Ӄ[Zx_}6^uA(rGONN GPU^^Ζ. >&''Q]]ǏΜ9 hu=CD+ċ/={4X:::[֭[0P!CU;j '*),"nll,z0ddR'ǎ`h=|WfIčZ:λixqq< n3/fAQ;~*=55ضm[^pSjN[ޅ< N/{',(h|'>EA8ˏ)V\O L&-s' T"UO2N34o.n)Ao`d`PD0p>[݋`0hl1D`]we/}C?~n]ߋ{lOGLUx'r~9Zl4M[}h  gW^|/v-]*۷o0ZAhMDcc#֭[.D"KȥL&#쀾dhhh(N't]p(\j.xl~e~b:;;-{ /Z*"P'"?h;0??r$@UUlܸ- i-t:]X2[ikvKߏx #Nn>3طoo) a|{Css3"He,fùs砪*Z[[_BUUE0ˏD")v.f߾}P.\@$]_ `^2-w1o{h ‹e/}V/s}n111ad20/̕ o4ɿ{^FuK1iaF (r9bZ"(.E~a2e=E1R7 Y.=w>ONz8N# B_L:QՇ0[~xZcd6cNOe~>N$2>$Zd2^1[j~TBoחbz*2t]/噯f[vr#wL&cBilW\zMQYT*ef{rg c:ܼl`Z\ߖzeuhp_d!TGѨ5Ѩ%p[&ql3>4|).|Vi/ cZ-t,;]z2se2^B{2 W뗛fu2|rtb1[~xr?Umm^Snw(]/>k"i y5?J-[nh4fq|ysYυo*2D"Q0M*HgB̝>t]lURoK2G\}a?yh(~afZVRWeݻ7a0oZVw1Uo.u,d2m*0UxJRA>@-\z\6"(/Ld2r[Y{yB!0'ͿB V"`jT"b=f{syeX)T"EDDDDDdK31?7J'B?7ooVGt" !JE1u%q8~8>- D"عs'frB,Coo/֭[TUUahhȒ ;wD6E ֭[n:ʑ].:;;erI3<< )Ol8rTU` ǏGss{߱n3<#X,]סizzz`Q[[+,4Hg z0OR RD4ohEӊ޾yEAGG<OODD&X iӦ w߾NK[l7ꫯJ}vÞ={`ٳ7( ^}ռ`eWW( ~?Ξ=Gƒn||---2ݖ-[patvvbÆ |u]?;v7kǃ 6ȑ#@2D8F4]:J&xL4==mg۵kﵵbr[n'|2LNNvޝ71h .VP A'|R`*QePo|n T f*7߄(u]~_`8z(,~饗dPuϞ=9!`p8p8h7oDOOVK>ذ>7glq_%[W*wqV ۍ d2 $]GGG/v믿.i躎{Z۪/" @:F,C,4xqLOO[ijj2J+]hcfLld2)8xr֭]z[DDC駟Euo \.גf[+7n7,D__v;$`۶m4A0 òe\rk+g2̹3Ǭ1n rb!I9rDNf2PϮ!h5aӉ_|4%""_/c?ǩ%.o6AE9W& ܹs]?s%x_X;t:122Ro6m*YQeש@wFmm-4MC0B__t i!fmmm{ݍ . 
̾ hhh(\np8˗qI-wDD,Uk""?--wQVcǎyE{PUU&%%>bkH__e 0 `8q℥5(xwgnˠgccOu 񠱱a_[v};0̼V#*)P֨\ߟi8Gۈoҵ\,om\Q?ob1hA(#teZ ]1>>f8}YݻWҭ( <W4jmmۑH$N-ӎ9#G 7MrϟG}}[ rF-vΝ͛ ;\Ӊ*DDDDDDk/}-or+7TT~?.:~xy_xA+g}PUDyطoZZZ*jjjHc ڊ;vI˯P prXX^FDT@%"""""ZNǙO# ^*miN"PWmhhx{>{zz]eDD+7]""""""Z~_EYFGGQWW^$I$IL@>rl@%"""""""~󟗻De*Boo/"Ȣw\\.4v܉7rajj2:Foo/\.+ -زkxxxtDDDDDDDDD fetƍPU7nXT~`'N} |DP]]GСCx %{nx<8߿ގGZ199YpمQSS#CꉖDtǪr]bXEo?gʲ\"RD"H.ʬtA4r/Ul6H$.[NMxIhkkC&AOOɓPȑ#8rg$IM000˅|>$;:߸\.رC,@%;iv-G"1>>nYn+Z744׊4NHXzm& ʚ;]||y:k~^RDb:\}*OR'hr!H䝇+s^t@qL|y.[L7u]onuz|JafwhR$=gN?sn{ՖE)&Xr1/L\s|M,"]=SnY Vzs K] ?wON5Margycbb7og}׋p81K]ֆl6g}JpyR)a{6Emmmei,K4ix<6lg}L&׋x<`0׋h4*C,;H͛~:N'<|ԩSBr \h4x>%LԎ_ ^{ݖם|ђKu>Om-os}}}l6>v]N8t:;;qܹY_-7;s _eE__ .^hy\QK>(<yKBEAww|mQHz-n066lذ\9@?ew v3aWAO5} ] tQߩZ/LW؇~۷ow0Amގjx^|?كg}<^~e{Oxi<"ơC022;vY# ѣصkW*DD.^)Zl6lڴIgy%$I\zW_ͻ,ׯ_}###%;v>X6-88_mm,*Lol]vw͆7o.oUPh٘_[/Bu(Ԛu.=qڵ끙 8yaL&Յ`0ag!ڽ{7`^`Dkr:Gyׯ_Fzzz"ZT>S3}2WhEO:N Gyjj  o߾rf~&\D?wx)~555 *Pˤ;=??^~=wkiQG G RSgS(xZ|ȇfp)"Z曬-[@ `:3X3/ ,\{;޽{NDDU~# F"˿;BQhq{:222YzveΝ;QUU]o}GyA#'D"YQԅt >'E̕+uRK`XZ;p X r%ɢ*Pg2ϖW$۷SG '~7cӦM!тUQ[UUUq…+ٱ\.\._W ?{ܾR`+WH$rDz0JD%'^5D"H&hhhȐ@[nEKK˜>fF 8v)[vvv`Ysv9pWW$y3/ѷAoo/$z{{QSSF\|0664C<G?ÈD"B" ĉbEhP---E,Coo/t]]P: ‰'Tr\4 pXBzI466"`O~u\rqFԠJ#6oތu֡7/%竮FSS%8<[TUUYFǞBWWQ]]m)\"6n܈*TUU,k$Buu\HfBuu56olɯ f^]|ļܹs'6o,Ŝ.P~{oool.Knur W<1ϛl6H$&u7DNqbƍyΝ;eX`V-ʺn:Ye񷪪 @]]]T:RnnMMMaΝhjj5o} L?L@+رcfBc87OO-ǟX,`0EQiӦW0}R](gKp:Nd UU-LLLX](T*ex^KYn111Wo)lek^0D"lF(i2L0TU52Le>P(oمeʘ[zORe:11!H'5W~e2B- Yiۼжr݆yͼ|> ~111!w`.(2*b~1|^BrE:oh&A/w=EUUe>DŽm%)>u](yu$SOr!WJY,CԁWu0fe_Mf IR~\O1MUUcbbbֲYRzT߯'HiH$D"a͘ >//bi6HX~sg4F2sJ}}/ی-y9K~I{Ϝ4_,I~##7?EI׷ߥ(UnՍ۷j{,veuh̵/DQ#HM+40f"edc37PHl=[LƘoUFsB.{K*2&&& sn>.s],b\eދlg+k*W~҉m#v^PA/: t:-e nCӴyP0 \4dd0du- c&czyts0OGtb=um^OQvsDI5n c&iz-CeE@ʼ⡃H ib\A`4-LoUj@ NF5TU {^/UU54M3Bdtrw{sMLLNrL-K-5PRP.VeqVP+`喲|hTKjg3u]S2v-;    AiE`8Wk[(Y,Mchm:[}ZbbQw@"Pm>@-E:̭D Es~sw! 
r+^I*9Cw;jeEwu,+%!irrRϖKQhnn.w1W'OʾfSR4Ѭb,ӢѨtG Uۇx<>g.\ ǏŋKuD_@if;v쀮H&bb[nL(_M6>>g}v^a>z衼ifsFeqQ|^z%-/]EQ -}ر$uADDDD0zBo}miiA&D v9؋yPl3NCUUƋ/Á˗/1/}<D8SO=ׯ %)07oܺuhYEm4tZJwuW|%Iz8p6mڄR*o~󛼿B\.p%{F{{;_y~6o ѝߧK?.J}v֢u];w./}:f@@n*F(l||@@ oc3lL/X@ ނH&Goo/z{{1557*l2fAzjjRЧPы D044$,P~/\m\"Z<9HzDB._ `f?jkka{Lt:qyB!q<oӵk׊C~_ɤ<K@x-bDI! "l\s9ߞYWgϞ]p~[g; Rl6444CQx<Z-Z򗿴L(ʢGDDD399974Km7ք`'X0f'3mn M4˨߅ Fis33k2bR,v_#BD+bcaoҜV c>.9)1k3eDZ~Dsnyr"sBnY\}XF74.4JLWl~esyfC57OEQ UUe3Jl*3{2<֗k~lכ~֔r{;>Pi[N ]ӮT}עRmޮlz]X,X,)*_4#6<<`0 0pJ+/^{ |n޼P(?z( |>2Lޞr7߄yl-p J!@4ٚ0 CӟTs!\> ncjjʒ0DMM t]Hɤl qY\~aiy$ ~ܼy|><0.]ׯ͛ӧO4Ø#V馦ۋ7BUUx<ܸq"Zq( 8TUΝ;*:::F3sCUUtvv"f᭷ނ(Ν;sN#',=MMMعs' 4Mo;v @UU\.ܹPoԑC<G]]# |}Zn\.qy-C BPEN ?8n]e7oތNG.h~i(tuuahh?8t]G0FDDDwFz1ic˯l67/iY޸F|eLO捊llcnE<﹭_9L&cLLLȲLLLpMS(oM x]wQNzR)>d2xoKK,Cs+n_~ O<3 UU [mbeɝ i("'>5֫ZI*U7+u)` ;[MI94ߓ{!Q60-[/we2רWsEr[9J-5,0z^ˏ;H(J|r_̽&ØsWnBVW&&&事bABut:[O+ o2 o|t"UUnnn+I M@ը[klxALB%`1zg@]8B2 ]GEQp9)c >_~2?0wv.[l?rg-jEXMvޝ'ZED8tgΜAkkkC~"Q__UUen/"1+T,4QU2rHM,ӏ9RlXohhH>6W4MC0?Ǐns m1@]dRp8o} LKl6+[fY;w۷o_Ўjt: 1::*D"hll;-_Wii#e}ʴw^ϱuV H,iL~-/^X>~3l61455֧w>|Pb 7 SqD+˗ގh4h }Yq -UBQ+˾\|4=>رͯ 6lYt:d+C`b֭[p K/`@zx?1ogp:8<|^e~ )we>(N< .X7kq9 Ӊ_]66m^:kZD- ?я<r׮] rWÇu`+c2ֲ8H7n@[[199 %fq%ܸq[lxDKQqm2/ÜlN< Bhnng}K.|bSU?k׮%oFgg'c : /_o>|gebWX`WUUo`߾}rZww7رchllӝN'}]}111ÇE_pKŶl|씯O*]qi9}!z,:o479=}Xnݢ#ߏW_}uuuri.2ۯk_<WCCoJuX~ Q>.Ein>H,#Qlo׹<gЀ+W L:U.^7v;TUk ݍcǎ- 6V|q_ۇׯD2nGCCd.]F5ˣEYw,_*?F #J-ḨtØqXEU(]&1h^ BF"(XBeFE2Qf[VLFg"I0Oޏf2˱]n{lz# 1s*Ҙ~/_n:0 cbb"^ڜOQ)-1-PN:BUCQ0_l:OWt6`-49b[hzg[YhzWWɤJ B| Y{)1b}}=zzz055wy' aiӟWښֆA9Zt0IDATp"c0::[JDB=ӘWi}Qx<@Q3?_p$kͲۺ!9rn[i\幉V<PiUU*`j,ٳg122¾dhͨ<,}~ɿ YCtw.s?kSU(NAُOZBUUYZ>(vڵj"p `ddذa~iK^xϛ7wzCCRΜ9ׯ6 onܸ!eͯPQ&͢=.^IyT Қp8:p5ax^ttth+^mm-9#G/Z34M5MH5kn/ 2~&STEb>lY~,0ӗ{{{{7n@CCp]w^}U#F E/UvX"oLyX+|laZ:*i -v@R-d1㛸Y>s9pn޼Y"L60q:un6Ā .$ReߺBt/^DKK :;;Z+d%[mUȊۗ*7]"" X@HLl||HlHE.O2,tY+4=NcjjJIfXpihh4MC<NJz(Tt]f7WBQm%ID" ^~뭷( N81kD"Ky/LΫ\tbZ0իW-byDwfɀzL&4߉vBӧO[5od2uj5G\/͘m_@M5M3t]7TUϝ?QMQdS>w4S] زTU#~/6=HXK|r-Yeo0~a3#7wv 
o(bkQM'*b#݋ߺ4&&&T*%ӊߢN{s:8n#Jr"ߣ[S_ cd6㽃G]Ķ{K43fIYYmlTiV>Ƴ* ?S9sLFv?::*<3u>LΝñcfСC˗/[;y,]d^uuu0 R)9k>>uiQҥKJvp>O`rn7`?ػw/&&&*OMMAunl6<'OmF$ B!d2߿[gypR.M088sYҶҥK~e=/]Z9Xi0 .Rf0`rXipmmmBH$hllDuuXuv8B!aԠJ3ԼgATWW8s̜}*?P+wDDDKG?jkkad`sxxkC49r6 x7gSruPvʛgzzxrfʢ׭ˑUv̙3Sz.>CCCعs'0TU>P[[ ݎ{`Ε.N#ɾAv;j^~hhh pt 6,k ײs522xnY{h\vMvP[[D"/zUFy㈾M\h4 ߏh4 0,fd2P$ !JY2P(+WV.;,2躎h4/1ZA.3A 4A{w<lU͆)qx^INa^Z)\~WE/زvN%y@^?s-Q'''aÆr-t:o: TU>?p8lLJl˒Gn?׿.8xX8|4;#'E밡"Gkk뜃QqdQ\xb'e* $"""DDDer50sW^y---tXP泬b\. E޽{DPUZ00<_by͕ƍfgU/d׭sw=q ^t @~{ݖh>01<zzzpMDQh]eDDDDDDDDe"?|yv ( 㥗^*^t pY@w peznb>-n]y֭ԑ}颵cn݉KEJ[lXL ZL$1dKR1hiW_YAwQwZb2 ~GGG FDDDDT)@%""*#1jB2D SO=5y_xÇ2{..]rY}l69"84@HMMM X . }lb g}Vv;N'0",k477[1N#`ͨO~Yb[bv\.ly,Z۷X5>>>r"򛚚B2(vtN Z*TUUͫ><ܾDDT)@%""*>DA{{;:::(ʜ|XgΜix8_|20^/<؈x<).-$N>cͭvsħNf{<>cX҂x<Mf-W1o@UUtuuNFUU}}}K^DDDDDReS, ̀0 uX:I޽H&Hs'9j,n'ڵkؾ};jkkD"-O-.L|~ GcL6t^dY\t s|art9!|AKO\uQ,XLnH$FB!477QZ&d2?_}ռF/RxN,g?{f]$~p7׿}?0odgKDDDRII{9K |br[IJp8,#36sn>00²P>6mror-zϺfY=zmރ(طokŶyăM6mKDDDTxEDDDDTl6؈d2ٳ| nr]]]Z\%"""T&"""` @cc# .њ*Qkhh@'""""ZӾQElp\p\E6-6LraΝ닸tws~@tzֲNMMr ll@R@ #"""ZM@%""""Zft?8<$.\ǃ~8/ UUm6@KK '''ef!K~.Zd2}… ͆>˾yѣp:EDDDD+DDDDDˬpPϟg} ]KgW\(^'''-Xdt] CQp\[o;v?.wDDDbت}7ι~IC~3@ss3fwE4E%3< ݋xEAKK \.z{{SO=`ReÇ~9Ӊ`0@ Z$Ixꩧ( ]D=#"ijj 8$2`0h9bG\/X̾(eD"!B!#1 ÈFv-in`jR)bB!9M49o&uBu`xނe2OLL:u0 p:(a^7DTU5E1D^tZj*B!111!/^E1o#s>2L&c~CUU{4&>BCLF|+.JYUTprwn.r(0Fo3;xtYa1$iR)9μE/w"h洹7bc.$j. 
(-*< Gs6wE0qX#瓁Rz v&YP\f #Jbάj4p??.yJVW^A]u"'NѿPO(v>sxWyi}9t: $^5[l߉NR ǭ[ՑqWWPUU*Y?̫b>k,e|2`llhf޽cZJ_R5^-Fmm-\h4*D%""ʕ+jJ Μ9#ٍ񰮈h ذaC4tL,ܷf1X~}4M o +ʙ뮻3M:FMM yF4iu)q8PEQaۡ*{=w}_zQicUK-EOO1==]U%"ڴiӲ>eu iZ(wb1={V?M6w444m'ۨϛ7 o>t<@$4s߳,ֵXϙ.waIS,7bo:$p8 ̴`~%9v|7byP;putqLt8x"ַ<s/cQ:::xȞ5~֪DVfڛWKL\WW:[UUUr庺:9Ro]]a8L7oFcc#qF YF\.W^0siuuuQy>EliiyvuuYuF`e] -X這`DWW,C]]~`f~XOSSㅔU|oΝL&uV<#rZoouHD+fF0PWW_Wl4 `rHuƍK믗4~۲B]]D+TQ?cccSNw=ؘ%\.ל7kgy{1躎^{ ի%/?{u D4?dJ+ eiN,bde @% $2L̮ɰ"鉷Ll6"&i&4RCJH}p^l˖lK/s~琛-[pn!FWDjعs'VLىjt:|~饗R% ?F8i  !ӧFww7vEg]]]JӉѢNHDZcٙf3"v;+tXw{gg'U͖ÁD}vtwwK]FhUT`غlL(,tH6kWW +PcıGpoܺ'QIRz'5!r$/^ N_üSO:.k笅dxY~\[?"cڵ)4 IG.{ISE^vطox "2,h4hmmMI+l,U\$xEQ_^7冽cΝ3 !K*2=%e>c6YNe^VF466G,#t:.kPWWr=dӕevڄi[^ONiL~[5^9~QCϋe{]WCP2p뼱/{R!qĿcwS{r`yބ[o #Bf N'S) Bף˗/Gww7, ۱cT3)]xҎ|rLexʛ?vcxxrY' f˓e^/aٰac$H߿^.;9L7ʖfo'? O;23r {.xOuž~)wbVٟ? ~\Pn<769ۿYϢ /@⊜FEi!kr5H Inݺ}Y~ԆS[o[8Ze9g (iy,s<uzϺɶL5Jr 6lؐNCggg:ܺuՉBYT7;p )- VX wяx)8q}v/US?ļ[/'_ͻ!2P3Ih4iX h/FAEEp`xx8m_klW^y%og֣w}7~< B9`LnIYUU8~xʫԼo\%~Bp; ]52괷ؗعA@.yJҕI`{mFD!V5Μ9vUkS]r%A:H?07!mdڇ^hD8GQf NַLP<9.^D" .@L6$elFcppXsj %͚ 8NH:tݪ&J!Bn.>|߻~8?oB!s@ͣ:XVl6l6p8 `Uxݹs'jjj $}v2v-!U/^|IՇ˙.ϓ'OٸMMMhjjR 0rx_s{ufղexgQSS2LNt:^ݱdʩڶmp8CŢZ/LB!fsv21y~C~'XtBWwA53-?9 Wbvqhݞ~/ zYӾ}sCo)@5So J!V"pג7`*6-_(ŋK/?a~?Sy,C__,Xu֥؏t&c?O:w}k֬}7nvcQXWY: ҥK5k֤ȉb8s en,]wm{?LGw]ZJUtyg>ٲ.ضdpxx?ӮB)cB!淾9AV.7O oǗDG6gn>`?7 @ᯎhKr~tB)@#dбPB̩m j :}29S)Z=>_wiO ?(H|oz ~y_!.)J!d>P !d2 0 9FlX%ۍX,s9bSُd<1_X_쓭fBH;u61秱e_fE = ƂO?NY*ãieӖ+dr~eٕ6H9,߿ t=Wnw[gr-ָ={l^[SF)/_._.9BHQu2c7|>)i,ƦiKeVkʲYGys*,ODGtcA)qYUYcqUreݎ hT7Yx%IJSvȴ~ټoNTN3%8*6d O$+ ;> _2?ݮWr Pd2`P fa$+[=%Id7 H$eIÖ\2g.PӝC<lAԩ*! )`2y걐^3SdY-KNʲ, 6`-<Ȓ7obcZt7%ݺR\nA4a@D,ߟ<[\睈\̂ʀa間||$kV,sM'bڀ! 
J,tۓKe`3S:]=+Wrn6r9_}/E6V)hDrGܱz6PBMN !r"^[ow]TWW~\|b444@3 L&S6Լx>088;qŊsϤ))P(p8/\~/_JsH$2/[ߓ?N[NDGG|M9s}}}ll9 Aj*>`Ruuuix"`ƍ ]ŋq>sYSw*6lؐv(8uTt#I҄b1HɄWw׋W5k֤,cϞ=͛7i=l6>N7}J=B$X֔rFx^b11z3?^Qz~zٳ+WOH% 裏jժgbx \2$ ?FEEEߢ(bz=X_p$ʖ̙3ǓS~X4[w8hooV}xŪﯾ*76bA`2f}}}ؽ{7<dB{{;HA(+1趵/v---H$8uv[b<]ss3<Nw;'A`B!(JhӧD"`hmmThą xN/2<PPYN:[ ұѹsM kZ^^:qKuhr8bΝ;ǃ/kYxʒN"իW3@ڶm ǃ XCӉ v%l6=z4Z^t)e}*~%I~ʼSi&U nذaf\t =шiɺ3a`6>l2wu>rBvox722we2x^XV<3b8pvڕrlx衇o>l6={[a "|>n޽MMMxwxpf޽ZXjjkku?mB!-.\ֆJvf3jkk܌@ 0X,ٌW-HvnD"ߏZ֢-_<Fh4xrNz=p;wy\DQl6gguo6y AM'X,`UcǎFFFH$j!ΰkL2aAO,KFU9qRd2fG'cM,J>`(io"C O (hlllV}v A`ٺ:{j:[̕+WUuۑmNfRWWכ6X H.\^t:-;l6A@<<֟:Flr+ya2Z8z(F#$IuOs]wr;w>|8%Bkk+***vX_hDoooJ~۷oGgg'f3t:?_XZdB!(P5Bjjjpq<QUǃ>aRQU7phZDQ8׿ucZ,#GRͳdլeC`c=xGRʪja6SA]f 6m8zuuu 6 :WNk\HLe޹d$ajz"@CC$I’%K0000JKGn*++100ۍe˖b^iV+^/~|+//pIǃ6 X^mmmnx!B tfYufr}Յuz꩔y= ۝ B}.W`Zl毑2M~n7_߉;X/,c~BܖWpzFan$ b1TatiZ& $!|y~vJ)($IUfV~"AplFH$Xr崬sB2FCC(BätxrwU^ŔcsŊ)ys5!BG!GB8򉢘2LmZFh~|cUݮ*;ъGEZdDQ_4v!t:eϧZi"+?[VQQqn,/[ʑӍ뼳TUGg>Ff|>YEYre+G5yzǷ[c1r(IrFcJdyl?SHYxr\.r,#ؖe9,ɟGrd;1]itt>)[x\5/OD杈l晎wlA5my"lH$k~KXL$>h2^K˕n:*FrOf#߿7Sy3`jyW\.Wg,^x{/_Rj[߳g%?5j/\ӞBWZٴOB(:K孼!TNl~|#P &qU0ClH$ϣ 2n"lSnL&SM.@y 9uAeǓn"B0I|r0L9wm9ꝮNޓYS7lB0].We\4锝N'߶rLM4]vߟ]ctyLxJw|+˕D%eId)L&t@e$d ;Fov-T>b& fGԩ*!h4(%Ho;xN4?6 @O=-{}f(V ~o^y啔W뎙FV֯B0<bGc}W3$b}mܸQ5] ^ʼsxۙx6D)rDզ.477pQ KK˄7lea4j9Q)axrY'NzOT(_g6 jA͗nz-;|&c^~j زe˔˕mzh4@EEEe2BZQ>~m9b h4=J2{"Bub<[d l6qF\.dL6?֧Xڊ`0al0 _l}]&i܀^lFgg'ݻ'NN:p8 0 SSSYZ`Awygu?22׽̩Sfm6Fl6%e\M3<Ax|9 !cAwł~׭[QSF'fxbc[y!IlϞ=7Syf@[[b0@q,8{zB!j: z,Lg- GrdJ~:Fp8) zB4@El6 ӎM,o~iof3ZZZx3xtb…]t 6 GEkkkʼ𰪥Rr@ p8Ⱥʼ73F'Oh:N N4Tt:H[c֭y_!7j|>`0kرBh4A@8V=خo0=}W__ F[gy?N{B!fDҭ{}21VtS&^uu5dYfkBYkk+z!>|fTTT aٲeޒٱc6n܈:u /VZ8r_۷tKpBA7Z/!QXQ䝂O|QF#h4+δ|䧜?y$l6Bh'i٠ls6<tf]lP'Ad*jR.|' <*+c#<'Nl`1ut:3{V&*\ ;[LX$C\P-BH|th RBHj߯ YV>j2șK5^ъ}>l4,\_eX *˟ IRPcLGa1 f |f^frwX`[v:enBfbnBRQSGTB!sU,sH^>7cL6JHь' \z/Zy"g}6tb˘ID|u*2Ad; Ñv~˕݄bnBRu^ _B'-={U;u?s/~zр]5E!WqD|&_"@oo/$Ijh49h4I?Zmэ\hrZSٖs!x$,[ДB䱇Zv^K!B-(: `Æ шBpz. 
!iW^`di0;6[oM ~\|yBoDQ>}mB!rSt]wraXfMѼ2Of??|]]]8qDO!WgtD?$IAaĶmoL( Xt)g̔޽\" pQ~~#2vlڴiFUT]]M77;wΝ+t1!d+IdF @AD"iʲ,i.Kf"I&I<%˥ rJYrM[rYyXQѬauQ.-;~4&Im\by=XYVkrq:|}qGV\2q~Ng|߇s|R#_.EʊgϞK~j(_"%BW 2 |JR~L!$Ig2!@kk+^xtww!^/v;oMY[[ ׋'xΝ^GMM z{{yÇC$|>> ܬ\jbΝ?$ BغukJ|ɌFkkkƺ=#K.A"H`ӦMx.>{>-BW(&LѣG<(t!AEEnѣY]"edݹ(Hի3oPOss3&u2Fϣ"$۷׶7o ׋~Yಷժz(\Ch4p8 oz=_6H$x7y9 fer5DQ~^h4ظq#<N> ٌP(+^SO=!tJWף($IRomۆ&z*`֭}ÈFn\㘋/-['thT]7^}Uq UZAs=4w'Utr- `NEEN>D"a8{,TtttbGJ+M6fͺ>w܁[nr^bLe{|OY\j% ᶻZjiF}Bܲn:銁Qub`0v/<t(v.\A`)Ӄ_%:::[o=Ɵ~z{{xpĉ/39 !sU4Mi! y;Nyj:k׮A˗/͔N> m8H`ɒ%0ؼy32@`FW}5]Eiɒ%#aٲe/%Sh4f8tttLzH_UmI5` ?V+yʏLT;>ׁؕxsBWB!0-NSQJePZvŋnh4;38kY4]Ni9p:uuuXj, {IZfZV+j=u g}V5Zskk+j_'OFٳg+D}ل [dŽ;T:::x?~˗/`NK3^AUW?󰖢9=!ׯ_WM7TY>: Wn`˪kq:tٺuk΁@@ЎLeΜ9/kD-K.uf uttpLz~J0Ǖz|\ȟ_PB!)轣%V e'(GGGS vU:v,rd% ].*?QeAd$|>9 F!d,dy|>rUAEͫLN0u/LAdAdYQ^\kݕ8x<s*;[7FQj} )6d2g.`<~2S&s,yصByO]ҝvj>vSbQ~h4sS5s\jUeL[nXLoAPt7nٍR689Ɩ +27 Pee|}vsm=@q.C)y*oӭnYDMV̔ʲzP>sSdy\Ƃ~ `W9$ 2ʠ 7eEQ$),t΃xDυίˮ%SnYVYr-hQl]+>,p/%FFF2r-_1ܖi{/+~Pj[_ G~BPgiX `0o.<[e t7', PMP ,h\䖕nӉD"LQ7|'@ey)'W@>ꞌs <$tD(c2c] 9>.m<.C)s;d:7D" x\9ϵ, V(̮ &R%yRe}!6LNor`6 N P !}>P'M~?-ZR  455e+yp˗# I Lf4\ֿݪURV^^̘L)pFh?}48$I(z)}"B>}n[ofS;b @wwQrddU~`R#,^2V^O9gK aB7,PaXҦ9u\Qa7pP(e(NT[iǎظqctx<=z4: BXx Q!"֮]H>sDh4eq:k,!!BnJgΜb:u1OtjSFj|]Q{n>yc֍|z=A-QyYRFf9OV(lFmmmڿ/^ >9D"xXDvKݓGd23b(--M;rDTWW#"#1' 0 TBHqt$rTרBe۶mJ7Wr%B)P(@ ۭ(b1 ``` YZ囚~HK7](O?^?L H$;QpYVBRy`A/!C#ұ&e*n㼞ʾHs){2-7S>S{:l0y~ʁZ{}$Ͽerlyr?7؏UBf  >>nP ɟA9H$2%Vy%IJx X<$ߌD")`l䁔'$_O<ب(ɣ `)SՂ [rze20Al'lI>N6Dx,o@Ro`5npT2YZL$IRWtaA[(;μPeƠV>#4S*!3Bf m@ RX`5@R1=gfil>(|>U!etPAYL X:q:Y'$_O|Μ9cǎa8v***裏4L£>CCCxꩧR8=(⩧R-ȑ#z㉕D"FHO}gB4b?hite BxҶ,XBB!tix}8QI`X-Nhh[ ]B!d.X,9LH$ ^ۭ*O"@II jjj2:Ya nB!s < M* 6d2AC'l700 QzS(J!a FyTիWvyԩSz444@EalݺuB!e7oF8Fmm-B{1x<YP__p8 ٌP(P(ٌp8\.", ocp\رcGfrc-M`~N Ák\p555^. 
Ν@J[`Bo:B!dڔ|*++a66CCCX,MR[[477zUFs; Fٌ+W!kZ466ڵk<͵kبz( " ,S5]"k42Dp5*Al۶Mm @W'dL= B!p8 \F:444F& ===jҥKQSS#G(//$Iɹ̛ 4Mx,X6l@__illĚ5k`6qĉzʕ+yBAڵkUFFFx`rR 'ݧt(+++uO˖&S+nNi-#S&o'Q TB!dX둵kBl{MI VX,P(70nPo"aN8`쇲(x<40`+B溵kv>ϟ-knާe&i :z@!3BHtOC*++QRRR lذ!2o8k9ǎ l(++âE5(ֲbe"xB+(WPnKMKh4"> -Z會]]]())@PRRүyD"nm۶A߿?mX,6n7JJJTn7*++a0PRRZ6eeeyn,wB.@%BW}O$)i $IhD0($I)̛D"c2r|>_Jڊ \v $t MMMY mL$_ĹE"|mx(]y>߃#oO9sJɋD"]y$͛vJ[ww7AUq)alݺ۷o$I(06 v7n(<:|c/JKKaXuV|>XVx޴}oݺطov;.\uo o2x2TBHQ:;;u/^ uW ɓuz6S6S͗K7G穨@EEmۆ^P(QF=!e"ks#whM_1F^|yv}mSXX~/gXX~{x~d(~k@ r?f}}=JKKU QWW`?ohjjWDpy@]]z-^^˳q9*!8P TBHQ4n ]4Ǘ#ox~]ya%"\.XVjV=LL$S`pA@e}ѣʁCR`Rl<P`0bH s͚53!$+z2#(~m7<[(ŋq]w^Ip[.5h4X,^ϗ}/^^1&$faZ3W ȑ#+sN! CCCOPQQݎzk*[2>(/KZc;=ػw/c߾}æMp A8q`2߯驮jMyhqXЪЛ4wM-qq&>4m0ij+tnz/۰yg6>>~2=p-߿ 9T1Sqۿ4z~;k{~{Z{{ysv[UmD_<2/ۻέΗ+2>m(//W=Dlhh@oo/vܩ=ۋ&cXz>hjjoDŽaYѨq("; c?:Uv6mc\.dTS fu. (Z~%%%虿=-_)tnz7_}^t7^<:?Z\;?-u񒒒L].R& ̛7OUv %ƺ)--j׿ul6 `Æ Zعsgw IPQQf8uh"a>Ջ͗ Gr~ B0 Bnn}W !jbtt$ NJIzaDx:6&~*x~6-$hii0vR)@j% [ cB˯$R r[k&߷Vrߥ/qm'gy-\WQQIp!>u%70r8lNap(njo/'wxYGUOOԄ7Bc˖-C7+ҥӖ-[x~6nǃ7|SZx? ա_u,Τ ?{,嶯oܘpq;}گQ誑In'?oOPr뭸[,tJїq۰qiqW9ym_7Vgw W1_ǯ?^0n~+űhL&1V/~ o k!*@ [fsرcxwQQQۍv8pեGvnZj*444tɒhp1۷ЛxZ ॗ^Boo/!nˀnf۷:wx[. n%X~UU.\zM:GLʱ#'AĞ8SP~?jjjrP[[HX LeW\uه⣳Wg<uR@q2_ܘxK o4r;vg><9~|`,Paڦq2HjşXPKBo|bM1eٌ?Ы`b[wށU_%%%@av|cEtwwcڵ)a$p8z2PY'@8d,pY/˗u:A`|jMG!dv(h'6GWO@u;/ote?˗/ǧmYbֵ|h47]>)#JPn---A3>^˖-C @,CWW<Ϝo:8Ccc#:;;)cIa444 nضD"hmmŕ+WRfF$ & 0D4E,+j hP2mYUjy;~O )'NHBI'L%xdߎk{#;nǧ~e\ uhkC@\Rh&v<މru㧿Ыg0xw\nDQDii) j X2qeYՒnŊcǎ߲e αX pk׮ASF-[׾Ax000766vtF͌i "0ۑH$v4mmmyZZZr {zuVB! A x۷%%%y?j*8N `0<׮]SNCmݺu%%%eBfP(im:. 
$eʕu~#7SS3|2/ 6K'8)nvl߾>Ξ=ѱ &6 6~_L)^/>O Y2y3ϫV^G]] ]9aٲelA?P⣏>%ٌQh4pfR!dZf=܃x?ƞZر7nlC=3gn0FFl۶ կ~͛7^K~hp)Ut?'N({G7rYb1<E۶m+tqT***`Zp8e˖qG7L%7+}M*tQ2 :3!M4E8 z%׷wƉ'TP25VE]]F!"<:;;~ߡ*!dt:466LW>Aqo5jdKo@,vލp8s嫖źupʕ,#!ol2|k_$ 9r022D"Ȃ###bjH$8|0h4{ رchoof:۷ofC,*!CB!v#H3~iJX p)XX @ ߪP(ҧ[θlF)&+_(JV͟)lx<D"ˑm[(Tgi$ )ٺOp8`S^TFsʏmtuaӓdF}ZC8ơCr.lՅZTVV+d}fW-sIǾGQϞlFee%sG !L^($ ׯG[[͛U%IPZZ6,Y_YXz5000.l6L'  YY[>Bf  ̱@6hM&Ddħ KD"jUeL&YEY`0_E9 Ld9'*_ITV[g4u++?.+c"HEQD")y\.^WՇK^VtAHOYwϗ\ݓ?mLU)˖>|)Av:r+i=˟+˲,,B]>kt\ٺAZbtt$ovܙL&N4[r۱m<|ywmN3^BWk={6/Q^S|4dY\\--㗐-%lx<Χ'I4|OmҰ>;b3x 2SX dd, QAv\r<]. @|^bt:S)l)LI6^L&j,PÂ-/ Qc^2V:;;!˲^AT!OhiӦrMٳGa4Uthhh$I9wA!d&wmm-JKKywGD?OiN Ɍz'rSb7Mcfh4cI>'.$I5,C[[DIDATB!sOY__u,Nf=ep(^^w˩`2x܌;w[U$Fՠ>ʾ x Nޱc^<95jvl6GfteM8pXn4*tXn^__3g`׮]dp8ۋ۷Vi8Δew1w0Yl$ ׯ͛~D"'O~dKKKa[oƔ]=.X,>e+j\._dK!h@ Bc֖ŽЇ>.y\. @v\YӎNSA|QxǗn!QRR`lv3ŽG(---tqP(sM)\XEeX,G"WLo~x %N>t:DQnU.H`xx8r2OfLH$p)u]Xreho6._ w})-Yn^O^RnQPQQC6S٦+˚.2crGQSʟGMM oM2ʕ*ammmͶŋU9~ʙnEMh4}kȮ$⹦SJ$n'aKԺޘ.l;{lړQZL?Gw6c/Jl'_?d{&˄6~^>Bf  d.a?s(;Nފp6 v=k!9::Z"Z >yɶ|q[k)li䚎Q ԩ$IZ4y?y^6Bf>B!$(غu+}YDMM .^͛7cxx#kߘӃAlݺUէn8t0$IV1vލgbȑ#D"$;tBW"uxk֬06(޽{a0r`6'Ç37% B!2\z`z!dN B<"I*** ]$2zuuuE$! 
PUUxSvmΛ 5::xM$7o;tVoW !B)h4tP(+oCCCYbBH$$BѣZQe(nlBjIi&>p&iBO~?0Vk4 vLP,[(J!BH~TVV ͈b0())Qc]  ,Zmmmf`0Tf`0`޼yhnnVRn7,Y2Y7!3!sۦ6-냦Qx|ѢE(--I&n7 j˖-CۃI[(-- t~6 & xgl6d:tD R-Bee%UYbO~RUDȬ !2q|>Ñ!]]]t,iGTB!B (b`ɒ%E;wAO`jԠ555xah4gy#;;;a4Ԅ%Kp>,x׮](--Eoo/DQ,j"dNh4Ƴ> 8z(lFWWW<ӕCjkk܌@ ql HKn} hkkK[VBf$Ihjj`E֖v'd(J!BHz?>۷oǕ+WPWWuttd2Ckk+ZZZrrpwB󡣣7n0*ӈD"ؾ};t:!I2***p\.U[x=On9{< WN(%D,Y555t<]YY*[nl6[N]20l6DQTSٛLJɊ.6Bfr F8}4x03H@$8N\S<֩).0HmmmZF٬ʳ^UUPb|-[L*׋+Vʕ+h4صkDQDoo%Çv <|:jE{{; E l6c}744(j/̩S/^\R@0[ao6, ֯_P(@ 6o\⑛[ >K~'5Ig6fŭ[f(--hDEEŤ~gp,1t&Ix0z)oٲdq8y ]2KQ ,v;l6vڅBصk!ۨ @2[a|`].)HZǏGMM͔/CdY} KI]-ĮЋzkUܭYMMM۷oC=4&zNW\[fK^Ii~c-fycs2!F-Pشi쇃$I 1)H@͌ V]]H$`0`0є`!̱cE_osD/CM:?0?^2߯?[5!C1 Bea} _|U|GH$kd2уBP|QQQYYYӟ!H$000J ]4B ~ t Y׿FKKKʫՓ@5<] ,įSuvٱ _ʸxSU6)`y ?~OOAbIFFFT󱍳wv~fbO|\.ׄj+nEf}H$A$lذE"dFBABfp+7nܘ\%(rIwaa|>nnAO>_CCFGG)db @ )_ƍxmKB!:s tZz+i}:}(Bf[EQx^hyB.psBص]y=0zF:lٲZOP !B) w^l޼YushRc\g>}U'ZSiy:KnSДK|)? l+###0ͪ]pTetzB!dB8Əc mUja2xB}?G6J9)t !2=G}D")[v-L&63jJ!BHuvvY|Rak;vAww7AB!`pp)CGFFTߧ!w wC}jJ!BHt:>}x>veeel)NB9Uuu5AIA3bXBm_BHjWEQ$Ik?`,zUˍbhkk@WٜCTB!BF?'xbyb1x<`@II aa SbX`X;9/لB!BB#knl6ʭ[֯_fܗTiZ\.%KvՅRl6k^]sO!BHb1|'i7k~BW9rhx<4!, `03H$pUBWdY.tQ2d2ԯnǥKjyf\~6 .\@GGn7-Zp8ʕ+'޽{/bځ$FFF`RLG@wwwW!sR"fhd?\_bB9c.WUWI]̴3gΤ,l6Cbh2^so@%B!$5REEN'Qa0ax<vޗf`H477cϞ=-[d\u`N &EП?^b{8sNر5~TVV mmmH$0())IIg6QZZ yLII *++H$x0<@k6gkqGf8Ƀ 477l6+%FLnf ӕ/֖!rh4e&Eŋ.!B!Y`,^8y766D"3::VUzUOgg'zzz SPh4D"8w\W-)>x/]8]MHݵk`͚5 k f'fppPn``555F$ F6 $aɒ%~:$I-`0ip D"b8(""o, ł!cȗۋTUUy3-b<ǢEPSSW^y(++Cee* Q+ eeeE=h!*rv,b!Ad.E\. @6L. 
!BfQen/tQ`pyveZ3R_Y?5jy/ʯ;(_._.7&3 2ji q>ݷ(oM&,*,87l~ftt4ޛݯT&\&6Z)KeyL&U=MI.[x<]Vx<]әRt3gz={M= , !B!HoXB_E0r>o9zױ_|M FcjvZh47xVƺx<(++իWU^g͚5Z(z{{a2T}VWWh4gw׮] U|AÇs.իWʢhxw}}})󴴴ퟬe,!N3,&O(EúuTB!BfZ"Sk.tEQuI\ :6[nR~u>{ׅ⌉bl$7xF>+D"/|cELORܸqcߌF#^/툢* xDfG)P__I I6lP")hBDQLۢE&o._uVq}K^1ܹ+::̊b(--޽{+Ɂw}Ws:VX`,v'<;IxΫpҥ'sI^Gkk+vޝw V+oHG?Q^B!dnZx1u-En ߼K_jƯ?5lC| W'Lx~/ttɔ˓ >g)Yd2m jaZp8yߕ^nfPO>j>NJ`?LшӧOSǭ ׼Iqa³mxTX,>TTTdݖW\L&ptW+t:gϪ~}A'Snoh4Dm(---t3B!BnV|.aN>ٔ}U23~il;vdw˖-p88x =, ZZZ/zUnF&ӭ]6v Á{l-Z|9СCظq#z=v;l6*++o>|hiiA8n@e7y<,_˖-QVV`ݶ6]v۷<쳸|2߇Y넆B!2~?4ȬS Gtx_7n+/tfL4dªU2jU}?<|>:::p>(~ ~m_*tfT=:;;CU꧞z*e^]&M+*# `P]ڵk)骫Q]]DѴCbf3pUt:Dkkjje9%Oe8<(δ]W$7tBP !BCB  uYDfГm}Tjy:, 9|x &zjl׉Q|t" P !BC ]rTWW󡦦Ft? C!E@%B!Bf3JJJՅD"6OWRRٌw `l𒒒%Nuu5" FGGzB)"8B!BSSnchh{MI a˅ׯc޽xPZ-F#p lz,C{{;lмB[,+zxxU'(J!BHUWWcttv|xa2x: 8~8N'NJY@sp\‰'Ԅh4ӧOOL. $I|l(J!B!2D"a7oAdNSȑ#| @6+W0o4_V&R~M`K/($ @gvMwr*iZy@)M :Nf==Khc3Cd~3MvKjMv3ldZO cz4; ijؙ$w-(4i8*$7bJjfb5IJ-ْ|a9s$|qF555G555See$phŊ2Mno丩.KtZ7'ܟLFu9CCC9o3P1)-9wN[u级t>qohG:s'V};ߙIFr8:p֬Y#0d$١w-IUUU бcD{L*--U ѣGO^ׯW$QEEVXp8<'T@xj J5EwгoJ~}G߱D+: pRjiiN%+4jϞ=:uB>"pWGѡCР@ n\nZ}}}T2ir\:v옖-[/jllT2C* ܿ)SSoIRW؏ݿi 0u׮]bzޞ0JU־q\  mkvF?v횑 bԤ gB%*Pفj:ww9pP5:֭rI{紽~~{%TiCWx. 
T_WBν|uOAASkڟ{ޞV{?Wן,LvhMuO='ᆲK_KD `MI?/7>^۱{}حo:s|Ǘz턩1P{ۿ}|wGNW!I~Z??{o=!]fI_Fi''Μg=_t{00*ڎ:y ~hI7߽g~-ג>N$?3 МR{CWhg~?'Y~ߗOB5:L=zg[AJJJf}%%%y/BHo_/:_X/קf$I+^ ׌z{@ x쑢no<IR.״tǗ_:_oW8ӧOLo(_dI7F z?ө$o?wjs^zKsw$I?99lW-пeUWW( [l$B!9D&sQ(Rss3PLکS݅Њ^|5CSpr_l#f7k;'-t.r{Β4Mb1|idCNlذ!ozŴzj.S1tRgP߮`08f˭+++]w֬Ys@+Lȑ#R)Z|B 6lؠf^yV(*w'0b1!I/x<.+I2M?E/7vKI7>P|k>?^IjIҳouOo~I҃^]tqxY}_X/U }ahΝ PX>-[d<'OK_vءf{3^;ޥ?y';ISPWW]4bjjj`{~gs|ۼy 488P(D"Eьuq8p@.]RGG?& =+=Iz]?N{Ovu% ippP~_uuuJRy96Yȩt:p8VIҶmݥ1>*լofд?_qUxtI+ ۘP(~~JxGTRXWTTLhџ/7T.۳6w;f[ꦛnSO=f577v@yr /B~Mx֯__/\.[ÇWԩS2MSDBa?bH 87C=D"իW++GU$ʕ+'#<"IڻwRѨ}ќtÒ;StZoa;?HRz饗 v~`2Pɖ Ø!j2aJ$rڿCjkkѡzviGGǤ'2^v PCC.\m۶7c|jkkS"І ۑHD*))… eZZZ|EdqC4=8u}g;饇g|wg\%%%d_2;]_]-Iz?~i͏ `W.]E0S}YɤRԘpXLl_>?zx<.IWl/zrkfT*zKXLK.m6W,%4lkw-<GB].׸I-|]/[4F/w:|c ;.Y!|y^y^?>]  TL| Q O bKJx *&e6WCTSF)k!*)C)+!*)!@Ŵ{Jx !@ŴkJx +!@EN[Jx @EKJx "@ENzJx @EjJx "@Ō(SAS(!*)3*!*)3._!*)bCTS*flG; Q0G $9|w3P(P1f*D%<_p (\ ț\)ȫ醨IȻi( Q ONg|(ㅨhtzh>}:]Çߗ,Y2# @EAVUUiѢEڽ{vޭEB!IҖ-[@YCnj Q$)HA J$@ @x `6l I2MSX,@,i>;?PQ\.&-ۭ6 *s`P`pBx<2 Cd3+cr'NGeee=όsfZq:~XcJP|"$iǎ|>եu4M|&` PWW׌4 9O>/Jy;v,cd2&B_$f[ ɓڿ%B!mذaV2T\V:{)eG$-47ؗvj*I>O^{,YB,0x<555i֭:|hH i*4H(ĉJ& cVXaY9rL]:ԩS #%  Ryy|>n7Epp5Pd2W^yE?oN8(.ёZuxo]ת>~ \nreOZwq*P8fL2ԑ#GtС+#+.]ŋodG~&I9sFtAD"VJ05WX,2yތ[:y򤚚TYYIx: \.*++Ԥ'Oj``@:z*++S,w0"L& e=aࠎ9jrT]]#GhppP2 Cd P0T2wW0jF2 _3_'OTuuNg,NuI+H/0 E|wAZx% n)3fx< V"Pضmۘ}I&/1kFeFQP'> % UUU) *Kx<`0*% |w #S $E"y^GT*CTJ===*++U$$`"\pXiڗ-\PWVOOd2^Z .TMM!4rU߯~-Y$]  G;|:y߯fIRooz{{% tn|u3,L̙3[Y ay<|w(>jn 9񨩩I[nՉ'٩N% )4pvkժUZv|>n7%I% b1:tHGU"VmmjkkU^^.@#*++UYY]v)Lȑ#:t=f"P$o[\.˵tR-^o~#ֹst %1o@ kjEup\   J&zWPofFx@rB_:-_\dɒL:$Ӻxy;vLdJ-[OӺ;LL0-.Kժ$;T.CV]iW4 C+Vo[H3zǎ˸mSNMyU 0Bpr % Vu{ァX,DforXcJ>O^{mQW`jHpYIǬ߭ॱ&uYTY 4v3|`|ȉ0ru2$Q YG"Gp XWA1 (W0^W^WϟwW0x| @,P TȂ @,P T‘0߯|w0EPH6l}Q z O577Lx|FE*`^XnLӔ$;wj`N>-[4Muɓ3?T4M||w 0EGՊb2 Ci*\/i'0$}w~&Pcܲf>P%cx"@,P q$IN3]  TE) A<B%*dA Y`҂hѶL*&%+ܹsE>0s@OO6mڤ`0ݻw+NY'i*++S8V*ʸ?* *L*NEQ=#z^ǵ}v-ZH`P===7R*R0ԦM>0PX:֦MTSSN=zT 1JLa]wupŒ0ܹsD"joo 7ܠ7|S͛GYY^kժU:zjjj}vI$544(>#D"zᇳ 
*@TkkDgϞi,Ijiii:v PCCØ6_|!@P ua<:tHCpt_T{1IÁ$=r8$ix's\C#%ɬՎ=#I:qℽ,Lڗ[ZZOGcc}ia޽~,S֬Y#ۭgiڼysO%0.T򠲲R@@ifL&i&-ZH ,PYYv= o߮oY)N+_ %ezlѢEڴiRJJJFkh4իWk#Tkx,ájϙ<9u$kA'TGGJKK`Os%7n$=c۷o$i͚5mĉJ$zGp8 d映2L٪U\7t]jNu 7J˗/WGGVZV{cuV ;X[n`- I9N CQii1q].֦NUUUT}}}zW'Oc?/L&x~:uJ555J$  r8FD"jll۷on֬Yt:=n@ H4ߟ`LL.7оk }>Y9ޡ{?t_/䤽}.w/lIC@`!IC~^?27 cv]tihhh޶c~HP(t~4q=f?#wCА ðuhhh(d?xK. ^qύZ̖O*fM4U0̸ÒL&T4Eќ3/H$ NaUpU_8V}},LTDBϟ>m&Im/*X11I&+;}> Psss%֘pam>4fOtܹdBk׮$9rdp88Yڵ_bG:D`Pw7̶MII=^DR)=:unI×pLZp~8t:۷L%%%z*))=y<rΝ;H$TBr{|pn u1!S8?󔔔TgyynwF8.0jYAʘwtdR#<(IJJJsaMddU(7b!Izwo>E"IR(֭[3$0OHΞ=U@Ǐ߻wd:::tRG?Ҿ}TWWg}6d2)0 #<}4M544h߾}7i`pLZZZ+#9=s޽{350?Z*HUgGuuun-_\gɄC7nTss]0H$rF"H[lQuu$ٕ?ϸ=00 Ijkk_?f3[c@!uz_ 0P!˥_|Q~_z7./E\C}q?v.]d'#4%IvsɉDIM8yiJ]]JJJ7dR۷o 7 ׫ lLk.Iz ͨNj߿p\jooW{{86U$cȟ/~(//4| UFok ^/JiӦMӚGٳ9~d`,TH-Jc2ԝw)o4IR#v\ BJ$g?;tf %[oI#(iWN6@iݺu4o,Yt:R577gQd Ȩu\rjmm#` /좌ZIRssTWW6{ݻ'm۶)H(Hx|mTklUk2u0 544oիgϞ1jkkInm߾]7tZ[[L&dɒI%Kńp9UTTH;xѣG3.! BcfPz0bmkWrn߾};&7XηNg覛ng/W[[}+W ?krݭ@ `P^^n~=zTwuLT}}B1C=vBv]OV __e]pE@@oNN<>XB۶mS8ɓf[֦^x.cg?1޺kL"U!z?G ~A}ݗ >ҥK:q:;;٩f577vex@/1Xo|͜kvn!#%I?я2΅F5TWW2Ie#?TVVO^;Ixnc=vnJVǣ1royu ר@̙3dW_Z#ᰆ444~IRUUո)6[PPeevڥ .h``@J$ڼy}Y5Ѭѳ};0zퟚIӧO/p9TJ===ڼynWB,UEE-+%ߧr T~IXΝ;USSg}V]&˥`0'NU v.J*t&[ m0~3… 8yT|>J ,+ł TH%A$JqEI;3uZײ{ne LL>fbvk͚59?GVo6f˔<Zww500uB\T:::ѡn'?ɘ'LNx.d21!JFou\,UUU%I7hTO=$iƍY׳ѓ:]Vij͚5cBx'[ P$Q0T4UuuB".\իW4M֎2v_ZdI L\2T]]JKKeLT PkkƬ oѣGѡk0 pޮH$64M%IL(F|%4.K|ǭ$]wuZf̈́UlllTkkxF Z__|P VZe˖iȑ#:t"SV\9f9dztRɮ~YMMMںu>( 3nUz\LL_&.J) {߯H$zjLa:r}$4MB^7cvI҃>h+P<9dhhh(ߝXUE"LT~?t.AE"uww:߇*ix,o]:{//`.8yH/=%@>;꯮$=FQGn$=յK ogc>H%o=\aՍ ]VV&45:"wztm1l L~8˥*}I>+ %+43 `%I^Z ,,_sXLw}өJUVVҢ| `FPYr xCG`K0 ILo߾WTTH6mڤt:T*͛7gL&URR2x馛nPO|uRTQQ!3ᶣѨΝ;7bG Pb֬YD"1@ ]vrIɤ1 4JJuȑ|20kȭ_^DBА Dd$n*LYC*@F2MS~_r˥&?$W1(詧ÇeԔCfEnvҮ]t:~,iڸq|>N(&T0 x\כo` -[ 6HNSPH2 C---9iΝ 6Hy544ĉO U7T&)NڲeLww0MVsNYfʗ֭[\766vAg*PBzH ڶm[QN&7V=رCm0O[zJi4M( ؞CvRKKx RkkZ[[|\>PWor}7V2444N`rJJJ$IEW+\o:yH/=%@>fx%%Io=[$Izuwl/Nk@ ;v޽qmݺUHDtҥy^IࠜNC`/ZHD"YxLf:Ka)&}/s#bNdz[(A5DEQ% @ 0GRM&7F $ mu-[&I\0L 
ULTE)ۥx|ɩɓ'JL&.kK,Qֶ7lؠ%K\vǎS25\STt:Xp8&4yD*'?!ۭD"C>AݻWc|oL]<霑KHGW4S&*B---)$I544H~ F=y?'t)|e;vL*++ݻs٪K^wܟR^Zh4ߧ0AEVn[dhTaH>wP`I{+$zW_ީ#։)w5~zワveiaFn?i* WUUUJR>|!á{ξJ`Px<]$qAUUU)Hv瞣cXWd'por>$2>?^xUZK?!I,ߧmRF-Ŧxϧ&ITqcF/y;NGhtsbYRzzzJ(A^EќWCZ*++e$)L===|Y7&eeezD"@ 4UYY.[NH$y朼7uy% W:)y>_TJFM|)I$jnna9 SGZv$СC:ٳg̺[cnvԨJ ,(zdR۷o… USS:UUU{>0q ̖sX80܊R8֖-[~z)4US3|c=;A,LW^O?ތ ЁM0өF?@sWVU]״+ҋOЇ?<tuurM Sя~t}aX~D"J&Ғ%K٩:߿_MMMZ] BjjjR:޽{Р/ڵkWF{~_ݺ;f&˪ohhau}9U2444N 7߯1YDn|3,L̙341B!mذA'vKI7>P|kNVTJ]]]ڼyDN۾nGuCO/_O,$}WѳpXuuu B馛2{gA9NIRIIpH(Nkrݺp¤+++Sii9߲2K.pc"nmܸqL[XrxJxԤ[ĉTgg]jn[Vڵkv U(L*H(СC:zofɡjkkU^^8dJ֭Ct* * *3gҝ/ \*QN=ϙ ҙ~a_Ǔ޺|ɒ%9yl Zp?sx-p1͛Yg![/L4M1aDyy3 WFV<L p8TYYJڵKdRGѡC7 DBHľm r\*//ҥKxbx<~[ΝӉ'L&ǜѬKV^M@ cVQ PG¿_ao@LBVm̴u:ȲUz_={vꋵf D{Vy<I0iְiWݭ˗۷׭['4aÆxΞ9sF;flS!w#Dos\p?|͌rREEz]wu\}+tZϟ$>}Z/^;Ccǎ)LND вeOqgP4~/}[z7$ΛV{RCOIL/ͷFnt|𷫫K^W֭ӫ:3j♞xŒ4giq\Vuu$١uzJ+HRեaZb} \GcIB\xݫz>=x_H$2&@r\Q)/Y&2TRâ8N>c4\.|>AI{OX,#6֧5ުJk!Ɔ>~}>ڢF{cΏỾ0ovxzu'T㉢>9-2[ZZ٩]6nܨf޽[>Μ9G}TgrD cI 5L._,X z]]] f~m6'RXA,an >>[R5eOI,~FY ;rI|yo_{A-Z|A穽$Iw}i cζK* {9=sʸÆ jooWCC۫q Wre߾}W1?n^|Y;/@M|y577W:{>"@EN GN%ͯٲ|72o2߇4!ku֩_K,ɺeG.\P,??}ٖOt=өz+Ɲ![{@pݪ/ʮz544+ݥK{2ztZG?1}zT*_~Y/^ҥKwm魷#B#8PLNӲ]7vzٶz#].T=oF%ڵkРCj]]|#Z~}Ї#۝zdRjnnدW8ڳ`***d.\YcOiH$\]]]t:::2ڝzVx 444K.M<=ixh]vʹkPP4<όW8Yz]r)x|D goGQ/===c?gpoiiVCC|ƍݻw+L*G$=ztFr$I`Px\x\eeezg?50RK|Ѭ-֗~o>p8sIʼÆ vZZZ={M+S[[=*++4v/t9̀.E"E";bXު11)))g/))Q0K&ھ}-ZڦGW~ZzM6)el:fߣ! _.}vws-Z2qͧu֩_K,JcJ drTmDs:ׅ קn … FekC xꩧd P{{{AE#3a/~QjooWKK4001P:uJ555 {{+P(Rkk֬Yc??{۷YPH+VPCC.\8&=ut!=P]]N8St\vzRx:>Ocv{TYYq+/&3 ?w B4izGsN%aQ*UVVfwSNӣ2otP }vID,++SOO޽[7o߽{wOF Zb=r2MS`P:{~$4֦ .^~_/ё̶ReԤp8lW:~4MܹSpX:|_9mК:}uᆲ_Plf+rJIҶmƬpB577 o߾c,X=#jkkS$QUUzzz" n:PMMZ}Tii$iڵ'?Ӆ HUmmmFT ic}3nڵKGgqYm=g'šeےdW:ަ&r\2M3co~󛒤>׫^-[òetI555m=Ӓ/R! 
U!j_jXaGG_ ,XIܹs1YaȾK/wY|Et 7ŋ% [n% W46+H(d\9'YjՄEaCƌ;Vaz/'yIummDK^~hN//:j9\9wVIq&˨~[UUUڶm9bWHǪLT*Q֯_H$ XBpXzu V_zz-C 25F%/I?wTۘԋuxG^V[uuugTJˁBT2444N@><n|Fݝw) %IkwwTRR"ix͘۷YY+EѨ do#ɮ뮻*I>a{tbpv֥`PHD?^ӣ]r\A-,zeP@Lk;vۿjkފڵV[zJKKp3NEy9Q0 _\Eq ?t\`W.]&ܺgz\F 0EdRwVYYJKKuر)zG^v)i8ݾ}nUUUF^WWI.\(+׫ $#zڴiӸou_oo2TI6444i۫GM;v$Y7nԥKt%566^'NNn[n֬YT*5> )ؿ3*#G;;R~k.w7bzޮD"1cygT]]m>x~evRgguV[oU$I.KgϞU2*`V|aN=c]rJd2)4T~*Hx< D"Ohr\@/tjs9uFׯ9];~YW^4|i5i6vҲeޮ:Ia:p|>߬00*`ڮ]Ms:џ(-`ʦ՜<@IDATթdR?,_\بq<KtF5kdL65-=_ T@NͯꥇwN?t\io[r\e>XըڵkW}x\K,P*R2o|>{V|r݊D"8q"߇ <*P9q].@ML˷-IoёZ4O6.K@@DB۷o`0(׫{W/jǎvEi:K/$ۭ[nET[[+IQ:V4գ>zcr\Cۙ05`*Jx(B֥%Su^zK}[_R/g$)Zz K.zzww{(өJUVVE'Np:.6ݺu>zK+W-"Xg׮]zti}=ܣ;X%ҥK:|.^h񨻻[/^*807?vsTp8T]]}u<<e/W70L5YSSSby-]4]cR1+ETsU*TȂ0$~N}; o0m/}E7V;w#LdE `޻~͒|8!<Ω3akG:nc$IYo<~BA `޻~v7#SgڎΝqlڴI^wz_P$G$Iح+zeWk|yƑ@!sNl}JLЩtu-!7#I=z˹P0|.ӟ CTE)K֭['9r8x~hc!sW#rNHɇ{cX=W&+NS$_^ǵuV\.=cx<}`P'-P}}}F{l_Ѩco?uui͚5r8 /~~MFP;URR"Irso'~Ü$Iu}y{?kSNOk׮|*ЦM*߯#G꾓ɤ~LӔa2MSب9AE"B!kժUD}}}vi}^nn;],ar:{NV^^~﫷W:YhrzLXB~Cz/Mx߹NEUwTUU%I,)LL*… %e }b1YFD3udL&UZZN6@^<U +UGnL&e$4M\.܍vޭcǎi߾}:F~ @_o3NJ/wbowYkT^^.-IP8V:ww1EtZpX$ۭYUiI>OgϞUV\c=f_Mvkk?=+|*QG/<rU4ׯW8.o~m* o d{UiCWepEuuu2MSuuuS w1Bs6?^|Nsqt] 7𜨉H$gRkJa}'7JxW!ަ{ ?ueCтV^v577KҴ,_(ƍ3<ߧ$I>^}ՌkkjnnVsssPom2@Qb Թm@}ۺ4+nvtZŋy0E2S|IIɘ1Nkut3zxcsr]}VCC'a.O$(ؠ용L'0\tqr8lT5665c¥NթC`pm~*JmR}}ڪ{GՊzggvPrqF577̙3D0@s= *kڼynjkk'C=$Ig?Amٲ%g}}vUSSd2)˥Np8x<0iϰ*@UWW_PHuuujhhPmm><˝׭[P(ǪڵkuIt瞓4\X֭ު:y^[oUWWWO-0m (16c,JljLկ~e_?q[,Yqe10*wr;jgKt[`3$g! I_ޜNztGTmP2_1IpBˤ=''j6Li QDDDDDDDDDrG%{8ʋM8IrjSDDDDDDDDD$kwN ߗriA3S*FHͲK[2qOpظ?DC;VIw4A:x6ÆP$5"ܪCV1N^kӟg/?TسLI /G6 4]?LϬgHm<F/+04@blDB5B>#aøc<49KRmPHO׆jsn?~IBң)5*'G6䫱LZ|X<P7hv8]3ȯm޿-!Up[WGYO>?ltXUՂأagrDXҽY J&!-GVrC\aIr׻"fcObD_ۻ<-A+JQ;SDDDDDDDDDc=R5d9m'l% ̱}gpNjөa`=8Xw;f6sJ73$"$T(8S_<n?ra 8кv ?/ KI˓bw"g3'1jc>T Ի>m@^xv3`/ڊKݬɗ]zx׽;5Sc0ƓekM{dBtẴm3Ok-֋RgjZU@W+pp>IRWxf8Uo Nнo3fOO%nםfNO0ygǪxȨOX.MliǤ8Je$ՌeL'"cQ]gT.|{)4R? 
1*]g)f"""""""""gvX.._Jp~2~ Kkڗ`?ɩ[]mص!Ŋq,,gS'%46*[Qpf3`ؼF޺&y$bٺ"{4Їg`PQ=eBz=ӇYG=>ʭқVSʦ.=aֱf5?ie[AO7!4}t;c 0/vSڟq;ؔ2cpV 9}=ٻ~- xJ~xbx3noG1u#L5ǞiłqbӖ ! ]p$o<J;kSp~kBz0>E9>y*=5"{,* =K}f'ȍ#v,)"""""""""d }KWf NH>˱c爿H_DXayp7^8/pBP-`W-3Zhga3MmU{;i}  _}6mqvкVζc)ۈJP;rS*us|*Glx6b)IѼ} S,x)KLѱOv(/6Qs<|cɽ7S\δ/<01zZ{z!9!d;-kLG;A AwN'\ġ]OI#[Ϝ>K&\>`ķ`YӜ-Z."&IweI$9U}L~vϥ1 ޘ&nJ[Z5Jpjsڤ?} Qlqg94 \~OLxJǩut4K>wǶӡ3vDXڿVr/~.rц^nA?nk *IE+14Ԝ,dqչr |B#T5n!b|6'6[;V=Yvs:l6;6`!)&4=xebܥ;f$|!vbJ79oS4k9Z4lEOqǑ}c9ߒ yOUSDDDDDDDDDl>փ4w^:Jbtq cB*#?z|ʧIݬM{0+Q:8q,< ۔Hrk}lO'ر%v iE9#ri;rӞLԱӘ[KKuנ=(nc.zz۱yDj<(U@<{6WӼ6=L|*oo~?Gz\ΆyIz)=Cͼ۩KpGӷ_M'qzW5_|Gt{L\NSIwԟ.? Y!|c@ޘz0O/ޢ,jGՁ""""""""""#Uʓ;80 """"""""""'롐6nXR+SƿOEDDDDDDDDD_ɝy(R2ػb &KDDDDDDDDDDcjժiWDDDDDDDDDDr@DDDDDDDDDDru""""""""""rc洈:=CDDDDDDDDDDr\iuz:9-""""""""""N""""""""""XusZDDDDDDDDDDr6EDDDDDDDDD$""""""""""t紈:2=CDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDDDDr6EDDDDDDDDD$isZDDDDDDDD$'E,͆6 g mN)loPfз]7/~(&m~I_l*z%l{6 m"6alKU2{c/Kgq,489ZV*͓Bb̟a8b%;/'2xBD`>u7q IhsZDDDDDDDD+{ǍfK4G0#_=V}?gOBkޣmۏư<]Bǎ#z:'*z_JsyvybX9=$e;gYl}wLic*21iNŽѴkks7UMSM """"""""דࡿvLO!R|6~.+L^f<1rYۖqdw1R-=FX'{~4ң/5gÝ0˂ 僼扏XC=eK@G_zԈȍ.bї~v[Rw[n%Ra5y%O5&ۆYWc٭Qlؚ|r~B- [/8LaR:ٰyE& faϋeC1/“ZW f_-ͮhLc,+ocT(6`-45fOݺ{ ʩkbbb†V[K弖5[XH"f#8khvt{ {75"O!|lca4ډLe@˱qWW}/MX'ܑ7 eq:]Q}dV9b6~i:r>-C4!O̟3&kԝq$u߳2yNoC HǧKEnoDݨM@c榐c=j?Ҏ"l./-ҵOȅg%à<'zd?9-=אE=.˩ ZX_VJR<'>kuu tGI:'gPS[ o/]o5ƎHgSldըUg^^yv/7I{?˯G2N#W}+ݝ\ >!+:A~n⧒iٰQ4vg_jh$Lf~7} coI 5ckcj78֩}LHSsfQDžau83i%s'M_Mq0^,Ǜ{y7Ђ[ ;vSڃfs&cE#[rU;QkMrTi?̜+uj3y 1~΄%_.k!o^1rmF4͑ Fb:b(9+*q3S'>4Sf}52m]X;L wW׮bEfa<&a)yq|&0K^ܒf3ncq^/hy.-Lćo%|Uú`_cbLDdcΜM-g-ϳyAQㅿfGuV1߹8[b".Ъ\}|hVXDX.̖\k]]n]u?\]ZrdQ>x`u,;11ɕ87I )!_[AGkniUx1MYh:.ѱ^k_/7Dn`oh>ٗpb:rktiR{9E ݾewλɤȵAc洉2.laO7wz`.5D1) [l2./·1d9~i鋡0ps>UD†mp+opSCG/a&N&L&}!*v ^7c\x_̪&:UQ|kO6+y46J~SC 0 &citL͸T5+zB`(Kd+bIAkW jF:7 0MTl9BMX6/-D5OܼJcS&.bV\q.W&sƚi@NS-f-(,s6Z1ssp$ӳ5+NңȷwҡOD8Am8h&Nfޡ<߰o~!ntPJ% {V0ޛ.sl̘Ħ~oZ~S=@H񮌙ԅvMgC KZ 
5XjeqbUIY <Qy g՟e$U8(o K^Jƍ'䈼web? IDATl{/S.bvy铇p{1Y]vhEPLjі]gS:o"`#"ᯧ]͙fvR yN42=~M.9Pr@jfO_}e"Q2~Ժ nx^&x8U\XrM4K4}Ojw\+ES{k^ʼ6W_Jnk['3\| pr~sv֬XYjj~<8U9@l>/DQbc ;1f9%'`.Af@\uwFX4yձY,xZ)k,^KSgf)Pv8SU.}q\b;ce^|)f ?[v|-dٙnw"FcVQZ~f7>!` zah)$N]^ѲwF,=%a@1vJtǢe_ &irVD#ƽ4JQJ>lg,=t )_$ɤs\7a@>r$7gT.sV ")~˛'yt^M!WNEEO!MCwhD{2yC 믾.<933eit?Au)A[LNC_.?:AnxR<|hQ\K,w[X$\윳X%e^C7Z&9\| Kr#۬Kc,OvsPV~KR6c#唜`]?ǗU PEs s:vW>b`#\¹n-8Yҵ(Ǯ"̍ڳ[G/@Ҋxya!$H< lذs&Gӿ6l NI8>I:{Y&m,È:8^eq56e GY7 $ζ3@w3<| pd#Svq|  ;'3 0eP) i=I`ފ<4cGن]lMYkx<q8)0|-bSSՊ1Cx[uW_Ws3RB0kNĉ+R($d'-c&bZQYo܎O]Xr5ܰe9\_2dwyZ_v,Kc̑l7uسSٛ%G"O_67WEh;*O!W*Pծ$X/89M>s-M&}oտns9-u&+ע\4fniR}q'ŸH!R|6~.+L^ffC՛=Rpf>ܙz,P>ȋ$G2pǽxw/`7|x%/*tJchw^۽@[=4cpp3րI۞c'vHY/Ή Փ;5fl049> T=U:K|ު2uZuMO9Aquo1v,G[[FchKӧ~FǶwѬ;[щ@1~0crK@Ǵrn[]g3Y>8Qb%U\h)]\nW_q]"km5eXmxz{~#qn?,"Ƽru[}^({lƔ?[,i߲׻OSL6.~O6Zv{{Ovc=h?꯵3Loef91@e[o&ܿunIEWqiܠ:R-C`W|'w55x5jG@bc&34)׀݌p$c~nzT 0Tn\5{I:i?VL4MA_p=LxeMrŗJT9o3{L&FϸUtdM2!&EuJLms11Ns;t,y ck5W xn:x-uTZ1sScW"""""7__3~>ȍ,rI[:OiL}/""6"""""7Ǿxty6EDt_^Jlۓ.rI[|l688R6_N ^-+I10.|'1ͶiQ/6[Z=Ϗ3-1q6EDDDDn^1xEDv~|g 救sCڽĺX9cYB):g=pcqIꑴ9{*Zc_m~uY[NLO| O 7|."d}/קD^N/`R:X:D6SGfabS?7-B)QLWG_qڌq,7M̬,lSDRpkFP |9 ~{@&ondŨ)i~턹:vKǎ[\ ͟FO'u*ɢ,,ifZ{cxl6l@|d<p;N$vNHyOҡL$ձv X9g~1zY%ڲ"mN5ԏ.|yp::(FxI0ed;:vKٜr wf^{x, 2 ci4A eŒimNH洈:mNH洈:mNH|sİ"u7I9tj%]uѴ+?IIՃԭ[7ݿ漱%pu:tnBHH30TM{*<=xǙ=]]Yg^2E:gX5n Kݺ hUO2~R><*sbߖy*Ҿw*4K_{&w=o>}ux6̤,^ݖiTe]:tCQ= {WWqsƇcy%9.;sչ&#K?ompݞ]1&ki;YΡEerMYk/6+k,jIkWcLr9lџ~u_*ԡ8> =0I3x}G7@xkNָYnXطgF|@Ht񊅛7"X8u<N&-n@nN;L Gil,jocKs9d<sro͓| X)_m*`"_Ч,?'g;O[(f:ѩ&&Nl="g9]ͽTcZP#Ɛ;u*<1m)+73ŜvB;%)NZ[呭:fp_xIt*n}U2dVt۞aH&E ny"a.9.KCV.syzq1bk4Cݝγ&)6np+,YfﴫrIJ< &1j}/Y}2/I)|3z3aj+"+I|1I˓KxY\),ĄGIayV㊅Љ3s׼_7ܨ2ݜy퍏?~~x vIxG]+wR85%ӀuCheD` .jcӬ~?ulZ P%n&8V,fB}xe11ٲ 򶮲I8͡"ԬRB)Q/UCe?$n2OvjB Yqm銏h:Wμp%?σo` QT -gydVGgnf>Ӊм ʝ >fla,<#xi!p 'WeҗeH uY~شƙ w>w9'?I& $y87{Ҳ~]6#bg;ϳ~Y(Vg8V2-9.Ź,3ӻQ FǡvŹF&Do}Z7s\*sY]rbڋLwc96]{V++k!}6/_x{Ct G_a< |ޜpoR-LOdpj,_{>ua#X&ż/\OٍrKԐz\NǪ,x$|_rҹ%fӝe;yvN]8pW_LЀ/X|Ϋ#hRuuyX>ui\ 1m8;44x>iM!&{L>{fGfu4D=u_cΒټ\o?<;i/AѢHM^ 
͏Ig罹,*l)R3/W,66t_2X"r&ܐ,튦/ {g3ϣ7S/fƒdq;V F1zSm^œzaߧ7ɑn4xh4,Ǖ͗Oŋ`C-Ti8cAqFXNq_hL{++k!}ehx>6'~gᵆTTCT&R{yJԢ@^o*#DT2EpYnX8NbJŞxbfҴ#tlF}mM9k ?|,_?Ć;>{ӳ8XzǗ|^$K{'ҴS {\0w^_$0߿ņ>Ei>[%+cT=dg/>g]x[vT̠ȟ-l OSzyNfXB>xG~D+LB>x8c9C< P/oV<2/[ҏ5)Z,M , wij_%Bx6N2\!n%6t+-#ilյ6PIcMW,ΙIVsЭԅlLu(WMQ41d:ZAL.LJ3kc7#WqWnP^uT̷[#,a9)rMX̡s4<-sgRlܸaW&sNɑ6l WVBwȔvL|qq%67Gl"^{6|x@C$!& ީ?~Hb!. oTID rtв{u4ӏJw~<ۭPq@ƪ=(W4=?`op-IitpM_гosqGT<I<{%st[.8iw~:͖Ve)媚I>A<~Ll7\>,m%矮O"y"׽FD|b:-tdVTKqjCTVUb>x)3o;#{QᛘKL?Q?M:_QQ浧ѩe}Px|\]S0eKyhkwפ.t/)6ذr\o3~h|Yg\z=n~}ROl;Ք%5n^~$Hx?㾙xqD%Z'^j.|8bSł#x{mDn4kLq'ذyx\1||fnO/F|2e5 xt>gH6HCov +לtM^oH8y+@ostߴnƙDDI,6_[?&>n3v;vgpP [5 9C)TuJ4t]ǁ.I/Lrm|gB 6omL8u!ݘ; tU{ǻ$.pE]hij98O@t..O_̇gH$Ÿr=HX2x~.b,g2 pvN-4fこn8q 'Z>y^_)\ߪξs9g g4y^1kǕXJB,_nm}Aw?[οI5bkRǻbZ &S߭r\o3~Z=m xx`抹KT-9'P¾_o&kN:p6o XrcBx-;J38W`ł 2-[Qh8a,0pGss.ìbhJ<マ yi"~i! ,`+_O_cSD≳8s['j3\(Oc;vc{wy[ELhqx&cmta~4U[sYoJ&$/W~|=_a,˺ ]%E߯s{9swΜ}Wo~5 $JC:=07e71e$ݫjC%SX=࡝i2JO3W 0Ax$%>fx`3Y" Xp(`0f@LӖ/}#^Ndkb5ZwtӅی0h泌:#67x1Q_wq*~kyٴ-e}a|X)sGW*ctݼ83rŸ'F0YOX(C~eC' rYYA~qe2 ̓ytR\y%V5AsBi^jg}XrFtg@ּeÙإ"\FIxtVCsE%϶ 3QLqt|㲊:;3tWqs}/g&@kUZ@;3Ga ث1GQxotgmtD 4'gyv 7`7i2Bw1WL !߻C*nʄ|x< """""rIlb `W)i{b &""""""""""O""""""""""R49-""""""""""N""""""""""R49-""""""""""N""""""""""R49-""""""""""δk. """"""""""x49-"""""&6)ZP+x '+""""""""""R49-""""""""""N""""""""""R49-""""""""""N""""""""""R49-"""""""""".}3 661Tp`~=n:O3"ۻ0+qX+˂ 3_6MaN;\!~bغӱ@fq·hwZ6ky Ώ|8_0gG;wȲ/sWuw!q=Gd??{Ëx䮗 &qsYmۘtvO> tẹ95s>x1r v εlṡs![9,¹s94o,طbCyqS[@Bp5m;at>/ ;ӷ8⺼FI?#M̟/eOU*\U9u[,iG{-y]2y;ʹ ._Ώ|8g7ty5_1O`.Y} Q"] nI9w92Ϫx8 Y67}&_ǂ :/Sy# 3o:|]n3`P|'ʍ+*758pWo23<|1)s`$$_X4 >Ũzlћ)3c3ձ)<vj8 2X6}L.Âe Tx+Rvy߃,gAitV.)vEҥ[?p` !BW筡DEGMtIH͗P쵗1ˣ*٨'}GHֻ.×矟r9=o$K6t`g{n.jEy퓬Vf#~nPǍkm36rˍTr Λ\]{%wcG܆9|_Fi R#5Fxiҽ1εٟӭR2zV jA. 7t(JCkq;ƟxDGDATBFMNg7c}ac{HrfAuǽt~aeN(9啅2uw}8\TzեH=K{1bJ"U+1/LvKm^C70pD-i |ɐˉ|=VKk$f@hݑ|4n"3I޵ ulp~`X(ņ9e?\d8\^]8y))\{tciz.C[Rʒ<2Zm1x2|̗Uygo9ܿus }I1O͏Ly ?K'׍5yy O{'c> _";3|23)? ӎr|1/~]8 }<ĮQ7P:7yzR=|Ϝ>r/VП6Q+ lSA7Yۈ08 gʍ/7 kp^+}p.}dmxa)齫8m{]O/%N 9O EHxʉ]pFd<5_ ,QRgr! 
CHW.ӇI8ɂYW<{%Nvb4y lJ9M2|hS'=E{~.{,]F=yV)"#Çеx'z9n ?g-y7Ah D*8.6v[*PhEl>\R1]:O=zv_66әd:\]X/1oAV9.aBۣL8}2ڷX|ʗge #j\o!q y'9Aj홰pf~k\tŨ޸>%DZmgXC:e-)n"u<ݤ8G0I's}9QjYܶQ*vT+k7a58nj r A__|<ʡ}EpfȵD<Ӗ=]; rðNC՟d`+lR Iޖ\H%lAAS'T )F[;0[[1Hث髊S<7kWÍ5g΂׊ +ZϷv W+~`&+6e ?ya9/ًpb at|}+iszo$f/e"K9ϵ` I? 2,s0{++Wp-A pK'۬;t?sX'wHOqN\۸^,Eqf M7Ƅ6I*MÎO둦f{)xz$|kV~ ^bq:=w)Ұ`c<|Ġ߼Xxmu_GbuDk5 `+,'lbqڙ^IJHbwdaϒ TҰbUlI`LK/}x9ǼA񍯹mt]pϽ,38&Frr VzO3_7Rm_+58q/ۅ o1&ѡ(\>뢭$9l[8z ŃT_Ix,H3:|ǒ+"Xyk(Gs4сrYB3,ffķ|R4]<&'ƿHJӧ4 Rfz|ߘ7f kNQR-[ŋ|#L㉪yhςǑAE 2<~'ّӓڦG̦7z/Ǐ)=:b-nBO;z_߿` B5Q tx?&r,q ZQf켙KpL6q'6;q"i7La{Ŗ4"F y`*xfp|{&vppJzqAdi#c^ĔC|#ܸrI׾rq c&ex.W|mGеrPmh,#yVs瞪zUҟ'/5 'X4'JIw:Akz#WH.TIԟywcva.kIKV^㏐|Wd{X&>W{5[xDMi0IG3LH ^{FiKY+Pzʐ~0Dddr2ߌܞ"́G ,u{(MۯСDTiHq&&5qY(uA-aˉ }3pRF+nOTo3=K{gi[ꜿ- {i\<>}ҁɸ[ 46rrˍ,}F/}_Lr\HɜCp6L!(gBFhdQiT'ݙF[)iz>QI[Ϳ0x4fBӻ+&_ ɅJ]ޡt j7eBh傟)R-}ȏ&2!B5ѩ.eE|x< """""rIlb `W)i{b z^DDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD i׮]ADDDDDDDDDD hrZDDDDD.MlRV*A2mOV Cg=DDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD ,х)l|NM{mI'a8} uocylKd.sfv4n vrYmۘtv]X#wF~Cy4MY'`=m TAcyMSbpCXӎ'^?w:pĊn4{;R.v=)lp[;΢ig㜱u?m-n{hY0غJ_U,c8whZwxwsOqqw<<6fP#y[q\l0<|=J}y=rr#Ǔ~P-m$E>=ɹܹWNd -PcjܔgGHm<4 W[,]wD[ITQK)=_a{ >|>dEwY-,^3>~cePe,b+k)m_|auvfh$<ʮ t~oFD>5Si2|>_,À3~BgԆKmSrŠ<-8f67.1kp ]AwV9Xg^*dݳ%,ƛDiˑx'Kmֈ nGf.cM2:Q_r `ϐ\6'ڞ:)`dP~9 uzsOFk>4)3Ph+N>q{p/391rBn^w8oqDZor߸N+-._?ǒ.50)4W:~̮"eFhCJV~Α=y~aŪq3b>¾/vڛy!5OFɨг1oO~<$.'}GHֻ.×'͠l^~I1gKb.Q[|J DEGMtIH͗PᲔ_ܣF5fsC@ {yW)BB %k_c%o2|e:O6kw{W&O 8z8و.Q'-x? 
8)5f3QFmxxRFqݛusQ 3IVf#~m 2MQpaxCr[/9`|O}U^ 990ob_Js澖/}3M7ƹv9b#xuYJFUCj8U {~fCM<8%bZ?N~bW%C/'Xj.˯eȋqa37kNFcvQڒR'v%V IDATx>LRI]J+Ywҥ}|<J=gmFƷ~:~9Br6L^t9Iz[ gw(0@Ivٶp5{ХN"lK[YӍz`RDGkS;vR~-U3v(*A}'Ǝ]̟&O6f98bvٻuz *E¹RNS( 5/se7} tod4~yv|;f(Zzz 69} ϶Yv es %#W}d}ަ0nw3~t&,0аO!Nca{3_r-46]„19qzu"SHMPh| cv<9r""3a :͌-<&"Qq}J:cw rʨS:9y?|ƌkRqӤZNX϶Q׾|ʍ+,7<9Zy1Ź`pm}a֌\K3m +I@stR$=%[a[L NBj,` " ?qRH1jځrG\^O_U.;ą0?ioW'#4n5o|PAd 8ټr" !nNI'J lgOKTX@f')\e&+6eyM܇iKllC/9fvб.U*@=.s<*چ6U wޗomytF'e  ~5g΂ <:WM~p|{fP$98nkJӦM0~qfpž~^ Z{Pl \{)y {sx[i2HCym:==HcQz<̌k9878GQSy_)Cl|FØX܆[w`9&ә oׁ|e;f19qyq4[=hȷzl,s%{,>{_.s) )7 7c"mPKVշڞ:,ִkE[VYuO) ,-Ă3Kͷr!ȂÞ%a!Īy>*ѶUs?oOhvjruJ_ < \n}GOKv2:Nox`M ʇU bXFA--| 8Jr<N")8!ə]h@N'$$+`s֢KP/PLuN-E9sf,& ֳ3nS4%[w^y[ң[sOfmoC7ֻp͙Xfk!l VK9:BTE(lK{SlIfLro35-x| zkn]Xb)Q%᤿X3mb6{9~|/NIk6q'6;q"i7La{Ŗ4%}:FqrCP‚8ndIv d2h(63\ \X_Mfχ8ul˧m5/_n֎~p* 0yl89dSFu .vјȨ~t^1f.fϔCMrppJz@1>ֳmr(70o/36Z73-su_Z棁yRmh,#yVs瞪zUҟ'/5 'X4'Jg56uݯa?}lڨf jR'gݘ~ZmҾf/#$%U+^2ac\ě&lᑜS9oVˆ ݚ1}`8T:b G<3MYi&A@^{bC_l OyuA-7/U1׫ wOPyOF!8з~>AWDj1m9ݫjC%. S˜u7xYlږ2}4#_ȄbgĘ + n^י 60/'2~I;?t `xa:찅Fbԇ=qksWo~5 $JC:=07e7189p,>x(j>˘y}PQI[Ϳ0x4fBӻ+&_ ɅJ]ޡt j7eBh傟)R-}ȏ&2!B5ѩ.enWx&DDDDD&A^K1jR$d@ 1T1梈m1JM(xt}sZDDDD.ٴ%]Yn,wz.No;Dl,ݿ@~vRMX,9_q&u:~?ˣh/;nG@@x\<{;<Л""""r`; U|R7ЗE?9F"<$}C.AX+"~ dƲ ~,qEo<a9_G7EDDDҫ܁cذ3tj/,_/8zLԇM_E=vV=' $6ޘOyd-\X?4Mll}Kll'>Z؅[s~7{poXbchڮƝͺϼ$@iquKgۊH.8>ivn{)E460xSBy߹pp?rбCU? AǻSǯK/~caȐ! 2I_w9ڍ͵<ˤMzX_1mѵާww=cALylcy9U?܌do:Mx'>A\뀝|qܞz'jq \~U#c8܈{y}c"~籗 !` D`2D,Er[ه|5rE|QsG^ͩ[T5ԯ90wF ~+s?w5n>!""""^\?dx;f=S!+C(@h*F qk7Yڕv{r]?/u͂wPܜ׶cV2NC(Lvqe]/Ma:s-ǫq֒k+ܤ08 %7e}0qy<7v]yY5{O礫Y7^oq5cV(Er3%#Y8/. 
_6f V57 _/c~hCfR.sMa!oc5[49-""""^|5vdwMߺćgTJv0:gA{es7[9a[.ghn(M " snRlshEO$p#}8^u[,Snly91]_flt FPS\lfs>ywfi/YJ>)9Z;-ᗟ8K""""r޴ ^T)!6Rb!ؗbȶs=x鲜+L9{W}+\V]zNC@-T[[T ?06ZE{kƕJ号Dzp[}wOU=Œ홑&SSPzr:ΓGIl8HOD;\'ٳufS\wY֒_Q偝-?c?1mFY_3+k*rʖU+ Sq'afYIh;[~_ff;6`f) """"ry1Y(d~ҍh޾E[΂`߼vVVsO߷iܞ5sܞ#vfuqj <ь|ps;Z4lA o$L5&\W%8|;]K2']:B$k k'1>ixP'OrY JO*~>e2X&|ebA I?˃`1+s1=VX1 s՛dj| zl0S[KA0+bzjmO4Kw9lI>w,&N^;2js ϶lTz5>wH#im9C;aݼo#c=ҍh^>ss7~jhQFO6m*ay&b؞HDN$5:4%L CDӣ|JZOēUEԥMO# q3W4ŪpOl&bjӾE*ru! ogLh Wដ &ǣ;JD:[ƶ]<W߯nb_3~≙)䞃km?{?R^/3# ^ʓӺ &+lf=>fL-_0/S+x_c<84q3Pm9^ȃPȃ?لgo<.\x '6f5A/daҹQ0w'l&;1"7)R_a ~2Cn&a(lW_Ҷޗ& mMBoTخƚ?5AoNRϰ16_yk]/oNKxysZ|'oNKi17EDDDD !y}O-}{]DDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDD<Ga7EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD &EDDDDDDDDDirZDDDDDDDDDD U!+x{u7)i8NsM&lW"][֡bt\cxr?Z+cp5oEDDDDDDD$R?mE(Z(XVL&},RxdddƉ'8vkTfS-  \MNkЯ^~t=9rbZqZFF{bĔ i;'5Wj /9eO~ L2IAlxHHH$=5O^}r§i 9'""""""""rz]@cT\uhFmY?;_y p1]$r%6hVo;/lp1/hZԶfa}N[LhIYWr|7{.yϵ"y9ӫ/MZuRB%xl0(z}mϰ9Oio,u&ʖ-CxĻHv t3""""""""rqv${Q^nj~4Z6=8xlKq-}pn)ee_k6pr0nN'k|wQq۠_r7W/}[EDDDDDDD⽿+5pd Ýgތ4)Q M`7㣯-& QWz%)E2?t25v/;pN&Vs/jD毌[ Ǯ^(b&"'s۷,M$$t?ZB\hJa+‹M\N1mA"T羪a筼C^DDe?- _LVbnJڑD=Ģ/`]R5KϿC9;r 5J̍`=m|iN.??o%Ky{kѲtkzL[oⱦ)%*н~ }ǺnoQ[ `YVRtWȋpmyy?ǑF n6/?_ä(_RXt U㻹4= V";Ob9UD~!![n`yp6oߛn}={@whkIӿxi~EJR%/FFQy 8)=[rẂ4jהɽטgˠ-w{=qw,Ćtc$g,@B6CB.Mis~3L7'IMsf%pm8j2 6:й{U.~=<19xX rIoR?9Ǯ%ĝ8KTt]uybtk?Ww/ٹTY3<,wӢnbMz`X֘@`;\=~ Դ49?Y^U7ͫr 2._ଆjz,F/_^ӻx])ZVs{%dLqwd#E}  e)V{Jm}ʑ\+SN.RJ)RJ)Up]ظ|54_G{s,3Do|J[ޣUp'h;ǮeK8Ϸkbв k\m|1Z'[UVs,\ym9u]W[X{%j7bL]pXs|y/N⥋ݝBV6*5MF(mJgSsq89=KdxgD\GNar>~?ӆet>#½8:֡s?>ηi-+RJ)RJwL4OzuY "Ϥc|*ɘq|84 ]idXط~Jimoj:/c^ˑ+RJ)RJDՁ{w;ĂٓGߌߘ1VV9=>֣V*'O3g(}c:#}0hW37k gpėp/s %-*T@*mЧAX88s֝LcpZRXytH9n,J)RJ)R`<KwAJ I* 'O$6ыJ\;6mX⯔RJ)RJ) FrZ> "8$~U 
)\Dl\,u6pysZ׸چE/J)RJ)R?{leǁ$&`Z5(Ӄb~4[g6Z?7O6,WJ)RJ)RViRJ)RJ)RFiRJ)RJ)RJ6ݜVJ)RJ)RJ)UtsZ)RJ)RJ)TiRJ)RJ)RN7RJ)RJ)RJ:ݜVJ)RJ)RJ)UtsZ)RJ)RJ)TiRJ)RJ)RN7RJ)RJ)RJ:ݜVJ)RJ)RJ)UnNfd ijښL,"GrԍZR:WEa*= 2t5?,!boF.ٴ+RJ}Mxe#r{;&7G[g.¸}z6ɓݘx=uvc N/~q*E'!ƞT!o͎r,p>nIIuTNxvL]0ؼ?3&"ylok-aL4X/2~\QȌoSdJ%F1c}M t%woyq&C.#g-0Kz`H^a58V`2\׿IyJ ;9_\k~b|x>hWPym0iw>cR>7?s6'7=2녜ĉ#~Ijmg~iKbiB+EMb~.#FϒIv8Dƛ2ieX~L1˘Ͽyɼydr*bQ#L(RJA@*u/o AyF'"˃eʳCV% FgL=~e7$IڞQR K(WrB3C§4\4)6qRECOrI%!O@yJy9a|}#t ;k8>UN}LL ջ,C/@*qC>VʁP%ٔt=6Q* fu+IS!|@\+5AJ,N{oqt\:H]VPq(@c~cOǙr=#[ُJ$y_{ H`eJcf8>_g{7'Ϝ66wr\8r+>qy\\|Cy;k\~19o7^W1erw3ҸT8T~95ZI@iW{^53x?J]'GJ4^jXJ<]'bn/UpeҖXHnR+r_U_1W&.JEģ=b^uQ}-yY9,cK-AY?z#d'd4_95dy?gDed&Ob2{xa6˨ dIDb;_3KF2&)r`p1]?Y_F 3̶߮-qTIOI뵇a2쳭sH9-'ZBRJ)D,>{GeKM~\ ToHXI$/JM ׯn^NVHVH-])PFlJ_ESK_W`il#rSb\pinPy~C$&JFw7~}u:]w5Γ.뾖?'ң$|gɞ*;SE$Iz:Dܛvݍ\øoo+ҹW/nCr/`ʱ/JrX[K*\pѠOV :Oɞr MN.1KA⎻yL>xi}μz '\2ue@[h/bDDdMG{):N~X.o*z yW7;Ϝ>p\{Yssum48>}ɺ#_8g8OgNt,5:oq #DZow3Š9Y/8+ hs. _ ۾"&&teJ1d4E(?H$ȹ] 4x_6HV˛PBt!e )yF^PkO9 ͣY*tn$9a\{+dy9)hYޞDV.'4%r:zP1riLf)%2b6%sɅlH]L+&lkBuBG+cf1ȔolOe]ɟmPvq_߱Iib6Om9w?kf]RJ)ԕ)B(Jlz@ iܬl1S!v5X^ !dE]DdewSVIC*T{D$U]~_-μ˼)1*=صC6^;ٜNMBKe i#fꔓkr(b^V,R"K[2%-0.rOC=Lx쟼ȹ_HG_jCd運!y|?I"b"f1C.%e(96b=]ΒӒt,ss% =Y/9}MgKodܨq }bdY~Y8L0YGr ͱ9]I$:YoBRJ)1`cy4BӲ*6AV(&;T>J:#<,.Z$`zeR4_:z4D$M!P^^ޖ"ֳSڛD\}s_auv-[J˖I{$b_Z ,3'( J?/F(:E^Irx \qany!T8ag 5h\ra{ a\7AOX-rtA=V%{S},,)mP9yxɹ9􀍆HJ 3Qd8͸Og+b'@9&FUkD*g~Iv~RN3y~ϙX̥Tu4jKc4r9gE,gJrW߽XIw-^/9>dxBͳbc<8wo5jhd0^% Y\'F΃_?]k0c&م3gcu9q(n\Z[^^0+s9{&KU|bT-끻o~XHH֓85eA~RJ)TM3W~/y~l&?>@m6jAą|c YR5M~ һs; H')];i i7ީl{oj `X; H9x:,K:: VaC:ZAoJԣWV9 6Z:=ϳao7d'zo/KAq.#9)qy~~3͸W1Kyk~@9hIU}\^1y-Sh5Y_n 2qvƟ ǓPۓOq<[سS^3y_Z3qbgIm!BR5ڍ'9*8f$/?MʦqbR6DZ.oϙ`O$2-_) r89իQ/}Iu@ݲ^~])ǬVp,Ui)V/',P(JsRJ)3w7q!XL\g{QvY d`:OITׄm-f}i|yM;< HHp@4o=CΑ} _knϐ,M%^T%15{}x+Ԭd*TLSShu|2vvkOF<N/7H?էǗ1<a,YHy(6.3EaY _i9xmƩymlshs3wjRW|h/mS.亱Ռwc>fqsnjV&`ma&22Ks4%i qkx~Q۸pj><'rB(C`8#߬n r;k 8ůS!ôOlp;+~/)$N_!uӠI6Anp As*^_i,7u[N1w.Ӏ!$2DC%?Ww㟽~Dخ+j)8gJH?ʤ#sOCE'׌M6 }ջ1ou.$6M׵V=MRX]Q[QeT^[*u/o 
IDATGYsh׌䗅q5c5BrxPj֩d[m^#!O0GDLw?U܉ZHup71n4T CTvs14Zғ7CT.3Hʣe3k5 /ѓHɌz= rZ1*yz'XP=;=2f8g ^Gk?M"sY̾3%ؿGגHux,/1/O1w|̛ҥeQlC?3쏾%]{"9!kOj9s/jCs}h󂓎杧xb8wa7KP=Q۔/sQ Q 5ҧv@kR={ӯ}""rפSW*,3זiWŝ-ٿ&C"*=Xxp}Wo_Ae1|gNgd鰎R{-L]:+?%Ky@MY|2MDd]kOB^5vU+-z er6# x=Y}4*)UOvPhX:6u0 s U-"mp{ٿ>׾s)VyyE6u VWJ)RJ)RJ?mJ' +fwLo7p~_DkOZ~]J)RJ)RJwXRJ)RJ)RN9RJ)RJ)RRJ)RJ)RJBJ)RJ)RJ) nN+RJ)RJ)*t;w((RJ)RJ)*TAAARJ)RJ)R$"aPJ)Rի5J)RJ2}RJ)RJ)RJBJ)RJ)RJ) nN+RJ)RJ)*t9RJ)RJ)RRJ)RJ)RJBJ)RJ)RJ) lן5 E\xל Z&bC׸a AObq܏i^!γ|WM)w?kRjp6MibZ6:[1RF>` ˒OCvRCJZϰe[ŝJ?LmxL˫Jx~LQ{Y Ol1= 9|Wo~|6[ )_9gbӝ=iҼΈZLjUs`ZRc&4UpYG5azƚ 1۾mh֌.?G2?3i:7o½&jO іFa:B<|ڃvҡhKzdq$]6bO(5ٓ84$;4%,1zbt u\@I K*w4Џ?L_O3>qz<9>y.|t>lt Ge4'5^ߏ "q0{7<H`1^n}KOj?ǾgO|*4fԸ>^%k>qlFBF9sz8OFOq`>6v0穝nv[8{)[ eB<0*krXq|tG9y{MI1š8{$?isE~yKqBi#M~gYxOTmKq#:vSBfXbv\4NʙlnF)T됱9X8ȝC(L`𘕈szg-b֥Gwp(Co~*yjƅ,^W RMDW+돗7ż=؈ؽ:u)r^:'sGIQg-NP,>zA.1w<6~k0{၏7~>^z㍇h1kU>G7CVְeV!U-Oq/?fN~aW%k쀝~\>a/o0jJ}ϓll7?qp;?z\Feepxybo|N Y9wM|ȰI9=yY.fظ5\LLW5Om<.Bߟtxo!N?s,-Mc;2EnO3c2?)8kO=ۜ3it`)+{`ۍeo;ֻ}a;an4>=_"~YASK{w.bO8"s)q0ېF=L|s ivNj!TڲOH UI&|aNuSԧKtp6b9+}݊|#ESWp፷9ؒvmxq9Wݭ2ԶgF`x/սdYqti%#ۢpd$er_0G\ 69lQ?cݎر:Z+EsO>2#?MƭĽ^cWY$לƵ'Aى%e>\д*51x'w*6ۜ1]iOn1IG{(U f+բ= >iQ9_rD^\6аz1]N'g qn֎&n${qtlW1LU{OÑzc)\_Ur~?jp~. \RL^*:H{p'k\@_=IKHqTG7[ \O}nsܚ_x0S5oӔ;bQ`m<.Z'F&.ט/w%Vcـ`RkӂЌKSDGi=X 4#s5:ů_&JQS c_ 6>)ѷ3]yU3whw"r q݋B9w'/7[{0zmό૿Α x Yoh9|#IȪ{& z. [78Fine wF}y7\8owWmSKYF~":9?ߕb(wl9{ۅM vviJoM=D'bKd oáH:ś(#mRƼwNJv/~7 jE;]k!=݊וyჅ/S.^NڌyPmYܓ $%e8in׻Fr^c槅[(sp2qL~='rlN(A7PNs͗NM/۞IO}J9KV%80'Jk;?? cLnȴ7sHsFၡVőm +i4ͺ3q-s3+c0~C! #T/±to9;ŔU zJk"濏yI2UoR(2XAϦPN^Yg֎g**tƭ"e\1OͅXINPR\~ӕWsu`w.' _> 7SX^}F[X ppr,%*QX ʟaxg~)\'Q'v&Kz wpOxް+II O Sޠ}?(K֟g?.b'I_#_}]3k~-nvn6z9cx_LNr4oqva*~3䠎-IwvR~p/݂N߰@wD.'4(f#20[-U/ʁmzNxVU*z󛟗oi΀;z,14帣'nA4%`J8< tϜ#6|N_#HÛ}.\WѢ&eݠF, qP;&JHUnrrbwO@|mSkZ65wI"N#y osڝℴ5XJV㮟r8(_x Nx5G^%~}#Jwv;Q`Ljwy? 
>RPʇĈ,THgIP__$Zx3Dfp;hL<=kKZȈ>GbƥxR7}UɔMy2 p.!!fqq_o(>_O~oOlg3y"v795{" ,glxWfv,↻ ׾ă;3G(&m}`D^jښOPW2Nh5r)ji:x E4oRgݍss8tFܐ^u]m'wܶȽy|ז(KA]u/p)fr')/0VmLeTjH%(}z%+R_ |Ӝ䀤a7}k^z|e'j\Ŕ}ߝ_Wލ<7GiM1H ڧm v^NUp=rsM=c&t}-zZc+QGo*ymS ?#"v6!?oG -XLYop{/9_l&`tIq#N Oܳ-ڠ!!!P͢i*oиRKWq4&-$ܷ5< uOMa=EZ<+|͇,|=`,űs8 >{?:f.Gfē,|/.>M>n4ߌޓ mcos{gn Gu<)^/#b !&{~aJ5_p=zlCeiwG@N'}N[Ӎ j/?ܰ$ú$Y3''*6IvwH~Y{ ٽo 1"1IQ]åhnp 륭 -ĥKGQTk_203K|E:Rͻ2>T]?OByŪMh}v)A k+؈bγD'_+9ڐ3ĚL8!bҼ\>%×3p4qIQ^á iVOZi# bwWƍjяҡCO#}JyZ⑎2vm:ijy{߃ ݆T2^Қ{{a2r_SaΘ}BCx+zjC//%Ǔy]FvAI?u4og͛Ц+zT¯r/ր^8<~'uϨ_hFFhJ~; ^]ei[,k$}+qCܧ_w{O፶1`Of:z\]&"0b$FͫMjB-ǯ˪(']FXXSzip?sfpg"FrM֯%ikvW,* `d΂>g2ϛBw>cZtmnJEw1*Ӛq|Yt5gJx齏RE:SxO~x*H *]xm1vdVFS{!9-ehx b{F [O t FΘMrzIbBnHJ2m{.B1RiFENw4 2H!^[<cg\~ E<"!GNRx[}J=fM3=AF^ΣqPJRXZcq/݋,1 IDATϛ^v|L ͧMZٔҼּV_YgdϤϷ-0Z a=SZ7#hp߾ێoӧ0asx,/3FQAin>;}]-S&\yr``SB7ҼV}ssHDZch~RmgL }Z[sZ)a?K垼Z͛"-ݸ֌o6RJe)iRJ) ݜVJ)RйiRJ)RJ)RJ6ݜVJ)RJ)RJ)UtsZ)RJ)RJ)TiRJ)RJ)Rt1BDRJ)RJ)R$"9RJ)ni;٩AP?X RJ)Uc=RJ)RJ)RJ:ݜVJ)RJ)RJ)UtsZ)RJ)RJ)TiRJ)RJ)RN7RJ)RJ)RJ:ݜVJ)RJ)RJ)UoN'g`I(5(=_z7mTlKz>Z90Sf8[n8Xm;~z0;ZpIZ \>ByAZ7 #I;O!W*̑ث yqB%3~Gzg 2L!g>m݄2[>ۣq4'JL摲w%}5$"Erv3H `HϯpIrtMaoRN\1mޚX9)b O6H?+>\lB©uK7s$=g8#†6o0>XسE؈qa#AcOG J5}>y,6>Jt:Q!Ϙ7G#5 # #l:},ge9=ijt4G t^[9g /|0C,<pXkl0ëù[9kl K s_l{~&n:t^b3C?z5589h^s9=k(&bTXc!fۗܽ šѥGHFg|b2s&K^XLN޳8tߩ'\n#D&mLv=1R=~ѳߗtoӈaviC=RwIck8#7{XWVMB*JR-(U"*VV{YGRܛ/xޯW9~<|<{:΄a*g,]Ci'~w&gr>6i\ܸS˯-;uߟY+qpgGlJ4N;Q? 
Os:>hDiG\*Ң(:O&,!ݕL]oG~5(_lE_B \LFh;rb))W?m-h⸟9FA]$OfH>7?'S+BDZMz"7/gJ;͝4I<: g fQٿ= h߮EӴu$LvpPN]+Ydׯ|*X:r;Aȟ/[0,aCl*?~Gl*3gcֳDY8Iufq _tgqaHmJQyxPU}Wޮo),aDgEw\Ӿu].1扃$UJ1UR̶fKۥ`K7_5,K1/׎pW)XqUG4j[mmw@_Gc ϻ*[jlq`oBcgZ&}:нEm3qZn^ށjKf:94 6o"x{PIs$]Db] O8}xj4Mt1wb߹_FNVobİԟm 1nϮ#`$b<6[}CpYƨmf7oӏUg6iJ{wh>q ;B ")b6!/SB0!csq#orc'Uܶ^.1~'wyJ0h|u`YÕJVE *{pfK#xfLڏ#9 9!si1}!Ki)g39.c G2vr7Jo3{,cֶ'OvHƽj|y}p<}}!`SgW~0KM=L:U] 2oH|LDCt垼[65eЫq{P8R_=lB9CؤυOF2VB7qkH޷UmwL㚕 \k~ez`w*_ bA`$ifxA?XCdfz+2t% CѲk2+{MSMYzin681~#0t'fv,_IqTZ;Aȯt>.%yOZ0>.ͻ|Ŏу]"7 oF"w/Ȧ벬vFh88P`Q%H>o+ޮ1cE+Bԩk$K9׵MzmeN$JӛV!1C=K׊*nZqzg=)zwd*b˳cs\R9uۯ^<֢6x>b|ycuztCA^kR/]RGsHm[su/\֦{ߺd ePG HgX׼OʹmG24hr'1q0{~j \_6MGXmtma/{>9LU0 OÁQ֍MvYe)hI[ֶ W%gĩJSyqKq2SUfwΧNnEѯoRsˈ.ـ- ;4IrNZli$c| eW;3nN#3/<>HUwyb^6Yu# ր^sOgJГd!4"6\; M )) ["`Dk| gfKۥ%?)ô4.U{s`0fՊc֊(Gy$[ڰK'd߸a̪ܕTl] Pag}ܑdZ3mPO=AY??j[n#'D㐋G¡oh|R;n KR'%# UN3$(jl=@9gY((- f)`s~n/YWAs,9`}NKγ8n*]2}t+AA;`b;9&Kivq-nM^&( kq{|yIh](`l2hٕǧ|I?3eDo此##I1PDZt85qVݿ+y'XXirk}dsc.Zk: קkym|^aվFA=_ᷘTҖ== CEfž-97C2{9R Ynos϶qTO{֔dء֨6wLfYnv m*R|Dx>P?v6 }Jh0[T=)Q})n6j1Um8hɴ|d~2zРvuJinziL#N킋*c?~m(&Z-OcP}Ye%*4E|RT}q/ц_ߺ;q (U12uAz54K;ho] O8ORGB/R \%ݫDUh#gGA'U$CS3~xKn#*KVDlŸ`$MsAM= v$ތAO @ʅ,I< hc-uN4wTIc=`2b.6 ;6Vv^B.1O8EPȓm&'T8yOaCYߎf9_(>]' 8ޏU6TOh| qhf @[}"?wХ7)Q) ~W.#hɻV¶oK JUP Nh ^Rn&`+rS W|;2v)_Nĥ8&dk׈.}҅P%F:ݾJsq<YtX(*TJ=\![ɘ??mRyHı@]]"ϴ;`{yb&8AshdS*GJ( ^kw HΖbq^x^e#ۉzْ]*ܼ:'B %jE:bpZ'ּ[ {עWz y_xuP(1ݫkH7ei'TReFDcd^Lڛ,\ΡrYս^*kǭGt-HLSPa{JϻC eo@>}ZVw7f9Md ?=N-i]BFbħCq<2&P7]єFbrbEHi{%C4IT&K6hz=5e`k.+Gs)ID<#!~XȃhP:ǒz9A"ƪj&C}7?aȼMVaeOb Q[t*C,yDŽ,{xc~Kr'?d0lѦzQ<==okQeߵTxoD0Ps)&7sپոޖg3m :2c+4y }G/}0g1ܾiPoڭ#r_iǨY7p8-勭I]t6Z[TVivy|"ّ f r (P>h^b/*2nm&]ky<Ԙ욲3x4l`6'16S:$IFڕRI{ٲ3mb,:q-fĹ(Wm@;[xN_OhT-@iOdw(PۯX< vpޫ \T(SeլELRvl|!oX?F潮Ӊ=B._.>>b2FNS~p&uN6SK3ۙ G+1xPf *u1ۢ#Wi=` "oi_+Gs3[7yo8PKƵ-B\`7cYvQOa/\@FMxg:}H+)^~ws4i9ߩ%0eDuNM|fX̛hJg-SY×C'*r\>l֎qq?y<bkV@lϒU0G1򣘽 *[X^氥v,f|܋u'¦AK>vhYώIm'gO7O/toGU6t?,SE -ZIM4t,TKFa۶t'yF=Xh(e4JpCkl^}<96t$ZH䅽"5fNb,;Z'7K2Bx+DfA`|gץ=ɸ騽o Y @Еj҈Bjjw7 
^/GlpQX\;7NP-b-c|}Fto@h:a~jU"Spb"8nCS1ٳhzΖ{ٍ y0u]8o3-mtk!aFī|_*p(՚)M8fTW)_|6hcBHid|ڶpSft(ũ?ܭu)RjGرelZLѕ_\*uJwi3-olN0Yh#gD0LpO谮;~jdD_<=/#&RC7UyD $Nٯh?n[biG ӂ < rT /5vľ^s}L¼Wwc??v Zt-$+h[rZ^:d x3\76̗elg$&391z"-ѵrӂ   xiAAx~Oh     )N     )N     )N     ŕ+W䃈     3Ea28-  A^0j숕  B>G^!$B IDAT     }'4&r6u/Oi26X1ztІA۸){<jG5e۞!_f.FGt*Ei(hp?M33-߆f7Rf\ia;v>;iŞ[ðH!XAmYH۱>?Ԧ['g8j  dۜt.S<% p=Ѹ%Y)ֆ/RP%h/obgJ9^UbݩDsߟU ~L=ǬnH#_;ǧ-0Y\{gNA-,˄A*JHeM<vgB|0x<^/"[L.gt4p~$7b22 ),}'@ρs $ .Ew2F?7=4`s oCIdفuG뢚>{-|gCA SLt7z^l|_*V],E`οJƮ>LOQ,=+i^t;zdLa㈇B[^lx-^qHC7Ӧ#O|ciq;v>Ѐ!A!\L+@{z\uBOxAt}&^Z7ǝw4M8s cVg;5(վ7_e [JZSz]mb-إs',N)|pEѸ8szc8p1bgCء 5ҺL ϸ'?I=WݧԔލ2yFq!y{hKwY:+"N7>1;-j{]5}g<]%ݿ4 G] >ySh뿔7zcG3șCyi-j5Ĉ߯ٻZK.D2 ԤGC{\ȍTOvgot03i.ΡӇ 4Z;Z 9lil{d4.n܍W|G֝"K ?Wb .Ύ*h2w@L*0DtJ%}ЈxUEQt.LXB:++ކkPX1ʽيa3-:=}#tK'YBWݙc< Aj{Uòso/~X w \V̓b𠨳-J@mJe\wZ1扃$UJ1ii;Kvfyr`68VF^Kx%H 606*Fm vaV)}ރzMS%'ڔʥw_-ۥfU(~ O8}xj4Mt1wb߹_FNVobİԟm 1nϮ#`$b<6[}CpYƨmf7oӏUg6iJ{<0c|-O\Î@Fvxؼ fG-eԟLH\XG:|;-:F]cmҹ:69D9 9)l]>Gp%Uo+ޮwaU7Rd6)՞WZkkFR8Qi{nߌN_ƅT~0yo|S7@՛!˜*܌n-J^E)ە%ܐSW:1)QƽQVZh߄&m,Qq\Ϭt)nrU8bg ڸB9Uu\۸FanXW'EΑL%>YLtyvlrK*wnkGZtBl/YQ8VGx8&%8~>ω4.߆:W¥hmKF?ki]&|{ԮP}O؎|[~)ASڏ.X{"KpeKZ_e#ܴ o4FG:QAWQItQZC9H0>a%MndqjƧl%mYf^o0jT'N΁rcǹ$8UzJTmRBXR7o(`7S.ru٨o2D8UiJ4/n=/7 q{|jx\*~r"b{ɭ5u#\5v%q>?=yJ / *71#F mU6kLiz YӔd-7L}܅Χt!]k7̼;0C#1NЎHV3{Ap' 촼E;ӣ=[El-P\o5/ӧK7Y☵b/AiLag&-_OPϏukj; A.^j v }Iwh KR' Le넭2Th´,ϱ-DYk-xL)B.}УNfUƅ'+Oeos)p(ӌ6e8;i*kq kL΋&FiE\ $DiqU޻5KSJ__JJXnsH'>,γ8n*]2}t+AA;`b;9'˻R8n7גt) L:lа K0ŋvY &]y|;c>SF&nJ:{97#MEPgb.Lw_/G676p}6VVIm M)^rѾ>CZˎrMs׿& ZW($oŠs@j\?΍{?n o1֥-z{6>(9L}{m7[F-7psnɇ~eLrX/L2~mB)-j걭n6r:.ag&VƯ5*t{:3SHCßQUh2~y(_ Ožt+ц_߶;q (U@H Z/>iqĩ]pQeC7ͣ7IÖ7;bUvZάP_XC*;&RjGL DM`+bQ}HMۢTrAiN (y(gSPYAz54K;=CV]< kCI5Ll3,4XeWQ O8ORGB/R \%ݫDUh#gGA'U$CS3~xKn#*KvH&b(Ө&aWo jT8#f zJR.fٿheO(T@ns O51$mB}Cb[a;b/G [@}'̪B*s 79ˎEݽE|W֕%Kql;9ŧKHpӫlF%Ewe! [:x6̂A>E~KAoRR@]GВw)m!lߗx.ub\O鮱n)}AQcZa}Yu=rbhsr\jХX4m4+65!$GnBȮX*t/SN<}Yܭ ێ}slͱD#NkF. 
#:9;B8U*)2}jV^"&)];s7 \2/۵7tÑ&e^+p3$dvZμ2M#~PmAے|=7}e̋3Si#=ڼ>Ք\m]'!#tm;@Muv!1ȫ sujd]ٞƅR~p&uN6SK3ۙ G+1xPf *u1ۢ#Wi=` "oi_+Gs3[7yo8PKƵ-B\`7cYvQOa/\@FMxg:}H+)^~ws4i9ߩ%0eDuNM|fX̛hJg-SY×C'*r\>l֎qq?y< >[2bf~|҇8->ZmP)ºGb6-c6^86 ZR\+Gzv>LZ4o;q&u?T#{~1$}5h{ ?Ape2.boNo,u`٠2_2۶W?q[6BC.VomqN[sc?},-jmS&TX˨cX3_Aa_NX@Zx>嫃Xv[ҩ c??Jڑ>vlY5bnteȧE_Uq^wOuT4%;h])WB .'XژPb<+RZ#Y'8ORԙi6i>Kuu5<¯GOOiSes)W[®J=q1PLg8D\'tXם ?5}/ ٞ*ͼ Xԟ̭Wyܴ#qiAG9*A;b_Bӹg&va^]wT̶;1ɟk7At-_e ĴyvśާLwcwYhB{~9~͏Ywmn2I/Yn.6I{6M$_fmi\o&[at27L.iMVoR0^Ioo.$ͻq']lZǴ3`=7NNDS!tħۑ5 IDATbzt3SW.`O_xƐ8qܻ/_t}2<Ȑ@{?} f]\O6I؅@&c5onݬ? MmKH3ֈqt^t+2~GYԙ['g8j  hҊ) n?2C͍Vn q؛&bϭahyskkug^,ĺ~[G4n ̜~bmc $ML)ǫC;(?W?ٵkH[O7|ߜ~1S۔tқuvm oCL32NL۬+T6_֯׉=|G>21'v I3AsSY~j N5S(W( 7iGѵ5F%2n[ң^kGt:ncޤK57A̝/R=(v]Tg`o:4~)hٔzKw׋C"n+_"t{rdY3鱇Ef+0s$+-g)cI~\JڢxL;.Y(yr;H <'Vwb;4w]ZɟDtX>'~jӼ4Rӻ]&!h".7o bN4U_Y{%5SfGw?wEmo뵦^{Ⱦ84:x{עY1_mЇs @'ѣU=붤f!u=k69x7=bvjmr#tkV=Dά{MkQY'F~>:_Ow':wϖФ&|?bܻ@n|7r%k}PǷ;~ {INwq>\Ȧ9ݙZaK0gۓ'pqqnL-˷8 &^8Xg\\\pqvVDeSр!4S*Fpū"-sd]Y6|4>o]ŊQV=n%AmD[:)mYN[8e?6lq̐9M>mռXc~ZjbX]k}M A̞Y=]$OfH>7?'S+BDfgߤ'rrOCI3_C zAo&]s8ЀJ^(UbrYLj;k̛W:W&縘H>>!?G;%Svli],enf8XjǬmO/z{:U[xt-'M9BϮ`.˛z':t@d^ߴz8=ymjСW5=pZ44{لh;s:ZI 9dϲ&hެ^Yh^k4߾e,宵צeߞ}g싈 $j]}'!(⧔RE[ZVQK[6ڗZjU}%3?$dn&4xޯW03}ι9s 7f]Lij`ޢ42a\xWJb xqp7eԠr~۽5 \N-Dnh9?g}x1}%x9=C\Q9훛~Ϯ-cvlq9Y1g|{{`]V[^(8SM .ւkV K?gx [?jۿXWϒփ׎r>bsedFrtF5M̒U:-}0g'!ҔFjL*!bg,|X:f7MZFv%% -Aт1MLCQOFB ^:i0$9 fN{y9㈉anb> 6ϧ XAn:Va~:A`9"{RC8\Z?D4qQ]?e:eQY{)u>ĢٍSQHL\x,ҽ͌gܘI5aݙFbZ!}[&TxP\3(_{|`ftI eĀָںE*{0)1u fpQɸUF^st*ʩ=))cz~mbb&"76RTwng]2O@ XN}"ɸ7-.~xjټ^GO9BFr믹j]iάíˉkVL#c7R9`&12 E32[ j?̉]-}\0IFE\7^-0r].>M^Own۾lkn9{OC呟1f} (d9MNzkݔvl͇9b c|]!]19dUmtÀS5{mP93pv]V _1h*Ads`J055# I_Lqz} DT3nkŰ!U)RûrfNGѶz7 ]͊QؐV=S*nN*x\0o?1mNEק`KA1t/,dR͜gaNS}˻4iHٻ9ۘ>Eդ%]f5}ɋe]ɣ-jqXLHŁ:=G]q*dk0KYTZ%M!?0NI|^w8!.i]._%L \u^| Woo:Mj!$gY9֦Iw1șf-bВH# ע1H{VU>N$JKu"qр!m`[1Foپ]txp$mL.oT*Tyrr'48 _4@ĥd޿S:}B)~G.,' )u6OrV3/ym:5]n(ԽA!y^EɪF*+Tũ}(}7lc oϖ@TpUxOM|u3ZCYw:˽|hG:vrnKHu{pkC~Jy,Q!'](9ssYůMRl ׳SVO~]n f/ӟ1@e[6GEd8ӓ|-3>w=hc-` 
DRj5V5ØnjUZKY/thT4jC*mnkéL[rR*n_{+1wh_jhs)5"Uqӂ!Qҫ8>Tc7/7wS \ =|r΁_I:gOJ1q_Bw`f7̋_Bx5! w^}bOA.1eQ E]Bdl;)q,5Tśu<>FLL ir#3k0DF_esڎʷo7nD`4 oWj;صQfҌpI)0߬KA|y6_JxTAQ;A}gcHsNᾱ]x[K6|ŠĄaP^S uwR@[m#?&W*ǚmUd 'C9kNھx{{o4w:T|A*dW8UݩO;8_BTEB+sy{6N"*2ҴS>jb|\c G fQl0U|ݘ#yܧIVJwd;e=y(Is5c:[ywRC*-/☒ fθ\4b6[QYA+Sgz4H=²wHN`VMy`IOPSV-^ҳQ&bKSw1gjmě cFt47by,'xrEs4 Ϧ0n놶C1!wwjN"V偷N.^ŸkK-e׏Js\8%9.gqZG*9]6!#(7ӏUa/⟣*pҋJO=yHeFH%Дl|-͌Ђ6rh&C|oSXuD7^B-:'\{L͋2iS$21O//g_53-ۍbpǹL3'/:hK2cJi}y] KnrՇ97.GovF>q0{eh[ag\DPC! ~e9rҡQ{mfV*G96Kn^[蚶|ƾXv9zԪOh;tvlnU_i+7 I|]ѿm0C{(꺀xGf.3g2wx 'XAjOUqv$^҅1 9^UzEj =Bޡ? }Zؚtvǟ1*c 9)c}ft0[Or)>̛[;;dt8SxP<`,o ڂW ~}\lGnGz0N6^חl̠U>gcg1^5>ZF\nrvJO1.9w^t4\g8gDW7gLcLۙ^͠Y-тګƴS&MgŚ ٜZUh+8V5\7}ɹxNKHO.z;ˏڸ%5oc8_ATk>/!9O}R)߯eU 3Z7 6Oc&-#vdz:;oWlɏ:{,̹T]<(q3z"kT K?T#ڻ2ex Z]GGq)98%9.jVbFias'PyYI ;giP -z̭ҕ}(z{/QZI9BH~r5?ξ{c-x:+{>gCASbk1]Yג"aً{CǿH,ީ ~?sF|]iG"{9Nivaӏ^aHu=_ܩwvBNjOnoaZ2G/E+!y@w9V(Xb9廼QowJچCj!-}Z濧=?BrOi~ez3cTw(ƶ`z#oyOD^KQB!B>8-ē]B!ji!B!B!MB!B!B8YB!B!BQdqZ!B!B!DS]xQ~Q!B!B!DRYVYB!OcF Swg4BQc=B!B!B8YB!B!BQdqZ!B!B!Di!B!B!NB!B!B8YB!B!BQ_N+ 35nfz ^>KzOpr>_nQhaƳ@@˷9uϷv}m2ٿeZI'^Bz4hMasz%J'qYҁf4m"fwn@ߕ1fLooҦZnIn?q+$1fʭHOE}=]нB,I)tTG,9&�k,( /J%2n`wx#`(o^Ɓ&ivFۿ-,"4b{nBXnLOqC6+W2ԟ=9Cyfa LQ%[˔ LDk ewf9K>;G33Y' XΘ>g3f+1Qh θ͛Rf˜6cC~T'ނbi i'&v%WYa֟Jd{nvo.ޞo/KGO9_g,_mZ03F}ߩwCO/R4mW x9d:ةgYI7s! sz<uW3ksQX?zX0,ӅFLqH,Iۅ`w>:2R0|-S?N=@T(L}NːQΫ.oWٳ5VxmVlNLb_ˆq0n? od {U,wIDAT&`e>8VtEKC9[߾&U`Gڛ%$ /2}J"S~KJ_Om^cks..ym;fk?]^d~?cm"ђz0xDܽ^b/} n܀6gݫ^,Vb󸵬Ћ]]LIf|WyZ6 q~vːD^}P! 
lc0-n hԎT+X \>W:4&^KzI˳6I=5.}Vr5}޿S.۽o2h$xU}.>Wg`#Ѳ^ӃdgM9GN@NnMb lHàLf{T/45: [}lTm܃FwtS[yJApwwkѫ%Q9\~/5 >i;l+&q#> ůA+FT,U u3| vGcp[,HEӊ+NYM9??cD~` ^>EvLRzn ;w2Wrz-~熑s{(!MX&}-6!Y:m|yXȚ1>Cxs>;K>۹~VW3p4zVۯc!FЀי0x-Do sSw{"i56_~!%d?w)ގ)pv/ЫoeБLxn*6p ]5;n0o ~Ik3m&zwh>Й|z=?)Kj<΅J)0;5KySˋR^^9ƊXC{K:r[Ӭ<}h4zVyډIVRL+Qrob9\kCU3qP~rBIW8Z9fk,N7uZ`rv7ǔ?p߅6r~ش>L&odے>X6}[­݋9Vcn];gd .r{k3l/gÍҙk!ogڢGvn_;5NhɩE1<>@l_3>a⚫$_ صm 3q.XMl_+}}XIco}EPK?r1%Ò]uydZ֩,z!йtåyxhqNODؾbVr&}UA齻 |؏gJ،-Sa~-]kQC .q2m]vYg`!'^'5n6T 8` r^ =$v4*jH^3<٬٘L9?_cTA2\zE"Mfܣ)1k|Ld7˘0ѻO_/{SY)V-TJ8/l\uN%W1 ,?~0dES}|?UϭeɚHRmn}S|5~/V7ċ1Hz.9@kDd)ML3qmf[9}˜M.\'Sf19x""f%0鍁te; wFv]ž'YuSJ ѫ >7 7f3sB$8_ goĥ*Ӥ+I.{vЃaᥲq$.3az7\IJsOu˻4ƛ^ƜƬ9b[4O]&VnOe^|pċQ+pV(ڪ3vWXOϲAk9~[Dk}"iKwūj3zKIhKb ( -yV(ob!۱f<ȧKwM|f Wwؔ[1Z 6ry&Z 祺%*ӄOoe ~&} hzMyoo݃7(o}w&.\j3wmQe|wHoW`:ӣ/L[;)hu/ ?s&#W`\HV-$ H I8YA/NIħWQрo=q(ޒ1Jr;'c}sݻߒ1jiߋ[9`%-7EդG]W+^pq[L{ٝ\NUs5_ڝVN  Z2R&kZ+sTDl˧_r$IWϒ~5nyg.?xj`'5M^Own۾lkn9{OC呟1x,g_TJTAvK#"c{+\a/Rsz5n$!M:Xμ_ȋvF6!-2>>ǑhLR\'.'2J;rf9\OH_I\x J◼6~RIn.7ޠ<u{"`LdUQAtYSB *>7lgB C*e*fl&W ?u}'_~>Sn,禺T7~ 6g$̓foÍpSiHT*BVR/8 7¶۱Q5Gid,cof/qxa;'&-)okԍ7cl_z3{ (>bzcvUY^z>xUqHK&B~jRf}?F52!}T[h#X g=/w [I8yҪRj@աhh4 ehq).cªhT4j9a ;/5]49Fw檸i}(G_J3Y̕T]YW4ZCVUqwMŏ?`1085xx5hU*ʧ v (]n^9{bVE!f2[/μ6q_Bw`f7̋G2Pro$ETRUoc|O11a_4W#&_;5^?&2*^vt(P}3q#2TVP{5T Į2f4NjLQfi)J13z<-;{Ib)\Gt,14){RSmG\m~!qV/;A*Ʈvqn-I +~s7?Ðy^Liv>`+ݑJ]Qo]S8{/z؎_k5Wa' 吮9j퍷7:뎦R nT_a,W9`v ?|tm S%*^ կ8v0_JӲ7N0eCG0oq.sm,5EW/rhKwcN_|pHnqV/6&Mg)Ɵ1޽۲ .ŵZtJ1OxDz\%9f%唰%a Ff+*+4rvE~g09ώe0~:wc1D_b Hx]W eb"8h.$b}͸2՚P5q?[vEbHoKOiQ_h.:ƹ( |W:|pW5Z Mxs s_l FT/.4 θ՘.ޫ$l8գACrJwjmK'Nxu_|{p[޹h@屜q'd.ۀ*\\nATm4ȜgjGO<$]̶?UjN"V偷N.V1h2Mdza8 mK92;b0CJi i^;(,y-iMD0v9tb䆜ӏL?VcµJ/*>#={ @S673C ʡ*y/ mNa9%ߤz ȟp25/~ȤMXbp>T}Fb׸δl7~Ù2!̜XS@A,ʌ)AxMev),DAˡW*;pܫr8GC1m|ĥh_?m} bsABՆ0k'Gt$SIFN =}X,m{m}kڎc5vDR>ٕ?V܌/L&wjXF NGi`ϜYʸ:%&|Hc`==i4kV*xKB,xWYmɪ- y~vibkn/ݒtkm:fNš^gY~f\ǑrS} F2Wҋ]q(ۂnl0m+FlGnGz0N6^חl̠U>gcg1^5>ZF\ndvJO1.9w^t4\g8g¥iFvnfhP{b֘Vʄ鬽X3S\u.;SN6nI[t<{WP9UbbOKu8"XZVx' \~QU֧!~p~3[kǴ+ 
ٳ*PVcM0ɭgڧ!>9ǡc9gΣMSFwe꥗1U]<(q3z"kT4cAeZ N|+_Caid%%u@1@1KWu[Dim^l#'H;!}ZdbUǤx:+{>gCAribfu,BH^K^JNxi-!.-*rY^SA~Nٍ~$^aӊ#85VEr6 K J;֧=!c㝺O%'xBFs p]'77{̂dd/^) XBH^ aZɝӅ%__%AV1y|w*9˫mH;vҧV4?vGT!rKt9 `/߰LoFs9"w(ƶ`z#ot†^B!i!-B!cB{=Q |,Z!B!BY\B!B!BxDB!B!E!B!B!GdqQ!B!BY\B!B!BxD}q1m3/ȺBnd:h?q8пpvf7>F c?[3cQG=?7cg~_wh+ܤm$">xFU%]_ƪ8oexg{FE'yq":~GzFEg9,T5ZY]|-it,q"kU^ g·,4Zor ƵN3%Yh>f&7yD]]cNRodw7=z;痝 5|hzN+, o5j,=1EmcSϚq&6<[}!A?$.{;Rزr 'j(~EV8I<#{Ϝˀ̱?D #’Xk֜s$`Z3e>aUk6.XSޣhUJ.\;o/ё7Vr!2^κ <ɒp׳~t Vdl,'q;v3霪=٫7י *~̬I; 囥KA\zcsBց\>16HBQ8S2غ%/Rbf~],=坕c(n!7,Y;f)Ӝy{Oxt v}ɫkPTi:ɤ96om*^_sO?eo<݄5XLZ%no,>GCY??xeIR92F\g̯qBW~UʌwS>?͆QCx98BpF9N8QkӢPVxe/0ub=3a{v؍C(ʕsa\&P]4,lO(j 㽊WuRx>U6;RvY`%a痼ҵ9u~4i9=zOgiר>Mf_ٽu焍;$co.iYmz3]GQD5jϠOwh>wGnYvqlhzi@TT=Z|Èz1O6r% +FD5|~/\f.3U9]_̃kom:v>]pr7ײ;#~9){ d嗯еy]ykKNgC hЦͤ_ITf9ݟ򏞣8jVjuj[~c:29>^r˿>*߷AO@@5 ~`Pƃ0B| LGѣb:1)v,g~`R=|1/C%PQ>+p)5yM,3'&NRviՄMyjnR=~ Bܸc Qyo4i'?0bni%n\&>:J8H+w}AQ/1|ug]>~ -iBe H5;tSТũ>7l\?A5(╗ Q,~Oᣙ߱,ım5vQGrO,ʾϚ:5SKnKuK\b9wbt^DP"$ {fRTʰq>EK1ËwV%>p#mL1Yii }׻#Š3o@F י8yA粢5eˀ^ˈN5hKKf" kf.FՇ":dC+ڂA5B歨v2T\V]\Xfë-kke71_׉_9Pm ,^xkzX%cQj>38mǵ.we2Sװ>8N旘UPl1K;>Xk3 YrƔ45ߦɩ ?1=TVŀ%뎂ɉery$}GEk:u~ڷwb:s_M?u޶Q&96 .>䑴 s}#U _h(OI'WKGtldneűɚ6-yLqbG~[?/X#jfNLq3e~?\WtNbV 5EV7 5l1@pEMKd9$n7qoO ݬ5zsmH|5N¦k޴(èc7c\U}.upo$^\b0 )CX.[.W0Wی(a9P4FƜ%;RvJHn slDm$_B?ͤ5w |H#W4yo/n ǪfݽJ%vIre}Fib棙yx$^KkSL8l ѯ^  cN2e_j< ~]NjS2Tryo{g"0"4{awv 7ıEuz~KQt$+5ߣ&!>Eyyc,pxGǀc"SKWc}d=jQ~/Pۖͧ&/?W*BWP-G}l~2-Ovt,oPmx*@\^=>yx¾GУEs{f_m;qsA;,d ϐ7joGlOqs%s;/pM^Yp{FnM`)RK/x{hȋkf͎ EQDDv3'IB*PQ<cy ;14)u? 
^ZrٖcPiz)UrrʏE.@4ق~RA-:`9 "^:mf,^i Z9}}IGzb>G;d_SB+{2j|om:6夻6[WUw~(M͞fpJV6Pg'Ep'J~&=ƌwcZ4,oq4wV?k1p?ekǖXv :ɞU(g+8t%sҙ7@P|ٶ(,غ^]҆Lyxbޘ_kvn˴ޭ2|J?䶋xw>sФ~M.lёD.mV=q{hLNfcW 2jZ4tZ F@vmY:y=[j1h4jڄ2;p49/AN0'\}]иEk*M^ϖF*ݑCo>㏲f6'5l?1g|$d9?8T%e7z$ '6BDYYhqM򢨟A|-ib04|J|3lr*b퍘dHY?ο9L-~EH而?4 &5T8R.qG+_-X/X/b+q+/zyxV0s$hEQpP-11/̋D2_"˶vQ}A^uYN5_YYݗt.".-gf[ٓ܀"yTmإjǖ\^a~`TJi0]ȩR0%v=5 Gs4 r,Qn˾Bߺ֮h*Z+73 o5N>m-c._'sw>ČA}%t5v*pxJrN%׸Wj&o `:;Z^BIX^0e4;uOOfvi 5hѠS\2xȧ?q.+ucY8H<m\"3gegnA´ {s9򝙤f*7B=ņh.K>aْ4wd/|7ъ|V٫Ԏ7VFj]Όzʛ%2 xLcnQmnڱEfnC|56auhhLHV\2)WXv էfN[:IJ zyB沆25jP*>ORZ,Y:4 Т8”HHek9E?$_"gC^59bS>RFne.gP^e|!3vd 85/m`ܞ6IJ2NoDn2C'?M%#h?|% ^iߌVeCyS[ڷ ;OZ)5iyeug&0jy6|*L<ˇCJhȫ)5KS'GG6:qƏiC`rb(/=bmxdнyqū>|>oum9Eܣ[r^n(UXy& !h̷mG o=ZUU1Vzύ\_ _ah};Ow X0g֔>!}s$> Kue@0]ŸXӎ鏖fU/g}%3(Gu CfmK9ZIf4 pׁ 8l+6=OW1O5Ƽi~xWJkl{&g)%s, sB4 )g(ysXS_> )U|bO3hg|_o)lXRsS&f"ũRFe.+N>ݭ$ist]ҏe=iP_2(: }px%WzSRWuɃ2Iu~OFm/g v]9ziN!d.\K`9ɟ"9r% Ao=IWȞ11|;γ]x7ONr[^ZVGaɠA.lc #BRV}&^SJ [kNd YX?4ſܹX(8=a7@"[-B!B!,. !B!B!<"B!B!BB!B!#(B!B!,. !B!B!<"B!B!B|`Ӭ =뉵*&}4iWoߐo>N53yL$COZ~h +3't6sTV=_寍ly{Xg&G/eܺLVw@Fζ?ɩ lTҘaR[G*;peO1m Λ4DTΝYUkIsμO2܋,TcVH$ IDATTSRB!B!(Z\,@=:X;(kǒ!\ k%ĭ;v/w<2]ƱjydH2ݶ8UCV3l>W{'cgk)T7%4 xm)|Ԣ[H[{mmU]/X\(ygq"/{wzՍwsn?$v;ys}tz-\/B!BQݵXI6^:xкO2G5eѪ])轌n,ظrl-dW|&~xkWzɺ̑qS${w]Q#2)l^Y! *=AI|/Xa8ǫ+ѶRCXԫƈrKyq9W#JTlİnmh$ oRA&`zҜs>lW춶H9, $ͽʹrLCo밫mUhsǪ=I@oB6&ey-N},.ǀ2&\4v*&}g}j̛/[3Mw%z쥄B!BQh?Й̚kͻ+%)Xg:G`ܗ|q&`ʰ6Z S<ӿ[Nql j~4tzj6&DU%$0hB޻ @HN˵;8 ws>/x ۵D'zdvRM6"Ɯ*wm/]1,y=wl&8Rj@A\ZBvaDeJfa sWj\TWə>=sc0n2.թX!Ma=Ltz=%H ,oPyq5+E*n!n)Λ=NTYٱUy>, B-yҍ6?VEOmmh{G֬f W$[Tv-NUؽ xBOJe3.UVѩ\Gt繙s1g1`%.cjD/ǎc<ɍGc}#jL.WPB!B!(\,.frp~*V͐')?ԟ5+)&XhV{Upp*i>\LI$KGe5K؄?` d"v_H@?xU"wgx`9$6o]̿xsr!O筇QJf;8ݐYuH8'&z`ڴmE[wQ&e>r´#(isנhPle<,HVhlVk1ád[A\:1Eobĕli} ۷{|eܹdSǝݡR+SZW&1^PL >t/"պch>͊16nTsmǀF~JL<hB!B!\sZA!B!BWrB!B!#(B!B!,. !B!B!<"B!B!BB!B!#(B!B!,. !B!B!<"B!B!BB!B!#(B!B!,. !B!B!<"B!B!BB!B!#(B!B!,. 
!B!B!<"B!B!BB!B!#{qzI'1 5st^u%ؓv1xl79e" -F'j68i3e_O&d !B!0O'_O SVq]6ά Y3d_ LQc_~<-E U2ZQZ+;*!B!BNw-.v4vN $&/hyP$aNS4?6~P(zIG|KץA_m+%d !B!pkBakQVUEE&MKjڤd'.Os.ݎ!Otg-9ٳa16ICpD-£ZIGXp&wqw:ax`tuhG(-ͻ ド{R߭:I9?A *3>e$N˭5ǐTYt) bH;0[\5bL5ǏEgR l:ԡ3zHZrǤ>WdV<#%NMtGGs:BO1y U[,~?9`VNu^,~[lxwy+z^L!B!nr$aFc'.. 8|ݩfy! ?'WvcV>?uG$-9gC'2t<³!==q37aW1p֏2υyX N.s[ 5L^ӕyuU08^ )E<`>KǀL-_ Yr9|ѠSXQnGs#\+#X0>WUl <j% ba672 !B!aj@Ƈڍ$xz4JXlv-ӱEGX4 be exmU"|GTT֟JxѠym*5Ob?],N2b6RGzi4/ i #'2T !4SR&O*FFe (@S:]b̬W*Kt+Z!ZGx\秏4ԍ*GƐT%q6-|j~ҖoSw![hD-{0!B!Bց9ӆb¨ȎJ!B!cv}3I_s3y~F7żlO#O!5y{ؓԾsz\˰zpp@D^Lk0~Y $R(^;gAz8O F&bM|hMЃ V.VS4WGv&zG ~T8ATۦ}F BJ ߪ(B!BJSP9l&2uAT Ak'寏*4_KW<֌i'!&1ţ'xilNtΞ쏹:l߸3vcl"PhG(Ww*1te7uGD./j|+ҫ RI ۢ>βbƼV[ޞ0r@:(nS9N-̫{٬ŤⴙtE~E!B!/ŹvPFʰ:R ̙Άy3YVu SjB!B!L= =5 OZɆ(`NI)x%?"B!BBL')_I7A~!B!BzK!B!B!GdqQ!B!Bh˩y|!GNx5cW)H|Wޛk>>venHB!DA(E3~;oay qv<z556b ħ #3dczsvދqO+s,: 9v>F;)QfةX-v5X?7XώpĐsB!*+C Auaa2xxH+T%Y/Pv.ȉR-xHtu}T\Qݧ}1=KƵwfJz% 4o7LλCPkvN|B!BoPෝ("6n6\%H[YϿL"VГu|i%Oɥtt8C1g>U\W̃_iJ4 .A gsli\Qu/ީCf>5W3q֠'?Z]z;otyMEib`iΥ1㉎y`|ಝnѝ8ٳa16ICpD-£L;0[\5vӾc#|S1Uzзw'1rH1PVuwM%u|esk%NMtGGs:BO1y |Tٸ.1x\rrF[#Uym_B"mZ^dԹ6ML6|j*>B,.TK*s KxF O-{u+Ww|Je?h_h0̑=0WNmoL{POeCYp_sdnދq0\s.uJ=!B!jՔo匯v:/9gC'2t<³ܭ/IџXQnÙ3x!3KzQ 5L^ӕyuUFvW⟰?OTp,l[r9A[.&o`Wf6x N0#/ VnUYԝ>ʩ>WvcV>?uGdv>ǫ>Ú!k?yvyq_2vq< wz~mՍEq993\`漍XfS N9Ϛ-Eeۍ$aFc'.. J|TTi=g\(Zg`oǡiYmr[Ʈ1}uhJ X/ ®oi pZy #TI|* z27 Tg*1O 1UG{B!⟪A⮙45F`̧̹YcYB^l[QnԐ&+ףWǒ3條Os&eJlOb^haj$X-듊ѧQY4:=ДN~Xq1<̧^<|0lyN8Zj5jBeͩnT90.i~G9Qə!X"U74A_t향jx:PU/CΧ1/Fv@`?*t0xwGnًMP.q;&2O"_3+sb|{vW8;7]/1OԎGy/^'B! ;j@R2I2a LS.0mf;EKXj}(VAQhvN`cE'KCzJ&f2"4҃+`%\C^*ҟ T JpvJ>pi6NɋgvNQv7볛bsW3Sj̾GMt3/ic)S4+[fj|LK`8:i9_Ι͜B!rf $(;g6m. 
УUΧ1DZ5𛵓>j_* !%}3-FR=q+:(7Fi牢[Y~>ݓc͂jL8i|v1{㑧eދ !BY\tOX`t¤;t>AaORx>'Iry=tg&|r!AzN+€E+]){vζ 7kW/Z9ޙ_YOhT1.}r[S Dn7#\E96}<7]>y-~}Ax^wv݄B!OSXb (RoQ|4q6'N[:gOgsb~WpɈL-E>p ą*2o28|#h0h5ht z;.mt8}vǡf"SDP8vb;[n؞v6\ՇH%~%c8vbk]P%^ah6bOlfYZ8mf]HƊ{oro=ɧh5[t\9c{;O\c 3Cчhl8Üٹc6rݦPO\='خoʼn2d{c@J:~luy4|x_BaNdڗ۹o;:沏ƧJdIbmR.bɵ@?4'sojTcwcP;Sg`\J!B#ݟt1Og]4l%ʺx6kkտd IDAT #/ǻo c@1?E<1:b$tuP>Pj:Z@1kyG+h窾jndҏڡgxso\pҪ}ozՂsx;(gkvS i!4&'"C Nzr( JQI(!mu~$4nf|lf)S'3;1ӯtqhdKǐo0ExcX#1]NUY:xYy+4^֘CðzOɆ>} GDf{ѡ{;>_o;:q5kۆS=͵x|@K|&} DDk/TSZu׺oS)~FM`>ZxÙgkEE?sz2'.grȾM|lfXIV-ظc' S`q9ٜm4Vĝ]6uaߏKW{jhv5fij1jȇփ@ IްAfg*ZqS&c 8 hCgZ)s3^֒%='-o"6.7o@ E<-@ @ 4!@ @ EO&NS'my%ίΥ۔42Reek#Yv VyvȆ%W @ l߹ NbγI!=.wdْÀ 0,2؆7y4~V ʏYbAd |rCl vo̤4\B~77<,IXwqgƛ>cij.~AԲNʥP:wܵ;9Uz@ @ .EFPM;W)RXL120n]R* aRօϊ듳cF/\KI\+qig2!VH @ t=_-@6Р@~85,;v=zdr($y,#. ЇqaT>Nsc ^߰ F/rjyӶ ;w CO<3'l\өh0B3tAG ;v#&21*s`0j~VP}vՇgXa׽ROL_uYj1 a6@Nv~MMdjy&F(@ @ (#npQ˵; gdR6=a fĹYXs#Uw5TccS~~ >icI<0?-̜ÖFX,:x.7[YE={f/e?73GqD:px}~7ٶ{dU9꠳P#Hrzcf}Kq: ӲRh'郈4ݻ1[ x\2PFҜ59む5r)@D@ @ AR!sv'hɩ2bZv5Jskp_sךv\w0uu*ykwזzm:HJ<0zۡ]X=',!s9g I$+C'.'m'Z$;p'VR=zE8k&>rpE .@ @PǢ?K<0u &L"x%%#8~xQ]Shu²/dE;u.3F!zVx8wh;[s=^Web~vp]e a!,Kv&cȏ߱yt! 
K8p'~u֖j_p9(B<粜|nK壕+w,ߧd_jf~w97l3$л/93z@@ @ eHs[xiB22x@ aCblLM2S=#7r)-O6Ox==^!o]6f:2ExcX#1]N?L\ƀ6` C%ߎ>%w0l`W/znؘƁ9E Cv|ew$ujRתAwcPݵ|E /zdCڶ: e, E46؉.3b`{/P>(u!gw>&@ @ e$o 38N0e Xc^yn30^4qt >I=QcwY8YC*@ @ nR ٞv\:CXt )ik B`Q @ EA)#08DΞXQ"HB+@ @ X@ @ @ ЄxrT @ @ h|t> _'gHǩi6<7̞9:+CYl-YW\ML)3yP;,T[7j ^|BJ^+|:<<3yTȉ7A/|vv.|ק= ۛ/c";v%&n?=c1Ch۩ #^VĜgCz|]1m ԗԹrD^7ԟ-8CBoFsٳl]mvY:\l;;[r0E|>Nc؆7y4~V rVRl fؙ?ײs`bI Ԧo~<(qj3_W5l~i+wf }Ju?5aTf3_]~ xw+O͵WN~G&w|#ߜ}=t̀H5O'tKû&ӦoV«,x+ƃں.57(j.$rK^jqiLgUߗ™膌x/,w9$.~(Gܔ?,vҽ^mrˋܜs^|3$b}Vv{ʏB~YbAd |rC{[p##|֧W̯_ml% *nN̡y\H-brA%<'zWr [C[c;Ȍ1vǖf!$Ins?{Gipj*/;G͠rٛ}Akꘫ`+ݮ:_j,Y{@4W^~םE_d爫*,Zc/ì36n0V;ιH*a.KTWXīDI/ށJ;uLd7ȣr]F=?gYe.TGρLnE@il'YFz|.Y˨;9J{3C,VIƖK׳>͆!4~0>1=K'3dŶg/+N1e< iۄܝ{Ɇ,|>o\ʫۏr"ύ)4zg|*5 u}y={VoifY}$eqZAO06@A^] Chj ou@ɋotL?TO,e̞x F@gytM6kHsXG]/ac_CR[96dR={_W==1㝜γ/L$smL@/4|LJB}))tPТUdkc^s}XТ#-YNkѠ^ЕfU%s*f*^h63b28R\KF+ʟ0.ccOџ&W&Wnk˃,l:SuݵMUEeq8/v kC$r$ 'XȠKBU M?FBj~/PE҉<[vdHHn:SuU̯ujyuSst)dRiDK}Wkѯ>RqvqjR=@'NbZju g*1nÙ| pRY )b^K-fLWԸ}+o9nr?(~(=rs>5/:B#S *S&;[[f&!;Ax^U?~^Sz3l{qP^rZ_ )PҜ54ăZR݋͔O9_{wh](Kzb귤vUuW uVڳt:3bHMPl{_.ݠ1aSD&F+CpQ2~ڦR')AK 4&]š/? 
.ꚃޥ?- 71si<9:ͼI/S%e-^϶ٜk8_I+yd2n8vxX]6k%= `Wd\b/2oyfV7K2LݟQfa?A 6u ތƏuHcAh?(V{2mlr-{cx=+t?oT({Ykfr/c}F7b>D]d3QJgƨǦ2}s*ǭ]} dp^ѝSQxlC%Oo$' Xx?gf{Eq{Ķ;V eIZe9kel&!2d#tp\T;NEw7;Uk4bD6!@qfϪ>ٖ²aHܕX=-p/,AJR:NM(2M5׌mI塨Z8¯ƺ<eq$)0ݶ).<ձJlke;H0ZWWnrV nq񠾮U>5(ȢnjΑ*9YZZ}*jI/;2jsuv#.aQ6_m#|D"3 UCԈWN-P 1XG.PZZϞDGBsOnEi6p4cni:ɻ7h8[Zq1]=ٞ*S=g*@"iT$h Ǖ4흼qvotuT.j[gPd2Z'Nќ2!\Y&+mn#Lg ~{zf@G@X* ^@lY|AIDw4P*X$+Mڴ]t &@;Z:]z52Ǔ^JwuNI!l;R@u1tfjT {:s bo`]"۬4/uIb;WL:@T| ."96u q{3غLdQ48I'ϕǁ.p G?UO=K-\\|8N)I잇yoԣ,ӟ~L%?Q_Kי=}\<LIr>rW` ʪq~W]kT =ҕ]rԩZp"!;memA >x=t\Gg]fkd6Wm6Ƣܦw\٨%\Z359VA4MrRNV9V9UCY*}Ҁ?VP׬#$%b.「Bu HL6O_܃s Jc]'Q_G&f=:E_ZQaͪa[hԪ w_ڳJ5s*]ȃJr}NPuwtuW)YTܽY֔)zM><•uo1eMufhqM}oŜֹsQGH@>Qor_$~oLgTXH]v7܀?,ϻBJ&L'@/e9낈0Hh<;$@BaI:(RkDvp|:^ߴC.M-ٯDܝauC~9h'6}"O2_$ m o:gij9gEŷ> aOEoV{IN’RSOkIfFMOtqXT"{]ytJIW7]Ȣ*ۀL]wKlCSl'ae Glm1KNwގS >:c52zѤ3r6[O!Isɍܷ*-/q~**𢃂,j~lG>wt}M#q~A40IęP%D u|.WsɟmwIM La^FTAe]T]YN@Iƺ=G*dsj k>D?1i)rd&ª#/{3Q\HX@q.¼!yYC/C-O9d5/.A4U-VgJ>֜{\8z:a8D]lM^J/E՞%W}Cu]tȇuV!5Suen\uvᵏpm e6KPFObq̬o):NGPwZ6@ *郈4e2ge7JߧC͵so5}҃41r6%h|<> 'L f 6m:BT^u*:S]+Ͽԫ=}flL뇬:r:ZLL{a$Aɽ N]Wv'hɩ2bZv5Js|gݵSEkft}Ɔӵ8b=جB 1_W,}g /j`k?(ŭ@xwrxf}D\WISPnZtW#ͻ]L(na)o)]Py'Aʢ~ʲ2SzQZ*;v[,Sbѣf>?ͨ"IWCQ.jV!XI`u_=,XQu^EΒDEcԔ#579\oU^V)~ rzS|`NfЀ?j\]@z H` 2ctj}Hybg:gmcnZir7?(mʨ3WS IDAT ܝo|?~Yɡj9qKBQe4K2+0z4s Bci%0XY/!mMdw:6;S=4ӫn񟩭]_u1]Y5Si9KRh /ѐFS.՞{)n.N)̟mKiue2KV[S/`Tk ɡmߓQ/CJ060^lE~*8Ebn͓ ,Gb,ޞ6hFukom)ݑԉI]噵'Ҳd8dL;d3Vzq6<.\AM3OvD /kɿr\1+o%ޛK6a3qHL\ƀ6` CbLQrgT[E)"y2Fɐ-TR=N}y>gڹa6׿G,~ă֜t >I=Dv)['2BMdbQأݭAc ?ofoXsEȞK@ F*~_'{y܉Cv!NRn'%<VIAJuz a--ٓ=]-LI  G ~UxFkQFa34:x7TN׫$}L?|L@pTІ8X}/jΞX!("rE @ @ A'L @ @ Pi=L"NٛH+ݲ_K)i6e&:o~:O3{tQF~9nW)fΘk*퇘,8~/zl!n@ x*w.ꂓl}Hf#%^aXTeTű ohX+w^S1'w,]%3i(M-KaG sZ]Su;@ @ Y_]e| l-G93a^Ԭ3g/$mrwb%:NgyfNNȗ?a6S SQ$IL"W:<\8Q wsgVX(9+]gXa%cKYfOZ\ڵ~s≠Sֺ2ه2fy'G4uޮ9%A2Nl乥߲#DBr#VL֞Hb[ųCxxyأr?mZ-8nQz2C k!DI{-3.Y4jq+--l3 Mq<\U=Z>icm0 τie3ďe9sMȼ1YYddh{<֓î̛2URVlk*z.F^]\j\_G-#e9QO{=w gTmF+2gt#!ֈ\AtѕvU~/5͋,ZbLm];cÚUêШU,ovaE@ n6}L4]5zij"q_$~oLg 㲻uJ unmȪrAgFR{Ed"tyn'@/euŀ%ea9낈0@o!!;ώ@g&R$3Vy*e`33^R+ԃS圎 La-(]@:Hu: : ^ 
kFMOtw XJ"Ucf{B$JXo} .$S@ @ CEc؝t'e'~ʈk`(ͭa7c:$xJAL]JGZ'x^lrx7^-id hX=de0KඑnhFGx 8 Зͬ՛NT|FV9tVWE#]cqB>n> h ɢ!Z*` %w/*]\dOxpQ-@ @ (X\8pɁ&ISd2֖dddWwN~L :¾]`יr<.0EXܡlx_b&?w2Kaإ׳TkD͹z!v:.y\չչfѝ^ws!ӹN:uźi)|$il@k ֤MگejK壕+w,Af?HkY߬t;Mf>~b`*S)!mMdw:6;vqf Z@ @P EOle pȘBwf <06LF"-O6Ox==^!o]6f:(3ExcX#1]N?L\ƀ6` C3u_OXAuo&9C E?&Z7R᛫4:^O{<Ġ(=`/#ߴ.~" j\ϠrNs-В)I_*=ٞ;vfxVv?!fd{xɦ#Zc&IسtuWE[ic<__kQ$>] ;}23cMZ@ @P a, ͌Zäz]BOyz#tn)L˹{(="iNͬ!W@ @ T0 `lϿGJZH=tx +nΝ8JF:$ˤNJx bq1 E@ @ w. )/jc'ڄ$#@ ߈@ @ @ ЄxO @ @ h|t> _'gHǩi6<7̞9:+CYl-YW\ML)3yP;,T[7j ^|BJ^+|:<<3yTȉ7A/|vv.|ק= ۛ/c6;B;]Gʞ!ԍW/ NbγI!=.El6OK9{"/śI,?3[h9q$w,ޘf2u%ٍgYpb2laWe8ՊcY5YKųd.+cg kD#\˂Ε΂EYx$.RxpLjǩ_;qVܙ5yk+HSU|v_2ݭ:?ɲ{ k6v\9|sΎ>M"0#/|;?$m".>LN~ZA nj,ּjlޠ՗dc mu"NYrg(T3{BۡOz";opMszەYCK u%k(]3|+Rb=5qZŐEK=~,Üu &j97I~>ٺX%e)ՖG9[R^jKnr+g#0'0q$ٷy!4rAyA 2.'4gUtK`zyn2PpI rS-H4REK:&M?Mgh.u|;5,;v=zdr(Jc`?6:O+pZF-9VZ;YbJ2L_i6 =!_:!+]<{Yq:Ga\-3._t(N)f^HN&K6 u`}R^~ynL<Ы?V΋O?X9سr6s`Ex 4,>`28Sg'c }.]g !4h5FŷJI&JRb'fO# i]s K:G&5$wBy,#. Їq!ǭOV2stTć=/+ޞNNȗ?a6S S[zJys>&%>f:ahQ*Xe߱Rnxu9>jBhzHhё,gY'ɵhP/JD΍A9b3uV41 x)%g~t 5;~ڇ42rs/X^d1`ӡt,뮥n/*y!kXPj'x#n8½F^:Jo4ʶV{,2Nl乥߲#DBr#֙ZWf~ŭkTTλ,%sK9'~N#ZJ}X~F/SoR<tZWSc8Sy9v[LheϷ}ȒVyL^j6czվ[QgN~qܬז AT]GIT|-+y|Z ZW2٩l64 y UJ~ǞJ3Q>gۋUJCJ`NxCRJ0dR6=a fĹYXs#X!E]s0ջ 1fMl hܸk;ONF3oTIYC׳#t63&>jRJY'<$FE?ch?{-ҀlOi^ 4b'sƶ/ײ06'О3͎/-NwoFuVoF+2gt#!ֈ\Atѕ'{r#O-:Hs|NI]G:{v8TT9ծwVyI>"EkX>muo_ \gl~IcV43(YBTJxkjU棔ߥÔ)2V/OirǹY}c&Rݨ#Z3͑,PMS`^bMQor_$~oLgTXH]v7܀?,ϻBJ&L'@/e9낈0Hh<;$@BaI:(Rkui?r]l[=_K!>9;kúć'6}"O2_$ m o:gij9gEŷ> aOEoV{IN’RSOkIfFMOtqXT"{]ytJIW7]Ȣ*ۀL]wKlCSl'ae Glm1KNwގS >:c52zѤ3r6[O!Isɍܷ*-/q~**𢃂,j~lG>wt}M#q~A40IęP%D u|.WsɟmwIM La^FTAe]T]YN@Iƺ=G*dsj k>D?1i)rd&ª#/{3Q\HX@q.¼!yYC/C-O9d5/.A4U-VgJ>֜{\8z:a8D]lM^J/E՞%W}Cu]tȇuV!5Suen\uvjV39\:3C e3KB3 B &%\;6Xn'=Hs) )m^Z&zRɴ`m#ݦ#$ЌNe(UH;:<۵;NkKg~Ȫ# >lИϴf0OI$+$u5aw2zН)#VeW4ɷ]*=UtVoFKgl8]S)VSJ=-Xlu7{rjGx 6#Rܪ$эw'=0jgf OtHu81H%LAw5{:ҼuaKτq;<2d&=$J`d! 
—uem.eEeQAXD!ғd"R2'aB]?4')2w&;w'y7zmg7"ڱX7 ;K5a?5,4`|tbM 3~I_bnlNZԏƯPuWQ\bmw{\_ByՉ:+4Psbu<_Vl<{Cu3soGDmn_o-PUaF (y+]G _AܱNiurž\My9/H>usAMyw3]garD=62z gywy&L@]Y@Q`P" U<#8ԩv:R{ [=nHk Jܟ~2'EzFZqoy đAb~( Pȥ1xˍؼ{*#5]Rk3蝐Ô\*W"Kž)nlٳn1_8ruq- ;DzÙ|ՎKky x{i[>Bo=UPVaMQ%AL6a-5̿w!i,1K63g=sYu:}Rk6ƳXضb1[Ӻ7ٶ%YMx M4s7_ЊX?tXl1G^/*>m|&PW(2S\Mߠ^O?HoyO=2Ȉ(.X:fرז-.'HcxZVۜʭY8Gi6zky}ӵ*=q/2Hɶ%Lj/e߱2H%L۽o?x-Wxy kAlOO T{yōmqӚ#"<l3Ŷfܠ3X3s:s 5\΢c/`EjAUyj8<[yut^T~DˆG:htǤn|w =(x N~vʤN&U<~G5?a@|:FtpѸ7 =n7n9㏠mOgԈ:}xe2x0%մ_9D6GZw8'ȿ]$7M>F l4hEù(Ҟ&YF,/o-Gƍht`˗<2m +C82d0::"m1 =*܋2tƳ98iٹ#pjvE_FL,=#{mZӠ^ i {p>vWsZFe[Wr*XͰ {O)9htf/;l7X!?,,1Zq5$Eg_T AƎ]\ftT5ݗˊhշ5Osc{k*I]<<zg ==0La _B :_uuk˹bdr۰ӻ2") k+P~],AqZr,unsUg{p:\lNk\[GЧj=N)Yl z/hׅ;# WƚbvWS8z4^aϻVq:&.uXar sϡ֗3wyEr99gv;ڹqޖǴ|Mm$ϪMú mtܩ3`^Dž=#>gT%K1GP|KWݟEsd3A-sўdfMOעvZtEQq7h9:L@DDDDDDDDD"qܿɫķ[}Z""""""""Rg~qO .ol,~6Dֱ.tXDDDDDDDDDΓOOg'J.cU{+rMK~t*-Ȗ\1`(wD~67/Ex?7wo"w Mǻ*EӶUW}{;C|}A7W1}GH՞ISgP0nybLffDcYٸ^77_ȦrsUs3gٛ]f4;ykXs ˶ʸ>#ޜǢcQ]M̪˟Un6ekیQ FOco`@v?9yD9#Դ$\N=+-a]OE'f@y~.^:6O TYLn Q&1ٸ6!TщȌd7)1x +D86#{miImڞ^IUE@Es:i#I; \lv1.EtфDNzs9+,ׄ"`q~9Z/ZYmEn%Z)""""""""'y2.PaO76Ykx / ֭^ÚJ8l&݁G\n竂X6wNwLށC7uvCv|6ٹ^y[`aw,bfO< 9LYMQOYōuT"'iݹxf"wޘ6UCn/O[N/ydvWp$4eatt3j`)|,yK,~g'ɺa*q}䇎8{?uƬB %Irޑƻ;|_?1韏{d&oDDDDDDDDjK_"GMǬ<G+vP޼V$gWڸ~:1 6\T; 0̍ddLCqWɩCZDDDDDDVw\eCގlۯ_^xg6sէ_s*T.ᱩxWxv.m}g6{̳=(]UEͯ`ʵ}s_ vQ7͝5,cu24Ɨr;灮ii=k=)XWdl՚ķ${XMUnTmmqYi"~e!Inӣk!-f,ф;7V7t v8}>~NM:a gKysl,7]WT.6QI[ؗdJ۹!E簲(єov؍ڌoM6FT8Ӧq~[Eu* ;NkAL\8wv+ܵ|c#Ӓ; kc;Ѷ\l5w,{ ݌\f'/{ o.aٶ4^gěXu4}!Vo M{w)qz 5;f>ˋϾDvmccֺhV-iIZ̿[m8h3/ggSK}9L nޙI*˂=PqQDDDDDDV{V03?nhCFsPunLs.ΠmB mpu|V=h9+$= .v#Itb*)n7q$%I ZApөGY̮t~\kP_/F 2 =HmڞT`zNG;mФml3\b<ƾҜK0IMK( .-H66 LۛK^bi(S'}mFZm\BxN+.Y>/tU\ܘK}÷qͨ>Lq5r6!l.ZڜDy%Θ>[}m8jx2醑D%[k>{ȯƜ>G bT|}2̕X䧯|֖:iZϥ+H-yFl?w.8uҘuoQDDDDDDֵ0/B.v՜9{ӹb|>IFv0, H/ydvWp$4eattԦa߈:}xe2x0%ն}=ݫyjCÀ !]{W֮H3wƴ93$&`Ibu Yʃ?w[x74@Kp,;h7tJ=b[(>Fp_RmZS IDATy+:ƥd;~1z𓦗Rͼ#+G,z3sU-g'Y|>]rAӋ6g߈!WӛӹoaOjɍW;x^^) rķQ!"""""""r 2UedfЭ$' 
-ZDDDDDDDDD"EHDT\("""""""""QqQDDDDDDDDD"⢈DDE"""""""""EDDDDDDDD$"*.HDT\("""""""""QqQDDDDDDDDD"⢈DDE"""""""""EDDDDDDDD$"*.HDT\("""""""""QqQDDDDDDDDD"⢈DDE"""""""""EDDDDDDDD$"[{yf lmޭ<5L++c1%,|-xZɸϡˀP g_oeoaYT8g-eyfgכ.ar?ʿͧ[91kaBeɓiv\fa{t^^dmbb}IjfG~ywbsqͿ?Ť]^\w&;n~ _CgrV|@1|2Iױcі_U]صv98bnc`ZY]q­77o/|4mfgIr\;r7v/"""""""' ?7{NKiv,ŻEӸ{v`HdrW~$?v3oV8[qkL>7;/>?~o,cW0i!yչ|\Ž:Ӽx Ͻ!+6JNDX2loq`SLf5&u)<*Í8B~*5SZ)DwZr9hR6UPanq!UAv blAT{˪~lǗҩ@>/|Ϛ"`ܐ 9'vxM݋YX#˷Fa7Xc ;0gpݩ jnN_krh]im5 n䂰au>]Q:%z Qڅ[V$fʵOmXw=("""""""'ê^sy]'lf6_&Ͽ[moIb;`ĥ0cs=#g8?# `bKT}o-bdž+ ${ %.,[3iA/vr3[K:mBE|%]{]_QѼt*fւn=̃mqߙx@ \ۇQ~U&7~U-=q;R̃'lp^lɟ؈Y#+fjv}z݇rapYg0n^ZY obϲp;4n lV}(jzqpXqшYF<>[рm7 u6[ř㹯c>6R#8i^f*z9v'c㷋L G5ool_u7t7wǿdgIp*X>m^^ $=>KvTodyjz+ڔoo >dRȩn[LJCk4q %X>;&!<;Rxg"^z#a0gVV|ǫӉ);xWywQ{chR͌^yyYtYv/عNsi &).(h` kKa wz[ @/+Rw5lcI1Q@~^C!A/}Khq$R~H`dzFY4YjF-|bziIeylrxy_RA x|Iz4-^ì]?\nLQ 2g/;| k3<8RH}.ddNaڬdU0y,efb|dy] A:];]̸3z:z9j_\ y^DΏ3?0e&5q5|Ӥ}tr?[ugƚuY/=VSa%$ؑSF&HL !IILB4PB?Y3'UrNqnMOpl"2(- ]|qG:yim<{9&AwpssJB8ω tYe~ ׾vmߋ'b7kNqǍ“E&eB !p(-L-|nĄ#-̙? sE*L溛żR9u U6- Fl~9tEE%#xt{GE*- $lflZ9 hئ=M?Ly $yb$vLEkPqVoW_f#NC,?.YO. 5aE֝8c\^?6 BHRWu^hؼ>q"< i_VąWY8:+d|;)%|ڃ& _+`;Rw-gߤSΜ_; Wc4,ϗ՝t9,&f|i2+Vxe$wi ="a]-9 sp@!_- 뙺pAp2_,oNR"5-+ɯ~) V{1twV]DDDDDDD$`{knCl .ތQW4Z"=u}z h廵kM耶Է(೯cc\uvc;kfFİpb3\-Yg puV[IdtN|is@&GYc#G|&U6e|vߥEq` oӗsx?"aCN~.!3*Y1e,A<|年uHkߎ߳$.^ 9-9݅,Y-s~څٝGϸߖN=,w1?mKIhҕ{F\J;qf!9?$춰ZŮ'5#gFlIiж5!tJgߢn&&kם^Mό(nכ GRS:zO,KZ^<87&Ftu."""""""r20B9RzDDE"""""""""EDDDDDDDD$"*.HDT\("""""""""QqQDDDDDDDDD"⢈DDE"""""""""EDDDDDDDD$"*.HDT\("""""""""QqQDDDDDDDDD"⢈D..OvQl)p*#l!P-MzŞVI$X"riL'H:cܗMyOx9qt_{Jz~~[m sy4D%"""""""'ÊnzKB@z=5z?6 v,濫)(C]V6zrgx"""""""rr:ja8lfq`Uѷ'BIN.]ƪ"W0嚖T[-e MbP8+i_.Tw fk1Ikqi _^xg6sէ_s*پl*C^^Cv0 .CD߁ Qcn]}=75q>]+Y՜Q m|}A7W1}GԪA ~Ρ,\!w~]|C˯Z"Hyi%#qpSIq'nRRcV-\g\ݣ 00vsWӀ\)tlXsd˳u]rd%)I8^:|U~0sh 1*ӟxۺt*'M 8duj֕ŞhC㱑PĄg&0נp夞jIYqP%[#ow<6W .;A_%4ib7 LA0_QBKm.\A˼Dz 93~8eի{Y:emGҳwx,c*0G;ȟ: @NZmr~5%Nr%OUy =T9=ō {-_k$l.sC‚z4f}!WUos yYr6U)ٶr<#~fbm͸Agft֮ߎYUR'g;!) 
) ]3K3Q4*Xz k* bⰙvo|wZƺ.9,b~gBZm]5YN;][.'ʟW &"""""""'h5H3wƴy#X6zG<&;0`OgxޛN2ㆡgGЧpa1Hn}:xwJF /13^A=k~ўΨyu.`daK0 `˗<2m +C82d0::_6r|h} A$Lb96 ; K"""""""""[\ӏr&KV2vÜ?^]DNPI9ɑԒww> o (rʪ9}ؔH`""""r4⢗o^Y{пwR˯y%\G=qt߃|8993|QN-Hi\&OWgd>,e59:qYX8ź>Us-. 9'qsdgm7Pe.3%{`[+ c%O9췼EDDD@GIbѼ7(p߆6*e_JkP5Ĭx,.vҺs{B?l;nLm6mɧPҳKȃL#IEg_reqA9f}}Yjҳ)k*ټ|.]u~ @'PUy+{- xHKv@E f,)Ϊzl.\A˼##g[B#Īщ$pDYί2v[AMf_(dH^5X 1{)w$K̜ $V \(ZgfMg5Hӎ}y)l;yռ QߝזN4;+-dRnywL9:6i,c/dqC"(6~""""bq&$.GP4dݝHr\emG3e,Zch@K?s+現3cUnၧ8BHwX_NA/6lq[i@C$>Ɖ,v#Qr[듵|alF(.$2xo*+*נ9ݜaGgolk&!䷬9JAV+<6PcZj/փUB/%@ T ]'xEl.pS`y &y)%gSLGn$xytBb=j Vqwl:q7ID&]߮`ӛ0XxjHz)] E1{a'H=Hj:xxbϭXLCϠCH1{Eίn(^ti393> ODf{ѥw'>^o}$ qhhۦS=ypH[f|* jEDk,/TSZu׺oS)IC;IJyf-h>ZrÙgkEE?szJ'.gsrȾM|lVݰX-3ntP`Y9ٜmL-=2uaNWw)d;?~9^3Ռټ,[oypJvٮͰ%.3i6-@ @ KE]p2Hwyٿ#ێƶ|L}Q5QU69wfh?WEWvON559KrG9<?dbIφ+Ј;'^]3 07f=0dR.ҽK}ͩլ@ @ a\/t.r0K57Hm(鯝-%J)c3^- P>V.gC]z1g4~7-%ךv+X}#Sc;@ @ *j1y)oko$Pg!X.ԦOߡ<2I儮L}{˲ ɳ<:Ya5zSmr^unLo0SEN:8O1{b;`_aHt;c wsF>} Hcz[mԁ"gM;'6m|˝71jϟ356ZD@ @ A%q퇋g_lO=gt$ %VN$4g/eqɌVyP26F~)'a6f2sG^MrG`Ė2L|VV7G4vǢUTpe%e<_v̶<ࡱ?I^pL Cn%W3/_|՝dz5;{&pXٳZZس)#.#8f+c@PSl]ը.T%4> λ5Wt\Yyyٳfҩj^5^wm!u{̷{_~D~r*;|sLM<}c1CحM#~.8yO$Sӻ5`K;1} hi<o.é?0h;AF^v,zMPgٺZ۳uL+olge8Պc^X34k9Z 2>'uWuaHd#[йXd)dq0 lI859|9+t6괕;m'269̗mW)C܎ì=`smUߑ?ϯrg!<"3 ;dGIr&=)D盭*K6ފ񠶮+cͫ5ʃ)O}/~Z\8Xv sX&$)c8]嶋.f~ {bWy~U:ǽ}%7B,pٿjU^+_xuοX{o44\^O8\>>Pkl% I)nNJZ}x(*b`?]6i ‘_ ?[ist*زv3sF6f03$P$~vg?͸|ѡ,Y=w1[P{/0${弸(' ݘB㹻`4Q)30pgso\ªAqfE؇LF'u*}I1l11&Z VI)DIjS[LОӡJ^iwh",|MdYhdlS?W&ᄑeمe,0=)u% njfo;qZuRvz991t"}6 :y*SP ̝>鉍qs4A.JV9j2w{ek~hͣ!MW|gIͲV:7 TQֽvlDgd0~⥸jQW?a2zC~`Z\\9'l{ʱP:NQuR7U:5KN ij#yn8FŔVo4ʶV{,2Nl_MĔf=>3\WK9.Zתȩ[wY[75HrN&]FwV#w^za,gC{"{8ϴZpfs8Y9/2м0j 'e~Zl {ŖLunG例Y-xrj"iۓ[.W=W#1a9e2ɥٕg&e22{oЫ+{*]pF/nT*yKQk]J}xPY{ʙ)k/ ~IOL ?ઇD>zUyU,?BX)US7ٟ~Ɩ :4iNر4;iQYhƟytgxwI/w+G'i󗲸dE{ˏ.#5`CR [ЎqW4w|f穕K7}t7gp:~N'ףܷb7MK'A+%O`cNL(3~$~,wiiEw||Nfpdg<,V{2mt=fӔx9?+۲t?oT+{QkVs`cv/c=D]Y,d3QJgRs~#^?Ɣ s*ǭ=Kʴ>͝rqT/E{P.G?+)&xpֳ3Ҹ`[+e$?zO>rHضp.b9>I8.mn6cl6!@qfr|9-ȸˏz0og &^\tQ*dkj~K,ߗ8GcC2tt] }^qM!}v(ZY*8ymW8Lvծ$互[VK/|pnfe,Ih6iÇ%|a^_<(>VxZ 
ZK(J\@]~?*~0(I|O)ѽfY3N`tr9M'ysVd[0-١ךP4Sz TYC$md={`ƃ׽}.r9[]헮Em *~^R?㗔|-33> =gTyt1v3f9#tǹX{>z&QǨ#v+Fő\yC-:hLmFIɴ r[tN!'f$s9^1p#a:1;?4O:¢֢~p­DDb-J w:DE"Yiѡ=1Ij܆0Яߞe~R"pN Yّ"HIoQY:ֽ8dzDYE"_bΓvCtd:C%S=] [SKsxJzuȢ? 7ф,ݛECؕ4 IDAT]p!i7x*kyڸXn;?;8$Hh`ŶOZҸSF4%3xع䷿[QVUSZ릪ty;RSmEBv(*<6 l~3i.έ.vQjdZFc[IЉ٨%\Z359VA4MrRNV9V9UCe*}Ҁ?v]ZЬ#$-b.|「B D6O_4ۓf?r(dO#rLљ(Kib7򖃤7y{dB|J2AwuO{נqo_~2%Y0`I5djӷ0hߖ7PЪ=u0ӸU#'g89{ KI==>%5U(n6=?a%S5Jt)&]I^v#RܪO6oCڒ5n[-ԁޑ8f1Nd &L^R '$'}Z6eE]Yy-y[g ይ<Nx>$s&\\YX-)HvpZsqsi?|R2v(2XP|/ ^YxzG:T׵K|Xg.Z8\W 9PuZg^z׶Pie$ ƖtpgQ SO%}fR "MDRps` #<9~-(\- h3dϫ .Y,@3:W"]DC D8S/-jOo".k'b~+VxyzA< $Sp9@~ c-Lv d'Ud}ujgOuZNT<.&c t 17.|U͞弜ZQ9k Ȣ*nś>0j"OtH(p81H%LAw5{:2uτqנ?Nz~1tn!LYԏWYWW{]/JkBse%59 P}jmyO!#y(m4E~7+|JYHۇ+j񠼮>ӫYwhrs&j}~*گANx9W )PG 빋*by\JgǀJjLG{4y+72'%Y-GZB:.r]"~\SC~zvMZ=5.~'U *gCAGu1f ^fPbBfzNrQ'dw¼C3,}:@L xmQ]2En#u]ңuog4DLHEVW-ԫ7q(6(Švw#eO#e>Yfa!!O'/Dk+ ,8tɁ&IϷl:I^fjkOUuO6|noUVonhOHxoǏx޺9=ڤzjOg ??nc#GTג/q[^{z?"#=趓3kVɿO1Fu?vՆPpYt׾^g M$ʡXe?3R?‚O&]\Дc|}6CwS=4ӫnٟ]_u1]S5Si9K2xo*+*אFSО{)n.N)ɟ_*cD:*eyJkԯ\'βli Ąh_%س ] Dr^9U90x򐮦u}}Ğ ~?}b}֘xК.E<:>zH.ϼӘk(EgSisҤ] 1Y?5"dϚw@ yPyzտ*qsD\ȲHOU~볆E]d!{2`OaAxdFEY#g<F(f{D ȃBk*ՐND?1Մt N/ gMߋtV + )Ǣ@ @ AŠ&@ @ ZEO>|՝dnΏ_Χ׌i5c6/e8:O3wLQIsy+eW)fϚKjiL}w /v\I!n@ ljw.ꂓD2?Kf#-!S_`tTMTűMr]@g/ܩ2~ݘw][$'y--KaG {_S o;@ @ ^_]Z(>֬CpԵt!^q^ZUΎ /{O=# &lw> ' f 6l:BO1\d;9EBJN+ں>bJ*紟ʧtyb,nv](ạA7Y4Xe{,!X.Y%r]屋_.@ @ epI;xCxaL$0E;0׶%%# 9~x/_]mc CXo:I^f?lhaEsi^*:('(LQ/TKU?g݌Gٟv\Ju^^][CoXNe3sl RH4لK/ƒwԖ{WX߃l~BԶ.Yv<b`*S))M,lr1v'W9am#D@  .s+13hR15G;6LfaELGn$xytB߂%l=۠ݞS39b!,& w9etPF"2vvCބSSG#H-fy.睟.2CxɝsFG? 4>Rɛ:^tO gXW4n;cuϠrNs}Җ@Z=ٞovΘVv?f8s W3gxΦ#vsLسbuWE[ic<_t^oY$<]~WfǚD@  $y&YA+s[8ɟhRtt>(;DSz].ǸfE-Kdm4o@ a&cdg=Ϥ3B?`܉dߐ@bL]'.fH 1X@ @q@ q"xl&<0dMH8@ @ . 
@ @ M@ @ &v❹/1y1mtM}V$s <3G:T͞žUOwΏ_Χ׌i5c6/]jU5bFMw*1WXt-ᵸ:]ϧS̞5NUxagkc&ͯ#f}?&jcS/t{158dVc^4d=LO v/dd\ddNrW)j4[p]k{V)#~Em  >Zqlӫܟsk&`-}T?[kAB礮ݹ.cD} :W,e,λ Vă1;5e]Qn17Xrg1d_S#]Ʀ22>ețuG>l*;2'}P]3G[D`dG7;ID\޼'h6|^e[1ueyؼFyBu/<_qR+tSŪkXx 32!M{@YTr(O.(]Pv.6CeS{.>+ԷUޗ/i*k/S)#V|־"ϝ*׍IxWU;\>>Pkl% I)nNJZ(dQsoL˓+7gu^96J)ʂZꦪr9\Bu A2M?s$ 'XȰA M?FBj~/VE҉WpS&s zUy%?cO(ō*C%Ox!j~ K@IsԼj9Ku/Z9S>|eߡ}vQv?<\Ԯ_5%Y[+3,I iNڨ=^] 5^mY,y]1`rщ`,7q^c;o9:=aC3z-lh+;O>{NF`J_>Xx?'Qj[Ϻ-KK믾MzolF>\i|È73Ӹ3NT1# ,k-(JDT bl$KI y[iHTt-,):dmhp.53ۓZJNIPt!<;R@) }mYb35=\;ٺ72{L6HKlyn}N2LZz{ uxjI`o@CCI^YTg&p{(trhV8.9Oe#\mg>$iu5~M AY)?Q_Kי=}\SӻodAc'G*dsjS>Tf܇( hץ :B2,"_+,jݐp Ia|A=in#xRyM$J>+C &k-Ѣ3rۿN'EJfKx--q~d(*,j~lG9ۻʷ㿌&݋. ,e(Ȑ"""" (q A Ȗ=,-M3󂲛4m|99};':0 By_Y7`ЄP˚DuPna Y6cVwZ,!d ٽ3` cXTu۪ǙX,c1>ݝU">qz~b$+5FBde}5dyDA<ƹ sb>W~X|R'zP94\69#9;IQ1v> ըk9,CW2)g> z8 6Ŗɣ{ؔ\[LmI:6 \uF}`Zı`K,G6<#egN,l} Cґmlv6]mr6xԢ+נ󧢿CgRt|'{L9J6G@Z :-Z`uWl0|~4S Hde_ubE{&JsnjVƛq(vOÖd o$+B$;ϞdԳ,'*фzL =3?_ů2 7N3$"6DFzk׮fGeچUے+6;vƴ ];q o4S\&)%6j VEW~:H~ak/yP6 wyJcehZ֏HJ]4_CkDcp`'Y]TZPkjی*qխz,Yc}gnPNy__.NX.)figbq•i HdT-X9O> ߍn _YzC yi܋ބ.`i|E`D5|ocR5)gԦ+CKЮP*FA?xg, هFw6)6S.c(4yH~v(s-Ȇi;&-%0j`u!F{{1}&CU7_H;X9X ^\8uZ<>d@3e@upn H&Sj ГNש_tyOu)KaԓKy[r{LΫޭq,ۑJvՉ(TbQ-mW3E^5Kl` Y~FE޽8(ӱ{ӡ<Q 'V-6y,<ߧ0z/BB)B[rW4j!yVT8V5͛2HB%+΂~'ٹtl(}ۈ NMoԃϻZ# l1L/S"܈g͠6Xޝ ˬ:Qeϻ ߅BA_%u2I WsIp s}zy_u41EɋW>-B!B!(ZIB!B!w-Ϧk'K^d;m/B!B! 
_\ӬM' bqbt:,%?D2Vtڗt ){?J+jL;>b3a(Tc)u>5;έ批9i"KKףz4 8m|4dsR'MZ"2j@:8H:tov|iJǯӫY Kٶ:%lg53(O}xFFdkՕ^+B!BQL;lܣ0s8$$dQpTšifGjz*|_٬ωq_+W`ֽPs"kk[<-bcc,\-;p߼w͌`괥${d䮋 -[֭Xj u^ҝn|{NųoEo3nwͨg&`9grDb5lm$m}TV]YnVx-5d2d\ʓSehN!B!B;i} ,</76y5gRyLYIĦxPb0Fri7˓2C "=fPT:`#UaxQϟބ`Jf,tJ*T.tDV6̓r5^4lQFjQC .]Yn|G6DxrjDTmAws:&B!B!DQ|\zQ߃S,ܵ6H fCQߗƋ={o8mIvǸPYɤO0ydfhlf;,{Z Aբj48b Qߧe3ν,k:B!B!Ŀ-H1~iZ?9e}YNE.#CUcbx*f;jw ~uxcc3ޙ@j뼈-) ;FRK9ԶnUۧB!B!ispl} Cґml#(h1h 0֥>8 6Ŗɣ{ؔl/BR7pCмQimXx3NREUhǂ-:lH:^)B!Bq Mx̜Ƨ^FDQ7'd)&v2LfsMoVQGO?xg, هFIc$2Cz~nƒnZ=>֡ț/Ȇi;&-%0j`"ș>|m4+K^} * !B!B-4ʺuA!B!B_ZIB!B!zIvwke֛:"@'yB!B!7E !B!B!"B!B!B\\tei=~,ѳ a='i-Ϧk'Ż 1Nw!B!B9Y\qbt:,c͜CG79IR ?;^ ̚27 os;cxfDK|'L*W{!fyct\6@!B!tCErZ F>GV dM|bglq E]XSNji">7QYd&o5K !B!.]js&t=GcG5/'zK׾bҢ5O}s9Y)~;aM }@yzu 5УlS,NUR~FVBI)՘v}Ĝg`﯋82H޼p<΍Ky4i ˨= *^ӽi *zz/ Y=2`_ }GX0. -{^lsxd{5ޫh;gy(5>YzǛ ïMR)nM!ky`VQSMź{NZ>VRa6gҏ孼z<;=]!U j ~JIaSڨFmhb~ dvB!B!\p tgߞfl,{GSm̟Qdpu0XPfC,l]_+ IDATw^^>}Ԏwy ~p'Y; T5n>ηN)&qs͌`괥${d䮋 SadY):q;coF=3$8Η'߭ak'ikLp^ẜצqaeWoBZ>/lĺHn"0~I>{WxA:,M |d?2E8Ub-VBswm6>[.a6}?:oM#GX?5E5?3/@B!BKYT C%D7Z d%ÌV,Xp#lHjj al:E8-Kuũ30ooMlo5mr%kH%JW%J*TQtD MxѰE]*uP5t9EJ=Ҝ: Z/T"Xנ{J?΀Ґ'1_j<îEk^LŨޯ,.ص`"u9 0z9z\۷w9ߧGx[bCVg;6aк8,<ӻR~|?AN63䙓P턖`הi>^g,#N[]1.:w̞N֗c΋p/if*x9_xV4ZZ Z݁X8k5lϑ `'IoGwsMęt{z%S*z=}i(.92OA̴ll@AjD3aB0qNR5z^9uƅ]eݕ~;Mkxs~ ⇺9m4QY7bpX7_SV!B!pUށяگC+gS1Qs< N釷4f7i1⡲¢ ~գ]Oɑ.r7.r^Dxٖ#ª4hsYX)=LJ6Y}74/OrRPtƵ䞗(F~^>D8;7zwMR?R"6dAWfK[S˞EDjr0F;o=X8a| B!B!HVoCh]:?`SPl,^@EP^=Pט>5ߍn _YzC CD3^5oL~y땡t+NPl&.\fqmބ.`i|E`D5n=gٰ?22퓷xǤ%dF GՃ/;SL 3d8Na@y3/NR/c3Jm ?kk܏3%8;6,D)y4 [ևyy#5mMq9MjqowmړXeUM݋:a|_G^q~c!B!B!BZIB!B!B+6 ֚67u7EN$B!B$B!B!BE>-B!B!p,. !B!B!"B!B!BB!B!-(B!B!p,. !B!B!"B!B!BB!B!-(B!B!p,. !B!B!"B!B!BB!B!-(B!B!p,. 
!B!B!"B!B!BB!B!-*HB!B!QE4!B{NvJG5Y ň|,Z!B!BY\B!B!BEB!B!nE!B!B![dqQ!B!BY\B!B!BE}q1cϵb}!g>0|E."G2w"_AQhqgafu&klŲYDZ_4ѹ曳M<>20iHwZ6&Qz( \.y^ WB<>%>8{|E}|h=z}1KMcZLGfqĂ2Y2HmVYʭ gn=v̟'\&4mB.%G sIv83#?c/tL! ǽM[9jVrݟ-3>ƎY`wKDΙgرD}wbs\lm>O=WTy8o[%/Џ5ߗ蓎46=u:|xkNqnO<3m}.&&.W_.~J\J*˿͜koKkΣT7^zHNr!wd}V%귳l_:ITׅdS|^'x'hx͐d` 39л̝Zsl?wGZ" l{ / 艌&~k>Ơ>#yP)kZ<V7o Z{[?U|aa4;OKR7+Qo7qˆW1zK`Gn4_Sp\[2de";c*H9`h{uWUHZ;7܈3N$ud^i 4ˌ&}Z眏b%at~gw$.[q8ҷࣞ#c.fջ7yvh3rEVuͷDH+}wJc֫ۄ(b_y,O6qh"M|h3{br(&uo\2OXxM緱-*R?bnji@kVJS*8H?#Z@@dc4|:w잿ȨQR:T{Z9g3>(ia0:U̢إ/;ژwsI z#~F|<2é S``jKcuv֗N]F-~'9a1'Z`i_ھ V׬;^*^sW`o s3տi6m|o5}cqK8ٿBLU7eէq*?/}S|mo/cs#]0}\k+ӎ.zG3n|I;9{9O/c\vNN/˗WʧÚGNnXr,]R|js&f*)Gm+*m| ^}ӖvpW`GԶ]-_'gKi=ilOn5a&=M.ٹJe'Rmh1S#$d;~O}a ~'maG5.[ >;رc;L@Xn-Ww@||Oq!ǪwD_35}iRb>_@ǭzU:^1߹dSB\2N\Y}cJ%V7Z}Grׄ֟OgиnTCmPi__x.L\{}_#˜gpo_ED·6<rӜz1LCIS`3?5`KO ɖʒ fSoëXkЊ^Hc~++s;NV&U)@UJsi)Rinj}RgpE%s4}o `5րo|?a+͸x zi]˖8,tNVKK=xGz͕~[.mZāv ,O#yv-rn Ea@rGH"?N(_~TPЗ͋&<-55_bNR13sq<]xW/ K酯S΅Y8b%-6c;x>LBҢ D9/XN\&88}ب4 b7BnHkeƧ!;[5#r,;pMds_vA:aG6b&\WjrxяҨJ 7;?Ku?pu'D  ;KšqŋcL3 G֭x X1" f ^axe^$-Fx.~ȶ4G(]<\cT[w+#Nν$ffJ<ΓC{8Mi֯.%٪)d /,)8iMN;xNjy>f4m˨}0AIw>=Hݴzp?c;.L~kwƀ lu%@1} _/L ^`60x/7Xɰ"i,QÜ!wu_vOʶ_eff~Sv@3CNh\r;SGv FҶE弋b#Z9g:/F핋QVt7TGHEB~N,NUmf|͗odJRI;$Z Yů4Q^75c¯FHwhІnٷф_Pޱ (C!ȉ 7׍օHd&a1ŲK|uZ Zm,m6a'|KLϐ2,yl)c`Eu?yv಍ğ`@hP-x$E{<je_ a (yb3cLn (xvS %哅 | ,,0*d'^?[̋K2=|Ѭt67娫mV.۫1W֔3N9ՌW:%8e؋(:ѽk4iWO:OkI3a| O|jV%V\\N59Ly3^5 uiR᥹=n 2卶^kkղwqG5rcKJv74b'StAX/$ OB} *בq<&#ȾF{0a|*m?[PO&a% =ud E0/_L #-с9уP9K n\ʋ$0%I/F "]! 
a&a%}<]ڜKV=9t?yoF~m-੊W-'W31%x쬂ڀik֚Ȩ4  ?2PzmCbߧX=T}FQdL.a^xQ,zg|5@8]XɴZB dMFؕ1FH\L0_=;S_"v{_FLn/&i5oHN?W>X,6IRKT8(H9ߵUaְǩ#m+&vl9IxΔtnbe?R/a5c1jA1e ^FKszOk=| 7Vָ Ks?٤gkoуXc4yɎe9~3 ~H *w{<;v'@BJ6_f_ٕhn1W^Zn?dXLXn5T,ܝ@F>MDB%cG_rN/j !'A/lɘ.bW{n\ʋUn~ `֪ئ\b9\l*y=P;kfx%.ng[s(]m^ ?}o <[HBBB !vWT^v3UdW9I,PS)J8ȁ1B =S,^lEeIDATƓˮediZ b?LeW\&t^#=îteP7y /5 ǚ$'Xy׊qz֚~h(Cjy11jQ4 zS7xC/>FM}*FЕ|CVb۪nom5oZVOCء|A#L=rdLtҟK24x4y[Y^gP}cvE o^yPn0PxDqorVo/&/U9O1Pi] }~h iT JvnmcKr Dz]k]Ztc|yQBz5; te/4Oa_@q:Z >\Ơ1ɓ9)(oV''uaJk?/F>c1SzO]M塩9eU\:qt~Qݹiz.psa @Lx{%S+0-ݛsѵi6rsF}Z{Ҡъx/y$#>!C:qtm塷rxwMMpI>Hឋ:-WNKpVfy\4.hHȱO9sdI;&5kyX[BIR25~5IMyg=]IXo/Z+$I/ΪsyQ|O(n}j!M9g(WcH۶g8oo~wO⯯}K~m,\#w5)< ޶]”p]UƜ\3|z۟*w7v? oMI\Y?zcx2>՘[Ȱ6Ōk tZKq&\!~#6/j#ۓm$u+e2{ecn:l6quꇑZ1]ؖeo a'uoejߴy{W-霻[]ߦs)d''ҲگY'Њ :YQ 3񻗗3>l]˄E%Sݱ&[f-`G|qt_U$IR5B8K.M6]p~2kMCdP+&瓹\)~UZI$6LxpV䗰rxeit$IN!8nCn?g,fд2R2jtڎ)2 I$YDN-xS C!I$ C$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$IKg:uSK.5K$I$pڴiC  $I$|eT$I,%I$I$I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$IdA(I$I $I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$I)G+ATv2.|9v|  E0vllZ˞+ª2omw0ls<\n6~F Wv#;_?΂amGIP̕]0Lt{#Wc78;Ou`|x=^lJ$ON+ug¿ph~O,*On}ðj+kK[n?tA~Ys1/Ÿ^/l^:Ue-$I -g/&{ :<V> O ǍO@5m_a`}9:6gB^QP< W`@?~Ich!L| z5ݹ0zxi5 {Zܵ1]ƾ!+WCHÐ>]ͶC7|TXE;^"mq wA@Y 0pPm²c\Hj D<0ٕNQqrH=̏1?,@_z$Gk9wG!t:\82X3[-bZq}\Ilw2p =Dr/35?B,/ʁ?N_C$F[W# ~ Ř[I$ ¸|{A=|Bˁ5n8 ?zzeS[afX:3`rH^4 ~ 5vT%5!к/?p)nWh@1w.pJ-X%, _o^`*h~NCt hK0ѶͯM-G Y F>h ]ZURB(Fv B]IT6vcM$%?=iKASq@1,5Ȇ&$Z%q{J'hMNBؘH%8wc<#4ڱ}ӓ ͉~w0ktk3B1+I$ ¸R 9PNgœ){0Q> u*\¨l_-pz7> B!W+=-4HؾZpKP8& G<{b{! OSCww.qrXK$ѶXRЦ0['Dz0d.Cjt*a[IM$'/b΍ 폕S@Y$k9vBR KdF9@ ¾ \1M(pdX V5A˫E$ ેˡ`o  f Mi69MOWa#'ZZB;}HC)|!(uoz/?3+{w<[Ά&B6{ҠuXNUsV7x ķ4أ4h~EpH: !›oAY ު%*bx+,^YŜ-Uϭ=ʩD5Z4u3h{B_ȵ}Tuĺd>ty!d$DnZbX])Cqc.lL} )qUz5E$ɂ ́W~ӧ/B^YIba՛h+ȩEN_a0Axc8~9vw8lu(+>Of"Xl*Cs0a,,{[:^q $8Q ^ "Lz ~ s#We*ju! ::$-wނuK`MU N$I+Ɇ@բх0h5vp 3p.L}. 
'2G^C$ڪqolhw&`JOdW~o8ςJu59$IC`pE;ZXjcgۣ`$(*+l* {c?g̛nY0\r߆O!maD?xuڎ'aؙ}i(gOn)eOG]`ua'D>pxaZ_*O¨8 cAA;Ou`|xQmLb^;Km?Qx41JcJV~U/aay%Dms8'sUoW󣴒q9o0V[on,Z<նni0, ^GEvM}k ~ 2@w[O~:Nh?'B=ycYIЬ)tʉhߢPp,\z5JSxa;Cte ?Q5A@ \F5h|bq(Knc·`_=IJwgyc8V1Վ]AX4< >Ha@+XcΤW~$Q0x t{_c_8sηhcP=&0VG(kArZȺk.P~y=@㧠Ы z }|ޥme0Q @akf8 ? 5:zo"qϤ $v=0pP$۴e21UXE_%I: `chd7iߌ_ 9sk)G,@h dWxˁ5n8 ?z\͏־j&8ku$L-ne;н+̊Ndir"Ni߼˾%3+g b+ژ$ޮfͰ`trg>i L1f5+p0CBVqο\ŜKtE˽ hRd~/%n\օOJ`4i*} ρI5̇ТΎķ~!΂.++*q}r_%IrIDAT: @ $r* Cnw2R"HԤSQ pL3z? !5F*; $30l;NvW".<8l}<BpPsh7uXNJ6&Y Fh~`S `6 ͆@*eubP .x G;'N0\-e,ۊqw bk]"O0e+xU]3Us8r܊ϕ[ޛuqUW[A!9$Ox-( TsJ᫇ˡ`o f-!C/۱o?699 @ d_iк xw+s_(9;}woC%4H0aXl9ng_@^>7bkoƤl5/Cnh mupXo3n zcoa cqb%4e12۸\Rsʭƹ\Bfn&mOx|) Z5np>,\kPͺ*IAS._¸`c0@=_L9́W~ӧ/B^qh?g_+- i1[oo*&}1<71&\s>㞏|B>Of"XlنY ybb8ƘjGǓMHAhp4ԃNN~+vΆY/VKs u{~Tmn;tVzu7Æ p!pi[*ht)4&]? A"O4vA[?\=6ND0rLwX뢏IvTx!hvt$iYee~+hi:׭X㾯ca}0lx xRC'5ªa^[8DHɌ3|uq_UX  $iy6,πn]_ ΀Kt`K%0cl^9pdOAIW%Im0{4l(Oח1%I $I$U_k$I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$I$I$ BI$I$I$ɂP$IdA(I$I $I$YJ$I,%I$ɂP$IdA(I$I:8񤏇RdIENDB`rally-0.9.1/doc/source/images/Report-Trends-Total.png0000664000567000056710000022267413073417716023644 0ustar jenkinsjenkins00000000000000PNG  IHDR  ibKGDC pHYs  tIME *'X IDATxwtlJ^]$ɽ{ [BB - 5C j: 6q]VVھ!ٖ*sejoF<{ki"""""""J """""@("""""" """"""@("""""" """"""@("""""" """"""sd(555Ab^[Պ"55ݮ3EDDDDD:FgFzl0hOCCyyy{u[EDDDDDp*++q:$''3' EDDDDD*0OtȾ8Vu*f#L};(((((t;,xKqdzv{}?=< 'g[\/ܷ20vTrɋ<}8왺t}M ob-S1HLNݹ|N9&5KWqۇk*(\f.̥ozEDDD<}-qs\  m\M쳟O%bKeLs|b[b^za>?ʣl;FdۍiÞ>5LJHGd3 ʋ Y#8w8Fqr5$œ>`jZ޼mփa׻/}^`3?2r&p nkMq=GU|c1|LT!>&-Q/;+:e玻aD~O^$t1lpwn>(,+Ks\*s&U0 wޜO823Ԟd@*aą0 \_˿>-ACs6W–J"}unXy߇/XǍϔ3tO48),@s5o%q\AWh4r~EGG,܃k"8b&H˳yd&'(M\sM?rBN(ˊ(y\:,3ٹ4Wr ՜|NI25sѬ ^ p>j\>/ai"!s¾sywQ]lw=ф?y-qyӳ] \U3Wz'ןD5d\,>6 XfF2戣=V?ץ?=,\#ߎa1a`5_R!a  ܇v+29. 
zrq'.-( ++*ʟfojnUhw7[ɡ31% lb/nmˁgcPcrŐTy`hF2{BowT{wFkisk.-kY uYٙÜX3sa5]DDDD@~U9Ѭ#vr, "=<C6|.R(^8ϖ.eLDt]j_(O4Q/B[ݚ kiDf-&#݀Fwo[2  v֎NۍLNcRY$G/SbđE~Ǎ˛_+D [ؖ.;>`u00˶Kn7}(blKelaGJ-6+3?#6;X-SQ$IcOkg[?'fǏUpEWҖ׏^6?z/#E9`NL+<9 ͗fJ0+aM7[&0-Oj[[t <[x|rHQ2GdRu:>Âۺ6<>nd(-e A9' 8,kB/\)U1FZF両ܺj^`gVϒp$6Į<""""ЛV{fcrګ8KRȥϺդΛ|0=x'לs<  "Fbah j $ƚP: %Ӊmh#YNodź r7|ۼeے,7c8 k06 ҩy3Y^ƿf+x" Fq݉ɜ盉,oe +kz84N.<֙NFV b߼XY[?'?B='q}XAr폼lC͚on][ͣ3shkہ3g 0ҡdI9OӒg୾Vd6;6m/6XcͼM5cӍ_^ͯ)TT)~Fd{Gj_oܰ_fY%`X]N-ޛZQnR#q˯s.:""""S.S ޛ9̜3|n2c~gj]ۚƐX\Uxu߿*xk55ĺ`i9927*WW37`DQr$?Ǚ<|n۰<|P/dn|pv*g|nO 6߶PBO2w/7Vq8a@S%/,kY7jS}%]̟/iAY5ݺflޜ+e<0#CxvB-zzfOWq+y|Ypǯ q2~~!,pݑIlm6ω]xEDDDd'd04;<ի>W}ݲw671ʐ^ q<ƟBJ$"""ф}"P3~w1j9Bɪ  /^UGDDDddS DL0O\%DZ{܌]Ev3$"""@("{7 z~=T iBBQ BQ BQ BQ .֙SRR"|r-"""""Wիi!"""""ѐQBQ BQ BQ BQ BQ BQ BQ BQ BQ BQ BQ BBQ BQ BQ BQ Blٸû*;]ҏ?J7۞oppl9q]ǒi /j61[y~9!n: bT0߯!j㨋bh+Q ܣ99_|Ò@ɠd]S8_\0,R(n$󈛹()/߿ɵoD鉋3Gxa ;UO@hu'Q" m/c󐒞N]ym<=NٗHDDDDDd!4.䛏>cҬŔ5FpCH%Gnk6Gu?OcWM _Jj[Z$fsX> ;i( 2cI93'8S6N~߼z]÷OF;b6C>2 r@Rs_ko_~`eS=F'l{lv;6hCK W\Oֽqòpn|,1&ͧƐlz <OKd~rEDDDD~>Фy;<d._wAaXHC:J <].uj7~*z5szpڝ7sla5H6V210튺)qvw{ޑzcÔ|(t}jVࣧ?pNDe o?W>)57hosM &ʂq7pT%,aq/?(sy'10w?~+,cߓ?^>$gʝ?ufl^5R=\O, /qՃ@mYOA#>0X̴__Ů_ܢDzۛ>nc;ھzc Tr\QL{/J,y 揾! 
d‹aВzj P;, .72x paSp ;}JՇ'_/ hUF;i,a\r)OL̠qy[(ÿ#wZ~|[rO]CIOw'_{ v5,i۸̓8vcwƱ V涋RĂW~vdXж:Ω w5{p!YNd = j.]Cf3Ljԍן6l>`Zd[Tr+G`z{߳qWDQٸVxu16^tFv^4<7jj9vqM*q;imnve!y̍<<8Lܰ""""";l+Y\H'P[ Z\EG:l}6bË2p ` +f~-$3v֠g+`T{23(yU kN9v^n֍>k*P=!x-]XE-$67>$6 a0n9*-FbVLDWG.U 1u|Q^QAmK|uo6[ꕲIwr<)x{;ع}:']͙) /I o?QdCyPDDDDg;l# '㭅k=24'| Sx;Sfc{g#:-6,ƞkN9vS19WߒiTϘO%ө\2_OgyCr7.-D< 4W&$n.&B,@0_\rG ڠ'*8NkGz cMT6u~nl.9vIvR 88,=_~ܪj4YnZPDDDDdwwpdR՛﵊V1wak,±׌3mV`gEr଍%ݷg%+e=z̭ZmӤxA'ٍm%n+L#>Sԕ֓uw1i\T^.m @S D_Td1rHfg5'xSbl23_/o-F㧷?|sŵw"6~^o?Z@WIm7w:,Zwo˻o ŎV􊈈t@CNe {烟Sr8q*dh}Le}x; ¬bA9-4Y͂۶[Ăz팅d!̃SY_Oi|}<;Mo44pf& y2/U104W὿?Ļ6ݟi+vތb OԵDFZ+/>UW^Nk#u&/>G<zo\spGTk@Cίix*O;Oi4^ YdsR[, /~K1ISp5JxDS6~ra ^tnGsΘy|rs0i>L1n{X38c'&9;R"""""?@O{G7E;I { 'ڏۦg/as1ӞD~|>|s~Z^N] -1q/ՠ6'^D/;pwq7qo|0[~ZRAslItv0rf.UMD,n{簓qDFu۾zc}{w?Mi$dIH ~~ܑͳ;EDDDDzi """""QG((((((((((l̦Ȟ?y*c?}2<4 5߿MVT( DHt Ӎbt͂G0y&T/nZz :F^ 6}ţP2*gA֯%dB?XP,^gŠ ,o\@/_//8 au폙Q7bi#Џð}wzEDDd)o2I\]OS0JS0ʌz|N{{*ځ׍L c|39GL"1Z+f^p = &JAd*L;wnLޭfWMPL} [q4 ŭ_$7|‚.cdCq䛚ckyuͱGqIG7F!<>9FʤW(B`}3m)n8J~.}mv -۾`o*ld_OF id sA1/: D!Yr޶+ _~d9W98^],bkd's!\f ׆ IDATop Kb5AwY5&ՙϸ;ًh+MF) l%:Yw?7CZ j&[i'5m\YP7сMz)oᆃӱ{-B]fR^U,h [:'Ëx`P*ǿUiRI kK[ё_ѯBޛYmߝY?V_RsοjT ^K^)SE'+"FdV_|?i?{gʗVqrj>~M.,WT}RE{yŽ7:9~G#_OI &5.zUQ̸+ V~WM8rw1Չmg0sF7WbLj5+n? I~YC!-vX3J(fni~ϊ !|~1\Y4oyk5W|m?ZQ|jE$bERfW\6/Kot0\g &`3]89p VxgQޛ@("""{Ēo[}=9{T2&o9@Ѝ>?%^1Z3Ӱ`ُƖ_6o[LM]`IeԉDia3yٜ}~M rupg,x#trCT~:KƯGyp+|_bWl`'c3swl;z :rݧBJ Dh X}C9SE4p`ZEAT|5ڙL`PS: wriϐwE,Û~d,\A}S[ D[m97YA|n^ާqVwq!Nͷ\Z=I+lofbǷvNt|YDwg0S\ 40q\w|PEﰗXZSKmm-~.<.u [MmYҰ=Z܉e@$Ĥ5u`bQ@~K&}!wy1^hbb|I[iE޶OrA. 
3<ڟHnt|||wd*v d':d5i(y]fvɶmԴN¿K @ ϸc<̩L6>`$|Vw3۶]Rfr6}`p5*b#gAf#^ɻO~Mmsmaök'8T^X$8gxy1-0#U|{}*XX̺/ßcvѤZd AӬbWf3s;jv7H> Z|SM4Zŗ}îd\& خm~N:sx?}>S{bNQg/+-&Rgv """G\8's݀$ݣwA_z } 6 2;;gSۋ!Dqdɝp>õ|}?mkرud@ #.t]}++ #=‹$QtgefNV\Z#8_2?pFujYgMQj6ڏ3 @ zd_ WBW??tKka9KȤu9{S&)=[w{w&Fq'߅i A.i*<^4$W@8Eoa-;%rtճ[؜ap;~B 0߭KKlNR %8WTrױv3nH?̽&&TE"pdqli$Xwx b~z= """"\G^NibqJjfMt"?Ӎbt͂G09TK^N͵wp(Lܶ.]B&d nŶwB5>Y ׋ 3qZ{ucfyM3YּH-z`~ H]zq_ͭ@("""""W3_r}S1wOv7EDDDDDQTFDDDDDDPDDDDDDx-4$AR)uE? Sc0dto]$NG8s@a??Mm[uk(1?鷿Mk3}>v~hpXk\v?rwu)?3( b!xέ )G^ñy窋L|dd^Ή}r=g^}B!'nrwpxND$nrssu_:4P$ƐDzg{:TP5AVGX]nyiNHtknϪ,?% <@{B8TgQU罒iR?Jq;#k߮:1*Ôׅa0'IvD YuVYo߉c8S6P&/^Iحa%?M~p4NE] K[X\֌i%'I^KPDQ هTԇXdDo=2\;6 =2\pT5 jMe-8|\d&;h>7BٵU~U1G.n3F:,n!LiMA6 mt("@(""";Ws0% 5Gt3$?n_VAv=IuS%5AX->y.rSج "" "CqPYb1Z Va`jݦDFu!~Z:Dtx$3{|,LLA=T5) Q^:t"+ٱë|Bi_@(F4XӰ4j5Y ,[-X-۬msa`m5[,_j57[nh_Igb1Nv,0e5!났n9Jv";ىݦ_$"" "l] ~(@p8.+I\V.+:rr2MbqH$7Lqx"88qbqh,N,-??ܪ5 mlvsÐ>ֵϷZ-B?czꛣ{l0 Hr`HA"uMjՆ(mbd䦺Iqkd^"7i 5b4Di ~_0 $DGnce!u{k@p,HtmlɌְo}N$'.xqa35oRֶv(sgrv="]ٲZ %YR&3P5Af,odk9"ѭKB.=|{>ڇ벑ks9~biB4ߨ-Lv={1H`8Z_v~L PX1Y+)^$kgt_]FqZw kbaI3 KIMmQ s@ (F0K벒䶑u[l E1ewEn75\6Tp4JѶ͈M,0$?LC? 
[7G0FtCny6 h օ~IօCG:"" ";iBS0JSKPkkYF٭IvgHhjf=l]NMXVV;)+>.9%%~6[8x@ I>9xDT5 %͸V8Lv^cB{@8FTLKq+Rv5A7zfݫ^_٩8Ӗ6!Lvpn"/Űɚ05AJk YNv"t("@(xu!)%.7j1HtHsN`QM@ŵ7%+Kʎ3}YɐDzg{TX-kf8M"Tԅ( RܶEA^jϪͪLQ i ˶nzlg%[a gLvunvO-%/M^D CIK@i ʚYX20TKddd ᥪ1LYmu!fl .MXDwǥmo{,u룭]g֍tnk)eDcv }Ba벑벒\V]N1v@ e!jbZûW)V06D1Lnz%ovd'Nn SV.궞ìd.m " >tvD"믙&h禷_~}_{mm_۾ MY;E6 }N^>u> =!7Iv-,*k.D\ z#B>OL$QVQvÀ$GP5E( Zڀ0Mu}"@INعp:x$7?pkmOx_wXm.<ڶ#*57An@{NC iAa^Yn4Bߍ)K1`5HMhgPDZB1B@F^ni(JخX{jz0^Mh_bVZ7ܤdz-h6 [±M^uvg.[Z}}~F=r, O{ KkZ/oFSs0ԥ #t34?Q=ɻiw躵4}Z^~AjFk{-[_`7Ѵ@mӺS6Zjvx:(A7Zzd.Sҹ*iGty6 l% +䱭 >;*" Xۛg˲ rq;t+S/*mfiy 5A%;ۣ!ɬM 3>u)>9xEm0֞TyNRBZ۫鲡0(j1K, JZ{U0{"=2CG@8Fn .iP M,n!LiM-,)ka /Eϡ7wDDPDD0WYnflbW0$?T8uL`aE8BFCZZ2NqT7E QRm9 ZDEDdR:0Қ s|3.L{`$ T5Nv2O& X,>>{&RPZ6HE]+ևCT0BيfNY^BE}(2?k퇈0/νa@ZD;{&Rnﰮ5Z e8JZ"@(""a H~9'ŸXR̂~v+H%-QCpss0 ili]0 k';INSkn%߆*{i,cO62B{7sH776W wJ";Hוq`H0`҂:_GcKTc|7y~\54Map_ @j)&GK +Hng5L_@E]xTDd }G41MӌQEr1l8vM|c^^Mп|ʕWAyl9Hrp4Ff,o$8]TE]/gR0}|XD8g9(Ffp@Q29)N*E ϹD],BTO'&ugJ[ج0[39&՝9?"`=2\8X\Һ~am xdn">Q}} ([~afu!BXވd8Iq^fkaU/77z,Lո In8P.ev~X3ҝ:+Dgn0.3{=d%wڞƘ= ŪOJ3Pu!6JN[LOrSLD@?{$Ivw~ӛm}7;;XXR$y(BR)dB"Dtwq  t$]Xz3;{fzM+/3'"#mԼ~}-s_A nrn6ېrW<46 A&޵&^PGTUXsU<ṣE8'"PYǡP[1[ < 2E#} $rK&b? ֛ ??y\. ÛuʸЁ8yWu...RP;S;S;}N rXnuVs0 !^\jCNp?Dc?~iсa hvպu +G(jt@#jgڙOƅ{ nQ]șIr@vm]r jgjgj{S~ -\YPw$bjP{?7Z>^ie8>mk7w;ިqβ'}9鑪{IڙDݵ⠊j 9_˿r?澌/{ ~˘B{xEs>dTo_1gXoz8wي ?Ng)ka;!S IDAT"fS Ȩf*^XXwT҂ e_lMڻ^W|#}Uǡk_4xCy !J\{?\Rpf:];C5k`\T<^(AxLTVb5ϡ?'\R0֧BģE- m?W^yC 0[pq(u웚x{כm~T:gbߴsjC~hKZPvv*D!f01&.-pcɉ&UjL#‚ ,NPZp>(f$,7H-%- LeM$ Q4S 8X޵&ޜmʲ9eIlxcU'u vB*iC%-xc}*FJ2$\K !A)e%|Tn:8{V11ⱩTRw֬?ԡYTI1֯b_EUJb pЗeq#%9A طp&UK2hꊅJÉ  |nь"Ň& n:>p@NBN,&;\ຉ(fE1CG ?rXNJd! 
Ȫ"tEO$ 3]vЏGdZ4!i]vk׉Yq@5ww6quS9 GMl/3 *ΐ((C0RVɉlOMhvp-'@e{휻Ԛ1W*"1 Ȩe&AH{oǂl(`CgۉLLE!w# J0N䃺N2y]GN\}S9G18Ç1ڧP .0zZze)i1ڧ@IEzɾ퇽CFQ TeTa{!ZNð|pPurt]z~>̲H$G>pUA|?/p" H6} /g3 Tp [rbppKڝ}o'ߝ'@xX|, 2>x?Vm|pKcjPɉC]k/#s59 O.j6A5s**8=aXXwPyxZ]k"?GnsLkV>"-Z1 Ă-;&j q8Cm7 BM?|<2&"3$Hmm9n7h[(G!pnmĕtU2WK-zu_Lo# m{.нI`/9Giצ8`zHh -\YXuq|<_|aL eA k"ٴJR̒KJZdr-Cn' bVJ^"4gE$KKl !Znb"1V*2#T(;? Bn-#y]n;A%e-23.Y?͏ǘ@̏~EY|slf1\|8\)g+>nB8pG4n 6"`zH `Hx٭X6+ <"Xt_Q7X:=4wD悚FH>D,r=o;I^`BNYf׽Z\DatTC!bG캹̄EKr&%Tj.޻+(!OW+.2:Tx]b A"ʱ.iTst 和~*iq׬|-Qx&6 8 K#V+*P tamX7XXw6 μa"s*>^ $ ek mrG ;0(P2%l[to2aq}+ e\]q 89WY86^iPYKMfrTҬ-\g^9u[H}=b^wg^{ڭ4Ԏ8̍Q 3 ѹCN"u%b*6$ﻰ e0ۛ*䙅B:JPrsIctAVN `q`>5aZzI}={!*u ? rB.&.ѧJ<N6JU~x<ƢobeђaG޳kI4X{=eEj"va:Lm7I( r& 2Ӭ* mxۙNW!Eдsv Zֺ{!2m{I=-_Y|"Oߺ@t8\a@&uRAkMN}ݎVǍn3rhPh3;"Ъ"uR p~-B'l`wi6C&D#& bR6GԳރ0fTunv,[~.QDWg@:k%O?&x6[qqބF88x-\8wK g<.QX+,C\f)fMNeqnsMֶ; ,˞68*}rLhnsd;&e+f\]K{t_h:ij, YKcZx`mCabnņ$81!}Wi 685ݓw|2fL4l۬ߓ9zIMxc7-Vi;Һ-{dGLf-qU zyi^%ho{r6=9CV l.Qe kȞ\2L! 0 {,x:j.zjGl>~}\-a@`>kmQ&?QxzjAlzMT~ gt jW8\z̬ǀLT.Ywγ \7! 
:\PEb~'cїUf>w0SE34]C9SF9SHfQ jw_ ^[g)Dd"3[\wyrPa|x[#vgb?y6 EHq?g?{g߻[7}>y6`'61q=}CHXPu,xUDn&jSRqWRKQT~E wB"AsP+oŒ-?Ӆ3g0$먵l%C%w_nkM4mr-sۏ UEO*@y G!sw2_sS䙗\>%0[f3;sq~kqv 77Ɠj]ԑ2Xu>8Ùa2FɌq$;2no( d%29QHJtǴm=޵q#L,bB^1xL%*zz6.["6<QQeU 2^ 2' cϞEgZ eJ}5ns9\7Qq8N]^soF$fGXXF> s|F!έcquZ``*~Z(l'9SgHG&K.`Sn/ܪf_nPYߟ(].f$*83pz3\]¥%g`V*S8R:c}pxGKGxXZBU/h.nJ2sL V=ae%!l;u֝T4\#`#.Z%"n1^fN8@R{Ř,/uKF'-y=MA;m\3qs׌k3w9!tap'p&r{7K`#H@ٚxHGA8 fL;*d^FVBD^LwȂ yIkMiI v/0 +fl[6f&nkZ:HA)ֻ~J1u" ⁶s`QyW@L&lY<]~ ?CQs{XcRIgi2{*!.̷peق$8>N|v&TYG (ea\̕E{]>slz ~j*..b"..r2~G`{o?לZG d2\[W7- C(gˎ1IQ> vnkd׵An+W,eKELz6XϴBwg型1zx71gZ73bt68LiLz600LL&Vfz,pK*њx1 }￷Ɨ~7?=WDic ~~|o~gǥ;7sM'b1^ϵRox-Q!h*ғ㵍[^O[H rߩMC?v+ 96\*Vm!~ >mx/5!0ߜwW^kK ~7}x~yfa zhœz8 x'[mwzCNTE9mg欁Jxoմg ,E2ac7։fF!49g1[Źs=pdI.d~Y)oc9{*MvjnrMCA ώc"7mC S4L "U71Slc6^3o:P8CCwo<7;ƭ+ έKgǴ(ؘu$ ys/;}koAfݙNs&>}_#i.&ӧ 2G8^E?ۻ7"^E(6SiGkaT@-QjZ^ ~9۷a6AMvrZ&Q66M $^v\wQsjۊĂ)]x.ɶ(ҋ}~;p|8)I3>5 Ja{QSVϳ}賗W XBLkMXnrIAY+C+֍=HtgLȶ@lN]=/.ֿ$-ʔ-ő-d~rWjjQ\j-V=u;+cXΔgj{cK\?pa% IpM̃tm--abޜǥ4E̛iBVh#HmMc/Vv2.K J&޺cjA3RsjKgdN2yu s:?}RwZK=h*rL{ĪhvbE5ݩ 126q>žtֵg '_~΢=7+GO"'<,wڱ(VpǴW1qy܀ <ߗՖ jRjPY`xh8Ó)A$^“COɡ's9xkzu|~"ȂNjT7%IϮŚۯxpjQlNC险Nӛ& IDAT2g̥<d~.8~I3ep?uXśOL|GÅ /Ɔ>/'NxqL&[yʜ-7M`Sd`?䂰ZG?D_} = 9 N/_ECGdxR:0<46mNmN{&E''1ۜշqv߼M|7#{O=}Q @y Jo3PPff& 3 Yd@v^*l(!KVֆla3ki`ΜÜ9eTjLA.` >U`*3Q}&PiV?߇] `ڂzXwQq*XwQuXd-s`Cͽň>>ŭh#FPRJQwg˷`-`ƈ%{ %k oam$x%}e!mhslk&Ld;4gh7Z7zo_6xL`23DD/C# ԧNcȩo0rfz"24~l${z]{ȧ qEp|מּ&oB<6XQ_Ҷ, Ќwe/-L!1q7۹׷ t?jJ*d=9I򲱠{( I2@iLo{ ܞI]U;bJ ; o8uԾryk(DUa..̅d~he2?ԾqcxϧAJJjzzoWw'@lj~VD ܞn2j/~eOW>݇һ g ^K> ~,_|gm׏RRpP;wx{T ]; p|qP|(yoVԟ?'FgLdOOqZк~3G>_ϏC]0/׿i&nAqhGlU-^7*ogȋx|q<[~ϔ3Pc,v,07MC{}p~v$/Ӆi?=8\zDcn (8"5Y N\_.IG~P?.n#g5fxFZ8}pqRVI +T@v_SE5ʂ[|s|e>,VK_(tEک??$++Ϥ l(LOڙlxsB(,8=pϔ#ǡ +èv-~/[i|_RJJ:M%h(7LAh 3h[}^ౚ!fۄV͸nhmؾ mzm3O\aw|&nVӂ i9c+J.(L0UJGɦ:uV7RZ.V/|>q||X7kKw_yW NN``f6'y)MC 6$^$K,fUs~qI3dAcA WWq~|/f[ύD6Z}$ =G^'&>%xH,ߛ̆&y9R:## N@^#/A8\<>X|ǒX7ķ}+!}&&eHA.OM~ ɕ ؟p0DncoW4M|y~^Χu\< B   !}C^1pLv^y8|)AAA-Fs#ϥ,oRAAAtQaAAA B   !AAAA    AHAAA $   HAAA$   HAAA$   AAA B  
 !AAAA    AHAAA #G8\צ~Xd E|l\p_̷AAA F`IE3BϡsaOo݀c~[X nAAA *;=_? n )+wy/AAA#?r _%ߺreՂ oA*B'&^}H=>@LLLP;S;S; {->NCE>H,r]OI93 Oc=xw?6{Jr jgjgjgڙڙڙvv&ޓ.֛?;k#jw˟NJ_˿r?澌/{ ~˘B{  xtؓo}3U:xy O? 2ş ~?7??5s _89|zYw:/p4'1b LmAU(qb4XN]ç~{C'EQNn`,~r"; q͵ @fUA|f'6Ђ困C N` L4V g9pL:r)hL@ :4Nf5hEk,ؼ6\6J/غ8Ir-ɺHL߀Ѯaа*0:ȆŋGIP8 A2P,1E@5 WS1# Cҕ%kZ 9Xf? b6s-1RĤ7M8 3>Snȏ}n]ˏ1QVЄ7ae*AttAgw>`X~pBv` L؁'a-vXjF#Є 0\mB5kЌ yhkW C16%"¨J>7 Z@Y08!55*&5Z2U˅p$U48W(ʮlEPB@8(p A*d9(rRb HHG>ڡv ]v{/t=xv؆7/s?ݶd3d񓳬tCO,[޳.%B1]"+dYd,jPrl+F!`ױr22@s 0rEԮ2-LYdˏ2K4fQ&Z~f`h @+m+ ȑ 1Pmq,8yGy&5A$ U^ 8> {$_]Ƞ7Cv`[ VЊe+6ּeXA c"b< $$^P#@whV.>tۄA/&ũm'0D> ]aWЈ08]/sQ0opj! y:=? Lɂbqq[e .Ʊ58^߂/(Q#GaGfb6ضP=DfU&()E)AsP${t 8K;.s; =Cx3;"'AeH . ĥ. mś;cI7$85I:[_܀Z[TAEbG4fl| 6ppCAOF/t<u pHE^*BεML *kY\\kd s \=2}t8K${8gwo Z0:Z*R8*alYԅ,B9 G wXSX׀bM3oK,&qw0~z0bOb//(Ybo4^Ye^ȋG$c:8jf:qu[A+0wD Ww$~ye,t1WKg<ɋ#h& MKP\usP*t˄P2R697:W bO^, s#0P|D_kԬb̍T>~dQ{_lb. J߷z< שm7&b%!@K`"\#)6um 6 >׆9P<J@  I '@"!+d=z 3($ׂnNfW BMEb?+\"r2VonLv}xcEND^,t✼-hd[=stDc^,G] #)MPjgf-J<.47Xˈ-RX#0z:.f1&OL^GDNKQ[DZ~{~WREd&^TQglN\1Ycb ‡i|;>Dvn3 aZ:`JU(.vL(@5¢ BɌ@OCόЪ$q{9fcUj~Fd)Dh"$*J)GӐJ#Le!h&\{ I׋߂ZpC%Xz\#K\eXPLJՁ:P PBGHA) Ԧy8OVIsd7h۽Sk\ِ]6N>3,˙ò|>}U0VB+[ ]Ǣs0E&!\ˉzfܽpl۩[לGYF[gȅ!MEAK*0ٛ2oY!gsSPLBypI]2n|# c/=]OԅLγk] {!Y"p?ŗƏ49Xp5X"l6gmؒ(e!e_U@ ~5*t1Mg`Px%]ya{P]<QtF. mx \'p)+=LVsm[ ]% C 2/1,fR @VJ6ݔ lp*hx5dae>#9|cˠ~2 I6 ,w$Wf$=Z#Vg:PҹĽTT6D5 7ςw-eD"N'? 
Yw{PJ(H;Oo"3hְE7VtK]TŸFhJpD`&A\ooR Do MXF!+0PeLD_N}Z7Eh,l]C[Z5/:2qN#+(v~OEHE,gx;`5ۜ:&v ^ۀ˰yp!)HXe [ `!7ޚ]z@i{P|@"(a",E(cp&* KQAQX;%'< 4nPQ 0"j0r%[5 @bq,,"!W8>ƒ$ă"_-dR<~ce+(7xSdՋ@yf{;.lO.}&eq7V~$"3V8)NVٻߩiLz'U EPTTTT!VllQWw՟}EuE"JB'tBڤM1IHPbu"$''3>s7!TpW5˾3s;Q`f b'>(GD-.\Ev ]U~8Nua H lm)N,!odgMMvx{=m W-=8ո5ͳ-|N\>N\8\/NLɄleibߚDZ@ˉ@8ZV,X {(,60tgs #;d_*pW yIu*[4QD!1vdCX)Db7Gfdrlz?ǹOlrwo;i}d~dPӿi-_#О~cޖϨ *~75mޚ@Zo-"꼵?Zji}0Sap򺩩h}^5i3G_HkXBN\σ^XH}CE}scBKH}&4TtUF)\ NV'.g*|4*_NIPm֊@rL/X}~~C㽕A-X !X#'!( <ջvX:NUD4QE۠Ōu?V͑J$",9KOs""{ L`oYwoX߷j?a2vӶ:1G a@ Ee 7?6&* \[<N 8:#}<9 ojS-kl#A5ts7Lᄛd9Daq>ԅ%2Y@u=gEڨ g|~_`)S;qyp#Z m5qùe@Z3}j:¤JuR4 @ޟo$J%$H("Qwؿ};DA)S15ˎ$&$r`T#pNg06kM`7ʖk5K_.ONo-!t nn |DX" 6jBZUQXH1k"J9\Q 9Fp &n,,21Kc"""""" """"""@("""""" GyH4tlwS=98c|/&"""""@x`R6~0]4lep{y۾MDDDDDDogri"K۽9:9QwK寲m~"""""@x5{[>3fg(ZYD6E;=C XS\m."""""r-e,`~?ɏbM loZ}B!:΢:ΪΪ(6{vO3_)8g [qmk9vY 1q[S-}x4TgYuYuVEuVȄN2_+Jh۰Gg0e!i@΍9i [ &ڸ-HG7:e :*Wq<:GoW%^"G\ȼx2mDDDDDDQ BQ BQ BQ BQ BQ BQ BQ BQ BQ BQ Bā_ÚfhRun >SC0Gu=|DDDDDDa};B>8s%\ l9 s 5[1|.7E޶oQ >ƖÃY:<[ϋɁ7 +PTMNjTBzclVzh7A8fZNEDDD::rXU62B& Jgb8`f,w04.3fɫKvPT@׸0:?Ljn ɑ!$GpZvBS q4_#FbxHaWEK RRCae=_ ZMN`\v|=R=‰z{& Ne}a5o}ˋ삭#3+Ϳ!Sae=|+uy3 dhFaaAfG1$=Ko5l)qm^^=BxRM/N8&&*EÆjT%68=EhC^}#x~^m>.u3W's4B&Rl.q@6+}#:%Dj4!9$ͯwVՌ搘NB5H#Қ4pbYp6NtX^ɃSO,ƆԾS!#2XKsy͜7 i3g/M9%XmeM\1?~_-bq=bx1{l4JZthĪz7KXi7V`3=[-?5 =ѓ\JoKcgQQӲhkl\)wc;}ggջޢ]Ĵݟ(?N4+1dM~ecO-oDV/+!Qƒ?j'ۜsV&HQW;XuKw7;)q#ƓR'[3N`6|W1Ǣz~jѦؘR"Cؙ= """i\ڡ=kGY-5_Xֲtٶ=tGtV&U#3BY?naXLOʓ2Ȍ Sq:"L --f*n}5~DwEDDl}blV#hHjذCe4=QcQV_I^!Q\3*So,EXWAx;uaą@GZNkն嚉f̘þf'luu3.pˌ {FO>W Erٰ4.FdIV/v-HMX~&f⒟X3N= """ ~vדSTb)r5*"Bpw!;)^LolAfŃ0g}3gy~V.҅+NJ'#F-G mlX: \vb2jrY_w~N)/""">G bs͓~ q`]ȆAae}5wٰ4z%&CJjIѢo'.gg3kN&+k\Wm$JN/Q04#.Ǜ ú%ة@(""r4wMmNOr./{Zth42ƀm e^P'pEIuyQdӃ8=x߯sykp{j+Hx'ui ~d'kDNJ0+ҝt\?^?\5B3T)wm|]qف#G8@(""OYz6U In35 {CR˃^\o)sPьCuZbi0l84q`ֱ`;HpƑ{Of aԂrB / X;+w1\N#3QCR6m(d4pn<+m*(w@AeU5PTYOA$EnxN@H>g (jªJ4~<sZMԄXLͭAf#`d26.рqh4*g{pP֔ƒÒ-<o( iChty.uy&羳{Ί]M׼0<)tiuB~9m.EX 3Fe2mt&}WS㢨1US\UOaWp615$كIpRF^/-γ ĸe3{l󉓈}USZ18( ʆZ 
Hć`&1"^)ƒ#aAffP+7 'wef.nɹpp WQ_\xą=:<=pJDPO~E=% :,;ZG$j&92)=8&-:~pWXXHrr2N)aκf؉h``HN`|DΠiͤ燒v7>7]I#N'à tbBcB9LFgM䬾:~)v)M sUջym&=5.z&TG9 ~!'FeˎٟM^c\quC6-_ R"C."\SKnuPwe3.;]c4 uy_{j .8^Š`D|c;!:sG?2ci.K#[rȌNؙ\Tˋ1{e>.OD1C3u<s'!l`˛w2WYQ#ߥ<2Fr-/,7T/vsyɯ; g K3_qOb‚7FyJYS40+c{1.;1Yq4-Ay|~ 8b!BP~Þ]mM"ǝ/meZο \;*SamLt<Ϊѯ 4M%bmA\48+Gdd>%h*XfdSkxys!U6E޼yT~\EH,"/^RziɱGZ#{;3 F͈&-:GϭZMwEAE=_n^]Fʘ@k؞qYZ*C[GB&R"CHfHF ?j WEsIrf,mNzٰ4Fu=~n[x}TOD_C3:f-N7Δai\80oMw4xxuvZ'mۍۭ5|Zke%3m` ߯w(v]ᦴEQ5nJ?3 Vn%f n%>Jty?-~p-ܞzۡn:1okX*𶵌>͡r2j':ˡ?Ky|l-'ul*=B,GZ8K1mb,,۬Yt gYu>zO:=f7n~LKd5eݔ׹-'3.+ bϢ@6 fOx|L+4=1[ .Lq[PyT6P\UOeݏG!Hsr$EҸF~Γ5OKXTP?Wtqze3(- 1z!9X[Px˯dki _6섈`zqfƒ'kIk :)Ϫο=]l? 62sY/SČ%; FdЯgl:U5ZZ]ciL7dsy ݝ2zv~=,ø9zFS_N'XS}"۴mbW6fOr!$E }ɑ(+)^IzJwv;,X9%\ˋ_n#:)=Ϙ8\9EoM~[J/s|Dj'!"X/""h0pZve'mw 3[(Y}:"Kx;_g|nӍ]" V<ݽ%/gn`lam)7la=b|o"GM_sT>&fuF Z:BqqA\2 ۝, m>&Iˎg\vB_#r5UQp6oo_/Nƒu@D=uľ<87]S+_YIRnTWR̲Rj51ux') ԲǛYSȿßly<8iu.f.(u1 5?ۛS{c4hb0Q FAf#2{,<KX %mFlcҮ15dip{'{i ;j'|20-RODc54(IRY[PkKwy\Ljn1\=2qWԺ<ˎ1p¹yL7k],TʂRf FZ-S'f-ٜk+DD~)vq l^_qUdƆqI?tS?;!Fuwr , "6a櫗 n/*Sgktm24#S9o"IvNs_dQ "",>'piفp8(Z焿=͓4?@τpDHN bLn""rh #=S_2{.>][@ʽgDjvpB#k\7u冓RQbѦ)a]L_`3c9-;=㏪=u W6:<{B8gK_vz%EiEDDŽszsYX[L*Hj傁)\00j0?6&Ob\v}tٯ>?v״Z}cfcxmkM3af5sAX-.rBwғVl~b*V<}W?9`Ͼ^Ɔ?#y<~mQ!G`_cx,v2m<RB#S9GaY$Մ/9Ej]Vo4?_Ɠ7OcqDl?esۻ>Z~ο=]|7I ]O568u룿RB KPƔӨsyY-ӵEb'!#0.;ӳ g5b-FbmEDDDD}\f=b x=.N8pͯ\޵{s|aoэ3u.>%] 8-;Ӳ|9)a}#ϸf'"""@xY{\ɟz@w~3=q+K/ޥgWl<-V!DzE8D}pl:(a2Ќh~{V/vEN rJ­<D2{,fǓ-VocyO@0Ih}?( zl.|s~{$fr1R3Μլ ?0&hC9zErLIquC`pC +vjbHIuC$C=yϾ)vt_?GhR4xo}5UuCx ?Ƒ۝is;9Vc512wgj 9t$ݽOlbs'3+'2l<~\(_٧rT\<OKZ6gyd!`cu7pjE,#ɢADDDDD; pեoRٿp{h`%}pz6Ȃ\W0F ceSkt34>("""""# """"""M)"""""@("""""" ;g.9 ^U=:D[޼a f #5kX M `0sVuS1on<֩rwuwb[أҴ;Wln`"yJ|*K*9 >dz0XQdžWbPr qm`9i h %̇r5:6)NN Eױa[]syy=գʼY.e)c'+_`[i#Lo|\]g+_a1KyO4uqG>e ~[Ĩ4.K>Ҵ䫖7^*N3L*Nqo3mQ!ewF~ul`r~; xK脮ca {bu62hK=ͪ^;NJLz:zRr|GQFt= W&v{GFb >oժ6KT,nrXFq/ݗҊCO 2`2VUCYRKip7oUϢgjӞ3d FO((&\/KP~0c*07W=b%;5F~Ȫ,.Mv6M~u+?:5+'5jTdu7c\IBy+Fڈǿc 
[qU7@ˊ=؆G9"<̞Yw1;uٵճ.܀ɖҹ,7bW5ޞVB1ZS@?/HiWCyc|>?sub웷r9;FoOxxj9~==w\.1Ozj*a=idf>wmHw߯=EDDDDDC!Q BQ BQ BQ BQ B92̀7p^-cկbUzNDDPDDC/.nXVeFDDDPDD?jV=y!qQDG0jfnu69V$Kgy׭񑉜*jZl_u7YqLNzHz_]C} %4|N1\.<oȮ)$'%vGMr\42o曺>1l~5"Fu|[8clVKV;;=&ܜS9k<=Φz tK'8f}Xظ}""@(""OLnSl;o ݿ]D~kRz/+wSV3Wi}R>B^<=0aV<%}}7=, 9Nx˾gsi|luge{{E;sjy1n|7Rߌ- L}<6>erW%8*׎ǘuu\ɿ~moYY>+pkFr6?۷>̊>EDDPDDt)wg1܉k (&yQN7c'ܺf>~BLyL=#SL^D9vR*,hl~9-~í'1B[Z6-]Hyk8; κTbMp<䀏?z)Yz^Dž݂0Xr'Yl*]&%C#1BH@҈ 0^C~eAQc®,}5uaȮB98w/bXv/zˈۿO?O ˟aUU`\Bjcӈ :@ O"vNPӫф?FGq55nWl4d*O=EUNj1GF`mL&[<ᦟAs(mB#j-_CA/3a 6_#&4Z1Snډjx(OQ NGkkƷu,ݮ}@N:>JO &cq o#ǧGU ȸ4R-:<7zDc*Ir<kx&3ޟG̿;>/!uc~s0 #˸YDD:U_~=;fH!""G(D-"""""")((((((((((((((((((((((|rѧOw=?t>#7{Y'BxM:DDDDDDVvl#k8Fj@St@=<6̃Y4 "Fq=gl MDe$[ɺyڎkXKBo""""@x@HbBwXØC%֢Štv9N{7V=Fcze64~n=k665V/qSjPhǽ9 v{ {*'(N?}$=c,c=lYsn`gq)N3a1̈́ inV7~vz-Of߱U-'EZתl-_w_FIsx,Ii}9s5|?I-_~g ?YZxYn yI>/hy=,EβU\>gSyeZ/뉿γ 6O*zi(w6xٙuDzڌW?E 7n>9[%""""rB_wBc )pbV2!l^_o;?ㅷy躁DvxеYEvN>.h2k=;pl`="*)ذޜ6v>xoKhM0cLf8tl^¿Ɔ-|:6/ Θr2S.Ch [J*sR-m܅{_pcɗNblt"lY5w;y䎓2W?|^ݴwXWԇnt1mim=ܟkst'!" 
9v?@YRY_f7tp3bY qJR`2/Fg%$gsE/?b ?_f S %2?g]JXqT-_DZ?oHGgJXl'y#6 O;^_v*]} ^t&F%wxTeL&!1@DQ; \]u EDłXV $PH!2DJ@ yZ̴3=iZ~\D}=~?K8vS BU*Lgr<"s;|Xɀ+&l5ca?͵ mF|K X цZ_~9ĸC<2_ga(/cc3C ;lzg;g1뺖;+Hm/wYYd~,Oay]%F]DDDDD|vxҽW@/h w7wUSstGZWLFj M|2}2_39k0t +ޮ'%*ke$x4q1#a' ؞cKSc"w0̥":[ Nɢ%sgc濟 !ֆJ aQ73 \UI \\2[=Xq+i9QRc= 'sYޛ|RFΆڰYܵ}2h`<^"bRCI5@#)o˭Yhdтp f%""""BwlVV~"BH(''gٷ?MXZ~߰UӼLp:VcrlEe{7<ہxR &oS+E`4a4}%""""B@;Cn8|'9Soh^"""""mT_e)nn-jzT\JW٘˺r>CFCE9 'amX-^Ef@l.$y9w=q{ّVK#6j2QpO# K#GQz+=%Y{ t 6N/ mZƦȯ,N8%f%""""B` $a@ ?}ǿ n:z_`4qj~~P8|/Ԫ# 69tLh S{أmG N 4y/d8k)lp <#wl<2!kFf*oN{r,Ϣ5I7ߚBL&]J P7)2?u 6QC5U(bI=RV]CEaۖͧ_ע-ζ2,ͫXԓ Ϥsc ٛi|]Fn5?@j>j Tmbk/^GL<409 sYCY]=;Y6 >2nÙ@*|M^NAEul^Ά: d$tQ4#wNd8֌DDDDDfSIX~O#Tm@Q:D{.Ac:b<(s2c וԹ<=u M_H޼TsË<4ċ-Әz б<بOi<ڴ/x5)'yFyʹՕMߍ>W^CO(_|ӽ ,^/|&_˙.]ksc>Wf kp?nd d~5_ʡښo53/'xĞJ%\cI:'G^dрS`Z_AYi9U V8& F{P+Fp%3c@/$`.): g=rV\ =*ؾ5Zn v:=LX Ú|quFz~=rkǂ#!D>q7MblAtsFW nLu)j|9ahBF:eX` 唖QkG`" ›n犁AVc}US^VJynFgLi4='E3;K~ZFqf}^7 ߚHvFEDDDDD=p           ΤDDDDD-7[:9V%ȃ¸zp$N&glfS """""ҚriZT4y{N޼\ tu.}k%Ǽ_0/>g4JRDDDDDZ5YT0gMUc$/?Ϛr5n%lwCn>t%~+u=1 !ۘ^?UEDDDDDpcq{y` k )*Hu|cUWn]Oez|_LQ~^ .m%#l%;w7_=}Dv/(ۥB("""""rws>f\gBI~3cc18p{,|eTEDDDDDN᧩}Dyw+* 1xᴿ5:b;*Z9~w̵䖛TEDDDDNu <{fs|.o{=Z모8AcQo3mO M0GdWq %%%TPWwu<*""""""oqҾ}F2QbGXzEa0FYʞjXKY , aDo{[j5fC |7JV>շ!(_wj0kDٺlŕF{y1aPāO2pp$hLou\ؗ"0rݜ,X2\5"G*?q<W&ݝ9lQ 4 6JW|/\8KJvPCFJ.8wOG3os^&"""""mҬXm'9[mʨ!gS_NvqunpFȮ?UZ}WJط7>J!`x` yg7c<|sv2i{\tB{#͘S\sęDe| :*1Y09ws٤7xY#I1 0ͽMDDDDD[&u\3 >fYrha999)嬜E9+g,Y9Q4Z$ ]*""""""b [׾Kz#X>疛gȬ,vѐ4i>1DDDDDDN~M#:QU{&|;b20h( 6`t=0vyX66Z9a.!9Gc4TuW_\ӾzXz6kn8H.twKJ^ApU5"""""rf,M'ә4װrrN)>[tF {*qpplzcm~ff=drrr'Y9+gQY9rVIzq-?sː0 T΍wr0Ɋwn3&ds{;ѐVǁzfh׃4~fviXXSKC9+g,Y9+gQʹ#27GwoW6][mhhQGIM߂%B"OFX6>=1ۜHņl~`KhBh˞ǜ5l5~wO#>04c*Sa6𿗧=b2C}sKaY346"""""|3WfpKRtV2̝t=*Q#9ǀKxU\qug\]>/$` Ç~AͼMDDDDD*j: .`ٴBYrhak(g嬜rV;KxT=^"gFWax+-APDDDDD}a/uܚӮ[PDDDDDXm6YN|'ov P!K- IltPPDDDDDXNLJ؋3Jmw 7hhwoG(QC DF. 
HDDDDڼ͑:W!Sf^-V|ݜ(n8~^.?7"݉wIAH(-y6"wYT-{b^*vsf"k$fZ2Kj,!xȫMyT7i y+~n{L{(""""%i84,ݾ9K`EZsd}Ccx좞3G#999J)嵍q"`y_oWuxa u`g#""""'볹r`8nw t% |.yk]T7D79]M|'#n7[ɯ;PwW3M{Y%϶l25؃b+&ڢ+`ڸexL~*r,ߵo4m4 sN\0NxţME,uTTwܝEDDDD̬;pS!`ӵYYɮ*ܝLLt h\SQEDDD$dm嵍LJmEPekNVgUrfzyx.IkW#iǚ`wTԃGN-rN&#!\5Cht+-K'1֟>U~6Zb^dr\ӷW hPwNǚjSTԯSr;h*@OSQt?C\rxn\~t$*iG~%7rm['ό>av[fLOE^[ǚ梊 6XF0/B(bc<>\u C姀Bs/㜊%WME9e($5W.?ü'B9Dvi-Wg0}6tu=߯3 $8TT8tt%42Kj^<x2~nx:*lh4:rqB t6N/f oٷ )!|NyudWYRCFI GW57NzGAxߵ>0ٶT*GΏP*vg3{M9rLILjc v!ۅ1G^d1E1iEaYs^DqZoWG{8a> Fe4<]-֑ڡ eZ)5Z˗%pAP-Ўy:һ78o13{/#GwTξDT(~nGDD]XYʔy)8O62w IDATLSZ۔cpXJNT7,bwQ5~NܐÉh`Gg2p?g}eXwUN>]VU`o2wm&Q̟Hc-&nO-ӗar0rwc[k2q[> f+úɨ@;UQ9MBZs@wk._9""r~]%]ZX>+rV ObƲtFqv\RXYϼYwu@ f>]Ŝ5*i\牃#䡀phYkk8hc.%ED9V]+3ra捃5=2:{Jyh~ N“4ku$tB؁lͩ` JΡL0/% a;룖qz\cM{Y%ASQMFx~n|iҦL̎J& zțr˹̹yNucn03saORKhj-|a/sdio9ۉG4y.r*9ccME.㜊/n04# "'XSQj(&}2KjXYws9o͑H?w"H7ܭ58zk^D$) Ry[x鲄~\ dɎ_Jb?=Bt=N2˘&oSrk7‡/K>:IqsͧVJ,Y%5dT-Qo#$QoRrx-6Z qXt0 2wI^,5%; y8:z3_ma֚ TlblN\=8avJ-q2 6l6ȫuON8BifI%!|]<"UZ C=9ZW9Tכ3c$! wg/Ѹ:: \*^qPDD-y#Ʈ޷ I#VMT6-ؓgsQ0-p-bg::M9R )4ɡ,vz?bx4EqqB>U{8GPS!2[l,ޚLJw.Q=C~h.(!b\x.o {*{ f6$&w@BO)vg !$(m߳)j`RRݽwf.e Ϧξ<<u֢"@H_K{+DSGEĮN/)29){֦x3 1/翶;l`mkb1ci:"|U-y]ʴb+G"HZϒ;׮ؗF O}k3wɧ/·]l{+ώgyct}[, 8=|a/yuzyӷ!vt"\x.{g//.QeLLza5bzpL_(?F`/IK V!C,eLMɥB՝ "rErHfÅ qXmL_ԟvksN{qN+),' '}=%d%v&BW}]yd- DD:jƿogT~/]A7͉3sl;Ksw6cqm~{t$όgfssrq}STK?P "Xm6>X__ƞޜЏ7'׭cu_dɎBf,K,|KM=4cyxPDCH+bʼd6f1_'^}N=%}*"|X;ίi9>1ʮs)%S]oV "ҮYm6^/'iu2.M;w@iMvv*rp;B("80u<}vkOq5-; C;vw21mb?J2/YS؃7,TEDN}ex VglgvfVfUXŴqMfO1:0sLEm#s䢄PB H+nt 4uTDڍ:7z+]NFr}b ﶱyovvd jLJU*""੣|"-s.a}F)όkly >z:s T˽vl{HHOҺnep _ LK 2`p)>9ٻ~:br%f|x0))9etHTPYύ0(;]ޮL4KkH;`}6EUzF[6x+n_o,K6o,Uv/6l}~h}-7ϐYYU!?0i|r-ͿMDTnt /6eisߜy.euz1όg-C`2(ڏGv囔@0ƌCb~jB[G πVTQ1ׇQNL { ]ּϊRk3oeDr2yȯo)iJkcz&@ݣQc. 
̮*F5Eպh[)+ch )3`.3AJk dڽ>ѻ ]W۴z¼5)k3Y"ϝ̝4H7sjW< wgw^O]EoF;{(Z~}/c^ijKbz.Oҽ0[-KѓK-<7)2'_Qã{p˰L7 c{K9W8%in#i~N<>瓩2jf;;/[kFH)v͌怓_L&DPwmfK}}C=y^:YDZ_0/n;#OfvMW`j\|r"ۡy=?LVIivWӶ|==ݗjМ_n= b|㡿~ιOj5Yr)"9sXo'fLe"TGLfiuȮS(r8"r~]y.uYLNۻe2xsB_s]l7n&ORmo:?d/.78V97x7N/}@N> t |p_L:.$HoiMF^,zUGE4<(]gO=p6@d q1$gH+-el茏ifk&$&&rJtrVM^y'03)\Y93e^2 5){Lkdm3oʹ<(#sSMWA""m]#ɛkꨈF˾Q1VPYgvQ[{'~zrU֙: B6d4>47[4۶ .zso/Ixú(SdRSoX51T[& fn0TEDڟP/8 -y DDKҸt ʪA<36g9yŽX^;K)h$@TEDڧ;B|'or3e5:*"gwQ5rAP~/L+r`8w.T_nK^E&( Bd4⁩[նoTpkS\ʹxr[&ύ欧Aw+̋n"DPD]gua|YSGEi\o<(nx\`w'_՗,]ZHZa Hq]ٛǾڬoEs~W9i1{8)6&7ם= L_F'_W.L9*""hO'jꨈWQƣ 618Ə4*4,szﶱ%Bd3Kmw 7 d4(BkwՕ6HSGEނ{9wR.噱w@<Lg0p멮7+ht|ݜr`P!x&%bU:*bJcz^a^,/ "0h0qs+N#}Y'ˮ*~ؒC"psrP *""Ͼ TכykD؛qԥg3!tuU0Рh?: $`nLFOV*""W ٕsnSe51{=;,G#c_n&Jzf\κ H7X}/7kHΝ̝4H7 ^4\cz- >^ƭgh B;`2xenV "PuGl⦙k r뻆qrb+J]@oWepn`"e sMJ.jH&^]Ƽ߳x|yP{*jD@n̕{XEW>YEym#G* BrkR "}yU+v£ 6q廫q6|rF#ÃS.U ǩbe 'Q!_ϏCMGlR "XrvOe19)oNBpWjqǜ-6rN%[t s|IQ "LRJVM>G٤4{sUԟv(`itA DPD~4,>ybVJuQp [^ڬJ.1_g9~ Ȏ]'1};Β4֤+cXVDj^%7,'DD:/s.K7󯯶愾 E՛dԐQ\CfI %@Og޻ng+4K{1{>w ]5et<ӷP!n;+/,!PDZXIuþWRCf-!:lH;=B+H7ܭ5$Ed'wgoNǘVgɼ@fkNvy=pjBgX~Q?(?= n0^X}`T/:-uutK~n !ߍ}B]pt85''GeP+̋'.ţ_l%iܦ%֯ru# h0>~m,{ P("`AViM/CHۯ]=>oWG*'ՄN/30ڏ:`Oq561ix Ϊ3*""rw՝ {fZ2Kjؑ_ɮ/z|FξDqA|!|*}3& Ye5g3 _7OݘnDPDDr(o[OBvS;;yQ6SxxsB_ƿGl❫u% _ͥ};"TEDIFþ~u&޿nBvj]ZvkpF]KÔsܢT*kWl$SB(""ƔQx|>q:+ij-*jUX4SϭISSn_/¼.3ȸ bݵS_>1[ybc vU(Ҫ*jh\F4SO0~EofwTl]6e5LJB(""h0>2]oQiYejȸCةQ>#;15ck?ൟw2(ڟG36Bać61w3 җs{tyo9l/QqqtЇ4CDĎ xlSٔ[﮹x\?8W}g,Yã{;ɁiQUofgXF 3WaD {U!b0`r0hGio9m{n;#V t@݃=y8(dvMqUjtPPDDZ^ bfP wrb*LO>qA EHF =]mjctF0$_ W`D@]= l<(;欧o/ JP/#b^߇ov݋YR5:B(""ύ냣|ISG;:37\KҘ`D섗#&Gힾ$wQPDDN/=?|ji dr,ٱ<<&N7 >v7mcJb˸iX4B(""=jh;|Wc^AeO& *-6/b&exz+[s*N_N3 B(""q}p2y`^dz{I}~{EqfSIB %HP:X(t#vlr"Q_ J . 
B !;GH@ugٽgY= MC}Te``#rub=\(yI_Sxe(FEBa*y҃:u[tћA3K1t-C[ˆ~&hcNerldVմEDF{YSw8J\ZG˲D^\)7uܶv ejnyڗ+植@(""e 14kZGˢ3t>ќUסY/}}edgQ'Q ""RfTtNsq2x/><3 w恆ATqF*_0/p{%#}sfCʮ]|qV5[DD7=V}@W":eѴTwWaDo reR&d27|>_l;Enawi㔷@h>6GGx ̩z" EgW0hsbr /'zŭWe""r75IeAfvpi ccMp60"rM:4V|%o< ,E̍:IyjÔ@[#F9dYN l;1h3j,5J?~)n.+U+yFũ 7HLJ]?bg\:3z7f|Ǻzp֣^'Ep> 0~,Щ Z̈q-$i\$jR5g 0BqpY?ѣI\lIQA63Bkˇs`EDс4Rd٥\"7Aw)wV?C!3'6 8y~3NdֳpYxkL<\׋c]4ۑm*ֹ;ҙSu's"΢7 c=Alj<"n q2Qx<Ƴ| MTwu,^}=c ]\:-:n\j]_gl捵g~Tp.:<Α\^\*Cnɍ2j9c0 xD䩩LݐŒ)Ss(ހ,.dS %\LD䯼ս!\<|ZGKfYTE|q7O͌[Mto ^*y½d:~rV>_MW e׻Pb7WAJ@B&flHRsoưvay\md3v{^IE6=jBPDDn"yapl r ;#bt4|h+ HТ/wX]η`):@(""75ՉK:wlL(#qUy:Azؼ-O]65y}!$e8MN!T\BxGC%f1kZGo8Cۆ1@ |t`00tubԢ䙭%l-b8Z#@(""USD$ߊN^lI ߱.F]0("e3?Ԉ^PY/Y ikED51ˢ:zͱiEvŃ[[D Kvaބk~}Ƭ_S/ؓZiBe:z81Ī pswmh꣢MuiRՇ>ȉ ڟr<5m ""r^%:7 fO1H,׵7[yf>&nth+=]M`>q2x/fk~ vkRPDDʃ]nKí$%>s::W:cÍp6#WDn^ޮݽ!2ѿ=ϳ#.'#at5 ""R.x{C$e1:}&36'3 RmR"rkxa}ZTenTk's6Չ>nZPDDʗUm|>utx .FV] "rKyK}z2nY4yW]/.?J_Pܝ*7u:o٘-mVv8޻1"YkџUzΦ89YMES ]G&e1c:gf,qa˜XS{1)9t qݘ⨇͋H9h*tkbMl[Ob6LR . إXnѵ63Bkˇs`ml)WހP?7F/GZN -,z{UFZEDtun MaO17`ںc ^!^|; =Eq7޻1yf-f%;ΐgfĝuPPDD{+ҳIefo8~S晭\ibxi hIXDʭ!^P_~Mt|iT[Q Kv3nʻdwv;u36/"€6չ?mM$)3mUB+\|`}$eMv LLJ6oưvat``#Duw_IE""r_?3XW!^e~_M_+ʗn',CRD μ>|7t *\OovkH #l-X/>l%nEFaPDͫTUTBb^Haںys9;0c]fmIM5""@(""Q˟V'y2L@B&flHRs""@(""bwtG W/3H[1:X>WԆB{p:v=vCRd1XU}Ha""@(""RjfUdS;nLhvbֆ m-qEDDPDDԽƵH@י;=2c]`PDDEDD g# nMǬ(-,ܒ-jcYӟͫY::kqA;Gi6((/uGeW.BTF3K1t-C[⋈ȍfrdR2Gu4%>s::W:cÍp6cPDDEDDʄa~iʼ-'~ƍ>AMaY]Q éʸ:|w<J#FDҮv,""@(""eɑI=.C:j)1[MZIuwXDDDPDDʲVa~k[Oyf.ُ5٨Š(Tucܲh.ј~θtfnuqEDDEDfrϤ׬=LQZX>5""" ""r3jcBb)6Ť^u= ;hŷ#P?S)ˁМ#`c̝Sy'j1+Fd`p$0r4_Ǜ2)Kձ.y:g2r^ᡦUX0&NDD,B˩t v'F?0i3bpZfȫþ׳&.a`l/gt䢒/ɑ:w&ϣ-p({4^y f,y 4͗`km\:9GW:Oĉ0;yk?ckFIi)пu( wb,8G6§ZҧEUJDDf ZڈHWGÕ#0Qb -< Vv\8$pY2j\:Tq_Y4ՉCZѢ $""r3«$sV7f}9,4ߤFAv>0o-2U&# 1:@~IX#""RBe)\ 4-=n._ q$pIDG t"""""R~ܰ_;?vlhj/ jۮg]rH f8t!7q@u]˷FPHheuIi`l7lx*w~7l< ==IY ~5'ӃW}ʘCfS1ͱ#8r, TڎeҷPPe"""""" """"""r {EDDDDDEDDDDDD+8WЄYG'!^ɈoDfFe;4r`pǘ}\zځx@a.\Y9 O.eDJ8ՅI2Гz+q~LWCS}#s$*7Os|䳁4范:Άs:jn.1VvC;ǰ2ެ@h~̑q3Uc3xdx,b̪]c7B|ٜWv29PK0+GbkN*y!}9ۆv_OSi|a棣9[ 
s=~CE)/[9]j}t;3;I$-5}2sЮ Lgdy1 %R%EgW0hsbr /'d6URֈ΄1h֫H*Ti.^)O7hE\ r3y$?V iإ|ڝIÛJ'ǩsa'o?S"L a/I#p]wx5qUUj,{\v{/6Klя6RsD@x%?#KSh 9v-ؓK\qfU.TdxtT~fpqtbǙ/;;/ iNh/M,չX8xVOtp7׺i=+gQT͡[Knn6 .ㄣG.LjP tBNm/$uIି¤}zIR˖84``}ea uVqKW#:ˆ/%;$:`\Wٕ5gw}W|'Rح:̜D'@7z2}b[xEwy~R}߰]?vdb-ir LnN%f~9!R0sV%85O>Շ-9\9,~yMpvّYToԮ jX3aE鷐ӿԒʑU_3^xiWSDDED`99!b$Υ`A5<;2s2gSIKz_}VYb3< k0XC\$m}ZNLш|S~ڲ]3j^:c3xxYx8~xx?͐Oxp .Xq4_s1c7£B"C6%ߦ߻)nkUjs~;w9(5IDATIyzq\wΡXg:YV#&#|B?Ͷ( w_?Xd[ 8pl$sI8lXJL \ 3%LRϤG<` w䫎i+fVY:O#V`dxVlx7+!*A{0WWq=jlpO03e{ XC^zz&rlnj{|j4;]o@cS/9_%UapovqeT>EΖa䑔Y<ޞ.%@GTpXR|d\yb^C Ru6+^! _|s9kٞ""@(""e^IK7 uÇqJ+IDɬؕHNw9:piϧ,&38@K(?k'`oOtIO"?{cjBaz&|<ϫYl+=0wiL9Gz֜/VEٍ~ܞ""@(""7;+P)0o#Eۙ3 ys{M[7nجrIxؽ='F cIؿS G9yEٶx)kc #:4/bJSf#A-#>8- bY6՜x1)6VՏC _K9OƳw Y ?#ڧL%|""@(""7)'B~^i\n y-Fʓi1jx3it]~5}Crܜ㕮lޘVw?ɪmr!*5gaPzuNefIZǖf993fqLua̻ a|7,v{kV3ƚ_ӇFՂ6'>z%""R 6ͦ23y"F3WCDDnPDDDDDDPDDDDDDSPDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDnvQuIENDB`rally-0.9.1/doc/source/images/Report-Verify-for-4-Verifications.png0000664000567000056710000027756513073417716026331 0ustar jenkinsjenkins00000000000000PNG  IHDR15( pHYs  tIME2#tEXtCommentCreated with GIMPW IDATxuxo5BHHnA\Sܭ8m4-^hp ĉ&ݹijg7I>av̙;@ "$"1݄YM|t݄YM &  Mtf=@7.p&znj&SAmyrOA/> N>k7ݣkߥooUudT²csD"NsLZwcZ156SqAIu=FDҾN%-|==?QH 24Pݡɋrk.88ᲟSH%T\qGF'Z%恃gaޗK<ѱK;؜Nɛe.^"z$='XgYn\mjװxs?ZQX\0p9, :0Ł2.`@=> sDķq3{*3~5-)\zoVcAk;qa2`C!Cdq ^ft'%pq1=9Y)˞J-@zd}Y;fUH>Zq Lku&#:w2q]ZZSo}rH JnPD%rE#}VO<5sRҵ8%ЮysT k۳od@ ;uqy/FqL;J|G"R盧.Xw|^K8ssNX˚CMB>x%ڵ[);ZᗗcGfJzɢ :z \Z8G17Ld~C 7Y)S`qՑ]:~Ֆy=o$:\EvĎ ?z$hnȩH^^QnӠk+_]&!P2;g,+TZ8@|Y-Uumd7Ogda_8͵[i UI^_ws@;;c~X_P$&׫[dH`YE?RDL3eMRDH`׼Ke<|]: z/Y-Ĥ}Dwٙ=uo+9Z#:{$9T'qd"e#$2"Ϥᤙf F;|Ʌ}F|o=F]j2~ T37=ƩD ~$R?.B84)\ J߆MCVS^k(Y'V'zwj PDDqҬEJS-2{NFuyֶ;zzLbܻyWdc֣.4HQ}Y F3" zJmįGꦔVQ~m +I!ͥF5{ C$\ sON?7nܹݪѻ BD xDOY$5* y֟4yyT^[\&qmV?#V0IR&Zlָɀ; 1?]BQ].$DTmɬj zh g[FN"ǥͪ7n*=};47/+欕A$ JYC"uhZwzdN<:DިIU^+g>oX՚(=U;>%9NzC!K>n8{.'l]^ l+F" wkxlRQ--Dv<:{YbTAT~W^\ 7Wvܹ O ™{o͹ sy3m5 |L^ Vdw|y]z31D< %"'f\6݊0~`'qT'ߍ2yB"5:NNzݪ5֬u8 Grv?'x8QF?nD41wb;{QFk+3SU{ 
*Ъ*|~3[ʺ;i|f~1~TiƒS2/ O?_& bn"^ >sww+ZT:c32si gR0H_t7פSϣp|)~MjN8q|)ϸB2t.6 @_uqDvEDDE(S4&۴ߢ=lv0 ~2$OQJN69E IS3'&qh۬mr|FM+}x/`I z|Cus`&=LRFSZO޸ 36= ۋyŚt۶^Ğ1kfww~]&j9mpU<6τû?\E["v=!c&uF H{4wae"hݝSX["᷒3)y4ـ \$L.KTitŧKӚڋrZK8ND-O-+2y:Dk8Ka@4Io6'u71oq"ٝk]vӽ$+7BΟ ˫?˕KdUpm$ !x ҔcA`9OӸ;/R-xQarpd Vy.aѡ)#fں˖17lq1Q=<}8.gN%"UzDTgիTgxN^qcuMww6!XQ㸌ӣ9\KȆv"2vXg[P:54}åސ6{];֢ɂ/^%#̴L622MRkbQX4e[?\}dj@E9-; dXM&a/_ePPG{Qg(:fzT㐑t۠QCC䯴m5XTAs cF"nϡxP֒j}6inΟ[`n%L9ne/XN_Ϧ}6Rަ#u%a}hf%o&e:˾CbH7}" q\m5lުj-Q NpQۮn8Nvnn D$kxEϦz_ʝGw];="#Ga;|Y:eldPI/|G{}7}GD~|(ę RHL\խ+WjOަTX[ UHEMRLG\^ԩ)H!'*K9.-&z|Ӛ,WS-1JƄ} khT2_ubl2"2wk {4F4hz9"3 j(:[6}H-0B9Ⱦi@.f]bmH̅ZOo,4&kJ"nrO՘cgCayYgCeYXi1'"8qjqvD&[KgR`*2To{p$JCO twSm\D73'=Iw&%2k+: !Ol]IYypw{ϾNVһCq7 r%N1SzWU]*]Os%dȠހi{V9)1d^kvDxnj3em ꘤íȱ͘*,Eqc k(b"SÔZDӀKeǁbG]IɪB~4d<̈j̹syATcZe, z@OX̬@dfՒ"^e_BkgFM $<S$H9]4VCXvF`f#(/6ΜL ('=UDFNZd.?OR6_M}O 0f^fu?vZ1V3 _\#dhUn 3)k) JJV98:Ƥ֛GsDDF5>gwMј koU V"SDJm v1//:˩|nKt b{J}^柎68{xU7H7͝йO"M#>p6yjesCd$/KEjkmL5"bȐ=)jy `4ٿq[52+^HGTƒ+_ߢ!~XTo3uxmSNɞM*WuTM96n#ԮcC ~4܀HTwWm^RBrD߱ҽ>a2hͱ{A]͟ӆyC붚\Oq7kd&y*+n.&r|(VXt]o|@?j+bh-&q)4"Mb B*_28A,jkl C3CC< tf tuᛸA3+Y2q"Ʋg3'=(߭ߣG|n5>J 2<2OM>f+~l5^\r*syVp" })b^&t2ט6<ŸO#26hzIOʵj\7zꘙTu5:7?cGYO1ܦZh[U}:"G+$D=c\;nwwt IDAT~oj:1DǤ瑪u2l*4"MZi?WHfҒuز1[ kT,0i8xZۿ^#+N"3h0ϑN_JW;h{wt6J6w$Վ|ˆ*ZUVr)q:Bsu/Y{'S|f5bЙ;:kԅoYzN5Ad֔lT\ m?'ٗpJ6i;NzMM(bgZwl`4Y饂2|Zթ7'l:wR}tQ]SW2)vVÀ}[5h#v_ )FVyG+O٭E롫O= "'s6r B0Zvo*i3<ه3% Qr!}qX-5P0FKPID Y|3Šzݔڱ*JT2’.kS~ 39pQ(0&28Yؽ ap)z@ϳJSjx3<Y*qfv;\0G'՟Lb+¨ 4swwqf@$^_3ZwgB1$]f|U\%|bI~ Ĕ@>Wwj ʺ$#JMkr(JIx#Wy+)I?׾'re-R^ըg\arub/j0I8?a kSx}}N߶=]ܧ|Yϱ*0wUǯ\Epmmn~_~)tOQ`\:B(×"L!)EM&n7\dzJC0͸Ɂ폨._DPȉͅb:Dv9S& ]gzquBSZO޸ 36=Xb./W$pvv>|8-mԦ|9vxZڥZw2Q]_E㗟j \オo}MVlõ#o6i׉6dg:=ṻ߸qon¬&zn¬&zn¬&zn¬&zn¬&znQ`` :znQŊ#ObP3 +MA J_1bPp&zn¬&zn¬&zn8``[7W&t:0#"{pmjAR5+g|<56?=K#R&88[ IHz+ݐىC=e*½氳1JhRD6{ WW/==Yzvsm'߫lsvQn;^S|{Q'#yyzxy>BJ]V!v=F!JdoDzuw坬hBq+lWsj/4tF8i՟GwiڸG}SW]^sGcf HJ@-n-hsյCR *OmǏs4{؁2ĭ4(%zvBҐ I/nm0b"ߟ5gl䗎Db*Hά,BV?y_xxA J9y[E\^eB;Ξr|렊NVߎs7>s<;/%17I#\9v:DvהTdvnlkWS>^-V?]+<ݰ'%)O/Zyݗ:_f}+-<̢jKWYA 
_ŮyQeohk[ڨٚȡWK][jz,O.<ׅO Y;UpTR4y7DqeCR 'W{i  ^t'%ekvcCè{_R[7ߨscHDz~u+YP,722424 'R˽kc7݈?u_k-NTu3:U1L ̍EDoT~U,M]+g1b.J]Uoբ[ ߥ*_5zۭ#H|udNVnn?i+j!v=eFL5 YЉ'g~+ZA2gZ/nFn$|dϯھkwgF\sBnmGr?N2c+v a fW[E8ArRt#ڛZ9?g|4rF7j⹄-{X֫s )EJJqF(d7#<2a2PsNe'B4o =zveQA0gW$Mp,c_aZORID'ۗqпK980@nh%!f=D/$:S&2@joȞlhmȧ+w2{ݨKbI}e~vksf^sE1"}fu,S}nm'rqUnb'".%IرXE*ke.{/neym6mkSgז6gió*=:FDj=ֆd]UPU u/R?S.^~rݪb`+bM=(oW7[V2G2d @o@O MRj^yǮ\ث-{K֫Y I)RR {aPseBd$,jؠNk`!ceM,aWBLDg_"/wXSmrD8jT*۔ۖDKR֪璞6KoCw!]>O I$埯]s`fzIPƣ[9 i璲=ƷR*3oOGWc37T(EjmzWڹ^rbsuɓV07vO*JZw?}vsϨsɉohmgWJm0rSsbvzNC~]-Ӯ_dΟ)>shrnCu<2v4l.kj,('@5Og7uhоʟ:&ix+"eFŻ}KA1X5}ު5.c<"TeNKd! Hx͍O۬OZو3Tl*XT6$O|eumC]<"F$0+g,>3wE~ۿڸ-2uU[jB+XM Q@W,\mQ:jV ]&jُw*}߫; ͹S3V?8; U9\wrQ,uR*np-m yar\D.#qeT6^ #'$#w4@,>~IIh$BTJ#$;t &g=x|>Tfρ+9 +&Q=QSnOdTu"c-Eg,?ADzqɧs sM1^&/_='[: ,ݗnl+eYP8 hͲJA? oj+ ZXkMVxModِV˅ZO<¶Lg&[ژ*twxf/>vͪ7lj_PMeXlÐvr=f$$bT/_bfQ"ZfӢ^ۢ^5_y@1dXmC}ĜQRk?݅G2!Ԍ_Uf HJQ⺌[)5H^TIcJ;Gv &QJ^1ʭuSL Z=5C~\#,rqq] Y0:Kmk\PuS RIQ-ٽA7p@"@7#ObP3 +MA J_1bP &zn¬&zn¬&zn¬&!"ɂ,rPAW# MHD+"1=FJ;Ġ}gxAWxPcĠt.0 0 0IӬ}]iӳ4R{ʳl HzG&q|.bE v @ze^\aNȮI$ϣ4smn'e,,&VT? 
lڤðN3&‹#m/™N/Ӯ/'{C^x 5XI^U~mOw% )^ͭ;_lGZޱ|oЦ)Hԍ* 5p}脤|0 vʗ R}CR/4nŕ@JH^>OˏqHi^ܚg IK6tÀґR⽷sbC5;ζgF^92V'yL@J;7vOR̉,ROFه[\8qUVSyv ?hm|osgzEvM6tfm駩8_T?lb]}W)BЎkUV>2oѢM#3& ,@sȑv.aN_v$]_R9٨ٚȡj_-aV>X7w羗^X4}f3b[x?⒴,Ed;2 4 ֨ia7E3F'$峓,P!ZY/78wER nV7@5DWQi2#~uR"}wGϮ,6h5]~bQ)i߇ &y80`r-ٍ'B$"8 _M Q)'yR̴~~f=r.n=,6ՔG'tlfndAgooգmHt46ވ릅^fՉW.s ׆mm,ҽ NtE ]5ҹC3ȸ?t$\X޴g"OM:[p< s1O 槆&)2d ?zlթEnOZNPx֘ U#O™uИG[m%G7,%17$>EϾ'\ cW/ISd(wRºP`6[V9ƨjrWU>clER>‡(nB+kjV :֪ۖSXR#E٭o*&i @IfMc>SpA$"-Cl>eYk>`>y,.gT.~e4g߉Y-LbRZLD3غkŌ?'z[NR),%Ey`I9'S!^cnzN ݖRC䳬g<"ty_E#ݿ2BFetQJսa2qU~SQ^ YY !^#sQՓv56DFcݨoV*3 F^nj5HhY9/y|<7'Iqe[ ?ex㩋8ZjfFM+c e#2=)F"==oGsE]+)jk4Okn$Q!ΎB"Qa )_3)j TT}7|m(+y)>yǜQ) ٭KO@I&6Ȓc$F"D4a I!KK'Q)iN]>tu; ;[ y?.cJ/H*_hԡ/ BF?9$NNNB"ɻc^Wxy.Mq [Z[oM]_BX*8Ajuac[3 NmlDD`x+G'y9q?76r{^xͮeTbfсkDi/VU٘Z(S2Ϩ|N`Bq<И0 T"ҔU8n3 HIŊp7ys ʔ0qIzH ͫm~fL GVHW)vVlD^GZI*Ƥ!O랞Qײ(/VѸs sD_RY.f9u*%)gUW-&G|*`^oDkS*J~I݋/i(g'qbEJ4jmqYT 44<$^ӡ^"-E#UMD$1sG"JZU:T0"1/# _Eatܽa<~ !..=VamO)q/ltչ Ξ9sԾ  .9{\u}p2)1>^_5etzu;GTu7wEyTXXXXXx$=EF{{DF艩~<046onXQI^:'.߾϶w?&^|[b\y Ŭ1"4Z)$ʸ۫'9S,x̘ٗTpWx'<>-{K%(ٻ(6U@zGQQX"b5cb4{I4k4{o(" (_/q}?,333;ʳw=.lֶ(U(sĘNNxXD&1f!kq[_vϹH<;aaID]L˓v'[xG,ܴ䴜ģEz7vċo_)Vb7S͙543v-#$2`yStXa_Ys 9'1QDdѱ˨Gu=2GӥM{}շ9Y ۰oaƤ zq ÜB͝W]<[a3GW// sV/ fl$ rƼ/7 {M;ز 1@ߦ |,6+*N_ kvO+c-{0bBT{Ă3J;ʵ?QU"ۆ8%1TKOof;Da_aܨWmZP^=ϩJe-ג9JfjPnyC>44رc+BW';!21(yqA+4` ` ` `DC  F %MHDBW';!21(yqA'\:aV=:aV=:a]CVD lV_۫S S\0೏9n]GDK:8[ho?dCА=osFgu嬳3KOx!kcɆ/dlF:]ZoՔ圉0kix/JsyFV p2$8}x䈈G쥗AFu-eܳ(4J(.ҡ7h{J͍!rxNiL~^%E;m((n1݋ jAzTa,awV2Q $u+x:-i懠` ,LE-ğ, qEoC[L-V!"%˺y⇮SCB\xӉ1 BXv9JDȜdlz9y(UgW1y3 _)+r6nqZݻ|Ts^ [',V?}Z?&IyS>lv-jZܗf6NX|z}׾hx-qY- wa-mvx0,B3 s5-/I4jx3{/_B~1c/2bhaQ2w03e:ܘu33:bnށoj0E||buħ]m =e2~˯\X/qي5j>a^DPJ$~>iEZ)Q7Ya~9f1cvX~M/xu.>y4C%ޯ[G~pa{85~^yX^>/ZEϾǘ/6yɾ_mӰbHڎ{yo2HŻRC8eGb&ѯ]c-k/qGrUsuH/.M|c|=Hvq nnO Kb[{{V$vy$1t/+pvK9w'j 88EIcTZg:Pfʉv6b}){;g!ꮟH֑:zy_z!/>')ֺ~ppvC~}ƥvhҲL((:fٽj0, \Nv07Yeyh@3nfyY2gWXT(?51C`®_aqe[[7u㎤||Px^:/ ֹoë]FJ$1ߣ{e[X|kOGjǚ~~~l%qA(|֎ߍh|V 8ws[zԜ姦9ߚb""[ZQOzٵ_~\jp 
""uF^/>Rm+>[']_DL88=-nP_5]eGmܼpϡ?{˓2$X!%UT~۷K%m/Z{ҶV3G"R?h7hÁ+]z8((V̉ ԦNjUhQ cm#ys͜ХQ7-y0 ,|&AR^=)&<)6~i(&:RW@˯Pp;ܻf0V]ELDd[%'*ouR5ϧϞF%tGiogiOySiҴ6.j3s^MNgY`]=}XqЏmϭz=9'eu*gU20|Qn?.8:~iWߐ>k.$"u͆U۵_Oһa%N.vgv;eyg-{jzl56 ],DgELk79fK^-KXHЛh;P0$ԱŪ'w-1</k(f|X`)nKY";c?)Eo懠d'l-‚'$_|0ZN֓#HȼM wDIĤ~v;ӡ恷ͥh[lZ)1Ʀ-5^9CO4JH"E"޶?- Db>?˾{ڹs翿} 5yUlq݆KqqKN Xw$V{qalt\6/ٗ#" []絤[8- -jtl/#S_N{ d{o9m+sF}I,יm-?yqZãWFtl) ˿XgI|U?:bx'*E\n=$#+"7"&0WTgөUdhפ\ܲVZxX3#fiy3 m?fLON??6MȞ6g$oȗ3}YFHČ('!YWǰZڥͨGh5i\0g:Ns({ZLfBkon9?M4Y\q﹓[zKҟ[Vk8ֲ3}QP &1+&0Nj!\,fFɝXÍ8ŭ]`n ??dV&fbCR,ۋC;#a#o:E7ύXW@Kj9{_錷K×jkvnЧٰჃ]NqO4AmCPJ$ƍ?yXxt]Y-Y#hFe𡡡ǎC ^  `I|:R7_hk;7Z "[[ƂRd,Hl֛L^$xc8eYJDMp iwLu0M LuR WzSYx|ZiaYگ3zC:   Ru/z;m>Dpz7ݖHvu/O~oV=>PJlhm.1ԛBšSV/, ~zl*}y9Y'7 z(<|L?S@,|>y9or3[> |n\@ AvD?9':]@(88lWaŎDVL!%oEg! %˹y:J,|"D~DtE~ešVϞZλv>@g^U]h},,XH;gVMvB 'ơq4 %{{ƝT~URةowt?U_k"c1(gL>thǦ!-x;[ۗ'R]0zΌliyں{ɉu˔BV?Y(A''/SQY(D$\g*HDIf!mjx O8%zܩSL2eeG~*6gTe"eUX:|Ʈ]o,~Ԩ-5y;`[Qij kfuط}_34⋪x.,<<2kӐUBgGm J&yˊ?iP (9-qoӶ{|)uɹqhzt S{DP:n/ޓBL_7MpI>DDdhֿZ-? "^RMc2J 0α_׷Oo]qg>Yؾ2ݽs("y5r|q˥ט9en|Gc} x8zu&>hG}q6v >^$uqzX(ɱ}{'H=7ܡ ;$$ÅO6Tz_V=K9Gִrz2efR ߡMʘTm݁O*&>Pl9=1a|s;y(#!͈vJ ~q=YZ?rY^ Qٳl]DDjA=s&Yß=̂꾭 ~,XYI |!vHDIfAA6XPc6.b #W)ލL""_1 V&mm߼`KFߕ~~;T|%91D8x5iWy\g;~שߺVI^ӽ !xDQ&3EF$&]zLzZO7bvoX~נCF7#?Wo? 1{ڰ /dP#z #,o= ;L_~נFoX(D-tto(~Y߾g{J-GW0<с̾mxU3c:w*mN-8m*Z"7R*׽!}?ӶwV-gk?k.`/̝BFOu޹DkH==d{ȅtbI~ezظ n}/Ȋ}aw-⃂a.j d|Iˬ\|Ѓ]<I߮F+}y~ЀHDIdAR+ _s A]e!< Z=OX0 ~{ܶQJGUGT{>t<  ݰlyWlL\z_0WM߰8) *vrNGF!|ƾ7pw0-BU6G_/&jǔ;zY^>frHQ]07"Poz8_;BpxvsA> 0ێXyk;oؾ_zX-9MK5{ZջV z;̾O'/# PR=n˷y BqzVT*oGʖ 3ZGyn̈́^.v3ꁸz?ؐ/>,1$˔&! 
bcT ?00ꁸ(!-Z|>@CoOXBf=Zv(l 3>سֺ҄5>YK6KߺwxQ9"~MZGեz_1i/+{fӕ}?rYϥ`̋c D%wUs՛?m[|zNηM7#ū8Lǯc*tHͺ5~.v|>4Qq)"cjjx<ң [jM8coZTgw\ܞz5ߞ3Tyʕ|Vt\jnE[x1w:z՛?}](b|VNՓVtDD/ccmu}igwѱc]ޯ//ėS^Ľq8Iۅ2-!}K87f?;8ܸ=+SWTy'n1̸1Pʗ~89~c^Pb-xw{E_}9bR|K |z =Uɲ^HUpDgm|TLѲGN^>`[]p>UIKrOd)ͺX>-YIlzWd{2nJCx.3= vvrmq/Sq\s1=KڥTF:T& M{>Zsэ{ M$Gh%EÃMx.'ag9Vԋ<ݽ~\Mç5i/2u6%+D=#n&bWlJŎH 8*5GDWu"g_$=V羨)rt|"cGE{GLsewdBPՊDb $6bm$.mFERD;{cBɊ;4%En,×)Nu'w^/UGD"Qmm"NQcU&*FqN`b'6킾VdϽ`dNJGE}J̏*-+/IPQyg)uʈDcfQgr[w_>*f' l_TNe+U˟*2sr-Gd6vBbV `'ǀ#+&L>+JRU*Seض!QygqDrf>6R-Ix/}$AZxYR!4jڻd+x؈d~ʭ98~Uk>cHYqcPudBGʥgX6ɚ?ip@k$fIpmÎv-'뉽]ֳGJ7?@&xs' {\~XŁ $]tI{:U6Q0KOOZ#aqPcQnIÅ\k'fP뎍;ӏ]KH"7揻oyY Om@w5& NM>zQL*TI_yjamɯ]#q2u^Y+$aHTv܌ϢO}oazȨELJ?Y[?iĦO^.6Ξf%Y_IQS6-+_sPĸGN~GDwrӤ%MR`'Ө6=*s phRufz1ϢRN^T#d-`d}=Ng, ؘ{Vu֦T,Vn{H,4tԊ+U GPK~VEg\e=>R{*=~LPÞ9E$#CgbGW`i1bi|>L Gv"NWEU⃍wl54&6Z.66Oy;ڣw=HTw־t\p+׫޻#i=|D|9|=~8g)Z|>L| ͣOxn̈́^.v3ꁸ(!-^|>L2z=nGIn)W(RS3C|^b[X̯z -v_0;GjC"$F@E+FC (ª{UªX'zuªX'zuªX'zuªX'zuªXV=&^WZZv</ߌj&I1kر %j\li,c9G;oү&`}4OMl_Ex}z""}=By";m˜H҉Ovn{u}:M3#Jx<^MfNK j.ipJag2]=Cb)ÿoQYx(7zzxe Z{3 X' YdO4ْdpnx(7>mɥI塚]>)P(U yD>aQ1\a~Dj݆kKDwŠ9(fU3koCTomr9u`*bZ אs2ݡ`YbWH51xG좦Ea)6=,/\3і-{iMe{NX\jf'dceW퍦njfP~1jr,P4@9|WaMEo1{8kE-K]P K,k Qɬ'+EFKd7Ɨ[XЋ;㽝*u]!ٕo?R,@Dj]Oq\%D}׶ϾByk^Ɩ^Mjҩ[X`Xt_:?_?/`D5qcϜ N,l\xxT:-S8Q*}%'X;^*]vꖩR邝G>N:q{'Hǭ-8Netܚ[2=ϼrܸ2  '3/w`ؒ푉n…|oD9t*+=+3599]//kjC>Ty9+~*&^M :t){zz0~}{\U퓕=IV¨|݈廽-X(U‰u?WD,nn$l4Z1>+ 9ϝ(`Ma٬kXE{^ڔ5aW,{&6@._zɰw̖ulSQ2|zݠKNf΍9Ls]ĕYWv_2qA1.g g75X [Z>*԰rjab-ު>P"-GTg~K׵{}8#zaQ"tL.sZUQՏSvq r/*M[ YUy6#{">?Jͮǁɛ#7OJ྿~(D!N-揕JǭI=bT:qɖ];7.+N|;RO'U̓J҅Gq{fK Fm*JScR/W3ԱfIxp3RZe^\&.hq̎^wdJh{ZNvvsHC""j>Vstu -m=@v'G& Cq6y[qzy|XA|O>{Zcϼ1U=CEr.~m=""1Re;ur=OGb4bαb4~![NDDe_@/ziUk;Q"nϨFD!u*$"r);ƭL@UYCiXF.wTy.B{ ߧwcG@יk`LUٱ-xm[bό%rm?wDZ=Qչ2/26> r M2 |fMm3[Z)Ąi\9#ǚp9\:#2s]=[bp5=H3;0sV0:Ɂ,Ajyf5nid3tN \#LM\</o67~َo` Ud6,sOZ׹ ;u"r{2'o#pm';7+oGy{JE"gd̚ȺT 鳯+N[l7^JzL 8UӥY(Np;GqˤR鯗"7N_{+ 7ᷛ.-Jg|`Tt֮'j+e2KkR}qgK'n2Jkϥp vDUg]<J:ܭ9~2vIm\:Ig?֛xD92v5Lq[+\S2COee~+u:J&>6nܸqf_̿3O{]< Y|H"z anT/Rn0*XLTz˗uq%4@ΰ3bg>iW.6Dզ+r" 
F~q/{c{k+\Uձ˚Qu<'1֤ D.}fq6]d7а)Ueаx̎mXNoރ/tDb'R 5)(\Xie YM-__Eb`נ '5,}'| 3G+sfq~M L_\79Xnid4xk{}q~a"H'#>hx&DgGODd/ +ƥs#RjUwu|=dD/,( @$ qdMT&2:Ⱦ_6ڹܩѧ7P#;g{'ˉxx+OyE$DFE%S!"SȦbUOQ^!>lDor;''o_\)e'"4YYjڒHYBDD3U/pvAU[ڿG܁BjORZfovަ2.ے`鯽DDD|1*GYJ"ŹNT2كlDE5Dvki.8}% =]n(G`/+ͺ56XoP Va5ˌPN'a39},hXÆT1F~oK5(?lqoT&}ս@񝫅)/8}6t$G'jhOwKGgX g~̝+ikv{TIb~b(ϣqi8Ȼ-]Hvn$#,>NDž60_M\$1l!a)1;Au~j8\t ݂Zs.%dlt w{e<]LXiզKkQ&x@sPּ>Ŝu9j""3]e ߒ3F6cf7A^='8h"%c̾Ԍi9)ֲ-BdT2[j#`]]Φd睯h=onCTwޑ˗/__!N,'IueZVu^q֞_HˆUy7O.F8g޽G^ij2p" DGˉW)RJu󍫪0sȾ3("]Tq͸ وn*O_ KO7|=pSa);̠6뗮=Eu?msl Ԫcl2v;RC1 0*d@(_Z{q MoYzno{ \Wi^ԄoQ~X=9Z-Q~Ō: y1XO v-JfKXq5rƜ!/cRHQ/9=Qa7'U"vi^-}2y}P:'\j,Vs*rv"!q!=g =.xr_ TwgV &\O8Nt삍޳qT*]z&Ug7\qgK&R.'NZݛJlT:hecRtə4zuT-i.JglY#} ҉zT*oLK6=?q)wԞ;;?Vٶԋʌ8Wݝ[|B{n b4-BeDN-~*bl۲e˖-[DD冬?a{~>賯u$[tO,_ͼaU/'ÙUe82_]/"}4?RK 95',,,,I{e\*]Hw D¦wSt&""rt?#V(QUeMw;/.)#fW!}U׎#rVXmf 3,f;;%ɁHRCeSYsW9 {#+p{}٨lv'\VSi8]&r.j™-oz8{2gD3 s3/ DY%0n|T2++#sx{=8Nv~q"";y_q^npc"";|_q6a7my\ǹu99V4zx͍T[` KIfW'^ھb$T*8kWS4rU ͺtO3Kg- ?+sǩOJ^krtށgUe?n$t.KFf+}=c*sK%&o {kyHRevo혭fUUv0 +;]݅ -WHVu7@sUehMmWTFaˮ>07x sdode=^Wo4^vW,ǚCdP*&|[&f6s 2/ Lڸvc]@˻eF%)rV =vPM8v`NU˾fߺN!N|7zi} ީz3n{,ZKo?y]oduzٹ -TK( |) NXUNX$$+W `eD@Q@@a5^NXUNXUNXTGΩa-zz"w"Ou審EUI-G[}pp9a+ k?gὂt-frN n{8'}րoeO+zˢvֳ]mntO{cya>LNڦ:zywq1u?4M4% wNHzȾ&{,Ⱦ(R~BkhѦmjbnӌr>{9iBPb)XQĹ`!u+UDWO,ٱsoE୨q^C#.I{vy!7t In[Y:Zh=eo-q44fF}XV,tKFh$K/hQɵ_䜟ЎM PKAomO k*kVcn@h}/ ։?%S_F=ٗ$X~=\NLwvGlƲd3 ^WxE|wY{szUCGfpF(YV?,_82_{pu/^vʒ I& foi>ƣ+w|' #fXtb˚e8BAW]xC-frC+y΂3q^N Iտɭ~}:Ē{8EiӇZzY;;&t9ASjS>"0`ffy"e Wൃ{Mu?j3VRW Aek\l-,\7wuQvá:vK$7j\_"|{6>xaǗqԠ rC15l6[SA&5Xl6ɤeu4sA`s Ԧ4X&N>56}yC{iڴC<bm H!4O##<"'DBXcяgf; CTKUN\={(E`ʟ1+|\gc!$}f^d;_zvJ?>g>f!fgzD8Az M@]|z}w?~86wp0y-׼iC:/j*T;mٟ$:&TRm~WmI9Ңs'wo:ԍ"YRD&DEe HwZK0n>Y~ZGtҭdÊ'J>%Y3ƜEYK5hlJdBrTUሦohO[g՘!)7ӮG2':E;9-[Ξ9-"CQ m&jiכgdh7i KciQ !qn-f-Ͽ~-.*-ju+1a;w5e4Qa 1PĞLݰvR;Jfy3k2gQDžሿ-i5GT*6ubWfFfTfBcզFNu]pş^)5 01$\2xSw.ս7q#B!fyKƛ# < |{\W!퇛-/> ' "~IB̮OXև䔧ӽ#?ae~QH\#ҬbBQ.wbLũXE)<O09RĔs>CىCG!f0vfs}5R!8‘"SKkHZ+1//HoG{VQb:V5-?L"TJee}h8q !D6W#!$!GLxTMP-'>//~W2yI%"1-8Y_? 
!iINSZhRXR[W\=sVN\ ыm|¼2f{7zO}yذENstypS !$+Ζ/HVqd$zIIվNv Ac_wȺmFwMYVmv@ܫ/RDo,dӕL-"L>ro{ן1 bu= LR~qIBDBj&UdԐх}'7}n_-:txh IJ?ґE):W?1:h ~}ƽXԬ9&,,,-f4!b1n*Ek틵BobO&d~PoTLazUC/Γ ^KSrԊPm}b -]?F5 otWc 85@ϘRj9L)%OrR Yt~(%2HO/'F|!߮:Yd09idBEHܩ]׹"ԖV%f Ԡ P!n.,מ4EѴ r%@H._&)V2CPpPX\| 2 $^$X^4˟4&G-5Z\Εowf#ɇ\~1BAD12y%$I=1f#:H,d EytMD)"Z] \(+i=۴AB g6|ɁeOl>(}C%IE<5Ux6[()4:L%50H>.d_Ҥ\ Qᒔӓ[c2n>a֜]"t0:BD$!QbwNLߊU`]o_ծvkLAdjU*Px>DO8Bpŀ = BA1&qN-(H}kq s?=kPS&Q?X]zS&"q!_!٧72kɒ3'`j*nˈ{INI~B;|5CcQ\HfqIC'3uw_}8T@R"i"Ro?ZDlfAܺ=N9B&Wk;ʖ|}oPBnqVl| !DEm"(& Weςnp8Qa՟^|,lyQzD5 I٭MIo$ NRoF6{{RrR/=z+e$GOLyo,Be_.ըF{MQqw"GJ;OX^Xshjfȕh W]*q,zӺI"PÌZߢSKO꽳t`N^D mtݘf!Jɦ%3w>HL^GGe]zAؽ !x>F~٤쨛7^)Dd駇v?ӧcWZ-z.Y SF5sg5Ukޱ<<]Q1c؍ܖݙU儬--TN),^= ve޾۳!KK IZ0nyWX}3"5g_6)Ɠ.(zЍt771{EvvN:,<cۇQE2~Ƭ ?jiDMJ #ro=je㔮ɛBs0"U֭=Vh?sܦFX^ahlų{¦6v3Yk!K8c .;2ncѥCUxEZ87-jn߾LBGRgtr:KaӏI7UQHH$AxQLո*b,VzGŽ|#X༸[}p,5DG]ٻKs^S m;#&:qŹMOkMGdOZ`ה#7ʾ_4&o"'" n5FPKUM5/ѴZW_"pN4X_Y[sq.W Ɩ=؈۫&̼:jj+WFt#_;IIV5x<ӑxa}8RӚu=fYZw; Im=] # x(ik:BHp.TTji޼.Q#Zyۨ6xO p4L5 t]Gͦ-nIŖq]8D „"*%ğvJCwn„B|N0@uV8p l7ƔY֭[-^%?O@Rc>\ItƉͨUGA~67oBC u(?hJ~_??( U6P# HbܗO0z_Fяyg=Hb*J4jq]D?oh'F@bÜ_Y?>D$N B  V ZzLquQ rŁg=E֙+͝6 \h7@@7v= C hq4NHur!d )ԍ8IWpq4N` z8]'@DGIY?ܩ:t4U2ZOIr.M_+xUg ζpz僒BA?A.<9OI1;nKo9Ҫgn0{Bҷqڏ;dlW| !$nnK'N?M .|o)'bR*,~j}јwd9'ֹܪ,:T!i}AqB\y59UH6% 0ݓ0 pUN,{InxEZ2XtPNp91#Kr" pX]Ve~xQRhu{j"P uퟑmۿânДZGJc;QWMc}x%D+zev4c?S7Fc!Bd ԱI>?eUפ |.u_!àaG ra!VK?NuWryM.hUt!ajǩ4kjqa]L7vM?w611ݚؐGmNԒad0gR>UAB03q<_2U+Nzֽﺟ_5c$y<^uW ow;N9ϡcwI+yu(·Wn/||) 5(giLM6`t fk2dY%${\a-u5vf{9)(Oo M߿gОm6maE!Ȉo+rB./č5v[xf.0DTTBd!ucQϧbDDj$s{o^jfb?'^Q~K߸$',`K7oQ {ewޜdI 'h&ܧo3Dl:%UKf1[]ra6Ѽ&AchQk,iSMA4 bPHHCiI8M_& QX:NSǀxu#Z9עˬ{SkA>BiwSUYnT, A)Q:c!#i9k*) '$^cX1Zgއ2Ppm;pٍaܿݸyfakO/_꿹\A~% z0*CˎU;H#,7\(w{|"L.h@ np3"pU N.ݞgcpDj;<?>bS\F p5i5G>ٽ@ݖKZ=\V:WXy6"y0`x|j%ћKf ~9lГ/Ј9},n-%A|QUE7WujwG ( 翛~ZZ~,%\|&G!fپ+x;ymͨUÌ>J"ituJܵђ79)]^j 5OY';/ (CZr SA 3RKǢ+e3=RI%(9o|m9`]8{'Xw3;oܰ=е۠C9BIn1χ:-~sո ǗɕrKY$O8sxG]arԨ!M[i]5ˢvrW%5Ɋ@?KҦ60E ' "~IB̮OXև䔧ӽ#?ae~QH\#ҬbBQ.wbLũXE)<$5&I*6Sp~ 
WaC(oswǸK^s$2aPVjnfYYpq탙={h}b15)?j뒜;Y#YvB(\y7Zzt2&̺qF5 LշL%iaY%2]QMZYXn@BIB`1!Leh̒(JKaJ[\$"GjcG8lo}T$$|mTf3/9RĔs>CىCG!f0vfsZƆoG %\sD\~Ktzg j[~0D&0 Am!+ʋX"!(&j$$D IUU//~W2yI%"1-8Y_? !iINS\¥"Z[ץe?zpg9 UyYe^';7zO}yذENstyoTSgRLT&`T܏„WSa%BjGᵰ5uu0$f;iaڣůZi{KJcx|L\uFLӠHbYEU R2ILm .}}=xNj&u>٨Щ#&8wԣbF=ϲkbLA/~I^_BfRB3;SH횵Y'T6;zY6%fhj`EB1+P זqfd 1|ሏm:O`1upb~3C$POZەXh dũIz4R֧< Ң#$I)Tgщ|Ƞg"a惣K<۟cv~|Pf>_ *WK !s^t_P[:BXqz5PDCEp G!\Y,5=i"i$J8]H+R!u8(,ZQjQ~J/qkh,_l5BpTP-..=OVt8/Y5(Ff9o$A'̼z^L!(N5$#1B_=,8dֶrCQұ h 엻_.ӔvVsB$tR]SVU7cQ܏WS>pe鹴ޣ/|um&;!UlJ邴b)R}E_eiNڰBH&JⳍexA,z:sإ k4Kwcґ83HӘrW2m 0QDQz5Njr%q SI)HhkYȎreu s2^)"M9v-ޠ؀B IDAT:BQxMP-?݈p8n?Xئ!DQү&Ԥ7^'mCew= y9NifG˲Qsw'gf< ߷`m?VEm6k+9RyRǚCkFPKUM5a*(5[c?(3W{z粈TpD ELx/xh u1IAY~JԖ|\ U__n\YJ0\LbmȸyS%"sDyJ23bP ᑗn?)|J? U8?+l8VlpXLvI8L-px-ʇCR[l@FY{i˝eI AuufMl ٴ&VShOCᢒL@P 5"~廸F켼B{(\P̲^[Χ(zЍt771{EvvN:,z˶ٖu~z2 R8_I!tm,+sO?&bTF!!u/"QF1UHH[}ZjO ;Q`Wnmf%%::_l0l-^Y6qЉ+Εo{Zk:"k^~'x_V*&o"'" n5FPKUM5q C~>7/(AU8c&s:0ZZ,dk1!h퇹/[`3r-&3VwHné8 mT\\9[~cݱo$vGC> vFx5e >ntU! 'g ˫uA[Ӻjͥ"n;m$Vm J:uK*BShE:$gbK$.WF4BH7D D WADgFyyA.N,fZӧ[ju5*| Y[[ߺu #tzy_)(xYjLt^{>҇+9颗.8(y4Liġ8HH8ŜG~Í4@Y kuP8pi Ԉ8%w/&y'TGzz⑺T.$#h,aW?&\f{bNT|?WܡoWK߉n+!BXу.w%}4$1:Cz'$NPn%פ |.u_jӛ7{yƹDOnn6 Y3o I,gajǩ4kjqa]LKC9!ZuS#MRQd>M!6lڴ'Lִ1+p1SίUȦmnM WzyǯvZ/ɏ=8Y;oeIB}NwL4{z;oF, :e2!d ޮy<ˡU[9XɫU.vLb=uwBᢴC-y|Im|F E)NZ!;l33G)SXU}N`nUERW Aek\l-,\7wuQvá:vK$7j\_"|{6>xaǗˋOzdlM L`l&NQBҼ&ORWm7hSҌb8{ ٦i ;8- Q>.kxZ"'DBXcяgf; CTKUN1\Es{Rs77;yǐ8J;YU%9a[x^yýBa+{欘$KJ?A-W1/}njncfGwɭU$973Rע!͇ot:E< eOf4txYް^Qf`UCiI8M_?iε2o`gL7?%h0Kժ=ӔipH!YnT, AJ)PN Hg^^15]D]?w X؍gv+:=zEQW`1q]^\{cى$1rb|w_}."$fԭpOq?} 7#Wuy(A yI}6Gx3ÿ+6u^odKޑo ZZVsDxKm  esEU`o. 
/ )^WJ ':9to֠#N=RP !ћnܢH^GU5/{s%j;[硶qqԀ p竕YY>ۺ߻orbvҎg?q֌_:Z}!FW7O]-|SF_]Pu"<|2$!*ZI{~9Jczm[aI%֮]7lt6P`PE{_˷\5Ewr\-b%ɹRV:tb4mO˫#B)B%3Yh7,~Ɉn?}uI0Ġp$v%θ5*,^.Ԧ=ߴueYL-F,+zqKҖǮjr#]ğN4٧|Ik Zz[F$^1b1"G4(AL4PZDh1ǶvKs;E`!A-X:z & 4avד#-*!S9@լEׯc8ERNvr6&x~b0lGnWl5u&*,"2!&ʷPyNjGɌc6of?c-6CL3?W=%^ߦ\jLlL(t FNu]pş^)5 01$\2xSw.ս7q#B!fyKƛ# < |{\W!퇛-/> ' "~IB̮OXև䔧ӽ#?ae~QH\#ҬbBQ.wbLũXE)<w K ڶNXzն )b9!ġ#ij3TpL~KZd-cCzqN7#EpF9"iqHL2~K=ڳ5BǪ:Zt&$R!m8dEv_=ZkS54"ED_1Q5i@jJ&8IBD$3k Q:$$-zjU'\*u]jpQx*։!z#P\UYq#Z-yԗ [;JkfJ5H!Yq|-TMuдQx-l3z]G:L8 !d"NZaڣůZi{KJcx|"%|r-LӠHbYEUeZn'wq03TNBaG|l3olxhD rO?SUt* (ǨQaϳT.>6 B6$pD?WtQFNL.U]h,[Cc  qĄ@NYxY{K|.SiʮخB!$+NM*3Z|Jɓ -:BB5}Jɇ z9{.f>8K_oez`wȷ8j5"Ly}5~ZP9wEW+:u#gY5(Hj8T$ApyRړ("ZARqC:W(5_( ?ɸ54QK!8F sӊuh'`F+:BH|,D#37Q]Bcf^=b NPQSA'aI!ၯV]Huk[(~wXvFJ/NxLȍiJ;OX^T9r hRrIJ&=e鹴ޣ/|um&;!UlJ邴b)R}E_eiNڰBH&JⳍexA,z:s!Dli@=/Ƥ#qfJ15%&q(!`SC  ٔ5;G@cS$Nrnl #8&0Ϡ5Ry{D(["$a v CH،{}LkD!}n;I4pfh"SiT Ru/+pFMborУ(S?  Oj] BA1&qN-(H}@ig.wwX|rffʓ} f/ܦcUf_;\k#',/u9fTTzԹeI9b'DA$&5_)$ !D3=b䓂-!Otnܸ+Y ~[`\0:X}-2Gߑ+3#Bȸy,n>Fts.n=n,;//#//P$Q:mڼ%gSȦG5ahD{zm>^ ;TH,]濫 O'KGf3^`jمKFᢒתpTKSz,)]7{`D&%[{ꭐ~涹M# фygMmf)֎Cp-\vdǐK;I8"sqn,=e[l˺o_ezp8B6vUk7CN%vNřLlåR7YV|gh (ƺc7ފiO~o᪥6Ոd2IW;xy5r\4:::::\ jkHdnSwHvuO:f k|DPDN4u, zC[Td$4QkU8I'-Z=Q Wm[[0OKnc?V/Kkoӧ"Tp_#']'6~CWQ ġ8HH8ŜQg|͍4@Y KvP8 p Ԉ8%w/oh<KTތFяyg=Hb*J4jq]D?3Wܡ5hN@v=ghq4NHuA@ՠ"LMM!..4N` z8]'@v=h((c3+;UJ~^F)iXΥ+b^Y|PP|=G1ȅ'47Urǭ_xM3GZmf^W]9Cx}L!dgyI'߉+;+>@V^/8iX7_%@$D[ +o /:7[E'pX%E' ~e# ߟ=mƛtʉ1>nnK'N_'Ŀ OqINDanK=ޗV"~j}јw0ݓ0Կ V IDAT$QSZQ?uQxİ6~ %*5GtB+}2~[()4Rк=5Rt^~(cߊHaQQQQOhJxv%x+> "=2yWrGCBo1)1 }B2@JP3I\+jg7l*Y۶Yt,3gz3FyO384;?3MSMcJ)KHPdWV..Iɞʵ/!!([.BN6-35R4-{-ups>||݄AV=z>"#ELlke GnǑ*Oh H"/ykmtYit"^E.7=qf8i EYWuZ'I=ww\sf|-Հ~n%""0;fjalܢ m{rJDWsd9)uN/dfd]m~ʔ2ۯY)سm|YenAz/t}fqUWl/NU1-iy$~8Lm0+- }ʊmu) x3H߸iM! xݒG#WSINejQ[eV%[CtG.Ԯ2E/o([ZU\ KZ.nǨ1*R+Kek7.s]aRߣYaDB1Zt ZOж|Zs;_pe͠sѳjQPzT,sl5LWaƬi8P Ќת\~]"5q3}W.J!pyAnJ/[C?O59x1B)@fBAqklWC~ӞV׊y1z&:|A3'Ş~TH?E:戤Co҆/i7?g'4:Vi^r%Ө}OWuچCvU9pgd9@^ شGWGBZ 4<.밻l 3سO~8={r׭I~{og*@-)a 9$-4bdTY:? 
~_QEyRyԨNCD Kdt噘P}5KWn^cNZIfC' BۮW&թ.ܗ9;tk7mzc+JzѝV#suwb(~qE,0@)%;dܓyzg 3(,ک͉1riM~&)WŽGCwoq{s~7so뼜 N/#I\DGi3]|Z(kY \V d7R-M5QnЛXҞ!l ꕮHʜ[kogn5).?UQf(fsuJl]*ˤFИҹ|sAT溒"9i;zvܿeC~>gMǭ!,nbʮfBjgH 6ݔj7mTEĐ!)i@8*`V6Dԭ(5G~hdOVXRG 2`ʺ Amá.ʐzK}.UQC!U״ P UWE:w3;x|#JX;~ !UqN'gUa\p\ <=|>&Ԓ̑PiバeI9ݴnv݂9g~9uK9|3ESDSZMu{R&J^ɖ:XEXV=@nWˎwt2g @]!g).;h.sJ*mgqI̡eD4l#!oP])t-RPhZB{.BxAՕ">X ݶoOOHD  We]{(6J~Ұ&#.=ۿ IrJ.!`pw"e'Y('TJ1 cXCNڏ6=\0u.ֆl A{[}*{<˱ cjkE28-J ~ =r?һӀB v=\aə ݵ@-JM,7Cr-Ք$qdPg0z1j,~}o}m{:DQSG j7b`J=ueG;![&W-(=KEy}áKuZ)Ri" s d(Rhҙ.CPpPBVQjEja 2 FX$j@r7.[>8w_k̟AHⳏQ sO%L16zNz( (GB24<}\bv?Itֲ|CQr`nyFrN _H ԁē ,7CTpiLtàphwn)~ :Tա4En=^pLfAàm/2sj^i2M9j9regH\(+&{bH ?[CZIB 4Ӑ>g,(jp^lZ? x-Li%罼niGuݶ%;ƥfƞ۹/}{Yc23\5wj`xʯUњ}npEuv44,PG7!Ϻt#ZIQkH˵Z#ķmHQTFFåf={#Lv QhhF' aOŌj%4~^ǡnf mVǫ[]϶w.GJ6bEpϹt8m~(#uv^^F^^fbz&)W8p@7Ͽo޽N3kED@1.}Z*q\e^|U\Q[jiv^.^x_TtOXj (@ȋ):,tt aS)Rte g[~åuk֮71נZk׹z0LzZ7zV$s [;:yf_LiuPKז0zV&djК8/ 3خmnD-mG㕆NFS1<׎I.d&*'\q4^l=YƑ΍0w54ukvL?E#֭p!-٨'8|\=Z1ie^GIJ4#xl7i4zqH~8J<ݴSu/4j1NŀMVY[$ Eno\el @OahzPbE +:}m.yuYV!vhj!N*vZ-@ѹupb >^@ݗ7-9=psH#eChbo%Fv6ڬwqN;ϙzU׉L3.|pu0a[>KdL\& Jm'/keBGF־Ŋż{ur59VEZ3.x)Io?;dg"¿u-"iB,'oδ=u !giҮQARvjv(Iw-]4.]<~ [)8 I8aC2#Alҡ1 Sx KNZt +բS\!`7n(g|W|UO뢏~G hErHER/D mE_KO05IafIw"1@,"e![Af/`Oo= C BR8<7f/|oc-[ܒ6)Az ?ʴ{u<xcDzLv5@Hv@ @  ^A @ cv=@ @]@ @ ?&h@ @ ď QG @u@ o 4ou@ / ďM{}@ @ z @ 1A@ @ ~LЮ@ @ @ @ z @ 1ѸQ|gfݩ/+0@ѯixι N>1o}_mśmC/WЏCU5\TiK%oCNnʦˌ-WɈ2Ko>UAXZ:󄻕ո#)J\5kZ寷8ޕG]K"Gz%7$D4dVN,?*W\LSEg"0'Iwia#HU< E ѻ[ 7]v^omթ *2)YZNTBpdQQU-{ws9J\}y>V>~vf+ ^Y|/]H$"psJmd磼OvfNDDga6Y=$CEER7c;n=WsAt*ۋΦtr3auwf7?PEI2甅?ف)ώ ژ(!ȷepuåg^7?ʊ,>/(򍲆*$!Gf/{ ] 2_g}7nE.[Mtp%}Yq_+' #9ϺIR@]P9v?c )o(ůCz8v,4E@nBq;oVy& 1s<-"``[n.iكZ2k۞?m91;puO+rU]դW?_43Fգ 3Q_f4[-㠈;#"[M(sƒ6 cw?1v̝n5gjȓ҇~')e:fzF},7d+}s끪VF;N㳼v$Zn'}Vg"~:B.x|`?+~N(@W^j$3gv313ssjxX)X:F|1oF'YЙUQ=5wJm<76iOkoLX&igDG/67 {;N>4Fu_5vZ SIgk@: "WFerh4a1t*Ťa@(E ^Gc1oi3)$%GFgH%Sn٘oWTV! 
)o p]?>O.^x~K{=T!t^Ҟlώ߉+Kl>[ E;l: IDATqx#$5y+֏oVT]H7*rH^Csѩ`g qOUmC⊫Dhe;928f,qo嵝8TFs :NKغ.O*Ւ6iriFD)>q2Sl?sV*(*Snlf`϶!{sdJ1}xZ|+WETk_YE8)Wed`I+ޟ.ILҟݸwwdGtƭLtN0zV[-J-g"~B\Əu5=q0?cח]j^㗦xaG! o*}TTbL M5gc5TQpB) FQJ{/]͵+T|:wCnŋ>h:`o޳)^6θ3cX;>TMrR ѳpNod|{>+6θ5ӵ!T|Uѩ#lZtfƵܹ ,~k~rB!DqVJ&MД oH?GŤNV^3tufj[հ͸zzX:oU*Z9c:MG|r't5t T!{ZXl../d&$ v^czŸVȋ9ֳ6[ 9(B(i5G$}z6|M9C=ѱJ;Z,F{ %x6ʁ,<#=BX^8Ʀ8?Z5hqI^e; m`9 Ş}*&(Ȝ$8aؓn}M{;SjqN P,'al$KZI"ޏ*r|ӣjca%Ӥ"\K"p gu v]M;؊R^t'*H\]g=J(_E\Qk3KS0@)e2æxJeέGj9sCy$|eS% U 6T:bɕr=c;VSIVWnp9^i&Ϻ'CkGՒy_4v>N#)s ^W-j]؝B?M )33"}oSmI[w^ jom(G)(LmCm:]Ss'nl*5]POUEa*iwL.BcQ˧tLs]IR=U[ _!q&Vې?JUh71ePviohsYMvF, cb4 0?i#Rrz֍hdOVb4:V)5V`CY!m8E^/}z4R`KUP@5md(HkդExufvJG0 r=w]y4@B<9OϪ Trs''lZPK2GCB%t+߽9~}=߷v'=PkG|d"qv}M#i&z^_x~G.:}>%%rv;ټs7gvZX$J6zJeBע* uY(Ʈ5`4r/m`5"zD~ãh>ӱ  o>UoʐնG1TTYF\)MĜ(Nyj1]C+߅{X6cd1*n丬Qȉ=q6UJ*s:ưtZ Rh츨Ec p]KHcѥSz{5yJboa4-]B$>C 3=+0wgc2 *th3LG yW3}M^zI9|obƥN9ߐ]*W#pXrfCw-PR M\K5<%09ɅF\Y;3w/˼`nê9ߪtԣ<] ՜o+ y{ˎvBMZ2pQz_ C%QN(2E*;MSDt]CVAbLE^J~=a5d(2+d+ ^PX4K$LRF /./J^5Ϡ P$mE(g9oR=~'=PHAQN!GRctq>.jkWw;$:LkYH9w0N<#zl}ώ[s/$+Rսfa_4i[KkM#Mϥ9{E 5]0/+Q (F IHB*k/V-&8[- ԒD1τ"}_}=DU, (m3ک|E'E&ΧG2?a*FRrc|ltəhgJhmZTыF|>EO(KRզݵ+$^BJ3(T0u egF5cJU@#WQ8Q[IQw͸T ΠPhH P8}A1-Fl#j =7oڞ2 3lx_7S RzM]WiǷy-7琵^'9R 0(Ϡ Xy*Bl̋9~yB$HM0*ڕ֥u`F<$8?.xC-1˴F1;$\ڱ?rt7]0(=Zo c<`u4M[Wm>Y0h[x*N9F4&Ȝg@3$.W=1!EԤUg}Z @`iHӳdC58/oCT^^ MdУ: onےWRrRcܗľm=By`IO;5{EתhMF>QُT"'#fG2MĨ| >o c[#s*QpqވHc@7څu6I4?9jCS1Z ߻q腛Y€mD+v=߹#g)ڈ %F>{Y|z}QA=az7[~åuk֮71נZDo!UJ`px RxbBq%1f#{xG4BKZ[Ks:.~ylDv齖6Ec:DŽ_YYWzIaDbu =5ֹ j㌼ܛ#^ a6mk?R$kfcPb::D ,-JNWX}="⍰k/"yQׯ1ū%~o"Bl E" N^&ߐ¿]I6YFϊd_=axGG5,\i`6-,5`Lljaڡ5q^2A>g]ו_5܈Zn7۠++ 捦uc?by]LT OZhz#aji2n~F[(4$٨'8|\=Z1ie^GIJ4#xl7ÖN! H*v6Nս0kӤANl:6YgmHQquH1^? 
b-iCYo>a58CQx̲ߖ4$nW- ٰsFWqFՓiY#ew7yszupn@E&_&\LCz/;p#ɮBvFcڤoYqǦ'Qo f ^[:ns՜ k+^WP&l &$ } hMOW@O@/ԢS\#; >62t[rG\[gb5e׳,rG5{C9N[@t*hdٯ^RM:񨚫0 1pGثl3`|vW0++R5 ңn7+=RlڑsD} @/he@ A LZLþ]ŝf׎0b:Gt`]>m];{t/R4](oFP RoH7vDao.<<L'SijJl~z±Mh hZ( N@t^}:~w5L=Q'Gzi~@lD(Cv?,5g5}/ͫngb mE_KO05IafIw"1@,"a!⿅jن"ú鿿,SD$pz[݂>ڷ|Gyyߖ-vn D@|۠w=?6qBS?2A]_h5)z @y@ ~j#@ 1A@ @ ~LЮ@ @ @ @ DŽɨ#@| z:)@ Khмys@ 7\?6o w @ @ ď @ @ cv=@ @]@ @ ?&h@ @ ď @ @ cqף~S#ES_`W2`ہTף_s|b$_)z_ ۊ7ۆZY/_FjMS50[zV'Jކ2ܮM?[e~ğ8}po+^~.u w+qGR单=j$o״f_oq+AoD]f\Gm{@s\8zV/&!*'+YM#O Xy$-Dt*8?8I J F7_M.JZy*D[Ƿ֓r cSߟ[[*J|||VNT 7lNb@u$+E!c;2 ^Y|/]H5k#$4[IҟM۴g.r5X$U'wnLɏX>/L¶kwt@4F5"ӭ8'ܹӗsD^v*LAåg^7s5ͣ"܆^¿D24WQOw%JƖFRnǑb-Wbͪ[nd)@m֠ulm{\{xzknV}ɈüClPWo/{<ϘyJ%x됥ݭ/ MPf\[& 1s<-"``[n.f|-Հ~n%""0;fjalܢ m{rJDWsԤU䫺Ivi2g mieWce3z!xqvS.Ku.,x߇v{"1î(Yh2Q {Lf7bqǖxDdҿ eNxІA^z,ZAR8ӭfL yRZ$%_GrLȢƃ y^X 0 oZ kŀ>ϐkhBOIc/go%(ʫ\,#Ixi=S;ŋʯ I3_V[-mnv|ht= (Uh,0|#m&1|c,6:rά 0uSa({{Zm Mc"5Rx #-u_5vZ SIgk%Sn٘oWrZpBhk(OKMNRri/$`s k+|ÅvfW,>^{Y/P.~sӅ{=K{q<;j~'/zpܫ!6Z.;8L&痹8K;< ' 5A2J9hSb5˺o ~/yg>'#;oH8$j#"Jl_W+A I59Vi&r2q_xxY U|j%}vf t> KG^Ij(cW߬$ko\U>䐾p0SZ>EA?*k^Wʖlwosd8p:X($k;qT fũ.;޹A iuku-O*Ւ6iriF7$/'1uT$`Է.H1EڕWvUqC 2%`n=zQՀCsdB3%'Nƶ 9gNJ%2jkVo l78GVDs7]_:YܡŷrUD5[ċrUFz q =qZ >A8J  m˯4%-Dk7{` /450/MJ=SCT(F(Cnh Aѩ m*qNԑNk6?z-Q&=:9m uݕP!y~冄kEkF@E8]_ЅMH?7Re]/svniVj%;QG:46:,QB^-]Y* aRJ2wȼ':hleI-<:˦K \`3JHsX,%%h0}GSԠK:vOv~ IDAT=pt ױFɏ[Wo8hHgޤ_PB 'ט&OjV­6ԦSyj=5wrƮU_$Z,V  yUQf(fsuJl]*ˤFИe/ar溒"9i;zvܿeC~>gMǭ!,nb.tU3N53$ Ӽ沸RJ_2D9% GϮuT\uwRF*e&c(2 (CO/F Lt Xi4P*:w3;x|#JX;~ !UqN'gU,\p\ <=|>&Ԓ̑PiバeI9w)|sκt_ϯ-rIQc:_4khj]_n,dy</zx4߄QԭBV/%α-Y8#v&z^_x~G.:}>%%rv;ټs7gvZX$J6zJeBע* uY(ƦW"KcܥYhAGc(}~\[ߔ!k;*R5J h;F2;.iј-DEo:4h~ #EܼSث>(NyjqUBGuZni2`v=w&侱Hޞ :o˥}MK) `4N|n q4Q]:gۻWӘǯ$fژ$HHfMJp"'T)kq,VIGr:%g6t(5@߄ɵTS~LS@\jeC-~1sǨ>w 2my_Bc\ P1(~)(C `L.Dž] ~-n'Di-7)gD-oxqk &5C*IX jl\R_@`oJŌmu GDDlP4\5 !73x &Giz.٣(2__^gvxFh^^J@a7bHD*Rdoo^{i޶jE7!Wa mYd%y&*!b]@hN+:qL.6p>!yᢏ~Q?U -MQ*zϧ {iY4屌HVzd@-q*AJTG'tX JT^3{OI뤾uew, &pyk62u"ӲE,z)nN.Ռ +JƤ!O bI6BMv| 
Ԃ;^o7H;l89<߽>ɑ*T@)}M2F^Sr`e^#ϋҿbTDzeuiCQ;O<)Ώ xK2?Q̎w3D vϴM8 GyؠO%X:MSD#add> 忊SfN+C@ 2Y-G8P euOigw_2Hk5i~qY}zF@ u҇#E [~uM'!)-B--hNۿ۶dո켜s;%7"`{xP>k{xlRff򓫻N ,t|aIk2Wݏ~91%=*i$FXP]iw4ތЕvp1^g ^toߥfV:7A c[#s*QpqވHc@7څu6I4?9jCS1Z ߻q腛Y€mD+v=߹#g)ڈ %F>JR?ZvAv8*RJcFiօt\X15ֹ j㌼ܛ#^ ԡR$/7=u"178;7VxyڥZgJ$V:Q2/.>;}@sf峦N3*)ttt%!kLmWCRDעsl,9P&"1@YQ !vM -V+  m֍꿬{–nkYm\[>X6jU[ؒ êCkd |`+﷙jnćA+WW:M~Ĝ\_;&@rx?fGz87Ք}e1'XQ@iH8 Q3JOp8 zb֡ʼ"e9ikG#n-'.^i:l7mT 8M:d̦S1`~$HkWY>;C5`X-ڲ>}QN_[?Kz]c|i'nsj`Ȇwq1oά-ӴMS}w!KNTv/]ىŵ] CY.BȞTh6-Ҝ\~?|~?|s]|*~_}cs,Y:a0"L~}lIϲ?Ʋp^6޼SeuZsSm*]}XfVpߥՠkkMoݡxQG2#_B *nNbp3q;͞xu+Ix#H&.X8 hk`6d΀85ZX,o{ݚcOǬgЕ7+Y*ed-w#gZ@p}աQ>}|o<Czyok͛7[2+wsv<&[5 }F ;p0AqlF*,Ay xb:Ȭ?ڳ,cBvp\D6*^]6ILGV6 mLrl[:7#4M39Ecf)1׎("8[0F nKWND^?Lm0f"S(٪(_tRIߗ/\nZW"G|w=@uņN,~̏LYogYï?BjQX@ANUvS u    z    R;    z    R;a@LL   ߃>  T8 H8 |.HApAAAAvUAAAAj'X@AAAvUAAAAj'X@AAAvUAAAAj'jwfÝW>#IASh8<⭬Wі6D Y FjЏs@ c*6Wd޻u "t0}Տ"H g8%8fwI'qGc%ŝ;߸rym0]\_zCo뽤|a9w͒[7IǓlݦ f,pn+taG_֬ Z:cl;2QycW\Y9*̪r\]viWXHBfOuЧW3 :ϵomD& 6%rõ RG%)aKqws֒F6?L<Ѳ3ozu>V @J?>?~oct>I}|Wt D;*K9%R'̪⪨ WLjQR%WP:VT^+Gg-i~H4#|l.n,I.['[us%}z;KWYSzYײˊ Hr? t˵WjRv 7\ }ƊMH/3,`R){@+Ak.ĊH >Y 8j"$XVju*G<1e_GECֱx%ѻ\^;3JnhXkP 2dP=mӶ>U ݪK72[ WL< ' CVH;/+uՇRU^]7__'6rMۮ<}y,sѣ.^碸74EH ғW n8-E5~>":.?g_3(rOx7ߺ37{/2^+M=/S{v?!M J8T;h#"n_'nCUMPRX%p6iPhQචxU)&PM;o|d壯T4Cv+׏o, (V62(xwvxl}.$EAM?enKV _`e|?ߥn;> ,_Ke0}֝{vin ᫣"WqS]U?Zn=+*{K5bP$h|UrriȻ{򟈦Oaɰ8ꙓ^ݔvMZi<hӍeE^&#&3m`o/O/(yFhg(pʢKX݀NE'/m\n#$YKM#Aӊ|WGq?֖pIscٜeq!~TEon0h}Nidxw֞ɄfL'MMbjҀXbd߮"Ki&:JXWyTv#96oˢ**S(O%)f9 MUR^}(ZP(͹ zzmlIq[t؍g1hݫ3O]   g`lz«W !$̴,n1[9Q?O7e}&g(LH(/ v61hc⚕ZГB.ZA3&F~EV>IMuDoo1wr!PX5qSG-v6=/ zU)(o:D_ԑ5֝CnPxWٟ|ʳThq9O~ؾ?73!Ysr觚$~#=< t$ ȅyl79j%]g.9J.*ruѣl GQMU̠?)ZnZo9Z>C%*9UQ-J.=6}3,둶 t_3ѮM}'48z:Owh}B<S]iAͥ}Cvoq{sKoJL͸fCɁ! 
#Ced|wdB"=o]iɮM LG.Γ]X,ci Hcji4TIPyK^p@c0i8.5Y[9ztCTRWP5]dIY9F&d"inr9l4æ>Z51mD/C}[X!^ogr z碰8Nsܯ LɄ OicZ}X"o44 hf&Լ*#7+bFS,'MoiT˨!yQQXLdA#*1Ci,Ft=s# @J叄,U\\̒sRbs9J)KOrzY֐((O$$MMs% R&h4u]h 8饽JW@)JO.R\Xϟ}yg{T5[Tk( j1]v*sR4eMlrVm9GKoPʧr RBXOrϺb &Ty^XnP-~n!~{L,~gn^F$'7Vj,ԦKyQɗCYr8; _~һl>tskC:w~wxЋdmGLs;2gyQ;KO:n+8H{a49KT !b%B5*"`hleq38kǶO,+C9SGVe %4R0g;uU{km9 N?m:+ %I92M]DL=4)،JDLQ;t_#;.C tїl9_EcaҤTCqJ T..O<@{~S@a>fX#H|~*/!iBwt^$RF' ;}1#Ѐ`w657Bnּּ(=w(R\SfD}cEngǭ=d'QfL(:;U=|.ȊJSJ5Uh IDATW69V2PJP:Y`)CnB\R^>b֯%Io,ػ_|~ HEQ!4==OisdݺI-ͺ9lhh!׌VLfs由̣t3NeH:h@c*h4RAI@k76`~-4:AXw@*„:"%Ƶצ3yɲ: għkg4(I4 ›ǵ:L79nϢɋRc7i /'r9 sn3up*?"uUߥUR Y07rN}G% U>NyHPDid[qqw|'{ݮ͒μHt{0΀AZT(2n{qijD gФH(R˳2D \M4=R2*U+MKt̂/~4uia#?HxRf#mAi8EjR;Tr^p[՝i9~{ @՗ʳHU\\/|{HGSL; cQ ͫ]?TIHmjΈp}Fˁњ͛3mɎkq)qvi`ӉcAb^5gof_b4tݏ>A_EN9GlGOuUYQ)RjsjY*/[JPAaS:Y`)˧2o*HPbC:Bφ<,Ȉ p0Փ$dafrﶍ^sR e%+MֹwxF4esCDLw  Oim{6|%̔g{5֘&OiGi9 ly3) wuL ,-.&F+H iu*ztŗ䋡;6a(^v`}ܔo{r̷d]9eB,G(uQv7E=8߂p̩kPIREX WQqB^NUP(AocQ"vY?c̀9f^Z 7xv\7ݔ9ýWhU  L08`W`ЊH?/VŃ+S@őtJmmwq`ɇdk2hR5~$(Ϣ6l\El8&_tƳG?}C9/U8/Pe*<]O56Ќo9cT&5eMX=t°awEؒe&ely'ȩH91LSgEE͍kkrvŒzo.WPIHҪoX>sq;͞xu+Ix#H&.X8 hk`6d΀85ZX,o{ݚcOǬgЕ7+Y,yWed-w#gZ@p}աQfA,mk~ l3fa▾R|h?qCoc0}| x8ܯetfEf*e(555b,}].NP Έh:pjiJPxNyU>zyfuwKfp®ޜ?bBݽ޲pöv&(7mBW垅Q (t01<'8H-̚=kλј>HH?Q8J7aJ$1>e ([$ ζCK{s>^>HD>S4kq+"~7 jV~ xhݖR ƌ@v*^w=~6[5E+N6E#kԍU @P5̟uH Rz4手 Y ϲ^۝BjQX@ANUvS u    z    R;    z    R;a@LL   ߃>  T8 H8 |. '\AAA`AAAA V=AAA`AAAA V=AAA`AAAAډڪG}pîqnDi&/ϯx+5?x%o l<ȭQu#"PXAMUx:.poì]:Lr,;/΃z =FyIg`XIqgy7.E^L#juDJw9uE~d9X]V|>AR>/[i '#)9ȼ7}IJS>/Џy$,+\zd4R;,Qn-!K8+R{T>hmmw5,:=)2aTxrZhU¿C[.ܘp/[Rq0jYǻZИõVj;;-3K<}sṶ_1v{ozF 5wjM#?醿^g[@MM5 *7.PRW`25\5 /uG%IWmUʋӭ O頇画+éd"s^G¿KzwMO`s4CPaNr!Ҍlf$ѻndoյG8">,]AfOpdc_nv.+~((q)O\w)-e}Ů@`yy`ڄ4_;J?&ş'tBR3yìȫ({O2.U?h%PD<1e_GECֱx%ѻ\^;<*2:O|\bۭ_%Q'쿼Any-eWF+WPDQo\dNaDA|5r\m6p\&N _o5~l\~ӗ:5K̑K>x9yY:7_iN t:NKx-UNȩwe^Ti(Sq9k!;h\ã]"A|zoŅE#?XN> T^2[ u\XP I~aiӬoPK\ dm>k}-_0M0cԀ2)Z}sFw|(.9J05,S[fi06 ҞE%&iY]F22ls{ pQ?}^}>gL?M9e#Z(u1OroD4__/dʋe2-44KCW&S*\}? 
W'o}.Ezlxm"%tvKO^_&xHiה!lTpОq3>|Ϡ=Y9/߄~_|J4M=/S{v?!M J8T;h#"n_'nCUMPRXp6iPhQචDY|Cn7]>2]*@t7VYF+Wrxa;CAO>Pp2}U]%/2xRE j7gLyzүM٥ÔYw}yZߺ)ꦴ\D)IL(OjVz]Z|¨<~lCR3 $W5vmkaP0+ǏïV 7=ODܰNqdX|7%]fk먀}"iZ!χ,tc٨FH-*r?L JLf43E8el%[qFn@q[N;sHk}\ BEt-/=-? τw3 ~?Fy.LmXuGqFl;E  *R9Beo _> Y{E#P(͹ zzmlIq[t؍g1hݫ3O]   g`lz«W !$̴,n1[9QN7e}&g(LH(/ v61hc⚕ZГB.ZA3&F~EV>IMuDoo1wr!PXvljN K+*U?M:`SG>״[w :Z0ksC]e)RǕ=M!,Vc܈τd͉jVǎx tc3t}$ MZ$Bo#v>(*jEIO(E5Vr̦k5nԻx,5Ӄ].|yps@˰43_^qUD.~yeTeI)fg)8m.gF#Y#m;4lkf] R 3Nh\#lqu8Lr-*$y2P  7ٽqu-JRȦ-ŐgYy=Yȱ ^|H7_ti*Y -eFBuGanVO14ƿ,Bѡ+ÿQxy+Z%uE^oty\ww Q.Jک'eIY9F&d"inr9l4æ>(NYL^^TM?d)ѷ^5Ё2vv9/GD\'i`YRR2!-%uXE![: ?E53aU!Wlh#変- ?2jl|^VXɎ\A4FTb*+2M<;1ӐYyfztF GBy* T.ܕܭ}@Jf^5$ J;S rS\FƊ%l]ں)NyziϩYO:ůRPAғ ]hgu^a9UZ9$J?rBZL-v\-k5h68ҫqf_8*\aKE9 RKvf;ouFz{֕Odw0oBŚmt g˴w^x $RVrJ-EԦKyQɗCYj~8; _~ҧ_ i1ȜF,M4?hrSqfLՍ27#y -qhbxu)}e0£7ZPlmEV-\nJ2 /gR8yyZh1)JȇO%.:ڍ^z/_L5.!.96s3 ͘Si/1ReSc48,~蹟Ygu(KZZryVgNqZ (7gHԱW·ibhP$$K8 l8  '4u1L42ӤgHc3*K4*2NȎː)@%![WXx@G#,\\RKsG7 $n5"aTw&L{KA/%itC݇8 qn_#tfЙhi͋ s"z5eF7_vvC|EnƤIs[SW;EVTʟTrn;W rNhg)884kS Iů=2?rۡظPAABY$`~=m E<iʏϑ zw&˶4ڎ[󓣅\3Z2SƢ3@il8!IRLF#$'I)"N?οHUa&ܛz. `!]mcEåDXM!-bBPWٳ̩KMIߪBW:UxVJA̓MZU:?|%omI@c0t:RN{XG"¸H'܊̌>vEnğvEjƧ{q ТGqcOS $2 HN?) ,FDL..W&M?qE@JR("e4byijnF0yAF;K|yǁEg$ݹ6KgDM]D-Go>o[PȜ\WjJTk4nK3 2-'9o}(RRCyJx0{hx?ɢ[;>NIp{Ϋr^_LgxSN5*'W / N.ߤIJ kpYJ_:Q6oj2oЖ*))ͥj`^${~SwHhYx8$$h__I'@($AyQEe*gÙ=4*4>yyIdV广.>~?L'9fJt/+.U ݜBq;͞xu+Ix#H&.X8 hk`6d΀85ZX,o{ݚcOǬgЕ7+Y:f[[>kIDAT>9v[pGϴ`8Cף$fX;Nh`tɚ.[8cI3]M FMorւV6FRʾV:QooofX6S坞4Ь]= *r9\YvOS5.Z0`Ol櫾FW;͛7[2+wsv<&[5 }F ;p0AqlF*,Ay H@rx19p%`Ch6*oHb|v?9(-cQ=^rl[:7#4M39Ecf)1׎("8[s'AgꥹiH>?~㣏HT8Jzzdj,W98mF6"Fk$?%0etǏ'hOڨpzz  R  ?!    z    R;    z    R;a@LL   ߃>   AA vF& HM?    
H   N    H   N    H   NV=r;5$;vů|F8w#ZMH=7~qx~5[Y-ym@`An BՠeU *7lZɼw~fE aEdyQ6pK 9pt7BȓN: J;wnq(oDU6~35"~e]%jϭ)(#}Gr%oS_-iMPyk=?{k dx%8Izwy;nu6wT";~,r++'Z%Yu7][WF1͞OftkڈjMDm-8qk;/zʙҙb7H% 6u8dN=JTi}+l*Q*7fV _7DE«"FਯN*V=Z0'b iF6]Mr3Y ]NcyOKtv^ 8رe7;g?r~@钗k 3ű+Z&`]7X1 i⥿vyOAZ p]s!VD)HP/p&xjde-_Ko9rD#}f};Jo>L^=YMZyy<OPqmCYq-xFڻ^:J7}\,xsk`gmJ@䯫Өz~£l]̌;ٷ._IiN۾>A}S6 HQc buLMm &@A+r^MsJ5=mӶ>>9dܠ:J0M * /* ݪ©@ڪvN^ȯ qs^tBۮ<}y,sѣ.^碸74EH ғW n8-E5~>":.?g_3(rOx7ߺ37{/2^+M=/S{v?!M J80;h#"n_'nCUMPP|wαR;I|G</ǫ M7wT!~.w| ~wPe+K, ̋.avυ$Ԡ((̍mUWq* oTMYb'SkSvɟ,Ϻsn?MaTuT*n0<4Ͱ>T>߹~~ܯ4)LM6m5jd>LHo,ԪLbk@u:eETa(W*+GݓD4 {gNQ4捻klwm?U"M ?PEn,(4ԿTEE4i}~S}a|zAɃ;Bi֭+=/^xEL)ź=z$k*r.5iW~o,<7O*RHM I5ʄSG7ۯ8rBàIz8m 2;s6^u\ƱtJ*rvnS߹;%q(Pt "cY8* _`DeG+r^{%{;Zϰmv̶ ߿GEwݸ~ֽ:drq-z%RA)L˒5ptZjhrLqVʄb}lg=&Y9=)Rz48ciYdÑTmTGT&c|׮ L-+ @hw@˨MK^Uh ʛפf:usMu@67U',z\oӴbb5ύ8LH֜(& i}G1O>6C'/w;IrajۄMN"4y9rnי yAK\A](4rQTi:@W Iy;t椫:#Qul98"C!M 2Vrl9J.u}(H ,M(?#KzmgmLkASjaf k1rm=^?~P_E$OaiA"7j:n]V$K)O2E7 7;'|O{y躝6ϙԀdq4<9yWxqv*n PUUd56d>/J1j+L#hD%"SPtȳ= {B]gnAHPJYRx}VJl>G b 3IN/db.PAf m]o^ѿ͇ u',1mJIc[0^tV==գAS}BfjlXF(+L8r6eLq"unP:I S>)aX zdN6~ ^[1ޝ h͡g_ W$H$N@vcG~564mkoݬy:m;yqAQ{P^﹦̈wΎ[{ȀO͘#P6uvnzj1 NaSOCs)ύeð7eu¤^%v+d kBN _- !}Ĭ_GK\Yw6@Bhzz@4bȀOuUe[ustmǭZBSt)cәG46fːtb$)&[njW4)|Jڲ `2t:Nu C i (R"Wom\{mz=#{>Ǽ,kqF|i}V3?>h61@3Y4&Ox_i(b)`z|Rht14F'qݏ9vfEqI%uL|^UKc^L8x?IvӘhSМ R$ƽF?پV\ff_^3+f$Ķ3/R3>޻%3`U?x{Z A4) ,FDL..W&M?qEwLJh2Ė܌H aB ~wIGu#*Hsmf 4[|$<)3߶"sr^*MP9|-΀ȴp= l澅ʳHU\\/|{HGS_{wup388S0((aDHT4bq;i8Rj( .KuMUOp;ZEF#u%//뢦^3/Uϟݳ>&UfHSCOl?MZaF7{tWN3;/?Wo3{KSRZӣ4DOʎo_s`0*B +2ew/nɩT!,zM;E{uZL,8竖%.5inOJ]Yv󟛳>;h)I˼TXkq/{_PPVau=,$OrP 7={CP[Y-rz#+~+Nm9{ϫ;Y Q[}lVY?N;EGR/ִwT{+-u0;[)tμYxα#)<ݐzU% !pDaH7.õc̍{0gMgU= Hv oC85IHd歄8+'[:{X׈(t.'ᩮ˧huQ=/\pp7BwpQܔ[0Ru?Xl7wԬ}ɽ,vT*/o_tBmȶi1#|ROuj?>qGl.dޢ1Qw|6Dla]5Ǥ=ZGO "}GPoG*%;>ctAjx3RZjK"%$ϒZkNȝӻi9`@Sn/(F=U:$5MzW>m-a9.!ʣJ\^ c&0gWyOtV [IQ?y^fyO>&MCС!43R3/*`m㘈P;m#WDĹXdGINc٩uڷ kOVm\#5B/U4kd\e{G$ةOh;|_/yWx`{۰OnH ~ ^ge$5CmL M':ۮIyn!k8cհ%y:*UC^I~K6|US 
'݉n~}9cVUn=8M5x!#Mz.D/]bN2E&8R<;^siƯj7M[\d8^*SU\Խ_/]SX;BX0]u`ݵ_z۩5 +u;|}3W3Ni=][nAeZ-VU[iG3 6Lb>ظHr`,6:8gNlz!/zqƒ%/-Y䥷$W@<P55DU OT=<,M<9@Z OT=PmQ$2FtMaR)R)_^{.\潾+P+?e1E*h+.Wf-{=e^Y3^}v12Guez۟qliꐮO_ݏ λwn{M^٦SiՎ6ě:媏;~.ۯ!t^Q벿skٞPkғyQKm -]* FqFD٬nVm޼e***t뭷u_Ls޷ǵ-lE+3IK>s؞R]6e~]?p};?\5ua]>$S?peO<_8CWfMeV^Q677Ϸv]ԞuL[ӊZmĆ'|R[lr6mSO=&Ukڎ'zBb=Z%h6szd3uߦfz:vhS[цşӉURT}Xzn?IU%M_3Ӕɓt?nlo3|@Ji;NэU;~뮻4o<P]wݥ;ߨRqt򬛉w[CE:}Bw&ͺP<}>4%ڑ;8뿟K_2'ۢ{OywnT5㨋ugykƴZDr=YW ޭ#ُ ܡ .&%+wŞ ;hTǙШs5oiMkc5qx3N?qV6Uum?פ5eEkq/9;|m.=|O{yŵ6+Z&bnK崭!3|r#h0n0{ofs%]<7zu{~9q̿m˜Prcf+&?mڌ1?0OycfsGY_{ô%sԌDyW4?џ4ur#e(;o޷e#3t퓯镻ɇdt.zxzatlL3Vn%|_/<ͳGȊOlѫx :?y{O͟?;_C>n +B{:z-oTy9qXX OG<]CV?eN8$h.a +ߺ<5uI岬B6k&;S,dOլrg%)N_kEZR/\UII,ƞK獑-K{f-op.7V,L;F;Ԛ~`xhH|"\֌ ~g)~l.,${cTMֹ?2W wݠ-f\pUKkry?{ܾٮգ Y5gZO~2B䬢tU) ߜ_A^'V[w]4FDCJQv懊5*ֱ齭uןWj[񋔨+3nƌ~^2m*ٔVdtJǫ$4")T:Y[\v̂t )Z< Y9]XzQǁty zibNQ;JbE{ v-,yzw.)<\]9 k襱£wѪ#K=ܥWt)^Tt8Z;56RCgxlsvvw ԟSӹ>'OAjڴi}Zw=:%;vazߥߛ+{TwukWǰ !N*ԤLg;-Z-@=cgtk7?WXE---jiIS7ϊhLLnkζk纾k.eg;Su ] 䭺'r>H_{A;~n2nZB+<ۏ &|ى%]/fzu.Hu_F_|JGiΥWk..kvVR{SӬfDF4i?gjsvzi-vq8I9;O4&v[U{!a?<AlЇ>Էe۞m9__yd}IAFU߫V)'OT$)"M,@yh)iֺЛijY~.mšSrۯv?샶yϔ/{{Peyƭu35N%RdQz"H+&Izh]ZF*Okӧ^n}?:d^7yq>vu/2mжVj%u[CKq>/;|'<5R'vgkƌ]ҙg㿥>O;n[ aKVT]NubSǯ%GO[ n)34u7UݪKr~{t=p8J[[|FL,-p^Duӯג%;BǍї=#F{?Ө/=FX/pfEKeen-]TLFLF/n@]ީ/LweݺE4S趆/zei_ ߺQ/|nrBJ~S?9Y}7RSu.GN nՉ}_Wݏ ;MN1e.~X2I M91/y#b::ь*{tq^jhqStw7l߼H*s{--[nT 'Gԏ訯 O Kf]ߧW'wʊÚ>pO}=Og#d(1px7M6YfƎK]FP]g^Y}*X-wS_Ѻn;2ޫb4c?ԏ>\_/զn+/O w' /JIGO?]gy(uu9XO0dszyxF;ϜP=EZeo>}R(R(PVk9mCn[ѺMWݤò&_?Uyl%JFm_O̹R/wʯEyd.WP:ƭTꗫ'Ґ%W{wkii" aο|%˿n.W^g=J ['/V__~7ܮ|Pgqn;CO:_iEE:w   bYշgI.R!MwާcF1WCЬE=m?|mJj5YM혨qҟjMsDZF^-oG/o[Cc{߶j歷쉶$l‘TX#E #J8[ Alr5Ե/m3u={B^Mݭ :Qw_H,F$yr<#Mb*~@l޼yZ|9[nhʔ)SgPcLA"c$7@(Q`9rAodF~бL:Q,m[ ۖB-YYm[PuK-k;߲ίwُ$+/St1JI:?tԝo!Is+hu#kZ#3nc%D|t}Ajg '0rxWAo|EO=I+!)oSɍ҉s${k“OBkIa;^вfiܹ'krdgFϻR.|VO~&}㥬~LoQ;^!+hze$ y.]#_gNyEO(rխޱ;{tAa%M>']_,In}?mM>=rY膛dSw02Ab%F7ݼ#UuEf`hNbI: C10;A {j-i5]99Kr}O_e OP}/ò,[c\{HOՏ߬S*ݬ?Wyߗ5 5)s}D?kvQ#xE/өW)5s諟O=zZsܤ>kyT~0tIYݑ8 f/cScѻ_khx{YҸFQZy"-\0NX}EZyCE7XҶ #4=S#S?>}mIrg> .tURVy簏vL.d׸ 
0$295$y6S_yw\z&ف"% .PIU5ng vMn29\QIbeQVp׺޸aԙSgPcLR]R*ëDǡh S#WۋuхG:wS쟪7պ:rI >@gOCNDԙSgPcL?IO2@XB' B5AwӋKr*{ad|՚NݹI\NUv{ 0#m譪knĒU~m>IlI -y~UQs?3U/i,w'->[]:Uٺ =gi`pp\_k'Tך}c37o/_Ζ`tϠԘ:ScKsڴ>KOAO:6”F[2j3b+&L֧A 5T؜+"@^^D4&-ڲ*.(mY#UQReXz p|mJ)z4l8f8VI,°!T䨺ّ'"@hMzXҾgpsddpV )fC# 6G1NuFW(a-婢&m0 u5C= gű90ďշfEb+^.[i%29 gEQ[E!(fbhCeRwhFW8 ,F>s+渫-"1x:SB"%2BC+0Җڴ Z֐2+پѭ@d}UV&S c$mϨ5;ץwgTcՖ>@Mi&7sf[Rq,DFFWD $PRzv ,KEl*. N߶g:*ʸtEbuPCɘ#3xlRA Bm0xvGpb5de\p l O9hsmZ-ø 7@QM#"X}vk09mM+{U@0C1(GsOt¾5+"wZRU&>bGn.8LR,j(VaVI,°!05i3Q 2)n5#iw8ɶ,*.0XHC"Ω&%7}0 }Ԓ&-F_)WC{G@sKN\cJ*+ (02F֐Q}kA gQudۖJ *+ 8O:"& ԧRa$SS(UZFDx 裦-u ߑݦ?.p"\K-k EkӚI9.=iG5Z9-u5]|j[Z=HpԻg4=g!W+U8h:^L.꺤k.W֬5d莆n9^ -ẙc~0#Fo.&bcMxDwn Tqp>A@qԐՖ9D6Dkq[2*.FQ)☁Wg6n9fzV,*w])pw^k]{PN2-wNDԙoTQcr͉(v(~;${b{ݲ?#_&G Gp5Awj4oZu#%}|nPۑT[n0'A/P}[Vkzu̯!zsGRUM]XN/.Y}ϻZߩ;6)kswܩӮr&iY3=IRѮ5iշeWk[jO0k~?Zp~x.tbͅ:=YgK e.owLn%l [H L1CJi޼yZ|9[n,5յf<5~ۉϒJb!.8XH ;^#t.Ex'PabyR IDAT?QE--c3FJd E¶F;GTNsӖ4 8pnhCUR)0x`GUVM7F 0RQcܥb\2[)9>{RlK*-=DPƈ%:1bAKӦڴ)0R{S{4sЏ%QE2WۚՎ8L MFˆ5cI,k4Ңl`~`6f@hs]FM<(Im0cI,DF~:vbG7PR"ģނt *+ w QQ8ݲQMsyhCeR=5]5]Y8("z1/oTQR{dN5)Ȩk7ƐmҨ ]6sNcn015mG7ήʊB*/(eC&`W1pm۲`.ۺ12UԤ+" aZ l{wcm9b OᐥHVؖa M9@O2 U)9.`5rMu۶-l) ) Y ,EÖa[P2шM [tE@ުN) 9N nᐥh#mKh&IhcMJ)ǧbOcui1 @p.C"y[$l g9A @ZQ]+r:[;BzH[jjbmͨ9E /FnMl]Z8Hl[n[񵱆eĺX9Ɣ5("Fz/z*TZ9lj7w4:  ֕kͪ5hҘҨƔF40L(z 1I/l#E–"!{WwV81M@.6դgrsfW(-(%a-(^6ץF(IRc+# `k:&YlOyVQI,ƍ0J(f$U59nr 6p㫪Q,b\FHjSj A`p.K e<譪 :9ߨ11ʶߏv>W>6ŵf[BM2!5i! `1]}ӚP&=0]sͣk"]-55VK6˗ܫM^6z@L[Z9<F7R譪`eEZ`M4s/^zjǟ $455UeM6|ʔ^ԮM5i5].n1hhK]Z;`Օ*}C\6S \1gϫ =;WyHCasekzecެL5+7G/ ~`*6b r#)NovJ"N0|:}b)~dUwۆ5+`㸁W&< b͎v]nT +i}z^NzG*붍pwgp];RSIR4$EJ #2jv--d<EԘ:Sc 6G(z58$Mk.{Ο\Rv<Μ%iG/{46Jzؾo:Yhʔ) 7jJJJh3135Π#/]5sir=wǝ::/ӋKzK%kcZKJ5gB4OV+PT}ϕlhsm=^'-^.Xs-N>|ْ2Z|"ُgܫ/L_x[Tzxli#_C\`dNɜ$+CR90DY ͛;uXmXVyIDcJUϕ5qp8PR:S{5Πԙc j|0es֬¡\V2][ Ƹ+J8%.( Hw.bX>WVQaAFRU&GD0+jrse &G1b xlx| I% aP󹲐QyqXcK*/ +D*z! 
2)cP0cI~Ȳ< Qޮl5h?j]C`[} @Y?x9]5\]ݷ 2_5VlmRKzd.wy]pwb0*~]廛sδ 546lBZf]gb蹌Ӫm ߜiaքn<:#Ywx9ernN)Gɬ !ŲLV+5-MAtydy4wnN)71 bA LjH->z/_¨&i|ILED#C @(Jz3ZSَ)("[ mZgy ._wGX0X }BeS;#*/ +ЃjK!s2 ۶"._EB &HXΖTj+hEaA W,C{EHҞ2 ޶Rc3#Q=l ]tTŚ 5muz4Ζ~-I+*2c??4'&Yuvöc:|L&TZV0-Ѕojh-0raZye +PA\xFW>΃x_Wӌ1ETVXD1Tje66ĩ3255^r݂j#L FxVWj[K MIGMIGRK,WsъmMmP 1C'~5gZU٬-i1٨SE[TpH!IQi7G1 Ԛj}}׶'(FxӺvml1xyA7Z]bi}}ިiWpRr("M5(HϿR٬@0e6ַ6e}>?ݜ66Ļ x g8 >Ѳ- ? `9mkMiUUN@[[]#mK 5HcKb؈~Ȩ)[ A 5\X™c0sG1iMmިj/1BXzU `Pn8cPZȈ؋Q<ժmn/1Ũ!hMu c]_cFf5VlmRK R0t7bŚb[ :F)WEc\W `vs,cK4|gưlYP12(r2rQ\ 5%/11|崩1U5m|Z HJfsZ]Ӧ559DÌ؃d$&P #c,&P.0r@:|9nN)ϗyn)Q,cc vU~@9HX IY`fWx Lǿ9Wx@oz `Xa/u7bp6*Qm $yi'?0 1λOٜ/;>3( /iV٪ Q8Xӑc54XTk#y(o@F u(7;L9^Ni9+JfbyjO10'> q@p:YiƆ8 Ԛj}}׶37A z]RnNu6klyJcQG*T)j ٶlb>A,ڒԫ;Z AYmkIiKS8%_ilIT *ET Ra8HȖ-z@\MSm"MM մٜLUʋ*EUZVa$hV4RҀΝ3V CrvSUkJSjIeۊgQ !"SUy,QQE;[,RĶlYF\HKg|3ZU٪$pȮ36)\0te|e~oh ]#cTO6řh Q@펧ӯ jhIȈm(Ҩ=dFI s}kQIifK-k.gtZ zקhtz6%5-ck5~D48XDb2٬dVhQ.'A+U8hwޝfjvaV׹p\Aew?]ϝgn6:I{ݵ9zJQ,8 OVvJ"^ _.ׂj= ;|Wu*[Ӫ; sky(ԙ:Sc jLۯXYTfg3,=i{'~OK?6VvzEwaϥh6><ʢr7;Gq7h>3nsu~˧G%y}qruԘ:ScPgjA&eZfT$ie Z2G t~=rᗗ:Sc jL135^=#V>j~\ڤ\AqNN%%z>\otŢJ-|=B$YyڰD=lzҩuo^9KlI -y~UQs?3U/i,wy> 1fH ?o<sOr[Ԙ:Sgj L}\^d1 A @ 1A b@  A @1 b@ 1 bvIDAT4>}+/=}{29JE^W[ODDDDD2eU DDDDDDDDDDDDcT me*L&chcX OJU:@,Wͫc5kGcI]j[F'Y_Zc#+h,6BbBd ysbi  { m\Q4L{]:AXA4I>·CǓa 0;ObebǬqu/A,w@$:70sl ~4 r}lNA#ISUd$'&nhߥ Qidji6߄>%tbv7 7-bkd&Aiu!N-1{$mПv2t`[VJ]KQTo6RhF-sWQݥ/-cӹwfʎ̙B(_Ϥk SWmF&+?0BpOx9d'=ī(-+TWӵMRzc^`Ə-8(6[dJrҏ"gMIkh3Zf^;erQΪ3ߏ<&+gG^y 걹<:-$78\_^zq;ueޏ\15R# Dcţ M}6;sDi0͈ǜ0'!!3{DQz89^LV0]\u%TV*)qyroi M |Atx%oQϽ]K]1ѷ1/fv"#IcvKլ9}f tr#΄r}JeL:]+fK">yoԯʨ(r*Vx$uQsLJ{;n\6 [bzӽ3^y7}l+l?Rv/͖۸1Vk ع1;5%@c>*|NZ\_kY<57otSUO_Jࠡ\I䋳ýt<:_dvCGg]/ Kxj\kǀl퍗{ 1Gk/د5^=Us1V?݈`1.kr1lC؇[2Iw&hv~c2Z1YQEz4X}6}mhu%֌,cԛp#2*ˆfL]󟙪 Ob4f\=lϥ6xO&76ӈ[c_7[ 'DDDDDD5QDDDDDDALDDDDDDALDDDDDDDDDDDDDDDDDDDALDDDDDDALDDDDDDDDDDDDDDDDDDDDDDDDDALDD`TXXէش]2*L,~Vo4A)&,A?1S9Ó l,۾yj1ĊCHmۓK8:7 .6FDzCgZ$ÿnJɨ,fs}I #"Itayj>Ryoݯ m\1%ɋi;m2-F0cBot&qxQs}"2<f;U|>#1‰K'G>4n>3/=Yxٷhb9I')}gvmuM&1qӋӼ?sW}wtH]ʱf帎$D# 
=̮s@DDDDkƘO.Kv|ȪWpf-KfaVHg{h o$oi&8ʈp*o]i_Qt^~<\8 C8FNq6ܟw$_͑ rlۈz02NfۉMLL k8)=;FI|!GAIXpo 'g  [Vr 1X[$KeM^tr6 y8=ހGu%ُ _#=$ UTf)N,@4&CO`!CK,Y#2d#QFnMz[V Ώ(!]_3x4WA`(.+(/ Qs\[p}~~?.<*Jr=^*ɪ^}s^^\?f9<5El ᐎP<6DDDALDD~$3X"{\Xk.ΫYq#?O7Hpyo OG| g}SĄؠô:Sg }{0tNW>ؽ zNgRmT\P]EuM@v[?.F`bTݗt~aW#eTfɭ:s {? b7yg 0^GGIu^à: K.&::~{1>Ngp+Z0'{#"H&}n?xiOr7it|6:ĭb˟kGEM|-hnÜ8#x0shIp{Pct;j6Ѥ~ w_g2. wG 訢fm-d٬,I@dOkcX-㭳*f"i jK|{sY 0%)OD,DD?(ikȿ((((/Ja[)_IENDB`rally-0.9.1/doc/source/images/Report-Multiple-Overview.png0000664000567000056710000012744713073417716024725 0ustar jenkinsjenkins00000000000000PNG  IHDR#^sBIT|d IDATxwx]՝ާwK,w 6B $0)$I2Irrd<3-74!@ t .`0*ջt$zPwm,ys9{^{IZkk6mR!B!⊲*%B!BqW:!B!Bt!B!bZ]!B!$AB!B!IЅB!Bi@t!B!b]!B!$AB!B!yB!B1 \tJ?Lٙ" x/g5tޮNJoOq=;c}y29,S{ml$b)c@ l=.!B!U"+ ";t[ݚfcѪk56]Pv5b$&5bɏm~)c2FVpBĉB)=c6w޳#M ^B!B%F IweCԴW8lFMb` W4 RhJHFՊU<[tD0㵣k #'Kni 3$2VN'-حP&xx25 ՎajPA2'Hh NjMPfD,62`t EGx\Nh b@cGZor̚ՉiHXV431igtRQ"$Scs&JӱZtR0Ѹn=N}8c 4ÉfE@)fr: !bY\GHыݎ;͇erXIS6'N ]cXB !B!~}xr=J8:"a*V *X\]IS}ǚLI2Z@PzKsy% @[ qj݇+VO.?W6s.+Co7NAΜuz',$cs]+0pg9 t6,Cjnkc斔P~ޞXg 66!(pedGynr}𥇢t/3',7?OWÜ WB_P]^џ>ʣ'z(\y>v(~c;xg]w};Y E<_b΂>Į-xS)•E?'S!{ |RűǺ˸N/B2(;Bwg#-(@p}9AB%hnn'nﬧg~,;8}HT=Ic(d͊\kg qlX͌tE_W6~,ỈmtP*F{u+f,*rdan/oihOP x=OtO`wX fH 4G.&9)DCi`*i*4MGm8 CV,4y$j}0F;̑&#m14O5}f~/Tm hԘ/fCϳ7owWgp4aN&MR)s"69e^}x6x7[>5k޴[{C)R:"U.ŰnHdRXU) 3/ZJ)|%sO k7[ a[ݻS=${,." pCr}),l_rǏq]Db:ωvUF73JOo qo#&-ՃEN0!2o sWx9/3}|Lgd8@s@RoL5yÜ)O'~cϾ5/_'0 ,vrIK[XߚdڛX3Ogz[W)}JM$V%G1֫=:ʦC +1Hӯì+'h0ŋi %La2rɊYys94! 
ۏst#OC44Qgƾ~R:+=g+~.|ב㭜n~7_COEGsyqSnO=rb^no=wݎP'kgS-)ێ4J8a#LaA$g悅$݋MtMoyo$5oeoh%5ԪoPJh𛵬c|0[)[_ :opkOs3FZidNBAC,>ߚQ@MtQ 븦9xt\q{Zq%i`+aC.B!]*/Ք.9ơc͔o,g6v6QהςH&"t'TMdu$Cb7}8XKjuXv?AuoDp'?4 \KRi8Tm7O̙tUcҐpT~ɯr>7=ȷ7+6w-f/c SŸ&eO+o3<e]Ͻ^2ƤTwE}{_x7-?;cRۇ~F3|s8-{,/qǗ-VMexZmKT}wd]Y'cn, [shVB/'0|1X!B!mѢE$AB!B!IЅB!Bi@t!B!b]!B!J5Mr)֫=~j_b^KWwW{j_b<^KWaSKB!BӯtB!B!9B!B!Ĵ B!B1 hCCC !B!WB!Bq"qB!B!4 B!B1 H.B!BL !B!Ӏ$B!B!4 B!B1 H.B!BL !B!Ӏ$B!B!4 B!B1 H.B!BL !B!Ӏ$B!B!4`(4 W6\eg}}оk[\Xo[|X//y miڻB!E'ICQ3D4a\`5r}NVVfv\B!*wQYA0ydj>#nR0v?Pv2&xSt!B!]TF=-$Xح2TWL)&/fFۗ\鈄B!WE%=1 cPd۳KJ1 bChNY[+ B!.*A7LE0q]`_of:M$W+ <7jMfwMv\3Jgs;1Of)y z&.:5U%8st%)P/m,ʋsp;,W:*!Bt!ޟl}͊+OaKq~ʏEVn~ď2R!AIGk7CVJ@#B!>J.Ͳђ q.Ϗϊ͟dlcR 3$j~Se,u\.pvqiV ?aSw^鈄B!q~5Bed=24C2ଛw_+ކ6Z[zS9W$#nߚKeyx/m Lt.yd\0PJi }t1ccu- + 4%JR] DH ݅?7t}|@'MmDbΜBPC$ +ެ< s54XnΊ @0Bbu$//4 mצJh8MP rC00y3/}g 4Qҏ/(7IR&N:A Laug呛Ӫ~`ALWUExpuDm锕I"oy%N"eO), ¼GL^|9;9Dl/_[{];+LPUǷ73u1!7榕鰆 4Nj>9{07ײo~h$`^d[md~ā|ns9{8R@P Wz.aYP1H >9AM} ұ{Ϝ͒nYN}Peog߶sbՕAQlVlqt$x|fm%?PA?H`8*VŅ͎vSC$2(Mw˲$}0:HMC#=Da{0kRny$1Wsls?/j-M 3qR{O3Oʁ.g,;o%?'';v=@]C+P  ;Y_UK瓛fet{_YR1TP k_Et%=Ii43A{wzZzFEz~)UʞNbvx{Y?VpA^|9w%)Z%rD\'^_@>iRB!t.'#o`[k,vna&k*KNE}+5Rߕϵ2S5uvHN]i~n֌nQPRƌI+^bzL_MJ4B:vB@r=U)ẃ:rξ$UXw 4Qɱ[Lp{ U#dCw6#x>׮*ii;U`8 O7btVo#X2Xz> N|b/8O7S uJg.* zNQu~O[UvNַr^(q̋PçxO[V1UQwah]mĽ3`"54{ @KX8{δRdm/<Ýqӝjv KP0ooޢ?Iz,V,-!nh<}t59 א뱒S0iPG?JдM{1 =J1~|}1%,\t@vB~ IDAT5س`;?qfX@g"Dz\h1*YMNz=ͣLBvNܳX>;~H?K }Xf5-D-?R/57ݷn"Azg3?gJG_H c-RtWWì<2lr\!u +0SI:yT b0ׄ(Ku> 76[Tx֕t}])wVaK&8rǏ3M'8o~yPs3̬E<"$idGeYW"#oSը*spkavkSa*6,%])wBtt3xx])xZ^l>.y~+Ɂa^īVn8كmT,Xtn'vscw5w-9|[3IkyVqtDOOT-VP<R NhR>>i9NtLدV׶2w'(Gl'^ܴ, M%{~ǻx/ˊ|Lna&z $g` ?v DN}Mo!g'ӏ-%#J;Wh>MWT/wbn,/MnQf?,5rj;A߾L2Ϭw Kv*aL+OR$xgx}#>iV͸vLBx}[4.ݵ<HF6t<9yvM1fV` i$t%>['hie%ӡT?ɮȽwȬ43C<>-޷IB hl}\7͈2ɈګytIiwVΦ"ۇ~7)Er;_eO}/#6گ7;o~uwv^IŢ|N]lܺ`|dH3'{TdGkBhYB..aJc)Pjb[oo/~_WZRw7 Y?&|i.dPs9ܹ[7V"&lܰgzFeSAV2AfYܝ.L(0*wcU422ܿ,$5a??ZL+nE>;u8rQef5=>PɍyVCIٙ} 9͠7ANE6v6ecF{w7o65YV'%,(pA8A,9i,"baLhi"QgfT0ϑ1\(ao ddŜb/3]̛"F[k:|y>pهi>y~k[ely׶ 
m`vk"Q)zZ8rg>r#32}x]v;NOY+U8=vqn_rMϦjݍT9Ã$Iױ-K\m׭,/Á"-g6ܱ|\IX)(_m_Kiۅc܅]P ^Rg&l;ek>ŦŤyp=sfE`b/S“I< #J/9yxNl6v _^%* r?~F3)d͛7#ܼ|J* >NA,?; s!|a\AjZKg7Qrp8XrܲlzH ݙIٔeDyI=ަ9UTLBl flݸEe9xl6NOySNӻ9ҕ<8t ^Ca^.xI+YĂBZ]Gt0R%E3NR :j9rû<>e8<|8=XTnB1Af(K"D':/Ρ"G_B!ĻYWdM!f4R?s VyIR{X\s4丱v񗿩g8#Hb3ɱ7,P9 --|vV_Sؽ>)iQ # =O?BKC7t$xgu֠4Xb~ l4R@m8(-#;qVq= 8}nJ~`ɞC{uӏ EcghS4/o\fMpy@|]SvN݆2FrfcS F2I,!{ ?ZSu)c/=žĭyl6P>z-Dd,Z͌sTedPR>wn'z-y^R Jr vH/+o* d#:ꡠp>dQ#`Ϫ XHh0k|;dgn?c#[&,,bGu[\#GQ)3qrT`oHt8U3焛 NQy~ Qhy.:ڢx'>ߔ=бy8;GggAcG!&ax81ۊÕgJ(S+QfmL11in [Xr4 R(ѡ0 ys+F' t#|%+-@uҵb;Zs'ÛGŒxAi[p p5yÃ+-Y]@#s5gb@5,)%iٿ>AwxԎ~ݱ -ffֶ&ZTfj Nu1,*zZ2vsYUՄB!A~O'3I%D蚆n&ɔF^3P; naLa*LT)$ф! ~Yg#u#G (MS/ gN̊g\Wb+]ӸZ{㒃<8E`,n (ZXk9st1Xϝ43pgbLsW9d Yf?wMJiXt #Mb|^)]Gi?ʱ[)at YX6E+9;Ӟ`K7rkX-9krd. *ha~tW:%su>e*~][|<:t?e;8b}ކSDd^ $&6-rf%&BKI4WkkZw޴kכ`n'T{ꘇ,Y(Ͷr|[cҞ&X<ǠfEʜz3Vwףt -JǦ36eN@)Co3]JX\b_ۤp;'VcT-J:!~p0D0}QK3OwR1I'{+{1i${8txL:k\CqPWֵstl-$RQ㢿$O}:s5vUqb'h m8;s6~ec/raziVI(@4~;ݚūSy=jP$2@`YJh=+STb4nUĢqLMf_`S.)Hcl=։5kfsaZZ-zړwK_fSzOo=@6,n e8SI<'p6>aZ]Eyor2D힓\r8,^$Wu /7(zv|A"e:Vm<Ft'3׭&kohome;%f1k~!wi M5Q;~mךOA,r͜:LhIcm?9decsYZf!HOG FʖP%B!.쒬>=w0xI 6)B% Uwv6~@ D5B(aTA|CQB(vBO඙Dvt thLb*056"Ե舤0L@kaƩm t:2Veun:[8aI7G^JP1KǺȷe΁n1OgJWv,&0 ԐƖCĕ|cc)K$=fdfs:hd,F6v}0HwxdP6+=.;Ϭ̥ԣSWϷuѝ2}MұZsA<9@e @}8u j-qz;=e 94oV1>Pk(3s&Mw-xsXb>3']繒/MKQ:} D }R 5Tˏg;|g~{8P{ㅔ"ٱ-?+7?חkWJ 3KvƟhꇚ>V֮TsS{x<|b6l sImT@;=Qвgw?͖evr?}-{9|$}ɞ4zBVU-}tu ('uF"q6+)eђeϜ q>ւ%+{ii8LhR| s1fAAJP {v3^9\t'E:h8LmKA9Yd5%Xho;`oѤEs]B!Cp/q/x$о3?Cl׿=80/ngl~1)3Y3s|b[u]w՝爷(SfF_9_k8džu3({8;8>Ϗ?55|Bept+}uRa#5|gw[^}1$"ĩ 69&i#,b%+.%,,0r{,x/<϶xg۩\8fp"2)ꚮp~ >"<\缲@*%ehV %/h&Ý8VJ( 6.O\,dYuKkiVCۡٻb?of;(.,l"а]u~v6},VZ906 !eRoUT@Δwp:&4ë2E_GB}LKsٵe 6Ү} n1Oн>oٿP[ڍfne)iv]ϳHQi%kɳ;^jdU/hر:Guޤ@>e+LRG5ѭڒu2OX{uU|(:vjJa Me@,Uwg[_v,"/};DvʡN"n,e*5tB {Y.&m{ԩ- g@A!0#^;ض)pk1Q3:uL {idcDBԗm}KQCر5!>'(ZZ0w֠Z |4Q$h,ͫў$O=$I$4wIօ|k撹ᔝ{m vћ!ѵǚɇ>CU3g>,mpZ)--Nރ19P+ Fzt6WXPkyV;Ar͠6xkީ46 ΁ە/x[c(æڣvdP\k6YȠ!="ldX FX}O,߻%3⢶:,-Z2`PR#|~E#:b….)!>?b 5 5ؽn1{7nj(7uZwrW<BIjU8Q 
ڑgK@jT[pC޾wJII7oZLX\1y2;&//|7F7!q~WoyRI$Ii߃.{_n0*:q8N7bڳ7QX *&z0[%Z.9X'֋dƬsC^7Y0[LQFƻIٸ5qb#0+zWj<>a00l( Ff3ۉR4=n&3fLN4fQrj) f(X3fҵ,( -H|#£ťζR'ژhPлX,OꍘfکSuսw2uc%{ld_\8Q'<DzRJNI .=>ߓ.ZkA]=$qY! ӌZ+ Fz]{;mjzv-B՛ N^=2[޹7 ]4?ؾN6m4̀ld@=6eh{ķ´-[LE>hRnGD"Q~&7(eK5ZnMX`&dm IDAT$I$]qk2x`zTE~Hҟ%usNjI$I$I."ٳN.$I$I$I֠ N!IΘ~$I$I$Ij5r8I:$I$I$;Bmsؙ~g.t\OG+cx5~$I$I$u&q6BTU.e?KAAA;*$I$I$]Dw#>W[y.Kжa'ZEԺҿc,-:4 UmS$I$I$Isq;vleAQѴ ,2RSUI@hl I!i:ieq6Tgr~3NU[(Ch158ٶmV7 x$I$I$It\tAm!m܏c?^:JK٘Y]0v|T痰H)]/aæx{%)@I^6I}yy8GN;ԻprVev[lY&g[~$l $I$I$IKym(` kϵCz;4jgw.V~epy #dͥtbU6 ĶJ`,GӪٟm 1`l°~)ONmQaz?9 ֜sH$I$I$Iҙ8ޡ~xix AL'f#|VQ~,S{ :^Qqlu:TUAQu zlVTM]it[H$I$I$IkWPO+g=5ԻioڰƁK訫c?CgEUč]QX$I$I$I$5ZԐke6kŊNQg]n_h\Aà:;t -Iw6;)XZ YG;lrMLy9MI$I$I$I:y]~T7vdd5EbJ*|sL ΂}]&7qʎaRg0~ gzcm.r2@,Z$I$I$I&qڭΈf v;4Ttjz*8]NN)׿mnnT.GoQUTKth Nkw p"0ӅB4t*8zt:zIj*NIPPŎ$I$I$Iy⮠ KΏxhM/T^}Utz|(*Mà`8zCP I$I$I$]:tw QkI$I$I$sK)$I$I$As$N$I$I$I.Q.I$I$I$I@ta$]҄$I$I$ 8י\&$]Ju̝N'Z ɇRԔbqyY5]91N͟lSOt+.|<:zH]]N&)2X loa4igjv;wwsۙ _'OaԈ .f ((8_Lv;$AEbNޘQǾr'K j8rYDfn>+Ϣt>p+'gciYƪIԍ9ۃz<ϭgcysu[3GǼĂGHw=!(23яDcCfod)dTkOy)+F!pkU+exugBQ)i6phts(+ǟH/SSt4]/6u5*~㝼6;zk͕Pjsq~pGޘž{ųΣsѣk_o3K.vI7p۽G5~3hm9׬iXKi$-^?FQa+ewNϮ9祵8"L?\.. ( ӉK"\8.Pk_Q4.'NWC}\8֗H".D4_QhE!+0BwK4Jy|l7awډ7\*(G˹r(`9th: \..@(* AQ4+q ^%$BQP@j`ޛP(h8׵DTKСW\tl *aѢ Dy'vG=̱zLQTeh FARFB!{8!yh~jp:p NPf4Tj>)${NU\҆r* 8^(SOԗBM >7B݄h(g#؊Ef[LA\odJS0|'cW\NMPr)TMC'?yq aϤ6o!p9ګi|]BvxZ* ?\Z559 5XC%[g}/g7n :M!jTT]S"!hp/jSꑋf@QKCsp8GPߞ8AzE4R7Ύ I6ؚt~0;T_?ֿ!]'ST89fxF#gNg_&+ .kcbwy^yKr.N=_A̘%6'Znz0f&%!I%*[,ZA|`رuĬ8C|MlHlί})nVDƉv!>ZG,xTć7]+:E'i{*[Lzh&ZD6W=+2roDxں꽅۰grsb`J)W!Y_T]B!\kE+ y1Pq +qKP1E^!ʝÖ#~6VJQ1ӵb+WVa"<$B$zH,,&{Ċиbmc6:]Y9^ I aA[@Kj^̓IŚB(A?{r܏#EhT6j%mDTbPD"FN8Ԗ(^S-j7/cE\L"=cn9_ $D,YD,YKYŪ:׉EEۨpԖRWE+ħKۈ'B o/m"EJEϨ a4qgBxt"Q] z^Xr!DE^sϹF{jn'aزux1aJطJ<:HzkKF'Bgж "*2\ W9_TRGFqs? 
9-ѯ}M]}TLxXYp=D^I*w\.ZF8a3bFqu*" МbHtxeX>k1әbON8RLm|.-s>K%]!n$n&/`45$]J@g _+=q43qՒ>2}˟ORJ%Cfa/gW]G颮◵QeLa?`̐8ʖMfQv=7[۷vS'Ek0~ _%0(2gtJ&S["'wrps}WZzI&jo;y8< әKy6niI2ΙOA>eJ AsnSz }yO߈?ntO(׳mt~-rˌtW._M11i+: nӟ}Ɲm`(?~D6Pg{I[#ΑNH3RK.__ 9\\s_8՚Bn"WJ&j(}.&=CƳi3pY>AK3ͧtk0=Їo_%wuL|~(5fZ扵 YplOK&?+1Hba$&ui4ɠ}64СwN,z}S trb;x/86,I=yVOڒ|ƶf*i5L<>=72 QW)e|3>F3IK)vBϗ>Hڞc5Rގ봅B:v|k_>2pSo32ZWy>~fYs '.j ˘;ncլxA-{_x߿垮؉ۻ/~_~W^y_wW6|UWW;?30\ Ƀ O9Jmo~g &yv>\Ӧr)GLv GœBnyȆO1c-wmt$>gTř{X4q<ήb˷(` ~}2vY{\9-[1㟌藊pˌ_6o=I#uquiIZHuʘoc46{g8pB vrb]OB~\ٷIgꜦK}n}[X/~n)'O$2S{&!>|W` @p`$~׍ߓ@\Ս6}U:'Zj#TBOhBt#FȦНD5i!5h `)(F/&*0|`A*aY̞]@EI6k=Y8jMP簑UFXt*={u_s-og)h~{m_M;i-:1gwZ԰l:p)U,~xoԝT[C>rx WQQYzwZt"5! ?Gj9^'ŌE'7y [˴*L]Jw<4kX4"' ApMpm?y P_B{{zօ}ʜ|T8᤮8;cr"{WQW]Jvz 憗'@9G,%}W)p㰾3K<{.FU;I$ctsP}| N+9V-:ޓFqO4:s[ +zaFP]dIxP02wbaf%fBC‰v]1$'wW6WmV]Kn>jN;Ƀ?JȉRr6b $wXr~K0<nz۪a/TAeX 3=azt3_O&.ިGнKa-4QkwP=^L'Vwb $?@` j )5*~0p\ &ѵs?zFxO^f`u % A1/)+,g#hd ΋\9S%إ=;umd/ST7#b!*4^NUi;֮dނ藢` L%VioXB]31YjسkaydPU ,|;w9*jU[wجS ]f|fXBfѩ Sֻ'e:o|YYVY-OJZŽl2Ow=i6Ұ՚}i F1Rc[w t 4LJD<5A ^̬lR^1)s)-aќoMht]|۬*Ұ0lcl\1}' j赆slj2(,'i7ն )B21X 7E]z~Cػw/{f?+:Aw 5#*D%gj~2_v=F3un5(cu{o"6RuN@qrd᧼3|N6z3ٺf&wV ^AԲh2xw&'( L_GeuI1WÑTToqt`4gΝ3b4xhh}9/(> op߯Fz+LBd.7Ͽy|qQP\C>'_#.@̺wx[I sCo; bpeASU,֞W~sN5gl"z L؁ Oo<".}ZO/L|cR#(/ UAљ(7}FkW{3m!+9-^T/"=Úh쉗D ]07L`/c1`s0E|XPP0 .?3(=vB}{L:xy{a21$2u~3x/ 47ZK1 <>x(CVvɴ͠3{Gz7]֨xomsézvKWQ0x&8xFl'LO&n2=o OWqlɶx|*3%I$I%c1~,p5-|-d{Qo֔׺J-o9,c'║O9ًuw_߹ϋKI.I v?Ѩ8MՑI /Ŏ$I$I͍TfҾ ecl{ !KDZ?B\sU1#iuQ KtVGQ67j$I$I$I$5$I$I$I!;$I$I$I \fYg_= 9W_0$I$I$s~͚:egCΒifTUNf$I$IK9m'jRUU-O.˅`M$I$I$I:tNuu57 Uv%!BP[[Kuu5^^^x4N}~׮.?τ6q=3L|ESlo|;4zZ4E\O ˺\\*-l\ObZ4E\OkS8tb2d\ұkFSWWQ]]J$I$I{>>>x{{S䨹$Y*N6 &W$I$I;6\4I]O$I$I$ȭ%I$I$I$tI$I$I$Ijmto;Y?oq$I$I$Iy_gz Ah?zgm%P3?&ٞNP\FpQt>VL^]'D8(+.%NpT>Ft^oqέI^!s|itiv,o(/+Z?v ns"%Lɲ"wНN*+P-^x|Q[IElHbe=Pjغzsҋo?o|i@}W3ޝBvv,^Ă2N^8}ڭ=q;ۓ  y>./g gQ7^rΛ~K'LO#!fl ӳ -Iyڼjtx{@w-gt8ۛNf.x(ثrٹxBg_G) 69mCs;aO(L{" DiGVfO%l^ԣF!hӐv!f|G'~FtglޗO0ӪUtBl\:ɌAnJiߵ *Xtfv{'l.IcWqB̴,A5ң'm0(W3 =9hx%\ obwf7w&51Gv0|Y[Nރ 
65}Y-?idU_bWtxib'`+۫5Rpi_yٜuٰ ֱP9?3ͻCX=sh$4ov-9RaGA:nt؟MC;`(`lqؖm9+ў&װ{,Fv'| o gђZ}+<˟J.c^V)) .s9jwa ^f4\ϿCPi8TZu,߇NJv'-#:}AtmuG<8aHѺQy2sMJ bkRĶN!%%Vae&Fݫ* sX-;vV@];>mMELqx/݃U$vN_gdkviX>c k$FS%kPqh!_N?1g$swb]VfoFk[| t&ߤNW "gXGjؼ:+8<6-Ocw~y_:zzv̘#fbta JE9c.Żi?6C0WK%!NYh X<|Iܖ^\9֑&䋕դvL;bVžR;:;sE(sRs-u ZŜU[ɩX|Cҵ m͔淅 V^DUґPene9%ӧ] A F`H1*j'F :E Ul]% D;-;{ӫo7R {_˷UoxO<4'3wdAj;4R2* ( %g^n::FvL]KLm#e.bCצR}p'Lzv)6Ǡ%ff-{;9O׫o>ISH!+=g:I)qny8 gY'u8݉=. de=%ay| H-`B=Ur7<51N|K萂t=bd{)RV6YƋ޷k[Y?W֭?H+q~y+MEu^!9{u^uM'H3ګ/QJw |7tǙA+h6K]~z2vm"Ex^6{zf,"Skʎ=*]nx-.(=5y$} *Yb!O \885疿*, /9/͉v9Vg+_geؚm Ӥ;f3K'ߌ@8l!.6,E3Wn !~7Pn8Bؽ06^JV̺|w Q !8ȂŗsӬ 3a-7!^KIr]Ie`͸Ҷ~'fmLaT0Xz%wQ}++`hLp] Y7 +en-BI3]U{2%%U:]fmfܽh- '{Vf$w$ke+dJMH A]~cZ.>3w',8ms6Q϶u0w %mJP#.+$"Owz^-̓|5v;35f/ʦwto<@ w$ȀPedfWĴ7͗3nh#.@~%q5UWΘxO̴}΋7QΆc><6Pc'߿+O0st^)>w~_'%02-^Ct4̝Żt-m^GօFzLU'ۏ1x'S,> ʁ7 BXsїu-~W7jm|\_ͦ3~?荿up٩(ȇzB))jt[qH,إg#T_@=؎3v .?{_=Q_{ooswKYvwvlDI_8º?RfB}.b%LߋՅmKavt/,ރ(/'kReUP5>Co"]#p(=P~$WMXe_eIv\L',G`ɤl۷̏y"V}:o#K)pzpfQ#ٛQH/벱oO~9!ŷ{ gK?eՁv(*@;I}W؟ 9D :ZP~Af*ӫ!:g$ӘH3˫w䤿hY8‘Y$m 05^qr[q%DWEɁml9B3Ju>k1C GhV J-1drR;>eF*dI{ccL1&$` RBҟ}l6M>ɒdI1`m\"KljV^6ƀCH59stNr(NhladQӕǒ-]T2"e;*u5ו8k`7dة^ʈZٱ&AE9a9\t<2`4囹85yH/ 2IN$hob :R͝F ҋI9e^w?YO{(֛XBI9dѫ/jI>7uMvv]K5ЇTd =yp̨ )7xR^t]V_I|q~o#^8? ӆģv_MnPb5W=iş+[ƗOPZ"t5zڴftphf"X( NkoEX$y9.e'cO˝Jׁkt\O9Iw[+:#вluh e*(C:k,[0GbVTe֤TôۙfRU6;/gycGٔeG9%\24/zb,+fSCƙnaOGhdyO}Z Zn2r cѠ=hF7 @L 퍫I@L▉EKU9i6d? i~j3FTryufx;F3I> Fc0bwOXzjXFwϟ :FFzi.[:6 5UY]"s)uv%ҡi4m^Ic0CM^3ij]cE f5L$6xz?( $ZXq^I^EbsM;M<錝4ZO<݇)I^,H408'O*I8(*`nӀҊbSlNlOu;rWT殥jVAtRujA7B_o 3Pl:I f"N( iABjn Nive2aKWHD~ik0t.$G6o`" $fZ8ӊ6$;X=M8с8C鶁rd5H ͛Ű3vcORT9R}>鳷Sխ3Nʰ/i`ZIxdnn=ԈaX(dY~&OiYXP@ˡ2#LK9&x <}7! cgNqX/ʔqèO'Bےo]YYTR2M 9Cfs3N4Y)NN^nnILfd3 7Ih7fFRT NGGW4 [g^NQ@QOoth1R8xn Ut Ӱ0>f7N%(6 HDҨ0+ 6oa]!,< wx{`3`m1u8q=CU@OeĬ{x%wdF,};aWpg!\uu:wO%K] jrNagz}][D-oN~G=4;Gs7*0|徧Ot+n^w7%nJs v,zL3F-a+ X:P)LgloY3 #;O/] 1nK! 
IDATB峹nC<Lk^,.:h _5r߿6fy-(l~Tt7Uq蹗NDÞ=T2b\8Ow).O;<(Yہ+-qhwXpٷM\ k4&ws2F58u\+d ӯNMeG>oc;YcnyŤE?^ѷ񩱩ty']SQ{بBW8 _on=u,e֗5BGOT_~m6æ<:=ӫl̹oW4?? |IU/c/?{x%fS[.w&N9y]WQ3s˄ϣ;5 lEyO 5oyCO&V,^K=~:̳}q<.V0:N43gؙ"x#>|q< Cso{aiLqos*ZxPx!kpq~nLdԐg=d.qaz{{q@7Wz.&ҲL9.V>16k>ޣ;s(W 'mHd4s3.MWcsS_ lYf ۏCeE6F t1L.෿_EKri~_DLu͗S3s*3Gmx-qiLIm⩥%ty'qy|q H{iI-b9n:~ vwNjc<;'4Js}A 0 rrr.YA40@ґשH?1υ $@Qx\H?{/Ϗo*EW, pxE4$K`Y*N$]8oiE㘨8=)xF8H0:e g.+ ?~=7MaY$a DžTn+$UVP @4ix=.lI$ Kxq9leD0T;.]BHDFir #! @w񹝐FHٜ|.HƈDM\^ B,D(@w5;ō 3TMC5MG2H\uXR:/Ń]W/LF B R<4yc!=tJP]C{:n8.#b l6z$ݲ!,a[CSlX9| Ocw= Q4*F'ٿf[; ಑{Ym\ƍ AB!BEfQlNyh3 4B!iqj/ !D"}G!B!{z B|Hz!B!x.B!B!x'ЅB!BA=q, 0ޏ$!B!=LBTBep8.v5 ˲P>{)oaiƲx{q9o)Ks6y?%c)-y1{f }Ѕy} L9b!G#ri5^ v6?HeSyo3_X7{֭f>LTWH3jvށV=Qy 3CCQ{?8V/ Q#8W8Wc˲O S.4w+'7͎Ϣ( qkl;Nذ3t4&g9I{fS;vgZfΆluihT&0#Ʀ$T/.cDIzYf]4z1b<љt3f< P#m\(/qS."u BQ\Ǻ(lFMDyuQI-ťc۷ۈ)TMbY9u2t7iL*eM2#OQ]>̈ x [F9 |, cǺ4 @z8.gEΓ9Ԩ-AGO6!>R,D I6" $"'Xr~t˲y[Gxty')C)RC>Tw*_Q]>CXSeX^ͪ ${ذ` *X6XKwe1иEҧec#d;_#fx׿Wn&Eu?/qi~q1pp ߁ŗ8Βs\%1m{,\˚<̮H&>V]{8C'J֐|eaNKQX*zv;|yy:ZY(ɁW_bN;^yk6ï!.)iv>nIms،vmJ4.^E-+V5r^5禠({_|e 96-zEG>bs՗/\#ElLR /Z?ml̓=kaYVΣz6X(g^BgYIAշsǬ1t7 u7SfL&ES຅że=5Ϛ!|:>(L K:n1V?[nnCO+9b2̓Op_:~UKۯeXc-E \OP}8itt]}_vwϬSIsZ}r* &.P K~;br~FYc,I]m9RlRrߥ,{V63~uE{,1>aK#s!^}q>[#ϥC#8g|;ǧѻtB<+tsqk3HB,XƈQs,Yx@RŲx16;GPi~#}mlu,:`Jf21_޴vpnjK #HS5Ηɭ,\ Off'w;=چNn-ISTn% G$i[Y|gDhr?<>H/w-ڱufflg O,m?b⯺?7Η'@Xp*"ua{F׿J#[fn0-Ht~l' B|ud^sKSC9 a3/30s<K_O[J=K_#pI6mcoNs1l{WnI|?_(^C͈\6K0coc2,{WJI#̜E$Ա`5ycPzjs) /O2D$ŝUDq{O`4ObsxΊ賛ɼ^.nk_z7s$cnW涰`uc2רͣ{4nL?_Ʀ1t^72ΧO^j;+f^AV7A=j7$pzp] մ~uMoQ~8A ]7Zo /{/V)\R$];Q+g0r,EeȖ:V,YJtr.nswxmimQ}#-.cXi.. GswgjNۘ~K1CO S;4Ţ?V.@sJH,,h;@KʗQI0bTVtG@N!ج,FA%YK%558=m{i>`㲅83?&Qw98m*SH۵+BlkhXJ; jFaUiEdesJTsxz^+kmM<ԋ=;PƱgS3'p_-9%Xq/K9cEM߹WZ\Ƶ~j$Lp†fJFi[ϫ˷~#_1Gph$`IL͆wv.Y˦ w1q$.&uM_9OΜ—+$T'Xg r`.7Y_cH)a+9&I%n<'3H vC_}L)hp}Td呗@UUTUؘx]xy=[t FP8 `#N3B/]M Nz#6<J|4ô4DPT4=K&8ۉeXXhr͝OxG#E 9*ؼ^V~FD)^+nT|SA#q}_O? 
֩:咖[ÍϽw\CUIys,H8HY+)//FMBQxnk)J ;h( ahn5ϮS3!~/BGN{=7M"3-,rebÎݮ| 1҉Dq 3I0&Wr&;iRp}|>/!j Yd{qUl.]?ZFrGn@v }d:innjmV9z8LhjƕKhX#aa̺fil640$ɎU<2w7)Ut:aQeyz˲.Қ# h9Tf9xxa+>=XVߑ cgNqq_q}_B%.ʦg[Ma\}E3O._He50,ÃkT8|Q:6 ڊ͡bʹ$oc;YcnrǛ_[7a%QCp{Tϸݏ/g[4)څR 3y忾idf&1,e ׅ?i<R&}\n_3\BBUqnRN^*ϏEq3ٌ+J7_~ |Β򛏲%5K=9z fK?U?yeԬ[!Po7*6nm8R#Kւx'*Uqv˻صln>8 }7R[x?T879-gDHi .nēfwXcaBeaszy#a ņ@UJ CN/n ,x$D(ǴTn^D"QT]ŌD1,lN^&E< ahvn7v$ %QTۃaC1 HxnlE4G(bxsgfױa MP- ­'FT|)nT3J $"C 44@`7[ut$0,T]C]xPp, P}>\GIDATDCA"qK8UH(H8iǟČ Eũ$%YxhBq)`$$-ۃǥ&|ֵ"E$ bn.D0pT׋ӮC2@]x=NtY/> oXx;}f#a7>IԹsA2F$jzЕ0vb!BˇϭEM)nPjix|8A[NJwՉ%t|)\7Qq4bte ^"<'k2ԏcϱ?o 7KqjH+[7`.6W/ČQe8?.t@(}xeZgY3>IHc_=i ub'j7||:Y}~ܺ7ݪ9Ҙ<[[ !B\[ Q{z6 (LsͫXo=9)~j&-paZٱi#Nϵ\]*HSIʧj%ӫ[}t*\*!B9M#Fױzִ6p!Ə(#NO}vV7{Jx}LBFWe3eNU]BJm$NrX %}$WqeU:8n31@;;=dJbDU1^;F`1: cRpk#;TgBFch :)*σ<=D819q$͐be|!B!й`f"J[Q::hljdXp&V|E -M?+25AQQ.#lb]Jin.g6q"o$Յ-޴׎'HK !qr2Js5{Sq#MqYa;;Inxeg}]ێ, UP9NS}[vk 0G:͘Hm$=)BNH_Z3'PB!C悏6ڎm27iӫsqΎʙ!Q)d޹aJG/+od-4Mѐk[h$:"vêɟO 7D?Iaf`V:ZĶ]Y̸v:f6t9 /x`/+Xy.^q4#Ɓ}+j:LGq&:zdVP c!e\=Zyb^Z{E|3I4hf\jGא2'9dA!!B!hӪM@1o8RÈOrx.§f%p)p4nm93%7!i =։x} )IǮG&Gnfk0@O_ka z{Bᮑ:=QD&;yT%2|誉9)m:r_`~[;%Q|I!v#B@5Ue;gUStphT)k1|槢 o݉,\*B!.%;S3(-t\;ضg08 kh1ĩLڢ'5ܼ4:̖hpR5d$ sB @˖sYȌ^8Tw]ڈq,4E!gn榷$!'~=*mה8|#$lf妠*}84WV3 O(9SAk6 0LYQD{;{;;$ !B!ć ' VQNfhHhaCGՑ!^5lA5ESɎ2)!U3i'8R} GSqQ`f/ro'%| یT7鮠$ԐsǛYF:TQwPH;h]aBZ!CRN? f{R7c\u6鞓h'X!B!s0tD-SFR'?bҥyl WTfS'x7N&Ul SAE!N6t?n[ >&U'33 esS9jFNr0/mm8}\:,稯dX6NVߩ>Ǝ;0t*Oxuo&5ExU뎣6 k3$$t!B!Q,˲-ۋL#l蚂.'O[Hb** iO(I6 ib*v꾯LͦꜶLaX(e,PU3MIòPT]PUIigx, 4HV,,0 N`$&a(~>4-8Sw aANNŮB!":]$ !B!N[!B! $ !B!t!B!b.B!B ЅB!BA@B!B1\ЀN;Vƹ{2[]wcuB!B|tL& W}ȱ, u!B!KB!BN !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! ]!B!$ !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! ]!B!$ !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! ]!B!$ !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! ]!B!$ !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! ]!B!$ !B!t!B!b.B!B ЅB!BA@B!B1H@B!B! B!B! 
]!B!$ ۯcAQ : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : : ۴0&ƒIENDB`rally-0.9.1/doc/source/images/Report-Task-Scenario-Data-Per-iteration.png0000664000567000056710000024133513073417716027351 0ustar jenkinsjenkins00000000000000PNG  IHDR `bKGD pHYs  tIME  IDATxwxlM@H脮TE鈢X^]QPE, 6HDH % ٝ!~x̜3s,Ͼ9gf ˲,DDDDDDcS(((iΡ&8y^Gqq1>O `#""өap׮]á O|rrrYUDDDDpn@ٔN⊊VCCQTTB9iv5?4M5(((((((wTj]|-Y>B"hɕH7NfwRIOױzٻ՟OeW?eo!FP Zw㪫/YҔ\jg2?}{{Pj5>n^0_>rHAU ss?o2z3xí|ory\i#{o.m΍ &NWv[0SEຆan`iUȳe[pc:}A@[yom(ܻ~ěn$m(5 >` <|^y+>׏}EDDDDN >UPMzN* q}W0W(V)\ z|'|>z;3/dIr߄w -d[ig5c75ܱ=o;E؋ <#;qxJk鋙:a ֧QDz`["s Q߀% c6ao8{nKM`QL8%[s(Ц7MϥClkLeԐi4vrPgߘwS) Ip6D;'eFҮOGB+f ['4ज़eH^hqBЩg[`w H36wmyv0(v֋htA;,Zy{`_wV{2t GW)/7y^JAq{q l[¸EXgBu^9&)+7qj5qv>:G2/=ݮ`΋9r!5#O2m,L6-j]`>7qMxw1)۹LY1}^:z(6G+G)p:Iƶ,0 Y d[`.غ!Nգks<_,BIhҜ!vUL".ΑO5Y[_Kg#Up6{$j͛xyY3g=sυ<'r,d3^ {P(ݢb<.֯]]{ WR}ղ,|]Evj8,UYp`ivfz^_b"0y{ y؂}ݜNEbd1>GmsfPiɾ7Uq^}p8bIBaRHr_t :\3 Klܰ+𶯸4nKT]k͈,!%GKc+UZH"vlU1tL 0sp?* ~5!O+KZ;ٺ;ߙ^ 뎋kF|c5g5SH.jJ% •\DDDADd$Ӥeᡩ =#l`hԟVd]wtY @ia fU=  GYeyj6@Ώ1#ĝ>jwI5yltS]-=[?uᎠ^>Nw+D`؏82-l]xx>l9mG1sWSciMpvX&zI 6SiͶ?_['͊}xTe} Pe|U<]1 s0c3_8Q$o3/w% {ol%gsT^ +pNYM\1BuR7lE,c(˅`]7ļLm/]\c # ,*$$:EtL,"ì:TCשlInllmLJyyoD)԰y}W|R~*%9]DԍH[CJ5c<1m nc#H"## [̌iVwk>f~N'}n]u)j-8{V%n^K+etѶܯ?z6[}x_K]4?۲(>Ƕ%'|dnn>-AxS,&a/+o}[v3ie 51{#޺<| WSE"s\,G."""rN\ѳ6k0dJ>Wi܌\i8#x(4{p0nr`?.ύ$8/ e#*ʫڲ1I %46rW?F]kkN)lZ]s=i[7 ?Om5ָ7wdväi}89Q#yk;(Wu+w=?Kg1E-1G> 'gb5auD\,UoL\Œc>_I0(n $N'Ýb+ ci`7d׮%v8[s~⬶lͤ]F;1 }Eo;욾˃]9QsS*㷚F9?_. ^5d`eC+8x;nƭq6o."""r"mĈ#Nj KPBsYa/\5޻+߰ZŬ6z_O.Z6 LRbfąDo12ՔWqjnWe#.(ݚB_t 'OLYYY lpIle7_1oa&ZrwһQvW,-Ύ"1sf|ܩ.GMi+f͜pjCNl-]Ѥ|Dt|&ݚSؼOrowkjyy,}2FѲ}m9]thFȑ*o>gٯ?ww X #[s}ӿQCQWp\9Xiͥ]Q(_ J6UIwςg_mwd[;L]Wv'wt7rI7LarUPӱMtʕ\J^eѹ}:t;_TO}=:>q&Oԛs۴'D|?/uM.Eat wӦ֮ V-Jryí㉁:eU1}^:zЍ,O~cjq_ބ{>du e3yv Ȁd#~N^Z50M{bAUSg&$Ǯl&3$$gBxSY9%Uyw2y+.o={ q0ؿ h^^. 
!<;im\塙NQ<% ϯֶ<>{0@!Yٕ.[Lu/ثḿnn3(^qohtŃts9#l@l rΜ~8~~/:Ӄ84aT*wCYPG_1بǠAM9y>"""""V ,H'Km@8]Ř1$oa~{z7/f5!"woJ 1~bY>ͮ"5)ppIk'[wg17;sYՋᑎ"SKp֢`K{Nsq\L">ԅɞM؛+u;{yL/>)^jUmn^DDDD >֯ɡ+.ڵUEDYVy\]?[,/F;ia fUumlL0 36/ѮM"6Gy ~+/8>!ʹE&B}w%v#Pn''Ro{ٞtzy>3ruXa,S NE@#3ouxL*=`_vDǹ2sv%&R񖇘.?r '4`.B2( 2sSvia$ jăb0{ g# Oz6[*ԗb:u ](bcc%4kޘ-{Lo("gdff^S_ۯvs 0el5yֺHs%Q/¡O_ ʦ^6ꖿ%b՚| deYxeY6Atã-?2X0|Rie娾L wQ5us`3㶚f9lE"ҋL/Q,-J2TgSMoz^|%>_yQbs"30|lBj.;{_|ի0ذ׃UbH?,azz~jÞ[A7rv>"""""y ܰ&(T*ϥMxvd+Ilm~ JÆ?ˏMr.o|L'=)C2_odG+j$/7⳻h(%qj곇DUiΨ.O)~M}\![H9x#] (? < ;G3/~-ҧ1t|'cegA ykcdaY_>[ǒ4;-bXZ!k2pY?Ix yADDDDDYؘް?eU3vm0acYW)"""""@hDDDDDD<+Q BQ BQ BQ BQ SD7 0sBI >/,K-|2`   W;ȉBԃ{,2PZJ2}ea|͡>,Ӌb`sm]dk` ^!ӥ9uhש,,/)4zt/>/< @Ӌt+/+(+WJia>6cOr:\aJ""""" }`8ӓ4?Sc(G0x 9ݘDx9]x 0LM9l&`+,z"v]M9?_%#Yř0lJ5eTDDDDDDí-+]J(´'̲ IDATxOm5 6!q!89 ad/{t_(o7*"""""Rqb2tHѩcPE}>5Cxcvr[x()TmX : 7+ kE-ŕ4o׌y bn{<{x^B1|8AA_~~>#|&Ox- Ax A/~k*bȧnz6>~*'xK\o~b{ Pò܍Cd_x* Ye}l~{?&-g&՚.Qؽle 1O'0mD2W:B$,_cOz|;b's|'$Ca~~>O>$۶o.;r(ȟ9p[G#MuE"N'<9y42{bֵz[FՆsڶ,+؁hRe)**Jgn'xlPcnL`<7eλW>~E[e+]Yιc} onImO6cSFJ >F<$w//2۶oW\uՕG+""""""`h81M0,Viڤ'u-J-2^ĥYȮVp-kRP@`5*kV]}){ߓ-%4됈PS^_LӤo>\}W?+ް:I{rGӊ\9dەnɰS6lS{2`m3ԡ`a41d,/̄eJ>ax@,O?O>~>9@x`nRa'E9Eq4!1=DwkQiu+Uc;Ðz!7ekFs6lE)f6@lr3&' _fߌ|5cQB^EK/!,,8T$?-,ʧYZ刡I ٝ^{ iõ=bx'y9gߗ.qU^K&/qIClV__C#5.K`Ol&u4&lHtZXG؇ вjM oD-0ZqGmz|) '7aF0 >ymq<6NbRH_ُ\+N䉯J y82RIn^,g'{&tn]爈Cq //^vPN6 prh7Z5gaDDDDD`5BhX6VI ݮ@xB,O1Åaw=DDDDDpZn _q>˃0W֗"""""rF;a"N\\!JrLSwu͆aw`s81vl'׬KlӶD'/< Gn`(""""@x0x=^/e]She#OXRNbwbQ6]AÁ/[HFX́W"""""r#/B-ҽ{wB3Q! SA/8FUm2<t%OvZ#8*""""r£aK;"lqpmL9 c%I σC[A%Q5pb'E z!67gėטW$iחV9\~/x]W@uE{ ff$>|i"'C#a鞵Ͷ܆'g֜>wiEDDDDtZ$%%6rTMt4cc'j)[ϯfkiS>W?='@y~V8+Gi𓻒w?I!8sm#g5&0fƾ9; n/߇V | VЯf!]D7b=`O%"""""gf ̓Ro. 
ٝશ$}tޘkw쥀KjkOZ@eaJs`[ :|;[P-a|)a<16Q Lviף#U箑L,33}>5.n@kU)fP1173ُәNzne߷qIY;3Qݹm.9Bۇr鳓6 L#}qn4y,&gyzv)OL$_ڑ**IW0L^`ÿ*Kl΀^kYyʦ׎f&ǰM1v8n/ }W +vٴ=qt% ҳh&e|x^,X矧;ͅnc1 ,"""""|65((a/cy'd#1K1""""""I t3-/8S+,Jv˘{1hFU"""""rZpc̙@__\Yg;.!ՄHS"""""r=3̲']^7ggl.i)/25lW79^HgylRR,pFиu3+F1h~fkfMdҬlђK $63E*ܱ-S,ÎǞo_e\rd=tNșû>~ 81pG࠘ Sa\7=nM6~6I#Gg{A-_לЪ^aՌ:KÆHDDDDDI_GJE7CGpU[h:oL;R@`5ൎl- iIJ0` 9 0a -- no>Q <~NZ ÐX\q3x:7#ƕ͒Ȫ޳ᴁiX:_DDDDDD( 'N(-:$@9cIpeR/n VzJvfI c@!=z}⚑e)hX6JhnM@M?C$G.'ٕ^{sicxhHFٵf3M8wdbqqExUU_J955%pQ ~.Τv׋$WաTDDDDDDX!}(>;ic4giJ2*l"o6g4K3 z熗=pZT4v4'0y8oiF{qc(:DDDDD cXuL?eY|>|>^ pNs!ap8v f+K憡)"""""tiʡ&8^VǰSh/^gM0!L6~>VJC;""""""g(qrRh觖9#f i뗱<̓qUWɃq͘h%CxŞx0cS|+L>*S+%qqFD#lK=8vsѣ 4.""""S3ޏL￰ W>%٭/ށpYʇodHz'g-;˳6SzBm_ְsa!'7s9SNbѾ od\âK>4 :8mnbqUQ^ \1np.""""r2យwwfٿn|.Û33yb|kzr״if+cO$k<6i)E8#h:ܕGwGuG-0sX3k"f-dsw\rn ld1x3Jwl .nnKXa7S'_NJIObгjgLشs>mI wI9'N/>nE`y濯O. λ?4K$̷M1}JR8F=p@);Sm{@.jH \6obGkpY@aK;"lqpmL9 c%I σC[A%Q5pb'E z!67gėטW$iחViJGyxZ.m͈d?MgIc<.+ e:Ң={3Y3w>4ѿa0UBBK<(UC׷ <Şeb݈OIf["o,az!z&DKSیxu)~[neaMnc1Z-skxmn_KzYa9i^Y9xn* &C}yoZeOՒ=Sj6#zs~b /Wl芃r}*ןw?I!8sm#g5&0fƾ9; n/߇V | VЯfQcI$.O&錞CKN"|UȦHa;i?7)rYlŅfAb|hn#=m4jƨ yבRjMze!s\Ֆ/Ӿc펽)X =x5[Kh>, BivNLX|Aog EB0[a){ƒcsg3uiqS^a{mpRC~pB,6c/V]\bvZ |A'4ßP}EDDDDN@NZ ÐdCB#8GZ?f3tooF+%/>{_U{m LӬRR<&wn59g'wWHfAY;~WXv!\*3Wg{|_9 $AcՖvmS;ā.9YpܢC3&\o.%#*ٵ$1dD8 UkFs뗥PҢa(ú%; 7O$6X. ? 
86H[`(!<#j,hÝ@Gq,^+VgW~Vi_ψwpy EDDDDNB t$ŬQIveޜ.mpu3}v-LuӮGGb(]#Xf0g|j\܀pG)9ރ#{UתRF i͠~ =ccog3i"Uu(o~ WD ItUO?wxlɦ7R )$ R( *Ŏ J~\$^ P^1Bo fH@HpA̜33gq?{fq0ݒ[}g8@v~߰UC#B!JuF gNĪPOYLu_kb8Ӡ][k#;Ce¨ ~ä%MI%*khILwOLXdIf375e,OZI/-KxMm_M:OJޙ#ʴ`E=ddAdrP Ղi>FG鬿xGUKB!UZC+c]4O>Mh+<` v}h;S(4&okCoTCFZڭj9~?rԚEHO3 ̩lNbO$5q^ACirB!@(v*Ɯ\X((`-O{}{a KǺ]:vFҏ,cOבJs#>_*QEo"7$~^44"X!B –SNcBi>!/5S9(~qR?XYprQ ӋKj.:Gq4~,e;4s!B!$;z{^֟&nA/jAxNr:a}ykz=63#bFUS5oC4 #B!o⪪^5[åVUUX,X,f3qqqkF]͝lt:ZV(h4%IQd!B!D!B!$"V#ngW)[;/fn#*'Z!B!$nxF!Nt|̷clqF .%VI&~GLwAރ=})$ !B!|AezczUe5_C/pƳYM5VB!B{1-Me%]'3g{F/hqۉc똿#en!|yȉod?1n^BW8<&֒O{?/Fb ~u yoΦ_[ޱҸB!h { $$KdjvN!-Pa4z.Y>-J{J9G6ID E~](|@XvYl3o]'O^V&4oBj{R;F5 k 쥅 !B!@qCQNUJAn5Mxw]JSś3B1;ѾZ :ȹvyeE_I.%f "\(,,.!B!=> cE"7~,ҵ œ|x}& W838bEV؜?_j:;+$)WϞ j7K2i8="Iٲ=Zܓ;ۡ& ~X P|5o~&ӽ#ՒA!B!ĽWZ_˴&} _sCe¨ ~ä%MI%*khILwOLXdIf3ڛUkQ6ί+"QL֟&2B!ΦzյZVTUb`X0Юm9rw9W7wthZZ-є$sE@+B!?FB!BH B!Bq{չ# !B!]NzB!BB!B b@S):?۳Ӆa?k\F!f !B!DBSKp}3 ױv{Vyj!֋dz{6Kp[ɠB!B;gdAm)LAڢҨB!x ԴkDޖx-5cſ.x"(?}d`dOBE;ES%~)jRm]a: %+!wZ'-jL?x\躌žnM4NԬL-PumãPc1HB!BˁP*j^:`$)]0:mn&k\a-T}8f%s(ϧL hLĩ+?PpM= Pp 4hCՇQhaxe&Pa.lo&J!Bq¼d,iI C/_P;K]qx{z0H?5}''gb ?]^\y2rhXOpNCCh-ri[Y `>E7!ng\,w4QapPjҢB!qV<4ׇʄGOWrmGS[BU $[b>z`9rCc`)`duuBV?y ,ŪL!B!@X10aq2EgWA(JGūռ{HuQnzyT]vAgp-߷qK !B!@ = W}!~z='<#ѐD_0gY< 8\ctEXZ₶vdxÃwh@oOG|.:LpkchSM+mZ!os:tvcwj {1Jǩk8CdT;]۰ڊ]YG=jq@+*T Ÿ-VP5}M U반mRұ^yӱf_AN5/VOmzi]u,7*fƓ?rg NӶOvҶ.GԋPt#m k 5>M !B큐\L?GwoyX|_'R?C ZB?aٳ 0 r!86*1PhSec{aZ±)^6ٷ ˓բ0r3O( S{[wzOoBӥ{'󬺛IU)~Jx쉧X-:ǺM߰+?#Ty_i!B0xӕ/dIRnk&?`$c8rb=mWFyg6,t܁^8q]{Jn4E[ YܹYZj}y^թנ#wa p-!BQՁPܝ.r8!^>~<`ʷJׂw!NF㎽T5-X2M~h:7d'ڛqZ) 0y'`?NљFv%Wl?8Y__j_]ۥ!*jѿ&vnFN#؀GP}ZvzkEŘ96</v ,N-wl;>~lۿɒg+g:C8UnCyc+aμ2-^e{m-~ iг+^Qab$V=qp2ajq W9s<Ը\td*#}U}݀u}0SNߟ؁~ %4Z<'s`&ލӸ $O$:#E^E8t(iƔƮxҮnѨ$nq]=4AQ1^\9U*ul\2k!ПL#hmt^:74m/;E|/VڈA8ྜྷFVZ!B븩ߠ &g߃ -}j;9_uhkQ8QV8N X\(k<ͯkRЗPj eê4.K_8CW-6~,&ept|;)<=K"Gfsxj'|BY]z:_Z: ~aᄹkxzvc|`l擢LrP͵V<\^&Ϗ[ V;h@mK{_sHm/{vSv0˅ʝusVnsvʌ#սF56~?z^B!BT1J]Wk9]n:tEwTMp$|۶ӥ-)}i@=Ă+pjLF{D;/=xCfao*x5oRDaa!iq2RRC-s?ſbE:=| j`҃p=<Ҥ5 
N^ء',6.K]K꣪7nn\uMw;{91tt.(aP!|({F0BV1>!9@/{S xxB|p,}ZrS#ds Sp.R2LjSn<_S<kW{bV>f⚪géu W{EQP0|6uM͞+<Ϸujex֧{|gL:תCsEBqBd *̥ mg5 uA_C9y9xի =s;E4 \.7+ɉdcORM3'HIZI? |@.8xQ,*$ep| ۖ'5uUN*z 2kzZs;V 'm=o7ƿqeVZpiѩ6-:%,W}wײeEB!ZߟzcfhNch߽zWH,R5rhTJ[zHj; ˷^BlJ"ш[8Iޥzh=R.zSlpe-DHT+~hjsn~U:Zwđ?u,Ne*o=7nn\|a%VO~ȗ;2tֹ:B\&!BVe??-Me%]'3g{̮ wJjY<{CG#(!BܽqL:8LXmx!52ѭ;IrkcI>F/hqۉc똿#en!|yȉod?1n^BW8<&֒O{?/Fb ~u yoΦ_@/hּ4ԜdUpwVk9Gӟ:y7#=Kiu%q0 2ĭlTã@^v8u:7jc=q7#:3fT?;7ouZ`w! V2wϚ"A]璚諷_,6{ dݪfCܸ0 'Iu \G9 oyl)/?W<_ĝH(7^7y$*_z1n`Ͼ8N@M}yS4Bq(j.=Zbb`6]6Wm{J{{?y{ r^z_M zOѫ,t%WlϐOMt,|kMά}ׂr::^</ի\(9EX:< u@O;t:Z-Z()ߒ˼WSςw!KnB!2[qjI& eIr6\lI%,Xg17R~N\4nD $橄:_O\@HHB@eb "O2T!B!@EEO13qe;R+\˔-ó|$>vYl3o]O^Scz׉g_Jdص| !B!@qCQNUJAn5MxwMqLoxn?gaDj.x eI~'alXl^ݢʥL!B!BBX!.tm:FEd7^_5vմ Φhѽ66gZx I6l׸LNumཱྀ]:NaOʖ%%|np_iWעeJ]Bp HF̘!VۍB!JuF gNĪPOYLu_kb8Ӡ][kv;}ȣLoc$= {dy[5r-- 8]1 |"lv}Kлa @sWP!Bqg%O* MIU#B!NB!BB!B B!B!$ !B!@(B!B5X?'rގ=U(#2`6rB!JW{m,$K˧\|;Vy|jR2_aզROndѬ71azi\B!B;g@z153 NQC4.!B!=Ӗϲ.GЙ3ؽl+p:7ӣޚ/!3kH0f0>f)*=a/x bVzY4!lW,$f&gZ07#y] fyL>rL#*`mHac}4H/ylz GsB!BqL:8LXmx!52ѭ;IrkcI>F/hqۉc똿#en!|yȉod?1n^BW8<&֒O{?/Fb ~u yoΦ_@HhbHU},`Ј΃F2IB!B܋PQˡH΢)?#p`;ߚF%4eTi^)h>h|˖9{$歁u04Spf}փ -]׀:4g\g7.Il>b±E3wƛ<|}iaB!B{/U@I§&gc֚n^‚8p&|q0Zz#ё%a@Fd@ObJs45t|,\EH+V]h P+h4ǶƆN|HiaB!B 񘙌OAeJIӖRY^y>,Ι7^|Ů{ӟVkM(Z=: 7 C<u !B!h~BEr Pzu& "(#v6s~B݉fX]jy[^Yv~ 8= $l=>r`{5ס1#iPT IDAT?0.)^CjmY!*!B!PC;V-byG"]ۀQQ<͇WMa z59)Zto#e)ÿs8B*qڌ}t*3#ž-K٣= }eCk"6(GNqEa Zx"_$3 wQ!BN1t Jd̉LڽiHYLܘL8~w[OkF y eg ^}1/W< Q2Ἥu8C"H[5 @ff/7~ =r a;y8P4/5[G}?';ݩAӊWץa,3I-<KF6#|L | } :+ !BqQTU*:]zjZQUłbl6@m\Fa1ckרmpH"c :<Ȧ{{_ ~ {vB 4}ȢĸΡiוZ嫵o4t[HMXO&B!NC 9|]oWj,>O5z,&,9d)DfсX\r B!hrT+u40F(7y'}kȘm\0aB!B33Jɭ*EOyESʏj%EB"_?c !B!Piξ%ڽobM̆dI^:}fTqB!B>ʹVS1KS܈C8ݛ'127~kޟ=:O:r&_ -B!֏2j5r> DT}U(cƱ! >}-7WrQVVEQ4(ҥ'B!?5G-mj>09=1dwy. \L68S#@_ %uc-Ty?_kB!@ӫ zaܜ:Z/b%sG gWZB! 
iFgY߁C#^6ؕ[8՛ BobsF@X5$u3B0GF/hqۉc똿#en!|yȉod?1n^BW8<&֒O{?/Fb ~u yoΦ_&W7rt<6Z[3՗B!B{5=jr>5{;Vg_SÁ~k{,gЌQ=zi\$"MS>W ,[3Ф.N)ÙZ[&tY_K8žs۟M/X/W?^$l \e!B!=*NM ɤra\bkMR7/a 8N>8@]YsH0q#e ̏'1O%<55t|, fR6|^M31S!B!𦨨iVx܄w8 b7g<30b`w}@ #'n7fX#[Ov!)L]˟gVL rUKֱ1;NRTf H\X` |`OBnzDS;9Cj:!yP!B\)%ɻkLEfrIyxfRs^)c9m# ݞz[K >1<&ɢgb\2)*^Dca|>:9 & E9%;7NRf%+Y;Őx8bә׾OX R\i%xD&La4q$r,nLXMw?廭ȵ^#_<ʄQ3IĘs(pVyZә>$U0~ tyi6ovB{??py,vi_S9'DAݎq0+ʵ޻zO) >1R6 q!f6 )/94ci]p_! |FsW7bƐ?[>uoUnٮ3!zd!M֡ǠTc%X&aAE'3xm&[p===ѩh+4|"w`΃ns;s%J8@v9œGPB!"&Wzrǝ`Iҙ ލ8{ץ`{YkqV]P̧dDoL⅓|t=/l@a>S6r3a/GK8ٗ\UBp tV䯤= oîغ,~2 OhuǿbÎFc ]_툡,9eQ B!BTci_65 x=a/d vI?l$w[e9_>̲ n(hy&\3\ִ?y)44^sRu2s͏|~˦o}(#e7v|<'û7WEA[[yA#7u69t}(N}gũny^*P!B;}mp'N\~3<켼.گ݁1sK륤sM_Df1V] I B!T~Y_2 E?=:k,u!3<鳇eHY:WE2Lg8]Ϛ7jw/qϢӿw 7c-Nf|T.;``=ϥ,2Lx9)ߧ;B!B!H&+l&XJ_G K P)y?LkwcazߚeU#)fi_)HuXl 7sfƻ浥ȶBn{&`gS0c&yWhՕߧ.Z?'r-VyƣĎˀȶ?FB!t_0]^uWlTm˒uK ^i\2ލ[%z ݜtq+!T4+Q&.G.NPxvFAޭ8͇_k͞Ծ$~/I>NAѺ`F~3+ jKغ'iGi ?fߧ*vKj>09=1㡯\>U*h<V́#8U4Zȯ}7M N-=o^ |e !B#%A;$ _W;ޅ"\NzZq0'?)+~zcxb WXz,90(Vc?y@z1 k;?LJ;j1ϫ+`;/#5/B!=͞E[Y/%F|٨:1;촴ɩB^,"t4Q5 q@;\_`g=\8ZWQCAg/ aڲ)#p>y$9r z3z>=^Yr9}>ncRޓ 2(meX:Eb`&~BbVlx#0c`|8əFTې2/ wi A|f6aB B!N5޺]UoF#ﲔc-_(yM xpG郎"V$^rtr8Y`f$<H B!BKPC;V-byG"]ۀQQ<͇WMa z59)Zto#e)ÿs8B*qڌ}t*3#ž-K٣= Jm_UfDwIkVhA!B!ĽWZ_˴&} _Ce¨ ~ä%MIkȵl$|t;'vl&,bK 쫡.fn"_ ;ڴ~v*#Z{JB!BUUSzaX0Юm9rw9W7w>NCբhh4((rB!CU^mK'_.j4%] +I=FQψh"=/ɷh!mCH6mJ2/6>?M;>..bUB!kI 8vKpjLZr"=9*G!BH `r-E 6Cvu$?N.[B!@(?MrV!C؁/6mzQcrB!BFܖlg۱8S#@_ *$xGu3oΟ&iYwbgEbF 5K=4𗣜/N!B{3z8VU 4'`OLG=CL{sI[YUWc~V=wrB!TᐆG;:rװގrvX/Tww`̃!V=G&rB\wqU_gq{+C.4GYY߆Yj G2ʑ,wi\l#d ,e:pD x@2'}j|41A'T#cHsya"j"|<P|$4QZ.ES35I w0x bL[ah )G+3)B&9$fYHbf$/ 1H{2[Fv@RHu'j˥bSn+6W n$:C~ٗA>eg^%m/^ѻ§ |.~E?i7^Wg;7q;qhU0f.JR4ZP\܆f Î\s.7̣9Aٳf@?)TgU;Ǹ Qt p~(ncV*> >_-_3)B# FB_Y@ );gK\JqaK"Ҙ$#;7b !N ZIz ɥ&JMŀME?I,X3(R07NYƇ˃2vQ$hh})e1 0P>*lyyLO.*0ZPR6×"= iyGgi=myF|{:ak !Dm }:=/OrVaЧT(`%=B=r* |n6ۚc9ф'H6R !js Th@q^Qq KR\`q{]+5Fr_A7PyG5ݸDlD2\bx[)luvs:!Ey=d|~BKؔ0c & " !D sb2,5r+ |ގwnRÄ^M&2D !jq Ը׌;V@r8 
pۛ&3nQK}jiݣn:Gn|Ǧꏃ\W*j[-ϋMf0zw5hz/|*3RHbZ:1{iY>c㩖Uuz>|,+66ެܟL咽n(B/pEQߢ%vhTշ:W咽l>'?KގD !ji TQtU'`TШx:2l&db|_ Xڗnp=W_bELM\c_+jASj1g1~k6!A:/~pmBˁx}xY(gpzWW&6%τӴ}w Ʈ1gD!sX<39YXaS)Գf>ßY6RAg+lhȹ\󴳨3k=և-|9.8Y˜B%PSێӿE=>X}; ! /qB{go-kTZhYCߍ(evl'k+@(D#ϙRb=ElO"CqB{']bav F>{ r⌆.V@#fG0I'BH 8K+j;\xN=Ŧ)|]qBh2OvQ3ʖmH+giϥl.AnC5nR[ʃ~ʄbh + !$ qGRrxnl5, 뻰S]?!3 !jUrC.Vֳfqv; ?Ҩƀy{Kz5!jI 4H;~dm~6T^closjO+*%(*5vD[ҙYwhlN!Wӗ[LЧUw^3/{χ{AK)foM]φ*ͦl*eB~JLY,r`5sa?g 58tcؘ!U"1o]ݞa%2I[b 3 !>ť_fCVMu$}o?3!-%4p 4^~szWq3Ƃ$ڸkji&4Z za>y^L8}` q" X{3oJ`ƀ0z~I[c-3 !(7 }LŤ>!<$ydCR!DkGwmٿ,ˁP8vKb>Xx#y3cf(3ug0YMEI-2Ƒ0fxcdݫ/TV Kƅ,ڸlZ<{ 0fgtM %[ кmhw]?`N}MrٰڎX+b_YcJG{U_Nh ovmL5gQ@[^Аy' ĩsg_dա\M'{ }y2zj-EB@e,yA5_7kD^6e&MfL] nA֨8R *fpU ?c{ sҮm/-qr廌]G'GA9{Vx1:2C˓=&}Q)8-ŀzWPUη(t,XZҌhswbvx*v< IJ=ݞ>ڹQ۾׺kl'p9,h0ob`vşuGYR; e$PP__r#X9bDwWT@p%˾ )>z-i~E>ms /w'NSؿ(-=#|!7q=~J[Ij?WJsKzNPR[1?K3>NV{#g+|,q&G+3>9p'[{KQ5;^$=R٦b ?JH߳}XbQ i?:K;Ҏ^0?| 3Ci]/9Wۛ5TWe_^5bRx0oPj]kvuw|iz5$#ۣVʩ!ێ^k _'+\@^>ʣw1hj)EBg?wB(k^RJq҇f2HKA=hԐ~4rTߴ)$6"ݥ4 6۪5!43;‹Wz#jeʒ_Sa0X}캃p,5/')D-&;O?̙Q O|xƖ)B+M|۱u;sxl;p]8Ouw%I|x+:ʑK+uZ Hdm8OB|ɹ:ڶdp?/mU;8~.gGT ߪXNOYkwsxVN[E#Nj qw# Q*uԳctRQiK o&CVADT:-_EiX5}F Cx0!f2~!K,f}  1ƿŜKYrX:lkzL-2b>7b #ZymC:;j;0o4q b)+UNe3-iU;C>!=Ee9N`UG0M|Ts4:HLTB?ccM(B;o&HK_^ώ;xSG\-gkgOnn.jJRd܆(VF$3_(ORRRܵ WE$(>>Akvq8Og Q%39$|Xk:H1owD)%'Y 5 V'Zys#g|"y~MgΓ=< ʌ쏿5nD/BH  2oQwՍ5 7ĸ $ QKֺQ)Rfj%S_ B ۝p3թ8[1s#`Q9NFo41fJ|D3jŊہ'[{_GUBH uرXx#y3cf(3ug0YMEI-2Ƒ0fxcdݫ/TV Kƅ,ڸlZ<{ 0fgtM %[ кmhwCs_i /ӆG_%39p[ 6M[[z_ԓv)A_̓gHADx0u1:,58:t~^hPutEM1ǗZz8WmǢI?nwd5%s@ f/locϘȜǽ.kKjt\.cW|dN,icN $GƐ$o FEEx N g1ՑP{|KBς"M?ևQ߳ux{ۏgHJ!C M} oP( |?Gf位Q|l+)BC}|}}1)7S+FtwE7P!ˠO3ג]dӆNNyQO>9t WGi1 ɸŸA4*u3o%`^^{(m.nC3GaGd.9_;V䆇7{\\㗘4>Mܘ+z7ՕGX$/?H:w7%6j)Q-?~:Oit q!?^$=R٦b{/JH߳}XbQ i?:KA!m`~I&Z_sJx@̹ 8:Upr6VMX};>9*al:N#Wk(BEvxg-j;4dkd*~v)B0aBCQt?j7HO&g("oYfO`7}7J4J02!L;}ΜmFfdR`4qXb[+%e3t vg nkd]uT0c`ʹq,@`⼢rJz3I t bl$geqSwTCލۻ{>MF$ (Jh%fp퍷սNMMEVRP*(J 6. 
XGG48<)5v)))V|,_M Gy!YAѫN`ofL;RSo6 si4p4"+Sz߰:zki)#Lod- +?j\ik+Xl Af9SPx8OuwMz>R ~#7cmxtA]DrӬ쪜jےd[ g9{W舊V׬PZWyxx\ʲA K piJY}4oΞ?h$1{Yzj$3Iռ~嗰nD;hb\.:J*Z4A+Ea7'bƟ)42F>5~OfÕwտoSQtNb gP%!f2yp3 w.fʄxw Kjl.b⛯1j89_+j˅ZMeAdl&8B#a00)yywR`0J F>#-ۛ2x4)c9s6ҥ6j!">R܈ F$fﲮٶ4>#^6d2#]h4^0 zv:3Rٓ{ׯLmj:^w1PKo*}6߅Eo^Ğ9dЪ ]q2|͔7L,U?߿r?!d2$I,d}U~L);Or2/thXk`Mhi?D[T* ](8X1c`maS~t q@]$cgk-oDo'DmRXb`C jY%K-PeO׼IAnZ~d0R!;#'ϟhͯ] 6 ^`s ,K)`gL)26jՄcy|i)l_Ϝ'ioQ_v%gH9wQ^S/{mp^ "DX֨TFvnD7_0oG=OR*{ l$1H /LP v_H,Q ,ݓȦ)%9WIE:sbhT[4GS\*WL)y|)-._i.Mp1ȩ F8䭱<+6vh\T"[rs5K\-E,4*& y9NT+oB'0Ka0@Ox:Z4ohKzN $=G'Ot6Z>ژ}2YwD*=WVɊY5u:b#8yj9YQ{8+'ZﺼR%z iW:Яy=r MΣUО^xٔWXLR`k$情gKz=?81pxlo"1Sh{|3!{4["h cw[>FֽNe0Dm\ȢI6hʣO*c{MgRu[݆fH{742y;mx7Y2W< c%(Tmi3Sڶ$zfY;)@fjUۼ["S8d%E6xØkT|dsJM`6 ;w3sOxl-07)7?~Ne [ ED>6 uY* ]2h]QSe0~/No#q[h$ld$~oj#=Р.a '[%ۘ32q!>ۥw*VO sf7HctBe( 924'y{L6,6/bSpZ8~Ts:Rų`UP2SIGVf!85xfo3m}_^U>}ʾϼ^Kچ_wO:\"ȋɡ=o =Mϔwo6$w5F <a\ix{ <|[[{ d],yhBo|x 567p7>,jio]:c^R!nGβr2whȃ]ow&]qp !}'rv6YC[TJ9#\<Η%%һ'8JQfF׿;ٜ"՚@x͎4=R٦b&KH߳}XbQ i?:KpJ;Bz(M4񲁘spÛ.K1fOx}^HrV!?\Vѳ|”`kQۜ:W{i;R=vfwe{-*:6q|N- @Ox#[dso>Ҙ_c>tD!BTO۱t>Lsojo3aBCQt?j7H~JZ)QEx9W͞onUh`4KI,&#g#0-;9_= J^HPv IDAT0}`>bR!*yya4j_<JQq&˻-շN\΢q=+YPqqG,4* xX"D598ϳ|c4Xh8\Ҹ.%)/o/}x`Va.ӝ ԃ6A iH#GMۻ{>Bb#]˜K̾3o؋(: 7^Q`f~ -`V8Zɫ.sN~lLㄸi?xjҲ0X3.+9hD}'sS, tybiDIbR BTBƬ9BOۻzoB+M|۱u;sxl;p]8Ouw%I|x+:ʑo,#7ٴzœ@r]b¶%yqndu$>;ՠ^V>x?+"9!NRhSesd˻΍paJAʶ43m}~).-[t@gmc{mkI.JAx#;ZqLΜŝ3;K 6Da0ʺBTP`Qu?dTaKGm"VMQaC>LȰL_Ȓ-})`kh_íh}f1g"&Rv`tįCقhL⍳ˆ5G^ŐTSç[V{(hg]':hb#De4W]Q)10s/'Gf3Y.zvߣIapwl6E%eWljzN8Xk8t2 fh &nz3zIdhR!ȸ Qĥ_`wۢ0LN]h4^0 zv:ʳXٓZFRT*/(nF6F$&lrOZq7D{9ϖQ;l0耓 p*|͔W6E~xᛃ?϶:h)O QVOf(tgL7ԡO:ŷ־I>9cG 78aڀ JI[1v&&9'nLJQ*>!*l&lƮzf-PܱG2cm^ wVC9u/'HA=tLKf~DHo4`PK,j~Fm.0b!\XL8tZhxw'͝5+5ͮYt+ MZQKNؚxq9rqċhKDh=ɴaz֬Te{ƭ>ש YFg[acVf=grX7Q ")O,>LUaP%ZڐSoG3II7b).mJeΓ nCf5oʿcԑA TNJűxx?\9Z).$"=Kkuקg@twᓟO%ĽaWy#-xW uvdS7ѡI -y0s=sLQF xum'n@q3y{m$|3Fnco-dS|7#:OmTԞ^xٔWXLR`(ϒa_Kn3͵G{2"ZcgclK3Ua{ AJ:X8GTJָؙqd.;orZ3/{7{ԓp)pښ9UF[ckGwmٿ,ˁP8vKb>Xx#y3cf(3ug0YMEI-2Ƒ0fxcdݫ/TV ʮm\ȢI6hʣO*c{MgRu[݆fH{7=l 
/ӆG_%39P.ZH.}"];~L};kZ05CD3l9Gc,Z-ꞹdL& ! tvWg</50^qN* ě}X:yp3sGNϹZan&~1Ĥ3v]$?Z7p)~Mg| Z(U't2Р@芚b/{xq"x2E&a3&. xkTK)@v3^8*ɏ1=9{]i׶v8]ƮʣՓ Ȝ=Y< ҘP H!I>͋Xb@# .KoPsQóNZ7ٿx WwRs.&o5:eL>5M=yϿacea}vU"BQPI;CQ?!o0ًBymH0iQV$+yIx8h3.kܒy1](BT`W9f|a<ߡa* rڃ)7S+FtwE7P!ˠO3ג]dӆNA^OQZ~{LyGfC2.}g1nPcJ[Ij?KO=6##2~Rƒt&>E-kl:lS SZuH ;zy%M"ꄬ^_7,/\Xaq7׃o-6f+-()|!.K>/=9X؃u?{㪲!>ӌ +]\i e.k_3klF1k53Պm{KcH.5q~Pz-mJ.*zS)!}jNs`E)JЛnHN, nsJ;Bz(M4񲁘spYxC_O~[/wL|> GsA}+Ƅbh +,!"j7 ^o2 JXR Mľn4ph Y64dy r%z#iYu ejLhh6j:#@m_ߦ4i=|GIy\r7{oF#w:1Ro !4з7./oNUȰep6ZIPJ  28&j"Eڢ]9لv Ta(-K!4zCٓ`}Wb,̙4jV,P^ 7z+~vG\ֵ}cQVKvR!.0K?9,NgZݕ`⼢rJz3I t vdl$geqSwTCލۻ=&b#х]%4 ۪zv> J 5sݧFCZj#e8dkoIGGYJfpHl:4qƢvy1eFOt i8z:cq+A򆟳ˮTx{. neX*7e};N5 ;YXI`Zw+NB+M|ظc $,sP i=CgO=QQٴAVoã?"s^٫F(l[2ta 2'ujQ^UJ$?>Cܬ۷?m6yWF-=@jnˇx6DeT{4*9Kva <9 C(0hñ-$+ NaC۰ڐU  Ix9W$oɝO/\Q9ظ}HRӝ!CZKA=-\>N.ѵMXWmi(M[Ī0*lhg<ƒ 6 Ye1S/,p KPvU^}f1g"&R4up([p^ƓASj1g1~k6!\2䷍ߐS hh }5k8hb#D˜[кZ52lyQqNb֍h Ezvfa0jeeՇ.BZ9&q0!lؠӥ4H^ 'ٹ &㍃}Y+Veޣ9XLzMGSSސ=)+jkgO4GY +L.}i41L  z;v@rvVQT|+#^&<ߡ! B)))xzֽcl:,}X'60@^zW (l,GfwPW;~8֒3sϷ3s^*Ha48o/z'd][٧[ F~l6Ԏ&ol!{v2C7kqGYRZyK[2ct rcD'? 9VvoZ &#m+"xj;(5Ф7^2~lcj]\׶O3O: D~N <ڛ-E=l,MKof=L Ŗ)L8]qzg}$d6JvdT@`Go!$+$`,GgРL0rYI9r*ߎfmGwMd=߳෉P]vj-si]w7Fk2KQ<kVߞ{Gz1POD ʆ}?ĸ RQ#HfKd*#;7"ݎDzQ*or0XZN#+WSk0x<< g[3"/?.Q s;ogAalub.kSħs*|{ZA-0r  h̙sd17ScV(Ѩh TJ^hȇqD3Lnh5GlJn}#gmۀDzQ)thPpܳp`! 
΁K㲫!@k#E%9VHtr>ͤ-™e j՟pi]ۯw{;-e][Q7]IBF>ˇ3#*?u?J6p?YQ{806IF-:bJ J% CGUJ暲pQ+1S) j%3\j( 12 ?Yf5Bm-4L_rШnO A¯e_*[;PV?wK\89[mR);[ >geSvTrڎZ%ϻ[40[ mU)p=D7{?w?p9,#K+hOik}/l+R&t)ۙĴŢ\{NHE9a3blw{i.aΗ@`TPj47@HDHHTeKo<,HT Tk$H7n>8 n Ylkb K r  R#frl7Z< 2ruP)đ|Nr.p;,ePҭe|ZWk;I^HSQgDbc`DGuWg5U܌ 6.g}XqS~B =)R*PX\\v&))*"4 Hɼ yUIs3%5ALDNBW [oߐ;Nһ'(Y׻4&3Wd%] yڤlXhc`f% 0_3<ʢm$ )tқ`A *W,(*R*,ҥ PHBHB&$dw@J9#Y<τʳ}FN$9g@/BA]w2{6\ǿMoio5v1-)-3Ž 3r3;ߕT+?Jtc ד!m"sY[Nҧ'+bP)pn5 4dDޯZD&RD6l-=`378 ycc4?QD4&3ϭ8Fi;˹PݑcL *'7rfkLۢ3yL,1 s/$_8 / FOȰ~1 Y'(n }L`={Y]_^5Wd$aL]]@y'DkX2e2ϴLRbpSBVe1΋0ZI(c,V"1ƱTZ.oyOvm_7[ 0Y(0WX.ǿn(-ŒrR.JѨ,*&,+7VH0-T+d|4,q/Dt\ARRnɎ!>ȥR:*\d̉g*yL5J!fĥs|;shh8|KP}VΤ.ڮ:VPwi"ht[\gI BYY% ܐ<.f2xpԻ ^}NӨ.%!'GQAxmy_}=Hې91_`lӟfH|ԍOubg9&n//aqy) '8gjHΥ IDATY^-qDߨS4p2ČbM*5G׀R i;[0Ng{|&ya$ߖ J*BVZ?ܓC=Xl.2 S,Z2Ԛ>2q>S@ gN+LL}mUDTEi(k;}s 7q^#BG‹k/qϞ?(h` 9Ʊ;v$ }[F3xgOyve~.Wq*Ibm9G|\<), c}fͱi >n@enw'x7`q#m),`gt.S0[VxsI,MMo#k9]oD(" 1XOQ$mbO #:RQ17Ǜm*][\/m^q3>-OPbG<duh\ ;:?}?ZYochi2x]چlwXҭK`S9-|!};^4U?´[ۭQaD`G|U嵸Vp5,8i+̨tAG ~jL.L\#t8ֳCnZS4͍'Ys$D \8] yI`W843w@ og5v s4^OFVwdi0D֝]f-) Y7-D j'f18m۠J,/4X,L&& v @n]E"O38WHD#7 ፴* -:~{:7$'<FmqJו)Fy>/Q>ĥ8}RF@.:7~hFàk(,-7g^L ۦtF)ŸQH-f}4r P7қ@ppPѫ3A^$gۉlr$5, 3n .5Lpb5, S?B0~/tĜ/#NJ no|ieKP'$w*z:z"I:|mj^1'NP()71~QdR Fb:O\I%"2Cx;iᯭ!p]l3 *0dÑp[(?v'@m@,*J$,hg3Ʌ$d[XN {D™<Ś1P^/i#m|Y7{ۋ ^MB{=^wbP F zD8gGzNdYR/mB@BtI%墱Ws0>mgk0XO}^2ba)i"iV2 tthhbGT ŢbnSz5QS-qS(*LbWpYy(Mҫ)7B % ጃʣNߗ\*aʘ(._?qۡ8xSgXXoqi=Cux9ᄓBSVas( ͨWkEV-9^}#ILC0&h`A 5ZF掤d\Ȏ\lhm&isl+k 8Lpmfz6 TL}m(eс8֌:uL(dJȡu@ۧY3 [R+GDxsOζ5NfWٖ9!SO_S9PHUU1%2r!b!QPsU@ g<UI5+:rJf90mIJMa??֝B'yFͧ9;O1o0Mݵ>H =œP)8Odb&;ZNXR.ehk+q*&X#[cj{dwE|e&m%'jo652UUT/Z_abYc2fA,'i)+犯9(A4h )́ OXP'Sh2>|8[)/. 
ŽcV 3r(VT z>&RupTNE$f/ecuwS3ay&2?Á/zp [ gpw~962-*K-A^KyPhৗ;þV]|":EfVP#lJgD1[j?Lb}-+r9|l:@Rh0?9cdg4I9F@E3{l#}ě^b=G3e|/TlfgY0S#PDo\3lAx/* `eY,=OZ rSٝQ;`-}^z9r;)4&^Ȧk2YMdbIiBɫ<3ry,8η&H d t~o.A19_fg3 I)"K_F@{4JqOEX VBD¬a⫑E ؋L]Eμ?DTHu B^S>TNn)̲טEEg󼟙حYT2w~86J Ց,%@4j]-8q_f u폻$md'.a:J!c32KJ*dGTtx:3VQ8κu |?]S7]l39ϲ-&ޡVP=V0~Qj9_>J,\ׄ T8ϥ70+_32 eLO݆m+w*4@F/B|)ݷO+&\RwRp27F5C*';UNkhd"r"K^Nj'XRZ!^l9Ps9ۈFKd/i덅 C@PY(Wzqzll?ˠ^HԷYx=csQӢL:-ȑ|l4eм}8j`87(9œ9{ i' `~<ֈK~=L1F2 $۩3(V~B  |aB,AuGL`-e=pSsp,o?NV(`6Wjܚ?3/&sdOW}mipTq|Ljv)u8ɍW{7cKexѐ|k 'l]ְ۫Pٺ,tj-?BvBՇT!>v/`ϩ\ym'ZN|L>w6-|ɟc@EgfɯgƇ'; ڹHhPZPrR+/'9W_?.娬R/DqOI]#FbNNb as<5϶f[Se ):\eQTX-*FwlD;'EFшP}:8#S"SK9VIG?ǮY7VP3<og5gR P*Ri=}PQZk.hÛ1Vi|n1/ LpWpBdym[ƫgr4Z`SKj;^ǩlf@kX)KWrݹME&{|&8)8o7MEjeqEEF:kEťq}?û[N1oD+р NoC!uGz7 zJ:;p|>zB=xƢ-5B&]{UH*`Gt-u6gpр6OW֊BBƌ!a0{7XpǔU: jZ!Zל h h1gYitiJӳyaK.M!{{ G61y3}U)lh5k%Af2S%0m[_ø.TK1~XEP8a>=v:q~Ppg[TyjQѲvW[‹iswpt)% z -;^%1U1؁r&^'#DEgM8s4AOFV( YjUn|<c5u,ܝ@pOBt w6g8jDLY,.4X,L&& v @n]Eqtzr92 LD"A*>%bV%iiixy{#SEy 0?m[.7>%bz9 .!f5,u70ȪgGZ1(HwGź h /a|X .E2p)v86<j(+.v`Æ:!kbܸs?r룘@/>T@lS uK=U8g|猌B&dhj]! ozNpGkX=0GRmb01(JXv ?NgR ^ jqk =GTR!fVJ0xx5P^r/F[s"5Wp[DyǓh^AB ?:9œh#OfoZ:0F8‘\`w!| ? ְ)'ag٢ZO*X1x򂞙?kSW&tm$u8)ḅ‡a8V 2=!Μ$N+%r eLXq{5soTD A(4$4<w%1)E:CV\۫}v0u}w8*gk0XD5U±D=:%Y`WFXP{$ Ѿ=ѹ$d4`:*  QPR m!o n ¤ՑX8 ?*@Q+XT쪴0F)c0 |3^4?jX ~[>ڬXH*XB:%UAGW珷V)LPvR3 G;QI?Giy]  ÿðr=^^yrp,UtVMo͇) A(4p<U<B#Wk8Q9d5uo^|+ E#iVOCa(Ƀ>Kg,p3.p4wGAՇSΤM LuRFDkҗ#*|0 RXGErvjDF 莍[T=f#s8Y_ TTȽ;_@I[4剬}=ڛMϮ<7_|ݵVjł1md̂X_2cHÊٯ3~ xтYG>D)YZl?@hi%p[kXw!a't|&'ZHHMpGb+QEWĠܠNuaNzؐY\p 7a1Xyow ]x[ [N۩ .Sd`*e|Dk2 zsUs1fC2n\η$ IY$mś_'׫?&>Ԣ jTB^NVC z.ҢFeXk~<'+ _'@T8"W! 
BYBÆ_^bN+QEWcH-fpSԋ13_c zvcK- jٴ7<֎èޓKس `ڦV-UkI53qՊO?Lb}-+r9|l:@Rh0?9cdg4I9F@E3{l#}ě^b=G3e|/TlfgY0S#PDo\3lAx/* `eY,=OZ rSٝsh+/͑ %2=GԈu ľҢ"9B~?C-A·Cy=I׾6v1 &i jN[bF1'n,KeTфܠ~g3 I)"S_F@{4ioxl|In'_g;H6twQiLBFBj t5C}Q A䆜R,{i[Tf::3f]0aa68a2敁8mcϘ̼G}/˸抌$z h7b2 KfL@JT #xuJڊ\,?yy_+ ex ʁR$8~{͟^ -xw +[ ȿ'HZT+9~:I3*@NS3+ގ^z)3bS䅆w-X8/@'qԄb tR>j1Y[N߉l*\zJ!c32KJ*䏨lZ6܀B}`#hj2Z:0>?Hf@'DmJaϧ P胿ecfŖtk&sC6rl?бm^%F֨E/vsFV'秨tr.A85%d8X CXSkrR.ȁwrϯmY?+b_8ǚ&dt~keL zByPaTqi igm1n _k&cB[1ngJ/А(7yvQ&nR&*. B`AAf1! r;Jn5PS$z֠KiŚtfߩ<\ԄkQVk #!-ԵTjZ AN8CW͓՗^Ltr!>.j_wqSVJ΍M| TBnJ&{*f޶{ 6yF65٣"rg=iO9~>EVT'JhPZPrR+/'9W_?.娬R/DqOI]#FbNNb as<5q/Dcg3{>\t VRI=rAj3e2|j&qGK^ ^0\ob01Oϑ/j՜I5'&Ca|`ONav5|8/׶h*'l&<]TH 7+)ؽu{qlߖ' Y=A䡝lٴU3wؼfGNO˻vZ)ѵaP_6Ϭջ9v?-g2ڍuD)}3)Z̧t\?ݥ X>AEEpg4*G8OIYLtr)G_ʜurq}2Ǿ36OC@^a` |.j@ U&\O~OB.]{SPR2K(|ddUZE NJqk 8yAϛԕWaw2*bVzDK6g"n] nC *MeުLnU '8Z *ZiIШ,`9L[aFF0 {ۼ_% 7|+,3I<OPIX̩E~"vxu~hǷ&?ܳn.ɇݳbTR9tmFu^0p&b_N}AQmrIKqYD&/e 4m5~,7ھƊF6˘(n:5=lׂ$,U^f, & DEEw{8:{z=rLL&C" Z^e4=M'9NZthEE{-:TE)σ #мε%1碦ubp[LetF,bqCp#,8s h2p]o$í9c6[Ly%t;;_[1nlɥ9džg;K'j"W@ GlT2q}=R;rI)|mة|043,؝Xg|'Vüְ# 0\۞$]s(BtտBU @]CH`< 1[F`zKe]cE8j`f4 vij7.CÅF [8!6Q9dݵFpO#.f='ޚrR73I5p|~ZX L\lˑ@ppS37 9GQIEi0r'÷b1XOy6cD{?n-*DB@P[dQ)#2 YgP(6J;Q{Vcᛇ? 
{!{m3NV@]l|>H-*rV:kdĎ\2(\PQ#ʤ>A~>%:_= 9)kmϻBEA(j#Jzp-Y%v< ޢU~!DPʽ2:vp2L%(ٶ;Zt}= <Η wN8)J*db͍5{3aqO3jrt246l\ )Xn[]!Æa(R'2z:~*w _*Rf '-n6`HfS^݌N΢B MJ (Odljdm||uZ  ƴ;1 bg ~~ z^!X';=ȭQ(6nɽL&?Er^z 4]E` .̀Mq>;Bi[ 8*感N][X>*C0GkoXj}c'FMq$)w?ܓ ]E z{g _9!?7.neȜ2un3J.P;WQzzeBTT*E"[ -MM!S!|~ZTlt᭍VS-+B؜Z)Em(i, gԋqA/%U/ւ@zN) qLu1n OPH8+awL.J4 P-VZnAZgn(.0݇0Vh\k~IյX:"yݭ3ndʙ,*^Jz/ aC~;n!_F^˱d@Ն.([6%;I1*c#\| (9){᧒T]`3&>˺'z"oG| g }QIs.bxX{}̸(K;`-}^z9r(u/y#s!>˰<nwF˜ga0xe&N+$%HAV:looؙ;Bˀ/ͱ<>qWsLpxJr!16Z{ooN-"UoV793Yhd14veΈ(e#qCPxi(l1VPTjPjDfj;[ ; [ [; ?u+qik(z3*(AbܨT,LZtc'Gh] &{MeP_HP9!3^c~fbghf{!"a} N0x@}ye :)EqX3fz3Q"]7u.݈ɼ5,2gZgd&)Q1d)!h+r޲?Es}$1`+JXjH1wMQ %hh2k@F F[{,ؓ^t rlZԻN.$>@ 5-L^hbZ 1( x9u+-3SPRA* FzC٥WʤlT2t6rl+Ţ%Ѩ%ekMox'py'̃/v7ܓ 7;j)zc)y,F{D B(+_[Y% ܐ<.f2x}j6tl[wiR٨Ut.>9zGkˣD݆\Qͬn`lӟfH< 6hҴ 9 ]YD\d!GR;|ef }zK@P5,*D&p(6G-(oͲf X^r{ WY#:ELg}bpB-A-ݱA@!⬕UܲX.(tXUjl/=+E XֿZEVk}y}C40>1&b/2u]]\'XTHC`/bB$ȵ(xU3\n|}_NB fSSɨZ4llts".TDeC3eɾs hE?f*_sXFC@[M֏+z!R2^8>>.:@ĢhR0V䬒ŢDF),u6rvXvE~>_-ux;?F/gx M K+(:B&KJhPZPrR+Br|95Esm8?G (&I7/Za4RlMf=1σ l#Vde9Oc+@DMlvRyRJiEzXzsv:!lCv4zϴ@D&YBSo[B}Y Z,Xl'j 4 `[X4(({nQ)UE[ FF~b# b(BFƆ,Nt{̥gυ֎RTJ 7+ٸ{Z'DO,ڶ~ny(G36h߿U% VyZ6نCA8KHW\٫k}] ee4BԤXH^jQ/=<<+تetq$%BvFeC3oЪLʳ+x_"vO:$N.Ff}8^)y֒',ޛȮY59>C B,^7 JLR~΢5صm6rjbѵ<(jbF3 S4=d~;uATH{-f1K4<.mC {z6;,b%|>p"R&2obo*6]UBw$hLfjd0r sӅUAv|&e#E>FqwPr23rhw v0z18{dcCQ&D= xb0ҭ+P ܲXĢz!~݈v+g AKc6E^`Stk*""s,Zx18Y[X,"-/4X,L&& v @n]EqtzCx?EED*,*%Z9|\kk)ӫ6􍿋`o[BnQ +>JM2 N"\G7b J͗PxGbRڑ(\.[hh5r,2%ax;_f>.FM? J>\lx~aote?^/ݵ#VqEJ ϿYTxkxw3vn]I&_G9A ,X &gcKϭ{l>yŬO!Ar;mIpZKo0[AJZ!* ꝈV~Ԉo'17;Vdf6B gpt):S9A(3,*<UD&p06og5-RH=S3VyB?ʇSC ~v[xw3 pM X6G 6jg1K_FRidL,'g2yik+-t,{=]mg C\l<(@ 8ip&>TIܗ [ƙ3wop᫰$ Hκ}1x򂞙?kSW&tL >X,vPcu&rb[*U0ȱUɘ1([s6kg͑P3:7qr$Clj`9-%@ H%G}BIm6хg|,b2QbfgGS[_e-2Vc8(Xfq! 
/B z]n2F6EfrseԊb1٥->Њr?EDZgتeة752'RyǓ `b&BnIÞOĴv+5{( ti-_ Sg㛪?4MҽwKJeoq DD7"TT"{ȦJ)m;I~YZZ<< ͸}9><ߩgҺ劣A[u4bQקdCnZFLjeޠg7)Q, OZR4x9n&r~o z4j{b׮]س =X)YyZzgfj|y I+] ͹ğ8B.Q_Pr#Y4vy92!&<Ϊ 0U8oeޠa*G),P~`xo;tXZoarLׄŬ 6: 6VUg(ۯqrgפwYΩ߯ #-Rn0ᇗog@_ R8\MT&|BK n|K>apsAꦙr,uAקxyToc^`u,F,\ө&tM6xúSv͝ݧMaoTB!&8TYX>]p~4*Brֱd*}bW Woc-8x6FJ& :%[f&`/J2 zx>~:}<*DW(6,2JH44d55XSTbNRY<hĞR.Z9 IDATqG` h_!`B3?ZT\<ȉ%o=GL3?eiDO YyEczo\dļ2ësn^u(3'0~E:-ǔ =vdG\޹)ݟ͗/H!|B.^U)st&[8uxu} "jlqa Z<ƒΆ ㈏K B Mxj4&GbqkLF G+@WlX2\w<-XB":x8YW\{_nZF\7A GzxTst{'}}CiۢG[\?Awlo ~{_w~oƽl&-56n8-R:ƴ.|M84t[vcb7=_A!:8Md@j,,ltAKa_FbH{/KiEG@ ֕c8Τq!H#;" NdҺ tI{BTP[ {~e *mLY!g*,f嚸Icd#1F*վbUXUG#ȨS@O)D+60R|_^{Cl&pǥ]+b3G/$ |ˣRq.׆FkS;s>B>j;?!"1 n%j!;!K۞8eNa[8qlfiʷdRJ*? ڲ1w2KMƮ<Ĵ^!$p lm[^-ebSriZӁ%/9&^ zϞh& !f(K¾J(m~GYݖ?"x4nn]ۢu || rV>щJ0.: NveSD{7aXo`KIODt&1FrٶЛL#r Z'|ٍJѥੵ{g={װ8&t+ֺNZl4aObzsI6Nz w^䞳B.zރ[᠕ vʚJA k*q ⏏u,bߐE bu]+l&X{.GCl[St%yk݃ v9!r#|6Afi" FkݟiU׉˷h aQ̖ j> ;tDlWCڞj77,i5j>BJV~;Q!dJ3ZǑ눃H2I&26 c<4#/.?Ho AS>e+J5p0La00 z. MGīRMQ]B -);t@:GoqFU3)D'ɱL\wc]K1f7$VS^>e[vm2ڶ)eU=3bֻU[4"xGﴀ?/b|G~uog1T8wƤ0gB&$6 ;:+ &GK <;,KA+03n!V*>Y*Q.e WB0GkY{[}<~wq+EбRa @vd#Eep4:Gk|]Дiȅ4ms5n;l~(fBsNeX7U_ٌbd2a2(((`֭tI>iiiXYYhh4T*jX% X+66Yhg.eByx}Ytt7']"cJEp=60xy4@ NpwcPBBB=)LP52+mH)Bf:|]x:J]hxcu+եmy,qJB;NHr>M]]9wl PרU8FL5ѕe7Eijx9j[C zu+*CuNf,z^:V]`Q H'MteiI*_LuG[٭eƿA ^O=B^|O+H 0a.id\aicE`r 9#ʄ|K9՜1-9Y6aBJ6oz6(QXv&n7ׄQ`Tݢ\4@PwwEddi4d1j|xSUyD-PJ{ڼP B!Ddڑ0$WTȧ[T9R6edMDt&[&s9l+MĴ<վ0o23f!f _ n~O@ !D nJ(t ::XNφMѨUx9HvV,8gPd0n!R2?$B-QNVhg%j\I2*(A ɁT*c ڢq슢W7-oۨVӺ>Ζ)QO/0Y}2y눟W{$G.\'j+mS!~U*.OK0KXÍZzV*3bbR&|.vZ |)Jǚۡj(CC| {Rnf09l uѻv*A,F9&܂hrq5 毰d& ֹ0DZKvZ+DQ!MuV_ 綕amZm:A]jj{Щ 4q-|3Φ`lziZ>wN_=+G0tX͹3ܕ%1^n$FgH3#(=+apS> lbsjT4@džΨT*vO4 Jv@)0Yu`|khԾ>VpO-U(q2C}&.Tfo"ώ QIrH;X %T Xp$z*_nOnmOmO{emUͬ{3t^$WVߋ<֫=z>R>BTxb1)`չRYE|K5]ϖ\}):7(wTz =*?İvpfI̭6.'+?f8q}3NĦx*&Qs5F T XBggr l_6#$ ׌$(嗩o1_2Oye[9nD)7 _&Oj2&n7Wa2zO]`pLO\&;ta0u0_{.)}Q^ڪB4z6)$Kr¬R1d嚘Xmo tBsњ:uU]Gm8w)V*ͺE{;(_ve)A}M5 EʭL/狁Xp,ZL0 2|¢D%:j?=]M3y{?X@BOЩ^_3ǼZ3X6>=9 XvSM輛m(uG);ŻOyq<k)Hsn۫syGحt\M˷з*S3fR )!* 
-UfCcu8U?r+f kx[ss r?GigK]ok- 눯˝<ܗZK{BBWz_.SiCگKvEXDhkh*|]5 ĦÉ,Ndd%]~l`p)\j)A0x h(@x&G V9-٠ɼ`&,6 y13=vcCq!\7ohpl C_SSfx5`ΣWBڍۋrfƯHҮ,5;1eAr)oX7{}kV=o;6cE^h}*$"jlqGE|[ ~-\T9'.W9h GkK'":\rwHrf>-:㢻皐-ɼA!PSPm}}u-=cg"5&,3皤{/l| "7*=@þPԾxmL(<ӪDihFg%<o{g._&53ڎe}5sͱՂCSPXmJ0`.>$Ӫ`qKF^\~ZnvR^6!9#$#1F?EWkLlU3H8 x.Eo!g9Ħa0y\ڵ+6sB"Y`K< ,%~rmhԺWNԎyϴP𷇈,Lhҥʾ'>̏03'c:lR?Ye\-NQ帩Xv/gaQ$Qʦh[ΰL2K !DS?)Q .=n&DӧM׵MfFwcOїZ\4'!-$#D%`MMđ+ASH8nu SeT@,hi:v>QRae5n0ϯaUuk̞Oei7_Ljh`6)3jewODMG<7M8Jw.T\C1w-a%,6Dб.%XתiUבu6.e3L2I&><ն_!(K6Ư qL癿`t* jAMڤV/'& q\H2r:.SY8XMNS Hw^ ғAYu\n^fsloBӈs<`[CTZܼP`W15Oki}_UՇVq U00wXK<^0No,W!3VB_nnv:+fMF^4q &;JzPMFEw=<3S\L6r"&1YkuUZ 21F{}P@c>s!+fYeOбE#N X" x4GPvxjΘ,_ߕĦqvGge7Cf00Ekg23:`~e6tuCS.7]&\:p"iA fB~U7am8oY,[N&2WCBdxB:1 CP[mj`vIfp~|u+ʖa,`fAa9J1b$ ht&jůH=! Xb\3Ҳfłdd2QPP֭[ҹJщ4h4h4T*jRU*OfY {ΏE[>WynPe3ԫ=iΧt dP( !mCE9wv(aXƻlmX,q:_hWz9/g䐹{ ?Ð}4F$6{ x:gdY $DU gtpo 6U[3!4p|Y~/G= 0(w֠m=Øuv p}; !<~Qv]ȣcƯY{"V?BJ6/ƾL\/ݟCZ 4Bn\?\FVYG~<.$p"&1Y ke~ag}O?ld2aX\Rީo<76Ͽ@Fo0OmϚShze \¦uzsCy4V 62`mR\(ݻ76lrOѠVWI [>wLc{кE=Zlɏ*@(UBUd[`+uZ4͕=))ʼWW_of 6c4M jBjrebtkWvJ7mW]+i31r tE|\|9ni"Cu1HMI!^boF'o<PDO IDAT 6q >WKV*Yw9 h2soLFϩ-K!)*9t6#q[plcAto3 :H\}Zl6iE\GRh `Q[0Y4tVXY=e >!~ytUG4`Jd^}|Jy ER٢w}(W^p'.'xx6G棯$x8֘Q.mcg4%`!m i04y>0"6[0u-Pmch~;n Hρ+=Yu.'Hbd#hx;YnѺ7J :,Z,z J^l?2/ g߳k;nCչ?mMTyi;Us\R]>F(AQA]?& z ͆Z~t8]xX{h~Z BT\k 27+KSmkbG.\fz4fTo-aZ_^;lbJ.IF |\tq EAQ^sxo gl[/ϱ=t0|&VnۨCv}3o?CGᑢ's,X Ok x:f Y(,zm +Lr8*eio|=e=tC7_WvBTPP7 Vjf{L?t5@V-w}*Fb#g/es&>{: 5ժi߰0}__ X^"ܹ3[nrf+=fZ [fBF4OZ2|99={q-לd۸d'qCH)V?'~C7ل~VjKm-Pz-* ྉ$ފ1#$gd%]]Tzzaϫ_\Ja= % V0w¦{CdcE&};^#p'g3-% !DeJ(E7B7lb 6H`0~U${&3Hh w!d'CeB Ejj{Pˆ\1F'A w3㢫Jo9fapUU!*m]ȍ`)U~NW`@>_3k@HkV.T]vBTP8hFᯯe뭘ܧ/,;ܭgwǼmgxC-jU~2Þ/ ]*pz>%= Fs8p&!nzM`/yߵw*!7C7ޠc`ϗwd( wxq0X(vdCL2J]Xƀ}/FsUSnB \Ms?dIEu=" ?NXsc;^'aBTPoP -h57/=ʛqt^Nd0r~ V|X34eS^6U+ړGtD#Q 94x;nѶrg]ꋖ`mk/6.7"K[ޭxào[S*5S%uLjLa-| !87AfO]^ c8D3mr}: 19W2\k//-]s#*ju/JfR󸐔Ùk+p7`5(:|]x:($V {XyI9NݞI[w'y;")ɧuMdT!y=؋O_3{P|Y xi*|]sa1"c`UXƀ^{wH 7wuG a75ҍ=Qw_ wR F@3(ȃV#giCㅕ7df̊CܼWBQޡ# tzWyp~֥;%ƤuGK] 
O0AA~xȗw@sM$9h$":cљk p7/:Eɒ(I+A2E9&H äuA41b="{zI +}5zs8r2? BT*}X0t~=|)Y*)YyZv?gfjr{Jca,8A}/ʅNC=[ؒSD#щ9:NXTvI $0+吋%"i?F[ZC l@ey}5j2KmG$[Τѿ/B|d!*R<mhv }x97󥡏m?ׄ˚a)}VޅJzݠxi&M`QA\GL\Ssk8iBQyi`\Jbx]Jo1 w* YХ6?ERM!P }>S~nz,f&SI_cuk}4GA0v́} 0ZyV)Tx;(0YM%&9tsnGdWRR4.E30H BjԼu~ "T1}.3īsT-25we&OaWJ X^JJ*,6'bB!#& y-mC!B do {':v5*91+"Йޤh9:j6:LqUH B!n{`OZVۙk~Dۛ31jl)gb1G( s BB!gf=?Ȋ<:&3Y|? nv  _ٷUi :PT85|Y;Rqdk JWqɿB![]:-3qҌd(eb 8CiX;la!e(AQa?jy' Zzt){'4Iw?r\B!No'k5ZuW{.c@χ[BZh1^ (L2Ng^;\+Cʰڦhԝ߰9@s]Kt(uEB!{PYO\s۹0<XCv 8~U3zunʿ |mEg44]֞#R&uJu]\U!B{ 'bsf|2+N AK'JT@XĔ `EϬ@3Z0JP@.FS)1B!tH\JOcVup%;HT@X##: /[;|B^VQ)uNܵkqS>>> !mCH{,8s_!Q!X2^|}1ÂGq 5^!5GX|XeWdB!=p0Ey6w:>7v^c~~},rcfq?G;Qi,Q/{o AZHwdTwn ]=Pp9؀Gh mCƣ3z]{>;x"aV"Iy'zC{$C rࢣ3lI w|6U 8lhy`> 6D)ߣ4/czËߝ$#;ydȾ02)~8u)O_䝁S8 mC1q |aE>(;C06VLMKElh7/14l+o_K|kFWxc':o}*/o(k^i 4x`#ibePd*͓ k\6CIVU>˖Y۾gd 2KX7] 4K1RU۾t; 6N-qC)]vN5s ;Ò) hP``m%,՝M<_S8RVڅrĚ<ЃZe/!MKۍkKK@/K ^}Gfcdkh ݐ-Yss&~&6lXj.4z?8gr'Տ3?E*$۔eǭ3EH¿CMe hUri] UKn.qmYs9#2jZmG) 3to7zenlڗ5ӵh}Sٴ&bA hSVN D/FII/j*"EB!B!B!@(B!BB!B B!B!$ !B!@(B!BB!B B!B!$ !B!@(B!BB*aak*M.6| %jz90'8[}p]:٩++8\ ,e&^N[voWBH BQA=ł%k'2bWbh6}{p{)|6m)!4Ә"|?m-fr{TeJBH BQyHc]nC0p DZd`v xy{HFzxwTW݁|q/JZ"!',ᅔɌX|^͓Oxr[oCPN8XֽԞڞ][0f],ʟhwh_o/ھB B![9l.sc7~y$ ~}Kx}$%1~N/fݾ j #g8β2~ i=ZމOxbB,#_"AȘR|$)fj*.=Xvgg4qqb"SIX."$?-cQ&ƼtgV?RoLV˿q6͙w{sy`|{Lؔ@7=/úԯɭw^uXʱ"?dSٛ sx]ܿ1qhB B!*=Koc(0,ZB AD-^$b""P[AKL1 U&6((Q*AW5<} Zz.x$&ov߼oߛwf;qZ6_c5c߂G*}yb5a?㗿s7xڹ ki|_80r ڎ1ra_NnW`BN\ua5?:{S.,5S0yEGcۂ%&D=ry+%{hⲉl;ɋsoB.fM:59 8-v?C'7c"\,gk#"@(""ˋ;ٱbe)W=3nç>n]j0=q7<$s7dyg{0bI%)Kbp LqXB88cYm6x#,`m sxu] X>NK}aT>߂{,ٜO~N|jm%;0<8C4hidRHIIG`6m-kPDDPDD.+P9JX_y3嗧oZ>Jͺ|f|͋[vjߔ^K_:&\WGK,>#D-VF>E:? 5C8&52O62sn,xy82fR:~?~*:4%.=e+XY6yEl85ަIkVm6o`ߠɬzwv5c!i˩Q K@v:^=,{Y<וLZĺy\'oҷYn:nތWƚ\q9v^8Ē+ۨ߻ ۍb B!B|d=A7)YY^C) tcwQߗ]]FA[! 
YYzkvNarGS }N09˸a?Gvwlopqsgo1d򒫹,_?{sO#B!y:ٻpbuZ11 YqbV͡k0hupFwqowt?;}룡63Juts.x37g#mB!BOsO>  :5aQF5P LF5t63k#1LDr &,&j*9rq(M.F@Uc*,6;:"OtPXl6L0Mk  F3Vрkqc1`s81 q)udVfECÀd@A'H,V+&ȰDIrg^0e{ k3Vq\"u3w#KK^@V_v$f|yk_yy$cģw.?ZO1Y35ÏѮ G.K{oY,u/{z]ܳ?YEeKb/?OxAK^C[BiY>.^bW/eV%tg~_w\A׉bo!ZC:ネUG1&6V̟}^۟u)s?)!V/^z3w $ݵo?-~t{>yrJm:P\el!/(Ǘ.{y5`LNj$;2Ykd]ҕKwn#gF% /I]AںYP2R 蠸8DkDCuySem'b~"_p O6f{M]K:nc2Zz>H撛۸v^W1E~=g!w=_L::Ff]=;=*3-w~vv+3ixQ~6=yfygO y Ws#Ɵ&?w _d7::.]_ma_>w/_?zJ=:x22vٟeZ7>z9~D-!Skn?(= nWq_qϜK[~g^9'ɏQCsTUq|K½7bk~~Vꚞn!B!tuZ]e,[Xb41jqX1MǘVw' z&3a3'F;=v`8^Ό2/C(, <6O#a&=l}xz⚉P.?rV,,`zky : Ǻt3n~|e9phO&Cn^Lf;]w;?{7lkRMdn|]w0Wi2, m{ywǴ;ҵ}Adyrh 5#Mph0 F;=Y"J2L^"X*n J33%Hu>ON ;}k#3Z~sLsXUb׋f$죥0鬫9Ѓ:8?c;{eptv\.D2{X<ܵH'.0zNݑ H-54t/'YNW~,1^1)St4$Iaшb`lGh鎍+q\O*̈́4H(L2vaX';@8Gutd"P8@<tR(*G\_\@aN6EsBe4UkbH(ʽF ?mr>z*HF#u ̠ndޜ,yƾpWŒla;on ic}j}FGL0`qos<:[j޾M;Z $\!Bq2i(.y=`BCf(ⲥ XU{[I`+(%`tΜƻ=tt-F6^]̢V9uplŪjqb)c)0 R[̀j!}݆h6:ѨE-fȻ4u iJUQ`M9x^t."?=h4Yݙ7 y,ڝ\Q7y=Wq;y:՛J"m0`h '614+&,uۃo+Y&[7t#fLFeX#ȱe:v+W]V_͗?G8?nc5q6e{ *&uyr՛ԥ\RoӯLRBI]J] 64`./|Wڣfej5ۑ {F >^D LlS>jiOͤj&!CĒڤ`fDi:1j;ir kyz 0g[.1<6K>;X3Cj4sK0ޟF̢;{Koi`G'S}|?K^ut|\zk7T0X `P0tF$MSxipÍw-ф5=Lmkf:mqt)e!l:p1M>)e!l:I)1\e!s252JsXɑ.OgdM/%[-N^^أ!?̝%ddgōtJtZn N]7go"iF~]u%jelzGaJB0.dv\}3rxj|szƦqB!ByIG61XI6P}VbN6G՚uvr@DMlw, Z u@|JD'ObX /Q~_'NR?sG__KH`QBzxdKrbHxΔ3|Fj5ѻ1~XnTeO]|'<)TS*z{~8J,-ۙ}rK31}N.=^۶/z?7;ίTo'ňJO6bmhPů\MKRҺX>~mz#A{㵵d=T:1Cg'[_?f?x%|[U-j'H!B!nE[Fp\8MI1ԓ֡6X\dlcBF ˆAQSq"Vc*pn5B0Ge൩DA ά a%CqYFo23B8uc6hDtu3>JVA!&N:fHLvtI:IN^thX>rsp$}Eѭ`<42eAqafZWJ⧽g flbG2 z -aUaSdo;l0c\L8xr qh38r24i6KnA^ G4l pX jށb#7Í٨'tt0;D[ q',,ZV|N h}ii=B!B\ ^9b4cXȝs%͈S=B!Bq8.k^%Wͧ CqZ).ЅB!Bt$ t\A\8B!B!ΆRUU%B!Bq]4-B!B!'$B!B!4 B!B1 H.B!BL !B!ӀB B!B0ivcB!B!>]!B!d B!B1 H.B!BLJOOdB!B!҄.B!B\p2IB!B1 H.B!BL !B!Ӏ$B!B!4 B!B1 H.B!BL !B!Ӏ$B!B!4 B!B1 H.B!BL !B!Ӏ$B!B!4 B!B1 ]>>۲{CQvsy:A#B!t0ˇu]GQҪNKndZ !ΖXJe:0SB!V甠+B*DOhDoX0x9)g_65؏l<ӕ8iZo+VuyaVXXKXZ)IB!R甠4VSl|%ħik-g9/tHB!B 1& i) #ACqIЅB!:1艴Fo~sY!ㅎJѹM|mڔFBtqQ-7:=3s%y !b9I`4=ܵty=m{?ʆ-s|qu>x)`6N&Z:E,`ZZkZ?S8B!B\ O ΩS 
!S26޶D󫮩y]tʛfQvu΋JA%L1qWB"+B!w!ćqLh/*k9yzIxvK't.NN8F-}H.Bi'x>oR|NSJ&Rkʼl3Pvpd۱I$>} P`3L&>+ٸi!J =OtɆqb1Nkă IZ)w 3H9D3ezt]-FgK7 of6^T`$NZF&&;l)8i\yŸR~$U0X B4fw&YN{ CnvNs.F2ݖ)k8@P$J2(F6݌Anґaz$5E%ƗןBz)j6GWwNi2 ZFB!8?cu}[ F#UUi FB,m<}.O>6%nk.of @7.â嫸db2LtIEh=}{9Tۊ?Dń##KVpTfb6LwO#2U}a;Yu,+MUGg̹o-ano'@19ȟ˯3=gӦm%3.bND캦CvH Wq%fiK۩~x[o/a:BuCqEV`yFp~WrpŢSfO{@LAnZ7Rj;Dm[xhŗ_μEKX|)E>LcYz,|kվ{+db4bo~&2&$Ecc|F2^woƯwItxT8̉.OV)s.b/a@'}y&mt}CSa?c~SƘo~.* \ه谁+>-\* B!:O]=w\FrCiW,yYj sf:C{}(GO@֦gzFG`֒ydX'O{<[J/g|&g36ZOφ;i<6цڨ2SX6e#fێ^_,|fmt]Gsl>BigTЍ){.?wu qbSc*jYZx6m(_nӔ@?[MB~\Uh4YG rx`"f/XJ:CsG/G :t;NaR>%n jX븆JNOO܅WrYS&Kt7pWk֫ub1$4߿BnA.Warc}֜YPaVگyq԰j%. ?Ħw ܾz>$s~4c V=tᨋWU*v[ГaZKoq'o`Ʋ)wu̲lfj^~yj; 2Vl-%eEKh[Cg}5GzאWjTڷ¶^,Elvg BMl~5m] ^ry:*j` uȶ$c&JOXAwӋl~W4}PN[yC~l3Sjd$@Wx:[/ÑżRZY/؀%Ɏ}`q[rHҾ^Q56ʪHeof7/P̯^MƤ{;25f\8T.tp])Ϝ=U֜RfW5rڞZ:]_-Gx*kY1 -'W_潺=Ry_HŅ^ 6a(5 =9tM} ~N.FZƆ>B4+pIz.BSe&F|le!cKG:U3+r e@u9dqC|/WyimS7qBlJ7wy__2dsLVffcLt0?JgZ]v*rYbGkQYm]!:+/)Fn[Q 3 4&+.8.VtO28<6U%baRn]{)b{ms-D(Bt܅4fE]-ͥ*+θºe:syfê,{HX,,bbsqܞks NvEiۯ`Z4VԆjge3VwV|Q}o8[p\CYm=j}GihcZݟ2ctf6jcmE鮼^/zYq=B!4 Mu{iJs3w_Ngl,k o:ʉO04EH%kBU\{%L\+sx=i$,yb2kF/8r:f/fw$^g6eV^ܗ.$cG(R+t$Vws^ʚGNV=BMc;`CqsN9s X}hP5 ) xclx~]PxXmMo&m{n`IYpo'6͓[{س5UȲ_9t5gaTFf..f8 z7v45yYVY>qt6{7ćﱣrn,3O>|Y3kYeƠh(3<]X?B?gz杇 g2t&!z&*, q_0{kQKŲ2{ iJg]_sEӃHK+n )}ۅBqqO/t@Wu4UCtn2YP"?Me&IڹGVmLIq0OQIl^sSġ`5X9:Ć &GJ#i"4ZZ%A`wqFtWJ8%p'Ut&FV錤HF"&M%U:ce|~.טfLPjZg?N|dVUZ (&UgdTh PUbQ)fzmh^5FX5ͪ^t2`asPn[f! OJ$ux"V/ɥBmP37:8}&&oVR?dAb>-0cO²l֪F,45YT#1T .iĢ~kػm+5gp}۫rHcT t~xv!>i^ k&WlS``5ĠLGE91q$ M0Y:\LI镑al>qMCEIԿ? 
;&I00)9mN:>LgK3"z{ HB9ΰh7Ӥ~i Өr}XLjq:sB[?DL,f"@'躡Ynj:iV] #Fk5&b#?bǙB3X Fԧ1g82Jʊ8fXGi fhQy~CmOyL)RA8텬zgBj!`x\UAp&&jRs8r99WB!4:Ľov1Q>kb  Xt$[5]q܍y(J\%_M gI'#Kd!pݒ=ٯ6_@RG&n4mHYvA&)>hf_3z)u4d(`sMYlH UɳlHXM$bINtѱUlZ'Dn %9o$jF`eӪh$.Yy4m QЉFT4U QT 8O0R.`e>0 k7dԪt,J_8=rc@wV L!ifhkbR#-ľkY3{)dZAF3{Itd00 PKpS4sR*(H 7_P} !FyTRƶIKNIIQ &;Hcr0#G.;sOWUXb4&کN3+%3K(wr}QfUmngݵ5U*fG&WPh=økDFw+`N;bh&p|m2"Vq/]܌3brgSp E3_WZVʖĶ}]RkHk|(* U飾ntV6.*aB!8]&&Wer JyD`!:3vk//3Q|ިn"dX:.vNx+3|0[}sM/NJwdۚшf>b ظq 8gQQP@M[v5(F+Th5Fnv}c4c[*&׳FQZF}O7Qɲ5p~@ltU0زJ.:P0K92öAzHT[:Gyjk]\D_SiLd`|"B!;?#411+hF,/bq1=JlxGvVKp{ b},SS8/f‘8}zXZ]3NGx: nśLz;'Ao ?x`@R#$&G^DIbi MMHh^'/~g>p($APJ9nMӗJ24筣=]WsOxyJזЍ-<^AuB7a OW.cÌ,qaIz crrG[ٻ051RN*=rXD`>_ǟ]WFUD [;8OD"# y%5!= 5'TImu6 u08^o"j"κ Gxji)ًleP)UR[xXqk+f6b &S$S:{A[)mJnݥ4wBC&e_m3EW0Zxs1bȺJ\t- #11ibei vken3VHT)`ÚXd[j$Xz:]^9g{(s+Y73=}4EݝA쫸bo}XrJȤ`Tea;iHd˛Dy̚[F,g\5:jO][b|QfD+}Ӑ3g Bq&{Slzu^=Æ8o}>~.~;<̏}| Oh#wӦZP=omw1ngo>O'džcϏ6gG'O41yt-ykB0<'~zzL)9QuJݓG);g'B'1x kh& * X;ΖiADkDzW³ť7S0;]Tckma +*mWtNpo~l=NWxޒɛI$U ͂;?82&5uHɮMHSާ&frⲥ'|ϲ\573-nz+GƝT7.ć;8wUuN'Une6/t1޽ui;ȴcTF$~V}s}ZW0;;3(yU@N荛(~!yNMKX2{8Zs:00Pctīo j- ,*JFdh|Q{АZܑY9;t;ٵ+A(dѼ||nYB!>}$q#ð/xc=5GӉZ'M[9/ =IW֕^5uhpDQY]Gq Ϻijw HpoE uԞRw*{KRX)!M6&Yy?-(EC{~׵_fG9̙s"trrW-f}^36dgWe+4w@j6aH IB|v,Ź_P0:wNIi3"4QIT[(ɦ0IkUV/cCQ^@W 3!ߖnOhP%! =:Ǻ?BJXBT26-ÒЯ P IfpW3 (ے3н[*Q}Z IZj1{alEi0+7cq@l85EߥO@,`I%̜:/y߭e- ZziRm5P۝z׀ aY=]X )b+E2v9A.!:PK^~V<P0tY I ]\wƟ Vobwdǚ*p4ZkBK g0$ÄT2v ̥X> 7b0 XiHd2Qt!\Hm9|Lۊ9Ldks,E4yԷ! MgL̖wR]eS1E@@BgJcg ӁZ<m<$bF Ai. 
>g2)l_5GABMXp:k$i{(GJF۷=S_cx*/Gl\4"?A`J1+A%F%!IdvOȞv): kIYs.-"JCd)ҳSa:VhH;cYbo)և,HĸF0WgYuau8]K}E6۩ *$.3nP:E7GVVT8(V'=Ch` %:k4g XǴu .[EDw28Th0IdnX)++#:,҇I1Tm:S~"/1)$%EkM všUؔ_EQY"܁a#3wbJ݇2pztJ$: IBm0 LȪ5kTPJ~]%(Ԅ4҇X>#Jĥ߲Jݣ>ЎYT&>6HM_AAhw$_}̮6.I;?ܫݭ9 h;XÑ cvUG܎SYr A޹v;8dY& š.I(J*%We+'cv;[V)H{uq&(h5$p2r0#JFP][ {lbJ ՞]UA$*5*>ݢA>?`j%2r@5l_mr;a-Se`?@ӫbW d|].*j IDAT2~PFRШUTk?xYԪ˲M2*U*J`g HJj?- i {A" y6]xPwǺCRT*wͯd@ϿTFw9CJjվUAaoG=~LGaXa{oWrǻ,IP(p[RQef ǕJ:HGRRkIJ4k6^Rh9IHJ5_)IRFyӫZg%TAN/zi_t{T IRR(z[H }`5ONw?#?&P4xS+nw  +Fd8I cHAAՎ7ǣiAAAA:]}>lk_]8mcњO*P]wM&  eݎjmA,šAAA8*:P8mt&  ¿G90\AAA#JAAAA8:D.   %Gо    {ߠ   B;   B; tAAAAhJ?&4g:3?=tG,#i5S9\[AX 12:V~;.֛dx^!   ǡ8IӃ7 îiښV>h`YY܈G{`k5+!#sQHȸn;C;AAAX:7Hܾxɯsc okTGZgj%n OB_S3&lĔKkq95K-8zņUJv*=H$R&= ɳwt~'eXT:#q&!Z-X ѱpSR8   1n$0t*h$+:p} J ŕv% RK-qHh 1QD(PQ'a,#AXG5AAAaOǹC i08_Y#=p)@J岠%h *VJj%;XHgSH Aҷ3[JCb^ jQ]8D#q   Hfvt:][#A-aF4>;uv?HjJcQ6_̩߈~HYIHX(a5 ZZitx*"S8[[9 DjQ)vfղMC!Gk ]4(‰)h6Q7}45w~j$DF!t8p:DEEOt(   Pe&dM8{Tׇ>wNJgcFWb@NbX1Ga oc*?(tb^-"ƼL;AAA7wHB"H=1    r̫ phDwAAAw/AAA    G/˲&J8~AAA]IH0>~vOa:>K>KNv>5$|x=)<8Rֶs'xÝxw2x(}Cvd"ol'2׋Jh4P/O$ .`ւYBb3Q,,#r!$&Š#ZX,ב(tlc_™HV|aWu KA (ԇA? 
EXClKӟ7קG.AgKggd&d{6LQh'߼WtR.]ƺӺhk6J,+Jfb#Qqv1o)M$'Ef42`6؈M 跙L_IJdB5F*X`10L(Yi_|mΘN` v|] zCDG:/^@>8~;6>v!d9@oxdce:$R0U Ҵi1NƬEKYl2ʱHLzēToa,^C@rR;06 ٵjcT,ɴ3X| [Dkݍ)-],n:?6/ ׊쟾c+'f厡#ޖj0ZZZp:G#&AW$ VKXXJiںw}_9IC K1\|]($TWEcx[>7 'N_K K?~y}C;棰ق lۏ&|YoL aPia{4OGxK=5 |?R|}yO(>g ^9xR;iAܝ0A+vH&V~9nddDf;;3g},j)^' '#yA>q_g#qQQ'n砗3x볭} uvT*:1^Sn>;+Gi3L%@fm zk2Nkpd +f᧼Fλ,꭫YEdȔN_>x2Էixz`\OrXͼ:D`9_Z}PU+yOI%:{}GӗE޼46},o('sHR дa6o?(s U\nWw@%QOɜ>XHG|$ NZ&A#I b(^{M)y*uYŒ2^G?nBכ~+võcn} Z~ ͧ hϝ/(=t4V5#_ q(My~;|a>&x>c{Kj㘻@DڹB*YPqs-D:&Yð,{ nMf÷ڂ;S]b\_&ؼOvOwn}j-w gᎇF;<*v OȒS߀2#8W>/V`SLM`k9(8[[GΌ%q}T.` O3uya[XZי7؝)JZv`,g'Do೟['"m0,tLoLY18%Y%ҒwnDmL9ZOsyz8?೒eU|[5YԻ /[߽?H<zĻ^`m.z$ ?d`ewLj<;s %Z}@Vv%o<ş>Ëg#a҄,LˋĬo&e\^)׬m,=O}ƚ y>xc$\Mls^yk+|$w+1c`7POcdOn{y.̨WFM;{&Ɇ}q{=L4T=7-fݓ2~#\=2#vS<2)l^@#5@Ttv,4STze:H]x7O8|DhGśA|h{Cz$ e #-#%r,n%SBws^{p{:n&]AгnqC~=sy>#(^7p]W_OyE{Li[o~u)}9zCM{KR0gnrЧ-O=y{Jݱ \7 ˢyoɯi$ϼxqޚ-Ʃ<ƳǬ Jn3NM壍l+ pųp}/5[ZEcɂlwcL|.k=IP='Gˏђyi|Iz}lQO%I* F#~'~VQ*5єe% Fq5hƍFݭIq.~}z yK=}u$|;ᾟsYKPXK{zuWxrTђ7?ݚA]~dRV+-v'Us<.n(5's+3ht[ myvL8dY5o>?6teѭsc2tQInlzmKIkL3+>ƢuUFY]Bަ2vN#&'#7П{5l^!![^TLNwpsvjʋlrh+aGu=WRZxμ B @PMqQ9.jv\ɦg3XZ(^\Grj|^#Oˆ4U(D֦]BxS⪉8ʦox />6WTO~M5.x[FDZT}o࡛'y|,+>>} UYAyD(ɳNwnLgNFu5QF/5Ŕ[xZܶ-ĭƷQjx=پ ߳^%YNm4,rٝXL5g}K[Hw8HC{L5jF⒧>dS֊|ˋH wLRWo-Oq^r/?p?s}|{`ue;(XƠcLx#3ZKK. e)DӇq/a0iOƺfJg<}NʵS +7(}a&٧K)k>]/1&Of3sDGnaPy5ȝ;m4$$$dvng5_Ia-.2O9S/ܳzܓώb/zBUEY_@SS1lWO:J ݳ$𷰵NKSY2}rpj =".<=G@e,jFJ,A? cᡄHO0vZA7z5|: 6X4N_KG^wVLdJG'zy~#Ɣq+C-8Z ȴvgSfJEg[~{iNAw5u%ǹ$)3o` H`ѡѩq ֓!زj=!":05RMT,dQnCk;)NbP1'ݻv>$oՐ$nP:jp9,x? 
6 㐑ePFƠuximpѢTD:4#[ z]K ;zFEj4.a?%P g )ބJ!e4#Oם/+Ɔq'py1փN85Y>#?9=bs C20|^k䶏 zs5}#6Du" c{!EM8l_@ˀG#}|4.$[vrat4j%ԞSΟ!">FK *NʫMUe]ƍJӷ˩+2pr.ٗ`J;l^ap4 /_ʆ:99$Kk1;_C o!g J7ugrg2T1/wUqw03QA͋dC%Nh^|˾[kDuEc27V|Y~@ H>-*.j5.66~~ݷ:IV~,f'1[Xj`Ŭs'YaGvCz;iZAW Puo>z݄65Qyܸ<*:&wX_񗿣K@ ߵ3sGu0TH(%j%(;o4d칯7kgưd Vmi1g&RW̥Zy:'@,ii w7\>xpXXv5'ד.8?떯f,Yl$mOME4AF :LcX[D~^1U5ГkД͚ePҰ3 x܍m̆|NԠVQIvVoAH75JR:$4!QD(ٜ5ex#L$EjRNmСPpx#y&]9e|lrǍSMް_c'ddG,RBPը* QúC %*"8m%l^G @ިElΖ NСjfSϘZ2**w@KMl -؎RAVPQ*PHj^ ìu9𹚱t9xrj[}-j 'MCW fS8:DjfW"QJJy'1Q:!R"XJJ:|g"QՋYz>4cHIBJv#rMrz'g5Bi$0..l凗6Tjoe]VǞ]+ѩ$$ {*t*qՙ: !s%荒鑑ɀрR]&8+^v;vfk# ΈvtPCͪW~Z/݌:.]i Іvk'C{txxZJٻ|I웅>ikEբVu /c@9-yVt7L}5絒pV,Ǐ^䑷c7fh ktMˤoW0*q IDAT&re#Plo-o9] 5>_^ #>$ٌ'pcV )'1l͸}~ VE~zY"V_HsZ'"-JOɺ\) O6GOj桐 Ǒ!)?wUQK/%#fr_`MC낻7m/FCxu7RCLzW7d|VtӐ>-s7P&$c\:QޞukPҙpp66&o= -+@וKI_KΝk@s71nH&,xlv/>;ļx.{.B˯7-Ѡۙb> 9|;e2$͞H׃y+ -V4N M 5Ixc!_pc|7.Jޞcq]S8w@Kf,$BHoh:V-\ChL+Pl-ǕAJ?hvfܻ'Ck[.c]xH EP?|:,{nW oHن*̮1-W_[Tv-:|E vN\uLy{< Ƶ=)j>Xu fEm `BcDqq>DCI!CIdQ&:u( r\y޺F:#θFG" ЯG"6Pj%45>>!Y05,{]WGa<$/g t kXr.2w2ܓ $Wxo{^ð'{2jo΄']$r9o}7ǘ9Zܗ$7dg#+w<{bۯ]%w\Wx;* \y5 ʪ?SXֳh ƈAhţ~+[ Qt4D0 z/%Yi#o䎜 ~&f> Xz+<(al'sRCh9hV-Hz5ͺL.t Q:ZVpq]F ?30sl;fyT$;W|8jïxBU"s,~]<|/hw #'%=V9,:'ُ k /B%,LtDj4bmq#uDc tSUۈ;DM)%DB Wť%bWb+x!dBnk%.ˆ"ࡡ_h 1!멩o+0G xȁ=#PL(ԡ"Aod>M5e;Hj-QI+X뭴ab̄ieV 5$"HMx5XZ(5eDɑzCSf*E0.2SƊr2zׇΜDMUx4TT5Sh*'HHArX8PĎ.cZO j"RgΡ&5̈́j8"B4;@ k>ygyҀDcO WɴgofG} !hk0k}9KC3~6W2RMAYES)*SA&k -nT:#Jmd,0h> x‚VdbS SxTI*4*tęP;ks PbBwRoCIL Tᥱ*xSrs9Jb @wyv=i1.qA1b!L+c|YD472[5“S0Zǧ$!6_:"M1DipT(E-54P>$FjVUbuDEq4!%moކDEB KM=s<ѡ:Z+hVEk6hD";'J[TTبǥ$-1 f7 } GKņ!!DSUnOu C|:1j7ֺZ aZkoTb!>:oC)؄XIF\{>MtbPjhHL6K}e5D ^G3V *C8fPmJZUfCl:e CHt< >w+=~3fsMS^Ep 5c6mL@&$҄92 5n*|(4LqqT-XZU#c67`o }TiiP$T`0Wskquw -ue Htl z"{?x҆G GcX,2R.q1']AN@f{!{)4,wƇ!)WMAqi%P;iN3 KOsA<_>ꞷ9]L   B;p՗AAA$   B; tAAAAh˲'D E$J%JRD.  bGNΝN'"AäV EӉ$]AA(Az"I:BԚC%2,qT*NLw|w ;`8Pl'ۑ+b;؎V9o{H=Yc;}"6q߉pwq͟ I$ FOAAf4 [q~$OAXC~[AA'GHn"IwIHII9&AA:ER.   
GhMAAA   B;pčKr0sPji   ?Qk$`?nVVui/gUB Dk^ABZ$pdYg0/Z'` srRbѷGwJХg9M9VCaa)6Hگ P_UCIR Zk^lNd%GR*GVDhEI$zھڐL5^5xPީBi.'Cg2bx irKSꒈA! ᳑%F2:u$F'G{mAKRZ4WOc1ex:&OU۠"h"IIbwvjI 3IOO>d_ ߼?2ێcWR ~gNgZRV*"ob 8)>NC\ΒdHֹ87{+SϢwjQk$ @|̖sN]bpLZw\_te6=Y3m)Y\A7U.şїM]PXpA1g]렵Vؤ8ԞZHu u)t0pH ^6%n2sj\ZWF9Rb: h]#Yzrq+Mi@uZ2|C;S4]*M;%Ύ:+}`lB gHjrP$Dwj-azz_cR{nD1nTU9+hJ<*jm XWBOG[֫c1OC1KVJ{n) cٰ.ݎ&uSMh=O;޽|vPߚɶE i3<&EȉZ -e4zZiU'ӳSyX$ ZX0s9UNN7 ̟9Ut>zxs&>Feb?8s ~Qp(~i!~J2Mu5Un$C Cnjw5slh6Z|)}G2c ȦlVaAn=!]q˗Z=Č:'۷Ƚ>&cܑ}g81ua`|\~^W'(mT߿$AHd 1jlNZ.l*]TOZMnFm鳑sWƪ V\nZl߲kK\lĬYVP5k׳pS3 ʒm,)iQ_]<+RLІ'ѫ['UXu,_ ]nBņxy>9\>_`QdvdZ5XVGOb% rq0=s?M*oʛ6rQ?6;b"zG"vkV@Z Q`dO1Lo"ZJ,Yv[:{63zH"W;!MyuA(n"unwM0cCPj蔦jk#=E+.aD?u ]U~DK[QK٦j&.ݘbMT"̄+4T;,rۻ(}N<$U\@BHEkk굗gwwu^o{/WETDT H!2yR|j?Ȕ ~ւRJ~'{wz6pUܿGW~hl/ioj;{_)36B#ӿJ3kv~ōwsd3(c 2qj CfBH 31=e^8Y9@f{Fn-%gttka+%YH[qȑ,RPC^n.u "i|D7|o>uņSd2㌎fOȚ]d2C$8DӬ!fHT4Aw IV_76F^L -3`c-32x$.FYtv O:F25α~B21KL%$>Ł c֮O%.m~x@8D6n5=0{E,^JNNq1 w&~dS]C׉3s%g;AUs(/k=MQA6\BRz굟Nb i )qF& 1<>#F5 x~`cA@/&ͤ2 O3+dau%e~};o%)2}SCUa3h0;w,dj?5 bv>&]E'\KyOROP>w*x7.bνb.ok?c@~Y3 |Kjv{7N]FkUΫ.֮dQɵߢ8pr&@Qxf|#,| %FZA?03Wo~O_XAs7uPQ8uvצ8u2`E{\=<ߟKƭx@N¬\̿[3WejMTD/$5|2M: 5e.0|KuQ .M>@,IS/, [v=x2S[_BIS%{ORxS( x%$ ̧9s!G>DYU MegA?qbU$RIf,$Z(pB_X-ROhS06ZI˔ﻊv=SRG[s yɐ_u8٬lheJe>KMLY*I|<]Ǿم5RƗ{ʢ~釸lIΓI˘`KgPcnZ;Եԑ<΁*+n{.b:j2]c_Wa̸BeUr+.\쬽PI3IDAT#ep÷ɉa.TP7{P^SIANсd~ gx/{vX !YE4Nj4c1*s;EUmh~P{a0$gw2qVo! 
u[ -s.TߝAHzgAH5\`ݝ6lC43}=̏~-ݭh/9ÃyLpjSܹf#MSȬ[L+}7"fP﹙Տꩃ͝CYe51́-RNm$;Ѿ s,Eɤ9n;}Se\ynz,iIុtxddλ` GyUl:4@,dfYnb%d}{}]{Y7ad5<#A/ElZ$2<>,RUD,TC?'6S%_c+i_X wn581y]Qm%?0eL0>s=Fruwm?[r]##;xN䋼Jͫ?#w x7Xa'7^L@'j&b3_\\pY*Bzk$>/EVyL*0 ް^>2LNY3XΔS^=2DvI3/ej$'WO㒕 H|Dxsb>=ŬWr5<őZ._ASܶ@/cP7{)M<=QúeI{~7, -lH`Nw6ْ^)]ܙS;tGy(ܸmڶ7tӶd1e9qpW{]O0T<%3Ƹ㇫bצu<ޭzsy/]{6k'g26'e<3ơ~&=OfQznFm7'G~=gQk8{CyP)M裫| yx-k6;$= FٱA= ٩y/aJmW+>'١<,^NCyv]:Nh!uijhoNk/TTяra~C$%y |ڪOH/ǏQʡز_w2e':v$ɔr}e*y8o㓸z@MSXb%Sq?CgzpJa8Ė[}%wbFȣrcA2=]ۨx.O +_z}/ۓKYT=$c{a|Ռt};fZ(\9L8B#002F,;ŋho?EYiʹL.z̺p_0[gk{/?2WÏvV&b!#C|tCMOq|ٍ|cU O#licEϿ{s,>45L_I) _>+(d|bp>󧑛a/nf3cJ;SZ[_2<>=@@cI"XtI5b/~҉/1 _S}1mg p?߬nkT!!!a} 8d '#<>әs?rg_ }+,j,Fu۶Z_\-W-  cĪRXM<18> & K N's,{ _bA@R:f-\Oa qLP66hf1>6 'zx$DŜ ^C,€\Rk~7)5_4疌s`.zHb^J9yN UI0G&evO/`+_l99 3?DfHK)H$`x,$gqĂN_bf(Hski7Lť3&&ck&'8c-t>DzyC(']j'USX01ɔNLJi.gnƋ?OT,mde% &6[3fa۸MC\z ‘AFS3߈ FV|޾0Cѽ?uX3ѿ], 7asbqbA@H@|ӫ:#/o,AlǠb21_ߟ y(>[}&~؉>֚g~{FPP4:}bgVz,f Ә_",3u93?9׭Jk2Y6#NpK?Tgxf,KuOeqP6|`2D+r Fg/CϡQ\}%s$(Ȟ`}w} `n󦖜}VHs%ӹvmxO>=]/‹ڸ[B}Uy0Vn`zy@UcEY2oC,?`C{gC T3y_3%g~=H["|q&_8̆G.|UܥT,+MIEE%S\e˙RMv":q *=g>3ۛl ʨo("or򨨭T.eu45Ta#;rm,0(E唖T3irYAt$:cI5SQE*]Δy ;} ͵%VR_SI~*zjIgPZQKCm5u445QƦ:J9TPW_OEQ6'7v{3E4M 
!WAۜE,^O|"%htqͤ445Sl^PE[R`/AkV^$IaK$Ig~֊$I$I2%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I]$I0%I$I^r‘IENDB`rally-0.9.1/doc/source/images/Report-Task-Subtask-configuration.png0000664000567000056710000005037313073417716026500 0ustar jenkinsjenkins00000000000000PNG  IHDRIçxbKGD pHYs  tIME  IDATxwteߖlM# IWJj-QT슈DJn (-@HHoݝRY0 :s6Νga""""""R"""""" """"""@("""""" """"""@(""""""Wkqs`#? 
q8t(| ~!F-iۦ1a>*R.LqC`ce?ɸ*#ٸf 6lfd (OŊaTۂvS2i#lX0kHʁ*}d`3q{פǐ.-_9?b#8*4 eDDDDr F6[Spvowԧt߬++`WuFѰv]_2؛;G]c)f+Wz9+ zBm_93~:Ro_ls& C˞ 7Yi' `&Rmml=u;&}LՐ3oO#d'J]ZvA禡)7b(s,FDFNb29?oe[<&|צyTwzDgxww5mh[naLzɫW?77s?Og.rZW+¹R1 #ܹ""xM0cƯ5TL%`2OJ҉6$/z9|2ݧlgz>3C]̛:K''DSOپv=}yޫ$yݻLl;GӃ0n2vJ<pɿ)C_}|7N ;KB.F>-^ׅw_G1X&SI DʾffvWթV0 XC©M2^ŔIʯ0a/T#;YHo9;O\)&Z+@T"iߞnU)ui/$?AQ+[W+[ď3>Mc>*gqӐ'_`s=P7,/G//nj5pNK/ p"bpsjrxj>w!qyn C]YodM2UyOږXcņTsz)2~ ~5Vnon;Y'1U7㌧!/b$o{ZYyɮ_ig%0po>ς68)󿙅aZ={GrmrlcͼYn_^f$dդM ^k|M0ng%4M(""""ǓY =i^xs0klܝLE}dw<4ZY_*RHW*qq?"o*7}}[8|Bd}ɤ(nj<='7I=Z*w ]1N8rnk~ vCm#xzPc| Y}{0FՆޘ1z=FEG{MF;~^t?ڏVu)c$΍zqUHC-}7izV:OYDDDDsI*k}z7~exRvoݛq` ^ݖJ9eѳe iI߶y4R~LmU+f֞{ÿ*҄+t;qtXud႓.4SƇȁ䅟Wa0w׼=e@xku|Χd{Jי̦_Pc/{ŪBw?X|_7%:iVCkϸ?Z¡\7Nq՛sotoOnx3yWmLS"<"~ظqs@X@]M#eNm-ٲ;W q?~Nܚ4Az4(mLhAFuu39Gvqa4¾?8Θs(%th׆u*k՗ ߳ӑS&er/`Ocg|'|:8$6(Wt[ӰIڙDA&u K tTY5{ׄZȞ&^%3nOf!zc"7&زj@k?fΦؿd Z@kP@;|<')n=n_H})Ghz4jւZ4jș 39W Y>aIU< ="ݎr]`ɑI0p[:BlY8 N " Wv >e >4ɻ,eaOIOڃ[Jjܜ??""""Ra5i"i|epw55x9+7Q2]h/xpǶ`x,_NiVS33>;s]]_6fesx9h;"/0x6?V4o'i3fB=6*-q1踁bd;?|IB[x~LL=d07gD>Op|TƚيTr7GDDDDJO ܳC> !Ugqj !߳$e ^ܝ~YEHIhfp$;bW=gw OmP_:4=e6%ogE:7qKv7y)g~$%izS(=<ܳ~fc۰zj2} -޲Z _Ir~]p?,lY[l nY$eBQ"+)0f\g֙} %-P#?Լwqc?""""r9l*tvl7.j gvα츍*y%paRVL Y)]ХQi82q^ݬY ,Ɖ& F}h:b㎴3h/zlUP:pu|g{>7jҖ } gJ֥xQꅏUNy %=LbK\&bă+­ٸ/?8xαxac$GDDDDJQ |S3f悭8#s@ =erO;/8ȪosESU=V wR>]ѐ"ja|V4Mg̮' aV](.̀b:իV́4ӣqY?ɒ}i C8yFaȯ|υD~I$Xo_2 C\[m@>/߂D|EMVdqsx>GDDDDJO q Og0gZ~ s;.Éʢx ~}o:V:~ ZdYؗI|55r %axe7$tW.4l'eQ@fbDҲs8_&}skq}|6?m?98 rH=$pSꪖm y'x;L5˽*ar|ƋO߬e_$$g_~;ne[Yyk~;lFIȫq-o$m,^^?-4BWLcyrQ{ 6$}ϤeSt,Z /;<feg[Mys=sNЊCsWA1͚זo7gFޣUb)֫'&s_@-)Mh&;uz<£~1}fҲb킿X[+jbRe:1i2dmeᴭ,<=VOKV Y[ٻ^_yJ6xOXYL%4qc~?""""RZ!`'r 4j ?ȟt\7^!/_ Ьukzsh|32?laR&f_jF[{СV W#T#WcŬټ4Xè{};53No?;g~ņ @ jz ~۝Hjk@8uS+k_|þSՃ^Y9A_Hyj^GQ\W"&{ks5k힭jbV׍;?F˄V1iݮ%( 3{ĸz|rn:4mӉmN>yQ1^}džIV6m=VV9?kg 7+DDDDjb2 P3`s}չs.6EDDDD.`lHP}O:T=J©0(""""@(WkYʼn17}-S2U+xjJ """"hi :^7wWIƱX~5'U細("""""} {dx^g"'ǿWh7?g(Up- db|4oוN-*kvPDDDDDPDDDDDD..Q BQ BQ BQ BQ BQ BQ Kz7N'iii`ZJDDDDD b2"((//?cN'G͆٬D+pGZd, >>>jE+X^^n3+rꯠͦl6 |@h. 
fnPDDDDDDJAXT((((Hi >خDwɣuEDDDDD.c֒U;f̆БfjQDDDDDVM^ZJaEDDDDDJS 00[̘Ԯ""""""'3XEHUDDDDDD.n[&xcy;"eDfe^S\>[*"""""R:!\328jWbpa]EDDDDDJS ńV)eJP%?I)MЛZ}l֓GQ늈\LaqgBBj@jj*gnVӈN """""" """"""@(P['3Qc\vYϸ~]ݖ!,:V G1$"ѷ;~DDDD/8a,Daߒqlq{~L8a"AL=Kua ]M`K%n_xcǦ.Y"`*pkW{s _ͼ궫~CU羙 ?(^/ðɉ2a 6 ,%Sf]ҹϼM6[[Ve(( SqkS,Dݙb`bGM)0-9v`@&NwGG pi6ȶK9|ˆ1}j%Ghbbz2ttc#AD9Ǧ0\ ̹+fADs7Y{4wg2[XoHǨ*OgFDՍ!/gWS(z~192.u4_]<=)n{#eGWC1ǃ9w=?O#o"^ԕȈt4e|}wlgAұxNKԈh|ypb#yobǬ8""""Wc2+MP FtX<}?bHӿ $X ,7'_ʬqm M*2bb` 9P+{yxU>amV#ƒa ڙ0{4 }N>aNb m8ӖFL}qFϨG^oṩR _>p7?CV\VeuZ * $.k-z=/+ S4`S?0"#:qx6Ɉqx~:"+cu. 0<۾cze25 ”}}[k.9_L{ىlh"_LkO<4|q_~(CʁēKEDDD-N~HB-l1QSAsy4F&o%hjxkNڤ|'/գ;q^h,q) ӱc:dbBݞ37Nt0f5㶛8y?d~Ŧ+kCUn6a CKr{~-CߦAX_ej[=h_Lyh L7Rf!Qv"~i腫XN]DDDSWbtzu:|ƽCMlD0f~tW"FcYp>d9܀rmz|g C_, r, {S瞗ɬ3&ًmbyɁ4 (>񸲓I7e9Bx {Hf ӅI} |l*IiC[@ڏ63qeV?$8)Ӻ…-6VV-wg_W1/&y0~,S 1gwQ ?+W.s5C^ύDty ֠Jjst-\x_m;[>'s@}HQ2^{wV08}B)t'TnrP\sʀBB q,/?OϹX+`7Y٩قNbXrM~f)9:58 IDAT!8"""">j]Y3OL|k` O{k!qiN ̃Y|+'֭0иW$򉼾Ddȃ^ί2p&>XxNik`(?ٕr!&ti÷,co|N#Sۃ@IrL`o9`@2 G&Y j nwF/IRzVq(WOfVđurx{{է->i5ЇCY|/=<ݿ%:ռiS2 (Ū@VІdkS=_:s^q%1CXuEc_/N$wd5 Zy'mfߝm%w rw/W_az&[Yv|V >[ 3T/o*|Cm sc9 ~EDDDDD*Mn W î0("""""R!bƤ~)=НǺ.B' *Nylt΍Y=+bQ*!OX6e:e꣸կ""""""!kFZv@*"""""Rz!X̸. Hi `0܊"""""",Z G;I5"""""")zStz#]s0q5 uDDDDDDYM#"""""R:)(y3)Ov%Q R1O 6f : C;?y1BG7sإƕn W îQ%""""rE <`޼[!:G3yQs=?ez qe8 ފߕ{`~ÞٷHЅ'g.^|4S9lAf.w3[i>lŽJbɘLϏe؞q#xժז`~F>)O6 hBwFiA%""""@x1 S!tQԇL}aL ?,=yQ=M@5 fja 8DhbN"紥1~> pϻ2z,oGW,<(^MMUj9l 6n ÃHw%2oT?OZ|2ur6R‡#?axL_ݗ7CtsU\V3„b>)N3ƃѯ2{T\FזŖ^R"]_H!h -Jd,9 bp+\)Laq@ppZGC>qS0l|vO*{EDDDDD..M>k6wɣIDDDDD.s!iPDDDDDDEDDDDDDPDDDDDDPDDDDDD|1O 6f : lCnؒwy _#"""" """"""RXc'J0 |7D;;f;&5;WxٻiTݙۘ; >\T*mn'$<~.f>$+CG2J|f|͆٘*Ѽ30!~&pݗ'/ ZiO>t+5&p%0;߰'ug+t Ҫla\GfƸי!\)S=^yb߆_˳bǡlܶ0#Qۃ«WQ QśA ϱ@e[sc0cL=>'m[5nU>ew VJ+rf/:V7û+1nII X2f$c;4'x|*Ƶ%|R&l\%X0&#m"錋-s׹0 ĒKp-9㌞QXMf.w3[i>lW$=! '!؀CokP%mk ^f?*XIۛD5M$;ps(F[<[LAԬ _堞'\ɬ9B[Q jtgpfљN]4m''oߝ$w a]h>Gn,\ R/NvE<ѫ=Q6+ 51_TL4)O$tnATd,t~n,ݜ3$-zkf)^ʛúA/GxkE,%3 ̒3p{n>MC@֥XyfKd u.%! 
g-}|x= މVZ?e)#zӟ2cYDDDDJTjj*g#fA.Ue);WHAt̿ͱgBhuxn WF^ea+\ZdTDDDDD*KFEDDDDDDPDDDDDDEDDDDDȏcJm"Atm ?]w~:O K"ڎbm0wŘ;ADToZ]Q $aƎjoƷ㭝-xa /˂gyD EDDDDP'~[ЙܟGBY,W:?ҏNDa+ ٝsEko3f bce=lǶς$ p紾Dۧ(Ow>=by7x;s_LL""}#6eK35+ggu'+w1ĎYsfB.hn? f讁4/_[ȖykHR"˜zWˆo8* bdXm|L'YdĸMDp7?CV=Vŋ12Ϛa>>]I,3XGڲI Tv]3m= aQykeW?Fz2~' ̿X",7ХA~9n R'; hO5,T͡ ;p;s צRPBFҧQオ4'Q@\ w @^'J=y0'AgW#)N YHo}d[j9{2sq: Ɂޕڙ/RD@dKLDyNL&J1CҡX~a& mplA47}mxyɉ"]_>xW{"#9l2+%>wc@"w+RIH9Eer6<-QQ<k=DuX~L~4>aLL>ncskLٺtV OV ̵(V}CY|0fKU:Oʄ!K\RKJSd1YkгK+. dU0,RaPDDDD.ZTF.o==Bsk9dZVNQ.BQ ub>5(^!'qHr}{s6k l;N8nVM<1/#"h;٧~cKDQykIviX(6[[V~5sCχ;)GƷ㭝-xa /˂gyD +c'CߧVyt!!&'Cg0YJOd4Gu(v& m$1O,"Yw67DDDGlp؇L՗DH̠Xzy~{;fy hp =Af讁4/_[Ȗyk4xDDDDD.2=$xupU Ȱ ߲Ո孹lCv&MC76"#:q{:4|#Y0Ix4 EDDDDD4@3j Q ,]Ma66EDDDDDt+{sdq9DDDDDDh4 }7Gt/\լjsλVA>ԛ>&5\4Cxˁt*nIeAQ ,ME L^w%vxƿ zT(^U&tu":@!«(faa3!E6;O6Z^"""""rӢ2{ybY寻Ǜ9C;Wh* 8Ȅ:""""""WTx]]"""""޽GU Ln&b6E,`JmW>K(XAn݅*P "*(TZhYkq}jقB [ D: dhK/ B0s)C$IdJ$I BޞUEՀTV^S5C C6>ČU 8WԸv$IA(?r0^Gl{qjXt9]r>oF6,O֔Le98]$I246B$I$uRZŽW]{6[?r }++7n}{ڸȢ #}{0 C"G>gQ$IRe 'RX8es559W,\99 ږIeU0r +sn|6 vl&I)w(]K$I2,R)uquh@S&ie)=~\*CŇ4;'ּ˞9}3kdqL^.{@)QW\$I^2ڪҸ[k[x 9ڮM/]yUUTUτn!ٜ ϣԟ_/*I$ l:|!W}~/^G ?h{H);gMqK1SN6!=qfE 5uܒ$I B}$[^Na@VQE-E 97˯Ͼ ؝.䤓3]*=k{%dL@4!L$Itg?UHV2olޭk"9.V7?Wp_ Sf4pIDAT>3g%Wu'd Ʈ9g 9zI['ݳ_7a$IA#aآbEEEN]I~ELZ;Fl'"I$exkivv!W>JLzmF~wP_͞}_޳YЗx&?! b* =$I23Gb=Gf֫XOzy~Ͻ+>6M?/ey_ gߗ{qjXt9]r!3ZȆɚsY>b㉐{W> yvw2V kz{ ě9cڋ9V''1my>ή=pɉdQ6v|@b#\s%SM9&hRaH$!O$I ]F˯ yb.cZ֍ Z¹w&3L_(7#HeU0r +snPX9΋$زbyB1g<{ w _y[$I B}yrKL~$-~Ӷ`(R4xD)cĮF>?b OnŸp~fN? lq/R8"74\hK$F,LbwN{wtKCuËHh4OxK$I?w?gђL&_b :tԞ%lɤi,D40U9D;{Q|_i\~7~pΜP:\՝<9\M"DdE"d!hbKK^۲E.siQ|XydR@pZ7Ӆ+?ǿR˟攺$IKFp3dOUS])'}~/sxbv/b8DzT7C$nI$IDt!'\F~ҵ31rfbOէs˵k7Y8~(nxm79+ 9Cob+Yv$Iϙ%6v5Mt̩=KAA߿,]-~˸ોooq+M L>6TWΤoQ}YI$I =X"#I$I@<^2*I$I $I$P$IdJ$I BI$IAyPC',`UNƠ$I$00E"AvYH$I23K0H$AI$Ia&`ۼ-BqI*I$MeX#kgƳ9q\hw%I$0 [NƠ$I$00E"AvYH$I23K0H$AI$Ia&`ۼ-BqI"I$)#xSY;o<3q^GC(I$)#ax{,H$IR;)//oWGJ$IR2%I$ $I$GFλ:aiWU$I2#>e Տ˹U-&$I$eD~ xN*P_@J$IR!" 
$I HrY:5IV$I2'ɧǥ9ul.WŴWW$I>AV9o=|t̒#$I au>NƠ$I$eRH Ȏ$IQAH0H$AI$Iʠ L׿"thO+I$IvpSFόgws㸦J$IR+ax{,H$IR;)//oWWJ$IR2%I$ $I$ZMʠ+e/lM[zJ2` XR73YL6ٗ dt%e@_7fGЗLw&J,KjJʸ6{v`[C/z3@,4XdG3 &nf X,4TSae}9})4bs_PL\ѤI%ˀ%m, )I[ؙ<2_RLd^c1{N{% XDf?'ʁlG&L#Egh%ˀ%??1^ɞ0` 4 0L Ƴ;qˀeO2x|:r=/J .Cɀe2`I$ı\AVc".ˀeUJf э8Cdi\ādҫI,KZ*xox "iCqFc%dЅFH,55p8'OS#D*)b)UDH%\ ѵke&?2ٕdI,U{\G2yUth-ENc s4GQ9`KUL ~d"%@[(^e9S]6gQȮmA2`$c>bK8٢/}}Ƴ[#=ٚ3,dR?xO8U.~+cQ+ή`:篿#  _'Ѝ.Dˀ%רP3\Nr}Hm4h>؄}ɼ4c> b>l.N\OT~6 fHAl\BN`0ϓ$ˀ%iOxԨ0'E+,i0ȗ`;r~|f)2Lada!9EX!>S18I,K<!È6N?y%ݹ](応M1p8/g1/!LFfB)T3\GwқqJ&q*S8B+/L2`$PΩdnjCGj#K&P <ٱ^P>d7j,'YRI[s]&F Gkɀe2`IS K9oSnםqֆt$Lb)E,,a̮d98)OE9НLjYBGvz6Z.lĞlˀeR?8+ؚ`#<p ;ҙ+ydQ*(L/Eg#.$'|G&9XB:3Q ]5{?׳L,KjM~b\](:qKJ׭*xk؀Ӽ =L%B.j.b׏P,Kj-p͞s9|5| ` bKTwޣ 4^ƍ.s-׬V8on2` XRWʫ,naF~;8t-]| *kȢ=golxQlI*e!Oq=a4N>R+W]\|HN#[LG:C>a!]84CgkPxT27W;`zq=+[m kdȦ;Zna)%BԺT7$"l׻wA*!0. Rn~J3@gHj~+KMSxx(l qQ X,9r*7ӾIϹtrIB\>eޱ5R@ o?Xeܺ8mF, X,e9{96鈿IЕ4*KCrClp%[d]Xe =*>Bp)K,Kj)œ\Mp [Җj p$ٟMbkX|uqp(0 1WyVQ@B|͘r5'6)N7O_wH X,{s ܤU9ҩd!g6M:NmnEr9x0v7aқʱ8KyTɀe2`ISN S⬋ s'@&TsgЦ5hJ9?ҕb#be'0\a R38p5wהˀe9-xL;<|˫,&~ӓ]uϹ1 $Ìf 3g'G~oZp)M.I,KZW(OFGd3)ї fjO #QVOvb3ppkc~FwN X,yxeg\7Ue)A7ӑ\ڒWdR8~d*T$| ælI*]AvS?`"G0hɀe2`IM4Ρ70Q"'@*)z$li7%VdRQHC[HYKk*Ho W fUΡ(|% a5eTZKRs2ْS&jKYKR#&s,]l=үيOb?ssM ُFǖͱ͡\[vG$hr[(?I͝ܥV 26g_ӑy3UX>B]ue %>ȣKbÍf<,ǁ&^wLdZZhY,i䌍(^HZ ('V-?3&aI*YY27(cψGsψOM(4t wXɀ%9(o#,&/ta@M²gŌ_Vl/ 6=x[$ $Ћ>5||C>e r?^-CǴ5yUv*wYɀ% %kŜB!K XEt/DDz (IXP(rW\*:P%$5AQB\mkC֗Is0E )g,K`7F39iTn5 =x o)KRS<2$Qֻ +C|  iZv_))RKoN&ؙH2`Ijb0mD!5u>5>%$5 ct?SJ]ȠCTc2L`'o&-a`>b͆&:]dEIQjc  F-4"ؑle~/Yͥl[IwF)X퀵<z6-6W2`Ij ̘啴D =²(Z 3bfwr;8Y50v"y̬H_cG&@&RLw\ɀ%)ɁM}[ ҎGɍՑd0)J)c( aLc+B/%ONҘ_C{ wYؤYIjPjBvn#4C15t6֊TLz @sCPB9d?NX)tK,Q6ks3zDV5 XR w sЬDÊԢb||"e5k0$ f2jhfyA2`IjtYRї|d5H͚zM8&5&Jmي@ QR׸5*Hm<59fDo)ƜfB$OXʶ  XRg 0Fx!QQؙTԒ_VsLOOpr@bR߶qmRoh*.$#|v%,b"AH¤RF)CD(_@ G{R?mLe+2 ӞK؜(:ӏyBZ|؉YD7.g )`!;=%5Et)ue2@B|Nb/"TBZ٭4񄨢 y,T`^f:A/Z6O{\bd6 ҟ62H%-9 լ|(?QXB-Q$9 ŃtFa*c,B*Ie<0AQͦfG 2`I{KRk|I>، )\GXÆ0(Ra 9 (f'"v i0|#ULpUta Eg X,5fZOo%FǓ!t$&PF o2&HgN!e_7_RM!<(WbT24 
C9ND[)D,K"acldB&Hs#jG)cxD9,=9, X,0T3j s{MgdAJ"MFTTR"xޤӁ+ʀerwת-d. ;z?|ϧ.rٖ~Fd2`I&wL^-ԴٟE|%l+E,,y*yهnZU*f.]iCO3C Xjݢ1!me:;sytRK_1I, bKv/YlMlFF!{0ˌ,KGG!\J?z*ȊW 沄rBH'` [=fe>q=Sؖ8VˀeRKW=xn~b41)@:$RI{O|d53kK +ˀ)F9n.xC&& E*eTSI%Ѓ?'VX3SU~pnBˀeRK!\ml@Ҭ& E6;TQ D)"2yĕB X,Zx,^de+#*I!NlK>A"P@!X hϩhjۺ N;؀9uZr'8KGƫ&g^]I#Hc -SXLQ@mIc>o6f(YrJd98_xN* XL5a %@g6c&, HX PK>PBBԲlIJUPM:Բ I!BK!E|M5=BW7: =#X%2`IMp1= "z)ń R4!RhOR!v.=A;iK%mBzї7ᝄjka1WY2`IMI98m־V&p5yubClJ9SD(tR)iDJ."ˀ;PC9LItp=2@ElZOZ~ EƓ4,S#J'O)@ف*Ӆ0DI;F&@ Aѕ~u` ؗQL ғtJvtīd[:3X~aNJ!l" )lҡX+'b?/j)F5Mه7u2*鮭0X)I˳4bi.ݖJ5,& ' SK0it5,!?!d- * `F2Y %ٴgvf'!>% JH ($)`RC1K@?,@{!uKVy0yi..W΅ _I'w*nsKZEJ`OcJ*Wd#dN4C|H,iDĉ^FijMS`Pɭr(o&w,}paomhC-0Nt35RKR RFџ9!'AGr :pi8fN1)ҏ) g؄NM;iK3'6Lc I)lC65@;2 )sʻV}jy94`-e4$-y•&(]{e 'ṚWCbTÄ PN!B )ydR#(sRTN[FlDF%Qa% iQ E3ٙMɧlO/Lh;ym$5gYD\96@b3х 8CG: aPE;ӛ,9TY7lVyNת+Jm`B4d|Y>hw3b5y&D^\4]y\߻nW2of뙜 2"dGA;:pVڝG7Z&"фޡȍ Ovb>9/D(w%o-r*w$M~LEǑ|._SSZaBJs*7XAVeam)҉bn%+&fc/ĭaMG`bM'G&L5c)2 ĆA О(fGT؜ =ٶYG98|Y:Wsӆ oZ]'w^ܱ]KmE,gyI>{f}^a6w,&֗G9 p&<<"!Ikd.":{0+aɕlˑ i7ܛڑ#c]E8)Vj&4>N]j95q-rf3NPf+Fk,5pϰZ@?CyKM)~˝\N" |0^̆g'}Kycե;dt6r;c%\"å d:֜9=,}7|$v\U',$7Ve'5B 11@wwU2rnC&L;ʨ#tfŖ<ϗ0Nf!*z!$M%RA\E:(GZ=ezB&pP /.Iz_-*08wS"jVJ]L:!++(0- } ؄AE6p&xYeRKTB.\ٖ7x^X@NThˣuۜ\MO5i˗+tߝ3ІGۄa^y=[[*KM>^Sݬ~gIEt@5À:Q6ɽ$56l_# ?$.[X'U C6XLj8g56wW2Vh'Z} Yz;2XfKY5dzОȃI5\Ι &68N6 N8?3;َMKٌCܟ*i9u.ʷI睠HAGp-nZuh 5Ct[Z9lD9%;SM^d0L ޔQe>0) b22ḄgZҙ4&YW&?}x<:_ZKD~ N+3.iC9=a*"K,ܞ͵\*g_'vMwy w31\cF9pTBNz݊kctXw;(bl_l tvF4[_-$SW{unXͤNU>z҃+ R.g`+V9+G@>푨zxkkav 6+M`&Q:1|)[bEIn\F9 &{!iZĢSSD%+Ȥ9u{IuVXk͜B(.tIziGMZZIᶆ}9ŝȕ {Z-ujn 3x1!^Wp<3|I);fZB*&b>5!CG/ 5Pjf!]i(5!sѱ 7e~Èz^ fwYnWg'=[-MhXWc(KUWg[%ig˄ؚImXoM}"캒u\RFpa>.W + G 1tizGonHf7:qX*?`5cop%g?+bFMRU*%=^nt$@-y:BΪӆ?B}ζ LvrwE>bU(MWsZ5n'd~Jz#[J==>ԚM{`848|+vrcv"ɃIQ+Y=$@~'ίΡ2i(XRҧb2n>wJVKjV+٬Տ n&2/Sq+|Jf'M=ܛ~Kh9z+mQL:Z]L_!=...r73}fM9 ؀Lŕ7FuS*m1Şg{I"VZeS?Ecc2'.UI8-W0^_(%H j!=]&S}8}À%5M::,ԝt0/xz0 YZ귊ش<`Zhݨj+q*M2B"g+Z!;o $5NB|CjAyۧ"a=hf\msa-j؂%%O6'X@# {YcyЀ0{fЖo Rr*.shc&M*FYcw2N1E}8I & ㆤ}MITr>ZjDH%0(FBQL#A.,Bpc€e*yٌa 
A;C-:3sK^AgLD3J89EH1\(D*èIzݽ79g(g 4Z@(PZP@xQQP⋊HTņ"XB ef~s6''$9!Orٝy湯.g'-'X9 @.sajvUySGZ -^74zv۲w1ݾfb${t/xSjcS9ZG`;-U،->NTXNZ|X VI_jkL,=O̯鉼^>.{C>f)Sh'8f؀^̶j-qcJ۴5i1<␐OxRG`x`ĥ>>oxKsrm {y{^vˉo+4_;*<_C-}'^9a@S%}r.*I|H.&ɘ$钚H=4lbsq^Zh뽰mLDMqJE TdY^ Y9TǕuxrSWv萈ô{3 3<ֶbYжI X]B2#~]Oˊyplږ9u^x"0Օ.5"R>J dVRsϷL<+2öա1Oqqe ڷ|G8lR6x#q TXz$'Xr4Cmw}D#"Eq?8^a[ok̠HA9kuVl>"ã'#MwޘQVh^[fdjJ2I>'k3GYK3uI՟Hů҉ɘD,+9ֹ7CUL5Jz\e| t(W2Ð! ;ijXa9i4 >7yW5Ue> [jc,K%J=?5i.hFWΖGW.]m[,gWp2gRxCE%JN QUң"ҭK$AL5#%է[QWHf2FcVgixq&k*5\ a96w5OLQMUSt*됩V3Pc_]n;QK%"E#:s$!J$^?ßՕXNl!_ѫUgWZ> k-:RS}·g!R7E$UXۥ* ;"Q)[Ws ?66DQaFgkzcLItXGWo۾qQ9kIw19T}̀s;bX RR3SQQ,RQ<7zإT!pB7yXOŕ##'X9ov5S#WUnl֞e%u_eDQov$cr|iK,x3ǔG\L|W+;阶?c_<ozwk[ٖ=dž 9{R ]Nglcwy;f7,$'ܼ\#` .s}ޓ4kn ƭN,ymmd]Cǐkṯ:yR,v] Q^{gec)_ {Y ]tzU&1,,odZEq ׻Ӝ<{/߱d/:INvO:V$5Mfq[0&R%Ux$]1bueLØZQ]b*QaN$ʹA$!d^m(U {ʃC!d%i,{v\U\sڰ_7nw']hD}t~EC~)B#1=Jf,+Fk"E?%VҐM Ċ:]nf2d噾S7.Ge7_u5[gqwI']uGM. ̈Lz]!!2_2NUL̳CR?n+i.wxZenjMk5Lu|f)y *\L" YS$$!*Jt],_2qD]M~ODjbTM"ae71փw/GOjZU^-RULpLS)(HŊ q/+~]b 3ZokD+Jj;mLŰj,\M\d?o1gZX&'>:Y#H6 JC z6=RAȢʚ6f{u9P#5r V8G~ ɤt3UBVuuöXnmGҹrϡ_j![[>IYCN@9_ɭ>Dzt4k<>f=UT8%-)kHIQUY( ]FZ{|`zB5Sz1]jP[<$R(]9r+S/gIZtx̾n2UDWd0e|YjB'TC`# xZ(5àGSYNUIEcZ>HUUd9 US'UZ0uUUi(Ogn/3ksl=>~~I&dF0QX:-2 "SѡLR_*26VCRԌHեfpT%V6)L盪_!(2 hEr' ܥT}Tm{Iu#lg9r4N5s5t)WTzwx62-p ZOId=æZ(|/7E(XZ>5ՠ=\FUg8XRT72%,T|vOfٙN ;W>.st ~]vJqbvw=bü  O?pË1p=V@N_7:K؀74r4Q@ZX*$6}\Ir" º5aL$"~n.ieKSBr# :ec WrG5m, KԽ0dcZ hEnFZYΜ`x!ro{7f4xu }t4ɌK2g zTu(+K<ӐUԩpZ/SNPVHި46$maz;d2Z*տmͳ]|~Kw3ta pWs߃*nUtT}7y?;!8\@_2s݄aYW3sZթ9EDW6S;Eb~l.UTkfUe[(XDMDMEUU(WݠT1wnÊF$E* PI͉]ٲk++maP maϳ]*=mb7[ʵm د6^ͶϷL_{W>~Va^G˴CXSJ>;NEUI$v3u++Pޥ_|^KDSuHG]Iv"ݦQo__#e\6nvAN7\}6ifs6f`ε6u{%# ^3Nz)_H9Lq-{>|~()n@߁guJgNg BNҩ&Y.?T9P5TRߣ!F45򶖶FaPFwJGHLCv!E>Os_&UPjto!]Av$R4Cٳ4*#Mѣ'P%|JQYbzTgcNicC>f#>}Nr<Y[MDX)#6=9TnuZ:U  K gMxS,E]k9!ƙnPv*ҭ^?v.3!K)@s] Q hk'f;ZzI\6 $?>05sQS0KŲ7xc^g586/4;[^nBo тO-67yg(%:aކUDв|ijCeͣ]Z0郶 ԏTiA!37vixb4#^D],* !zFi!E=/\+?"Rf,z;;KuY_q VNr<񐳽~JM-oqQ/:n<*3kT%E5ufЧQ9ż qy(? 
B*ZOaȺ*HQMlɪ*)cstJuL}dW8審ϋzf3 _4ײzj~[spN .Ŕc?> }DBWTl6ry{SDAwAF5s4ތ.)1Z jb?q!\zeuʈX&,pCoq*H+ˍg|?҆ۍk{دd+-}4$4'Jt 67yE39H峿?!So1iTX՛\ ҥצcFbYl>fX/޶VE/~J X.ϻ0[;v-l ݮcsgT/~V]Ozz~`e ֿdn4ht6blv2Ftӹ^FCF%Zt5В%0Ćwjʪ(fdsi;*K ^ J"k]jZY `eQBB#"B?k"V@fMND."z~xj%rnTaĞDf;a]|#vi2uCJ]J"Sr?w#4GNr VU }1D+^82>boټO 8SrbBx7JoAo;|qD.Rct8RpZSxՈ'Kt󢖞uEK3Ԉ!Y01/rHKɩk#6i쾵"lPSR\gXlLUC@G5N7VPvEgZx՘_ayf&bg/Ыe1k"io]jl?c¥zUDlX_]I1Qz Dp'J2ԛkjFdRFXAMW+*X>y!~͠A#ܯdccbM蝊S ԟҦ00rG^hPQ4S^%<7g<8hw+ {EӛYcOf~Z^.3z܃ɂy1/*IqաF(S_v"\b[{+HՔBئwwn5UE~_GYmUQZ_{"SlN.)&L УU;=xmOۻ%޶Z^:͔3ӣ;L֛" ]WmGQh4@I,TXl: (aQ/VCI.Y Xe"-e+[l$G?\T[]:be]$`?/KfY,P񡼆0'X9,x̽f'B3Oa<)RĢ;oLzU-?PEYDWt4vR=T4U[O'l21vJz[b.KLsS)UT4l7LZ̪ܰ,3f` A]dj~m+Mz) 8\]b=,B~hlyHg!*t:v--j7۩9F=,eig[LSԃAdvX<FY7tڞaT4.S A'N]qx#>-}dVXYPq֓J Gk{yBnnuZf~݆$*" [5]I\[yLoS2oh0'1Ɋ#l+2?O> L7OJJȆ a DkiZV걓D&'UW1`$E^g#RfCL@j|%)(*g@ Akuwםc G^W3bP}:|?R!̅%:%\{KH ZCh5ૌehVsE'nlmJVN|}d{U΂2ECz ;2n{ jtzNȔ,bH`XIlh xO1WE?PycbXsQ}12q(gR WHbP,VʠSUT0bl3f 瞥W"\')1$sp `J-*#lPs}9Zncz5L ]m>3]*"E#6zfH-/2FԔDA0A6EU>#x)8rE/UUBf60`԰ˌ6hւwkȩj%q,S{Ԁ̚L& !c%٨G*{T=8=: f[H"}q3J >CC8e Q\zL:χ}Ljs(;z^ަ#ItoS_ʊN6lug+nݼzYT]IşmW2E :J ^mS[Qa4!5b Om~oҩ>Rl&'ͬ,R :9h4hǽV" );ޓp'.=XzS}NA;+l;jx) 4X ye]Rm7f;t;m,NLtK\W,S1ڐJAuōvvZ-r+8҈,](Q}cx ۣ@57,]74I-%&En?\hȯ56_83".qu 5={e|ikuKB.`˒q^KmwwSI 5gXWz,tUHu!V@Vȝ~_s#&]Jn?F-'X9,XnAd: .1doE ((\=&6rG0FIiu\B)h7iGFvRDl[IɇiV4z)e6lN{fNë=A!6p{pf^Xe6HQ->=i)j!!y3j!6R~E%ruMm&nk`%Ma$5b/i"M_FBqkҠc5@ I*9'ǚE~^%o}3mhDjp>"n.[pwu 6vo;=tF{j@4]!0(™n\O/_qBΎ[T$Gp`nA>2 2-,B65?qCsN!z:~,[w5?Zxgk@2ctՔ"F% VJG*c0M U#i۴:^QYg=+I5y-TňU8fX`QE3ݭ.{o:MoSŽ˯א!?Xi4Zj Qڶ3H:6dV:ϰ5BiP.?џ%uy=_r_93#ѕ8Ÿ8˳Jwp=PܮӖ6 8a8l_Rqrۼ0_Ab%!=2ʞo1F|JLWEl=-FzzA%PѩXғUbÐ܍rַjuWW3-%2efmT hJAeSTSTW6LjN6RGPtUG]׼Ci14J#1g8BAEjɷQTTMԝknIfmcohĈbu#TuX+U_/a>Za;/Lro رM1'?}CàXqCV13jIm'@]9<ےT7[Cң6x!cV_+摌]EMlC~yYNrcsGqs;Pĺ{}^-!'OiduRxvT/0"K mj0QM UQdHze2Y0et~6~n%)MC,d>PDIMYNL C*t+BeZ(LwG-2(Jm7j`Gst2u5)n r Q2wI#$S LQU?wܹg }a:qO8EU}ϛ- ݍ\&_U1Kc,P&>bKODNr<.j?A7O@4x焕G=l1n]imtXH ^l#f[GRV_vÈ~JE=Ccq~kܥlîQ 3|K5Rq7l6(Sn!*&*6R#.[\Fݢf`smrS%:~YnYo^DW! 
yTK4Mɴ|NU'#'X9\>sWj_Q WJBo+BAv#xVVU=ji:b#5E XUj[#)qtKuX|sC~Z=_*,=.u ^f7[ޤ!?*m!\C"nXl(2 nKNY4Uzi(+ ;۳ݻJ:-f*QK=&n""*T>0>,|UEaB>}k1|`X#(cteҷbB0_KWmfWfc| m0FtQD"HFӰ)#ר#m#z>Xn*lDʣ+-ȘcGbXUIYγc:u'H֫Zh $:I5b%W/hDGKP; B.*JR_%*f/[T>2&6Q"7_p`u9ɩJC[ wHU{^E_uBK[r4~X;`xrV`=aEqhUCMjĐC!m=<\VYpjgZ(.+n-kiɠs,h' '!+RG5e SU}!#pZk$ $(HEO/1b| oVbE..HU62.BCxml_E~i-a]N/Q3`$|ΪD{M$J.1ɫ>WԽU*4[~J:.IX!j#'X9D($VrE-RcGDrؾ2jijZ| Zsf >mr|nDdĹ흸Gxҥv(6GRbH) 5d`AyjXEFe_УS&~cTNJ5}}QlW (\@W e~AGJ/!iiH $n!r ۵Y-֧GEIQMyS{P`B1Z.V~ E\U3 2)I}͋DC&l|]2&_ZvϬ5191.sZRMhM -<jaۘсAbCR"b;Zs7͠ 0tI *1)k<$KZ#_4Ɩlwz/ي uZhERX2dj߮6jHNseibñVQAuݮ.U~Y>RW M ȪJ}Q>\ӺŭP;%!dU,sGd\-#UT;-r먅QZotu9}:$O嬟=+tMR `@qo;-u?>ϗYʺX ɬ =S ⧦(IuT1yy9skbвPeyxC]Q?(9@:Tc"jP&"' Q iQD (̂HAzR|(nOEmlg;Fg2Hh5X9m^F ,:q.vUO`m$&}f$2aeWp׏奡;#oҎ^/P&xpAȪ)*>u\`Kp͛&j*1 a] sXl@:9@68u,м_Cu͒ac8 ɒi/m6m&g ][Q^ .2ˮ"̰HwL$'(v+@M] $ry>o%:VaUk\g٤ud8C!mrP5x'=^d%E4Gkvr?5b>惖7W=z5VR_"e^mmMr۷ ڶ.qs|܍";72֚smaXjaӃ\h" :֯csݳQՋOgq{)TD2+?k'E6b yy}*.ukI+J:W4$eUUҧm[[y˪+ʄNMφ,sӡ^rm>: Mmt&Igx\F&oDC:S *{w"g]as[T,aeP+AV<ǮSx"XRF:s:kK:dWR OL_û,kJ %+>q6de}Τ Rk!nqG3U9u{}9DlHEm330uXcw뱏XO&VfȵfP¨7/˪4-[Ur\x3{Fֲm|>XWBIk[׻=g vUAe%Jb=z]ltSw:yydTV}gEcMjDl2.'yHϹWmFu}p|{BRN`")nז*4;%²{(BELT61tz YXLHU$j***RMTpA~<-\VER53vjwe|A#6{^\T&/ 㔃Z`V$Ukr\<+[|C"#o5o{U uxy6}ڳib\0g'8/D(@z곉n7 Vt~?o©RhiYc>$3(o چ2x}TWc8QRj R 1~7ꥅ/!ZuF_T>g5RʍjNuzi.6ӀJ#"'=M@=Qe&r;хO^uSxicO,Vɚ>iQY3 4UP iXb(46}bm%zC}ѕ X`Y+4E" ) YᏦ퟾c3or]1ն@Q`Ů6^i`Ec8OpƁ"D!Ozu5#G;ںX^j+LZS*鴫MCUMZqK__֬QUSӥe%:%_eErA. v.~eX]V:-եbiF7EVj^M/j˥2*ۦ`0&}Zψe3_&uN6k6U,+q :L9abXXM~;6+]5:|TU9<UJ8dak\͖2aKER9UZ*eg-s\ZwhOU[ERL3oa같čJ,&UtMВdTĶ򐯸ɞNtK]ǝn>>["c]Ey>",Y%Ab+%ϣ9TRno3UY~:+M9Lnh@)(E+$VvwQ [ x{DF\JHdy Uɡل&m8 ymBas?]gxp6N*wX(4Z,:eXJ15E>.kkH';U#_"Sl8)]w-QTCiEM=Uo(oq=o7+!o5Mvh;UĎ0λ,7Ќ`JT-6I﵉~3.v/ ja2zε+o3%s+M5^5aDTSw-:pcjF n#sfSH:/l<;#4FlE2uʮVi2JLR_9ԃShswm^O*궵SƖ [>bz48ʩԯ4dŖu RTL7Ʃ˪9JMlfi*Q>Y"yk~$UiiLouuY6PEʃ~>RyE)eB\˶^2HqcN$jtkWJC{DNA"VU7]TG(Mp9fK +_VzIpru]zN\2r"iv :BRKs)HUF[{gnd3(ӄ0}GgXFQF]I=:!a>3ln 7%"'>_t8M ϶ ^&$sSuꓕQGp) f^b-)aJ !7M!eX-UfLq2!Q괻4YV9kjzн'5\t Drk^Lw[_|! 
Լ8m.#}vr[M"ărCc4) Я>(2\ L ol5u]j3o4_msDZCPXw]iG9C-T]~'ar<$`?v:-m7y.mrm޾)m{]7n=3i&lVT X55x̓~geX/6yh9o0^ ʾm#ZfR=vv:CTCӹ XY[DeSC*I <`TCW>>4=v2K5⟊ leYH5v]X[zOk&%::#ե U2G(^n} $E^X]^Xv4a㚭mT-,VC!s],#Vk75PiyJedBŖ,5[NrՈA^mS;IUujϪW%0f 1JH 4@7Cø^%=a@xr&u=lZ/ħiI+i=|"6f-ǰ_wg9ʯgCvmT }w U*'Ħb!;l`k8xj^baw{s|2uo!W3, c,.@P7@ w/Ml{Ѷh}i[`m0{^eFMS tztIT}2+!EqOjuY GND&j.۸jbe*oe49o-U]}XRW6C}3 uEꡌK7 Ѯs 赃clG<n]  o4XIGXeʖY_U$QSo^ꖨ“xZχݑ^Gxک zeuʘ(;u*TP#/гddLhNNt_(ʙ雦+K<2P 90?bjӡfo9i6kbdeE[<*Y*jX>FŘ5=!tfZ(˩TiGL3%*Y"s,`+wQ}{8dG9b<5mXO={\YZex7ssW3RŐz9L5PDHVU$WxŨ kRvw~v)F#UEyeys|l)կs;*-nFF_g, 4 ru3-7˛3H VVbSMiz `tILԝbՎ9&9|KS^Ԣ%J{,zXK `<y|gPhlh??sɆٌc,u[(<77W$n=syX"+#Aym4f6Y#=Qw\Ċrl9lL D}IWvmT9seǧVĎtjB`mx '\SMMb[8Pp4aGz6%jjΙ6Uvyf ,I}lKyHxڌwIj, ܯ^vDEbHUndLBQw)$'"r|vJV@r1_[ 'X}xIa&x>:+V0tͲ0rR:kLja`glm[,PQOH]b99ema;N;7r *.3o#N_n8|iii5wNsW\T~0lX!?@?ڰoZ^f>ݫ5e]=fiHڬ'}6V:8yg|5DEMptۿ`~^4(12v.Q/8}*&8 x&CT# |-)ySWPj`P;C][.TSqkero^YEgU3~[Z9j܉stpn{*HӆWE_APC 31WUnuA [mӾcЦӶ~,m aka>D(<G^$Ho 5f!h@WMoj ">$W! RE^z`G7 +ZQ?2;DNx8ATl?(ā>S]mFTY";)9(I獱WSP.?2%A j&g7D[@90 ~mި <`gaO{KM{Yg g~m>E"(ôu8ӷXm_jyw㷌XdX@Vb%kXѼC&Af䚺ۉ;1S:mLj7 tqEThmB)sf6UJk,oJ36GopP;ܾD\HX76M?"~1;s יH mk}ȩͧDD]"QS,WRn."-h3*ig)uٮL!jPVuBH]oZZd PUo]*=&ʅYF62pvKPeBYa7 {W]$+ۮzT˻5QbBTd1jDU^WݩC"oZ({7_q3&89kb9R وFpAVb/mK{.rJHgV[gʉĺde$:+,3]Lt3 ljoydy8SBRaW))9FnwRTvX=<I/{AУ)1KF>>1]֦ixu:LSw%fA͛-=y= Ѡ=|+Eg4( v?^BXVj:l*+~o8˄-HulABzN I˱n5rZK\z269BUu o@A&\}ja8{ -xg([jNt(h+7q*^d^4(DԷARř:~@2R+RljO@)8$ipɠ3O>BFc:_\Wm+4*&Drp0zgS0фg#rtv^U8CrL ]#yTK ksۭTQ0C]2J{lMq{/celN};|`mXo8tYR R3- `#`zF?[wE&Kef7ǩӃ{|}2dCqHH4@Nft͵]d;FR)+o3uPv.~餍` /G,a`npZv IDATo̍~> >|%yst"QUZOd`M,wH}#Xmd%QWB£gM͝Wd}!ѯ~};cH*!:ܩo:EX]p;m]bͳ|˄f(Sd5vqS% b)!T5mVO5M$/չ ?;ٗ[cgRy7)_N^u+z]M^,bD99sK%b9e2ꁄX&hUdc٦LP u}XE ͭ ![sDa~3JwAH@-2lzbY;-yF2 1ozN-6is6z9/0m]"\f; 8(3-oI C{ւKRԴt?XS)QfJ7Voxwtг FsQvi7x<JJ&{F 9@GͶ6S=J#)+ FV͘UlBIVoLTsTM"vY!bP Lݭ$EE]Sbי";DJsvK<G%I{i7\ήV679 lcFZ,qz:~&Twgnvٲklls?8-]Dt@b4-{ō$l-Ċ7{+ՆKXv}a|le/G ]32O|p,jp2NUA1+C~jY+ɪ3t<*Fj͑ =JCQy5_E?K;[bqoXVIRY=MUFCiK@_ e-SRU ⑾o?,~ ߧ79׹ڎ:ƶ&J =+.TMםO>>nd 
qo/NhɬH;J<ٗ7ji5[4~e6unVQ6:KXjW&je-xahH6sY.imX8"a3e[W.~3DiStKsg|rďSBbwߣ"UvN]"Ye?G"eL}\E?Lu/FdprJ/Sܥ-2IQ?y\ {Gz̎~Jf-l/QVݩybs_}.re,_ W>vlzA_au͵O#}^≠ 'C砻^9NX]mn8F}P2i(m†vZR?9kHU0`q_apK (OVA_+/8LڷU^RoSj7gN xpE% Z!2O xz@kwcwo\nɛ.Seujj&jޅTSeêr #]QU,U%ymkpEb3/5׌-J"cMvuFTcx{Jm_Cߜӯ$<Ũӯ_];pflu@'Fl[`}Cp kMhH}ծ>y*;h w+~O#+3yc({AC ?SbUl/e:M RӗJd #\/QGD^ rQ/ԁH k[AWXl7;a6uݜ(ƙ ȴlVP)KdV,zB |);lR?T $OdI!;1+95^v5׬C8 a0r?B4Χ7o3T**7+vʛLAFrgty0}_aZ=1my3ӀeM,Ke-gĨN)4PBt/IB:uap!Ma?n-D8K"E1PGHףj$^eite A ? g.H5 ^Yji%YPp KHQ3͖P\ņ LFYd{ޥ7Dkߊ&%qq#5{?׋OT\v ^`_EH8YK3ywy! Qٚؓq4&ˀ,UEQCƋ_WƓ!gbiދ>1̤9y#K2 M,-& Uc'ulbܢ gaT`1\2 ! ]%K31h˯yY҄wYu,!BZ PM]p8%ѨD@R;}&R ^-n2YQY@,q[&id %2MU1[& Y@ H0bz0$IRЗTq wa4qf OPG|/z}83߈>WҏT%$$̇XZht}73pD;L*z&(=ʽTyN,b<Зk_M[.sUW_秳0ܟE籨lC@++~Al ږ-^ X+im +kɷ+~$\ x>DiH-wD% ypoZnR~ I˶x s蓟nY> ~Bm^ɭOj#lR4VdJuQko~K/&IvN$ G#iKGҴubLQGHZ'ՄQ%hf=nI IcjK/mTf"d<ٞܽ%<>`hbsywǢ2!K˘;|~D@2d@c#9eq)/p-!!\TR0jZWS|ZJ[="Efoe= &mm|8`*Guަ`<0hCQٿ`c1oOO`8,15$x 3 )$Cjʹ^ic& (2t<Hsx4݋(D'&v˼m"mGMWu QE€%$$;#.b˨Bw8 s7dc?s.! }Jã:g0.ѨEelƑH¨TB$ $EF[n(Hћ͘?9΋14Rqu.aP~L}* $$"`s~MV3_J)0Vk Y+]E~/x^܅+^G 7VQW˺΀e*cfe|'J_%KW` *|Id_fw4Z SrQbEk4,)) as78A\M- Bس` $TP!wJn͏M[۱.jSܸZڢes$7:CHCӓŞ4HV%1d qVtch؈9UM`@L8K %=F@@&j̋{LDZ;qʟ0i$$Y($0Et[34+7UdHF5ɶd./rHԣG}aAԇѧY ?az6>n{%`3y5V:d*8բWApNj9EQ;u!d*qW3m N/UWlO\S4pX |?3yiYZ"v7Lapi500Lm>"F-s!Q[ܣwdY*b}D,͢QM?U4+r9 Xz[Ɩ䗜.KH୹lh$,N':ŔDS#+6ړs#+Y۱uT.&$>_qm1?\C9}Q]p|MLDN,޿;9]9VB90@|ywy?j+5>դI2lؙؚ4cuĸؓHʷAo HS:!\.R|HM6w$VĻY{ȭy6 |CE/r7 XȁSǝ {p8<hʺ~pmKO36N9!;XhYYV0T+7oB5MJSm4Vrw.3 :Q(Nqz%djh]qPbLYL$ cMAt{V܈xs RU٠.h]iy >eY8N-/,k^[MfGcVt@MW3M!AH@MKŸ8Yj7oHӫؖ:Rę_:G&\ѭa|L?`-\MN#zPG\-}qӟ\L*]/-mFX1vdopr0\/s^%k'F@@@$`7FS ߉dٱ'_=_'Hc6o~H3̢J4͹lɯKX\沘f3{s1;)Z_~jf`|es:^_ﰖhAꢑIѶ+z$ZUG=QKjMC%{3؍k*<^AHMj[[9BoŜPƻc(dulJ5F%!h%1|ʔͼCin0&W8-NV$kCMs9hf.-@jj;m!]-+z ٵ–b$y;WHh@(F@s {XصgmQ >N U9C4sRm(dh|&FpBLFQd*}ךidO%%F Oav>zN mQ-v|HB2y/^=WDp^=ȭ$sWE2NZd±%eiWLNI^Sy!3tz:ӇYחo]zZBʽۼj9xe^Uڊy(Uie šϧ-f!ލ J.m;ەI`.S\Y+whץJ7Δ<+ac ߱K\I#b XeuX=pGJcaHHN ܓ%ޢ%txp1uv Z ~^Wy":Ub9^d$+$wa3!ƌ*ףQ8~X#m<Ԗ 
;̚ȀJWlMTA:ׁu0Q]J(`eٸGU[ɒu:Ua7ؔV급z&heSrwcW7I3XK'vblN6:TŪI~ofu]ʷv?+V93{jcY!K-TF?qcw#qNe1B_R]zj x UXy3i KG5u<|^6S|L )thupErWu#.&FӃWQ"2?L#F yE|i~ XԇɴL}*<(ǝN4-rF|`]f'z!}ؤ;K>ܵDp9s㸒w3;y@['>T2p%o9tvif1=IUXM9q ٌaw/w}yUwiZЭW! 9i,.S+ ;ieânVtB%_&֒ 6>:R' `CwZ.LL qz'!e?ZJuͩ2U+Ovl.Gt"G\IvgV"&o26IRI ɲZnr#i9ʀUCǖSPՆ۟/]2%-9\=S"a4$(5Vkb%??^q8m_.QM@H#h%N;:ˤWKX*{r؊kjK$i g$[k!3dޤ8Ijt[H YpLrw8y3`\qe9PXECHQCH9 (i^K:muBG*jh|fnfh `qji-ЍB{f.{ae|cD&叙/Wr/R?ũ2H9ї &Ir)hc "f1F2dSU0bvFiZa'밼{ of !3`۝XaL(&2$/cgDCH$zM9*bWL ,ύe] +T "~cǒ{W," M;nW q]zs!qҫ0?,ZűYDM%n.f,b#F*HPM1h! $M )i#EHI$+Sd؈Gn%e*mr9?-sm5i;nta9QFW`ܡ9O{w%Nu|)<$gxe 6? di%l>&B@z M|ӞL1&T.l}vagä!QpL G\ZHg%$BU1Y)6b/Fq~of"t>nasgnVч=%㢑JƢf1ZBRQrݺ!)$eaѝCݱk;rߙJ-qd 7IR B N(73K0Ik8Ef, K#Ԫɒ&EHad[/* d qr;lδǺcI.: >ŗYd1 aH;u-+2GB!U$H!M4ToՃ߆CKn&1&@kƪvp e$ koxFqs4m[L 9CVx]e|N9%LM^:ۅ Evck}Hk؎ELg (SJW' X]9%>zgFE{>n+Om2QJL.[ O(KWPC_ #: g+#U')f/ 6fC6g=hӋ4DQhzolH>PE6`6ac6Z7lD%`>j ́%y9))Yh0Kr=z|4P@@[zSßhMN.D7c*Ч ;uYR,cBYBkt`@jTуzzd=zA|ڭ׹}1w~ʞ ^})~F@ ;RGCo 1f3 آg+2 {%<]̡ED>L+:t1|͍!u*< 5ɬJd\Љ)*bL$,/TxQ+:\F?d:=dZ)3x "[LfjE &ɠ(~]Qt3̀%-ÍUd~Zdp 甥{G-9Ţ=RURMeѨW RT%Kϻ3W,qY?hqMUCL$EHq5aWv)^N+ܴ# e63A 05$IqL $߰5 X]f;},˳-{s(,xSj{@(U<+zDqXخo}*2 &M6M/f0`ot3$:~D'0Q]XGANv"b0$Xs1̦Q+f3Lv߹(J5C LMrWX:/؈\i7ۮ1WmQU8TZ.(Y՜f 5MYg<>߳؀+U+:?fCF3hVeg~=}P-\T7E'ne ً390rJ~L~GG+.ױ Mhcgy.E\ѡ:$8KK4$U쒏1G2L @+0Cm_h`5 )xS<ċ3 >q$(@ɽ%wex!>Vz]Ȟ< ܋T.L-ߥ_OxwxVs2C纆~VtR@c 7<,4yXl1o9{QQTFcOql´1E5(wBZIU NNg_B7#Xgw+\INfj?UaJLL.)t,,ZJ+T\؞SƩMM{#C G]΃EohnT-:I`&gZ.<1|S9z~ZtDDe$2":'h1Fr7G ˌXp v2Υ>3ws IY0tRϻѶ3L_Snꢢѯ8&ԓʏK::~w'*b$͙u0`.qJb.,ì|'.BGWbJNRr`XrX5E)-8z4Fq>b%qs-$j]pz6ބSoE[h(s$Ww:^7Nfwo~|u@#= Ҋ؇$S91lSXCpSi.TGhy}b7)烙+VG1NfߣySh#Or=! 
XO{<)*0WYfVrzf%va+#J"_/Gj /U"z6a \C>̏93J/,u%=9MnN~I#\J8J{9}ؓ79`<ȉ&!hVhONC~?.ETC+JL{ ɐޏؒsXxii秜ku|+5$ZxCͪk V[ml]z1&F {u`_S9 ͣް?d"j؍ۘ:B>8n` 2T 0mMYџZ^.%\ŋd1mZI0Hr"xdZ}Wq]wR GvOXWw7Ou2>Z< ZsH,B%#Af0[iykۍi.p4ͼMr4d{e/hlͷt 0cGFMji{N+<ʑ΅z&$c.x9zKFqyQp!'>Џ-Ăxa=[Z>oZ!L!^h."6jt]Ɂ*M1` ` pL'ÀI_ttx&b#, b0مje)ƫ1{؇Ѯ2`p"2O>:/ӐpXXڡ]NuY[ԧd6O'؏E4sjn¯؋s \EdYtczsGs0w3/s0#Y[ȥ60-z\1 zZ=T3uc 1&\mLR+a}4BC5M$Nǜ:naѾ^Fe|R ,Bh<x0#K3=CCT,Y'eY.*Xh"_:T6G*/*ǢPYEE|r8{u (,> }mi>ˏY50ll`a 6Lo+SQ Vf/V ,dHSG'VZ'.ޡhi+hi qz ؓ@ØR_FqCy9nv+ۦ=+$ *`}?m.K3uʫu-OOh_q==f{rX֗mr 1ݢM# 7`uqTGν1d9MشHytu,~ȝ 4f|+k ,xciAF4nF\BG T-l%E=tjFzeL3 40~`Z䳜,m p]#I3#*G'_/$ߓXp,-,y#_Ѽ?G1#|DůEl7r 'r _-3,&?dؤ(^s;$ WmK|ΝSk^UovWzDTKթ3XZ ZW}}ZͶ\WяWh<0fd[#͂/QU؃hbo"dkivfgNMHSH $cG>Kx*&0/OӏÉE?cuNMрuPXJO2̷{?*hȢ&!FeeIX6@RExlFˁ<,%d2 YϹS)7TNa\n:J~3 ؟A`=0ȭ:;jw֛ȗ>~WǍlo%j֍؟?-NⲲ^u9\4c&R08%CMrJRFVЏ f%Aq3l# x,?c!?aNI-gK;4fY}9# jBRu fKs 1r#I)spU/9L0V hd[p89 BV+zUN#p[:B- 7&+ΐ%u]].f.܌$C[̗8mрe"U^f_ HҺ4! N=',:UkVg7`[L+k =̙dŏOuFLA0: A4^5V X.vқ bkzɗN,UDh\JA]xm0d oivx6IRC퓼M=W%D ku%B/6d`I}Ա>+#j XG<Lb _g'Vr$-:w4onb15 a ʉ,4[c?y$TJ=!;ѷҢˀ\DzqWx$eSiAo6ӺˀxI3Q=%I*8o$o?g;WpEkF,)a('Y6GIR^4mƅLq Yn2`0I+<l凟%Ik3 '.dfѬCO6\ˀOކTA+V74`$I L$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$$Id$I2`I$$Id$I2`I$$Id$I2`I$$IT Nw$I:`I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$IK$I,I$$IK$I,I$$IK$I,I$$IK$I,I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%Id$IK$ɀ%Id$IK$ɀ%Id$IK$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$$Id$I2`I$$Id$I2`I$$Id$I2`I$$Id$I2`I$$I X$I2`I$$I X$I2`I$$I X$I2`I$$I X$I2`I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I X$I,I$$I 
X$I,I$$IK$I,I$$IK$I,I$$IK$I,I$$IK$I,I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%I$$IK$ɀ%Id$IK$ɀ%Id$IK$ɀ%Id$IK$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$ɀ%Id$I2`I$$Id$I2`I$$Id$I2`I$$Id$I2`I$$Id$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I$I]0"^IENDB`rally-0.9.1/doc/source/images/Rally-UseCases.png0000664000567000056710000037011413073417716022640 0ustar jenkinsjenkins00000000000000PNG  IHDRMsBIT|dsRGBgAMA a pHYs+IDATx^xUł#*`!( 7Q;`'^;6XT^EEE,$Q`ATĊ7'sd)@lrk!;3;3;;;99'+9iPKEHR."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" 3_ϟ=hr-=ut=?l?c}~ߵ^pw}n„ rMfm^{;8馛VSsI'EϜ[gu܋/h$SƦTݤI,=!ՙK.Ăs:wlsuSNuᄏ{衇-:,w?jM gWRR-YsP./M<ٶC bL⮿z;˦Us#~=W^抈H56Iz;ꨣJ6ǻ+,45\vmVsNmjC` 'l5ȤNRMI޷o_x:|zwݨQ :Խ >ѣGGK53g=o޼sztpl}z:n̘1<ygo><$SƦ曠m﮽*Lx[~j|M;]v-׎zQSIQ|k'N*?hœmQ@&^x+fjkvGImO>k{-~zKr5_|%#$P<#VŽK/s=- $(g̘a%Mc^QQiQ &e}|7ްYO=*XsAAds̞=jIGq曗sMqNϔꪫzsrr.r ~|dei34o9sMc{/mr6X7J9dž>9RTF/"""""= >{o>9yr0`/{ۯg\GׯW_}eW_,2GK /`l֬YOGs*zm;~g?-0a-Yvml&2eJ4GDDDDDKfd}'YԪ"N'5Ԁ{KjuyNyM:{:7n j۷oobߘOd-yֳ}VK.u'*E>h;uy݀e]fA?ϡ&j9s>jgz 7ر}aYl_8|&tH9TL#vRht:#] #)t姟~Ҷ2ͧV}W"%%FS{ꩧ^zNE1)= yNkgOo޾:ppI'`$u:5}qV?`1=ςn 8FMa[Z! ǖuhM'k3{]z jBߔK:p 2OP/R=>[T4# :Г~vlMqLIDDDDDOЩEej] i Zj:}ݻwo՝5:jmNM^S/W_}^k\nE.jy}7ڂSO샹EAgpOV[Ӗ%K3> _='Suųh;N{{^架ql,iXPK/ٶ;UuP+ c)5{xc|S?nYfdБH=k) ~f!5udsB i`=pgXGu5TE&HYCE 4wȣ hDDDDDEN`FM&my{&m֞{'vC.X-joSҾBj}b]7={lt&dMq0NOMzo^Io@2ć'oقU|Mw=X| 3j24k O33^9@@̲ՉwFUh1]_O!Ci4ӧOEDDDD$9ENgX7tutcĈe?)5tF;tUjI&A`*:?/e{Q lj^X*ԢϚ0ӌMO 4-8f\hSL@=k駟nYG, R2tATN|[wv2;(xk\`B>R(σg Cdžo/q5 qPzQ(TXX>V܏i ŝ6K 5*]454w(߆zw?xoT5RiQk Xߞ_vt:}h'Oϭ@P@±VƝc2'UW]et_߷o_;5A/t5|4"ПO8rN SH4t>NK_\nM Ok==}k㋋4wPR) %H5 pcnmȒRM9gFX޿_j w> RI 5PSCGglW˰rGrڛSO9+x99LJ3x8?46ĭOjPN=+Fp-UzN 1EB*1rHF2tkߠ:Hm """"" 2Rأ}A4Es/0h+[. 
؃0 )ҥK6ma0pA;w.[q0 {7xc k˦yԅ 3²exl %{ 9{|ќ>Sny\xo^6} 7 €6zUtAe.hj\~eqfxAqЪU_^Y* :֭[2K-Tv9 |u3 $W?ˎsPTT-Q* lwܱl}8h֬Yv7x#ZA?Ku>qiӢWK/:t(Zz^x!Z7=;N-Zy@Zw9z( ٳ/MFADMpRt>w;tܨQemVY(0iҤs E t?߃{kW_}<)Cj!qC7-O?{ܓ?~z;DD2I7P[ ƘT# W^Lx"Mο贍tf:t8koT9>k# yڑ8Cm&ڦsÀܚY8+v4Q`["x tGSKєfkbԩM6NTnI:m(4>ߟE5n8#% :YmH4ߢ)S> :kk%DD$cďJjSP%Fm=>ں~_}ern_6uۆ{7F۠%TcaYg5<p|Ty/d)2QEx_ ۍ3ե,;cd;OȈb_(;-i>2x $u 漨mEȰb8O~0eeF|t2g)ǝ@߿?p\8x9QFyϬ_9)|&kςiĽ'_n5Ԝƍ =qDKTsgyfM7RcEo""ߡoтuʼ\멧@a-oG:,nY#^Ǻ?^,}]+f9R t?4s{-A+ߗ@1MFjP|{]zVA G#4  #dP@.:h^ǃrꩧA&`}ۤǔD!. aW^iͽ(H! 힄`QGxxz7ЀXl Җ`hbO, 4qPC%u9`WNjY. :q:K0@槢9N9唲K.vm0 {9m#M:d0p-?Bo{k鈓?q'>(izt>=?裭sUF`O#`UW<0. #Fs:>3{:kǎq(N謕b㎲&Nhuv9(G ]v 9ŭj3F.9sgG>|xYۑGi*gtB߻תUꫯ9Y8cs m9ҎsGSEDD5u(Ѵt0޵JeQȸ@)5ًYT7^\|ڴii}RIGGPڿS39'AԾmj{I=;|VO.ޤKd{j=jS;Dh+OwE08㬖xjy*[M蔖ur]g*v;F柚{Oէ?ǒJucvu|yԢ~26ﳬ:٥-8c~>} =Ԋve>x>4Oǃs< A[M=5""RJz]?<*a nD9h BYR|L;fR R;%#`&Ix0%#͛izAσ~8@N0M$~NNpNN7(<}4%%}UW>XaW:ڲ~yܮ];~4:/F04TtQF(]?Sxuh?O?1cǃ袋W_mM(@XgNjN:PAA2o#+q4+wy')oD& (tIN:t0Q2N}5A;~x!8i7OMtM^č6&p̩''?sS:ڊ1cDv_~*&χA_w@zw'""4tos9ǂZ:0Zۏ"LʲGmp:ԚR{N:He&&ZgQ#K댛MHZ[omˁp?ERB*85fu \\4Ν;[`ukLj頍fe2 9˳,Tx S^) %DB >hl!:ύs""t&a艛8 w ga5 F@ϰ^zPbl` E,=ScJAj:aj^~%\biOZ>o55Uawzxg7?)Skn}р,O> =vاH{'v @8Ï6)Hᤄ[=cc!&m=m)ta5z'ax&iA S9@F hOa9Q@ӁTL2dHLDD4HA 45,I&#R)MG_RkL@G 8ڷoo-)Fcn[au,KkbSK=ё2:A=-#&vxT}' Ox#ڵkY;zH' 5 F;X3f: S@@[oL:mڴ@mPS@`N:ۭ73)Y {j )tNǾQx~q @w70eX7c3uP Aǃq S׉HyY<,iJqI""""""" ]DDDDDD$$tP."""""" EDDaÆ뮻&L.6"k9g\vW~Կ .Joh|x""MtFvCu_~{駣KOwyqi/{[\ׅlso4Q?F΂ XHS]DD?u9ܟr?c,(3gNt裏c=fϞ͛gSԱ:ZθsMcyU>SVm?LϬY80?^3k&O _mЙ矣 9v|-/؃㒊tǛ|W6?u;GomW>/Me.u_ٶ?x=0~gü8̹_+:̗_~)dyLH#䃤^xpxݻw` EEEn=p{~On#<]uU|9=wQGٶ: я۷3f=zhw 'Xjw׮]ݮt^yAinnmlwyg7xhsƍs}qF*['I>mށhۼ{-@ "Ig!<裶.8qb4ǹ3g^z6y.D ?|{{gS_[?-s`?gd>9;&{{7l>x{vᄏOt~m44m>[nw}~?y?]tQ6ɸ;lmqgQV;}=#6+qpq|fHH#A\a e'Xgu9ssAZZk3f,Xi&4ς0 `+?~0vؠyA?c~]vxAIIIQl`zn 0Xve0/x7m6ܹO > / VXawߵÇ}=YfSL gv @0믿n`V z_~ ;_5:th1O0 c2lc7`*b0 } Njp[{a@l#Aa [_vej{9m/l&w7;v lko>8ꨣ콄ur\p'O#<{_jx7t` 9`e {9{?OͿ{lIl|>lװ~/ߎ;hHctFT^EUVVM|0wYYY. 
ꫯ0rg}k۶rR[kبJSÀ߅ FP0xvam6GVt:6nsm0 W^٭jYfnw+,:p [laYahxj׺ukM82k޼4h [;>uYNV5dC 2Ķ;ώM멑&`ƌ<3@m>àL^q;yܩj5뼞Zv:t@-Җ&}]˨!{scwep}Zɸi4aM ;Һ ^}EN%&h\T,k1Af:,O*af"mSz`o5,['\z\g8:!0\c5gέ?ۥp:c6۬:ҡc@жf>otwov(TxNǀtr Z8&8PH[e>?[{m{>3Ǐ|flxx^[GJ^)1b;CD)P.""Q^Ǩ}8iLOU'!Hm#䑊 WK/jSӫ2xoK-cDI0P3MMGm< `}xnoqeJ0]2ϚUMuڦk3vBy22zu'"Mt O>O޲}-2[t GرcG=c4:oFݧ)/*V6zH]gex͍#{ʔ)ꫯ.됍nQj[ligwsz_Z AX'A:ђ5CD7)5|slXۡcxU!% ޳ԩS0!5 !ĭ*Ds:#%?~^x>ۼ[#{@H#ݚϬ}5ä:'|҂?vQK`OD33imy-i3rRYO`5iLMkRҎB~d1u?9e%T IC8>q\IǖXǖ_DS.""=\XhѶtlFF{^SӉAf1o ?nЖԼ5שYg1 ,8&h_ΐf~9_4+]5Akpkt,F>hA sL5礙q ma ?;F|;3/QHUO/[nBRQf6:8}g1鍊n>g; M ܘ)XSQ[A&Ŝb]qxZN H IM3e4> LEo)#)MLHm}|}d_?5de/}bۡ6G 9gY>zlτ|jIgo>,Ly=1oɺɤqf>PHpHgc?o8l3?ǔc#+caY=.{WM4zڣ]DDD{qgq I! Õz?X-PlcED(@I"""Rc_~3f뮻ܓO>hs~=7t|疮]XXh7o?Q/..vADs[`{m-%>Z[oY-?`x~;[?snV3Mm7k,裏Z- /hNL>=#԰lWcv8q{}kU1y}z!;Gi1!5W^qSL){αf޿^DDXE-"""R7|3h׮]v{lо}`- @v馛~xЦM Lmz?h>պuk{-9{~={ :tJȑ#m~tg}vk=zviye]f=` zzkK6{mpG;SmwWNjˌ7.hѢC9$Xml]aoS^W\qŲaPZAnnmcu  m~۱رcлw`u 6dc؇>}ky6 ۖI5p`w\{>\}v :,,Aw;駟ny=2dCV??z&""ATr]w@_i~mV[w\ZݬY3 SM6#F|K@OL`I2?ݺu vm7 :SNm /<{:ɓmsZ{I'n-XeUl_O?[lEg϶}xr,@{m_|a W]uG^ODCڱ2K.&L]v_~YYY5 x`wm{ Ǐ^Æ Zl|ĉVX€u毺rI'MH}'}Vr:urO<񄭣*3f̰#8#no^~e[&//ώ$~c|۶mzk{}f>S.տ*EEEnuu999믿úX)wrM8NmڴyФ} EDDZ&@^s5)V_}u'Xp쮶jw:&pd9W9׼ys{xkV/>}N:p ܅YfcH}` _e:A?_zǁ> al_᭿n7Ar=\ݭ7tv?1!xUa)xcEMaxڛsL駟ZQGe]M DDd)@jQ;?-O@_]׼'뮻Ά6?hnͬ_ {̧澠=S6jk25,/;jc=:#s8f :|^Z3X8`X8 +}]=@`ypk1 {  EDDZJ˷G[zMtjvRNHMnu0 %Ѐ fkR W =dzGjyzN^'%x-t}H9l^ܩb-!9;6)tO OJYۦL^"HS.""""$ {ȑnС}Z gY-ҺY?+f=^NS|뭷ZJj Y_M|RY㏷.`,} UvUWY{x]Oc>cYEH!࣓mKNZlFƎk25(yNe-WlS3{Z1'ckӾ/Y5, %{@Ip;Xtj $-Z]wՖOݺuv;uF06schvBnh_nX<l | ' 4}YkpmM7KvӦMB:;_~nS~E~p/Ǟ,\k{R)曭S=l{ώ&υ?6<;9'x |i!""qu%y]vz̭Cm ОuG5i Zn[Ҏ 3uli5mgSӽG;w|ݩ65cqk_W>3k'O;Ν; /P"(-/46:*&cjѩ)`ڨ&9]Z.5 wsƍs7t6l[' qL]wvjXK> A T w5Xv""jEDD2m=\7|potvX4WoCϘ贯g2ڳ+u:h~9Xu]֤7t EDD2=xaԩS13ӶHSD! 
OSǝD EDDiFӁp FPE2;>Vztc܋d""" O4֙=pnC4vNz'~cx9LNDDD^i#Xt裏Z;i" 1vo< &]D2tF5\㮾j6۸qƹ 70+""++}V5w\Ks6mZ4WD$ٔ.""Ҁ{gaA]6m"E֭[{Nז$tSOYmr-^~e馛FsDdI-X]wu.so+**rj4WD$y.""@>ݣ#F(8e͚5s{;믿;M6FQI """;tھ6DSE\xᅮu̙3"" KH| 6pwK-_FZE]EWOI!Աn{zMvaΝ;x}T]DD}nĈ.;;۝xTIW^? MD)@C W_Y9ĉHV>rH+PiH EDDC=Zha"}ؘԢϝ;7*"Rڴ>ܭ*=I͛:}?"""u࣏>rs=&lM۷rFrO4UD~)@>[`՞/TI֭[]wM2͘1#*"Rdi d^zM4)""RԲ3gcvr뮻n4UDny]D)@eƍsK-޽{4ED2A˖-]NN{7|M? EDDo^y駟4cW]uUA̱˺]vj." "+E"ꦛn͛g-2_~q-skE'XhWHRwٝ}n"""Khmqn,!C3<3z&"IW_YZv\aak֬Y4GD)]DDd u%+=jя9虈$k6`ꫯ5[/ EDDP*r~[mբ)"dԘoV_u_u4UD~(@YBW N3[n?e_D(@YBٕSg}nE$3jʆJ|)""Cw1z=S{ND2ˆnhC%~w]DDd QFϩ6|sGD$SZy/"""s_kp"YVZi%ˀ={v4ED~(@m]W)jv<+~i4ED~(@-[XCq묳NLD2 z="""`WwmKO?DGq[4EDe_JJJ?MmqO7߸={Zm:8ߍ݊+:t`eT̝;=sn֬YJSj瞉:M Q~~{'݂ SAbǝov)߻~:?3__~y6몫R_n 4Ȯk,mUa#X7o^Lo[5 dx]&p5X *rKw-XucQXX:, ι943Lwaz4#83^裏v[mHKK۽+.77׍=چ(i86zzy˳ipdI'dEcU cǎu|kݺ;FE\i*>|뭷K/o?:A赼k׮Vv1ǸݻmZ Ыߘcx?u1f̘rrr;>nM6tp-S@s= Y'ONF#z{g&9֭[Ti zEO2ԦmԩA̓'矣ΝAf͂%*UY`AoMV,R-M$8묳0 JSwqu=?9O>2,?iڸG#6F_oX*M]PR/w፸0aB4i2e֐hDz;)O?t4UBNԩS4U2~+2PdiZD d Ьk]wu믣9Bb !#;sqm{W/i8wߵI@BEgcgIJH!Ɵ]M6n׷N}FZ3 mEZ'4{ꫯiև"w Ziu d1PE l*ȩH55K9 C5;.{~S$,W~}{CR:h4.ʀ EDDD$Q|aS+W%Tu |bM>=Mtu*a%7߸p?֝}6'|Y`fhB/auTS\o tu*a@m޵^뮾jWPP`m&?o̝;F!?~{l:gy=Sv믿H+_|o-,,m?>*~j*..yJK/dñ۝~J\ʐLcEJh3zJ\u1zh;7yswޱ!σaS [oeiRHF]vł7|nnРA6^7) g뮻p 6UW]Uds1.??jofGP; \tE > w]r%nV裏ڼ~N8>ئWot+GsE*G'x\M>DSJp2Ot.R+o܉'vX<'H{N=T{=}~?lv(@ZV !6h†~m[0MN0NBMmSZ0L-5A/&N9w[vj  `%]8)m/ Y7˜V,12Ԡ뭷{7„!C;v喋&$KdpωM7mf6 L;vfmeIg &X~s묳'dҤI== EDDD$FauН;wvsq.PPKVXf͚;΂xu|`ĖsUW]5oj [nmAkm!g;tVǺ j#<#}dz #gIu_}og=C9ĞΞQFEKjٹEL`M vG+<6l_\ dtLQNss_}U1gdpnVǛxȒQ.N%"""Rf͚eѾ]z܃|gnu-WN+ajYcYj i[>{lvY{wx[nq/vm-/uyyyOϾzOǾRA15~{}V;&E@Dυ`LOk< 5="tAn7gϞvq.~vRHEp'iVv]m۶,3DzQ.N_"Du]xi {E-GQQuzE{ J+ddq>{]dЈ,.iIӥ6*\on!R›`  ӣ:*ol߾\yyyu]g4~ꩧ~x;z 6lm۶vټ/hl kgay_uŊK. 
."[o%r׮]6m{ yglط-8#Axe;nݺla]9g45ƣ2\ U}sWv,ϟoK:l_L7x +++(..d_~žs{Miz;ks19쳃UW]5A2c'cgAոQC@-$(]ojHy -0Ns(]7]j Hc}"E)- ):lt| >=H: ¹rg3fSV|R8@1.&S0KԠIB?hXʴgM&4P;MF iԂqmPA,q|V6H'`[t:af8(j/a'jy,2 x-$sK-z)8/R4Rui&]xTZBڀWkP" 6.h+mS+n/ "doZ\nd \SdN{:HG]_R d5MFCnr(D*q4ޏUQEp& nI%NhVBj$HghRikKjH*[D)@r&!h 7YzM펨ɣF:݊q鼖Zz:1!cC'<"i:e"Xx6 WWx*1|Rx ~j2J6:P#^ *~QVUgY[[ 72H: JD2y; 3De7"4Y`@^ڊ3l)b5q< =f{n=6 #Uy&gyjljC(M2̤~W2d&lbǕ\Vֶ;SߴyOᝨy!@&*'gfQ\Ԏk hNzئr k'5 5LkrcOZ'CIWM$?/i8֜uRO7^~e;y CYBL *;vvxm} :@ci`~uQ'RS, 9!|N ]DM-quqZy]2a3}VwibFa7 KºǏoIE k76tӺukj0CGA77YR5RXzUWY:-(&vj.wQGY_L;_ji۫W/`khrtb.8l@N:Y;`p~1+RKw.=LAa?}Oy~GOg=ܸP;͹5yn lv 7m7va;I/<6p}:e,͞6=>#x-1Mo\Wom'cA:Em: }f M A/Of6lcͲhVȘT`wAq ~k[o^K/յaF4ƒ4:c2vl~aك 'o􂧟~:Zt Ma o.q6Mtq:=㠇A0sr(?*F|Ox`„ AcY[l<#єI/㼹[oվw<lywldaisuk^u{m5~g]Fg?ܦ1& /`='|bG4I4{B\n6s}813 ?0|m8q9'yTvyin#UyK;*{Mh:u [ooOnߵGz`5״8^G:LOwğ[z衠YfA~~̟??xWv)hѢEPRRbrV[Ux= 痉E]t1R:!%itw^4e!nXcSN眈{npvSͿ.@/yhu}\`PCk:7/ǏJb^8y/R4e/g?ނ+ z>qD &kcZdujl')@5ꮻ ~h`kذaA޽+2 >MS7t!$|g~=} <o + ƎxYgVƾ@u޽q LSVXkYkN;4~5t~v ZҐ:W\j~|R0xĈ{+R7zjsϕ線s؀={q?| L((;p]v%Xgu윥0moTJv-8CPAAuk$|ܹv|XGCHRzD >U2,I/ .cn2о´ |ZQR㮆3Oc~yZ6itpcVYRG3vI=n4*RniD|qZmH&5f t2`W1PS}мw\s Ӳ>sz 6oƚWqDR/B :=ig  $ .]X3AA5I3Swڇ~hv፺0`5W/`(B~pFk#L+ -e.߱Hmz0"t|I@3HڵJ47О><ýxCaj84;e?hG>\=$0 !K>0ӧOmДxs̱us1֝tzFӧO}ۆnhs^s_\X I|sп O4/:),j.RK35]2K+Q2%Ts[$Ο8Ze:VZ{-ژcԨQ{-::_GA[[38æqo=noZ{5nZ?֮ދoM{bjMmxd* < dqɹA֥?k ނ,\y>UTQL8{:ǴQi߾M9s뜷luqw4m٠=Y&d&_ i?8AmA=s-pĽcAazo7ôKڷ2!Gkh3۶m[ AVmy{&yPӫ<1!LjwV5Fx}8Y;wޜ6l><2 X ы;]GQ ΫkNgw߽\Xgu О`;Λ63f̈斞 饗_ڎk i6+;GQ<vmg 7Z"hm6 t:>.Ao(m[omI{^EyNZ()"HUm8餓,>}TRƆRwPJ)p$Нp ܺ~v('sѦR:lSe9sqIFFM~I1,kdrpMoZp0}MrLA(zƜ~=H rnOυ~ر[g{;R ͦXOpl錖@S8\pnq\96K7qtt:{ǭG` :Y)>~ hzĵ@'+yhJν wzY~j駟NYW}7{R{-KR{} -ŝD oi4`XKR/6K 4tl> ф#A%RH[]{Ҕ:x`(Y::xp/ a}d1c(̧1Ge7>6lmh BJ!^oj=]Jq/}WS0k H|< sMd95O=zZ 5*;gcaÆ8_ ~X\'N nؾ_''m2$o*&]Ej,SN nVKMdh$ڵk^RH' €풒rSݻwO}E:\\ 5ySriRh 4ߎE.y=m`„ єd+ ZyMA9ǹ'9(M}$7%)]zGЖ-[Z4tqn;?'(&nsss-tn]vY/z𚜜 iGP&dqyzز ʲ.ֆ6 m(<#G`J)dz׬}!A/iվ}`5ְ3[nnRt_1~t ǑæM!U?~% ;9iG _V|z 0"gf~Й9A3\\'xy <Â={Za_0\?/E3gδѮ}ukĪ 9eQ8{M  
`۴icQs-tMVoZke/t{k9;x>m48.aq {m|Rǚ h줠<_it ڠ#RRHIl\H_#=T4Tw/%T938t>> y/N:R› K lRMI"f ҎfϞmiH>裖vE%GXd086ȤM3R I"4'R:pȑ.ǎJ !mhOKji﯁#Ջ6R峪0Xf>>cRjϕυcIH\Zwi9'Aya mm}"hD euXLj#y *C_/&R)H8߽%]:}?,>nucEmlE:}VZԛ||vшgEM(> ~ <`&9 7*gMa@g040,5~1ZK:yRrn7cxRN@O-HU6 7JqQ1"$ A^8o \c5^dTyn<_P5uH'Tt,JV 5qǶ}@""{ ] Х ~P;NOԖ 4p `IX~@ēHOϩ<v`Xne|>鈩ʡj ꩽ Fae]f.d9zs.5i&BMoR\[\LK^}qPyMMD(w]yPN BB*:_""#o>R^?FIV =O:Ra&}+4yjHC9A5Cƞ>Wj A( u }ejnIgYZ{s--nIe@%[oZNƕ 7?8 z}pK8՜1L d0 54ŠP_/sHH'[$ޤ\\--_ֿ>õod@1gH#K2ȒS.hWzyY3))bmI+#K/Z>F9 鬍gj^:rG@u0N*3%T1$PG=qmk *h˲t~շo_+X 93-g\uBj>p1v' -TH:'9K-41VyXW4 h漤8:wsDx*:lz:diޓeB>>x'qG5#Qx/" Y.N)?o*NI]j~)65htVE@8_ܸzҥ^ %w ELP@KKGktHE ;)ζIԈAH#^:LٳgYpE/NЉˉƚs`[n9iM 4y<=:s=зhk/2=(@1$V )C+c k2[)8#϶d~6 (#%4qK(5G2rOowi`H oF&l2g:4qcr~ǸєOqCq弿˖Y?єRv/Pt$A_T 7MIη'ƅsUv0k*,p]ala,uq7h`=H8q\s 7ߴip0IӧWf>Z]Ǭ +{/ž͛XavivOSN}lZ&~>hE$2YVyyy ي Kx=e̳ J$ss~)I-4XK;HcBS&N7nƌT/1M@VhfEn֩"{׭3E z0o<18lVϺ-ZXB  C4a Q4}/p5;܏&4رKѣts~茘f-u :3fDщi| Y| Х֩d8iʔ)6NHc;4Nsle8q"w mj .d%$weL2V(Av R@D?+̧f@=$YO'@t쩹m;^{5[FdIQ[9-ڍs3'5PС|˂k^Z⟓N{zx1ܨښ-RR/t;?^PMԴ?,'1z„ KԴݥ,j xTk֡aۤ[gYl\p:i&zUfI% Zm83geP0뮻ZPM qNr 9 hNA|;K)E iatRԝqnN:ES%qŐ"tB47zR9j ~nE<"j.q$ؑbs]TTTIԊLqOc8ԩo+A#? ˃P[O<5 -ROlgV{4 ֑ )O!ҙϋ QhʐhŹǹO 8s^/jΝk/uprL5w2IvV]uUa>kmg:O_W :H}Za*Tj*``jS %y:cPKIkdgg 25_8Mr 9I[s?(\b l1I]DDDDDD$KS Hɵ"D:}4. 
EDDDDDD@H(@Z(EMEG' ρ>DJ~ƃD7S,8"c~g6Tds{8~E@'|bz0u0ַ*$nv~0bCk]v}Y_O?m7]4ie]{d.~ǎ+;fJ}`_V}TP%e(7P7mh4u۵mdsϚ5}GTi>}wڵ6Fw;0.Ro(j '|J+38#ӴN(Hp|vO>c9'pB4Gꪫ>}z]uU?PEu4i$wioYg͑F k|>el[׻wo+skf47sqn϶u7]|3gδ{M?DTd;(rG]wn ~c>aw]w>hs1vuQnvk?r*=u饗vO,sQ[XXFi-#Fpp@4WM5>VRj_VPP~+bz뭃-qǧ.S?2t{V0&Sˤ>OϏ/<>=>?wKxL<9TeQ|!*k?ueRSI}O]&;>.^^emժUpM7 ,Ύhj=RU>c+ t4UCC›FS3\~gkăZƀHJ!)}v[wj7صo^9K0Pw~_T7F[ѡCV$_|e=T4 >~XOBЕu4ح*VüFS2!h__RRbJӳk-ҵl2Q4]DƨQ,Un]V"""fLCN ""A㚈Hxj̙y[DD$VIYO,_~:ڈ#5P^H9ƭ]D،3lL8Ɣ|ǣg"""""R"^z%_Eb%"("")ȕf[kv4ED~(@%M /=+oԩ6汈H&᷍^ZkhHP."KI^{Yy quוu'"" a!_hHP."K7ߴN*W^y%z&""|>@x"R)**J4[ojEDD2?~wGSDDtYl ,p?X H}S."uϭH&y7u)""RHj޼ر9sf8"""`߷-[FSEDtUK/O"""G' ZkESEDtu첋v""IQHڷo[nhHQ."WYe믻7*""lgϻvM_ Eֵjm_v[4UDD$Zy[lM_ EQvm~7cƌhHr1m$vESEDt{[j\QQQ4EDD$L~'c"" A j]w]7a„hHr=6ɮM ENnQ#A"""I6vXk;""RHrp?ܸq㢩"""3sLWRRvakGSEDt3쳏#OGSDDD^,4$"Rgݖ[niϚ5+*"" ,p=k֬ٳg4UDa(@:գG߻I&ESDDD㣏>rڵE)@:u1pk= ̄"""PXX~Gih ENhڢ>4_=#nuq_4UD(@:ŘGuODD$)Hm]n,Hih En6ZyESEDDֈ#?ܚc44}H[wuݡjc̎?>*""p~7j(曻.]DSEDt*n4{w+R4UDaeo:?ƍs'Ov;c4GDD~͟?m6w` 9"" K5"R/,SOohH;vOa\DE5"RoE~ݬY/jhH?nLbG[戈4<ՠH /NAoO?G}sIՠHZ`?={{\@}׽;6fmIՠHj֬Klx[oEDDCaa{\>}\v""ɡtw}݌3믿:t N;?M8zqIՠH[a-_.RFsDDDȑ#ݻ=X[GSEDE5" 8蠃s={hH]Nml&dQ 4[ 8Э .p{4GDDv]{/p~sI4"`h سgOK91bD4UDDL: >܆T#@I2Hꫯܖ[n7o^z%FsDDD ͩ;0vz戈$jEAl]}ՖzxWX@裏gy|TR 48ڟwͽDDDZ]tqw&Mr۷戈$jE~5׸e]֝{ヒ戈,: ={0`l͢""ɦ]Ds.??}G.,1cƸQF]w՝z.+++#"lJq_]nn{j戈nͳGbhH]DcVr7|Nm9 /B"qHtzs'սֹHuH رc-LwIlk: ͣdV^۴i E$yw4Ţ"GsDDDʛ;waܗ_~Fz,JqDcذanVpw3gN4GDDd!ڝ}6< E$vm7w/x戈;ݸq\-@אj"ɔ."F{tjFF 2,O:$zW^yŵh"#"H1>:Ք3jHDDW_} ryvEsDD2tfͲE:1bݻw4GDDڛSp?{G "h dm;S\qqq4GDD~]~C戈d>"1vewwX}q3f̈戈HS0o戈HcF$yyyޜ\s[ve""t(twꩧ+͜9}#]D#~nȑn喋抈4 E$#s9oZM:cH /|inV抈4. 
E$#1u]gA:m9K{ƃXp޵kW|u׍抈4>fMD2ڟi6uٍ3F7o""5vk׮8qkѢE4WDqR d4 r-NpoEDF ܭ:uy"$(@G/wuGsED$[;x㍮K.roq4WDqS4Ըo9j\""tsPj6,k`7dwWXӝwz^zh I4 };cs=v[7rHfEsED̟?ߚ!lҍ=u9+"Ҵ)]D^=6}ڴiޑJK?%%%nvs;^y""1 EXveݍ7 8ݯzoEKH]0tܸqnr;Oh7{lX(<7{o՚}/:([o8kΐBJ( R0`-xa ס$]YTq}i˗l1ˍ]rzwOcXmzv] hn"{TZ*dwp9џ:F59UnnUٗ Zif5e͸t] ?r{gr+GWtVߥ*?RwM7v=S2k-%""KJ"F;t&Mr;ݨvxnܹRȬqntq+0@jbW.&|f076]^4Xmh p9n_yŷ]pe {wl[fNw{p- IaX>|\\8' gWS\dzɳ;87}fifhP.|fipޱ0H::U32-?O,ssϵ,#F &m DD(@Y KZ-7on7x;6{{4wȎ[O)u{Aq,7vUըjߣmj;ݺy9/Ʋ),ˠ %3­ttjߵsi07(My^߳p9_I2's2?SwiW^ysLLCa2U0*縂(8(WW!_4npQiz{ek}4l-q3\.S+;, p ]z! U4+U-1\GSC{KϟfV煯-MOmK__iH-?swv sOϺ!C-[FKH]P."Znmm3iٽ{wWXXh2QId+wX,Qr0Pn=,{a{gYŒ(ͶtŮaɾ2~cݰJ3n0Ul:ӣwJ 0R11ukF,Uk) zioWNT%3|7h Kgz}QO4u'"R5OPTTl2, 9|- y.?r hB%AANyAal^^aaѼ N^ҥˆt,+Wu}CU^ Ǧ/\MKy/<*QY,Y \|Ar6/~=oJV8O*wMCM>77o^0|` 6^z`n~?Ꮯ"ڡ=>}[}-_~. j Xؕ4Ѯw2W#%V;~׺>j̏;8Zkh)OU.uDmf76,W_}-),yp"R~=C㏷"G4Rp."pԡUVYj_|E6^z]v3gN,T"~ww{ֱ'^z%ksFEKHCQH=_l aR߿KSNaYgJD6GTO>ĆtPڐm-Rc < f3D>lGKH(@i@3|-w}}ǝx≖J[vnԩViw#<ҝtI6,$t7ǻ;ڃ.605x[y啣%ED*۾KΝ벳^z5#DD/"" )SXcƌq ,Tڋr0Hj/*"{=M6;3vqGwi8@{""FHB͞=ڎҹ;-2nԩ]"/+׹¤8/rW+hcK}m=ouko駟n DD2YI6mڸʽo6nܸqF}=t?voC/ 㺹9nS #0\S~v_M?GnvpC 1;""KHQ3Pl'Nt&L?SwYgvq'tϏ^QSԒ܊vrM 4d+v]՚7cꫯs9C=ԾDrԨQ6on6^!""LH.]Xm:mMov*x:b-\`5Q/ )p%yєmۅa0J:eG q9Iݯ￷()'馛,kwޱ޽{5\3z4 ED2RK-ZhN=T/zz{?V[me5m3Fmh|MazDiYO/ׯt~7*{}t[:\.\h^:mΫ=[-5o8e[z߿ïoÖ,U/X~NOw+֪/{O&.w~d̘1 4uFNDD$/?tAAaJysyAa40./9..||r)6? h~l6?/,)ȉ/ ŸoU}nBPY9~%ANl}~](]g>{_S~ZxR-[+RN;ٳ""]DW;R_iJ*<F^uqewȡ ݥ-Mg@[>pa/ݨ/JxR4 -ty5Htƹ9`u+v*p4q;:6ݬ>W51kr\ vws݌TXϪJޛ82c.r7i$Ӝ֭[GsED)P."ȐZFO<ڸu&%;;8ѐ]~} CEp7k_FF'ĽK[N."D)@iĖ]vY׮]٢+qۥoN:2\X飤 'ِf*ew0UgvJ/~VG4m EDџ5R/Jl,._4(<p59UI:Kݬ>/WZֵbc~WJ>u}w&NHe=tSC[}˵x% D_m rܰh=Y\hfMtsCB{4ֳ|.'ֺL_G갎rQ\>/nC]Iѥ9}k]c]8EҪgU.""e͋⠊դewt0j4itT.""i7P@r:xpd(ՠHE_im;WZ2 7SH]DDDDDD$."""""" EDDDDDD@H(@I""g~EXY-zCP/+w]ܐYĄɄ}i4U.Erl77 [z a9$pA0o[Z0Px6 Mo2]DI*rgQRr.ն{bvݲZjΒutf>t .sxFI4!Զ] sD.;3>8TTnv/n9U͋,+E/8ՠ4yܐc]q^Kg.Y5΍.qРtE]~q+BS+FPeҬm]^h7.T.pahH (JKӞctqtlƚ/SIJ3, epqoOUג|؍NpUS筈4M ED~.j gSsś/WbS0Zp\j˒jϑ%9P:gGW5&2Q."҄.jt'w⠒a@ձ]y.q]M+SDÂ,j9B]…. 
:n08s.Z7kfxr\Ԗ%=՞#Kzģv yɐGDDIHU{s/g9=3]b?xaN4tXoZErݰgٿis#[$ͺkdӋt5KkB0t+ ױ0 5ntx=.|D%Ƿ2U_u<+ Dym]SVDD$]D) AWAZY*.xMa=Gem ]<|:4Mܼ>_RO#fLvby`%`ח>*ۀz*B0:yirleѫ+͈Ҥ+((~\ҫ58UZKGm᫚ϯlƺKxފe]0p]jCte+%ZDD)R ey=""Fi{EDDD)]DDDDDD$T."""""" EDDDDDD@H(@I""F뗕EOkrEextqCfU>-30nb8H]D5u8zt^77 $!bi^m J]Lk4J(@iѮ< ]^q;6V}ݶ{o3llpŮkMkV_M ]e2Uc~oMRDDt&?9^[sxFI<Զ]6s_6hy$ˎ,nZP1ICDiAU4 /& Mkp#.馕RoMp[E~۾zU)ׯm}^na*~VL*_ZɾWsc_x][:GUǯl5{`7TP6'a~5t^s[חK{X~Qq&- )WM) 祛aJ uESR__1]͉-Sq;տ&rhCoV^KU\fQ[}OL\m{-[t~|7߇eB|.m,JW4O|{,ӽʷWi{KvŏHzAiwyitdwC3F縂ֽF&KzՋ]^h7.#yC-FjiJs],-z65G(MgէC:b7h?{wOY@mhE]~q+P-pJ37 e!1x{|fuq.Ysرt EAt WX:pVn^x5 PDZ\|/f,{W+(tԋR.""*vۅZ\jra?-k6㬙r\z#8w Qiq\4ˍ].B s =ZWaG ]DD*R."ĕjRK\fYntRvYvws]/kҩ,?&,kHӬ-]ʲ.emsn&#S>,X zkruqoUѫbM֘n'wXe+I_UT/pٙ2\\&{cI GO'uKߣ?<=r+HuKJ)m um*> RwOICE3JDDDKHT@O'p+ Jk |V_͓ ؍iqQE A`77Ԣ*\a,60!Ƽ}T.paٶ Q_[49T!B6{ ]KwDDDH,{Wm{;s B1?brɌ0EX2%`˥ng%O>vq.Ԝt&)Qv8 .*Mo/?dAnirPgGAnv㦻5 53ܫ0<;u,>J lP8ǮSUoADD$4Im].4 ~Xn?W:dRqkpZzXs=vQ\7,e:а܅5PYmM _AUiqkxjh}-d""MTm˵vЭ6ط+憖͛.^6ܥy;uVsy٥wDtkmw_AN}:n#vn~y|^ǽfǮϥ4 5(o!OH țҨqNj3.Yر"# upIEU3d/iZ\P[繈H=P HSն띳zGw'f ܧ༁I0""MV[<7lВ}M&+:o m.""B)""""""" tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$$tP."""""" EDDDDDD@H(@I""""""" ]DDDDDD$w&pZMo?ЪEHE$Dhf-م,QldIJ% -MZrtx{3M5Ӽ3~>w۹y#H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H B"OΕ)S_~q6lχv+X} n,Y֭[gg}B!B!\#п;7w\?~FK:-Zԕ/_ބyf~Es͚5U\>7jȍ=>u>g4;ݸq+Əʕ+mܚ5km˖-v{oWHWD Wdɔdz2{ll2?%^xq״iSwm?hm_|Ѝ;֭Z*Z :9wgXGٲe5B!B{t,؉eXֱ?3&~hǴiܗ_~iFi *زW{Xg̘qw ':8aÆ:˽{&iԕKW^ļB!BdRcE?tO?kܸ 1DD%1k׮u6mNVBXϡZjO7/b3A+L;W\aXķz;#:ݻ*gۍ7FKwԩS'v@H@^lʕ+ݔ)S-b@./8]9\O:~g!B!D%W7k}9m۶mݠAlAO?MI :t{ҥKM#jժ宽ZWT)b]3gL/7zK.1X~wp={:*Z/VZUxs NϽ{ixtL`mG zws:*:wly8뮻A~:v~oV!.C]tH:uu>|]3Bxv!}O@߅7i$z^z·>cw16n!CضthwmRYz\ܣ>j |w o9E{~׿ļөB{Ķ/PtiB!zn믧#¦PDKc=]:u 6P-ZB>v:裃 ضɔu f͚  Ŧ-ѣmDzSO=%)ǮQF hB;ɗ1x |O{l_۶B ^x )s΍L C-ݞy%KLٮK.֭[m׿sA(Ss _~Ŷ3fLʺ… ,[.w}O>Įw|R޽{aPԧskqbصk}`ܸqG 
_boFvRB!BA!4K.`ja1>}Y7kfUC;"[f],s̱Xm96lp9g{O>a~WwW6h,ӜV_~zqp'o^9ps_|Ejw\`VP:-ɸ{K'DHHwqYb p\ŋͣ"C#F;w7vx[#NGjPoķ/ p=glBk͛7woysN~}2؆": $UF@ !B!'R#.P3{҃ %.9V .MdDzC~૯>9z 6I&X3cia(=z(F*[e=,^bE=9Hm o__':+E0`uZ޶$B!~J]O$w "eM 4 E2b'kwhݺp +4?4BrOK2e"[Do5_xh6$ڷom,r `'A͵bΐybSt0̜_m*,V\WgXʩ'Ѐ+V61I7|^!Bȕ=\{"~\˃$iõAp"8i^w g 7|ӄ :VP*mA/X=J.Pow|K1X)zLxx8 䩓|6:Wbq Rx?uX-ywl2hX?m=N}=TbOXO6=U B!Bd_$#H֪U+ 4^\ 9\I&5}رgϞ)"wt-qKFar1z)g[7}r Hr3Xf{(/1c_޽{1뱏FvN9ɒ%倨Ätl b:5BL'ʝwޙC bS\ݹtN&{:mh`naÆ  kҦi8G: Nxx‚;!֕H_ !B!-x\d5k-ݞP~xvm۶ `ƍG *T=UVpy祬;ꨣ~:Y2YfEG Pԥ,۷o_֮]k裏FkG((S=餓۠nu(X`PRlٲAڮ_W~`ժUϟԭ[7emI&qv~,oѢGOvRB!BJcĢʔHJpVWq˸ㆌe%ZVeʔqC qqM*ʘX){oܻ5kͥes3D~⛱r8/b|;K9O\o($pĀ$xcwɵ`>Q;B!Bd/BGs4!g%J ݊qo8qQLb9;h/6!I|oPg$@+P}g5h ^Abz{ظXb\׬Y?ÃLML::.Rfuiue4\$!Μ2RmŊ\{9nk_\5Jo>>|׈~{c_:RԈx?>"cc=ӹ?B!Bd_r@N̞=D9b2q"I$3#B!"!$⡇r]vuM65qg3B!BzѫW/j33LY㪅B!B<$Г38bXY|Nd'Ɲ B!(]!b !#),|$]LoR]d$dĀ dFP!B,Bga4W^yF%e6L#\|_l7|N=Tϙ sF&Mի2B!H !r g8p@O?D9CyO<۷ oӦ-gB??6d߮Xw͛7АoWUB! t!D0'lj̓`ڴimcO:՝r)._|qƍq~g} "pY_ o%1)7zhs|[ c8f̘hs .4rDi:uܣ>jbI_fkذ6lm3.uGW_}%zE5k?Awlٲ=lٲVU>|ve7x;3͢_LgРAV_Q?/+V-"zv[:WV͝uYۉX*sq'x>p&;uN:$WF Wvֿv˗kɵ{嗻rY={ǎSD>e_r%VO?<tb.ׯoͱkL!"#.qTZr!f|駮I&&ƏbA/m[f,[nM8D6 Gazh -̀F8wyǬoDz߳gO¿{?6kw,Ӯ];7awUW@ :8?|y\Ν[_kQ |:)ԙ"9O,n<j#N4j]znܹf+\`׎k5;|ك|I[Ϻ>:IsD\xUR΅ quH;B!.H !rdX*N?`k/nXYe?#,{2<|[rX (`*U2 WseyZ=tsX7vi&[jet,XcŻ|&;=VkD=]wX <" 8ꫯv*T :?vׯ_oG+8|'l}=O<㎳ztMB̻}LB!H !r$qⵉM0V\D5"ьzرYfĩF{pF,xdG\1~85y#)2=SbSv_F.Q8ۛ=uhbʅ p @ӱEu͉G^@ : 87EX`{ ͻ!B"Gdlp&۱2!RfH ώD!b,7|c9'ڃ'Q³czb<KZ9S;ZM?%Cd/"3y!=#GZ]|M+t|/^]{NB:!B"GqG2d/*ďc=ꨣ,:-b3)7 o֬s3.Xͽ0Ǣ ~lsܷ㳖c]: $Ku\%QDs,}p`CSO=e{%/?aa' =qę~w59"}gafΜjB!DvA]#A4q&6 A"_{5θ'wj\{,X֬hѢܿ $2C#"T܉& :—lR$t'^/"㉵ǂ': q㏻ӱs^m۶n ʱv-[Z}dڵu$ϵާOr)@'s:3|=[tՏ8GŠ !"#.ȱʎP,UY=|c5C$p#Q<7k;bʣӀjd'DzN;gܵD#wXj6ڕW^i!߬Y-1 @D3ċk6 XqW'Μ7w}V>GFy^#a԰s= Lcb)_%_~ө!t//B+|[BRĽ i]Vw U=Q 9 cB^H/_h6HXG2:\qO B!";!.B!B$rqB!B! t!B!" @B!B! t!B!" @B!B! 
t!DaD?w= &B@B8-ZZn?p?h鞅q?|Wg}fcԩZ8pt!B"GuV#C9]~#Sb_l… W\~hɿNم8x=nR!B>D"3sLwM7'|ҕ/_>ZԩS[o}]qKvsY^ .l./v{[f{饗ܣ> V^*Tc_nСcǚsVr~-z})W^2~m+Vtp}g^vw}]r]>}?V~6ol"m'Mz!7߸#v/~#>G}͛>l[&Bݐ]3f N8!Z6mjXb&T=\s_dѣ[ҥKM#f/ȈO<,^zLgǎM`S3ܙgi2=P}&9V6mܝwi&6۵k0vPB&۔Ay /:u;p7p۴i#^*׻woوi΅PhQ;kfA s W_}I-~wމرNf͚yQ_m޼om#Ygei_s纇~u]]L)eRs9l1<B!Ȗ/;Bc_ `6nݺvA(PڲѣG5 BQL8ꫯBkG8裏l> _|1c[H}є}̙":x7m(76 hرc̯y;,E0 (\p0aP[(Ba-[6rJۆzR^z5 oиq ȶ?e>R?/xZjb8F(SN9>?#AѢE%K<޲:RN:)hذBo5mnuSg?y4h]wϷ~ϟ?!Bd7dAB"}[4`"vキa%&p=7${_}%#I@x>HzGgAj4iĒQ&'vb=|SZe!B$3BufEeqD# q;k5FdN ɓ'[ɜ͟??qdjx"`a&@VDpN߷o_ۻJ*b?*BH !-ZsvNèBuvڐ!wd'Fpo)@yO;4wm:no"+#ך2(m۶Y?ج|d#vB!ɌABdK# ĺk3ccYܶ$,ch1D-WAClq$#Bk<ƎNbu52,#DW\)eM=845l!2/b!; XNY΁bA\xq[c9jժYٳf21<9_8qީR@t@PG&b9V}2pw0tk 4 '!< XY |gԃ\wXB!DWBdpF?cҌpCL"Ldj0oIXOi/{O1B! ! XD ŔDm35i\E?zax<㱪3axB0lB!D!.HzpMĒ_veM<7IjbI&R;fSNaމ<&p'7Y  'hD&BdB.BbĖl"\Ů@0tZB!ĞD]0ѷre)ʋrv=鴭3gO?ʕ+m!B5H !<#_6vnݺuJ3oaI&G!BB]TK.qF! `Z 03u5K ]!bg@Bd(~;wh]"ӺukwGGsw!BA]anٲelٲ.o޼R!r&Eq]tIՍ]]!;"ݖ-[ܱ-"g=5ojժEB!҇"`H)G-"g+LhE?#OB!C]aon}uKԩ;ꨣmp`ќB!D@Bdo\ţ%B|[n6W}B!H?B cݺun}q ;h׮v.roB!Į .0-Zd]"qAo1s"B!.W}ou_}[dA-(Z;S]͚5Mٺu1c//c}r?47w2[ 0Tٍ;5mUT^Rټy2e/^8Zٸq;СCݦM\ڵ]Zl=~pW\uRd+VpGvSNuOT& (6lN8"v5k;ɓ݆ "7AZ]zY,Nr[n 0{?nٳgOŰj*ww}ڋ|ح^:˾o>yA@s]~mkx`+m#5Z~}4sHg.2ufe Kc?ýu'xBa5;>s^{>u J>xgd Gt^ڶm N?tw饗RJ{9sLyYgz+z 5\N:EwWS6C;~Y=8`ƍR]6袋PkA-L>=xP^`ժUR]YbEаa` -{\pA hȭ/A(҃%Ǧ_5Z#v;8S}'O"7_^{svmR#ZnmW^y%زeKTVmٳi&ZCb‹n&Yݻ[3=¹P se7.\w}GEKs7&M2O ܙoaٲe>CjXSN,ƍs`ֈ3fy^xᅹ&K~ (Q}6"QV#Z%tPlY/hNo,S Gʕ+g.J-s %"}Bx݇Ύ -IŎ5bŊI#,pHH̾΅ή38vYM6RxLz$sKezQ9r̃d4rJs ƏoÇ[zE~ʔ)wu~_l}VaJ t^o9%K[x@P5}駖ٯ,}1bnfwE}…nÆ B~vO{m(9ط~ka W)D0aq5~̞=ۍ;6~֭HKk׮u_~6ıB~ 3O:{XƽB;ƒOg<`70ꫯO>s=)#<o:{1>uy>׮]; aMH@ى'h~顇2w߃܈B!-XUR%Z2?k޼{ݰaLXt,էzY1Ǿv/YΝ;@0G#RP!:bj1n<￿#nN8wǚPbW r HSZgf7;2O˄N('x'{xI 7CPsOrգG2娣2pz9scpҏps?G\XN3#91~7;/븬#tO< yG(#Xָ#?o>}>n{<"zKg"rV_GC`1$ Y޹WvM6{ ~"$ăW  m/qD/34Bd3H]<H]j=Y 3'?Έq,énݺejR^C`V^ݬD91YC`K60ĺȒ͝%kN5i{ҥb5sL-.XpeUtlE&shS6VCwu1o ϛ7IJmC!N!  
2=y=IF;h˸9ųĕ;c{9̞=m?}t~ԩdҥA ۴icm,)PbTR%_.˗۶Pص1vqnCaD^ EP[m۶{RJ7ע{vԍ"#r/Yl>3{݁0'9-[XMv —`ڵmQ{ 7?S2uq{XXO x|hz]̳ôl~_.֢,$:w;ŷv@Qr7w9Kx}8e>er'㽶'@"#B`WX 3/D-JyMƳpBC?o"Ϗw" X!Ɯ9sl_ܽ{"xQ pC^@Lݻwt8 R'QFH aG'gm)QY";Cl2ֱ(͏!h[ng\`B"qǍ<A%fltVJK;uIËqណ+-@p;U$8CqޅGH;GsDga j!B$/"Pd"&a]G ~0IDAT #$:;2ڵ3Is>bU"k۲Cf }B=$BPX%Iˆ?,Xȱfԟ#" y` EX<,/ѵ7q{XaX$>Pо`-?#v}G 6UqHMŃ傸@: BMYxQǃv{XV#/OC|  /E8uPBH L  2⒑e^"j +;Y52P6nX޽ Qd!P?nt$(nz^xP/Y%=Yj[ln6[ND$r#avCm);!!b4JLE;C$$)$t2HswXr }L'Ok:B!H.$EpM56oLO"shoo`n md{$; CCgtDqsNlC;uBwm@0RW! @`xaN^PL$$" qXI 22 7d'~\r kzf:$RC bpAGz!W^IFg,hXHlv|Y"$8CPchu 2 )>#L<:."A/(/aС#B)k&i"N8ٳm$Lu7c~}+9;c)MAQ&X9>xC9c:HH;3/bL#sJy?XzDb:Ezy9nt:P.aBgjv@\ /rfȗ1Bn=r,t =T) ϕ/2>p,QAoAE'B!tЖ2k:,xBXҎhutX ^D7K@;=_o唅69B;uD/+Sg%y:wHpEz}}ʼnm䍏!76N}BY8ϧI& 0ܯwvedSg=h۴S7'w?ⷁƗ?OzZsxiF'r,;:vVbT~|rTޙFɹـo%x]W:x?o|_jm3v/'~;{gOo"7H}\OB9p=7$$ &ªbMP#>s-Cqmyª#9TXN;+N;͆ %k˽qٞPCa"úN:Yhޞe{т`Ax%o3*lAnһܭ[7zEYfJ/C>}o|04'9Ղ;  >sG^zbG]" Ȃ.-KWDĂ5*Y%o߾ހ)dc=+X~cm{cѣGz~yQ7Kxe"tXద=DPDۻĉm f׎o>e+eʡ.C_mC1P:ˎK]8P=*cJrݿc)A Z X$^‚X|h/m5ig3 oƣ-^⋃ڵkoٵkWgxxA(A0tP#@?3m6W]ukXнg \}A5?V|~'lt#\R%{*dA 4fID =Xac9zu׈1z;IGKD1=Y M/ڨQ#ťz1EA/zI%E!exǑh<!S?=Cmǒ t{k<SJ̀cއVk,ټD^ H$!zXpa^ Ubgy_ #هzװlo󂎀25rg[Pc$@彊h`i:̽%%rc!bxgk׮2dq;xvQ  9  i-13xO2\ {KFA =a:xroS!xn2#No/1BE=edj{/b档{gqq<g|ܧ);vhݏBhcNrCaBN/%}V=/),{~zd 4x`zuIcAOnz< 1I(AcL1M&+9m63GzQENN[^T.3BO~Os3GZO\ywp^i D0mwo;: ʼn,ba6X`_΋ I/YNc"KΝN?4MV"$$cDìd=zD >,QI֭kB7 lٲTyy;wDҬz$qS-Z4U% $- {8Sdr)Il/JĥH003Ɂԩ|ђ Nۧ$qO?'H /E,Ix!!m( +҆3;3,Yj޼y-mw3 wVZ$q趡x vIxg>}m3t1˕+gGR9?(aZnmɧug8Zu7Dcԅ KF۴ic}g;.ۇ6 (Iǘwߠlٲ9Iowׄ2Bmlm \O 0 "ÿ|}N$c4Γ /M2/v591+4` !r'X:ᮊՌPXτdWrmx FbA#\0>{^ҷo_s)ǝI尪y -]ށ~ VkCl r{ޛP#3uc5,vӶlҒ]属,Lr-\Z +<.,Q>A~ȻX(jՖqODlyHJx=8xk8|_=Az7v S- C T}B,4?bAaݕ{k ,ers ?$P !! 
`HEΑđuk¢aFNR\ *=L= K,ᄋ-Y-Xdvօ3gC"8pvC[б>ܮA~wx2`c^ KM3k,߿ݿgOՂ|I4q5c zs<'4+$%0Z/,1)^iĈKu0VQ[ophA+/Xy6ԯ_߾;+o"0kz^چ Ne^~a>LA(l)Cl ώP$>lsd„ fA2eD6r; ^%ìh"Zx :Y"]*+M"'ZW|o[2-@fk-$}o z5}& L$= b@‘wֳrzkJya)Xn3K ba\9hXÂy~Ir`X͚5˖1}qLa qo'ْX:=f "A3qX\|u(e c-Zd#]`–+c2けz\va@OA׈e2zd/%T dE8KQzŰ⚅"<3; ߃$"-h~iD\p!H2ʶu%\ǜYs1v6^2o `%J{`#Ltґ+ޝvW\>,ٸbSG۠M 6C *Vh;7ypN[!rz׉"ft!2D vUVyDqN>x `x,~x@:+#,. >Susα(!HBddM XB@> vܳgOELk#Ʊө~@Tz^iX')k*zNt:#`d"25#n|'!+#j '\C \49^-XPLr2 _|qs&' $޷DB]d8١WRŋ` e)}[xud_z{cG 4 ;;+$Vr:xb܎ׯ_o0LIpV߹[: p.x_qN zS,}F;9 t77wxky l-mc3bI`}>J(DF@N[%B\9.%y,X.޸"Ķ;% z;PG:Ç[<i)TP_poݑ"gw.!rBU\1NFdζX"B!:&\{? |b˗/OI%DF@Gb}]b%ym 7*UNjHİ"iϟ/MuO>IIPeqmg= ECg^*o-aNBg'<;"ّ@i b1,Q9^xᅔccꪫL $"֏$"a $#Qcc2$%%cbd"wΜ9Df|d:a QdodKfݻ[9!]2vj,oƢ2߹sgt0t(G36'f+?I}LBl3}R A-RG]d8rʞ`F8  GiZIWn 8B81k,?b=}bnΊB ]:~ȾN'-3N;M|Ӗufb~BF`;9#}݋D`gH;E&Mdea>@S'|Lj^0Cr!@uԩV!%0i("2hU[s)0nG6~)v@xѲ= NA`%"\r?h.ws5X _ģ%֭[Ᏹ}Nmi&[Z9,߸qc'+k֬ WԫW/S4Ҿ}Xb-Illذbj]'jnݺtŗxX}eC3w1e̛7/Xp-0H;ׯ_vG  `&M͛7ol>5K. >~W\I.nƠ`I.3b;a'֜im"Ha9n/71;Cr6Rŗ[e7$c;6#)fc&Mgի->C%Kd%ZgaO EY<ljE]d*p,xW؄0,[2e,E=I(^]֭݅^hC|֮]#Q\mo0bPD[f=b" c@H"p9Ml}<ɳ*mժU4}0I<'-Z@7B!oۘeJBFBKe},SO=eX@|31/뭷rGq? j׮%X$È= mQ;谢=|=8w}6|(Сjٗ/'BXM6*tF QS$VmԨ{g̢*BM0$3[+$cHppB!D,"s$%(+eLDюU oVoEv 7`:uFYEѢE-˃:aXq/g\L$;C~"C~mv/Q ZwQ ^`ԅHs:# lt% B!T^I@8Չ/o֬{ֵ̪kWnD3Ĥ.]͗O><`%~&b[)uO,kQ F{erS? 
j‰&?m8!![i^ݒ?bdюwH-iƌF?i$"Y qU!vX!=$I"@zsB)^{^{RǰQĶCs#XR:+%$U]wA 9u^€wX KR'xp  !ҿ″zЂc:nv$I ю&3z5,"CtML(Hgt=XԩSta(4AH}A't6c܃.ěޓAýM/{7vi)ћY?,KY0yCd0(Hc /Ը;!+MJlR|wFfvLux|ݺu'5 uNę{6X˄t>3ӈt,}k֬zpa)M" l0{p ۍU[T[v=z^xi&/8SNTv+[7noB{;~x7ydפI[ IG};w3gNTv{{O/Y&GO?ݕ)Sw}C|Z/gA|~3*ZAUV=3v̵&7Zd>[lqCuwqo>Z袋\]Ϟ=ݘ1cR툊i]ksijarM7#FN;͝xZ;|EnoLOZWw?'9aw}:t`V.]}7Z+ꫯvVrݺu3o8eZ.'}*Co>Ϛ5˵m:ipV,3Fꫮs3ֲeKנAXAhX `"gu\rIVUЮ];yf37v fo I 3!JGx z99~Xsrk3/FNn_|ʗ/o~xV.+Vpw#SP&M5u=}rتU+{5j3"wieɒ%k׮OSzN ;.]:/MSj&~>=ϧgl?mӳM|Z۔)S&>|}_n5*TR|zO6&~i}>MOmڧzqoVx`СQG5mӳM|zO6@OY~vS[l?mӳM\|… !2=hӦM'Ot}y퓽)^x6mZH/k֬ o޼iͧgl?mӳM|zO6&~>=hEOzŞB jժ.?V[nII3I7rcwNnnʔ)ђsϙI'5kf -mo,%JHI$2opř"QT)WfM }+Wt3f̰a"q衇ڵk… ''RB/Ғ-Z4Z*D.sϝ:ur}B!E*B XSx 3phΙ'B!".0pBpDѣG!+B!".0#5qBT>|hn䚈B!A]a,Y\I%Dn$q(B!v t!DQreNo!rX:rsB!".0*V{o7qh9|S2~> !B t!DqArXGTɰaԩStB!D@Bd('tŠϙ3'Z"Dc͚5Gq[l1$B>$ЅJz܆ ɓ%B<^|E7{h.1ND!BWBAL2YfL8!r~?[b_7|cí-Z͛׭\p^Hh/!B@Bd( ;ըQ#N8^"SOo=s1׻U͛7/Z*D^s7nt۷!BB gs^zY-"$C,Qy!BBL8˻e˖EKY 0F.ꪫ-B!5$ЅB\Νڵk-õ9~C/VˢB!"С+U{O?-"CvCNkB!"ȟ?;SFwc@˕-[]qR!BC]iƃ8p0aBTwVZn&YυBa'5kUf ]1b;]:uܨQ܁B!='$,Bҥ~m{FEk^,Yĵjm޼ 4r,!BdrqBd:[nU^=n!T3gZjEkB!2 !3CwDkB!2 t!wݺus 6t󎍗.DC9) WLhB!D!w!np^z%"95kŝɓǽB!4$Ѕ{qnР{݃>Ex vZ38#Z#BH !8#GW^!e˖8]w.hB!Dt!D0/_=#kZ5o܍;u=!BH !/ұVy睖%{ Yt3f:޽{+cB!2!*U#FؐksƍBYpg'Μ.]Xs!B)dAB$sqm۶u&Mr]t{"sQ8j(sg_dŜY$o!BIdAB$˗wÆ s~2d L lݺՒAb co]\!{ t!DpGwyu}嗮QF.GYlذu]uUnwo%+w}-B!B4~_Iw?3B݇N)SxsuqGp{WB!ĞE]t0Vͷ~UZ\ާMm!Į>}X28UP!B!"kP8!DRkwq/^w!Y[ 5G}9OB!D!.Hzlbݺus;\^\ڵ-H{]=իYgx WLh !BG]mX|3fXۛRm7ov[m%JX!5B!D!."\|M hٲ{m!BdDIIm_,Y->,]>{hαh"ץK"mF]vu 6tLĜ.q.BdF]Tv]Xհb KvZ, XBu!?s<;,wI'DpB!Hz$Ѕ 4[nM sEy湻@>_|ӦM)Cb9~qՇy_1ˌߦZ`ꘈz|ύi哚.vԎmzOzX{XN}S,;YxWbEsϹB 'Ov-ZP!Bd.Toɦݶm[s?klq=GըQ,;vtcǎu͚5s]v'׭[M4ɽn̙&nҌ+*N=T_}ذan„ V:m܆ZT)/.СCmu?yZncېG5ngm HXv)DozV^ԁl/o:,+qHNׯ_7nl,X`ׂ|7Z`W^1k]ɒ%mʕ+Ms>tPwɨQ܀ŋ!HOl=ߋ G@uD\s|#.[{G4inpy!BddABd*AĴwK8q 9Dm˾+7߸EO?Ԓ{+VիW#UfzGE^z2IR0~FܝwJ.ZjKۗpDhKl" OaG<֙>B!ȶ/VBi,X S0~xgЮ]c ~'[v 7gy}R /_n_͇aРAlA(rP0njCaԯ_?E-{P\k֬OGydP;e Bl#v EgP@ӷo_ѣvZP\937և5زe- wPn`Ӄ|'Nu:C…P|<gлw ђ ի}7ףz^ Yfw5{`С̜9qO>dp=ر4i~v:ud_n}goƌ&L͛ҥK[N9+WcƌB!r 
!2?UTM:WiݴiSB!}r憞?~sob믿'Twz᱐{|wz;Al|nDZrs C5vOڵOB4~U[+Wl Y(3El nCnܹ)W~rr<p@X% ?8g\< Lv{#~p<_~JȀB!Dn@]:˖-3X#dD!1шDg.g>օc=H#YggE; pEW|kD5qדHhA"X96>kt Pg9;81~%lWx? C#:iks'X|-W^Og r J8!B"bO?7IH7}t !GLyz@(&a:b!cxJXG= |Mz- _J$Zݙ莇t?y+~W[8=yX֬Ycᙀe{?S"El{,ݓ=y衇lH;!fޮ];*mܸq^!"7 .tpF| 'زrʙK:cݺc˖/_nnݺh_ .l5kte9IpFb9&)|@NJ@'Q](+ϖN3øK2]1wÄ!#N# s2O}LsxseplOd/1HFR9:8(:1(,$~[!" .tΒq(^e?/Lybݨ">Np`]pf&,dHǍK.9bΰboLrb}gp:{饗lxE K!x 3:÷ᖍ;7,dEGR?{:Buo sΉԛ,3dYqbdD-Xq\ɮީS';:uUJa1ԇ:R^B!r]G@ 7h،!NA&|au3<N2,$nnXo'qnՈظt,X !.u>|FƒaXqڋea$0>#ܪUsLq- #ķ !B܀B!B]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!v^ڲg47nL]qxB!"{ .û]z6cg4~bŊhIa5[hQD!Bd$ЅgpUƌx ׺uhiQ^=/k׮51ϧFnذaCy!B=@B`N[jܳhs6lpsu?A"~?o޼y￷:>zh7j(0%׬YcϜ9-_< s·v;^{eP9s9zۻoڴtAwOv[?~ʵ:|W?O9q=(zPu_݂ l~ߓV\iʤԾ#΁hƌ}G@vlCr?e#B _nb ER ђ ^Pz`Ĉ6_rJYV+BԪU+EeP\bŊ믿/ 5kfUR} ]иq A #<2x͛7[=+W 5j :4k{>àFA孾ӧOe˖W^,VƠA8G_tਣ ֭|嗶>zG6N:)o{sL [[#*U.'pBx`Ajն;1cw?ȵk{~vZj*+mڶmRVZ=PРA ƶM,h7Zص袋8 3ϴ7lA(M%JPL 6nhy;F݃}_~ :묠lٲVCo!Bdw$Ѕ"q F-  ŋ7Q CL#Oa\ 6 {+VHヒJܞnٳg2L,]4ׯ_?>oڅ ،gѢE&J=zguZ~}пdɒV *tٶSK׮]lڴ)Mz~jJ_:u\\'|2ZX=N=;y̙3իW|u8L:_p&'L`7ް5p@[O¼cǎ_me~v耡s B!9 !v$_pkƥ=9.nĈXb.nc8nw￿ eTJ4WpawyqƹPYfǻGyլYӅBuUR 4 ź Bs'j~뭷PZ:;֐!C2p?'FPC9ą"څB rxb7es?޽mQwWs=ׅۖ~pPwKvuݺurOwB\ifח:M8p?S\F\[lVkK? pM4qcǎuժUk1oFKfG;hժ;餓,ue?΍rq:q/v#B@Bbo?Zj͛~<KL\M7dmXOq"_駟n`Fߍ5m:R+/|D'_|&Lcr7| m_O?zm1ݏ>v1c9uѽk|+NN'<;AEnTX1e3:7!O/^{uLzꩮAs?{k4m4tl(W}wQ^ƍ{gDL!";".p}Q1waեKD2d"+t}ω@hKfϞ<@$TKO0r- 뮻-v <5kŝNDzz`ؤq+WI@G }ݺu_mެY#2CYRN8 f]o-B!7B"o޼&pf]p\џ}YVdR,>#EtǃEX~a7tPb(YcˣCkz(SYc K-`HX <|8,ŸVv: 8L/ֹV1p@;FZ'u&,tx(I 3'Cu8?#ÇwJz?;z% ntp\={>2ꓑ_!"' .-*Ur_L-y׮]D:+*@d=.Ј8,2z[<2쇀 i\> wi)a B57c8|ІN?mF[E]dM9ԉaƅwզMcSyms>\: =GN:(828W\@7"j\ϧOna"r:T>3֛ܰ8w#C@t0| zzqX_y啨ms1 YB!D |BW_,d^g諗^zɆzm=Y]paMpxΐZŊL'zj08nǎ-C;y=s)1<s~6Ll9o }3Ȟ?ÌzPn^xVdwɜӦM 0vtbI9o2; k[[׾}{JϜ'!akͶx/p8S7Ldže|'\'d9ܱ^CȊ|WxH"ێ19$5+m=mq7@(bڙ/er ؇B!9 t!~5X9B+˘~[;!Bg@B\tɴ+xwߝwB!x$ЅBwuǭwwZ/B! 
B!B!DaքB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$@]!B!H$ЅB!B$` $,2{suGu}3f:u}YkР}NL4ɍ?ᆴEC!B!HVW_u v۲eK&({/Nc| '+>{:t^y%KFk-X 6M2n֭J*6m)R$zK.u<[~Mr)hѢn"B! s#/B9شiSѬY3+o B- 믿>X@f<7o>àbŊ)K4{7|n)e}6mvPjU[Wre;gʕA׮]?e)u]]6+~ᇠp >|ȑ#'S!B!'2=~RJM.ڡt@5{vn%պN:.VZ[׼ys7qD[b}^z} /tO?۴i-]~}W^=WLzʼv`!XՕ;0[×_~iVomܸі !B!DvD=7i^4vX׿W\9}cA"jq2D(r 6ı͛~ݺu}Ÿ9Εx&aѢE2W1|sc=7lׄɓr<&>q)7ްe?Rqƙ=>}=zؼB!BdG$C"c'yMTzϰj*ץKحiӦ]jՔDׯՁusLcF V_ >XڵkߖM  , (+O?dŸqCeh|Nw=صEsM?pwN:vpB tA&ԩ.Ƚ;)"_~b=tw}6=cR!B!HN$w0Q?ܷ q5.C\}h띧qflSf,&L08^&=hРhι]B EsCBɓ'{-!ێX?.]]ve #BGΏђͥ8|üywm?-B!BD=.YĽk&(Q,ѣhlPm.uE϶NeB!BHM5?=s?/z,=kժn2 !H3ȔNVr YnKzba#ȱf͚e9O =3:;!sM*j; j֬iL$B!BdF=+2ϟ?Z-i 7`Vt"g$:#YW\:w쮻:l2+n+pi7DY= k"O.; ?vh6tB!BzBg~"<ĥwܶq#Q <;o;K{"vFuQI&?Dt$B \ӓ\ FEs=㩎:C1YB>>:VÇxpOZpI'Tu<'xbV!B!詀k2 +9s@lL|6{31وK.>3nԃ2ڶm2F{Z ^;v옒1)'w ί*擉݋D {9ó֥TROόÎ!c b?=ԍ$z$Z0s9B ȶτWB!B$3詀8Vt` +gD+Vix3v8[oy<5kuziذaJ6-.6{G{챖]+9Vݻ' G}dqG8r!Q Xǁo>.r)vHwgvY| K9NB5RNzʔ)v -[FKB!"9@OM7w͜9 w},Xt>lHl3:޼ysqoqኀe9:X<߳gOK/Eٶ;dOٳgǏ qdPg4;Oa#sq']"=!tA2`7}tKqHXOnx0&B!>3 ÃqfM483Y܉&b\!>掕22U;3)Q;s 6 re8Q53cD,ip<N=d=}gO~Gse} ԱEfIGs,s}g18^ \??QW:O@xV\ilO,?z!B!HV cR`]EBKICmD3<`eq< ]±XyϠAL`(ݎ I]ѱp\ Č9ׯ_:҂2pC)x\DsGߗ}}xAzB!B" ԩS-9sL늩B!BzCL;1>?lذVg!B!%Kbp!7nl$΅B!"! 
z3zh7qDf͚)B!B<$ЅB!B$@.B!B!D .B!B$B!B!D .B!B$B!B!D,B!B\ņ ̙3ʕ+"7QhQʺL2nf=B!B\ɓ-ƌn-v]w5"7n:׭[7 /X{իW&@B!B pcg}]|"7?e˺8}wd .B!nٲe.q.}#<֬Y&Olc Kz@B!B$fv~+RYK*e͛yƍ=݃ '`}feu/^ܶyw\r pW^y m,-Zp }@ /tpupVZYn/D@B!B$J*hѢ9Oħ,X]{Nn͆jkٲ衇kz}qXo׮Y[nme2C=dT.64uc`gbB!"AbŊYvs%+XSb\ΙG#߈G\{q,7nmXǰiI8_r'm}+Le6D,X>Y],uf(6l0;cgWȂ.B!I˳Z#˗"FxV};0Ύh?KΌxqX5y!.B!9=Vn@B!BCG~饗FKDvB]!B!rĦWP!1E".B!B$B!B 9Yw|rn:[Ug͚5n֭ڌM8 4(Z"b@B!BLadO?3^vmҫA[nnɒ%і{8Bqˏ?xW^=w'Z̙3 Fsψ#\Ϟ=9B!Bd2/s=6Ny~믿.7tPװaC7whAاeazH #Gt]v 6`yaFysK.YN}'t'tZ;sM4}߫WK?馛\s\mӧa_v^$".B!xK{y ӧkԨkڴeZ X˕+nVG~6߮];wژC qw1cQYÇwjr~?[]m-[zmJ(ae7D2Pc9}Ŋ->}n˖-\c|\'L`'p x;ulٲ܋B!"ǃ5q/v8w}{1gM6z=._|fGݨQgnAc 7o:ukٲ[袋\`|&u_C΂%KeX^{=&97xr+?~uK.&)#XoTɽH !B!r Y!нup?| (`)b._}m3ft>e/_-ZȄu)Pzu:0Vk=bPu:W}6shr}Gm ͚5pVwϛ;??{nC]!B!2\cca~gBSP6ׯ7ጕ;LX?pvhj֬}rV>:Nmۚ<,$ѣYA9.Xq7ͪlك0g"1ߵkWsǵ]|{_ОB"m4k,?1>Bdqǵ+47!" >ኀ%k:ܓ&M2nܮs1= s ,pcǎM{xJ6r-Bl` I&p&>2zh`, VR%[ b,XXhC5\cnӦMd'DԅqԿkwɮ?W^: ;/b#\.{ ݽKFwY ~CCoaѢE]޼yBL'slRܳ> B ap^jFfϞmF+V 57.$D޽{:omڴ1zuYǑy&ͻ#$KMHjժg۷zi踹;0N6y_;\ >\o,щPret1FOl:qb駻VZYbH'.D3<Ӓt0I4腔ȩKJ奀88^B=!NeL<È pFb&k;B1 oca, wl싸%#<m$Ena(MXqW 93F:H>Pܹ iv:؞N:l*>XF#eyw:òO F]Fx#C-I*Ã^W^y%Z#Bd>k߾}46X}C +Y!Ё3ɔĀc oܸw}x/ ;ի򾉘s,ϜqXܱj#)6 :+8rX‰ _nYSX197uFy6v:#نzpLUc1A8 D]+njx2s ၊sbb! _I?.xj,XІ#L2&bf%H0D+ h <^#xg[/˯O͎e0LOf=ߞe[ayLI+_e=I zy9"qB!Ğwn-`}W\a^,ɐ+fJhMKvG2xv=6̯>)$Ѕv1{%3 _-Jf'gguS7.fWd5BU`BeoΊn=Aʎ].$ЅHrHѺukKDOf3dÈd$ؗ΁u8pzT͛gC/u O8ω'ؠwFԋ})+5HmV{Z^ :<8ԅs]ti/BxFvr:r&Ȗ/nB]dժUARP$0f,D]of_L.\8{l>vA(B 5k ?FA(~P hm ,m6ذaC ۠Yf\`兢6C9$UVPjՠDA(d&M'|rDB!x{-O6-ț7opGK'Ga}sE}PZJ*A||0嘟~iPB\r C;Ǽ뮻:us 5k~{]X8p]Y~ŋ:`ƍA(ʃ%KG}]qA(m žkD:t֮]A:u¶eY֭իuz{\+#x_|qЦMe˖H"vog^zEEKB̃w.<_~e[s;zj͓'OpyEKrXԓwGKލyfaŪKro٬bmgg׌1{άlO4Vf,ӡ4?ܻkY+7 . Z=8pb`1 ~x O>Jpn„ tҶ 9/dt=ڵkgîxO?~C5O/_ގ1p?}t;.e=x {Mff~`'c=C6 >·k矛Ux!"@l9ccóHX$L[ཌྷ0=<2.+- .D6hD;ظQx&C&PA*Uʶi޼+[Ńh%l@mΜ9&ʉ%gD7ް|pd]"ۻx~TQI(3<ː2egs1!ulӍIYb%c{1rC9$6[{;swe~#r8` $I$pͽG*9\}>0M_=I06/*ӈ+93W30C|GC\'?9ZgOqb tMQlm f\VrĻe//Nq~4&E/zWk1A{fmXm6Dpg\yA.rq}5QS}µ5b RNXp޵{09`A$I .{fxv;?_ٟaПeX(y3(=Tw~}OQ︶i(cw߬h=NY~.ʠj{{uyۦsp&z ah⌫{rAg]. 
XPguVNNNqg[/dM=f&Y 0s_tE ϙ7w󲏞 )//4%l<6 }.xp7׺ .iͭxodioըk'Aj kTY1lgm:7߼WJ,nB;m/./}u;Aܔy{9)k%'_B=CCPSOq}˽m|#uռ^jb ǂ'I$pG(6Ѝ 1<yՙgm$ ﬿WFDMbeg"~GD_I!as 5/f&0{ wܱ:o?C ޷>&_yߔ!A2;0L{e2>#/~@_ִcBo-5H!8]eP g$l oApilڇ~xk4ܾ$#jY֊G\X%#0>h-w"I#+u PS- C~  QB kw5yY]-f=c{Qx K¨eGY%3@A2az2j/ 4alĘEܶF%i3eh6/=qw-{ G|˵ Ƕ!еͻF1 (驭O~3i:==?OOD*]H&Xa'|Q q9 ~Hf}~![X`/'%u`$`ǾMx˽>/F5B{|>;!S0#gCFvê}#jH]MGqLsiP<%B!"< ˆ7" QqCKEB`d4SCؖoS"vD1w0zH`脵'JߍWB/f `ȭU5 Y5^\4^F)wNs:./ .ছn> z藗Q\݈kZ*.}_ ;Ww8w^<<7u/޹H2'ks5or6 rNj=~%gf\7uY6c` 1^/:_$_<:ck7O$,>kк?g{!bbqW^9 w8yq0&wAѪί-n7s 3<5ߨ\6QҗdOZ _Pf@y?E]h,c7 [feK>*w18Zi[wf`2lL'Q+ے()t^ @XVl/+4NQ҈POg- x;Īֈ,8h~,ēʵY":+X bVy\7GOrlڌQr{~sn[vS5OFQܒsb0*8$$ JQd2xlǸ<1 @rAo \ b=]?atƔq t^蠟^mG3|:-ZwF ӷn:Gj@'1/I H2g?3>%MsoIC~}crqJ%H~P7y|_gei1k-^^o%O#L\7nD馛wꩧMn:%I7{1Z284`MG~߭_dўy5h?<0xӁx≬ɠ^{Eb~QЈzI'7p֒^%}2d\sMkI]7b>3FP;c=|oV׺I&nDb}{_} '~k}G?OSLzviEOmVYexgs9gGuT=묳_u}h;_^7B}t~zivX /q7pq8bI'T;A/]v%=Z2x ͦ7 [ҿY4Za$#Fx^Ә1$!Ā'Џ$giB͵g) X+y `da}j-wkq_ L'+P%22`ՖG[:oC=cCȜ~/Ik16=K8J^ /Kz{Zv\[9K# 6"/=+㺧%k{\c1,ZHA,G2xV_}pɑ,.*y\_M4Mc-oF,IzKb$}wWXn7tSkࠃtE#IE뉯M:<q&^ 3sxٙoo,O 7{~sI7&<@h' XmB1ŝ^=!t9z?Hg"^`A0ri~W\38זՒK.n=?% bhG"ݍ'Ax^*|%`H ||_90_o)~HSg8jdILNK),ޘrN2@LM0H$X$ַU|d\&nLb}]\,# \rI[$I }ݑWOJnqɤ@O!ܛh$=>!+)Tz#'+U$ZjZkO<`X#iHOWbl|?6zs+خuIO8X(M1$I`M'K2H9 d 7..(WnXr˘ZĴ*C=S1q7՛ώ2d-&L!QL~Oe@5s2+{.vmW=裭_Ƥ؟}+ߡ:63ٹW\qCe뭷njkaqV[mU׿ᅿ.9>l~5K/Tm1z,磌3|wT!k8'Hep ,㸾25!ՌtWJ+}-瘦&6I$I2_}1=)d$=sroN@.7ȌIHp/5q?C=>t iJps"zFY\p8̛]w]Le!, ]BcS/NJ=Cn!eL}#Āo^Q%%1툗e1uP;,D7d=s#3,iqb 2q饗Ɣ#(xfiƒo%5щ gu֘6Pb=B֊9C &N 22:Ws:nqOw G?9q0b~U9lM\9I$I&yBz$Ú;i:GuIm-IFԍxk}u#F֍j-[hX|oh݈瞫V1|7~_nE]4u#Ji]wUO1^;F֛mYotP O?=/~^s5o_fi%X"ni=L3ӟZKg}6$}͂ .X7"mpooZ$݃_\mZK&O|tMﱾ;\;+mQ֒#-Iԫ_I_ˆ6gV?U#ú[h^4GedffnkLMb4I'FЅ9K<, 1X˱%GQWNJ=q.Dg$ ̂/ ;\Y#cV, ,@*΍d3ho8_خ O_+;XYy x'B ILmM$o1?s^}"O: ӷ%6gpM}k&z礋{B☋,wz߸;NA{I&-Orkw33).Eö_< w]L#r-ـ7N}s1/\zۖ dw7"ܠ9_u. 
׍+$I$賘Z{U(]`?:N22(o~P``C~hI8̏~`J3<3<V K.qG8}'Zo/fX 9bzbņ ̈tOVzaeCm]/ϓfq,SN9g{GrO+|Xa:tӵqo(L5"VWO6]/Vа:`.ܾmÛ8v]ۋD͠xrzN<1K &BsO|1H"X6(ַ>)gWvA /׈tIC`{S8p%~F|Y.첸Ї"\ ?USM5UX uo~ Ζ{,=D39{:p}'qiG~Vu~ʡ0p=G`q}f7D x?{f=uvs KHiU'xV[mxM,7|s3N77_HFI}E}=Kr־OM<}d z xmO>֒QSbm{1MctKXN8!sgՈn_ās1_`~o :veG#3u#c.u1ec;ؗsp 㻸FD͋>C׼=;wqq bߛIKu.1@oߘ?Q?Qrwwg zoz9|\szvȬ9x=w}w=0Ш\EƠg~vG_>/5Fi@ 76cknW w™gScE֒I >w,wꫯe|3?$LRN}_`QN z c\ʪ,s9gXЍZnq[b{P Y\YùzTjF1*瞑M\v񱠳:=o:b1pg턻~FYYK/]0 we3.{sjkLg,+QyY|ņ7#F##TV̵6P|eu}{Qgc e]w /޵0՝2dPO>9eh?.osoYenWewKzӂ(Zrl&P1,a8`ZF37> `z^/ԭ 봰m3aF*:묨 Dbw@M2Eogޏ<|Ɔȶnm G։gGl‚^-V_}vcu,H?iIJ{zB$ppq#uBWZ'hHPįDnq{3wB2.+Nz< 'lkzg"н(MA ݱHP&ǒMy Y۾6˞+k9 0%֝ /sq̠1:|UW\qE 8߂2(;z&t hN{ vA?F9 (87srt[keHe0E7903ρP}lwyg bJLH$z=D='zu~P|ȓvu<{֑BYC* !&pE%d40~0<9|&޵#+4SS缕۾ y=qt[_>&NaV;Ew° :3F~zL>QC5ς3 z[Ku"KԞ&@Ǣmծ{9h "[Of{Mu=ahwA=/9Qv3aKϻ.سn%m?>Nbd@\1gS\c=-E8ߜp~6B"޿pYW80lgekU/u#|brJ)xk7uu#RF6}rڡ<7pCk(\/?O Q16,VX!gqƺ𱍺~ )Q.X_bVo¡ZhrL~kM%˳<6y?śiMdYܓ>Xc$I;,żHXXIc>/IY{ ڷuRXX*ybpg>ꨣB-L; ׇ yXӂ8ox0BZX$J4CA(tAaeX Je^k˅裏󒬒uP.G"DCxl:k_ﴞQ?xz +b❥>*@{u饗V]tQx*JO韆wFKo ,3FR}`WgT,۬_sbep@XK9]~34aaw,5k>ϑ}7< i\o2pU'" gCoyv4YBu:fp1uD r~A!UW={\  yD 0 >M)q~W\˴vcs9W7 >~#M(#uwN'L\ Xxy,0( ' 5p$*,s&t:$>oBLP!ұdmAYL+˲־>@(@YB:,bljt}1,ߊ"`#c۝=Y B5aSvX)u wZ`b5X"9?vqki@nWe}p@Y%@Re:\oH=a/po8M$5}rOVN ^,0KR܏]3y ETR'I$x"3)dY"JH-KtU: RϢ-$" v;f!-Boe98^zI6=XYd*I&v)ގ{~zbSFU,u{ג %)뿁"9+:cc2%;}J ȚFRPx~Rg̢93ʹwbpI BsN $I ˯yV330nx>݄Λiz $Л rz满ɸ![˲,~JǚXk=1m8 8!b: \Ŭ:[^`e,ϞcNML; l}d}V\%.DL팭/ñu_yo1ǝ:/_΅T RnWVR$zfAY^=%.@_:Q7lTeuxL~d7F\qLY ?B8 8_VOq؈!\h/'LIǒ<MNl[)xHPrk6L-wTe ll˜W<,qĹ*o] $biBpO_X=yp{cbאw@mԬ^aP`f]f|wSnnv!y/n r \Btw +'` N]pqıw {àؿsS?TY6 YvaQdbТXQ'  nVX.iu58}B U\Cmk.Iũt4khM{t(b*1dMV*_LѼbnGYs)LrMGn:,ASXs=_W}7tXvpt GS}2α߇ #a5iʽk_p|vL4p@L!xFʬۭ=ԣJ+SMu`2I=1=Y#,Foۈk3eۥFl9L'e31ř@nM+cy'iLՈmӑA35XAc9oPsbrԛ_ndkQڗOe0hz=#uW^ԱFg}v|/_5B.썛noz7y6A?'tRF˹ؿgQVϞ26=R\;S Vo:u~ LIhg=mftGޔG9c]WS3:Q^rẅ́L|hcI}L{Gk(4w^4F:M/~s]zYyb{oVt:4-wf b_]7>ʦ#y~{ꩧ+o?ulG=}': ) d'x" !}ߎe%p\w"lKpX^O˼/RyG_˴Dt@//װߴmzxF/]?c=ў2%+НF+A$IF.wGs6l‡r듕2WMe.pU^ sIHҴ{tym-Zps-V 
:ް~dBs!-$c;nDsn;ە2z?n}dzÞTw:o筧 y>5II O?t5\swSh͓ ZF2⽝Y\KΛ%#V}@Yej%u|ߌHB','˴(݆TI=MgD .~N\CGBV*qbR WB’>$SY|e>#"uos連~'%1|ʮb.sύdi;YyLH< ~iG=! Bd!d;Iq\CY=vAl߭8׸/ Pĺ>Fjݾ~[֦Lsez0ѭddcA*7Z I7!-+'L9d%ډYg5,D"!Fo7hE^:?߫ ݬ, ˺k ݯDBUč+2s<‚,1A\><~'>YyuY{.$K/nuFBlwǂKF!χQw=˻mαEĸpmg%q|eDpOؗemBcTx,vzt%\:q 4I+L{%IS# c<;G̠AA/XoxubLjwwmIL ~B#e.MH#⬏^뼃K\Z\pF4ijY߱A, 7O3EúI8v34Lnqa6MHԧ>eN# Es|::}K\5J{"C#Hp%;.w]I„P LY>`sX }(>QKtltW86zԎOGAzy 1`sя~q=3uV<l˸!'wW1zv\Ṁ˸Xa ?TW'SBz3epnJIn*)LC'W0$IA ,2E7"nxkI]o1FpFDEbxbӬ9i>}Cwe=)nnj$˴-0-岣4_u4K2(~mE3Kᗿesl]vd kfL4Ffilf0aoȖ\kx:ƴS}]=IN[D&$Ұ,6t|hk8Y5b& .X\%hKl,n~6:y&4nլ9>1헛{7܏?x{(q 7D` !̀!q۬,⳷r˨?b^ٖ#(^yqX[%wm" \YvnQ'>¨%C쭲K%+ֺQwx.XF[FxV<&xby0yfĶq+j.] XNpřK6&:/BLG΅K2<{N}Ϣ+[gwGY]#O>9gaw-e?Cb_@B +{MQz8׾a\k% CRuzMxR}lG=;S:*B,>χ;n8w%B"w~.ac같S$I P,|m:cX c^|-"kY>O뭷^XoK1K/=ښֈݺ?w1z#ZK,MdЈ1F~][EYErʮ'?IԩY7B8,eQu%yYgw1g7̍n>a.ts797x .(,M Egqxƌw'xblSe_ȌQ}|uM'dLۨCR8}QN^M^veCئt ,SM5UWcM[ =0b/gq~ @M tۨ/|}//e۴ t;m۟ xJV;yo>2em@]D^(uw=looc?]t./۾,l^6O98.m}P~ l9je ;³}f|$ {W XȲ%P#D&8a1d 춆nuwu։#Xb2&wygt:wqGXd@0t+xNlcТ'J<@ t-2$ .2X:x"YV\q4-ўOD[KEj?I~~uUJϠO/WMT0BT ʑ_A^}G 'P/BՌH2p]s5=!:}}߈2Hgp%NkG2P(g_ ۮdM /0Oz@g]oϿ/Dyw\"`rg/ϩzSK,QodMNK/-#HY=ό*;uvecC#i+_DqJr 70[n14ry iRWyN۶1U`s?].x'τ>:…JIױLNP^,čN ?i$ $8 8S5y v`N$kt aDN>Q.g=w8 %'~<\8 sg= i-Mk H t\KeҎW*ĭ 5<.ʂ^UzN0tM=&l=R?=/FY oi[Y} <'"'n!Zl"sΪOM.N:i|'=K>-|8umUW8,Ĺ `S> =Bx/z;__{ lq=S?A=̠.ۺ6G计:Gp{f;e[}py'4F8mJRdlveTv*I}a71@LC}BI>~,*:XgwF e<8>1_EYNy /rj8">rOؾo}[ϫt5YYa%ԃ9(c e:Zk K.5\=,^fobEK/ iM+08X>)]rb}aiQGK*ȯBQwaJ{d󜵗5XYOs-ۧMg72|7A, HYS5(~."7nw^9=s=lFFo'38cmFTJ|/4bW3":Շ>nF Y׿FO[oA9]vٷfp/ⱬF1M7eoDM#9M>NiLc?9~fh{d.~w]Mgo-u]=裭%U~0S;E3EaYfuZ#䣜f}<ڢfm>OWO=Tۇ~8P~WoYgDZ=t=|tbf>iq]ۏFl:s2I#c&ojDw'gP9/GzwvsJ俤@OiL$y!0t 0 :+Աwy^:e'&DXʎ)9}^O+bTi<ԊU'=㤓N)爽"*gwn|ZfebJ=& ~s>x1:fz>@gigȶ?CAl3S{4 TlPmcc?2 ﺴ_옝۝ϭO~jW]$p{t @eZKƍm{ַQ套^ |$D1>xhfO`.> jo(dsLK.VXatC, N:3~{͚xGT?ϪګOZ򗿌N{G_׀ռX^ ,@ܣκ':#n#bs?y{W뮻n5sieOeo^|g>] v|?쳭%U< ǿKϯbjWqP3T\sMxh J[ m{b s IQ@O!N{dwuW\.\o?i-M+,Z1Jr%N/aͲϕ13HD p=G.*2At|UؿPqBLկB-R!moaEi!nưwO?t,!п/UwqGuwjw#oe'J&x=GgD>'"<[IeBUxB9 
߫Z}ߨ6h8w.~ߍ~x~^#.>.A&ԕ>_`]8]ple ^ /:x'MJL )#I} '1>6Ջ~Z7jɒ6Xڪ:ꨣ_ X>:Ąm&bR/7L3M[8ÕU+qAXkfKDG\3\9>Г;9ߋ W^J+E|ηgD Woq"sĴsQ6H:ߌyu[az72.~ZVD-d:#{aE؟[^'mgI]|zNc喋Cqu{[b:v2Vn!gém+=qVnm]n\ni l)g!"e03<:[{.J]rn$ټiGdbnM"? 9_u+\B HzI(׼7ErGg_oX޹u:X>϶9N6eݱmIOq1^xT βUǵn Q}$J⦁FU})=sZKUG 4|vr7S'SO>񾗶$nΓ enq-%۫;EI\]K;3qoF6kX9#DA<"\wb_w] QsZΚhס?Qfı@s}f7ƻka`NI&]}nQX F{/@GXjVzϐp.sxktP*oz-wZ=ay&څzxŹ8Av3ys=Y'>15P+Ag,ۮ6XY퓛zZ8OeRs;V}wLϒB"=#3ĕOlpý}'IF")Г~!z)н<:_W#òthN-(Nd6n/x[sjޱHD4F%iv3*^p . ʠS!3ITK_rd)БYR&GVxGB͚k3s뮻.ʠeQSgu'!eW\ꔱf^ "9֟ɃZ$?#\u ]GtpdtMtN9X}P5GT>@0Rz2rPF@結$;X[u#c(:y''nDq|8c9^x!/SꩧoߧjK/mcL^xa\w8SԍЭՍO?X>l=3׍ȍv1u\ve_<nsõ~ ?я\sMɣ>+Јz|L34Fvu_=Nh: P$LRx} tMӟZK@$Iy]p=%3曷ڷ&W?W[m֒ g6%\Z$#tqOUJU.\!TWrלeVfTV[\Tsc="6,`AQܪz~~s6.պݺ+j]In`y9=˾P'xbn4;:{.$ĩqB.0w|q ,beYg͗Ċ;9k:M;s%L*swYM+;k9?,ݒp\E\W7}5FŪ,gtqO;#=I&w^vӷՙ7/@_2r[E.dTk Ї:iA>҂[{wϺa\gusGtZqoX\r1>puzQ%XÒ4. uN>zg\?QbUF#)f?<}B7b6,~o^_[YU6( 5?udg]ve%8I'~iAW~M~'C[nz- qy X=}珿'ÝdAO/,ԾDon-x5\ᝧއ<AO! Ԧ02:rS,,,ȦxiHԆ27 FY%}>.XuMbUVibt#Q[Y I|,,ֲJ6M稷icдuatl@܉|.Ú/x! 7˶%.5S΂lplG;v2I$IxKŌ<-oju׍|>?ԉ d'-kaZv 7,ɮ2~Xs.(,Aw}wlSGuT-4 +)\78vӽk5S]'_8s-޾!]YȿPv-wنxf[]T,4WaN e,3O|o{2ވ@eAc;_tE|x1,̧vZ#<du]}hq`I z2|=IN_X}ǫLޖ|oiY(u\8I$x{J(ꫯ ^~cH3H30uŽH{τ2&v쿸xQݟN<O7U> ]ܓN'+AzBp%I2]v%- @$K0&v~|OX۹o7U>$I$6x;3uz$ R'}N.I2D<7I2twZ <9`1]}/)Г>'Gx:VqI$ITYDdaAG2woI| #z$I$I/s g/| Ս7H[>L.e%z$z+>^Y?PsL$I$ygiEǚ%cLkSHot=P|^z /^|4b=\S\o?s=I~ +;Oߑ=s=f C=1M2Vʼ0g I<'aYg袋Bh&vK;ꫯSږ'b-pLt݅妓x㍫[Z|czNU_'WA6;@ϗhw>w7|sdȊDBwQ%kcU>`Ad 4y =qƅdth30CVOl&30+6~DlS%ɸ(}}Ca oF6۬[c՜|;6Anj5׌gy'?IuQG_Sr_iepf$3,2,9|i 2đQ^xZ %=~~裏73fejvmR"oG%X;K,5},nm2NtW\,uYϲ.;Nk¨r:Ų/+B>?hs):nT#H5sT{3Bm; P: .`]r%y>;M'~i{g?YYǓnUfCQYtpܗ?]_Mg׏_c`Ax+8@WAp`IR- I"w1 z$Hp@5\sHN!o;FKネO+"! }s4n2IBSxT0|Wz,~3fbK_R >_g! 
{N8dln%{d A C lc}}nYĞq 8bֈFm`i џpIZjpm4 pZ/~1FF=m٪>{ w .vyp/6ҊGy$,ĮL3bu@>8Loy# \ę[GY=$*=/1S⫌c;{uOe:V]u |ӟ%@!ܶzFkVXה7OƝ|կ~Vy}8kqkz?Xh{Xr<#G EċFS㎋AHh9%x2x/C:蠃"x@.q<}}}Zq)VXz1`6TIg4NK6~,g8bp52KPo :0g?;j 1l:typyCGN34Û йiзb;fD8KWcxq/6&*m9o-$nxy|k_g{MRbЮ'2F soi\ VȌ܈{Pﮛ8QLZ_BP/9B7 t]JL V\qh_ wFbo$ &dꓟdenQ.51`jbI]g"1ofPo嗏k@3 eda`o_a`;eW}Eak6 2)|lI岸ь@A^:laTSjsojY""Ѝ6FU:o:/&OsSH&|_}խd{~Q% cb /pkM6i-/^\>?wF tgqFrc^fVz#ÅnM&n: &,@{{#@kP^kR1%!>}}'(;Pν{T]v%w `t\S ~p7 t |$/\2,uZ:iy?OnzvN:n:u#N8n}4ԭ_g]oY/rl4/iZR65/4q~?{R8c)/Z_ܫy睷7ZRoV}UWO>dݼO?t׺^cq^?sqߏ:פyaEyVZiDԻ{=\sE)8~L3TOj-I$IcX|[d|yI'^c5ZKdK=SsOk1,%Iw4BX"~Jͥ驧 KlqXMU3<3vl؟QqrZwXjs/Iz/ s)ccT ;W.V*sf<w8qS\FfY/I$I C~^I2I$d^:x{r pMGצ#8"ܛn]b-A{C\o[ $|boj5qNByZۇXc֯=Å^ss%w՟{q.~G%^:/s)m0$+˙lo?\V$I$Dr/x2*$pcXƠ'@1\cYLkpDVVV`s dR'#4%֑"X&;5 us.Q]gVv1iViSAAl Ż$/[9)cɄlrAKbJ<ŲˀbXe7|lB&bKҸBƠ'I$8A#KI6> o$qI2L=I&I$@»5x8z$=I$I$I!6'tR,Ŕ`Oat/i#I$I$I {#L>ӇaEuw0n8/O^Iy')nQCE`Izs Ga<HN $I$I$Irr!5ݜߐŜ7"/L!$ܨ%1{_h z衑8W[,"4|W^y%f+ƟI$I$I 9bHn-"}-WKb_>f~[XO=H+ 4tBHfegwcs/Gz\pA~,3Xac׏dbfN5LF $I$I$I=A0!ݝ/ov+tMcUYo馘FncXMK|պ[m&a.b>_W_תZ+﹎?To}x>p #$@f}m3̶bjX` FZ}_cY{ ߄ꫯSfmDZko}+ 3PM34q.sc^Z?I$F>ͿJXo~c$I$(1=Qwa꥗^w`UX}ш=g %Ż 3q˪=ST\rIfZ}cՏ~~zKo:I>8V5\e]q/BEӟt5Voqڪ:"q]rg{kes].@Own$'# /Xw/b޽=i$I$>$qvs9',Զ7oe],RWtNusO}(%p3`-`D\;p殻w9|;!։ަ?<s< x'<x`u嗇wk~zUջ 354I\^zpGydM&<ӑFs ee0I$&VM F`Jp&슨фDiw\D1~/֗ay՚uo \so,ضU@},\ž+s.k\/ A >Z4D;KN:W_u]:+3Ihs/ 3Kd8kD!L$I &ܥe'ԉX<,g0:8AL >e\b7X =3\ ycdjU{l1q= y7KN'kqv t{@w\F*eew~+b9@gi>YŴ%g?YxB(s"I2 r)E[Q Gup$)jI$Im8NzR{:a^gޝD5nV"Q7BgZM՜seF.[|8Ăzc ' FD.dXW]uհr gïX2I'ӟ%>`/6^;ÎeY&>o,Kcu7H1@O$I$I$@$M31lʲn$qIM=YK$I$Idâz;>S%`3I$I$IWkIٺ\Kx2rH$I$I$IEO8dd=I$I$I$II$I$I$I@O$I$I$"?'oӶ+K/T=s_&z$I$I$IC(N| ]OΝ7M S2,׾ST5 3m|~UW]] z2q@O$I$I$RX`jw8㌘ky/jUV~W|pġK(<do_9䐘ʹi!oV25oyuqExᓉ'z$I$I$I"}.H[Wڪ>ӣzaa# ܢ.}M6 /{s]}+_\p꥗^pp #E|馋㳰Ź:2lfF1_{2񼫹IcO$I$I$HzloS EH7e]6F#Y;]ѹO=pe;>32jb}eyU}&}s,o]w]/9m[hbĮZuY~vA"-I$I$I$CBhX%ffms9z'ZkhN0''YY2ۘLfs_~뮻"霬,;#z$I$I$abj3m1_[q 'T/|5W'tR䝑.I$I$I {{anv݋%~(.I$I$I$ɐFSN9y=I$I$I$II$I$I$I@O$I$I$IF,ݔ-z$I$I$Þ2m؟;I 
O>dӛ^nI$I$I$ɰ\s5WW>hki2a9n!旟c9Z )Г$I$I$lvK/T/ZT{jM6xjve1lzWZKTSMU-"L3Z@O$I$I$I. %I$I$I$I=I$I$I$II$I$I$I@O$I$I$I. z$I$I$It)Г$I$I$I H$I$I$I$]@ $I$I$I$R'I$I$I$I=I$I$I$II$I$I$I@O$I$I$I. z$I$I$It)Г$I$I$I H$I$I$I$]@ $I$I$I$R'I$I$I$I=I$I$I$II$I$I$I@O$I$I$I. z$I$I$It)Г$I$I$I H$I$I$I$]@ $I$I$I$R'I$I$I$I=I$I$I$II$I$I$I@O$I$I$I. z$I$I$It)Г$I$I$I H$I$I$I$NUF264PoIENDB`rally-0.9.1/doc/source/images/Amqp_rpc_single_reply_queue.png0000664000567000056710000004433413073417716025570 0ustar jenkinsjenkins00000000000000PNG  IHDR#wwHIDATx \T罿{4M&mR`LR㆑(ƨ n@8 ݺREcE`lԠbj\"} 3.ܓa`x>f9}\ 3 33<< < 3 33<< < 3 33<< < 3 333<Ɂ4ͯ޽{^ݻryNNNdd/֭[)h{v卑wè-[>䓁6msj?СOH_&M<};rrrĻ7MViJOhn6TEOoo>Ç#X"+h4%h%v/éSD{…=o<>{hWEۘH?ذ\b*J'RP[#wBoog't" DիhB?ӧE[kn޼U QE/%ۢ WGtTǏ Tio]vOjqb% ~ݷ$ʻ.Poȑ#믿^iJVt7Wj JS={ կ;k,{͚5b-QQQvŸHe?-wx y7_N{F+[#wB >~Ν꧚;wd2^C켑 (5Zќ9sGo/˟|I݆n0GǮIwjwho'7;h =ƍSO=UiJ:x7Tj*'F##FmD?|MK* 駟Vv>x8rsذa_׿Ux W|OHP[Nw`xFBtcǎuMAD;q޽{-BW3F;wd[T۷oӪU+tM6"۷o/VF2 NRT!w0X;-?zABBBϞ=E$yk:U~50aǭW͛x')--~W_}UD!m۶`oG588X9~B͋ѱcG{ʕrOLQ}BCCݻw+mA~{FJls 3BIII˖-EECjphg[H5W#""D{Ŋb*Z'Oo;Ee~gsŚ4i"n߰a\FwhTcIlU111-3]}=z7g̘QiJi]駟*X[ =Oz^t۷o-n7m9R}vw3*M^8oϨwfW߶mJ*aZX^!/^KY9zl!-E|'_ ϐҡC\!e-ެY_|q̙{oׯ0`X-Z̚5Kٝnb'O EY [Xl&ME C +M0TEfذarܥ ˴9aXbwO+buɠAlfhg{dCE;'_;ywgӧ p2r$&xxgИ_HM.EȯA z8:Z>^xWUϜ9.ܙ:uj۶˗gϞ3ϼk'OTz{W"~ܴig}vڵv۶m{饗U@?Irrrf^~億rǏo_h4:κog?ܹd͚5۷?cz6m;to߾jk;Ob'e5B ]x?JVQ mD<͵&+Pxy VQ|YAfrP}=jTj*E ݻ'Q/OII䁁8I}+___oFY(zӧO_G?ǫ<@1bh|R D{ȑvVҕ߿?^7ݞ~iyYkMV- peee_~(O>:/^8++Kw)?ܫ(۳r%UB'yqhGzÇ uΓ̞=[\tݻMEojE?-/~ &W;wh/1L}|HUߵ'III>=x`޸qh3JWR3c EWܵd*BπrرI&5k̺X;wȶo߾h8qBϜ9cpyA|_~ّW|XbI?+#ʼn?c۷KDXW_}UY7~+ZjeciӦU}vN jSO=_m@DپwzRs'qgnٲh+Go5Y3FZJ &?~ܺXk|[}Zh!ڧNbﮜt7o޴XbIghZߦtzOT?ZSR(P&OZwmI^W^yE׮]+~WzeJB4}qVQtEO>Yes T5I&Wv rvpp~ۻwo뻽OH[^^nDGGK,v9~e3k7{TӧGEDD+JJJxXk;Ob'j3F߉ǏvO=h_^a=5>X7xԈ^z)~_/^t3<8гg:Z/$ꁙ>+#$'gA4=/T#DFFV]y;g˖-b=%*U{o.__j'pds T5B|{ x' p)Qa mD۴iSn_^~^Rѓ//B,X wh47k_9sfaaaSƍ+++ gggxxxggwƍG1`XV+L7AU&11?zl޼y̙3“'ON2pπqAiii+V 3F 4(;;bevE8g@x뭷222Çq۷o'O SSS޽KPg@y7/\p 6B/fsiiizz֭[^{8u5#?hx;ҧO+Wȶh1bJJJª,/C@ـgzW;wcƌQx|ʶN{w,pt¬wQ ^QVV4 
)))F1??ɒ%W wURRRZZyQFt¬wwD-Q/tӦM4h徍8qϠ$f ak׮=֫!|{˖-B/aZx>~xt IlԞ1~͛˶ٱc>zQRRhѢۏ;xt IlԞxt$L3 3($ Ix! I$I3 $a$gPH$ 3(C@Ig@"I3Ϡ $&IgP$ $32D $gA"I L3Ϡ I&Ige$ H<2D@$gA$L<I&$xe$0I <2$I$ x!$L I< Ia$xeH0I <(C$I@x!$I3 $a$gPH0$ 3dزeK׮] !$L I< DDD$''j… xPH0$ 'YRRrʨ(qSyyy@"I3@PPо}fT\\ܫW/<(C$I@x8x𠐌qƕ ɘ7o޲e $aINÆ xPH0$=z+W>|A"I3j^w˗GDD4mڴgϞӦMKNN>u $aIN %%EqqqC \x{ Ia$.;6;;}(c20,p, wIf-Ss@nn[(a3FZZZD{nIKQOJ, !$ŋFqFK $a971D=C:tqo Ifm>gUxfgcfgg@"It|*%ZF1p˗߿h4@"It B)y*%Qfdd,YDѴoIf aڴ$yldff6xe$ vPfŨwC1, ecȑ:to>e$ ژ'lxK(**ڳgҥK9n!$q؁5+%xe$@I=<2Da Pt+YgP$qK 4H`3Ϡ $jֆ0{Y.aذa999xPH0jFݐ{Y.!++kG>w!$L p>{ӧĉsss $ablx)//߾}{^fΜj d#`37rrr>+VXn].]ZVVge$gz6Z`3jװaϟ~0#"" dc `3j֭[@"&fg8N7o<nݺM0ҥKgPH0Y߲tfg8Yfiڂuy{{9rI60- #/ķ]Ϩ`0(7322ge$vpp!}TnFOOO<(C$Pô0%BDwd0ZmttF32D /La4뒰Yu֭[ϟ??f__֭[ 0xPH!6ݛ0,Z}fqo I60է 0~8_pG>쳨(9=!l0mZqWSUMclxFйs֭[߳g޲eˉ'LQ3(C$𸧽_2S=YuIyyVݿuf͚5dȐ!lH:vTo6%6K< d0oO8lt ۲ [(QUçO6x;d}d=S}" aVy~yyxQEעE ]t/L&SiiiN w$IwFȄI̟'Y$̪by[!h)'pC <ϸzjJJʤI|||<== w$IlAvyFYu*;0DCyEE/ޚ)/p$ '\XXتU .$$$DFF@HuY0JXizƤ\Ax,))YreTT鼼 w$:ga#SHЉ$g}ͨW^xPI>OTe|<>k&I<8x𠐌qƕ ɘ7o޲e dmb}S.x֘7K Uw`$7lPZZge$kx9ƹYZ <$gAH w:YONգz>ϯy#GvUKKK.]:dηns3Iҝy`6Y`gp¬YY$gؘ^//t뛙鞫{_^jaaaBBXmɓSLa#I?Sy {?'IWQQѣ& ?rH`b <ޑ$ݙ}L t*O¬|U۴IxxFqq;pyydJHHXxlٲ]v$d*O´+k88r=ϐj4w^ٳgի_~z^ܜp>!!!YYYž={6 A"*V0j<Ն,G\7p<9~u:l^hQxx8!Tc= ¨/aZ pGnMnn|M//S^t IJl΄As6\g4Z Ir$F]st|z48xe$kˑL%Lѝbp< Ify{r\)'ײY__3Ϡ 2QfzirYaZY_3Ϡ bF=YWa6g8c8p@EGG@jI:vb{ε>a6Bg8/o||< zô>sDыzzp<6HLLl۶?.ܹs'44YfxP^gAb ӎ^4| 'Ю]ÇWӧsrrzG9rxPR0B| i=xFmCBB?t֭sII˽{32T /xVWak >x39}tǎ?<(C4Iˑ40rڈfR/| oWBB7 ջ$o;Ѐg¨y!u=^3Ϡ =ۓ.G03O\x!OI5RgPITHrd٘OgA"ʓb{n"yđ>x!aHy6sddGQ(>x!$IHd w]X\j%p<8|ӧf!MtiCT{6K>xFw^??-ZҥKTT_|a2JKK;ugen|`66Px6H@ڦu\l|GxիWSRR&Mge8&1p|]면I:uY3XVVge3[,y AuL^Q,uN7oOn&Lpҥ3(CSy볍js%iG-ʐ8$K< 5ILT!-\4M'%IuO>}233Fʐӑs`w`ѐ`$I<ýmRR`jʐ1oOΏwdEnly&jm^͒$ cƌmݺ шgeۮy{J I*c9-,I<2Ԑ{.90Er~ sP-,Ip/8(CNzia}@DN%[[Y$^p^+PjM+k /Q9'I!9뾵iosP,Igp^+Pp$툅ra$I7y@^gz k-IPrl$g5 !G'(z!gp|t;$y6V6O i0bAHx;y@rp2;.$ yzo͆$$gArwiXE^iZYV-`gw[ӳ'|7<1͒$zFaay:tХKiӦiZ<g>sDjA/]vW;$ψ:uЋSN͝;… x42t\vz҃#UMR=f sO* 
zGzڵ+((Pn^:44π_gt#5IRYi #ʃ gn;$ 4(++K[lg@C-Cjչ^*g|*jpޑ$>yOxxe{yȑ#x42ЋYCԻ+*:2gݷw$Iv޽tRFӾ}_~9"""!!s_5 xkp~-21 ;9;${ϰx2lGF/qvڈZ,̈%wW;SIg4NS)3^##3B-oȮhtE!C+($ xnQn?_2ȈiZ82̶i-r͈EAw$I3 2$8gD/ݒ7mPX,S :#Ig@muϨ+B9bs :ΝN$ xQlp\B*:",(IР~r4mYy9S97yƳN$ xعKpfLͣu1Q+. e/E!Pz}&0I 3l zwASCQ ω󾎛zpWnU?ԦFȁ$:*Gh6t$I3~{b #T~NGMNecN٣Pq0I 0 ~~~͛77Z3䉝ry.Px3kf <۵Z/ %GtR( $g8ؘ^/'U_V pLY؜+A'~S`bpr \^2DA'L<^^^EEEHC޿ȑ#sU LJh4[9<..nȐ!/w;x 9BΨtz#)*Eq 3QEueT0I MOOW}(c2~м=Yi砺n,'_wH0$ '3`ӧOח4 ϟ,2eիWe`0̘1g3@(? ?'*R er :? B"I3ONNNPP?3gDDD>RSS,X{O?tΝ?|/;8c6킩GqRH0$ +4kwILLܷohtU:tG,4Lqqq{s@(?$ϭcO}T I&I(^^^/^3/^3HEMD/% !$L I<gg7n\2D $g ۶mj $aIN&00UV/;Sr(@"I3jJyy+W⋅ xPH0$#n{ A"I Lzz^6F١Cn~^+A"I LEEE{Yt)e$ HϨ)o I&$g!$Io\I&$dhݺu~bccwx3(C$I@xFmxƃrrr֮]Ѯ]=zDEE5A I&$*ϰXr…'@"I3<2D $g,7xe$ H<2D@$ xA$L< I&$xe$0I <2D"I$I3Ϡ $aIgPH$32DIgA"I3Ϡ $&Ige$ $<2D $gA$L<I&Ixe$ H<2$I$ x!$L< Ia$xeH0I <(C$I$ x!$I< $a$xPH0I 3(C$ Ix! I$I3 $aIgPH$ 3(C@Ig@"I3Ϡ $&IgP$ $32D $gA"I L<Ϡ I&Ige$ H<2D@$ xA$L<I&$xe$0I <2$I$ x!$L I< Ia$xeH0I 3(C$I@x!$I3 $a$gPH0$ 3(C$ Ig@"I$I3Ϡ $aIgPH$32DIgA"I3L7A"I L39b38$00pb+%(<l3{͛7+WdRmvu%Q]dɒ+WIUy3x eIbb|U,ՙ3gR>녂&$$#mǎ-ۤd'|BPxT/@PmLG&UegΜQ{FDDIƹ낂NUQr:uBB$<*G|3jF)..K.[dRUdj4hPvv68s]^^./&DZ-,,D֭[Æ #"<*g˖-6m"j駟~'7o͍y7ɤz=CZFFFxxСC7nxmzٽzׯ^'DZȭO>ʘPPP0sL.&))ItFڻw!CȤ!t-::… B6lؐB>5\qȧJsb*mN;O?עT[=C&~_zU.4 3f <ls̙O*?~̘1dRsψNII1/YdS9svQ˗/߳gA9'}駟ܹ СC>dRm? 
6mѣG )D |G8psΟ}YouV(q_7ǿ^[뗓oaСm۶=ztNNkWLGUo߾ϟް{naӧOr//cNJ]ܡCj>>>&)555 @Sʎׯ{nzƌ7nPsssI͙3'>>׺zj.]l= KfffIIIRROc+cQg@iժMmi̘1miQXXᑝ-FQhL:uŊݹe[ 2VJMMUn/jȐ!y]~СC:u:r+)V| 67ntBS*z-|PDw;p֭[GDDvyy\n0vi&44TvƂ+'tA(,,7fW_u7ضm^{Mn^_{1Ed21k,é.5\ChhZTƎKCCn[-X,Mb.B!B|.:@?~<ӦMcذa >aÆȑ#6lkرL6BYp!)"< 0O>P^zx>Tquя~OSmvGbbbO6mK,oxB!B.*;wtjccc=z4SL@Yy~p|/m-\\z,,Y֭[9s̷X2!B!f|-cUUvm[EE BCCimmEQHNN`0AEEEP`ȯ{@B_[[@磮zfΜȑ#QQQ̟?(<vP;NcɌ5 UUٿ?G󑙙Ijj*bܹXV ؿ?SN%==g}Fee_ff&'Nl6cٹs'OFUUGÁ7o tkc21c!!!޽7vX8rͣ۷c6eРAx<CQQv-Ù?>x^8ѣG7D>S\.cƌ!99?cϚWJJ &L`۶m]y(//8aONgg'f{~w}#<~<`4裏X|9^G}l6b4|'6l -- <NHHw}7O<7oFUUFv27of…tM477-V_XXHff&ҥK{oADD?ͮ];w.˖-k.b2x衇عs'۶m dfϞMss3^cǎc1m4\.+V+ϓO>IZZ---l6~3{q=0yd>շ~;ӦMAo޼y̜9NDzep8$''шn'22nz'N|'X!B!.w韢(ǓLrr2$;;!CҢM5zhXv-[nf|ּUUCjj}z-`f׿l2ORR ,nСCs=?СC\s5̙3G4䭷n:kN~3믿βexQUKj^;'D׳`t:~^O]]{/=ւo7~u],_ȼy6__O&Zpg~8ołx뭷?ot̙y뮻O[n!44Ϯg_ᅬ㷿-Wfҥ |A.|IbbbXxy=B!Bq1\de={v@ݻDQ8nra41ЊM3@kll`p:p88sL>?~8/23g0n8/^ȑ#Yx1NW_}ym⺏>H_~`Z`ƌTTT+`۩b͚5b0tvvfxVv{9 \΂ X, /餺ɤ]@O?4k׮CQ~_Ů]xGq8]wc߾}f-ZDCC:^zV^??1i$|ǎ޽{Gjt:stߛhkkCUUhmmEc4:u*|'466R[[N'B!г;3Ыsٶ~>/>>lƏOYYnIvv6X, CPW j璒f̘A||< pq|>o寯זS|/^Lxx8aaaX,/_^d2iZZZqn[;~tt4zSNp8EQYKH|~? Nggg`p8Xl)))l6Fc)...,AmnϧaٰX,K/izkY͛7ԩS2e >|+Wjc{뾶cʕr=l2ٳgY}}+/XWz 9֕Buc]/XWz 9֕Buc]/XWz 9֕Buc]϶=Ӟ9۶UU6h*d2aZkp:ԄbѢE}?sii)ӦM#55U"]RRҫ+@z1oVѠ0L:N|'`6l!!!`֭A HOO'!!MAAIII. ݎlUWWc1ͽK`\wmm-iii79o壏>"1!{~xozinnւFߵ+P~gZ撝O?|~ӷkk}e *p8ؽ{7k֬k[Y`---iyBWI]]6^cܸq 6I&k׮J]]]Фs 6ڸJx &5ތ3P&?P]]ʕ+0aYYY3>cFëxUUٺu+K,a֬YOPD8q-B!B+2&k̥kjT 22Lk[+N VU joxAEep`N'OnOJJ"%%˩S466~c >pZZZRE…t9pjkk)++#,,**iinƎ 04}(r8q6>tM>7b"""@E{ d2>,Hii)m}BMTT8BCC9r$fWGB!BqPF}V!CJff&k׮wB!Bq%ZǠSt:SN%,,Μ9#cB!B|(}( t: /B!B\itUUihh!B!B|c]!B!]!B! H.B!B\tvB!B!-B!B!eAt!B!2 B!Bq]!B! H.B!B\<ЅB!BoJB!BuЅB!BˀB!B!e@t!B!2 B!Bq]!B! H.B!B\$@B!B!. !B! ЅB!BˀB!B!e@t!B!2 B!Bq]!B! H.B!B\$@B!B!.I*v2 d4 f(%;n{X,Kv|ۍBaZ/]|>}??7& mI|E1`l6۷]MUU!!! 
0N!7tM}}=0C#RUOss3vɎs=n`ҥdgg_;Xnqqqj^c_UU{]b^y|A]2.g}&-ZČ3"}-EvF_%(@WU3gP^^NSS@zz:pPRRBee%EZZ|]~?OVz= <$2LUUaZILL t:)**F#pQχeŊv~a py !n"-'B\$EQvtUU),,B۶gcuvv+raʔ)g^Agg'~~V\IMM ,ZH{YxZ̞=E}W_kƇ~w}{, !]t %ĕD]NUUv-??6"""c|>N<$==kR*++ zHHX,ohRXXjՆ,W__Oaa!f ٴiFU2X]ڵk,Z((ϲ2{9|>:BSSmmmرH=ӟ()) 664v;Otyf~wע{EBqq1/nEQ%**jkkq8lܸM7tiԩlذ={0i$F}y !]]k)** 99J|>ફe :.礪*6mvMllc„ X,.]8oݺÇc6ѶZJ Η-[ƤIX,|>=믿nO?e֬Yڵt:sweժU >tAy !]={~<8N˜9sztL^ ~ۍL&t:6 \ى^ﳕPUUEMM D||9[4^/---Z%CLL }i/fO[[x^bbbHJJh4( zx999̘1Czq[o̟?_;oHQQL&H\\\ٴiUUU:t(hȏB /qBn@zz:iiidffr*++ 555([ܹA#`6)//'ό9~Z i&6ǒ3;{_jAҸ{M*ŬYF ;w.f *g^^/feժUu cٲeddd{uV߾cǎ_T}QN8_j`͚5ڵWۡCr7~#GqF)==kL:ċ/}p8կ~( 44UU.۶m jedeex^f]ʕ+ٱcwܡU*TWWj*yhh(3f`޼yZ0'fx&Lx/39x k֬>hDnn.]wf}Ys˥]DFF*555{;v#Fp뭷=RTTļyHIIaڵAtL8nDm^>O?V3EQcСFUUv؁$))Çk~3z< >>TN^ѣGzFM֫%>>sQN:nwMee%̞=,*::NGddd7:;;?~Q|v;ׯgǎA< zdddEff&_|;v`ĉ2]!pf⻬X &L``@W+Q%:߯mz+ߑf^z%1iA]YYoNS_UU=?111ǣhnnzq^{*Z+}}}=/2v]+gv?/χrt /}v|>&!ChPII 󟩫Ҝ8q_~2EaРAdddh3h /pi::.n^zM6p80L$''#//gyWW_ IDATz`,]T<9rOxx8IIIzv;6m&3fgRUU;ftW_:t(f͛ԁz?磪*dddݳ~-MGc2{~^waժUIIIiџ}Ytvvj1bDPU }k~ڊ̙3i] xk)M l6kd.:x0ehذaDFFB^^#$$䢎( > vtvv`.\ثvX`W_}5^u)--ӌ96^}UN'aaau]ZKUKK +Wѣڵ!C0s̠y<jkk4i7x#aaa\.|M:D{{;d֬Y|p [lav>cNTUO?vcΠAYXb޽4ƍSO=ŪUغu+VٌlFUU>;(L:oՊe޽455q>\.o6vBQMƲe˴@pk܌boZ\V/ ?? 6xb2d'O#G]ؼy3~ӟ8|Mزe seȑ|#55ށgw[8̙:"Bm̝>1wv"C),)f a,柤CR9y z=\$:")٣2(]mh&q@4'N3!x'#6* ?RG܆d {pfgr’r&2~0l}53'qx_HF6|Ɉ^1LMB\4ŧ)(> e@LFCW8zGDN2NW363A ql޶AUӲ)#D)#0zK|\ǏtTv޽{QU{S>FGGc0zZ@%tU2tjeر|g9skiȫWYhh(\= FjVshg?k]ڻtj:y$vNܹs.''>B>7/`֬Yr-*`+22^l63qDhhh ѣZ87nȑ#Z3y^e/`ȑpXpBzX,L2Ϟ_3f`ըJQQ&M 3))1cJ7hРcӧ ^)Bff&QQQ455QTTĴikllyN2 xamms*z9׸2뮻XZ[[ٲe ~z>t:3?{W[[˻XV>XjK.z3(… EޙL&l6eO!ĥuStvx,h4FEU0+ )9]?s#U?Ѡ'`CTU }ݧ/!!C*>+@^ 8Pq>Oϵ뜚w`ԨQbٿ?"--aÆ1|pmݛ}p:҂.cNVp]SCmM>x LP\vSg}͚qr=+kikP~^&3m(߸v]UzۮK\t$Ͽv;OYN o%fmaZۉ gdz 8w7l{05rs^6K%9! 
x1vqrJ˫y/b "lT6ޡټu/Ιʀ(^_*ihK53&2z`N柠)FqH^_hטEWM#*"}Jٙ&l|mh[ [U\jkkOu_ѣG8q%)l>/yvmA>Z˿)ʙ3gXjA t@|zZ7s*@>Æ 5.{uλ0yd>L[[%%%3;~ :o]줩]vk.mis2zs~_;?z2PX70,, ~oKggulOzBBByw8tPФrv hI?WN[[v=q]Qz ^Yfi=(N>MVVV%O<0k֬~+e~?c͚5ڐprss3gN ~hH|>JJJ())aӦMDFF|B||z?̘t|>?oGS`IZ;Nj|& 9!N b42&w;xP"Fǘl{ NUTE|lB(AUj X50ёDS¡c'q{d Mb{УeE2O/ ]/ё;(,9MltB'JinmgHj6M&=ns%{e„ 兴" 2߮:oR_AйzhiiAQ9r$$&&2h ^z~v~+A'N… 1 ]F֯_m4ʑqͷ{Ken"++Çk3}jtR͛DZc())v<8@L̙3.111JA_vBBB:t( +@W'** NssY+ /_P(VO/IIIDgoe͚5|g|>F#ӧOgDFFy?k?3UUUS\\̩Sp\SVV/^ {5!p AXïDGvnt_˝6A4p<(GG@El#NB\4ܜ? yaL{#EpDrB%+ٴu/wpVa( /_펮lm|>? ^ؑڟϖ݇t1i FA+"bϧ^ ^Ϙt>ٱ{=bH35u:nv u ]/VIb1d>ĉ prD)~$sWN8tu /guGʴ p^*^fYkgŽVJm "z6~~1L&g߾}Zpxbϟ߫UllM SٳgkzrssYh.GRZZΝ;3fB(Z0eZ_$9NE j4dggsNСC >\ ]yDFF2c f̘狀ݻwgzٶm90v8[vB݃S;Q׾*x<|t$I `3`***hiiWW]}-]>s|@/q)m1cZ ^Y,m\tGG6{`FvzYe6 7-""B JJJu:ygr꾔U`EalذA[R+0Whhh海|e5 z@]]˗/gڲQ+::NGHH7pwՂ+NWWWo{zꩠ}Ezrvo߮ 0aBК,_'x"2Cӑ̍7ިM*hy=ߴ@xuOV7t~<G/_~^m/UUk|rVXg"QE(Vaۃ8Y?n[KsRg ( 'K{{;}2= \C6f3cǎeٲeڶ݇7|B!7b6jůw8 -!{pe3w<;pT|F0}Yb1j`35; cw89p6% gj%f%"Z(]-6F:Bx˶d <ԊdbaEGPX|HMN bPk6 ^Gx0[̙jaႠ>m`Nd4H!ͭ‚9] zG #E!++K ۧ ~PXXs>Ѿrz=SNm۶9q~)$%%}-/Q+--M mFUU&0>6DV`lW zp\v͛7u7|A-^5k֜W#F@MM ֭yԩ(dÆ A}(>lb*++>kiia͚5Z>@ ,, UUٸq6vz|wb͚5Y&`Ƣ*ׯinnfÆ M"؝זlAn[0 8γxvCBBhlld߾}WV=Z[[!],̜53'{>![H!3hnuWM㺫ir].s[Djc i~gc3~%8.l!ƟO3"(0n~9i 3'MV(L̈́ip`2 -q5}b'j ff/u^=K~s~khwxih2x۴b"앟}vtV@yy9Ցȑ#Yn~oOii)yyyg ;Nv܉`zb̙ܹV^MUUcǎ`0pqv؁#,,koż$&&rWNn_gyӧ̩SJFnn.0tP>n7/"^{-ᔗw^QE h1bEt:yW0`_=̛7+WRSS2g @ss39gΜAQ͛4VMV̰lZѢEϧm۶1vX FFFF"??۷”)SZ~e IDATjڛf^zEuu 5lɤQVVOcc#fٺu+(´iӴrW^!>>E1h &O9q< f"**nJUU:kFk!`0h"^~eyg9s&Z@1bD?9v$&&***zM8m4݋3'11:6nHmm-ŋٱ\uU]z)fϞbZ~sI%ziA)Jm]+(sWwXQQXX " ("(H = i3G̕Ty^ܹss4kkm0 3,9@ۑf6̞=rG8p gϞ[N-`ٻwݐX/j`ui֬L&8Ӭfn; fIo?,3іSHg@h+N\o߾]oKII9ik`s:t@ ͛YfӇ+Vp8mL&]w/nbЮ];֮]c(@Z~!YYY[u['44xM}rXf :u-)++cѢE GIII᧟~3g[+ <<|f̘?Uv}v b_~8Nk>̼y_~ <PzfϞf…j2rHf̘]رcGnw]/ͻ(tE?`0?~j.]ҥKQUA^߱cfqmQQQ>G4h}kVロ9sPZZ_|`N:?񔯗nEo>~@E\{|72k,}EnAo=? 
"77M6QXX^o(Sk( }pB9u]UW]`yeeޒt€[4}yɉ}ߓǬYPUUj'~?ǝO!ĥG "nS_!΢ }i޽TuR-[uN~?۷og֭Gtt4k֬!йsz-WNkגdcǎtЁl222z_G-Dy^V^q8(Bpp0ڵ }ip@ @jj*$$$K4kגY;Fr2zLݽ{ӷzj233qѮ]cNٸq#Vn6mJ^hٲ%>ŋs!"""}2ިYYY|deeiF$zy qZj^|>6mڄ鬷ΑCm?2Mezj}@hy3 lݺr7nRXeQQK,ѧ2 ѣG.Q*O<1SXj999~4hP{Huիv;ݻww|0--Mo>Rnn.˖-c\-ZgϞmy.//B7 ЅȜRi@EQ~7.՝ h;.9s,((vsm1pnW7w]@}6>~M0 |+߿Aq뭷w@@O}l=߯Gr$Mعs'o6ǟ4_!ĥ V!@d, .tSwNn:=;nTQtӑ p\8[h42l0Ea e;d:~v\,1 fL& ?i\!D=X!HOOgҥ|[W^y6R֭ٳ'UUU l^NN;w$**\!A.'B!~??:<<˿C(\ٳ˗sUWtnrqvME*Cm0VBqь.!%<g94`r\}eAl_'j8eMz{!QfllW!B"]qBeee<,\0ٽ{7Yv9=vee%7n _ xj^cߛ';r~#M{KٖSyJ)B!~3nAB~iƂ (++cĉ$''( 7|3o&}:t&M`2jҨQ#JKKd2ѬY3|>|NVV5"44HqI&rJ,Xgzbرy$''3uTBCCzwTUe̘1v1vƍ(%--;3ӧ /PXXԩSjVO=ML nbHh W˜[RWb-mxxUxL-nnCGiEpܭmIzB!I E!4Xt:|X,ٳaݺutÇQU3f`40`!!!0gnJ&Mptޝ,9s&555:t[oˡC̤wڵMb6IJJ:O5ubݛ^ۍTjՇXlIkr&rJUϗdTQ0*FRoAE&` lƢJj<ͯ!V,Η['Bydk,^}aA@B!# ЅU׷{ǎݻވwrDPPgf͚5? ߏi$$$ТE ڴi3?rFnؼy3ٴnݚX=@ZMn۶-!!!DFF+PUon::utB2=:;@Ip]QƋӯM=[4"pw&tIeۡJR+Yni@r+_l-wFB!w!q Ϛ5kXr%. Y`;w&..P:u1oߞBCCӧ=z`ڵAs~HKKcŊ۷:hn7{+d˖-ر@ΝYb iUUY~=]v=oukU|lˮM=/ȊŨRZU`CFAk<ț ā٨h\6J7k<>wyen?kѠ_{0<)&Vo=!B! iAB(\wu{`.㡇}j*nVL&pwos!jjj4_V~u먪woĉx<8nvECzwHHMMy籶S@)G5,V"}ZE.>Y;p(%G*p+>EQ(((L&7oNDDDml\픕t:iܸqQWP;Zq'@b 7FU!*r\ ٥Nz?O^ׂ*7.O j2h(rU$6BƆZ(]"U'`_M\0j؛_C\6w`VRUY$j{p"Mpc6D䖻B!tMDJqTUEUsgLK-Y<~s !B\8ŽJ Ѕ8Cb U'"LLrB!555ci˟wiUU EA(;O iq!B!.glSUUf`0Hp.PUUlTUU]!B!.3 }>é+8τB!(ED2QB!tI.B!B\$@B!B!. !B! ЅB!B̓~*jw ((*t`cN( ƖʹNB=Kq(PaWэԪa" !B!%dc6>md]q7 ݽqYI4l5nǞ *SgyF6~G' . 
vn A63_NyJ'эPUj J &!&To-$ D(FGY`+9T;?eN"Bm5qQ!zj8tHH,*vx8WIt(эUSj5 `0(WWTELLgႸHFƩSU ] !Bq9/-me,5xʎ$ƆwCdTBImr m_>o}v+}WaЕ--$M+g;0 7竕i_ Ϗ*NՋ"ޞ%` чYQVD&#wѵM<)qA>:ۋ(F{ ҬՔWnk6o[T~|N%>:C%,aUUiЈ'Mh\\*;sMMMͅ.B!Ĝ]Un=[ hE7Uk{–۲4[sҲؼ'{[34 ^_m|@p{h{!p.\z `hT9,]i 1veo>߮Grp?NQúbA2ْW#p9>;\rslYֽ,YѠRV+[8*_meLq_o*a|toN 1NBFp?a|I珃;+&~f.olB!y 7K86ъ~UdGwφ٨B f[6=vx|~.oX;`}BL(înKai SxTٟS㾑]JbLeM横BZf1^_-cq{tiGã!j7]1_[vmD7 "<؊(8ݬښFPͧuR?DwZfi׿B#B IDAT!By _ݎx|~V7ism{v]/#Ok?R|ɀ(XLF>;jP(p[7I/6OʼA6l3*)ȜobK&#iIf^y5~!W<B!BtAW?Cm&1]_4FZ%FrEztH v=AnQ%+f6O Q v B<8WuJFUU4Шvz-a4>Xġ,Zc/}e8[lՋ~ݚpUQ3jXWꔌAUi`}!B!m-@0TݚW>a[ZdB,f#uՋwW/ny:rh߈ǽv2ڻAUvzxmZurǰ7M bB⢂l4RG+ډatjݘ 'L_ۥ)QA=~^h%YC Q^a2Վ>mX͵#;TSo-eŸu( ЅB!Fю|{rrr5}ilړGMN^E{򰘍\1AO1v%-@J&.fKZСE,FHr +ز7UX#` ]ش;Qe +6$3H.$DžSXZ͎7yBiӦC ggeG*..{[nt֍x24M :us= S^^ΰa3fl˖-tҥߌ3Xt)=z %%ݻ3wܓ1o}h.znݺM7Dnn.w^ _^x>==kݻӭ[7&LooptBZZׯ_~«zN;0`999TTT0m4=8=͛ǖ-[̙3ھOKvFH_Q&MD׮]ygo׷ywyNՁcu=q3^~e~mv0rrr~]!MX|9{O? eB߳3~qn]躿F4 z|SSS`nkx>t:|l6L&u-jGb5J4L¼yx<̜9M2n8FƍyYjǏ_~GGEESLaʕ^^ٽ{7C nNaa!Fv3rH͛ѣڵ+mڴW@ @YY^ OUtҢE >zA`HH^gǪ"ˏ#?#Jy ǃj˨i6 UU; 6.]0bL&^6ǃjbX@EQp:L&L& ꣏>^W^oߞ3gc1o<׿~zFɚ5k8q"zo߾nѣGzj}QVXs=ǕW^bѯ(*n~t~-[O 30p@Qχl6-u'ZN'`ZQU@ 7LΝyG󭫛ё>< p>'G*PU͛7+P]]M^^1m4^uH\\s%44?͛7W_` V6pwѤI}d`0in[Ϊ;#F`6 k?7>GFѨLze+ VYY# }?0$**A!ĉy\}ne姺|nK}[Waa!cǎeݺuL&n6aXXh*eee׿Çp84i?DDDԑrss?Izz:ӵkW233Ό3 #//1co_m6222ѣ{/O=w`00`ƍdz>Kii)ߙ?>}\s QQQ|駼TWWӺuk&NH9s&{e޽:t-Z… Yf /b믿bɒ%QXXȚ5k{4i999۷zuxؾ};^ÀiӰZzjzz:C2eL&/R@r1ydf͚ 66ѣGCVnZk|ԩ ׿( *111\wu'ݧdرYNbb" xLBV(--3ӧOg׮]| \.o&Yb@v/_J^^۷ӴiS֯_O?3An:fΜ'55P^y:wΝ;yx׉f͚~{Î;ٳ'o| 7x#?}>//իWsw0qDHOO}̝; &??1yd"""صk6m_5r 6mk'|ɓ),,$""og}Ɍ;rz!+ 3f 1vX7|ŋ)//'&&zgy7( ݺu㥗^[l!//m۲sNHLLdʕL4Çǘ1ckǚ5kaoq:tܙ^x&Mz]Νul߾ۭoΝٵkP\̼y cϞ=\rssӧo6zxƍ<kvIHH`tЁ-%,<Ϛ5 &L|'2zhaX|9&LE<ݻp{1nF3f L&y|A#Gd#3## }QNJtt4O?4?#viބB׶ejG>rRbVk}s~??ٺu+1/ofOpW3oi\wuXj;wke ݨ(.\ٳЗkFzz:檫s _|]zޚxXz52+xX|9_w%--ٳg3h III!== ;IIIa׏#GrAJKKy3m44MYG~ngvFyY9۷`0Y aڵk׎!CvzZFFǏo߾=xv+ilܸWj^{5x5b, ?8@{ƌ3k1bya۹;Yr%xYl_} 
YftMt֍ݻwIqq1wq]vepwPTTDNN:W]uo̙3|}YƎ'|BNNoq[Ĝ9sظq#AAAرJu…sdۍGjɊ+ܹ3_~9=p/Lvv6iii<̙3K/GdddϡCl۶MeeeuNZj}݇ꫯfĈdffܹsy'ygؿ?cþ[n$%%i٧UB!NS5Mq7˗3gtQXh#Gw!<nvn)--%}v=H+//׃mͨQ/2e :u⮻ҷWEO>Rत$RSS/3f onjV\w p:xdeeO?e]FxxIH%%%lܸQw h=nO?qq7i6mۆ0sLv;yyySZZh&MErrr˗c04i'|/͛7cHIIaرvnV>s:w?R~X`Æ iӦOzZ ]ˣyԥWWW裏2|3uT XpiӆJƌK/Dbb"}=8|,^￟͛ӱcӸRh۶-v.]vg{9^/Ce[MhժӦMbK TVVb0p8ERRRkw3?}( III|>,X?ɟgvͶm̑Ad4Mcر;b 0 |G [Wٌ(z>Bqbg%@? 3O m͛K3/?;S[*ؙS8B  5k/kܸ18^{5vA˖-.0h۶Mfh$11娮I&z?oW_}$bcc|4_5jѨ?z֮]!%%ӧ3qD~aƏO޽:t([lțo%446l޽{L_|W_}~K2b= LIIOgڴit҅ŋӸqS*@`xz?q\TWW(4kzMMMbcbk۶m/!$$DJJJNx\Mv>&p^/6 FY>gn%K`ZILL$++ ݎ`?>z-u#0~xz7Ȏ;gpWp]wzjjz^{''… ѣ dJJJpL4IڴiCdd$a64UV1n87nL\\I6OfVޟqqqL6??Hqyv|<䓼;DDD3|p>s>̖-[5jUUU,_G} '22EXXVBUUBBBh۶-7p{=zmԨӧO F4ÿ/N'͛7AW9z`m+**pL:U@HTT7t8p]wz pBؙΑ &nMñk~266Ծ4 vXj,΄[8Rۂݬb@M^,FI=iO.`0ebccٴi7ofɒ%,^2 ժRSS;44r4 `ыiQUU|O?˙={vT_fXغuϞ=\?$Qx-JQBBB h4p8hڴ)OHH,[nA1j(),,pFUUǫ+?5Mc՘f>sVXA>}ضmޕL >N5jbX~=V;8֭[SpzA]@%h~0`Wfo5LڵK_yy9׵X:(..fԩfbccڵ+lܸj֬Y@֭bٲeڵv;յ~3|pE㏳j*|A>C=4;vPPPĉپ};wyyy,Zz\GzDD!!!̚5oEq5cȑXyK?>EFFšCهN8MΝ;)++cѢEݻÇGG߷1sLMFXp!}F2dfJDze˨w᭷l6Dnn.7;c=P aÆ | #bagN8_b;bښO4nLYw=UԒI_Me$rY\Ocb.[>{%a6l_?}M=3^?3zhy/ }Yjjjogʔ)DDDܹs:xogaҥza\\!!!?+^KݑAh$((kҤI>c:ĦMӧ!!!,Z^wÙt[n3fή] ?{wUu>~s$;KBBH aD@ADpAJJՖWkնR,j]*ZQ6UA@Aʾ{H! 
dO& K<+,3{Ngwǭ IDAT}O?ݟkr .py***O~BBB͚57d޽ݻ:ibaܹ|7$&&/qqq7zY\SNxիw&--#G^w;ɓ3gAAAC˖-={6cǎeΜ9.cشi7nd͚5xzzrEعs'cƌq:t(*< &L?'44={k.W˃Ç/3:-9s_ФI=ʟg7o΀Xx1CT)BCC8}4Ceʔ)g̘1,\gr\e>z(ѧO>sssYl ,`޼y<̝;U~///>S,XBuPah4l2~/Kz)V\Ɇ :ujmt]ߟqFk}PJݻ]1ѦMFO?o_ǓkƖ-[سgL0Mzlذ3gGvvv!;v[oG}䚸pʔ)W">>gy_,_}?STTj%""Ƹq>}:Fr=ҥK)((`߾}nݺΊB!n⟷`޳^;@rclprKŽ8t-' #9_ZYL#"N׈ ֧Pt jӹU" ⟻ʀ&uM:e5$%]Zg-gDt3_]qYǶ< ƱseZBq6'|K`4]?:ubԩb rssy1b͛7ۛsfΜ'z MXr%&~ӹsgW: ++ 6ТE z!:wL\\111ݻ {90`ZZcǎӹsgm6lÆ ###̉'߿?VzI||<}{k׮̚5 ***h‘iKll,fju{OOO 駟W 8JYYѼ Ʉ``Сiuٳ'ݺuC4 hŋ'ҴiS0$$$oӳg:eTT111R[[KDD鮥x/&&rC6m7nqqqDEEQRR APʱ٥;gZYr}YڶmKbb"qqq=z4LBDDÆ s-]էO4M#!!5vtt4SN%33.]KZZڵ#00֭[ӽ{w Mزe e˖e郮TUUL޽!;;י=**H d?h4r=v1k,znӭ[7W/[o:?yyy4oޜ1c8{ӭ[7tBn֭3<$$>}PVVFMM 塇ҁr.6l0u4xpu֡:.TWWHu:Į]ѣǏ'##$ڵkGjj*۷ϏvѩS'Ν;ټy3aaa4oޜj]iӦ Æ #""˗uVڴioV+DGGӵkWW&%%1p:KDBB۷yyꩧeСTWWSQQn>ZTWW3t:sfQXXȆ u^x .вeKBCBiՖ(*+8h ۷G)ݻ `ɮ`=zмyst]gذa;v5k`03gqqqtޝ/_Ή'xg3f Lbbkx8DMM =z୷"""↎_!čunһ[.qYZyg53<LJ;J-tiHOZ}WŇlK+̴Qr8RV<эiOwb_E|>Lj|/eQakZ!Z,V;r_ʮB-*v|nȚ~+\9?2\e]޵/]}-IWy4}iYofɱ\+׵wvtRkmeαշ\?z<Թ֒[wL{߷yy_-?/F|P ]5^~+ۥ7ڍzW sVBqgܙYܯ>Eh€&TABi K9vrZkd|V;m2K;5N0T5dutW 45 MS*i.1v}#_Юf;ZEvv]ēRa`++jjo6x{QRefE6%#̂J&s/}:k^ ]sd^9c:ċqMigwYULAFw#%MɣU7v#?wG!B8ZmΫlet~76~>I}#y0%9ygIRl9Ϩ.-t<ƒ=ټ8#_<ӇZweq*d`,Kラ=0[|#R*,~}_,'rY/["Ѡ(B!Dtc3/ZBx{_fbI[p~2oeu 5R]k%߯y tʪ- 4b#Zo䅮k`}n[, cЅB!wm׿ ),L۩=Yո^/6SVmbla%7|Iq-7&wU#B!h,~Inƒ=g9.VޡoZCo}n׵!B!ĥn]z jVj']xn8۷oLv]_}B!'B!B!I.B!B4vGǠ )@J!BF-CiQqG(ۥB!B?ڷ-[]dMfVvqB!wns>teXl2][kЦ/_l3t.B!DsB!B!n~ B!B ЅB!BAt!B!]!B!h$@B!B! 
ЅB!B@t!B!]!B!hvyJ)Ν;Gnn.J)Mfts^>!Bqm vϪU(((X~D߲n:|W˒[o*wcƌ!33m۶FXX3f +MHNNf۶m1qDRSSINN&**A԰n:B4ٳ'7Çٺu+([n%?wׯ';;Պh$&&!C ѣGٺu+FG}:yW_u֮7nɓ'1͸Ar}vZ BBBӧݺuSβ^LR0p@F#cǎ$$$b rrr1b]Z~_3gظq#XV<==ر#tTaa!6lԩSf<<2e]^"B< ЅhDAyqq1[n%)) //:;/J:YEE|躎6|;F8q"NFFYYYhFjj*J)|}}bݜ>}ڇng֭;vDF#999߿3w\233]edeeŹsd2a4𠤤sα~~ӟMPP+LHH ))N>G%99BCCQJqi͛GYYxzzRVVFnn.gԩt96mbҥv|||pssaܸq1YYY|ᇔ`00sI4iB~~>>>>̙3MJJ ?яrSjj*&3gθ7Lpf̘Add$;wU/gϞŋt- !⿟B4"mڴ!((K6mDll,۷y4k l/$##???y:tdbΝYm۶I޽lBf2dAAA4oޜm۶a28qs/**ٳN=tXLyljbf6mׯܹz?zh郮뤦tRoyG]=nƅ 1Lt֍ѣG¹sXt),^3gҢE ZnӧINNgϞ}:[t邷7&?2ڶm˄  UVqNo555l޼ `Z7o̖-[=z4Fʂ ())e˖7pΝ;]Af}?N`` Ç'((Μ9Ò%Kl$%%1rH/ 77  /Fuu5| k׎ӼysYjG.f3%%%?DY`EEE;v֭[3n85kÇY|9.\tk׮xyy1fzf|>|-[2z:}iڷo<@ӦMl߾H~bccY`^^^g?#00PZυBFJƠ ш庰۷3g?Gq:`sssINN`0Oн{w fȑ [|hh(=s4i҄p:cuOWWWӺukLBxx8DGGSONAAѽ{w222(//wUZZJFF{F)Ş={(--%((iӦ'Z'YfTUUz(܌uǚĽދifT'cǎϓO>y!~~~< 2nݺΖ-[u{ЬY3h߾=SN͍ϻݻwSVVFHHӦMM6xzzɓYwC%(((psscڴiLiժJ)Wtg0 0vX/ 0Çw~6mʴiӈߟ.]`磔"004M`0дiS4irSR!;$@48?ҲeK0rAyϟ 0̈́lZV+qqqhF^^kS ujF>}\:v<@bb"^^^s+r`„ X\fc…;$''WwEfϞ_z[e`0c;h_n޼y̞=Յ-;wZ8K]]= N:uERe˖1{l֯__AA'ݫW:k_pN~ IDAT>}ڵ\Syy97oOOOz՛l>3iii===kFf[PYY={6sf]3iAB!H.D#a0޽;q1[̢^SS֭[]dvח2֭[ -˗/';;??+^=z㐝3z_:iW}ڷoOhh(V8ݻws9<<<"445I7|:^|dggvnעik-;wrYW}ZV6o̡Cs=pn"33פu}ر:Uv;b۶m?-Zt|PPP`gɒ%Wڵ+ƍ]c+**)))}Fv -[8, ֭ѣ^.=JJJ.l6Ẹ-[$,, ˗/d225Ԃ,jτBM H8>p1qZjUUU?JE߾}GQFd6mĹs爌CQYYIӦM]sݨ={j*v;ӳgno07nرc更iӆRRSSݛH4Mdܹ9s/FJJ yyyxzziA}uMOHH|3GDDFNNhɾ|tNHHH 99瓐@%%%J^СMC:t'N{E~~>iiil6<<<0|W :֭[ӷo_v+8rF#7oM[hĉٷo;wd߾}tܙ&NHxx8W&33p2dر\LAAA<3,^4vzh4r=ԙ0oԩX۷,V]\o&4]g}FNN롊&M!qr7R v9k2: ЅBGS2kRMM cpss#,,kvSѣL&\-w#Z9s2F#W]R9sz-hӦ[9Qڙ3g4 ???ڵkwܬbN8R ooo,u9ukVZi6l4M#<<777kM(//G)=o<<ȸqjRdggЮ]:K]4R]r~wb㔗:nMJWZZJmm-4B!$@Bq]ǏgʕNwrf͚Eii)=f.BXHw!Lff&J)VX#0|rJJJ\B!ĭt!ץoaݺunnn>}k|!Bq$@BqCV+ٳl6DDDZ\!B ЅB6 p̠~3 !BI.B!B4wfm!B!B ЅB!B@t!B!]!B!h$@B!B! ЅB!B@t!B!]!B!h$@B!B! ЅB!B@t!B!]!B!h$@B!B! ЅB!B@t!B!]!B!h$@B!B! ЅB!B@t!B!]!B!h$@B(vB!|ϼAnwBjyf233 eȐ!\5k0x`Zhq9t.\`ḻ߉beee:tqqqVgϞew}p9뇮si!?l6rJfܸq4ino*B7nFb̙  s:t=z$s4㩧ŋ3zh,qB! 
#;wdӦMjՊ ֭[``tؑdrrr'f+Vp y衇hݺ5ŋL&233ҥ s~ZObĈܹ_+,[swaXׯ]taӦMݻӹsgΟ?Ϛ5kc /R "]BL6os=/y'Xn۷A͛wj**++С `0~v܉C !**bVXAii):ubСhƾ}ضmߟ]RSSҥK9<=]Mu)!DfԄ TppUJ)e6Վ;ҥKi*44T%$$u)wwwo(}}͛M&M0uq~R?oZhJJJ?t]W:tPnnns… jԩh48Tjj])C8tԊ+RJv5zh?ڶmUxx Vk֬Q PQQQȑ#*&&F:(www7ߨ PNRG+B4.OV*::Z͚5KٳGeddRdvڥU=?JKKU~`PC-\P^Z&M(ooorrr԰aÔQj…*$$D%&&vکf͚45c FFz'jT4G !ˋ۷/cb'|m۶ @tt4/|G,[2{=QXXҥKAst7o-ȑ#,[GvZBBBXv-}555܅iR/fϞŋ`Ŋ||4mڔ1c{_L!wOdd$k$%%ѷo_vuEHc>RRR4iEEEh:unݺrJ6nȌ3Xp!Zb޼y=zR9s&O>$}Qx饗8p vnTUIw!TWWSRRBfͨeɒ%L8:vɄb_Ã={@yy9M6exyyl6;:u͍A&MAZ:u wwwl66sM=hڴilܸ `hphBƪ֯_Oii){^cʔ)#0WJaǸuݻ|ngڵϰa4h[ln|rnJ͉d;O>H|Af̘GCI&c5]FN4Jyy9?>R;vR9qP]]h$$$ (**"//]JB!jȑ#F||PRHu}9+DB+-!) V=!iAB9k?xy}w_~W|s<{ ;{FO ǎz`a8{s|Wv=zPPx{X,h} ^G>\d޽?Ma{vfעuG~;f́5П>ǎV)>t ڤ'ٺi&MC?}d?\ر }cC@ԕif3/wd]{I=C@0NgRh}ÎcKw.B{>C P9oן }(ǹF;Z~c:}tĭWf55~tɽ6 PXgΠ߭ϖ& !;,VYys-= ю p|;ZpٌVU(OO(+s@~0ǗTס 0.^tQ`e/PXH[Z]Ai)63ڵCUVr*Fu|i'N~G!GKbwGKOH?hI ܫX*QFwQvskz 0QmۢCE){`#O/#P;:oi97bs11srđUSV]hM2GWv'm/mu(St*+wSgm$4mx|} ;[<[.^V"ڢ/5t0;3]w)8 [AG>**vX,ku5߀jԣwQ};jp5 }`yPq\W)jaJ9~|-h;BXTx k߿og8y 5q~ ߡmꑀx 쎚0Q'!(q.nVU& t\8<9j U+0z`CE])TE^юzRYSTҊ{#1 xQ)heed8~϶i!!KՎ{Dj\gscb\o !fD[hVTCdkG"h /VE***PLBoD>c:)`d,7~w OB?¶|ƢA=Sǟr>PYoQ}#`fGñhf Gǡޚ4uO/'[7ZoЎԢÉ|ztԠOyt>f~2۰o, YS_/# 1D5 J 0p2pxa_5OATT:a I}Zqg?AО|ƕe;I?>h߭Cnxy I=~2xz:Gcp4m()ArIŠU3~ntp_Eі稫Jľzaſ?~j͎Q} PCn/pA  ^:DyŎVQ}~F4br>t(]|=(m?_`E0[~ dF/Å~ ZQ [JGvw|n*!!@% 5I I# WQ*."- "R"^%In6s?,rQr~>KvgfϜ33i6y>F K/`?->:Xz7*+pvD[X0jĨY_@[m7 .W6Q޲ esT8 5~,'8ںڣ-[ ?nLDz<}ʖ5az Hr%T?&x/j3\p݇ipR23 ղ 곏ѦŢ}Q֕B-/%]܅A ɨ>KGFA;s~S}{TA;| jTG}1LOV5ṛ`8}~,j0OqZ IDATQ6l6ZQ}c1n?5bz ͆6}&*9wbҌ5xXU@N0ES:c'QP1d5 Ѯ37~W19h\2|m܄zEcoڢ +ϣ|Ja4iԾzhоYzx0 7zC(u#/5E~? ;wPϏM;`vU'/51TtkH@ {wQC ȯShwiRڣ}\B_25$+:47yc 1uv*5V+?4eL TD u0Z gg鯡mb_ 7G1 Tn;Ԯv0녩}ooV*+ ׆)v.oa6}v1GJ=Rd*<sUh߮dܯƌD^u˰cٳm7L10 N8==52z -J{`6/2c7ڡK?͊Cچb2*SQ Eru^@Oڪ`7ǡ/-TvK@HeLK mt!~{==Q:-1p$̚ot k齙h;Y׻PJ@?*ݢ 5k͞i:X28 -F7[ohkCΨ0 nTB1Wo(r(hvF[%ژAR[atwlyh.Z -C{cLl @f\*LY#}|Z0N4(W}̣yPO=z6kQn} & U71EVuVڡ1mNvS,UD9RFWMvml ѮBZP-lv8zc + 775:F꤫^tDկ"@`L遖|;{{CTk|M2ys:h=gtVʣJyǩB@FgWC͘c ! YE'? 
}P@ե=.^T7kоrh/ȯLoocXhIɐ-ОzQ5wի-XbIM3Y)h.W T 9hA=mHH0߬ 4ugL//_/ES'4n" b"Q : m"7߅oC~#zGb7 L*:rqnhϡ9u\M)ՋC5C )^8~ӈjn|nYY7GɨuVb͂j m7h_iTNFkza\ڋzt' J[P1ѱy^2p{h$iFO<#!+Ǩ@ktL6__:CgRP}j᯿3(qi# & QF/;㌊L]o6 Ѕ.ɵbԯkhϽf> ԦƘI@zj[|hoMoi8{wkh)ɲFSyot%//T|1 ёBhSfJޛ ?ymzEƘe?_P99FSSr}^ 3nD|U n?ǍFofô$oנ}W~׍ L~}u lۉibAEoyRоZ FPiNmQ*pKͷs;᧍ Μ7U.ł>=#hKş1`_>-FsO"aBVc|hbA+_hd}tefCF&*tj̇˳ $mpѝ|90ihF ܺ9tꀚhNIA=4kh&.ժ]qFѹг+4e+~!0h뷠Dqt$A 9g+=S.]A%&}@@$?ڢ>[Z0+:{FBW+wnoczC @uvM΅B/ej8w60w*h?" pw<~=PO?WJ7[򕘖yo#c_hw ЅE^d2<ϜG8m^eL/ лv4c.'Q>Y;l\<]pW2NշR{w77Th&ao9Nf2hk~D G7lCnzyBz&Gsжt}\ ^jXճ+ګS1ukLzrbi @`qC5 C[uo#U,xAh7e3} ?lC=OQ-P؋c8y tZ-لi;ڊѲQF+=1[ާH6 G[%8zZ4A(chM˿3hN'Xr7ru\h^VmFeQjBNju|yQ敏q .I"׃_$&>S `@Th/Wu.\Pc~QRmbA w1uiFۘ4͘/b\L=ʢ=aqQ/j\X_㨱P];YddMx խ-FB_y}xch>(w7X-GE,P>A۰ +`PEgh?l'P=;?3¥hKƸ{}ڌh#,SNMc`UEu<ںhKԋ1 rЍA&Z5s pob8*rF~? 텊l +rX}cCFP}lU3W Z41kQFOi_T{]&j{j$FFA.fl]/X`vJxÓ~=P1?+As*Q8ꭉP5չe`@oFU$x Fihnt4 e~:hܡ!UnW*Q}cۡPznV͠S;cX/zQ7JL`ԓ@2ʒ BQko*7 50˻B* :j9Pqoi(FYqAx}Կ_Aր1$sʡj/Y!N<={HBC^ʕCVa4|T 1&,W֘O{P(ժ׶Ky׹P?e? RV-4hp۹SR$%%?vky')B!& Ѕ N:E>ʕ+ԪU \{UVG.]ӧ%/nڵرxfRX!B> ЅС۷/Ν;ϰX,9X@^Rm<駟FuJ.}&B! 
Ѕ4X]oM0n=o$%%q%"$$@vIKKfKbb"~~~xyyFbb"٘L&ʕ+G`` >*իv D)Ul TRElNٲeZdffၯ/III$%%QreW7~]III!11͆;`[IJJ VՕGIIIRXz ҜŋIKKd2L*U\.X.55Arp:$&&bB ;ֺȥKp8*U *9B!%?Z|Wٳ<DFFҽ{wr .999l۶]v1x`Zlngڵlݺ v4M#88={v[lٲkגn5u/9t=zSNE;v~)k'`߾},^ƍSR%֬Yfwtڕ,.]ʡC\nnnԬYPR%y뭷\={ؿ?ݻwM6̚5FAz\n:6l@VVk&0_d3HMMeȐ!lذt]Vh\/_ݻxxxCϞ=T!B?c߾}ѴiSx"ǎcƍ$%%cIZ';;eRbE|}}QJ}vV^i4hЀV+… |g7&۽{7K,Auʖ-KڵIIIӜ9svNgt]np8Utb9r{E4J*;J).]O?;ʾ}8~8~)/"5krY222rʔ-[dggUVikצFdffr<ȕ+Wxgw[nn.sA)E ĉqFu+VuViѢKvuMΝ%]!&▔R>|}aXxiذ+P;pfȑ#:tM3{lKfݻ7`t>pJ):vXk׮kdeeq7ߠ: 6m},]Ղ!((|ի>;v&#FШQ#׾hт+W??8 ,`4jԈV’ٰa`S];w̔)SHNN/2=FAƍ4ty9vaaar!4McᮙR[˗G۶m#y(B!nN{J)v @XXX <<Wd_5jвeK"##loooh=JFFKfEƔGFFx l63bBCC1LL&t]qPvmJ)ʗ/URJo>V+eʔK.EƎWX5߁p~~~vJ)WkRY>GDDЯ_?\!O -B[u=I&7dN:>|ϣe gXٳ'Ι3gz*III\|JRxRTZl6ӰaCrmM|WZjQY“ծ]U~i&OLlllv IDATOOObbb\]oV!B!, Ѕ*** L||<˗/'33Fjj* .$''???^:cϝ;GJJ yyypeߏf#''{2gWkZ+aaaTP΂ pV ͛ǥKSЂ~'x"s-VYZ СC$$$``Æ ,[̵\8qOOO聐 I&ͼyHNNfͪUd23J*EJJ k֬!''5͛ݝrB!o']܅ikצSN] 6uVJ.Mzz:OOOx"UR;wr11d4iBժU9wgߟ>%KH~~s:=<<2d}/_7쇄p"լY3nJjj*s-urߜG۷4~mʖ-KFFAJJ CiӦigΜ_GWOYp!GeĉKnn.f5*WڵcŊlܸ˓Jff&&;w !B2ʄ >|8K, ))#F0l08ŋݻ7iJ)f̘O<_h4h~&11QFqGD^رcUV1p@rrr@Jӧ3bN @~~>Gf\:Wҥ 'N_3hР\J:W_}?ԩSILL 99ӧ/dN]NiqٳDy駙4i;ŋ$ L0kע:̝;3g2sLfϞfI-̙3>|-[ܹ3ϟgŊ<#%26ƴiӈu 4Gde8NVXIΝbPJqaJFBfSmڴQSu}뺲Zj*]וWKVW[lQJ)p8TvvVNӵ áNZn+]ʾ*<<\i Z~4MyyyoV-]TO+өW/,!!AUPA-]T)eWNN+ 婜Lue\u]VjRYYY| t]W ,P2믿VJ)t:U~~+O Uv]9p8䥮j„ q*33ӕ_өrssf+P*//O麮T͚5{?qzRz|ui뫼믿*Åp./pk׮eZUNNr8nen+JMMUAAAjy裏jܸUm۶U6M 0@y{{-Z(777hѢ".VH^הc\> ֵZE_7~+ԕ+W裏T^^;vzUrrZ;v-wիUxxP6mRS>>>lٲ\rJ**55յ^<+R.t]WvK"CAljs ߇ VG7nՖ-[T=ٳgo<==UFFRJI뺮f͚fիzǕjҤ(rM,(y[p+\2M999Ů eAy3]է~Thh4h*S TGUJyTA9)Oݮjժ{g҂.Z;wٳgٽ{7`tٚ>}:0~xN<Lvv6ƍfj3f ͚5Yf+dggsqzœO>Itt4ݺuc߾}ws3^^^>|tlق7hR MӘ>}:C aDEEske3g^^^DGG̈#hٲ%m۶?tuVzADD 8N-ZD퉎fȑѫW/Y|Ȟ;ttRd8NC=Xf J)|MFAFFӧO'**^z1rHƍWavv6 /_N۶miժɓ0i$"##i۶->ND}Q"##ر#K,B t҅3f{sQJŋ9w{E) %%G}֭[Ӿ}{~m4hG&%%z{̵coy0LEDDC ʕ+XVƏO֭i۶-8N>[n]???ٳʺ[uG8Nؾ};o?#??#O=ׯnݺ,YHʕ+]s%y7&::ziӆiӦi&{N:ɓ4 ]4>߿>}ЪU+z͑#G/R~=zkx'C̛7]]H IOOgܸqk9sPJguF-ܹ3۷o'??ɓ'Ӻuk"""x'X/nSB~G>s 
ʗ/v#G{EL2ƪUӧdee#'py|AFE۶mԩ7n$66sαz,%˗ƬYp8X֭cܸq”)Sعs'5kd2ѲeKl6=.]bڴi|t҅|M̙Cff&}{{l߾3f͢IKK#!!={йsgf3gϞe͚5v?w}Gpp0Ʋw^vt]gڵ4lؐr1|.]3|?R|y^y>s:t@zz:Fbݺu=:wٳ5kRzjԩk֬O>iӦԪU vYzp &ЬY3իodn PpMfp8HHHʕ+E-x\v;7of4jԈ$y9°aÈgС1fRSSYf f"00GOТE ΝG}رcٸq# rUsNVXAdd$L2AAAxzzҰz"c<?~CrAFU"d4MAl6WcBBcƌbŊR{222:u*18q:ȑ#9uݺu~`ĉ̜9w}nݺSO=Ç0az^|Ex7a̝;UVlm6sCʕܹ3;vo߾L8Eѷo_5jK/W_}Ebb"֭jp8غu+Ǐf~zvM^8x [.^^^zZt!ƃ%KR UTnݺ^4lق/3f`٬]ڶmdO>FkiҤ SLaT^7no>,kn] e>|͛겓'Ofܸq8""j +Vl6nX,̛7ɓ'TRl۶}All,o6WgϞQzu.\x(ZbӼys.]ĦM\2:ur09ss!V+-bƌ{Ŷ;ʕC4<==ٸq#seСQJel6ӯ_?N>MyZq8E6Z͍?9sмysǶm0Ll6rssgΝTZիWӧOҥ 'O櫯b>}INNl6uVF'2qD+GϞ=h߾=7=?A)Enn.6 ,hԨÇ~;wAҥܹ3'::3fp"*x衇>}:ӦMڵkl۶gRtiN:wI?> dee1k,MիرcrYݹtO]v?#Fp8V 4bŊt']ӭא!CXbݻw'))ɓ'yq]v)SXx1˖-#=='NK/1qD֬Yѣٶm\z0z6رB||<:|aX={6;v`ҤIt̎;N"]ٰaڵW_w%((N'Ÿq\L111nݺ4jܝr ɓ'#<<0j L&& 999lUQ\4L&kyłߟSwdQFL<Bzn9HX,źGу|o֭[Yt) .zꄇS^=1:UŸtt:]s/)0|p/fۗe˖K`` mڴqu,=p͍ӧ~zCll,Oooo 22FzjuؤH|/I&L4 ^{U~M'|O?M.]={6`T(0Uz{dž ^s={yyѰaC, -[q*voSB9tӧOQF.Err2N\k.k@M\nnnCƍiذ!sߟ2eʸl.r^\K4h@  J*qqu~iMB///ZlY",e˖ea2?UFHH=\vUAUVBXXuӥjժEΓn׽eРAƽKӴ"[qAzիlg ]׉%>>ٳgt:xG\9NRERu_QJan3—.]pС-Z <<ƍc.]ʮ]ذa.7o&..?gF@@w&332eʰm6yy"ϣnshA[VVk;ׁbػdBq(Xlh"mҥK`ńq5ƍׯϟbt:YdbŁ?~<>,O&&&`jٲ%_8f3AAA\r]י:u*3gΤGӉUTԩSX,ϟO͚5dܸqkۗ~Ι3ԬYuc_u18>ȶmغu+m۶eÆ $''\>}MÆ dذa >ZXXʕ#%%W2|p._s=wQU3dfRHI HGVD v+vWbCQz -${2d3@"|^{s;SuFSSrrrx7inn&**usmqwp8ݝPR #G___zcĈ۷VW_}4.2.]_|= jR__޽{yGh4̜9^O}}= L&***HKKwfǢEx馛x픔 =[Eyx衇hll$::aÆ'1fbϞ=5{Nkk+3fg۶m 8Df3%%%[o=`0ЧOV^o}Gxx8zL YYYhucb|sɔe˖MYY_|1| 'O>?ӓc3g?xggĈt֍lt:_~%˖-;&ӧz'xk]vN^^sܹsύ:JVXa|}}իF2|-Z8:i4[x뭷?~Дvꬌ|嗁2%))^{;iӦOhh(*o6_=gHZZUUU<lݺ3g@CCW橧"$$xww?j#F[nLj}Ynf:Ā3f 7n?OtʬYxꩧt{TӒz4~WsB?f:FZD IDATE]{C[ZZM7ݤ&L̙3UTTV/j߾}jذajj׮]jj͚5Q~{1ԤRSSՐ!CTFFjnnVSNU_rOI]wuWWÆ Sڽ{ ֭[ռyU]]5k;P{QV.|U޽UyyR&MR*&&F͜9SQ >\EGG.H*ͦx ӕlVW-\,үgj꧟~r_Db RfR R;vPJ駟AkF%&&QF̀ܬԋ/l6zwUBBRFR7nTMMMW*&&F=sb5ydV\zT||jii} 72sLum:5fu}J5zh5w\j*5`շo_uDgC_UUU) &ԼyTkkzgUllR'NTd2zHƪX/V{nuH5rH1cJLLT g~/Wj06nܨ 
Ք)STuu~rssS_U\s*--UVUos^Ǐ?jjjR>դITdd4hZnl?V}UjԩFmڴI=Z(ͦ.bK/)ժn6oެn65sLUSS.2o]ǫ#3ߙS~WQQQ{CԕW^T~7|_YYݻ6l>|۷/P۶mSƍSjȑj۶m*11Q[N)ն+͘1CfJﯖ.]v:uzWRmqƩ"zj5p@u]wu֩D>5|pe2رcUtt3f|Р׿:Tr8jjС*!!yTꪫRjoUC}w*11QEEE/\5c bcckvZdsۄ_&Ju]DlV/S}QFR6mRJ)UUUz|nfUSS0UWM:U;V*//O]xj˖-J)|Au+ͦ^xկ_?*(nB.R455hrwj&p8>nK >7|iӦuamnF,/Պl`0tnjjp8v֬Y,\|||VΚ 6 &пz!Ə… ]Zy-[FRR`Z1U?\΅fd21 0j({1~?gI{jۙ6mUUUX777xzz޵}{w]_vш$$$O;[v;Fg7dh˻bw}'x"0 J)#G;ng6Zxyy˧yNwmQJ|'Î|&vL& r)//g9R.frhG>/O3͆Fq^_ۦ^޴b0\f9W8n Fq, ^^^.GɄVh4:ϙbl6;˽˼dr~NtHL&tdYj.;Áj=kpvoy6B !~x7}VK.]~daEDD0k,6m7ߌN|;w}}=W_=/sG}N#!!죏>JFFcرFEhGΟ~^.gL>X't&Ju:/s߿2>^ٷV^̈́ ;ϙ@OwGH..7Fz"Ǜ`0n}2sYG׶N;RKM;77S8ا8SsYG8~t{6ut.;Go?CpߜZh4N3LPZЅB,ss%pB!Et!B!Y܅B!BN@t!B!]!B!$@B!B!: ЅB!BNs,&`v2B!! wwwt:NB!8dt!l'A!Bq2]!B!$@< eB!$@?O(seԻ\3ⷥ`^ &3 Gi:OM>D @emjo^.Dz,FѐSXAM!z;lKnHl/zqV[JjvA~DáH?TLvA}z?'n:-hjiuGW?VN=BSͺ^޸ncڸOͭ8{9MrV^Y7R4jXܦRf@~?(}/ηvme;d`n28.ů/A*jUhݟo=Hmf9{әpעi 7 "4ldid}k}kKsR^nG1j@$6.B@O̭VͺC%<ߥ.ǿn`p?F ~ Cp z.|}vrzXKyiJ}m4ޓ72e&>y{9^zZZlVo/y!\GT6`nҷw>| AqEK6e຋p~ʺFlvCX}|.{;߇5R|7sCPJʼU6 7LƧ?$Իi̸Go_)Ί*߸ =N.|hsEw7$}" 傾,ڰڂ{ؼ'*FW"),WRņ]<9m"}-7S.,g߿B>T79k6[o介;%| *jwSI)fcJ%u KpvtZ{f..ym.u~ jXy.ΊwƨwgҟX-CŕMՇ%={jjneҭ\<,Og7?>'ґ ,z^旑[[ ԭpw;w+aZvnn|$?qB|$R3wV?Wj5^1=I\} z5 64͗?9;1 q3Z/pWFf^)Mf§y/+e5!DmԳ/wb»38.r,'֝>/cΒ;>č\96,͙s5ˈR`;ؓYH/#/[Ŝ%?1np ]w2zX#.ޯ7=9qcC9/3Guw)#[#qVQH]c31 '* 7.OCi514D(zucp\85 t aU~pH>;<=yxƋ C)JaۿqsjDOn44WJ}S KrsMRCt!'Z=,8Wƨw;7?Lcs~,ےeѷwC"a8i(]; Fe9SZSKK=e~CS vZKÁ]I;TFnh4g< Gp(Fatd?6>ȓ, 7sٴ; w7 {᪦R:"YyOҪzº04!n>eݞܯ[ay\lM;Պ@)_G2Ƌ>hnxлoaxH axKٟ[BK^j埳W0~hnM%a-G1 w3B1eL;祮>Lr45 ܧ46a66SL7 <-qh2w9^8i_̷&6;Ǡms\:2y|+͖#~}MflvosʚFt:˘#SɌj|Yvnb/=x cw2oY2׌#撞S|N>z_9 0O)+Fæ*̷q,ݲKPm%^ĆӓK~o6|AZ%΢W_bO B /_/28pv`ta{&$?O4 t10&v?LbtO=qp)wfr0/ɜ%[i6[(lᮣoק0eL$F?bdlvy%yT"53SLBuǹo9}|a |3N7ElٓeBG1R^@9w|l \q?o#z7g]Q]W_ROoPN^I>^lN2|R$ᶿjnqy44d*!3+̿8'*MHgH|8^.4cJ*vNHZЅhrUPNTn4|d>bºj9p\2<7}2:I!Ax,OJ>lؙRPV~a!YymMYu=bpi1[,`'xNf'@>P 7y˒/g/_c ӨjSޟg>~xmٞެBBh&"K6etExH ]/|FqeKlhZɡKF$՞7_&^/+&b2rhUr׋jk:J)VnM#$ "h4hST kjHcdƟDz/lP\QïESK+V7-d 6 <8≷ay儣HJE͏iGu!2׹ؙGJf~]</4ȏ>,OJcɖTJFLxwlv_U;P 
bºWxͯ1<flJ}999Kx扔U3_(sbaƿPZ]7Lbv{7\214n>»_n CFd>P@oa jLt=Y;!iAB; z[}8&^_ȇl$׋7f_o# }z0wVΈ}a>v˨ʺ&@sN `Uy{z^BDHNɊj.Om˧i48[=C1jZ>=eY=iґ L{sy\!|v >.+sWbk:OȽk ְ4|<}ۥ w;086iS2kƕL|m k;?q?b9rl"F&ܿڝ<$ V3gO:'+ݭmaqbJζH j|#!<(뵻H¥_ZCs^ yiJ6:ރWrZ6J*)$V;WAxE\=n o>v=o˵OCq YB^iz_Kzs5@>6*fk<}"~L9?s1o;iܿc2nI%4ȗ{>כ{m)`Ԁ(>Z]yy즉<Ly=x i(wr}ʫ0䋏gVz4m IDATϫ U5?#e5tA.t}pUih|jeA`pGuMUX6 kiܽK$@99/^X,\r%7̴i~ s̡qssc);>̼yhnn믧gϞgtFÆ ؾ};GfرrJ=,X;"!B$@>kX,,^z.2z@kk+999hZ,YNB!~- qQJi&,Y@=ˉDjj*K.9s3tSgffELL̯~""""뮻ҥ˯cZٳgf'n{<]wfd222 +@&[pG}}=FEi5tƮv4*++=zu@h4B!B$@g]=z ((cOا3Vd[gRz^^0dȐv%8Ωpk-003fgαO-ϋBq>]R۷Ѐ^gԩ3#ظq#l6}vpww'<<1cr,JVVw了Á/qqq$&&ܶuF~@UU4551rHp8dggLEE:???NL655c (--eDEE9?ҨA82yΝ;)//gРAh4֮]Kcc#ӦMsEZZ[l?efvIVV2p@NsblDDD0tP@J)Eaa![nٌ/ 6 Xؽ{7---l߾|FJ)***HNN9q_^:t(g 8Pll޼y?h~rsso߾.QJa6m3oܹl6ݛ!C@aa!)))4559Ì?9Fr+"))Çc2$>>1cƸ immezFٻw/  bbb>|-[P\\ /QQQ5 pFD!m$@ܾe˖ѻwo魯jNzz:{& eLџ 7p \rss O?,ٶmW]u&Lpvn ^WWGjj* to/8ǾlٲOOO |>|]s5L49d>c߾}.޽3m4g_[{âEhmmu^TTپ};~;ݺu˗E/)))|w]tXQQ|p8csXV6l@ee%.RL֬Y_L߾}@)%%Fglذq{aڵs=DGGSPPUo߾!C`oj^RtϟO}}˱ؾ};r QQQ^Ç;t(HNN櫯:zNII!##ӧy gB!]bh1uia?2޻w/!!!L4=zw^V^MQQ۶mK.Aѐ;t\}\ph4Xl%%%l޼+QJF\\=zÃ%KPZZʘ1cׯrJjngϞ=X~x a>شi3n8BCC9{dbݺuܹ ֭[h4\p8֭_~%?CMM 0zhFjeҥa?v}ɤI0`YC1w\~iw~WoX, 2K.oooYx1b޼y<#DEEq2w\f3SL!<<Z[[inn_~\qt҅r-[FNNWfbk٘;w. &OLLL 999,]S>*9_߼y3Z*MMM +Wj2b.F#,^R.]c=F||<ӧOo)SF@@7ɑWUUg}FCC111\qQPP%K(..fΜ9<.-uuuܹyF֯_Ojj*{СCP__w}l梋.bt:233IKK#))'Jww! 
I.yp8[~hݺuwN FEE;w]{!C4iRࢋ.5k8ֱcr;[m6?#8pJPP^z30Kg0xHbb"@ށh#55V2}PPv۷ 6пwKm_z=7-jKxx1]~L<^yϏRRRpss㮻rtJll,/2eee$''sW0]G:J)֬Yb!&&ӧ;#((nݺ믓߿?zf3}qoee%sw:[d0ka2N+@WJF^^{w84 {G|ᇧ<#Er=|-77DHH7x3hZ}PJ'K.^i&NG[^xbǎ\|.ߟ9\GTUUQVVFLL 466^]vb|rB!NB'RV_ں5lذcfQFKfsQZ|||}pS:VGrk 8&}h4\2&SJw^mL+9xKh4i\t֍*gbcҤIh3f  8B!Ot!Zֹ^;r>z&k .ݻBk7{{{wEEELYY=+VW^DFFҿOwmAUGG{7x ]XXI*OOOgke;gR(c={1ǩ*3N'@?XVt:Ǽh&77ux<<< ''o:˩l63lTTTmٳ)ǻn2"<< )8UV:[ٕRt {<}.66?D݉%..h !B> Ѕ8OOKKs,dggd ӦMsC}snn.gvZzdr9חzkגNuu5СCY>}0uLmm s%~甖5kOCuu1sssW#|dYҊfϟϞ={y2Evvp`sv:F9f9R]]gvN:h$00wwwrssOd2iOt SZK]swqFRRR(//"֯_Opp07p2]!8C qpwwG~fq*HII!;;nݺI RUUEPPW^y% Y|sM_}ԩS1LpA222$77~n'ލ}Dk#2eq?GįȲ:]O6mbh4LˋJ^y3JFq}U:i~˖-#++ e]F~0xyyo};hDp8E}ZO5\~\|p!Ozz:eee,Z'|RZ҅B3Btz3kZؼyq@ll)jZ-vÇ'88???t:;/bƍ,X9zXX'NGneٯSh3>|gI4hZΉtڕV^MjjNfqСc?rz3_x\q`ጻM.xq,K-e^!rw'bXM<ٹ B@@.N^wriHJ)@DD}FJJ ,`׮]FzرcxGh4B!NBG\yٲe$%%9[[*++={6 i'rI4KERVg+,,$))cފqFݮ @^^.@rr26!ܙ2ȩFCj:vuuu|$%%&` ::kS!eڏ,ydb͚5h4:tLg),,t͹AVVK6o|MG_ ^bB0p@\OOOV{ҕ$֯_LZ wwߤBq.BG<<<y뭷hhh/`ƍIii)d2 7XD: Q/++?w۝]صkIVl޸q# =!C̮]psscFrrrذaV1q&"##̛7ijjb˖-رcτ`ŋg}xHKKcܹ\xݛj֯_n0v3J牴~!rrrx9r$7n~~~׳tR 3f aaa߿d<==[:'kmmeǎ1ⴆ 4|>ƏOϞ=cÆ ǜC777!99Fpp0EEEٳλ;j*v;瓜7[2p@z=>>>TVV|r0aB?~hZb7#GϏ|6l؀ ::Bq$@<SO=ŢEHOOCBBk-ȧ=X7 p |TTT`6QQQ3/-ZDBB [[SSS)//gʕDEEϤIXf [ne֭.׏/-GA2}t̙ÁXr~u]ήGܜ),,d|z=~; .dϞ=ǤW^zq*i禛nbѢEdee9'Bqcǎu9. 
),,&NHNN.~~~tM$%%OHHWȩ`0p='pa/_|/$$h6or=L4\&m޽{aÆiZJ())~pw+(--?W^ѿ񃃃;Y`|.ҿnӞ/=O\s5|w:!l 7ps=!4,(+òlFmm-:ooo"##]vef*** 44`nOEEK9YUmm-Hll,}pUWW@.]pVؿ?---xxxЧOܞfNyΦ.]f^OppK%RbV1fIZZ6 go3+_SSC}}=eee<<Βdf/ XE0TTqAťu|V)UAWT(3nGK& 1!Lz>̽9gΝ=|ή]6]qhٲeϡ5Ű@LL ~?oߎÇl6yСP\\۷l6sp8Άihܸqx^l۶ GdB-HJJBbbb<ǏCK/۷,(**BBB=//^9Bn76oތ,X,n۷G\\N:={fAE1s8h߾=1bڵ TUEBBڷo6mڄA4deeA$4m4=C,ڵ yyyƴmۢC>fЉ"Et""""""NDDDDDDЉ":Q`@'"""""" DDDDDDD(0Et""""""NDDDDDDЉ":Q`@'"""""" DDDDDDD(0Et""""""NDDDDDDЉ":Q`@'"""""" DDDDDDD(0Et""""""NDDDDDD,"ȑ#ELL 222XM˃fCFFY( <"##qqq,""&OPŞNzdEأ~;Gt8*h ,f6w*QBxqa(Ʉ8q2d˲ 6Grr2-[\.`67oތ͛q5jѲeK4mڴF}ݻw/LG}ǝwމ#***֭êUEjLyO IDAT׺$m0uT8ql6#11CŀvjUV߾}{dddy[^/6l؀=zjV\!V^+)))u6"Kvo# X$\+0 !rJto !zaZ( n݊nݺ]E9_  I-ʌc49T׋~ ={4["116 ǎC׮]Ϲ=B UW]u4 n ظq#ZjFJ!"%!ߏ_|,h'|SB48q"|AHm۶oĎ;ѯ_?l7䣏>ž={jz ۶m1wƸqpqvm;v,~a$%%a裏W_y>(L&6oތn wC߾}vk~  0p@;wv;}|FESO=O> ,09rU寵-躎W^yǎ "jt!01vvdh8^ŘϷb~޾ 9Ygg8N̟?߸fUBǖ#.xR7an̙3y>#|زe תM/^~w矇K.ʼn'*}َo8NDTK?4Bu;!K.EnpBQX&MBi{9lڴ yyy8x k :۶mڵka2зo_ddd@Q,^GAΝѳgϰ̛7mڴ5\۷oG˖-qwl6ԩS3gxjWUU@UUK o/^]SN; 33aغu+p!,[ ]všCpAt;vĜ9sP\\޽{ꫯׯǦM`Xпh}v;6n܈v+Ѽys 8Z߇l믿:ݻ7^}U̝;]w1B.I|Mn:\VlFnݐkעgϞظq#\.qUVP-±cǐ!Cnclؼy3:tLL&lذׯGrr2TE$Iu+V;ЦMvmX,8qK$$$!",̥XPtLfnh#GW_Yf$ k׮%\m"77۶mCff&躎,s=Xz5كX 8yyyزe -Z{ }dÜ9sPTTݻCQamӉǏw޸ꪫ*P]YǾ,4=3+h:7˃F ѰEn @l3ffqgx0sL|>۷?{;̝;NǎC||< yfݺuCVVsN|ͰXt)dY{\{Xp!v؁;vGhܸqS}HII!C+WjbӦMaÆ#DDtQ:tʻXV\{xꩧ0a_cǎ<h]wy'^/ߏt_oUUca޼y$ ,ᅬf͚aڴixǡ( LgyNz+v؁kbݺu5ÇqQzaY? ** cĉذa^~es=p\F_݋cժU8rKӟ~z,Z=~?6n܈aÆ! 
bĉxt:W^p\3f VXQ~ڵ 9994h⊰ߥbԨQ|Xvm6oތ5k`͚5Xn]l >[nwߍ={rᮻw}'OO>8~8^~e4 | ^x(ѣGcܸq())C=իWcŊ>|8<f̘w x뭷BQ曘0arrr`ʕ8|p;54++(Qg ʇ'N@޽ ɓ??#`׿!ƎW^y;v{g^xbɒ%5j^/TUDAAN ׋aÆaܹz>|8ϟY'0p@,ZGA޽Q\\\纂*z\WP\\ɓ'aРAXf vڅK[RRb\N';vT]C>Caa!,X8x &Mde˖5/ Q)((@0>SX,|Dbb" \|hҤ oߎ+W`…/pWgϞ0L4 n7VXDL:ÇGǎ'K.HLLć~TiyyyPڵNVml67,XӧOGLL >CX߽qFt 3g1vXHHHÇ1k,5 .RSS-[O>صk d\ կ,@FFF nbbbo{1bCNN222{!::G?[n(((ѣG!@ll,cǎGAA/_/=z48aÆ!`Μ91#< IIIu@,#==pB4!|>9$|>۷Ǐ$I4hdYFjj*f3$I2 UWLL L&Ngr0gs=>|8N'^{5_|dddl6@e,_$nSN8z(᪫BttέV+4i-HMM Ru8)** Mqoo?f͚ЩS'n`rr27n Ib8~iiicؾ};;Iзo_$%%l6bE.V1* {ia@ `|ZV\veU?..θnٲ>:w@ ptnׁ͚׮[jB$$%&մXOWi/eEqQ*ĤDDEEA$\zaJoh|WPUiii+m($ѥm۶Fۗ]v$I믿$4o޼%xp8ip%@]S܉jM6HJJr$Iԩ ؤId2AnU|X`N̟??0TUELL ?@UUxgaX/"x x^4k ]tԩS^z)ك'O?Sڮ];ou]7|@ `Ue̙3z+UuiÁ'|SLA6m IfϞzsŐ!C*$:ԩV+/^ll,XTaĠNzQ\\1c/ >sZ @Z'N?#'w~];hf|ZhիW#`ӦMΆRzC"33| UUq%CNN6o\׃& sfbp]Htlٲ6>'pQȲ֨?~tiE='Nȑ#pX~}7nfCrr2Ƅ m6|yyyxW0zhL4 bxw!ދc?~4oXd +\~Ž4hPؖrh۶-&N6mg˅;G=Ыw/;v,tnݺW^lz QY"z^~ >@@sKǔ%IB"qL~a޼y֭v;V+L&n<Xd T: 5͸馛0n8L6 @Tv]qcȑxwk?CO^<iQUMbp(PwI1Hpٚd2UVx衇зo_$''ظI;b[ sέ qqqx#!!@wqݻpfMs 'OD޽aX`Xpn=~mu#Go_W4jԨuuI\ 艈Ꙣ(>}:K#`z-/MB񠰰& 8ߏTc}{jj*TUfjEaa!n7RRR׋hXVPM||>nqqqF`ٌ(|>#!!& qqq$ . EEECJJ L&ES`Xp8`d5["*mpQ0$|ycߠHޅ>a`a6QPP׋FAUUš(Sވ|HLLD Drr2dYF\\qs=3M–zcQr\k.K@4jǃx*`qqq4 eiiiֱen!55$p"??qqqZfp\HNNb$IAAA`6b/F$ndBLLL^NDt躎ݻw~@VVv Zh=Buܹ?p8p5ছnB|||}7ifTf?u7xcִwQ⋘5kGPPհlWVC+&1%X}1urAd (3.IFL~p=+׿\G t"":( 6DDD U0DaaaYTTy%t""""""DDDDDDDbք/0sL駟Ƶ^ 6`֬Y={?n?wF˖-ϣyعs'z-\.{8p 3Ex.\#F ::۶mã> EQ_+WbժU ;nɘ2e ڵk O>$\.?bPUw}7zхt"!͛t? 
L&vڅgAΝѾ}cKJJиqcK_t:qQwA= O>,:BDDDD5Q躎BgϞ뮻|rAݻc=nݺoAٺw}7;mbɒ%={d8""""(4Fe+6#!4M$I0㺮C4X,JCl6uiX'9SLATTTu$ ͆n EXVln E ^Mz[BDr;$U@Vkˆ|""""HHQ҅7pwb@'"""""" DDDDDDD(0Et""""""ND&@QQ.]7B4{oڴ ˗/l,]8nΝXl lj.f DTkD=0w\Al6%IBll,V+E6ۍhDEEwS(y^X,QY>& 111@{6[BDtXE DT+VW_}56l؀B۶mt:~\vea1bt]lj'vaŊݻ7V^d4mѫW/,Z]w%Iba@ L&tV& fT()Ƥ$ DT+$'gdffԩS=z4ZhG}Fbb"bccqmk׮ذaF;[nENN&Mݎg}O<o]Љڵk˗ĉå^ I( IDATOchժbcc:֯_&Ml6cʕBrr25k:t(~EHLL=t":/dB۶m+3K߈4]r;ѴiSݻ{ ˅O?_SNaѢEaǽԩ-Z1c`?~</Fbb">sctbŀNDgGq㐟,l6tvz+vi#;p뭷jo߾ɓ'nݺjodž Љ)DTkǎCLL ӧ{q㐖>n3B ۍXv@tttX8BB CjTU(@,C4hVM$2L&t]P }`P4h0QM0@iqn;w2SK$IHHL@AABsB#˲ ]!IRc?FAgzoA /AL&H8 t"M0fGQQTUEӦMQPPB`֭q$IhҤ vڅL:tV5Btt4~g\}ؼy3ڴivqV6EQn E M`Za)t]l6foUpw(͘)z>1QL&\yxgn:>|8:woCE۶myf[pڵ+-[xCEvv6VXQF!!!w}7x_~%/^̻DDDDtѓSQ- !vZ߿͚5C>}`2( +8NdffyP_5nl6ڵ 7nD˖-q7d2AUU,](wkȑ2e GЩ tTII Gn7G鬼^/S8a7{mWrjM$tݻw{jbРAۍj\}aϱX,߿5(q5""""""NDDDDDDЉ":Q`@'"""""" DDDDDDD(0Հ/yЉj@V:9/:Q`@'"""""" DDDDDDD(0Հ2ՀTGe@'""""""E]ЉB  Ba,2t]M(JyE?qDDDDDt":/صkz !֬Y[nz+^} !D~pСC-[o߾ׯ|Ix<"""":~=m]1zh<裘6mKYx$I>}:0sLt*4MË/cƌؽ{7/^lCDDDDtb@'ZB`HIIAzz:eF۶mqwbժUaǬ^#F@zz:{1ڵ 'N1l0cĈXt)QĨy::/Y-Œ3п@aa!RRR`6III(((0B 77͚5l6XV8Nl6A$CUհdYtw@eDeiI )4M3UWGЉVN'FQFɓx<8t K$!66@ (Mzl  D/P|oPeTUU~o($R ` X' DT+u֘7o1n8G^^^/< 8IoDvv6 99$!778pӍQz/En EXVln E ^Mz[Bĩz:JZZ̙G1gvmcƌANl2<|s=|gׯҐgy~;M3g@o)亙"qDt8<#l'|&M`߾}1c222`6ѩS'DGG#&LM6nرc!Ix t[n{g.]w׈Ѕ@ӏweCu(ewO xe^%jM?W DDDDDD rjwnTM'XzcEř >Y.¯hs(E"\1Q}Љ*R5f/FAU]b|(.~x'B`"DDDDʾ)~ֽ ȥC\~٘ή:Qtyߕ7Sll=^:|n'*VwЉ,kh*5hVs^(+ƈwI@A@Ѡ:UxnrK j R5_ѐ"ך Ӊ}PBĉ8p`ǹnݻcB`߾}DDDD YUk"4tY+,I*L?k>vϹ~z]t0QH_~<<ףy8y$ XVdޒ={ !{n& zB^^믿& i&t"""MNm| MW64VA$@);ŽYDv^Ï?={t"-- & $f!PTXf͚V+, <bcca ]x :+?*!W-6 \{燾"-T5i]*ːU6bK_H b@'Ze}6l؀C?4Zؖ$!!1X_`t8|qBX IxAenG4 @^ןL9>_~+.Tڼb&TZ+$K(,#RA`. ѕp{<(,,; pz;j>snE0Q "$C|EE#٪Bk>m]Qvk DT+'D߾}1`\r%u(,,DAARSSuV8N$cΝ4رcԩ6n܈ HI`2z5E" ͆n EXVln E l"*+tcp\P!--Ϸd{P*S "QhHLJDZZbω98OS2LQ}^>kUg$&%#->WP5 . 
YcUasHHL<u1`ĀND"In}0h i;vɓvqbŊ9r$ VO<w}ѵkW,[ _5GE*UDMwnij:lycWPMVKGC]TM YOW4dV[n't"I0|pnD~гgOL&ҥKQRR_5(׿cɒ%زe ݻC$3;vD~~>z)dddw7"0$cCV NVus#5Qn_.]D#Cu  !=Љ$IB.]ХKǭV+ P᱾}?w:t{bZw &"""HjZDl ^zzo򐠪Ы*H>=]Pꥡ\*B)u҉N .6嶐sd乃n3EOalW`/_G8m\e# oPgگ%׵$'>DDDDDDT(vM$ \~8_x_wOPz:$IuU/-뀢AQuLY>Y+}]˅pEVledof NDDDtF軑vRyh$دuz[y UF_k}+ԫe 9}2M fr"rgmf@I@=|oּ_u NDDD;]Tʵ$IU* UU(OݺX^ӊ$!9T(+5TTK u껣; .ƂE EET?E])3BhzK zwkt^!DX@Ub* <xO5F)B*@TzYw""""""*6dNS:h.Ttu0^>tt[欬LuI y*JYKҀ.<'%IO_5u#DDDDDDP0 (ZG=kkhKJ:pO@- FBy^:}eUG;hsehB Zn?x!DU#q[9t"""""/ܕHPZqW.9x _䕍t!P(-$UN\ilKWЉHM@p;yygz-4^W2Wt(k/Bn{(:9w*"$6"-ڈ `3HDm{I kɠ8*"`7,4 |֦mDie2$HPn3g;֐J%ZJ3}Ω[dٮۺ`Qq<-f<=DIWBm0ƢUcpR6yJ)x۶[GAjxAX)?"nApBul،G>% %'`8Q4 S) aJMw:Ueh9A =fhh^x!=\qx'SO=38W믿AK/g}6;яbG>'x"G)'|]tVX/ FFFe|+pEGTw  >@c ްoV]p6jbݞC:rtj`նiK0[bZ/*}].1Vj]>`?y~e{8 NiN=T$.r\pp]}%K E)dpGۑL&Q*ގnhpks')eMAT#ip]w!/r ~ٝb9/Y6Rcau&>>!%,{.T %Ӂe;~R xqg ۅ yp] %tAd̯IkOlHl: DJ ;tb_|%N^all ^x!VZ믿[nQ,i.8Sz[[<ϛqN(z6fp!\/ry4W1%L& Q ϮB7,tg].q H\ eRqĥ(B]+fqKq4 %c5yB"oa9 %ӆm۰n\]H!y iAHMp 'xs />3Hرw?||D*Boo/`&:;;[R IDATo`͚5>a``[n~.] MӐdՅ[bŊQyUOR 2Wb%̾H =b?Du$I/PJx< b~\xm2nk@,uIH%ٟ݃Ѣv n#J@@@ĒH&B_N%4Rm9L&ގ6C__ߌ4]$h;.%0; r=-D2݆>2NزD2N;a8mU=H&>{As|_E.Cgg'֭[N|3ի/o{裏⪫eYq(կ {/>lr!83|wN_AAx蟃@/%b]i_څñss,U5sOo YSCME-4RDofK̷H3\8 Awr"NĞ9{^^xw}7{f0𶷽 ty~q%@4tM_|? 
8'Z{_ۗug.S5M p,ԄRΛ0;KwR)x·C?8$T$ #o0l 0;#㡲$  fo=p} }^p!R𚈳RlFBf5Ȕg<ܞ 7'4Db\*LlxB,p)B**tYkzB*/@'  MӐ3~.TGhꩅT`6A*Ţz(ѿ )Zsx'.ȰT8zd0]8]#񷘀rp^<6HAA=^RȺB0.ay5a$mqRؕ7ktR*/ J6Q%R('$*Ţ:u;Hmǃ'$QdM7:<ɲ3LmE B.Uc-/dv0@   `&hLG͹-S#aZFAդg+Ny!m2s7F쫩VM3)-R3zør0.Q\8ԊPXU?WEo&g1N"$  ƛGMop^UyχpzU3w$gb )z~דVfUK?>VG؅T/YʟI,WA8稺3 SI_b`{f8օv A,  Rk~M?j.\φRj^p뵬ޛXUB*޴rQ.UxHbPC!^ ŹOdy0\qpD)ed<0\#[qP4]!ݙk(([ a`Z-Hh t v$|wcA DӴyymo{\F"hlW--\-l{ sԫǡm뮏 <Ϲ^XRE} ^.py($,`rCʛ0>}dAAAMT4 F R*b_09-/ڒ=t,qDMng&*3jѹM!Wyڒ5sdQ~(~c9Q MUP4X^}hO!n&(hKƐ3\,0R0Q6B]Alw]s6Z07y4t(NAAP8ńxsNq!w;5b6/ ektn5l?.`y8,lr3ofڦ'd$G &^.4DЫmҘ𻸫D*Xt SM;Ƹ_`8sb0t@   @B O֊tvMF]|+0.Uˮb YjE'eӨR 5G9:yq]?saZ{bEXH6sy,|3O t  NA_.(fk/ZAhe+.,QͽAZ0sRzcJ)r},Bd6d.MGק5φR ^ŝZ VW3[qj<GRr_*݇gg)&*Qbp}B_@ A,  9])ۋDR%|m&!S%皦sφ płi5Bqِr? díEfuzi5}NI$y6^7&@t5u]Tř񙭞{2n~_AEi0Z@n?=@' OQ>]][+__^oKv7rM7>gLuj.թ^`UMj![9mU®|f^_]ڍ&q), TMMr88Jv\1 tsEW.%6֕[M'hqzpjҘ3HAAvE&x`nW/ehq ׏bKH?hfw=;efTѢm_4xFAhT_0]q)Ռ ';?+c%eO(lԼV(qھ.!N^A)gyv[b>88/k׮O~Ux9֭[;_җP, LFGG~>~u%8o u3\MCytasMV`udzdyҷHkb󭻔R8,6' \ʚi=od{2AW uJz]ĚK- H4,B̄E`ojN񲍩ʴ4]!hui^P*-%e R4?ƠX#r-Mي)Rld5 < >kRJ|_FGG:?į~AĂiZC$nF\1U_n{Ns׫,,?=ZP4.ՌQbV7BR![qQҬQzDnp^4˺fupni ub{@?ryM7?x8~Q3 W7C4ιOeT ne+d'a{@?xS=[qa͘ߊsw`{ a t I|'?赑d2|_Ʃ /=X~< x≸3`rr>,/o;.RA.~{ᕱTO# B֤Q7Z QTkYmO2A "x8K6,)Gg"_qk C YP0#a_*\}s[]>;J5u\*XQxmmO ; mD fB]_4hUQS:g}D(q`jj K.E:8|r\8K,A"@6E*ҥKi?p:ц A+D#l`Q~9nCw8t$-J5m߂ŲԜg\ =sMZ0jܑj.,ǃ- tGpqhw1ơ \רh K"p=%`: \6mwhzsJO "HU2H Bf8 Cx\dӍ~ut<m0R01ЕBp3_W(&+Ta8CW:t'kpހ 0ך %t~}plєsM6LfRM$r7PPB"Rad `p9퍾;xbC =&d2D#!M4ibx=RJE$ !xᜃ1L?²ʌTMU\ҕ^X>3XL7ƐJ߻lʼnTO,{LS1LK1Ss xr{3R7'!6膉Ԓv(i, 8ZƎ(&J 2x=p]S-c`crOʣ-ua c!X40a&,ۋLK6rleӆW05;c\`6 c g`Zv9Dv۞h~OAd9G2D&AOOOiN>d!z{{yt]GGG2 /@X H!Jۣ Fu$ɆSstKti?bBp،=_)6 }Kݶd 3O8ׇ;6ls~pQ*}?/o{,_|;q-ॗ^~} = ? /_eROI݋4ZDDХl{sjW0V:Yt=w}=zuߊ{fP(,KSG fT܌;܎ _)El6ce`\xQ}uXKn2x*.Y ӷ.8TD6K Uq R4OUh,DY u(.xт@1MrF4߰] y5 LvT^Mor6f6*&$&25? 3-j-&828}ȕRؑ3Omld{56eJaC®\.=>JjWq9^lA>zLbq`@ ƲepRN<ӸpW|'ؾ};|`Q˖Îe۟KE`Ak(b3..%˃pՋNsg[mRU3 ~.T8Nu;sFdzGM(ŝ I'N:)-oy ~ӟB)x<Mс{'6bQʕ+q=@JG/U}=%b鮚 #4 GM7j,. 
-8rtf..`\6WJ>xaQsq?rW{`3aӥ S";R3M8CbiOWƣ#í}YB(L.+.$Q#b4/Lmuiцol8bITi~FS>ڛ̯tJ?o4a/ Us2!Q0]%anݝ7\x׊qD\_iZ&;p=Qӑ2w!^Mp9M^pR/pxmsl6푭iZtW˨8JEqb<odt,SL,-n+c h.X񄄐u"hPqVn^a {DOߚu?지vZS?װO[64Udt'`1blGƎՂz}B1]M<^TqBt  ˡHͯ$~v:xE.#9riDz6ljæQw!loqeؕ71\0zY7g t'ky]H'cț.҉^(=_28*_W8vy]qXLÅHh^ؕGO{ vH1-Jpq(:4po^īcep)®<&6~Gw]+vȥ??vl'iJ 8CD̏v؞5)Z Ӏ~i]mIqdm/6X҉ႉxߪeHl[V#Ҫ=߉8q+oc\dάUKS(!nԌW#pSۡNUmQ}sH40!.ےձ;~ueۃ'%vf X̷xRj1l.DGPS#u/ 5X,_LWSqnkO!xm#.E6a\<$cb`ECrzp eEa$9.hGրӰО40][}jBq'l.=\0Ya2=W-lؑKb4@٨i2D Qȫ<[;#EL횆sl|osѢ|Ό]}-r@' bBt*U. e+xx 𽸽-:4 o3<81-nOkU(<ްA|<;dzvX xͨw  Ola=mR"\=_m4btgj_[[,% CMqcs &E-X mf|1:^lxr,dT#x \( ҝFbM^iV)͋ L{c׹h1&pq\bC  9Ao}3E&6lOlEӋ-EgQ|oߙ|1\alل|&׫w)XHTuo,DHS~tWJѸCL6zr{Ӕjb˧tgV Tg`Aw<<#;@' 4i1`d10!7\Mx?)liā `!tCԊ=U1AMdPbAAHPJA* ~~MvW;,Ob4l,E8\({L3BA:AAcBP`3.5،Ktp<Etbx`14etX{$ bC  Z8Ɇ׫wf 1tH%Oȥؙ3` R!t9HZBʏ Im 8X NA(7:Kb5.iZlmI1 R)H0e8{ёJ= q䂮܆#/РEM rMCOAq@ د16.+ZL'P4ʶa`YXׁYc% ;'UJDLxT4t?u5ߙ#a>=9JAA t CW[#)=4['uyyO+Zp@d?H;Wcdl{.R #Ӑ8df\<$q@!ͻCTAAğ A%Xsh7Rx{eߙ_ϻ8t#c%Ep< %0Qўm8L>ЕƔzHbeK:RhO¼ JHxd FAĢ:A~T B($ӑp.$|r^-g7xq'p!>ӞluYH®Pq<Lx-  :A~R ;I kce/v`͇q7l[&8Yǥ%Y;*.Z( .'.  
?5$ gE{0 gy&׼¯~+lܸGu֮]6躎Lp'XZh(7-'PD]PJ]hSPw4f{クKquao]W]Jb(iem\H G%1 \(((5 8ʶ%)d+D q HbJl{=,xE!ilZdOm趇}}vU7= h.`Vc5JY' ؿ!N>X,u],[ ozӛJ011B&AWWpGcrT*Xb,Y#8vF)^(|z\ŁT JiHcpy_ K@4 `yR*ж6GCq^ơٞ 1}L{X?XkL,{cB0zW$ '8!D?v}eYMSJ>o^.%PR @[  >lʺR?Ǐq(h@Q =Čr AтCz1[A^R)H)T*z?###PJqq!C!lF{{{$jJAAO!N>aŊ}vw]غu+n&|6<~'t/_?<bp),t  b[q`Ӄ(~K/?A_(>100o~HR8?|"  x4ժГ b/&M)%lF:F"1yc qNAA@' =hEA 2g=g؛P;Ahx衇p`ڵXv-~iH)qwNYgPJ!ᢋ.iK./}R ~: Ї//Rsop9`ƍPJaxxqi+_ \]{?F#\p^x!8< `||ԧp< X{l6~?s=O=Rxp93-q?p饗Oe]b"9$ ER =VZ/_|1V^^z oԧpW4M[?1l, ֭W\r (J׾OӸn:xgpwn駟+_:7G?6n܈/𬈽8;|&NRJ<8pK.ʕ+/n-܂s=_~9cى;۷o]wݵ"P*gujJ%qg[o}݇ 6`˖-[o7 J..dYq =-bC Xpαi&s1e˖{tI?%}KsUW]>_Cs 2۷os=~k;v@29眃w83裏g瞋?RW_}/>a͚5+C* \sMyx7b ?O;\tEB`xqWcW^GySO=^{-V\u'?I-o^_xw'|2> 6m{\q8qW#HAr16n܈O>O=QX, رJ)X\>(f_{qwy~3r!muJ) CvNeߏX,L&wgE,v(NA, 8.H)j*G?M7݄;[=w}i8pk5\믿zğ5k֠r :(<wI|c)z Vš5kpgk[o 7@IPMpCJ+W;ĭފ#<֭OSl޼z(֬Y>ַp.Jz>R׿L&O<t:|#8p 7?~<Xd n_###H8cq9oW_o _B  f~/2.2Xt)N_~1cI>}`ҥDFFC_PTTѣ/~%a~n7ϟܹs\~Ç3bJJJ8rseذatܙe|>пؾ};=80jDDH˕[r|?^[!C[EDDDDDDo """""""H;.""""""( """""""1IDAT2 Zi  """Ү\p˗r Ǐcx˻X|9C >|ɷ2vUUgg?uuuw5ax<|2֭پ};vf|SVVƧ~Jvv6yDC]DDDڕK.f~?k׮%??|iu)ֶ0ex[{k-Cسgw:=JQQmxB7GqVǺݚ:CQRR¬YСmxK>} x\v_?1eݍOŋc?(//'..on bJ6n܈b'vrJN>?̙3رcYr%׮]#..EeΜ9CVV&Mb`Ȕ)S5j۷ocϧ_~aڴi\.Ο?ONN'NKXV֭[ǹsر#/+WA3P__s=ȑ#پ};'Od߾}̝;d<TTTPUUń ;v,Vbȑ<r ~ix (6;w2p@fϞMCCׯg޽Ʋ`=̲eȠK矓¬YzYt֍sңG<:a_2bv׹z*999… q{ذaw&::yѥK֭[GYYiӦ7,,4rrrX~c""QZAvcǎ<$&&ҥKf̘kHOO,jjjXlN8ի۷7DXXÇ{x2e )))79sk.>Lvv6cǎ%<|+V0p@"##y+,YB޽III^l۶ >b|>\t'̛7#G0~x.]ŋؼy3iii0gΟ?ϤIHHH 55Lv3g̤_~w}fMeeeˌ=G?aÆ#ͪUxwIOO' 2sLzI׮]رcaˉ'"//^x0x種fԩnƌ3f {nf̘a~0 z-rssy _$!! #PRRB}}VEDD]r\cV2~`۶mGFF.D*++9v|>q\ 0l;.--e̘1裏ZL4qƑԩS . 
Ǐm6zɽknݺxbv;(,,$66: )..&pU*++q\0yd̙biv=>}:ڼvQQQ,ZPo~Cll,V2^x1Fb <ݻwSYYI}}=$<<"?Jjj*`;vpq8u}iVS.]XhѬX^{|Mf޽6p:wLrr28^yBCC_;˖-G%%%$8իWnЩS'*wyy9L0!CuAvIFFO< ҥKFΝɓֶy###\r6iI]DDDڽ`cE\\aaaDEErp݄p5y:th n344 111X,fOuvv; X,l6N/22E}}9@xx8p"""$ 2m4~Խ{wl66 ~p# aѬ֦zoN:ѵkW}Yzvm`0hkX]t<5ACCvp4#**<.$$g` qboxxxN'|kײg^y Ć ^/;v4ǹ|2yyyL4^zQVVl6[Q[ˍg]."""ر#SNҥK۷Yx"gϞ%77Tzfp {ž}8z(.\뉌qUN'aaalٲǟgul0w;CMM o{~'mi~- ᔖR]]7{M&&&$~?}eɔpԩ6)((̙3x<ks[p84n77o6?/[VtNBBk׮󑟟O}x֭[b1a~zW_ĉ6zj^/7nd2}t3ojr v ""K]DDDAzbZ:t(l޼\6l@߾})..fܹvsŹgϞ?!CSO <3fz$%%yӟCO2qD:wlAvwެ09x NիW׿D>LVVYjtb lBRR DEE̯k}Ycȑ濙2;V+ݻwp0sLy+>6͜fѣG3ƸctFRR7ndۗ_tz!V+'N2Wo6~xƌÈ#x駙={:\u[VbbblXV~rc;::ɒ%K~:cɒ%,\{ѣGpB<Oh"""$&&|r֬YC=Xjo2d]vs z)<}!'' s\¼yG&V^Mvv63gdÆ $''gm~a4;O޽.e1|X,XVtޒp0$ͭdža0 nI~غu+466 i}nno? 6~hll4[oa~eI 0 àv0|޿޻n\V% vܪGnuӦM#55ѣG57Hִof-biyӶ;#Gx̚:tp˚imnp4vmdXp:p4^npڴu.V{V4/r1v;#F~"" H;tv@]DDDDDDP@iEDDDDDDtv@]DDDDDDP@iEDDDDDDtv@]DDDDDDP@iEDDDDDDځv?ΚIENDB`rally-0.9.1/doc/source/images/Report-Task-Load-profile.png0000664000567000056710000002112013073417716024520 0ustar jenkinsjenkins00000000000000PNG  IHDRcbKGDC pHYs  tIME 6(3j IDATxwxTe{L2H[T"U*vYYņ ŕuuŊ+} "E(5@(!!'d?%3uq]$9ysyޙ;{,#@ " " " "syz\}P[$I;FR5zH7qn8HE5N]yVOy՚4;KPI#szG+߳ߥ=w}_65>? Dpz|*mtu5rymqF|\+?#%i-2 5>? 
DxwY66Z\T^4 4|T]$$ZrҒN7w1{շXJ:m6߶X-ަa=;+5%Y]O5=SҪ-zQmFH3wg>rH2ZNJMFfHrfާ#u=5]7~C^Iƞ)SbZVcDlUatUBlkŒj}xZ y[E(ߦսZIpyUuߩhQdO2t,hk4~AX{& Ώ_&ߋ}٘zYY?mTTCK(ÿ{Rk\Bɞo2Vi]餷mTz>(.SyMRegsۖbQhYwWBzܩJ׽i6)$^/+o^].]0mEyDqJBQﺙʨ<3l- h=JEymJ/R wЎ5jhBdQTkQDBB?["OF>=yf N:)z}+TU_r蓿_!iuqk]KTg^ߗ]4BVub㕐6N+x4?W%u OLTeWSBxJwJKDEN5!H>E͟Hݣ5iZYgçO-߶SԾ^ݙ̸UTb{fݨ>;^~֯)h μo~_StjbF-TeezcƤt\]98+TRrth3I'Z'룕+RvCcmdҁ-v|X+ ~K԰T*Ni&2:9\5r:F}O;Y 22cF\O%4\!ZU9-0Vg̽(*kszg}vFKuk'c"pnJKnvcS5ꔮ|.(=YƗIVu!]Y~T7ou]KT^v|&M3瘝$@/;W]Zx1$q>qz&Vc5 {yﲆ=57^O鯙Tx&?Adž760^'}{UlB)τm.cA(q7Wh}#B9F9Ym#s\Z0J{0Cf.ֲzw'!{ A!",DcOh EjD@DOYqX,:vrzŭְ0Y,J6^{>b@澮K~)[~n~tݭU 0 y;mзH]F.SR{٬;ã5*%skL Te-&#H2r׻%csuС8|1}PvWONOO n{m);)F!3smg+e E A3s0;wJ|]S$EFGl/6U> -XKbUeo]zn:R1t΍xZsp:-2fbM$5b7 :i̵h¤ugI?Ԣ++3ULףO(eDzx;=̭d:W 6)1̷UenG>}[R^?-_+;rpB(eѢMpԻBֲz8)DB"u.gq@@l5^-aqbջ@@l TSQi~[o7Kpt*,"x+A!GU۫yy/"xT-\N¼չ8 NOK@@<Vl4c^VN%!uqy#*A[J(ՙPOݤ_an =^Yzh;Gk.ЎWc:Fk j9Z ꢮḘTG!m+((K A@@<&5Z_jtf+hɺcT,9D\^68 l~VJk1W97Bļ~>YUH!r ҲJn%r+(1FeS`_ n kUXK!WQ oH{|rX1lϯ lr+y!2 L׹hS)@@ &` "9<>  <> Y1W9)hԜuŪqx(bcׯQ@5eM!YN4/b  }ABuxMD@SP-է 8@@ ۹J_*Ԥ뵭 u,PR~C}Ao-͕ 0Jp[\CHZT 76]|pyvouOhOA~v@ 8*ݚ&eVQ ֤)O!­03_,oJVpy)8:1rm}+mu~r\L{:bes>TADW9YPTK1[WW&GQndoT¯o_E@C˫߬o7FՕiKTC3^QSn//z{i>\!=tnZGFR$8 ֗[MIutfꕿx!hn*Cs7iJ h8s."AI꒑Yh^X+,,lxkUt2{0GAAۜq[5k AC[>x m}n\jjQcOJUua?iX*5-\4tPeddb1{f|;.fpxC: ~k*.Ax5 X>Hء* b k|Dv!)w>GjmRb  ghJu@;w֕_ ԳE"|,l_~Y5>>Od!,b ./t.ϖ qAev5?[.b_ $r­KDRq 52B}sy)@0XsS0Jry,PfZE۬kh[ڇ+&ҪvVDZFgͯiӏ}xҏ:sB,4_HDannҭ` I5{%G)BXŊrxXjR-T6N;k|;ĸH=Z]G?Ҳ B@Dsؼ~kVl% i+|}b>ޏDg/rA@@~Kv<>? b,P jGVK1Xmc]O|QU n"hVACZ}8NlG1X$Fkh-^a/#  ǧj8#)VY,OHО[f{yX, nn*MJihD'Ql֠ T[-@VXԇGkvfl-@px|:*w1b3<ܚfyGb3|`W٠ oo5bC (ɯчDZskav;T`6f,ܪ% A!]s* $A>1F}mmğipty|~=`*} ? 
T75}f>b[ V4%9jko^ VGwkXJ,P%=تUlp2sOW^7]?čCRi_ cX1{Mn &U߿o]xtMl/Ǽkiqy)C7߮m^Aɧ bdSJ$ɓ%,4 @5}UMÂ!.SR{٬I5*%skL72>f҈ޱ 5Ģk"Bd*(55x٢m]ut-- QNhN%=*lwT-N=GL'ܮc٩ܪ!d(>}_]'C׻UdK/=s:>KwIVK\'>VM^A JLJT7լ^vw E Ҍ<߯9zXg=sAR2E:$]0z[]K@{Sf`Sn{x>)M׻kLǨ~kj >3C͙\n* &Ii<9^O;P ~g:Yӻf> 2㪞m~XhX,xzX[З١~}n!1/@k{_vu-ϒys ņ{yI@"9z-IuSu'4NZ[SAL:Xݒ::=c }>GX'~OkpHmBX*ɍ8 ^s<t@t=Fy!ewȶ|-{h=lZg=1 %hlKCgĆNVrTVOU}ݧ\9c +ArMi*::F'=O^kw@X,8q^{]zӣz@zF]QTD)}5-.znX.~omΡ&k%+s r,l8# %)'tkUZDWN]50%bMl138 &ִv4;s뵿\K^͟]ک]z<+K]W~n-]ŪgPYCxNNʛ袞6Yno`cXo|h.MxJܻzNW|b|OYfqmϞy]0nrhsއsycbJ̽:K^dvv&_*}>Mgc͗^cה}}9&Ri>%[c5cyL3R"LhxyΟ͜]n)ܰNkMC} 1/zs=:7?>kbӮ6~5~X}5MbRIy8xs} Гs9łc<*w|" " "yeE~>C=zޣCԻ:cṪ(oUo}J JאT=SR%M7/3Y2tK߫eO^:/z )6ȁkC;kqr`Ct֤u7iKLP2AZS*̼O}A8\缬qSzvT3${Ԕdu=mj~~c:k Ꞛ?!Z>LEoFYd왚rY:%&)1>Q.e5tRJŪȧ/z{z JMRrJ'_g3ke̾4wJnם˳{w?KY-~U hBٲ@׾oWЫ;ڪFV_?s2"e}S~K%fi󀉺(RinWu3QЎ5jhBdQTkxKztJ("!Y!R'E#<3JUMW,=umZP*~ШHYWܦ/ľ{U]9L KycRRؿY$Y;uZfˮ_` o.ˮ_gŵ˸EqZubH,22jx|GwSugSӣ{T^7M+k}> IDATxexWZ܅)];i mB*-V}KZ/.F!}?$@{;N4J)B!B@!B!$!B!B! B!BIB!BHB(B!BB!B! !B!P!B!C_؛Q|176ۃ .v3Mxz }1܎f_f,Flz+_t23W`ƶ˘ %i3 jߙG'70b+y:IO+&,)ޱ5hVmj&J}K75 ȨnhL,?%_`ܳ5qȁF!XBd"r~\ymgQb}28v2HfD$c34}֓KN}.Bhp 60'pxFNe8Qzljղ>(}?ohѣ&~-+? B@%%Z L\?e3k^nMZG ^9^$r|΄էͫѣ"* gs[].3r$YY dhy'PYe4VJ[,棻S B!P職cwMd;.wic1a#J0a5cNvC~sy; `_k Mn,d\9͞5 >^Aq2ҸM0Kgaѡ *W B$uA\ܰ8PYb;>0[2-՚R^+5W'/BDBh&֬XϾ1iq*AjMi۶  7-7%rjrVl=ƅXz)];7GqM|28F!4i 8Jf-FR\8i'~O/Y[WZ%oWh`(rWvcę`d@o0ebr)4@=%=&V/uߜdLf|?K0uWSE{%DNz}8O|Zrn|>6HsG(@lOI|(fQjCډL 2".aӜ#lёv'Թ$Heqi$YpZT:qÜ!*Qr[Ԧ*s+ዅg˜p˦pKZfTg.PB!xBr9sr&v;1gS*aJ$&M_řpY|fWͽ ͥrm m2x39.!, ZX*a! mS7xo1*'pmͲq(ƎjJ^wZ/'v.GFU(#Ȏds&R4-˩=+`? V9]_6^ {%{Bx{!ٝFϾC|-ūv~TZ3dй]w~h ܵKva9(gjqNhCף,'^2qj<Κcߺ6UǔdP߈>ZYOO矿Xz|km$n!"QNOϯB!Nf5\7{bo՗r z1HrёU7'v =__P\18zT{!&j*DZ|65^h^H '1L]hӤ^zEʑ,@='U>0F4>>] f) )97xw޴}Cu]>n~PK7:@ޅl1ݸXh0QR-Zj뙮dEr.[ئxv-L@`'F3VՃqu'|=xAl0x"aB!? 
>Cr)M%٘;Gbڿ!SroiJ nUEcPd/?Lf Q@G_N8huoۚeTx,I?ڔUzX~*"xrv)DeN]}{[Wp7 wPMOxQQwZrW}8% X+;V#v &z0[lSCr8Qw;T>Ն凗k5#l;B!B_nEGyj֩C ]dqz,k:/*Wʊ||6Œh􎸺愾$/+j=?o rn|+Rٷ@vfD0P4nT e2տoBWIO7y*f?ʊd,+~GKPgbOSQФ]N[ئYO$z .}QV]( DD !B?]ĆacITt4]ww=HL߿8W"jգ~jv:!Kxs&3Y Ù0t'-`I%6 % Jh&56+޻[l,'OhLaW )Ĥk1̹_B!m[Sv{&Jj~n9Sd%'Uk*-+lv.dr.CVOQd8ڝ>5,Y4u2?WlP_8n| i OleҚz8dL&6Kat8S0Z=u_#$vJWL dJaw/b}brmj-0eB#6HTnB!ބb$5[,gIx^4-h_, Kqҥ5wb]o( ֍Z{RN ngUn1q֔eǹ'k\mZ j2XBK1&Ho8xϼ.*uu*NԨ @EK(XFq u{ߛ~[lSK(C@:">iN`߂Dڶ9JzFB0'BJdsp "1-9eM WrM8Kd0rY ٘It5&$ ڡEX۔sxeL6ŘIna֊_i;6c3̠LEfYأNOZ)nw,Nlr&ٙIDYôSٟ ?JhභKN,})_ kr_/Ѿx~dV3Bq_&LfpbS2H9Þ2aaS4G]۷nMO|o=NLb"QtG? + ! !sf(Z/j690 ?5&z|Ne1=d"Ҥ_{N\I$˦~IJVnGCiUjן&[bkyK z8-}nc=3}DOf*dP3qGt\ȭ.C_Fmn@BpEo8ǹ4,?׹$8ۼZN;؏46jus|;fRdQ Ad-U#}Tc&;v's켮2Pa?*fD#B'hpؗ 柅kEBk Юc *ziwe8'.gғw?̦e+v!G0*;MR}Vj!6Bl ZG|BjѲKOZ$dͭ>'cNJl{q(^Aߖ.Q'v%Wm&"]$YC;U2p`_N(nW+fYVJx{BE/[ͶJ Ri;:Ƚhu[mSe:dB̾SѤ@Dչ&lBu*கB!?0xMdsv]^Y8JHCʒ/3~t3Nr˨B!?rZ]eh2L:r6B<`aQP9$B!$BZ| Ùxx QS}|dB!$B6W#WV/"S[ל6tz_B!P<4NTчS,YuH!+ËYN$^! QyԦOR7/`\%S@[?;A!B)B!B%WB!BB!B! !B!P!B!$B!B!$!B!B! B!BIB!BHB(B!BB!B! !B!P!B!$B!B!$!B!B! B!BIB!BHB(B!BB!B! !B!P!B!CnF3[I cژ3-۽< wuYF憗}ץX~ >@B!?50E^\;n'RԬU UɤjӳWU\ YិmB냸 !BzlxJyz, r^ΏVɖdu^B*vd_JU#[f_VYj[mTYWm ʜyP[`1;GP!LLP=PvlWqk }i\WqTe|v'[ZQTPN.i:4TMj? 
]#)R*CS=ҢS^Uz/)RJوo.T?T**cjXQiCTVRPzf՗:(SZ3 u(pPuo&)KQYRJՅ#UӒJQM=L]!`'OUw~jDlJ8Lrm𕎪4(C_ԅ|5eSI=yu^O!j^EOT1]E^doRrfjGT[eUQU򶃇 6D wT[|j805LTzY>ht;2R㟩hGrD3HI4$͛`gY^ޤ)m|WGF5=R-Wv jr~}Z]Q?!| !V-*r U3(|NfYl!qU_ʭŋj[TG%_R;l}fQ ƽ,ÿS (Q_Um}s2C!Ją%(ZW *I鬠lR*EmzWAe鉬s2KERʬQSV#-?'|Qx RRR׭[} \TEWE)e:?5gB7uj]vJS)2Re@y'_.՚d5*_SS,JLuhto5xCRʤ"t|ZzM}SIJ)slE˷?+\ÿV5A9t^7^(vsTE(XKw_oWKˏ9*MmOz~[RGԇ}VmM5e_+@cQqUlg=:6uRNWQBFoWKz!Besd,@RruPD!Bmτ*5(u_2^%JAUw#Wrpם~gW'r:ɧl< Z.oʔu4zIݑ{II'[wb2*?q^ې-}{ ȍ,G)_osL3&ɉuZS?ĢR0[l&[t?N]\|5x{!BI7;٨#ڱ)ߙOX3^z we3qρ$d̠Ѷ<pы!75w@=etEmqUGP_yz^~yZ%X9{GVAE5Q.cU*1z4cǪ"(d KmUyK^GG2qQUvH5}hTw$i+\(P!uP^?!v׼2t@أkY5v,G5U]rbe_W |@XlfsܳWռ GT}U!Blg{)d$NSbwjld{*S7;_/$WSk*ʧ1g(~A;ݍsUL/v/M[e?<ʱ8ȁ#HxG;@vo*aVqWח}le#gͻ;1lKq8OZj+1P5.s=;*;bk Grb2 *Tu7F[ʋv_oi]=[ f}.rWV+e, Dj̒xW:xy Ggy;Ĵi7wP+!L6ot.4͸cN=%2y)\i8`vr0,-Ŭ\ӗF R !/0^ӎ&[}llsx},3v[O\pss߲ -ӥ $.,i:7kL,\\%úqvygv^<9 U;T>'H_s<3 +>癮-hܤ.lfevNu}:aqܦKؗZ![傛o(mMğ)TfKvMhܸ)cy m|Vc5psu{O*U9I) cZiif Ml4L32H ʺ3͡#2:;ÙWKsWoYcN(sɕtf 2LsgHL/LVOFsl[siĺx'~֤R7SNY'|Bwp^}9gR/ J80މYrDk\О}pZ¬C9q$vqDi\/{~/<߶2+#u\*yyfU ^1%ԽGPe:ljs.f?[Tz7.;]=+m}mP)9ֻ*3UgaorYė}cQoD9F')8;[_ @W[PRcyjg _bz~_SbRFeFjSfT?HȰіdb Dןaħ2Bx^*'v)N[cAXN|ﳩbijlY<[_n(&v]~X0E8mTl6=51e糩/+_eƆ49p'V^,,a|'ŵTvU|dɟ#q 8F= AGuYVdeY8B{v A7ߜA8^=l,g˙8ף֌o.9P7t,epf |r̾ ΍a1D՛Ls{i~ABxh^8O39lRMX"UM#Ы#t U;]cd}ΗɷolSaLxb*~dU03|˳rV^¸͠WQ cO Z{]9Py z,>78T{洡l5z͗:,og"2!,I~ⅯGCٙGKHBZqQbkYc1nCnx5)0i %v8U+i,.%bN9KH=KR]{M&GJ q5X>k跪7 D} }:8mr9 caWgvP(K Ƣcf5.h4tbaWOϬV<5I5J5pFŷꑾpi7-+;Wt_Ǯ8k~;hQ+mBEo#ƹ:Vcɗ݂FRp u s~d5^:r%UD,{n gsVkGA?\ʿȘxh /\'$о!/<?>#bZ_WltRL"b2&AȜ7QUA<; ݲ9 RJ/{5\Q@Gϴ}˫_C붡!LMz[._Y?k>O/i(S]BZ^Z֌ ~SX ﴦNT1j|D*⠁r}j,AY٨PE=HߟBBƿ?כEǹ!>FIO#qo ˏTnTiҭ9\7]>twy}Ju1otkN}881)hqoٓK~9c{ o+vohZ-vƹ7Zc Ss*èbcy甚53aoQx$h;P륦8nk^,&s)Dz]~+xj? B/~5BZ'<{WGg}im)}:j`%ӇqX|X?ѩq4C6 xY88 $_BL؟~Ӆo;]] ֓FKZ@Gˡ>}'Q8ps-|YfzN^p;[gs ?̸d76Cet5){$X VR\ƅgʣ-5NF- 1h}y< BRl(sQxJ1%STƺ9z&M!qkJAٗkAXu,("Debx! 
ٍe䏯1G1u#cf<[ ߱yBGrDr#twxngaB^0ZG ̘05 \Kr fpՁdw>ٚloUV6Q$9,dL43L7 Ƹ/mydc]cBOjg>Vm- +eCS^1I#!Z:[oRv,}j\k1L!/n$5L聠FYc.]>sQdEw&)|ӖEoƄ SѾryG M%_xt&)Z;i05:;S?&Kio{{inm=3ql]jhKVɰm$ --й0'^{L"nD/GȏFrBwk,lω4+|^2}-ZvqB[cbWĆ+Y`ѹLNreXP&Mm.Ӂh \mW.G`rj,g[QcdMfLKx<7}q*~;4-Gq2 xZhBW~_yeF1d[P_{B߀@} p,puóD_Yn%KfAd-.Djҧ#1\Ix{m`ї}pY!\בԋq= ȒAl3^y?l\ґymx& =) Wo90[Ŝ5Y?=%s $/ZDٯ-H\ ][@te>[Y>O®#~a,NOǂxGV. eZ6Fd )[bF]']'1y*!>GP=q8I<%f `o:u5*)4y[sߌhSz/_Rk 9gY-S#=]`G]PENnyA)WԬE؜3NIV?k ͟xXh| cގEbNnZ+:oc[z싊ePS0hr&2'5hhl'+9D'3 $\q ח`crRvc?Wd$H?.2qIkYS ;wfc_>@բ؞'IVc>ṛ̏ulmhr-^}/}t;(%ã:ζO|ǎ+:cSd[AjƸoG u7>Xuff_{#I?<±A[d[>csBסjl|⍤-󖶔R~ucb) %sKiky,(2\t~DNx`iL܆]\틹bׯ1Uv8ڊi)<67}/(;`{MGz%aL8?Veul{2IQ{=qK#4U?,dBSd^`ĭvwjDWsB,qOyƇUrOfL&0SiD_O"9 z|tL1Xi;>sdş%jb74yCXWD_@YGhpuqh9WrOHi,Z4.%^XΥq.og۬ޔ/[ma;9SYf03oMň^o.fuد#k/p %lk1Au/q0M.HHE[-dIYRΞ%|&7b+zXXV_r{ \?0%#r$jFuha`?Wa %7d V~NU?P~ck1ĸbƒ1!!+lUcZFW5I@웽H6aoT/]-rֶUD,deI)tl&Eﶊĕ%l拦,q۲ ;~u P8gvb_ ~VZ@FݑS|q .ӬL;}@Ƽϟ}t6Uׅ҄R4~#qsh'\1^Á4.o^3:O*{ѝqpx^aqEO~|@p^hOįuylTXdeJXq(?I.}(ceWXr7X_ql)ae/8|L{^+4!u$TrX?Hu{i͓ύxU&&޳AV Ov~Cs܃i>%k3BSo˸١fMqxʠwA/yXuDf8-~,u,ckӒ,ۚ o+%ƿ̖'ʳ7 >ciRe}i3KebUsD[Y3Y'&Q XKO{˂ǾZ/^͖YhPܖeKW{zz4gm )lJչ]1~ZqaP!ɳ4@!(g8\ '!B!B{˨B!B{+B!B/%B!B!B!B9 !B!B!B9 !B!B!B9 !B!/9X%2B!BW-SfF!B!}-B!B!B!B!r B!B!B!B!r B!B!B!B!r B!B!B!B!r B!B!B!B!r B!B!B!B!p@-Zp>'6 LFpB!Z;;b.N~*|;#/` S _M[Mk"}\&oN OֹB!9NO͐{%M'gFpN.\L1O(x{ޠ.B!ĭWevvw7J4.ti=Cew1s8qj[2pߜ-hĬ\7nmlaZxgX&KB[ 6 >cW/p16Z=sϰaL1sJ1 éD )\1*C\ V0`jvGZwƒ0Y~l塴leO <1cBvߛ-+_!BngsOnbcFfv毽Dׅ_3fӚf 4}Csh׾MĐìQZ?c\]̘I;'M 1Ei|qƇa'ᗬ Mp oCa&?C Fl@5> eGK4z}0},}e[ϭ9)Z` )ښ|kzڮ8QrU]Hi=jty(¯c&3Gg,r퉫Kbԋ׮µ%C\}Xj!/ ] fENfP }5+87Ă)9ݏח!NT)iUT"x*4@2dp+xuӹw͚JX=qUj5FN.C\BQ/̱ӆtϳҲU?R|{V!BmE3yј9h++AJf#Ek9~Kʅ_8[+֡7u%r`Xw+IFX_\UﲧJOU/#J@FAZ<@K!TMC_pw/c 2Ncoޗ);gp(IG%Ocf=:ajגRy4j*~vМA/'/ۮ9j/B!=m}U0{WQ蘲h9IBLH^ǩF/ޘtˌ_,j†Q ͣ-U/u:Ʉ~*"~MBËCNa<oMn:5O0jjsUc\`BLgwUcm7Nb3cM~(TوSj1Y>mXaNM;cxZ9^VB!l6˻l&(Y蹀T !Bqk%RSG\GbDb8C+A!B!@X 1A<-_Nr0o3g})s!B!, !B!Ŀt"B!r B!B!B!B!r B!B!B!B!r " 3h7@\D:l=>᫉7۰txn3> Kȅ,Hmw<܆IB oQS+60R(81cҨFJ턓jGT~=APW 
Nž~;>;z*craK3U94fS"6c*i޹/\Azwn|RG!Ey;ttqƏk%{H3|{#iV3Oڶxq ou(Můac0dk'9z UK-hC76p[?NYDw cg_w^eWF{16Z=so}6N_PZmkB⛳595!ڢ fH⋽a _u塋+0x5;VOP ciVbwC7 ~hۼf0%eP'oO!kپz]_(_w< Xև"NX_ڙܚMѹen \Go<7[pXNj[aJb,:'&:'Rӻ.FYu׫GzWˁC_h妨x5߶;RVau fyq^md~Yy !;)9YSƸ1%DcifGh69uPc":51 ;L_+f ϻ˴q\6\H\[/M1z3ywǜ2eҹqoMIa{M0yE[0XV B$}P毽Dׅ_3fӚf 4}Csh׾MXx0\b+񝸌ZwE amT+|/|RV֙5gr%S|V's[xMlŒyOnu>Ҥ#'?i#8~Bd]1sRn]? "s_+(HYr\7X-:•ğ*;a< "&|J|%/oRtJ=񷻍 ʷSzJ+c0&^wEYȳy6VJquB!<]t4kAR~_ Rh]<<6z Fs(+W(5rV)?9бz ;Ԣty(5o׌9-O͎37}8z{n1eiD!Vw*0ӥ í2Ug`@(:LD+_<;0l$zTaX+pK?85g+lC%/kF[Կˡ4y~ .''9ߕ~v6rkOnΥwY#e̱LrY 11OOh`l_jiqS?~x^YgzfߓOcO=Qi%U37Eb*<Vʇ-Ǯ"i״ MњJx襗Z$ ^!=gD;fU]MmTP=a[YC X^B}XCITVd8`=9W6s8>56k75[hkR=[ Ht{&ȓ M'`JGe{}#}C>P(,5vfU[FtUzz=M%6.©*5['teA1qƌ.s>FUBl*<1^@_<C=b-ƮbiE ھ8)*TJ%k4_/~TSUJF2k->,ugTZ9F۾_q)˼BQ.wUzS >{L]"֏ed!,!v-D_pw/c 2Ncoޗ}KdK?0_YH{FRvW?P'J_egctpDc\s]V*"ͮl-+~aEl=oD (5Ш<=:{a)E.GT^jq=>?VF(O`<-壤lrSnp?%vnK'WM~#Ne~=R%ԸYrS{wb{4q˷$]O~_{ާ'QWS_ac-fYVNVʲ_yݕV+xB!ы7k9:?2c?<'~G[`jޚܜs!8t(kҟaxεao0^O3k ڴo'BO;|Ǹcӫ:kf<C_' ޕoo:,gnt2F/ptuvNx&3#}qC͞L"rlrc.eZ-qLJn(5~i\ jvE-1"zqVcK]A^zzą׏]Iܖ!7}* %lZ}#xܓ|:g6ۮqKUO‚YZПڅ&vmc-ϷTugSx%}? B!0y2+: 1+uykZGnyiB!2 (+Sn:~Wcr3u :GسIUX Y/\] {t^B!}3xF.\~9Ԧ iUX5™0qWRK!7z2*B!2NB!B!B!B!r B!B!B!B!r B!B!;$G^.])nW} U9ͽ9'ұ8f6{pj}㸡|M?ݦp ^-j2ysy6x1sPt~ oioݛB!}ÔÊ_Zѯ {p|#19vm۷<ޟ:7d.+q 9Ga؟i$ݛ-=˹B!aI9y`ظ}='m[zw cg_w^eWZ6N_PZmkB⛳o׷C~>;OĐl_=.<IVPgj:16Z=MYϹy m@0fFok5`jvGZwƒ0 Wt\{H3\bbX@A}%ۘq 4ܛn7/~ -yC؆ w&@ۑl*M~`AKС{=~6`qܚLrvʚr@Snϳ).cAERWTkpT"xڸ,No'60"?>ʇL؛sn}?m^ 32÷b*Xؗ !BȁD?ddGc*|ߛ)9YSƸ1%De]/KVA;]~ΘMLkr'G]v_E'o^OܓؘхQ_1*\6`ƗD髅l9!~orڷxm_?be?=EƤ~->EXOSЕS/ݿeܸGioi\VW,!KD;4͖kS~wDqZ~H5ٝO4c ɞ5s)wھd=N!B%s򺍦 *hFU `"Mp /ãal4ҋ_m)0ӥ í2UgP5SsFҏ>8^=)h[pjǘMfϦYZ&Ee"Z߁a'ѻı*\[2t@K8!5~K&+a q(XIjm_6N!>p0r%1UQ\v]ʅ tѣCg& %0NE.ޕFm{9q'HWzR) Sɽ<*J5e%ݡ`AXpV܅T/FB[/4uiЍ8XUe?^RRa؋h2/]ŭ7כ\jF?uemq+ʂJNΎTRP^8GK6ޅx#.%Rp)TSnѺ@3$-v=1үtn6DvL 3#[ZP;V[XpVȣB!@Xe7nNΥO T@nZ":7۝*;ٸu&1cwk-^j2w L$_ThtVZDoG2ϧGm3Dl@[[cBajJ1pDTri0e&rV? 
IDATBUҽkm\翉OV\˯qڊ#՜8J>8ygI+_\Zkv:s9_禓X0#Dz=ى*TmѱWz%57!ظt ~R־dcm (mr'Bqou^(v,gTtIt>t2st@.2[I f=:}>yј9h++AJf~ۙnuG^J}~¡$=jwO<phv-go\ /byJEnx_ƺef9߼/6k){+4ͯqpa }%K>eI/O\ʚۜA/'MuY=TB py]Z;㦿~gf<氇ϏU'+R־d{m͝B!P( w:l-T5zt-Gsp_fggWi8ό6caݻ2@ǔ̍Ntvc;nz!z'qPSzg:eN4zs“<5Ꮣ8T6m]RG CY$oûBf}xOe}+1Q>IHQj$j "@ۖ/.awr-εiӺ5lLԃsD#\bbX@A}mFsf>Np 8S9ΰaL1sJ3hO7F6#?@zͷI @mMY|s6ZBϹy m@0fF/~qFg:LE '(]4LbwC7 ~hۼf0%eP'oO!kپz]_(_w< Xև".JojMtn !s89xq%z0_)92Xӵkrk kcClq-lY9וTRl=zֶ)ovbHuϹuE(1%z¼V`Kg=FXikk0XKb1A`z5¥r=B?ǯU3Ypo ÂjV IN'=\|xZ1GCոzx{dbIhN%Ό[7ټi @C\Ϯk[Յ* ]<{RTU췼y3N.xVV]7͏J^G꯾g T5h͙=z*ݯv=ǿHlپ: }GT ~e콶7e}&>\^EcEԯ/yH"!Ki֎кxx#lO3;FLɁ͜Hܵcޡ=Cqx fPc?FCV4;tRޞ|yd(lڟ%kpӞO2$TrD ۢ2|ٓ]S0ck<>-7µ%CٛfA1\&d+Vj;6FؗC8[2Y H-Ԝ#nͻSb,L]l|+b9m yFӿk?#jeXNjkcly-([j8o#%zնݩq:#f{ImNQٚGqU^CujZ+vC~ɵe2(>j*mS2_g; u1b0Q:{QJSgw.fԸhCTT*ō}p8~h+/v/7s3=PŭϿB'};lԞfxO_ѵlBJ\ {p` ǵJCjkRsvj<ǕNU*+R{J_Vv֡\f[})+cx:{4y*~o*=ņ/== +2F10Pl#xY0#T6ϥZʵё]ik|S̎11OOh`l_jiqS?~x>ZgzfߓOcԏ<~FګWspo_N޵PY>PZΑO+]z͵V0_R++m8,s,`W2dG.mW[mQUoGWuDQR4-Q7 yT#E4#Ihjm|ڮeG=h1 OUxZ6Cl3 [{ғ/gؿe>3_L/'̩5Piޖ6-Z|{5[k%clLfږq%Ս,ܬBaG.mW7[yvN.U.&|h a)J_8X,۰Q[j;Y]v(>ZTy76#FSi"^.z>s|iZw:pܗX3ORzPk72IY*/ ] fENfPخK+WzvfFE\Y~y[k+kz,C_ _!K۽ st@.2[I9?~ȥ]*(QXX!%`zs|ߤ5>Hd֣+ҞxO8G;YNO^v6FG4l5GeH'BZΦߒr6_FRS ʃ#RrJEoٳ9c n䊢Xn=`[1 cR>JjI&.7c3 ?+u~m Fc9K:+g;Qe[+wdM4Dz֕\eYVjj{)iPؚG^qu(eXCideoPiam[,݉; _GH?ˎR՘H"[K$&&x!^:~XWg_`_r4rWV< ^1$|Ʋvz=UzҽӔ 2)wʪ="unkq6_[xQW/uԯ<F/ޘtˌ_,~mՂ#xkrs}СI7g]8;׆$z?̟3a4tkӾqa?m SSzg:eNꨮf }U0{WQ蘲h9IBLH^/5{2oʍ=VjѦq%ىUYuyq Ij?V\nFwʇCB c)|kK&X0܍oFw밵zFjTv+OPeZ+wbM2v]ٺ4 rI.E S0k{̵y|.9R1lVI>3ONK\mKnjN<(}˜11I!+H [{&#ѝZ }⩧ zBT T8RX9$yH ,C86?MwH8omԷv٬{b~KR86+ۯǩzmekMcWEԯSf$.n b늎?J&&>!~3 ?+.Vk9?41y.:g$~˜3x##Mnncf2c9Q{Ԋ {F# g5j%UO>`ot79!BN0$Ѣ n*xfX=>A2%?a0pl$ն&d,9 .n='m)~ЁaؙbWٕf'm[zk@)|oKvos.jCÈ 1d-ۗ@+<& #T&N T[N]wy[?/~ -yC؆ w&@ۑlomƌ}]4`qܚLrfGwoVBe#W^tk}MqeB!fyE[0XV B-xW/JFEHcv毽Dׅ_3fӚf)x'Kq+t6Fob8NS}&>\^E#%|yi6 vŬyo67˪ ^:6B<6wSr F;j;q5w1cJ$EpU5h͙=z׿!~orڷܓؘх_]<&w<&*ryLJY6O!!`&"?LZ\^)c#Mgeofꔃ_ѫ`ܶRMM:\˂H?@[8}vʻ>hۘCddB!`ǘMfϦYZVUd耖x:{,>@Y5.eHn>#k51 yix:dEf_9%^4 
jo^:9.G_)ľ<57R(d^[or-it$gAcyV.fxޚ LG{ϝMK`vy.1&^CWҠO7)Pybc ֗6R>5aJ"C{JR:()󨛉ڳQqQYCRceI>\F}=RcJ+y:zzT^~ʸ>(aJ&UogB!DNMe\;fMgjMւBE&1cwk-^j2wkK0p& oWeRS `L._@5Zˋ/o\ѯ:TaͲ !Bx˨xw IDATO8G_h&/;#s6me#Rɼe>cֿI%%kЗx_ƺef9߼/׾ЬG/fW㈋+aЛ:GW d9~,$[//ۮ9jF"ͮl-+~aEl=o)*c(91QmPM(]ᑗr#.O7wCaO{'WM~#ỎDA.JF|˶_+]ʻ>̹d(}rWB!Z}\3>],szUGe㢶zqU}°]~FMcrF'c\43б,}?&7gʚ'ysvJ6'11Ė[FGSoLrt~:en/x[/9N95zs“<5*c(!Eb˩g Մm8zgG5~A`}nFwʇCB cq*]թlՖRzW$$|'ڏ$Tugo>rVud=klA䢂wӼeм!4D1435/wV楼dffdR.TQDqq p|<0>9gB! ]uF-ض,7CqOc"sk[|^7Ef^'jr&CY={UFS!@,ZHZOK-pj^^R >HسI{{ %B!V8KJ0F4ʗG{cjSWƘH߿3BlB!遼dT!B!퓷؅B!BB!B! !B!P!B!$B!B!$!"Ch'7cٗ9+|%{ 65m'C&F.DNf첵7G^n_P-/}wv_Vz ϗsw!m-KrgX[Seӈ%x=ŧ!ŏw`܌'$"œ>ܓBaBG_ZknJtD?&ҔPJ#*G'p8כ)_z٥˹j=\XMX/rr|43K)L--o7Y9δ/,q?y;OwB!Ny'Ըzq Yr/7zp%c{mIgOr-.UQk@&QCwkG}bfHVhB5fSōͬNh&1afFN5,1o$mn i9şVۜglN~szEePhB7#x}vb=WB}~Wʱ y!s*&[c{O;2a_&ǹ}жɮt3.e@;HAٱE^_l(}_svN,E7}Tשv΍ܚ웛.V~K,]?}6LV-0p;´i+&0^/%Ɠ8:~Gš*IvN!VSѨ8ꂳ#j'BɩMo+Nپp Suos= M{g1G0߿dw[=hb.TvBq$cJa(GaS"I,v喺@f}ƣ^GLًSFh`#4cV7LS饸56FobޛS \<͘]:xSP^C{a&L20}-EV_i ldL\x[r67 CR^ٌ%z,7 /JLj.ÿ|O<ݮ㓌kn\4gso$;/m#fe*mfCY-+aܾRڎoXseM^cغvXBWug/F~J' g7@^8k̈́ Χ#zT}sT,yζZ+/JͳEכ)~vBq$y(՞(ΑnD\n>m]_L#[5=N$*}ӷ kn8fFݙb7(Sn,2x7ZZe۰AAx{RG>=Ge"]-CfMg 3`LϢ`2;_[n[Sk/CZgBX9R#CGI:uщ%xV9RN0>燳XNp ?ē]I= 6涤QU/)+\X *ԣ6*]C57*6hã޹\Vv<Y"<_SePX-:Z9'>a'0tw|L Ko&!M`[_Qi+P+f<;~nUr͙ю}Y!W`9/8x{A쑫F60MI,#X?4"Wέ'3JU5ӋFQBl$Vx} wG{TO=sVQiQvCoRES*Q)n]#8kq|umw;t>jK뱚.&xl~Q%gJlW#Cx6;2OZ(XckQ^ȅBq.u?*8[FڽG޽ݻ?}OtVuyqBìGwMW<#wrGo4JnyfeatР6gq6:.3< zR6 qB7jg*/ZF˅꾍RЬ?nkݘʄwCQdnMvZ1\lJ.S؞[scGztzUX:Iaowq8XB[?Kw׵:u%+seǎm|7|z\0s ^zXSm-!x?JNol\Y3G6ԺLm-<EeO B!ԄTUzkZKLSq󟥆YY|=yЎY$o ÷wnzRI}?fC4h"[HSx·J^f-F[)Cvf;1K,US5|hC@w&Q9-_ٗAQiѤ1+~_XuX-!>z4AiʽǮI#ܖ87nQhk3KZ?HmB!_G23q|u1&7v{'Cߩ,ZxMeRK[B! x 'h#_`thV8?U!?ɨB!B˔2B!B! B!BIB!BHB(B!BB!B!1@d d<ʢ,s|WoƤH>H}RwY6l)L zB!>Ls>_Aʪ%1,}}5kC'α0|\dܫ,R[Q zGK7ɊB!J0]Wfqld߼s='!/csol8~Wʱ y!s*&sQsԥ%ڠP͈$^˿EҞ7BU7x4ԃ++ mK>{W%>2N넫ڙ&]loD t?gs~E Z͎/ҭ"gs*r;> 6m;ܾۢ\;aa}Y1>Ȅ}@Ab$`Ȗ#ӝ&]gS\2FYB!BG Χ#z+om! 
R<(Ay`_ą /g:~)x1*\ 5aDl9)9S>clnO$X]!P}UBFxqV_i l7.9靘>wg>fd:uؾ5kB[, 4$E1O\˜x\ 38qJ)~>MB&^xҲc#' B!ýpj5/spCT­)5BZ}lQdID3\cȬI&0g˕WfysVݨ/GvhX/M38 amcU>WjZ;0oC~=qȽBE ln7N0>᏷c2Mp—燷$r~,ߡTm;E÷{lP#%GI2bvLod:'޳#Pr l{8FEYB!B P Rߦ  ]b6 }O3utG_<2INj@y)9F6MIHQ4rn;̎;AқN +0r516q,苓o5ToڙX{ -sԼ?s8ܚa?9>ɫTU"M)\B!= é JhhG>e(!4f )OPX`2Χ$ʜ3|@NNOݵS^׃ #v^ޟ':r }CiLSK?ˍ+y#& BUܪpwBw.IMDgrΥ$O X4(=R ѱ?Y)?!ظ4궯]i!B!0 aqq1kl^.t!'~4kx71|~2P܍tҕ~8A!B!LId^|u$lj#xةs" FUf'dbFD ]".GselP:z=0GeUzkZKLSq󟥆9'0{З:Stw8b2':⟓p󇺳uTW:YgJi*R2L"r$䩩ڪ)T: !M*(N߀#VmMM'YB!BKa6ǯaW0b>Q eT39iV%m"T5TQͶ-CY={UF%Z!B(eڭnqߓWE}(zcɌ1)SNkv>ݖr};CdP!BBN|2o\B3r!.ܩfK@~_꾙H3yfx5%B!P!B!mrɨB!BHB(B!BB!B! !B!P!B!$Vdbzأ&z, "z$`o&R${ȅ,8]fˡحʻoavrf_Vz ϗswSxn;?\SV2w( y6)n{\f 1vkVےliv季z0%ՠEt=8pj?f,ʽDG_ZknJtD?&ҔPJ#*G'p8W)_z٥YʹnL4#n#~}:RF+{]n1i;CY?Ldb uz,dsk=ss_R]jiɴ@0_6~TP/+BNqz^RoyGHB%^z=yLT}FjXTF R߭]rHy!Z טO/C76nJ8cDƜ{cfp@wƳ;t<v9Z1Ã~&7NOHZms:ֳ9595A]Z ߌHcډ ˢ_u N2 \w++$杆ϩ)V*,wd¾L skm1]f0]dˀv ݑrK=Vct뽈cQ&88L='y.V\z_OBٜAxq9{z0_)sd;%R.ųŵdUfhWSEcK"o.-x18DoNpn95s{c>sM5O[kE<6g{~_g=N#`a/1wz1W gHH5<hP̑#UQjբVUqr9{͟V-jTvCU.m5sk, k\;+Sֵks?t~Y>m48nee[8BI1̜5cv3}J$ƢSx4ë㈉#{qjML{S`w*^isb?P_M{q\£s6Ko kq;,<Ԅ_b%1CrcKt^5;c61l OcGR;)é&r/@\vOwg5V9M ]FѷUZYY!bZkm ly-(s[bPs#弍(y.56Ǧxߑ{p|j_}p=/HEOoѲ<)?+=btv݈2lE뤴˥]k޿).>4 inF]!=`9/8x{A쑫F6MI,#X?h0"WGc݄ ѵ"]q.2hr5nվ~jb6m*P[ /#>sJfn^+ehӝgkk2 XZ>WcY sac.26SK*qLWǣ%Tdv-)=kMZʲ71fOjʋloS"~fhyT#ؼsOCl5:F~:4* 6Be[ewkrK[}.nĮkBBt @SM/zP~UIϕ,\mg6THs&roaF" TtKK 5<eΤ;`<W2_q|25:ob7AM=Yu8=!|T9MtvF?%Y}xc^:Fݐ( +1!{:# XbҨ;NExU*ɟe;'r:F73{PO]|}Mf )UT;;FUP9茾},ΑnmҶnij2buiY>W7cr: teO8Uɉ)M9Kj{c|>]1nүJz^GY"K?ݶW_recv,tVD(PT8"vSav3le}];+e_LYlwDT*ew ]"׎߫۸h^s&CPH`hW|,a; Oҏ9Ho?KM.}Df=kBs&/8zxQrŬG#7+ 9󇷱|Qt)diDGԻd^%T;Vyj0}^.Tmʇfq>^<気ϏW&"cvkm?2bS7t-67w|=]qHO\%'76Ow([c).dJVbW{vRJecY\KcS2g !p2#l\y1^xM~۽)֛ji?}D9qk{)D/\&$wO5Xhsd_t xr-ƚc;הqZ3U*yﵽ6B;Vi96/C{3}O ވzm;ֈɏpd3v̪'ykVsד?LG6B= FDۢeT?Fa6wSs- }+(0p(-Y=?fr.WӬ'J@D,Ƈk"x8s_>OCnJ?[!792q|u1&7v{Q`Ɍ*D[KĮ %d(jH`@žu~;\N0ofGp[ -'=B!$!wo0pzfnz=fwΛL6S 5!9!zUW.S?Ե™ F!dT!B!eYB!B! 
B!BIB!BHB(B!BB!B!ée S^־q i;>0cٗU3aq9^+g\ٜ6#ZUXTcwVKn6Nd0^`(6l&dbzأgIJ1{zcNVcSV2v(U<>5w !?=Li!xs6xa_y1p&7qLv8tz/AӴ $܀,Y;GsrX G_Z,YHWB!O;gzEePhB7#x}L|jiq0Jy+Rdl-I+'pzeѯ,3gʽ:VWH; S1afFN͵6éeD Z͎/)ߎA ڐǹ5ᄎM)''$9dٜR .p[ȰFT@v/0g=\ȃܳ9g/'x( t_32%Czh\=pwsFUMYل^ jA~wViu;?}6LV-0p;´i+ދ+ǎزVb اӾ9CÙ4n4sEZ<`:Z<6CBB1+؟wKcs= ylZeGnʜbۢAIY7%ӱL"Oec07z.!B%L\x[r67ÿ:c]L4`Ja(GaS"I4.f'.a֣T R<(A \<͘]6ؘމ$D"%TyZ[} fg&5<²S3I<]ϹȔV {Izx0}v*Ϛ:<3!q1PKSiw}j3qĎgh5q_3YOZziu;hYlٱy:m~K~Ƌ<@mMɱe;.K<3?`Ҫ+tIup ]b싹+ L{S`w*^is 83%,H׸j|sX5n#MRr~i71CMZ)&.!BO%Ge"]-CfMg 3lMvQHks6&7kUQ6izh,7Ď~&V֔K]V _Czڪ+UNGs4Ì1y1)1T;0obIE [1é JhhG>ZJ*F@v2I8U֩wRPy`ĦY: GyK3^oCP [zPstFmx;׌(K臦ز֒cG jw@sJs~hA_KKL\n>m]_L#[5t7M<% a|]T6A>ۄJ5h7lm3LiK!palBk.<] @3J EA"s!itkl@MZeX-/k\;KrPcs2{RQs=J$jW >G*t[C:_& <@a4rn;̎;AқNnmP R3g]Qd^&ޅi>ˏ@ՐVt!+M,d95Uy3In7_G0XݺxTۂT)x{Qx}jxt18enhTprSy 9(*"|}o/Fhv;u6Kԁ!x\ƬIc][)Cvf;1KJZL{WDwyCOĚx*γVq>4-ZyPRBs g =@`j4yX,5mgyǎ9u?TʺkGG^Ds9f8 槬g_ԑ1R^.xcz%m43~͸POc{r%Bl6es- MC!7_j!Uxh>/B]~m쯕B!ăE !D93B!P!B!A)C B! !B!P!B!$B!B!$!B!B! eهE-h=˾,9^+7[Ԩ}n3? 
'x20r!r2 e?r0vny~dGذWbSx쟻S~?f)F)聛ްmC@b; %ٜ٘I-j9fRM73R XD~3z&>s!B}yjz>+8gxHSB(ՎprrQ¯RNǪ\Xsޏg4Uq"?Ƭ[ /u̅B!Sމ"5xx|9C~VK7گzp%c{mIgOr-.UQk@&QCwkG}bfHVhB5fSōͬNh&1afFN5,1o$mn i9şVۜglsQsԥ%ڠP͈$^_?fXϰ,UP_$-|r,BhiXLؗ qnm-F+ lЎ;Rn'bjv,~n>z,`ߤ0ל#'ɾw2f뱑gtiՂ-L oO6{by257pX/s6"gпLj.ÿ|O<=Gce6c|Fزc63u!,0n_)&[B]bu#/Q^?3ii2F̺xl__⬵2/ye-^׈L-b󒧩*HO悺5<0aųoW< PQeBwOtN}kOwy wEymѾ(ڲ6OVx_!˻A C؎A.fdǚxy"lhꟍpZSnx8T{S:G~r shͤ IDATCt}y0UlaL&TAahwgݤucGR;)é&r/@\vOwhKJ<V h JP͍ w.W|%ey^M6ʻob2MҒ燷M 7 kM+ʵ>or~0*:4esv"FJ%J]ڄ32n #d?gX{fN@|34R-[]+y< /BSCPn R=͍z$Zz!itkSp/ل2E?*yĿ^ݗ5*=RI#9+`^OChuS<T !3+^tx"9{P9fǝ MMeTN}%fЧR8ut]Ē3ص\hIBu+ TTJEc\=5/oN8fb؟OΨbl*U`N/VW >E nBZmǮ8qq4p9iqvl(nhTpr$eJT.,yF2JCJO^ %fѦb5NR#WVT2/Cy&1}Os.}>G>j8F.prDy$O xVXoQ7 m+1!{:# XbҨ;NExU*ɟe;'r:F73^1fb#<^rkyɵ/m͝ܗsYԧq<,}wfDvYTp7MMi"*.bikf[efF嫨kX pf\%0<ܜ3x3~υt%}qT{LBJDTc [Q97WfoY]4k؞ !z!TL7\̚qb9z$DV=ﻳh%jC:uh!ǡK=EqAws%6E.}OΡt+v ]$+c>h'Z%zt320:hј3|tK@DZ϶Ck'p=͙ϦFRSʝSe#1_{+8w_x7g}vW*RE9(>}ioDze ̓cmΕT*"7JNyXt.۞Ot AuN&?38hxvr4XnjOL)枌^쫱HW/.c387\,ߌ:JtU,r],?)VL+U Î\e8h{N;> !B »:ؕ89mB*HU[3䧌fj߀c3^ <Ǭixιv7Gא0@ F;Z@[Vul[&4=5Oe1Km&Hw_oMG]9;u`{:EؖΫ^c3wT- ."b,^=ϕJ-]PjiZ 9+|"](u ^z]Wʜ;%Z'^zC;ȅw:jTE|^6:ӧiei,)?dx5y>D&GuLdIjݓY'2a@jj@Y=t+_%4'sjԹ.qroX'k+Y!l6mKlZT=oܹ)do׫иJ@WOV\^/Fu^I_릩LW+Ey\!BQ!B!-B!B!B!B)B!BHA(B!B B!B!RG{2]Ծ1= z+FcxHfx%LZфGY&R`e !B!#!"4R*0%lg/Ѱ vNa8yc ͡UeH/F _3?ҩѬU>eIvB!ւpv1}`˂W2|x‚ hN>bOBd_`)eJ1l=}ێ[no[",8&сd "8"N`絿X0p4f҃īDF8RVScis&֍Gp Mhw*_̲<.}&g#з]3uK.C [f#i-:3ä/˝}QY-fn^^|p$l@V L5)/fЖ$L{ljtlr e !B,άg]j{gDۙ:1ߡرKF"|Bj[0}z(noeEd$Kz8؜|?Hnu~/T9׈⏻I0gU:31/-3-Jؾfgfnv}{?Ґo\o񢍛)-AÕH߷2ym8q` Y4Nxq}W*Ҧ;>65%1(rP!BG؀!ofB My?⴩p΍лNmȴUd@byzjr?WW˕xZ^U-sŧ(Ey>d8^ }pěiNܗs0fg^| SYo/SM!T ~ 2 IvI9@ Z0%lZFx Δs 'B!Ŀ TU\KEwx[900PFY?y((dJ'U퍫YP(\ӣOc2FGK}0r=6VP$ޕqWS{Sk۪t*iƅ(q@Zm?gqLK  ߤ9l fpRȎB!nU(Ty goWб1^B![ަnT95HrxHzYNoBÕ{:Y)Y8%9xg'cf320:hј3|tK@dgykp2 ]aXv H⎩2sQzU0^M*~iԔArT>*/7q`nV'YՇ6ucA'aq~B!%MJp(^m1.)cJV킀x٪ 1:Oh2 9 DW_)B!xW8r"k87.{o1%spf~uD [)w=6{f3'3{VF-{M!B)B!B<2B!B!B!B)B!BHA(B!B B!B!RZy }Kt[ :a=q`o&#8'A!#qw$xM˰B4wps]q,v8{qvok/72K-k{cv'McY:}Cf'j}5! 
9z}뻽1=ޓWbmBHr(%7tzbn-|q8,p/-tz[_ʾS\ ,i/'>4]Ճ Ga~-DWK 6!BLy?ԖusH=ȊeH6~L`@s:{r8u$6\)+43c2qUƳ3R6 sf=R>%&o:˜Ih n=M}s km\oEE¹d)*_XWypcoܽιw@*=i2p"S'睇 ޟ7F4-߷U0҉]x wO{ syZ[9*6.0ythUp!B0*ּl$Y{{uBm-|/W5J^NJ0rg;k3MF0Jy>Aaxl#ɡbFjHyϪՋ"ӧRk2ōr^Z0p~s$魇ѧ/UaqtSa)`4[|r@܈Mi5WjEI8_^9u~!ɾ׾֮a+o"QB!ăT!t(GyKx b-(]KEwx[l@5ZdɭYw p3TfD[%kt$d^<&Z_j.,#x*s|ay[U}^> \!R(\ӣOc2F1b3qw랂:Zn\"[.Qל~b+ Tn5JEsUV%nY.ӒB`7pAsc7sj8xiEoayJBqr,P Ed0> D{.nB|I7@Q ~ڋ{{AE禽aZǨ{D!t8뇟_'>X~ goWeH;,j德0-+{šCrC"RvvҞ̄xu~W4+Nfb~M$DN"|O/zF^jR 1֒v9l*d]Fętp1wB2z΢}Vl QJiOplAzש^ߘ(Sկ&x3*xlUjl-?=1#CsE! 2!)]N%;]|8%89g=w<") FkjW* f3f(B)V(fnyLtuʿGB!BtŬG.ëGMzJ9wgѲJtBVCz>rOKmd],?CT=f=|I1W|x=jWOܵJnq9dgd`tТ1gp&,?.=-5mf"Oz/3M( ;-ʾYGb0W*/7q`nV'g` xCyy+cs}~<}Nׅ=TDLRPE#\! J?M4P1gO+teuCɎQDn|KJ(,%B(fq.z06ĦA֟)/7G|Υ?ΣvCT bx#R=8B!U2LY)o=Υ>c3߆v'2{8 70_nWj Eu'B!x,I?ch6C;8|k*ʥȏ84{> BQ= !B!Li B!R !B!P!B!B!B! B!B!<A-G?'p8eX`v DʞCB!q(5>DW IDATe>hQG^T&N!B!}@01 +e3M&'JrÞhWәG.+pՑ'/#pHαauN]&z0:k?Ӈdw{G#2E'*<*Q!B!}Ch&nj[~s#Xȹv[*}ǎTF̠kcf|g{Tuf=R>%jhJCTl^ĨEB!B!`ை=MO7%+rmE(>pȾF9/-L0pU`8q)UR="}07=\w$B!(s%euW4>5R('J#6Ӥ(=ѱ*w VmduFZM $ऐ,B!B<I=D6>h줋$HM ӄަm辩 fѷ׊BpxB!)P[09?Yƅ[HiUϙEc=|I3YNo|.ľ+TR>x9A!B!*W݉m{t!(WT7TT8ة;б%.bƶLʴϜF;ͥYHԒjB!BXf39O*ȟB!B7JG 70_nlf=T4Q?fI؃aK_%9MSoo8.}@k_-%JB!#Iy?ԖusH=ȊeH6q*Ie &09_= 9w:U?zRa4)[S)2s  Im袇eLIHf;/v2v%na~6 Eg|r¯OԺq$ mNY59B)/pi 0K+o[",8&сdL*"+{ct K{uvƗZ3hKq_ҹϴ>?..{Fat~(: Nsm dT;7}y(QX"cWp ," |p9G9Ks;e`pe\]hAK>$#3=u$^Kb{Kyfy+f֝ ˝#bv^G)>1p pl*t۲{|α/WB!a0%lgH|b/巋 JS={LY5&RñShcfcԟ5/4#kګKdm[ϔpΚo 6fiI^{0{GMK_<6x}pV欸Jy߱cz&;e ί=QLEC,ƼgÕH߷22e8Yͷ"2%=D|IlRVigƐeN(g9pQgw"mz֥'|KTMbu<9Sӓ$G"z<$N:Njژnim/f*lZԅs5b|5X;ݻ3KYlXʫ sGT 48Cf}#,^9ގܼctXUsi݀>SFǨ{ՊJ{l*wŹ#WB!%9Wпedk ?#x]5ݫ:|u.nh_G;8FqI T@P}Gbwm&=S 7'4lmw09T^ )YփGL6^(>[`AZϧzQrT*xw\[|QK>5o$04ų3 6n~*̀1  yO=b(1w#<52MJMՠ菜 )kҫ#'sn-q# FZXA'j\hDg\Gaڀ6{F;zf[{> ɋQ2F#t)"pv g}Koң$Zǃ"8F#ZŽPI\B!DW;%V[{^Oǥ;<έd6a-2eѬ;^Eo \xs3ҭ5:2r/ch|e/5opV~SeK>ZV)F.m~ѧIQzR1ctʻuOAZO-7.D-lkN?|*7*S 7,Ni[?9TҀ9Pw>oayJBqr4ep^iīoW#OTEJ39ς7-ӊ8F#!ME1u !:뇟͏xF+?$j>Ǿd[BzUR Z4 .Ē'Х'ƳZ \us%ԔArT>HJE&]ͪYթ 
qFQ`,v|~p+ѥ^Bk6׶Q:ypgNVq֭:N|`֣[1Fl5$cja,*ߟ&R~sOFwNXbccK$flزe?߭JuLb)=-qر?67maQ(뱩`߭sqB!a2sr'MHwTejaƟьrWWûpl 5-9FwR[ h+Ӫng` xCyy+cs}~<}Nׅ=TDLRPE#\! J?M4P1gO+teu޸%:"GY-:*پc}dc%B!Ga6͏uу%6-HQ|9EA>[6w.q|s2.`b9x\!BƉb3ewk4;8|buğ˔?0Oẅ_)\25M!B)}`&; ٘i\7rFOoaΊt;gRc,^j[0}(XJ:9cOxz,NMuI:\$B! ıCA^?kvpwNԹ)f93HTT/>XNJf.Lb znOi*ȯ滈NH[Oc_<>i j ή`@BB}t=٫!=zhI +bп?~MƳs[rmF9/-L dXu<ˁ<6yo?B!(y*/z\*Ó!}^f"ÈgT^.k}Hz*hoVn(ёa鲗Jt"dPPVGSEYSs*  |yh}~,tIYx4F}u5Rȥ1=4)JO*8&ct,'NxR޸:<>yznJ Io'B!D, J goW*2q"M_Ѭ:e>gON"ZhSZ,W e)WV )'漓rCRJc֒v9l*d]FęZ4'uqήa블я*d)jj CrC"Rv^ޑsIdNHr6IiEoKMAttGIgc~Տ:]}\\ʟ*B!U)*|FY?4],W7I1W|x=jWOܵJPס.[} tWt=}[u x]X3gQ !B0*veLӋ6W|+HUS |t:,EɎ#ykxx7ͬix+ja ﰗ۱ܯ =ۇ'#,v*e35;Ό>Uq,gO>9F:'@jk(ooy%*0I**tE 'oX_ČmXԡ23K['eM{sΚ"b\%`%B!Jl6h.z06ĦA ͥՃ4O{V<+1s.Ux #B!52/\+kC4PMa8Oذ\ڳpC~+ŠB!PMXQ&D7|,w&RnB!⡔B!2*B!RrA!B! B!B!B!B)B!BHA(B!B B+2@@@c~؟Y> 2mDryyW$=`$.bL7t>д-=LZކ&{Fްp_qٻ DMIp+D\1,B!(5Jy~ZF=%RV&QJGGZܿ/gJwp*nQĚGd,}!Kho<>Ϙ h3UұSZv٣` [>7q (Ha ȺH } *E!~4-늫2zl*z#ɇ1*,t~#$qTxj׮K#tnM,b- &lR&嶡JqX3'!MѮT"fbξڑS8;ʔcؕlݞ:-34aR >S#8&;/f9B)Wet3Ǣ4 Kd`0:sG[_'ڹ6 kkomM׾kKOɼo̍,zmy8FQl1r8Gק(V^ ŧBczkCΞm*[fr6b }5Y~\~a־aqDk`hݷP[?>e_Nߗ>آ)mc/Գ AkE 69sŚ׼xH!UAxwL ۙ:1ߡرKF"|BAO^{VAcplژ0'~ͮy&͈;2r5z ԛ̺m7[Ys5-ASX>m=k}1HC¿"rKXʜW0;vl_ϤzX725A'<ŘWpD2 (kU,'Hbsl̕M8s0,s_Ei<ˁz8þ+i8gֳ.=_j̙z$9F?'qzq=PuKk{1V1XX.K/H,<߱7]\ƫfynʾ i2c9ycGg | S>Ruk E6jO(ob!n^ßERUsi݀>@Wu|Sظe9}s60qZۖƸy.}io{Yc[?Wy˝s$Bǩ ̹ʆ-󽇰5G%[+Iy=^#sqG _:qpUR㆗LفLb znOi*ȯ'{0WC{V< V{.s0fg^|> ],ז9_,?.nҢOĄ#Io=>}  3`L9Cqxj{n F O'opLRS5.#'Hʸjȩ [ }cу}(em<(piЉW8l";n7љMғ sǤĥWޭ{ jA~q!n!Df{\sz9䋭$PS`(EUZm?gqLK  ߤ̩U1MxN?`kԏIZh:=8O+kksgMe5sKzUy( ɹSO_i nɋ?Qg zn5,z4U+v\ĵU +2=Fb筜7VIqFRI%*;iOE=ksla^)x\B!T″~~a!*]h=o[p\KȠ̤MN)T!$´XRl kNJ 9 \HuJ{&2q"M_Ѭ:eֳ79=MmyI6rT[Kuag A;nV[!P(Fٞ+>-5_z]w|cNV88>[uZ,#Si<.N=8يa}m?\Njy9cHCS}a#zW$cHy(ɹ~Ň+3xhK<&OQ*'W q@֕?R 2{..C\g%Ǣ3z=͜WbE=slXWy=B0=}UN.f8t^=on"]Rt~ߝEF(Q C Y?]9. 
;*gb\hā»YU<>:U:( ٝO.`e|#+[hmp"6J'OJ9κU)Wztzkڈm];9d,"_-̃P{SJ̷`<)whI*wjp|0YvI_=V:T);T(Ώ&e=9bߧ@:~N M(ʋ'Wcblt% ;ץ~Zԣ[ߟ ve,?oZ5+ޟj{cMy61'3m^^Sې4Auֵ_ON<Ƕ\7acF4YYpӢQcqݕlI_u{9>k\BPY,?4_בͳk(KxvYѩ_Ϥnz^5qH96En+ociN+FyQ_|g%B!Ŀ@<+1Gײ>]>}o_?&ީsԏ_N=^`_g=Nu2jmv-3<{}ݟXm*L7#qB!ċ'W31CQiW7/o]˾ e1hjCi7F*BHB(aB!B9+B! !B!P!B!$B!B!$!B!B! S2Ų+5A5yLE~Ip\Nӷv]' 0h2׺IօFpdQkjGҟ{+uM7nF~)^˧Hx=1c!B!ċKWIGwb9v')OZTuZ4Z'::bo?\`#*˺xVjC]s9t X!BbBM"s]5VAN06^5`85q-H²8?ƨCؓ`ߒQ[J:1n: G308 ikŦCZ򓬷7r-g~u^iKӸ=Z:@^Wh @5DTiLohVN\Ûodix–?-C9ZI]e}~(=[8}@*X/J8>yqH'#Ϙ^\@Ͼ=s-FoSiNNMYo(G*9aqΔϸ%ȧr [딻6 ;h]Pgu #R5-SZ2STxن#av1lnxKurT@V.mnAUO0j ɃFWbhL~B!/8!TMe QIT{㔀Iii_{wvJ2Sp?ʕ EɧIҹw.##i\-3m3J Ohrj `4m cҭ[14hם}#4y(SU`E ʫBezث=g%`UB!B6}L5r^ZF͙sF*b!5&"6Ku0I0TV0 nfɸzk3]UE{qcu[mѐr^ GgğbQgp^ÒѼ*/p^ҫ#YcoɖZV!Bw-%'crpDgIƱ$8[Z|dŢ +QDEe۹nZڨ"T#3z$`;5nr_͑%hfbLO "_ڝ'da-&1u$T){~X>nSru+_mF}"U)dB!"+bQgsQd^Ԁ/h7(,nG}Iƹgn맽SYʱ,Ru_S^\Ҹ='MB!٩]Saɨ!3ˈx]@@y@KE A5~I'/MWuؗ6 >L?B!B̚Ws2nQPZWGۋȃ_4ˎw]O=ׁ LĸEx,B!%B!B!%B!B! B!BIB!BHB(B!BB!B!ȵSS*X}> [Ƒ\^~=`8zdA>jQB)`+ry {6'(04]5`܎O56ԋib:>v`esN̹QXv&FW"o9z$P?ggQwAsxvz@)tk\a;:eӥ0^XH؟QLє6WvVNְm䣆9b鯽e$["R]ڎ\ciP׫D@:d97[)?kcو' @rfe,lޒ…0׭H@@ os,}m+q\Zr3vpioL7nG/6s>|= !KZR8;B#q+ 5_S<ONFTTQQ?-b 2j]C[fg)t./$MD,CW%?q|Sק}ZAL:r/kbeJO:zGCasnPvl[wN3-{bLA3 CV:Fe˜O^b9OyJa*h[Vv>CO;\0x)߰edߞɠy`s,-LeȂ$}M<>Gi:+xZ4wtkr3S%X qJW5e {wKZB$q!яW=5PY3q1y/7sez~ՉRmĢ?ů3!x )f;\)R]'N[9rH82ATL_/6Үl(1$:<o*Uү=e^cNԯ@@f^!P!9_Ƶ'%$8B̏NX+@՛&"X:5+Q}3( |_Ivu)2{!clQ)D@ /]]&CC9f 8 :NEEQ0'c`jVH#8LkVY1U?n7=/U>F8;kƳ9_ |cM1%UC Z-S?U@kfĺK<@g\˦aiVo[};l1r ҉ozן:Zs<"oa=EP>$i=e^"S8ؖcoz#-)GIzg71 Rߟ>nhL1|ӏmؽPܯd;Pr7_&j"0g6_|}y-1fͣzV_ΈAnB+)DŽ\*ҞoƩ!ӦRsNFl`a%}:8oX˞}#FVݭG脎O^Ƅ.g1fS4mLu>.c홗}=ןeɛ$28fuq;ܦ ޹e3{iRCiP71| dh9i57ӓ24Pxk巵W}Ămۘ@mD)fHnG2jFWÖr1w˹Kw`nk#f Xjm>%qe1|II`Ftb0S'Q|j:5Y]QfG)(DŽɕ2Q[*3\Y1'1 i.㦮(aocPJU?>LJAh7tpQx{Y/Bұ 6z"ٱ|(GqAneHyBݼ_ -^rԔhlh6 {g{W y*+S8(^goWTepҥɯ+ħƲnHKoJ!sw+~c B=ɻ/#_` ~@yc{r1nBRWiR;QI͟B99||r-oslTnҶ.T폕CV hSB:(c0wLd:*[^Upw")4nQe]DrydyM?h<ۼt*ѹ8>MyMp,֑~M^C U斱l܇ҥϚxQ0puF+/B ƯЀTbw I I$u'0#.EP7. 
?<\>;sE?Ӡ8N]k}|.}' $ZFa ?1VHŕR(;Obl ܤj>Uq[zj5j@ՋÎbS kRfP-[Id~Y4CX GoO*|<{sda ۗX(s j{sAn #nǩpa99EpIwrWy͟BwL]<]1ߋ h@̗%͒僘]GFҸCTJP%BM>jO2c)VNbF,h=ey1\MiNvܿ< x,B!d3wY㇗ HGB2^<<.VzC>x=lpOsYɁJL~udlZҍ%c zIڿa@2o-S:OLNGQ)Ļn>O, _BOXiU276GǝPVntޯQQ j\ԛǛuyԥʸ]f,u pdsIĔIw T Iɦ 8p9یQ=Wq<MW|F.%\(^S1V9OM9O4s#r}YXMl5T8h5Jdh@|j>lj15>P*'}xlWҞSLJ4!INJZй9dx4QZ!8l-F> ]J:xf ި+Ԝy%tFk*֖6%W?^{.w_𺉌%> -'q~{bCb"ꖟ{q IDATX:ϋG0{~>)=17]0`&v Ojp7#[-H }<c6Ґr^ GgğbQgp^hogNu%NGHMO2'Jq`#o$FQD^xʶgxZ:Z9\G:j1w] 5hiƪS- z춑 d1rwG7ZlVìLJxaI953q={KK0c]PTVk$C^-z+w(VJu_P?%>_3Z5XBr8qjoPHs4pׂp w])׉"r}S>6WƑѬVo''.=ɍ#'yJ(s:&3 LyY N*KFFf/}}Y@BNܽG8iqWIH{ɧXÑ]&֗uģDp;&yt`us`?+o~4}y\P\,S*-aڄgړ]g˒UΜӿ_S!B|U}#X)hޜbfM9dm?-b.<ӓe;Cs,ƀݓ|jPi3Y|&>k!7nA|j.8:0&` _+EgkNr/87T?nm6`ݜE謝6qr,X2΢[ %'crpDgIƱ$8g(0sb3jxTѯ}V{s3Jfm>G>C ?niQ?=zZcjy&rwhX; rhcygy[d 7}ccjVdlx*/R6dM )Wر?hj6;WG\[?E?ǐxufhP wv:/541m?bmFpҳw|v_!&?*F茇9|,k^}qT-yeM)4w =^[SWv<et' 9;s-!=WOBJTuK7 !Kr;CWW|d$ǭXYjvqp(ܔq`6ԺM<}(J0e\tz Ӆ]KEbYZ-p6AA'v|i Ƕd*eP%`?c;[@ZԎRֻFw&D6vqrU_vM})[ye';Rŕ۞4ލ;#_Ϣ[$|5Ϣ[i'H{|?-~#,_KC1mcwTcVLc>L!>Z([c<̊јLy/WWk@ý|/fNZ'4u&hl.*ǧ@Zju&[;cp|D?cbUvYcq|nr,ٕi|L j(В/ZQQhv~CxOs1%vt ߲)Pؒ k|ߝc.߂Im(͙N04 kL~o_5e?l=S}m5mIe#j)c~ʘeg###0Ӟ뵽ϙ_^yoA\ιXÓakXa&3`8,yZWkAwu]:Q?>)89B<*"#salW6QIz|]G6Ϯ3.B+FZ漙G-BT뭄xnT8Ҙ!_ʉ-% B!JB!B)eT!B! B!BHB(B!BB!B! 
!B!P!B!$@@@C93 $o)( !B!4?RkqqyIzfGEFY!B!PU]qw%*iƫF &д2c1WX6`v{nbh >ԋii>$8B̏V7\#B!>GL\p7{F9%4c,3v4c>|)Ǣֲ^ mcN+ӷ`&AjmB!BC=-鷉\O8a0qE}}O~/G )5:ur+Gtx' r5A!B!OsYgU8zC-S:OLNJXP5yz *T)j0( B!BIs[A_|}3=g&vHB2uyi-6giԸӋ-5uPBbRB  1➬$h rAs~gfp """"""J """""@("""""" """"""@("""""" """"""@("""""" """"""odR jEii)6M9N`` fYA7^DFMg/%-c9`<_Us]DDDDe #^Ӑ@xD[ _Mcc„\uG1 R k1[r0Dh9O>>´|jF b?t{\6a0?WNbW ?ė`wykoW֮Τe3>OI9Ww?FoNNLO-+_Pha-]/;XCG:{:rqsrK@F]hùp cWIF~qciM_3}Va̝N\^uEDDD.@J)qsC<˟0JU,_6q|w7tޙt yxp=Zt@qBF2?cȻ;;Gb0fO(.H Fu& ?s8iF41oXk vռ8{ھ?o’|ϒ4J}{YS< 'dL7VCùkpb*>.qg}ëGT+\͋NC VՇHWpq`ї|rpi˥cNԕLj1ҰmC[p9Cs(R=4i,I)ĝC1$nL`fỞmǷfe܃,L˜V>[nи(qS26} C[uKSFV^ܗ a_jg=T5',g>I&69 z(Ͻ*A&/7lM+Wx񁡄Ob9պ>_^OL͉߾I֑˾_xu47b{_QgF,'=p ӗkzףsX~a7[q#kteMml$fp{~q7n&LEVud+ᇹ7h7L'UDDDDjO9BX_e~<6$ٍ<0pg+0jMc7m2<}Ց7 ᒗOB=ܴ|2L4> 'O ^%{?Ru;&ps ^eĉLp_,/8n?pL[I:8;btߙ3AMB1mc.eǗI}A_כ|_N J*CuUWFx|ߧ/rq fיawgX%KI(?);҉ux/L`5@uWh|~~;{sZ)#ݛ\k|!Nޙ}c srAZ ?/|=\q+)`~|`HNL=vC UM:OYk[J7;ͣJPzO]łEv`Sx0Ci9^/3齟0^ 8y;_'GCkͫ|i#|>/&OLf\Xl3RW)ޜ8պ pd_p$2oc`s/w,d[7B  \==ߔ0S- d&g͚B: 5z/Kcpqw#XŒu9tAQR t`zm=ajR8]պ/ԌټcCsUqUJW:xz*Ax}Q̫֣UA:S,uHE)nAֈBl_Ď@hN//{)jWb >{i^b5¨&g|#sW;8:dgoi|Si}©Fs`o}*OE#Κj].A6I vFF[`V<--ֽG\yfa='b~f5gwC/""""u g0p[?0C-XBbƿ.)MA\BPLeڟ=>v\צ=FDDD;%{Xhr YS%v̛3\F@'GNpi&x9愈Hm9#IVŕ0wfJW9?B]e1S|:B .jeq514FyPDDD) MѽSZd,~N,fv#GqPDDD6d˶Fm^ظU8cp<vwcyf,;X2g>+$o~isW!tNayҷ<2 CMׯxIe`#n01>-vg:v3>at(Flp̩he yk(_Tʁ%s͚ϪI/a ^6:M|m[_s1)bF7|CjۛAzd>ɑgqڟۏp6wRf$A .ep;c\WqO ̄WP8 [aȐ4fk%wf]ƦT hRɶnWSzGZ&>im~&s4t1Ts/J&B䈖9;Վw_j(Kg˂9,X"FO#Ѯ Cpq'KDDDЩ8HqrS Zb0@^p۸UlȥaG'B#^hPCLH݉6^o\ۏ "/9bYr~h^'Cnf#?oo߁bZ̏y'W_DAlTQOؾWj[׃XRѤpcj@6Q2H\?~D?)A8{;sF0Ϡ(e;,mFGSzr uF2uJl,1ֹa5yv- +.g7`Ie5٬95G\ і>ͽ|AӞ-q۴l<4Ý| k&[֥zH:4#?s+dn[6 嘥mGȱ?3$O+p̽ 6!-icAVm D 5ףyC^F#*zsW' iy z{`vv1!:3Yf4;Ve<.#⭶ f8[͑U{n'l7>Gruu|ϖլ 5٬9kt&B`!; xCScz4E l)KcC sORNB1OSm# ,ہcp8wz &WA`Ihd7>^]oNN'y,ʪ;v1lBLi.jL2N 7C0䁏?PL㤷+0,roLs8v-jCHWrVQ{5 v3/jIi}i& S?pRG\kRYs5'կ(y`*<~WWlM\zN<"{lK P <`< ي)ܙ8رuؽ?Դ4gcLxf2;ِeC#NDFWÎ9*SoG vJs.\BlF&3*g@Aa:QB[o4s4؁k8`jM<4Qxdh= ~עwNęKDDS8FjE/WlZ fμJt@>;2On5{abEQ -{OrAOxt*|To̴u 
;=yvԑ1q%b[M=(GqO08 ;sO{Ҝ0`ú lqfjyhj8($a> MpQ<5fؓ]x m[cIH/QZ8sQ t.})b|~CLXˆ*!S;T<٘sq+Y릳  8'5GkBE/;nwΞE揲` IDATzM :A2 +xn0N035v8Z߹k:Y; OY6q|dgS#P̺iTrm9lm'ʩqh')?res]!j:9vzFݢwF]yX # NwO3VZ8#sQ <9RUs:&tjPƖ)1c~r-X-$ח23 }NhD_RW`K鹹&gG0֔ |9ulRN&}"ZM;%{VR-uCQ<5i$U|ܘDF~)ZsLQrs&q}|Syuԣ~IקϬ/5o5ve[z*xNQۈbcR*9%vL>Dw"@W}tDּ}yĀGӑ7?۞nr[,0}#U\qsL FA5}V `; G afX~\mըUg&NpwS8,mw |rɃރ7$? kxoVB uZ?y|X],{^q^[sȿ) xy,n4އ7F3lIfx/ /W@. M=`d/`SumoPs{aFpm#!d Zm p˛Py0=8Ck ߃W!K펴N-+xUnh= JCǡq 9Q3CvtZ5}~#%"""""u󧌖>B# @Jxb Ϡ%D_J+H] &Of4  jgá#8 Gb?C2N1C[毅7BF+. ,R_`Q<iϬ`_9׏2Y0s~%Ka~,/}Q.x{]-9sH]|? VoC0nz#o=*MQ< rh(zH̪\A9 x@WoCQ?w+9GZXunyWL`P[Xc0w-IDDDDDF L\oA'>5M _c I~^0C[2XQXáY@- WWK$́y7SVP2|噠E-r.y0A^Y%""""/)%'z$VGNQ UY2T#h8D> ;'C2^*WÌ` ȪMpg}v+(N]lU (f> W9t{>x}b((|=īVjylIQ/H4AẫEGpsm]+~6}"k{BXWp\/j2,Sٗ"""""rN8h.t/w9c-sp9V BhYO3ˡ*\q@Aʹpa pC? >h(\Z!{(9{$9d-|7e fp3m<…] l@[jhE)GfW޻vg?*A,,~Yv=&ߖTt31~ ʏ:< r,/T\"""""Rg8ʨkx=2@+ZeB|:s lsZݭKtoKt?G0h$~h;NF!g5 I0m>ٯwK{I6DCJH=p۽[K j"<4kt9> tҽEDDDD cm0tEmDOyi.s.X:x\rIʊFa8f0~>t xضl0z=<1W\t摣|X8la ] +BhACxX-l^ ~%pGKDDDDD^1L]7 T9/myk?T=DDDDD伥Ͳ>,dGyYDDDDD/9=q70mv[#"""""6 aueB݂|GDDDDDEDDDDDӇDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDeR DDDRf3u>flIaBz3}Ej]UJaih d&T,8\ ǀVH^[PϼB!{׸VƴӘg.II4k/gT(\]\V (\K6 zO\ .밎?"5࿳| R+V63㕤h%kqX_R.)d{6Shbs`9.uoKPZn{OIܧ!y=W|%O 7d#4kZ16gsNLCy#Hy*±WlPcOKP7FO]K*q{z r3Y=s>_z Ç`k)tɥydelGצ3zh\&[3RX^Hay![m卵o0dg8pp#=&?dKE/[TVf/$,r{2RXS)* *=ǘ/Qf~*־Lm`Վ\U.[dalvG)^iNзw*B9rv lԁ 6UqRyیzx#gc)㍠ϸ:VxYT*]p AX90eV _~z?M`a|2vpkPf+㞿aG*ϊQf+s_tuϤ^+dBVyFd37l6u~vv)|f Qoqùs3›_Çs?ctmJoMХl.|LX&FϷkɐ˥n _Vݔk8z5$^8 {`a;ğO&'V}d,?z{-a(L;r)ppo}m.5b3w7K60R`N8˰5^)gZq,l<`?ǾmRuMǝ?sm%>;~vrVLfMGyO0&Sxfk`ENY\fv AUdN^kfVt5}(m`g!?ه` +m}#߸ZH_h:&}fD{X] K\+7Æ߯k,珞,v{cT2?yN&qgK<=/Oz|6>ڹ|'Tv[g .=o}V(*w}[i#C-eO[uka ӶTcGo0ç7x\Cg.naz-|THـ2=˞S>aPtȧiu4lj1Wb_3b<5WQ&ŝ~tm#r#_#2w|i fLen6WPDDDιČM?s=\a9 ^iWHr{.}gF xG `ڝ_hA65+x;2$<5l:{4[>{6N6;_vdmޮ~+-XDv۸<z ۟\}?1,p~ 9㓵yʒGl+P\ů=îνSwRP/cWwjDڢ87m>OL3'ZжSԴ"|xDX0{ F7ֹg9%ˉmҕ&#'tl(՚zـK8! 
L<2~cBW1yV$ G2bd ue{tkقVeN{10gi| WE@ j9R(,k:\|B>Of ][<̏מW5&LIGo]pk9xcvKܹJ_OJ‹u⽄؍_6y7wdV+2U]soϕRG:{)yΛjtmz3us!aUhld(`^[HfBc4q~0:n_OoKQ[I6;fmgzc<3+/ffMFwW&;;|J Hsw<)ɣ2YrR8qt}?C^/vTɬ;M|1􅒃Y58V\Ml͸/bl1ȗy7шFtV?]4o9WoHَx* `ˌe{Y#:ssp r~YeOM6Q~,a5+]NMWlߍß-:@^Ns#:F8#Le+w)> a-iož?,;سԣ\{|ٕ r2|3ldhh}+Ɓo0zw쳘׽~[0NjɶWg,C$}Ā+[&=7Jkw(2KpX2X+K ,'^DLXlOIY2Э5~4Rewkۼ$5{˟:^j쩷IW]}5N䵿B}ef̄wǦoXnQo)9z@("""ݣhY˵Ʈ LyQB?/C{}3QhB+" L~+_/AE^twgsK[uƉ0:Vwג.mHHpѷ.Ko./͇x8:7+in.p8>NO=zUn࿣yL|lfl[O\GGi8E{9.sS{ K5.5iu]s11]E` Ɖ1tLƆqa\5|Pu}:zAimnͯa4&GU4̻K{ew6 Q'8x0SF Á9W_c{"lǤ[;^]Rnc\r1ֱS*n-q2 NFey[Ko$|s>7[z^Z܌_ɬ^!a\v^.p݆(Z>wDr` """"'[|z3Pqf kMݢp3͓lv{KH*j>&(nZ 뾀Bf}cqU#i4Ւl5qMqp[NqH$$aiB K’eYʗ؄P,%@hb%H7ْˌ4TW*>料3s?9979Wm;cu2 h):o {g?áCɞճߋԌk`ufpnBg-â9 k/9ٴ,]+林=A   Œo}gikd(=M+ "AAAR$   BAAAAdbl M.o=vU'nE4*> +,['`t7n`W,nLAAA8[r}w8+o+7PsYl-_s~^̵sHm;y k2A   p+]}?<&R8vݻ-T? ď|/^"UDK4zv2s}kO[eMEZ   g S.dǏ|ϱcoׯBzp#WMb$KwT)*%]; i!7inƦ݃PE   pJmp#vsC Wu/oTGI(h%ǕMٚ6z^'stcs3[ IDAT.n? :O`m0!-x{Ԟ޷\7O?ߥo{WV^͖-['j/h1#yKi/ 8;c Œ,#]װ(c01Dg ]C5_}pw8o}fP"οpK /kڗ۵o.̇WR~lxl;ϙwγq,G8"Tlf|IWbFPgq3"1oBh&Hc"3]Dc^_#܈ `LDžz6EРN[_3EWvOlvlՓmښ^^,`Dog.CB >VhxXGdG&8WVlߣD,MyXK8GOɡ>G'FlMɼ*AA8*-zG|r>um ZN媋XXBWq980MV3ώ򋺏?[*>rB{W*5(6`~hl=j&o~ ct 30Ba@$M(E @P PPv(xEfѱZbі<\HZ-:V )"sroբæcjd8l gi-:v:vAPw#>W.x+\7[r1}Os3!IFƒH(HЭ%6),u栳?aBBI\v$qPJ(V|%&[b!_ӽ}_CtklRP˺[O-IG_ ClĢkd)uRϿ Lq d(thҳ mV ͂ˮařiqtv 6˦:] 4¢kXwcDbb2ͫNFIe$jƞk&^1L) ̸7jM\Za>Fm蔽vA7aKxRÖ;!q7⃡0`oLAf~18lPǁ^6< M%qdǼper,FI !Zi k6JrԺɼL0Ghpc3&='/.q4A8N^5@ģw0 L aKN6R\Tݪ:T^N(/Vꬰ\5VdW |*s r7&vvĈ%8H)E ]TE Z{BiuUkOM<[otM$j2zuyɿzTB/W'i$ "OMO;fnɁ!U,z]AA,(J4L,j p|H/U{wl(I1{4k )4cIi쒔FÄ0ݾ0==0iX)ijlZR3cB$I{pr K&4yi"MJ1MӔ&իٲe`!_ҫie LፉELdר)Dê y4hPu&:힤ɪf='%=4E\)qIWc3ŮPkR~~>q*'Eެjo`M6V[D(9֖eݬu.]M~?X'r;ke/ҹ^v7_S[U/<Jqk`X)3s9˚9=J>00=@$M\Vr2x3mema;W㈫q86!J? >= +.Mt5'nUKt[lߞ4N`uKµ{$M~aT|bWeJ>+s^VÙ!N[ ?K;*qMy݉V:p`$}ާ3I]@i )[Ƅ S~)F0V}Yaj=`c-/}zHMy K k8hhx^Iv028R,:ܹ۟(aXNm}G0l}[=rTHiJ? 
2[NXzmA vJrzHN`0Jg^#vY /NNmui~dH"UGŏyFx#YJ ,b^L2b-L*f%ukKyT8eꆢ͌wxGazC&Ó.hz,3hb{ډ]Ji=rxbDmc8Td[s:c|Hy3!3g y"h-nD顠e;O\X\SGDC rp; x#THiA5)S-I9A{_֞ =A"QU NAiXt,28Q }t?V_yƽT pF8(tR.҄vc;;:wu+fRQBif)%%dPYFqF1d;p3=]{"pOLL*=ܺV֕ceʳg90)t7/{OBޱVwsQEcЭ2C]RڶcBJKQ?@nL:tΌOFu_a;ĎlT@ρĄKj4h$&fRq[ݔf&cif)Ŕd(ʗdS=A^ky-}faE ZGMy UUbga6ɎkfE Y~0nUJ脅%Y.+%1e96%|7! YXO~!o}GAw~,t!dW~c"g <ݛBr! w(t(w (wrQ)tГKP~1sWbc`)K򗨭` ^wT;B>Z-i"Lf3CiϷ鶄@,,44X.>=NP'啖WDdٳbZeXΣ û;JqoR:ЬMPHhf,c0 8,ϱk9CCChd29'4x:bi1A%!M&Z#f"Lj57EES:xع'Ë/RX FɴgrqŬ+_%uxA8<0{ݬ.]}(кZޚRPᚩKaMLjL[w,;G 3I5~yld8`hFUYνе}EK&F`oggxu Ö6 3c qNóbiqo8k:"EN}]ێt \,[lc 4mo 8o1+ WpńjP4D`;FQi5kI KjY (p0+kgʧ]0jTm&&[۷&uu,[.](a" ; mߧy9k]1<R j\/v 9v SZZ = ˲qtW A>ܧtNC<]47vAUvWW]rrV_-?l"iЩ"޺M_wwnH;i7rcw74{V[pa @Ccwp% 3;{{CJ Dсt v1A}_5wAB$ (tRUNyf9&N΃A^n|Mxez=tKWS3ե%³qam?pFDնĐR5atdW2za1Efʹi#'ӆ1I*Ѷz R5} [۷V[#B?;䝓*g@GF_c4Д aiƘDZ"L{q,ҞSXB2)e`휔'/#ϳ~#B(]5sڹ׎*A(P?g_>tQkR[H^:Y tqbsMbLks;;T] DTHE,_ʒ%,_:x@xz".S;;FxV7V|kגCJ{;0| ~G' aRBJϬc!:BǞ)S#pwJ+&O?sG/؇ꦦbuilN_F #{R/rd} >)U#T8M/'2.ɴeb=*7tDWA8?v%_{e4=9wCltRqm|WӸ&C-sH!jb;jV^2%j_U{ucFmx6V'WT^!"PYV? 
ѷ~Ѳ(Ƽzzv5 \Mlo𔮅)_~DԠ]HY5imx_L㏋Ĺ޹M1L{yotMgvlZgp!AA CPGnK)-NdI 5g_:%jyV+6|b't֥d3 3K&'y)0L;ӺL[@)hk::ە,{%%Tc=@?'֕cY2?W ¸ K/?/?{ew|8'R<%;P7547tp>Ɔ{gY?k=ee "AAaÞ@948;{6[16Tm[-_ PAAf0}<y{[_0 *="  0?#ϱ~#J0"BD PAA&[tOȒ0 lói"<Ͻ]D PAAf0 Dllؘ& ݅4&{  0Co?t0lܴ y B>]4&6Tm`yiUBD  " T"F^fcFj'ϕs( AA, ז寇恗-nam>?36؆fV WV]e8ĈBAAAλ漋g_Ӈȗ2 NkJ֕("AA* 9\5*(999%  " T!cN   "AAA   BAAAA    PAAAA(   AAAD   "AAA   BAAAA    PAAAA(   AAAD   "AAA   Baz { l[  $ag`ۯa s *W}AAA0w`믡}7\zʯ$xapdLAAD ӖiP}, NPZ +?^ sj0AA´+J!w6\%Xz3xJG#,^{6~_Vkk>q?կ=1R1" E+*av>x*vPW/7w&T||P+?KڳY,΂yЃo%Qxf g 4j[]\4 `"Nɲ CWtvdWJ2q͑JKKg7kU;R,M# }s]#k/O~/ JՃmbg v; bɥ^}`u@,ȩm3Mɍ%#.{/*~oUfwjok ^X-.jxWumhlT"UpƯLTboW/1:ғ  .U04: CoCOy|ms&a歄ru/'Q E*c9|{ ȧGzyC#%n>0d_8@+> |gK|8ֵƑW~ȝ  ɟ)311L}" l.X *K }^Eȫ_  ɲﯰpUp% 路7{4Ol4 MJ$@V[c*.08;= t|f[իٲevx7б佰VuѤ&ZG}[b[3V,*w5)wUb&h" /}K/NϩYBebgA,v;O # lW5OC^t @k̫X= L>bOb1wgŸ*-| ޿,/iuSI !:Cm[h j@, vP?x}t;v/5>M擰{aS` Hڑ  pCa(X!{vZIQ7C)b1E0{ .fCV/X"3: ם ?9=jFj5Dk?&t %vϣ^L- Mp䕲6JzQKERd,`N2,ay3]Uw-J8δȝ#A7!k}%MwM\ˡt(b'^1.{m, zkcN'WfۯA͢2i^agf`O`REz0b3rW" E Oӗc|o>{/'I` ul~W_ ^H{.2m9"wE}kW%y YpLd8yBh;~]1{ZûZI-Y,SXZ]jҼ켙k?%!]04p8?LƪUJN,p* ®:?c*+SFx* C?[M~Z(ᡟgA":DPGc P,aZ@xDP OapλaޕMs^[>a%*k)v,vbɧ-*Ofu%5bŁU:#5M>Vvd7suRX >iަf$Ky/אPdPK"Kgp紸(PHn9e:h_Ã8<@&&9Lsh<*r|X*Y1YZe <ÿýO®?Z3`ѵ0j~D&1ܼ ~Tש<#arc'bgsKGC>ȫ]Mqyh]c,Xt6͆8t':v}{c%{S%1KiALn:{ZUaνa\2`X%Q,*QL{;C z(A9p4Ȳf'a=6Zs`?+ 3@{JCT3;qlDuUtJt0*%gҞgjhX[ IDAT-̑X'݅Cw:8qΉ+$Y;'P:Ό84 LW͂UamX`JH,X6 Cwkl fŪYvtt':6ݞ' *⨥4pՄ1JɌ:JU3Mac*ALuz%֜͡Iw3Y/BO Ugr/̷kg~D4aÃi'bFp[2rc{>%Y'9Dð7U}媸hٮd ~"pjMg'QayWpꓲs_y wN'Q^^F2J{`LLz],P` =8yvx -A2V͖⊝s ¡;ƷgA'>W~ZYSZ-Iy<ʵQ{5JW>tbCkqܯ>ӜUxѵ'=a$7vg'ijK,hٻz3>dɞ4Kӕl X,Ȏx)nrޫ\E ,zEڦ{&iɞdxfN&i6mf_Lɓ'g|%_6[ >_yph'eȩ8Nɐx|D^I2ÃapV㈊CcycZm7kw1Z N*zm`ERN *GzF(d9H2,*rLX2$8 19&yOAiSlNgwt&$bʋȡm6 %Չ97VzSK4I9prlZҠX2S%JLc/'@ A;[lz?b7&_~y5S9g'*޺h6= p wV*9{VoK#^jaC?JEά<{{FH`PaPjc8J0"q(` ('^SEP@945cҔS'lpVq߯o1XNz AB>eRS!Y_b*Fh7ˢɈR'H XjlT>16ppŸ +8 a2!11,-;JG dΟLcycRu^KˮQKjTk џ'*ԋJ'E@0xţ^FC~_ԧϙtxp .CXT ƾomO1Oo'JΌ>A2к>cFK5_r*@=.tG;DokjjV F } {Y$ش9 
K'+5{Y'b,$.X%7|!ab=avźefe G6wUdֈk\#n<U꼚{"!3P&g>HW*E{ͰhȨHF.G_yc\Z։bЙ-;lc,(NG B@@@ | D3=#=3ҎJyfϩ~ucW`';\(3WT5@3ʤzf3Сz]t@%u©C.9\S釵sRNGoźEwV=EX:bG0Ph-/ph/qY4NEbc?>FB7cc$F-W_ 4ïwVŅgbA,4$Ss=KJi}܆L**y9չԻ`V[#07&vu*=gi}@uܹ" /`FkItiAwcXH8u. p >yDob:VI%n,B`19v;DE"}bH(dRڿl3) p b=~S:6[0a?9~בp B h )}ʭ/PS: =Ms%!\ E=ҁX2tD2m(H?7rqf$aCp;ZBH FQnFlxc;Bɒ3!z2Mv&6?֫OVI*5Ybi ݛud{YE^Uq1M_S?'X:ځhV%Ke(״V mT UiВ婷eF=)zarRӢ〒@r(Q~$zb]E@l 5# LjPY_<}>MMT? Dpō@(@cMz0(5U8ƒ1We;?3#^/!wU9NWY14vaW`VK +B ψ*|mbd<ŢL=Im^3 vmrtyS3c=*d܅^/1Y]i昉I-iWNKF䋭#PQdMU^i=11=Ĵ*+ą,8휔fDj7ҊP"@lY֋l zc#Ģ0h Dk 9 sPjDjIvff @?/]dAH +:!% (ӗd߹)?rg^;mX0LOXɴF*+{`9%f9?MWhN "K@VB8lɚ2CC+DUnDdp|p=r0]U£*EN꼼CDpZBT s'cFZn7ܢT5.n"O9-d/[G ܉`:#m!îBJSnZBxczcbݨlG}`סSDq*}z1sS)β,XY̗,:X.]Q/S}fVleXv .gR|L7}o@*>qg<|6ٺ"cXmv2i dtm +>gOZYTglNĀbuÛ]b!sT-RS~hVe y{|HN{n<:c-h )x E(1AbʏNc d d $8"61 ? _|6s"&= U׆Έghtg+@XϩSd9dM3?ہ/ POvבysGB,/# Iܢ#[D $ ٝv$[3~*{M Ѳ,K>0ؑJ9@ C09Jӫ6hH*{t7[CJ@Lߐ3VnC!i9?:"枏'^<4v! C#iQl*W3lgb d d-|~E`;Vmp-u-؛{굳Q𵿏>].Z.{YXԶ1${f+j"k>j%v7֥hp ,CFwp>~ջ&f' zo 4B+؂tXЧ)W逘oahSyH}JlxJ mӀE_*>P\4sWƮŬC-kPo([_\ 9F'bbۅub+*ڴ)eN&>؇13 /+H #܊H :­(B6.ƒ)n?l:b)BSY 0k(3Ub.C򺎁xDD=mh0$bsʭ Pd,vN.tn@WFϤI&N[r 36lz!{5g_3stPSh8^#u#9!LJ6N mX.CCMDŢ(-T͝;48a>{М]MGD<.DvemgԋPnz Sn!o:x߃X C\(6s3u ! 
A.vw ڌRS̕iP"hgO~}a8zBNV r#u92d`ˋbG<\i0Gbz.TFdrjbزm-Vs=-L7~`>ax w?OơEjv+n^bZ;Tg㒞;2 @ n}!܆BX5YџZ®Ɨh*!0G@u>JMu' 1܉&wSL(4@-iFmg2b}ʞ~=NFH *_c jFNorQ8lC!4!*usrWG\8D{m2bj6/߄ U+`&bbQ"Qq*Z*ͳ?O>ud@,?E e;~IZ[]tDZyfU"K{ hB{J+b3Ρ>EI<4(8r(ϳXewlkg_B5p cbZ ON 1{"T)ӟCh Dk 9}4Q@Qy?_< _'FN[^^mEVk#ہlͣny+30)Ysol[ T~Pi&=bYz`N5Nja3}hmq (9~hިg񰅔'Uhr$dr1•Z`:v1:tD C%6֢3xnOD$?OvְYfI%*.U xn9U` 06PsSC'Fplz65)1pWyp9+hZ#6OE?8@2xD#Y=:{VS_g%ޒ%+"5ӍAmB&TRN'o*  6ϟ=lrh ա)X\qֺp|AsyoS s X!w8(Cdx!R<$dל%ަȒ@Թ"ֿ!nqs _l`O*.;6Z\/uC@ٟ'IoPk|h;S0$yLVj*rp Qh(P^ﭝBh6 A%Qd,EBW`h8G=XqCL3yἻ̹l /3HbH¯W/4΃ZӴFT h-])@ :R;­vB B6Jb=YCS&8t*X88tk6>-_]?:!!v&d >'af@Cp9 b}1|)6 4S/zw,@c7- v4W(DpKjVdD]Ϟ IDATX&hn(NGeJˠm-rԷ61ۙshjr!Z+n::1oE"/b4DX5Eji-ufua`j h$[i8zfڌrs50PoUYfrj - PfD*HbnU]s.G_&ڕh+(M}yS+x/G,p[nU X)ba 2 @D@,\xH<񋹲T"3BƐ :+5M5 }&9EZE2b"י㺲LW- ~ {{ā1B1G#Rp< Ѵ m?O{K}kV97hrQCXӗ ZnPaF'sI"""""U**:DPb\2 _T2?7<wlg3ۙlg3@8NZx#IHtbsz,9!??4lg3lgb;5d4;OK v!Tp!ːe~}rq=>!ַf5>s3n8 &+>5}@؍ z11bwX]G*6b7:<~7AGޑ}|"""""N3՞P֢J*=,(䰯SrpnyU!s8{Oq;1}6Cvf;ۙv&3ۙ0x ա8=ȁSo'w+^m+z ~???4lg3lgb;)7dԳm[ѓZ%]͑R,3JF؃Z0G\VCэq ~hrб ,owݍxon|5;Ľx'nG?Y_OՇ :~+qZncHc0>gaŅ0qUx/éى7݌wFsx^ܶP?1<>,"ɲFXl֮]ˆdkvf;ۙvf;txۙu1"""""Yf D0>|]Q,(a^ fc!񤌆Al݇>l>VJP4cA 옟oǂ|lF-hI2z6 j *,ȳeU6htövzcGxӨPby5溭Ȓsm[}m>YrlpvuB"""%Ob׏mÖ6Z U. +n1cg ^І`L9FIJyV} OȨc׏m>licׇ.D݈n+NtbdžʜfhTҰc!ת)y8:Oy?Cmj[PՋ>ڍdYV4DŽ*&TDEe^õitIYF] 6c;:{l<6|yGeN3W+*XQDM7(^k\J? 
UCB1H->SYtdH<egB""":ll,mÆͶjx.GuWtlӰU.+L{,H4Z/~Sqhtf7jqLi6)V onRVrB"""(sEA b.3/ϩ~Ao=AA0 M]6 QfկeUCjѧ8omRVMW3%VYg B"""RBRmgzSk3^TO8lbO[az: %٦=NEIl2oPHP5TKWFV ~ϐ^7<ć Vծ{%VY½k;`kea!ˬCI,v""":w淴݇m>l#6A-"ϊc^ <6˷q`gu9AЃljWƖTYP&ɵ xX&_(m^xk)}bVJœ\ NEM 5nzp:=e#csmCm~JbEv/cA b)^~o3&RY&̩hJF٩ǵjnIDDƖ6!7Tz ylb%Uxzȳgc9(RqG*0 O}[^\ *]"(VD=M$e4݇m^CK_H5n.:sVPo058,ǖ UñyԶ; ʈQv,ȷiR%½42QD7E_ @| zQeu,9fa22XI288#q"Hc t`6q F(fZ$ VÞeF5&6҈Ju1V=EhUZ/7-_o |f< g.`^ m(6sY&۬UAD}*,Ѓ?iQn j5&V(6sXf0TOTwt "ڧQV"ςc˲mbۆ\J#jQEX"^?jDqs}I__ lJE"-"4a߂ {ze}??8pEKrr|k'|E6T8F_E_GͯcíUmF-!ת<)?(c_ ߛ } /dCQlYf ef4fD&7_* FRGcƇ= Ŕ Dp}Q$*T{aAOgffXA]QJeЩq= “S>ӨtH6hkUÞEZ%AQUC8l޶y}Jos۽~GPc '۰ ߆y;1ٌZY#=Q6xyCJVRbEN] ._>Vgcdžo-+U?yVDKLV)ASdA[ŰgֵHr?m|vu[Iy%DӸ{A7+d<㢫a_+=+o~G^N~ |NG9WEsrT%iяk,+!13HcRArקT''тl:(F--a G~ 4Z5.?J *tm0Wo{ffv#,mF-z5fVxKlԪֆy2`d%IFYIyhĞeTC4Dx0<#O ۿO5hFgxT'(Ǩ&~V >zjwt(+y' 3BJ bcx?^lUXG.mj@q" tUL:RQ;jQO?@7%opt%6]-Fbe đLO9Xm!2brC%b'RzKDK-sEjʲ ϵ1h(6(ۄ/-pe$_Xl>ߔ 5jDJ­_<Oҗ~%FI4g' #d_ @^_ FGޖfԪ0`]'Y,V0q8yj9#ԥjL%IʰЩB(@46xdUSeb}>9YP㱍:Òy_`ʝ 61h ayauc.{uw jDuѬ].ChBtg 'QVvf;cr݋ʠE;6ѴW`{ <1n?s3n8 &+>5}3iX$ܾ~ri K~$*0K6m&t\|V {_Fs1<µw?<.@肯Mt yG~os տV{}1(F?'MM݊kG<ьsQ⅍>$~lk5& 'Ԗ\vC''DѾq7Upn6bg Bp 7{$bf-?9/ E51b sU+p} Ox%ώ=Y eW`yݸnxw䫱q`Gd <77K$Yxm\[96rDzx]PmIK*䕃(+T* ={|;v4 zqBTo?B=fB:V sgOxa7ݨ3 i(IEMn^r\BN%#u)zpf;Q}]p@kޱ Us/RƆwBIi8m}^3//oXS-Fi"|݈|{}>a=~5 F3 щ}9lYo Ue09u̱oV GyZ?h 9s0ֱz7[g+\{_#9s8<?tu1e!h|ss$""B""⍏ટ5w݀*n- @F}y3>vs#*[}惒xs9ێE:!*v]߿1Z؎WxCx}gXwo1^QE>{p5kq[Ҿ 7\g4ŁЧw⪇-h,N2vPKcKlc|~ ~І퀿|'.Z uq^ڰ |Qӂ'_LJ? Oo/vօ's> 3}!5q̙U77ßqCy[X8ﷅ3IDD DD4Dvᅟ~ż w1(`݋/Dʼn%4]qmo",F~/!AnB~S!AȨ4ap하Fvv6rjxC8lХڒz_&11c<Az)mjLTUA-_+d+:;ac|}!MyI?m\vM-lބw~}<+ . ϯk`ݷoC*4 Lȵ3g>]s$?ăE(2P2M<']&m/1ANjQ9ǝg_[cOڬbX1 >|LH=k|zޅyM1 X!$"""""b $"""""لCFf)VM& """"-O҆6w *,8kq>.>: kYAwx& "H3@@(`{Aذ! 
P"^IHo[?@DRKLvg}vޙjZ4J=Yŧ\ޥOgB("""""Eek[Obv33wS,)[K~:ߢ%-|[u6Y)pg{N g޸z\d9+90w,}#Z@E3?Yg;2wҡlL ʒŜzU'Ofqߚ\Jl.JN}p4oLRM<y.B-=g'}ɧl'&kZ0{1%9W-`.twc}l yy8+_0t6[,g 3%0z{x5ڻzl=s)]?-|ůl5U]d:vjw?x)|5n;jXXTRi#Ç8TޔrN ܃8PPPLefmp?UEDDDDDbX^n9ῃk(0o$C]}B8HJ,l|G X2>\A*y >q7n;S3pH,ܚqNAr;9 GJwUq̘wo/6zv0z f,c?e7l%,ó)ż7~+Bx>=;3c>v5VPDDDDDTvQ%LLڋ51 4OU& s6,>wmʝ1A UEDDDDD|RQ!BQ!NNF ;bn-EXNZ'7b0yl{jҾO@#=}'$.Q!S?<5ZBF19 }Fӣ6[LDDDDDDTXiUGn}x _.|_W 7RˏP7Ni{GR~ra2Z7*V~̆#]ADDDDDT\aOkHg/1;ފ}t0qqLN/PMή4 8j[Ze:DDDDDĦKY{. ndg,_3-tSJ g.@%Z.ӘQ- xQbgNbH~VaŃ<—#esE1E~׀3PCeP]V 8l_eF#dee)嬜E9+g,Y9Y8TRE< ag`2?|O8,g$~) ˁ׈ Edw1JGp$k8d3('ug68k{-9(FiT, 2dԐJV}qah;~%f,偛ߤp{,υx}&6?x7*>r,A&"""""OYSw(Q3?.9~Ё#n )bŽgLM>X C|0o<׏y_#0 w5-.idoN[{6XV">Yll,ׯWu,++ (g嬜E9+g9W~m+w issy1Fji|EW!iV*.߮ X/sKQgϡB("""""XD+WE؊iGؐTEDDDDDlŇkus^myTEDDDDD~w ^PDDDDDV| vVϥB("""""HTpk:#/ QPDDDDDV٘bө&TEDDDDDj70s!*""""""b p߀z{NBfXhm2}"B("""""b+ݙMFa U!i@V+L_D O.jB("""""b+8Lbv1w V!i@3W'°*""""""bWfkξa *""""""bd<]+A_PDDDDDmB6 ɨB("""""b+>^w56ԳjؒuZIPDDDDDV|1*1q *"""""")\ɟn*""""""b ʪ70EPDDDDD,VfIg/1mUEDDDDDlŊ93&.Ql H=*vܹ ؐΌ"8IPDDDDDX3ukhIPDDDDD*oSTEDDDDD؇kp6rkF]*""""""u(HK㳸WNFB[1k]  /m H),fަT]D ) lۧB("""""R*kZwQ-lB("""""b+o$"6gV'=ć~*'s t`[ +5d.}`>D |2jN gZDDDDDD{r9x{+;^`ēqi55,Go|ɽ|O9T>?0f"igXDDDDDD·~M"WtW!yvG<F\ACȩRn"'2 F?HMzi=K-YuԊ?`G8v _9 斡mqLGvx2,K5;qj jKK&;sOx3WZe:rEDDDD:N\aFƆ{r֏mK8Fc3RRM wjJ*-'Ӯg2I[#(g嬜E9+g,Y97'˽}8|(G\}? 
ki{x;^ˑJ3K%Վx2Ӯg岓Oh7 嬜(g嬜E9+d:wy\}n!-!;n~  *q/nb>Uat t:ΰ{-9Z.` n漗 К1GoyʱZ,~HVxKԩL]u?>c rX=okrgXPe:EDDDDfO޸& 2ds\Vয়r; xK0.WϬG'bhaV\&"""""R 5٘]ri2mZo/bccY~ck(g嬜rV}"CgYDDDDDDץп]:+uEPDDDDDj{&ŕ]PDDDDD,V+J&}۶P!?'")1M ?0su2~\B("""""b+~K-dIv4*>ݒC9J+8;+ if=#{7נB(jݔʊݹ,|\)(>i=O b+~n EDDDcӭU*R粋*t} KgS\I3^ޞ #1B1dUVPNZA9G/r(6yr9Z}]iz-ݚ_Fi:fJh›ЧgV+K:Mi-xzh'.䏃YYYKT*j IR^.믅+3JX-zǵi`S!󪠬/`K:=¹g[1\nEtk-2Y-<^疰?"Vd-:GmN;e1ڢoק`XD-*r^=plʽT,kۂǮⲎ8fNFӞ]ߒNQE c"Q!s'9S:>*<<ixVi4B}]tqP""""ьIqᚘ Bi>VؘM}t>~<;3;`אtSNq%ieZPN걳6!G,늏.ar cSjz&Ł.r221;{?z,CBގ`BO(P_W#""$lK+dxR˸k3sQXw ?wWTm@AY5_lMgt.͑;cøg`sg3twE<2a]Q[{좈4j3/~[scX*H,6r-}"O^QA9: h3S+RmЯm ":hS@r&SϹyU0碽 SϹ 7G݈H|ƓQXظH_{*C+ cdzriG&g%[=?y}BkfʪM|%y8p7ǣcGm+w$a_9ӷs”g<M(""Qm^3Ev-ޓC:%'d{:+I5ArKd]{0yx4C]*MGCQEDfȂx޽5 팧Y]_֭}BYS'G@O/|6N\sAkx*#ٔʷ;1-ěׯ᪮Ix:} ǫM ,7vPe! eNvSQc]ddʷ,}esJX//h?W!l eU&ޑŜM*w(Hch#;-jJz[?ˣH-U׻fg61m~A޶{>#ot>ijKwȮ6l$\j3Y=pX7j~m[0mtw.菣QgD/e%[""hL[3ޮʊ=hؠrޑŽ#:LT*6ŖtmJRޡuԟ N_CEDBAY5Oo b؁lH٥h63oMn:'9S:>*<<ACGEDNs!N ʫѾLvQc"k}&&H;13{C thuȩiVVٚZon(!b\ynWWWwR("bʪMߒFD w>1mN<- vѭ7mҸ,-jEW! nLeѶ JqaҐIE C}=6ץpE 19$Hc"yz7wFuʷ,"1L-VfN{Z,,VVa4'cg+:psb#dTi"_1qa'R:&|3."R[f~]q\֩~~ʎlL;Y>!?wGfbKJ;3xaXЧ1rߕn|<З 94>.<0f+9sE~x$IA8:)4XŌU9r}`A4fv!@D\yeDWR4;YrkH9\λn6iQ<31sMvx3P)?%r{l͟ѐӸBw?6a}+D)iJ G޽5 팧C{z5WO{k5 WG#wĆ|:C("RO. }?bo X\mO ^qaOy~y5Ay dW0-p;V bSD&oGi#ZjLh/ U "(W3n6Ȗ,g%m׶|JH#rO.΢:_7uԕ+):(0Q|폳NkdL%=NJW[\2ٙSZeKFD_ Od4Vi計4͗O]ʽLҁc`a`͛.ȸۨѰ_scHƆ}z溷[ҪAW CGch_kǞ7y|lvN;TSRi2I[(g嬜|әǾ[:d0u<+g\ʫͼ*{ jׄ¡l|$ǖ&­<6F$>#L؆frL7BXɾ`wƩXu xo?Lώqc#IqJ w1r'KttÛrVG{0lZfm+]g嬜|_Onq%t~   B+3V'sq6 H\/ r> }$"<4ѭHCGE䟫1=+xݴuT^L&`CzDx;8TR@YI9Sl`]Q!idv^ٕfZۛ]w1}U7hG׶NF;޾Ufߎ٢Ayi4ʪM<00Ra4==wq[-sUI\;mGʪ^LQԣvylLUI Ԙ-| -@TED/iKtk/jG5tTDe\,O䪮HZ*rS6\ۭ5SWckj_m$" He3񡣻z֐_ƴy tB_WjR_L OB&C']Җ2~ȩeUpG<-YT0oj[_w~I#){ H3m_7"76qٕUĴݙy;*F&GC|!Eԑh1:GPD0x (ṥ:*"GWrǛyjq#|AgEpY'^vg+l[Z!pwpvB("|kCY,Qx[&O]͖L͇i xud -ܝ7weU&r\#7 Q*""؁{% j計-*(fm<`ΗJaw. 
^4?GPVee!bk~؝SW<n,A툍vW@C%U,ږ H7f@$1mviH3s!.{s4!U4oxBO7`XoP!F;o:] D*6V,{ԝOg޸~- 6WL|!;/KTEDlDdKw=[ iV6%35,-'wq}P07SX[w>WoI# m/.><.V)&S郍8rl ڈ't[7/IFa9K5f Iw1 DPD Jy''(&,>׼/36.o'> 7obeܼmVrg]T}qvPPDFEtڳrO.R "ML”剌xo=&c/bҐ8քħa`ID{pqT+B("bN`/[2uT)|.dKz 7UI\={aWw nyUK:LbN w ,Q47r;]噯wn EU,_NZA9ie'jN|x{.Ž#L|M華;iuZ+ BiÃʊ߹2:+H+(>Z I?r)ze`ޮ!+:犛x8Ȼs{xla<SgbOV1k1ip5ZPDD;0_%KK'"UH+;~V/3|EԘ-uqm+wB|]ƛ6~'hRSsC;Ԓ_R$zOn(Ta ʐ>B9 OY )9i}OB}]niqy8(P9Facr>;=}KRX+1#psRQ!p+x=,ٞk+,VTVPξ/|ŕTݐB}qRPDD(וWe{r[#+i*j8TzҗS\y|N=E5CP:y\e&>Y h馃BPDD;bX;&6ҏ@/" S\w|)'^ya7,ܚEP!ag0Ȯ ~s O-wj-Js9cة|"7]7ݼv}ͼvʇk0ԇa:TEDluQ?xn\Z/b74׻19#.tHm8am63Hͺ7ovѴmN+9LhKã7wH+;6:cU2~[ Sw x]%[wVx}~z :E(F@Ćdzpyfwex(x^x|NVe"G{hvq?_؆ꅷ/i #x|N2 +`M2Fn V!!Dž't`݁|%Mظ FL_<.ʎOgf &'\ɟbmb3[°.Dt/vWRդ_˜ŤB(""u^! hߒCj~iK|%IC:0}9ўiSZebx,MtGeRؾQڱ*""RW 2+F{;Xwm2.zpH+( E{ԕY/kX[ܧ*""Rؗ F:+^<8o;Gkv؝CZA9AB7<+v mJ*M3{+W%16.wQON^i%O.d{ƪd[1sv ԧOg#tCi’6m-kfgx IDATL.51my,ߕMg~=KPDDߍ=00%//O&j J͚l^ĦIv-x~d7m*N\ߣv 4)#hMj$nP_7=ԏ C}7xnx80n6ʪLr;fO(NFBi0UٚZhSPYc/v0ey"^Кc/"Y~uS7 ynٞF=. 
S!v}6\ՊWOa mT1M,♡x}."sQc"X5%3նe3oW,Bi ^ю 1[4t1O?wr Yw䮾 EDNkQ"9|4Մ 4^{rl;LfK_t ԎBk f~Shȫ K JYlȄ-07~"m7}ĺ?i=K-郎_p♫;-Y*P)]ǖss7& 頉E֞=yla< Kgr1q:;h!rn;vդ\IQ!DT3x@Dd3G8z\VADwo.G?T@=\;mf ehLBhǻcXydL<b2su]Z{ѿ] [,2a{5/MA0;')<3g2xaX\ybqc>J^|P?:iX-xnh'6,s'2΢A^|Pɾ`w;8{9RTzfڑpz3r8++KGG=PY97RyxFwo& ]O _܆ʢd)gor>3/|HO+=xKS^tjn!4nLqcI( ۽fû]@~LrI[!9欜rn: bsVlfXH{(:YXme_n G3Wr(:ͭ2éNsޒRrڙ6 2dԔwMLgN(coT:t_JL9753g2`""vjhؘwג[\ɼ{4HhQ)᱅Xu>su2nԫo$<20!p>L3cjo=#=jZUD>_ujLDD͑gvfGfIV 'SMtwb}PD^tdҐڗk\no$ VU_Ell,ׯWuLC8rn~D昳l噯w1os 19u<}CsZ9\!>=I_Y%9*Fr<뼘킇G؁ɬk+?[ҙ4G_ePDl슿3Αŕ,ٞ UB9gn*f$R E\Zfm=@$5/" Ł7\JZp^u)-VM5B(""]*:7ObN9%ds  ,{:R("( ev|ͼMi1K*M|1uU*""\pm\@CGφjeDF>ؗl^D/iK-ϲ.Ǐ7sU&pUED9us*f* =JUId]qu4" 7]o\Ǫ1[ }"pUED?Ww xIn6m-kL.ƫ]ÁR[֏,r+oT!faG>0^CGb J͚l^D[roؒW3-V+MS'GZiBi:'iP L_6ƲqaB&ÿ"9~IJ,ډoɨ݊HI,g; |;a5*""b|\|]4{ O?wr Yw䮾&%"Õс¬uY'd<]UEDĶ\ɟaؙa;CG36l<>sgI2ȞȂ BbQD-TZE-nuw[˥jjZR%TDd&r+mӉ?3q|ysld4mjc[KƄUIȿry6QS ˝ ]s-.2CLXu֡MMJDdcVHKxz,%>mp700@(""#O˳Kb[whft aMppRIHg<5ձIt IS սU-Lʸ߉~; aa"rx,]"M<1iW-pg"+C[P ""r{s}|+;1~A o˜,%ޒNEZ{0fr -*f[ |5t^PDDn{nڥ!9M}Vq.G wՊnZ"r۩hdVH2LXuV9KfmuuPPDD{޸*󷞼-Z_&X2^E0ȝ\,v^4(pqkݔfgn<aL6/"V5+ԇwv&YVz5.?GSx&u`RgoDj6o6!4w0a00x:;Pӧׯd47;m<իB Okp!w)"r W'^jL v:cvz@DDnW4Ę(0[*l?-6?vڅV ""VQTBl2FN09Ŝ"z-g!y}PQjDDDPDDO+ԇGTN71BйL)Y,EDDPDD^TwU_J ~iE:]:?7KRb2c\==hyQ )7jгi5ۖfhvA1c֓<:颇͋H{n <nb.(fjV]+CDDEDDn>17d輭'p^X7M=DDDPDDCU-*.(0[xz}u.wT|Q +83{If{nLNh#:h(""Boz7 fюDvhv͠_a(FTEDDEDyc8ռ:&CGW&T6ld4m""" ""R83塣o/%V&}r :ԗ#SIQ E7}*|3e3 oǛdTaEDDEDflp{0aAr 9i^[Y/ ĿIcկ-t>ɓfFJoe]AD"jI,sݧu"nNfU(%:=GǙKqut_3,x hX )"m ""Ԅu[&l/܃+YM/HDDf g!'-8ǖ.x]Y6)>ON+&`($Re3*"RQUr4F yC4g# uUqDDD ^;LDfܙOckB!9_y 0S`Vr&2.9رC[ )UgY~!$Q::6M`#A50ERjV8zDMH!.t\ 8&ߵ4+^+ {x.?J6P:/ ziup;aLw3o0k(80&an<5{7ńѼ단m- x,2m """""r{1XVМSG\;glFꦗy|[lL,n7^Z>{P;:Mg`"v< v•Sx=P̹2-Q [+"""""@("""""" D>';cQ=E'>ƝU vxEf4vƬ9@T3=w<ùsKoC*{a  3ݳ*%dd*8q ]IlC## 8_F={Gt,:`MsK̜|:c4yR8&mœȂ1M]Q^e>>#bhq {Yi>?@ϓ1{oT9fuX]Tr`ɹ@~Pod[Z/|o;yl ?E9N29WR`Yږ-\?uZMpgy}&bзɤ]HbScL3xm {_ǜik7d(y[.)q CGg)ȉgQ 2SR %+w_oQ(7.LG7M{4:<6-c|a&*.~%\Qx98J}ҏj:&ܽLy{L'T`'DvM%I8lŽ8p,C1ʖCDd 
zD}rz?k@7m"3uUjc3~9(i׍0 `hfMU '^ +m  w0[Cl,vι{(}l܇RM1?ȚɌC;g[sDXEII6?{yͺ62!KH;s:x\9Ij^+YBK)랦 ^m Oـ?S[u- 7_.ػ7SΌ?d%!&ty}Maz?#g8|UGAsf̚Ĭf 6~oؘoM=X X?s.TRRm g焫CewS%: r#7,㉚.[>\hN2٧8CMdObK?*2v)I*5){&LMz6&j -98;֛whJD Dg~wɺ<ƾJ h!otzՅ''=w~9-9e?vcDz1#EY?OUR@V#velQ+AtVi߭`X eg{+YỲkuZMdF$nz[ڶd'>G.x4eʑ#֤=6d86{b.$b=F=l%8ɺj=P[5lșcD:2a DM\7h!{ϥvfwo ݴ.4Nᱣ\D8)M.a|.=bMZƸcYz<5kkNfRRO^^yT&cCy}#X~29z]]h_][rn8]WڲYr8 v<@7lΒiTkUQ.bؿ BڮOO]"߰%] ,u&-Źwk.G!Diy >IGZT ̻4w0`Ħl`bN҅٘1uM??ɡ9K6qˆ0Ie\ir6>}:c'7<J繆.9H4tRml%WVMT6=ׇ?K~^q'/1iL90}N3~UOsy[%7;cj /=; ;۷*JuyHy2&;j""""""!]!Q BQ BQ BQ BQ B,Ӏ+vy\j+?vCf_zѺQ АVV+1tGbP1""" ""rfoz7}=J^^Tˇ;zOŢ}c6gonϰvԩ;t]F`_$VTo1e.,lx&^d+Ss8õ}*ϱhuŞ̒ ]Z bS^|3m?sֿNwӬu.O-NfhBTݔ&_xB'.f˧] NFNL} gHKcZ}|z >K*Fuc~\>{)5)>>I9PR:ao||=7:MuSt|6=KG!ΛGRI*]ljb' \JLz=̰mguj}Ȗ{yu'} a*VK8c:gVa+;_>EDDPDDnƚO+5&"cϜ!bkULm,ϑُ1p>ҸK};aך-ÀkpgtUX^\Iݺ fLx9fRbV51CJσ_Mk)p՜;n!Nk1p)NojquߎQ V!eG0TJ{Ilˈnp28P4̌agro~?>EDDPDDn } ^v;|=#v}f/4愅 ~n >vNȫ2N?1`ڨ1c䮞IDAT9;ㅗWOJf!i=p]l[fq){xff{^CW ?G+{\Yػ[Gw H3fBJHY97`fap醴: ;wRN4bJ*ZϪrduۤ@Xn<:Z}eA"ej5VJ6`c a=2YDDED&@/#m4uke1A/=I਽T{b M?N-O6Z3izCWEDDDDDEDDDDDv!"""""")]!Q BQ BQ BQ BQ BQ B >e IENDB`rally-0.9.1/doc/source/images/Report-Task-Input-file.png0000664000567000056710000031262513073417716024234 0ustar jenkinsjenkins00000000000000PNG  IHDR2bKGD pHYs  tIME ڐ IDATxw|UsWFž"nYۡZ׷ֶkOmZv궸7.Ȕ6!qGB q1əsϽ>e||̾)|żXL """"""U T """""""_1GE """"""UttQwjEDDDDDDEDDDDDDD]DDDDDDd'>""""""";ՠ(lAkqZtEDDDDDo5b, s ceO:dO?qB'4Q|gb-A|}^\J}(>֥̯\u,A`qtEDDDDDd޽{_K ~.tԛ>}ׯ`ܛbmZ|z"tzpIGRز3 ;;QV9I)߀2zZ.s/!s\ o@:]ȹ=ʕf q z1d`)uEDDDDDN>>i?vkRنaz<ؗLmo-KSN)?hoEjnh.L5<`.hi8םCK[Ot{^wOy-""""" bmƐiia]t&Xfҽw>f`cpu0 ,%|ҩ 8.[|[4KQD3 Z2dG]|Ri?Zp\p8FB@:$7D41XlO[k>yfR$Җh%L8b!>dt'\/L$%n8sp0H~d@$eCX?M"! 
w#oSm,J&Zhi u)4A`q #= = """""zR$~`-p>| %FP0v<"NݟNN㢓0!{pۃ7Ga.}M>q ր첽p<**>br"l&Z9{fTP1wy)zDP(]S+xIۮ71Sײ蓹9g+ΣbdQj6 m ?cSgWP1{2pQ#^Qf1g8i@ʏrN_n|k6/\1#^̼ WkW~Fˉd=EPH4XCшljwrf'ڽ#F-<3Fڶ73@Ukڧ$S7Miy+N%n^BQzݩ̸<?ZRMNзh8~x* `+1!2T-xw_Aod"'1G6.lVk7㷱fet͡1$ׇFfԐ f+YO&XYx5K*XT {t\wmڛ|@eɗsDr~~ޏy~a3x9+ǿ1e7*ik4aҊ+8,iuڼugմssqEj+>Ä 8,NDDDDDd vd u, 24J/]2&=nj[K&1 }:nm<g}`.܋դ1:SY\ri'ج-|\/ϸ?_v>xΨ#ɿ' ֥/~k1ln4-͵J&>8kro@2ٽ_ <L]|̱M^ IJtg2n= 7߷{7yfE:&""""""_mG}rP7L4&U'p\\٥})QBhK5$;go Ͷ]Ke{</!45)B: 64wD#u DÝ2, uO>e ul nkIr{档~1xV/oΠN`-ۅG|Ep*'U^ajgxJ2n}đ5;u>#ﯧl>Hzx$0ILh8K@o`viM 187:t>qpCi 83Gg2Aq„1S#9J-7Dݴ .﹈Nۖ~Ȏq:^Ghcᓎ؎;8޺t+.fRR?<]m{u6޸^GyzfAy:x͟C(t rz^=4^a~7mu/|623$}G ,Ċz1O7&0iM f"""""".;A  ;kvqۇ_"""""".""""""" """""""[DDDDDDd'ttQ@Q@ 4HN@5""""""";C@okkS)|wܡ*t:[qi8tQ@Q@ttQ@Q@ttQ@Q@ttQ@y۲ҺuH&A!PPP1F""""""0ZHRD"SV-ь}RpݻPDDDDDd[A$I:kT'9l뺴JVV EDDDDD:mP.cpU"""""]DDDDDDDEDDDDDDEDDDDDDD]DDDDDDD]DDDDDDDEDDDDDDEDDDDDD}"B[&bɤRϸCxdhKsE"0>mm%ى׿fEч|c/ȴ+iO`dl̖VWЍdin9x&g@w=o.!c:9oavC6w~⧧ !wk"?BsŇzDu2E[ה/{m{f521齆ġ3lJ* C4'q@RPҍ0D8BzpcbXlm麳 nIg2[1cm9,5c.~Wϩ}C!q7m6yuC. fu_ O}?e8K9lSv<(5.)Fyi>'JLA:YEDDDD=}Ɲ~dy RcJZ2VQ>CVn>%rd5ؚ/BQq!EyM.>ukXӭnk4Sߒ7yEG[zM䆡zڠ7=>QJ SPXDOPUUOɡw~m+)?WЃQ2ԯ^Mc(n|Colv]&T7$݋aKêUԛ\J$X\$VЃ~a@1]d EF]<o őJ ah Jj tL*A&E-f\z~ KZ CG_=l<d{uiGo@nO>2#8GwS&,\:}9[bŔkIq OܺBμ X Mv.7~,cxH0W5  P~{Bmϼ4Mzkl{z&O>3(k?H~rޑ?_g/aVmh8F k GoQ#{nd9+8V%,""""߳3)VW-p矼LTTsPof/b;Cv^x{9U - (nzˋXU03Oː< f4gq ^^3GR(4ں&2_>ݢx,y#+}wcСyD"Qbmki+ݗsf2w\W۝} CDC53kuriQlkx7~ݿrI#ɲV$„-ciUhwKj}UdW;DL_i^ {0 )tõ'&p&}*i re>=x$ '+o"9|w- V\@C?dvPֿx`z5mdgY>zajt*?>[*|if," ƫ^vьEw%ֺf'OcaUm!W€{>e%""""_c}gV\iH>0n}^}a2_^S%XhY3>M&}Ѝ-KtcuI: ˩Ɍoq_v+’V/YI]&`7amtK=hKgjcm"CY8])*2w>WYI0> ՝nKtb|d@q }(ɡrJV׶Ж[H߂UMcvaH(AsfVo?ʌkIX\\*1{ȍmu,? ڪSNX2wxS8ĽЧy>"{|R~KfauYxj.ӦD kW,bM*A9[ 1u%'+ʙSGв|8FçtSihKL B|4m~nw.QY2d= ,9xT IZ`'ZJ}>ǟØa-ൗ_c&|Ogv΅%<J3x={2JfM{L{b݋9^4c2\݆],?Y)=x57^w+h(I~v˫$\t۵nz,lyg'羷XR~7j(%9>*fּϟs͗EDDDd;aC%Vj2P8< EG$uI 䗏P#4ќ\EXuI wpw^#$3Ey4/XIc2aKĪUTvkd~ H~33 me֔ꏅ{h߽k~xۏN:{#gۓOMd/~!) 
1x[KxɁg_ozG<=\W}=g\>T1z8J~}`TyIJ4HҴTV>ݲ-_}~'VZܟe݈8dQZVnX1ioV%ρ- W{7|b kCew`r xg^ ]F"9E>zO6oM$lkI$S>}R62&=G$h%ʰqSiR>N,c0m1<4?<>umIPPVB(Q7M"\Ψb_ɗȷ99p٬UJw+5t<ŏ]6{.c>=z&Ʋ|헼@y>Ac3p(Bb"󒺮DDDD.MܭAxJ>AlF;ܿ~ IDAT:D٧嫉r9ui_ܝ8rJ6`<90J5ƺNf3.Y僨nJ1aKJ(+ {rǰkC(~0FEX6N Qܣ}b}Ï9}#of9AXK:Sa'>&+VRxuAYI.n&M*܋N:zߡ,DVf0'}+^ =@%Y=Xs "^{5[jn\7f󑇋{oP_/90k]FUSD=+>Ƶ[ _oSW bt8-)`B779c-!Gy75Xl@*dȢf%'&A#pֻ}L1&Y7JY3xtٛL7Y :'mۈn-QԱnKaI/J <ʆ2tXsJefOE?i' /œb"a}u3v;Z$vermqp 8Cs\,vZn ]-g\m |Neu?/fm;'Xnpc H޼ZvwO:0dapq(\]V""""3t&z& KA-$KVq6к#ib?C`3d命ś  TDDDDGE "ƒ[\4PU6ߣ|DE[ gqпgd/$XfkZVWy3P wQ@/1[V~&Hl_bƒ dXAgvh34'^Xu?!w}7Pu3A^%8i.WX7 Ͼ_en'԰|Fj)"( ^3G%{Ũ!=׮bL]XMCk=N;Eyҳ[.k?N>~?r~T^~u"3וP~<[kqK/jHy\_3 wۇ{??1i/ڪqPzi\1&FRŇp޷dMEDDDD]D,Ɣr9ѿ1U}v?C*2os6=Sǧ}{c,A3s>ƻgf8!{&b*X5n$/O3yN]ᱷP9UVΘcg|H'c'2\""""."$+'ha .\rx[fCvt +}Nx?/3cY!tzٗo{ks<7i4 gX krBMG7;UdmCgڬZۍ搓zkMGdgr7`0|wymT,_Kʄ(()g^s#V'!َeqѨJJd;I$撛eims\s4~~; |=ms XM-p瘯Ź~ WDDDDЧsϫ"j /&m k)fT.=hlNВVYȶt8h$$q׫5q!Q+r&؎Ed;z 8FWбooN"""""s@WwöZsEDDDDD>s@H*)~U.@DDDDD>O@7.DDDDDO-sWT,ɔOҷx CKƐv ;xorNM:U>-ql [z؀^}yRrAyd2>)?1l{7珃]4YW[eS_~ xzۆQܷ2mQ\®Xot5]nloruWKY܌۱^@",$fb-CQvTb-ȎQDDDDDs]XeHyy6&&dQMekG^$BPBz+͡$⣪IP-~C6(Kj;bu(fX]XdK>feuf7|ҽ(>a{bDb48)u51'Ÿٻ owOU߀\vLu\eeutQȎe5͚l6.7tYc-6 KD9eLYY_i|pWH$Hf XK:C2qҼ"OASmL6<֯m3Tq>`6nv6Ǎ>5i4 ù{ Óz찇>tDDDDDd˶qvctcu<>yp}ܐfjX@|U%g0E0k:K>;_7hkKf}):]ē| ٖ9ti@MXNn.NNO?kxQc+Ml] ik: 6.ېダtYn7nێ{c6]~6MQ,04[ 3IǁLkKFS۳''M_oaFKHˮXZ˚t@@vczg {?Z7'<%'b0x]:k[Ij!Сa] ̘]ԥYP`5x`VW|怾1yo9hX ! DHǐHρ}'?}`uj{1i:*5i})9 SO[fL"N{vk QϐrH7Jv;ۿ[B ?^-C{v'ٱ ]DDDDD34amK[VgiXHۄ49m=>)z%8$yP,ZG妙b~U[O,úuqbXV+&y4"}tk@58 KfUfDzrSqZ\ꏖu>UՍYF1 a\|[iUY[~A?\:k c)2 g:5ma{LGc~rkm{E1m< o7v|DvD?|gwݳcc#TJӹ$!++쭮O9؆58Zێ lc7c.u1p6manLu Az g2CGym\'kZK}}=뒝h>q|=Z]1]B.h)/M;溏㸮K,SD6TF<Wgݧ.otjmK OonQ_V/}=z?j.=@DDDDD>{@@Q<6Lǔg"ۋGDDDDDdz9.ʢ)A՜"_T(+΢lح !g-|_?9EՍ]䋼ٌaP|=iW}← c PH-""""SPPٷ;Ie|\c!hDC8X26ҁ^2{۲Z9ȯƮzK/3%K9pnmx*>nذ&q& NDDDDD]Dl~'' | ,/""""U"e&DqB1EDDDDE K·b$ԗ DDDDDEKeqpVڈq|¡óX11.""""."_>X1p""""" "հ!]NDDDDDEK,eǞGD=U|Vg"MMMcB۸E@t`Eb] ư~z >;i;d2Fh:&q(ݺuG)l5%O]]yyy8Zċ|^AL&G? 
a4_s?C)r\B.dҾ /A!DDDAZË|` hxGs籺|֥Қ sN_b}(vg\䒉g3n1E!j]ǿ2[y58j&09 ToP@7p R1o1~G3\Ňg8rㅉCҋĈ,Km Ux.h,xpX1]Bdh,F,#"Ph$B$m_ a`\pb1XD!sW9 vt9t+#9`8h.Z1j<%XƸ]- {?Db1nr~э740X#Pc0.`q|a%6EN?lv;hh?M⃇0f8Gw2/[*]~W_mN+SƏKaa}^A{ØKZ;kЍӸgޣeYaZ<;{aGEyW~ߺu˷9c/M73Wɯ9F!gwִ|\9:;XA ^̾{G]Ș0/cf:E4icOYNhǝ_dq`t2{n?>gy2g,~krkS?4$iF]Sg/O^&E!NLx~~Gި!S/wS<ǣ>8Cyx'?u' q.ϸG/19!R_x{T}ok͏>7No:!HUk~C2sln )cM;λ9[W\뱼x̫[OI^]2,yIjh ` T jw׳oa} p"eo=mOp&2u3)Gl[Ɗ+c:i'Ty> "2[йMyo?)$(%{kpQʣKYpƹ1Iaٵ: ,nKC.5KҨX>>fyԯ6DE}>͢Yt(3j&Aꚦi逮iH<$A,Ֆy n)lg[jXe`c7ɛR…CmC5;!2Nи $Ja[]ok[aQ=S..HTO<'گץV4GK<ҏLGDI#SoAOP eɌ$µgbҵW0 / dgƛ̄NIxy^Ǹ~mگ%QlKWF/oWJP䐃iŧsƒ(wVzr9M4Mt@4M;jP=67 wwHlk,hpʈP^Aɜ*]) *c7D{3bZclz*D Κ03Fu&3A!ljuɪT DGHI`"&SHJLI&vހ/&X.d.Ʌ!ӗ.ȹ=ry~!n@AStDפGWӿOoM4$*+{R5Tj7-u)1U)>k8'O?b)w3S tt,ׇ']MF&Yʻ.h?GRDNΛyr5RZ-v7~=N)@qܦuP|q/`vM*p56ۆO,5hCQvwZ IDATh>\ĺLSQ:~3[.ug cۑ8^(l!*63ѲM9n^~ fpǽgޔ}M a葖:]֝QA +ϛf:2םQ9~)L"N͊8 1nH4M4X GY`e\ 5VS'%t5c4WضCRhOkl|lPcya"v!e ;~ "  ;lsr-HƲ9fQ()`є)X鳋8i`OC_aUkl)Tu˨ލd{v֗G!`ݖ0E"rW%JV V&Q~ u\9o2{Sv5#ǎ1_SXD85UnMaR|vld4;hh#5)*nMp[kādAl)BMɻ_l%Cdw)nFڣRTHZoKY'l%0em.+l&Ll@{n|7E4M4M;*Rķg|̂: -ό)o9`S˱dLZ")+ !Ӳz>5k 7&3-7t x&+"1 5ѧL\h2Rgظ .~Ii,D3&\َ?D96Cah>BVVHtvX- %;_:kii7Mʦ/iILRmAȃ ѰJ}eaI=Tˆ1j-+j:m7>].Nh dž 6Tv.Zn&-,\Wn%3L0 jYv:҅Inb2,域̢"cU z3ff._G.00$PuYUNlgnbf4\kj)xY)1дc4PP$ 4M4MӴ9JS$\~ BǂL pNxE)R[u_oӒ9®ݕ,]׀ntɑYյt(SJzmIc _N|#6V=WuyYpIm\1צg;E{~SvxD>T*} w3w|6 #Q /ڼi%ּKQN.}w c5}ȏURcbaiEīOgh g`V=ӧ};s7ֶ>uюpn-~Ɵ}qB|OW4M4MӴ^C+uٚ _a^Kq<:woYT{ܞ|L=Sݞm8s"D]g㛞j@Bh4Jjj*Gu!m۳EXyWy,b4M4MӾ[jkk85qք!RHyzU~kj>Cp9ri9TlT:kcd0VTiiikk D"Pwiii:k4N3ݺCDWU4M4M45MR̋[.HBnJ4M4MӴ"qU} fbAM4M4M^#(gäitaZˎii}5Mӎq´-M4M45MӎZ$v\1pcqo,u ,'%AVXe"]<_Ӳ,"=+z2Ѿݴ,w#>Z*pR8r2<4M4MӾ]A}}=hT)M;JGw/ض)Ll{SC@tB^㣼Y)˰ ;a֖cOQgd2'5y=^t;?0旹(V᱈/ްh6~7e'^aYGp}?wz-V^7%-Ϩ;1j+w\;nLUtGJ4r̛׀T.K,]WӞ ע&bjKKx'^]Q8|8i s`,yZ/xsF+ٿ9;kk?70b,)ǷGmx͹ybƚ'tM+7+A(-̤#!hؼEf0^E)5M4MӴom@o\ˤ J)Ώ$|w $͛ @)UocݦnNU9( Uq)47c_.n HOPt0It7t5{xn:#$;6̡,qѻ<0LY4:[%\dMT)dzQ\1פCq"ws5Ù %Y1+\/Q?ȉĢ׶ Ӡv||ӻ*F)Ohit9!MӾ2|$-B)ԁ-B }n~({B)Pq`maMRZ.XWPә֔,i}Ā00,7$ o ^@+^=Obii蚦iL{w;dƼ눇QŪbs[Ң{R9L=څLwlHaм6b8i&rP?~W|u!,#KL 
@P98bclOKJIԖ~Ot4M4Mt@4Mz.]~o1qhx7K,luwďO;\; l$fe4hiK`l߾lӠYa\sp>gg^3)|:OOa#d.>}9g) kC)WrB"c@sM4MӴcP_1 z>RvDQRSSz%DkmZ&NaOonĉء0֞ێG S"pqƮ°DU.XݰB޹ѕ' a.Q7ػ^icTZ^ Hgؑ42HeqX"ql:,6^ ,dK "$ iئ@>p’93yan-"mDa8>8,;L~BJ*d$9DB'aZlaضH6Ӳq,|i{x x{We_@/#J\؍%?:k4v 0J55fD a#7cfSFPu N;}rSzy` ?|=(:HП#bΛo2cNT8ǟĐ-Eeۍck~i9yŜ1Si8xùii= #R̈ny8jOPʴy 1v~ 5L.š |bX7=zq&eCf={t*c;,ր܌ӿ!pY8k371",`ے,'}oNX@؏Sl3oLSFʥ|B>/IY 0(5ʊlf4J`x\XSߚS{+ň&HA>0aӬZ Ǹ;V.9pl[GV5nWܕ1[GYh6,}RX|#;$TN/M&qhKY"tٍ~ڱ)?pAԤqE ے2o:zP&K? L+ۼbݾ=Of_}_o0$~?!}jdLdۿS[re#v$\'#SђdK4},+7u1Bkz(0#B *ZӫiiPL[鳋8i KGBzR6Ib:f"LqgYBnM ZٹoA͘|fև_KN&5fdGh(`R{.[HwfêPD To`"m+j֗o;~5[KȈcچrԎNE(߾-˾_Ojj۰'bvvB@n)e`[\\ضlXΒQƍ ۵<d ˶gP_]ɦuxac$]5-.NuQ Jm[n>:V"E6br^۸ZMp1G]ƝMgh=st%*abUMPLˁdˊYw&97[y?sN;# ;vRH0Pխ/Kl7"?Sxz!hU47)&U~0X ukzU4M4M;"qA m_ϛ/[{)p2>?eX6C{+֯Gveg Ӯn J˨8\0i('l:On{鄭ZM29s ]} \ \6`݊Tve⩣qu\ʮ$cH]ۑTj2ӎ獥{Pź) !PG&q%'1/k~|ǑЪ+C װۉ(KAg1q\Sf ,x'pWpqlQ.hb dw*"ɦ]ԥ0i)\{ijEOӎia 0,,31Y~>=i;\r`?N>T@ȪsNaܸqL_1ok I @q6>OrϤQ :Bt-qx@BXK!p"ɤ8GPN<]g>d|ֺii1('0LKxUt7I#J p/@)R D/`J@n.OnEZXC](VA8DNa>!CnK͕ۨV(Tr3rE#H$$&<.z5EFACT綃R U!pҳIKD $2R̈Y8*"U[ڤZ( Ź`0sE˷hM+vnMڊu1Sd6`"46$LZ>$g'#PnJJTRZaײ5E$hq}ѱE&"}Os4TFPKm힖f N1>"WS˛ןcXͨzԬ>ZsC7>"F83?0vX]?ѣ-mҾ~꭛S:*?ٝYq*kQA5M4Mt@ocdӻC]1nQJbZF鶵,ցAI^%ׯ;8+BRWYM |נۨ$sip=;4K}C[c 'b@J3F6 UT!#]I&_0^LЪC{FVls93'OacI\0-I׾+~ˉQ/6'\ؕŒ$4hѭ-e,_N5+Sasf_}] L%:֭MN ~8 ``|0dGtl4X"]QWsO0Y'q$PҠE,0(DtoRdaa y b>soP+>BhsEʩ+Jѥ{g2C|m7 [M}uS!&fpu-Ӭii}B0`b%DkW jVZU)2} _߀Qx H蓆9ӧСW?Pw %s  03s|8I7 NGG4s]^D]\t?n|ʿf`(([,;b+?N0tpjOaSع'.$Imu=bqE \e犥lsݮbɝֽp]{fݴϢYRfpI3"EFƍDeO45߬h\KIRڧ]spI4ho|B6y2g)=dZDG ::Ll?>$BzzK/*%QR!9Nps$Ҳ #LIiQ}-QHyBILE\PvtinJ4M4M; XSSC}}=pky"URO]>%^[b.sq9x=oמ]i_ᄀAђr8pnFIMM%55.!m;7{X4M4[{&ddd:Jk]t\Os}q|׫>_{#?>[t04M4M.0!4Mߞvm'e~;N&d]"wFa߸omX"0°E˱TyiiHrDB|H`ۘsL_]qT2VHA# s²?mQ9OUp!V#" co 6v8B1q~[ןHf1z2p"3Q@BOii8]Ӵ1Qr'l57/ݝf_}B, +)Dռete|0'o>{XgL=^י)/ŠQ@?V^E.)II.~XǦ2qkDCaܥ/q#_0W5kJ:yFۿKg|$vF=g'rړLb7ʲqһss0,Tɀ~U^a ޟ}5ӸeG2{rҍCɌu5-/ړ?!i4XSBS9ĠEh<_؎4kI6y8GLnlBSAϞōo_3h,flGw?5{꿄~*#L/2dyn8"?7>6fw g?^cٖ #rhVv ξ1zbe9OfsƝw3u&\zvM4M;tMӾ +tTΠBIHkӚ‚ 
\ڍ>O쏡i6|FF%B8O7d2^XG^מ6^bSU==^ ?^Gܓ4ENuԻ,}ϔM5©x-F93F e8E=?FTXq qn@na$x#80?V(%5EewOG q(ddNM'U 0߱ӳ b[>U&1i-$ۋ7H1ǁ$I ^6]NOWcqQ6-vG'KIa!aawhHkϋ Pi+*!{<>GaԄW~zwĥk/LW`X4/iʣMTZDnӝ #`Ǒʣ5{Abܴ|a5u;RJIj|hp%%RaL0"ȡw߾.vV-&w}H~S*1Hô1P{ҫc !q:RH.jAݔ g^":xsEpD #<7Iޑ/A3aO=@V׏,?2z1`le$F(>3wAldzRܞC''țhkpIci}QA!_(~i~ԯ\E0̧N4IoZRARgvg>2X33JִJ9aSخ)5eTpܐtkfPF&ui6CU֬ݍJ@BW˚a [wl5uNl>T@Kw3㕏٦Boevm y?5Y_ΐSG3d@[VW'*# BcpplqP?rk/= ofU#ɌEeTEY4vFe\BT;p+XNc2bZ$lҐoR(_a˦,Kxu%V8Bj*3Ev=-HfBQ ǗX^M4MӴcuh4%RJRSSSRվ#A` $~jκNN띂ZbY!'-$Vs;tR;1u6iY?%|6)|SD˟Iv# H*wQIoZU'Zq#7sg3VtJuIOKHUã!`7k@whV(wq#}q9|Gdr` j޹rVTVruC%+\d8n0_2:sѭrn|_Qq o}6كGS8gNm. , XEkKL,ј.QQ"Xv&,˲S]EAL쳻33gfιy}1}>,,ǃلCk~m@Dzǜ:s1k SD#{ϵ=</_<osK|/C„E7L_Xۑ'H8vpG7{Idχ٘s$0_r'Om-ןEUBP(?-P__OjjGuBqLdj뿥\H8D8%qpI75|«,-A9I;JҎ%ڔ@Q| 7!EOBN79|z-ΎmvN#e~9!dS7g׊7Îk,%t z Bo@9S((Ăwq9)c2" ӳ t+H ZKHqV%D? eIvBP(kFGcTM)Ljp8LRR2qѿH=fVh:HWPYp( BP /BP^DKX+P Bɣ*P(?i]DؿwH{+kTZ ڰ}աxu}Lto}5Ua`%;iPUA2:PuQƶr]P( Ej]PD-3NwcDڻ!e̸^Թ`-9k& 'O?ěka‰}K,Q BP]P(4 @:7Ԯi+g=8lF{2cll]JHq.鼿ϤS֥uxc+,%HΞxKC #F2Ucvh,Tly!G=3iD&ueRVm S$ţ/pS-Irad u7㕷Q{;/5c&Fϒd/~>6)E8xqcJ/$q5UI=87%Vv>3UӒ4d"kaILYknՅ$}G1?~>}y~35/իaڻ]о]*떬L.:^,v-ko}NeNd5> E$lYŶ:Aѳs<{\Ά%$w/Nl{/G}a5L?Ao,.xwڃՎqDׂ$p]Hxj6,a~]UeaZ,||8ԅD=ʶyu tq'jafŴ`i?`yEer\|e0Sp٧;պj8$hZ=[[QEH6eHƀ3dzkx3ٵ/{VegQFe[+T( 0(wBHgQgޤz# Ax:f7ŌoGRd)۰򅻘> oy>LIlJ\)719>zSٷ?aFx{af[&MMVwi;eʤ +T3믿cNr2qKMqEvởxfy Zʙ2oq&%b.y#,k+/byf9G_E΀q1qD1K;a&#&D<e6⳻ON?64lϧz˙vz*dcm;v$`Ŏ+5RrrIǨfޯOѢ j\Js26H&_`͎*ڤeOq\l\ S-0ݻ~MyX,}zt7ソq2iX/|Dpv//>v! 9]p"6. 
Ej7- v[Nn4@X!b F3G gu$G퇙7YkňEB M繈# T\P(@C4f9 4߾/p 7qoC#-Yb-}xƁQ4 0ݼ{P(69 ʿ+K$z+DG~ϔ&öe".%?m0k9gXW^C IDATE: Yx;sI_n*NF\|RLaTDHI@ W & .H7ј@7 ͥD.uDSظ4O`cD+V2Gy{.b.DC`C$Hṳs ^?)@éIUsEJP4q?@ t]4>k!ŢDJͼ  F~q)tXGHt*Z+0xRK6ٷ}V⯘ˊnR)Z~z{.E0m`$gKh\IT%bny['pN4nb >`] G0hQ)]xs ][￟K߇ٟ+|>|u)0^@r_/B8^ nn):0=^ ~<9b ﲣKP( %ղzW|`-BhTv])M7pHxCZ=8؜.fZqۅ˞zEy^:0 Ko!*} ŜYdȳ/e"7_7,>~Q!$u{SP()5!>_"A ;D v9X;\lJD)_jk:Jjk|RAZ뛁VO~ZNO" qU\6b1C袥:hI»ht8>I`&7#.c\$1tqEƲ%˖3ma=w1#=a7YS x.\b6x4iqVր(I&D4"|WWu'LS~w1=Q`[Zc iB8D71aLHlOg^ 䁘LSy\>o>S$uvн | 9?p3խl_ ƒ[Bi꛼Cl)Mfz"N$H("N|;nLa2=9f3Xø-˟'8֥E8ő'$lK\;F$/M`b?uk`P/khۀI^R2֭dcObX-Jtfny~n_Y? C6VBo.y=[300@o.h8vXH D#YLy91hgYC$hѲ+}fr;쑭mcEt,*doWwR`0Qaܺ6bN^_&^^P.]b u5A_ύ.=@}W1Q*oO,7nOaD7c!V,Yg;\jt]" Xbd`:Q[+S'кC7uϤrR^ WȨcaU,X32sfd/s.fs,q=`Ֆ:‘Z-Ij JMPne_|ʌ7`[8.\w 6۩hա+úgQj-[Ȩr\RuW`U9F"1K<[̆H>{ oѽO)5UDZun'DRR1M!iJ>&oGUbk53D ]wٹe&fR\ҒT/Ym/!Kb&ui+2 7V5멨wdө]>>Pgʫ|dBԊVti@RaeR1Nj 6$fСU6^]Cڵl[ݵ0dҶ0aU~[y%e+ٓ܁B}C&ٸ6fK % i.vߌٲ3I]XYGp-7o*,=I%DŴkAVy_[Ϝ7?N|Z*p5/)%+ݿ9Nm`Kօ Z On]*ظu 1H5-܁//_.v ':g6[~h.0-%ěOǵ>BA)g2!7wJ'@:\~]0]C3Чn1O'+H%Q?]aR6t|<'iEêxݵCRX}^1~r EҲiS`}h{j{潽.VE5uzG8«ؙ٤*)t&wyy,з5535Ȝ%sXmSsXV "LɦsikGdkdW,F2_6Q] t!UC>FI$a#5?6ׯk/_+nC}#qo;BJdF%jo~hYU 4bV6.Ibq-7=i'ͺ!}?Lަo4.]q>rHR~c4t_7I[9ÿ0=`mۡY~71$|sBP(~l;Cb3M܂NLً~ @`"8t_"f {lނy8 .DvsXeJ2⃂'鄶,d>GpvȵǗWM: IJKO_>%6vI#i5no%hLwr}u~ 6[IUJrI.8f %=tT'Ҍ||&6s[4!3dy,s4ZodݞZU܌,VP'زj9UhLO_} uv w8t)%׭5mك_LYìge*ʻt;?e2[̟3=fB0&  aE7L>CGJv<"5kxuB):l8{9YhwB7Cx߰&1\{-%mt<my+e:xL6^zl8t%]ʸaB|>ҹC->}{}ϐÈοÙ`[1ڹBP(?@? 5MܱǞXO>8cH;27FCJP=D 5싀 i!oN!] 
>/YE, Ѳ5ZʫP++) d{  ` >p/>T~֣%YixVۣ(5-D#у3gb"ՠ{p9 1,ӄH8oL LbꥡP( Bq,юM2W(3~tsq vKP{8-,\%[j;OAnRG ?݌ߚm3ƈDm_D-AO)eCi#0/XSؓ9g*`[vX4FrpXD1b0;XǼ*j ЧcкCf>b4m4?XC(= H'~n۶DbXFVI&N0F/g9s|̋ﭠ:P-;:ꉱ,$ikdGjEY?#ܘKw12o<^hNA{F NB)~r%x[dG}@c/ȼ-5%[seAGN<~?>׽B#߳ =OyN`8'XW{&P( B8XxqGJ)4M@yq ʅu 8oZvQ\P( B tB8FhF"Eq]IZ ~3+¶-a Vn߃^)]6QOGQx|׭xp%"\RL~+kNdO-..H#sqwaIrOayrbRrpHI0Sq]hJj pE70TXDQ 4|#,Ğ>W( BP]P(ljsLWf4It?!%h'EV|ZȦ%?So`'M˅YGyrhlԾ쮬`ɬnxZ"iܬn;GGIF~*i:n3/n1h ]P( *P( wMg[u414p| ͊dkRgUq$+4dp[>([WIhBI~#[5<^M4Cl!νzѩ p8$Oob&]"w9E];?^DT YمRb.2b( BP(P(R8I8kRqnjWv^L5XϟCO{f+f!$+ݺZΉ\uz2O"3T  8ݟLrɉ̯'Azo &OE9Iwp]wUft5g&U.>aÆO3_T7f-:쪾P( B=96a Q¬w>w^Jo94,_,ϸ &FtHD!*+gP&#=i!"F[~/)/_hp#즫~J8+NEir*lrHmAP((hRox?)Uy?m/j=:^iV3z~ߏk6{j|\Iz<[X&^ $OB ^t/~fk\-?z'}8T6σ~BazMe|s:i$~C# |~??[@ xJ@7MyM=>?.\CW}_< C'3L/~QM Bm'oO*0Q')?z}^|~? j>OS7 7oVsY;BJI_N?$:0~>$o.܁׃3ϟDaIص|K[j=l}XNS~Axg=ŌW1tY~l6nȞZ$u:O@bOsC KOp?zg}~BqphcoǞfsM!f㙼9O2]" )@u4!&@H%~ԬY}yr^zGA`bOe^f>_&sr/ČmfILzRKî[Ë?"dg !CAa#M>Q±Z\W( ?@P1 1a, ;1~4@bW|2A>fj ͤv ~a]嗜IB/ e}W$'^zu C/q_|U֍ֲCNKEGrDsĬ~{pٕ䤾EȺ\@6[,i0:'}ai;0 oوˢI?½|dRqQy0kc{x.tm{m>W}Aq9[}oR:r5[W.&XГN28"|ev _s~y?aُؑ\m3~ylG $]SK$틇;{x-)]Yx:|G7p_ 4-.5o.N@6;1Inm\xaݫq0<d'. 
ܸ|s:ċci"av*Ϻ/nxiC;Af y瞮\dY#4}̳p5Si1Tl\Nr)cr٧Y10FSOn BP]P(#2T㍇SO8F)OAX xoF'o淯&` '?4/uXGj]%s\L" ?>bzVcZNP0:hݙ|j;:.r֥c$F%zsf?ݱ쒁L:xrFWϣWg)q\n\zPZHĐ6nZ?~qhrEX0Qōhp6`Vg7.%'N:RR^җsv2!"17!lQtZok˛]NOI\\# Rr:6a&s?9I:뫈$؍!;Rbى/ǝf; t嬩=y.6.:,D4W  nN߆;\5=BP(!%S]]`ϧjJ8FaHJJ:"4MUKz{jD/L=%c=ԆmE^~pA,vlcWUGS) \;ޝxI[4dwQq0Tm@mzZ (tkX-ʩ?;<\wN٢Ulr1`mׄp!?Rn$&z PB 6!rlY]UrXbcL 0c[{ʜGNy'($5e$@j.b کk+b&eR\Oϓ*Gws=ۺM$<gVW[JNn߳XG[f&Jv:(HK=n*splF,XȀf+]Aqd 45M}m}.P>es@!v ek4oi!i`!Ј5/o AG+(g_ AOOPhuv+ Е/)@(J_kTK5-o|^:p&iV>NQEQ}.e9ʜ0 q aGO)Fs5z2'g;0ߟ,Os\QEQۃ.v7VƲ*6GЂ 7yap#)*쿎͗ga/p*!$mda|D;lo`{+YbL%d*Hkygy)v*eErteϿ3$4|~jW\KQVEQE-k.$ti.G5^6'e4xb \~a00ĺ?`6i̘3"dE3L>vt aq5 % !xh,C" CHG͒Eב `k覉7OI㳷᳹rz3lq0, Tb~A\0-UKD"Qn0vkV,(]t4,Z^ʜY9bpv[Bieg?9ʘA&?Q E_A><7bl'qǒ5yyK'#5@dk54{tAkHC]#.f;sksrI7Jm= s9cX69jqr p}Gjt=6{jjlgAh}Tմbzn^{-^yMȖ휹XgGFț0@c=U]s֬'W&6zm5vV>j"X\ɳ[ۈ;#3*ٓ ω߾Գm|P՜& #cOÃ䁣2Ay <?k[o#eW]H ˙_uFtjVS'д^ݟ_pl`g\gqӨlou) 6.s.ws9V !wY&;Wĕ7t5%EQEQ=jBZ5oaMstp;V4OrЏlbS \ql.8k6{f ]@ğ=H'RtO>p)?;kòEqqg0JFk8u[ZGGh=e.|{>馦cg `pn2lP .++. Nd 54!3ijRB۟BPz o`jmAgƭۺ sl9X?e&EkYxqyc0Ig˚h)'aU졹!uI\i;0Sb}\E%=,;FwOؤ8;{ӥkc&xeYX>$1wܲ,,3L8maYA=)Ǟ5oGJll_k.3{:bbdN9fڻ^l uql{xN ļ:^:ޞ=FdIŶX#kuq"lj(= c97gg]{f04|ȾVή⶝7v[N^鉨,(|]imk r=,^)1RFz#n_1:ģk2X!4PK!$NdOn4"fdIb,W$$L _R7ñ-[p5e/)dI_TFJlˢƋ}̃V_J&I$.xxC0F0@=ЉG4wcai>4w:pD+ML/e0PI 4Æcƣ;?+=(tm'H'#EQ^<cROز|RU#5  %$]w7ɜ2ȵ[΢?ۺoV?%S}A-o;dٖ0ҟ͌&j@8hhiw9{nAluboqocES2tM:U_N-}f!΅̹)ƨq|\DM4~jrWǫa\4{?&N3^y#fGΞ#2êfMMîkOfLMV([˯F#7\ğWʽ'bʌ0I hHKOg$b;3qs3NDqQRFy{ە::.gJwޭ=y%|3+w)Ӈū鋻iC88ml.28cD [č |Uy|V]sէSi#-ߗ M5~*(E;.zrc04JFNeFu-hAfы\)c043ތB n DbIx,ߛ{pRH'kg#0A+cP*x!ScvV }lmhf/S1 g MSAu$圫/&93rI0)ɤ$_m4l<{"Z\y3|4v0G JCh2j:8nͤxaZ‚>{Rlf<}|Z4p\>sͣ2u$Wtw쟲jN)+~aWז"o>u:!v==$2[~_Wg Ms,B)*ل)q]g3)D;c|ŜqsxaiY@a1 *lg^ hX<[[-1޸R8C+jCTdE$5ªNNNIGKO>x _c\hz"ÎJH~_ZU(6{XWՀQ8sJѤDhQj640nd:Ul$ DkH.̙G(#CF gƭ<^C#9D"1̲I j`:iNk?\l񱵲CABjAYi)Wcyf9F>#gP\sW(sMQ0zVMRQY5gڼ">Y=mFa*_g̀H#*\ogÖdB$ifsgx𦫸ߗeQE5"֟>t;ٰ]}$ $9;#Ij3>iMSG c=%9sP0z44)F`&gw]Id%Kˇs5z}8b0³{%>WN13Rѣ S$%k$q7p46A& 9)+bDɘ7eZ7ta3 OрJ:; IDAT ˍ`{+:Fjpx 1 I 
=fD"6σ!BOb5EQEQCK<|LVNA(dns6L kx彏Y<1t10cR5f͝IFq?$ qb8H!qh\ o7jIYL;~'L !0m[(qb&'Gqb=Y=cY0gP6&V/`̣莽{oC5$قƝ^ȉg, XTnج޴-;9Skxfzsw=H-䓎DW_ dFLDXe=v^q==9‹(u<L,sodCŮ˝]w_m bA\]Ú@' rǠ-Ę#&`J/Jfn&ClٟD$CۋYN4D>NyZaVQ\EQEQ?3Ҙq {60=kLۣ#׃RN N5I\(BG3Τq9ec1{V8qIrr87Wb0D~ws+Y'1 eOߟc(`g0G8\xe\@x̤l{z.-fx23zBO9YqIJPλxQ.O&>cY.Ly 8|y[>KuBQIv-=hcQ;h@G686to3Am,υø`ʝ2릓(qY=5DOgxGL$~wߚTD=ިrN~~q4ZLKh$lӥs;FFA~|%/nڮ xG`)O=RG\c xeGJ }V͓˺Aj!&?9G3sPԶdLNcæn3I j tGIR߉V^ x&zRf#KJ@]A> o>ӕ\rlttEQE:4Y܅@u>O\[lƻޕ?k;9f޽{GaA >?c tN Xx\Ȟ14a5+EFb5 fs{rDf,̓ v%kgAYJn##je's/c1kןIQ$'#f& k:i;tF͏.kny)Hae}?SN%w .ᷫNrv>A DRJ8^qh8{8$3K>_ȹy% HI3 ON F%h͟?UzwpGw+naE&@h:NG;xŷyb]XPQEQEtEQ1@uuѽ xUL8Bo?]> ♻x59ww6f+.bLaГUHúh08cӊx/P- ]Ƞþ;_rPv|B vx=O^ޮ$4v>}t Aۏtk:]fUzpι*&cѵ_4+h WGʨ3񅇑fjķ{n&]ͦydq?G'"M4:)мf̓t6Wg|oa0=u.ܹ&b~%I~U 6?߹^Kq,5\QEQ/8@W2PƃUbdY'SXRYHImb=4"ub<Ʀ#Ϗ $I_7(Im&MwiZ Í<~|`'OG,->{n{_HbyEMsbn2_wnwqN%M$NoWsoYɟMg>[o3{?&8ixkO71.V_p{,Rq]~,R/µ=cI)L'|.-(uMvghO˧NftCb{A/j:(sû?%+׃V]QEQ/(@BG4U%(y| t%1̘;$8v v?e1fT k/a/(J_Ne9GfxQ:IXN֎^RVyi 2#2qgL$٧ _ ٤Yq7J9@x'q$kdƗ)2#ZpSp̩:$wa yQxCL 3٤/Vb$L&3+D')-lt$RJ\נ(C"i#$or.>.<7j!N/iA>ͳyCɰT㛢((_d.$99J(H$O_0}M:bn>cȃq~Ƿ$w/JEJd1ַQON"%=ؿ//ϥ}k_t$3=O][ß)f%L[z<&Q> @zv3ûšB4ҰrWΰCmDÆ e_oНǵB }f_H=#|z̹MFGW5.%[s?M-((y*)E9D -R Ž5L02M uD0n? hzk/7ן yo"]a,Dƕ31o@QMm=w !7s#?c G߿wƈ{w%n `݉^O!Yrb2ex: 34 ԇ\nfnRNvEQEQ(7Mr ƕ:[.%e3~?qSH=sO[c\ŋ~!42FDvb'K(b1#y+21cmTwٌr n"v/??^"yL5 ^ Q4:w|ΧBCwx|<>v^N+^-c9Z Fw +xAl]m;ꋣ(("tuwwG P%(H4%55C!0MS!!l$}q"PtA"4Øoل[h9妢 NVzB۵//$33A.CAV7Is[W蘆I% ҋOp:h %oFim H|B2}].hkIͧ 3pҥN\aQ80?vZۻPN>Y&ScEQE [gW]>wL蹧Es$((/ wEQܡa0vRzf!ND0+̀Ϛٲn)-@\w͗.9b`H5*(|u@Ճ(Z]9Y71Mĥe9?#bK/O+(|[+y>>X؁76qwkv[hREQEQx*EQs5sp=HM\K$SEQEQ+H%inVFU=.g]QEQE.m;a$VE'7ˑ&T̅8J=9I E'\̈i7((`HkVJ6oB ?y,׺B'@wST#Y/z{r 2)Mj'};m.ytewyd:1k{ltΟ3LEh$D\tEQEQE蟭ZB8ABsټuzrL@6t{!0 t$<V~a!4M4t4smQc~\O{qIN#]て7upmD LC`;6vlkHخMʗL:>F|]oͣ9vމSQEQE5(Qy>P- _@N8Xv7xɔXb6a%e2f&,IO i,Y(zrcLbrcEX#6r`խUcRuك1oJ~7N]:~=ac8jl6aŶn)S25%Ɔ+^3!#FsXo!5V!Ae] Ԑ.ͱCSA46KCBg[Gf0fO+%8[UGAPRJ.ukY^HGܠt0+#', a;j@bJju0EQEQKͼn5218v;T -I~~9#s}A&? 
)[14毵8[6na?ȉbn,nsaA6~\^=HC&;skrIow!Yd%.)&ϰXrJnnm<_@s<Λ`P4Vf1uH(qxNtS9"+ _Y)͵[Xp ?>sefbw|g~1YSKiĩpΛ5L ud$MX'6oMDdz:ظYCR|d0,jntRd)OrH\M#s8eB&Rstݦ$?N?+]QEQE&艪ız6_6ḧ͙S  HU 5|i3Y|d_O?*jj6( !bsM'RZ5Fv,8 un{%61B4$Wa)<؜78ʿsnn%m Gs(tgc::Ey^2+`a pRDŽK'qQd<?X]I!h7Q)~扇n癔9Ѷ%*\?3N<C$ /V-gyg)ԑfsP%//^룧(pwfSgxxu-u㆐':@Q2g[ A{/)ʨ5,|^Mcܸ15e-DH)/'W2[[Ȥ |=yF>G)$ijcgkLr%= 5SƼbHo/KRWytl魯eoqf.9y>N0.b<g`GOҚ8F)0v!n&6O#;\꘡hD_ʗ X-vqgv IDATt<]QEQE׊c7e dHQ6~E;^K-(.iL-T|HQ(XBjlѺJ"CJP0 X˷fC_* qvI:-JFƍH``.V$S ¬j$lg%߄ײ }e Ɩ_u|Ra3=3kd(ʷGpӲ)uBV܊d ҃FEֵ<&zjXa/Ar2be84VFuM%(78HdMU3i9)/oP= ,+((~'idאv_IF\Wޫ`(Z0!#)LH3+Ky7pNbly6>pB:јYC9,\ Lw'L !0mٙv0V9bD2nc;1[/ gI$E x=Ҋr  wL%,~-,aR<|<nj@@\BsO1'Kꮚ((Rpww7}}}Ԏ%X[[&~&2Sa{&6D ЄsOw~Nnkva;-)ݮcޱw]? 4o۫Lq=w~;g#uQ~{۶>u?ʧFzHB`*dEQEQEBC(o8D{Bϳ o'v&~nƾ`>vLT>Oہ>| X]mt )5$J((|#E*t}6RG ="+((_*EQJ<F^((]QƣXIT(EQEQE芢(_*_ N[{/QEQEQT(eH02xHEQEQ9$NQhF[Gq΄d1gkBK,G;WEQEQNT(_IV4 +G<[p.$= YsEQ?{'E}?~+wpGA@XQlwOM$Ml{i"KG;3G$WT~s)(]Q% \7eltp^t١((CayуD P5(Nփ:''sl{(}vEQEQO,@rrWُjtp#]jV((r R۬)݇ߧp((]Qb4?M GXO`>-]3/;7ϼr((*@WEoKѺ/⫘d27J4 >m+>c,OKT"`Z`틏$c/<+d \((1GMqWGJ\n߳C4t\‘(T|]8?j$uBU4z$  IM.MUDL?""ERJ"VKvx/ۑDRhR"4hAh#M#)1qp\in%.1( ! UKPz((]Q{z dga>B%p׳TTO 0{hܳg$kLU,]ӚO~{>FsFV#o#Sh˗rR`WVq-1aPn9o?s6 gfq/˙]&,j~S;3sغ\uh4IooŒ3 YOː&&aꚢEQEQT(}z0+)Kt` 4pkֈh=Q2O\ݎ'}g3Ѹ+WXjKH&il\A ɢrtޙEl\{ҕ"٤3 M!*9׿AfyG5Rŗ;\d:2}pX+((tEQe@J2M\z6H(\ݽNpp^9 2{l}>!qaѪS[2q۵uyky][hg ~Iq#%Є) RZx&Fj\P ZS_UG?OdgDSEQEQ(Hu>Ҷ)=V 4!D"98J"v.wk @&sW)#H!u @;ri[ 2+$Frkz ;oƞmq[DQEQ.G4%Q,Dm uG?bQ|wW])ᱴ@[b: {Fk߳iKZ8;x)I:wh}+4r:UR.؂-=vdμ-8pTt!\g²@DR+(G2U+6Mhd {a̠"/e@=MN;5 5g4r-g7ݖZHlH I稛BP r;U3L^}{|gr:a̚KW?W>ʏ릕N|A{$cș\{)ϙ1RL$Ҿ SM<)ɴІxKLZu/x)y"$$ 433` '!M' G2>Җ 6M]EQEQ=Bʯn^ԄH;`Ul!|VNB~=Y6s9ǑmxsBJq Ki 9> sgMqL#a@lx膁Kt]q=chu:F]%H,ˢ_8뎫cuݖ>+0E3MuHmd=KO@czlpǓM2XА8'tLC:x}:"5 tg>'=Bh^\Qwx5!\M("!!ouuu!0Mؾi>G X-!D!NDh x !G P(;2tI'B8='mBlfXXF4n'sL=ͯ?a'H$T((cj ACC_fVFЅLR5S:6qf,^wX0(=kWfMzꗍFضn wРi׹k*NJ,p*g| kYF#.E ꒎O9Pn7*jh1jhOft<Ȕś)ot%e@-Bż:s7 (؟eLYUw&%m'M{VHrprDpeL]Ĉy<0Wh^i׉!}wTitlBΑC#! 
t!t =GhcgC{N+5ω:d RװM"`:EQEQAN.*vn+# dP}+eJ͙),ImCan"='=V>ܴݍp~/[wPRUΎ&܄Tfȳӵ[左Q7tf}u&BIAͬ%}7WU[Xy"$%2j3<7o#ނP [9pPop7ҋ3& y{mzwg'ٰ'܎}-cbt۟ZhA;S{}#>-^Ȓ$WV?jas#h곣({.=:EQEQTBаu%O?[Ja;ia 1 5Ao jmpCDB$AeFANqʋLJΡO"N[@tT MGH)t4<\H?:x:xGNyi`Oh:F,3)(|ޮS.y)/g+hO;c-;vobNGsWn0ow !nGs}?+ xn30F)߁ĂLohFLr_RC=̶ݸtd2Ix4}/:dSjh Ǒc5I  ײh\VEq؞tRDa|RyiEY fӊEC8*m#}M ɖA]l-qGG-vo^ʖ0C)\ ]0Wobc:}h.矕̆eps};ѮmDf"*Q~73#̒Wf溭Oua؀Lܖfo/4e4~C>iv8cJ>;m0||v$Iб>[k"!tnƖ>ˏ^p+G?\^4Bd_0-lx>?G-> ;4q[?X4qq]唱:qoЄˮ _0UrϹ?0/pËp2{v~9ʉ#p"Z(q]\8fҌL HߨS7DmNtJ]<QBt?$a̙tZqZ\A9$k!N<m>FS6_ 0udb'.nFd5Gy9T _|Wދx)'sJ8sM=  zO*/~;g{37W^0x[[>~ھ:A\H]ϸ~_;I'qUa'ƣ;^v.f洂duoUL]QEQls[]JEh}cRj.bkصIcϋubS[ƥbށ{|M5@=b;o/b +X4g]51== n%S!a޼\u#=S%/'HYƿQx\32w/S^煋rqwNApvԖΡO$> >*ǥջ\>gqϘB;5A7vFB46:O26#űѻaι86B3K?ڣ1|~|#]TAGkó ((O EQ~*c<+$]t渠Y%D(tkj|wfLCX((_𭦸+R3[&[S~,~犢((?DG ō|{<}W5ED5?f 8' "HFdB~~&/ae|:Ng -M>7ve4[ީrEQEQq.P(jJQ%}e5MWrۭvíq_Pt?MoZ[7 E$ɹ4$L(~gt?aٛ㢋"޼ nSoEQEQc~p(ʷ@w“:)mڑoKkO  w&Ra50s9Iԩ[ |i>KӸ9\5>LmGjtH5$OɆO8ԙ󮹚m*<:##6=+Mi649ھmld7sߖӘ H7v t#ʲwAi%?Ǖ tܺޚ_SWwfP~uwrz[5EQEQ+=p~_x-= w Nn8(_ w\|:ҤC$œo6M׬Tz+%y{97X}*8iG~G/無-;2+MB@zι$ %ɾk ~3Oy@_<4 ;Vr4x>O5pɘur4Z PEQEQT(݅RNHȖl.- E% [9a"sǡf#9=$ /0aQw7CwhCsѮ׹q{rM20D-RH )$k~ 87LCLڙ{^ <<@y*hWEQET(rfj>bVmCM}\'PSEeu%oOƂt鹸VkFgYq{(^2k+)5SSHIq7Eħ]Έ+ߡQ_hv-Z%KX2\2+W<28)z07>tL.p=aFY#/Ө4((? j]Qclw6l;L2/?Ľ|؃̭邙?Ĵ`on2Rr)?#=G=ICw 7/='}аlpIlӗ/Kv7mxIօtЎfP۳ٷfk! 
3?QS0v'"H@”Λ=EQEQ~0455UM)ʷ$ ^WW4U%0K$lI +phH"0 `aiQBRYG%Q;H48sǎu&~?pjq,p"رt[ΩXs·["((B@rrWU(*@W~Z2 {)!5~5(( wEQ)VRsEQEQF.=׋eM1qTGD<4LSG.QWbƏbM繸גY:R{Up((Cd kV2u^1*)ØAKҏGkBc8;f}³S3NBXSCuGrV::G^T+X m_afFa|Xi3]@[񙘺|dM7(k,`jrgpb$6lE[<, S\˾skXV9oghI~rK׭(2G0S7S_ /@CqハW֍p[1&Y{dlJw6߿@#L=6UœE븾l6~[: d"`)NU,-gTE[\QEQoe]8͔^G?A]21Ymfy=}h-dwk+:e%RH[QWz'!( ԙIGhjx]Mz\ Oc7Sd%aJHe`l*JWfMz蓅l__4qԍ!Ei*^Ͳ %K&N#􋏰dKKwdSP؍ f~-vz@ɺl[M?c1^@lKX9e.ŎӭcgKHڇf-]ϖ)ޅXJ#g_y:-:3# eBgօ&䇊(te |fcЁ>vr!4-lqL:ЭcHGSϤk fa2L Xi7u`d0nde2jtF_|^ q2nbyʡbO9%+*Y_Өmg2 MMN[~PJ: 2lƎQ>djlRHm-_Ŀ'!A|x 7`W)fۖm*g{q#N|*mRI m6Эk}(u$-#Dw/M5Sћ6r[#NPD:fƱl'*Wk HѫYWZ.4 0l ,s%Jfֳ#>[N'.]Ih@ e0.:>gĻTGQԉK0ҹ-X_S҇.釯bBub ^~WaW6븮[n4mOB۹yx,I\fJ(( +"v]-La^.$׿dgpv t55G$wJ)3XdGso-fgC Ih62,?YYDnxs*m})ف9mФDhanEd$T[^ ~o:Щχ)#l\j ZVQd-ʤK4Udhv=| ]Æҝ8by54Ŏ4mWޚHZGtmC~Mjh&hV+`$2XI]TdC?<:'nܼu[*Ѯ&/nw%T:ebU|ۙd" sQ "Mx?`H"{">z;`6I!C. "wӨ/hIޯ(}P@nZ" <41=#"n9S0sva%Щω2muG!B&OhWԇnQN$!$NPq]\xNI-T~)HǶi;-N%v8BvF #D];93f2[jdƩ=r+.d%9o|={WD>~ V`deB/қ}9|}+qѱLD_у)ir6tA)|H1θz{>}F\}M\XG°kkt|ڿ5mO~ыh /^J~Pt~[qrT =O[:Wea֬A~B1_^1ɵb ΀SqM3Sɯ:_@UUw֖nt;*\>|~T >#&مBqRRVđb-a'ݶȘE1 IDATu/wbL,Qv5Ȁ C֚4&X[vze adQѴֹtv4`5Vy?纹^u{{A)c[k:,x,ű NJHR]FRxLGu Q%wRaDaw{:hu5ahqGiЎshZwAV=>k9K>p8h:nC')8RY'ˡܴ^ _v>qxx__>cf[tN֮GL>B!k#9Bnm5 !- dh|jqB!' 
BRb;B!Ѕ)9(q4arTDٴP4a4n'CҸ aiKq+Vl$(~ĥ7B!8UBS&ͫxԵGyFDcV}[>u(<ݹm-xXi)<2L/k2̽Xe7wU!BH@B=lv*|uI&~\tU *K𜘻nge`[ ڞa#_y&K'CJ8*e^M!B tk" 5>ڥmոƚ(u=Z}=KѝQa_ϲ8aX\yI}Vkl??ΠWn<ͅ~ B2.kxon3%V4~~4KyJG4nu{Eܺ8C0EB!p@WذK%ՆN3tx:^sJ`+Y[_wOp.g|;oR=4EVQ2KO2rT.(h9|QƎ"3[nFvn#QVJTy@^ O?zW}74ERpjj'ÿMyx6|q&VJa T$sjzT+}-g7obeȤG;q 6_g>k$Q[*B!ـVζ)mXf{̡sN1(չ$Ead )`3z%bZqЎˆq]e ŏ?Kɺ !Sc>2.O7k*MlSľjơ̈sL@._w?z3οϝ' ]!BC]m,_D !At R2kyjf^\xyLIxb:GY[% >ѽ ikCt;X93P[+a1v 4.d&XJz0ix u+tOyߑ3Xy2,BG8mxۗ-aޖfy=K0>?Kbm^c2.^|z!e#FN]6)m`_ι\ʇ\W.(' XΧn>s.O?<AUWQo+Ę \g͌ǹc|މ3"\ !BtP(,A6KæZs-ON`Heu!akH)dDN""_╫¶l?л5p!u C&M#.c93^Z -aXeIϚQ\w9/{?wPk!; &#{K0LU3o+ ;f7/YLCq|8 <Yb[g+lh11'<̍ m \+Wg8>tV_?Bw' "Fނ>uW/`9r)K,4)*{\M[<:jLU dXlf?7fTƈQ#z`l(QAr2뇷ɦ_@yR҅B!;sȈW^ڣC nE_:ٲc uPGͼ}q#pUH3p15$(qv7mW-Z+l.Z,fs?vohmǦ"f*AuҰ}Qh*"o,:"ϒii!-,kqPЩ'wy;^m1yfs xeqxge.dܗŢ KUBӻ"#9!BtKg񓥌;10o5%qCTv6ٛt/чeCBl./k= +WK*SǂzHR}0P(=~oaw29ӹ"in,Au*olbٺrzŚY|3T^EhkQ:ƵiOU +׳l}5˷='#hn(pXh=Fh϶ٽLL~FȘqϡn-4+aA C9B!膈}ǜʹУ¤ .P2ԫx4*b3*q(vmO?1z >F6%mCDa@['"!|,sۏsq[+/dßoMzrPv;b)-lL:td#CK5+X(ŴOG>cruwڏü.Ĉz_f?otX˭]ĝ.m:h ߠ^Ȣ?E_䯾 B!8ɹr' xn(mٽj/gYK|~PҲi 'C (ö+PPP֭CDAogΥ +ANVnJӣo bexh5U#1 hkƪٓj0lP5^\R;-dW;ԷXJbpYƍ@&TTaؠnmH]4jxIJzd`U 7ֲj.|G]B\v<z(eiݵuw@7n*QTa/ۅB!]D`KБ *0bE;=v%Y̒7[ߗ޽()YƎ0Er@J CڧwY~11 ض'VGYIB!Ѕ'K/ FnybKy?9̈́ct繭[ y>-S"1'WP W@zx$(ѱRN|/]:XJKְr̨~p3# `7ssifPy8~,l*kSk*~!>|s5bn0{\8*Dr c\}-? ^A<_/ϰmj9A %B!$ !NnB^!Ĝ kkAkj/<ʚ=!dw8|Hb?8d"KK*ջ'Gq\_˲Yp7)'#NhNnÆRd|kv̿ϰ`DQHkWdknugW7WT_c)h 7AH[}{O/8eTU}!B BqQzϓ|s֯犻Rt%ď:sρ0 dx=9eDrǨvB\aM/v{\rOfrkQ{@WֶrwOWݸ滷rXc1`FArZq֢cI?!s)BC 8 8)CЦmex֬x^ƞtuidÊ@q +`:BQ6¦rJf|ZǮC>OM0z!ZL"oGQ{4csS+\]Nx`C6֬x?"R+YǬ Yb%[|޼xyչ,?8Wcg?ᒫBl ZkL`#*B8ws,A.DW Ðxt5j_v;#~΋:w5yp. ,~s+0SЏMlVn |*`Ҩ*1x%{x,9k F3&A On_fƪ>|eGWdՌ?T3~ʁyyߔjl"܅BqRJ>Da`SS$ 9Btt:M*"JuzQJyC1L6Cxq5{z2)D ('𐭵sxi22CHȫ C߲NU?(Mz2:RD!;v Uc@B!9rk-҂.DJӝJ!BqrT7 Bt>cTn #W6Z)Ә0\#n,(0݅B!ĉЅMxԏs9g$Jo@tJ^|q%T܈K.@gi\yIjd,)&BqBZضYϾȆmxE4es՝Xd2H;Ⴕ(X2$sQX,x2I2rb$I\+7mfy2x~ PM%$^ ƿ·"~r+4 !BH@BR&d7r9? 
LG~*&/Iz|9uDLKMԛWY IDATهwshZxԲ/w_= ?_zVҕCܮ?ϣ>gO.Dݫ֒7T!BBo,Aa䀉|cS7?FsQ!O>Erē3g읟b?O3 |'W_ky#7^^05 t&a*1Nueh]5+ְ=x~ ]!x%cЅ'5'^ƨcVk"pv(Eո^1iv6#0xZz0閤qs=IA[O3Opۅn^%&Tc 0j薌_%DdQ'#A+۷Qнt!B Bq<0̍;7r1(J?p׫۩*'Ϻ CZZ]JI:0 vm9ZT~f1+㡨օB!W]!;h'ͫ/A|5|_O (7F.n- ^·璶G;*\7üLGnC0.ܟNK0dB0!B Bq0alh0O7X ~:M֩î{5~Ory/} SFp5Gx8kf=_Z2Op~{/8_ZЅB!WϘDkk+"EA Z)yw )NJHR]FRx'+NAc0h uaQ(`)D!J8EߏB;.Z߉D;ZΧ2]!ؼTf{69CW19KLEvV!5aߢ0-C{hNoD!e9_B!'. Z;he"j[ERFHPy-o,iUM"BkZS?0hb``jQHd4*0QHhۗ#wmhlG\TGF>-B!B!퀮6VԮ!ыɃ+lHu> J|i ϋF=͆Y5 l.˹d@ UȚ2Λ2?=7Q[$ʙ0~8iKaXa6e6b,vļ 4BFFyj^^azm|{La8^!B!1m+xj|^ޗiD~@ԩ,aFLlވٴ#Λ:"Qe5<~t)<"]ϳamZZV&~eUY|?µy9ʐQM͖ U +V9=iPm`' C8)#s!Bq* fؽqz,[Ik5BYsE7RWhPZ=}>045P6髹?lH[֠"QЛO|n:xc=]ʭ(׎tغmAYoc6.c]:n u}QY(8ù)LVhVrSM!Bm-{73r곝H04h%]*l(sNIRR.]occ#J)<;iwYs_E력B!8·R477S\\|kiAW/Ƽ#D8ibq=uXNiюGQ8Nĺ/Ќń% ^k |8Nyhnvj9\B!WA{tS+^AL>Hv!n-QD !B!f Ca] QKv!pqhdWO,/.B!$ߢ'-A+;%?9_H(B!v]%E&EO҆u(,LGM!BK+' /4)B!B!182GB!8H  xifI_c`B!8I ;19]Y!]!B:ՂX.#ߧ#*'Lʢk uMCS.B!NꀮtZ&c ֺ8 d:s!B!֒O2)*t+EK\4OB!8yzǠ.}Xq5;Jz~_9(B!BBڶe^mȵ?= FrPB! M !Iz%Y_¹B!.B!.(ĶWEJq8!Bq1Bcva SCcx .B!$ !;˄0SO LR^!BH@BwAI B!$ !DP%PLH…B!8wH1?G{Wb[3Dn;H'q8w4 3N˾ƌWJ !BqXj y?V98(hJ3sAioIJxe$ia0G dV| ^ 5nSN?+'Bߥ qB'*DqB!B!ڕkhHbҠr<7vN39A^ojF5kY 1*za>v&yr#Ba3v_6T\~xǎZ,i*ȍBBC/ȴQ: 7m$R3۶*4jcҋdxE=oav}}?䅗9{0g@;MKf_kAtDqI5T{{QJb_zM5+7314Tf^'߲j)*&$-BqBӞfciPp.B!da<~e p*;seQNjSYYFXQP'S`rzlVϡpouY+8yZ77]'߰uOR b&(`aRE# Lssa}(:UD>E0la K\tvO8 (ʲaq5gAd|9*B!BC=W(N'ߣ5Eĝ#6/?zݐQ@YO=~._[Q#Y` YÀ y :*NghIDDc$np|(5AHf߼cӽo Վ6jH;}@Y)'B!;-Q 9у0|@7 xanMɒ}cMЄ.<YkJѹl# .>o -]ΊQ6u*Q~-K*J0$Ъ;d!p%K/"T˺8 Jv<x)MYVXۆS%e8 1ZۓOB!E 1 x/v$N֍B!+tt6 :1'QQ#(ڵymŽ[ر{; ͏@0n[Σ$]5\4Ħyu5{tqWX,O=5aXy:,^n{ 8 qۼf2k#Ƭ%!#VJ8,b^}Q^ ]'!#ɁB!$'U&qeӨ|}17a ={X.NԸ% C"#t!B!$=NɹZwǗ"NpNYE-E?LF@(!B!$eBgq2etb0M1RB!]Hj6 !:c3տGIyC!B!$ !޽n VZͅB!.B!BH@B!B!t!1~2qc$1 OB!ҩ*J B %R'IkER˴KN<ț-B!DWti9RBtc T8 Z)!y{d*RJN!B. ZI&r"t8f{]5|:1a] z{΅B!1 !}('7(l }VNSGtAB!oS@B0d9b ƆYHҹB!Ѕk9B!B d5!1FxIisWX{ϙ9933_ΫBk^ȍѿp~,B~o> F>cQl\|A>;HX+o4|? 
-%*TEQ+5qռl=,>8^u-^ȡEh:~8tCkn笋!oH-nbq0L21 оca`fre+n+)w,@`Q/)Nyx4Yhy0|BKnk5g nVN'/j6z,4=q.L4Є颥3#G=(A΄۽qFbVA[%:^hwMϩ;c3 ,_5:N|m m$`".?l~Mkba S1MO5bϯ喹7~ t$̫H8fGQEQ wE95B`Z47kJx4nZGگ AeۂIiU!fNYD^| 7r `er>ZMIh>`m2:=Hl-bE} {I*΄]kbjB[x'HQ}i&}XՙIx#MptȈ {73o66#}t2r1Z0mbj xI飺!^x[3⾜q@:hxSLJ3>&`d%!͎R-`ЀJM2cs*=vEiHXHA%bu,95;bУ"LL,:IJMIdBhZ;m!)c "?"o >;7yjB29x4l]Ƭ(5 +0.:Zl;< $:%ѠUpƨ |8ءVq#mŒEYGOrXF)qzՁ\ t!4`=1R,Mk汥.FF0ߓ$7QnZ@S ,~o>n f'Ӕݴ#k$Elxç{52 {y{} WHaVLU6i$kbY7~(Ӝ& {= m54Zk6i_<ߟ9f*( +aQPRLav &ylv9e{[XsQeEp;/ȼf%طt[dJri]9?=505k=«|2.2O .q I.O ɋE=L:?&udpl)WOJ^4oEg䂛'SsB#0ƎAً㽸UEVHSȈ ɎEq< RT4#,^.fW9;r<47N4&QާqA!8~ʕ{XQLŔy FL@Xo<~8X{qy9w<6i3 IDAT/Iodծc)9;ܘ_Hikȁ2|/Fe$-Ezoſqef8R%e﷕?UHcl9ǦKp1. vgx,֥[7} zM,L<~I~ˡsRG\cΔ'I4 #o\w;eh ט0/"GM=˟x}=W²D"u[f0gW>7mӶ׶D'Aװ7mzL&$Q4BhxM;:Ps2$a~!,JԕĢ/xNxrR=D^67Sٽ}L.mHkBf2`T= ++xY̑lh3NC!gѣYs&r&rf?Yydogx;x/Hx,I\ۤ5R|c5tIQ+ҡ|,Ê屻wy^ p}lIDlw~Z=7VFOq`EQEoz뭷X,mƉxDش~DB$!O8 =R&]puw^ l^4 -'Սujf-Zò ;$-Y!2:_mM3H vK,cފMl%%;TC aO5`]p`lf>û޲)Ra57Mسwa}KNN~C#g3l%57Tf[QZޙh0t%+t= AjJ毪fK] RtfP~8}bQ`LJ8)t:l;J("3Ŕ' 1 ? 
2-"JN7IIa.IARV!v FAY':cӨYErp#9.=(c),+cnȣc 2L\t-K%Ђ̢cQ#i0:нWd:V}V; 1g(*!s!=2洁t+/"5޽ 5`kAJ0xpth>J:&b6fr*2޹PL,kdǝyS5PfRldMuů $&9%l[#*,L#L#N(6q(UPֹt tyX]#{w:bSٝdབGמUeN>m]v{*;Oa]`>s`[=:$f<((CA<.<~ϡP(?aAĬxo]̢RI:&mL6[5^X8-10,<[|ȐonO֤"nCN[ ^}]VH^Nǁùj\F~j*sbNv o]yRHZ2k\7ϴצR~r1W!S!Z\T|f.s҅\YV?6W#ٯ9$U'Tᵗ,BqУ>#l~\gXвhV/!YxlBR@%0~xq/҄TAWH$!%%tMMM!0MmhtM n:Rim>HJ;"tЬ3p4@xN5l% KyihCmkw@Rhz۬ve!cָ=?jvDžmu'yXp#wgBp6!5BC;*slwxV$$u_e%syh6?8@*[y7rCke .7‘uHܓs9<*1kIpTݏs¯"Sr9RHy^((qzKK FSI,f_:^ܶi%=8oBf\jG5 @*]Gv?糹%Ġc.~(=pK3UCr|_J6u;k 4M ^{#Z˼Z}ᐼk18tsV^S}Ĭڮtx %\1i3C|r FGDϋ` >ۛ!sK8sXֱ%vӲy)  +Su,޶!in\SxO jr)5[օ FMHHq'~NG󎨇\ܓ%\XD}³rN\'ns9=OW̉Uw̡ິK.kq6,Ɯ6ڑ_9Nf*߻4?.pp~!s̹uWwݽ =)*>zx Br˩\QEQ;}I2I꫗u0.ՙLvLw =n"═C8fhm 7M:ssyO.ȖGEH k=hՀkv4μ?N;¬]ɉbnvq1BD#cE\EГ+&Exm*^`f0az>#y̚=.W0q`)88ል+%uGbĝph8tb.cclΚEsJu]Acl & G ؜K.9hcHCEgxL~a kAXfTX+\V2Uc2*KQsܕc` QޯUEQEQOdqGz6&OIm-ðnM\Mxƕ>\ⶃ'e 6а,϶=oM{~HK.=qW&6MP8uۉdJn`:mt=t2N,0m\$` 츍It8װ,p#Z!55H'v y7̄/*?8>-SimxCi<,ibjB_1iYO`LVl~f۷:j‡((|aY܅[YhU,>|V|֌.W?bô<@~8&0MCNVJa߿yE} EQEQEQ i2p$݇$*4` qEdJ (((ǥSEGwhhVIޔ[ݓKsƊ-<.EQEQEQ(_B`c#'^QQEQEQT(kxEQEQE芢|Iidu+[5{h'OWEQEQE芢|Cޤsf^y%VyEQEQhBJÿܴ́B!~:SD" !i4:B;UEQEQ[ZZZHOO?nY4|o-]шEQEQPHDuyg>z|4MCsq((ʩRJL)g$)'#mZ#AK0)𥼷DDAPf m~;#So7z.0K_;S|'Q"qK]kEQEQ̚(f$/O,#c'3),щR]2r@%wM;eiF7xL})O2,|ܿ7}Im]NW'3h的F=չ?#zq,O(Eޡ{=/lD:2hK((A+o_|>BRP'Y391W}Р{o#ׇ!(:8k(?;ޯgnjZm٘ кt7xvs8g u5}r ^"II&k*Ljz((rAWߐ@7 4a:Wsu|\txN?k؆ :t*&YOJ$QCJlHW\3~.v i4#hSRI 8Nwye{ EQEQ+w3ctrq!~vѲ)YU݊njqn[_'g1:/^}l;ix&oDbyVo5WTQEQE9(W?̛̍ۓ|h7dpKq#q-fSy!dBEăBg.cj8اO @ ((]QӸؼ//›ȈX'DARz^Nb4vmEl Z)858e3 8Jx#3?2H- 'L eEQEQC<̈́B!~:SD" !ig7ZvhY %ݮYFll w+ُ$()%ғOUhݼ}<J{?SJPEQEOٿ=Q8ȣ.ؓ(*@/h QUn U}0@QEQEQ #;nHqO}D pY[\q%ԇ)$ǎAON&g-NCwX>=p!XXJ$QH5ſ[قyW ? 
Eк;ԯKQEQE ?,cڧL{YmL|f,smth޳Il8Bб,1 402*K@7 ,GQLџC{Qg~\FDbi&5#Au,2ttgZib\zI ݿI,dfbύmJ|8oekrʶi('/皎efPEQEO$nMi3"SJi|\tabDݰs;{k`` DZlnccfgȠk ?{l0.,#˦gSg)G3PIDATqDiƝ]}1,^X&\TodƽēU=zW>`#-N9e._n3b ANcSR9pBl\;%Y s`*F֬͸fҊ zwt]5(G$X?%q"aKI.~#"W@=̭ 0fMر4Sgw3Ż-I)h(qÏEH_/K&~8Aɩ&SEQEQ Y{1q#2cNq~+/s*$(51L0 ab%%tlI@f5h-u[iB,>!k8vZϧ?`mb:4"mZϫSPM:,ꤓZڃM\ڌ NU7n*Vs8s@.v&?;daM;EK2k4P= lc_d"1B &aV,X[p0ZYv'cBU:BMU潻ngØqn< ~nQ狭fҲljB1q5@ӎd1p2Np4ˇĎv)_aF]WǪ~[nDS_GDl((s]lYk$ӧw\CI➺,޵~ᥖt?y4(ֳ.7S 0Z߄-ok`ΔC/QjI+Nv=ww!f(o>?74Xj#~ )ظ'z Ћ=Kmw|d۶G_OJa'Ъ|Run _KhfOBlfˆ1-mhwU_0s66+wSFi?q '/ &/g}}ʢb6>gL!2T#n"ƃ%~6߻CQ}r~~s~!W&fF[{`G f[c7qt.e=΢CBִɤR?2~?@\C잟30I+&-?#Ҕ{>W==|svȓ&N`p](x`X)UEQEtt͟Bi~.eX'Ɇ$=x&.#Uo9U-=b8#!nan\̏g71xa&vYaDR:c&lI,!dC0Y Ű=0tIKc DP_RxNS!02tKb0aW'5Y'P՗ 3 ٶm[7d\~Cǧi׳wɹ%l۞^g|sbw5O+J[Ka;\s=2S* ,%FgҲ>$Gk-滈^8.޷wD'' lθV{w{Go]v?Wch]t3W3iX~ &`wpã/ѻ*?E.޻˲Ocl\Ɣ\7O:#v2oB?7rk7ҭWbeѲ-f4vbܨBLQ޽}nvT4Y|Λԍ"ML_kr+W4c4Q2L_Îz8y] o`=tylZ[>s`:E ۿ CӰX+ФfƙLֺŌߕ{|94;u#sD݅&%蔌q"AjRz6S;>4=ւa#kC3H-+0 b|6"fɻ=mAkscx+vt/%ݮm㶄q]Hh1ĥG(ޮ ֵّq(חɯ9Je̔4J{[E|zXYe ɢwl:gґU=ҕT^hkS К)y4:_ u5wEQEQ>Ť@c~&A'.xx4Jč%9d7(x%efRc]щs ̛ͶVGqDZ#1bޡ/1ϊ!uGs"?4-z k{g޼yO͡[E&Z&E58=f{ t\@s$ˌy̅`NGN_Lf0HUJX˯ANLԝlÂye9^uĶ+yU$v~]`J>W#[-dždKϓeg*Vlzɔ= ^"Ʒ8@S鞇ЯP2HЭĐ>Rj;駴 X[6xU8uy .&lgUL}vl9|K,B^|Uc:!bj\QEQs =#cbnZuO.Ցq}"Ofp|aY6Fi/n~ s3)M qHG>#Tm=tG5r2# A141^ӍDeOp3L7yk|$+(;V E`Z"0dΤD&0Yiy m5ر]{#,ѹ۱'w>@Sѹ|j\FLaZGkM8uJ-f\yo=6s]r=ۗ ΦW hZ"63j8 &ј߇Y蚁[Ģq<)|ulkLq1[b&tדxY\u6]Δ``-Mѳ*ݡSy0 ]nL+o9B@i֏dp)xh{H((|.:B 1@_߮[G|nZ>L+uGuu"hUi4QCQۙG&˲|XtJ4uѰ|(X~,]1[ǣ+|׌.`.=G*3{F2^6.}c~ G'"P9} M˥&2!-S..ҫCfgo / 1$^]@S(#ͨLG\{`ι~vVNK(I5!h'u`g@3xq^l-ߎI!?y6 VBFYj)?y'1»yG}DT8EQEQcPC̈́B!RE^#VGEA߻1gYVgPgOʖ/6Lɖ:*Tͭ4i^}H\KY=k+k7AZ2(:#+L,|vWZ*TV cZNvXKҴY"8E=QD*n{LQN}PihHOƨroe WZ՝ u{R< u9pzCV'~CTXOީCahrۥ(W1ڶwTH']cµjq/cBe>Hj6i7dނ\Us1`ܣ{JC 0 C:aI dMi=Bӈ|:0u=(.rAO=sNo}ز :쩠yk]5+E@@USO#`FL=$`a:xΩݖ^M4d G`v1F@v1f@4󜆸czu/KgX !ͩRXkY2?tzݪj媑10;}kP0&S'u='f87&qM܌tzv `(HAgF~[U6:)q#WRN5]w 5t:(~-}ZgNEژD7F~.߸K[V3#4l]]]`0Pe0%Zy>J^O%Ir;V8 joXoőoƖvk-1 
HѫH8rpEq[iҒ|r8qd#IENDB`rally-0.9.1/doc/source/images/Hook-Aggregated-Report.png0000664000567000056710000017635713073417716024262 0ustar jenkinsjenkins00000000000000PNG  IHDRzTXtRaw profile type exifx] @ DUEJе: w#:,}H F24BS%HԻBx e8>1.WwvΧDTC|L,~stp iTXtXML:com.adobe.xmp /sBIT|d IDATx\u038TQ Q vLl$Tڪ)>1;/t-OfwS lW+ՔR4qX &ʯAfǀbbyݼ=yua^W[oxhժJRT*JRT^SWWRT*JRT*BJRT*JRy-e[nŃ57&&9[ +JRT*JRygjDXDDDDDDXDDDDDDD a+NV*JRT*J嵔5$,"""""" +JRT*JJ<Z|ٺ$$$8sv<VFOw`eA-Z1 G.o9\rpŦwnr-\ydފj9JRT*JrL:pOװzj֬Y<Jfs/p9ũ&<2i?C~󇧹;.eJRT*JRy=hI mw9f9mp+iӊ&O[OLhcV*JRT*f#߻~1')#q{ HSl{;?Xs07c| " 1xgF0]폿mc֭l/H3]wO#2mcMxʈwC-gg<aG/2s*6mПˋm۶uGwX{ۺm۶7xd@nz Ϟ_Ƕ&q㞿@{E-?yݺyޟsJRT*JpsnS.Op$w4nu~#m(B_ & ۆшd%Lվ)tg{;MX}혇s,_E,^?p/q]-;*Y1>>MSx'|OdpbfL~9<=k~ ԅ4ޟ>;*~ g+j:Û 'ƒ<ë ~5k Q?13RJCsy♙l|}J㩫/˽Չ["?}t?ϞױuLQNԐbC`ڎF(JRT*k> >"N)ze rTc!o`>Lj0΃<i{7վ\-c9{8v嫡ƻ,'5|)Kaq{ZcINރ|<Ȃ0y='&ϼLYGġX6'`}>6'`[~{IdQ>j*(-*?{:EMj<}yN{ ᤢ|tZ,ўRT*JRln.F:}1YpcM{l]jΚD6ic9;S3[߃.(9ٸXxj[gS":+1aPj)K0g'(:OW|u50~桇s֎@Mrr]0IJ͓NwBkH_>s{weK:kVC)Q vSF+b?3kW;6|Zqq""""""ײ +=5+) Qr3!j~-(dOamȉ⭉%_QXs':p4"Si,N<@ Uymdǖդkϓ/R_(}alE ׇ!]3qx ̝Idf+٧K(JRT*J;[呭ܕ޷ {7;Z>RT*JRT^sY-o]5doV/RT*JRT*[6[ˣqRT*JRT^3'\|۵kf S*JRT*J嵒zafb"""""""*EDDDDDD\>:QZT*JRT*RڵEDDDDD䚡aRT*JRT^SPWWRT*JRT*Bh, DDDDDDD a"""""""*EDDDDDDTQ!,""""""BXDDDDDDD a"""""""*EDDDDDD aB\WWGii):i|||',, gx<޹UÁ YUWW9___Vg\ p]]`f:u@DDDDeP\)\|!^LD:gy6ݎ!aNQryKxvv"'䐳v2|19<vn!-m-m%T#& cd\,{2ʛg6хLhq:Y݀q$`Džo]/{3,>kf)e ̘|D.*EG2!E0e,YjLerd#aa p=7s.YOvA9n1ac'1?8 ٔFz {5` ~X&>9jٲ핱yG?{ql,_vcr3 M8䗃!ıS0Ђm^:~K irۏZ=as [R,K&1inX'eqGc1]fgچhrb<# 2΢4)UH\BQw-[3QHA D|iu]rІo`& vle V6o)Y%ޏnn;WSgZ 7`0n7Lγ8fjg@M9Q ;[M58賾q7JX] H[MDD˿FEr݀~4mj;¢l':[Ɩi7G'{;Y ,/!}"01vDRcCUFvڋ,ʰc߲Q9tۖi:8C.9\2:T_(p;0 #!Q\_*|`9-^1i9Tık*elZ ()ǻ퍚{,OL䧘V (޴ysg}Ӝ11H_2 ԤY""" ~sk>f8$9{JE&Q=hW[D#bŗ N+qOX렁ѝx+{ӎ26s 8FNV榐R_zpϸ]UQy[ G1~ U{\B(޽hxgiho,!{ wqpFK]1W};ILH2n$z('ndo =QIM-^Xb*J2ri[:yn b"Z*|铉s:5[$0|xгĩE+1 .V4w[ S_3W1[2쀁Af\{ 2GѸ`>{{F6W5=WZ֖D0lDfV*܆"~{-,qS V$5ZF#363hp7dD{b4UȏZO}1 a[~5ऺQm0PƦ"v9dx|Nl5gm}Zק0&;}RRm Bp8(*c[9DԅUU_W|ocq7|=FѾ[V_'Y8{'r)_Bg{(a+ALsRAl+!Zk+Q 
;-DߚyFnt"B%ٻ)JɎ~BoDy ~:Zqr%{!~Oo!qګ?pe`s4.6y6'ڽNۯOq2GjAnv!靇/bG+Oz%\4^|ὓ3=sx{zv5z< 6M^ʈ..FSf\o M!i-Xzӳ[m3C{رs/{vY R-&=̺w>gq7a&8۞~m<xM7j[w$v";#|3bh!};Fe|Q7EmO.5DqB紇톺){ ?F{!ĥZmM{.pY.mxcUnXZ.("jC"8\a-MJl4rѿkJL6>AB?0#rz oՔ n:Xߐ$ #=n@ܰ 87VC TfQ`TN F %zN-tƓDbB9ρ66X]LћKDD#WWT V ax;ʝTw˘;o$p8O<2G FHTrꗔn~·9[7A{uNo5j /ٖ_ƕijrN{>.bW9D'vvB3%0#=1T٩MɥS=*~`8Ϩk/$.{ry[e)g>pwLbM<<0Ǝ綳,G&gͽSzq﨡0oEmS8}C4e,6̮GR5Q4> xS5SX~?S0dQAajG28o.#a(q-v| tNjWǝsvgX"W|R,ٹTq 30`1CA묣2w$!l% }浰 _+5V]l{b tWi/xjM xr9(-.N$ݣ?ItOJnJ [M9y`=Cd.""rQ\m\_JZ) IDATэIꏹ{=Yd]V_knM%!(&cS:xˀ33u~{yŷ(J8{ +7H@FdتR#/=*bŖ+.;~sik ,$&G^F{C}.d}~;J&[Tw/˶̬^d\vkw<),\2DFrDDDT\n5eFJ߹^{gӼ3%&1!_#ߛs,vQs-^˸rG0Ѥ&dnO,Ч f~|v mܨXwb}wvEd6IBe'M27˫J,$.+@ʁć]P tUQ7{>x*^[Mnɲw" Ji-|;ZV#9(wKEotp ,nؤwBtLIYNtn'mK|}}jNa嫢"uhH#u69]qcyljƆ%ߚd\4L p{Gd!J6= $8Ӧ7v'm LtXC =3o<>Kqc5w76))a`:~ K]0ql ]hŐCuA6vblJ`~z&E bixDLBD&?:!Lɳmdf-]ߧ8 Dd 禈H#: `s?=s93iLb1q-t74q~&r"j?#c F`cDfKӏOMd@\w-;8aNO%uP\S+q^L`䖶`kOwk6[o !?N{17g[ń0uh"-vn̖$SH ;1ѱFl^܅9J⨩y&wn!x<9??m۪Dѣƪ#~ |xj CgetbmK 0Mep D~v9s:y.%iDXDD12? P|JZ[I'[2:@˖K,7'3*AE aʊ).-8oK΢hJB[DZ `ǰ؋8d>,@q㒉4:>ф8 ]+v7<}c [wcCe,-- @:ErbNp6B-wM->] .SIYWDDDDDDT__Z7~Ʒq7É jkqߧGksFME!Wz6m|ٱĆR*$'Å9B}1%' }b\`K}i?iØ8GST \#~dcŪLl~> ~XWƦUXNl Aw Pec4tP!ܓNuq@""""""גV;ӶŞMpa?< 36S>S! ~s#y(O5lC<3Vxo}~,ͯ"""BXD_H偿m&L-癿ዪKԽ]H0m]&vTsȥˊ\\|QJ837#t5|CY2-Ա/k’>"K^1-dʱ7XO.5br+/w-c"gm8q!oo$0Θ0N7|_@F#5."{03@<Xs !M7זGn `7*yY+Nn;ȁ{'> QnüitrbM)rcvoT a9?.h2ԟbᮐ|P&! wIaXz` mC|]\[ʾ>ɩHɧ5loZ:]353(p4d04^;ޚߟ_?(bnI3zMOH21KU)XIDzŹco݅1YRbLpv8HN̤;~"[v?&Y  >0:еu<➫␃5Tb mK=f#;8+/GhGz?67Sx$Cz{ FN)+Ҏ0zTdwuйCe udc;n J+LpM}8&s)'sbk/ʊy/1ErKt; &;hݖWpa_tw<AƆ|ӍU H| 2;^[k2c7o aroqAC,,ef5[}nx,BwADG6bwv  f=ө?Gvа`HdGJWb*buUcǹ0ۑƒڱuYcӦs^.};QJB}=wv6s;shy˜6=C9׉ =_Z ጻ C=A_GWҺ2*pA_9(--pPWW9g2E_ r'n0#77:Q yK7&C0ъNq]x Xc:Ňe1O;={Q/C}0JWh˅)3Uh >!SpMfLbr}ymR1~,\³LY68e1tcm/wuІߏ '? 
՟rB'?oچG{~w/yݼ<1[`$:<Tbbw</xSuN|'7 {33Z3ix<ͽsII 5559Mmm-???"""!?|bcc~㸝ީsjN ]АUuu`sߟjueV,rbX?=lQ?UKe a"""""""*EDDDDDDT\Z Uy_dW͡1Jb-h|Ozd+V^$ cwS5IƳ=҇{r5o߉6zDDDDDDr-i>_ԎDŽ}x$g}>] 7j۶/v"b+YH}d-l22mV*QWԮ c怶Qח썅|܏)̸9˛q>̟]ɼu0gvY[kEDDDDDJq zko^Z6LX쮨=ǨٟWEc 1} 9^W3gddO#=VCUv$NEUpIZm>< >;tn6i`B qKϛu0FFjK\~~:k׮ 662+zkv\h0* nm}QQ(?LgYJJǿ:r^"W]AAA#DDDD.¥|R&.e=|j wamڟBr2 "'}5Upz $clhp6]jz3eLw.77cp?tKKԚǒDDDDDDjx{|ڶm{wG-fl^VAq#x.`Gա""""")1WΫЮϙSEF#"FEDDDD._>Q!,""""""BXDDDDDDD a""""""" Żc"ʹ-k8t]}g2pɷ4/n7TVVr]h4 $((pլY/ UVx<RyoXRRl&""t:N*++)))!""E㕲^""""WthST*ϓ-ì}&Z׀. w***0̈́^E0d"44˯u\n~VR<{*G5 |=\+f$t&Y^i\p]jU?T*K}\F~9Ry"""""Wl!\WWR}TPӶo6m0A5G^>9~z{Oo}nTG@ؽa'syuuep]jϛ*%WpVS_<#_8U_繜1<;0WnDž*q1MKUeÅ0d~e] =!Isfqc,Z̛0R='1cBMwS^]c&púWY Ǩj33h""""*^ ?_^ʫ$/ @ńHz=O.z@whF-sQ;iyhA-LٟF[bη5^@7uz^Lw#ȓ3fr]FuqŐ (]G4ñ/S([38mra9dY)~2-O]Vݸ#0Kqc>)Nۤ6eǴ3p`Jr&ĴgS`:G3ܡ"""B_=|j+&bvuy|tOnkJ"ݱm8RZ r*MXBLq׼қ?q8:Q&[wnt[xF&'[qC>L5<֕ʎđ8Wɗ}8KfAozu8cSuWׂ)2CsrNZc4p:ֆ޽R&Ё*rgxF0qsSDDDfԟDn^}'x#NCөy+8q7pۃ8[Du" |{-/Y)ܨ@HY[\wSGzj nu<Ȧv}(/]/o0~sߩԧg$2% (@K26| h^a|Z%EQ*"$bk"EbdZ@jLdR(XLHL&r~L%yڋdfs9|f([,co'[I/ oϱ8|zTg_ϟ~Ǿ~=Qv>n3dLzǟR^B6~ :#/_skj-}{$vC%א{8xKy{\moee '\w<_{UV\K )w>@_,"""g="|03ǍbLG+M!ô+Kp`qQ Bπޏ}}NRq ىQ߿!=Ʊ'450>gowd:q"l6d޸rFl;|Gt7ĝ9W򭮏2;)_0hyS,9NE{W/f0xg.u?#7" 5N^ #q#'78wIB?νFkn/=?s޽d$1X=IOg G ̀uʂK-Г IDAT.ocHp%g%}5l~#Y8/V#s5a΀""" |Ȯ6|%4M5\}wǚ0$︑S}x/mCJy):xquNe_a5p˯8a^/aqM.N,p'Cw<# 'ڟwrS/3\?yXF2;\/^{? 
`{a1'/<~\)""" }B,Tn&*0o#`7 F}Fm ~tӐ3QgU>iAJ$Ɏ.rtwgc00aaUOޏLs-X8CC|aC9?NPJ]xLZqUZ.MDDDdH&4h⧯eXr%{;9Qy=!v>z9c⧯rQDdȫiX}w^&M4l;SYḍ} GNϺ<=-2N݃JE8GF<,SlHzzzviOO6v 7#MGeU];:>Gsaٙɟ1,;?#G^px%"""rÇaTcOì}8?*oԨQtuuJOѸV:;;=z]""""MęOJC|2""̔;vvqGrE12:_6;8t---/dȑ$$$qlnpsړeqCR}t's㌞>zJ*.EDDDDtZE0=X*/ Dzi LѯPRRex0poEDDDDDKp{W/o+oBj=DAW3*Z߿*""""2샰p7BM """""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """""" ,"J=E&ӑ9xCdʤ"tUݤS`7ai`:ɫk_(mtM&/);)ʱ}BU%Js}1(qAe1%J"rV,lV KqXh+U)-z`jjvK +eqVJz-yTWP0%Ex*r&]U.!9wIsS׸"[9?)H5XͳEډ"" "wVBb2h63i9ili QĹ""-Dn9d㷀ߗN|_v9 T<>pGx.7%.5&gg9sd㴂--~<۠-ozđaw=UM[+VlV3QA-v bfχ?衢΁#Oi>_!99[Az%"51 ٌOE y|CTy,oRRRϸ4;ԗS-XCuR^7^?ފr걓fYgBw~/%^H~KW߀ocO+odRTL,o3+ȧnq gȉŖCIIDp:BT&ߦ24ܬ8.'gҜWಃ-;gut4Q\(NO.|/n` 0 "G:(ʵR R"GDU6,H?|nqT]NtD|e*O#""""""aQQQQ9oE D{DDDDDM4߫O&zzz"""""5ZDDDDDD>WEDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDXpfڻaEÎWM+Pȧ*FCk%?wVM&kY,ߜDCr~o1D+a9ؼb9&4/c-=tp߽=?^MOt~Kyh Xw(9+װt=Oӎ1~klwZsynn}+/v.'qT2)Q`O!~!Qqt7է(ȰҾj ƧshK=g%R1H囹CORq=#=U%^v+_"^e9%QL!f7QDA{TjrְqJv?&2}M~r6_;{&ۈ""rӭ"r2n[E,=y`|WMeLT(b!.?rKr^XN,xC)}n8FEC-/"g,9ӊno]xcF䛓ǐ4-B9<4uY~B^󐵖;Tް[{舺JbTO jlfi,aAs#om;)fHgxs\h^ֻʈ]!8\brΝQjCiDXd8a ٌk,\QaaaEFFEDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDDAXDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDE$msef!;5p'zhtMmsΰƞ}{ޚZMkva"""" "nc;v2kp״ag|I&&\udj oOǒH@d:zY~72>OxpS3{O΁B~dO/p#kWE9*=o/cAzOnc~MᣯC{{,W\{'IGEOg^J:) xy$9A~mOҔvz>r>y&»#l,n_ڷy[ lo=w,c?5>",юrYQ݊ۀVnͱsUzC 3̭TatM>An?Q gYYg݂rw7xWy.؋GAw$$qSDDD] [>^_#yv:7boXoګukXd37XIeGHnG Wl%q-}CUU1شgixMCT:s{F϶GYZ5'߱4~=?_**+u=$wGȊЏn#gz6q !ץw}}z}oજT~w't_'Ͽ+nW zy|Dg W}[&35ja$~k~W;2gу/UN6[׍|?aaiX:=^ICnv7tx"j(cTҏyqݼ[9x>D>?p~#2s+}z`܍w$c>wn<~̟Vpp_qC)q_K@_7E"zv9[/k䱝cynZ̠kGq =NEDDD>+8,]>'UUQUUŶ?=uQ5 F偾j De^Q?ikܯt1s=QvF2** _Gm;g^&5QF:yv<,]z=1_= S<8F֌˸$6j'n=?Φ 
yPL5c~ḍy>e<>6fZDDDD2aD.:ܾt~|3xŵGw~k]=O'=Uvv9hDX4jxڟuϯ0v~f>JTDocBOƷoCDDD&aaaaaaaaaaaaQQQQQQQQQkÛkʢEXh޻'sw7tomg^?-[EyPY7lymgn]ĢlϐKbËXaw-v7燏E>L'̿V^Xo۵|-ZDS_[fa9O"OKLXsu}ggX:c6VP.t"'MёB@dؤ7μ;f'wwmXeT]LANO߽N 1F/luo@D,e 1cX.:S'D8&e/fD˖fdLqq1D} ƪDDDDNFEHCCRGʬyjK#`޻.,2J~|!ɑ@A[ʨmD<Y̚Er* Pj`bV$Pt۟ɆJ Hrf0kNbN>ſ}+jNHayv }!;OS _ss;!elڲz&>9sȘsR}m{*ظ-]ᶜ:9s2>fhcϖ m \<@eA T>m0z2+kw73jװawf3O MTSC˜$k-˹C3^@|MnoPDH1 {6cCu--]Lb~ m{ز-F}@h&:nNSNX)n3\e,HDDpQaaaAd8hhl;Db.8ak+_sq pc@="HqYӜ$MY/i?'񍯧 VFil毑L+Ď0s5qLM3d[$WɎ`,)YW1)z<ⷼaҕNR2kwᄏ?y{qeEz}ލⷻi+p2>{iLsj˽h1m[NIv˾"VMKw<.K?ijlm[:,+jhp5y@l2Ӯ q{i͟p@77h%(;DRu&A`t)/9:cRC"&kZesi;~}8kҹ}l,*tW bIu^Ƅ^4?Uv%S/ ƭQOlD;IfݷvP** L5LOmΫ v? L7lnaZ̟[zIϞzƧ25eww#抩L_vnǖ%8Mꂥ䥍Q?$""rЈȰGӰr55լ{uD?1'S=#9#ݛ 11+,[6[FYCIx c+l&6HTw3=>qL-'w?+HicBm5[]kҩl+YBGfI*{';x ݇wf޿Z6T?x˱ew{Ke]mkXSED<~vG@_=2(Ibń=li!.)Hl:e?ccjj*CtXvԣw_E+'Ϯ,Arg#Th#mT[{W73gSndmd,^ IDATltO| ^XGC_{V'}-@LY'oK8~b^?$&ŝm$"AOoƱ40^٘jgU8W}^Vd{IInp0̶qÓ4vpEDDE_^"\`4ۤd͝;26Է pǴ5* lѴ41OڢhF³:][P'pF,2 EtwtvPO1> nt̠D3V !)lxMR 2Ѝ""" "/{.ee!ArLjOSxo۶'647:g`KC/{g1ҙ{G:sĿ55/hΞpa.p"i$ys~ xIu3&1aHqAv`ab",{dqj"m 6:GFGt`C33`N<5Eqk;GE\PȰI|RFІ-^zkk~b?&@qb;d?ن{*o{{ϖ9aܒΔI  1\h pb1$ >+rxОGݻ/Wm9^x&ƇwoAm^ jh 0ȱP{gؘ3 Dek& ^M|Rfqe4аoaQmWWs~kWGf<1xQhN8*\š vWSS '$@N ">çR-} @cY |m\i<]m:^66TW#]Ot}[6LJbt~,Y-lZ?Y뮤!&:$9 @#n?C}[X!`"Ytt@`kܶ-@ mg5c4ƽ@EDDD_5ZdIuLOTҸcO@Dhb" pd+򣣷qFg0{'0$e%vazӝآ4ֲKš#$G68TfR,&r̹K(,qCo^D̞ΜT`b{25Z}Ѥ.Crd ܡrh4([rD2\ JABe-oɜg1=+kط}d"#},v.! 
=9Fڲgh6Tv4jgf{O>d j`̚3\Pm:j5%{k+ؽ  )L]Y6^)jz' \\:g4PVW \y?YW;!~^&qtB30'}¹E9r?ǶijjQD4Y#cy< y$ RMM#+gqݬ, l֘)q]]K exw)pf2 P3煞aaaaaaaaaaaaQQQQQOL o^R*12 nd2ĝd(z&ڦsV;5+r,;Bx\6LgJ2.ζ18r(_]ԅjdq?9Ln|?s9rW3w~7i&+.ϩyRζC<>)wg2pk ~Ps| 46Gi^&v ɂ=3sz )u cai H0t}k$$.1 0Fkط@$07fk3`QgtuuF])nijcZ\Z]/6_hmɺ^i@Zat5ug>]F⢚spv[$w\`z#l6f˨ytӌՍ]F͢%֚U4٘Yjt6ӈiF\FںX:ck~јFZk$9ֳߞ枋}d][o05FMMKh:ǜkק~Yh`fܹ~Q* sƋƹ:TjqcO#"\?ͪi-Wq9E>ٶZL&hZein|Uy\vYx-)&ad2aupׇ~ni`:ɫ82SJ}R/f$m#wed8MVҲӰ[LXO=LdbeR ~*3LN\;3ɱy())ݦ+ hleP_BÂb'xIoA&p: !<.+&{6k9XEqy\'9LLV\GxӞ]:TOɑY>37`i Ѕ#|3ʟ)oi!.-lrx+|!"gov96NwEv,&G%' KpҬ ֿG;~_9P>o0Qn/E9 Z,X~0({=o4&gz7Fl~=boh؟cQfš[AgcdR_y1iui=N3>k|\<6WBQn&izv-|&[< gn w ma{gpW_BDrs T^%TE~/ny+6\w&BvU 0 r“榮qEr~S| .*>;wl=!X98UVRt6Bn qW8X -)k*IPU> ;J󲥐]~A8#dwQJ*.)>v+\JO*?B*m6WPl=U[ٵl+b*n["/{JrY\n4*|՞Ńwu [v*׃b,`ϣ܅#*U2-~Ѓ60gSLyZ)r%TVחmJ"YO&ZXyY4+Z\#763Μ4l6 @q`i#ӕnd*|liXY'[k9isiwmpڱXBحP צP7Vk808X-ܰ*-QOIn&XmV ݾ!`؝+hn_w72QlV\|R ?,~v]86N8B~XYkEKJ+˻!n"ܙx|/ ;B8;|pbw)wY BCr|<[%%qblVB@P,VJoa>W |rrs(?H6AS9ypD\.g%fRp{ BV Pس)%D(-gĪ}t[,XQJM@(]p՚3sqX8sL`}798v s\cɦ&䱒[料LZN6Kqڶ`ǵņ-<^?!a_QB:;G4M=jDXdX`ť-$?:v8\\J́#Oi>_yN9n4 N mxx!9Uq{b'33=m$ff1XZxZWq)wRU|r]\1X{Ÿ3C٬3n Ѧf[vga\]?݌ o=A[E>K-y}T;i3ՆspIᴛi믧4ׁp=|55\oSlwfkpW ?w ڶ@SI><ŸrWROpk-Xh i9~~iDn0$lGq.;i >2[[ u"# FɌZ>jܐ$.ub]щB]Fu`ڹh\Ohh]su&OhRg<ܿ/G&eYjkתB\h}aF٢iFjV x4lLvI|Rvu/5R0l$}Ѩ2fdcTLZWQ u179ΈKL5efR9- 0ֲ L㵁stm5%O4135jZUb1:o:cDlN01/n,Iƈhn~͘wCq3=t57U9擯aGd0Rq#ٌAbqôImqUgpL>ekflAal$NkLos08Ð0 Cj""JHK+'ܳ&&[EMM{CTY;r# ZNY%7g6g92SmUj=/̣(Ji*|s}uifwZ|}"""""͌F]VFCEAXDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEQO̱I:y5lsHϫ|8:t-JLROV/ۢRrjp|Bvz*JZL+3IMM'=56JGˏF6I'}f:ټnCjL3;j&nj>&==Ti{TSIo(|njcܸqnf\Lt^ݾTc7nƥe5?61Ç0QMS Dl㹸͆3Y-3ů!!ۑҥRTifڅ,8:]5w-CF'>OR!͡Ӏc+:z\~67JaNz:Yg5Ax s& q٠O٨yI™x7؀..vg ۲IMdCSe 5 N\dtSSR̼GnNl\Kб\~ɩp \*ȃ؆-h,FC6GYC{/ZyҼ)迄M99lq9c'.1(Ѩ|WH:h h EN7v.z<*}>r;k2}#fOw% r1iOqYɩ,Yt㿒ڠߑ7Ԭ"GG:y3v'gI<%+ߦH#ޟt] OHm./VO9l^ϩWOmJ23u)FxY<273q$vPVhB //YOΦX v[+'۱TV߯oe#% OPS9sh*qi;)οv_? 
o1h"~k"5=?AqX}kKHx巄=Ku /͡VxaGˏĈ1(P1/g\cIYî 0mh25 #TɩU'8Ivw$g7,mĉ+4ѡ5؝ڡ~N;WC)$EéZruX_b0)oÉ7Qy՚c IDATʢ ,q}ö+K6o&''͛^¶7NCqw"3wJXf k0:!A<]^K2C;.rRYTIԂ6,a, +9JvgSl$kT](nSvxC(:KGc-3|n#_<"飮S| O<\ll]"Hh xNd3.})_g(C`GSB̕%?wwNM;O{$W¬u:&97q޴2q?oQbu1\rs'͹M:N4sYU9w\L[>xϷVj3; 팤>㭝}X~./iNS mQ yo?%(~/W~vrmbgY%?uQ[s}Un.[l=l:%7}^eUۃ KM{aU$TUS D ьI[!xN?&oDz`eٖ(|Íy蓶?ŦO˖wq}ϕ{ps|&<&eêϢH&qjsOrM3GYۨ60bąn0kS〨1t8>nk6LTէ= 7q6\.#&6=É٤3'?IiE3"}uixƏOdTIM84|lY%p-L}}u[r =*`gp"M;7e`1k,cohzV?Xmﹶs{xi Z>hoq9Tp}r x[۟:lcL=͢ɓi{ M}eLQp3np.i7MWޚ}\8t |VXӓ0oMc)NjTMCZϧS}r?ZDgxX((]ƺ,ٻ玤gqo"7h*i*MFezf4៵:jU,Lc˞-M9pAۉȩ,_{ϧ1{i9Hջ%ˆSyc}5_,Υs!wL1+Ӿc0&f0deN -@T0{pE'jCWE5qX |j~= *!;u^Ý!6gıa*+ XY(,]!Efvګi\+wX>afFM3X>hg_!/n*^beƶp1̫uqYyT \% T</gra·'o-αm'VNe-5m~[S VuvoX3 f,J%."nIIn۩lpHmrr͸JByv_xUm zM!a7VoeA ,@nNf"%'m 7m y#NAIX>x 'xuIdo#$e￝N\g([!W~֬mx̌^kvvssbo. glqT A][j XI8/DY?qqn0cTymVl+\|ѩ_|5L~9?am'8U_ܿMasN#EAT8OP1ofx0~Z!wW_2lj7ظk\~ɩ7x2NB(kEEG jdm+g{YxJ^/`[-~ 999l^EyTpefRNE&J o \xg)*ǿz< ` T($ gܚL_Ivj*^G?'3~}^SH6tmU2v/'wDukkRR4\YYkX-d"5JNOgyj5hD1=A[ MH%ٴkB.q( "#/RzPTafeLYIyh "44P{ Y= ++߻Ε,/llA7 [S1$/R,FԜ+DJc CHpT0?&%URI,MMX5yL|D6D婋*Ŀ.Jp5擕^f|/^ϚӱA=~p޼as?q$*!{JJ\7㡲9lYXi?ݚ0QwV4r_?@7M S[ .n-`M,>sK!4%7]Zт9Ihwؕؠl2ҷaxC% _{Y3:3_0F%vv;[g 5DbEB _J'+,z{A"""""w*N((((( ;""""""Pь((((((((((((ȏO%\*㋽+l}oE܃FATWC\H!Nbt_[yٟs9:3"1e|bs9葼%#cFM`eBV}̼lrVEH@B:e`3{NY,E\]{,]277.aXןelxe5,X60"""#Ҍj|j윭7`ы^=(g7rWPdpƇx vq7`;;oHIS@%+q"""r/ь}_n z`HO^|FVW`۸oќ<.;/]Z]~%>['8vW^_F 0'"""/.imm"FMgJ_ 8Ẍ1ȱ3)?>iT-DDDޤa[@kd*rr_]Aa#oL@2N'x/p`>mv*N3DJ^jaBg}}2xdWo-}&X0u^.6̸!m˅c{ؾ~f,CH",c0b )إZKy5>pfzZs?:['9{]ovmC_^X'C˱/A^|-݂A_p6_gCr0"[?.,Q-1LNfXX'xu#_dv o\*B;5Y17\{mL,lx#FJ>?³MNL䈣u%ƴ&2G̚m4/_;|!w51i (?zz Ӧ0"L^jsaadXZ?|rV8 F5ڙ"""rg~hѢE*WJUs? =U8ԵMA8%3bP t_}:KC?n7aTT) AosM=SQA v* f yy'="(LWCU,(5Af@ p3!Zjz-o|מk RQ9 :lj^N~S_U9nmSuK4OGux3{''λ1[= /wlHcCxdTJg`wC!v|JwDZ/.o>˙|'\;P9[J96cC<C;w;g k!M>Óqg%Q rc4@:w++8ԙ#7O>+roNP؍aA F&D=Òup8I'з^C__} T(=-PGxq7>[?8xh 2#ڿC  󲏗VIλ`g.W8Kwrd! ďx.7)2N 6"""wK3". 
yq:)e-`kC5V^]6o ];b#B|MY&xwVlqvBڃ88`-ط|`a%F:NfZSka{Mm[Ů8.fb.c2_2y̛BE{q @x@g=hv8nt#DOgٌ[?f30%jm/Sq6mɱE/#cN:)-.{y?/uY;sD[ro G/ cTWಝ=g! gker`uK i9MTOd%DAgaďqYȮ1 Ջ?Q# zx҆d,Nz9nQ,];P̱7xw dİkX|:WgҬ}z.ǰUyk݅_chF6*F:fւ4rgVXFzT \>;^֫6|{M="c6"""rt}cH[ԑ k&F7.{3Em\ y0ɤMgb ! .S{wq6[R!LO7Rm "-%k7HL@Nڪ;?w2!-5 aJB(@kf3,~XGX[]wϑzLRdHdR/$O~1mRbфMoy׹(h@=UN|jJ2imޓ@b|¨?:*]nw4#ZȉIX#[#B,f۽nsrl;]=z+?xFNI#IN>szYұVlhgVD"EDD~0i!2X؏ѣT;ʖZg1F1dDRU9Ul9{#p5P[:ca @ذL |JsXGG@KcRp6lj jtkLem IDATmn 8xˈưf>?r#0s\]Y=?KN;130{Ugvvئw^gnx`@ҵ`!jHQ UTY 쎫&` ;5GlK/N#s uǹr.CDAKadsgC>{A~GZ/lUЋ}9U^DDDE\ބwpCͲe t2oQlb≺[kE 5|7n<뻆1|tc4љә8$AΦHH_3PO {)U^6[\җq G7k 㘣pmoc x1`Tmθj}%;M)g 'bQ.tPqs^>R~㵽*1୲;#;i# YN}|[9I5_/β{_GU:Zuc.UpF=yJpzm|o15!te #dҢ9{j("" ,0bMb@̊]h=\5me?_il1eѮ|m,~,!CTsrК2 $F@=Gser\>}ΑK1h,=->o=z~g&K(~jo% #M[~R`'-8hoj}[:x8/{oηRc~v\i{[ʯy.:2h?/=ndvc۷:'잜51b i¾M].g4GSv!f,~p@.kM!tj"0#q:@>t 4yqL.PY2{"oCH@# ;G+ƳOGA#d(dݫXFb:l :=HC!/0j Sv>{x\D} aTK&>eNNcQCXzʏ9gks8v:nxꇽI*̷8$cupXbT~ҳ`eg3s!Y3ԟqYC̜䤩'ữ-|}cx{9s7eCgB:v6&$ELF1%}r`ūأc⥶sM)^L^=ċvVl{ kL$fcv6BL qRd~atՖQDDhFX>bGֲLR9zN'zX4}1[\S}g+ ]L'3yp*)1UG~i l\_| { Q;ސh23#Vvad9^#h!RE,;]bỲjcI&Ř_(Y m@"G $?e),XJeQlBJk Ĭųr"HΜS3ފ|~6%/x\v.ݲu@oG17#p89GjI]d"pLIgq; ,-f;Jyc#_ $rb [a<.k4FN`חQKWDDDDDDaaQQQQQQQQQQQQaaaaIx"+[;>]ŊOɻt->O,Daw)E宻*❬E8 UYW{[/ds:t;˙-z2XRg/^c}\?Ь??[XD;ŷマ=lZ_J r3t{ G47c޾D8sw19h( G3h(?I\;e3}`#gqzώlQO'Rx;;G144N?ka~LDZG~8iQkd;CXZk&6Hqg(.{| >C " ,"pk8IMjNx|T c;>]o PWJ=W5Є3)k;|޲?&RޜJ3_`GI_By|&<ٓ>{ǧW5@Pc5 _&t^;s?-C;"MwV;!/j` &Mɫ薯/_{P7_9h6`=8Y@L0W|Q]AV$Vx+b\/ i3AݬS1UW (l [&̹O^wg̘?-/gKz4&WwX0gdLOWAd4r| F3Im/9>YGSn<0 O6vLQWsm ",6c 64b #H|k4jS)'m75@ͻcTCwtGzpyL3ȘJ]nvRU}|GP ޯNrɏALx>e8_`"b0a="sf6c<ӻǻU\|Y_j~9TL6 Irfn*S(~ʙbuMЩw,Iƴ?sþ{Q&LwU)[wEy]oaX|u.K~A)U MmSdXi*ԦOLefSu"_SٟXՏA:JO  ҽohoktfaxR}IDD~FXP|:k3f`owok\oAa8S33?cWljKo~ ɜɌ&{u}rvoځ3ؓ3f|#j| 3HaPW|S]SÓS`Vޭmd'87O{)~d~AR@7'gfh0͗1uذ{pQ8ꠊWp ɘ= lݴj't:kEeAуm/,CUek+>L]Av { NxIyҀ}J[\Uy0~ØԄ`nbk6"uEi~NdFf&ɾi$k${il,g4=:'05s,bЌ7o)nD;,e;COFf&SGѭ+70MTI?'Wls/M}A]QL -k?~T'6}CN|׵6R,B佻Aƛy>E=\$R?F&$۴|z2LԱӟ(ko1>)PawߓMg[f ʓL}# .4Xc-}U˻8;I 
f)vܗDDDAXDn2IíٛQ RGy]!T#&LL[~LOqqN 6IL:A4: (AҤgٝI<76Pχ3X{=z0jQcwD"#FC<K`{Fjogp߱WH42~T),9{hRF\WW5^ :Oqi tΖ&"jKyj[GAtKcccm;"hsLz2{b¤p84&ѻ{w"$ 89pu(//_PeJ`R2vx IP$Mx+[Ǚ4 05 ײw#!I"#"nɯ7OĄ'#LA$X;TWǕU KI"6'=#2*WjiK͡ÜEʄ8"zvwt"MS io fEXn/5E6 &"b#t˒Y7 auŭW$Wxt$G$8'I 5UEQuǡ+LDgxnTAK=>IqtN3ϑ4ԂnQ(&4."5BI_Mcy2c IlT>@cf@b v}DM\a؝Q֭6 >˓h ߰AX":|ud%)}A#'kyv,bGۗ|wNT:4ō)AJ?_CuMASӍ[[Fh zn->"("N~֢( tWw(8|$ ~X&uY< tTbJBIMKwP""t[?3|J? ˩޺?)P3 PYoXdMbe[~?}:@׮"ZlB0_Rrzǽ)^=po@uKqee*خC]:ַETܼ۟/UR+~|HGdVIf'LUufޮ5*UymvA~obUU38.i\5hWinmUոU[6o֓;Q[ǧy=ୌqC[v>ף|{mY=Igi; xiovTPG۱KL*gֹsMxOn| OjܦcGt1;Gok%=4(GcݫFY}smMu'ͯN 7dQR6xON;6陝;T{rɎkY[PϿ ꤽBoܘQuwu~8_`]?S/Uux'G\b?j?7^^6TY;j߱qm]z/qro0yUU.Ց! xlc6c}Րڭh`hHq77L=ڻ;}GoTUW;t@Q}Wuw?_YkH:3a٫7yٷJ#:;,ם7h߀c4ڧZ׆mG4XCb;|mQݱMF!ڠ'7pAbכ^ۭ7?W3u-4Ci3U{lPGNVi^'Z|+ۯcڽK=TW]mam~9mu=V*U=r\~XR)Rn^vjd䟵Hzud[lPF ھR<a i︺QuUuf{50ԧсZ=ylw>{]cC:>ź%Ur٣XsK}ӆz`!qR| W{~rHV*jS|N/9S_٭^vߵSc鍟D[XCUW^De_?@yA{[vjַKZ~}xO=cڶNجX$H;ms^ޫ~r(3>En7o6jB1J]}BkG.r;d۪^wH滨tWj7REʍ Vݦ?G.NnMՒo/ޱڙ?G]={c~%Ck^ynrix_&m=~Gokԓ[[z+ۭScskǛ{BkzN魽=83_ϝg%i}k#x^5eKpƸ^={&vJl/ЗSm+tdO+U)Ԛj5|EWѾ=k\k/͠ki }٫8.P^޹-;y2W{H;^zQith&?Zm_׫?9]97W:rzǷ뉦Z󽤝nk*[W%uswRd24y1ܣ}]{X>:7X>zy/?xSϿoX':;oofp=8}ZsC߸:o++F/ IDAT̟^|/ȿAVtI~O\H}ձ]”yL)9ڣcQ}XM4.q^: 'PעB.9q+zl!ٻ[]Z=O٧qLk˛y'@GzmrVlծWKb[7t6_wsOZ+*%z+N~gRmnl-6j +tiWV,3s.^woORFz菥 zk%;Үx:߆Ym-|Fg?{3^**Y-(qrH?>p?`(#n/s]'2zDޘVk4D"dMj )fvBmYs_ y LSw!gkLDBD\aoT-l (p^qeBPYW7Z_Wjv%bk62lssR!~wdO&j|1}5*K,˩HXλh[8W$sX6uzr`*}Jt4%lKYpPfBKYJYNy]R:RR#Jnw<(GMͳ #ɡQ\6X9dv;5ti iE;Rrx[NMUmC:;jUBXT4PėP{4-J{ʈt*yÞg.\ѝ&7 >n2 eGlK"˧VFS1b1uzꌥg"ҕrJNV,=Lt;VT.6%R_јbNERl[fSHD!Ŕ+ +hu*zҳ#F*xZvO^ٲ)p$Ctdr˔mْLVv˩ Z#q8%Iux} 2S)[h8yeȆLGZf:dHl5iʊe%3޸l2QS2 )[I2L9ٙΦa0$/7pOUfYn9$eIVdDOm-\rL#-i K?F]- /YRG%Z.uF~= {94dNt%Ò׌9'_!WeʔCv*-[x?'<0+ꑵ5ZRQX ~)Q? X A@ѱb-rV VLGpTْ-Nv*6UQQ!] 
suT)_{brE:.2AEҒd+BB17a (Q0t//NwP1W>]i%ߟTGJ7܊7pZ- T*htXz}%` {a(ph۵J/Y .Nu7+lqJ(-^y|~P$i)MF|5:D2 <ZS";0y - yUΎo3n'%p+l٬`$ٶdi Ð S.˲lٖ%ٍ Ð.uY Ck˲y7Px*iԤޥ'_z>}^/=׎x?f%)"ɴφ)Se۶d[LӐa9Ɩ-N5LiҲ_-Cfiݺu4胠AFR(>c}Mgo~wj[auo≘erْh\gǔE[>)ץe1EqE⧵FOn%%$u+#e>& }G2EÈ0PL+;k%eʐt0pH!!Så`=eNuMѐ};1:_U<5G G:ʧOKF6 [?;j$$t/_JYXpL&C3woԯB Q A1w(x(A@YRUK[ L\~# (KK P. 0D4 (;K ,Q. 0)R:ْd+S(fIx 7b)x=xo}yG4Ҋ*K鰂nubGJڔh+'ܢT9?nECj3Z Yj dO{Q)A "#v2*ːʧ'y |nKd^z;n)Mwːdz2quOw~k\S. 0PVZ fê$ٖ%2dAKC)V~e2LvZ9H?>;H_4Pr)X-ay͔:s4u=ڲlCJSk˶%ZͅܩMӐmے Ά ;Ul4vdF];5r}ѿ}}}toAEkݺuaFJ/O䑔E:|21l\VLQyԾխѡx*$3Xܔ%/:M9ns*Iye#-jr9ߑ cQøtRMj75E{-> RAJV(TH츚=j2ljzeȐ-,L<  e(?IK̰4AΕץ{gAa(KKP. @D4Aa(;ҧa(IRuʥPFzf˥E[L\.ΫK ( @\ eri0%ʥP( @١\ eri0%ʥPv( @Y\ 4m~Yիi[\`T‰:eP Qn+9(}Eiͣ P FSL_Q甖tV-zv0RyV}޸UuOiл=fr釾"mTSOgg4p,;=l׈5ـ}-/@YoCr?׃V\NZ4LSdV?A{nts+ U5RƩ;tA@E& m/͌jVFKF`7X?(ƢӺuh`߱]FMtM^ua%mҪC#c۶ à\<.+#ˎ#.i)43z GF(8'jwD5؟ařG/\ lzhEdt٘4sĈ0q7#78gߖ,Ef ƕj|Z0fq>ʥهZ+j@΅fFQfJP{sP!-&i+Ѭδd5e% t(VO)ܬ-ɭPC~ϙ<P.yRR#!\AN;7zQZZ0AF [+_]6Nśj:'kV)H-3%z<  * *ޖo67LUO.ywH4x.飄_0f#7=έmnJ7A8Jh)}lP:}pQY I|T[Qۣ$9s[jM :'44-yI9ّcWw21v,tk!we~#\.#Vzvع\#d,:~bZcIK2s2 [֠%pLB2Lɶl)WmMJ}*o2. Ls~m{A~j[O?CjHвS%uڶ+]z[[_ J___AFj-=*O񆕠3QJWW Zihbzekn2RH#ad9N(h'S[m\iʰS'fےa敝7McaĐlX^ِzS-B9:xZ 'Z7}Q?sf.ri K}=Wz$C0.SYUZ*sItItrk\_+7\` xQ]Z@ Jub+씌U\>ɘRG.+sZv嬖}#Z6]؍4A P2A8DQ7y|Rb pfgo+SY-^yJbʅZLjT2kElE&@~5W~+/(* 82 =o\ Kʖ;_x˲j6iTtfjΪ>eWɸxB☚t^5wn srWuA̋Z@ʥ1AgDŽ_+w>[M7kb+Efsɹr,Eb6wiYOriefg+wsMY7[h׌`nhrn L@My/ ҘMέNq@4Z33E " /G>Ts> oKd*β3“}[#L]VT+[񾝷eAAx]>C<rҕҳ}dY pN9mk;\k~\QhG'? (d0<8E9ҥ+= 5^YmŌ|/ڵnT/E@^{HfTk/~x&P.]|f3AbѮSAؽ#.Bt߻\ R QžtѮ[+c; ne\Vz>}잶9"BHZlg[#dE4X!  #k&:P.wVz&XCݢ]aaLw3[>}2W$BHײ%ӔKϯ*@B+w>r+LBrA-E@lҪCLYZ#mSs)W< b!mlVAxm.00fD+%8PZ!d͖_QVzB5vqWShVW3w+=RqI+kM3‹h֋vq/7#.ZK,Wzjz.>m |v֚Z^Y cNn7#.zK~`gQ,f}\;$ڧ԰90?>cj5曛GrA= ϡr؎i>oE=xWΕ<J<^FApA!C(^oQ#եxg1ml5[} sJm;Ld|^5TSҗ5rZ@L'[.w A `XqZ 2+䑒jLK!o{Xm^SVM-Y@-n7j)՞% wﰧ=bR ŋq#^uŵҚ᦯ϰ3%n}\APRԩxKv"$OKѰ muO-֬7(3SБT[;sgJ3x 7T+;7T-%ߔm,~W1-wp֜ZX)}Eiͣ`*m胠'%S,7k[%ɨ,W\$W>u*q­QS-&mJLsܒ'ӟ96<~qu'SoInGa {ߛ;tbKv?.=n,>Y()r糵@218Q9._gtX2kMÖ5h6ِaJeKmMJ}"j2. 
XSov+G06i{5`6.]^v+=tҶF@P0DN9ۣjc'S[m\iʰS'fےa7MC=lX^ِz"̰>=3L(%-]G|eEP.5. Z|PP T}d哙)eKJGn)IJ)7sWtM9}N" ْxD۫+IDAT(̰~s)"_VT)]}_Dj#c+Vo7~OW sŲc+p鈩ɹEHaK y&Wd+oUT[WJ:҆$׵UӜ?P˚ܣ(y(>X2պf{mkD}3ŪN)؎ouOiлŹ1x\ }Acf0ܥa{L?=}C(vըH(/CҐŷ10Vץ/ʭ%#0A(>(c%t|״tZ{ۏ{j$k_jZAJt{']#MTָ$i|ybp#C[RM0V혦f>o^Fҥ1Y:>pw{oh\lw%.Ǜ̂y-@Ub(*ι֮e# A8yb(det޾C#:vy{/ꗉ^Ic&A}b(+]ɡ{*w }n-ISaZ G1, .wkj*@(f ;eti|\=Z<1~Ib>1 G1.f˝.ϭE)wK'APVnޣ}(`zWe]6߶M <@q;ϥmؑa@Y=Bq<(yd\[JX|a@Y:͈3q{BI;%֙APVnޣ7VC+wFSʮyk' (]Q ʝO\[ g˸' (P1 !#QP{=APأ|ܹx 'AP@Q ndyN垾Ęet8 ws\$V# +ۧP,o㑿=1M ͂{Pd;@E`ECj e{Jvzd; muO-֬7(3SБT[;sI3x 7T+;7T-%ߔmd?elۖa4D(>861s}r2iMfJ|\deIfSLCad_3LɶlI F50<,xFSVYL7v4u=ڲlCJS6ض%4дMӘ +Sϟ*Jf} gU}]y=(>We]G[y}2}w=C~u '3SeG[JBr9SMZ.|SrJE}^,w\,,?KrZ!IZe,$XSa'=P+CŴveh[.H=V0 \ܹO¨~v=&K80e`&&O3Jz3 5rC#:qAIʟOa692 (Ig_QP618~Z#Pm (R;SoŴcj52 (Vet޾C# @׉:qVF*A|:94B30 صkWVM'&bʍfjjj%`lbB+<<0zC%@;a1\r{ S ,TV0aP (.-w8>(w u5\ LPWWe]e( u}>k(&2Av(wJ eI'˝(M'&}PzUW=xrg1ApMn+&)[:؃f'&aTF%I.Y UT8kOȞ:.2AEҒd+BB1(S/Y}N#ca"onP[awIP\v:wraMIu8ݿK+hxRB}Rф.o]N$Jȩ#_e1%41 A(g%Kdd2tYemY!e) \H &זe(<,3Ô˲m[-Z&iaҲ%ٖ-N5LiҲ_-CٷB{ 0" 3-F))()|N#[˚)ץe1EqE⧵FOn%%$u+&@d2(_V,$:xzQEZRԧgm3'>aSҶQ8!V/wNܿCFj40,&JaKv]>O}Ҋxxq{Ḻ,|xzjOh7c]jb;~LK.Kn_{A<̫T8(]q"zm %h)F_BJv(+)Q%,XǢ܏;jO(OGDRdRh\a%Cd~u1h ,<ݮxYIm6 R<SN6XBB,b`%K%-Vg .:[@,<[6<\8(ꐳ0FPLC ( *RYa@?өHKarJ:e6h,%0RѪOX9ث`ONCӧ#Xj e'kV"U4X.9"K b=Pp\xH>Gu67+̇1X^oۙ]J(j9u.Ր}?+"7DeYXPWr49p-T,"f%%öv,N(<bAƊԚ [r(1APV( @ A0a @ @ A0a @ AP-[ϩ9IENDB`rally-0.9.1/doc/source/images/Report-Multiple-Configurations-Overview.png0000664000567000056710000013561213073417716027706 0ustar jenkinsjenkins00000000000000PNG  IHDRr?sBIT|d IDATxw|yl_`EoH"juٖb˲d'NرOznnr{NNnwYXzT&R$b{@% 6Yb3.m6B!B!>Q$?B!B!>i'B!B!]!B!$AB!B!fIЅB!B@t!B!b]!B!$AB!B!fIЅB!B@.B!B߂;(,)$퀳U$%Jy(.ɒ+%߈00 \X:͇/%t ÿq,bzM]|4Nɼ ,ppX=CIMb)'B!B|l.?A7rx%:CD\ð8ɵoCR\lͺKV`ky5+YhcO(@,kͿv4Sku|owQMkv菮Kx5U忶60t<ϳ2"B!BP(2zd4 d ")Ikfo{p;ˊfH!~D?bw{$jj)wᲤhx_c$CNq7mYϦgG8/x7W_rp&zx_9Nt37_yhI%%eeɱ.N^I 58}=R&Fl֮C(:u7 V0o2:vЩƻ^=œ5hɂCuHqٳ;7F.Oi%r0y^藺z0U߸(R~ڙbu[''Ҭ^α[n;y|PaXT.LΊ{n@bS|G17۾EnT'LRX,^f>3|F:7cOkr9+5N:^~_ 
0Tp5h 9Ҏj=7p&cMMϧexp4U[7\JMt_7Ǻُ>8+2,_z>O9 {\]n[GMw !B! t30ƃ4x~’N20{5ȳCc'T@c#+( `WQV&qc+ ɇ#dU׭j'+ncDUvWmzGݤhu_dYYn^/y8D#]Wbc:L%9Ӎ$LE\KXPhawx4Q4Pօ{f6tEq'ܵh͚o[L[J'ixRsuhW|Ekx[ 3]XA^zQ^oQH)xN ؋ojfuT iZwQ9ESS o{|JS0Iijc**E9[7_Oԡ>KSY4o 6:Ŋh #/f櫹jfm ċu4y9xdw~($չ/mlr K]m/?K S.-) 1|e3,.Ol&0fB ޘ9eRސG8㋖MyNh2}h? kY|sޠ/K٢dzyL(d<д)ka|shOKR{ 5>Bo"{nNѲxJ3oL<,&6&9eqx5,?d/׮$1UxGTX/pĹ44;ի}A'~ {le|;o܀tg/|.|l&0fB 3l& >3fB 3l&0fB 3l&0fB 3l&x J&JrP׭dμȤ2q-el?EAe*gM} }# .%^Lyz^>kڅz *ۿg3Ry,ܹt j T@;=۹o)[6rais,?X2Ƕaycg,Ex4Sœ%KQq;pћMzf]c y|z|2gɥFGkA]Qr*ŤP(No(4 /BP1mʤIblfV/܏Q]BniD. N8#IB!B 3t l=>Zś&$ F*rk?O'; 3>i.%\ @e B[{<[__}$E&}?~:~52A̪= ދ?nXc*Fov5q ξZzd}LCDc -}yW% m)L y{2n^ dUY)!'4^~V^@w:*?kx|wkSit͠o#zãSi\ ea13qB #OY`7Y> \eY'~ Nj?HdͰ_o8i8/c,hP‚k#$lz(/t$IbjfSPסN)fg lMz[u4! 2<T6Mdx`8 hV'|]K.B!B: zr`/CҝY!B!o4'- ~Q!B!ۂ.B!B6]!B!$AB!B!fIЅB!B@t!B!b8'A4X6(}9.!!!.!!!.!!!I}\͝;(B!B?B!B!cЅB!BAt!B!bdB!B!'LS҄.B!B|d8!B!b]!B!$AB!B!fIЅB!B@t!B!b]!B!$AB!B!fIЅB!B@t!B!b]!B!$AB!B!fIЅB!B@t!B!b Rg?\|8>6%[fab|X !B!oX)iSYJ!ĥul,ʧD״O:$!B! qY hr}yP/} 94 *c5*;cg\te>}~S:>5ȵ,)sRs!B!p :Az~D)bwk;mK?鈄B!3ewqo 15ѭFI5քx3ʏL*Y1bnB!B1[Гi}4;:L( 1̤;iĒY>lJ)H O89ϫ4 Nje5~:¨,]]Cd~fWݐ"@sIb/_cISi} 0e,Η[!e'j<٘HxK.^6ݲ[ߥÎb_㸔̤8.7!*2'yѧ8>+4\|eU]km,$u;yʯ-TtT"L>>\I/]6JݞWxHWJ.B_ =Iҝs(5v٥u4CM l.y{{I`Xx J()n5I}x~( o%%:,L߰l8M@X"8r󩨬 1@6M$m#J*(o8L:cq((B$ p4BǙW@Yi1)z%Ҙ @p/)0?yXV¶"s( wA)Uysl2Log;} f||Ka0q} H[<8OA0?Ey6S v (arQgp"4 \1~WB47TP]$H / Q g/"TrirS[1EgOl,V+8'h!"YSa+((e/3`IڇMr+.c7&L&E~iZM$l,A@ i+k|A"m55,Ftx9c biVwWhF~Y!~Z^Cm?:6O6oe)zdUeyN#{sT,\Jgղt%:hKb0{ fq/•br* gGwR|?-})LBNA+6lau@NezA<Rb4ujX>f_ Deo佣ab!x7oce&LtC8xm c7GX~#mPy?{?mLMr˦mA73)ڏ/MCN͚|v\l<9o/N6K$EӭTgٺͬ_Pâ#~v^i;l|o=so7b]9,.'r`Է3c* #9]իSc8z~w7M1 {>V{ޘO _R6Bh}:Or?ROӰ&k7]{ihdh$JCɬy,[u+Q챡)Z} y7i Z:nfO%x0jBM'srߛ47}ܹ ^̢JkfGR۔5Dl:MRik(gw47gW_YI$~奎N:|&*g&v,Hĝ q\Eu s0(J6b4 :{_z lPn<]ϡx-|]Bۯ1C(8=ړCEhi Mmd( " paCmn(B@cG|j4a5 ZBR%hwwεWn8#8 ܽE#!N;ݬ)15vwr0Esy լ+y4G# jUr(3ҼeذqwϷ~bU{4KuJk[hm'8ԙ?XPGH[ \^y *q"f{![VƷamjr2⑷Z8S_m-&|$8G gfd"wJR! 
MW ||P)>sm7~A 6+*&yZ/haRP;e"<a曹ijJ=tfO>,ڻط*`+os0pםn1SXR_>=oZ>5?wr yAuV?}7@cS+O>^޺ێ s'yjQN#ZX04u|?xT[:ٸK'sR}mpڭ"Nr=]͜n;͊9˘H?rnklG&ƚk3U&xPZn fOr"C;N29l6]3 fS:bѼ?dBTzn6TcILcO tKJ}(e:o8a5_K,ahInas|{rlQ¥󻷲F9~V:G"-vf֡!bU Cp+(EzocVA!v]ƣ !.Y3dq?dkgፖa:[^̟,%68BGM7,˳ tbB#2YwO/3U6}B[fl.;cq\vۨ(. XˊֵTDG84bUܼVI9(,6PuCUF_0m,s϶*UzwC-->]bp(w-⚜,$eŬ˷v8ٴʇIїv;sm+B*_25^<X=(z2e~mحTxmgIQםW]?TLN"NR'^.w1J[{9x. pF jÎW_Չ7xk~ꚻYgޞlO߲VƟfbw;Qsc.h[ĖP[OˉvKkpkXG }iĴexN6;?V\QUf#08aY//J?N\ooH B?vD+Tzq=,]]oFOqjs8].˙v>LB= Ybkkb=nv6ۃz=*Lp4L+]:5ʵPTTD٬r X W8_?,a+K}v;}NK@/Kﴒ=,\{ 7n]JQhrnc;ssxV y\#.ڧM&´;Ȑdܣ 96v%\v;6We٭j/julvͬWEaa U|ae'I)@iZOvհ}E9tiZvN|:V>|.ͧv\9Kw530)(bQ9þsthg7Cd}+Y3 206A1ܺNRJK< ! ĩ 2 X0 p:WdA4sB'kyɾ>~;Es9-KlʷF z C8;6V a2f+^QH{ ڸڋip_1$\9 sGy9kyKYX:Jɐ3E~W/u' >l#]3|nf-f2pG' ULċz)˳n'?ǎTt%Ol$^.fӍ_b66 '=s:uKNH&C:>k>n>QtbXm+iV[ ΎMGn_~uZPfT"N,%!#&2{YPfZT]eYϧH{l -v*W-z@'u#,bpuu#,(%צDh& xAri;LS2 *_.Tky.N1YD@etw4ŋsT"@wP Fþ]44 mR(ͧh.~K=Zn]K5P U2OspoN"2$HZTVQpK!>`~1cށ,|>mgIE]ÂNEQ3M 'ɐȘdYb]ӰuH4A̒ |j f8"aĒ&>~!CO,gIghT:ɒn8,(pULJ GHhc]#xP$MF) @ӱ0CgPZH&b %HYb$r 4Z?=E!UðhXlvY ݣNG3S4 Sa$Osە../cpm)E?N:ʲH;$<+=n0GNfW`q uЙ뿶t %+oޛ'Jaf3"d+$WxJ$C=Ĕr'8rmh2)&2k~vNXuP4-p8A2|Ohtҧ^2/dh!>% yasٽHЧrd& t7FPD4J$:Bh8H`p`tdrEKyNt5-]a.s!cz&L[b`ɥcDh -`/Ygy;HWc3EhUSQVESR)"}# }'sТ)2,LMmOS:%+Qqz QǺU乬g/8!< "ɂᤴ+utƯ!{@*U(ñ{?}>1q, gU\!%j,i=B)L4i@u,}i/-4r|7C)ʁ̬V";d4ET|ne>8JXu#O<+-څvZ47 6!:*oca||+TjXig`h9^]?2WOv,y R}}<40ǻMٱ9vw ~?PD%qà?adyݫ r47{˘m<,ҖS1f(1bec,>" W ՆX fQd=RD6.v1lSd-k}kVQ4;\N`sg:bߋ&rW}? ִYއ5DFnCc(eSs֬fz6~eRwPqg8vw)*.گ. fg(TU9-H7'"`9Stˋ9N&1,ς܉87E%rTCV]ٰt>yx$E{WNp0fe#?ׇÍu fWSaRl棻8T]kɵ`2gN}tJEm!VMrӽv!lrB! 6kDxCi=ىh ;wvzjN7_Īi:6D=4,*`q{bx35©{95@YrvJ=ʥkwxU~or %{Wvްb(*k Z BMh!^f?B7y{g̙3;m xhz3 1;+ ěJe0&y̩3XU'KE<[tmXRYbO'#'׵ϲ14lWK{riVh8l`L+=o۞ъeTVmei.(z 9Dv}*u.,1ٴAA8!'{^@@׬^廏mYT g}_G T5+RIv8L yrpHR3`ךT@NB.ǎeoTMH獛^_κ7&]&6;Xd!w9EVH0,VNDFaMÑH]IDU4 pb~u}RToLJoA/AD`.և$9r0"/mڭHAM u4Xpو$[~c"[H1Kjf-XM:jQb0m;I$=]ʒ{X1KJ"09i@Nж;h?. n"(r{ҵ0w:z㧊rV}tdSͧtjnn5ACS}=sę_^GZV+,lȉ4izMX5l)cRjމLs='yRFKIL!!!};P,R:=q?qo2Df23kK)fWy^jّF=+Q$%֝⑋! 
Q8m͕D#d;ԁҹ ">AD.]~S4Kb$cI&ҶSwtjj7[s:QbEwREODZ}OVὐ$ cx0Y%vLl%\VC6i0JjYTҼ̖$\p $́áfif9$=V_J "MRK%&`6SXGg"2)/Mלst'CAA3į'7   Gu=    ~8AAAAN$.¢#aw|qz~g:]..?;jj;bOchtLbZ`h,?Rڳ}n/O kk ]AA.cu&=+&]A4CF Fv{@5.C񌄄H(#%IF'79HXI(b)o^_TOPp<1I   }&8V/0 zqT> G(ͯlP@RBg҃@B.\F݁jj}"8N=т.  N(@?%~jnBщ9"+V4_=님/@ɁI2:#l9J|?Ue[Yب,G.v2O>   81规Fꢭ%x+Y`l4L )DE;H5RUUޟDKt .܎~ok l?G?]AAI 4Z CK#4P0HNP NNP0HP>V/)λ?Vd"Kph=$h4uCT YG v45?" z P5`07'T2:4 IjjN   g<]B$tzÑ7C/# Fq$I%tzKpd}  Ɯ ?т.  ~IZ"ILˤ.@B/   `o8@rF+3N*]AAДh"CtqAAxbrA8 De   ;]-"AAA[eYFUSA(x ,3y 8qfj4?HYokGݶߴN[_/PRp7ow_7|Aw'Ç?H-(m窇- >[^` ?w3?zLjQ0v*-C8wɁ-{|d7-d/&I~_KU4l'5~VN&kykx6a_o[Ղ݄>354B:&?09>b.]f~c q\~c|H7xw hw<}\6b8#fo}#5;x\ g?S'5#3/(vWc7~%Ot-{<i=|ݼwݟ_뉊u*$h:Õ]__XB_o#X؋1vi:#V*~/jìx. j`4[0try bb1׼4Ȳ*4(hjۍDߠpypT% "^(^4ez\ y\x]-v 0Y, 2AG]I9q&p4 P<=~z//_x GPɂl@qzȲ*0=Gd1W`9\ہ}uZ(TfNTWSAU`a5n^?ΈfEwOw5c♬ ˆcxb^"[qA, zt4nK7 Iâxѽx#y\ f{8:5F2[1.osb0[u v񇚯͂Apy"7Y#ׁE 6 !4df+fӑB6/L d? zELャS#X`qN\s|c🯿Bv][ x$n&sؙr:1'K` ..TGSB% BBr붜վm><9PϪ/_ѓ1oE;xGrEBayo?x S57> g牯냐)=%C'-E8.< )-Y|Sa(֣k!Q {>%_]i.g%ոq-O ?l"z Y:/6Y GoHs`=0݂=9NJ2-$ob^p;1PG%-*+L/rm$;xRO]ÞyxLt%<3Xsd5cys3i<>4ΉTL/OwB4_~zC+윎s\}+Rm \0$p\W ?fU6RH²lf#i0ݷ6cgqL/n~ʰLz~| Aw1Oͻqx S^e{Qtv=cI8*̦vM1 )#xe: ;'gڝOűyh<*F;mxoo_lHo _Gii9OA2ֽ+_,& 2F̓6 c9\U{f XXʍ7@t-z.^V|Od]M}d~?w_ցOZS7U֯Cg4)g{/3(OC-2vmH;)Ou;v`453&rKǫ\6#!=#ؼf7΄{o W~.uP;G&~^<CSwy7;d |a}{ OAC)Toszy Ȱ(F4;,GW!%1zXvl4Μƻ_&yῒ]棤`|sFg[QWBD\'F?4KƖڙZF__LdbW:Ekn|?SJٷxiTOVM| 41|}8-cxn3ugI3&:.uB*֨$ؾa3簧~;߿7'Kl:/Op͖I*A@ Öӛ{z{%z28_YL{_$qW< 7Ђ 1f8:6mf^3C/KMr$Rr{iɭ̊j7tl{Vdۧ1a^u5~' ;ywsL.Ȣzg;VSY@3xu ,ENs:hjdKK$^՗!6̋qiǺb2ȿAnEԠr(~-/\rEw/`aa~ trl+r݃~Y'%TϚ;0objz5^}-z8C>=8HȦ]DLHI!#%x6RQ^IG @gAtRxQ ),U^ 9guu<6$tA!IM,_ ibv?y) 0o-i{ao:{Գ(YWUe$@HSs+sFV~zta]4?5TЭ;mT<$PG=- ch,{^|EpNrܹVDdzY pit6L>*]5UΤL 'LҨY)㟼W&ܪDTfW|H=Eykٲa5 )# 11Q̼EM!bA)^eKXdd Erid>RB .Gk^w9[Ps0$;Rؼv_}>]>]f42$!)PљLpl^0{";8;]NJiֺ xرW ^GTRSSIDȘíT+x<&M"eJbC9.`,[G-dV,\IILPXWt0r,A((Luqʏ%tS-_itRJûd$IYu<;oێ̚ Mh$]nk+QX'l@KVb<ʎ:%IF't9$!2thA1IHl)DtV;bkP0qGJ()..nCg0 !Z1 IDAT IAYP$3I_7߰`y)Y[a@֡(ؾ|K~(wbKqyzhཱ<,YEc>*:,ft 
b,0% s5?O&>ɍ׏c^D&O'ﻙ)~Kd2r9fRϲ9kѵ>uwزq9H 6n0YYpKM͋YOSZ>O?q?ʚ*\U+ @ݞ ,;3f0{R d pHf sd230eD v5[`cdđћ'J^z2>O[ ?=-.~^ψ/E~tKmLD0%te~~2ۄ'h """Bxd8fK{m=‡ߍ̵ۘ/è~k ݕ+T?ûTx~$Y]©O~Ë'R3.zo};&[c{p;<08IfvEBu""t~Jx+cW>gS_QIg&<"QA$gx}y:6i^~;P"[8WIqrFH=و1|.h3韽qs=y+n)vZp ْg >{71%]F-;t`Kѹ,vA3{k<ĝ0vYYQ|AՄb#""ɋ K w[_"NwI2Hm&I!eߏEy1mDd=Fjn\Vdtp"l<ՙG`%rewsמ'x-x-Y_#i؈5܎lq&\9 #x_YcGb4S$lf= IC͇\< 9CI JxdV +9}z0v>%wLS%Denipι} їl8-ɹqXFϗ a-WG~J'HCyXR{2jTod*㪇8u:3weu7n?Wqsx"=T0䁧gX>0$ѭ3śIԹ0Y)"C|wGMM~/cS0r&$ BToӏqץy?l!W]|qOO^A$T8{]x- 0@%ʳ08S{ غ Fy9i/P,zv8z)q#_L<#D pknsDQd0 ճG+JXZ3>V*^m0|Ul//ȋ9fpGìbJ!%~P=76'4:G   Lf$   z"@AAA3oO4Nt'oݗ?S:~6Od\8!  Ÿ HӴ*n@ p%X`6tHAAA8Nx8UUihh ˢ C]AHAAf'k墱,ˢPN( '}݂   NFl|B]~?,\~C׌`ru755QUUuR+  Ol'CsEp.ީZd݂   h4P.rA8ffN   MN1^\AAP. A89DoAAAwB*AAA8I~Cg-Ɏ]tʌf7ض_l ɬ$(awoHd !\' \'|Jǟ8Oũ L΄KLI ʧM&@_@Ucz+#)QDY gE]MǍWwg`wU-3wOң,OZԬ!w;\#Pauj WN4S]9[{Ź$$5rBC'S)^AAfԚo?54j7RIހI5%IvK[tψBw*5e[ rFsa]vLftرFcÐ~Z vG3{2D;*~:u)IVTm^̒5{*vO".N'g@NęX5cŕ>׍[sEꥴU ^T ̚6}NQyޞ05{#;)ZOEQӫSb9蚇|%}9_|m_/-,PQ%kxYR2Ǯ3Ov2ǚvZHձ˫,/(dOHMUY<Ձ~,.-cq"IV2;vW^hMMig !mv/)ʥKn mhݩ-+< . ۱)yϖ8CSΫ̮CVJM4Uk&]?,+NyG{ټ 9IV2囅-goܙD~~*M2eӰ!\KWt?3cyQYVϥQuUEٲu}e_Ȗrnig}5mfAI#hߵ WM XZ'm!ԭ+tEX÷SWފ%Q[/Wrṹtwh!|^U6N=H dDT*ow߫ˆYFXф]:5.΅#z20,_͋8lY۝Lym*2~N\X,*d20V(eK}J^)^񝞠DOt~4y1ݡĀ?bƪ=5]FNf+B[oK͹wӵnfQ䄘}WP0} d'Ǒq]*[;o|a ]K|Â5w=xI3k7TEfp[2>\z0J&N":r`[x Ѕ8W%s6,굳Y5e[~w˷ltr-w2oE! 2~,\ӥzri~Ŧ]Nqk \o?PdRvL 5 |Me_]<=P:ޝ I=#H:SsjĶ禲l*6SSδ/pgD3[:E7%s+qi;eG)š^J/{!O]IP"*3!\M$>r\UI3\&, # xᛕ4fF3֓u N)n r\98Gͣ۠XԬ*G=~&if ݉iڴ5u؆{ Wx&bҕ-iZEƩ"jX[D|Z^)NM68Պvm<-.[a.ɜ{V@=Аw; #@tMVؾޙ(ٴWd P;1$ӣWwV<;9'SACSZ~.GsM/߯Q傈sspƤ/PWYOwp+K9krWpT|tg\Dw`NoK(8Tymo˱T'v!iQcuŋ~A{?oJm\" AB5g_cAs=''##>_<獸g(ďVҮV[r%w˖ c1-Вr\.!%_@Bcwl{o,۲dw)?d6;)~x٬wy晝y<rzV1$c [gwr1|OPi`ωNʠtXS3RtgT?;:Y8=\y_s 7NFb G{2>(DE|H6.F11^]Tk'{AJV؎4t:w| {5ow)[֓*m1nx|V$%?" ._Ktn3w6[4y796,XcBIRp:4J,2(3xR9iD}Tƣ)Ќ8z[^=o.tk_9}L+e,AªD̘VQ(L6(+kė]@^JΤL9T(_=]Tn[ǺTY5&Ӕ=sK>Ar9\>eeGIѐd۷bdD罁..# G#W61k?0 i&ofޓ.^ex|9VÈr絩4WkKߩ0[ɮ}]L!EgHEP9_bd<{4;d=ND^yf6Kwa~JB|觼u~ TYn(1XILg^d7Ƨ#298~&QMoS5om'vDNm'dfyx"" gR!D341wgi%;KLl w^Ԃ2]". 
]oM3ua6줲~OXH]4 (>ӯWfCʚfNuAB(@MUǍA\B$VO?5G%dyUOPL.ƬqidT{ºϘ+*Ycoak24]MD' &+#'`l|? Z,CRVgDMKvtSؾa~[zIt?ɿrߍI dE(?ɡ0c"DF% fLg=.'G^t+}:c?|43qތ$8$|c 7歽uhq~ 5u^|8r?Ѥg&LXs1)){8 v}K@$"2Ikm.o0 4 x2|^TIBklFrZnJlz!C/hϠ)LLzȡ&hh'9t8 F&ǜ{jhj吝8"KNF<ҹ衽ζJ7Q.JgbqB1\^i!rCl-M^NcxN&&I$H<2۶Q~d[C'i83c}_3(Ԯ2dGwGjT(^ uvKyxvG'2baL,,C'h:q*+h5JcQjJbpQg qqD9 2d16K7yc ’~^E(JxNΎtB &)x\d@@a)!}Shl;̊=?n!3p4>ÇYvYYH`vORO$93%bTK0mt G8\Q%*bWԫcTpIrJÆ]WH̞6n;)L-O`aaڏoak<|<%ӭ9ɊwP8k 2It!K" ! I%3/]ΪčM/MV<I#ƥ"@`ҁ>;nv]C_#& +@?aDX~ѭ u*0uJ?DU}$*`j%[N[J!S<,1 +wP$xd[ iљ5e4hkP}d'D9z^ I!:}gN⎊cwwSCJ`3d &sÏq{I*"D]b=YpNgӱ ./IrݸlXoqpݓpu;JaV, $2&AǙ5{aE#,|?YLyGPgV/Dvod\T= SL[8Gk_͒ Qvy'u we|z^_e5)GyͭڍӡQ grc3xAb#hAMռGnu$xpYf:u _7L1⻿ɥQDx폫WEܾ!̙4?zɕ´[\#i%˪|ed$a$7\sV^f Ee0Oj3 -!&3ӗ :~$7J7ln/C-LO𥑓p@HaܻϨ"`P-P֐˴Qv8es%'ތqg!^%J So>y—$=/dg<̲'c^F^/CRKw͛I7)J&_`{Ԩ\6W~]D""{*_CxWaiyH. 8] X؉0,5ϽD7Ǒd ϑ |' ᝴nz5 %ØALMjY~or2&8v'ۈg< <op?JOIq# .bY?fLϏBhMuhdLzxkO em𑃘:4o4 tG?OUO oq1 pbz$/o9g.͏E+5Dyc1pofl6'oۓH_)lOnl6f~>r؝TlkWkwfl6eilW=fl6f]C۽~6r]Ka`*6fl6ە$"/f/i]]4zzz.K6fl6ҜN'NʌAlWr]ll6fln]pfl6v@6ÎFl6flH!ieɐyal6flHYe7.lO3Ytj'%O&vj]|.OJ>qf\{3ػzUn݂5:&$˙}Î}'h Btj>ǒ:KEX9X`D> A0(?HXd,ڪ9r 㧓%}jټdE]aY4@;#+!I}쨉c4>xOSf~ϘHZ>`lxe;HJGnK fNs]pWWm^SIԱCطv-ZB8b0e9qaצT+ằٸ{z ֪TvZDd i'[ٴ$V$i#9:90zٶn q٣)?8G֯`u+S)ʎGobߦMoǕ4k&5yk`[Ș>hf}9x3$wurDL2ccvNABv7.]š L)C9j[)k8=adva*s ^~~8zϷ/>g-nq#0g^,|3m%m8%y.mwIvn?Lv.>CT| ?{,`g=vkֿ0}AmT> Ųm\\x'=*NmM'ь+%.Kqq1vNK@ /=+f:C,V^*bgY‹{-G U#!x5սW8?',KjXVȢJ72zcSac"eEChط {NQ}K֨"9Xd#m\g )D߁UA=,4mcjbs I70却ҕy}f[Q^-"Q;M[(o ^t7CC rwrq:W<#WM8Xk;^cc]:6Upu(čB[gUmۙ`Y6/}k"sǗdNf{7C=i۹%ճl*kEU(ø4fKaf\&ILn:ѩH}gջ%ۨ[$O3صq?Ǐ: Opm۲>NnX{iX$^!mܩ̙7b>~?ZXbӹeⵜh#j^7Iد/}dc}#&"Nv4,.e&FU0AzWB̝ʽ}?%#KDtȼ8w/Jayjhm5BZvaķcB&m5ܑ[92KƠTjE)KX8|dv@ KݧY,D7'1&ˁP[R4y_h1-/RȋK}Sda19 JB,3)L(#_b/ru&>j"^\omӃ\_l5޼Zⅻнtt6I2yP}1w} iAY7ӽ9^Q?h;o=>e?楓>bNBxk$^ŪUhKcxMd^km[O10i=rqÉlthDK?ĿչwcIKE,gnf]Wg<@Dd!ڒdzgaZǩ=+ܐΗ~%;Tj!+rx,6 w$51+7oͭ5/UiYdad(,4O|o8zU^\+,2?玌f?\UTԄ ,%/&W/e[1N << x*qrJxމ㶰brKuI[ë/cy'z9god-y_'S+幧g]?2̻K 4 Nq5Oz'K /є-z5kv`ysTȿzMs,z Yۿ/Qb>Z8]7V^?Bhr`Rܗㆱ?/6XR: 
{$Ϗװ`%ɓn$j/%k&_==kxkM{M#peыK8YYK~Lx+m̛wR-X}ӂ4F oC&M2B4`IsrQ+eYݵus `M_aDj*VAOmSI !BBP$DI1FiK=NnNQǛࢷ"@v?nD{32['^-8%D!έ"H q ș@n+ŝ7 oY{~؊ǜ&7)e{?c/0:p=uX~8c8$CKGH+Yr)?2ecW΍!ղlN;2{%|cou6LIpX e25q?@36R rDd0iRvgyLFQIo];ZpYpȞ\} NAn i70Ops38U IA4JAz {Eᓹ6?X[S̕o[`iTőìIXa= D!BeEdY!@gL2ssQAauh{v9M,=k~YMg˖rXq nju/~ǒ%IM ,.@D*wb6Z9grTI'j#|rϮ:̡I>q.+QFT 3eM']`5b&15/%n~+װJl/&LJ䅵uf9H!DIgln9.*ELxI"!AMћh'h".)(EEw!4^1 T@ N *bXYL$|C6{[qe zR6w:Ά#\Q1ګi1z*8$9%aRī[$fOO'N00Ƿ0rI >C ֜dŻH(˃ϵ$%AEė.gJF&ٗD&d}nReI@0i@HȈ׮ʯLA{hŰx",?MVc :imFm>Dt05㒃ܭp'eoΐ )i/ -wP$xd[ iLĨ)F[{#;!c2mH щs".wTïG"dU4PD'%^0{~KR }m4κ'tz=vyI*~eQTPd@t]n? pdDȝ4y#}$ID@A@ddYFMv,dFrۯq50pgp$=` brֱNes/1n_LőD'[8r:HЄ}?aaSE$9(! @- !xΑN`/UMDEpw᥿%:y@^\HdIAVe,grQ])$n+`Sw!CImSfMggF?f63Q^)Iow7ٷʌB,KǴtgdѺ 4zôMɲ`ˤ^ oPSdK`| ANfS=K?dAl iy!g|8r%¶C,߂m裵 oks\&-L>'H$f='~#]1r<8?3u$0oT\7QpEe.~?l'nZB 1M ϵÍk6`$Ԉ<_L`VlW_?(mC(y_WH+9o tk .L 2.f"69;J5SXMF"M=΃_E:Q~Dۉ-^BĎI~r=vIO S˚Ws#ah|' ᝴(/b=bX<LbEgķwɬHpFA&9~v{b#cW;G`}X[˲tˬl˲#==JS P%2„0B-FX7T,"X&aMCd\eaa!"PL=1-)EB(*` dy ͔p8TD,C'i2 !4Sš`Za *ltDEE@ X"`8P%Ѵ:T$Lpn *">PF`?ya 3ҍdDL IU AT%M U5a""`aaBc|7a Wd$@s"CA|eMQP0 Uе0@ȲHKE3KSc|(hZxDPI il C 2$\. 
k:etB:x~/eaW5 CQ\ei ,SG Ȋ(k(N(a*2z(Dx䠨 (`ameU.,2/)HkhasP$LC#lp=LUBn 9p/BsF%{俏e H:pX@= Ǹ 槜5pDYH}XX[Rp2 ܳ0Ђa$+[՛tP BQ@,Łdj2NU@BMq^+<}ZMkf.C"BkP$6>_ e6!5dz=7@K 裦 ON6Xh xGkKII엻^ sbYUw'd8}=[Tpjk7Hͷ263}e!S ߷'r- ͿRr y~^n~ޓKY|خnl6f~>L.P%mlT2P\YɎͻ ĄIݳrGSæ]T7gL}ACZ@w T4r #9fBήBD cǓP}-[O!+~$FOMOաM*kDc X9k'jy./&AL p.54^kLebQ>a!o5tRgc(ZAFn.P;ͽII89s~SM #5Q[^N_D"i.:h3$eueLB] j!,8=qd&!<}򒚑t+ ټ\Ι79Ӊk HiiDFyIv~*sm+fl6f ˬ}|`'v.csi/ӮedG̓N향,?ڈqK%^eld \K yq|mW̚fl6f;'n}V=6fl6bj-{b+a_K6fl6b 6wgQ݇w6h4Z;6Y-kpOsj_mOC4}KuMjܰ)a1$$43 (%t3{w:0 x:L:@ÀkdPwZ=hc<180d wV:;;LJdB²P{.gAwN| C2!I`6ס胛À8@7i@ΪCt`p @otK2 VdrdCsxlWAm~Vp=VL===c?`zH$-{#ܟ}Y{^YT}?aH&zƮ_{nj{5q,2fiJ'Ui̙| ~!o?RZ2D'E6o*Z}⑹u3 L~ misuWՙH4MY-64eLӒu{ֶkZNё 1ߛ۶eǿkjDj<~Wu3uؖm^/^Qg-۶گeɖΨô}f?cٙ33>Wߍu;ٞ9=jh1U1BGwjըּJ3 :g5O]W@~84q;:j2 ֯%wam~]6./j~IBG?@sUnoP峗iT[:|IzGi3}V VᴶmX_6ƔH*J^;J;jDJ=v] ۸Q.ȟիM{;/վ.>5wkVڱmj2]a=5Na[|WziuZ.ʦk͚yJکM힨eccչ yGj|jPsM^Ŋ}v).yFj5Z8]'?Q@pXj*ڊjM/ƩzEZjT*lФVΝZ>ʫX̔hη7i PөC4[R8OϾ=,lы**jfCڹySiH.ݦy]$CnO>۫ȴ~t+by3`sZ|=(+UvliDU)/V,WLW:,"峴n^# m;˴zwmEpZ~h|YZ?JQVLW^Mp@BKW)o?Mg|^P?)yˍzg/*5 +0t&٨#u:sVJlxQ_WxR^KK~BȖmѤXߕtPNnk+zCͱ{.LUxAD=ΆW@ss=_WE /W^UYYpOC `ͤ/RM].鳾A[_l8ݭqKJQt.4t.6, 䩬$VtZ@S`p9zxn< Ёa]dP0{'ڀW Ðm: ضMc@nnR|DBnS? 
z0T2T,eY}݉u3ۘǣݯyz_=gYڔ><5۶Nޮd2J:0a(;;[@ƀ >888888888888888888888888888888$ IENDB`rally-0.9.1/doc/source/images/Report-Trends-Overview.png0000664000567000056710000006071013073417716024356 0ustar jenkinsjenkins00000000000000PNG  IHDRt8xbKGD pHYs  tIME 4b IDATxw|TU׽wLzOH# !{`uk[]׵{G]{]ETPA@!4L{$uU '(df9ys9i&B!B!U@!B!$ !B!@(B!BB!B~ +Q)$^(tJ 㖓YZB!hɯs(o29\H% lS !B ֕P\cw% vDQܞ0&-GmS@D!Bh!uE`htDB!0(&H}XO !BvMDH(B!]BL0MG;gB!D{fK"N˾/ B!q D: B!h44tD;$]FB!D; u?򘪩8_2 M譤tz䤎$]FB!D;-5#k W<|EU+U-vnCrog؋%֣o3N}T~hM40P!Bvn!,m'QvŌJ´,@jhAs,Eæ)`4 ]71Uê*I(d`0z0u(حjkz ,EE1M|fmQ0t_PGYpuԖZ]i@0MTK[3llزUL˂EiQC ME &% !BqBZ!{UnFr0scoo a Ob^U#9]_cuBL"'w0u{T>$WѳK<銗MƢ :*=zeqÐ8*Fͥ'eqzM}UúiBN4D#ex~E-0;+pFRr; l{v5: "0**yݬm)։i?˹CI)WɣY1m9+9%vN]#W̗ޛ{`[W5pC9˦;gpޤ=$=PB!h~+(h;oEH wsLNr fQW^-[ (p:6DJr8K<5L#@xj --t)' ğ{ERQ\?2G/&Miia< SϝE= >/PLMA1DAf0*{TZG02T 6+1 <}^:c(L 'ǷE:avbX #pאKГRyRfv[bu3pbT ] քC@B!GE T`1FKk-kwqҽ߱*'ho#7q[՚3܉q d;vӤKtbkK̩iIbt;pV&.&\4@tSkYŤL"}5QI PHܾպ>јU|WȆ5I$:_X[ĐH:`&hbG(eՂ'K 6mƚ&>*G^ӏL!h*xٿ3ss6}jK՗nbGv|Iڗ>1mpC_pNzg}#'xrx߸dLax\F5NO"x[ȰxNLo*!h 'eCǟeG3|S[Vg)k1"Bߌ(Yxkq-򉱩M%|܃L{9ph|<ZGVTl7֦ͼKZw /k i>f>k]0vRCxh6d/ haĄS7eSxWR|l*⃧2NGT>kvrG a- {}Sm!{7viaVƣw?7; =-ᬸ ;ĝٕ(:NO`Of~- ߥ+[at;Y,z{+]5nC>Rw++x*v %m˫B?#;A BߔE@a\ė֑ȩxl8]׫v߸d`&FH/O>狁={B2,e 1&\Mtũ"'xl656|: Y~e\4; zso>;"[+X9΁qەtX wq[ظi?q`7r6 =Ġy|IbjO2bxvsj۷lr qNK5︝)n4B4d{=\0P]q\q?K2cE4p KJV<#U~tp"qɏw!v/>boOWEm` 6;qjݱ/ɸkBdB!MI GI_@n|8f6ZY0ehbm Na],@Ւ{Rkp%iaF"=7᎖@9by$!f"5uA0L 4 'Ǥ7>~枷:~g׾6XfuSpҥ Tx^B;>$kfq4~Zy=)iIxUPRQKk`Oޜ? 
%P5K#}htN&fQc{nbSUCs[Kf#g?񉊗"{tOpj!"k[WMM@/.NM{dTBEZhۚrktB!mIC*4cT/akJT~k- C>MeI6pQn %up;`^kfOVO;2t`o:ēJj\-v 6jqdݮ &XqJMg,\c '=DStʪk$h{~nme5iB}i%R#Igb.g\fv[ Vg'ww Pݴ` ngЁb5+3XOCU &sXx|'jȉٽsGsɈrSph2B!$ ѾY]X%/Ri1nZ|Dk8cBL9ΏzibwIN<8j&2} [ȜI Kqg O&'(i6~un;f nE#_Gl6Ph%F@JΣ:<:am`S&MESm-jA)-K>3=;( j d.3 d'9PbZq%la|Lh.e4.þB!B!ڡ#pIKǹC[G֗l=XOir$-yx6m\SbtHH憓{[pܪAKP?c jFdm J| ib&<4~PA /{vQQe5f9MdBؔtt2sC95eTD&AITW#yNȾ/BߜC(Q&eaBj?d+Xܰ/uTmasi谇CceZ@z5o>/Ԃ\w rղh?A7KwGطse QuMqLMm~|3^/=' d)q]~6.yĻ]ȧS>e] BND5kپc.sjye[dS`gmKy(')]:ܺ wCL,ws$&3iQ !BGBHq 5ޘƎiBfMYO=8鴮KzO%!SgLjU;jl<;l:I@Sާ³o5o17P{1/!쩌u؉(eXʘ9s!I?lay=IѼ7yzZrIYLw/+GvQT̜U![` #ߣ7lv|&@K?%e8F<뙻-HRlG-a4 ןхB;.BRvVQC53ɲBP;0K8Rޙq gp]o0Cs?Hr )GZȴKtׯB!84MLӔR킢(hj}#BlJ6Z"?-4rðK g$7]FN*IJzQ?hWH@vNmaʀK'.w&n!9DG= (Vu3Y6*kQ]$vɸY͈Ko#\]&GD=gx `8_{z/~t񑽹'X8+V(#H؅Gx|#N2tB;E+AJzvM ø,5 05ɹ=9a"4 `lkĶGwGl6?I?!BNu`G_uu5^MӤDqHDDG D!Bh!z|>nhw à)DB!qB}9!BUB!h4MEfGB!BvB!BH B!B!P!B!B!B!B!B,YTl㗚&NP!B!8&7{` P/  B!8 Ď=#RB+bB_~:IKB!BP|_%kLkǦ5μLӨ/΍,9='m!<n,- Hjjī:I >82ÂX#HMBfF &ŒyXTOxF*]TJXS"=+'HB!BH %T>܁afwqN$[iG oS-93L]e"!LApS5WLFR)Ͽ`jMv*㟧F;Qtlda#3%&9iiD4/|xhitP]_!B!8WBzgChج6B\nhEJ7Ubc:~բaZUCSEb׻> fʭuׅ8νT4,)y97ʽ&WrMYY5a rӸu'uu,-וa&A!B!_ Egc ꘘ0>#.eoTU4vN0lhBEM%,†iltEEYv"ca0Epz7rOQ !B!Pqv]x}:b\I 6ێYJL<tF0avf7??h3Zi{i=A!9753s*e'B!BO(&ncCc!}/mv_wNMFtiSvjk}8˃iq/l#>$c}@!B! 
1I\wy8~"#5/ i!c =pN7MCTr|wk"ҡ`v;D` QQ80zJHMe"0<0\ad'k0sK5'6 B!B7~uuu,_4 V^%pݒF3ԋ'KJ??TjyA5o.Sc8Ւ윪_."G=Lu T+$k:^΃"oU3Ǚ5c8;h}Mi)j_v̫l*}ävG뛏- #&"Afjx(=Md|ueR:qGN*;ST@P'\wQ~ic/1 Ly.Ϋwp-DjWZE;q:aS0MDFh!}:qĆUPRՈiqB5Hy>8A/QVIIEX %%4qǒ&!QG@ \y3VF* .$EF|8u%6Hԋi -ħnAz_%8@PYVB <&(4WKNE(U 6gfKm{^SuFzXٓFjJu>- 8j(gDLRJ*0:$9V&* ,]?qnV.* Ax0ؠ)@xl2qY#DIqAʪJ*A`)+%ło?oWD %ImY ~4Gp֯®\3d"#-@ʒbjcHc1K"6qL.*>x;]w~e9F-ç+xH\"Xjk"%%`3HII9!ʪJ*-ۡNzb4*iq*Eu")1jJjlIpԭlw[bw[EI "1K)1<6K 7̲0In?E$e Աg_l~*JJ[5|e*5TQ[S3U{$QS:-W-E5Ir)-))`⊌')._rpG -x(.ޏ7Dbԫ8}5SqYƚ՜wE kdz78'U4TSSt)Rj U>9M'^(H,0(Mx2pK|Q}!vY@kȠQdipdeqnEY^@Nϱb:?R+3޽g䁳ӽ{?wdEwX^TW}gUۺ|j пK|-7Ͱ7wcͬ1?MӟgHr!8t̀EE/'뼇 W=V GVv}2ԯ{];1l0 rpl{([3vrY gȨa=翯 J7ϖ}4:|nwC`c׳ټAWMdy48oNb$,̅<`BG+Fn^x-QVnëv}u!u5ec=6t.W8hH'R=8NJ?Q8ֶE|0~{S8c\-3~aD䝹KDmwȭē_< fAYiUn~ΚmnkH+br4>vA`^xfx,c;E1t`X"4r,Bo!o\*@(ഇ>hg'\;z-Azu #a,ci4yrx#YPdUQjrq$ed`Xt! @9tr:|MTy#( WSYmdc 7˹C;*]{%N')YI$ڱL!mByo +z8dꠤ%,>C~"NTO}NwʒtEEC;좌{ wJPZolx`ML,)Idv%)=kS#yzJ5\rW ޙ_3gN pp&Jl  $efz˕ZʜCXjGr2RQ``԰cR*pO<^]5X je6xDKe!yx"˃[4T,qdœ=8z+J8eu / sVTẍ_{KaZ#93q!䫭̈́a9xҳx2684NHv83ś~Ɠ1|/c{s`pV9GAɓt i=OLd4 YӘ,YSq6T[4|̺y1.O-h _&I]cX~|QrpQa I$whh,Z[I5̙3/"hq,.۵I:oHCQ[4וA2|$_1f~{`rxgѪmIZ/{oo蓿K3z <;Մ'.┳o`z3u 0|ujC-u>L3D]ASWws.f:ƌ>gW&̆:?5q>k\1ɠ=⫩!XMMA_8Ӹ }~y">0 -#䥹ރq~sO<'39-X^B`5d&AferGmDs5=q'\ƨaGxH~co?19у>|p C"k cc-|0:4Zۓ/e 7u2':fQvS^'%=W__{]",;zZEy8yBq3 `q`UC`Rt.!q#R(BhwEA4Bq 5r+ae\B? l6).'B!83a֮>DdJ.f`P?ZCJGKDEEI B!=i5B!B B!B!$ !B!@(B!BB!B ("P!B. 
33EQ$ {4G!B>~*J 7, B!J=YqB!BSB o&4!0 Nӡ먄p8hf F])(J f(&͎FB!B-֗nd=g&,wFW|򍇋NaK%P=0FuE5}ͽrQ^XEdz2šHN<;ېD),F锁QB!_}c4GnjjmZ^Q%4nV!g7[+tTd= S`÷$SHٳ~  {'0d45+x Dj @K0vAGFd$Iҟ)~{{L"")8331&I3$lNו]nTkg]%:St$% 9-Ojj,CkAsq__🬽SUS+ 5L2%I:_tjծE9;ePv-dHwәHϪA͚I2;Yj4=ËZ^RI%G\: ^$6IZX5jGB=(FSlhK-z1ֆըE[/ @K\j &]&c#^~fjۜױD7ǦR-KVSĸ1b"YHхxdJtBؠDT:3WHkRl$I:]:-atb20ty***"!!9OQT;N-xKvwn IDAT]HL$I$B et9fq'7bnA$I$I9C(Idg%I$I$htO$I$I.2(.[p$I$IJff&|݃ty:}4B?m{pOݞ$I$I$'z˃BI\eJ$I$I%!Ic2$I$I QP$I$I$I$I$I$I %I$I$I$J$I$I$I2 $$_E!I$I!@$]=^ =g륋zNVչ'nԷ eSs2a'8*؆gT^zS|c#_umcqL;>xU?( `%ta93VtVpZfL9T_y|ꁥ<ܷ7zW>;qѫSU$bLJxgZ@nGN7UO3oOS.xiO>DcQ1zgO$`R}_p_^yGl9)k,]2 dgd^NZ]& %I;$w͌_S^G(*5PD lVZ=3{kb i~}`[1a{3G$)W9SA}&.d/e_ɽcw o՛ xqŁ >ؘs2`tO-'PF<?"˙LJn$¸arx/E 0q+xōgM:b6θ7l~ڴ t{~ K3ubi'.$DO^͖*#͙t{"2LeБd<3^x'ՅuLf 2#UDƎ#_x_jngkyrSI,n "W:'&?ͥ}idFI ؙD3fQۣ#Kbk9ۇmJHL3ނ%SzZVټ]ؾ6WMeؓs(ob/)HɬaUyf^&>[LȑI2Fس-_O$6գlnc8}uab:޾kټ뻶&ozvd1닝hC9J6vlH@&O[)}-ӱ`T_%&5RƞضdSUCBJ̛4‘RӜ:koa*u^_Ə@Sc9o}[Xq{G:wV>?wd?dSR_FDn&ݾcÛREVK,OKVhێE(7ʾh0==LXknj@r8r5B,9-pM;YL4[{޹o: 8b2ȓWQ)Q}5?!.}Ԛ`t"Wy)#c;3pzpr\6;*DfT6֤y7{Ɉʄ8`m֙5Lzs9Kڠ^ Ock .؆S!:vJک[rFw=F]*ևPL "8gL1$tQ!A'> 3&,3J7,(;:*̼ŻӬ\0~l~<oO`-|xߞ 6{;;w"/dq"/Vq4|QbV?˼)cyed@s7ꯘ;u,Yɲł3q>pS9z;=v/ͬt9o:og7K7zwGqlXt*9_cܨy} UJZ*ɄVSAfͤfXq٪n1X,ż:%&\A~{'/ϲV 8Yp̪@x K|oʔ9rd3Snj/D*uI1@ wxm̕^+kepPfbcc zh=h2s`V+#Z*3ֈZF!rx',}>PR#jy]*8ul/U6%Π!l??q> &?I]htf,htpwyo"Kf0neK%L}wݓ 'sWup!s8nh|$ 0u{LvP߼G,yz4С;P`QCE.{&Os[EqXG|nxbgVA0azIu(7 ‚޽RׇW ih19,Zwgnw4_Tt И1yiw"Kf_R\Q3#ǦxR5/4՗Z9+K:}!Aڏ^"0:3mcURv-WɨC,[57h_$|1HкJVޏF`ŝ5)n};W0q {& 0Gmpe)$ڃ֝mVۆWXu@f/LeD8]\Lau1XP4N|`箯زs+>oN=~ WW|j+O?T= F-{Gg-2H`?ȑIn7#"=?r&J6\ bD#Tt-(wd6M7ѭןy?>!"||fnA}33w g+hP6'\!x$z~=?Үe"^U7)?[3AM#Y=%.{A J5i>y4Z=yX/D>wRx8Q;_hڶڙZ5{e\^Z7'"N9 ؿv6 7[qѧw\Jfܟ7絅Hl؂t-X. Wp[8r&[PX74wyǹ 6xNg/f8zsXV?[(@՟s]U&67XԨ>6TL1V(ZRS31 U$hX͢vgoz&z܈ j% &:렫(usL,iCcA&RɕSHKOM*iɀT+%.$*54S# Tn7Ւtx Τ%+[ {>|#.-"dwg44耓pj{}{`LgC?|Jb6XuF@UA( wԔ;  r˚E*]8Ơ ='K( DЦwaPyh?{.ZT"":L v2ek~|⑞3|\\MDzJh1IOwaׂi!RVZMٖ)t{yt0o?v{e ot]zv%؃ċ|Pw臓tǍVnĝ^>\>S)H]]dQOZF kAZ:ٶ+קISR'^ago ) uTdqÝ\! 
B1qUd%&ҸM!'?XOhƍw"Q9qvl]+&ZW5&)-Z&Hۆ:/0jiҁA1k9(;JHW{hHBVM ފbV {7!7v/EhHAN6')>״y((TӚ*ӺI*YmIzFC'DTEAѩްzF<0|0MmaY#eSÖPNُKhsFS@6l8`Q^hASj4 Ǐ3NُQ\ݵߚ!o U+[*(Rq'zQQyR]FtIĮ {DOBٔ8p(R*V?UQ_x643 05+JNa,Y8 ղ9)1NOsĥJ612I5)p f- ^V\ b|i. |~wp[|nV tj5#'maĬy<6ql8m]cI5Wqxdj!X@iXZx=DV*\+ACOJrvW cHU[MPk%!!{pRf^+Jz+.b{1~[a/&g=ON^Wg&6TS8ӑ^)ܓ@GHɨƉ%Cyntdv㉽[Ȫu5)(T1èt"!aV=qEgD@Fa/A5oP@j4(64FId>ڞC}T8m0@5,#E5֨FbGF"}lI:#:%z\N[օҠBH(ED* $7;u>c7OŀUX07j%4!)EEhPД:uvugAh4(J8(ЕM6>Y!J sx|yȼaԢw^Kѱ yGL:̑ 0?Z>0qu :y˕b؍K?xx剝/$# G/' ^.o],|zo̕<xak^Ft͉L fNX%Z{&N0aqskGgf-xD1zZB]0H%(s ͷĮxqGKvur*,,P/W.EqqpE(RUJހO!DHJ""P>F1 o0"ԐWxA!!oU!DDxJ\" !DP,qݾa".]؝.v{E B! B WWB5%B-|aU^%|!qfѿzĜ7y+E~>G!T/ub]}>)qw "^U!*>"ϺKn_xܾqi/vgF*^GU!HPx< Wj/J,O>}EM {B s,K E?Rt))H8 \K (fIXxJJEX R'HHK޲_Ts/rvE.%ei"]^(#tYյ wYyXYa(q~YVհnw _0,ܮT؋Ჴw;6Dv_w|a(Ϫj.;J?$K xm#HX8\Q~% Bi7CgI$2/h-}ys" 6ЮK_TH}F\r_GZ4"БPfCzS t$&XP5ĘhC-ARc.>1$֡U41қL:~[iqXcZEZ6 Rm\0) ԅb{t1وGo6%WqI;C`XBMQmx=_}Z '9ņ 8MR#(HL+'|+Jqͷcxlh2]ҥl*z27y^1h!ղ:;RH0k-%F5.&uȠQS46v/QhSl٬0@VVd&_e20c)6ʦ,6+~nBM036`Ϲ ;`j){|f_o&B̬[&gͲpm{azqYaBVNwGhh~^d,kOν{{gn0T j3*۝<Vm@W^# =|WQbe 3K? eoZ,5g՝;cl\>V0YYe~J>ZI%pPFb.j%NZ"TbaI|^zS, h5\'~P&lV9ή5:Ul%IVB`a9zv[*]>-tb20ξg+2jϏ&[wcE~L3r}^.} -T4,ڷ\"aAdĞ?}zUW!+4kߊ<2STTDBB:%@!+?Uwq]Ϳ97Q6S$I$I$t䍗nWD+¸]nB/-Z=)^tϹ70:b&UR :H=s1IDATF5Ί^#%BI$I$IBI$I$I$OH$I$I$ɀP$I$I$I$I$I$It"H"I$I$ItYZ/SF, !BȁI$I$ cbbx{{uEF#ZWVׯ,e֭B!B!D4Ms!B!&B!B1H.B!BOB!B!}FF_nEimv3yIlwR]鷺Iuz'NN띞T;=i(6lB!B!>2]!B!$@B!B! ЅB!B9@t!B!bϬ !B!s-QFc5\4>7cCCLB7`%!% CcAD&R z"MWmCS-PLP)X~zŎɋ&Łv)W<^ hodWSҸA DoL(_V\3E2LYgcZ) z2lVB!B1WZΕA$A4 "$a?K*c2.k[I4PZQ˰WH|J;?u>+Wfʥo㛅dDp6_>;g;q5Ql~+||g||O:;kd%ěBzx6CCÀ#m_7Obdo/UlmG]2/І"",:?ri+c~Qy%|kϰ0d=hD<njwJ&sD1|oͣOM}KgGAŐCoH?emN M1HY~vgot*Nzҩ~'bቅST.= | lKĪm;Ry̾C>foq:$'Џf99/+q>L<*lg{Лhp 9ihyi:4Li`DC#{?w+FyF>x.ꎿ/rpνǫULPDd)޴CHP մt G"<<!}m y<Ї~I FU Ԝ9KX)md{nndB!B94;hlex FbV.!:PYߍG?]-xbI* j'*&ozqWִS# L}}d6>D3U:1Ls Xx/"R#k9dEw/-Þ鲆|=nJF[Bܣo.[~⥀n&'qz݌3Q4MM&i2SQ6 !39RĢ5)5 pZz2NsIJ/wWԵ ~Ћ  NaXNFs|;t>:#Iv"Hsy<~m]4םti#. 
vZй|\\}1Yl{_F[G+e9r<2ûB!S6pu6D[yĶ1DRDkqc8.O3vFxy)8,]XsO4;VRNpA'16WssVnĢǣ3UM@1Un?&.CЇ'^ih.O\BgZN-TzfVWq>:zj頵BjŖhM _|ii[ٱ0p_#4 - \*bC癢B-.X+{vV`m<(?ѳT.Gߗ&k5fy TKKظa-_֭G'y!B!]$q`jRwpcɚGƾvаAf3gOe^zYHvR& γ͌nlwM,Dm5:8TM0ߎS]Hi~:*hH_E!F=*1x3;҃׾ȴ 5ScDOI=,j֟=Ks./;ȢdN֘qBH6<_bYaI^Gv_}#v6+ pfw+  [IcR%;oF;90ObBo ǠBK Weźa#/D;?`N`woo|M7MwB!6g@SgwM1VGmb<%+XCiס`0b4EU70]jⰋcg5H/},菉u]2En2δ9 FbZ -mȅ6x[qc]׿$7˩ٿM\x,x/+[ަͭ'u[Bݏv3ti.d& 99Q-R*"3?{=LӀǿmvg@IL0ASZ6|?Y>C!bR͛o]}#_R#:!_^=ہ '.ec\˜K 㩳_m[?R ߿ixdk '9^T !B!EdX`tĒqUDbWmgk!61p3bjl1H= d'wOTZ:q8ٜGs7id݊V̏alw SQt՟b&h ˲ 9ZI P$-dYe")޺ 5=5a4:HHa+ >TמHDt5:$u^6(޾:+(?׋OtV߷kYNWV83W薈tVm8HDS4U=N(X¬8ZM5\%)X"(Zptk-W)W0voW2H[IJ ,\,G|g35 جB!B1) .FmeGQ;@`W!B!+&,vѭvcyI.d}eQIn'NN띞T;=;wzwR]鷺Iu󟝆2˿%شG7&FOTB!B!bB!B!fqB!B!GKt!B!b}B!B!TU-B!B|Ҥ]!B!d B!B1H.B!B !B!sB!B! B!B1yO B!B!OѤ ]!B!wЅB!B9@t!B!b]!B!$@B!B! ЅB!B9@t!B!b]!B!$@B!B! ЅB!B9@t!B!b]!B!$@B!B! ЅB!B9@t!B!b]ӴZVd}%B!B;f4 EQ4 ۍD۠( & ݎN'ZB!)MFت299BIP!m4P(b!22R'!B!`7݂q\Fz=0 (yzn>" !B!>!7\( PRP(BEQ.D K;܉R X3~/rQq+r⿆jAHs!>:ϥf(!F'd$|?͹>r6w5grAF'B*(:f;$i5UmƚiswjFC319>Tue vRN y&h=KDZa ngz8y9B2,4Y[ xbfOKs"sY8֤iN:qUkx,)$EY.oV*jq5ne?͙ x%K^T}#45\C̏Utwu30y%~ܹd۵o2Pl3Oem~}4M#k[ddMP~P7Zˆ#%-k6PRϧvssT7(JiVKyuɉ HE0 D^O۾KJmv"cyH+@zNna'KiI.Ƚ;I3cm}p_kx )yoEq*vyAgXuV@iR^~S[0fyUe~y#%v1V͉\bpd:$cZ2"-7PeD XIMB|n-B|imgKIeL,xxx## ABZ.Qft7C 2EKkyt88+I!~2r>R[ho6US/zG4}>XM(}-+gb3P<6ogk cFĢPs֚RGF1|?ةе# Nb&=7ZEa`qĐbp]L*=kxZ(;^J烇i!zkxW#ɈPo\]+pWa:|5_ͮS'L%jqfiF#'-Bqiwqhq,H""4u䕣*%-ŒNh*c~-4a7Ci5> 4 3B(^UO-  ߤ%+Ȍ(iA;9}1~on2d\^zMz8 hb Pk]4\}ң_L]4 a#h9l۱e& F{BviDRfgtBEX1Ć%K?Cۊ&.IPPNȄ3 '))H銺ha[;WZTu<KX4Saщddox1ZYLNh@FUβē28{9ƻHaPoqL%=V.7 P'Gc<. 
P_n,\N9Ģ'ȶ =MST4;gErFKֲeu>FnZ*X#jdC]3-gvΡgi3px3 u32&0ĥfPt#;7Οu4SBuqeXal!&4 Q1qW^ C?VICK}U Bd|X[H.P'8ey1v=*[q Xcqϼn^?>D58z4MtOO Eƒ1oˊW0 Kk⍁rW8ngcd'|Hp=!/m5.A't;`&<:"6DͻyپJJ;zqz(:#qd/eK"HS^z3U h-ħxy!781Wé)no w@EoOQ^SuWRy7ىLJ`165]qP}8e5t 1 GrF Wdqa&=oړxh'}'">!N7Oci{ Ma !9#kY$p}Q*r~]~4`JVR6nYKn̕yZ8~5ut :O_wa1[(^Sxc֓Rv}A i !=ۢ ym=l4_';!A*J)?SOkW?c=Ibz=`M!&C4gI\YZ9pC:?0@X sx*WΟL8"1W9LӀ5MC1?a)$?,d.ckȍ 1jXl&,aOU58I082瑓wqƩڿ#ױjXvQЇA|Pq28ptdl/ dB|~ et%gx ^>ECc*2ǩ3t3/گ򊂢iW\^17.O]w~8ELO]H)d:~ ]4=~w1RuлGhNFY/-`;L%>k?zPgSg)mqbFpbVb ˧$m{c\&wm^DV{sUQ,ZΎ 2 iy&sfLy{('+4~p$vr%YQ=9RIMDRNgH/]%Db /df<NǕ'6#ʬcFZ0UdMoJqdr}4=v+姫u%5[ֱ; +'/5?Q&۟BͼF7}/ v2Wn`Uc;{ζs)&懡(yY;[W)y!e3Wqjk;ك/-ɲW@jY&Vt99ӥ˄kGO»צa TI*kΨ/?*]mt'44O?-f_F0uh!?s¾J%%nr"f=LjnFCy,!ƤpD{9u>}>Hi_ϊO+I;N'kO"fFٰvꗼFd= !ȡlA6/a1f wősz,K"sq*j{PXդ5@Ɇռ`G=Ē”0.@Ci)}jYEYNr˨QF!'1 :45]B囿VΟa5Yekkry  ૼyJkw>PV^ӯg?s%,Y{sX+(/ js BѶgxbs#g%_pn559u $m|/n_BLB~/WK#xfc UmyxA/L[ +YvSea"b=:QHH| VQ}WN d˽yif#:-oJUEߢ8tJ$:m&aBFгoơJ:ޣ,+h jl A{; XMB6odg͑&9IjR}}'gj4RQQCEѱq.T7C|":'CVV4&NTr gڜ86?U!|A?#CCu3k`Ё2¡cgi' 23;5`I}.BM`9~ mv =w0ElđQz0] Vm宒GرMXvbq[)pr]-5:j/\ғWgq 6f3fk1E٧PlkzܩS.!( Bj0LO|R66bQ^ ai2[ Nglv(Ófw$>VKnj…ELN4 5@ÅZ:&BX<c1‰Y^}HL SQBM vL&#&#e;>xボ=XDŽv$XD|B\ǹd']aiX:D4M#wTKyU;S& шhhllF=>r::N-6to^$~3_ч/1WE(*͙̳Fˑ]"iSNK$Q&sR1&8L0IMa$a]#n>ɟ}BV㒈vwpi~;FH `']]4:t:tVWg˩\rlW٪;'_ZN8 D9v7IDr03y>Ub7}~3q$]%Ocv!Fl<$iɤ =g/l}{VN"]dn0X$H\ \3YB Ts,ygZ8DFI:纙kT.O#9s)/('wN]?f-d"A4&8?B$24Yq6dv0=BWG==̱0?,>t}|8}ͣ$ 2ϭx(P <΍;VΟL'*I l ^p}c!1?@7 Ζ[ XƧ&7GinRb1 ̏31;C,XddrP0u'8;HOO'=-Ɨ_]p_{'Uio279H(V&/kH*dg1@d)fsh[pLJ p,DV3% ˞ (Hp( Ɩm֪/+nܲUCth0oIb45  MNx8K$T2XtΦiگ3yy`2} 25g7xQ#jfdȲd'Nf*րv ;1_[bxP2-Ixsks1ZNӟL_"@c X)_̩ܺs*9n׻Gxi,Nw@rz ey=Ϯ`h4Oqe#e`m:å>=|mtm[wUO!GC^_4gIMd9v><3 I2ޠա`]0 ],c(lzvA9@j ~NrL ՛@$"|jn<ȚXaĘxN\sdl22Qd MwnR;.TIF$IǯHeؖExn~.M4]Rp;h2,ڶ2s1¹JD %zi"Kï&qWPv\N`9ZX dHs3$$7; (H:9Nd $M~V9M:nI R{LDXHژ K5UedMkgUe(CwX#vxQDD<9ixgfWr[3׼Ҷm!mp.Lo9 IDATClbEf#4;؛1o.2| .Jxr)*%{?lʼn!{_$ 104SS.F1m nY×UvIfb[-\| ×!FgXXoUkqn>m 77=E4{Jp,fFPB̥o?u?`ObM{a ˃k蟜L>~S.ҳ 
(,,0?*JoI''Ǎ<]9:'X$M$=dz.[sPNLJlCǓ,KY;&aYBs1=5<1D WI[mH(X`-Fbg@€yYϤ<nӸf /EH/J[7tz2Ahl3,f$6!l[ؑIZ>;Z>qVx} &G [vlSO۩G\Ⱥ}ϱ-tp\=&;"((*"4ṀÄb60g?K>{{1 ZtpNQ߸׵Ϙk,J^ɘ槨t-5<2D`3orC*7C-˨,|c6HdI]Oz}{=Mi`imn>;L_Ǐm?Mu.eiѶMz#m{>TaBҲ7BFA>%N2>NJbC8sfO,U4 eqJr+ٶxUB;̚l-iw7~p =:"c B$! K5}6 N]24 ա`N%4 MYS:;L"Q131{gVP*H+Z,2 %2ҊX .^,^}uj3:Vl#Iʞ(Uf+n3X/Ý{&Yst,!,o 'U. 顢:Z^=:VXǭ_{6d2\ȋ2UNՏ$'K+bRlK)*ad岯`qr< 2.cܺNDvR_CUL0 |D8s8 Of0{o}xjŻ&g 'UJV$K$ GZiWR o3qCuD gװ/|cӫ }VԒwvƟAP5Ad4l(3-uڳM,3 X[x`>~w*UxЭCW͑w(m&bj벃`nU;sSxx^i[1+uԔ t(2-4- Jū DRxi IlBC3>@ܴ'Lrk8T>΅> KW ib6M*.Iv"5YF_IHzHF dֳA6ؠx2ޱ']i%&[f,5m@qȖ =ʅPR nxhv{==L t21ЙJ蕙OQA~laj'Z3[Q$yT)&'=Hu+IV.- )aTׄ#m/3KުxZM#I:OH,:' z1۷o!XqӼb9ܶm#GJk =$!;<\>TFIģst[%YD#QFc A*rj3Q1+:q ?7;yiܱzgn14ɶE"acǣ#!BL%m|*ֽ]=QnqLI4N7dǦT-ˋid} 7+2h ?HU.NX [TϤ4;ȭ>"J5ͤD峡~ΞOп sz;3@:z|VȖC*Wo%fpOIx֮"pGw3y>r[lсh7,TQwJ :Gd2…y­Z6.2vÇt%H+{ZWwp8q9\yc{m[̶~O?ƌ?3gmp+(iVspo-23C /}ǟ!Y\b n"-)*$ݫ~j:SW{sٵ? **yPhىz[iHs382=Hq: {4$&{nNXȒvÆmb&˥OxFlwU=2Uw.7;XX/{rcK@Hޠ$DMjXF6(+fRCh+O%&2E3u 4qZ$M޸c2%$CAM#'v?~uBRd԰/mY -2;Ofabq<A7"*@ɶO -{xr&[3$ԕ %=-KquܧTBeTU~5AdUA%l0 VߺϏ]٠)Ԡ7F,$IF'5e%1۶0ӓS,&Wz:XDǘ'bۨ(NV'I>HW}_{e^V(S#mԵMʝbj@'$N^7d235<ӡ85u<{|L?u 2va;;K֖.*nux~(m 97xDe۾xe{Mxݸ5%uPuҼ.\xn}sL+ɔXH ۍ %lk9,Kƣ]6O]VѸ#=?B`'[YMakֲ[$?\ wO]y%Ef9L"I偨$ GRo*1l̥VEU(^*uԣc ΃pk$'9}>=wuK(&Ecj+=I**zCʼnߛZxr?铷l_-3&2,3H7=LI^Q)u B1Bt]i',d\c&9(V?O.>2GrS*{OP$aMNߘR\l;HxɓG N33;N\Z[_d'w4ͷGYF ]K F4Jɪ7˟c!HrkS94'`3:3$&*vd I?˿w/MfWl S$ȩEdt@ HV7)-i@i&v"s1&"_$ ,Hw8=tU5//5Kr9^UZu=[]dǻ'~mu<Oʖ=R>,y<«`)%A $(S#\jd,ߴm3: v`Cl* γ\1"MAvGQa>?h$|(g엗 Ѓ3}ho $?FBu)^SG ~Q5{s/{͡}xujOxSo\cȾm#!:if8swP-H -l/},l2ػ9}U2ʲfk'K;%YF{2,FhNwOGȻtO-C;vRYCz]1ahkDj'oi'\%k⭦Q:/$91S|v %GNſCBœ+tOusVLIi5+ȤUu{S76?ː9tRb|e%uۨ*!-GV yw[|>AMa6>3Ĺ9$G/5 $3ܾxRWZC]Q.ulD=l?hW+{ꢲv'UT#ѧEn]j Ӌ[d3_bh*p?6djvmc*~Ͻ[K8!H_ ;ň$^Pe'$h-* pѬWBt5SSE6L q4go@lVϠvͽ\a߳T vr[ - ;uk÷y,׏f'^d~vlᳶ1μnS_Y!3>lGuٲu/J&KߞblME~cz]f`<槂8݅.+$c׹%". 
|H;>Tq`zg86{ ٵ=K\9.xp]G~Ѝd'& TlabV< CSLƀ@.Aĵ"jar>+=sWɱJn \a"~]Xzn\̩[iT6>Ŏ<#׏ Oh+\:>9LFYzGq4=>5nYaF&m-3_ _Sxݽs[#\;5u4]I„Qx;ݚyo.W_e&n p7m#ČL3<4\IΣډ:6`P),+ǘO)/[s/y#w` 'R|&":YUxh_2e OEM$MY fo(ӯDŝ{Y-@ݡy17Oo)$;= 3Uɩ 8NBϏ?3\96MT66Z:XN:|FxL/ev|y;ob4i`qZ(gf't'czaԦѶQVMrfXx:o~)  V(ɝlV2r}4&Y7@ b8;,;ߦgǸEC&gldE'xqOvQ)^ xfz/Ozcfd8:N9?{.nH`A-/l$Ye?4Sha8%mr8+z]&jg/4PLJZL?2\c62~MM|q~' g8{>dr6ﱜpSЗ/qX-xp*@5YT_˷hk49xAא% :*._B>[%?GihH%\^,'|}n3>3@̰Iwz{Tqe;z1×MMc#{V~YIV(˯v.rmFl57j}=Rl5}y\27iFflj޻ʢ5/cr)ʣubdp +@M㫼SΥKin뢭eq),e{e3Bp.\-t&=ώ+w(…/.r!3ff{˾=)\2HFI `W~`U.]J@-}HJ̾QCAwC[y>.qR3H,Ӹeu ۶Xfrr S+qW+do[ܶm"\b< <.xAzz(& NȊfI ݁D[5϶ 8DJE @הa+ 0AYfƶ,D˲% {HVP(-/$D0l{i/i$$ ˶$YRɣTe0dLFI$-4׫P'_swXgAeӉC_Ʊm[$ D9] n8Ĵ\ѥzv\QXfh(D’PnN ER߇X$5ƥB,%aHȊ©mtkc&xxeTNq$X,U/wS5N'ڪ^FX,NBuxp^bqf%YF8:b vH6f,b@V4\ϪK7%^/g_4IJ,TCN7.:k`?ǴHƣFj|=xò:yLI}t:qڊaٶmaÄÅ7W|ݝ2"Dcė5um[$c!1Ywu9RY;fH$F°P4^aXfx<U\~|ُn Gb$ QKkV,NyHI}v]3m__:5{ AXaS:@8  :qLn6cjDb% 5l9&G!xs*9||?#Tfs2z$Gl(M$&a3jĄ.m1]be]f>b>*L\ y16ف#ͣ^J/F]o~l"|D6f"Gn_b6*>䓂 % 7H'  ¦rE    <b |\tAAA`$#SAAA1]AAAD    |tA/AAA[$nGD7hAA@̋l`kD\Bvl-«<Ծ=e88ӹy۶XZ8 'sHZmQML;HAAA=!Z;7{>\|tO;C+wی~7w><:14" w1c   WH*ۘ *(Xa$Kؖaة2$a*ftlj'M\ $a&#x+x>ե۱53?i`6$( ,aǂ   WjS ZWƜwNq~ACI 15@ñ 2?>#Si?|giLrգfeGva|jK/n'KH~1lPo@)6gW~0?;x`9MU    <.I\tR4Ԑ+Mq}Hjy@3*^qP={)uL`zn6L4k[ d᧬ Cƛ[I}y:u[]FKȤ[Y?1KKOYl SII=8@AAAA5 j#2`[^+2SCTow"b` T&qu.'{P<󂰆XOAAxYWCIN=? Ǚ#n`YOw"#Im'vM$wd9%d-[]!""@2"!   Udq2&i> ᘁ+XCcy @nZg?;M5Y i)rV\B_ӧkOlTӸ/gGxD:~Q_mc6i~+F8H,Ȳ,AAF?; k^qq:9:. $Ivn!utܺL |~jj+.LQط|Y\C|}Xl쳸csx{H`uٮIFnY)/WISQVDVr *g5vY &;)`g3"?2iH"Vxp8"HAAH&>0\8 /CŶm{w`nnd2GӴ띑P0XfRA방9ߖ$A*c~Ua~??>$*^۶1   qJ%|~c#7eYhuWJQ$I  _9I+Ic.UQc9_yAA2k lAA6 /AAA!Y#XGw$oqnAA..2$aY֓'AVXeY|{$h(AA>N:h4A <4hwGAAѦtEQHOOG40DI |+H.AAA{eY `P_6JAo A,ffC\ 1TB?]xdgrlF̴^\[?FIqQ=;zta&&xb ߾Io8mUnJ/R}l&:9 1L$Yg8p,e1Jb* n#E IʽE. 
H,&Z4GZ~:ό[ b'~ۀle>>I El?{fUm[,ܾ_b|~ӆlsQ?Y$bs2c#j6#ueD"|zmض4"~r  $zAX#q(9  L1q2Vc$lhM"icڀ,+eJ`NMF&aȊ=XXMҴ1-d MiazXFԯ7ntF=/麌m&h%orjw5U- #İ,@AuT$<=Hpv"Y&d˲AQ4]qPXlSq睱m3$iZ؀&{K,pm ƴllZ:~zʨئENa 3s!ztRcCSQ&e.oـ"-5G' J=2+hؘEܒph2MA+&tAr6L*z?&4ɷŮLzqɲZ}0Eҫ `r(-(㷪t֩!V+QG *iڇ 3 L\}SDA8hP$I8k/o֣ u6ђ^$_杦~'֢<&;toiz/H>RN2!(.OƲY dt^YWㅂWl Ӂu*\hXf9%Pznr+r)6/.fKX寏$g4:Zˋz{揟 $QkA&S>Pɷ\$IB$A Pj\6M!_N2CפE|A'+Ƣ;V*K|"h!Q)OfDl)pi,MA\z5$="QH'M"= eLq"`,d+Caga !?UNrq6~dȦh9?"9sY ZϜb?NΑpf[8oJXjn5IܬQ\.nWX\q6!%^7=IxCg^\*(*h*(gp3CDDZ#a/h'eyxvM]X\2:`q49$I$INd.IM>7LޘŒ+YȔAsY[sc Z6:0MlymT5>`4*T`{s*}qj+ m#o'׮NinYot%ScE!jlIFF&hTYΧb"T F?1J榰 :\TPǮ%3zS^ 9$xMSX/?sZ;|z5IlNMN q㪮U@UtE6,L cF9Ҩ ̌?WtG86˟SbS؛dƍ.K$I,1I YfKquQsz2Nrp@"KW&jڢ~7U OCU 0OV933(B!9a\wR{]`β\^fs -&n)NN \ʣs} %O`N&S4>'ᣬj%۶ogxFX3mv`#/~hq0ӌ1+;{n7\:ƕ=;x7St`"qرk iadM,+UC0Wq0; /m*k T"uy|{kfk.$&ME$Y@歁Ǘ$I$Izd $I7qg\dG.jP7ε`- ʂ,͇Gz9΁]d>l=~ÇϲyaŸ@gL:eH \S 1{sV\8(]~~P-$tTQZ~C׍e< 8{:S 8:ѱ)\&03ͺ8rc2]dhNS3f#e(hcz>ItYhΫUu/ͣ-%I$Iti$}( ~";hˊyiqܫU{*n es.@]FmxS)/ɧާNfH,*tNffuksk(b,_!*Af<Ή$)4A#Cc[.T)ߍ)]d&Lj$bphHF'x=64R\nZIԃmJѕv&;'i={ˣ&u'̣4@l,Mْ/ i=w y6Uq' v;6.#aаi.߉GSHS,wow0A,q\k&{D.x)u ǎǮL2ؘ"Okt Vkq})j?e>"ЧmHsuR,M]Ndr!I$Itkw+B:&n$!i^Zvv$Ńk3J$I$]~ !0 4l("sIEA4E!9%KmUW$I$I5w_ e`.I(e+I$I$I>dBIzde$I$I$Ip]%I~9I$I$a.$I$I$ItI$I$I$I e4M3䇽5H7)sL-ҩ4@( n/I$I$I$} *@s3el,F,;Iar}owm3g}m5[J0Zrk*eWI$I$I${@:$g㣮"tJp}NQvv+v6kZ!_@@.=;I/JNq'}.?Hn5r?Oe彖彖Ny*L]w>]fܚKj'^Ǎ7Hsuab@ IDAT(X/pCˊ)ȎƾnzB~Rys;b1b;+SHaG$I$ %rt椮!؊(`Yem?(/Чs524hǻ4liN11 Mp'ʅ-,NqbG'JyRMώ.#k) '̫ ny[Kxl&퇚=L8qζOa<I{4Nx'9=#o&>l(g2ܱ8-l<q6J =Di4d+[}=V\˅n`u_'n7CN*I$I)8\bx&FW]~/cAƭ26zá\RtɱqN_miv5Q[f;{I:24l#ac#R> ,B. 1ړB$e rp}uE4\>&F‡ ˒W#߭cӡ*r9M(~B"{…S)7o=^Cc+q(SU4qݑG0gf^+,迻<ٝX$I$I$x]ܯ,\VF.ڋx1#Leic).Or| 3裬,Cy4SR]FS4>vI[ QP&v~ݤ#&e R(!|Vv;K_G6E[AMyu>}=dKsZϙ.be"'u`f%=** ;p犜I\$I$I$c50KR뽛,U%?3v}=4hb!J^ZMM@C<W^ Ԝ2^\a~MVUq z&-*x*`Ϲ!2~Vo\ 4qW"pˊK5Pg+MzNk)69; ) 7J[K$I$I$R]4 !H$D"\F&PtlǬ g2,2miZ`zb#T !0 6]CW@`&C`YEq`YYšYNTl* a5ӪMSP2ttZs²5}]. 5AӰi**K9]99$]$I$I Я2K$I$I$]$I$I$I{.DI}$O$I$I$c :D}smONߟH$I$I$I뻸wm>mdtI$I$I>$q2S~? 
!p5k"}r*/I$Iҧ鞺[WYMљcJ/EQ)Qf7#je[e^m>K3NbXN,$RQg qszWn,ıfߟf^2ǧЦ q.5d"|>x\["u.2h'{=39B׀Ƽ|l|֓ǹ80A=Tώ _$]pt'e0v('$@5v"6<^AzǏў*gz$'39N˙툂'-!伷+FbqTtkM{}-rpj%K7<"1˩e_Om6ԱeUQK}m.Wpemd}ג$I$}r>Im*E?Z%KӉ.w1IQY>)֣b"NdlZQU.A~%I$I=.&n;fJ÷;iѱ* lĿhJQjx1?`gKn~V-r X6oۣ$f7=vEZqD^*n1^y|w~p?)Lf>;_H#åKY%b8ąZVήz)8k=\tSVc]!gyr'ձ{E , sb_\$I~߶]i=m`">I1ljG'nZE0 btL'+Vh sr8cV,.rSRQLNF6,< SQНnlv&IijSoXs[UTMzgzD[8.vsyD5#SQ)uu,Sg)8q >}v"`jaM2dSi^k3Lƒt.6u ,BwK#/`*k0aTMRe9:FU=IuY mzi̯9i-~>{J1.GIVh*4d0T h)+-$'h`*i\SV¡O/hDGhg8Gyn 10!mX0vǽOc? XOuia6%- ⓣ 2KESP\BA&x u,35MG(y + `Swqw1cf\vB:=NrDJ-ܐhz2P\IC! G2^@rSWY!=A*A+2ΌϦ7mTxmWJ{-+pZ!7ǁs CYKL6MdSe`bjc ,"qD4ɛ{h3]Ȑ "k06bR?'4'}qOjzlZɋ&St%FQ<^H j@X益stW1:0a0v VdZzxc93FVqPnl^' g2Ǎe G~-tN6l UWyvv˝FH6wݲ xt0{cMU㴍&qs16@1|vPPYmf׮}9utns h۾:=N`#oS*^.("щtc4v00'c 4* (XL\RK6P#F9qatv^~p}bPR\>or DhxBT5,cڵ4TaWd#4gZf*A"TTNʭx%P+sXy_G{XA%{Ey>d'/O^PWe~6!HMJ*0EEm*j#, T[8siX"J*wgé$eʒJ #myoa%m~VUTfKfދ R)zx[ YJ}<7"qrRlʳUb9p UE7If#GQ٠q%op3g.mZŗcy78mR^Z<?zpAފXTm ^;ɇ'Z/o\LQ<_V֣whyԢrf9|o cy;\>k;=,\,Ho)WF\%$I$Iq*ӤEi \}6XD4CR(~* J3R߾6CdM@"%sfH A8 ,֩vZmHI[sŦm-E e*ie:6,`$j\E5HXaT9Z6hj3/LhI3ZulLF" cSP K5^0b:WӃ_F A<< }D0LzV'u<Е۪8wҚJfRJ~t!fl6 oFR$ӳV PrX0׆<;(`ci(V'NSӉl]-3ā"BCnJ;Z=|O67Tf ;scSx V0_05>zmIJ!6]@(C:y*2fq8DDA69&b^HeUR6<,17.b + pϬri,y c kYdSˈjJUWEw4mehPf. < ,l.?^}Ƈmt tur-&83ȣp~QҼ9yn$n1+)czN1QR* ΥTƳZ ~-@hs⠢q=~ufr'֓suڻq)\(@*:#%YLoGn%{E*rbВXg'P6qb3sJ(N_ k_\=ıxZj{6-r՝^/$3Q&ӂR*^$I=|t6&G>z|sj)Au`P]GD&JJW9ph` eKtl M$IãC+tB?;լ_HW!p~g5󩭩 LaQ1ymRr&\sӇ!_^_yx\iFf,:7ղ}4A:i@ܲ*< [9ONbhnLL鏃9ulRZUE^Y6uwܡ덲S9|!%̩8nwS})pEQ(h|TnGTiq%@9} ί*&Bd3ć(~kмeP6wK!;t#S tZ־>.v(pUT,b]~l9cC E!ϢQ:`h0n`3{!uݏ 1 Z$I;uOL}%y`'O~ˎ .l0z1CqQAu?U):Oeϑ\c"i;CA9Ar^LsZRoyYT 7=q==q ]G>ʲ&@CFM}UǮU0c +SG>A,Ɍ%[u 9<d?:ƹK @(P^` H42x5eeȤhlgW !*b i0]z]:ΉcyrI AU^Oo0)dUbt+kT11 M}E. 
K` \uM32=m.Ednjah*8Ĝ{rlX"C͡>nۊtMaٰv\vX k9)V9&I$I=vqoI>7h^47nc?}MN&Ϯc$oљpS1U$'}Ύ+5rXC 'nXc w| M?`8~]CtL+K:;OQ4IYefLT욂 xojHr<\ϏϯyFz說aӁlepiVDvEI++ȂGrӣKadHf߶`YhXd,cZ*|vnې$I$龹Ibt=duu3=txK_6ji;SxPnW9>Ӷ8]%O kwP:5rH>g+d8|^gWQSDebdVa}O@IBr#U@[?`g$Zl!0⣌O6*vGtEL0h3G};B &# g>3P2w!-/I$Iҽƒ-OɿB;bK |u'?9g:G%Hgd2)Scz{NX}+/* eEEuzph* &'tn/ &+*#J2L9=O95#cZ6v} [ٰ{o7{ΓS^5EhZD@i! ?lǺ%Ko0`ZI|ӝC- ͦ" BASu6 U~bL8) Q` ˾M0g㫪ι{fCVE7nVuuwj9Zm7C({C=MAHnr~s9eY&Xĺu[ъ)C",h"(3KK@mk4q̐V{m["ItDKY5U*)wOGcX>KWd^{X?{>WI-ߖeai,_G%>/>đf݌T4|x+dM15 R[ ,E 8T Mkk`~/FŪ\Na7܏油.x,c{8:|^B!DuP l| , //^~fhO/fB%ׇڔ&%Tpx;[>睩 eHa:h f_C)905bDϯ8;e刕0g,SԌJFcb>1O9~C-w$gWgrƋ<!=q̞G4őÇRыcPh.K*V0cG#,"[3oΗT|xlaRiO!c?l,z0'$52P8I'Ů+kv  a}@qR2b3и9_V3dBqzXw8kb5(9F=f&z003p9>)SQk Ǎ; LSWu#؋bxQh[)xJշ X2 M~|l-g?Ob)*`!}FQ'Cy^TCG bzf}6Kz4ݟt81B794 ! M&B!`W B!B!:4 !B!Y@t!B!" H.B!BdIЅB!B, B!B$AB!B!$B!B!D]!B! !B!Y@t!B!" H.B!BdIЅB!B, B!B$AB!B!$B!B!D]!B! !B!Y@t!B!" H.B!BdIЅB!B, B!B$AB!B!$B!B!D]!B! !B!Y@t!B!" H.?u='89,X*B}H$شiN .Z;Pشi INNvuф+%4uQ)4$ӯGOWM ݴPScS.B.BtgL<3gRQQA:;SO=aÆaـ]sEQjן}yV,j&O̬YDuJJJ89S0`n, (B9t+:A7G (1e훇 tm9y'>#iY6MPۜ&YluP_kuߋg.BtW> o(b16l@ee%}, 3̢bUE7ŚXV&g;obڹ(D|ӓWыdԩsgr 'r3gWޯu_8=ӧOO>}s19sذa$ a VxN{5^{5^->[\Kɨ)r߲Z&%rQ>Wd2AmlB)+ylz66 3sO鬪:Jy^y_E$[R( ocho[Ӝf(a LHM9.)b?mإe^[oH%ݮ~Ho{!2g'H;0X|9SL0Z=1.kxt~}cT|1s:3UeQ[cZ&͋yD͎iKծ9˨'|a_YgI'ĥ^%\–-[mԡ}_O?eΜ9L0k38O>+?k2uTbu Ru\{\{|'˻tfw|2lZ9˶}Sq9s$b|}ϴU4hjsaZ,jyҼgTΡ9,V'w|thsSa̯==Œj4M 5=k'eYw>fSjEA oPj{T/>.^>^>ֱ]";b/XɓI۵a̝;N;PTd+ M_cߓ3esS|G2,z;_^A$n㵌/ ڝԴJ[bEj.Qu1.+NYpNjT Ig8 !Wl"$ l[I, 5u-g5L[c+bu ^޻狭)ב77/|9qOrѫx<=p8Kxc^~)o5kW^ä>y'~s4G\5 ZwG\[]/|toпJ'+-|{XA~bi=3mFqf ">-f_2#H8kՅg^rpoo=:4:,_ Rɦ$9N)x :ot#C.GqqW1~'ŗ9x=*Ÿ%dۺ/ͬ#Ě865j}~V69K–r[>?iV!Q_[>}7E7CwИTpҵ3޵Ej|&/HRpOI,=;C/[.;A}=Ԥ?xVWWiaW>\;M2Tf-?kl^Fs~󋣸c /}}\<:}zUzTM ~6*hOKi'11kUD/=Ǭ?I+vL[cɼETڟL"#VlfIWj^xXIgG/KRb)_nO 7X;g+&+v9sl8FL K| #?ӮSru5}@i;^ ˗617 ] :/{Zצ&wTfnw 850#p9g m7{u*4DzzW2o(5lLtr耞8v9ԭ؊ϐ|%$woqqCp{V~_ĈїAe=\ًoQp@Π⯪gđG3b2S/-ټsSe7 `JE! 
d;>V'9cX 5-akǒ04k896''P+( }wmsI1Ga 4հjKG[ԛ} 9b~ N8Mќ7H?㝕Tԓ0Z_O\[hz9u< 7ye[m˱pk93ʆf/a>x[3o3J6Wmf;;Tʃj vqK7{ϳ4נffKBCI8(:7Ao/Fُ7>~ND1U]t\>=8Gb9 w{2]!7qT B`l9 >b"gv,kah?`̉L<ʋEW _9a]1>. <=ɻs6ۋ`/κVnؓ@lf&[w6mTM61ղ@` & 7e%ZoLނ\pՍ0,o0U6bԮYEC.ӵ|;|?8@q X!srFf/Ӝ[Ni Zʒg˼C9o=ӓ7o)r0} 6t-K7]HߑePQts pH?>_JӰ-1AoH5n#FUu.fo1<(8 ~zb QU6@C ;'x| dXy#On3, Æڍ{VB,0i$LBMMͶZ|{챙ͪޣbo lF\;ˆv}~.6#zC0G}CXӮK`y0R'vo=4r-+Ϗ?\\.9Xko_˾ūmdO4So`Y{-בjPsI\Xμ o?IIvggusW^A49<f_ /`0Bs_{K4o$ǒw||w78939Ӧ~[?^/6&~S~ ~Ssԇ bY5SU8Mȕ?GnHGyHJu.ӋWlbMMta>SAyppEqSr 9|WS61#Et8 <{)s Φ83yJNxt7~1GҖ1?[{8 >gI!`;{ۖlo;rO0$hkdE1witNǸ˰))rU=ܡ6O! ǥTo!\LSMͲ(s9:"ԷbNc;ð.KP(8d.=f|Wt?[/8#yǩ_L.  "*r\}9['x*#'u:+}Ā?#!hnrj.{_pEǜljǍn%z&+`_boaSFQ@ʊ82 w_w`e6۱j7 !`ѢE}lٲe[ ~&M\ߟ8Ld,L8@7-@([AGi'pqfw&7@N*H8aN| >[fc%"51TT\Abih Yl C(aW۝/Z{FNgfWMCUUN?#W[KOnL0amKlvTUfws4'nnP"&jajb7SXN/>SKxs(8: ݁#೓6KaFIlE .K q@Zqt3sI*w۹^\wrWK6I:qrsa䄂* z=N0I 6Uװ㵒D:ހTs3eaY]~va ,E{q(F"Iw;R6ı|xwdLQmd,Prr;,X /W%|x,TH?A"H$a*v< AT-,Eږn2Je]qtmnIx\vH#M7txKТu4$q(h3Ḗn|BDH\ń`a>N#E4&Xynpϣ,l,e4&͍7Db>\ B(@rFkbU;y̩Zu_1|NUk !Bq`t!Iױbw1dՔh2mwDl|c~V%Ic8묉7L~qjYTh7_%B !?6~zm4lXUʦ똪|v;" T& *>09wS)<֯1iJi&5U44LS݆eYD"CaD2D=,"NH$(YObqDt:$VD0Ms[t!ޱ vyINk>,ڸ0obyD&oaBt&1:=͚eC/.=cyLDJgcmUQWT`8 iwpK&]]nò,RTW[0MS.tIQ @Ӵ.J֓Xui|ĪZ?W2.D.F^te/ lw(;l?̼)9RŜ8x(V oHCiҵv;˲,0LHR)MsLòIdKcp.\vUYC'";$AB'c&|eT\Lrӗk~#kY7B'p-7G\{1x <sJCQtB3 4 6/,v0`SNucy.B>;NHq:4MW;ivu0$^ 0PEٟ7Kn !.|,H b)H")_30 l6[s! 9^}D"#Gkrv(Yu˵`і$Vi3Ql'd2Iaa~ !EQB]]onXM1hR%V1LPlςx4hv sx]GA:>^W0 %^iHDbeL&4`0j 8o>xʄ"X(hI]8MCT#6Ќ̘}. iƨF~IaЉ)w߬ͅ=@"";蒠 !ށ<Ǵ, "4ihָ#2-MuIj8q9TTB|{gX d8!A@i5hm% fNԅDFM,2koORNS"Y߅݁r~&H҂.d˄oNs\#߾a)d]uF^Aׁ)-B! 
-B홦E,eӈ$t)MqhRu.*DדaI88H.zD7Lb: 4AJ71syg0[uvst%.*DדaI88H.51,H&u4uaf`dW/3LXҠ">3Ek2>].{ހ +!AB9, M7 icZ%3V-Nc8$qg1Bd/9wJ cЅ"KiI,eI44bI hబzAAЁceWR!bH.`YI$ u" %˂F8Stu`I.BtqBz]G: L쉴nd_syWJ7,#ѤA0ICt;Xuī$V#]܅6ae֦aXYݍ+4f)usۺ\B%X ]!e( eanu44,_ޝkFÏe3ub, 0 56%Y_g8[R%{ھݩIͤ6ɚ-q6nU޾;U߷SYۻ}Gdccݩcs=wz_w҂.ZPμym]حF^Ć)Ed wӻO1Nxct)֛׸Gnf9 B,eM$v7cYuu1Z% z=*~'8l_0 c+3˲u].J`ik$ViJ(Y03_Z&ke}>g;]۾%AB%%O$wRV )zZ.fB*:h?m !\ρޱDTa466RPPEzDt DM]]w Ǫu֎ 3BK2mLRgOOZHþUGX{--/t!D֓wJ7,N}D#? 5ԅ&s־8c( )hi72uB7vw->ՙ$V%ˬ !r2XuF8(suej1;guXF:AR3&2͖d\o}aJ.ڢ 8(l:iw!D璖XvQo-gf/ $i$0$uixڐ\6H.cZPh=66LNl"7/ce aҭm-]icku/l#(.8e{A6XL3,j44N nQb?ڱ%oc̞:FNs{+O[}^tc[[ ;ڿQJ3;IfylcLt]4M\.v[&YpTNeIEKŦ:VGkbIxmMhd\$Nqn'zIdkS - Uſ]v͙C(6Mx˿9ZK:nne城nY -ۙM*t"tM"4 W]S8Mee^[ɞÎ?+J&yu9TlmMeWQPU˾}oi/z~, 7Q, 4$4&tiD0~ qp1B\6] e;UǘV0ͽJƺ ttaWv}M =Dz,4"ahW0!fXhF[_ߊI3[nv465+-߷NJhI6e*eYEZ72uӷOܖn>;FBtiAB!: ueˬ_vUAQ2ݦ*׫-]iio]n<#L֖Lgoe"nۗ<Bt? !^wBt}69ֶ|f dѷ&]@uhy 41M/.āBt!B%d~=4 LnBβ[V!ab "Bt'rڻYIƠ !e╨}0(+g`qU\9 qCڶ2Ad.\FuĠxQGՋl}x{2z8]Y5!B!Q !B- <N!pӸ9|x%Fw\eϙ& @(*UPEPbr-W+^h tPZ MR!=!f{zslHBž]f6}3w9ysܺXV>wǙ>4 pZ70Y-̠/>|;F~ ܺy,ds'R[>fMDreXum<_WLu~>3<9} ew{LOx]f*:yל=wЫsgN[=,NFݮU|i~q ̻zE_4XfXy@?xhM_g, NOtZYdm"H)m[UPaFE$~D iǣ.dfi\.*z4&2"-]J$Wtwc ף5ag{7wОvݲr=Cm[y+3QE , \D?+e%+zLH6iɌͶ$(̻ kv QVn!"""4."r,Z65ǩkK5I]-lkN0TQM>)+A伃`v/fD5;ۨmKњ^w7+4%mZRlm3)U׃dCHp🁉c:ws5e|VVgX[L.aPq~"٧?e%?@y:}8.u)^@k&zx{r<67њ ʬQD#,K'"mKH"q"bg0خGsfg{;ZhM;n2$.L*8& KDr)+\EDkwM1$]'YYJ}ܷ ʖ8c*^Z@ MsT{YDgZhBS?х@U#  {l׿OQǜ'%ɒO͘ⱜvLf tB˲05ũђ$G} lmN04)ɋ"""r)d7kWmN\}'?'Nuqclhtle vJV0yJ5gf ?Q9z*O?>Jٺβ, POZ{׶6!yQlLdxs{_c"җe՗(Q^)w9obU8~Kcl?3%Nzy^UZ]>eh^Ս1>ty:)5pϑ.\CS8㜹7grΰ6tc*{m{;gHdF `g{DMMLT"ZHNDcJ$Wh8I0urn2eٴs VohM1 -MyD^FlX0ec[@QkH;ʏum. 
jXㄱULj95=+1ˇ(eOYu[)n]X'178.U@0UBswdd\Uy.0KѽPU ShMtN_˘hs]%1((Hr1\ OY."IRc;^pÀCulOR=lZZNm imSգ9`-hjp F"^v}g [㬪i!&q8iޢ; !3mpEZI#i{4*쟲 Pa tyngtlk )ۥ-m ?uNqkE3eP@*S t:R5e}*:SVS]SE$ugڶ]EvëwW_?p<643ch9 (zze%=sv ]=c2t}X"q""3MM[I66csrˋ(2eSur*""}y{y-c ޞg xXuk}3wBhO;*z3liNPۖb@QC30Fвt*҇lIYu3:?0 [@ƶv#o9h"I<˲Bf{v4c"}~I5;خz]tlkIPov  D mldt)͐ L.aDY!Qt>@ 9!8vLGoIdhHgYQ.""}CJU^HeA@?ik\#帴mشlڵƌ3*E$;cVhLdhNf֜`XiӇ +N$c{՗꜖ϸ.m)TM U.Z[/m*EDr^YyPoIli3n@ڵ8PF$Wi/2w,26m)jRhM^ iG KT\ИȰhS8dpUEyC9}+r0}۫}~rwap]Cϸ- Եi8YmH_]DDZ ϬePq)ե ( iw^b0g6wv3^+iخc(E,pDzA+@pq]ҎG[!]D4M nݏ,y%*@QDYߎu(/`DY!CJy_\ɁF7Qi=9ewcZ6yq=]zm){ZR)u]<ϣ FA$D `Q  tt{a4D40Z"޷ucόۚyIۥ=eӘȐv;~)[Ez tuwʢzvSVxњd@A)J)Hyr֮|gmuŝ vu.թ)ٽ:v2Мݳ@]:c t܏1W޳S1qw>5uD^$HdWA:=(ˏ DBy ֮Y ѾXC,КPמ)<ҎK2Қɡq tYv=wN+f!wd?׶fy֧$}rxj^|CCN ȸliH) ,vc Lh4JI,BASh0@,q]Ww_`p; b 帻 dv1[ߞ5e]f۳]ott3=zvwt7_C>`WGCˮQEв;FოFz\8`PqEQ4LHx{~6d/z`. aKsmIҎum[ոH֨@վt np?F~#CjK|TuN>7_Ets㵣?|gOwo`؍_e\ҴnPVijn1ضM$鸆Gড়[rXEi~ʂh-ˋc /$ׁq1pmQ]q>jDkZR6] j9aiO@7{zw̜{{״%;:v}Mw=SYH0`Q(wJ]#'i0d Aa ۡ9fsS-ݟϘ>9ة@0,~q O09ӣaFC9t2yrZ9?{)=CZqXK=i}y\vAA1Yyc_߫ag[m]E( HyM y\پ ov6xvoh! "ca"Ps? X | IDATޞqv=RKqMK2sroA~W t! ?BؔB\,d2vZE 0[h)Dc~He1mR^ﮮ͡#'V{*)Zű0E4 ,AE~$Ա1d2$mO;4&:VRw4$[羨e]SVӛ9@b)vg: w|捵G3b!+@4#d<̮'):< 'XvߑHohMkt!/(̰SVR9ب@8D7qDC!zZ"%&?U0kP!_3Z-[Y$Cӹc2!+e_Z(6klTfwVp"ߕmfAYuOoN^-"=(/_˹pQ'pwf؇,\}F"¹xwWr򜏱xU\u!wGu ag^&%"r0tZH`"=qZ'iѮWe{.<QV>=*,FYuFSVsJfO+yixJDDDDDD$@*E$J%:fGyr tyRFYH_cJP.""""Y;H]DDDDB)Q^)ܢ]DrF SV""""} tyOYH_NEH]DDDDB)+A<D/1?e%?@Q%:fD"""""""9@K.*}SV" ty:)OY"٧?e%?9tN&E*` hH(ϱI38!# v:Mv!"#ɈL)+A /;N<#ι/^| CXio;泽as^%t,XVˎEҨAw(+KtOY."=ȣ_3=p?Ee[8K1_幇aS)·UX? @ΛO.'|>/kn[PøYG1r`>.M[vV2b "̘]ͺF0̀ sZwb5@/j{~sZ[su{rʪol4]DzPV6l_OSP%8rt \+@0C(/I\ʃ^P`L&w֗cOʪ{<öl7O۟`4(9θՏVШAw(+KtOY."=8si۱ZHp,&,'F:&ZT{%Ec- JҐxQucIYuAyGy7THk\46ŦKNgTy  *bxc"L~d<GY3(/:G:?ꚲάty@הU7"ңRNB3/ʏ1L"\@ِ*3oaS-GWgŔzݼQ^bSp p8;oyǙ{X[w6Ҹq+[V:p\L~foZK8OpCY:׺~S"""""P " Dq" xL`@(JZOՎ! 
q~Ϝ8’l>u]`'(0G0b0#|1`!иQWUxeY}Kz-+"҃bG1VK:&)(5]'H^$CfdDS-iO9)b*~ O84KIDDDDmp:TH2Y][>Cџ``ndaythzoTHβpyc\y4q n͋t2&|<<0Mtde({ʋiϬlvrѮ}M__?[;QMEpCȓOrΰBnen|OOo?A'XN~D\+Vb٫<ǫ3<=hPXXR z_|c误dU95/u#|ǙsYh:w,7>I<_.I ][G:5]Drƚ4?m>ݚfO㒫&P=d @`ht#WI,xm 'SԿ0y4Wegm۔lXXz~n9}3|iDTͣwy /d??2x{}1'}Yc+ (w )6Wx[|ZI[GQ * cjFxKypefu9mr왗Qw,7a39~H%eS[-5[Iz#( ӷ`Gv;iE:}WL8v^/J;*:c"͎u+`YmY1h_2]N3@c/G? \|i6Ԧ4* i*?O='k4@8k_~ N8v:l7(qIeXR6%#t'+fSX 'tI/^4A`A!1c O8"5."+i[÷W8_u HQq3ɢ!wי|ȿeɪL[3H}x ~˜~TG_*Hw.9e'? ~Y^?ys󿞥)o7~X5\{ٚg~矖\2dO;rn_3 N?.ϼݶqW>[Wroo `<FEDD$nܹy<ѳ6pbpmysC=n FՈIcëiɴ1nkN:Ug\v,|7 |JUWMy]<=j{1>e޶,|Av&o+\r°{VkJ8|v%7Sfq幣o^;'sbgK|e-1pQ\opBy bn}Lf׾{ [?uӹ_f_k]-oG/od @0mKiJʌѲq_7\w5YzbPA%C+ D (*.!/  96X*#WŘ2GZ*f)g%h #KҬ)mI2QQRL(zyU8@(B"JY$"aL!VOSg/d@pYMOYד0`P C!c4Chb!BRz6oklh5@PQEl>~Oʫ,zyϾ f ;1Oն`LdPapcäw<ǭw?Hb9 `- hJs4DDEDDD$ D!NSCQNY&u"m_ьgY-RMfQ8Sko6dBiI؀EAuC,_"782eٱvSC|Yxi 'Rh+f7<╛Y,8ۆ)^z |- ? y%v֯X;qx^n k-Խ<ÅǍg\C"[w_=Lea)5AylضWØRv1N6iSع Lod}[2_g"؉8fhB 90JWV`ò%,YC#gL`]lz^]v[^JM.'jmLG,eH33ˢ"%))ifiӦ,SЪUm6[Ν;p:L&l B$BMNNQ iq怀VVQQjׯ(t{<,(Ji&)`0NJm_fu޽իCϟ `0l6[{;jEi ([8iZas0 aaam*++ nWodvYhlקO2viZخZWUp8-4&EQbi`:Z~*22}yyaӹ!g$ BӲo߾UCВk6Xք~CQV[|h2Fy߾}?[n(.]<խ[F$}ժU;K/}_~wJJ-;/_>Z w߿N뀀.hrppp-[K.(J`0Xt}Dܻwڴi3oQYY0 I^pa_SۙҮ]*999WZuF1gϞ:t0BUU(+V<ȑ#W:tE 9r _zUIIII(n[񼼼AAAmz۶ms۷VQߴi_svEQ jŻW^=noBqj iٽ{mRE1'%%]Eoժ՟oܸqڂ |Ɓ>cZqcbbEQFc@HHHL>}gϞ۷o +D}X $$$O6m|>ZUUn0;[KJJJ.\xݯbVvq9;;됐HHHH-..x.kXZm=sk׮СUIIIcCjj=z޽{~w#*+++ ͒%Ks\_.]z+lQ!!!1(!!!)iii*bo-_kZ[+b IIItV] EQ]vdڅgrQQѮYfx≪~B'- BRVVkqqqv6m͍ /wѢEטhӹOUr˵~C-eƍ)++m۶wrlllN3w͏!]vnZ[VVVfy<{RRR/_|-8 B4 I@oӦUf9hwEEeZn}_lEEEqIIɎxEQLK@nw0'!!(Jpu |b{}[aa;vY F@eem]TLP%vZlٝݻwwwraaa|q*1!gJ` !N(ݽ{\iZU"4M3.\0uٲep.SUUU:7cccx<-4*N[nngnzݻƦk;[Zxq7N;޳gttΖ-[ȘiZyr%$$\?cɒ%wU>UVVVa4m&)_~DRR}sjJY4(|TK?ް;}c ! 
MpL4MQoVk`vv& l /`Jddky=piiioԩSGeeewS۸qv<55hѢKRRҍݻwzfsdjs nwVfff~zrs%::WrrĞ={ޭIQ̹QOVԿ <bTUխi/,,{vn,j5g5tzuWvHHH߹s1%%%ױbPU(&&Çڽ{WE*++ۍުT D!!BBS~A```ݻwEi[ly0^SYYyСC_GFFr8VZutttjddd眜ݻw(;++kzeeezIIᨨ ڶm;2***+֬YsV5!&&f~x؂EDDt^QVV9NodddWUUcsrr[,bcc{nڴҽ ȊȔ{cccx<߿et:;wvǘn ::N˖-_4cTTT[M|v`kٲad,>N)_&&&s\ݻwx rss7FFFjjXbb⥥;WXqH7yyy[  m1dll׮={׺uyyy[>S;!BqE Hi(!;mկ ChF 0[֔I&i ZUrEQl&)`0DRu?YSƤ(Jd2+R=e˙+Z+c_sX&)V<& !d2%(b^hla0"q֊h4 1ڮVsiy2 AAAfs"2P!Y&_/۵c|VnZjKO| B!C!Y;ܹƎT|>n߸qv)$DB!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!BSfl(V5I4U4wc#B4'!z„ Y^Bќ;!8۬Vk@```Tc"B47!};!DܘZn=e˖]VkH!hN$B4+%&&ݾ}֭۽Bќ t!Da0l (￿bwUN@c'B4"h.,ݻw566ڵkhVe˖gnwIZZ'%B!Bfs~͙4iv(xEcƌ9޿B!%8WM&St۶m4hqqq׭[ܶmۦBNsvvv ڵ$֩(.'B0w d4cVk1X-Z$Ɩ þ}oڴ驒{ln٭[)]tf^UUUT\\|l-w݇\.WvQQFM;&!M$ B4qF1,99yBJJɀwcXTU>>?edd9tj~PUբƎh3͎(&):..ī۷o 6<:iB!YҪUǏ/>|kE6vL✦l.bĉ>}|(J`c%B!΂oʴVkrc#sΝ>aO=D&3BqC&d2E^yUTT,^jǓ1fE-,,\iZh^YYYB!N:==aOtth^{!h:d%t! W6vI@hBZliOӴƎEtEQE hXB4 2 Mf.oX|Ҽ„B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!Dek [xB!BsX0p70X BO<`i&!B]4 tF6^hB!Bs4`ZoW@n |xhWM--W - iy @b4r,BsYuhA2= ')]icMEd 4LկfɟdjfDof9 ...:***c1a&)<88KDDDxbbb {<_aaa*|$;;n48###jԭC0D~s}z5SW$yFLQz ]e=4M{cw'f&b4QQQaFѤ(J`5 Ș-Z2 HEQ6lh߾}d2Ojs\2UU|x+**8UUUyw퀶gϞlv\~%j8Iݐ Tf0"l6[ĸEQ CTPPP\rrrRLLLtLLLd <847 ,CлK\^omI}y{`z-e-cF<@6}<]~Nr"I@h^"_SNZe˞6s} X ˎ?艍`s7ϢU^I"h4M?{_PPLPP!!ͫh4Vk+ 6CQQ,Kj UViݺuf U,#]/3$)b I0;/zu`# j:uAȠ]nۗzv=_4oqq&Whڵ+y+++sUUZފPZ=ӑupd0tєKvׇSx84C͐$ B&[PG thddԨG52Ɠ>nݺC=a4k$/SUz[hUt.(((rq:շp5UU82cP(8N޽{Ng.4W~Rt:=999iHUUiv +,Dd\kOz̛֬ok63U=P^^^޴iӞ8w5MUTTgCU լ`]^/|>בkn v B H"hR;C1th?o,+VѲeSޅilذ3#7~g„k &# ~#1sfK暁 udt=|V@+ii};v9|"aa+3ᄆk5V@!Y !Azqݣ1[4McM\q=DE3b [ 7LrP!:ɓѹswO1w:u#ou S?!&&?^ҥ?ה={)}&eBqH I8 /L?AUUz F'``Р^t<]I^SL`Ȑ 4w1gr>"fs1p`Oڧ}F>Zu 1g2nJ;PA)Qգ~B4/[ _Yہ= 87I"h^ ;&PtF#F<Sٻ 11h> /۷OSII9K IDAT夤ٖڙb**1!q=lڴ>x9ߙi&- B9ir}+J==A&-qD}*뎢W\q7pdQ߿ ڷOb,@vcZ-unm(SvCdv5 /L!66MvUр@&PTL JѧNrP?bUկ=}ㆣ/jX|ZhR$B4YUUO˘7o}_D>0 TU9Xnk̀Jv΢MTUe ڶMSoLL1??6m≌ h42iuSOv v!8 .C2 /GNE;+QZNGo舞Ll-&@ U!` }`()JG+s}"ѴI"hR>t=ҥ_ĴiIK72|<4vmDUӿVlo_EEe,]3g\qH&OFfA*+W~Lol@ u?!DgD_3M2 5,= =XSn@? 
zq==(@oXX_0 =)ҲܧqG1^c+p𧐐Kʉׯq%M$ B&AQ}/TTTF]4肚1]X?|26o%rWaZoALLo0__(Ju]}9:_~nKKiFIw~.LKKaVu֙eee'`$ B&[R[3eʄcTb_o9jȑ]Z+eǚőD1(D݊/rj֭/4hG~tlڴ+V}СժJH$ YSYi'&&zfg ߏҦM|G(0z2d 8D'^nRʶ< =8>#zSx fwv>3==}fHHH}ү_q ZcǎM+W|˖-3}>_)_ȨI!DłjSb3O"R`ޯ8c5z1ѻe6}H$pzҒ |>/zk09=GZ0<-(5***v,Y_|ǿyn>.炂M!+^rFu fBF/ v^gryc:Aoqj!" ;pzz$}܏>$5 =؂>>nz=z ?w(nx֭n۶Ӥ/{n_|G?v~zQɭ;!'99iXQ0{ M}й} vZ;#_#ׄ>`Ժo89zH z.u[*,ɇ>ˑna QuL6q+tJE1FDD\0dȐ3.]U^OqH,!SU^^C]t^{z+˖ĉRYi?z$ 2ꎯq|^ģzݤeuǤxT|/W)'5k֍SNaÆo꪿<㿦`0"$ B3i0aµOn|sw].wР p8<[gTBS65ZqqٳgOرcaÆӧCf9:I@gtGӢE$'%eelٲR4M^UQPPLiiE4Mpv{jyޚ;*:Wf#;;,/*99ܹ[Ss|UU1`@wVStS+*>}\n:thŕWq`>堪u{=UTTQPP\S`0523sN"gy1MVYY?ꫯ^>a„WΥW6vpB!6M :sӆ ;ؿ?Syҩ(:q㞠Ž n~|:u)u=̫ΨIJidgѧ-\}dF~G~vVFAu;31rcƼ.}K.ĸqOx%|M_;۝hSOapq/i}NFrOҿD<ؿٵk?=:o%ӧ/sՓ>!zǭ>i|**:dĈ5{t֎޽G {M`21@qN.{/~WFZ,룏>o&""_,o!`$0}دѧLMTߩo(_ Y~ ^g͚ذc֯_/yϾ}9Y3OmajX̬Y3k?g`tTUcʔӾ}}Ć 0y8|uvg:4MgKdd(}ɯRYiGWvOoO9?ГW_g vmKffSI|| )޽ruǛnGiiv8ŇiƓBv\^x͛;wuꤤK;F \ \rj"(ZR[)`ut'$)--H4jeX,f>[߹$&wѡC2v/3k_w`,s\.7.+7q@~kT5Sִm\qŅGŨ( 3ffI@c3-q+F\ F_9Dr= p199뷡iZnqqYiA5ݦ4Mc˖5M&##G^קQQQM7xի? 4Ɲwfܸ55%00&N{egÇ<ț̙jcʣ_~ɰaӣG&N<E!..@+UU\.f %uʖT`ZNO\%zZ!ԝ[h!1M۶ml޽K رcֹsg͚uwqq#]h\F5p1J~a 69:OO8\E'+x:EMx$\vYeѢTTTys:Kj8ǚR^׽{VDNNkz<ؿ(,,EU5;/ Dh>̧oΌVV.`߾\JK+ѣt.o0/tY|:ͦgq,[3vbZ  ͛OkݎCgO6z)#<g$(AO D_ `qV1s yB_~!?*BCmt֎^)Sh4⊋PUh;v =zon~u(CwD7hf DQ{N~\v]\nbb"xGj-P'3ee v?NPo0Fi>.'-曏0eʿ #(J۶ /sS1TVڹ^7\3'xR:tH&6V8,,-"jb1$$d2ҪUK^y~M?^_kdƒ%:_ͶT;t*]L@2*0/z+J~2܆Z!wݟKƌ &CWq'trJSgAo5t_XO֊۽}fģګɞH|C]6'g1x'p84h&\_2 EQi8q:Xfl:-3fTqlLFAhKxx֚Q^*^@<3YUU9x<ԔiTV: QԬiR^^b&<<&1z}WdAUU,(NTv"&&0^oo#/e۶ pyg0j#̞tЍ}l'"Гk <7Z!pFc`߾};vc?[k5qj@u ʵ-b w3[13*XeV}}OsˬX^%6Me$'qWlϬYu>",,1cMx^yerwܸ'?|;2vѧ@zB(۷fZV;第ԹD!Nje`LwC[6<=!8=9/ wu?iFna>p>[Mnm%O[LzQ3c) j` ?D|:[l&N8]v̝;˗OHq$++GȽ-ѻB. 
hbR͈-q#.VEwRBqRV5kyK.~xw…UUg$Ѕ85nA Bqb>#'d=x=KiB4*U2{JKK1b_m6[W_}u3 WBxnS+/~nUB4/O; }o N!@_3dɒ%OVUU3ـfΜy?ű: B͊>c6|4䣼;2ٹsiyTU#;;m~lۖiQ;vd|@{wS꨽I#ws'GoYLB/4!@駟^yw' k젚*I@8" }!_ =J&O~=1l\u}'>_1ĪR^~Úrnix>*k _~oqglC}n0cƷI/%Le}}BcE|us= 3 ]4'2d]Xk@ j^> }AC[#%h河_oeZ;D%q+ / ?Dǎt}$!G1s.gYmJK+xxXd_~+VO;v ^úuHO?߾Yv W_=K7p/dzJJ1$'ay;㮻Fo||^!֬=Iaa)UUƍ{|훼{֚4x)t5RRowrϱy:]{Ϧ~GLll$eeG[=M$'ѪUKz/^|w|UDFtKʫi!1L40.([a6F9sǢL[HM/BU5V `1Mсۿ[vo߮1B4_vO>:xԨQ f;t490'/ziIęСcXv&PtqqqєVryp8|<.\-[FQ\\^S6..@`UzȐ 2B\.7bΜvT:tHk׺399=L>hTTTK_a\QZ>ҝn{=:ҭ[ccZSGrrQE|M# B@ʪV{LUlKݰ?G_ܰYJ !oނ njd^^^ʕ+nDˌ+-oD:z$\vYlÎڷ}{&\~yߚmvmUUBCmX,fF#Sz]!vͦOnd u/plذfjcd[h:ŋ/bٲw7xj`Zp5rr j>)QYi'9j\@۶ xz>PE3qII݄*^:`BM[ts믟ҥK $ \ | |  ؓS喧ؾ=ׇc˖=L4cСCrMٳT,X=1 ܹz}Fvǔ)KB IDATFUcĈyq8\lܸ}rҥ ( Eƾ}Fwg`ҥ-qq|2TU%70ƚ5yEqmm,,,H+(-vl%&45jbI=I)_15֣X#MPIe6bb 첼 v0qq+s籿apy%%'o0 w)6@]F~~ z "<ΟFRX,BD?~$xN?j Ra!FBtJAٳ٬3ťPB17,M{r9>J%䅞=KG 0hP|Rp܍[KuuƌYAĉ4,]:NGϞ1z=vqw<֭{G&oF~zOcŊ91/d2 IJe٣qd:bc#6v>sb|b淈{ УG(鼽z᭷'޾R@B>p {c=)=:}r֬ B^1vbL/8-`LC<7}WÏ?11ܜT3Ac) uzG@'RGP^믧ϙ3g 7cdtx R!'p֨+QX :NGNN!**j0 驺.>pr{WI:xzThhP#+y[||\,(,,E.8ًQzE}D"#<=U׏]W׀+PጜBT~~E~~ `a!EQQ9 J! TzT\h:\\G\ KrXXHX\Z o@SYYW^^.`Y7kllŋ`],K-TIby?P?aӦ'vt`$u|@! =D( sm;~_uTeB(阼cy:/&@5oݛ)2EAII1/P(JѣGsy{̙3S`Q:2 T@p+*> ( m s G@ӷP"JG5XbZ79s'N9qG JGEpa:Hy1Rm-ئPnÃnu6 +H*B:@ފ^S,c߯n>eZ&|ƍy^?uU{PBX`^ ծ( %A ϱk^@v3@V]>}<p*@( ,0 ԘFQ(E <Fȿ,wu3d-0yB{p @<+@r"[,gA/ ' &xk}5/B+EHOO_{Ǐ_P("G JGw΁x=)IP(wb#Ƞ0@E9%H] !G 2`=n.Ȁ^ ؃x.x"\hxIm^"D3J@D{׼bF;kvHi[94mܸiV4i*@( @n\2E @JR(PRR!oe" A͝M hÚ߳ y?\KAW>2_i|B'"]P"./\ ykY/\{B<@ ȵXҳZܼK bc1nkExy ;i(Ơ`ӦMń&S )À$x-~r3'`ƶb< p /@29|u[ Dp^\ ejHs؏^n1u .S^^^^b,F{68Eۦ{ $U ףuh0|Ӏ bcPC*X46jaN9TWaޓBS$4%63 z$wCiA㛗WP R 6|)z W Y$H^G2HH(ם(yv$Ī@I1"8nر$%%lp' ݣGǔJʒq %u%/ZN.}ԩkkkӍm] R_W0̦Yu:Sqf 0`ۃl$N }Ѐx?$)yQ-6:[ xZz0% < D z R"$@LR$aǎ/^)I[ 2  N -dH"jgB1Ho Xt>ONo5t2?"TxkA/4 tx@XRj;xn^4' `x&8|s @O4= I-' 9.,S]]}?;}e}/}/7ú· 8Q95 cg.nJZZڻ0" r9+Ki? 
,@(A 9 I$>b>h}/ymu>Q: 666/rÇn߾}1f;9cƌ^næu3Xs3 !VsJ;ت*}iiƶ d3VDF`A)PJ%@*-F >>>q:N5h.N1-4ML&󊍍tԩ67f)@ ELBBFO7az cΞ˱wFcss_UU+--=,A\@{< K5H!(s Ҙn!Hդm5B͛{ܸqK d>}9;;jD"yA-ljjⲲ:]!  vvv=6{T|XwQZ::TVV6)/HkOMv5> ] 7:KH)/M^r)Ӥ0%--Tr>hР X6=22rT*d6UUU 4ΆF)svvѯyo2MYz͜9sݨܒi͛7ODTINPl:$@7 '@GT0P:4 ÈE"T"xrvv T* `0h.\qʕCgϞ_YYYV-3ݝJ/&&&~ll{hp>;mɣqxk*.injzzp8+.;mMiO|Anj)j Ҝq!HHfjP F,\]]džH$c;wfAL0lѢEd2[Ns1,Bo=\G$&u8w 5m`EEŵ1D|I' /.6 4,cE1)R@uI$7OOnj`0 nHGF|}MM͕CIII?޽ 3pG ٹ{]]y63 AKOO?V[[i7^>}z7Րݨa]ldlU[: 1k Gx.WLB1e$ 22r|XXI&aOJJZ_[[[ll#oxLE ұT\tǏr@]v4x-,,܋t:]VTTT0`0oA1gΜO+|/ǁɸ^7'dHWأ ͖[ʃ [^ ;l `0 az Jn!nnn|}}tNrsΟ?8)++kP\ұ\ 7gv Հi$Xۼ; Qn a~OiJx "U; 9jRE3O>"""YZZZ%&&?|'999;SP[1f̘bbbfIPdPȃݯyy=S~?T* RzG( <==޽{߿rѺ|ŋxp֭/6Əss1 ELBBoWSSG#s@0^cYV&pq7J3@&$eiJǧ~949& +ɔvvv!!!=<==} 6DEE-9r_~;hvA$3: o-,qI}WEBi-xE@fq 4& ` 6PLTv UR9t:ùsKmll!|ǯ|aC&#R=р $ij'8G#R y (=ץCN>ٳg8E=z/,`lL>--mg\\ܣ?jI-=æØfAogoh.nl@sbJ0qcQRW@f=4Jdur988xxx nhhhOII9u,??VAw~umo0y /$ %s.Cy ]A© PAPL*\ɏay mɓ;tݻw/SE6TbŊج,.o#9çuMNǑgq!9Wo߾UNZܼJ x T|PLdfyDHS(cHٖvC*EFFҥC...bx^z>v{-,HT%y6m[|ky&2fͦH$0`Fz,͚5Ot{A*^xᅤ#G%ƶ-1YP(͚5kJqA1.?~ <2k Hݎ^tN^۬Ӏj9d{;H39sDP(#vuuUjMqqqVzz{ Ct{'$$|?yzYkooo{ōfI ӦMeTZʔi8+Ǐg8p @7AvA1e"A*Hn*Hcs>DW('0 #H$^^^qaaaJT*gdd8\XXGeee ./b:H$C5bĈGYv+F[oU]]}&noog̙]ܜçC,!6>=ϱl`ȑ5.\8RYYAiEZm;]z5uڴi^x~aիWO6S!--hBB\PD1)B>}:Ow!Ls>LV;p1%p/; AE1u^){#蠃IJc7@,S 0Dnee5""b//;;;eSSS}FFFZVV .쫬jѱ)Jo?r7|LffOX?8~wۓƶ0P(bfΜC82'T6w߉bT zGv<~Ν$&&:[F1mAٴ!5-9ͭ{݇prrrhll$?T^^˗/yR:/VVVg^cӦM>r["7oAHd_v31mI`9::LHHqnT|t8Qa`A^!%9-os(4@J71,Zں gccuO^RihhoGɓ_uڹsrͦ } WwgSфH̡P A:h5, 7 c0J#6A)?> |~F*/86+++;##s枩>z?0Zz˖-s5Mرc8f֭:sc#FxBPUVVުeǨqizx: ڝz>:("pp+b/@jjꛠi?/ IDAT $D20;dTk(s~͚1 #.BCCwu˲gyoΟ?1`0AX(AԻvZhƌ8~m.f97jjj.k4OW*@ZB𫫻JI|t|Db}G._ɲ(99Й728b\s(o@:XskeY'!!!}]\\reuuuݻEEE Է)c n455M6mۺucRVVv&[wL"@f̘anP8Sχ9 sa0Lrrk xc 1I 2#b t y/L7 x988 g$&&nHJJАn04y";vAH$mڴi jc֞<_YSSS~Af8DPL:g7'pN=˱3" # #5焼fl(7 `.kN q`YZMz8 @20 A4c:-?@%+| e:"1< BrrkdnT;efSɑJ%xϻD~aD"GJں7|ú=٥KNTWWz(a׮]uxGjժҬ6=(--=`aaPascԩSvq'*>:,ˠߨP0,Ì2Z 1*hl;(&L&Ett0v$7_8 TϯW```RtuMEEEE.\8ȣG~wС6lrG?T^^flK :m*@ 
E̙37ܜGLA|tRbGA1ˎ}D/  d 8oCݝ8-,,,سgϷeee.]:`,ݠ(FT|W~ݳfZg jeƶ-MEr\el[ڂ6 ~qTMG'XČ 0 ^0 *P(X&Y[[z{{ w744֖NΈ QiNB1 ةT]\\u%DP+J˲LfffVyyyƁ~LrJN+ĝ@*_}lKPܽk֬yy֬Yo,?ބpy/wrr2^U.@=yMN&8~#C"zB(J0 cecc(/ҩ())9cǎO&Nٳ233ͮ,ˊRZ6Z"@&9$JW8$ sz#G~W_1 5ƶBd(@b˲r^g@@@J*zyRV666V(**My0~G< $,BGyk׮qfwI2M0X,uDb`1FsӺ})yb.jj W0rS0a 4[0ęacc;wi]h)z=U鴵<7<ʕ+jill,"#_XJ ?Ocyū 3PRRrlߏ5jJ~rʕ0`mA'%%mպjmj X,k>ӀrRjʙB 11QpuuĎG/KO1o8]o`qط <<F&*>y  C>xE gc˖C(-444_~ǰa 0Cl0rη܃B<wuuuٳ>cܹ) p2cv/$''o™3g6'ٷBc1wXAn^`lv]`~`Dhn=| ppL&R*#F D HL<LۊtR@JO06!K2Dѯ_%vvvrB ˲R\hlԜ-Ueff0M   2a@"== zk…qB.&N\cc(-ߘ< 98ãw~S7)T*iBn,ӧoj̜~Bˠֆ&nP(#c[1th/X[[".zbnJgYwk~iñjd$lY%SZ EqP(o߾}}}ovHIIYL&/̙Z+**nɩED"P*JÁXXZK!C,A*FKkdbpb,ǂX7"1wW"I167/Өui4t:VshoB}U#F-  rrr22Zmڱ J' (-ܰ2Bt}/ Xt&Q)zpo8܀aYOHWxl|?;GRJ XGtG˔^$< ZШ}w3nޤڪҺ)ROau}[wPiii?~s]Ο?T*d]999XXX8XYYY%RNr:XJkX !`ڰ2h-<σ7 iШunDei{WչT4 FoVjZm NrʡlTP(lB^چMt0a &Lhl3*ڜ+w*oi`1foH1 (,{`){! 6Q4Zk'R9Cq]:ځ_?Ԯ?65R!Biwv~'ϟ[???顡<==c e)ah vְwZڡ,˂eEf)ptOm  5jT1Ueuv%Uv]k*@NN΅+WtҖ# [F*@(TAaψQ(_oH8њD2a@i 5&8<&~WOnco-HՃP!n.ZąlWӏĝS(Fqq|SDBBO6K?%T XZ-8ca A*46 xg9+///8y7/^AVP:b!za[R(G` H(jN,QF#WG.rp,ŷ:Zs5|('#';ɬ'*΂T- È|,t[Czi h5eWܳnk / O͈?GcҚe0 Gn?,C&wCډl_zǏ?9BҾX_n ^JE."2RAGH[펺ŲzglD ڕ#G~>;-X `U>}Ffi-.^ło5 OO={VQee^C&<[1]bd [F%K>OLqL ^x͡}~99W_ܹ,|:?.X`z( SbӦ1hP|8v,&Nge1yr,Y2EbbzDze`ooAo)|6w0,^ggP]].Xh =&.]+/G?.4] IDATX[[X`0jO* O~D12w"e,Bl/Ź ^F|N '85kM_d\(抻Nk `8*@ZA.H$jǢ>) XR})Y?i+HR܏?^>pvhtVǎ^Ɩyu +WB^ FffNA[CףIիcҏٳGmvC{e9"..E]++z_СW Ak1<䍡C{´i+_6#`X1wǓOس$&LX(c.С둓Sb>xQ]]%K>5u |"b,0xplt׫a00R~r"Ӟ~_O6%Q&-16o60<2W@:aRW!=ƳV6 ߻Ey8[2kZzlVQ:At#Ar-D.26=61[ TUբkכ %Kf[ij"6x^Сn^^ĉo>(Rキ>>x`i)CnTl|O=5 & ~ݺ駧 kĉi<@L>PQQK?F?q1o80 灞=g`YÇ.AIIlm kk g{ybm+CNF1JYJhH="<4k`Dme ##g踈A/F @/A]4u&@ii%ݝq7ꊈ0֕kjamm ֭+@$`eeҪ;W#'* {˗{Az\?"d44ܘ9&DIV:\D]]#bc ^Zdgbʔ!5N}>=&Oz\.&-j4v"bC1"XL6Ss88Gq֪==$[V$_S*t@3ZC ۝[TN{)bԬSF.~#{#Z < hVj5u0NE BNHHEϫ( ͛g?f;S߷}Fr{ΡȃagˤIrXVmcĈ_yY3v*:ݕ*_7*}3`B8[B(µMy3bj5j7諿TSVh'3FQD]L yGzEQEwOY$rja<6NeR31|>DW#F8UjU/ާ:$nIAc29~(Y|$-jV)X%͞|笏DNl[|ϳ)\C֯F<:a途^ 
!7$gQr(iiY839%Ě5a;0|X|sqՅX/|!Ѵi]@XXyVq60Fh4:kvegd$oY-woܠJ$iFT0l;e/l>@@@*EDHCetUUq؜e9YHVZi,b( jMzDWGs3W?t@Z7A0,: 6q6uŷDUU,f+Y9@xP~p8 (XV,f+~F|}KiHvv>O<љN{ҥ ۶%Xl[Ӳ}>^yKÃ8qC>NCøz,Y@z< =;PffZw7',,ZpQov67Xu}CDwFyxHD8; 2qjlQ5CoLWNQ>o|;Tu+30*]վ~6i_+.K;cb|#$чZ Ea2N'-%6'Z^K` T (_?:VPjUUQ*.݅be"7< XL6v'nW_ \%a z]yTK:/רS7[s[o=?v( _?%c߽I*93iyZ MyR`6BpZj鋸ΛuuV+0kW_5cxشi7QQtZF]xw}sΥ(sK͚U g,Z]r=1bKT4iRƨQCS')_#GLժQTgHNNG0}V4XxG;:FѩS+,%K6wK6%'~' $?e62ymx.fF)C= G(Mx|AQC!#jt[!b&_r5P5HE1ꅔ:n#bqCb*E}G/8,6[ݚQW )(AGXDaqۭb.m"/c,8uN__>ztz-A~b0h}j ( }th=FhsDUUq_-ŁfutXMvLVLVv6sԭ]O`u+D`^)ΗU`9+DudxJjԎrWfpܼ|LYE+M]H^ˢ+ -K*:DI5nDQx5yr(|t6_Mzչ;Fо} ڷoQK6$|(*?j+zxsl8}]g죪CwUDпs+-~Z }9ʇw4mq(FݗfVMחgP/!^`{z=wWg%WVNEG|u+6nҴzzMF ]N,h\B@`#UjVBm^ TQtlg6c51[(ȵPg!?ۄptp¡V4R,r~ REzfHN/V! ؏b')H'5ihRUdtll,f+:ɀ!}h4|d!3=؟t׳߸6n#d7! ^z)J M=6@W"n j؅rL @Ӧxᅞ >GÎv-fo1JvJ]UU+B.LCbMEDCʭ/> 4j5}ʗ}Tz)_mv)# =y|픔}ϗM{sPTUq>6'Í!Rf;`6p{Zv춋2|(( ?=NޠǠCEעі\ѱtFŸ󉌩ė?RD.>/а TŅ+LQUjJ$W(Q_?\r4k٘o2_1Nfdq4lZ?O/븊p_6TY+ش?n 5*:V/罂(r@E7*D>;NG: zz{ی *RӱC crCpxTz} avl["$nd{-h0O2lq鮘 -T[ןGQ%uF<Mp:tn]Y:|xk #01ș,F4H;p@!x@'Dq&һ\74zz& rڮTQZ0D4_}^H$y+6c Оթ~v |K| t]0/zc ||2 >5 "oPUU$v;, H$ " Bzp/e<+r[kV-(eAt=^18#eN]IyBZ ~F 34߈F#*G;z)%?#>?#Z ~lIHXx.N"H;9F֝:i=ϒ≄tz&Dr 途"n73?獦^Z6I"cn\.DH`:B,ܦk; eOB@`İc5P"RDdt%~54IrH$ 5.0!]hu!ceUYDojVm{$@: OWbo!H$׋Y0mb¢VqS,( BQVMD: D"hݛS^ ~>)( T JQ|aH$"n"D">*0|]s.VW Gi m~Ht@$Y"k~1:,( DADэZum{$J$In/s@4-`^DQ4U4isC> G>>[-@QdLK""H*{=e3$\֭ppi"jBJ il*I@w/ H$g95=]: Ib5'+-/*f^~y'OZI%l·L>  ᭉkWAаq`_jrg[o")_m.֮¼e,6AƉLNecmr#M RC"XVc/Г*BVAQNy?h9t699ϪU]`Q ^Wn@ p xoƭ:RD I 7ljEmsk;pzVý7?$4<8kiz[#voKfŢuހb7DRlݚ+6$uh4l6y?j5\%u\g/ِ @8!}B^K΋V;Z0ʔN#{I9w2'H;vGƢ+^yTU%';Q4 UG|I!cX6bFb '+|*ET}^k DRpx1]sJ ob[3/ Oc3 YbEW̎*Z%EB#TV%PUo$)_T=/[V9BUU$.#rθsG=_Lb|{!n7C|yHwF_((]*b6{/Qt; Qx؅hZX"p~A5J F|T :|u>E: JACScshtK}'i-_竏~&n؋38\Nߧq,%Ӡi]R_F: D"&nD*X]`8dN Wyd77v`|T$ V ~2uQo#)_̉[̜>>z}/?>gHN!+#'f?7MٶaiYlY\ҫh~!"H$oc"A:o qJ!8h 1T2Lms$w4#a6Yd|} l6;+i|}p:B~"(z=}k|̺[{J:-x"ex%u-H.LD`pr AQ_ FdI e6$cwS:~FrsٴKNS]X'ӳyهKj?+H$N`" ԯ{ߣBV!?+ݔuHm\6v 2"Z "Us  4hR؈u~&U t8<|w?c#I4 
4C0TƵ;߭[#Vo-ًD"6]2!`8pw?Q~de?zUҨZ*".| 2ԒGR<MN˽Jiv{#7ui}]/k7bFz?yw6jHڕ[^+^ߋ(A"H$FDs?XDΕ!Գ.Lݬ}zeƱ#9o:u[d˪t9| EhVԨ}>C6ENõ:d<9MnQ[sjrBelZXsSRcسc?ѱ<ڣ7C: "6DrQt`$9gDBQ=:}p&-oݱ?+k4Z:tk~EdLnGnv& ~ .8ܪM[0xT0?D'r|Gg? sWyٹ^΀V"@{Д zXtqyї5}V|3Q1m5!DR #1j)ے+0p?&b@"B"fFg^(Dg[~Zޔr<s'[-,OaA>_MLDדN@ 't:L~n6'O@`y,fQMK!R"Q ۅ7>矅1k/<9 V-ӹԮߘJ8vҎ IDATChx%"c՞JmV=V"0(D^NF?BJNǥ-J 5\: D]"~ۀπ&8!ѕl[6!%IE`b {"ɥC8u! Dgd8p. m5*>WUSH9<ě~G-U;o̟Ԫ׈+}Ofx&iߘ7tb6*.Z&{$|6]nN<ѱߌ̶ ( JuTJc8k=GUUF {Qdl⳷<>_8Î9̐'WOwcZ`l{V,*K8 Yo-K.D80 !@1Q}U`:x P@0䲸\}H*46T8~qo")>7\.`"c01` "'$_?T0=]<{gPh\5oiXr:Z-bGKDo @gP5`8qb?@APkHRa9v0˭{EBR!(D6B:nG8VQ+"1c:1y6: |Tj~( NskD n  ?$0(G{a/=[Q>'װ)7rʶ 6 қ: nfdft:h~G[MmH"ɘIh՞`!8^~&^[ؾqAn7Ǐp8H޵[8m Nϩgt8} )qU;ڹ\pDO,@ADZ !,;/`HD 6VU DzcD(z:u\LV'^{8쀘&ɔ_f(>.ݛpѽiiikmܒyU;4JrUV"]d;_DGpXNjYD޿V;ٱ1&S>ju F#>41|' _@ `1RfBB  TfEᎶѲͽ\N2O5F96ir~/1Ukn*F??t%o9oǶ (=#vp!;,r:%vbI0"18qrݭ; EQŤuZom@N. M`umKvvd+rͷbݞHR:{G9~8K]nonH=u 5(@7P(B˶جwJ^:?w}C`1%mԔ)2-L{ ?'St|| n~< 'R鉈Ao`j1y v*S&P;#ˬT]xb5q9?bʄo9kKp, ~?⒝'6m-Vl l/#(GpeGv.:uoǛ9[gˣުЂ!u+  6c:x(S'|Œ_G*+":>ϼMMV2 8~p#{6*V\4@D2,YDAXΣPWZP; LyrT8~Ú6*<3uPӯ5/=>B3KOvy:v_}3R7P}'nӅR2/DGMck{|!:mn:4C-mEȁdfdzAwsKtGƌ)Zl#ҏh~J~wp^`tJJʂ]v-ڲreJ'DrXvo!xfy^PUnDr} ܇h,D P=$eAyΦ@3B;Og^LJ_?3/~A!|<335VՇ෇ӢU{Z-}?ﺇ?~ ~[2lF?ZQq( F? h C?ɟ?}?&X ЏFQߍm*c%jM'$Fql\['iש;s~Q|{t:p~_/l.Y7o<% 6EHZU}ʦ5;㣡t8G:wo Hs;mbo~Vmchڢ!3jF?[p8Yd8¿7M2k"'V/9/1>ፏ߭|?w:EW{ϰp$Zwh@3s0Ҏdڢ6e2c^\> s>?OivO֮\eʢH=n6I=I#C]v^N n^|<ʰT'yһ#2l kԘnv׊oo7xY3aX-pcυ٬ (_k1P;/',dz3C!owgXd*dTc]7҄9镡唔4 ki7!i@cD_uYѿLrIPX`"84{9r(Vk3gvzRv,nGfqωXVnj\]9Fͺ"k5sS 7.wgղ7S~Zl|h&@sY 5|&~S&#-{ EQ۠&!a̞W={-k6\FvČ3zw6 JzJLpH΋dcͤL_;|޶IR! 
$ fN@ ɝg<; EVq6:݅x>WYQQڠ( 96 dŞ 9ذ`\S^viTz j,k_&Dw$D <AUs2ƭ{vHAW6paeجO؟tW̱4\N7mB‚,RcZln7_~8Yp8q:]\nrd=VmnQr(mﻃƼYlZ\f':չ5-j6mZ[ԎywKWUU~f %WCٕpABTU12ʮm>6cٔ2i~@`6jxP+3np7MŸs>~EC}gOrd/gw26sɬVӧwۻ;yeMUUl=m#)OXfΜ޽{] Q a;smIPR3|4t4:Z|u맯ZsuK}( z^6Dmn{?!K) ׀w~Z`"d"xFk/e:~<<ݕ-G3sXp &FQ8Fa #'(EfFe=t8jRWȈa?a6=-jF~n!NMh5e瓑o|Cۛzq -ZLnDu[ ̊E{ ,]z'Ky7˵lG޻'yےͤHZ4M|$&&< -K$#g]K9mڽnB;إY!P3BQyxw|c.W@ "Z1 r0Ѡ=:a89j7yED=xL*Oeoy,t&?a#_o_GQ:uoOjQ̟WD;YhJ^_f1l<ҿ+~~|htzQWV/@TLen .@έt7lL6ص-G;Lͺxb` y[x3H˚5l,{%:6ƿCӫ ҧK\hT}G֭W}ZS XvL@4kbbIIIc·{3WlVGq;k#.BR(Cr}K&96hDt!!) h@_ϲX [#yw7p+iT8pp`;%P*?#q˚7,ج3|7cUU 'DpH 6ق_ܜ|2ӳDPP'3'&6#3?f(o$#- At br1+V)MaNK`?6SQ=)Vb*4s,% ^GLl$FU7-Ob}92bd_1nhnO>}C{4wsbkUB̅6mt{bbIII2JRh>-x Bc{2RWݸt񮎷T3B}#/=foc٩޶,yDJVO{(Ԩy 6Tņ6zl?|Ϲ-~FFQB ,^k4k4/vWjDT8QAfA S 1U#9ϗ.^j:@zٯր@7}Ͻ /4X,Ni틧tYRql,s=)l3f,p#=>>ó!.)O >iƒ@?T^Ž{۞p聈r|͙fE#ҴN'h9OfsbuUS"_q"..{}-" P=nMLL|cQ]RqHgf`MI".[Vm_+ e-HEOҦgzOo~V#Ы :_v_@E84;E5-# '\^ViӦuIړcɌ=(ձ3n @=>>~޽{M " AJV\Ny*Ϸ~E,AQT߬Fڍ;x۞ U D"`"-O`S}K$+ r'''/#*"&,L/g|mH) m*1g3T s[ [vj(^փTtȶ@~nmem'"lEMT*UE1M} to t Qܞ F80E)\ joR.9~8GYz(yUhY,ӦMgޭ6/CFB+ l e->>~HRR޶I"F<0ϾᨈrO#bP*)4{\z!blw:fS2۳-6=O;~"MKE8>=:z}3s^u.uazwԸ{$o_UքTU\hH9p$*t"%bM?xrh9IYp4lNo[&( x-M$5uj6mڃwݲdfRʂ8z(Þ08))imH.#FRD>~Q';Љ,L8 V3D>>ZusRE(K] w穆߭®uAmp8{=#M=IR8^t6IѴv˶ `[ruyob-Vd:9sW>9}{FVӈ_-W_P9BJ15C oǹSmÀ'lLo H+Ѭpԩr7TSY )eXL6'r |iӦϽm䊨|th4ڠ ѣjX|(jp:+Xbb9\]/Tqg0{dhLwoѾM*4Jb[;:"$' {5^0!耨tga*DX?b~Ynh $l! <Z#&!b59!A/<Ii+7s\䒔*pޫWxM^͉-҂Mdq-]Tݴi=qIG1 y?>؆@oTB(( HTPUda5O?@Ay:p?b$gv9MG7/:EQP4O~zsn=Q^tBQpϝ;w֭[#n#n\r{{9Qiܸkҷﻷ:޶e1s.)Ct47A4uQ YXHDOxQFT{""n~F8@ˀ4re"j0uyc_02Kr"t:z''O6)e,&;oDjm…önB_ JJ7-wom;$U#A̪+ĜrKٿi^(MyޫdD3+:A8qgghE}J hy j Ol"e7ҳ Cl-oG8Sj(tݵsϦ%36Oc4b*pR٦O>pƍ|,F̈s^1TrDT+h44kV \1.#撍 "vXD FIcLU=mY|<FC_8D ɳQ\׀H:HqM }R IDATh{+Byv@v{j|||ݻn^d鄔b.ԍ8aNHHxg)X "rWI)H$ =ua D\tCI^:}:b_T _SQ@:dDzS g׊DTUDr#q5 ӥk"s>^z%*J=QtB'qHu̙^'7. mu>.:C"H4cBF P8q! .\wu޶GrqݖGLLL&M<Ѩ 0ԪDDP"jA8 `:8CƱ\$7ӄ߽{ݻwO?v UU[ߜ2퀀L6޽{rܷӫ9EV/ϵbʷE>ӧOp)74'Dן‹#H$e]kN=kkAQ?GFAJ# &(0[s'|2\uKR^pG} . 
i_Vի>:9N*BXD aGZ9 _t(ej|ynŁ)Ja|2'?یVsssgf[z#[B])*VT@콗 ^ *zl\ŎJD+ ( MtR T8Y3g_+...NHJJZk9Czvs10z)ñNkm%+-rѢEwGm!ATx9U"Ÿ_^N?,2"VCBEߏJعvꌄ?VȔ  8*uy(S J~ gaڠuݚ4;;{㯽{fw'GFFFlX HXd! kD`A&AEUUUxOcY]It4]Gx4 MNyq2JEWryW9ux<{2226Xփv=#p8Y .v]OZc:UV? .|4]݈&G_"B:#JhD"9 ?~]?i~~`6|~QEy%?|I]|ѫ"J-U"?FDΘF& g]Ç044Ԭi|L111mCCC[j_lttt`  ?Ђψd`0l`4⇢(b`29^ՠrhcqyp:h [ͅp98\TU:տ늢p8\.+j槥q8)999ɥ D3r,\pX.z-ӏEUME=Z⣚\`,oDåmJ$??ޢ}MMڄᬬ3yn^ %*}梜n@[!,D>, $`2i Q%#/;?h=nBT?! @(9"~n^!=Gwߦ@(:c$:uG+ \.Wbfffbff&[lƠ UUM&SV111/:::Ixxx몷[ @UUsuphXFM;TTtvn]5Hؠ˫2ܲp/cy` Q~V] C~2!Zyvn @B@t[{FN`Y@oE`:BlB̿M~NUՐ qnGyyyB,(( !!x[af9BUU#@hhhhppp@tttѣG?GݠiZ%@IIIYee@4,u]u9j].p:Y .Vź6:Ĝ[kl!+-r…P|f+4D$!}շ6i?w3*&56X&ʠZ+of$a2 01^),@).D8.keDt=8FE=~\/!M ,D>Cx-xgXx3xL* ݷ xxG{K_mҤIŜS8v{N*> 㦀paahCu\ŋKHHܾzNnN=ou](sT}СC}mW-#"_d8Z$H$}_y[VBV."MEb8W`;7^u T1"sD8ҙNDhS "D"D'w 8 82ۄ ND8zbr0%' ֕#1/}}CU` X|DGGKnJ&Mӎֽ{*66UUҥˀ]ȫ(Dօ;h嬽BZ~r 7k2LGoO[sOxxxXz5QQQ .]􆤤G9pFQd$4<5 WVص-[,ܰa V!'zk F`bEDB9Dmd}GVTTdo59.WCLLz;wZ;Of޵C#3bZP)&p6ʡ*U&fq 2Q=b vqrC~fQUZ1z9AxZnEj"#(yK#qލST]h^;>G{ O¾ ^z=йs~\oEQz[ll,5"44΀?8k9N$%%i׮^z#uDDDb躮+'fL'T;yS[v8;--͛7TVVDk`$ O{o!.ސs4Ǝ}JJʉiD\\ n2kzf xwn*@_1g)@/;VIھ-Mχ=.fHQ 魒Ѡ9Pewj\i٬yg6a '!4 5n٩oA1o*rhLA5Nv85tjUd&طz_ݸ|M=EwSיky DRaN㏨Z?Dn sxxQmj"cH8JRDuV=ѝ^\3n`0BA_<Ӡ ;v ^0^t]vEUUbbb iڢt{F|yQXXXl6|iM=C?,--=ijx` Jxᅏ=]27ֲ|Zp/̝;k?p 7qNjKhO_g+]v޲SLfLfyf#"V'{tpy$*;ĭۢ?62?m;򶧮rAj)؛=kƹ]ZupfҎÙI;nZ:_n!XU^@D?Iv+PFoުs!r%vCc,,[m/ƭ_YN!jyvo7v>=V.ܯʑ꒼-:5s=_ܮgϞïI*X%&&_^.ԩթ(QQQJΝ/;|*@$g6ďRJGy])-XϾGUUFwĉә9e[0}psl:;L)) 7\/ʎI|)䮻^&""};cVxjN}\O1lX_v9q-?}7j~#onkݛ-ɠzK)b#V*tT΃;֭طK'ip?1"Ǣu^ MvLY=0Hj.CP،lX8"܈kTB,AQӵ ʉ;t&'$$8njeپ}:Շ~l6ӢE \#_۰0"##Շ F brnfI*&_ ͒*Fxр颲+xwعHޠnbxY?M:WQ^jl-zuݓ9oE?W2",zqXDBXT7DR|]goVr7W$mLݻwl׮]?R K,>vCyݺu<8>m%r5#:6jg=:g׈r+bժ-4jddn^E?JJʙ>[:3bD_~a c95 ^zrUݕ)OiieeV 芿R/Mtv׽|eSWKB䜢:Г?g/r W}ح=qέII}XBDTͭr6;DOSI=ܕs?5k44bbbGܹsm lAӴ“} = &O?N[D[^_%|#=" O!33FB5eEG_0l؃ݑ+gĈ~' Fgqe׮Ǵiѓns_E9uo@DD(te„iZӹ+< ^|c7nwt3><ȥ>ʠA=z_x$g/s3Ĺ IDAT'ٴWl\뛖͜Ӻc=tp `U0zT'{tмj>q˪씽w^,=aÖwsJ$ 
nݺ][QQQsqܢEhO1\AN͈N%gPDc9"+9"ӤIca9HJJ#++Mӈqp5l6Ո*;2=޽;b2q8$$ҹsۚ\,3NNN`P>8nӉobFuRSs3ӤIc ,,&,,=0~~f<6 4mEU_~Bqq9^:ƍٱ# Mh۶%Ƿ`0xHLLeXuCum4jاY%se8۟Ej`(diѢcMum(j!G*r.]W'nuZWhGF4x CO<3.={ohʔ)JKKڞBz@o}5><)HJLj!LjJ4㎖q-Ѿ&x1|x_[yZ5( MFq㍗5D׮צMӚqq-߲eqqClH# u ;sv9K#bSlOG[i*wګ&neo_|mDrZѦM.V׶fUX_R#7ҧV_!N}C!Y#H 3xDWcv<K"9߿-Vr߾}}m @@@@sMӪ<OmO?*|j G$x u' IH$;PB 4d(DR(пkj=]:v]u_4R}jXHT"!JqK$_! ?#$z'..F6mꒅhXBSSS9r?i#z.!>#&fsڌ Gr #1`':0{_"ZRR}С6@U(??mo?@"Gp"B$"#ہq~",D&>xXU4?צU$&1t L9jO#$%((e=.Yf܆oreږF #= RD〟K"4F&7ޘd`̘6u{y;}j"'G9 Ao͑H$Pmm/"<<<8333׶7 oaJ쒆 DA/D2N 6BTr%'3wSkzuq.4;HNm GunϾ})Y2'MF:v%c6 cm[AtɺuCll$C!88~H׮ؾ=-c=|֮ݎjmۦ\tQoFwҾ}+,tؚJ+VNaa)m4 {b6zٶm?۷dÆ]\v>Ν Znza6lEii۷dذpL~^uK$ UU|;VTTT=k[)@"=y D <w B%88s o z,Ftɑk`Pݲe눍O#33CkvǷ䣏矿HTT8o5wߝر+a?uG-b߾"#h4ŋV3gt]&%g̥L2e˦wB^)**ϰa0p`7"#xϸ!L{$;}Rݻ#!!\st]gذ>7tҖs_h:p LϞ\w0f9{5ų~矿ȑٻ7䢋zѨQ(9w=ɓ>7QUUToP퀧w^"iȘٳg5x<| ~I#VNar "dS3{%QC}@"$QbZ"<$"b`= q 5\w._D}޼ fΜ)ky9L,GC's;w 9,XM4Ks bV~Nv>ߒr+_0 ᇯyͿ&;˙$-竕=N BT"%.h{9"B9=i:NI5FAAX6'&D.LoIGRbbjj'pyiF S65ccpu37pط/5k5TSש ࡇҧOj'.S3>66 nl6g2 }r0۶g?QO!D ˤ#>/(H p3{piH$Ç$===!%%e9 K۶m[]v3a K%˹h1Q:FDBy@EL yG"g?BĜUW^0 h޽)h*dd䒗WLqqݻg榛Fҭ[ GYly?FuŽpyQE~/aĻuLQ7vh(Bx<&N"78{ФIc5 eԨ!  ]ױZ4Ĝ9?qW3wp`橼g]Mm7YuxV)@CB"qGx%]f$}\ۀ4rDrΝ;ԩSӧrDj _JJV_ _2_>ϸ!Sy$aD ?9A؂8 8ƞEQ-Ԛȑ%tԚ  aatԚ~:ۏSYiǦrmWp=<cG yu{/~<tHchh۶'!!4,Nu`np/4iҘ[o+|32|x_V̇gٲxsE\q ^z{) K]x; f$%e>~y#EEeܙTjVd V=#pB[HDŧ6 >*!J-)ϑ*n' u{oӊȭC"iw饗>q0!![_s4[no0L)HRyo}m䬦:̣zUv+1Btt߀Xބ=O=+ĽJii!\|q>9n5wnvШQ(W2y{h_^(tЊڅ1/Jpp VM7gs-/`4>=;e#<{{tԆs__`<̞?G{..Ͽ'<<?|#$$#ք^Sx4.߈2u<}_odƣ |Fj꿻1Dx#r$b^xU6!N |@H%Ꜩvᯄ>)Z у(X.5UTDrlzy\\\iӦu-w+///L-@V=!.v8c$gyDfDb}UUΤLr4{<0\Vc/&,,]2~#+]WG:nUU0N}o#**/lX;ݺ̿o\H$ v|Ǽynq\%/Ԃm6! 
/:Z;`$p'$IiҤaMu]jAtfbc#kB@(+hԄ]<ts< و(NU*;!ux(..b1TV`0p:]W (nf.#⪫.oopy7r-o/6o`ׯ߸}EEE 6 oсd=[s$$ 8)Zj=a„=A4OBBU:0xŻmQX 10p!X $Dv !*69`(xⵣuk0Dɖ^g8FIN?QQQGF=F&q:x!T"ߞ7{$Dϼ1 կo8B+s|DuI[Çnذa7Ξ={mۦi֐s6f9/w޽rss?)@.|lD"H>q%%%7n\[kjDxxxT```|׮][,&QQQ 4FQUQ"tݻwwUUՁԴ.됦iN]5-N+p1Bp܎04Sz@Jc@7!@$~sjbjE9"_f~7ymD՘"LbX :#Fi2L?֭{.ХKM6m_?v\iXo#H$B4h?&))vln٪U̓:tСc.]\V\\PVVpMBx ' IETb7TrPw_(1:GF׾U ھѤ7Vs;^@c/9(=.G vmܸ+VYUUunE ˞ڱc徶!#H…( "uՉwH$9tHNN ]j(J_hpppc??Zt8p`bX~K.]vs!JHN^!k7{*BL_C“#y;6EԎxU k _llÇ޽0EQ+W\~eeeɜCխNDon>wu] Hc):"NΓ/D"t]E躮WlJ͖vwq6_ܹwξ f}V"oT~I;JD7IRQQHYkf0f0f;3sg ƾa>{=}>bPBͼt եj U=)1GPD%5Tk}ST͑OP wRTƅpZaRQ-5kPanj׋8Pc!WDJrd-iQG=T7.qX֠M6װaövϞ=[LΝ;=Vn< 4_|ؙ3gt^E 5x"n,:REݻ?Ü9/G16η4/::U:u֩S玨 qqqq֭nݺk(}||ʍ9rM||ӧwQgxڄ*NgB!D b (_|uvhРAiii6m̙3'<_}1==}tŔ{y0 ?<\O$)j/®l8pq"p8rXn@"rCjFתUI```ի7 PlJ޶M6\`]v-JHHp8" UBvM44<Do)ylڵuW_s1qNӤ/c%5h^Zֈr4M3%+++3===-///4,Twif lv=Zj5*ԬQFi4Ǐ=u֍G]u֕].W6p\?|V6nܸ Ð0AZ@W٫=~ֆM4Q>3Mdn7__͝4MIKKKO@9r,))hfffaɨI))))GL5 8*a2afQKlN4xvM,K_~~~Z2KTRM|­V4M4MsΝ;y wܹ!;;{ѣG&"^ƍ\rw}$O: TG=1XP@'OƩ' ;:{\/٢Nb9}`꺎aW"=ֳ-cXPEr LWi~~~~ubbb*jP.666fM>>>kaytiiiu0Ci: H;GX$@-߷B e}'N9afHߵka$߿cǎ2 d\5uGlڵ w}$>|*T*"Ҁ(TrZрPT˯V P'P]2J,T}H q+@` KM.?@{} T U8w=;Pr $p2JBP }QCۋ՘i6///zbޕ+Wίzhd|}I0MC]fgggݻ7>777p$[`rrrNgi4:`_bcc3^vvv?%& BJ:t B3QxooPWP?F)ĥjԨՠAEMCXH"D^Q @oTRKـ'9ٺz! 
^{ygT~]rMZ78C-u\d cAB\={sO3 pe1+PLT_¾Z]fZB\qq%YLӴ̙3gaOt=Q }QmB!ZB*yT6Z-@4Bڸ5$5na nTB`;ۗxB\)zGծ]_~9<==}Z4IK|Qu_dU>z^8Uಊ=+`yrNۿTTTTۉ'ft+MNO%K~+HcBqc+|>j 6VP6AԔq ǁ=j5yLTq <н?oԬZYWpv\~>NJJJ7oleB8Qde!.Q0B \'o`@ُY5-kJ`Ơlvy`-U80ֽs]+shB\Mlw'NLP;%PT^D$ B!ΩUVc}Wzt,BA&#ñq=D!YUX 2{9pI!JQa;ߊB3H"HaaauƎ{駟` qdk!#0!.\jBw}~OtLB75Pjg#E%XPB,_N0!#**8{ :y:!C>:BH4.]|8eʔoy FMs~B\8T=Bj޼+&Mmժ[2\ s; Gjq^DY@BxVn{]vL* qq>@+!TH"%Vݺuxw3zS؃X(B5}H"%RLL]'NzWZ,@O#BDbM`$ BQoxGky:!6VpkI@)[lg>3,L#Pշ}A`KPS !tx饗~AAA<7֨X_x:!@tKr"2Bbcc0aBȑ#wz:!nDo >@(lPT-!7.rw)O<HU8p,B7`50&x!qi?iҤl<7J@B+Q:p?0j_ -c !4MjܸcРAlҞI7@ᇀc“V0s !4MiI&wf tLB$:0pz8!<)Ξd5=B+jI&v]eC!<U ñ)5LN@u!1 Z;4i2J4c$+ƆQxPt2̓q !"""n5jޱcVZOO#PZٱ<_N@dPB!.sܸqɯҥKt@BS=C' Up*D(:ovޚC{:ubH!1oK XֆrjgBq * /Ľ[)k~*u!U0ñq-@7Q-<B eX|6mĉ3~˗1 !.N` T%D%" G!ą <`SLqkĥA%[ X| ܎W4jPvcV4 ^tF%#)%A/pU^!35kӧXMӬ6liyLqn@cW_08+p}cƏ4`*)Bbǧ\.]O62dZI\>#ہDφ#xպtiIttyOtMhaEagw#JJ:w-/!77o>5)Bʕ+ջw)aaaagҥ\.WOQXwS;R=yH,q &N#3 /gZKu-[_?~СK\9>FUNl8 ܰKիX9Nbb!>>qBxf͚={F@@@Ђ ?^6 #Ӂ+KD EU7JJx8 QxyyѰauˠɀt!GW֭M4{SN푔qCq:u!25.jetBb4idH׮]G7s1+Vx5[AI"STyAG<B!nz wغ+W\0o޼vx:0qI">P-!?݀ Jq唔YKhhhm>׶m>~{ZbH"s8+ шBq)ۼyGO?}nݺuI7Fq>&j~`'B\>MWb_:txL2/hѢrCD\߀BTB!BbVRsN^e˖??GYt*$cp%d.0ݓ !+Vرk׮/ԨQm6L4={Nx$ bmߡ  !%i111wk5k69iҤoar H".E2p0x 2<4Yj f-bDDӤImz@Ow 4q8r۽=Eq\y:!8ugGԯ_ǓfΜߵkp\iN?׭TEx@UF$<"/3ϼK4M:v+1m7qy͝O>)4n)5P=&WnH9ifRJ{QFYy#^~jV,ɇ8i >BM̓Ak4M~i9}4_LƵ fVzΌ1[@R f^DFgvi5;7ej֌WvlVLd˖_fPV wV))̚M/2bD_BB?Yj ѪU#ڶL~snǛo~w&<<+7hj\.ƍkr]3c~i9v7&:}vĉ4f{Ql}E!\ӧϧi:_;SF4z˂iϜ9NJ"Y( IDAT<$%%3wn݈3~AjL:,OO~X(ѼX_6GIW)|{x~}%'Pc3OuM#̨^Mマ>-[|{Z_\ߊq}1@& 0 z2(q|\:wo}ʴ|Kt MHIIIYpel9dul̘x䑗س']6mNiFZ|DJa<8.Oei 604_C\psn1@u̙?s0 Ջ2v"u֬{o ooٴi_iQ:䭷fЯ /A5_ivnSd֬_dffsQ}{l,G.6xyYcB\^4NV-@yz6ڣӶ8<N4Sh{qoc jUE @in%ToP|P7z_" iA}O, 2`y7uޗC X ԏ(^C.jQ1Zik̩S5kՊ14͵uĚ|ik :4͵f>w={agvf~ܕi57n1ܳg fQ~zia1Cu2 bZoǙժU4MsҼk 6 cik{ˬTygt4kԈ6fB‚5mZ|afv_fժ.j4ך|0ʬ\9LO|f^ ;thc_v9eȂc.f[ǽ1?RQ5I"+Tzr`?uq^ $Yz^hިW -Ӏ@/Թm2;U‧Q0Q?j8~ls{kP KsHTb} ƨd?pTnk{u?{\Dw0dXϿg.XJ:ꇿ?'W\'s?r-Z 33#x4lKJJ:<0INNe*'ӱcVڵ+| CӯhuQz4~ԫW4jT3mlrصUV<#v#wǦM{"%%cCrKvu#q˖=8. 
P~bqo_qZݹ5޽fϞxukS^R!Ԭ͎ҨQ |}}h֬U- qi:߬+⽄B}99{?h87PuTp5 /Ԥ+6QԬPUQģ!p ~Fݯ5&.Q {}oGN7PzmdOPm5WXߊ^rR/d=!" rQM+߁ZD< 4M#66maS2aZip >dNU+2p`ؕ2I]$lِn-X:xpj׮LFF6^^^XgENN4<샔*«~‰gn41MΝon*ˇC`\ 4Aѥ.\wނŢi|ժUdժ-4kV0ؾ}?eybPf49iMvw!Ã>eT/_R~U\;ttFll6moT;A@/SjEMzplݺ}>@%;w ::??bcر߂-+m'&Ʃ-\WuEh/7FuC%A3ZjբCժP5}u@[b. H$w#Y91XQc< B<&5{ý܁t׈$ jڇ$<pGV\7oAڵ5W%%%իb&Fꏮkہ `Ĉw7KeƌX`*EsO>ً>}U+аauf gotP&O^1{b6n?lt:3vθqHXX&}M e2ey/6xS ˀ]ٸqdw\<I1.+|Lm3hP7u{j*ҤImMCzUiܸ{ƓKrr** 44Y~~cx{K=QbDøRB% ZIRP] ya&X D%'Si1P-"jLއ*QHZ/G% C^*!*|'ņJ0]bB\1銫́j~=5@N ,_u fݺ4jTK?'{i47n:{3cÆ.jEЦMc oτ _'FppÆ&(H cܷrѡí̚5nڰrfn=0Vmĉ4x' bڴ9zk=|sh}w ڷuXX0O|Ҍ?ի2s+ԪudРn8p>Ã͘14[oϷߎcǎL*Wdܷ tPMҡ2>ZnҥNs#D Y^X**j`y&*qCu2PI`N.wZH(|XZArPoMmBME9q﫡uoTSC%N8cΟʂj9t*hnAu/)*B%H#QN] v@sP}|/]+E`k*UGԏhT)q;WQxX.bw)1BE]WD]@'ƁNנh5nNن}*әTKCJ婨.PQCnYzN 'R|J >E29>P-=sQTkHSTBJ*\NnIBu!D%{ kpGrZ ⪑D\kj6ۨ=QjbEY &EF]pfDu;mqz =wQ=/򳥈πJH܉)X׌ru+pܣNxyY.TpBDI&MP^sB!! (2P֠j9ANNGiInn^Ai>BkAQ\>Qg}iy,l}wz,+ĉ4ZzĂĮ6IvBx$ qQS.GRXFF+WnX .;IMX WJn}Ih~%n=v:cC qKWǁ/gQD>3g`=B&._Y~'Q~UjԈ.x/!Vm4M4FŊ =r ˠ^*TRལGOdZ}[9c?דp2eh֬.~~vLdѢU4lXÇu>"#KӬY" Jׯ[)W. `ٲudPr$(lNN.&+A*QԩbaӴi4M#+%pm e& n*XVV@\\D56NddKKsi}zꉬFuBҿ^wɨj?PEBB~/m@#pС$>xwDZgA6lɫ~L )_~=C5k#1U!66իrq8r8p/0*UZ"cǡ$%%k^}ʔ n*$&&Ӽ#l>'vGOI|22ާ7m|Bf=e;`׮,Zի2vt#S'cm~6fDF\p:wINNcsv횒AOq.L5iӘo^ڣ%ѡPzjfe ܃yl:V 11ر{}0ذa/0֭S\e̚۷C<>YB\h]JBN/v(D$- z`.@TwQw&J$0M6m3r}䣏ҨQ 93wt~@ϞK"̊bzKߦr8 ٬]Sl{ijdZN}^^:O<ѓ3~b4hP}ƍkϣi-!3Bb"lAJ,ͩ5@(d ߀@{T V`"t]{ZiNػ .AҸqMy+Fx[q*۴i6lEKW wu+yyN֯߉SeʔYĉt>ƶmiذ:v7uV9k+?l^*DF 88Fjfu%:R"ЩS l6+ijV̜3'âEزe/QQe(]:ΝC4//%um 嗕# 6EFӴu5MjvrzR'Xne&GAAiiї5cP,˖}g4m?&i}jCi Ѕ_BPPO<&??#(Q3;-=Ņt׳~͌їpF|UDWy)U*3_^IF *X?""5EŊe ^sOKrrryOINNL0ƍ{eի&/Cٲa|DD;ѫW;^zCJ gjnfE>]˅;#xᅩj5MOJŏ @j%L`:jy!J<9{])I|3RҦzxСDDbu\}EqnnN ]-7 4X,IV09F^.enb9ْafA1%O6l6+cYYji__ɥ}9oBv;0 F6`c Q H E0pjPV`D̮D6ZiϺMýE./\b?ȱix}oڗY4ZFGP"e} MAƀ@ 4j  C2@Ƌ4%*IC|kQv($% 5-TPտބ7C"$%>hj2 ĢnNٟF!ĥj-kdj7!J"@ J_4 4 U7T,Dz0MCo+ԭ[g ѶmcLd͚l/;6ok6-vEy;H-eASXֈ2eD/_>B5jjS\rMv͛tBO!@%/Q;LE ''ΝcYyŁW-پ}?ٴmۘ vѾuW3n! 
M֍/+Y~'*#$$#ZNnD6]|||4M Z"##c#"""t]/UjXn4bX.˕z0;wt&ٳg8tODy{6 qUI"Ĺ墺bmGD7ZFnNͨv'`5eKII'99 &QFtr4ϸx/jٹ6p`ׂ+VlbŲ̘2^^~aۃ <̻`4Mǎرw! !>+UTDTTTdZ˄UrRT0L=7߽#~۶m;vܹ/99yaN;v,tPt+jTWX#o\QgPB)AS<I"ą38c{YE6Tێ `RB\wLJIIgi,^]ש]2o5 ;DRR2ÇOGGuS(g7o %Z އvt ԍ_ѣp,n6|0#c>Σ#U+xO9|8 0yهhѢG*W.+@%*!i 6S'77׹~999C*Y4w%MtO4_]"|}}Ti7`X,v{iVM|V_@@jr:N4 0M޷o_|FFFBbbČ#G$%$$ggg'qtf榙@^]M:p+7t#e9w,u'XjqCMX|'VQ JT|V̈ P]kq/o^4wsPTKF>jۀ1QEqk~>5wދ1VCUVru^7,@cjb_wUC% INqu?c:$ Wu2_|CɧUo$ݟCuڏ:%jdsq}cPTDO<5Z_?[o vc4hp?/_ՋpOl>7^,]Ѻu#ƌ&MЧ]F1G[Rpާ]km Fdd %wAFꏟ!CSvehݺ{~Mff67cde9 KL3Z.r!ݺ;c _hjZla&&͛W^k7oNVeMӴMӰX,vjX,^KqwMiu=P4j !*T(miv=jVTf'[&RSS33222srrRM6 Ñ|'NJ2 8w8+++r%kq1~Q]`ˢ.C%'n},E~^A]4/C]D%,;~_-uSrD%/~uԅdTQӷw@%| @%Sߢ{|Fc8G%2ݱs*}&P [P@?y&jFG} fS'TBϽd U^Dc?Ea;Pq-꜑_jˡʅ]-FsmJڞF04$H!-1ԉByQ InUT@2@iF+'|_F9:{W~kV3T omszՒ݆sφYy[scDػ|V2*jGc(n q}pO|ٖͦBzB !BBNP (TX!<˩x*(*EDP鐣$@HBz!=MMb|ޯW^dgΰ|y4' 680탳dT*%^xHOυV?o>//}-ן6m4>`-x1z=vnacDz큅 hZj'x٘;E HLAbb,&N^xuufbƝw.ʕ/W(<[8~4ݚy`ݺ;wO?=LABB^0>/V{|ܔ\mٯ}aaa{}=>|X@h4h4֯^^زH-6΋ 8{p]p^HL.t!]Vfٓq<]F q\' ouYKޑ-N14~ j Sဳ8kk@upO~f9f',L@-up NX_Z^ }>xふk,YrΝ|>غ}= <# @`/6n|]\adSJ!ڜ,ڵe777111?#ƶ]p0)yz#,޾gOy C8dsXWQr>p~VypJޯ LWy/X^j8ͥaK7x;aepY^o3kWup|ICqy,Mp9)҂Nؽ;//h gx_~CV-.(((!2229,,l_d:]cS8"5 p3yr#{ἰ>g^8/?3aiq8g8;u W+pm4`zYH_k;ༀǯ?p&&6s~~ zqLsf";=D_t*;ICp>*ΉW4wL39nX5쯸sV8Q^0!.I$L@~^ӧ?k.So=8gRįE㗣j}r:]sGe,zZk&k7,!x>>Q{E^_$Eu[~Ke m貸DDDDD0!""""vTii%o?V.'mJK+sam3[e4k}5bBDmj׮TzӨм^mkXhyѮ]ad+##>f|oE'kl -""K\-߻me֖_(Kap8xE՛7?_ҺDDD EDmNe n7jjj1ip̜9 &|;w `Ԩ~ΉPص+'O 4+W~ɌbԨ#>l#OoPM૯doa2`ɒq]ga0aƌ1Š_#'3f\3,*kpT.t¬Y0nuO>YЀy#ؼy/z趦i0aPL4է\Y6"0(t\= D**jbZL8FÂ@e,Y!b!x᭷VBe9r />֮ܒ Q1g΋(+d̙`=?rs1iң(-9 v`ڴd( ̞]BlZEjg\\4y_^^ՔAєOh4jP*%XV,X(,,,;k8TBBw(vWVpm?fY}t~z9 c ̙c1`@sձc<1 ""XBDmʀܦ'GAhh큊j(TJ[-nwh("22gT`Ȑ{pqΤ9J[r1J_:t:z #'99gp@_,]^}!,\W8zgd 7 {-ˆؽjcSXh.ny!bb1|x%ODDԑڜZo|O#'vbɒ?[/>JJ!IobРf^Q2efz3gúu;p8н{~aJK+j߲B$&ƠތBC}A_}c+7bø?a_9s&"?}M?g߳g߈{y =bi %^x=xD W@Dj hhk*(w4K&,+^~$&@$%b8碬  n( h4*th#^2$:c @@/ҲpuXzBQ!11={F6׻k.>P]x"111ѣ 3 _SGCѷo "#5221$ #F8JvyBpI2^zi>Ǝ!UA:u :7:,[C&d ..{r_OF'":,-(AA~\Le̘4֮q'[b & 
DD!""W`BDDDDD ]Sm~1^0!Oᦛc vf'Ɩ-`6[p/Сmw੧޾agdɇp8XMDDy@C*-…ųރpLDECeyqqP(D?mۨKjg;xtDŽ CYsADD& DԎrrP(Fff>ܴ# J:N.Dhh~%1hT(/ sqf))Abb 4 YEuX8y211(**CA"**sTyy5rr.]= IgUy7-+,,lE.^8u*vzuV㧑S;dԩ\n݂PpJ&""<QY@ehj8pg߈7|'O`{p7b˖a۶8}-EhhDQDnn1>YĄcٲPVV^;~x-3MmڟdƘ13z˜1;'ȑ dg7{!55f=@_n;klƾ}KcX{yg5RRCvJŴi%xU.ĊkEƜ9/ 7ѡ8t$feduL@]'N| ?l~S>{ P_oFLLa6[1mړ1xE1.+v7֮} ]xaٲ.igT1tXf3_\Cb۟nmn~ЫW7lڴ&=wy ~%KޏxOQ\\{?))0vr߿畞R""EDԮ4 !I ޟ9s,A3(,,m(:{nyRSQ[[wQP(D( $%ر,L?TV0iKfBBtjiwq$'N$% <<!""hQi]VBa,E] 4p8dכx={F4P(` 2MM@¥}EBu$)`6[iwLBC[II`2]|<0"jWMfvhsuww76-+))V(u8}מSra6[ZM$. :{כ~@DD0!vwQ`2j&(J~coZJWWc!ueAQaȐ>ocDz?@er!nyG ,+V5ܫ-𝈈\EDԮƍ??&l6;/~~(-Dhh@P&B;<_ƨQ( ƻ>4̩k.M!pww@BCQCƓO0IDATׯ=oo=,+A%G^ɏc^>zgj^P2OOnIM2q0<ЭTJE"vr4ou.XZP]S!x{;EHbTWMsVBp> 4%WCNNΜ@bb  VaaXa4ڎӟڵ?0j'"= D]ru>wbW裳e>k=rU& TPm]"" {A}͘q=Xn K`ʔpu%IƏoDDDW "j7ᢾ#HDDԑ>B5 & DDTDDD & DD QaBDDDDD & DDjsuDD0!\N@zz߁j{r8\uL@:=_ycכ] dSOJ 5:>y_-=L_Kee /_[SWDDDDD. t?6g<@ ""v۞D@"] ~A5#5ѵo߾KF jQ6XcLDZ @X@eXc`Bԁ$Iru,DvJzWCDDzptmc!h4qp:""u dYYݻ!-""bFqձQc1:t赠л\ u^&<))SN}o4:""8XJ֞GnnNɔ꘨sQ(#GBn߾^Rꘈ*h4SN͘={v?AݨT?Ο?_ QfUegg:!>>~ էl6[5ѵG$#FOO?HnnǮ:ѕS*{^0j֕,rIMMM`ru`X*l6[o،$bZ-I/HE/!!!|||RDuuNWHDDkV֭t:]NT(:io,˲*??f+z GΉhAA Cb)ߓ̙3dY:8""긘][TJ2HyT*O=AE)1%%%'lII7[It0`L&Sӧdۋ:,߲,ƚ rFNٛFDDDDtY?~|wmݻ+T3k֬3gwYA.S`:]L&SVVVjI"G-{yy?~rӦM7}溈mL@'˲hnKLL|,&UTO4ijݸquuu鮎(&L2}}L/,,*[n:GZZR{񔷷}xxL@l6kXXؔonw4o'ӳBPZB톝;w>p8LKhXFc,Ɔ$IbT*RT b;xOݽ|}}A=YZZR\\5T@^lۭ,]%I AZP(ZE,-jnnn`02XFUSSShJj:&11q`T*!V`߾}Pvsʢ(xxxɲl7 \)%I Q) eMM nnnQZfԜe "" ]mRPPT*ۨTw:B! N>OI|Nz ;;;%%%e 길?ݥ}m6̙3Ǫ~~~SFӧϝ7oDS{"2dBNÆ {E;GQQё[NX,:#""Foڴ&8jԨGnܸj-i\ٳ3IIIOh4fl۶mLYA4)`0ݻwiaa7t.^8v؍6l8qğT*_fٌ&&&>*2*++sRRR/**BFM4icǾjRlltR:|z^ݻ~cY%<<|~IUUUZ۪갰EEEGťG7FFF@8p޽{MOOϪUW=MVaÆ9&xƍ'0{xxxyyy tAVZ5hӦMsZ>99F!ӯ_m6qǎOYVc||ݣwAqqя>?dR)FFFNeb]yS wzxx&==̯4M$I!e^^^]ABJAAAbcc'jlWPP-55Y\`~ի׭ (|A={,)) ˲yǎ3JeZ0L&;r꜈1ǎ&T*MFFj1[lQTvsssFCPxvuh,?tЋvp˖-7kZIdY,ɲ2qJR:r tpZZڐ?? 
>|IAAA7fR_WWgBCCŬ 6lp8j.x",6hodd$Zݵ4&y?>|`(4Ly  N eY6y %%%A5ߏZ0??N ׯN8DzR3bĈUaaa#JKKK$( Qŋ; zZGFFN)*,,زF DQn .* Vp8vL&SdIWBpqYUUUFLEl6ܲe}III 0`cy{}"??""!XDtYAB||-- \e_~ƍ6Ւh<0W"U@@@l6l*Ygb+**ږ,55'O~7**AuvѣGX~={ hsdff~,wǮ]Fsb k׮uÆ #SSSjP(:IOILL|^'6LV6.%˲p8A691pH[_~eύ7IDD. ꂂ$4[^^:]!(JFцL0`,+#(o/77wnw9;ccc靛6[+d?psFrOt^ (ϯgXXm l6ZVձ۠lO>s%IRZ;Q97))>f %IoFFt:]aÆ}~֧OeY6;yzz Źo}}wwXQYY_\\@roo7tӉURL2ZU8O] Q[cBDbjkk eYɲ\qĉgTTTeY/1el{SuuuT*6m^YY$Iͷ[\\n֭T*!C^OLL\pM;w[en7lNhk4HLSxM^z=U]]7--[oo/_KIIYht111vc6h45ϞR S C#w ӧc۶m{0==k??nѳO:FJJߺv4tзvikkk[әkDQX/Xpeٜw4 X4dȐܼyVpkkk fsyc}6o~r>CΧߢIENDB`rally-0.9.1/doc/source/images/Rally_VM_list.png0000664000567000056710000020276013073417716022565 0ustar jenkinsjenkins00000000000000PNG  IHDR sBIT|d IDATxwx^R.B{#(ISAA"*"RĆ?"HW REPwB㒐ܣffgٙt]W!χR R ӎ#b%$[>?T|ʄb!((J)|>a4}Z:nj #(pk9-;=N1lnpbf%4Ԕ}MjSpfK(ןKq'ƤscKu`` :6\.f !+pVƗʅ~/v \2Wg0b^*=T)%$)v;K8s]aUft͆jTG^GhO zqkchW9!DΡI"9;9'Wl , qK u ]L !n G!?%ΌouI3jC(B^e0l /qQJqтLv?J!B!M#` !B!n @B!7 B!BF!B!M#B!⦑D!BqH"B!i$B!B4!B!n @B!7 B!BF!B!M#B!⦑D!BqH"B!i$B!B4!B!n @B!7 B!BF!B!M#B!⦑D!BqouC)&%%@hh(ᄄwkjj*. Z|c۱lx^F#aaaʕ[=qѕ"%14!F"&WS:Jf04LFM?CqC|>l²eػw/Ν`0ERhժŋŞRJ΋l߾%KgΞ= @|(Q-Zlٲr}v7r!X,.\jժѴiSwMi;cc,s=m$8X4ӠDHDحΦ^?G\I`w KhrP4o(V St]WvfϞʕ+q:bЮ];ڷoOhhhs?,>Y= v USt} GC%T>֐V_mbkGib9q|i E#iz/ŋ_\f3ZcǎYW)6s3o!'&;oY:?ډ?J"PIo*Uܡו߿Sk.~5MpՋիgiYHu DQ+d7w3z3~Y\ hT^_GNY4(;MKѰD> {N^X*"ڗ5|aQ6W f#X]ɤQI.ݍɯXO5r]i~P@7`Μ9,[,"Y)eۙ7o_5gI#q jӑ3Q郌5L5Fq,ʼn̝6 9~\ga)΋7Fqz z)x^>S[N'111x… p8o/|Yp&6=q>nè#j.6OȎS)(>Ӄ_럗kNL0۷c0Y&c/!䭕V%1R:;~yKv y3)8pvP"AFyղ,;W}BMAIr2|v8. 
J)oW]6rΜ˟3fSJdp*6'o]AC:}9~O9yhHZMr2&RۤK\77ndҥYKy<͛GժU)SLXWG<Ӵ FM޽!<4r O 9v8.'QMqp!Ȉl&(J` 1bJehy|tvtMld?[)nwf1e֭~jԬS}rP͚5iڴ)'NdӦM|T^J*elsbF>;x?QdP{/ٰ7eW8.Typ܄1]h{\.\nƐ̙F ˁBhh(L)P~?vS`#LU7G_w^٫'67\ zjLBRbfbDFFfIl |4s6El8_3o%$8BX1K(i 1 12ix\v\~6Psu1Pu݋D3C1hnۋjF3j \7OW|MFkRNUf:_Z/H'yݜJu3nXX:{I1°C3@]Q<~?fSڭoq ܤwa]{@:ii>Rmˁ#l&dʒR:.OB1pL(B"WY}<8n &ЬsRx=.\mW tDžۧ0>'YJ)~v\ti9pztS16Sv)ucs0X0ηJi [ 0[ -CyRηUJ~sn c ak臑4p!R8~:,p;:f9㘸f<8l)Z 1G&T.۝fXhQKQJzYp!n;~Ugas&ʹ3ǽCtxk?z7&}][{pb≍zn.Txc$:Yڙt/c"iҀظ8bcv,]飷U4Rn)U}s9 o߾+V … q8i%!(oJ1w(W(? e9oj/#)&wLV5A#I:wO2EO\lϿ \(8c#iISH<;H4toLPfb)Z Ͽ5=HqѴmoJ!ex1~7ԩ-Buԡc=1Ll߾M6]4K#*>y 7+E¡-mTcɓxSw]_yjD^ضbVHt,)X!c>$~/Z4kS/BLl/oǷ?gI4kՆˏ."EKt"̟ܶ:rf]ώͱ(^@^soD}+؄I,W܏{\iKM@^qƙd➔cTnϏrJ]8ώ@ؿ~)'&.&WjЊ/lJein)*W%NMbti^Eiтy8:2!+_WϨT,1DlÎ,}~=z@݊dyn?:Hhc)T"ű^VV_Hڷ&M';Or1ԮU>S>'ojI{|ה^ټsewD\Gr,Wc$''gX4)ɘة ?nn׀T.VRѪq cqX;}/=՚Id̥x<>|} !ogi<խ ?/ Kv+[j2J [3un9˻C0۰i_놔͛u(6l iFj(Y$J)6oޜm$}=W{1@EDRx9x^ɒ{y'(+{j2~Mt̘5n3d2b >CZ֋fʇcߧueqNqLYqG|s9HdƴΗ+1g.JVk@,huE˖(U$Low.\PA&NOeW9y o h̜>f>={-䓼;)~sƌi(A}xvDrTۘ?yi3bPvi3`8/B)/s'gX4M ]SK~pSv ѵx4oZ/|~ڹMg/?眛: ӫV2N\ypR;ar6Փg$ cL62}8ɇd蠧yoOdʄwuo寳.WD@N:?Xtr3R6eI̘1K[28rؓI:F&4aG3HG2Xkгף:-U|9(/?On8̠3kGt]oʒ] 8ND9iGa~x;Nn'HV,xb 0O=lfZkO'_29Kn-!!,C>Lyܹ, og܄e9~ @RxlK }#yVRl?ѲM; ^3繇M3>`A;JO7$̙fF{7J9'*m'3^gskSyC 5dn3<ٲMQrky,!%oE>}]Wʏަ) ƔkJJ r]qml/POS =*VG]W25eծDZ߉f#Ll! #Q34*ܝ|uz3g+ zV?|oCwS(/lCuPԊúA{F~6J> oNV8w&̠sxRl$9< O<,XZ,bbپ};G~u?d'OU6SLN@DGϠS(9U|C`]ڳhTxZH>k OƉ;BF# d=6X0SbxZ(jb.f̙*hOL| 䓉] "tlyV {<`b9H;GKWGt`W.shO2{Bv2k<;a#Ũi46d ځ3ٽ,+h7]z;;6SgͤB>3v9 };I?rt&!f sd[jU[qonבO̤S hҨ!寸^'eYVqRBQ޳u&*ݙ;wf TV$:ǟšW%͘M{wy y )Q+6l5GL j78yƯcV~`Рq(*0s-xsZk-Y| ZѤVo/)Yf$g_z-;/sIL(ݖ2Yȋ%Q5jIvT+tt KIj4PWP6O0k_O{M㞪єs?-fϨxc@{@qljLn6u[ߨls .i` *?{Q*yQXDv4hZcwXqXvR* .hb C5ڼ:~/Z.+4*Yd+KH1Q{sI9WSZ^-JҤ`qv_` ڣU0{h·qwՊlԌ-*l{8y.eCl| ǹ U`viܜ7Oon(@׶^WzT܋_1iba Աh̑Hp;!wjKeP/,+6}([KX`. 
K&D˃!iv'_ljز`&jU]uYx]UJaԞ^]ZfDyTǖ*Y|FTrr$ ]xgd;bJ9ۏIɘb#n't1xrmZHfnrbdu̓iKXXzC^`QX)>ٗȨDFL*lךvwͪX-kzOy_@ٻҶMK:>( H)9РWꕟB՘(WO x.S=Ч@j:ȴ tbyBxx8:tnw^ otFf+ҏ|QDDFQ\enMjY='^4͠"Elх>-ks~5[)W ټ#s'QY{<܃d>xs.M#Wb2i2_eGG=l6DEE]r[cyE~pŊ}I\NTNvKp7pgn4Nn]ϰN$)2*J2>u0_ore< Pd!2O+Uɲ7Tts=R]>N(||8=~hIXlLEZxFԂ,4{A|6ɚE۲!cO2_:\jZZ=>BjF PCsd3W6oo,,?fi\G %,kټ IDAT`vB3uZhpWi:xaRr)g4C.l4qAfBl{7т#hѻ~Ⱥm=ǿi ymG%i~>s.ޘ5uskZ=MJ5l7}5q<ԉ5Y׵b_+_] 7~rϝ̂1fF}FP.28y>OI4\Q% O7+l+ȔD&_kz߀b1?jz3.<<I6YL=~HK5U+Lڴ_2l3<ئ1n\j0p&i=cW}GdK)c!yW;_5&7,VGN=hߨ 7m&PN{o`h?hY`ٲe RSSYb% t8oOU;I?OjT$yGqOgg-,=›ү-G)4rRH4B9t;O693f4Q*BjFVуݻSD~߄n 1'179V_lŵ_"pLHqdKnbשT6 ~Y362F ,q rl?9D5AhtwFǿvӮEOI㗬H]k9XB9iJ(bGPu߸ޤ,9&XD՛f=,;kAi^ݹ!/`ݶ 5btf#'fJL$%i`ww#ݺc=A҅Ǘh-OzO~MXdHZ[xSQi)\U^Y [dp*ڵ{FNgvWg Q|ժݚ^0ݶ5Wgɴw)ۯЮa`}nCVOkwUO8( kirv4>YU)_mgɃէP%o\]lbU,-q΄2 դ]sMA뵥w]QA-""s.Gz[]4ir~B2n<պ5k?DZ%I=YѐQ<|3?K4M,^ wM`TM=:)mJ(PmM!V%vgHx)Lkv=ZʌY {_ϬIodFӹ{&}< aG9RM^ ߐr QFl۶m۶`:vŒwvyc4M{9?b޾(Cub]iިӇL"y]аpMAeOPh~ lO߱y_$=:u L}#w,#-٭u10z{Io4!=exBߕ_נ@M6|9|? D@ԎڃLY} sS5:#UN{{GiXңv*^nėZl Ց] 5jhݏ"Y2s /15&(̟>gp Mdog$W AO ̘: c]hR #[xcJ ٹk@b_!TO?A.G7v)7 \lF*uS(+~٣Q&>:5Guz$&&_~YiFn:զM۶m՛o<O4t[rּVکy?Wn_)]W~]JUPcV|3U(yEUP*W[~v0 *ժ TYR]^﷙*o͇J)]۽N5+_Bqk j\gc+WY=~OjVZP~=T޻V;Oސru8jĈk 6sΩs jj̘1m۶m۶jȑtf-WOmm^U0\RݖV)ۧQǪp_=Rܾ9wH|EV"vMP~S}*H>e-P@*\B}qj٬jܥ:z׵3y7[*:uΫRLT)F[}COu_ݻ6mڨnݺYf={(ݮ:.\zڴiڷoVX~4|.5rc*<<\\]U([SҕR#wF*LsbNtZLRj ˯+gQh*xuڻ者KLVP i_W -O(|j7SUJjUIJ׽TqfSu]yI#THeZU:M6S7ΩW.SuYKm9RtxAUT~T6Q>TÞOJ?*]UwAT~WgvoPunX]=mUΥd\aq褌cJ)]}@RsJ)CUjڸZ*wHglu*ɩUj3ePlqU˧}dk\I_{՚O'pUn˖mS?͛jEFoj=oخRʙtT͝0BŤ"NOrSTTԡ:Tߦ%Ta_)ש^"Pf~_6[,^,㷌)L}tueO8kZOOel;] :7l:VJՁK#MiՄOT_%EU{腿Z:e>Zy@e:J9z`Jq) 7[^r5,@ݻ7rDJ:^EPp|x[jP3}Rx.?B{9~쩩՜BAkȍJLLdҤIlذiw}7}!OW2s^5Oފ4jv/8Iu:d j$.0m?9V~Zk 5j#6:ۈ[Kq(HLL$''c0 &&ܹ/s/,III>|$4M#""h"""ؽ >'NpIZ͛X5׽d&أ y. 
@>Ρs&99DJ'$üvOKtn35U..ϯ05BMAXC.>e Cfمdx B!9 ߦg̥Wt/ ONL GG̡U߲{_haex!Tٟq6Kɲo!B$K3[Ծ3'g]Uv}=7NTRtؑoNd-B7gB!$Oq=$:x5/f @Լ->G-Jw/+J%5 bHڛ[B!Br\@)ʯkֈn-K '5`]5/ώ_˯bxi#EWas)wgVc||U>|8,%t:ou6rz!E9vΆu| %k!s@.i~6~3DMkҴ>%*}&_cJS%)@wS?ƤB CTQ\RZe?]OD_I%r '+z绦 ˣR/q?zh;e$%RNԩ@e(l @HX*ϋV@hZam~^QEDhƺvI-ʾ7 }ke|Ozoߒ"3J)"u]ȱIuz;nj]ohzK/a#ț'THMI\i0GE3q8ſmnSF1l@ B }]v0w*V:QCү=Y[u9{Mg!.p8nu6(i+ENvou v=$wXpq'_mO9;8볛x3^ o'blERߗ|wdC)Q8~5/IZ>mZ6Hd3B!"R`sHu3(7\A}x?&3F)?ͦSxV=#9R>~3{ :]B!r" r KR5Ufm dI9;S',=JFѠB1?.ϣ]:}ʂ^&*DZ4sݢo̩5_!6$XBsޥ6쬖KPJn&}'>zmSaO[iЩPy;Ga#[ӢSL ySY~yF:2'Bq N'oцHRoۋԽvg2G~J+JPZ;#q$ؓ9C1ha),yG!B\NӇ{Ûƙn1^Y ͱлX~vl*}m) e_R8N7h>M#|>RR?\n߇ϯ cחe$quސt) @.iSx?B8C-VshϘ"c 6('[,:Lr@~R ۍ)##u~ƈ)mȉ;o~|.Iw=k"g)s)%?%9δuN1 {P4M`0A޼yoN qQL&VΊYvn7_][)%%s_slFBBzO%'e @Bțw\-EjWu$F~y~Jp=>f+QR~V|=O4ŨqxF;ASNfV3v%ؠرw{^16 0czL %!4 PCB - %6% 6Uf1Ү4Qi{wG3w\I罧Caaɦ2} YӐvlAw?~t8p v&L?柇Aaoml2|hlac1&ʟ].zNaaʦB|H*p)/}kj!$Ai^KV36=U8l8瘃jL[1qxmu*%.aa bU, Y'8D; qTԆڦQM-At9n|"|5ܲ)4 0Afadn$,@ @ QW IDAT6Up$`7=(k1lZߧaaaʃ.6}ao3 0 0aq/Đ/2YT0 0 uX0 0 0=XT0 0 S.$0 0 ۰aaaDU UbRAp,"0 0LyPJ>?X0 0 0L% D،fRAp:0 0Ly<bHm@X0 0 0_cN s@aa^ Caaʇn X0 0 0^0 0 p Xaaav@RD]V֘kaaRD1$  0 0LCQT1IR< `1 0 ÔMT^G0 0 SQuQ UDd *1aaYC֠!D0m %|W"aaa* ͐hbMZ:\7~{ 0 0 Q '7#L߀C "(6P[F i z@(A8@06mjAAMYt0 0 @/q4Q样`  F0 v/~5;Na{l[SOZ F0pޣwuSD c&Oŗ_ٓGwή@aa;岇t!E^LBwJk&"`٠}s:?qٵ3d8 Sya͒x敷Цhhh0 h@dPR=߾6fhXk8e{0 0 Sh/{1 Һ dAXt ZVl{>Zy-1lY^"+i \w}q/$-^}wQhwރW^]>70 0̤! U.}$ [l+k=RH_T5)#ע^ _U\z4 ,^GpF@ VϕaG(a0 Sfba#;o /.]cλgu૯?oWzy.p݈u͆6"%1BPa.0 _z=܆<}8LooXBjbVۗ蝿b =p@AGW]C|9u3.vz׎8v y9#' ---3)]!>1L!?+ ѷ2 ^.LHhNTtlܔ@@W @[@PF}VtE4-S+D,@0v^8ǐ1{܃#N=T$TEazD9oFE 6lX /3:SE(*s/ ٬\u|W9 |wT ŀ`-u +W i Q ]qDuB!^L"YP̀!^DJ4=Fr8D}}>J44zBig<xED5w|31]/y $:6lMDhf .=dgA$?=eFWZ?MLCo.0 RDä0RU0{= 6e7=|:~sݹh[~F w"ڰq$Hv}߷ap{Z-'ןy6Sӎ?߻P:&1b1F 0LB AG+1I𾍤R.XP3` \|hݍx {y8b֍P(;"TPx0Ιj݊6Z!1vw] (8h/(8KCSP 0LD H*YY9`Yޟ )s QdĥlDD"D"ΩD"|su]ɓ'u 㚖a*.$  UEMmIHp$"0 0}܏^a'q]"G|0ޑ$*IX5ثw0 0e0Dd`0 0L!9 y "Y(;$ wWaRG$"HMLiHAgUޘB 0L"?`1ROCte]| 0 ÔJMl (¡AeÒ. 
گ=-V$Q{"oK5aHDf#4E} !Ũٕe卨4p^aaNz^S3c)Bߊg)ךϰaaBH6sfȲXDޏ~!,@*NBga TCbDJSFJD*B aaXLݐwRd([H%KWʓa-eXTW0  !ίH;¾H`"awX0 0 Sv K+ÃiHTMZSzIh !eRAp0 _2\z9kR anz/yruk,@a);v~2Έs2,@*aƚ$K u-?0,@a)+BHHp!@ C%d7 ˃V^`aaHDc.?xibX 0{p"zUoIDsygm 8 a>ἷV 5gaa"ʍ \!RlUy\Z| ,S< aajGwC92eOPtIΡf=۸ Dx{ ^} |矰 &T("@[MO ~tnhjŕ aR;!$Vd(*cq-Ym _p80{476  B:7û^}̛>=r5ywQ?)3ِPs|ǻZSØ0vv9me?cF kBJ<aCić"k!&j/[ Dױ'p 0;+r {ĸ-7CM(yp6:D\k<4F,˪ $tpch ı 6䴽 ea@oa՘{q!V s V [Xb=.>חba!$`u֒q22Sª m]-P,.fGŴ1#ID0tnk0|xg;ѵ~n9ez!<=}jo_nJd/Nnt'{a4{XFt#~wuX:|">͟jtL=r\wxa 8n! 0 bbk!4hɭRMcѨO70pTAJof" `L: PTb1Q0x<E.77DhB(Ganx?]N$ SP$`{! ө MwV Ijh!@[R\t ð?D1{ЄDӨq}nxK'+ Nf MhAfSo.0 Seu^I dqɿD026"@ϓR)S&)s%dx#zW=ނw~nϛo)'aWaU}edw%?=vc IlذvUWN8zYiY?;}N, fK8VX=|&Rq.$UQ^l![r չ}HEm= :Gsts^#.ö3`Iq. H߻䗘 EQO=_ ¨'CD]PdzONjoT0nv8p=L0 / a̵\ɸ.\?7r)݋Fӊ[ݽ MDze([iNm3 ?}y5C.jxvNjw}|bʮbʮ:^;3 0@`9lp)!͊6O)[>?τ& ۢ\倨;c|OƔ]7{m;Mƕ`0 0C$\FN?ʅhk %{%קiޏBč.߫clKנnP2+q"lұ%a&-rQ*sf_xRmbp4S;& (3%{/ly J=o}oYE&8O > ӗ-0 SncDپQ~E۾%LqH2G5X A7$Ep;;_M|:]-!*q7Yaa*IԽaeBvI>fN*ATut!ˉGj?܏O}|]tIPdi9`8 a#Y*v.g%o"9(pDT-bA 9YD)Z,@+>-]fzF~a0}^/0 0}JƖ |%灀3/BzI5Ǧj%̟]m ?LgS hй=tX)Ɓu)TpQh 9 0}pID@VSRo&͐Xb*Ky¯R=O*)MarVH^GI|}@aຉ Y= UߏT D,kT{g$R/|LC.3gcw'1V  0LɑD%Ծ!9YಠVّ(_f8?@.;L*Lڀ/E9f]J=;q;g2xAxٽ! xB@B"~_P5;X 0kFZ5.l @i!IfKM s*"˧픏zV=W&zn${A]ϵ̔` ږ,5睍LĨQ0~ǝp⿫sb:0 ä6FZ3z Iv&㓈򆭔js;R;v8GCO ~s&Z 0TY*teJ>Ǚ#Mw/<ވtfXaIP! b5%PU T,f 5]}QDM? "BBO"yYfj,3h1 % i62k.}A39{Q|o* 9zx^wfݟ2 0L,iDR7$jKfȊDM "RGʪ僒;}@z,25紐_ঔn}liwʏngYL..5=ơwaNBg<$1脋6!]i$ J?RRA㥊бV5@]R}OP''ALD&.ܪmUEhEzn$OHC& ǟ.{I~,b<I 0!6Ʊ5KӰ}&wCHL6),;҅jW7A/k2G~ VR!_jua<5=(@TH?}g _c1~8l;u?#ea^,)mKRxRDt%=7$sqw >Ðֵ( IDAT( ߄AMb/KAHD`a!MRwwٗg(6{qa 6\vP)qQbLoDž( ;_{g_0Ͼ-[w.9u4vc-3Cᅧry8n=e5Fb^7LAUqGv;p+3 *RcuBzw&BH)YдQՅ_xLt> }q+qD-w$t]`Ɩ]oC; j0n(@G@@vt U%3W=l: rGe NoخϮsTF|"D@WT@R|jAzZZ 7.) 
DjAӄ"]]-C0KzѨ5GYcUDkgTj@H,NL1XVHj?ý뀻}F.۽8d'c32"/pa.?ݏIc@ L~ 3{o9O&  m= %6 BB-) !TUT$DT0FA8+{\%V_:Idw@  jBP(MC(UMw`P7Ԁ5W!Cd<EQt^O1@ 0km=Q^o8"$¡ϼx<^Q %CX1L( bW`a2\*~}Hqmx, xhJ@FG]tĠ!#1qt-2<rõcmk9jvx궫삅p͛c]-3 0E6(& XiQ $t —$w%YuAY}Rs.TWB}(eJ}ss7,q9zNs WQd֚GԜ/|$wuP_£$:_ówX|%}.qX geBQߗo㬓9ޅ8xφ0ͷobL@ӈQ8WwcvEXU0Sv`SoÌ=90 _q\vqM3L&yuU )a5tC*m+$!hpI/`W!uŠ2'ᣑk BW@cP$ҙ[RXoPb8)F#⽝% /ݪs ݠvwߌ+Gtiy|LEw܅v.[[zXqEkhl:ꅗs5pPe,Y !0y6>5-g_UkICoC!caJ(ƽo7rUz;Kd63 udϊ@ I%Nm3i"Y̝p WcIɹ+Ǧe`!bq.`}f h:I D%?1?P|Gh`:v 1Ifly̠6à(hlf_i(&69Wƞ) 0}?;̴KR=.|;gO6$¦܄3Ŏ05v9^TU |7" 6FM7RB* #OaJJ@u<ƨ(JFqB'dKpHzT/*3/}6w &Y1+IZ(X\0KeT@+r:ZZZ)9FOb(\!U M9#cY{9!Y͹-b*kӊ8rA8/0YnoCbpA?Ăm}a, Tq(Ӱjt pBϕnx)Q7MĤ_ R^ bȮX}tqF,tҡ]R_/wf,!y ƥ bpp$1ƅD#{#__;4c,G9GQ#&x]zKֶ{;'1;`a4(۪Sv(O/MP\w~?ZZ9b E,}Euڴ:;CK["S|~݂n1)x,:.˸vUn*EŭZY:Q[Ϛs>|>'WBaH jjj0sp`mCT 0~#P`䎈2i[7|*( M7"St"aBIee2'BKh|ޣ !$3~>3/v&< g5(/]!dZK^̙gՀ+iv~Ǣho@`j ˢSę[L^ODj90 ø$!$"A*`9zR770֟TH"oFXbSVBK7 ?1\BO&q4=LC]u12#XS,v $t4s/MPbt)E0* }uwcZ! ] l)PJ|>Q&/-bQ| ;ed.r0Dn kRl*6=p®;% man ,`#\fLn0$Ag DS 0]c2\*K !)_&\HpSha"dD@R n7;aS6NDф0YH~,$b>SBfhyBǼ_Ϧ); E)g =r)\Mڅ.(zEZ(@$?u ^:s}!`]aa@Jo{ %WجL[;>l#e_JB(h3n1ݲӜf#z H2[:GHr\$t8Dqx4@f_b63Kv {"U)!kg>#}Vs@aa~*LS9W(G*tcԕC[s5P Rku69mS-1rKG}M{͚XCNl.(@a# :"-7f]qR&Y%+!u4-_ ̲50 SNd@r% '#Ҿ*ME*Tps> )k -ٽVR )ó3;o6ک]ž/dyx|O4Yi=W+eWIk[Bef7.MWlűjc-w8 >s>\ B&@aP2e 5 rO=# 04C".Q׷0OgDu.4N^chy5]BP5+fGllH<^Ahj9zB?O6vW0 1tM.a+a[ dZHBm о|bVxNBll|]vyǟY'3 xCʞRd oUq ꗿ BwedPgL@U\`(C4\g ZGBVn!P0ι,V=>v"Nc~I -KL$d.E&)fAbY z$>d7 lzށWbЮ ; `16tj3un1nQAm/Fb5  0aC-Rގ £T2o(I*E!-9+,]hFOUxp.L2&`Y9iB:GZSvHL0Cx_=f0#lDG(O=}IԎэ IDATO:j/1Wy'5GcH=_cS.mW [w}’1_KOCX!~;9ӱtz\p}i ԭʲb]R^x(GR5\i)*ȳÜB5jO#氼&;!m⦓; wt?;! 
!ՕqZ9 Orx!1MbuK [ ^Db<>56ܖpSxC9;&8B&-k(UxNMh$`xV~SNj `M*q>}^lyJٵk^x p)f N>`ªy@wa򯼏̌qo<ʿpvC@$wڢch㨬;ZշvoI/FӖ8r6t,-mr2C۠vPkۿ.1 aʚ{>t7!UUR6 M"6wU%C4SN@Z+$Թ%bLo/^C!_'_B>,xndUxQX[ǐNZ֭1r@B5u>r4^6|'w#doYYǷun l<;XQPk;7b {U AQTۥKD%HcbI}zd* j(2Y^2lH$ ۃ+0_@"Aus?VcܼWuCv-w\tsÌ1d(5pL;.*SZAk~cvP,YKRyt13|)_AkLQB6v[K-![Dɦ2>2׆Mc6_ldxԺ .J7(@t!xѡ8ܓO~'x2MHPNʮn~>̞4(p0kW@m0u+$6n܈{]`[q@ Q@#D@Ky>; X>7! Dυ7lL QŜϵ~&د--hchDc:Jy}'EhF\vthXOf!(K~93B]E,f bP@ذ!gWiuI Ga'! {ǦvMCb:v}!-) [G~7ve_{{eb:XZ[uhNa( @ZpÚ1 bKG ]] (9n<흄jyBU{?qs!WPD"wYg!)ⴟ]V9| ~uht' 12 FwD1p@UU45570ގ`0 dŠi3~.hLky'2쭅`@:4D2hB ivEQP__0:t{Ka`-*,`xmں4y]C:h\ǻI UQ s6ԇ`z.6Q!.#)cIT0qfpa`-fS]]]Rb? tؘW*Bq/LߐBI*jQ_@|c< DU4C]MDo0ߙg#kZ b} R1#T1!C+.EoA@s`;+ m" #>wc`YCC(Z4D 75MMEW$}pO/6{7YI PSfYӶT+ XqpO>:0pm-G v EAtjtm܈Q1@N@/E%4:_ @0Y2gR#Si EQ2&Aڽ@(YiPQkEa0)x\DqPUw*[Q."r̕Ef3 s>RTRDUY'f\b~YRpg)G (ӵ,EIM[Jڜ8WuYWU`ÿ]j>W>,GM] Hr?V$$kڸ4{x@}׵{aTF+"Ѳy 8il9 o$zQnsLN>X-7_}kpxl4ofo#7/o16sKӽ̿&ܩwoa"ɵg*FZEJO*E$I5p.n8W=>x-f?oq:.N'IP}f} `B{@[Ǹ@E_@'t3& !WoѳxT8i ݷǻKWv{aUWN8zYiY?;}Nτw G0κb6'KFiUO+k,]k9 Fq0 ]{7.{nȗӦ𒣏1}d5߾p1ġ<3vz){"^w쉡?8q:0Lw{jtc>Qm$+Ƚ0t5KT)XBS]ۓqN,pVv-IjCdFJ: qӣƒZQð$tq$5K.Gr!(%M|k܀o:5~aݿwގ'd:]aFTiI h꠿Dk^+B#"T'i VqtBiQQGWfȜW#Qf^' jrL"kya࡟ހ7l?)gYIܳx-M5 );3z-4KvM}ceQK#DOD~"Pv&hr.UmV@瀬HP|:e~u{,R Ch]safR)i7dm[U䵭u!j-䦭dQ1 7#m!ńtI>"$)G "Q#|B~<ͲP-Sz=[P5"-RRS Z'<Й>aX.c\ƾ?op/AQ;-{={N Rv?}2v@43*.ԗ+K lI?)2 0+ !zP%K}&0`JKJQt%Kω,%@*gCM![j z8B(/mE}-QC.'c}shy4:"FWz=)%)R}W3K c8v` Ό"taVF\vId<-G7P~mz^=ȟAGITDHߋe5V#npMKeku8_wӯ2ѯKJfVD /tgBamp1wf4Uaf%za Xb^{NP_'CTD4($M5Mt/Ce4ORY 05,jmzW[sٷ}j`oqu?4:3I"taVBiXNrhʥ}GӑEڊ$2:qM7eB]blC `,GefXz5QDO!^+[эdҩy ;<.>Uxi?rPۈ ^t\K3ӎ>22 0=%J˿13ݾ2[,` "Ti"56"#S([86uǎz\I){eX4"4MxKK㷑 :DX5IӪz-U+ ]̹I…>ď=*ܵs O9op1d35Jcf FTȴd.lLU0WX?ZY%QB$B:Y^ Y_v.QAUTcD:_I8_s٬MwA6TEJ.;ad|j"t瀐or}ɯpmD8 gu8 1 MĦue2FI?S膹ޛt&iѺ y]@SZ㬧5C*#vH5^ s4N]ũ]{ߡW]З@)-PL-8M˳&_t/^ Z0~/vJfNf?fx'Hj¢2oN;|g1Dv@E Y_'﬍vf#5Y,|Mi*4rI#L;e]CДğ.g٪ O/;vs6#v@6?t|;hzZ܍ӿ[.a!3x܎ݔHՆjܯ4T=)t=4.{HcQ) 9ȗO Dܛ~wGJbNUegQ|}Pj'|Mrjhr{nH WUl+d ޤp 0}%D-Fu4(Xk9PP0}:,BwG@%ґ+n۫`Rt=۟#"4Cf& IhU#A&8׫<)r%X\\ 
j`瞃3utXq#ߙa[tH`m7TfM-*g2Z4D $OM7iCԨ vBiz9ʻ3$#[*L*I F h} " \еg ? 2PpG*(pC\K ߿*x^0F)|BGO;z+nmw xI'ǖu1 2KQyQXazRuwΚ=uR"a^F/] T9]/ 8>?٣`GfskCSiôaMF@#"=羀G,Td`lF !#=JDDa߽xy/8.r_g-_l\?3~O~"^}m85\0L?q%+ukR"WզSE$J>o} ?=W|/W^|.nrt>0L3DTdD=aRT *ڱv+J̌qVJνPtEB9/]*1&2-`ӑ"Y|`9}kk\{-z7pUW{{. ~;o¥.wĉO:"v[s'{[F4>LPAXλwxǯ=G( _3m߿ '?0S= ֨V}=DBAO\ZUYfY!잕oe l?F1c;޳rnqy~u!Jު4 Db`ֹNyLj: E2Px 0hm2J[B`4]*l@ VXVa !md(&CbUfBG%+(Qj>O@-1ZǸ旻kaZB lݺG0΢\.c|<>zTerfY a7p9׬0زe44m#ز|=0 j5ضM60mWQ֕ؑ nI/Gj85пuFuCyZT58l`t[߽YQX[ρ6QvJlYtZgź:1E]CE'_* 8 p֯Æ17/daaD 6`zz, CT*Ƞ) LLa [ҿt~/xno[p'C>ʙxO{`ÑGcĖ=q?Ƌ:p/oBDQKL;wڊ-*:Nǡ lղ?Gd'5둵lii8DUwD ]։ꕆM3sbPtl|xGm[,(,?H7?Y!?tKN90~x廯[< CO/y%:I[r>rc0Y"Z$y(~2Jiȍ K dH sh=/jAa*RQ IaqҼiBGr^ :JQYajUHUL0 JF\k`ƱTkYǻ]0"`8Y׎=;wNV 7o^{lEQiߣp7Gv'n ìZū+ݐh‘nwmES@JlWNdSCi *輚y# kvu{ C*Bл!D6J` +I6.0"l ԹlsOll~6oņa`|b+..FjCiweZl ;7 :JE =e [jW9@p9'0v ֖ƥG]EEBY|h`^'{ ^gJDGD /3 3F?`6QՄF!UmW1Hi[5ڗj6H܎ݓYG'(p X1mFZi4H̪'{Z?; 9"7 äȭРWv{&cq?RQS9M :(a`.Rfy,j&xQW *<罛B a6 0;675[e Ŭ&Qݲjg\cze̵["َR?9i ,WMrGHJ'{n/x|]$Z#0{Ƃa0LXi XenS ux GTZ?$B*X!)+Q٪\zS;b Yr%[T]pHlS4&HPCՔ5k6az#ۇ`5RFVZw !IXStPd#%yFQڍv :ma+y^s:E+k\'6aےb`Jy1+st"BafeTaz&Zʿ/G\"ɪX63r)?5pڤ҄<5:r 4]WL}WVf$,G-5oig$GCeVm 4R?ivO,s!YdS\[OOe8 iaif }bST삮~1!ئ'"hJ7iJ5䈼3up{i|.|7Az Bj'J:I ~ *\ nG 4׃B'J"BKt*o*=Q){JcB}`8&n:^^LzJXSԘ>='jV|CS>~aU8(ʴ(=mX2 Ph5>v-K+aPc#5kh`-da"k*Jl 9gxM3G8\tKpEvdgj~ *b:pJ'[! 
La  B`t8ͮV*e><>K/\GhſLq+v@fzeJ*Wk$KƋJLYy"a!ZRk ; nK3`Z[TWN"jeڍ/ʨr, # e[aHr~{tu[w},̸J?Z3}7ER8_Z-0f+v@/ni^=]ird^ExPnR|d]-U#tkw,ºp{iWaM _9+ iex}05yC[ܲM'fpX`:Y%95$߿0ZG;BDpZu۫!#O\T ?Ê558{ac5Sg7#5٠9E, KǬN9M; 9Йn!lg*2}# vز&bVNuIji[uRDӛvj{s&UT<߄1guO;aa"tìC=ߟk A]N;LGa49A\;m==DMM*Lyp[jqF: P0H"t;Z=EGt ts~:IYNgG O4S # j@&yauuF5͔0WKߍ wXsT@V7hPar; J:yQ=xfQh" M7L]Vw2a$mDF*v1bjD!ҬԢ yy0gAiW+cҰDmD Q/5J20jDD#,+(05 L*a_tt :ptzڄ"융J MJazNPɰY::[fHO]*Xjm@:%'y<4qLw ]|oÎ|HV /zsp)+rTgԧ%^7W~ya &WwfhAnQgYu(u`(M5oCj-;#4ٕ&EH1Jb1^\ڍ7a /*妁!V8*f v7 /qҏE$;Ƈa&)DA%o%_틑K}㞇K/[o+~+4oC:-xU7=ٍo_} f&߄hiȆ `%9Ȗ.gzݧ0,ޝv@[Xtt1CЮݻaiMmϋ#uKliD&"4ր zR{ S2;T1 $S"~xmj5ux)=6m%_w5'Yʍ73ߎOP6^+{ n~Cp a.5ސQJGQ$"(-+)n}CRSҮ E^tJ{X͓LirU2Q>T*{bQڴlCnHO`QXN $-eQz0 Hk>mwl?zO; 568gCo﫸{NÆJ F"l\oW~?YVЃ ȫ˫JT#;g"ZSRES.H\SzI)+ImΨ$= :|yjB ߶# Vb ; ėয~ڑ(OӢ9ޛV<˕1*cv]cCQ,U.j pZp9n _#Stlp-fN&eZSAFҝI A쉌tUoO7wQt#[,jm3#ZгhL8#'B5A - 0 2F6`@vm@8lm 2hDyRfetkTnAu7JRð{زQ[0JJ0&+uGaزnE-)-UK5:p y ϛ ):7P9ܚug6#,,{li~ݦT(" ?B,Ga5ExMțtEY]SrTG5bJQuƬ0lp 77[ ڄ[-r'F1\.@(6*6m,\RV" #N%Z ryW*bbbr҄I شiI#ьol/G~~ђX!L¬CE̯\,`Tjv=l06oX@JuiFDTJvoWFJA5[4%zGs%Uyiyel}xt60H*G(;zh T0(~O.Vߙgٰ DPJA@X=_?~|՘y4\o)$NTw^nď}=^~0Zn͘2|XL0FBǸP~b%)~qɨ#"0 w,E 0{ "GbRsciM0 E Ti6b#o =E/^E3 ߹ wp+_7<ʫ#L6Dem,JCC5z0` 4(pFR0{4_mZphEt~- A M>Nխh\+ҶKa Ȥ=jDK;NtdI20+v@Pl/}/㼋ޏ"EEї>n OkBi-) )O*TVicPJtiCL딬-2|f4%HuQ/oV;vSG:S3;l@b+0̊ML)22&Ý2ǖэ.JvӮZ5a [6I;xA\DC,1H:^?HNB @4BK)VqxsR+wllT=tcHiqB֘AL:$1IKRY/+8Yb;gҢ)s|1 v`$GiEGay# IDATdKVγ%?Ob:+HF$gNXJMcߖm1MYV$GwͳyX^PR&73 ä[h-TiLM@]wNJ֑%Z]k-OPs5TTm ZZn0"+wS"^Tua&K\r[V%7quS1 23Di;Ԝ*OBtӋNg$.iӋN{DK!H W+Q-8FH&{UYӰ"ېGZQS\@'>^ESt0t I:7T5K]5^OKR$SJcO/:RIk8v'U\vii 4 Eo%a; L1s9[taV 䈵Rn5 jw MyXIMe5KW¶y4Ϡ֚2\ga烫ijޯRq sYm | 0`;B70iZdO0զ<> R: /'"L-:m}st K1 :aA|=R'd=-uv90 T"O( s5kA+쪝K19.) 
4t΄ ?^XHKwCwdtmRlqPK _%R55@IJZDwQ9 k>!Xv7a7#VS #ulhnUS#"زyZunX E;fJS}gRcnjv՛Rt(DL m?J?!V|M2 ìuazTolaYRC(̣ xwTWM6l]8K~Y]B ^U@:DFV(6& *̕&FL?nb [&ѫwH#R7j)*7 tZa0=AHmP ȵ<ۼʺR`TԂZDA>3&hjMR{UmԱ;FL)MKi˜"rtNSg%Po\K(Ul6ga 0=r@rI}`&onV_[(Na6arFwR[Ցd`{=9ZHJ6!;.Qz9:EnQKctxLvB k}afH[*E7Iu˒bUPXA#TkοT%B5%%p'BK}H wʒ>{Ot!30/Jd'Ցzutl h)Wc0 0y&spB k/?55;5,'ĞuL̐5 _Xښm;#rXPK+KiYwuA0u[CsN0 0L`$G,/7^DԴΟeKd$ێnJ?Q)P 0ST:$n'wrn7 B*Qw~gKEY4"ʦQBCc %l 0 Ҡ'_Vu돨Kvڴ'A `݆-X7:z֮ax=2/%F10 XB5>>'B6ͪքZ;aݱvrV 6?AԮ o.1ET67#5꺞ddz 0 R`$~|~W!.dBg]x3ρ-|yqמ~NKŹBxoƄ&B!'NZS%t* KAR` u#_$nJCi*uu!B?ݠ4@,'8aa ; -5O~}LJ>)7q{?z:w߆{w WG2xVlvWI7Sɣ(b){/ D[ Ez Q+a8U LaX0c:H;RwogY4"*2[jwfaHvǸo*^OB>wԗ ȿ;5~xԡkp!r: ⛉[=5 PxjR epcXkP7-tdwR0 ,t>wD&7FX]L-&tmySD'Tvre:\qp a.BbpSH2c"XTʧ/sO<G;RN]ա׃BF=E\#6+Dn9O}*tlJG*Sl;Bj 4]Ұb#Uw2(RTW78RgR;j3 0L®4& cKY1RFjt8Yxn܁}ػ_5)[鈀"`h ך2iH-qIj#Q*_c_(lSw,g/UJ6wGmJP/yU9+laar`P1J3d E'| ga|םy-^7ÕDyѶm4ͮcPM UZpc=& Q7-MOD=l\]AIt.-rꎆڒ(v̘V `Y6 q=}nRJcrDҎ`4MH Xʸ´0{^F}/f䕼GaTI{5 5L }dP,[~c-Ӌ.x<5:j{v*:A6V˫5J(=k B~$+s@@le+,, Lq6b 5Q`nH`NJYߵ X*c5֘9JeYQ0")Xu M5d d bp=SGnE!F|d^#νg=~/EM(c3 Bغuk7Ri욳mCRͻk(`yx[ms6*մ0*9^ix[G)P (cJcNQJ6Ş4fDC:| nY C*e`Gd`t|sBX0n,2`kT*Ka2 `b:(EUЃ' ,RBk b!r9}%%nTn)Ƀwr>G<|_>?,f8;f|; 4AU|/\R~W;~'8zq=;jZtڮN#^݀aՖPmkv$O"ܻT, =="nC;&G`ھUK %Q\* ZaaG@Bx4pkpڹƋll1 ǽ8/Ml˱yM _<a>lU/-%/5zqcA(2 ÀpU+ _B}pI|4G4$?I- Ui90 ìz (q wc/\ιxɋNXtϿZy*qOcɧG)<%⬿> lpSb9;&с^Ob!K(z1 0 a$Pı> @ Bk܎=L3@Ddyan6¦mn.^5B#I )11޻\o4XKUc"6)QDÏآò!{;Q h3L0 0X I5mnD/] !qk$붂G*,% C[iB]0 V(%2?Dyf g*plF胳nh)E?lw@Paa; L,uG7X"zZije+L.6ZfIB*q,\a}WeiRā{UYBc| P6K*8}<r֍> v0E{aaz `<!JGGAlv4n RԵqYm8Yyd ux|^®yFEvّFLA-AwTGq"Bo> huzϐGc_MF0 0 v@rDj@(HU 7L-ƶtKmHXBc`4)sjO-8MruGb&"QT[U*UNVDa7Ծ-åogjN[I,qtaYp RQGj^' Wdu{*P)Y( 5&l1 `ל>tN4n%+DpVE,;= 0 0U5T.ݠ4Bt0=h-.Umîy‚)1[sP*X4irbV+MkD-F0@DZwaa:SV!UuKgJ ]pY]tOe)mc®9;Uwu!"_SNqHExpwvCiLcz<aYp$GdH񡮥SݢBx$ю̠ Tv='Ž[_T h<1WZLW0 0 v@rDٚB]F8 a aa36lچr. 
uGA+W7iW 0 0 [( +o뇈0LT5#$ 0 0 oa̘Ё|(G&:ۭ 0 0 38 AIzEX*$:Ҩax>xD􂃱Jdž w7 ݸaa<H:+N\_bV~³w![H.ƿ3욯ǟ׿M8 &_N2kΆT}6aaU; !hYŗF_Bq|쵱kQ7+7mO7.y/.اa} |;7uNAզxaaaV\& .W!wyZ>b;EGoǝ?zG= 8ȧ㜳mWqϿ{n}'B7r }pD؆y)>`,A"ۿq\!Yؽ`b}xp΋@0 SKݛYK`?`3@D0wߍ ^NwR㳟 ׽?C ҸW|Ǿ<\oGy6yƃs`Y®?/^z_7biz7~ݹ81 Lۿ}%0r+q {/>o0R6O\=|/%lƗ?w)/Bj7|  \y{_E6p7sKM')ҫtPD X⋮]ײvE DHGzIH/]ufrΙ9qp2y?zQ$ZϖU76(46 C EQGOniqyN{"y>,1e""ȓ\A S?IL@Ą%m-q)%RTCv-L.C"NG\/m?)eNiS֊W}Bca IDAT)т/KnZDƋ+/}}^WuONɭF~bh\f11DUq\Fw˖ȬRVϦ9Kː#HSDDUrfH7ɱRa$%4Bn8KJ4tT"WT|ʥx:UUfI/>cDZhv Yg/ iq4o7~ڱ{&+} Jux3yJ}=[l3!kk㇘WN^-Ĭ&jwJu)};Zf~1 gs屨ąi?Ÿ{bd݂]_ނuo{tLxΟ&[06J.Jy`>WL>64''[va f8Ptj= RcKFæ2m78Na4p?+Gj)?Mߢ:xbpF>>}2w3 bc2 %9xٹD;>V?t鉷%2\l’ϦpL[a,WE$NYٲ_c%y{x}2rf(Goh?G+0vmOVӦWCC8Oߚ* V,bц:nUW<+{믽E?c"Oy>KE]9K6cTΌ޲1CQ~x̙0ޘB3Q؄J**66_/`O~eת[խ2ͦU͜O~fm[^`A5)_iԋ'_#od[n8?檇_sLAz%u{_+lz"TR{4ݗdk8]E1st|r1[sts CFds|4 'ڬZ$IEwr8nx+[XrU2etiŇ:v9zoAOM R~-Jiv^G>=%o?﹑|T/y&cDdsM+}>Z$niNBbtk{ImzGƁ(MuuuClںJw_&nd ە8y6 s c>-aU'rTqO.-Y[r֡,ȱ⒯w'AbN Ջc+Ax>甋FNv9JmI[*=4m޵a'F עc {l^0WԹ)f;ZFlZL| 8GIWfD%֣{[Z+4S4fow߭h^-OP7;5è ^̊f^Nff=X.7قbVGۅ˚v`ZUNUǃFMXVή^y.7ɂb>g\JWߍg;EˉGjt ߝ*QEQEQ֨(((((RkT(((JQ(((F (((((RkT(((JQ(((F (((((RkT(((JQ(x7s!"'x/i".Q-Vr(g,) 7F~%؞]ߴbX[ؼm/nS~d7oKpy^~}ku2y f|r0knb$u2ͧ'(r^J%!n<$Si >yGcҔq]G~y>f/>@uן */N ^~a^Dhz<W-N]UpW ]$h,f1G<_0/'b&qm"ۍ A!Ց _+^g'y.,gw^A|)=uf2wx#-mGUgv1ܢ[ݺ4j,NJKVƵ:8Z1:qIVrGA,\ {c4ʎk zBqg[^1^Au1i5n18~ɲOT/T S+HK >rsh[ _4\IV.fƽ葑lYl֍Y@jE6lԂVj.Bqx\+@!IGZ7Mp0kJJYLcQUynf,[%ӮCb}phȔߝ>$%~Ed!(TeIǦI2`X)+˗ѷ&hi>p+g-9)1*YY-*[ 0OXK\>VDEKzfdeIxp4hu/'Wwm-v&Aueʗ+oD }" )钕.a{e_aT% IKǨ4 NwI|cCuf_#G""*"!#>8A\^ٻ|d!(111wpٹeTC$+;KR$E%<"V⒛ԅ{κы&q5NDP3Xo*7@"C*YYզgW~בIȔ0'M5 LO{\;6 +#o#1ё%"O~^c#bHTB<ٳ|IjVKfFOHǐC[e+$*"D3$=%I"e3c[˕JDXds(vis䗕G[_m!_<7Vc#$-=S2%*Ty}n Kzlԍ5劌|^n̙$JrjdggIBDJl+Ӿ*+c^$R?+I📿ykK``|hïzg(R(x nCeGe`]ڍzWnq٩JtH.D2b Gu+6ˆu+d cLʝnqWk_wI֥ry]\\iHcF2qU PʏR YTɫwd5sɠ@[mi2 rR؝7W^Kv/#rYe再%4$\^d<.ٸic]zJy$<:Q^H^r$x3xE˃/&[/=*hub"q%{W-1R\/j+WC @d˶JEEMbՑJٱsi\7JF<0M-JzKrzeR72T +9rYx.Y凒`IQŊ_#llZ\ +ϼU[]z$"U%K?X&WuK'q=!.x< @B {RZ5&I.I9e(+[gK\ )p2$'g9L5a7s(g2j$ck\>U/f~{2- _0-{t' <ڥ"Nb 
Yto#o!:؎_(76i34͡Af:?.MD[ҫVѻ*K&sf}mho{m[2b`g2&1?z'6  +3'fiҲq*˸IjИ34+w U'mڂVej6;Lvz.{.>gw |jJjnkMNԱ[(sDgmMlѪM?_.0a14cO G;KHb#G%CRjVZ xqҲGWt̢I aޗ╤&Sy`LX5 ~c|#wP/>{}$cp_}-uw?3a L?6̦or¬[h:vr9hF+GbڧKtDgW'8dOꇏoUVIV|SgStKfɖ ~ѼB ;~8ĥ]NQKAQϫi~S Y͔:5H39K6`P\R@iIOqzZ`Ұ[L',~GijԋVspnB`@b~bUQS~^T_pZPMN&k >zu>Jvp?0v=JiGDQ@d?V.$2Nd`|b%jYB"0ՔQ@SS~GWt?5M@sy~@':+(ğgӬfՄVs[ Kァ{y,D6WAo|55G}h'4C1k'8{vjleV0 vݢckҨfx\n,@5KpVVHd 1ߙ?xATr]2:_ǻO!n:hh"Ox ŏ2qUUHq _ȓC(KR+RT(ʟ3]Ey&Lωo_¸geq IuY;m x*MNU뢪(1gVaҭV"ȫpfTh:ݎ2\%'b!Xu 1tV-+iۻ𭉒%{ gc6 ~Ŭc5:u̦'9킝%n=33`xa Ъ#bPz Н-?r2|b#t5:4Cǫ[y_ lj>k;<% oϊ Sԫw;+)()'":O|ڳSaT`5`8Uݛc\x WO8 >(b|R1'a_xE ZKQ,EQ4 (.$~B5=8 ln^Jf-)3jB2hPZ^_NyC 7iPQZ_qP7%x\,5%c |Mڹ/a>tfNRn9^UUwc"k45LFn~ލK4{0M;eۧ\a req7܍2Xl\ٸc?ngfͱC~S}mxٶkƽ-7!^30e{+ugb*nM/ˈu{ٙWbӢ<3}i[ b~[Ln޸K+mvl7IY(8(*>Pb)Un/ԻXɬEVÎX359fY)Wn6u\Qta(7IhՈiv9?G/s*cʛAYQ>rU[S_:8Xo(;(vIl%:-Og>õǶ3yֿkps"{"9ėc6qԞ[crzP'I|f1i#qn0qy^Fʋ/Ɖ/Nj˝;l–HПohN!^T~"Tg IF{ \iIlԓMeY;{*&kWmIa B]gc({q=38~p'?]t#F3롌9ϲSq=m=Bo*riyqq(4fb Qd$b6YW/pChrj+PY$44̦]x ͭ8L>4lۖ@#n}:7D7£hեO=9D"+'&IX"HNAhxy[bCi۵;]epGqyyi$M۵#<ȯf_p-.oi \|G5u0[}ޫ%GBdt"oÄo!r|Cc۳~:-F-CHNͤan.a )06L_7඿?C:ktY9ðtDDJ `u39IO%.c%$2-R 鉱MR5 :K_=d!e'a6Yig7Km!a$i:)5;t8 %Q|aݚɇܖ-HDn ֊*:4ܗ'>FȚ0Z$2:aI4jYC54h{9A5l4Z6 0$~f;ѿ "ǂoh<{]j?.zvAt#ٟ\da91dQ.4NK U<ܣdF iխsqz5"5~3hoO :ĥg^a`D&e[C/ 5'5>:x lpݷ9I zm(g#5ODݤWW/z )%?;T:4t]MYw]ח|#k!"x^t݄~@B5t]/A su5 10Ԝe6 o}u% Yk3Խd:?]8= bx '0_|-830{4y޵\׻;9Cg'$EQ8T( l>i(9"^ft/w|o`W(򇠑:>| $3+ʟQEdKK(ʯz@EQEQE5j^EQEQEQj @EQEQE5*QEQEQ֨DQEQEQZEQEQEQj @EQEQE5*QEQEQ֨DQEQEQZEQEQEQj @EQEQE5f*:xIENDB`rally-0.9.1/doc/source/images/Report-Abort-on-SLA-task-2.png0000664000567000056710000040534513073417716024517 0ustar jenkinsjenkins00000000000000PNG  IHDRێsBIT|d IDATxwx\QduYd˽6ńЖȲfܽB3 ܱqGndfFScƒ\h6<K933=+BE!B!AU?E~<>cK[䶤ֿ~ےJ[#mJ[?s)_~B!B!EAB!B!!B!Bq ]!B!H.B!B\$@B!B!. 
!B!ЍF#fy^.$7 fr:@QjjjԺ,t: ᤤpil6Emddd* t !B!ߓsrr:uUUfq1k]̘1[ϊ+ѣ3F~_//J//DEE 裏㏳}֎so~$$$sɋ~C!B!Izf3eeev[,233Yd |}}}\Nl6( & 𥳧b005k41LZ.^Ohh(zLf V+SL`0@MM NɓILLQQQAcccPbccFs)jkkՑCLL mmm2j(Ǝѡ/,,S墬v"""ԩSddd^ϊbX֠}ѣaҤI|>;#--1ch@ccc9CZZjSUUbbbHIIԩS͛hө Ajj*dffR[[^g̙DDDv)//:B!B83@VUX||C-gCQ\.?jrrr0 x<v󉈈@#RSS5kǏCԄ%++'O@dd$ǏW8ro&?GѣDGGcaxb\s5TVVoc0HLL~LnN6m7ʕ+),,_&!!|￟}JEEO=?`Æ (l&55gys/_ÇywY`^{-+Wwuiii:tq{w۷kO> b.??l t:㣏>3f466pwzj- 7P\\̟'y衇;ټy3:ɄNӎ(L&E : yWزe ֭cŊ{yfƏOS|AV^}V!B!=.h5EQXhQPl4t@ss3 _^|/?l6ZZZHII!"">233zZyc=FGG̝wܹs!44b۶msWvZo~C}}=!!!,]BDD+Vp^˼yHLL_NZZfYk;Ú5k0ͬZl|IjkkcŊDGGP\\LWW f}`{vs… q8+#?UUy*O=ZnSOq嗳m6-nmm˔)Sp#3ZZjkk9~8]vvpI<їtz!B!PA/++0n8&OLqqGxɬ\(BCCtA <8X|TUUNZZ大Jggy說ҢoʡC;aԨQ(C=O~!$$mZE餿Bxx8Ѵk|I-K*}}}>}UUx<8Π6r^---Ҟصkz+DDD`4~Ҧ {Lee98p׫#`0ez=zeʔ)۴~>àQ <߽{7+W[oeժU:uݻwMt!B!ߕ ʠ4Hl&++v;˗/'::F*++eΜ9AuAff&6 LMM͐fp9u!}V2 zn7G[DDAm0vQQ%%%466r73>cZ6mW׭(?HPYYIgg'<K,aƌ+Wӟt}j=ze˖1yd&Mϲe!P!B!.8>6Ϳ2x, 'NO?EUUL;e444ɤIp\ןW4D`(©SHOO>'&&oo$~F\\F_Z푖)`g„ t:nq_W y饗ٿ?EEEgݏ~__>W_}U뷞#֭[/~>˴iXd6׹n X [Ȅ PU{wX<Gv?8NjjjxW#<cG\.І?S裏OyG?~<>2sL>}Y<|>zzzn:̙Ò%KF7ɓ'IXX .G?7nb`Xp\IB!gЇ O>q1zhz=O F [ZZ%** /^X+'+G?CUSwm߾wygl*&L`̝;@KK O>/[n}yzKK ֭[oGQZ[[)..fҤI7W_}>s}o67<1^~=ǏgժUZ9o͎;0 h"fϞ lfϞ=ڵ @qq1SL+>2rssGtRoΝ˽w܁l3v^B!B|(SL9נ7`0p9]:=Fէt9$%%IWwVU+f9hf6z=n߇>-s"""@υ>k,7ǏgjT):Qm'22şlȈHt: IOOO?T+D)~|DDzޞ^&#aaavnt:p\c=g6tvvo> gͦRq;p}M&aaaqDža2f6 ns^ϔ)L4CQqex}qBCB9k&t:***8r c̙SUUEEy!!돊OgͥʪJ˰X,x^ >-jy(B!fJKF8&&DJ8#G/vB!BRBt:_)--,B!B $O ^B!Ba`=0p ~|Ǿ>O|Xm"_Jm&%m=ܖU*miU#m=6a„B!B!Ea;-B!BGw B!BKl8!B!Jt!B! B!Bq ]!B!H.B!B\$@B!B!.ʚ5k$BB!B!.2EB!Bq.vB!B!B!BqI]!B!H.B!B\$@B!B!. !B! ЅB!BKB!B!%@t!B! B!Bq ]!B!H.B!B\$@B!B!. !B! 
ЅB!BK|zt~^'<>;|$Γqq>l6$%% yru=: |QUE[[]]]F`8|NxrrrF|t:X, ×lKOO,YЯt;駟Ѐbaɒ%t4^fKGUU(,,`0pWk>Z[[=zEi_OO;w2{lӿuttP]]M?>k7a``-[v#77b7k*{vꪳr ^|En7~]UUl֭[Ϻx~_lڴqEQ0a\s ZVyzzz;w磴 6P[[ VX)STPG||<1yyy3 K!wyAw:X!w(t:n7Rz{{(--ZUXXȶm8p^kIQٶm۶mp]i.>vmhjj(m6s\ܹm۶QSSsQDnn.g}~ww79?p:grPUۺg^y-87 gq\[ڍȑ\."??}L&{1V*|yfEaҥg=o^/oVTUv͛7bVl6wYz{{;7n-ԩS5kk1a*۷os:]˅hog̙t: Yv-] bbbu g֬Ylذr:ĢEB|7\X.!ķj .:˩6Ikk+^0yf|>c2왁E9|>tvv"))Ey<V+(Btt4111_*WUAgg'bX(vwwׇ` ..kp8hiih6<< OUUZ[[Irryx'66rс&"" 'KJJ cǎq=k.0x<+_`̙L#mawaF5\ ͜9Κ5kȑ#F||<&iȶF# ,`Æ |G̛7 !no !Ωʑ#Gp\t:̙bFII .rZ^^Ϋ C=ĨQxxgimmeƌ|glڴI+onnMdd$m۶{nzzzPU@RR7pƍ jZб{nx< Y`/f?nt:6lۍ^brJfΜ(466siA᷿-Fxx^z%:::1c˗/jc}}=> *&V\ɤI^|xrԨQ\yZ婧8{M6l2/^M6QVV*f,/_Nffy~g&dS_~ez{{1Lu]Z)}zEaΜ9ܹEqq1yyyCf_rh"gnèJtt4sٳga(..fɒ%(tuu~zq8h$22y|!i֬Ylٲ6mڗmB!I]|>vѣ2ev~Сa0Ájj'UUU:;;ZżᠣCkt2_w}v-#p86\^`͚5Z餹w}X^{;wgNp8hnn/ %%%YTtttxPUfEEEw~ÉrQSSO?͡Cen7_Xf ---|> nS]]͟gm@>קe)--'СCRXXSO=ŁΫقV?PSSfcŊdddh~֭ѣL<˗COOwTUU{z!RXr?x ۷osN&NHrr2>(GaΝz~T o"44nI&9y$. 60m4f3555@T\s5v9uolݺ+^Ϗcyxjl6ӧkuč7HJJ #Gm6zzz_?Lxx8ӧO~HHHrtvv0m4z=%%%8qEQXl-"<<>9ºu8}4wkatvv /rXx1s%:::}|>qZv)֭[GCC}L:5nRb")) u먨ʦ ؿ444Yx1 .$$$6֭[G]]v"==4t#n:o>Zwwv >T-DFF2n8IKKf3>}ͦ-x Nddn(fddXFII  `2ضm]]]p3qDt:V>Ǐ駟2o<ƎKדFYY 8k!.NU0BJV㲆2뛨m`Vo$@z}csՋ4s٩?݊wCUU-cTYfQ^^磠+W~EGG]<J)22~8Jii)TVVj?`ʔ)w}Zϸ8p:;vǏb -`6{J̓V-띓éS}5xᦇSU}ݍ(xA,XbڵtuuQWWGnn.Zià), ]]]Yn@FFF͉@]Al6:{75$|Iطo ЛӟD{{;aaa^iӦiq:lٲKBB?σѣ1LZ[QQiӘ0apqϟUWʱcPUpsMM ˵}2jjjPE˚ؠuV&߯UڬD?m6yF⡇ *yxw2NZFΑ ]=FG?q,H[iln'!BEmnlҒs1ёL08K4 P±JLF#'cT| ʹZA̼jjѣ`0ˉj:{z cbNɉjjĸ*jN-0on뤡qA_NijiG16+0&f.g:f䍣:a aVN|IMщ%'GYu=adesIJgZ;9v{ѣ2a H0&}4'+3e^%uDE31'㒭եeƏ4q^^fIAA˖-FS={6A Ҵ>y`!1F.2?n~H>q!F#TUUi}ۼ>Ǐs2\~~>ǎri]&L@DD!!!G9N9 &L0 (..w}E.¸ zӧqҥCNc…ZX,7* !''G; pA,XhDQEnoե[:eda!!8]n^ZV_LG+x(i`z<:Z\?qai|Jx7زS|}V;HqGʃYPXF p2sx^~C]= 0{ N]7R\^Þ _ tv߶ˊ]TwRRuJ[ a|N:w",4Yt='-99 KkwZ[ z={ N?1ۮ]ʉj>uEbf#QU\OgYr+jY::##%{mDv{Xm/Obb#sIsҤIA_@vv |RS~1`OܬM4Xww7F5pRRҰpzShhǎ;썎 UUvdeeLtvvC[['NvjPN7s=zWW=6tVVְx֢*`۵ASUUwwwke֬Y>|Ayy98f>[t|>~mĉ&''!# 4}EQٻw/>!zrrY,y}>6wxx8ö+͝ᎭYnXryݤ Azz:7tSPzҤI_-[`ٹs'wyryfjkk 
[o=1cP]]f#YYY?lZpB!`_y~!fY/1\w6>ĉj^:ql&- ;mIEm#m]J=jj.7'W?^]rϟ1l·~t ض0NCE%bW-߶>-UƒyHOCSU^` ̟֮Y7SZ]O|l4w߼}}O0kxFt-4g#ƏYS?5vP-I ̝>ѣ9tn5 u:N̡Ak'pr|fԟf޴IX"8NWo}+N|6vdϿnwЀdo&k֬ zN {x((( '';2[`/ˋ/x-`  _E|60ٲg&{<=ʦMoBcЎǵH<6tÁ R z^&lذ8q"QQQp͛(Ze@XXWI&j*n݊fI z=\ve石B$0șNt6xႭ2gk>O [ZZx>w\2lٲRL&v6x'h4hgGNuu5NxIUUPhh(fmۆZiϧ=~fsL(\wutvvr \.唗( &۞?c23hv(  -ogӒ":*Z1e[?MmC3!fƤn0uSQ;@(,]8>c2FSZuz:Hax-@WkhqE:]=d&qZFQj$b"#5K bkkkF\#@젠4p#BQƎ{ѣGkhd|ǔc1>1'$*!©֠u l,vڬ4Xy)Y+`890b;eT\ /CqE-eA^$O>[Vgmx}񢠠)LŇXqNRHvF8b}6:Sttf#8\6'?h:0'%.&Z[gF(#9^R'I>GQ1 5Ϧthsψ嗁yV+CFղZ_ j@eQ>9Thdtr>xgG'NkDYYF{o[s4K*>y$='+jINmVb-Q$`2LEiͤ&'RV]OCs;Gųma]}kdFK{'FC.SFQL6>Dw︎DB&Nq1d&ɧ)ƏIo L>}ċ¹s~m@>88\iCCCМ_EQHOOjjj6khhwJ^#sf3 QRRkjjxgʉ.]JVV֐$))Pl6,[lLu먪">>3JNNFQ*?NSS;w$??qzx ꫯӹ+mF}}=G%22Grrr~| l6ϟQ"""`Ѽ x<:::hH[o\\Y:oUUUrTJAA| ?9zXZ[[DS[neRyڵkgѢEX,ꫯ#h{nƨQ}"<<\;EEE8!455iSEz\x1#>---tRTU%55pRSS蠾!cXV7n8t:---]˭Jjj*zh8q"zс н^SUJB!o T1t1i\K`|NN7ӌN-;VQNaBNݽݸ>MDZr:Wtg./>sǕTPLM"<_.ʻ1u- YQ ",SV:x~ Z4 ǦH1a!!\BL}aOe48?ۼiNToNGwLl4{ eLz YiIX"fr͞JNF 5 n;=vLDjrH>.=?| >6O|sw5kYunj]𗔔eN7W[=ڹe˖!8n77nE5t:MKk|>1b6:&K9p-""B ˃8԰}v*++1" 8y9Dr:UUeڵZ9--M zݫ Tx8vUUUX,a2EZI)SFWn*++9x6ynۍ*M ,`EEEZFvƌ\O3˴nܸJ-}.:N+onnRZԩS7ydRjjd T8 z!}v9z("#pسgOP{].7nԦқ4iƒ?8aԩ(ڍͦM,`00}tEh4R__OEEÖ Zx^L&ӈT!fO̼ܠEl2i%߫-dtv¨ =fOe?,50L z*: &:(E-+_ƫ/cU 1sݕ Xqm?AOf(~0LF%"<0nzNa)Ll27SUL9lѬ)L?n z=q1~EcV߰ kW6;b) oXZqy `)̙MiO%8`U{o׫  9AĥGUU-X1 ̘1^gƌرѣGYx1ڜԻvB1i$:::سg6p,je˖-5J Df͚šC8v/2W^y%qqq~z***i@xؼy3 [}tuu[oip8l޼Buٳ&$$ٌ`֭1qDl6ٳ'(YWWGjj*ZX;}kdf9q{UViY#Gv䪫t:y뭷Z$'',0asСC466ezù+xwhnngk%--~nQU!#+ԩSٺuVe`4ˊ0}tjkkW_媫bx())w)%&""ꪫnk/0ٴi^h-Zo@͚5ݻwĆ ̝;0Xn(µ^{9rίll$&NHII Ge``/d->|0I3sLm^s\7~Xڰ`8jo===̙3o߮⊯$9s&;v젡?q:ܹS>}VKVV| f͚lnF IDATJUU9u8(B!۾h4ht:c .3 -{щCwfP#H%hޟ#s>-<}=1>INuռnNN?GQQj7`no}j2b>#n7+V`Ŋ: z̆ p:,]^osp:ܹ-[h_O,k׮9Qj퓒黾;t:=<1<<'2w\***Xd K,dƎ(X,ƎˢEp\̛7gV+FbŊx^^y.rBCCӃ2n8bbb[oe…TVV{Ջ5(('wCt3PZZw}Gjj* 4Rә1cFcbb4i)ݗyccc덣Pܹ4֯_1|>F= &h], ڵLϟϠAhݺ5۷g̘1,]bΝ[/nf7w\%?grrrgPpbĈ jA$==+ȍ7[oEFF{׺j;S^]WtM[+WL6lXF[o3fPZZʚ5kXfMpWk׌N㪫"==J/^ŋ1LuZjժ|n[*m۶g4Q!'|lF[ 
xZg\p8O>Zl6oDD[d2K-t:-Z`Ȑ!\{t:BBBh׮qqthѢFUU M6Z3֪*С)kv~ic2HMMnSXX.zZnͨQ9rdjHNN>eͺ`0ЪU+-⡡Zms||<]tjRUUE-HJJ"&&FVDD:tRUUAӪU+Fq~yOdd$Z3pNϞ=0a]vjOPPl@||>;uUUmۖ-[jvu易@Kf3:t뮣W^甜O?QTTlSN Z:>=@׮] RY}~0aB~ݵbll,m۶%!)$''S]]_?~)(Nee%aaamdѩS'<Z Ą  N֙^o߾xdee|rTUn8)&B(jJrss1ZbsZR;پiBU:<6n2N,ѳ8N;~tvIZX}>xZ gTU?fժU'ۭl>՞ǣ5o㧪V(Zץ_p\UU6m;#nkԾ'^㪪jǯ!ζ]׫syjy 6}>. ߏhԵ\jYvjujƘǷ~Kzz:۷ogȐ!vO5~=9Nyy9[nE3|pI΅BYBqQzر >SJJJlW_h4jӯ^iʏ?SNN!8Ԡ !(n-Zy ЫW NM6 0իWaC(++'00c6J!B!EAQ)//jұcGF-翁NcTVV_ϓlҥ^HyEhpF66 EDӶe *p$#ORB<?>'Y Jr_Djlfne}26Ҳ:nL`{[B!B e0ӯ0 =^n?; ?EQx*0Wgv'+p:yމ:>\-tHdqn[ѭce|r-r,'lEx(k6:-6{}͡,r5 6B!B!ΪISNpXVӮ0~^XinE9a;'ls ӱ]ED(7 EQh:kbPN ph&iYۮᒔ6mATUmC!B!B4H޶U 1ᤴ@e-)F5QVQum&lV 6 fQ[f4Q7XO%jEW}Mw^!B!8&i^FZӟj)j?'z]>ku?רDF<#p%[B!B!į }u'e|f^{hұ]kVo߬#jAoУ(5kXi'-C)Mc]n'E@rtnȎ}i`)qQtJN_]U~JNp zwB!|S|{h4ןz**NB 6neج5S͜)Q974  fdE8\n"PP *Ÿkx),)b6bAeP{ :rlV3Ax}>2s).-b6:> ۯⷨ&44q Oz)++#(( B!@#֠ۃ0Yt0fQ?t⢚K1-N[Ήg4hemq@e^]ߛJz=aaalbj``}B!МoVZ|rIyzf3Gt!BqQ8J~Ԑ]Jr@rB!ŤIAB!B!DH.B!B\$AB!B!. Jz=~cEUUt:yF&B!Ѡ ELB4AQL&ӅE!Bqh(6 Kyy Վ!B!BӠ]Qv;6 1 0L !B!h<芢`41MRUJKŀ(*UnV#ʪjkeV:},E^ :JVt?9RXMDVEU}*R.BqH.@UU֮]b[nl6-͛Yv-?RRR馛Xp!<,Z5khIwMRR/.,ZlIqq1? رc<3L2 pWөS'>3V\N`0pm( 3gW_vK/o5kk_ێ#حF+ݸ|~Myp^<>RO%&“cۑS䩥x"l-~|8U.O.9t)pxKqVLWE(x*-ì<1-f|vcNUmeLۄ!-!B i.k׮^kXhӦ 夦e.7r%yfV^_^xڴiٳ|8JJJ{;),,dʖ-[ ..Áe׮]|zL>Ν;3g8|0{n^/w&66O[T{o;ҳuQQCRhncC8Wyy=FbDS=a920-F=SSlw f=NY3@檮;Bl^!B\hL(뭳Mdd$Z"##vAFF^AiE+gРAݻC^WgffAUU nӥKv aݺu|>RRR: EQ042zm`gP< *T㕕Gyo}6{+4i#ǘvD|q\r75Id8P1!BFt!ЫW/ Y|9>_^O?UUٳ'Я_?6mƍׯ:-Z`ۙ8q"ӟڵ+Zb~d, ϧm۶DGGY޼ys, &L`ۗRl69rtINN/e˖c[W8>vgUdFEiSNvj p*nSק(B6)a uk*Z- #һm(ͤ !B' !HMMeС|lܸ-[αcǸ[GQ.>#t:]t`Ȑ![ӧǚ5k8p z^9vΝ;_?g7| /@RRk׮%55J||*^򫹽ٌaV+G MV:Ѷk10Xv3An@^%Ů|}T|ؗS䰚0&cx{M&h#ЅB!. EvBcOY{}6{nCuu56}j9G߶mfN:ifggvZn7mڴW^t:lBtt4111g?ξ}ݻ7˖-[h۶-fhٲ%Ӧv۽{7;vM6ѭ[7y j󺵊l:ZJ!U]Y J àW*vpxUw#! 
LnF Ep~WPrJi(SѠ02ErdW7 !,qNoP*]>2\5]Y_%5ΰTyؓSv0u8VFamCL&B!ί'說_LHsoIh#Ao_wv2K$~M!B5㡸[) [t:>',HYjB!5񐗗dh4^S HUUTUtb2oIj|ՠ !B +//h4b2$97PNjtx.tHB!BDtǣ $jB!Bq5(AޅB!B%#T !B!IЅB!B$B!B!E@t!B!"pQ /"Bl OUUm7UU), ,؆N©JFn)& tbcB!B!-AWU2{$bd.ߓvWq׹e~k:GsJ78 O=|;s2Vm?n ?FŨX<^N:E!fBՔ)x~Vр߯io 1@ &,Ć^%rxP0jUn|~Ql@b!&=蔚r59.!~ i0d>z!Bq)^^%芢%"ٲ/ Ɵv}˶9XF|~?28^\ITxxMx;x5O 4ʾk,ʟ@X^!nJ82|GK0utncQR>H`+Nj*)("E0Ζ}9ed+iYET;Urbhی;v#$_d,./a!6.ٖ|zkg2*vY#:$E1iT-uD@@FB?nS\\LyyE!BL&ר5}u8nβSO_#e.ޝA2ٕv Yex~ަ'x$ƅctb%ۋBl?+vұMsf-L2zw#w`f@Vd/hN =:Ģ5пZwͱ 8]^# z喡( ^9Kq[ٝvIKY01vzvc֣,YADHyEZ0t3Cqwf1rnR*I@@hE: !B!&=u]::bO߯fQҔzu\GBL3FO @7{d3Q^"e4[Mώ:v>]ĵCuBo4;7"E0za_{yG my~ێV!D3 ]Za0)* " hv V=:Gt IDAT !B!_裸+¸A)lޗ߯(peD7+pv1ޯ~(e4 рl@d3,s_pݨ@pŤ=0t V#T\@*UU  neҨ.lٗͪMi8\^b"*b x}~~˫jb6Nݟ_!B!KL**KǰqO1N;gUUe<,&.&=.3Զ r :}* E meN6"$Ȃ|>_jks )rQVHL䫺=%^_S7V՚zPc0N;2 `5!.!B!Mt:FHf\ ltb<^?weLÎ f|u1[++Wjת螻tw]u)n,;R3sRPRŨI$ƅqE1o6\[{?jkR4[gox;ܽ5Ee5q~(n>BqQݞN|d04g<OAI$߾B!BO>>TYY^Gw~YP[c8Cx-%.2}a4ԴqԶCqXM ٖ@ H'8Ђf1ѩm &Fb(tDtNDU‰l^m7(f?n[6. OMR v{YL\.JqatLhA6ktth L˨P+8\^ZG-%@+1Akj6.>aQJ'&{eLixZ2B!EUOn]_nn.FmgWUoNf&TEMϻѐTUSANip|g*CUU>\v'lW)QUUeTUWAqMՄb.t(B!B@4qkvZ( ۙPENAwaEM*A/IB!BIg!B!" B!Bq]!B!H.B!B\$AB!B!. !B!MVwt XNvC!BqhP>su4u,B>r)}/t(B!B@tk^ߔ⬮!3PUEQyىOovb9]L )|s-מO]s:&_'_z]LbE!(Ρͺ 6''[mR5q_˦~?<^l?--cRZZz}w~|\=zG*+V`ʔ)yzy駙8q"֭ro2n8ƏK/W_eȑ=sv|WtM?{Ǐklڴn;w6~0zh cƌv__^ZΘ1ciKJJؾ};UW]EEE=Zl'Ndƌ8Ny&O]wɓ>}:~.*> rW3vXp8媪d&Mk~.]_ѣy'zoZ>(QUcǎ]{Uee%۶mRQQȑ#|PUdz>c=뭳p0en6ҴuTUw;祿\{n`ԨQLBll,f%Kmݦ( g 55J?Dbb"&LjҥKٻw/~)lذ^{۷sw5΅e˖-L:X}z1FE!++K;۷or1&qumCÏ{߽s fԩ>|X={曱X,dzl2/96 &&^{<}ݳݻ={`X6lVhl^͛7OӡC:OTUeܹ̜9EQ>^R9{luFz̝;Eaj3z=gҤIVf)//ba\.-{ 0۷SUUŧ~SO=0sͳ>˶m eȑL|V+ŋq:1QF( ϧM6ӧ1~gqupt:3gC Ac0$*p8xwi׮}硇h42}t<VU;ǟ~) BCCIKKcٲe}X,,Zo͕W^I^O׿E`"MAU6[_MykڦM>|8Ջ_~G}?믿N=ٹ[ؿ? 
6\xq:zjƏzkٺu+Px޼y>||y~al6ǎcXVz{ǿoF#Ʉje޽̟?ĉYd C FQQ_CRRk׮QRR),,oEEEoj^/G%11ZjEXXuj D1uTNJV:t(r G!))/,Zljeʔ)0mڴӞ;ɓ5k۷'33￟?ZuiBxW3g6BYYW\q6m"!!?Z毿Kj]jWFUU-Zĵ^5ؾ};}%''DnSVV/%{/:t 11I&1g<sa̘1DDDt:4ig}]'ڼy3sw$$33jf͚Ÿq{ͭ7Jdd$-[R\\Lyy9[laΜ9$QQQL8Qz= , 77YСCX,jSNR\\|NǹkYm7YfOӥKbcc뮻xmF~3g~[oE^^EEE je]Is=ԩSq8r-ӯ_?VZ=܃dg~!BBB޽;v$nB!94qC'h,~5kFCɓygܹ3Æ K/%;;|@QQ˖-#66yaXG]ܹs5j3f@QEa͚5gpl2t±cɓq\۷| :0}]RRRhӦ <nVZš*楗^{2UUzyLNGee%.KK ^o͗ތ BCCIOOgŊzә:u*P&l6INxh4ұcGFIQQ <x&xiݺ5?UU/q:,Zpuƈ#κnEESN;^1cNDD| =+Vp7$*o?v<= ,`„ erJ҈wd2Չv3m4zɕW^SO=-QXXHpp0|vnk>6 шN`4x(..GeРA :N^ٌlFQF#~:ѣGhժGfĈT{GvN6 ;`ȑ>7|L֭;x(((+cذaڵ rJl`4Z(^vBunMGCSg,SZvP`JOߗ qE3|peJJb#NӾ0ЫW/KqΝluz`Z߾}2A|uqFDDЮ];/yrWj=wPyy9nݡCm@ϭbr*z6xYuu`DQzNCQ*+++p\x<_Swx^JKK6l;wfɒ%ZIIIL&AMM֭[ǪU?>ݻo믿lf۷Ǐשi-At:;~.̙\Ij ߏ( \q;̛7P>tv͚5k QQQx^z==z@ӡ騨`Ȑ!nnTU ^$Oyf0뮻`٘0aCEQz6c رc|~{= ;b퀏1 *vǏgΜ9Αh'd rssO>NpQ\oNǎ`0`0x饗1cTZ='y !h\ o⮪g| $.z6b30t76%zSt^_dvv6>>Y~=;vjN$ a5|V u꺣Tp|Mŋٱc>YPk5'֌7Tme͙9s&~!P$;++(>LQQTTTh$;;}vd={;0|p8/\6blCj 5j5.//FY?16ϱco>{9MƖ-[Քl8 V;VVZW_1c 8z(+W䣏>Қ UUU}*++nB\\_~%3grqAk)//'##IVVVUI_x>L?o^Q, wq7ndӦM,]'|f?Oh=sN^|EuV>ó&.Nza,iϑ8N>#֭[Ǵix7X|9`6>}:| ~O>2mO^'..޽{QUt`0hY{;?w^!w84?zVCjqժU+l6:t֭[Yl>@(++#-- Cvv6vB4z؊)X:؊g}4;A)gҚWuwRkf ~I$V6k;̵:T]-F%Ęd#AU 4rm% ^K^'^u/^L>}3g[n%??ӧ3`f3͛c6yp0p@/_Ǚ6mZ~3{lvž}xW-Z૯";;W_}#G2fӉf#66;voS]] UXXX'OLLh4K/QYYŋٻw/;GՒZӦMcƍlܸYfOӿ.2***_<@nn.Æ +I&a>|8+W{;老!CΝ;;T-ZDDD :{L2lt5Zs}1cƌ*€8psΥz2E!&& 6Oښ-[R^^s=GEEUUU-fjZӇdeeqAz-.3( ӧO/s7_O,֭;cuO>8>(f>xi 0s=|_|ǏיvYb۶ml6oSRRѣ駟С;w6l߾'x f'`l޼޽{O!Ťe61w&jK|#lMi8jAɍX8=Zqe&T8ݽ9[sNqdVf@Cѭe Od<^6oR$m`G>bCxL6޾#5v6eZgaʊld"zP\=&&+VxLƇ~Hvv6W_}5J)ڵkǛoIXXǏ77ޠ 6W^y|{<`kƸqbĈxyy1`Cq=ЫW/>kѪU+, o6?޽;&/|]׉3NV}m޼LDFF'%%ѢE xG9~8g 0w}___BBB d21j(ƍ?@vv6 0&s%11-[D߾}y駙:u*sΥG<nJJ~ϟOLL =\&gҼys#ׯӦMwa„ tЁ_cǎ9r$]w~~~5Gc1i$ڵk?'|BBB^^^ƜD4zSO=7L$$$ĉ;vq1aÆt:Yf ;w4M3Ι7ɭcDttGzK/dTlv҅zsDƍߟ~g}O?=zpWoү_?Zje\W7a<#<ݛ=zлwo|||X,ϑ#Go>nv-[?$&&~g}G} y|}})**bҥL0#䩧/dΜ9z$96*?cAMEIOe.t-JgHޘǯlø/7scX|M>݃Z/(G4v: oMtMۻ #),wMW/_C[3yr*k AfdNg=}Uus>Q"#\.Vu_WIcxqW~:Nh>y3putOt:ƘCwW3u퍟GaZ9k䚚*** 
l6cZIv1dUUUE{gR(0ІzxNJyy1BX)EUU{y]ۖc 8J|}}rp8(--5wbp{,b1A)EIIq|;w98)q_vݘ.NRvݘL)ʼn'43^ۥ0ΑduO8d:c'ZaX SsRYYj5ERYYNhl\ uq-L.//f|" tiUTT` \!$LR p8y[h$#UFPip9'(,2]Cp>'*؛-  +ٗ[p ÉiWX1nGKq:56~^&4 4TU9ByXyPʮBu:rk2]p`6kb4ΉZ|uAɩ`%sL&1 ]/,5PNF6;1z] !.$qjIsvqIjwˎ{18%_(Je{ k [SPRݽ9օ _ {WJe'tkvƬk:^Ge8گNP(,fuq& EVB,<3c{ۅ/r5 !B!Nvcei^&jlNJc1IBT MC}+pQ&FۦAzcO|]36 M`*v?$2ț@ok1ѹEbuf!6o RNL& /ŃIh,/v䞠F""Ѡ86^>]!B0s R3c)흹U~s=GJOm}ME9+9j J]8,ޙ籼~4~|!*.B!8yuqP6g쌭9R8B\$BB!B_:ƅѲqP}fG!Bрh/qܢJN.ms[;wvB! 9J)ʪl]b"</"B!h@9@B!B!ţwB!B!B!B H.B!B4 !B! B!B!D B!BH.B!B4΀QJ`(++ti~~~iZ=7J)4M3ie%B!. ЅD)dŊlذcǎauPׯM6mA^~~>3gb0zh-/J)>|8-ZE)Ŝ9sʢGtԩ$B!. ЅDrA, t:'//s5лwz+++ٱc>>>lz͋R ٳgt8Ν;IHH﬈s"==CBZZZ}gI! B\R <<;ҼysL&nvv6ŋˣUVv4Mwx{{{j|r;vLJ۷c2ر#^^^F^ YjjLjj*:t0sb֭dffRYYI`` mڴm۶{8ټy3EEExyyѴiStBpp0QUUڵkl$&&ҼyZe "%%Yv-ǎC47nLNCAj%##;vPTTiӮ];ZlYR9rkR^^/m۶%11;v`6IJJl6SXXȖ-[ !55UVo>޽QnYYYlٲv;ƚ5k(((@45jDN6WJm6 sb X,tЁvp8شiN.]<7555l۶{RYYI@@III$''c[RRºu %55;v{n $%%dL&1DAA`ڴiCf~yB!ĥEt!.Ate7n{UQuz#ps_}eee̟?}r5R]v1k,:t'ߟt#?YFF};v Zn|M2rׯ_Ov;v,nzz:-[[oQFLRl6Oβep:fѤ:˗/'77Ν;3vX򯨨`ڴirW`ɒ%x?3rHX<>#vQk٢Eh߾=wq1t|rO ren:8q"&\MFfشi7n\@nݰl|w\̚5Ze1b b\+V`۶mt҅ JJJ׮]ˀ8rwH+==V)s!u-[F||}l޽ݻkVt!B$@Lxx8yyy̞=;wF۶miܸ1-fgg_PUUE˖-:t(lڴ˗l2}vҹsg8@NN7oЕRF׺uk<*3uTˉeᄆn:,YYz5ɓ'KHH#FiӦ8pg1?#N~ҥ %%%,^}_All,;v$77L (ǏKذa3fpF>}p8X͛73sL+J)֬YcLwWngÆ ,X[etmmۖݻcYt)k׮8Gk 77\i۶1ݏ?1L 0Ν;S\\̒%Kj͛>}:v:Я_?N'+Wdƍ̞=`zᑗu]Gxx8-"33EH޽)))a2o<MӨd:t` bbbbdeeɽ1rrr!55nݺp89|0K,{EKjj*iiiFB!H.%d2q73yd$33pM֪U+С-?UUUiӆ{pӧO'==~yaÆ1j(v׮]a׮]TTTNsн{w.nJ).\HYYk8o۶}j*rrr G1*ZhABB&MdffҶm.#G|r4McX־}{^y?W@LLњw^RTTTpq'NxR;wR]]MDDDd2޽{q8DDDвeKc¾S˸{hƁ(..LJ2~ZO.ʽK>}<>^ii1LaС$ nj'_UTTJr;|Q91wZl6J)Vk8Dz믿αcjU x{{{LyG!. 
q *++1H||<Æ ȑ#L2,6l#F6l8mc2i2]׹صkp80nF`sjw*1؁:kv{l6\er:.'''HYYvClذ]3ӶZ*ENN8myR;v0uT#4L2by}}}Oǣ())aժUg-bݺuuǽΩ:yũN ⮜uS׵5B!8B\bR̜9-[йsgF]g@դI.r}jjjLp!1f.aaa2خ];8~8.u9yZ˝N'VVtW]n9rd& Rkq-xKJJk=mZPPmڴaݺulݺvލdsyqW4jԈg N6'N믿Ltt4駟uRv -99]vm^v;#ٌ9PS{!B\( q4 ???***̤;5L+s8L&bbb<ַl̚5  :c枩|ʕlڴۭ[:?߿8֭㫯"&&{prsso yyyןO^DGG{tCWJ͆  2&)u]a222X~=y=޹<]\\… bС5I\AAEEEqyX?44KqqqMm۶gkw[QQM48u[xx8QVVFPP1Qۉ'Xp!fC^kDdd$5bʕ=zm۞wEB!ĹIℸ%''i͝;ף:2.\l1^`vJ)Y`k׮EsjA4x5ئMعs'^^^tۙfc˗)fXV5Z׬Yc]w;t} .u\!99ٰaDZWWW߳pB<ք"""(..fܹ(j>/555{m٘={6 .d޽twZyynDFFb޼yF5sju9 *aҥvΝ˂ ؽ{z'L&cvuQTT<77׸N~!cӅBQ q JLL$%%m۶`8@ fǦMI-J) ɓٶm~)]tۛM6zj @PP9%::ژ-99V+4MclٲGҷo_t]gʕ߿ooozBy IDATꅮӇkrq}]Odd$`…XVp\Ǔ–-[o)((u֔xb8oټ}||h߾= ,wƒA@T+Wdf r-!!!F?At:ٱc6l0:tLZjY~=夤PRRkgN.}:۶mqѻwo4M㧟~b׮]ڵ ܹsyr2e [l᧟~X^W 4Ƃ Xl˖-30rH:utֲ8e{|'2uT[Tzp͚51k,$@B!ASNKڵ\l6L6mHLL~qGn: 4HiٲG`rq  iӦ ZEubcckͪ]UUšCuxqօرÇt:]vueee_}iF z-J)Yfvb`63z(t$22;Ou(++Cu6mJvhڴzFuKxx8O>\#جnfZX,ƞܔR}_:6SL?n׹#GrRXXy/!!"??kmےJrr2NH !eeer-TUUbZy';v,VjѣG g!66|^z%|}}֭CQQc)--Ϗ5j[oEJJ Dn݊$55l6pBBBC-B\,8N{9y׹i߾=ETT8NQ\\̍7H`` Ff0͌?RSVVV߇*B\0J)fΜywHMM'_k!22v bP]]&>>@^^4i҄jjjػw/>>>hln8كjUV[Jt!B/^Çyw;4hvIVVۛ|.]O?͠Aعs'UL8$.]ʛo?NiӦLj#/ٳgc6lhFxx8sѣ :֭[Gii)~)?뻨Stt47x#$''3p@,YBNN7x#QQQc=FDDG4z!7ߌb?W^y8y>Lxx8GѣY 6УGfϞMRR_~9999vÌ3:u*<& 믿W\QpУGf͚ŏ?ȝwIXX}ݼ⋌=KjetڵKE 뚒bR_s5<ӄt:uw}rҤI|1c_^@)Euu5})((૯bڴiTWW3i$#Fb޼yDEElٲ?`bccuRy7o΂ kݻ7{6Z È#7o `l61"⒦iÆ UV}:]wEEEr !KPJѺuk-Z޽{Yd fbĉiX ÇñX,;; 7@.]1ct:ȑ#Ջ˗P.i߾=3f`giI._HZZӹsgjjjꪫ8z(?3u)W|=z47o6> iXXw6f?|0qqq~.A qv7.?>}p򈋋3s8pcfϞ]"B|I[̙ĉݻ7Æ ])//£l6STTDQQ999y睘L&كf_Bdd$3f@uvIvwޡO?+o z->#uV% Ѕ III[n>{l6<ȫZg`nׇ^{?N:kh"VXw_o&j̘1L6!C`6ٿ?'N+ą{nzMEE~ 0뮻ze4oޜaÆϢExaK̝;AUUfB!_6x`2tP6mJNNtޝL~GzENN7FE^^&W_}GqFoXu]7|w܁j%++ 2uT/^̕W^Iii)TUU#дiS:vjqX:L<3 !ğCu Frr2^^^mۖgy1c`6 gϞG=h׮mڴ!**={r퍿?7|3?0>>>DFFҷo_BCCu0G.]۷/v-[2qDFdq|||֭[64wc=wߍ#66={oAjj*AAAoߞ[o7桇"66"##ڵ+mڴ! ^zѼyz>Z!4-ZpWEFٳ'|-Zk׮?Nbb"Gt ANx7c-¸qHLLo߾l6Zl+B׮]2d8ڷo믿N8p ƌÿ/,K}*{!B!,B!B!D B!BH.B!B4 !B! B!B!D B!BH.B!B4΀²Z8ΆB! t]3 ЅĜ8qΆB! iڴg qd2w6B!g`65!.ݎl!B!@4,g2IB!BH.B!B4 !B! B!B!D ԐlHyʹӐʋR}{\_hgJ!] 
B4${oE%3,\hK?4 ?Dn:ڮЦughomϮmV+_Mm?pgpG¡\h~U&4cOоmHLpsF&ɵ]Rk·RhNF]\?3fAaKtNQ: PB{<* 4kzض}#h3~4C/}˫ƻq~~$z=]vWZYzY,P^z8z{PVf34 l64Me9 ~v(-\)PU~*ʃ`6ƉCB\ɄVV^/JuL{݄r:] e`wfs|}]l(]Gq{ʫ2Qͣ헅hNK'Rheepڷ:&\ϙ G!<)MsUԠ n᪬t+jS @[ f-Z)]Q5b]V)5Tt$*(u;u**\]Q]+*~]8 e<򪪳ܛ4qohVs]g7ef#M~[\8ݧ0" {PA=9*׳ɝ\d6??:Unڌ߻H-'~-`WuWy|m@ V;ڗpΜWhφD~6XO ƺ*Ѧ9s ڒh:yh6uh*T[hOAuH(+Cn8^ эоg? dl1 C3@};]j8a Wx{2armKM<{y׌ Ch+C4=t;]PS㺆J1ן[8Q`9mD>PGk!QVpg~~}s:]8A'%+WOz*&[-\ a8_BB7ڦg |e߂w_G0mvA]>aS/Bqi']2{o+mzq"=,Z ûO?kh;vCH0ΗdWw3!(T:?q8_j9Rh?V8^v5?'_@۳BCpsvhhqg"tZ;] T4sɟuݺ^xmbPƨoD)Wkyp0j5(ch} K+@9vF{E]{ey=|xKlTR͗x+_@a1};ԠO/ GRSמԔPO;W^zdܯ  D|Ihh}RBC{պtzvhO**m.riޟ4Wڧ_}+8={"|-O|} cu+Knp=cn݆됑fzuCoiUB`% BU5ڴ٨D-(*Bj~P1hpNz}hSMף}=U=7 5n j~yyp8P];]S~#0]`忕ɶ"c`&MPChZa!4vJ()u):w=۳Bf8?lp_\BN@j[y¥8Zy k ƭlYP=5AhsA D=$'6ԂEp0Dv{͙꜆Sh'9#?_~s7#~[hU2L_f3jp{`^'h sˏAΉ`Ѐ>()Et<7yKo@I DG|蜆khs@ zs4`F5ܔ^U\_,E k6}O~uU}㪝VGN~Thܸ̘6w!Qih_|נ1@~3deOx-G{uTHԠhsMѾ5|hB#8Qf66nEi5iZtU)̛ xoDF0=h[͜jj寳KV!`WCujih_} YWw|E 5t齈"EP@l`^ "boA@HB JBzL P}Gr2;;37S6! ٢T;D|%4o N$cI)ƤUeۡe$⥷獐| F6@vhX3_& ZWå{vQG1-*#f́?lRC9r(*;jA6iXl oQÝa>(.Q$vD!rr="6Dٯ?rAmإTi)"[5G|2x)Gy~?i*؉2"/k7">h.E^X*,'SG@a`Ko=d,kCS@~Z)[]T,Ĭ#"1cA6nTŨپ=U/MP/㞉P^ā8ĴZvE,\l!Z6ٽ3bRHIF^R\|A|b޷AG VAsp1{uY^w 5иbFʼ2";[ Uw6GJDFjꅀ99D2UYգ 8+Ͼl;_|zlC<%Wu# IDATbL gaTg8՛ jΓ"O+s6-Uڿ;[lT5# qeҴM Wo.D,[fd>㙦:Y;~2o>cu?FU8>@_|O}߄xbDD^>Rf5w1~^cԔ-,_wEPV1UsHMG>,^ݐ.B|9,]񏘷{5FJ}wAFq0rˡ9djJ@THFk"}AxdF SM[1&NPaS녔 pKKݣw3pGê2~_pfxߩtb Oa>pLJܯQ=^^\͚">_P:voaNRHU'DlqV"&!ݷ'l~ C5_\* gI#UOa54d blO>xs6iaXDz&ƻs0-S+Ͼx'#B I'LF<:$!]1g ib<2G1ɩxl*#O e԰c),xkvYZ ė իG@6"̬FK  @x0^']~~!DLT8Uj^ť9z!hyI b+yYMӆ!.E(]!bУ GW h8pH?n mkZ1f< '"/ ɧW͸EϙdHg?X^!ʛN@p";cTÓ!.^M2Uܰ C-@}7"}ѱ ?E͛ޘF[l(@,Zs빤4E F^:2S,6RC<\1DI* >1I5U B,F&NQo9àyAkLADWE=Gk4W_H::XUH /tjzFٽZ)$qmMELE9e `֦\pmc{c %%ȫ^X5qHT#L 6O c>9{gÎ@BY9r8}5ij8a`>0tt;gbA;6ZB' RҐcd BJë{yɽ{ LS-H<9 n%}UٯLF\H:WWX>pzā85,o0AwD~гzF偱s/r3O CK"'@nHD ee* AAԥ!9q*]hڵv .?~B*oiW]PMBlށ(,D49n,膈Si&4X-֫y^֓#"Gg?"-C}Cj~uBrPջr}x9T,*B !0&px"vADM1Oqi"4m''Eu7yXƏ$7 
\^Jw~Djk䘫O*]vׯ8tkpyR@^^\>]n'bW4"5yӏpTUn`zYms_ADqÝܔjݎ=e3\pb>#}5puȮԢ~je@MlD5!,wm&xjǑQsØ3G:\Ր|}"=NP8=8RN!N,^ QjÄ۠}[D1Uf&"G c+Dˍ=6V*_!$(1hwصyQaZdG"#[c}z U TS4MSG:hDL4mCU o0= sZcʣ0 [)q^&~a\. BU$mi"f}*UDN"*5C]je+ޠ_Tc~*5nKs p ^@ݽM+jWU]3eeR jEާGsOit ݯ*eEVXYV Uz_v8XU#D'E5ltyt{-UC+o}kәPR Fϙ7+*/z"f^={ ,LlwNOst}<=]5bG4ͺ鬽_}Ont/T~e˭>*,\JBv׮\qtLӴ\**TPQ-_0 !╷i0Fw^C< w3-w!}}ծ:6iD7dJGM4ML75_@4M4M4;[]/iiii@4M4M4MӴnkiiiy@75M4M4M4<蚦iiiv tM4M4M4M;Xh*((iiinV3tM4M4M4M;!iiiv tM4M4M4M;iiit]4M4M4Miiiiӧ݁4CJì< Jwۤ}(`]DFbkAJɎKǑrW}ؾQ,ɚk[:$^Nd`Syey,:O`ޔ&Gp$'O<g=ԽT+t"@"9wX6vmӋnMBNX t-F"9w[}ئYʞ=!Tdꁷ!%NaA"IIb pc3l}θNԽRoY%YKۇ[kzcy{c5}",gg6ݬK\ePaxWrrY~~j?)MV&@<|(09 4e~T+bpJ*Kyq mw6<)f_L^5w2b幟gu,[ҴD2kۻxd%jV:x<h3ms碻ynt59 bSL790ct߉ލ{1ah؝׆Z5))L^=Oo+4yb|Bgo^H0UvI)ٛk"ܦ/gNm 6vr Tr0^>S%]=Rގx‚Ű0glh=f'U*HE"e|13˔USq|~g4oĨ/PX^H,i1ܲ`.ygo|RJLiU2m N څeeJon&n Cf0ť\ *L%ܷ>?_[Y4v~[ٟfp0=݁DSN  9 ܹnfxk;\OI\I+#4JW%4zÊhSA״ S8# a|q:hV'!( IK8wuGΞw]kGjQ*K9HMdc&cGNڅeő329+aG JXaY{>cwJvSJYi҂ aCF>B#RA$ű),fu1E^ȁƒH>Jo>>9Kް;^v/ lH@f䛘o)wt;Yz23Ypp!=`m==w:KY'ȏxS,]*~5%4o~KG3'cV^Aie)̓iTҬɃ{RGW-'>;M74]x{xӭAW+#hW曘oHOyPs*Xxp%,HV7pp^7u/>ksFI&w.e..f bpM/s??O\FlZ5≕O,o  8Ypy'z2eSMFsfpw tӀ?*կ6+4 h[dcFd3Ӎ*R鞹,Y#&;bҿy^\!7rmpa ap(|/}EXxp=v_ wfI&K^6sn[*bxԽ6k2kۻ<.nzcЪ^K>|>3b3bvb>v6c2B|BX!-f.y#[qe+X~xE\:'Q#y hߐ>3,hQ7#|k6?YIay!͂"~:jW,[JVI63КD>B.jڗ;^#V0gG6/ 8y&a-}aH+L%2(x>5I@dOkڿ~)%crr+E}NBQ6-r1z7{>gu`lNẎr$;'З8uv~jg@OS'L5:k[xtcxX=hS5;"7ڿT$<M7ma)|kGr,8cQs9#ێ`tQ5/[ݟ/6O&x[L&yh# ^v/6ӗR>1mlI/ };{TMA;yaLsBЙ|{.;8r2szŰ4`9l9 ='f81o&MHM%AnIOwI@Ne~pf$$!TChQkI)<ņčg'Zʘl @An;Yx`Hށ! [JI I*+6L mV\*@MqH.H ``t [j p8>`8>|57Zʍod` kKLZ Sv+J(,bӱ,Y+RJ\ݧvU׬:\Bζ>Vr"W%MѥA'VYḰD;@wK\Fiit Ȁlnq)bʋo>99VǯfoPn.xT-Ƿpӷ-ݼ]VݶwKո'6ވY?"79;?a/}K}>! 
W9.Y40AH) t`T# 2~/CXz u3Y>;1sK4 hBf\=o499x۽pl^IIz8M_;a>w3 j>.y{MfU]/*O[$lgSMk>sϬosc8}g SQu|OӀ&zY7b4͍'7F7nPHOfU?34rJrxzT&-LZQ]#p{[~47 (晨ߥl7neIR\Ƀ} 4eqp6L>{3͕L7G/@vis O eb|qycϐU-MopM_^@) F  E5a6ɩSycذVͅN.Hm50tUEsup.^:/㮞wʦxf4R Rj]RY ^]Ĥ7l:y{YfܦgJZ IDAT|V,Urr)w,tsUۑvn{~v?*\T20 #:4NdA2Kb6'<U{J$H[ W%/o|CX.i?y}5 T ,bɝ:Fzj;M9c mpK_B8s \H.gsŊfMޓR kٕPPB}BkzuMiR鮤q`A\z6SFeѠa႔*g) }LDC왔Ģxٽ*W Q YUowMMAwELfU! ҊTؐX !^!4dfOiܜoc>l+Rp</,΢OL4ލz=JqeqX]CĿby`?O?Jq+j[EX~Xl2K29&a n>q.>1,8OgXa5s7W'޶MVÊb#L-M a: ' $,*n!ԧ^6/K2jf@yMǼ} u*%K▰DE6;-[ؽkϕm`GUGVAͶ](o~7![^UmG2],섚gn馰__|=|yW(`]zZkF.v!jVN/NgU nq)/\>ʢsaYζ+ϧQfBmWJfI?%#20ؑ،8f ~ ^9?EEj `TQ\q ΣOƮ! viNfUjF}Psyߧf%VOڅo݁dd׬} Ed yc˛$E`4jNbN"vgש]DfOH|v<6aep 'e/9$d'Ц^k)ӨϘ}:nr^9rW9V}y{[7MIzhڅetQr s\@= %T4 ̈MV/b ͂"),%6&mD"ݨ.awn%>+5Y[ыaը'\x|jX}̍5 k}]4 ddRo8]Fo'ݛ&JñcĥNQe1l{,ωqK7eeiMt~d!!'-'O7T+yuk~N\OφyI * j^:O8봨@^:&,?)MVƯb}8N\EMwq6ۉIaڥSz|+v3WoAќD.v&MVOv&Y4jF?zE|5]tW tMB$ fnx/CXώGr@EUd@B RQӫ.;'bf4M4WሪJVuG9Casr;9Ld`$%9ܺ6fy JLi%31i/}iږGVu* e)9mA/b!w, CX0N;! =])Q\0:SNA!mt2sx۽98O^y&_ -owpqk5dظ;MكHBu[-6ZdUߎ˛ߢ顭6ٰێoOir\. 
71G$7v^4nY~ }-[08^닗͋ ֏vL&6-u7t3-5 CaeX m5d~8b/Έ«>>6^ 냧Փ>ErA2m~:[4πe,"g4H^``𲉬O@FjBz7MF.f,8W4WsOm^v OJwcy튗-0hB<b#ccVo@鏔ד[5ˈϯsx:T?:,c;Gtf꺚}R^A F}%sY\ږlZ^L7"1#w.P{O3mͳt' o vZOy6Kwϭ{|wU۫Xt{&-!F]_IHeN4Mr&2 w=rHHB餑C睠i`*ݕѺ^+553b7pp*ݕ*%VS$&A9fg5)MZ!-zR*gwn\!)%'__b3[lkϩT2b1 t3_I RJN柤]AFm^Fq&$'Š"7"0O=F}98u7` ^;Iy!~0ڠkwL9xO ώ')M1;<SCz[DFSK洮׺No#ʌ×ލ{e"1'FH$Giu$1$'!a>fr2?PPFti:II"/``ʝ9J_]ɻ@[CSQXQD4jNnY.i4 assĜD=/ '$K}rb [Jh\]qLRi7_V &=L4жTI=F}0BCHO!.3O#Oi1dg(ܣ}t4).ElFyt߱fT+9ufA~lNI-L_otg "*":m?HIaJ$'L,6fb5i)M2]M]+4iܲN#v3e2ڄ!4 @AK&$ G ]"c!4h*\4hBXSg|LSC9iAgoYӴ;w2W9̓a((D 4TD R @`i؄S)vpA6DF#iI8T30U暦iiVE75M4M4M4<_iiii@4M4M4MӴnkiiiy@75M4M4M4<蚦iiiv tM4M4M4M;Xh*..iiij_),E, Vښiiv>X,g|kcΚ5M4M4M;!LA4M4M4MӴnkiiiy@75M4M4M4<蚦iiiv tM4M4M4M;iW d!ctx?ۍNN}> RBq1F~{'gK:Ѧ;YIp1n I4ced">A^F羈ǧ9%)a20~W TTw`IT|efyD➉U?3{~~01j,{df!y6=yuȆ͈c xM(+J{1Ê\aWbX?X#,mwOH (۹EuámDFLfMM\.(/ |})@Bm #R0 YVpJ2HI9RBi)xx o/cr6!E %|6!%x{CqAzz"y뇃8ԋEBQÑ]:&$&7nIJ()rrTFD;>r)# YQd*B/@J C&ͪ}iHS}(Ɯκ^=4 RyRm4 (.FX*U竴4ST*x9G%%y~}ϖ7`pڦQ# rsAd~>"+ ^: 0 V~Ʌ,u~3㺲S 0rA bpxp!& JJv;"7WSRp8 !cʋ`#ڪj@ÆTKː6+2a3SJZOw.7b*(3Se Un8*-AIu]*Y AKJ1x٦r%CfB@֒RGU ~=T|u"kku|'T;[ QUQX||UxHڋxD?A,\8vwAa367gSY~||~}Ɉ%?BϮNkh`<8mvm0^E$"{ .xIT\dfA^.ԃsIN?_ʐPT"|}!(y˵и!$$"2rUd^i:J Ne4l*#6Qy{Նt:'N21<\ťPi=@ ۭFƍ1u0@״u1x *0a? 1缍qDH:9C,ÒUAK G1&< 'RbA|p}!9Q}794}ULCG S[>gF`"'slcnl7Xjc>'xYw_F^#&>Xz9ecCvT:#gLUbIzyBN>$&!_Zx G!-VdȩO`r7`m#7EN{:LIxlhfAp!"^~s{-1_9r(=T^?h\iנŵp 㮇%B>r &#^a%#fXQK:mķ#>C݂!h# GN ;SPp1| vbLy{c6.XX(,wUJJ!p"c W#wˏ1UɩsE|r:^pK`<"0|qCt# FL bU|9gv# 9F"^{O5V<1'އ0M +lOAU>e0.Bz|%wiYа‘U4Ո0MĖ5$D{xh.Fz=r0# r"3Uo% +GU 3p"r!o$D]ߨ+#KUj.G,X5srbL~Vgd|hw|UܖJ $@BB-P bY^X "`XQWWt!`Hcȅ@PD>'+{3g39g0[ o{G77i=7\ ǡu3l~af]= ߂a䁻f|(*8| |1l߅ ZSoh[-adeY- eּoHYȃwAnƿTxXKO#mc\!Ar$xƾ,:nȰAzV[e UCN(mtK'C/? 
aoggcpr(Hؓ7䮛1v$Aƫ}1V].ZO#47OZ?rzHdE/RFFi9Ǧ`l]/2QNGbaȔ'N" ?泓kc;tq990FߊMOg_2v{ÓYAw~q(;c| u@AeK;DGYYX̶@+GӰ} |EhVD껈=7+*TvR:>hs1>_a] kۼ2B=8l܄1m|XRwf~k?n㹩u;#E 6nM[NP\1 Sۺ+_!cC~+1]v)h3Rd)30K/xqs'd*l͵Zj:ۻagN6r!_LOޱ ۬}o/[iX#H~ 1]f0:<9J┖n Ҵ5]fVq$!*[nMQob%ƾgU'담k߭ؕ+V z-ئ{S` 7]vYNZem7~  1^}ju|ڞ\5ԏ)ӐK"Ç`z̓҇ J{^ 32lVkސK1 ^˜}/p:e 6A|1yڴBBC᧽؞ )#G0Jpv# ٝ1}+=b:۬:@侻 042" PZ\kNvhiKV{e$[Aw$#Ø=ApE/1֬)̀1f`.O IDAT>?,zؤɉ|.ֻq3t~ @ݺ01kmbǝH>WBl @Ա)2,+(/+Ao"XYBҬ8ֹUKpNO{kZO?XruWY姤aNIދGHyy/Dع ? ZVjb#=Xc,gۑK Mc8;Cu+{ ṠD.lN Yl%'=\Na!+N|p mmA\똭/D,e+з)6R+ OGK1zpRHbl G (.}x0 _R qֿ;&Nmݢ2륒 0۝ baKS0>ωw]5:4@WQT'1mU< Gay뻜[Nw&ğg1v'c|jOm,O~ C3}N槰lƉtV(*BJJѓoVXo0f9鞈;9= ` i/nrS*┺٭23!.z0a.6>td?^zG 'x{add"V˷muj heauWoÑrmMr>x;6m[13zБa@;VYOtW8PZ 9 w1+/r0ũk٤< i!]:ne |):փx|m[EV7a|u('[ZEAڞ 0|00Ra|ol~>nփǷk|j5߉kvJ*E\<: x# wbeҭ03P<xn*{[qc㔁+06 Owwハ=::}0QkAt'LOEM]1>:r1 ؕk0Ym#Y}avXTT@~!$$@k6`k#=`xc .J\1>Y;:tZP clOC~W`Hf&70JˑbOmxljckV݄🯑gT$UzV_kY)X}ғPV9v490BFTl~3![JS1=z+pc| cѿO ;a\9x|$v̷v0|?O_lE+X D#M0l޴jܦج1Eo  cvx0[xM`l܂ 9<Vpi5xaJO/$p:ZW|:Bd/o!*3S51Z4i͚T?}@-w7vOk ӷ ٳ_a̞`~4ψ0lieug|攑mUgi^zjEj[4d8*W?;خc Dj2jĉ~NQԙnFE5@>1w!o07|emm;ƻb{=nЍdk Bip_a,70ΪUm3+ݙ1v-*6bc\7#'yHHlO  >1guFC; [cl'20 +KkmBn>.HQ?V6EN׃Wk 87l;RjryrTah|k\G?_l=Fzt>W߲묮SAk?裏^B(43(N|}zXOswwyоBeF Wj[Aa ߔ4́[)M :  > WCTma}kJ9fD5NZ"]:Y# w Fm}Cx{c[+>2{tMDZ6ׯrAVGУ+Һ22i5xUZ:ҥ4Ϸ>=$cB|i*_:ڴ|Oj*(Db=vYW9Z1;2sGkP8.c@(Э3tIL['EM+hZ^ߞ`gW_J]n*S ij/?6!<^^ֱeuhM6 j8zX׏ ˠv(tKDH8poJ˰9 mI#|殀бҲ}zXoC.Êc}pQ1r\O^L@oB8_0 4h_PeVFqȸ;V0ԟ K()A!͛^)R5J`=4'JjǵRJ)ut$N)uƟϐR,<RJ)~^5RJ)RJ@tRJ)R])RJ)4@WJ)RJ)j ЕRJ)RJ@_RJ)RJڂRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)U8.tRWAAǎ0VZydffMDDį*D{<<ߟs^gRJ)סR1{a֬Y*޽;FտU^^Nnn._= 6h"5jĸq9Xuݼ⋈=\󥢢\ ((J\Zf }ڵcرJ)R&]IpppL$''???|{N}v^{5x96MADiS{eڴi0a<QʪRJ)., Еi޼9OvQ%$&&V ,>c6nHEEQQQ :imoMz7n\3aj׮MAA< %%%$%%ń p8رKYa4lؐҬY3 `޼ylڴ Mvv6>(aaa>|ݛ/_Nvv6`=iݺ5FJ… IIIRO:t`ȑ6QJ)RJ)m۶1gJKK AĪUHNN;j???rrr0 O_^}U222#>>ƾ}8tLJm2kaDEEp8طogW{{"T0{l90 kFii)k&&&4ٳg?g ** ___|}})))fEEEdffY~NN/2rѴiS{e޽L>oVZ }Y#// 
6mڄ]wapQ^}Urss[.O?SXX~J)RꏡR ƅ RZZJ6m=z4~~~Ijj**fٲe91cưyf̙Cڵ8q'0ޱcra0sLؼy9+V %%ɍ7HV0 ]v[oQZZzرcՋ.ooov;k׮xN|||.Ng0޽;l۶~~ ͆ڵgϞDEEi+RJ)R 3کU7"Yd "B:u ,,hoSϗ2Op̛77Chh(u%&&+Ws+zCYSlu[naϞ=lݺT}<}Es.ce?QONؚ]][RRBYYϾ֭[l9]r#rrrXfJ)R1*0 8CVVVM~4M֭bYYEEEwy}4ٽ{9~xyyQQQi>ٺu몼; VXXHaaa6nx֭a&00vY<>WN[]Bڵ񡴴M6}JJ iii8segyڵ &$$Ν;ӻwotJ)R])@˖-U,Z"jyݼy3=` f<-))!++ +vٴi'`\ne }|$%%!"۶m/駭nfQ\\Lzz:.;88f͚y^Y~ff&~! r_ND+WVyXARJ)04])@PPÆ c޽p IKKvӢE :w왧2Yf;6mڰf/_ΦMرcP~}ǎ;6mcǎU<#O6P\.oo*EFFǮ]ꫯ+ey֯|'&&Νˊ+0MtL$22CrJ=z4x{{ԩS eҤI-n3dRRRHOO'<<ѣG)..&88#G@{6֭[yGhР"Bjj*zZҕRJ)裏>z }UTTo>iٲ%aaaMcQQQԫWLrrrHOOڵkӭ[7*&22VZ@II dggS\\Lݺu8p W\q:t>>nS|e@fѤI=}ΝN'5nooonTԩSvMIvv6ٔUW]E^HNNPڴi.l\.tԉL q?Z";;\ۛf͚q)OVVyyyz/6OMM%((yWѣGȑ#aۉM)RJ 9"+ʢN``gS%%%^^^w!++AAA(,,xKpeuEE/uk4}~~>n7r 4WRRypreeen\./JgXQ-=YeY*.WuPJ)RJ)RJ)T )RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5F3 IDATJ)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)RJ)T RJ)RJ)UhRJ)RJ+RJ)R5J)'!"J"ORuzS'>.RꏕƜ9sعs'͛7nqڊ Ұll6}YRSSiժa^>S/a_Ahh(O?4.ZѣGyGhԨ|1˖-#11/S"Š+OlDEEe.[ ңGTW<|ϖ-[xټy3M6rӲEE|r:w꾼'x4="Bqq1+WdCCRRAAAոclG!,,BϟOLL jպżD>s]6~~~dddyf8lBD̤ooo>#>S:t'|¶mh֬Y*PRRyYx14j// ]42e }G~qݸ\ORάY0 m۶촩#ˠAdv4M_u~4M4i!Ѳ~ϗ6mڈ+R/2o޼Z_:v(<? 
!!!W_edEEE( 2D2$66VO~i~iv˄ m۶zT۷o0tܹsEDl6W>|y~X~~y-4֭[KVV֯1MSn&!d9v4mTVZ(U#ϣi2giҤJJJhB,Y"~8o8;wJݺu%$$D|}}[n%޽{W9ub߿~b>>+xyy}v5jO);l999޽hۛݻO?[oEDDk׮%,,J+x뭷xq:\7x2ڵkǸql̟?˗7:wLaa!/"?#s=nݚm/ҽ{w?)|ǎlݺŷ~ˁW^|0sL.bZl?NVV=z 99[o82,X@AA3fvZk߿?gԩ:t-Zp=ƍ5ktؑo#G+d vk0 rfZ֮]vyˣaÆ~lܸ5k0j(Lwߥ 0Z%ΝK^^ÇÛ6m"((QFѧO233y9p?6n̙3)))K.3o'|O?ѣGs/{.c̙deeg,YBnn.W_}5ׯ碋.L3ggȑ\r%2uTlBxx8?!!!DDDi&&L@ii)/2YYY4oޜ;|MV\IPPr ۷g{nzIII7ছnwԭ[cǎyֱӧaիN&MX`,_  ĪUH)=z0gN'~~~8N|̞=F7<@ݺuٰaf͢ݻs-vZ,YBBBǎ`,]ɕW^_Ljj*SNѣiӆ ^|E6`8Nnٲe 櫯">>Ӯ3۷o祗^"??]r뭷a&+Wu,[ ɝwIrr2yyyL>͛sWrJVXC=v=g˖-L>b:u-ԩS;q㏼ dggӬY3ƍGpp0s?^^^ 2aÆlVdMÔ)S c /avK/w}~;{fҥZ'|^xGLL :[ >V\=O0`;vЫ~ @)۱cʄ $11Qڵk'eee-͛7p4hv;宻0W^$͛7kV#ҨQ#W.ҪU+QFIxxDEEI~~^_4i$ ]#<"/-ZHll\y啒"jՒ^zI֬Y#~~~ҴiS1bl6;<;wxyyɶmd!}nM\.+ϋaÆI-$,,L/cƌ%0i۶SO=%u֕ WQi2qD  f-vx{{ѣGe֬Y#W#F锋/Xn*ի 0@d̙t:Ν;KPPݻW:u$aaar7;VnݺR~}袋n˴iӤX4i"]w]}kرCKe˖rA.Lԩ#f͚I=;$St]t" 4x[ 1d,rm\s5ҪU+~A~ab9x𠄄s='֭iܸ9R\ /nVZի/97777+}yfضmcz-nFLӤiӦdffr?^~Oeee,^ӷo_Fa||$$$dž 9r$K,!Tn7ԯ_Azz:iuVzԩS'11:P~}.]ڵk9vsaѢE_">>///HJJv;ٳ'Y O&ngҤI=N'EEEw߱qFz)"##=+--%==hOV4پ};cԩm67oNiԨ7n?$ 3g'ҰaCl6111:tƟC~IzhԨO<gRYXXXe^G|kn>SV^M ѣ[&))dLdYݻyyyر#_|Ջ] e(:vH.]ؾ};{}+ݻ7.cĈt:iܸ17t۶mvO6e˖1tP֬Y7|Czz:[nl2D0xW>}q…|cLBLL ˖-#::nݺѡCvͶmHOO'##\.n&vA֭>>?"IJJ⧟~"!!"=ܹ3,\͛73vXLӤPvm30ŸnL1}($&&EII "Byy97o&""ѣG{ٯp: 87xa۶ml6ƎKXXpbU6rmFXXcƌrW7h y֯_iL6hFMZ cǎ|ǔT6 x СC3tPσrNnp:TTT@@@\uUdggV׊[ҳgOjժٞ7f} apQ6ö#hذLҥKٳ''O&== &;ND(**N^WXaDFFСC94ٲe uᦛn"223f0g>8v?<> }3f̠Mү_ UE7>>>lٲۍfcڵ̜91cPPP@ݺuq8Ԯ]___GU\.}O4+?dƍ >)S '燗fboߞI&y.;v ??߳ړK/yhԨ!!!iL7'qƀEo>~)` lŒ31b#00a |M4СCg}ȑ#߿?t|tי?>]twߥM6pJii)ռ⋤`8x fb͚5L6g}"'įM6m>^_\.{nK/TvZjj߾PW>>>-+Gխ[7UUUV\bbbP\rToU*,,L-\P544;CL&e6U޽UQQW_Mueff*@}gjڴiJ4h"5uT722R-^X>|X/Rn5a\.뮻Nz!նm[USSƌvzǕjUVU%%%7꿛gb*553f(˥}Y㜝|||TyyI獍ꮻ2~ƍU||q M:U5449sڳgرT*--M:tB7/FioTTOVk׮U l6+M߮TFFWqzԦMT@@ڰaRJ}q)ݮƌ^ԯKqN)fnduݸ\.V w+++lF//5\G4q\^VVb!((ȸk٨ 00??? 
HKK+{t:ngɼ|w۽{7a IDAT磏>bر8N***X,pcc#B\VV"<<_Bff&墪3n:FŲe0` V+!!!Ʊmۍ7$zN'L&>#x֮]kln1T^YYi-~w`ذamۖ3gp8 4^r??S70]uq6OyZj=kenJJJl1*BBBNzw1tPV^Mrr1ԷYmm- r$墬 Jhhf:G7gn+uuuhy.f#{!!!-ft8s8o;cvS^^i^;"""ZN'F|||N Y,V92Nt!8֯_]w… [$;SJ)~m̙_}u3]׿ŴiӌF'O^hq1vy'9z(sE#kkk4iÇG=ո\.z!o/Xt8\wuO\?DZ|#΁c!C\!.0 ЅRJQ]]/J2fSRRBmm-DEEnpBoj#bbqj466Vu|71-zdnSVVFXXDBH.B!BbB!B!D+#B!B H.B!B !B!B\R-k~ly%ϟ22Ͳ}|~ e=e%v[x碌{~;碌窬yk?Mߝ_KY[k+(hoN\FVUUECCÅ.B!Gxxx1 Ѕ8Nv.B! :B\d\._b!B!~iXt!B!]!B!h$@B!B!Z ЅB!BV@t!B!0?3\B!]93=\Hna94Ce(ޞ'5UbjNu6;v Åۭcю;\nZ.7j.|<Α 3VYITNE7녦i(mhQ%& oϖYeR:9T6;u/cNnac(/BW /O)T֒_RՂRW:N=hF]C#pz{VNai5~>^ƾTJQ蠲ł>PÉl+E0[ oXr~9_RW`;[9ZTAIE-^ؿ6e4[{,]WVQXVwuRFͭ6b:[Hщ EYU-Ja6_g{-ur )jN3ZwiRߐz&>6{u#{u-Xv 9k^peG:WE>'ߦlG'qTr>un tmXf*nӏ` |]׉ ཿFr35OŅRy7sYnԭ8]nJ+k NMŇC)fln}z& \5煨VW]gcïqH +Nb6 LnNJuyKNٮ^M7HtxUŸ:!zwtoW/?}9YTcLeXvht8ׇxo:6zN=w惩3"Zos״Tc:J~w|#lcm׺V9]nʪ HiϿ+LJ|fRϜg'uzur 4; d] ])B{ؼ7?Nƣb|| 3s765۲8o(ׇxsPs,.NAi%_n%ϛ:U[]ECm9Kodo3Wrƭ;J.pvݶ7xOm}#o~q̞z6X ϯFl(j*j`Ly?s3fn4nYTTt560!Lo<7@}w3ѭ] OZy ўi5vW73!ip"#;7q͈^qܺ/W`u-fXZ|rc[8g)S;ƒ؟[6kwp| yŕ0g)MvQīqݟsգp:ZKsS]g3}j3U۹鲾pͅft5CxO7{̴뎬^)WF(#;:N<1ܚ02@㱌|,]ҋۮYy-MD0 ŭcS]g#x lv'F3~X*"sO+{93oO+_ˠmt($\*7ޞLfBYuU 6g_iN_Th a1pƝZM(b1g î!²jLJ8pnKb4>?yӹż߲9|4)(9K9؏ 壥yjX<5.iI`Ԏwk+Obpj%"؟ɉ߃pI~-_N4i'f Mk1}Cd51Dӭ텭l+ӫKR`lcF.X-f:bӞ\JH2^KoVunE=aî梟ZzqIδon<⹙_.6'vӜӃC,۴ Mg灣$G|c&OoG*-`_nq]{]_鍊lQZYL& DM*}<smVi4Db6Qgk!:,g\%s`^d%7K.6V)7"R|r;ve׻44M#C,Cz%|AuM&ƒUl+E!~#4M3. -f38]ntlK*@Ō7w3|qԯtxyXPVC9`5l: +֩i@0xjՏW)ztJ*9[̕|WnϷe4MG'쵇XLBkkW0L[l(I)E+:?s;֧7Q롉F"`?Z\Z-fޜmX֣xY>^ U_G*dn݇3c\/]Ƃ5;)cնl}e5:bֱYfᨧ_;l­+^;ʪ}<(K^{/=;Oނf\q5 NMbM|},}=$D}ơ XC᤬x˃@?؝.c_|64Eu'I|;),g@utOg,͜n`^jݼ' ƭ+*{ 8IrNt!~tj1L@a-^-n}a&LCӴ_Zgn1u6??_úĄrs(u"ٝBX6;S͎Gvx7%O)esy˵aؒCJ6 MDyU.npK黮tW_㷏᮫b/&"nIvYEr\ _}F}E\x֦?ZA߽?<\54ᅥvaLj%}:7~]k|_> :gxsf?ðNR^[4)zx*WOE)]f/GoRY5QNNc\sO;O_oOyVzs&< ?o/uǸA o}F=bP>Yf/ՏWpcH7>6^xZ-s Vߤ~@N<EsО._[WARB$7>Q|7$f~?_Lol3tkޝ碡M#gm;l/ .ߝi}Ou"% w>mC;y,١)rrgLn6O^I%$w۲/$߇]`΃xyZԆrvpA"kSSH. 
<_;])6:Hna!MΦ=9D!.*:+tKmL(N[ST^Mx?z'rl5#I6k;v^ʪhƠ8.6>LbL ѡ@q}Q<F4n7k`tmCDZ}#-l"77\֭[ǤIӧ#GXjdffHzzY y뭷(,,k޽{#..N:ie=Jjjy 5M#77˗{4@WJq8@HHiiigT|rz֮͆]je"@/((`Я_?={n:\.B!ENt!.RUUUb6IIIDGGr`TWW`?>mWWWIС)ܼy3֭ct}ɴiӆ#Fxfa l|ǎs4Mc|gtgCg"@"00UDB!Ĺ%!}X,nF "&55W^y>s~a<==O3 sѳm0 <:u;vOJJ"))cF>b:tpVc;c4'2qğ|"BGt!.2Cw !CNΛ3ff͚ő#G$**k\.;w$//]׉ 99t:ȠÁt֭E vٷo#[^^Ν; **<)޵k!!!ٽ{7rKll,ݻwl6hqådjʙYSSî]tBϞ=iZrrr#11L>Lҥ oΦr&M2 IDAT222%44k+++ٵkXVHNN6xl9N'w聆$''wʶ8[߿JL&۸vʢhݼOccc㲰{RSSR:w|VWWWw^jkk8娉r}-sٳDΝX,l6}>|CjjFG%++jL&aaatJ)8pqD\\TUULxxM(**bݔ:QQQb?S\\Ltt4aaa޽|\.aaatȑ#ٳhS~BI.EF4222p8xxx0dȐUKMM5a?_~7@m֬Yxisw49s&J).r,Xn'44Amm-ż曤3yd***1c'|Frr2v)S]#`Zٹs'|-_3rHƌ;ChӦ VBuFy3f``\wu-۷zE4MGyxxxm~̙CYYY+`ذa'gKcc#se˖--^[n;p87oEEEow܁>cڵ8۱Z 2krƽJ){>Azz =۷ogtؑGysӧc6yӧCϞ=qݤ`֬Yر6dȑ\q(7o17xT.,Y6l_=t;C7DN'_|W>aq=xFC!?Mt!.2J)> 4 c?wx^^^9edd`2HIIk׮n:*++x4JJJaСPXXڵk)))a0뺎f?}Evv$v!::ÇG~~>Wf׮],Z'43L׏6n܈dK/%88LVVv;ҿ<<<ؽ{77oovsWA:tl|}}&""m˥^ʶm((( >>]ҶmogĈDGGc233ٵk۶m_~F3hhh ::ڸc'|7#̹n>S6n܈'C}԰vZ>}:?0 4]vIK6mضm+Wl63tPp\l۶ V\I.]~VC1g\.]Y~=֭0nx4SJt(i~±:E6mVb֭xzz2bڵkGcc#7o&33ELBB bϞ=dffJ)t]7ʥcӣGn76mb޽|hF2+Xl&={Ҿ}{ ٴi|L27|r<==HHHu֑ˬYӟ!BB\Y&La0O?N'TWWd $$x||>)Svv+ڵkgz#Mvv6o߾(Xp! 
$%%q]wCٳٴi| }ѡ?GAA7n4G8?JKKYd 'OK/5qGFF2~x˦5{ҤI1лwo^~e8@NNg4]u+\.m۶{1}ߟW^yos=DGG4##..̨[jj*/>|DFW㾹gYx13l0&Nh޽{3sLnҥKIKK;&Ą >|8& ]cɒ%9r˅b18N)?OII '0B!/sn> !.ρ_<966ѱcG4MrCѽk馛 i^]Ocǎk׮Pc֓'g{Ń>7dW^bsO/vݻwxjn#<<( sРA-f[,#Q[b8baܸqa5 ***gR͛7t:jC ѣG{GW^FxlO;?C7bرcxi7tL&뮣O>-u;~9{ Ǝj+xv|2 77ןb >i۴iio1Mׯvo>)--}v4M3檷kCfQTTԢ'd2߭[7vI~~>?< $%%Hbb,&B# qjF^[[k C?]q:^}͎t:Yx1+W4qibφ7z̟?۷SVVFii)[lÃx*LTTTM7={ϖc'w\@S/\:~?-͙O$_6d5ϥ;w.G1zf3쟣yp-5o";;?bՊb1㙪GMT;`t]gɒ%TUUw^݋d"00K/Ԙ"BGt!.2Ѷm[֯_O~~>%%%FB)eٲe(,8W_Brr2!!!PSS?_\/~~~wk%++l';;ӧ팗k~fA_rN>ۛI&hڮ]QS2r 8f̘AYYAAA 0XÉb֬Yl۶xwvߧ~j#A]TT3}tcZřPJd^ ]׍ãEOaÆ1h e޽=zJ>S|}}ׯA!&.]`Zq:^ok׮E)ϞS}v)Ք)SZ6UVVƾ} %%^zѫW/\.ߦ#j^[dl6cHrll//gdoF;4wǎ~Pu4MK8(++d2qCi94ZJJJN^ZZfk6ͽ~gNUSͥ???SN'LifrNASPP@cc#va)C|Νܹ3FlK߾}!N q '==5kְbŊӺn֬YLg.5ka^0g>[s vYre6p8SN?; 9Ս3Ihh(v7Pލ72k,rN:gϞ9N֮] 4hlWyJ):d,L=..`v;ׯoFիW͉***Z3<\˫E돯~ڵ+&#S|3]׍&$$tǫ̟?ٳgs!qM7nH`.B}҃.EH4ƍǡC(**bڵT¨gܹ]i׮#GYܚȚ5k'** c}rh ܎9rZɥw~z∉!88 Ν%\BPPvqq1ǟq^`+Vˋ=z`22yV׮];~Q 7j(vMmm-oFp5tPnJmm-3f?fxyyQ__|@zz:'O+`Μ9lڴ۷Mcc#W_}ϡ)Kc۶m,Xŋc2z$''kY!Ѿ}{/_Naa!ӧOK.94g_f .dɒ%hf NKK3;;|}}[x7gxyyt:q\XV&M"ybb"F}]ӹ;:t(+WdÆ l۶ P ڵkٴi,"R\vecaԨQXp!;w.>&Ν;1}&{ݺux!Hyc-))a0~xh7|< rLb2~ӟb5j͛)%cMwk0Mss=رc 0{l< Õ+Wbܸq馛0eL2{/|>1ks܍7lc?ħ;P (o]\tRzGں㮻ŦMuJ*p[fJ@HSڝn#-['|ׯǂ /7k{.0v; ѩS'tiiiӧLD0x<|>!`Ϟ=}"##B߿n~x';H?cΜ9;( V^p$M6p`x0qD,^@ `Ʃ UUQ\\P(N:PPnNáCзo: /OO<|*^{ ˗/G޽qM7AQhQF7ް( (..)4rAU KKK=z@QA0 t:uB0D$AEE8S]ױw^ aB. B:tUUU(/+ox"2?Y!ͬ l. 
[MWUU: ~EEE8p<u:t.]{ Baa!uݪ;S`0hU7N;-U}_A W'% z#AQqxDu{Aff&^/t]Qiׯ_BEYma߾}0 ڵ+BYF2tպO>u {聬,|G0 P=u^QZZ;W^RRJkrJi{ >fARq)%>c\s5E^^v܉cĈ0x`cڵضm.X~=sN8RJ\|Ÿ IDAT_n p8ݻ7^{5|駸{ѷo_|~cߏ.]X?ԩSO>_|b޽7o?|\tE0`<5k`ƍߏ;탢(ܹ3}]|xѻwoݻ}mx( l܏'<)@au \رcn磲~aʔ)x1tP̙3Ÿq4Ms9:t(^u}~ߢ9s&nF_{9( ax衇УGTTT`Ȑ!3guh b:ǯ z*|aҰuVٳqmaǎw}H$o۷oGff&:w7|3iٺկj*SO=+Wb…x뭷xp5`ѢE9s&ϟ˅_xr( bҥ4 ^z)?W^ݻ%%%xpcҤI¾}fi{w"&صkWϩL4 ?0y?~}:?cȑ=z4v؁38&M}݇: iwVZ^{ ]v>xQVVl|wyزe ˖- /J,^s͛S>;wgϞ(**Xff&9'f.]CZ=cp\x /o/~ 9B|Xt)?_FII FKbp:8pNҥKoc˖-8|0z-;XlF Xf ۇ>K.E ᅬ_~O>$/_Gý&ķ=h4]-PAH3P\gƸqb 5K)^4{_@ sbѢE;i&q֭{9ddd>s]k֬իѳgOݻcɒ%X|9֮]P(Ԭs 5[+@2O?Ů]cٲeu,[ ;w˱l2|>\2***0|,X+V\BX16\J )$.r|gӧ,YáC)Soƅ^ {xCa={6{9~a̚5 V 'fЉGQ?)%JJJp 7{Ayy9z-5k``ِI;0LLxq5***o~ҡC 2999ؿ?V^mOOOOyyy~rK/. :t@~nݺ5)o}Xc&O9sXl6( . իq=fp`ذa8쳡k::vۍ뮻())=l0\.tBHOOǩL?>eeeؾ};ƎkGee%  ݎ?+VH\oE,2L(Jeee8q"0dȐ7hzӧqx<۷^{uَ:^/gM5rN=rssѻwo[צ?DǎѳgOڵ W]uJKK8cذal2dh^PPt_~ ^׏7̠5|\JΝ;[`??]onO8p_O࣏>¤IݺuÎ; @UUnTVV⢋.… 1sLlݺ9r$֮] 6GYg k~޽{Xoma[e3gO<)%t[niii֜X`p8̲cӦMXjo~?Ƅ A$b<`c`6l@׮]vp8HKKCZZ>W_}"̘1BÇJJJX7n+իWiG333n8ND"n!sNѪs=֭ƍ裏bذap8(--n7#":#7& J]v)҈BII5 6 ÐRGFFl6^/z)u]Xx1~_;&ӉRk];z-%ϕVsuq;f%<@4!o߾;t耡CbڵشiOA%Cff&~?^o/@͵cǎBXcb_qD@Qy睸km6 8_|FSO=:u«Ν;fa…xwd 7t h 77;v矏oկ0k,+;?/ߎ{'OƂ <=vӟ0}tBQ|>طoL;BQL:W_}5.ByXv-F~SNn;طomݎ/RJx7rJ?axsߔڰay<ֲ3fBYYN em駟nxq'1c0~x7B<ؼy3-[iӦ3goߎg/7ߌƣ>cg]w݅lXVߎxW\q/^=z|.Gy 5׃5 p s ΦN' P]] ݎñ`x ,Z_|y 4vO>$ (QF!==^{- |r,ZwN\WXB5/ȍ;< LLlذ!aLW\q^z%vmڵ+/^%K$)#Gbʔ)81g<ի֭[{۶m㉾Fu;//o/nZ+;|#FO?ٸˡ*oߎÇ#`ʕ0a0VX)%&L"!gaΝ8s*Fr0~:ӱsNTVVbԨQxG2裏 QTT#==X|9 2{ʂ| KK._ 6zC=p! 
:عs'FP(/GNL{g9stOGZZv؁j\tEaII ⋄}vyؽ{7./@ZZf͚QFAJ~aԨQP}JJJ0p@\.dee!++ >gy&"K1aX|u]Xp!7nzC V¸q㐖RTQaH'8y4_zUUvލpXt)QTT>}  c’%K`.:}hbc}z8쳑>}૯¦Mpg4M{ؾ};:uꄢ"躎+W:t(B Kb|:t(\.֭[[^xu5j+tLƍ1|رagϞq%x)[[?֝;wB4'N%\bڊi/1|ڵ aW^4i&NpN[ncƌiQ3l޼]tIXLDDDD'-Qʶoߎ#F 77"==}N;4|Ǹ1d,]N^QQiӦaٲe0x`6mN?tرƍΦDDDDD#fЉ(e9h"TUUaܸqؾ};>sL6 %ݮ\p-:FիWc̘1?>~`֬Y Љ9DSb׮]8ӱzjdffs¬Yp׻W_ٳg[v;Xl ƍQTT^> ЉIx0c G?9眃t=nz3ߝ:u&\QAvmX~=|Ik={ՏB@UնIMUUk7:D"m=Z8n!t]m=ږaV6D2!z ^{-f͚eGn'|Yfooذ|MRc&`[ !!@[z'5M,8( 4N7tS@J@eo(f7jQ>3<ܳK/a׮]P)xꩧ[o% ޅ1c-[3gd斈ކbW( tSpZ Qʖ-[f?: Y%ibѢE>|8nyl0 W_;}%S@HYo2鲿t"Jٳ>g}Νk}͛7'<>rHTVVZmA OJ0E \3BNHO!aGFޠ]J zDbSy$"""Ŕ]ɠUNG:zBBa=1W2!@89Rf&׿+e]wyi"G^ I)xbL<\s >cH)\r U9"""":9՗u"0KS$M1"t"Jٺu0sL׿<'x7oƬY31cƌ@{xW_}5TUUaXr%fΜ ˅{:5[|0ԌF_hQ\.f̘Çc̘1(**BEE`ڴi0`OO>$aM63I]t-[d?q=`Æ \ZBvAt"J1x`!83Q^^ݻrssaSlaII z EQЩ n7wEQСC(i0xR2MRJ6$6$@o[k8mB@҈fƾBXM! )Du+x饗o gee%>DGB _[(mS b$:LӄB'oC>2'# 4.zmu4@u@MOG8P02y׾jl4ZzZZ:tRf&~?… qg(KzQf4Y$?g5B2Bi&wYQ;w뭷[oM?!z! &``/{'a;""""T4/ ;ZLRB3a 95E:T:[&ˎDDDDtko1$<fǷ¦( K Љ踣 8<6=}U&|An,0@'"""""NP3 Zu3ey0L 8 c]>ߥNKAO=ribW%zivDDDDDDt\{sbP4~0@'"""""R*]ܽ?t3@'"""""RFq$˲#M|i&t""""""F@o/\LX3& -:Vq)eʥ!DDDDDDD Kju9IezLD7[kGё=Pu}&o{4:BjY5J7!̠Q i%G0Li^4̠QNx4nɺQO EzM@|!Y[|ka; )qu8#TPk+|KbNDDDDDDm zB"$@B&,\KvK`NDDDDDDmF3̣.!uf׌ˣM#QH|>~ӟ/ǂ `0o<̝;n!Q bA?b ϕoHiLpuf9[|ɂnOi| :LJݻw;tb|ӦMl={6VZ}8s0p@ 6 }֭[ vB(jC#"""&5V5ZjGzحQ%ްf%V 1 Љ(e6 O=;$|˖-B0`̙*e(**(lΆa(((8N߼{^k?ޞr=X{=1m=oOci4jm}8֖}~J$ M4H)a&ڐaitf;Rӭ[7ڵ Auk׮ϥ())AFFӭrssJu~m=sqivSNm=Vyy9P,0w\m}: 2220p@L2~; q3 !""":A7e4@uR`7U  f2q~Nc?xH5ղkNDM~GSO=z3ncر۷/l2t}ł c7߄wߍ%DDDD-K)̄4Anj)%O5LѮFgt[nI?qMZ Љ(@XQ#:lRgh j8l $`H4iu͚{Gu,hcsЉ(eRJB ) #F\l/<i&**ՄDZhN#:$` fc! S$ 5B`HgTCwe7LQ'8nLc׾ QoXxaB!-!. ߞڬ2DDDDDDHo:z6ɯXɺ?50LOHJU=-fAMMp"`NDDDDDԊZb)/)%t!R ռvO7E\AD:Y'na@D3QT9Niu?1@'"""""jߖk4TLȚZtC4'. 
!aS@5Drt""""":Xz+vM]oX_#Z2a݄^|Dz᱌{f7%f)%"F}'DDDDDԨ'-P^f{Ejhz;nX1R"VP^DެʛBZcM7+DDDDD( u"?5=!Ͱ fSMhFa[K=\4S@5־Ô 4/y݃51afЉ@r)dC 'a #t]D竛R"ufbwj5pSZ_- :@Fo&h&zN)Q OXG@Ԍ&u?0@'""""vaGh]"XS.d=%!N5oXӅ/kը \O}P5)7Et}>cMzC F׷ĺt"""""jԱ 7׺I6;Th CDh' T;0tm_7P_<~,BDo0覰n_ *8,[I:8dYx0 ?Zä́?C7%DMWdGt#iSLqKI)@8)Y"Y浗ߙ/#1PԠt"""""jTÄ8kK6jd5O)Iͺ?: !M"ڴ-ָY+]^{NY.Zt- K Z榔VLU#}℞hQVP`ة:*k Dtd;ͅ Glxc{ D˒Et6ۑZr~u5 ST2Rʄx>)6쫲;zS@ixMuHKz,Ќu"a8~o%zuekO$זjpԌ5.NHP,ѠZ3ibݬW%g zbdltdw! UӴ.z2igz2~BgQI)BQVVt]_c۶m0#o^֭ÁN7[""]yB[e5CR} šMYά%jKV DE?C P:7ŗѥ kfk_> þ:ӁhP_%izw`NDM"%KpuAףm۶a̘11cFk&qkiӦaڴiOG}a`߾}=z4f̘. .dNDDNh=rRʄo}6I5 ׳XmzT#!X0,6jǎEQC5CHU3q!s:8aon%p꽁q"`ND)B駟3g⦛n¼y^Wr7`pB|cر7o?^HȮ5WSnɝ#55SWjw /c,@Et#z~5K)aA5X]4g~xBmtxwXhD߼wڅQFAQ?Yc2dн{w{7|qf?!*++VEDDDQn[LC^xSFt)dYhZ})5(J F܍þU_rnAͬ)/v/oL!L"5/=^)X&h]Ql6nf~IJ@ .]6uRt UU@nf%B$d5˗ܷhւ ÄMXH3 vbPw ű覀nT`oegҡ !{U@U ]7 M7#H f9 wЁaB T#)Q\ !ej3n ӄ̀iF:"Ѡ&4À;}BhPu)`Wj=Aֹ ! kcf} aFާ4EQdgg@[H+Wغuap:|0)&noj!cLl?j9ao !xPfSsMaGZj\5c{E d8PUYPi3E 'E˸!pUn74C6`729K},I= j&AWR:oHKx^OE Yp}wi5* 3 G 6xB!D"*TM()Dl'v:K#t]Mz:lueUntˈ{"*of@#H+2m/Uϥh&EQt[nñetNzNϞ=m6a=z@>}i&\veؾ};rrrnmסCttlx[ tn\#:vb7vy̨FF0DUPEL'2Z& .+6g1H) CЛ=}t(בNP!蘛f@0ЩSN6er]XAI[;B4aȆitRJvGϵe:,W:_ڄ!º oiP Z˧wFW"8<ב}ws2s~nFwUGNfyqFtTNM~5:/7@buEcP]&1@'&ٳ`zcw}4h੧BΝr{?nǨQt<:zZeɻaqޓ@V!$aIH ~!d2 $$mI6@طC x%˖d//RKl˲]]U-ֽϽ%8scϞ=pPI/!2@$A6זNX7rͰۙ\ma e%mpd,쬮62wSZW5rK*VusSn"aȆl@DB]mXH 'BG 3bк#o}[;ڞ~/gtB1 BKEaѢEEu=z4>O֜9s0gΜBgw,΃ I1;^\d4c$DW& vChFav3/TLdu!veە|hN>|9pEf!Ց.l5]1$)$՛Sꀏwħ̀И5B!B1}XI[W^|݌@J{]h,?v8J^hb80 {-賡;n3.yѽ`\#m}GjQ(Ƞ i_//~Fz~E?DPN!B0h(»w3Ʊf=ʰ{Yn&h?ϞGs:l{=,:n7uJM?,w$ ` 6p3P `9yQ?T 6m9U ) \vߦxQ֟? !BFTA6!mAx~&T`xL+k=](/#?}suyoqa"U7lٰ{\,HˆlW<.db9~B!2Xj:0ᰓ{NU\gfA1MB@+!v<-^ʭYθ@\0rX+@u͌ ۀOVƟ͗wd5n#B! Sir8|oLkox?vafTMY2}e-#eÆb:}|4vpݜnCz^Pz/ عpQV^X`}t6%^T96ZcF B! crAٹ3fپ+#zȆ]T.ЕQ  x#uYƤ IDATg^ ypPM_s ѕyﭹnGuJoN~ :Hc9}9/hɉG:!B!D ڌux9МitВ{TVg*}{Teܛ^3ڲ0 cz >;g5.IV v/eA$IM7-Ȧ7m_SϺwg:?^Ža3^HȆN2˝? 
!B&݂Kgd zFК=θ@py ?]~OUjc p7MSr-6LxmfzklŴa͹ު G@+d-8j5h̛xQ^{qSa\@ֽe,߮^Qdi͂_C-Plt0[3kﭧw~;?Q 9GΑ[)M(o<5ON !BY[1loՑ0oV;_WŸ=9 ZsܖVєP` Qٕ nOk~b2tf 0[|2w@QX6%.>}vGZu)$/븅@.9޻ݻf1d^~^TˁwjT)jtמߖѭ>Gx݄ 2msQ8B!ҏ teU{F־:s0hpfeFZ5,X#mZ Մ ˮ9Zڰ~\л/PMIq"3yW ;nô9X[Ap/[υm1א %Zn\zz^0 # _;n1;blNg]]-ƑS6B!ҏWvA5xZ3:lv ?ֵe/ J74DsnC$h޺͡ZӚ\Z; 2pf16 ݗ du5 <-Ɗy &g`[m9m.\ޝ7{%ɯfc [1?GB1pwf+pn1?7 9=PN!BH?y|e۞jB5mdc/= mŴ^}c9Ͱ%}ܡ \p6sC-5cB`5/ ZuP{6^כ{븻>x }.?a~ٌC@/_aGψ?Z?} Ov uo4WȰ2)nX}#/ !B@W `n3.A7k΋ʮ ہ3f686qttCkZo?!Y}]Bdm]W3ݺtrFUn[qf[:.%36P GL/]-ޔTzAyۼ>)gfl[^؍_ـάץ3tB!BVc ٌC ! (Z|]BGWp˷@GV8TA~?9?Np۳~U@2XgYiL 3YMƻ2@6 s[1׆h~ufVf.nAL/2;=@ZZ anUynW݌@~*~ShdxB!~xk|ͽy6sc9/_֭k.crA8bqv6K3p()^+mh+ض;l^`6y,ՕUL>|&mzB2ŏ5Vt{ uѻgsy!c-;b;bSظNuٰa: mi30&6HusdxB!PLsQ4Å;.6,vu/#lX n3jYeoWN/%Nwa2h0l֔.1d i B0?XV?nPWKlF.ni#{CkYfu)辞R!} _r;7%UI~pm93PN!BQx l͠ʆ~iBFИ=55 nglSrp(&}H*f}eTGF0{،c`]hnòfk;sVLpry߅[%։ݰ4AsJVz{tAc㦺] skoE~9Ǩy3=:3[o: u9'(#O7 !B9 -߰ phs r4#H(!w3@T1Δft8v6-n sgq KН|r ^AC9nI=Bw2.59p.IW>`U-ww\6~y] AaswK Ԭn{TLh.*꠾16-X%}zEn0|fB(y@ٰpִgs="߼BCB! ~sB1a: ΜZY/X P6 803&:khH(H&f6b8-ax>^TVPέ [hJ*hNi;f3jIPLF$ʙ[f/ߊ ܑoڛ~(&cp/HUS$3ny(7֞ f0cW6ekJG[Z- c^|AZֆRMwgԻr89hᘌ],aRKUˁR{eЭ9ߗ(@'B!d ˸=B5TT{_Ӛ *f1guwM:ziqv-I ]-iD]dn nޝ04&̝0 vEۖIU~[6lwf1DsM! 
،ao[iשk ֤0f۲"rl.P)#Z]f>!=:\ X0s^g3g7!쓃\yH*; ۃu![J BFqtB!BtxC;4 YV\W6l%& Zp|t.1Ӟ,qv  3q# f~In1C]g ݋vudž( kMۼ.]kȦ aE)Z.@kZb:~uvqі0 ƠFxW};nlE)Zu^ńpG(@'B!#0fͽlơ]w'6U>$!Ϭsd4`6H ;Ss$8..va3ho>#]<-.-P0md4lhJ(5n +Xx䌻6 eӁ *K pױ(jڨ( #*uʊh@$x7OgN+g\gW)?9QN!BFl(۟t[Z &x>mMI]]7y+ 5c*P=m8kt?gDpuΖtAKZQx>H(&mzxvp^.FqxM8uec*J= fAZ5J9.zoCj:hl7uE:!BRPu9,:c+K}ϿƆ8ؙmG` ;~ U*02mMIQ^b:0ll;fʅ@0M*#}1Ba${Zy Λ>_RuϊI&:zZAL4fB!{Z3X Ii~wpp/ <zKz6&T?8vFpk룲bBTx hvq𪘶a]}}b:;w?{7eh潟wgo?A(@'B!goZw-) [0lj!v,G΅۩-!=2#5>^|Wsݿ]6Ѭ(tmov[/*ww3IߟnC6/ z(>Q;!B98Ct}TGF0~Twf tfu\0c,ۏ$a:k53hSkPיûca,tdtL.,^׎$Bچ;IqsƄcw3e-{X(Ѻqiafn޻wD F:!B95 G4>1k.Бئg!*#BkăE9vpe`̹]7Np;n~ ю!2lBs47@ p8\9,@ ٶ EQDDMBN3dA}4lbxag+ќƂIU8ЙFeHp6a!eG6 !ގ/`Mu\q/~ی1y׿u|_G:w$ !p= / B"?+rДtGOm?BFTMuQ󳔻2EۢGF6 !V__H$B @UUUћÇk׮E6M7݄+W瞃iXv-x }x h!dB 3W'0 m{ zΰ{!D F:!d>#D"Ō3(@߱c>cɒ%c֬Yؽ{7z-|+_q/|?04MèQh!$89 Бƣ<! !'ݻN1yd<#شi}QB!!ى)S@$TVVq躎cJJJ{,˂CuX#c s:C1˲e#B/$3gս MIQ @ !y Zo%脐AQRR*\zl6cB$TUUοo$gFuMes:C;)K>¿q7CޔPp-d0':x:CB BEW_}5."HK;L4 me&L p!!҂H$Rq娪Ѽa%%%C+#c mYFuy߯EŮ'A܌!me">=8C#C!  _S#~!"I8O=J5 ?q饗bٲehoos,X^{-nVH/Xr%$trʿh/ҚִqSE]g{2d3uq߅B·-444ছn=܃`0.ӧOǘ1cK/!`ĉ_ |#<6|k_OSzS"h 1p\n j"83uq(NH$ng͚,$IꪫpUW}$.d5YB2{N9S[22|QbdB!4ў$6CL60*bb\e)g ,9{L9(>QN!r!- =N6lloN^ӎwj8q"g ! B!'HZM̟TƸ*%P2dn9{I(c (dd !CJG6 !B %I¾,Ҫ߁O{6Ϊ.$tҮ65 %$DB(@'B96A8B|Аɣ HH i Ea9 kf!ܩ5xsUb΢mV !#A(@'B9 HBт+L¸R♏ZPn8:X q;̡,QN!3ט/Tac*x~g B,Y7urݭGc!FtB! [bxag b9h2v4p]Ş6 $fgacGK7!BȉE:!B%!gm̝8 Y]V۞`1vN?BtB!Vⲁ"9w׉KИP ީs ,TP0-!oB!d(@'Bi-YќĪ2l<E[FGZ53lHե"@Y'B֠lB!d  >B5 iTFBH)v4-5،ֻW.FBpF%#B9f\FuYYUea؎Uh[:1bt}w Yuy $ `BPLC#!EtB!x>{-ЕNq0fn9 tIaJC8ogue4TtВR]גyʎBNWA(@'BF05%1w(搐Md2>sl?@0g3l. &BJB(H66%8RBz F:!2!8e!63⊉QQN!B`kcKήFE3fY=ZLLfI n +҄hё1{¨ݒ$f ߞ,< phL(ު"<`  $L]p-|И~}, ! 
_T>QN!kH(x~G Os)@Fz}lGVGkZŤtfu./)FN}63.МT$U".X8y4)ǒ V"c^p۰eO!dtB!':r+& Q j\{<&PLi͂8r]rTNюԧU iPL[|`ZO4˛B`P}dBI%IM6Ҧà[} tfud4 9eh>Y|ؔD\6p^38'B2#BN8!#pXA7zɂ6Cΰa:L\5&2XޞёmzΰW܎y!BN5 !B8\ c MB,g@Ro/,./f26Ű{lt f.wdLt5SM|)m:s( ДT,<,R}'BN%*q(@'rܼ]p(.1j;1c\%9Qd@.9邌ͺlϼ< I0b6  z`.ihL_ߔTO)!B9nB9nu9X8y4TNjI>&-æNn\5L!jT 7$lkLb[SϪF6{bp-/jbgKY-) !A(@'r\۞n14'Un nF5 MI8єTЖֱ?nߩ3C1ȡ`=nwe;WL|А1X i݂N3 ! ST>QN!X,9[z|x4guAjo^`xLvݭiZ`x!mSSZ%BʠlBוp;'E@>Gc7XB!d$ F:!!z@['67Sk ͊J !rbP}dBAu.mD6֦$M˱%h@EiaÆ杕8Adtu!>BB!3脐AB L 'OƄ z|q# bΜ9B ˡ cǎgMe]pWL60@9I& "arKВR1)đf$،$@i8ִ0,!TAGVBs8v5B!'}(@' ڞ={p5 L k׮'> Ʋ,|s=IpM7{ASS>OHoq5ל7&a( s$@ ]; H_W$0! H:㿸* #YHQNbXb.0 TTT>@ +WBew}8|0݋/UW]zap$КRљ3P^lWYU`  椊p-׮./$F wuyI*KP h6x~]6{ (^PL^-vb:zz !2PF:!dPcb~P  !?lF.zHy96 HR$$aW Uw6ZZ w$- }A|"( P)- "$:7$D0HJ<^5*;0x@DB"脐A ?~x{]ý$aҤI^P 1j( =*ۄB!l͓tBȠxى;v`ƌBذajjj0i$8{PӦMԩSyfc۶m={6"ݲ!>:B!BN !~1eTWW7 we˖ᡇ'?I|+_\{;w.n&⋘9s&jG!BșH&2HB8puuu9s&=\!m6TUUarشi.RܡCgL<^x)+"B!tC:!B!r?P!SK[⩧Ν;1qDTWWò,3:b֬Y~5իo>m^xc̙4gؾ};VZz̜9HdO?4~mcĉ$ uuuXj݋ӧ|aXBWƖ-[0vX3sO?h43g"CUU[:`ʔ);0Gƍ1|@:O?z Hoz!^wԩSQYY ˲g1c( c V¼yPRR۶~z[ԩSQ^^L&k;JKK1yd@CCV^]vƨQ!}$Iz6n܈6L:sXf 0k,7`aԩ'w~!dzquס>9dYߏ1cn[o0/|MMMhiiEȲ<ԇpFb /q54Mwpαj*s=3f A 9>Blܸ_=jjjn:?mX~=W_tttkixwp뭷16ԇ1ɲ뮻Fgg'>#Ncƍ1n8<~!~_OJKKqbΝC}֡C/ׯ?я/C$\{8t8ZR){7 ˲vZy7n˲p;a|Iq0a^z%|߅aO{P]w8T*뮻X |nF4f7pC# ooߎ~5jz!>|{B1B؈[n~;c8sPWW?xGp# ;׿F0_{+WcO~p8k8~CQ\pؿ?yjjjp]w0 \~娫ù;G6|l޼r :묢5MÏcO<nfvmq[oų>?8c=.섽'SN!#_}ڰalFuu5rΝ Id!LǏ$IXp!80ԇ0,UWW^ÇK/R0~x,^{E(¢E PUU>P_4o<3~0P$[nqW}_|'Oa()),^7oƘ1cP(S"J ! ;sŻヒ?EG8W_FOOHᒒR)躎SB$̝; Nd[7zg' οo◿%Ǝ@ 9StE ^}}_.B=lׅ?!t]V—%|߇eY(--E (Y/|]P8r͛s9tH$Rt_9J#Br̚52PZZcmEkǔn=B`x_p%%%=Z%8&ǏDŽ  !gYss-IR{raۉFtB8_o|34 PYY EQD0~xlڴXt]?oL#/x<ػw/lҥKQQQ6ki I,F.oĔ)S/} < dYmzEETUEiiGaٸ#@0 QQQUUaˡ( :+^VQ?pxMPVV2 @UUxx'xꫯbҥhmmu]c޽'58~$aڵ۰k.A,ZH Vs#@e3|& :!@o&w>@K.şg躎+V^>8|0ك%K ! 
k]wᮻE,[ W^y%>F"MNdxa{1\r%T>%%% SO0 _555?>r{=q˸˰tRزe ѨѣGmmmH$عs'fϞe˖34MúupBL8x7 2}Y\|C}g1cƠׯG23<+V`ɒ%شiZ[["a…Xx4dݺu8 !裏?j`Ѩ믿L&/KdlݺG;ڊٳga kz+{9u]+Xl.BP}:u*^~eh5k֜dA!dDᜋo|JL6ML6M̟?_ܹS۷O,]T̞=[|hTp=#f̘!L"x8c<;BaYŌ3Ĝ9sĚ5k\?+}Sh$-($quo=:994_^ ȗG"ui1߁^28a@Ql+ض ˲PVVF'Ao?: -`Au{t X۶Q^^BN=2MPN!B!!B!rB!B9 PN!B!(@'B!BNB!B! !B!B!BitB! !s>mZ(2}4M6ly,~Ȳ<,w?P,+y8B 9Ǟ={iߏl6;}*ܷ;wq9lقX,vžB(@'BȐ~a׾ޖ$Ix p}yP?o1=s:z~|(/Bukx駏yeoDKK ڵkIR( o\;aCBCB``öm۰c̘1T 7oF0Q]]F/ػw/oȑ#hkk÷mL2faܸqX|9$IG} bڴi8!IoO|IHt:͛7q| ގt:,]زe *** ˲U>|0vڅ\r%BسglF&^Rw^5 +Vwƅ^I{nL6 m#H `ؾ};TUŊ+,z*ŋ1w\Hf|G%\H$Z8&X5551ڵ 0a/_Vbʕg?럻t:V$ ȲK/G<Dze0qDض͛7#H`Ř7on݊]vix78-[1c,l޼d-!I 0B4MEQpB̛7uv۶mCGG͛ŋqlٲx555X|_LB+}݇[bٲe'2rPN!!gv,&}8 ȾXdEˆmbԸ*0mbD-R0Ulh TDVY y]yԋs3ys3kk -[p1~goNll,'''ӧFcc#'N۷oS__Onn.7nw}YfqfΜҥKqwwgرQRRBBBS!$$G1{l0|駔P^^·~Yl?#l6^ ٳgYd *555L8,VZEOOQQQL<^dggS^^Nbb"Ofݺu$%%f***$33;0sL"..;wKvv6ͬ_X(..Fo2qDZZZ ~`۶m8qhƌ'p!>cf͚ŹsX`SNEQHNN.͛ǴihmmHV+yyyqFNƎKjj*ܹs?FCii)hXbxxx0k,~'j>|^y۷o'%%~h"Ұl3y'a֭[Gaa!'N`Μ9C=m4 Áh$//6mĞ={PUD %%zTU%33y~ٹs's͛$&&vZf̘E_LL EEE԰d d…ܽ{|  "  =ի9tgϦ?+VHii)SPP@NN˖-#88\Y}}=111\$ݝqvvMaa!cǎaZ ]ˋk׮=۵X,apqq?u!x)ЅBBPUUUikk֭[|G9Ņݻwᠧt8 s[(x{{( ]]]=N@dd$===xxxF#F;/Zt:V%""#F8s,aaahZt:t:F#a鉿?ױlձaTU%&&پ=nܸ0,, #G2|pnO;/44NNd2a0GAww7[nEѠ( &Lx#""h4L&l6f3f ́CIVVs%((;v@ww7vEq۶m͍綯fg.% !Ⅱ*r9s&#G@ף(  k.F@zP,jjjPU[nC\]]h4$&Lss3}}}twwS[[VۛdJKKٷoqqqf4 Æ=}dA{Z: s[۹{.*_S=ëW*MMM|HSSsol޼g2rH=꼇hŅ:TUeϞ=cKHNN~n~quuv! YAB!?FEE2l0 k֬'??EZjȼ0 Ƞ͛75j[l!>>aÆFjj*'Oʕ+,Y^zq)”)Sꫯ7 77$ l۶'O:L:???͛FVˆ x뭷t8v?ޏ)'jiii2TUE"UV1zh]Fqq16x<U}".222X~= /^d>|}}Yr%-bpn݊### & FaԨQ,\ `X8<999L4\,X@__---Crˉk:]\\X|9&Mŋ|wTTTN"88>YO>MBBl!CQG!XEQ NJww7(bۛfz=~~~l6*++ioogܸq燛gϞUU d2Q[[KCCL0EQ"..FÍ709cIGGꌩ~gtWWΝ`0E`` f7nPSSlfĉz.]Dtt]}}={ *|||pssΣG=z4ܹsqˍ7[! EQhii 1L\~!UWWsM|}}X,h!줭1cK]]vjjjFUU***x!!!!ΖK.9$l6ڵkDGGp8pBCC'WVVb61L|tvv)\pNXX<HTTDEEQ]]Mll,DDDkFVVSBB!PPPիWټy[AǏݻ%?!B!3XVΟ?/-*++ q\'+)ЅB!Bj !B!/)ЅB!BB!B! @ t!B! 
B!B]!B!xH.B!B@B!B!^H[Ly&w/wDbVlnڶmkڶmk:nٕT oaL֦AP vfٻu$ˤ6?͞632-odJm9^XhNK{C@h7>|@fSf.Aksh[B zxY[rk9uf7ˇB%MOכcbydSPӾD=F ]"[b̅e?+@M;;n[Jn>\0)|P]S8ŀ^DžcMloJ1؂u]nT0މWL^iFKRx=9]ǀ= _mR8̢:jf9c5+[g6XO' nhS@y3f^-?fu&sF&0s*m`&11N1ʙow%͌^2c81If d4MGc̱o |-s1Ӫ'SfCOS#1$*9K^ͅu7SOcL7L+sJ=< lO'SټJƸ[\8ШmW'[b^3(/k l\.|`Cc榸/&bS O~d1q `ǚ2aMqVvsE\rfuMVlJ束Oq*Owi?3L|5 (kl`$۽ssg'6S&qL(Ypi}fX1~zuS-8@|P{~isZǼ6{r}%žK;"&5yhkxrS̾}G̙D[(g>s:*v 1{clm_0ƘDAYL_b1>[EՁuzu0b[Āi0/6S &+SՍ1&ak/LnU%@4 >2XC#_kfx. ěj\291)&|Riԕ37-3.% =l5l3XÕN#i4q{IpycL^5 ?-3̄ ?#;5_fw@Tܓ||iUGɑj YX@Yud1&P@n祝L 7{˜LS? 2UsX ܦ |JZz3?͸گBuͩ53Oti&}ݔn(Ow[?xDÌq/Y{=;ޮ%ITa,4150晳${hM/zIs 8(fT{S@+w ؔltQ a61."~.*'m^wQiL2]--uxs$Ϙ>k4_~Չ@B&8>zFހ0їPzS>rMR,8sK̗O_)ȳOrhZ/ݍy&^fm׎9w xd%w >j\ Gy:d#"?y'KF*M["ȸ{#^$@2V2}7yW_VNA\9(LG)G#wKM=L=|$RoS_N~0ْ7;k)֫^}?`ҐN{>kO Dk_\Γ'!͗sımt_W`tW+,d&݊(ox\!By+fyl{S}drP,/`/3ڲRv(X?g4=dׯ.^֓4՚v $_rqp6pYW;S#c^"9FrI1DSyND dʙ 8@ Qc,p\?f]ߚ `Է,3@P2T+Al? `D0~yx,u'Rdˋ|(͑yhݻ;x&!Xs0!%͸*Wn)mqvE'^W ynWsrfٻ|Wo~);!T}3 5f)*Ѿb,E }81⊍3//,;gPA6|"'pj)V BN~&<5Hs_I;'=]3s1YJס󱬉v^ickǛ}wn^dH+K =ә~_IL㪘-Mbo~G87mҵ ԩՕAndOo>ۓQǏjB7iU LFM6Fq}mRU+}(lA|gY/)DFZg{2'a]Vƍ+g2Q%>qd WN{$]O܎bbڴ܂8o?еrșڒUмV v76!(lِ6-x?݁ AWfs+8d7MP)>m|OR!4>ُriբ\'O2C]ͪh,OHP/]a¯41go07%SlU_Kģ|{*g><ܲ5aF:ܒjgoѦu8ΔExz 5Fc :M rzקQZk8R(fp@sq1=*ґZgccxГ0uy| Fcr~LhF @Sv?Xt ia̦^ `ywIȇ{;ow%1qL(u5oeY ~Lݮ-Lm ܁9&ڦ&iq6K{7!6g>Mȕ7WiU].WofJ+lڽ3l\u<N^R#@Hn_zCJZ ʑr\)Oʓ}w S]WmzXSEy 4Ug'~d:?iDn}V9R+IVo~jH<>cŌ7C*yR|ES[Ĩxg~[zoW#JRo-cf^p矧yiVT|EnŘa"""""">􈩈@""""""QDDDDDDT DQ(""""""*EDDDDD]&:tH9MwEDDDDD1Ƥ5n+B""""""//4@""""""QDDDDDDT DQ(""""""*EDDDDDD@""""""_7E+`-[k5*>/w%'qѫE *TMs8pi90wjW$,*Mߣ7[ҁ):kr\<'񥊄]y1g3]3ѮN%*֢uuKގ ,9>؞xn'bH:}}(wYq>ژaf?IR<Ό5=pcwZab/2ػ9g«Q/_Bv_{'W@|[MGzz<=$dȸT#?/G ,kg!w"[=~ѹh>kMƴif`E2[]y'Qy8^Gߏ6J#A:ïkڴiLy1Vzom="_Cקdw{j/zOz +_6r81um_]b4{\ij^ j7&A6 qLJ4n7㎛~:]v/;3ԣSg/NNqYj_9Y% Ȋ_`aj4~rdX}r $Eq$1'} P@edžx _'mMGOnD?/IyfAK*ۃm3wPw'̈n3r]<dI6/P4z#Uq 2Gm^k9ˣoy2[͓E?(wlIsڃ7vߴ{,d{wr,`bZg~?Ю|=uvbKx,BO@ l'pZŠ~Hh"D;gR VHZI^?Q[gB} 3w?F (o>B}G{}h0~ܥ'b ϝǘFl p=7\:<>QaLNGßނR3³Ҋ|1i/b>|jfp7zd&"3uRٱ­m;ypM_o󇏓1y1cqݭ~ 
Ʊg 8f-JХ~t6]ʐr G8gb-W1/ȚWy<=Z0;ۖH,6/Zơ1tUN{˶n!c!L9.]89H59웅 zn(;5֤=od 񥸈ͼZNiް3rn?G>=-M971ۻl*ړ7 'gdC]L~2g.@](rt[2A$97~/I"p&;HI'6mGߏYnkW-L.P>7 Q㕶G%Hm$hrt=מ9h q;Ѭx7Ӗz+1^ֈb}֠gxw/Mx7 mMchg]ٷ{1xerzC"q7+Y1t癚[Qwz |?l]S.r!w=% ǚOR{!zaxZ ޗ%w0U TmBS_qp~Iø2h Yyτ҅'O)Es6zacޣu y;'+'Wܑ8ES̋|Ҽl-_W}j>Z6Oer>+H8~,CN.D'f<;)w1kS8c+-*g; =_^Eּ/5 *=tyrek@yu1V- AW&ZZ.,{pd|$+,=T 2"Hv>K-_m "O4) BMcXxWŵvo困N{ xq} ?bpd 3+&T!w[f_=]Yt*O7kxzfo+שi: ]^O n,yn)[6tN>I@tnkdUEhVV(رLf[ϻ\L}VLjᏩ+#\Ʋ<܇ǚ%o}C=: ОY8Ư'i?8`*VZŪK'4 J>ūyy~ 8@oŢ_79KI>qi#c/qr^g\8~b[l5eMlx4/&YƌDZRj}6 `КxlxX.H|z[}iݻOW]K.efBU譎'&~_{z׃i.̓LyBvU6UW)e?(wb1.%?pcl.bf^ln#?mL}Z>S|,H?>}0xhӖ;w(c'|="`7=hkRR\ 7SOR™CxtJ# VT{&$ 㘺!X:~ 5~ CrER<2 ߺ/N39-b"a8/'?LEpC{M(7o~$1˾[\g ۗXq#4sG)=xsf֬Yߑ䱼Tr/iθ~Ø bbN/+_mʅ796wS6"*ƏI yt_ÒX<~E[`iLH>ADDg;j|5[0pM>mq 9S)K"%P z*Bz4IYڅ~ucCOQ7X (ƟZRvG~Uzp=InLߟJvViH&sK hM{}6WFt:=WNoP!s-y;2Fi߀RmvLJfqCuo55)qSvf]×kl'[S=/jqzoGA/ ^Bg%bsp/[ӻ5kaڈ<i\>o>ŶANVƈD#ocenXH'sr!bNK&4irqO˅<5Y_Pfk>WCGX:u?(wb47vߩػ]ol-h\ ,TxP*ҙvw/cLe/u_aWZ_GH:J*ISݙM֙x)z>óf.ܤlM(^"""""/H;zlqJ˃j󈩈u^$""""""*EDDDDDD@""""""QDDDDDDT Dq'NPDDDDDD#iNcB$""""""zTDDDDDDT DQ(""""""*EDDDDDD@""""""QDDDDDDT DQ(""""""YxqT3̿qI8Oϥ}ͷz=#il"w<ʛt٪; 7kv—}KSd9j}y1Hf+Cɒ%}i!gX5Iz]ȞҲjԘCqj.Ű~Ve[߰3֩yuWS+sT*Y[ `$/CS?`w3%62s<]&m.ĥvdz7~Z MLz5cYT?5 R"0h+Wb`=|Ԏ~ѹi5z:3f`ƌ|neXպ g_Վ1w `ɢoF Ǘqd}K?uĨ*I<򔴏g_|x3z\•tk4~M+]ݼ=_{me1{紈b090;MS D ұzsFnI 6MFU(U; ulQSdiD![cжT/Saye&3lB=Xq.cK?C0JBW'#uMѢDO|FUJQZF?Gu+1l->]4Ϭ\:Қ/}Ivs}K>ZM&,Nn?0d/S{1Ft!9EקqCI׷ոK+w+:TK@\GϭgH:͑l/VPBCCɟۭ+22W"5y兏2㉗'yKfĺm֖ˊ_Pmۄ̛7AO ZWҡb=uvb{">loՒpz),"[|ZAϣʍDZH:EH^f.cфqYi黔 xwAؙ=l+_bwф՟5~GҰ|d˒^3_KkQr=|n^yKIBIغcLJi x3M<[$f.b24Y=oV29tk4VCy2A-En:(uЇu^f Yޓ]ж!=ۏ+%z#_ȆGzV^rIrjVƗn $;HIHlǚ3jq0ɉ *ݝ*!SC9b$XBeXt=מ$wίxA2 U!Gj:|37֟8ޔ"!tc9Vc D Yɞ3#?kK(ح;2#L(jO}c쮗O1du^ߥ$>_Knr1fp,Y'Eu\Y 9Kmg@ӧ Ԋ镂ݩgL;O:<w.3U>mb@o9\Xqv8]>ͼ8]g *Kq?˯KmrYWGjоd{+H8 !),(G\N.D'fS_jWHmƉ]Ld _Nl7x$ U0X,K-@嫓ũBx7l9g@\L"16r?S0y]1Ix1\^vޞl[-g(Ҡ y0mibNoeD9Xue_,n&G~wmL"Y ֧L< 10K[Z6D9o4%T8GDb+i#c~~?NLq~j{ץ|6ϡýxP,\/; |;Ng$_d87Fs_&gO:Ϧmތb πWN}dNu(اo|u|5yJz<>':&5>gwTQ.&Hп{w۾\:o_!/O<<:q_QpkE[2si1 f 
L6$ӱnSDS{fѤ &3c07r3[=SD StUzls iޥ)UYz9pi_)QuY{:9uǚF>]lG08cRbozUJS11/^Ga >}It8NZ*%KJ~co9".ƸRL̟_V1e=kyiYÚD723#SI`Nz4X”(QY}nIt}ɝx]DLӺrۙ`/f^UΔ(Q4ʬH2r-Ͷn\O3vxitISRӵCQO.SLLMa#̊p"CZ?ɽ:.}MӥCMFM2U #wƐ{e8߶}g3.[TΤh"gKлMvK@I^ݜP=%"""""thY& lO /uDDDDDDQDDDDDDVU""""""QDDDDDDT DQ(""""""*EDDDDDD@wyx!EHDDDDD>41Ƥ5n+z""""""//4SQ(""""""*EDDDDDD@""""""QDDDDDDT DQ(""""""*EDDDDDDM'v;bs,4I b&A0~qHųP^iZ ѬLa8ּZK']ECҕNK JW['r _E UIֱ߰N67L;]jwus{f]J=CN0@< Q?57DtoI<ȐS%,fڮ8)On9rY{bzR~e5)_N^8/lbWm@%0H߳:.Z ")oȑh/#wQE?(81]WOrgb|o?W\ 1xI ]7oOOBx16NM}u1oLn֘2by}F]7oQF2$Kƕ|t5ޟ :K=O\ g8eg? ߥU5|ġ nfO FQ78ȝk| Ar@>,X %928}jlTՕ x?v`5]L\H 6ΡcXҖ.|K:Hpt| *WI燲? Oin.$ʽ?_JtjKmҡi')2:BZu",Ђ1s [hɒ7~(Ó)3ٞJb`LSCV];/V[g)R8k8;ʹdȦRn Ol"'/?o ě;fdсR|XRpV=\y ə/xS2d>poXdR! &Ot ggz甞هԞoWmk~ytR; 'O<͆ό@'aηG"}ѫ&|łVj8UU}-Ǔ1G܍q1&[ۏ{y1mu8@@8Iˋv zY)0z{rΘBLd L<L ]O 3l#a) HQ_=xu8qΫ ={a,%^HYɘWcbyXfw~drfXSVVRR*^v?쑬&`QVvL܍h,GIF,̷OzX" X( M?p;U}"Ot̑K.$rYWvPGwo|(3OY wj 6g{< kΘ:dʀ!_}[=h<~Sԡ*L<(NsB^pqa}&Ou%7z嵑a-B-iY1YO';㢝X.Gv8Q8"I.}}ɶX8HK<=}Nfc6C x¾w][5[I};/:ߒ>y“@"1NAܙx_={tc\Ř1ps>?cuR޳٨Ѧ)u[StopX=Ol<6i$ཆHZ7&ZŶy&[r p?Rr49'Oxk6;*Y'2YH3.@a/rx< ʖgvT5XR9<x罢+7b eNJYC\zL4'ߖ-fӡ <ΚciC-XwLgN#;v|c I2lAr4|f|Kz%+V#I|ӭ#KgI(Uݔ,4Aq?ɳiI$-^.y1u//Wcob6Wy0e={Xt.ms{߇1| ^\آ7qj/Q![$˦؇(V zݼ)ض#Ur_Kų&Bnԋx?Ԟ{q,Y~%[i?Oę3DN$ YyFH߾,!g6bA<֡ PkX Yb\nk9a\ {|A3_s5o9~oɪ%Yx1/go9J221iCn?S!D IDATxy}5QVDDDDDDD@""""""QDDDDDDT DQ(""""""*EDDDDDD@d1ADDDDDDtQDDDDDDT D{U}H( ("bY}eEV뺻e]EQ *.tiB$:>$DH$~K~̹䜹4 " ( " ( " ( " ?ۭJVRff$[n+tbK+̅Ҹ~})]w(RԧCF ӘKjDZ|VVS^IRݬ.16`h $ ?E\AA&NYfi߾}RUU5sLM4IEEE?[~nyCc/v ]zt%_s?U^Sud'm>:@*=W&I0 man鐦7ݦK4f5\/ߤڷUt(uw!Ukߜ?(F e?(;vatԭ 8 Yq f,_=v:EĨ#ϲ9\vӧkС6lOΝ;V.|G?N_:MI:-P^k΄)ۄ4>wf,}K.Ac+4]v?:]2<9NۯT޷鱻ֈW!$W,{8=7]1tz ݟd{uMG3/)4 {G?Wo}'ՒU֝^b;*n-z-\: j9NʫԪ=q{Z~: "ժU=ʕ+Rq~MNML;r#="}4ݩ_^|TƭUQa>L?Ze^9G;hn$䡛f/)$u =ߩIc*+Uo{^ߍԿ]Veܸq }ݧa.ӁۿSƇ@MWk2I^[ת-[ι+y In6wto7]^pL[d'[VoئwkO{G=}R]Vlخ3˩+]4 x,ޞSyܞ}L9l=n?3>z}{+mrs3=/h}`C*r{[/ץ7D4SK}]Ԗwa {&ڔ^e4~fdՋn"^=W!~[`Z<&Ś|:f{L^):o|m((o*],cu+tz3hРE:@c,Z@d 28J]vTktNA.YRo,$9ѾA6:$ݢ[$ηֹ6IL#5fh;ePۋW|mtMF趡2ʠն"C%^oCӭG_QyViuJ?Av]Y 
ֺl(v>ѼzfWgy[{pV[<2S<2S~G~sϯTey̌$9fJU{$*dj#k LQj#Z"n_!~+UaLfWnKYF+MA$SDZOl^)2 k͐p,d5Z3Q& 4a;>'éAN]}UzLHS%ePᯩ&P@:ևuى PJYcbd7UJ5Dgv' rޜ Alƺw~J:ӾGQeͺn <\T- t{$rޭ{W=Kuv%&Et P5xKRV!$c۫e%zdV(Eb\Ί5Mlr]3$Ge%!!yO;\c͆e3ՑatOί ݺ*_8UyR{ 3ⲛXn="?oǣWԹ_0%OwV?~ 3"c#>sRY"w:io.ۄP /ge)[{CAD}j;nң UV֪F\QUFoJ^9 gwYzݪvoR7g,}ǡ>3D7*$ST}$;#nQ=ýS5J;d"w3=;M_Zb2w\UVVlzEE̙s~cTȰq׋_'֭oM׌n։iYѡѻrW9߼m/װ8).{]냱#@z{m7ESZU@hkޗNNWzyCq( Ү7銸?k4>[{CAD5jzuzkȟm]K?QS )BMXA3T?nv^>E '*>0O~~:I ^쩹#cdg̏ΫQ ީIW^44nj^ƮTG],Riz_;uA}-%33S>֬YjUWWkzs7;7-}k[6fXو/]~I'`PS߼)bڈɵ*Hn7?)_w~RO}k[n<]gŶ9|gǵmwYzhWw.WէXQk$}rͿi䜞z:yhWd="Fճ7́!*((7%''pz߾}3(::\ZQ.M uxg InSۢ=< ΑzgEFX>mT< 1*̢Kdrvkɒ%Zz23=[n+t5jK+̥h*._nJג9J)8t.J50N_]!fα|VVS5w+%Ʀ2  "EA?+{^57P 8  zr߮h+_ܯ>:___yQgߤ+_.[ݚ`觩{ݒlŚe0i$;ûAz㴶MLUm~B7LYAw/Wy??}F&OWexMz_^r/ꖮF|:W\vzuMZߺЎ ZީJ\\&uSz52{bos3\E?iz}˭)Vu9ҿhAnhge-`)/=~ZPEw-.8kzdA<9U:%pJ rHZI.U8\ 7s7fIqQe:2B^y]t^lH I H ]dSCՆ X#YNNdLd rnB>@Nn>y~}CvW˫*l6,&B e5d6f5{f,&BfYLYFZͲ d1e1n3l 7ܢ{d_Y#metޟuuËfU=ewڙ)}g hk3<>Jn6rR;J#ki].NL?4C1 $u}܂;ݬO]èik Gsry)wF+L}EWj~_:Jp1;6?!}WNϡwnkǫj_G#H4I񴘌]zg ,1\l̑VVS|v)whv9,͒-Z/_ղ ϕ' Rqfe֜J>],vnӌ:)({dbVFNY}ihn;׉6!в 'MoPC]<^%@Kߥ$L޳TC>̔2Ys,{Ezz^U" !\%oΟuf~djǔCI2(j>Mʗ)k k+!@0r{^ZOU/hїX 2D@w \I3yKV7,k&d0 [>;\sF)aL5ah͛qbsCãחssyLSLMkg|-Skn^^I{Ϳ6_'\ڧLۙUw; 0^:y-ܒp Uʯp5):PXEզ"}1]aDF.OoLƔ"(UjoUZԽ]bp2yeN^baDhҋ"IڔuaQVuogW0uSaY j,{P  @=ToI7Jn*4 Sur{4^BZ?d n~eW( EJίԛkRTT& ͟Ӵ.A;Qi eW0 U0Fd0x4KIg뫝y!@A+_٨Sr~wBfEَ/QvQR[oKUbnaDhrJ5}e˝gw?+6I]SMF4κpz @ː[RTrz~~l4CDDŽ0-Z/_Dh!V$[253^@9eN9P7# -5ڝ]֢ OK`PtUq#j!\a!kƴ#Dh +]zmEK[ݲTTPYݣԻ}"l.U5)& -ǏyzcU\^8Ɖ3&Lv a(!K0 "R 5wC|9عmg6E h^](r$-؜oF#PIqQ?16&jJ6UE.(r~.og5-&unenIg뫝?(P/Un"Ihb՞FŤ.ǔƸ( HmAYGoKޜr(вdTUT& %V(1Pt:XgǶY:Kì4߯[b_N)(Ђ-ޑ;rӌUڒV-iŒ>"DE*#e h.^*(вx|~.MF SP|YLFc6.zcn @2+qIbY>){dbVFNYӜ%)襥?R[{sʵ(!KS՟>ٮYS|:'sγפPYAW=Zt=K5L9+5w2HS)ֳ_'*0Z Qߘ Ң,)kPdTk=ژ—iusk4vZf dm=Eo$]6q֗ܢ+NiڭH@K=ToM7? ̩]Ϯ\YFptaVkރZͻqA4h~c&rwfOQ5$0FQ=[;*5:&h+W-?7Am{QK RLx|f7E*n9>p@[J@z U%zXc-nU8}8orrrX#9qCbW69;NVfqө2-! 
u QvgtF# 3J\ !O%/ 1ZJA4i;ũ#T۪v)N;@\dn$9qCp}:Y{eLCHSe\ 2!Za!VE-;〤۳byflf c xK)&׉*]0+ܭ=)6qz);ʯp^DKQVRZ xv|;S{5,})כ/OS e8i@s[N~)zh6=DgWҊW=9=CXPAeaw7Ez#yl9ȸz z͞Q"[]݇OмUQRSX/_ϐχr?J%WJ Rͬz~׶ (߯["R8_J 6D#Z2>I 6găAO$.Ҧٱmԯc&# 4Yƅ.^Ԃ* f k06ffNѮ2@<{:'ڵڞYw;*0PLE."YY%@sܳuώk0+`>ϯ[b_> h2"IUn@PXyf7̗dTߎ:7.RvT( !)tUIJ/K4D-ƔB2 IDAT!]^0"y|G..ܚQ;Eh`(9R< 'NiryN h>KwqZׯYeڑuZcxnHy"YWߘ)Er{z5'Z^\cLFzƄQ:; tV*ءkRW$ D-WYGLVjA%a?Q׳vRP}O8@˖VTW$hpz%ً*WsжR@Aв)׬)rynp}bxYgŶѹ#yb ZX7פE(Z ܌h(c.J{tybdGȕv h|| hd9)Ɣ" uk:SFNG 6%^}yes&Uhb@@iEUJ+v*n9qjkvǮ"EbNg@bn^w@\ +po+۳Aڣ )r:UQa!fZ *nQլ"EZbVդPBؘ. e H,!KٕK,b2*PN~1MSXP|.OoMJI 9Zsd۰C95nUTEV26ʌbfNQ^9c ?۩]t~(w܀_ʭ+Q gT۫*w}nBmb%d 6.}xP&TXYDQ:sĺ*2[hj6*fQAC̊fQͬpۡG[Tu)SJPNƔ"mL)dTߎ%J{UTX\yqWQKEt:e ڛS9 u3ZqlѺ$-؜o}N+RZQܞ!\Vu^BDv.Q 1 -VQh2 ?uh- @$<^̔2Ys,IJOո9*,g*Sn9z߻g;5sYr{^wgxPO9{& &NQj}ɕM/!luj,W1n_~ßlR%X./<>\/(K b#ݙ){|?E<Gl;6{Qbg zߠBuQYτʚgNe?jރQ^KK  Ը*<[Uc-nU8?fYӧ.%G0k:u2]zEbHRq_XP-*wSGߩrU=tlyR )ˠEhujWYzt de;/HlJUtEgu9+֬O^3xA>ܚ'SJkl 29193E؁:QEf|C7ņ3[˿Vހ^6$ur.OZ^%-xU.mu)<ĬsrrO^g]|g,dZ2d r&crSOEP4jX4M[S(7_+_4,J7Ok׺u8 >$ 7[ua{f-~1qڼ~OӢ, @G9u˘.}Z%U'i)2.^syCgwP E罯EdTc5~3#L;t[)r{5ku@+epzIСCk-jGh`(5J"EN1 Όs˜zʯp`=q 3Ș k06\mf}"WbnfNVK8|Ɣ"YLF]] P+|ߜ! 
@AljnG;E L}x0@A!IiEUJ+җsE4jOפhWVaqbVTk+U PQ]xAf uƔ"!M^0@ADé=۩OpqbpW%d鿻#O@ADwښVB qbp(rk$es3Pnq wP8 1jgA *\ "Оr%8٪9Nl6/b " ( " ( " (R=^N .W=}{Y!fYCiERKk_:>PϐsM#9ZM> vҪ+ӭ]M+cc@APϬܝSEEQ=[;gr:HQFIr+}r ZvwcT{|h"\Y­G۪1D*ZWKw?ܬNFkw EZNqH];UD8tTfic sw:MɘAL g2Vu|+$ wkn ɜ^e/_ Ӌk?讖oczoX#`BdL cr&c3#8 xP\5^ Kik kӔ5rEIiZw+o{J&-yL9$Cc<z͞3J c {GkތkcJҜS^R VeY4)&pe9F$I@ $ :TǿFSȘAL g2Fϟ0( " ( " ( " ( " ( " ( " ([M԰X : gy~|: 4ї6MmjqnsSX~V: M!Lc 1ߩNc*y3SbdSl|{s@]|85"\NIaY $"ɥ wӎ?tF4r&cr3Z$y$rK)vNCNq7lU SL; )r3)ɡRH d=f.k4+k#mؗG&\x[7ݫK#%'tWdxdcmPC @ p_W_L] 1Es>v4:a]4M) *eƨ{ "L3 Ǖ' Rqfeȣ9}YQܦ;*Ź (PoAc2L>/p,Nzr82ق`__/R48wB=8,N/l*XLμdɺl0\h812.3N|o."1T=~hQ.z/<&I7Fz)>]<^%@Kߥ$4Xnݲ8GA}-|J%W(n!,Pu۴"@/ъ&0'iƭgW:T6-ziDP~'wzSƪ߈?Xnװ xƭQaA߾O>S;q~.AyP)-:?3{R 0SdL/Rn}DA4\NfF:Pz_Oh]4ZRhL+yꏃl7_Gƍ^|H|Nv^B9m1 #Pܨv޵P[?S]6-zt{ԛ+{r_C$+>'AY` 6|Q2ЍOܫslT~6Icd6G벉SgZ_4hр de5m4.Wj쭃e$җ/Wـo'b[G6I=|Iɸ9wO_ '+ Vp&_Q6S: SvTpXNNّm}ս}; ]oPN΍«ggMQnhtarr9.AgM)le\ҍAݙ){|?E|Yjx{v67D4N2wɟtǬՉ9aB,m?NoEq$Y|Zã1"j~T#M ޥGnVMRoRZ:6[O~J ;5cZFԞFo3bo5(i _c/\g~ueL>W:S=rJqn=ZԌ! Ulk?AS!1lb&~@wm 9Vku{4̎ꅫx6&hd կKjYK7$ݪ_j ׀hX "ĆV0[ Mq18[IHӢNq$5,oRJlqL#iwLͻ=]A;tY'IXzqoenwt~A?Wl;\sF)aL5ah͛qbW8 ( " ( " (6sJiOjԳן̃Eud{D~M%gLk|;Q@A/Oft.јAhVJ?a:+.]zj[V{Ӕ.֗oY*]:^KvlW=U۫%;5(:vT܀[ZB S/~t:P=O2{U}q{v7sH4D$/:Bk1pG@NV-Ri)7cE-0ȥHȭIcBȅ&{,`,a~w;s?3<״;g@|l<]+`Wp;rOCRd,{3Qdg;)c:\M6zsHn's|U U֯kKdf`ָم'YE?KkGBJ Sƚ}HOM\O&) yIº0nlzdן1ڻ_xRi =K8bsjǎi~eԿ\8o`[k<.^>ɯ6pm/3s9-̠|Q= Rd2N46T|>_b T u_V{&cGgdžٸN~R3XG̯<_3Mw`,Z8IDAT`>Öynvz&M>ȏ`EdKiwߢd#\sH| ~Q@;[Wr &dM `!ŕCs:ۙLZo[VseJ99[˚ÄAD,bjKj lqI}̲Op'3U}$V{wؾʜ hK~6f0nq-sh0i<{ffS}^;Yt/;і$D㮎dDQZT˄!i،0R{vvnhK""r%ǐB B]r^.O@23l1Y/SH-ە[&3~NQsxAt~ʢ?V{orE={vDo}-륶rDȷ kz(haf\k5 @stb,i&龱= "" ""r^Jۉ}"ka 3Yj5s!^?:9!"#H2Cum/7d ؏O[64VX]UJ'%L '/*yLdR2Su5qDVuH/Q'!v{@DDngzTDDn.oE??.|o: oPsfNr_Fb) -$ԃoT1,&꣜_;Rm>1!y97 17U~?M,Kw#N9 MƌMΏaߋ)t:'+^k4r/ì9/}Ἢ]e:w(ۓA*E/rwȯ,!{z|i1vqLłIYDdLh^3Ywv2)t~Y^@|qV/> ,H`YS %ᷖ,=h! 
do㇝ɞVGۛ?g~߯1\_>Gyg; ^1@OEDDDDDDQDDDDDDEDDDDDDQDDDDDDEDDDDDDQDDDDDD/QIENDB`rally-0.9.1/doc/source/images/Report-Task-Distribution.png0000664000567000056710000005721413073417716024677 0ustar jenkinsjenkins00000000000000PNG  IHDR"[):~bKGD pHYs  tIME ..X3n IDATxwtekvfMHJMT, "(*v_{{_bA@A+"ҤC*fIH{9Y s?3\4AeDDDDDDW% """"""""""b!"""""b!""""""""""""""""b!"""""b!""""""""""LV#3'ƏG֫²bHŰߏ pfg"""j[3=5f|g#.njOM]~OnDd8^~3s}wp>ocaynDԺ{V+N͘ c9%)*)ueؑ'hEthàHKpj@_`&qQdK/Aτ6hۮ=<%֝2\BIcOهɎE#PxACն{nr؏ O1cK ⹘!WzI{6 Y~33u1N:+v,BN?WM~vT3R gH2?8ſNzL?ґq [f KC_W7IDDDWW\O]Ϋ2VM5C8r 6wtcDI/x%L}Cف1n5vVW6~鸽%=%7BR'0/!a,>+7{*E4|C䅍{ޅd٘5Z`y{+v%""mDjΑ-j(&X(BO p'>Yx[,7݂| hv^]2d_,Z]F3#0PTWQ)6Cg\Z`iBɶ{&(L{d9+ksխ~f}᯾ w”UuPt h6#uٟm$""ڹW4 Um{4pc9Iͯяbth-AÐv]s;~ x(n|t\%o\,}sf,)'5ZwV'(\d-U>apr{΂T=Q }[AK|`-ޅGT^S7{ (.5:{%|}`/Sٚ<7 .;]DaC/>Żkئ"#`T|vT8S_ĭxk  QX__ o#9fKCOwAHm+S7O10ˀT+6!u61GcR4, }r~!H)d5Lo9N:*v; Ç:ŸED1huC|y2HZ5oK?!8 Y}ܰ|JhosnNS3+*Vκ{09:/DBzwϏ t7[ېhvAnz& S"?>r_cx*1/B.N0E~{ m?v셧q*:~VCaM:VU/DDD4h /wZ,q(b8\^""}ń5 #G!ڡyP$""""bڰ9%|$~oDJձbC1S&F陣d __`KMhZᖛ?n'5%NDtA}"DDD "DDKFqA*»3|8IJDDD "DDωų17}( DDD}DjI>]ب Fv#[Uf/ۇnZv% c7A67BJ~"޼h%__1ս;])g̚. 
nU/ueqDGܯ]gN='gqD9,Q5q^"""""b!""""""""""""""""b!"""""b!""""""""""""""""b!""""""""""""""""b!"""""b!""""""""""""""""b!"""""b!""""""""""b!"""""b!""""""""""""""""j ,Q2y`ЩW+~_D &MFlG˄esVxߚvtX1_ ,>Zvb,ޖOdq,&oKg-1O """" bwJpJ2V>7$j(84<8$c5R,eta02MVz7X)oAqB;;:\-o`kr.[(/\0^z5W""""bq,ߟbtH̖nd$`pW ^j*;BDDDVݝ¡3E&7+-܈CgL(wAC307قn.lǸcLXFKp` ,mD#6j!^ X_FG> p c'U|S 9oC26%MZvI7[zs:]&.FFyfq] !T"cnGJN 7?l`Ə{;08LAAWؓ s߻ =ѻ9 :g EDD䂈)!1cl!Fߎ؜ # oޅZL>^Z?L(5v_ᑩcyb)d/^1C<)R9]8i¸wp;6݉ #16'un0ϳ!g?ބsfk2_[@FqqQ ""j2A攐mwp1Å @z~)i(?:x/߂4Sv}8QiKpwG?t VaCJ ,$mp?$#؆;>ɞ?ގb\ik$iIq<1g sߛeGL_QIa80f6䛹 ];F/M6W>|3nlvYǾH^؄Nߏ?ZQÏ (0vvdYT']z5\5xM~6j3%ێwZQX.L[%l SI%{7 Cdݛ,AB8 ;w%R`H݅~<]il;R00sIC]/@r2y2 DDDA·N]Jkgç+Fs׊AeQKFJN ^\sէ"5e}ҀP19/:a90ruĽKU*qdb~3*Qnx_/wCK%,Yǰm?؎n^,]pU,v]EVܷl/W#-7wGB|{?Xy8?͌"""2ZH݅O6bGjN bKR.>ߚi}cS,Haw_RQJK:;/I85w%d~)$ vxC" p+j ֚X!"""؜_;]SsJ2f-ۋ^fގ߅./.km.Cob)/Gq2զF!5njCa,L¬ :4%8i—;OԨ,vYU|z;dvV2؝\8"Y^l[_~+7DBsUPmN|%9$\3>zwc3bf ~sDj|nHfQ{DvMH]}dCF Q! , ]^f]8]E)aFu– +QǶ\V?F%VDDDT "26$栄K+:5%Ƈs֫`9E߆{c0k8S{vHg#,qf;Cw$ sqZ9 O˜lOe#lNʁy#OX~*8 N`ƽPN=BDDDWD$YGfoKVz F| ZT =;ޥk?L2MV,FJN Rs((x钱x6njbNL?o9Sv(}-Aѕͅfꐨ08>7 E`p$( hT 3h$D6lIG2xwC_=qr)È=n##ׅ""" "*˪#C}0Gs55]5MѪwOcEe_;N!1žB[ryG][j; >ZrE **aЩ0w |?/V腞-Y+pIؙBTZd̛/CjZ!!KDDDD "fdj): +ES[?%{ʮTR"D/6D-z(v QK"6 !Z閭 u?HDDDDT R(9)U!Q!ౡZAN-W@@5lj؞#oyJœ ?aÄكX"""TkT =YUu)K莘+ W {N@"HD,8%O9h+J IDAT'%ຂDDDuD v䛹swUngnNh!ly1 Xb<ؑ/ǵAT`߯1XkrCv$~qbӰ9r㈃xr|$!""RǗ>ސiFwlR!lyst'9k!7uc!6Σ^E#b:aK10@Ad؁*DDDV!U 5hn5/ZS"GĎ< Q{Ʒ{b}z/?op*L~ u(xBQ#bwI8ab*rzOb?o˦oő9aFŹ"{h&]7.`SGt'p"LJ0Hi.95FX d2nwatul}""z "vVm# H7@pEpqմ) RA&BXNkRQ% չ=_J]~1zMZbH/d""" "ZӅn\oUDۇA)򶻬3#BDDDTAD K5u0$s6 f×a8  g+%XG$A|q<m"""aamCbR.! kr ?Oř_MYʨrz7[:أ+#r)_cUҵkW >}7"<,cVR;TF^/wxܕRp AP@;` &!dևjD "eycLNByڨC)L>KDDDDT94ʧeņxC%zjIJؕϓa1~V|'΄|QGBta+B%"""SbhR֩DĆGk/%[aVe>a"0쉇X"""LB|5U[Q ;V-ˠ%-W8ڸy;^~~cbءFDDD5 "|{sD*3 D O"""%!NT~R{: Fp=1""" Q1Uis'hU"Py?wrC$4*=Q@T9`Bt R^. 
{O=>\P^eiĬi1ihYfoA??=ODDDeTntUUaw_)U+"\p-}$zF#8W>_=7,]P/N]"Ȃ1sP+d_ ݅7${牢VucIsҊ@˔}CY,WÅpU6,kwI8| 7a+B\vŏt ump<{ϩfDDDT P IvV sT04"{D.cIH|31i(ڄaghtS$Y""""aѪD d*eb{`q4' .!YU".{9/Bǰ Pk; C 36#QM `-%FJ.<HcCVDbY <&4𾉘5feZpCC"""*ڳ}u*7s|Ee~ڟZomi ߱:\xa!r0 X` |*\:|#dB [òؒ;LGʨv%k4Wݧ`9j9Wcljԋ0g8ou$8}|*GY"""]RՒDCtLVa80m;ޭWsz4QF:bf5""" \ʊ<\f05y#v)7a'Ȑ/$v$qm`6`+j ֚X""""F;jA6WU,#m1x8 ņYqpJx$,ښ%zk"W oNt,)|A)I:_|pI2nJŏ3Z=YeV'7Qde `ʈ~hݹ @!#ׅ؁DDDDѣyZaYZ2Z0ӝvlKɃ愳ol ݉)yp;,ru Tʖ +QǶ\VNaҧ;qG[0Y(:d7sIQludq`VLt')bᯀJT[ S)G2Mz ×!5c-EbGtQʚ$w lIaPRv ^0^ ĐP ip_-lN J NIV@F؞aDu\. %wT%KYZn~]Gc4V#Qm^-bL(:dlIŖ\0T F`oA:gDŁ"+S:Xz2[3J2 pºf-6cӸ]I׷ ZQԁ=' dhU"VA>={DaEVNNGn8;}lCg}vLxnל󈈈%g ٬ uQ!٠8ڵ;|x| JF9N7"k"""zݛ3GУ9eUG6b^?L=r׸iR97o&LmÖ3DO AB|4*òVĄ߆NZpK2;NTWc𓐈ʲ]pS*Hnkt(jMPzp3VXXVNcH嶛db5bfvwcX`;y5R0/QADqW|92o 0w*P&_1^&K Cy}(qA.q\j4嶛P vkz}8ab5R[crL]+??: "`*ѹ?su'r?ݢܐxypxP>C Aѩs].Jۙs]8IZV6DDDT#W4D1hg%ɭĆxk?'S\J7aV;>:dq3RT`V$Yb97*'6;2d̗s7f`OQۮ$Wa!"""jA8~-XMr 3IJ-!5yAL Jd1{ `XV9qHQS "{6GU&)̠ŘQP+9I_^j{C^(׽Z{Ԥ ]"d/QS "7tdeIymTG""""jAD!3`HBKMQ'"""jA"^J7vԸ <=\sm SUDDDDD*ǠuLRH_Ln^HDDD>AlQÉA5:B{㯁V DDDDnDA@&hJS2w4}u8ApvS7' &HjSLJu""""w"S)tZh9N%b]=,""""" GgoI`ũA4EDDD A8$zT( bթA бgs">:V(>ؙB<1ZɧtxE,xynQ(hN>]!>( EM~A\Q6?W+ܰذH[M8/hBv:Nw:E*L&ۭ3嶛lf5b&vX,۝lFFX?ZwOwBޭPo!>θVjmGӔnC)jۭ嶛^_vk CeX(twuӪD|~gjyBuswug!"""j<Ub>Ы"tEzfoxk,H a ~^ #T;A@BHˆ@_-C5Sz#GBDDD R0i=8MV⋻{"W %CHmh 0Y}a`UW· !%+P Ưs#մ|Z:?Z,H]-:]$_fEABDDD R !>:?EZ_ NL'"""b/BNgD-j1xmT'hUNDDD ri m!"""biHaZ0/u ~v_4 !"""bi$? 
@QbxiD;y64J7ޟ^j҉DZĸnͰA\ cTHxq(HcU}0O z<wmU3z#̠XDDDDnNN7:oN1k^M@l7M@=y(osamycۈyE̽~~?B|VDDDD "M jz6ǟB t#?.<:wFp:""""tӪDDɔnH*9cl&܀7FwD\7y0 1G-~V00#7\ Aw;`YD @1Gsxj%I}թܰk1{shU܇˸wO;'&4[dI QUw(⮾-0g4mMÂM)(:y6!Nك[ajP*P ~ ./9nsl/X"""\R Lw7Nc -<+@ΑW"y(.C |YvfD*1kIgG \sġw@DD{@يЪΞ* ֋flaa!v[Ýbm7vkfsf6nd*, v'ۭ*,,x߈DAѫe v|wf; T@o5Fu-WCT@BDkpȰ^ ܸqcވ{=kgD1-vsw "P(xExuxx|h 3&.IFЉ.ęŘ{?hޭ#& Nt׃DDDD ҤY.؜4JJ.$gxf1RsKpcB+ JK?-"C`=|*^jϤDs""""j25%,dq6Kv$bO G{Nɫ?ߚ&ۘ|mi翿h|t|*,Y%]՜]>rH,j䘁ߝ_6,x9<@ #)r,K,K}i]-粸ݎ,&w V˂(ouU=4;j<盧 ´2:M,%VEN^G(a?Ų$3ʪK>#.nAJ)8A~|Cpʛl'ߴ\6^5[eYG%^heG{rȉbֳ,4y9Q}d4b/wWrZ,coʗyWwd;f?٧ ȕz|qyL[xcڍ LQ .VbN (ࣈ\ """Tq? 'br ֳFi EDo̝̍zvNO9' 9}u]Aw:D>9z%gcK.xƏf n3nLXwH׋8muoП%7*ЗP_kˤ:Â8sX?|?wvꡐe^8JSdX}KZ8)MHrksx{$W]/ <>VcÑ" }i]cNWǶ`d"3"#֮ C͉<6 qe/{zQ2gڬytcG$t>wOBMn77Ƽ;k>W:_JNu~4<ͿDzV@n )Sn2GGkcG5a!fhnk4MG߮51fּrhomP͕n~2:3kEYD+Y&UΙvֳ$m:*Y9&0ZL05xmY |lo !.ydu,Q-bxZZ3pC98X:xc 5cyl_N]ϸҷwW=ܲYУ ca"""" rys_48y;ew0cB ٜdYǾ9EP04qoS']]OU˞-VQy5$PFؙ[ɲb,vI˂ݒONPDtΈvDDDDDAD*2)q`8(O|*YA펵Ih/߰) LOPs8V%U"""" "+BNw!x*])FgM/=a'⪧#lߏtlFo:EekRYDDDDD<;-b)-iiRw6"}+Y >M6LltcX,X 7N TdN:&fTΓlxf6GMʖك3]<::-vbql?fQD;؇Y_qo v%s9ǂE}>,F^ۛP .+Ǘ&qpS A]㖣cy8Mh@DD/m:FBnwS8il~u%APCl+7c$<c_cvgN&37Fhp(,(%X7bòEeQ}n@|}iirNNDפ3:)ԅ w7}XDDDDICݿ* 6qOIޚHcq$FmJ&-o^Gɲgi\ܓ7z{Ox #?Sdg`z-[aXL{*H,~ucq;u"dbL#7sH˿-|=`s?LbXWC:.?7Ich78xɸfgvo\crq֐Hё쵶m0 kWrr o(]oKKْoc;Pã.#`c9Ioh"> aV*)_`nGsc7[D8Rf #“X" (KygMoL'Vzkx]z[DG'KӋgYb=:3Gp@|8;go(nymYrRrC &$9 H*"d@+gޭHlLxljݟb 8NwxܔK#""""33&>[?Xn՟Ogϊ^Tǡ*Ku9)>~jf}S \X7Q3JxW|CddqͽԠri~fKDDDDDQQQQQQl4JU\ba.WLH~~>'Oĉ`&nۍiXVUT˅aX,, aG͚5 ǧJa*FTk.$$$ӝC|B-:Ih[~~iii;v@7o^e瞂H5 x\kXNO[%$"""Rn7,M 8{+[kwVi8~emeds͕\.xٰ3<M|ہ$>ԗڞcfx3Uh_ѕS-/EX$|WW:acY7%;qMFVCx8b#}(TrsĐ;-H[MA԰lqW'^>~ M+Vu[ϳV޷>.7Ѽ$qq {-OO ez99xzzU\\LQQ1~~H """""z)}W|x+ qvE٧^pgw˼j#{NVjԬMޏ0qH=< 찇~[2:95i85hZ-)0Q.G>ǿ% 5{;W&6`7 ucj$ˡηS-}/jש}Ep("c-vGu*9u|9t6`@LFŔbsbC Ô;J):qPyϨajjLر̙ӧ7:aƹҢȥCJE;,b'Cim}O=“ct|>$tOK3] C !5MKd$|`6*.e-N!Iù%?ӌ>-ǹz3+a1Rσ;8PCvqx2 yX9nO]x8.Q:gF4~d..3wEǷ~Ïgosu]i5q9]f cq׏C\xh]%"""r9dTӼt]}Ϡ`ӳݦ%S"J:q:{p;1:ߧ%'dҴ8fgu$^'e.Lqcb嘧wXjQnӝਁL 
cLsGҲI06Ӥ}&i;YsvXcm{sGo'}ܦnYqKm:u ɚ5ү_ߒ_vU}#""""R=șElhG ReBԲc[LĴ 2#u:%^m\S/v}3oVfȵ;`Y/Dg) -i/{ ;pРsYn.,J#û]Z:pBݬ."""ry$[DwCj!u[1Hs&ǫo="OXSJ:6b>Xj6S:^)|훘t:/}} l/o2ϾɛfFK,xxSyڞAײu{f op}}$ᵀW4tY=.!XL\p?A2f=~It"kK<34Y-;>u>kD&K,Z髜`"~d:uZ*"YUh_tn:v,_*kyNMfٔ6B00,l66|Dݱ~}шH5Y,.O; ?^"KvcZ1 J """"RX,N'a&A "r p8d$DADDDDlq>K9}V%"c=t*\AtQY纵W~ b``b%YtUisDDDDDАJn6 PCHTj*UU""""Շb!:&fd,,7:"[r!btoi߾=KՌe(TNaPF XDF``łbŰ%3iDt8.ܥFgdfrISR!<.뮩&w8)ZTt """"""{ӬY""""" """""" """"""" """""" """""" """""" """"""" """""" """"""eq?+b=FfIENDB`rally-0.9.1/doc/source/images/Report-Verify-toggle-tags.png0000664000567000056710000006431713073417716025001 0ustar jenkinsjenkins00000000000000PNG  IHDR: pHYs  tIMEG1ptEXtCommentCreated with GIMPW IDATxgxTI!@WiJA4AJPQTЫ"^ E~.zQ.E{IHH fN&i<>{9kΜ)B!B\!B!B!R !B!B!V!B)hB!V!B)hB!B!BHA+B!B!BHA+B!B!B Z!B!*X?̟8y6;.$ CeׇvqԫYR!sxe/BʥSʳJ&bOUªFN.V\8, ѶE^@> -sxe/B+X&`_ӛ]; (8NSNCTr#sxe/B+Xf[ |D֊tԩDyh+!B\44_b!mtoFU-y,UYJWx#oAUyb\~S?:ɨn"xzt栭rGҸv8>5LPC} !|pb*;Ӣy ]۸p-t:00lts|l$fsI+u!v& ҳ7*]s^},_Bq y Ua43tp=ZxMjÆۥ&.RNS3Ѭ :V:lM#o9HKZu{͂dNdʣ)OіHT&6:Si~~O?Nfm[jcw^iڐ=:<7pe2Z[WѨ9THT+wGeC$e}ܒN/BO; $$rp-BoyXYk@ptU.:F,8JnDVZۊuޯ:cU's6&2ø$U F[io&n&.'!&n&gjEa^U̼e'oIVY%uq#n-;ʢy( AhSKF?'{^WSr峯wTv'&: n^C8OvV:~?C3q)Z=Y9u5nR`_ɧt^,r/nY/BOÁh(wfVl˒ODI%@ d+ שV 4a՜;/`^<{ Y%;Xg5ؠ%7(-eSUcZ@Xx7_?9VbG+|2e0]Wsh#7~/ԡΌ-`c,ڞB/B+\MF&WubƋLAEA ` p&/~+xSg'^5 D*&Kʦ˵ZS.R4mݵ{8V\ /p$7&ُ@ EYaw|\_sX!MϏ~ q=WP(ྂUPiB!yAMH=zןvv&C'QЭ7PzYwq/2O\!5Ӣ<ݳ*eU F=mFPz `srj9Mӧ/\ }eO Bp#̡ꇎzMy_m0)kBqi^Yv=K@@`wd䋵>ՑwN`֪d\@qҌ [8KjK.(go{@gXβ}[ ݡ#lhМ5$'kv&ߵ 4ÃGcwb.Go*Ռ 9TiΏ~H{7r8y0R/BO'r_Ңy ͏}*0Z3ъjɋ3ywIo;q'ag@>FC>剿B!.?[Eqs$8e۳>UO}љiK֓d:KvR$$$}S2S7VZ?!B\~~mgfGdH}TKi/[3Ǐ3imlAS[Yxe{S =ҏ_!WH8׿F7R;*_9vND|15j!jG_!Wȳx%l{BC`2QA y1T,2W6B!O1-;NNn>vCXL&#~f5Sf޳)sxe/B+\ !Bq5KB!B!BHA+B!B!B Z!B!B!B Z!B!B!R !B!B!wA;:Li)CCG2i|1 ֿZ̫SdW{/=ǔi79;KdIsJ !cn=eq(W /S$иر:U׬ ޵oCL—*뮛؅jW45B!uD Wf2gf<d>393_eX=}/:?>˒te4 ңR#z,ezPF䝫)uWIgޠQHbɔ> Сrpޒ Y9 ?0\O|uweʪWYU~H!v]1v~gSw3͛D YWkQՑ^Z9I,R gƮ儹> I3?75:w)f!?S62`gNmM'> UR3_A_X+Wi4D?˖pe? 
o6\Rni ?E !B\m;3o3yׇ %pLw/^wQK MuJ@ǗXNNVc4~?Ϝ¨^R^l/,~_>P ?@ٵm-ևGI#&@(ev{nombkD4~JIpyl4J-,Ywsxѳ>#gHȂ )!1{WWBE5m 9\fu;6Mg9rV.~&+C!׌tbx<$;)7q"}zS_}g͇Ԯ P:=oT?.Z tLAF0< {_K~FC >_rt#:-b٦ ,:nak}0}>I~F>Dptο:T~jͩك9|"úl$o4 tm;Ug&-t]N&ck|M@a !m'Cp_P !B\3wVK0o o%6v+k>73qnIo{͙cjņw=?Ay<<ϷWٍ/:o_ LͤqCh_B~֡^3Eۺr6 PA- az2ggQ 43 rǶxوVND2ź_jAȻƄBq-)Gi2O>ZM _MၻI^Ox( N䮛 ځ 6Έ 8 WgbW!8lNK>|n9BR9\d[\"[FMh'CDU|ߡp wkBzC`9 |ZS;,ܪ:[ &̞V:bwԜsء=5 kbWA& ,H5>K'B!הrC#Xh~{.N5pk7ͧȗuºj:/,yyxlcoah~t.w7fϷ_$/ؔh~OȆTG#պqȪ8lMQ1 >wߗB2ٵwZ"h3A#aao@U/  I=˿0nL`v'}_EN>j-#B!9:0q}?3eTw^{Tq_!L~ApB!Hi1nucyv˦^Խ92B! Mw;ӹv5ΙM9ћ~[„Bq[B!5M !B)hB!V!B)hB!čƸ}vB!v ڐB!fv !BkC+B!B!B Z!B!B!R !B!B!WCAG{N`uUJu[ % k`.ܸR~~SؚWq\9z5γ F96`qp-gX0&~F2b\hSwh!>8nmnߓaDz.~n+Ëјtx䵅qz7DGsxlEv?u8em_iOvF!۵6[Ke`@7U  u|: hbOs)@w忡w~T~s|@gUn,<>C^jCG"+OfͼX&]OKͲ/ޘ;tt^cQOQSNByyvؒǴO:NL֝0=~;km]3qԞ^cbxndFw%u+&0̙2:gx mN>NxdX/韫\,7ct :=c0 h~z h.Ҡ5߉}b- S+n۝a`KbbdE{<7ş aW0G;ɳ?@Q';_gLbXs?oJU?29P-|1;СX^S{m0ޙ,`<~Fk]*akQ} 8ho=&s_/G7垻R7ԏTF7t۱xs?a͚4?M/=V֌̧o.'ũq]y9yٵ|tm3mA]i{p\8Vxݜ4O{.{XkcqGd(d_oFVe*p击TcK}rT{=ˆJKzHjURΔ걞C++jūCT_SvG>VCTO/W9̘6MgԒ9*-S5]Ce,@mCMSMWk:TjRU݆֟HSɻR{ W3ي(y*jDvL~uV\}}S 5Kۊg=6C1N͈=6FNPy sԘ#ջN̴jǤ*Y91)3GG=Oe|)vz|\Gզd%ӱ8zc5}/5*-/E~N˓ceρKR;zWw$ӛԇc:6}P{ YMWwWç}6JQyAU8>ߩ^}zUYNW{>gvW( 7QӶ]Sj­#ܓvvwW}&]wP%8urHu%҄qeU;g _P/:M9RP=N%9dmW~NwZ9ZίTZ^%kP|X9vw-<]޹p?FکXϨ㗪TR~R͋&THQ>Z7SWZ.m>JeWO柲Ek˻>TU6f8Uyy06|˾Əhץ#i~jq? 
{rmuiL^Q6B0YN@aɤZnk2~M(h_Rtt(_1λ~v5 ?Є5𙄻>1FKwNh @%{"(!GDfJ7>7ǥ bcGю{& $H=Kq:75檴ޙp[9yNX8h9:}#Wo+dnO}*hk'"6c`=/8~ܙ~WC_5`nɤysxet *_O†Dztgfx#]cf}=w[-xD[,x%f7c?OMaxnד,^[@@.Lv-{eW1q_cnA9+!֥iןIX%rP$ʟcesI|o?}jlpQTv4)-_+Sr?%9>{zEnaZ.8hU䦤b OrX4{,(k2;g'7ú/| KY m 0yZhc /n쵭ug`5 KZ;dwCW9l=''9:7ckiZgfپl]Fjhlۨu-aq+};}{Ijyg˻>/_=Y?~rÃ{~lfP {C{qRXS8CU/ d=p[j<NW4Tb%'  %-,gx).,V}!N>wNZb61?懙}Oÿ^ë7ߡj] j|66| Wvh;52Y}_cGJ6/\Qr,.ps*R8mV "W=u+_C%OM.gxovou9>D@խ2ki3Y$JzsƤ ?AAZIsQ\|i2;E2gkIۘ?9\qxXc>4Xg2M"m=V;|P|ήl6ue` #tɏ^qFgk+42rtJoB6aVi~b:} ɤ$G<I[?g|Kǒ8c9<1O3L 'cPO:YەӔsZq7|X9vs6`x.\홈 0T+hNs\e=ĄqK;#=Hu0q#AϦ%OysE֦wz"M?j^,9yyZw}MyQD[t7I-~۝g[c|kPcD¤5E67} P+b/0ﰝɝ/ݵ;C&-佴<:SC1#Yn oOgѽ/\;7mWGгk/!L_91;G0>}`6Cc2qhŧD~(rۘ9:^([a}As7=ěy6k _mًjGobϱǭnKӀ|عpfiZuqp4r?뗌}gbOn獏YVQI+ [0nKӀ ꌾhc=*7-4[GqiQe̳ƾL DH#{?l˦A ugk +)d+s}q<%Qq~l,Z fB(306Rcm\Y`6Guh]+/hڮ✽s՘-#4@` r'Y BѦ{yE}'ո=S_Ap{p м]vv5{"C}P6'^L>^_ &~r7UG{pvpTu5JydhD%N[><1ػC>7RE@PZ{6[A*&raر#% ދY¯q6CjݑͰwb/窴]`||׶YG9cQ8. Ƌ1#vq1k敻t98tkv78=2H` Fq{ۅnhz˞ Ybk}ko߁(t9rRٳ99e2U2nCfl]|q(=%眣co}TӊBkM!к9+|~! g{æm$Oz.6W QlLפGc҆#Chf's[Ҩ] ņџ_'v' kvΆ«Q. "m IxrXq`dP2R2U)Z-Z|)˱]|qL4 )zUz'Mxڸ#F8NN$Fſf=tgk˻tpws{~x]&1,52Œ UdnSjXRd6dw [FrsEVnZ-P,7 ! 
Z>F ppg<7PoQz#ݸyot#y 'c :t:+y \u\2,& xycׅ#Ͽye%3sr0;Gp榐JQ}7Vغ3kurqzX pg׹n Ik܎Ua9{S&kK1܎O>pzk'&}:րrRlw44h8wsťL~H˗):ݥcu]8]Pm+)DvlǧIcB ބ;|8|K& cI nD›&{| Lb7qҧ MnJP'dbdr2?Ii;H\|6v[:nH@Pm=5?#ɫфFB=g$mm8Cfrk<\әd=ԐDM;\X{+1cF3zhF &Tn&̨9*%.γƾ>Q [; H igЊO,9ʕ;nG{4օV+քB_|FW)0Ѡi jPݐĎ9|?3jh #\ȶ` zss5t3YMuoO]zGMHovMD輪P38}$jӱ-DHduűMarҧs]`N>=[G hNnMbҡ7W#'G\_mGe /28t mQŨ9Ҋo\9ߗDPT Fd@CjDמקZ9x\yr}~kQk]hU9ךBq=ѩ+B!ĵC.!B)hB!V!B)hB!B!BHA+B!B!BxqB!f+!Bkr B!B!B Z!B!B!R !B!B!WCAG{N`uUJu[ % k`.ܸR~~SؚWq\9z5γ F96`qp-gX0&~F2b\hSwh!>8nmnߓaDz.~n+Ëјtx䵅qz7DGsxlEv?u8em_iOvF!5rO8bCT2OcT5H,$~/ѥo@V~ڝCN2+ ոcos({?kk:#>~琬rcy V8<^y@7h72ie5zehU]n}ݹ~z}ʈrB΍W+sƖ8~vҷ`ҰȀ Ygmz_+s#{6+1#ե\1A,`Δlׁ>ccWnv%/Dwo%I\E^r`ӧ3V@KG8we/q_NDwؗoQZDt {|[Ҝ k֤-yl &Gf\d>}s9)NiK;kn+ul Mscw,9Ov;nk ³gZr֫GzWr,U^Yq\edW+^Z+e=ҦzzQIߥfƴQmzASzhܾ>;PKv?ZhTNCzp)>˥ԍcB_Gѿ=*u Un枴+K;3M*YtusG[-Q眗&K-9KMZx\?z_oiʩyw*!ksci|r~-tP|RǨ.1_DR6ÊϱV5cQ}<,rlW :l-q[x78[5~G{%8.HS++|:aޓ{nHc:FGP'Yڄ9*p cH&hE:t$]skBF e\F/exUvƶI/&W -6^JNǸMuBkw澗^V/!<إA 8"~ 6SUBL<)=.X>Јv3i aEBXѭ 5WE΄wges‘E˱Fgן~[!skzU!wwlG;_+7>ѷyU c2sK&͛3u,{tgSb}6]=_:|#;Kv AlvU%k,pǮӥ˺Ayẍ́м{'3w9q>kUc'xTUceT0EqFAVR&[_^+^rozttE% OjXUnz~oנkL'nz¡o3y]H|s%U}pOpS^d`= P*> սIɫ8/17z vU ^URŴX-HB/4סו?ʚ焓ZS"`+e[iSZV(>b,ZKr|.jôp;]]q{ +c 8MIN╩rX4{,(k2;g'7ú/| KY m 0yZhc /n쵭ug`5KZ;dwCWXkJgV{E%Ã{~lfP {C{qRXS8CU/ mf2~l@9OkU,x.~p -cA֚< tո厇{r=clw75*LOwD^W}rTαk+v/9 =M6SJius15:7,XtTrQ? dd$Ǹevko)'Y?=e@=F.׿?ǎl)_,,X\LcUĥ:pڬD{V\[vK^hH^H!7}7߾)r|5[?ddga釳Hj}'mu/IX08|]ƣdhwe$21/\qxXc>4Xg2M"m=V;|P|ήl6ue` #tɏ^qFgk+42rtJoB6aVi~b:} ɤ$G<I[?g|Kǒ8c9<1O3= 1r(Sb|sJi9vc8ޛt{>kPzJm_+_zQRAQW-:7=kwNd@^ց#C>ye{S~}QeCӑ425ɷ 5iHq_/LЍ^#_ds' E!۩ܩ]3dbGKc3ui .>c=R؞&Ě~f>Z(̵cx~u= ɇ aiva?{KߘZ%mE#;yXdmx;_~6~v;o|u♻n߀}.^/x>b88g*LSWA> /{ %ksZ Pyp7=VtJkn}xx?n۹pGݞX IDATy]\D9}?; =N\] }s#rju Ź'bSvm!k[rڙGN\lg*]`4Oťw\BNSs>:n*<$q.دy{cbJ)Jb<~OzWsnPLbg>K_Œc<&A#N{[eCpuAږvQB!%B!B Z!B!B!4#Hv3* ~L;͕DZ!ޛ#Ҵ˲q,,mc"k|5x8g}9z~Qv|6U,&&cU؎"kqN׹ G1 r2l8o %z)ܲ {Y<:#=\Mg 6/c3|T}ʖ-GhkLC_=GYT`D+5y59CYA)hu 1v2-! 
.*򶑙SBAAx+8mGuf̄ 1tȑRdwRzv$t=K0Gv{l[>#I Duy 1Zup'[w@LL0=6<ԺNXUYLm02E@as83v~+bͰXe` 圻 fqACSC"k!i]0;KuA;t|Ҍ#̱Ğ&a3fقl2b1rs:40#d^ yj  t|.67c wMdj?ͅXSbBFEd맇:F(dڍqg{A#G$(MaW8.^- !et/j'.ចHɧj~Y^[Z6M}ްU+flJ[$Ld$}p՜Nx >  )#ۆͩw%q">Htt#)VgqDOF'5v:֪q80!#FqEph_ Sz2{ߺo6kS.>/5G-q!E$2lB:C~nc_ۆvb;t"fl(`\j}XM(ڢYq"҈To#z`:á3v\MɥSuG{>ɥ041W|Xz*sg-c ;{6SFmTTq;ܸ*Δj-bˆly <鞾Qe8o8 .N`4,iĮ8fȬ{[U\5ԙ5Ąb-q(׻ j97I>lym1Qk`_[x C3s&7s 3|"ahBƹ6Ǥ29% #qP_"LUJ⺘YIZ}f^>Uă QUxurŋE'hXz}GXY1 ;96 斠aF/?`e8c$bTb b$0DSu#^1>{ v/[q-.XeqF&&i3picg孒-~ 1&Ay~9iZG5%b1׍O1a2pښpW ̷ h؈e@d5xvٍ+^,HE{uuqrߣNg sv;Kak.;WP=} E 3ܿ`_m,bJ'D+bHTڼ\OUPߝڨ:i$02e}(("A:D^GwrG}9 MHxnpR1fҤv} |R.]ʒ9)DcI?ql (-qE{9t\ 6DdR{;ӈ,vS`k_ˆ7mPY-MM5˦22`l0Q#6ذlv` w8 HqWWth5ҮĻ )vUs4CD:nTmr7~K}T:M5e22`CWtO {~vd_6IVA:#s&FwG}nK̄tz֘Ll[6c`0!˞@`+vOM 1 6!ܲ="ۙ2327^iLᷠղoҰa53lvٍ̄A;[hX~'Z=KL﨡La!A>4Oz]A\z}$c–5i w*Ft;l]mtrבXZLØ:ɶFL|Y7µϠ^O#3k7+ _s蓼  AwAA)hAAA ZAAAVAAVAAAA)hAA]LEEE  E^   |-        HA+  HA+  R  P6l;d53-%@sl|p`:w~9z#UKRHv5|t-Y< ϵӦa{J5[sHOM!eD?֟tp񹍾F}7+o~)vrxj\z+<6<)oz}[һ~˪gcLZOO 5e~JJjLL?M+^MJYLB<=8[fGV''$%%˞bMl-mkҠ:)Y>QSY_QPAuhRR3{4q>˜oRڼhsZzqMH#mb~ TsKW'o«s}rZwٵstݶve)e\8\:-SunT=ԇ'<~Zފu#l,Rkjԍ][UTb kO֫U9/,W-5>zX|q:f1}Z;jլUܿLXruZ.]. ݧԚ_R3~GmT}cpz^5u=WS_>VlWx8u%jcqUԵONW'S}EꛥnUm<8g:wg-gZb֪缭ƧV@5x[ U>W*իjgS'Z}s~q~.TKZXru첷SUui^>Ǝ/u 7)ǹMDDo2~!_l$y;{"$2Q)qNI!D huM^@ŔJ^?կ瓒&*>Tg j=^oe=L:w~NJx 3mj^֦ \G}#5UwgܲxމhVտm%BH j j^f'tp4f>Anf=\=بhx zz'bc.lDum,]{|҉\v O AA|]it<'ԕ ݃f#ntWrdr{&¼fi7zZ 8bxyezX,׾s?~?Ҙ`}*jUghjS-*<2~:zz;;wNo|[DkE9w1nHHL8?ύ$kx ]ztCpVPTH}# eXF}/]G?Ɉ1juO]y/04҄m˃{1{@꿨M"&~X? 
$ ೝ+IFp^&&pUqUctH3?We1WEQP:0$'RWR8uޘv.R ͗Ri|*}si<YU|+ī<IY9e:)V?|;צEˎ[5`Tn#Y9o, J@7n!p瑑DuqI3YB5ʇ-$5BX0X#1o'Vv c!]0o<_=ԔeK9ZqqLNbIaX{ۙ=N?H ]{zvvikeǬaqDGGMܭ3tuIҗ:UOMhՕ[$GuCNO~ܒTVx(Gw9uwpi<~O6?7ٗn}sNwN{3 ëvA{} 7gAkgxvl/Ioa'cm<}UON> e}#B'~œ*2 ~4͏fﱱ#bq"֞x"&N&&m6Xz/;SxLx9W~8Q~h6=1)MQÌNhm`hj=`yL:ӈ2cT HM/9gGl8wLo\сXǠT!jw׭_b땴Ws\ οb4Ãm&az~ɴw_!QOCO9sX?-'O }'$ %sΞӭ|oּ^m`>v,_g00߶{n1:SYꆽv=NYJ­vn,ےӂUDnn \Ey{O K'zWɅrlyg[$BNWV&v' sF_ݯps~9]n|?݅x#F_-ٯ{6j8}'6i߄ۉ=d.DT?np;>:a;J<˶m9Y' y33b  1,iFfMf⳶}ulGR g)>e$e,VgM3X0JMާl, n EC"-WN+[gNe`yGSAڷ-Y';MTĦp$V5*WB[< 5:e$NZ.Mf,_ŝѰ=l|g,K l1FO휾sW~ ^zmصʴ(,FfN uJѺZ \9;fTϑO>(#:40#d^|1 C9ؓSoTT4Ϩ7]IƝ0Fa zWBq# LDڣs ZώnuՇWOC>|4gێokլZ6k6fSUy,Dps9M1c泽 #lUoSqTWijොCoG;qs\xݽqqޔCo*> b(n6O׎^ucooOus!yXQ'al>:!i` YE}{>img՜kWAAn* J0$ք0BuDph_ SzٶvqUS5)3'1ZĖ yԅFɌ%F^j3[BHd؄tF+PNL0bNdt lژKV1[ PT7EDo|u:LȈ$f55h[Ujvf AF+e=Ei] өj`O.^^8^id4<Ĺ.i 00>iCa 0] )\^UǦNzy}n MDwp6z -,H3[Gf/fVr0 :}c̊ߍ`e8c$bT҆D{<KjZ7P;2z7]iMR#>ŗ,ɉP0ɵmR Aa;E59=P/v_8vvR_+nXh !2Kۋ-ILN"s*HԗSAK^Z"2L}t.|882y+>rRk۠>X{*sS-WOQ~9FMxvv>Ys?NFlKX8غ#rqCz9M?4߅kdI]`F\`<G\S{ě8sv;FU@c'JL@2Km^,h!𯜐f`4Gl>.&I mk;4L;nֺB 8ɨl6Ec8ެ-'ɷuâFˎ^Իk1O5cP?Y؛O'&N\La$Īع{ iNe} qtЩjנ3E pӠ  WU'  HA+  R          HA+  b***AA,bAA+l9AAAAȭj XIENDB`rally-0.9.1/doc/source/images/Report-Scenario-Overview.png0000664000567000056710000033120013073417716024655 0ustar jenkinsjenkins00000000000000PNG  IHDRYsBIT|d IDATxwty܊{S$EQ]Dv8ŎK6&d8Nr,ꕔ^AD_{q)"%Ys{μN}}g͛7k!B!2hB!B!ĝB!B! !B!B!B!" B!BH.B!B, !B!B!B!" B!B{ЅB!BE[ ~k M35>ͭœ>3;5M#46kYne7ܥ$Mq$`" g+=x?"YRDw.>J^i6P3͝xcv!B!'$$OEيdŅATTQ%-V Q),_2贞Hgpv|6[D3m-S!Noƶ^sRÿb)TT sP[ks !B![w3|nɗ" ,7ƥ .A4 M>[ȫ#//Tse\w7qi_*jx]<2~wlB!ByPOO066l~d%oy7uEflKWHp̭;SyUG}=OECPT4 1K)N&Ǻ9_SOϤcMytV./~A^ E@f F9x)Io &i>*?tO;Q&³VXIA^M*:&q@L~zeF˗O3;H 7G<Ͻ][#1[&%WA j xpc%fѸ7<_“OKV| '?gO0ޗw;)K`oaZdn$vWz(XL1_o֯8Rmo6d!&-Oրy[{kGh'go_9>@E3|MD?7򄉝O=9#|w}I!B!" t!P5 ?1KrqYiƒ"1H C` 9*fh^4C19Դ(|;px4 D2ۄU)z=F?ᯙ?uwA_x>5[o/ahΏ_"d~8;2~Z//>?]Ca^.G܉i7?}!ѿg f_  ?'v|}_OyӞȃO=Q:Z*[wWLU;5@1PʃG ⹿'~t5Ws[3W]քrgi p޷~/IO1!Ͻ^Ǭ6~Eonj^! 
ވ\f?!op o_9aC_3j|(޴EO8X?G"t!B!ė?ƥ6n#q9JԊCo4kzG]6NGCu8T BB(¤( kWb`|Dt %wQ .DdkeI)| R>̫< Dה~nVvühj(U?ˌ7П :_or>^R'^%SO}LgFj^yu*&U]Kعz)ʉ_=q)hs&F=c34}u·AG(˾@tO(No4tYi&^覶e; B!K[Z=6jE1AJZ]8na"qI>AA7 I\SxS <=KXh &,VqnޞSTv\/-M c8.`<'` &.(%:abjj1:?i&zBMBI e ]3;W n ýZ=h-Ì; >蔹b::x羟biǩiq{(~U#gyg-|uK4%enAoVXzΗ t&7rOvΚƣ^eKCI'68U}u.4@Ljp8Hgf?Mkf:;i93R1WdzWʥimuUo|7 6T%Sz.B!Ҹ]L v9ALZ;{iKYUM4c,H]RgjI%i)wH[M/)f+-N4ـw|Ffx:ߌhxT7Kc5 C1 x̵L5 Ms4/p9z\8K3rxKt9tA{S N^):7=9FCb pwQ緅ǁ3_z( M]QcM9cf?Oz׳qZVny[ލKa smai?tձn6oo~; ;!B! }$qs :Fe}8we>in"+-:܄ݭ[Ejh+AڥϚyt lm2 "4 3뽅 ׁͫb",1C8Q{#cIeYQv,.WZx/WfN0=k'(7_ac8zay]u[aeav0)vč_gK3<5 T;T-U#׌AWFԹ2*^Bm凼yz/` b٪bO"9L~zs8%<57C& Mg?(z.hU_Sل[ygyY;Zq݅B!6}$q + TG-QM/ yEфG# *؈L:+ W˯%[ >Ɛ;%L16f4 c&Y!o4uXP3LXi}7o`v1o(;+;bVjWКmt};k|{KvbNXcY{YK\JyAU/}^ЯLF eE1?*6g@s/x–hPfx,[ x :clK@C8x&7ذy+]X(b*}Vrufo~r:q20Ÿ'҂4N!2+5i8ghZƢwV?&f=J!B!>{/׬yfh9Oj&R ?~NX6/T. !` ±ɗWdoc©rKŝH䟓86OT1ͬ)@c#LΨD$%ʥNA;yt}{is;_RKJ4")2P{ ef h- O;PuA,۲}Ms?3 7p/*8,47?. 8>[s`,e9zh찑XVJ˩*YYDB U?4~$f哙Ya맣wl 4v^kpjΟu \^#sP,5q]7I3\O[4s  ~a$gdA19HsE'쨀109Ą<&hodHPL2K 5oIhl X)^_WKQ\0,+"=TpWu6ҲLWq_!B! 
EaJL i ƄB!B|Ik>:rY$L{[k=B!B!gq^N|u㦿5:zo>}|,U5J˹8ˊ1< [wPx'r~}^7=#˳F+=BL~aĦdtYK#0OEQ<}ɆRaźPqZl>>|S4'9p |6avekٶua}j w$Sd4 Â] &SM12p=+=7֥^gQ;̌Ë@lR%%<䖖eY1kSS^ۅFә")\sVs3st'wL =S)m 8ЋEo 6s)+Wȧ95M購q1u[,#SO}Kbbꥻ=ø}CD=P=Nz3mz(ᱍrakx":hj?͵:|yUȃO9Nww7f]03źhx34{SVfF|t*t3shM?т.kg.5lIW<ӴW#}H_8cCGS3G\`‡L;ћ gfjUqU388xxT [K~H{(:H|4/#88mV);^uaNhb00T ]MTUװGغ,8Ʌgy "l3!~$@KAwu92ӹyefT+GUMO%)!LNE9wH |iXG9(],֨tT>~;'7ƐPVZk9"UDs;N焃]?#>v:_a]f(zBӷ\0k&g!&-[šᘘff҂?E"9DRFo5 =fM};9XڵɎcw$tmv-mrcBg n,h,殮;d3ks[,ky>ȅ3Yٙ$EݤjƴeY/JXtaNџ ֮h_xN𓟟d6 ce5 w\7hdwqiF*+jhMBBRʖuDQ4^uLhZJt5"}@6I͉v aqQLĥPNt \Ɩg$n701EA2vĂ*De=?r-8vScҋg?Ao$5#|.ש>Β#OoA5ؔ3Vc2b 'zPuM}LϺAg ("ܒRJ c 3tV}PzRC|{ëzfPNŹQųh9 3ߠc;qn&,.Txrem~WF%I|eqK?<-C%܍kil:ĥir- o(x7kqqژq)CI+eb3nZs3]U N(g,cO7Vpd-=S3ٰc~ꊳubsi},`RCƥm*/0bt-gYNUˉ8}Q2ȉ_ZS(5ts55\h`Do!<.U+(H ]+Ι<~~f^ #t-+N2t3յLF' 26%Dށ#*cpBFsi1xtc tX)r?$Sui k%~Vv)"g^]목MrtR]^Nׄв8MlLXBXf4~Fީ&>n#b=zWOixkf%8>œ "ә#(Z{+M j`/00j }BKeU$_ 9ܔd*Zh9r-$j!>yz 2GX?FEefbSuQ32NO10l'slL2ī}4Ǣ)})T ν:1OQ17sǓꚠ^^Wø݃d OBmiJ?}kc M=D撛^BUf,M -^oǡ0 h*ng?]mTUGaҸD8F""B 6b+G~|%1~>MF:4LѦ{)PA 4ʷxrF< :#wRys<|( hYGzVi{eܫmfao 9ˉJrPpOQSH[{?Q$y]34y_é0useHũӬ!*I!; :mw=o J:6zm:K/CE[h*=4;ɉe;꣛svc ^5==8F : RǩxQDci^fGZx)ﶀbh4S`d*N*e;)N p)џHs9oq! 
ɠP_M+*ɣ'9Č^pY-Xfs46 ׃ωR-jdҭl2S4Ճ{zWo|Iǘ}.3uGi=AEۿݽ7qD1 _W4W>l`FS0Lu #{hpH?_>*]*Ωn*/tcS sǑ2`O+5u*n>N8i8wT3?~u w&:ǟd|bbPkCg0b4Q4kNpd;_}xq&<83L[xU MNހACC:=ɠCS tp/'جTWuMu2R_?Jפ 4w h*]4^8<{+IAsA8z->8݊ &<ǎ>IYR0PsNMao3M5 M3>`}uXY[/'&HIATl#~5]B0 (h RWɱSx{ȋ DXdžSCi*en^kL5ުEbrUc0ID4}lKۆ^73=ō7}n5,m'94 Aܷ[nPgmL D$>˩VF-.t ҋ{m 9Nck'I^O;Yw\Ј<f6ƇD &> ?7(rW&L$gjVgVỳH&"2XZ~bK4HRR(XQ>S80 I,LhX0F`6Ӝ]*F b#1) 3N/LHh0h8f:pc","=# | z\^V 3N s@ѡu*&m8=HhDa>*SӌV0<< 0PB>8g`qxP eUdҦ^ebat҆!auu;dAә#abl93%08`S#L9t&F+9B@>-t #Jbo֎i9Wޯ`KVVs TԷr5YcX&wzhah:B\'vBJQ4uHVjVQ{g.3Y$GbBAGHFEme[XJLX0AkƠ7f5`{< җSVRHrdΩ^ΞSチǗ]5,< f8~5&b]AR3Kofx~3 H #x>bP3CۉW x|b[VFYA6a&f'OՔ eN[^{i2d) fCݜ?{u|EA>qcw~[|J)lUt՟Lμů|5$ot/?x0h|;lKh^y&{qYp3ô;Cy# XW1 `[7:;Aǹ痧䗭~>N%?_,׻/֋-pJ$n\>FZm [!Rkb2ySޭL5yd-_Ic.qOR?k dQF5ņ^v/aK6kV.t'׫f$1d+9-|8]]G( iO1q $$OJZI!Xz3b$sսb_㭃>TDB2 E`Tz 4'%Q?6FKw Ԝu3:iC_јoXE3N՗eK '>)7_b6n0-9׾NIxuztPч%PV-RGC:k"ŗ%xt]dF%&J#."W_j>Dd3\D`{QɄlYp[mkde"ivxh5Q'MG&\5l@Eln\|Znv o\rH6>$of׮H $Ɔګ{hHFOz*jkQ)lO?WƤT}k:';)?\ɨ. ȖB}DR3S2=;teH^6Xy1=<$^zs ))[G=!Wsw/ވ`1Cg43wмV}@ôy-_~MrZ Qow*sO1$=ց!&&Pd#2"ĤfۮW8hȉv6<$w%;LDVz<_&#2*Wȣkǡ$@g3") 4iũ%#1_Trւ?5Q&͋yƞSZ7&`.>zycUT QMfka*Ok {Ǫ.A3WKQnLzť_xyh3]43i&B5&(r.R]{\`&8,Pa*ђ)4u;bzndȩ֫/_6f*L]s܂'u%Ws9"#%҈w #훰K"0 &K1zj`xlrQz_E?:k`pԂ>0 k١i`cI^SMM[f?p*d=nuF3FEǨjiEoy+wڐ Cb+)%5bA`|y9ϸhJ_+97YLiQ:~rI0ϒ5kYu흾/IeYjϬt`vO湖'#)+ pz\8UC";. _55;/41/U$\yD/%?7YS݌6Ϻe.[ k >zAv܋_|*ل^iHlu6Afx;7>{8ua Ljn9 W? IDLPӇIgmT4J@,y$,Lf "cyA847 nNSu:FzFM,Qxbk(@unѩdkPk:|rXWAs1SG5SDz?z@kTN"%!?x'c&1#P Η|E^3nHhR!%ITk\=?$%F?WE;3ۅd :R]v\ýj/&5Ȏ]}91v^%xF8s"ŗa 'd%e6Ï)";w]4Xcښs X*]| gfI,Z˺Dpi^{/b *`#ٽ{7ݻލˈyG~St{ ]̣6$#Dht s { ħe~y&Ѿ&BC3Af=:3{֎b0dёQ'Ӊv eQ)Բ rbХǒmNo"=?+S}4q͏DF }p0y9kt3NnA:]\~b(?{𪄥Fpuez݌0`$;\1}qtuW{ =ɡ1I3V:fCn^Iҝh1"5$$CxF|e@ :*+++3+1zޱqyکom==@_uaG \ŘHLdR[Sאb \Z;#CzITc)1B!.\#I>#@^z&u n;3Ӵ6l*o1dW{k_-#5~z2dq0R[9=PemibIpKd OaЃm =;$I *NnJp Zh$X(a\pjPTah`P ]N{ɟI~6Da`p=Ms㴛 pQUAJOL͏!M`q(_^NZfd75)?33ƔޢNdK&v>.Zb(^3%gHQ&fWOyNMIǧvguLi)ƒ̥J(Ak'6ȩ~WsI3s#DR. 
!pe gR^QCMM9YB_4Kk4:)F[޴jИ%J1bx*M3 OvetjJ8U@).Oe$X`8+CUYu#6LpӫD'Ih>: #;tT ՠ`--) >ǙcpjZp;OQbh(۷ݿ?/Mf<)/-( y1Tw!ť5Z6+PrI*Vu5ɀ;] ?8GE#:ނ{2s=;@, ;?î73"x uqk<% CiQ.uMt _gۇ'J*䖕;}6TB2b,;.'Iҧ^N0#tNKb;Ǧ5w\DXRc TՄ"@T@k-Ϛr@(ݸ !,AQAUBA PX 1 6z)cU[4-sUa%כ`d =%ȷ,%@. EŠߴF?˼>3L@0 2CBxrط 020FN|! Y@ІH IDAT,ƌMTxz2IB7YM8IN\tZ4f"HIM)Vt"l*T0]xP+-DQ.{+kIYhXr,X`H#Vs(84 ГFxdVw8yZ2p G|}F̟Ӫj@hI2Q#JtTU((3NǏC~Y5y_rKoΆߣ'H.4BSDnѡ`Ht淯X oE8p, 1dmt#% z[1]ԖomsVq?x ӣZ7U3;Bwz+XLVQ;,M0O޴ժ€lnbhPxqY-xbx(AS,;]I%Ա53_8NeTf *6*ג{| Ed&1*vU>0bXmzJ#>50cP*ڈ3,:s#?8z2[f[N&Fn=E\tv0Y`0tYhܾ=2=>_[O&]jDžuPE< ler.>-Eŕ[νض6وcbv~$bV/iraˮCɀi^:^CKwpL?۟tJz8:l2ca&c88\fl6DJ[ԃ.P' i 0z.ڋQ廱T_G_z?9W^|o=D& dPVv׽ PT#֙+@']D:̰`]TM$H.eMƉƢu|R "]y-_FW6yb|ø-F6>7R:wqz'nmԇ*M͎ؽs#Lҕ !0X !дlM>OQ9&(ز,r\8LNΒR#I[k/ӝuר-X{gAu.S9+oV\Uy~ΫZ2RbJo觩Fوϱ%3͉$(IS904pqm-\?-,.7AMD7<OaytcD4VilڗI^tA~:85^OEf6!PjgbzKh߶ 9^'^UIf.^eS:hP]"dO8Nퟰx;ϵNok 㺕up[ ܜYXLX 'lnJ]=:dfzKa#u{_/qGk<,x &![WIrƍ* |C*vcajW YAxrb:/&'kXX'54Vɖ[MCBJb+OaW@|b0ݺ>߲ \l{?ECx~on3%;KtM֧z$#![NYcEGZ);4 q%|Ob&SC:dh4n0cVk3^b!0 wە98}8la,!0ؗH 6M&*Z:)u=\;NG'fzj ^O;Y^p^K]?oudIY]-ߎkVomy+Ǚȫ;1#K̢:Wyu=YYCatӧb7 KHiqbn54y쩼]\#|a9aaN\mMh `Eʲq@q1ȻrIw:Y8{MJa$Kw2 +o:i;řV]N:">'1w"ma~Q.m\Xocj2%g3 {XpUKB"#-YRG N+uuV3h5B)B_n[h,IYs}DP2 v1t0'Ykyd[5^֜d`sXwGSCS\8K-k\_ r2 `Ũ@<>TD#kx*=E8$ar-4q"CͶǨٖ"b&N9Aw uM\tN2CtՀӞ$ݕ~ƙ?%?B``3*T_ɓ\d#3;HeB$<tTw!b:1جٰwz.csI̬ Y.ZIeS@RTQd#OM䦙`-LnB@V:QE<8NRdmX3lܹ2WD閝<Z"0:|nڊ?kPaIzuX Yy>MIheiͨMad a"=ꢏ9v>Do&z [M&Y.Npg{Xu)6;G9\ܮ[9P%ޔ{vS(zSȤi* ߁jDE4nFP4Nba6UBG9 \//w{!T(W."}]W/AM09Xm(ڴc?K$q}$Sͧxt=]^\|l]̛-i= ]Wƿ4PUͧ3UdXo!xӵ=Nq]}aqjdS:'lI-02?F8zC<ۍ'@b3ߢ#|rs7W Bti2`!p%6F?b} ב6bcWC$G/P6kq.w`S wr#f}t/wۻ)cK6,^uow2h0۱- %0o;u$Il,k&^~*3̭_rvazwl$Ftm|S}wRa޽`(nsz r :5LO;2b6]#STL*ēQb(F21~~"}x-g7L0:8x(NRW[/o1}b/ww1sWzƍ*@܉\y2Xٵd}!RX\uI'tnj-SM%'9~(S) i5>z uoa}iNCΝc bķ)߈ixX=)OYνhL4~-[7Q䃮Kg8S F2w3yMx]U_}-غ ]&Jxlm6FE ~j즳o׺y-Y_ .X}3o4Vf{xWIGUx!T<Opf^}=+4gtTT Hƒ&csGtt3HTm:ǧ[J*S7?--9^忍g80@j7x ̣}mۿfTe9s183So7D#ho?G^{7um -p9&.i0;6Wɢjwl ǿŶr~7/q+䣆FrW:V|={VZSCŵFz8wևX[NĜb|MEΜ&#iOhuzȭţ<G' 
X/hP7p0Ecd1~;>GC:`9VqX{3nv˦M9|DLvV1Y=k&gs|OS+wsVN,M:er)OC+P6 [iq]`q_nFGy:=jh>:xϢ MzyǦB޺Gfбt rOxAV'iUl|Et}bkroB0=>aeRUƾ,csegƙ.Bpkx׾h;4\lÝKņEsW{Gzqc?B|=m&k̷>Xri4bt[^/?lۜNGhLVgc1\j[f45 b0awQTgE} 0:Gxi!LkG;mә?l2KjYIl*Xkisg{G礖 qv`jMgЄl9Oh RSC^98swy ћ?C}~h [ IDATb6x|ޅT!l|' ?UOwEm?` eîYlLp3c$Uk箘Oϖ %WຝJ+vEU OHĬX聼Df#|/u"ḩzMY4[23 *\@z¥ OEH!0Ze~]%8Ma48.Umo[6zǘ HiP+(eb*5j$€3CQyEAe%4TSL$-VeǪ/ŵ\mlghrxRŀ?XDie)\;HՖئzW /^-*a66?=&|\K2/].hVߒq]ƾ m 0Z=䔔SYkY LyPqC1RG^Y%A `\Ρ m#F{2(8ǻڴj~sX#;n-}aQ t Gva)81tt0@.4٬)%dW}9cCAZn%4_mkph2͛KYm5^N lxE垯]Z- OϑF\֮`\ ,]r]&'%['֖vzi:B5PZ]Aa4uܔƆ]le[49UZ9'DzBVHZ ξaBQ  ίSeoRUхEVal`Og/[NS{aRj.tbM r?!m[1X($.WT-fW.;u jhe|6_3J,c]-Fc]V9xcc [*f7C{"9I$I$ %PȪ}mqO_[A+BiCvUf@A2]$IJuDdPobv.`ѵ/fy3ZX4L$ K)3VjU| rټ?=+k(ތroc"4^>E{4@GK3 E?eҼ/j侑/7:2ߗODhS~oկp$AƑ3oO) MdPyB[[ԯ)e_*}j:0}uS|XdC$}!:@O&Db[$+E4f:-Iof~a1pS[}ͻ@&Ev׳nȱrIʠ083ٺqdlg()5$[Iu1F9!$}ZtY$N$I$I$tIҗD"7A$I$I#I$I$I$IK$I$I$I@$I$I$I$I$I$I$=d.I$I$I$IK$I$I$I஧Y$XN~Fqt]'DŽݬ.},`fzj|C!T4l$I$I$I7Fu"}] ~N•( :9ĩÇǃGF[R&1+_'l$I$I$I$ݓۃ.V.uՖݩ>e هb`p (#E'eEETiE$I$I$IDj^'3KS +[*)th4<ΕF۵<[r[1Fr,2y4 #ģI?Ku=}ЙW(=\Pf['ݱhthg/9g aza.sf(ME]D/3Ţr_d0`r|1IcCt NNptلbz߷hi1j: @dvq&K#=|pcu̠.r ͤK$I$I$Ig3gy}< 0 }A/ӏ34&n$BTzP֬&5_Ntja1f=WO ٗkY[faldޡ %I$I$IMqח ]۪ˤTB*xfj3`㳐ш%0__jmghgFQ/~.I$I$I$}>"q(* _llX`j1 "#IޮwkԮ`21* $DGKKN&I$I$I$}>"qw 95' Nϧ0T 3C=znbVѢ!.q]KUt| #4/i: M|aHz yǔ Rp'tolve$I$I$=H{.T#NňnÕ4OXm6]~ńa, oAQp8Eo$I$I$AWV6lDŽlEm$IRl6}e.I$I$=H`"/j$Wm2@5o$I$I_!2$I$I//K$I$I$It]M)H$I$I*@W!}V#I_@$I$I$I*@7FbTFy^ZޯfW;4X,4d$I$I$}yUwEQz077'{%!0x^F}_$I$I$I_Nw !0 |>|>gM$I_ hL):&I$Iy%Izc=m4J՝AAq iٜIƻ[ ٨*ٽ}4?`fdCfRKq[Ͳ5+ TYˀI3$!]g; 5?qP3a_%bl*gƲ1]'MTvi413rmX(I$It×NXC2D?Ή˭qɏrkg6E#4GG0ԗe tƺQGi}f,:3Srimk*I"gG?G G~3C1yqˎd"AAŹ= v c7J$I$-!{%WȽӘjg1<̷v@x#;d:oޭiB֏ɬƣ)I:.boj綤mUHœ 7 C,txdo 4p=_`a*F֖S(Xe|;XhXz}Y24˥/-;F@!s`{W$I$=8d.IBH VOvn!?݂ryt/-?9MpWuh!Fj9 }_g"bxykN_)>ε 6bWtxk-y\ gL rqbS^{s=[~?J7~gxrMK7vq[pIcm[Dy,4oHlVٙno^c@??dk8IwXC1[xj_d8zq.o]`v| sb8V3nkVcX?1JFJkO9vT+]`4 Y|ods52(s+t Xy,:pM.~='MDcIB ީǹwϖ5V:z'i"?7 O6__.w _O3ԉEt& Fx q}Y28$I$!tI@RU " 
AZ:>:CL7ѡ3#N5Bc*tu3A`V@r)bgigvM QjQL Mu\0pa_fFaKm^{H3Iw>T[%:ӡ׆S(W;ghS0'b\鍣 P?{E?p'W@B zQ, zzޝg;Orvł]APQz' !z(3󝙝hh52G$m*5.T~}LI&r9Ft2{zyYvLzc >Z*Z&Go2:SKf~ _SdYB-qZk]TAAYa'V#N/PVN:vWH& C2Ikl Yӊ$F9\OWGrDRWSe!VCUU܎6Cb5ֆU'xUZ]@0u 4`+5@맥{0Fƪl-Tҹo7g{-1@o$ !:A'N j9-rsxm`B:^py4ZuƧi{$D0[Y!?c ;5Tf7 RFLɤ ~"'NdppԀ,mИ#HzYD\BZV#ihD&NK0uDDav{I)$t âyǷUU]-3ـI%ʨ{zҞ h_UЭkSz1>EGNP'&%ÒgMlqDۗ& iqq%@ƏdĬPTxSUnyֺ&q%cF@ڍ99SwU+HLpgS^â=  'tA!܇,,yf W0טc!„tj*cB!eиu%K>}{5o#IMTeYIr7jO=_/ɥ5F22JGBnDjX`S99e:\N ʝg'~Pv7ֱϩ?@j8gAA65&6='8`F 1ws3-ޢj>'@hf:3&5J'^.!oD͏p6T P\œ%{Y,7Y-ˇ%`9/AAA_s]N62 J$rc$PYVڽŠ?;xxON֬/fEʰy$|VƺΌdzG1CUUET%>_IBjK-UBۯ&$}a:SRNS: 3'I;}ڧ:9UN8&|y_,u:}Q'??Q3sK&B 4ѨFzTA0[p5b>;hk w C.ٯIE}1znl1LObZGW%$*afS Ĥ38QON\tJ]yV)%: Gfz^BŮ FjёFZJyw~ :0*NBhD5-" ]|n:36*%سHJe|" E"bzSj(BEE/+  ¯Thh(f?'=DxrԲnDrDl'kM7;cM;z-lF}扽 Ɏ\+=]VZzg*-gl$lZwPK̄zXPZ{o.bah;'2K iu|1w+[$iٻu//7ui uMVnx&}Eϊ%+@Hlv^[\F0$1#Cs kۆn,;I87-3NGDD^AA8gδd-IbױUe4}XH;ǒ``HN dy!!ME)k@Ջ'HB-/O72U5C04Qx-1ҷW8:=û `ܷ vifO jwjf{Sʊ -}ϱSa#YQH[ L*>..bYadY45[GeaheX~~XbrI_Yn9>UOBZ=ÓHzRhEpD$ FK   qfA5P%-2br I>X Bk[!c!C̈́ 9qIsRVh:FI0@j6mvUQ9Z]Dkt:ޔMmy=[*>Y@VZ3.5@ii%ߵ"Bf4 Nt0ɲUdM.ktv#&="1>(!@N8B/{M$$1ad醶ˮFswSՑM: KA61:2;rŅFBl8ia5d =ZaX DNX zLZbH:CɊbɀV֠׀d2mbF; x_AAAU9N&qK;N`9YZDXJ i,߲:0]ߘ(LZ+,.cgP#^OEzTlj5NFepr#CɱTy{ G i%5;,D&D"a2鞠CZYY\_!Qma[~!Q@MO~9AAA+<Ӆ%pD @sI`6j1h%@ƞ6FS3:]}5j0;SG`H:i 1DHHE3);avaFJV!4F6R/47Wv :!;)ZA>fb鴘Z4/9(j\32 Jݤ3(3 F`akhL MHmYAhq   ¯.&L&~ S~?,K   灟)j    9   y@   p    D.      y@   pIG 'K;~NAAAôg*ކ$wATUeBDnK   3 AGP`=W#o>\> AAAg! O'#AAA9]ҡHqNK9q\zxJ:AӍN_v._Z',<V oRg&7=VlXKf| tg\TUKSG?Lh>ћPx__曱0ؓ{yP9 Q>ꪊqZ2H3 Zn*cٜ,\FցQ`|ڭmN˦?t$]9}1h+ҕyDQ=I* {]+1d2 O~GZeMWlG ֜L5'9yDMR5.? 
AW kb^q*.x#uhj(_|5WK>d3s}UvK & ]շ`ÛO9fw:?c' -|ᵗ?a"{D.ٓ(=As̯֪>he(eJl_VCi*QN,'9(NUAid#h[(Z8yxٹraYЛJq\sn|+|Y$PŸ(YKO")..E NDn*ZSO<W"O} 4Hyx mzG1qQzWn>CxLv.gю4;y]\93,讚-חY~$p] xɟ_p}| 1˘ڧ*: -i[ܗH$f&Oe\u˳%LcX xp|ZX7G7g9MP~Y_K >JߙI(??߾(?-@WU_\{(Y^`g H5@ɚx޿2EN~g{q̙%1촦PTxR;_@4(;x|861nI6,> nF 'kp<, ֏**ҡ#nv\uOq\~8<Ҏ5D~pq{|U ٨C@ x< H2^*n?Am T% p >ŭbH*A_dt;``¬>Cdt#&ֺrv)''3ߠ ף LJr$?z$ŇGT`2ailu!uG3bOQT=ۙ՘=IOf~>* ib?L jVqg<|a?^m̡*hڱeu<*sw^ɧWr%;⓵UOZo߃EQAuMFZ FCLV2䒄F#u;Qd=I Qd#GB U zxG] xhu+,&2}^<~0'E`Pn $18+ص{?49 f&z#fImwT@ zGp[4G3:g&?#vML{c>.}n/W>BǙt䏿1P55tby VFZåF.??C~kˠ]~]n/A$Ydp&~d?8ٹƦV|!z4A0h%˃?Dc`֩m?&Lzp`qilT^7.C^$)NGAlƨӠ*7^_ע<83c&uw܊1H`*7<9)7LChZ:0z I>CIhjT>.йətiZ;oρ;k;>Q x%f%*v (Hfs$h2aJj㑛0tx>_ms΂4v }~v~Ko~|xMwO/7fmƌgȐLUvVW2ڕ<3|+rӉb+ GgQL)?!7]{)x^4&rs rc kJÚ+t314MQI E4w"-{(WQ<_3;7mԋ&bi7l m|f>/&Z:Bl7aFr&Ssޝ阙M>xݵ4x'~IHNKG)Gnc1_^TT'a#{Ի{ӌy3@ݳd O7ϤL{5{~̘ 2K~Օ,|aTk{ϗ.O_v:qɄQm~oVeS72m1{[ABu.sɂ碡V4xRµa2na(ڣgtR][Ae=~OVe$W qy$TY= k -8=yh|>ݠc[y޳蘙Mn eT֯忳p"!װǙ1q #3xZvӸK;by5 8[r\r-\6i,pt1#G c舋yy׽ŴɷmE˱rEo$8Fj_h؄.1116mL LѴ?Wt:tZ-x+Yt3d<&^1iL  I.: K\0i2Ç\̫*+ygE7{pYwNˈC=JNA[| 3Ku@TLOtKJf4%wPq# ~mbxx?>A O.k>9o=A~g<"*ٸj'=ȇs#9,)gQi<C{ M5x xֻl՝Tr9mO?S:kG=;?@.e]L~1ݏ#lmr?ⳏI_<ϩCn/Iҗx,%T]:7=NO|puiWqCx-5:4PVO O:3nޜOḫ9$Cށ )TGdy㭷nXO2S>?2i`EۊirnOuF>z5^{5^{u>YZOɮ8K7+[ͽTrfσxty4-_},p#rI(d}ׯ2W<&} ㌋xS`C1qcyj7)Y |CA|) yꋸXz^ Pm-4Mt=X}X"1[㹅p{XRȍ]OW[Wg\e_e7>5lۻS|~qgv=D /";g|^2J+h4?YGwg_aGk M435s:v4(2Fnkq$袸x~eu-zd ,lq&*gy_֋b/0Ya\FI_MuT}wŮ;bhBUTLvֈIkDTMr ; G qWA\ums5M. 3Y|Q\^ʯ?gݴ)tw-м Bqĵ,n{PUUyPf̱LEk 86˲d#Ō >k@# Rk#Uu4`ׂ$!k##n'{%q\88 ̸"7lmd⸞$X͘ҺNK(2Qq rg>z-xOܔr􎀟cX1elXqKE/NZqhQ贔naLbs~ą :J i9{F[OP*eWѿSIә:j)ڞBr)E;N|U$UiݾJ:Rġ/_ )"c+L$rͅ9g'ӣчǓ>t8ᡨ*8e x? қoG.Or~ +tNΣܨf?P-ڎ} 6zOصi(~_FB#1r0+ȲF'4N1C9ܷ.MVpm ޣᑄ=r/q*G'n҆"6U&,Z AS;ˑ)d F F;zoɭfa3QRTI ni"և~H8Elv]mP}x ϼ̂`xix3[ÝòނN) 8蠂/){>f2ڶzoߟ+Gdf+FUE : "[oɺ5/_3NOPgK` M L:y6 n)(ġ * E z^~?{댦{U=^t!tQ9?@0=CVvKۘkh ju摧y\͂oKx>ǟX'ͤYxJ(ܶ=+Q1iG&um#JՌAeg{ O>0x); @$<-~jvոi9|3Gt&ܬ#x.2#(Z֭J "KQgYaJ?}r%gĞ)ܱgmA~?rqWQtDH*44zbB4h53ȪB}Hd! 
N}bDڍu a6#:@ U nZPH@EG$C%HEh ֪Q,j L/Z=e f-Pz,Z 5KHT;EMl bK(4zs? t$1ɠT5yp%!&L2hPQ 1emmIګ72@}kHAEΥi!LRR'n[hߗ_͇g?t"Cf^O O `;e Ot'Bnaך,wX޻4ZX?%ſ d71t4¡<˼.#3fw2a,S/S&7bꢲ:c 1V-ZDn~:}V6A~ # g'00f/%l H)G6\׾wNJ*2z$+%|^|M8.0$p4fߣ̮!q@~NZ5'#qj8J.DzYKgĆO!D^#^7b* CNo\o`N@/Re%T^mջʺ%Y p\ `ИlkXB^6h'w%~'2$oWɋ'W'm1dqMCn\|.'ġL#:Tʈ}ЌP.y5Wu JŶw=#3FU>g flBkLڗҾFj.׾~TDm$}"1aD`9l}3Rw_0F ?gl%f嬶`לXG0ݹz.7M{Eu ?Qv'GEh ` HQݗxyB.s=,K쑷sѻwyƯ!$v w^q.a^oVt#aYAI`R-N 2{$>ʗ+Å\#>x&kŤXs?OWs|سwl7 n;/$0"I"LOzdh&n3 /< .nD@\/$&F3*D@VZ=~v5uKP2lr^1t5AJK@GHR *ʚ8=׫DlB4sY1 SP'3vhG&X=ȦFʵ6&M`px;4fsB4 ! RbFh(-kδH2"-$aUe87+2"S]M hs7`cZhd'n߇$nABVF' ;SđmqZ靟Í9fLҺ0fWϥFjF.MnIat3lbWf$$\xU((t20++-TTUQuEZ{d`} ~Pw݉!9˞l)v}ʭڟ{ZdN׎Jtl"1+CkWއ.Jzf:琗Bњ2l4r靛Z)sC eZ)\-[4?hCڕ~X4I֑>`0r蔬x:^(: 9܌siЭ^C!t=z-P}i}x":n6ޛi7+fa|4T;Ieބ*oc5[ FslKO^:VL\r=wf*񖶠AU8xy#)";3kA+~Ff_k5^[.{gcmH|R<6mޅ6s,S&撔؅Ωfr "Jq 57#ղLYG)IuCfwrssJEZ >]뒆U&m s t˰Sz)Vg4=;C Z9P#!3Ծ雕IN#eױyIk8ithIGZ ^Z*]H2|m9ӡ]H ]c@g#~ 66c|p:?=@cm+lFOANtLDuA9DJlX:F̸#SQvK5}zo~hmEדarE&Io%{>>t ~ᜄsӒדHs^CxFQA]J_<ǏS| Y:mD>$ 6=MH= q alB;c@WCFn$EBclǙ0t#IJKPU7PncL­l0p@GL/z Ť[`nz pIz`Zxk#ZS}3HZzg2!&Eze )ٵrzw>vijys]=k8_@;;xKDD#^ϯ剭a5}b)=tٍk(z;#b|LWU!W*WG{xii%U;׏JvGRd[|,?lm WtU MW[jE)X+<Mztw.xlVzF5o?_W;v<;h))mBӵ3uVY:>Oώti8Чu}s~={&R_Kqb Xv V y*UFwUTisknzB)z`umm-磌˪XP HB1@z!!# Jq|x̙3gʽϜ2?#wcSD_K9VY1O?qL~e*ȷ8޾8._OB!BxwwC\IsnG7J(F-nVN& $wvtUl]4:x4XcLB`訬͊9Ljæ:1U4^< Zl-N,N*ͽ3lQ]V:l;h4U7ŒFBbR @ENK͍(ieCt g ^* XWjM i'ŭ&R2>pɨ#1Lo͵լĆ v:t;tձw.tw;x u+خJy'-f,Σ*Wӕsvr%B!B֝{Ltunt󠟗~uzIwM9m6x[GHxfJXT041 11^hYLNSۻ܁1Numf<|M `INi1ŐV;:{=K[om{(mo =xńs#݉]w Z0B!BǠȨFQj*(x{bodB|=v+$G0{H ~-=VL\4"a ڊĸ@v*V IoVI~I;Z/#=q&ʊ6Z ^Τp-˚ץYvUz gl. kٔہUBHhWٙ@A>\@gl̬-۶WCi{kH1cIUɑO7H/k=9˜%}w9kik%юSU1w 6]Ş;5mt2wd84deVҌ:~\8vLIB/0]!Bktc4? OMw:):| 1(w.+k'^m4Z< 0W+SIES !WhXL~̪[:!cЅB!IAOMSq9NtGM٢k6+?L:IB'Bgm!n#GX!Bqh W㰰79[nӜ񉕂23J!B!szpn+UE!8iRim&ivH/ !B!N]U><8_*m_$LNB!BÜloWq:B!BtS qh5_L!BpBQOV3?:.8nZ|MgB!BqBz`Of 6$4 C1#9LE!B"YY͆ZZ q: âb\ 3]!B!8]!B!L@!B! !B!.At!B!H.B!B ЅB!B B!B$@B!B!\B!B! 
]!B!p !B!.@t!B!H.ozRigJoYN)ٗV^!Bӝ!N/K>X:Z^cpcupi mGUUTƊ|,=:v01{X3,i bꜫp@N'59ۧ~Zpv;z'oqq3ZR!B]EhdttށCXɄA{@öONj ^ԁ$˫:혻ik8\zold9T<|%>ޜGsÎbPvt!Bq c Ec!HuՔ7iyL>WtObKZ*9"fY:ih肐v0cR!BqB< ])n:-(:< ;ΥMvaW\LEqCG{Nũw}qGn[EGP &G{}8 !B3Ht!!*Ҭ]eÆSg7$0(̳w|VnAv^3~w*9sCcB+N,5KAq mfN441x`47lz|Bc=.ȉTQ0t,Fqck 7moaLs=ְ$Sњ=}0bhuvN2 *hjADL"#&[: wPmsNȩ\}Q 9;ٸ9qrnr1PT6 yH`x6{u$s}]8 9HoU_BF^򫛱؝h>Д}d̓B!&Rº%_!S9ڪ-x՟8/%jN*=QQi-$qkI;ɘr7ĦSPNc'[ՂSAIvDsZ$SI?]35df7nFop覾2,o T6am#2aY6}6ky؆N9{I/܋cP[g Mt;Nsׂio$'#;-e|ڬg'u f\Mg'a*(֊d-؋-h9p*NL2r{E$@]|b?tPUr3gǞ\v${q!BDt!a%U0.bG'c2VE0oeV.C"?za۹~vz@us'a?{4}}pZ[(ض%R߰}#2 ^%kI+n"pLlb{|(^>SˠQN\!SpL}k/$;>~ADs~@MLnd&1~:pt9m+X.XκR&hk m +Vo#g;,|+F?_;(XF4c,zZv࣯((!33).$̓S/dC pb㻯`MfsSN[/B!ę"=:h-/"7o?]1{匏M z`Fu)ROY~bq[;-n1$igB( >A(zjm'XHMyUTaIf2 ƍ\y'cN<8irΤD{h$~0s+[w7-4TRlA~b졺6;>F}ݣF2<[VPab@R$2|yu rGU8[ipp88菜tUSTcý}Cko"jB簿 gp<֪7۰:TЛ=KQ՗~}"RNB!#i.P uQU>}Aiv+1rRQF}}i(b\:twwFkGN<tf:;q(y= x'tT1A3+:ܽQٱsZb1NKk z{Ԇ@غikp2g(!nz :t4Qf>FZ-*y`b=#30q{ X[?#)%L?>B!Jt!D{T'* EѢhUA*\FUg+gK:Tŀw` }1Ɨlvett:q=Lff82s튮Aتڌ強8N+Kph8CӚ75zwBE PPT1Rs+ G;VKidVS\K\ngȹWpxB!gB0=qS촶tas؊&+=ǜMQx.͌ /aZJ?< %M熇⤭zxPm PZ#:-=ǏяbKԴ86.D@^ AoB(L>+:ۻ6QlEW4t(81K^ II IDATށ}R.S-f+u { عj3#apP!B"D#zkhϣ|N3U;iAWAޯ9t9:h.(=Pp꠫L&B ҘMUm쩲bNqIw{35U?N:jiFkbsZ'O=N-eeU7pj!?nMA 0\w_|e)j靜OQ g cFm':qB!8$@B# b#04eju`v7|C!x?a[M+6cs灂zj;kc농wNKinef8n϶Uk(QZFyn8gխXmfZٺq=YcoT zO:&,Npbm&}r6gNN'Gk4Q_@k[1tOpޱ I/m¦8mT`LZ2X/ONBwL:|2A]uPQVHy mPƓB!i#]܅/Nq)iV.@/#Lsm%5-Ξ͔aQi4FqVPOyyPLVfGwGqh }Wq⭵z&LB]R7}K[Y:&-&**詴1SJM,l)Kn;}c-գ'>iT4U( 8tP[ӌ{`|s```4zZEu׌:*{ɟq3-;jA;=P6i7y't}ϛB5ME2jQckcDk?!B" Q=1ùMs.179Na\r?M_o;ygMz5e%f;=` so+W2)=i0,j"˄(((nշmJ6eSiˤ+g'rQc9LBW 1]q`%a^ƪd:J$ƽUYd4f&S@FϜA͔Vj 9 ܲFW ιƗ;nKٙ](:#~qL|c ̫(usݻ((=mlXV}#/?[Aھi޳S)7h :R+tNss֮*vnIw?93Ι盭n,f3"wQ'{UtX:V~m]K δp_sΞZ}:g(JϩEyw?O \t^?r~=o!qӼO{ QLG'rL~Hs@o6G$-_{#cXL,^2v]ULLtTW5[_==eњc:@P<  ^ 'ر">,kE$|nG5zgS[QKPU5Ss^}w6OM.N?J180{`Z3DQ5hqso#(ʚV6uaJS'miyc/Q溿>|d^Fnq>_)CI[>oJ)ssFݜ/bqMW2!./}os^pVsTNX.nAՓ??~4|sE_5Xl*s/6٣0NUl ů?3'l>ۊ;8￝D6v젶ۃQ?]=I8]>w5{k),ʧ mSԒf\~LVv0/̼;#K ;Nv$^ӽ&c1_gpգiü۽#z 
\zpJKA\MWKB֢[|H~w\>sbZO.ؒX֥[{/1nE>笀YΠk_!ex{,̸5!J; ^Βbw%MߝǾN-5 yw.E(k77jv u9|{}a~?dTPԫcR:O.j@q.)ݹ>Z+/OgObÊ ܻX$̞Z:T7=i0C8Z^`]ƮFʚݘ~j_ 7 ؼz5 h] o/AM̸J4-Ln}v>~CI nO\p\r?o6S :*"؝? Qpf1/R;ϹIȯ0jwv~f/__YM[Ge72ǹf@)൫]314w:cWr*F']έ7h=|9;0 [nD{/h?"BBN/DUU: Q~ >sk0+a4n ZԌ_z?Q;^^8߾ّ}D6>, D͗IWʵu<ħluBܽW3$ܵN2|+UY^җٷ3U87O ?oJ 닧)3'en8o?5-fZv˭YOc>V.^\'PmX-cŴ&/"и/]֑)~:.+".ZH~ cf kg<V%= 1 Mf|m "&q *־_og͢xGyey#)#LW՛Q(N_ZNEvGq$ŽJƃ`c7+fi^#MexL{yYG,^μE[)AGg9s=( o}omc0īm g gԷI6 gt;Q)|UWgC'ՐɺU-l#q@LZ=dl*u 6/sKD'Ḩ=m|C' ]m1*e[Ž?%^ W]('i3(/wemLߏ([zt{ÂzX3|_ rv8>I6;tvi zbMFG7et\i&k.A[fĴ2[|$li q4>чc_s "SFcF-1}Y}{C1>uM -nb9Sona Q/@}6{Yu1;`7]yz5љl4tx{hoܽ9۸l7?% 1Ge_QLGč)кY7~1oo1jr ox30yG)OzuBJ~gYm,'D*6SkFԑuu:xiS9_cX#b͎z7̚9WWW09\BL[:;fRwճӺwt%CsjC?|F' "ylz54d/= ol"id" =ˣO]eemͨj`l;[n9߭L%`L"xٯ#a2t`B\ɾH QԱy4&~^lY<_d]Yy]]sp-f'tֻ1qXږȫ̅+/M1]K@snjSh=Iz_PFX߈0'@0M hRԄ^ȝߒsu<2z\rq5嵴vϳR?}d`O(]%M1elؓG}X{7 MweVt5UB}%v.DDyk,\9$.bkjaxk>emYfv|PlܴqY{ϖ\eȱTXpX=2W =lO=58kr-f<ׯ1[bekMԵ[lȺ[}ky W@5S B|u%oٗa_73MmTll%gxcgsӹ#(DKZC-=r \s_(~].;MG9WzpL7\9P,n\"+;=yԶũ ng)紛q lSY| ;rz_ՃQQ***u9K&|L{aEc;5~ w3aEDzthHaFub즡bڷ p`;%=SqRN=|7\6-6z:Æ&6nMBSԖw`ɚyo8}V?{'^v rSNeNOՠX;rǥrv&§-FfhA(M:iA#u~tG# 1ؾ= Z-:IKz,Tm_ ]YԶ,W}>u̳34I%lKf.L6ΝbA՚8m3Ȝn*{(SEWgo0hf~|"WIaӯ򂶉2nt椳(}4ZU0 ح6NÝ^tj1\@䦧fxTnM=7P[@~w5_-bۘieҹE>cym_Q5(ִ޺Wsf1~vjk%t`g%䎸D?4b;;4j&cRH= ޹ԇ ]COYd԰%ЙW1+UU1лh[2gʂnc{q j(x0G>cpR_E^NtY8DDe$>j{:L9k)Ѐ"aۤCwٟK6,<yաu G3*mh14h0 : ( zqw eMOZIijvEsCX>b=/sna5rQA%ڨ)OB B`،Sڼ\Ԡhxx ?o}KX>x0l퉙 CC˫0ݭ4ѫNX}un`gEb)[sLim<|ٕC[ 6bUИ4߷ږv|~o_Φu,篳Cز"K۝;gQ%+c?~>4Z:R^PZ\ r艳FƆzC0]ï$89l<+̎68;Xeno" (+h@U$6Ip\)#3aH,swGb3 "2_zqAc~GzOP"闞d\-Q1]̘V;?^4_+ϞŦův'z.+Ta53rd <ta}9Α=wto뱗d3`\{94‚qx6}V#'N12i30·2'1{Є3<%wvU.k;Ainff&bI@V=y4x28!-w_EӍA '?\{f.?0E'qPAvf.N$0lh45Edgh?CC<8&rݷW Kax|0zr(RF$zrr I&%:#6r*êůOEl0+кG BJ~v:-~ x#z8,\vU I"ο -Ur@& \SJNN! ]|14%ok9yWY@UOxBqXi.+`wf =Z L&_0_L^o."P#:ezcm*'/;V3 2_s)i0<EEaŵ"㇓̜C&D=y1c"p$Ec/îv :J;98; ʈD? 
+ݮ'$:A84GuΈ8qS)۳.Lh8†0m1 }O+%9Va0 e tԔI Nꃶ|(@b &.\OcIZ &)!PWTsG-ٹ7v`SLD'24!7rrsi߇AɉD(2,z/|%zd52ֲl|cK/O'Sgz-:I]u\Y\EQ£l;ZJ tU.lGEBh̘O%7eBXN8=uj~0 #O{e23$ƒlX3NLѽ5kﶩܿWʽ`/??#7u[r̴a?zU<3;w̳cȼ[|a,/}FMcDHmHlX'!zH5߹mshTG1y;nIKN\jovUÏ6>ûPu| YMM&ΛI`e LÕ(o_Ȓ}^Lwݵ'`MtF/zXO򯗾"l팈tqdQ mz!XKb$Đҝ>$X9m$QIt% ܍|_ygK.G? ƜǗWo[ĘZE]wWlCyu`{ Oj8bI$(Y_}ASԿ35'6rLJJ$v6j豖%R2u$l}"е}tQtIMZZ*|4D2vxO jX"3~\_Bڋ$rؘόo͟Wc-g' [qL`t'3]]N#=A XKٹKiLϦ٪ET>i5ez OhVN6uC[RF0P;>)`-3Ieױb829|8cMH#h deL|=UYwIJb3+6Us<;5{8T[̺kCѵ;lg`TB t$Q&Zk;gf>t M1L3~bCwP_ާ'l;ӄ1 l|oYmˍ7ªEYcӐ<%UV><#;zq-z5vά}k΢Husf'Z3v 1[cjF䮛ghnfG fFd웝T4{0Eve٤4Oˏ*D @c !K[2q:|οnFbbŪ)S-'(G->fl^=w;bqڿdOdh@U}8s2<$+CK >DzaoV #&3k`򼍋}wϩlKlߞO{8=$IN*2r NKA6֟c=H44;#m GAbX~#))H( hHugU`%`JUt0uqZ9,Sɟı[jI ҇3 t$ دI[ϳ ORTMފoQ {O~pĤ54Mg#f'pL'ⱕaEMf$oa}$vxsӋ/&ed"_z`݊CQQzrej&AS3DcTؽc3x/N8Cw0R^5 >[I6< '֫'5Xsa*48Z_UkѷTZBMl3fH[zUhgb,dʶ"#Cңk(:N:ѳ58]n$M싵"҈0HCu_WIYDLLj=)Ix7dDϐ))?ƶ]{-RNf/gEX͑YOOT:VH #IHcG]9EeHA5N{0rrKُs '3Q$(*O@pϑOE!->ѡT.8o}2`--f !쏟u$ItTN'.EAݸe\ڏ֭?)Ց{*}TOB W:{W֥*298Pf]OgzB!7Ļp)֍]Ȩi)ꥬSCؙH j́@IQYT !un z̦'pY<$Rü)=92]``P7nk .w5mHfJ[ $"5ۙ%Zj(WBDBQY,Y94Ѐ,+Ȳ|cWP5>QE36' [ ֠ M9)E СV*#]Dur7{tB1o4fNGkTj<\}º&^cje)`ρOiw4ad?r 7hބFt@9^y7܄-, .KSnXk]wf{EC`P^:5-jKD&}-?q+DÃ$I(4T.7!HHHbTŧ.51~͌MN؂0c tj=h/76uTڂHN AhL^H!6"/?᡾hpSU\B֟PeWKQ)llZ[NiPT-l((Qrh} VUHUK~܉t1{l:ZT΄vu^KJGdj4#G)mqQSRFEp5' WS9ٕ-D`kh¡ $OEQQ)VԨIS=;Ιm' '];#S(u1*_NiBL]:_> 5$ $黇HI$|ƃ+#HkƘދxz ѩx~{싟NY3kYaʴ.H(d.{g$7LMA|` sG6"k8q!7x|)INIC_ +~J|ȭ w񇄤Sx3oZƈNNk72z(F4SE3> X?<`@aڨR>Z'~_ڕs07dvm `b$ܷ!ITe|~`|LQg"3F$1={p9S(&ϦWe,t>ݿ }`oI-"9ssהSx_m)J pTͫ#SA&=5T;jeza;v+KJA4FOÍo 1DgϿ>F12[k6d"gJP6&8vEҚ fҷX49_G/GJԴh{"M SAvQ،lGVھ^*aNnKxGix:b6(j!̋w2dx7./vKFSWDm^}$?F8]B}0eWtf:`1.mTMcwy{La :. th^~5{$ap&?0~~O?o"nIA$dJSmV,>eo61>®zkHZ#)#p<ن68ܮg%# tnf-FcC-{kBa  pPը?vku\EvPȡ{ɭILT1exE.  /b]ڙ2.  ׺t]LA   U@   p    \D.   
W   U@̚ C IEɴ !/%tQ'|څN E_i::.Lu˧].Ko]ڙ&,˕> AAA1L?z{;   pkAAA* tAAAA ]AAA"@AAAAAA* tAAAA ]AAA"@g( W>/?/Nxc.|X\'"ǻ\Y4#?S&ZZ}Ih9(id_Q!ԗn"sgx?N@s{x8r#CKp%+zNbT_wg7Բo޵X IDAT{| naZfc7 6n=(o?8WF-y} kb/xE΢H~=w\_ū=sCQɎe_pԗ8cx[js}\ /|ft}5dDpd?6|;VI[0)5t2w(ByJ~GHОEv/M9w33͟MOӇ-|[QP1}}=W&c퐢(z63v]-Z[^ZY~nCPۯW>*+ڨ)æ3`$n8>e}FR@nYp3i.-U'G䎻F盯24:Fx_ϵ^+]Tel` Ǎs\V.[IDYwg> ^KYڃ z}(n5uX2΋ k V| 7r7S^قZN<2hM/c vdEV_EmYRc ߤ m& 4z|1ؽc}bC8oQ An u4Xx /P5Z,7p+h >jiYC@H:ECe _fBm]#FKP7k .$|1ޡUg,_˨ȗl#)njN}?ę5P]ސEXVzv%ћvGAB.˿f~/B+ّ'Œ0F_}Lh²ܖ26/]1](f2w~0LJi$Ey`A˔figDy ack-M˨MKt#]"-I}9ݼZNpF%Ly-ҝ[xԿ~TdLa|c!g?ym_3'#^kn8 uI:QVH7O)'Ǹr?+xvh՞Dn}R# OTODV|~59-~6+CqW:o|v٥"elr0j3+y <zR&FALW~/OZ6f\V\ d748uA]߰r"i 1uRe7FcپP𠸂zm谳/!Z^nAe:L#w7l:ZEmI._{_JH8>G,#:5( }L2_ol&vbM )89gk}= MSA*fߚn:{RcABc֣m9}+u3 ]+( xud؍58_|J(jpowPugIEɑrlRSub{3I?w{/qӐ670d0fY`g/LCߖ, X/1OϠO>:hp Hn~R[e6"bx:՗l#S}b; aUu $ZS8{n0k~KObu!o۷?02';(ܷU1̿'>ߌ&0bePgm2]k>aJ@ڍ5} wb䡡pq1e(=,b ۹S{Ã=t6 +^j Z b05 %hz-=ʣ)1Zr9Z ){sǁŗQ_Ad⓵ߘCx4֛M.(H™x<~<9ٔԗq`vL#lZ }r:uG*3NVMP]=%`=3>SO751Gnf@rjC݁ObO3a)$G۱Ee/guT9hG<(N u"O}8IrKL>&kɨ#4jsJ2憣]{F9= MGjh3)?qП;ǑWã6{s?#Rжˉ6 М5-MOg 8jΰyOdxw&ƐX:`:5ۑ{b }G Xn<2/G26ѳoO%`\C {7JI֮$d$ҢB.Ryu3aQuUZ6mGDT_GGES&OGͬxs@#v҃R1\r;7Pydz8o;߬CXN7A}4;JI/vIY_ȞňpK?N&1q9K )d_ؓCe_0O홖!2 i.,'ZV^ഔc`(࣓p{<ȭk-8dN jn^<:֐\uȮRwlX֣( j/oWH^b]:I@Ra РFeDrp6SoAŐi?ą"kZ**2&OqrNGQ\J6 =OTeJԛPoLcBQ #Ij RKǟޱ!h|zX+,qGrt1vUQIU"<&-A#o_Ɔ|B60#GֱNUO\>܅TE7OM7HTF}ik~=[r<6":2S0lN6؏!Lk'JX;3FA_m“4ܔLuxx.ټd [;S]Ŝ}w)1,H-jWdmegUjdЌw-狭hjRC)w̲vFn!o6>JpE5ϐ[Ⱥݘ6~ZoY9F]p b/d^HȻҵmioXN2f@LBSyl+⽵ar͓5$i02ym˳K8^=ӹؼbHI C#pL$| d禸(?} )ጻd䡄z+ٔFj]X5_DϠY734RGCH?hzByh0(vFU dN: $T:lj].l/twQ/LC|T}.}:$1$-.$ɀl1oi BG N{ZHt#Y4{Kp*AtJ!g՗'5ԛ>a}-xTsBXXFBCjT}z%}(ÒCQ@[2': gfU 55u8Q&"b2}׍sWgC䀶6͟Ͼ;㧕;%|p'Ȉպhjjw("j2nk-VDARGIy1ư8|E.*Ki4cO<MOTX2C_㠾G ThbBQ *ٹCv3yMJvPxb3,[O~Kfؒ%ӹunZR:l<\CKm!',aֺi "9HE^NV'wSM$_LʩP%VnmG8jN| 6e%mJk&K E2qxG>}S2TAoIZ@xOkIm"4,9|b`C֗~= SJ R{ӻ Af-߳^ܿC`ω<2*{yT |gqU$N3uGKl%[c8^DP T$һ?M.awcy?R񫱱4"G8% aQhOAKяxُBB[∏(+[O-ڞJ~;z87$#_I#(* F qņc8SvU$0{Wë߁Ls0($cADJ϶u:0x%ǀ^M{$ 7dTKMvza3"&\^$Dc` J Ip7p(3rzn;o¿ 
?B;@ḮŴ=P\LBB#Qay|OgJM3  {axuDcj+.IqcG쭽̝NER7nE'>ޗ _tGyjoyavjhXDZ=dg+$>sЧ#^=t (-/Asp8^@;m.-ZA8VzGxs͇nf0?=1iӦQ<"3[.GV'sO0tսr j׭sExyƠUj>vrYē-”2GHiO ?&5\J֙K6;(.  µ  y rxI%XLAAkAAAAXh"   W   U@   p    \D.   W3kΈfAA$I?z[ B;S__b҇!  ^`` &Go/~]ۍ,W0AASըX.   W   U@Ў+  p #"AAA]AAA"@AAAAAA* tAhGE~zEʎ:>]&=YeY[[X"ҧ=Eg*vYeK])-n\< K㴓w|'?ϼEXPM3zO- 3ȭl ٰs bK(+ĉO57yQnGjd<{;沔c傽d7yGqϑ4I-8Yj1I'EE-=$nmg5rb<'U5ix$$yж6f; W>ͫzsۚ YT͊3ev9<|Ӥ>Alf74)R u81CM#??dxɿ0IRP+/,$mszv#Y69lӕMQd|}/O5Qd*rp=/0Izܿ +}?+ B${n^6*oTC|-u (XˢOT1+kLݞ=9l^r=vzt-Q*ᠢ(Ml؁),ܕ~q:@V'iҙ+<^UҮB-R6iQ[IX,;yHdj9'Q[EuYHye^*̝qƆR-=C뇎p[9sOs v$N3PRc??K娩LAMQ߱^L8z:_F ^]U|r'_'-ɝG3Ndw=YAys3y fhdp'OSƼf8|;G˫YS la_U`8c&sꃺuB~^-qCok=LCy!-^R3}u5_XwT͡y}1oĜKVH^ᄍ~<= }Yvce|y}L`#6<̺Ulڑ.)+Vo+OQ>.We"/(=_4WA(.hp+("0{yeS9!^؛5d7hVpt1Z f*K3Oҹ~!B3aR\.iMCj| hۆ|%If@5ۂ7*x<~t)䤬 -*M03v W8-jMӚ;F<1\chC\d-Īv`3sl^<4#lD"@bj\v kUG%RprDOZ/F%-6! ";ǫmƓG`:uGᅛd@&7)/y綑s7oF;qi&s _gj _3-%el?7!~q&]F3a3{]T4t؋]Դ%;i"$L>H 7BBy\ϷOaHK?I(?zgNFsSv*37Nj[%_.('Nd*gp *Dvu5-1п9Xw|U?*lߒ."\vW6=yح4jGx[Nzә2)˾SsR@W A$#u%B̀> 0,FÓ9Fb9 bb3;9WD=m  2o({5a$zXl#+̟ڕ s4<~L^l:T۳6e" s#o#m|%3%V.7eUml-hk_@D2~#Н+G V`"'ER\Hvf&o,=FF>]c2$f^j@31\^{l9ƧGūcQwD Z13`ޝ(wslxz ssZxM il,E0/hk7Й;^$_?ᙽHyl/~=xd-FFF+; jN)VZ ہJFLN /`l><1΅`//52D_ 1 AMŦhjZ aJЎ2ЪTxyQ;[uzJ!%:-X<&Zk[| -#t r{ Z|亮4Vx&𫁡hU32z 5UNB !L4PU߈O|?!6f=cF SYWЄD蒄ZŬ]Y'r)s2'oV}n7 } %yuww'-ĈFhTjF- hѢx11;*/k[`kб1˹Z[m),^zn@moqbq.Ol,IeTx5h?{_Gu3r{/{Ƹ% MO6!T6 NMid1w{-˲dɒޥg@Dz^9ssc_O[(H5 NO4cϞkV9FbDb\!sr1u>07ADxvTdnvѴ.JJVa>g&Z+c\~Nq\Om=>aO)fuaP,D* `^!c*GA qI?Bu^\D"{8ND;ɛTל.oMLA|a*}SC^ Jr,?gh=wةmOk)r0ȍ|7ppgLpJ._{].A*#\4}j“,o$p0?[&2qms^JVt2 7ʹVozu|n7(&0~i^|f vN9R/>]1<\O/f5٩l0pKQ*QZ'//ec#c2J _GQ6Ϯ|̗+dV{)>o<f7Pcp -bӤ+Aho羻ƓO{^w_z_X-U̍m{6_)U-Go㭗^N7ll23) yLpb1]'t ţGsa~u팝ckr(97dBn;_uиIg LZF_` +4#f@00̓lgMY<0?EeM>f!]q >B!M޷'`%h$>Kb]#ƽI#@]gSl2JS1gR u36'OXr67GOgONO1dlX%>Bo)Qӈ=% rrvs7JgbI,ZO :^WeBvU'oS3_m7*}x,.fO!$5+Sg-:%\?U]G{HM ? 
u]x"x2ºfC:z=(LO ǤhtgdTLC߀_4O]yLUMtYc)Rɽ'V;3HZ41 \^\99TO|> ԃND؝$%Ē"/z$8Fh6%ppW</;Kz4 ODiˌHc:inA;qϸhNqMrtwsu\5H.ZL^@6zp04ꟷ=:1.#UR Lʈ({3M!'nʽS 5tW+QY̛DOƍ+L ?|l/c}g:_-+&o#o՘}l}zcu~z0FJLB*qNNuBѤi|w^6QO@ppUtyWP(&=B!B|F#F7~rB!Bt!"2 F!BO. B\E>h8!B!%]!B!$ !B!Àt!B!b.UD&B!KW$N!BR]܄B!B !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! B!B!0 ]!B!$ !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! B!B!0 ]!B!$ !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! B!B!0` 0hjG Bu@`4bڱYMez:麆R`h4|"r o b s`jӺQu#fuOw7Ōj:zE/a4[q8/`5b00.v:C׏n #"ryǤzЭVLU+MG Q &,Ɓj~@P!e8S!: B\tM 'w~* #2 ?$g*V<ί7{=GYf5HWqV_BvZ :yc [{_~J¯V.(;)+,eX7F kb~vG)3 #>RfWEs^ږ4GK1ZurJIw/Ѷ{O/DOyb]OkNx"#xYkb};?w fD=;y[L!B|4$ q5ut]N֬ an2rK)p@C[cǶ-fr3!c*=]"Ogrob>YWrZ?ekVY=6w>3cFg^ɭ I57_7t3\&=w/aQ d`;R`ĕIe\1 {owM__5t=opeE_u6cML<&BӐ.HQuVrr4H(SǒGfN]ò'*aֈh>wus/pуbS8Nhj5Ƚ N5n@4tg*Mkns6qgdn-!?]]>p]nY?F60+3kfA@ :B!'t!JN[޺^br'rm(M?y[a.pgUڷIkU>$'CJ]\Q>N"(&+IID}};"}º>b/e_lN"/C[b#<"ȏ1C䳡߷%/B!JH@*uzoZs(M `v&3WHJ[먾.v9؃frX03Qepb5]WPwL'*`:KӸ(@y<~ݬx{?A'1!@?k:bK#S XMsvoNlj EiDX n\w oauX4.cǦhKkv$2㜘M]r7 1sgM7:f6ĉl':)3f2$& zڿ6G5әA:!,gy

nsmRW[ws@ &ܙ͖/r'dX@ q ę=0K@B!>,Ѕ@SiM$rg& cbcltd%l+?ުIEx.ɞcMDM[fSQ ^Îej-6s] f?s 7p4,bR6|8{r\[!Cى=fq2)fB.Nl_kvn&5SKCթ3z=ӄkx*SyL;܊WaQZ"!VJWq̞2XsM3z#}k6rN{KnjN;5cz.ӯ58 T]՛ڻʙwPٳq5[*Hf.w\?8'clXM.*{e3T{uEֵUEd$:1ىt6Tn?7c\D2Q7p_J*|fu;,誏;ٺkmΌhgϛKX7cNK=H7Y1̿k.cr}:ŋVPjb0{eЂX3{̓yLJň|:.=M#;: JSSI`*]fvN{"v=L[;H;=[{|FwO' ̎U,]GiGT?@Xlt| o,YKٮ;1ї1B!.t!2u0X'OiȈD-6QJc[/Q4^AҸ0b$<.%lUy mLJjS L˄$,2GOrN/?y{]Gԛ21[ScJ(` {MhrH۽6!VꥪSMDyז6xE1vtVjNW3"wT3I[20#5Q=3 |躅@Ko FNPvl'`ܗN$@!jI@⪢LFe %΁}_mDǦqnsEE4A?(FIS)Kdb6꣣Z*߯ P -*̨3;po>ʼn-C3xG_T톌CvBri11w9+Q$gd< G{Q):&߻|p̵`tDb^B6ۂh2U OVZ+ELgg7-y\@y:i>t@W fb&17Yhb1*hΖ&4Qy8^ #_ھU{E7(*L  F;ęN>X3y%3%, _g*yY3V@vNSsխQ{vC=hk?C䚑gYXǦh%t %IlJy)&[!"jt%li$#JK =T^'B|$ qUQp xZt]ŌxŌ)¿~n)g>QMG`0`4QTU9D1㸈XIqe#.gSuV/@L40:UPFλbtD0xۼsDرXih6aY Mѵ^?!P~Q {!1ī&68яC{֡cZ-[8t~``0bB:3Lu(qkbGBEAY?b i :x3^T]h4b@8.njӇ>`scO/ }Mf _%h0(MƁF<]G@Tj17~+U[m=Jv_"h3fL05hB!$ q1|no '}dgw`_KY/}4MHVͿr?g /O;,\P8 )#-=]uɾٺޢ(7PuCOCWG3{W=ϾU5Dr;t0XlA1) R0q1U0vAAAAQp߾c2+>S A=8>"꺎@1a1U4[ϫ$!>hnRRRpxyV/5BCv)5Żkdz'bWwIY\g׺쪹 lYs8j 0 hx\Ա78#ԵvA՞U:=_*3? !|$ qQK5T(;yQ {v⏤pD.qkmFM$cfcެq$Ҳu7ߕz1tPZ&(.7<éٰn ;tl.%SܗvNՌhkE͈ hA/}mx4v C[e`0t87C&z<܎w;Hjw%;tN{ջ-܎aӴz!t]G yh[wVttBNn\GE7sf IDAT˜ FXi[rҎ#]t{bǹG(tt|/((訪΅>A_":~Jv4h$_;fS98^ }XЌV;Ng&k}]? aV'vKxij'gLL'8ֿ6nڬ\Z3B!.֕=H1|X2gnN]ƽyVQT˷c-d$/wP@pQ\Gp(hx}'E +h.*6 //e&(+OvcFrhFfE}E]yRWkx%>cEIQX.j^!ӕxl+x{G]dd1ulswr>8n6q$}s꣥l i=8QHc)JǨpt{|~FV`sbS4z=x=!:i.N7w1 tħyx]r +c){}l$S(~躎aFt w\ QfҚT?^C5uP dcBnN#}!bI@d$NN^_gԵRW}MKٗWP$:c ӧ֡n&'Us h{Ou26J""bP0-DmIϴXG9;}֮cu3A/hm=~by`#{42m:MYtTQV/^̾>KF1"? hst4SUL_ OQb ,9¢<"./{%NӧNNl\kRJb00g@*Fg W9yo:AWE4W6Dd%80-g$eO*7g ]I zGRG(n4q=+X9iqagXW;Uv{9g}蕶Ꮁꡩ0-ށf&=kXl~@ 8Pf`tY%[7+FŅdloT OK%_z]gsQR7štfךr4~MGтtVfDZ*CFIB!!B\ h Y޿ip+]W0-$SHF!t#݌1Krl_yl U%*lqOy!z]tx4R#6L6qIثjٹ9vC?[#xx0)/3R/ q<.^|q U;󻝋RP31ә7o6Y16Ɉ0m{8up(´"7,=K ܊x_wd޸D$OC O>E`"Iu7rqM3w5/uQc9:NlxlZժTVGa8z`m݅ę9ӯ`גٹdFbZ{]Qwg1v5Ԭ<7ZŀINQ ׇVFXD<&Μǭ7N%'>8NGgg&B!BTB!B! B!B!0 ]!B!$ !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! B!B!0 ]!B!$ !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! B!B!0 ]!B!$ !B!Àt!B!b.B!B ЅB!Ba@B!B1 H@B!B! 
B!B!0 ]!B!$ !B!À.B|" p8'::.NSS>Ʉ"11qy^FuÉ'"".BQ顱ۇ/b5qGXIv`Ph@HW 4,&NIZFЅx^V^͛馛h4(7rOqrUUrVZŞ={nә;w!}m\j%?dڵkټy3B!8q"_= Qa N$nfdJ$MHcNQ&](++giӦ1g233c۶m8 @.ylq^Ԅhjݍ5+->2`g}kגȍ7ȤIڵ:Pe_=Vj*Vz# ~ pY>r꺎]ƢwbLJ%w6^+xrKHvH:okoVo@EQ|Av꺉qZ(Lp(h=Yx+}8\ V@vm6$Lu!u}Զx:A! &h6^˩>')Quu' 3/ʺmue}!ZlfCBzCk_`w{ yN qBq<ҥKwraV^Mgg'!Oo%i<Эd/ ^εQLpzp ?i9[OwVtg;EQQXjMMM|_峟,z+?<9999rw'{/oW4r_@k"6!ۺu+7ndĉ|_;n+_ wuǏgڵ~|Ԗ>C=C}[f!zq]EAa>Qv,yʞa ll/k8R4O6dS/|}j^xb)!щdL{rKCmi:4k\ Lfy6)(5ErKm;V/w^7,gŁS3IY&]fULڅ"_s7**wfܹ"ivҲLtot[Y%&5Wޮ߃9c>{$[YҸ;?c kZ-wYF]_t_3>s>mW>6oj_ѡrI_Ϻ:ό33f BZZԧǶm1b;il%9;lyDgb 'ͭ%Ǒ8SKk0a#iߵ^DySpw^N-|c38zoEZwiZ IHH8EGG3}ty?>3lL]+K?]x𻅵8|z m  [>2!,Qxyv/'_f^lcybh7woC Ұ%/S٫+V܅7 g%=j^xodzׯ =5|P+kh'<{6_o =Js{~ou:=x _֗qzAB:;~L۹}Ο|( ̴wGa}|"1՛ROM'GR{>O _ڜL.1KNX%NVu}OZknHqr)}tvvs6_R F[?or[%fWqx}V8%u YCo<ƂȩC7{)[OIݬzup~g]ŸNܑ軨zFnn{~?Mrr2.X/|Xrc\1?FroWf?95\SiHǟ=? S]x{xǹ3CԬ1OUMkVߺHuv# ~߼}/p?xu-VRMWF!={@PiJÛoģ JGW@ xj^ed:Tfonv>A2%k_ȷ m}7/^k9'GQy}>u H? vg@`}Ok_BEh>K}c =9*ug6_d <<- е ^UAW BZe{51=Nc_]k~o׺)4u:7~|mL8z elۓlSo9Fdx u҃.AQNsmvCv;& ]Sl#my_<Ԕcүu &;q&6\/D{my#Iq1M~946+%H4w%pxL 3K6q)X&fNᥪljdRֆ`8ރ7/E{Qmeџ~ͱOy8+LĨkt臨JMC̜N͊+ɑ#Y[ES{QqpEvX,N.j> "**<}}}8I403o>GNޜgzfb+ÔPʸH VqqqW4c.$r+k:N'-9q`i@h:RMXWɰY0呓ъ6 uR6E0nr"&1:iEe c C!k/ha5(*Na/, CV95XQUb ::(%@Rfnb82sg\F:sObcs'Ale dQQrl%0u у`yO}ʻ}t4a&+yd\\J&|ԈaLC [׺jW]ܔIHx*sh耎w59tTg4n֡tPH w#ZOgڂ )(+iiAL6%n;FgO66AB4b,C>s dgg Z3߿ sqDO?Vts3e4IT@d\'h ht|_=~S8dײ7[]Mzd֧!&5>SMBmT5\Rzll,ܹu`0țoᠰp`2V &19 qU7#VbS@Z{yZ85~})Lr3V2p1b) .xfB?H#QxNL#MݨLƈ|_fJ$6.C[95Au'9wBɠ( Yqa$E󝮃2x R,L_}zFOS3g:`5ikꡪňjzrQ̚1T&C^2 UT?;m&!< ʷ#?H˻Ig;~^CKC=m`$:9wqru_U]==2  BA\`=~^("*r#}}twUWdLIwOÇo}SO}]>ǝ dJJ*RnlL;okw]ZYB D 'JcO@+p crc|é A` fܦ3]ZxŒUMMVPTPUPO#9tANeee{}o9p*?s=߼o|^Xbkb1QLIf8C0 nDzI┖O{\ߋ>>W/_<.,?d.nv$7\9j eE_ϩSrGr]w_>3f#<<38O:NY5Wۻs؉j8;z)ʎ\T4׮۟: c$'MkD8T udobUʁ3&VNG kls)Sqљ81q 9d{wXJKw770-HmvP_KsƸYLޏfcQIH/%i8e IDAT5ps[:T3{‘|g.֪8]eٰo]>_x"I{i)c;3̨װ?`Uřr»q:"ۆ7E2[hhh>Wu]1b19ә?>%%}W = %%sɸ>T mc;"ir^7`H$C/L:^7*q bI$KH:6af]cab)RXd2N~2̦ڶʹiӸ+7&nXM,$`7!9Xb8X \_+Lt,`1 
$6\OB(K,!˱nq2k֬~s\W8ꜳ9tZ0VDD}}-i˥fL#gv_v}ֵfxӹ8SjJ(^2~<O'8}m2TIDDDDDD@)Dg #cS3c.ݝtsXRjP]--tg|D*RV ])Q5V"""""A#"Yx4-.?'}ua+.<x Gͻ{u;>8ysi{Wq pquȾ}.eaMDDDD$4."C~ݏFydh}Q־Uh U+p<~p!#ӵpeGoDDDDDL#"Y`lt : 6VٮV4Wi'N4?ʗ<wl*{LLy.1Fa}*IDrUQ."8/8G[N:=+,&4o1+VBe8&ڇY1^{Mz\LW%Us QKIeC /gE XC}g{^],geKA𶴷 E,:::0M)heNfnJQf|7aHGGGQ4gYr|7cuݼCSE$"K6[OVQ3y"3<]|63_z~g%Iē+u &Vோ3wZրmhhP01/u7uwYK6>Ϯj8q -KpbNFh M10 tEXEF#, a@,=R58rw^fË{u"u߽e'_r>SO?{fkTy1{+koQyww׎XUz\nyeu:3ؒ&^X {Բj&TP/BзH$&qSw&GQ."*ea'sqi*X%],X~.;O3~XJIf=~5_Br8g[Mi|a~\ץgb!CCSw ]Y5NzwZ]e΄2T&٣~aO+ CMG-3wܱ-&T$5v˴e#MyyyQzzz(++u-b6D"M)xa!LXo`njh4D`,RA[ID NQL&)+EeԬL:xuM;3=6Oab{{Zݬ8lz!k*9Q? t:MiiiW/):- C2z4Jt1qƘW t<Бe-lnDU-mKؒ&5PTdM)2kL%7Ɛ4whᵵ,aG BCS˝/cNS˜I)NH1P."2,ˢeuk/o7vh} ]Y<*5tfŵLi0IluEDhfV*En;3ݬn=]8l BòֶYUs%ujؙo)N"(WDI*,mh#MGC^^YQ3Ƴ*R M{)N"R +*ED:i4veBIQt>7tic8q 9xLSw"Aّ b t)燴=wdxum;ݬ!1@6Nc74v3cNk"3kPP JEDDdg]Dd;cH{,oaq}˛z;}(5XW~S9T\Efh]D Ntz]nuؕ%f[<껙Y;V1n, GP +4H~Bq4UGB9c 4tfxaeKhvΝw=^\;Mݼggr1INqb\]\P."21gv#mUŬ |hIL[c[Aّ ]DvjF #ЙeiS/jc}G&+XkY\ܺj9U)9EP:(""";#Dd_CcW-eM=;^›;9dXݥ(M#""#C}dRID Htrc {]{X۞!wbQ:39}urfIضFd:(N">(W"RpF_wgs݉#(4ؕeQ}{LCLdBE*MFqb\]\P."ZraHCgKyc}']Rixt ۥ% ]{ >1LJEDDdG* tz]^=<= -0n'pX&THcnlAtPDDDvF*Ed;/}Z9|YT,c||WL&1Ƹ}<glGsu5t/'^K18ri8 㲦-njb][F#7XƲJՔtTlTHD0a‡^}YQ^bcYo1]zfٟOQ{>~c3Je1+oVPrؔ'3µ,oaM{tNyБk;8C0<No4H~rUyCU"Sm\>[2M1Y{քØͯc]aӗ^%fɓG36e\z]ʽo6=n'vp˳S+z^_Iǀcl[1Rm{kù [{y2YHOXoM#M6ڲ)nz6gY:exK+dGuKmQx1n-Xj'#MM#"Yę1aӒ^|^߾_/lb܅3)uǓ^BU4b8fͱ]]]455m1}] X hp`'}1u݌<:zҬn7rĮ31Igfl/ CzzzܒL&>innw3Buc [c80T+n~hhSiҹ^b=[ ;"IwXX LoY], 0&j_8*E$" IPVYI*۔P[/=¸S.}E/#B `Y6q&uCX1'n6Xߕ***CZ{\6tdx}] uҕmlyYO"5kے^ڐٵK*J SDmm|hLMM vM,4M)xt (//wS Z2~|7( lyf˻-Ah,7@.{^ Cp`vg&GW:1t=Ҟ\HG&ſ۾x8$JCx e $&[}Z&cB]muoWp09'bi|ZO:zZ&͘F;էXǛmqege>4vxC'KXْ KF,q Vsq̚Pθ1aQ_hcBCБhhM6Ӷϸt:39-'<О~4 j/2=>={cOfV0{ru^۠]Df|'лy|1m_9=?z)WL3#Y|㿾c^K&}'\8\sXۮ);wYߙa13VSU׍FX)N"o`!OKK{:G}glt|}B2[ (W"lj9M0biI%\^YJ)q1Ns_V4eeٻSLrW&7uwf+~ְZ;^u,oŕm3k<UУF.D8 -ˢ34Ytdr\@L@@dQ2aWWbU38xv Q7Msl:^տN*BnִyfNأ='ma)Ah}nNc734vex~hpjKL.n5JN3cKL &3O!"!4!i˥3Ú4o&3S.""E'0k;x}TqȮcٽv 
⿛H1ust=z\^,it8>2Z@D̞ͅXS{JJ:_:Qdg17}.{v[GCW.l_qޞtgGܚrUQFD N9^X ]!鍏VȈldmdrvsն\?;{RG&翿"; #rW 08M@S4+J$,A̶H861 1crٔ&, >%NTXۤ6.z&[:^sAHG=ؙ;KG:GSwW/5GF+"Rptҕ֑l+Vc[Pt6E-ʓw*-K:$@(+پv81K숟+J}j*n͞:9~NҞ]EilGamz ,b1n/cݯ{& Mi/%%Vo\^Q'lG>(W"]ޝ1m$׶vIW$tE8H1P*>*E$2c A7*ꏬi tY|rܱp_ |1w椣;/Y.q}|iѨHiEDD$THDgnэcY ~h:xט}/x܅\le&?_z"~~>[oU/qݷA3'o"|MD$2sg pj_ >~ %A+=Y?:S?}ΝuYA\_5>>:)^Fշ'1=mg}Ϙ֬JSǡl]?k]D"HJlv"ɤ]-?+KπGggdU b0K\I%) vHz슈MXDvnAmYĝ7]㭻q㧕 x 55I-i, KVR>v<%fXXaøk۶qm cASDAT( },"#rU#S4U4aŰmƶVz[_no8> "Αb tq> 7l१"{!K_fA})>6?+%"hH蒓l]}j IǢcy%oqwe۱g^//]oW|^ӜVs?üj";=et\D =ϽwS ZDR\%LEu]Ah{t1"R^<ͬ`:RTڠ$"G EG_4]D FDG_4]DDD6hH@M4"""?*EDDDDDD t)8b+?:QD(W"Rp4V$tE8H1P*>*EDDd tD-"""]DDDDDD@)"/IDrUQ."GSlEG_4]DDD6hH8nL֥LXCIKa:z}U0k J IDAT|5ﰶ%fڮSJqX Nդ)Vغ+Om" TWs^{7x6{~!͛7_m =.ڃ.`eh)"!Ӽ't7/h#qpBκu%,3{Rrvd2,a>D"M)xju]n7,X, 08znKq\z48Ӄq +KȐ$kk oeX>+%a.\njC`ƙgR͛P%~):{oN{7,t.Y",9jP]`S.Ĕ>Ͳ s2;5!&K,x'ySS:Ӿ%?O?I@aHDd +,)"EjT}[4=:MNSܣ4=Mq>h{tfV0nT*omVMt^DD$T&5"""""""@MB<b%" t)8b+"Ny*:JD$:"""F;EDDGlNQ.""""""RTH[)tS)V""ѩ@)"R蔧SDDS."""hSDD$T&"""""""@ (bDD TtHtN "E4.~~ 7)a,7l fWqHL= .9Ӫiy>n/HWpsp\ʰSbTthtユ5cOk?23uOfx*R>o0R=ڗ˯_9rCYr<9Ok! Ey=ũ@!bq+mgUOX|>ɡ3vw;\7؇'=cvq-y\uDDDISEd cRt/y;84𱒐7=CKKRbɠ&ܲ)Ԕtd;= C رTd1cAaE#+Nh?N>0TȐ[V9cƲK~09<&, AEECy[.< >~ҾI=O :l[|hh_ELMT4bbUTȐV s9,'`\:{=JJKsL0kO>+1&1e\ł]‹q+9ֶmGij[Gm=PX,AeYb߼1m0T"P^bض~<yO[IDtl=SxI|,z}<9y|5~cy+sK~#s&{udR)"2(G8h{{5S[[0 }D"*EDDDdĨ@ ?(NwDL֭Ngiy-T5vBU*Uq&M ։D:()V2ܴOD]Di|K:Y5\|Xkti~iWcfqonf~;\E'vtH)xS)V""ѩ@!t7az;ߛeiܘ^> gd1<ֱ*M)#Աې6i]6p mŴMw !m΅Ly*j$ŴM*m7eN.18qL*h'[IOK'G\EC臘JgW/9DW{9I/qԸ Lag3wRK+,)"EjT}[4=M?V>8MqNSܣQNͼ̟[G*[DDDDdh,HF. F8""2ꩃb%MHt*EDDDDDD t)8)"Ny*:JD$:"Rp4R hSd$(WE8""""2bT HЅh]DDF=uPSdiNHP."G!E)OEXD]D CpSbTth]DDDDF  T|TȨJt 7S""ѩ@)*EdXD¸D]&"RHSDDs-r~~~yn\1K櫸 $iմu7V+8h9\8o. [!eة@NFy*:JFrU4S 1c Kg[Yt3~ X_]+; {e\~>y<5/ ""#G]Fq*>*Ed /?C`ZMrwXO? 
ڗWbw`|C6)_w/e[#m(?e[Sd$:(5c0.v$c{?mg;ymwgArizw#CSS e8)A; eSp/iτ}2 C QOpF^bQ:#/h1a8wr^( #"h]WiN?ņŋ}_Ҳq.Dy[.< >~ҾI=O :l[|hh_EQ ٜh'):*:Ū8@!r0Ʒ`]?/݅f\:%9a&'AI2g b[].8֕bk64-RӶ(NU, |Dz,bXRGt n[48hX,m̓GgYf;i~l:x'ĉ}c?uxKs$gs׿ʴ]O̷An Sq|j< 7HhceM)@7<.j6M)ha>D"M)xL&.DBAh=:Gc<媈ף<q4̟[G*[DDDDDDD t)8#"Ny*:JD$:"Rp4V hSd$(WE8""""2bT HЅh]DDF=uPSdiNHP."G!E)OEXD]D CpSbTth]DDDDF  T|TȨJt 7S""ѩ@4@:+ ]D N&2T DXE<b%#A*ũ@AF.D8' "gBt'MM-z!v)S&] j\KS,gI:2m׷ b2z 1['~DXp>%" t\,MXdCNOqLEOYIaYc8_.vۮWO6Xs]S-4h If*~M%ߝ/2i d'7sgb?U~==o4}J')pS)V",G#"2$NY G?ؘJoϽΌCwdL8`&}_UɽoJq}݆~;yۻlr-ڲˋi B&{ qTe[H݁ˋi TnMy-d$"2$V̡ M~op͹{ftc;1bsNql 6?y;x1FqnFQ>Mv Q sK rUtU4r\QQ*Ed sou\IL` cc>֒=n6PWCM34},3sbbowd2u+a>Db7\$ }˲bnJ c _SFy=p֯@>W!qۗrۏfA\هF9hWm`3㧓y1 1u>|dc; f{uDDDDF-Mq¦]D ft 'Bet㬙75N7K7y1߼O,xtQ+Ǿz?xXޥ;u2<b%? U|,)"EjT}[4=M?V>8MqNSܣQNͼ̟[G*[DDDDdh,HF. F8""2ꩃb%MHt*EDDDDDF!`)>*Ed""Ny*:J$4@ 7)V(OEXHPFq*>*EDDDdĨ@ ?(NGzDXp>%" tQH3X t)8:p>%M`)>*Ed"?*wpxQb;J@A`Yal;8Ǖ"R^&ضe)W fbeX,yJ=:NR4#rJ┗:#xO(l,А>'ߍ%+&"""";d2I"kTȐcԺC^@q9}|HXF;"RX3s#2U$qRdhNqFyJ1&q"Rh, 捇p7x)]KD[+Qs?|$ʕ-Y}ï5T/=Ǣ=m1E3W^V~vCP3l.WO`Oਏ]̿31 =o_`i4"r^l ۏ4nO 86cY^j7Ln9jziV0 ֗/OD1|o3-?>H+"t;Oz_-jX|8KG!u&Ûw;O/=nW2\S\__K/[`v*s/?뿮)*i~tܼB`w7U^_˯I{|a@Ic ^d Baw$/p42 14f sJxPWzL_MuŔ $A.>@A6@ͬkIGmlvRƦZNyV/A9J ": #Y/iB=! M}XR|k(<†N*~7"mE>@#ʄeKhΜS(CPWp's0}X =p-wPDٸ~!=\/CгlE㟐g0 ے`ћa×q;aUoS Si`%ٱvrlpk =rϛŤD }q:5B( -;*`PLL3@GSx-WBլng_xyyw瘖r'冹Ej9d\rQ-q;0~A`wر,ˉHA}Ӭs]3k\azG?7-,sxIDATO` v|Adr>W]̝V@fjaQ'Ƣi-uong- VRL%>jŊnӛՙ 5]v)PggX Llv0lvl(Ԝ`Tw {%n'0exPҢ3G:pmz[oƕC9N%"r  ÂE 9y{e!s k>$̙H)"):7|}ϰDp?%1:t*Â+%K={ۘ1s9 g|SJ"x@)Sɳצ* ET?qȕ307l MF*GQ<.AcVR]w?S\Ea!s IDjAJJ2~ nxiiCɂb` jgŷ9< /r:"""""""EDDDDDDBHP@  """""""!@]DDDDDD$(t`DDDi4 ?a|0DoaX2ݝٟcW| 41,Nwb+0OYCӰ`mAL~:X:[}'cr|t]DDD À@tw~ mxye话U?RVIg'0]-rFAWg]k ~~;uj;Ur+Xz _a`630xί]ۗ GoN5_l{WYtEvo3o?wͳe{h_;[g]A!uh3݌}fAv:0 Ød}]*wd? 
}x}L"'-6Swy%1dEq5{O-m}^Y+.>κV=^^/a #(~c򹣉4ٿy 'SOvʲO #60Oۀ-  M{ZՀ; ڼĘmUٽ 玥$#ڝ8όSm9Mcu>w_t XHY@^*nWa=HdJEp95u]QK~}4 Z(P@jl8A{Z[]CsFd2E% X^q!߿263߻< `1 ;' ~8Jٱi=k370@J<“),)$#NsM%:ۢ>j30=M^McFO 7-x49.@nrħẀOmo%o\ ٱN]oFbxao%(ZD.BDDDNT]͡"Ju,^p:%,\C9m!'~Ģ{Y|5 nvz7c`9ux&Mr_'Ծ27MeL'SƏYHi}t |4F&90@Q+n/ UpO.~vaҰy%mLuE%]|fN?d[cMl Y8*~pN 4n~{]uX-;-A.,Mfi\XKy`U'ޠ+IG|^\v{d xX8hylc0y[x4z65S<//Vuy#Hp1;\ o,AgH)&>\Y@dS'i`K~ /C˖%/AuVIƇ` nq7Vm'xe丕i|&lp;ƆkȢ r'|uh?77^4mfFj8<2WWp[םJqQ'oI.b=پd \Ubǽl7\2=-+ha)LKuLNfu(DS:'"""i'MŌD\t>u?䝲=+-褺\}7N~/NK^sog߲ Is]PJblƥ޺**zpiZ*W? ާj'f6&F6S?eNGs1D[;Y|s_ћ.~LEsw~ͮuZwC=Ɋ,9b1/1YN#fAI7D 6IXIu߹f|<Vbs磻\vĭO^{TlQi\~ܞXXYC^.da8{ټaa-CN!vw*7֐yc;/ݯs߃ovL1&>iH;o.q;l}y\7ro瑽MkKvS8s*ɶjWoa3t:-YLƒwd?w u> mm牻bٞ$,ZG xR^>ʦMVc3(up³9BR_/!-G]DD0eH[9Tk؈O#}LMӳ"7."""COGG?3[f *Wꄀxz̜$م7xz_&>>E|aŕ}`#=^'Wv(nW~sf/ iDHHi̮^z#HGE NW=ב}D$e`JA㴒-V `p0  ÆXW416Ąj‰ ,aɌtau F..g{ ^RrbpL))62౒b!*9]:<_5HlV('NW,vÎ3%*$Z|v9x> ;rӉqEaIXZ.vsIaAul ##NNXz=?i!۷V銽Qߣ0InRqؒG0jU_^7y$mrư:Q$뉫ށYkğM}mXiih%mT7ٰWy%``1좡013ĵǏk#G,$¦ڻukv)m$&3\\u6ᣥSo!/>?nK)arZ7e6g6,Fm*/Rn|FLt4mcWf^,,Gr]=[X` /Ym:oO.pI䌊`׆2OۯЖ5)Iϯ礎޽|rwG< vf: ;ZK?5PI"Lf"dxOˁx-7mYΞ5EٹMoJ0RYDDG7=z2"%(DWBϠȹcniŒĜ4콵TTu0zNdT>Nep1^:4l_]ES$Dv #ǒ5(}%soYkiOŝ@fosq9twv̜( bmcߡ&FHnt\?`U~##i-Ⱇ oyGYXj9˪'$6js&g`F\r<`ja7`nA§GWWQ{nbƤLm]κZrȈFtQBGRX\^_.&FW0FDDDDxjx8'_쉙8tTNrZB׾Ǽq1."_ t?1בJar`QzYGƅ|EEDDDDDDBt.""""""EDDDDDDBHP@  """""""!@]DDDDDD$(t.""""""EDDDDDDBHP@  """""""!@]DDDDDD$(t.""""""EDDDDDDB9>U@wIENDB`rally-0.9.1/doc/source/images/Rally_QA.png0000664000567000056710000031573413073417716021517 0ustar jenkinsjenkins00000000000000PNG  IHDR *z%sBIT|d IDATxwxUl*遄$@ @A(* RT bQʫ+H"tB"{O6B$yIvg$g=B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!R F*` . 
VYmB!1jkǻ18 w "ą[mB!DPxx消 ;#"]@ \8|B!h-X|z¸qi TWErrS|ҥp:lóW` 0C/u~8 |{B\jSbV׍ٗ3b1Wwj1uB^|vGp3%h 0  UVOtt4fJrr2?v"-- ,ʫDbMH`]ݺk̦yQKUp-anwIB Ob` `0pW裏rWٰ$,X… IJJwd0X ]BMW :### h4Tw{D-a4/b͚mjn9x< s3i$&L@VX,(zkݻ7$''늊"@7ܙtLqWw.!ol2Z&<(_={Х*7W_eĈ,XUӴkDf#!e+Ig=Mpg<6uof-[Ɨ_~IϞ=vq6EQhذ!&MbӦM?`(]7@@ !.K)Q~'.EJ, 0H,={oaŊx*8 0i$v /@pp{(zٗ4AqMtDBB+#tRUգW^lݺ+Wҽ{wt[RK/Ǚ2e m\F^BqԦDzh2OQ<1n2L}e|tZ]Lތ;3i$4hEDF^P!jH 6R"eCka>>>>C Q֬Yç~Jn0o[EQSǏgӦM4iD6e:~H!)QmDt]}_C*k֬ߧ[nX,mYEnݺC:tP?( gܸqY)SШQ#ڕ*܁Hp6T!NMd@D 5wA~wq+V>cǎSرcٸq#o&:^;w P-Bv) 60..wq@5x`VZŧ~J.].yq( aaa;[2}t4it;:)FDQM.;'Qm$"D|lfӦM,\N:]ցCsNf͚EFT%0EB\bW֝IOTYDjX{A{w#G駟Ҷm+.5ooo|A8{GӦM"7ӀuQQ{\wsIPTYDKNnnj޽{7oM4uf3#F`̛7-[(J_`/ R..Oj@Iff ÉkdKۿ_0ydLfP!ee@j )+{zrRXXzMӨSՆAܙ@."" z4Mnw`VXuDg[X!10؎{-ԩO?MJJ lfȐ!ܹKҾ}{@?15@~B6 אL6nL@qݷ0i#wp??o***IMͮ633TheSUDZZN+guu@ی܋~ gekԩ#3]j~z>snFz wJ!DMW`j|r*th̚O|| n@>pO<1LV5״R?UUs=}}E:G_|z6w_{/zOSDDÇgȑDFFʬVLQ,  W^l޼3g}Heex`88@- AϤI0c"N]NN~z5 gŊ4lXeÆ8Nbb"xJtt]Tbc%(IH =zuF˖1DF) O=x\~}=CÆ %𨁼ٳ']taӦM̚5KݰaCqxw !jXҶm,wRm6NBrr p8]\PPɓx{{ " =^^PUooO*+mCe#GCaa)83nV˹W%KcРgv9B$~:jjذ!C;QF娬~iӦyf***5 D$Rt*{E9LFҳzً e˘ t&ɀ@_^NGEE1j( &)OOOzE=Xz5ӦMc~"BZR bɤeoˌOW;g’!^Y{𾪪Wׯ__?qD6m3<#h4ҧOV^͢E޽;&)wk 0d\d@E*ݺíG2 B`..oرc9r$8qqFnVnf;~m+?`4pB7q.%j]D 뀈Zwg'"""x78y$ǏٓcϞ= 8ܿqnH}Dm @("j!/m^)AF| =y2p:]uu >uibVtIoI @oݺ5?0z+AAA6ѣ{f#??뮻{jiן9z(6m⩧\4fΝ;Ӷm[vɌ3oNII 7ܫ_'lB6 5:bر',k':f2NtXuP/++ĉT@~Ъ5M###L&!x{[4"N&,Wͽim!0Dii9ǏEݺAt*v,ÃIIf'%%A _AI^^YYyzhjNr(** $+**#$\W t:iӦ< 0jr8xzz2o<СCL0QF1w\BCC4|4N6Tׯ^:Fff&~~~xzzRXXHZZBCCQ2rrrrJPPi(^?.|$<<Dqq1DFFVlBBB0L䐖` ""_N~)fvJپ};ӦM/))܌; lX!ĿV9q"W^Ͼ}x7yGKbb/?@ם='ѣ'q`"EEtҒ h7R\\Ѩ'&&3^ N]ȑ#ɘHDUUSO݅b͚m8.,3?~'={v 33/?dۺ#HOkgЛYn'99zk7BB(,,ᦛ:1}8E/1c" E̝;UU:u!e$%e`00aDU p8yw8u*rɀj1L:.L4ʲZ]|EQ,f^t\#p-djдi;ƶ\: {@^W7n?̠Aj\a(f<<qƱ}vƍGjj*z___z-uv=>}0w\t邢(=z1cob2xGHNvOɓٸq#Ǐ3h LB׮]{n]wSLAUUV\[oEVV3e>̐!Cػw/>>>ZiӦxb=ʓO>Iqq1ԩSuL&ݺuꫯ~wo  =o?UV Hf x;iРE^ye>vv;w.曻0ars ϻ'҈bÆw3i֬ڵ(((fܸi}5,b͚( %|z̖-3rdMΩS{ ڋUfp]y7IN`׮ |۷ `o+pEVV+WNWG{ǡC'˷3f˭| v3-b̧Xj Gڷl:nzfe(˥Kybu2 fif>ҥˎ֭[Y.]vL!!Yfuqmdd-ZשS-(_s^_'44` ] 
T:Iƍ3f?2z|HZn֭[)--',,uֱm65jċ/Hii9ҦM>\.FLL 'NDQV\֭[?~ke6nHf/4 g}Ftt4&gyMr!6mڄdW^9JYp!k֬l7^YQ#ԦFg@~jd]ӍƍǀQRRơC'λMzu-pMi21JJh2CNUW5cժ̷kה׷n` ooO IBB""-r͝0qUp4Oj# &05χo$446mbHNb1bTΝطFuvTU >g+~୷g+ ~[H{oܸ͛c7n߿c&M*b}>Q>(NE{v6]ym۶|UVS-K`˫-pX tK"? < 4b$$$0zhi4mS8vAr}m6`00rHKp8,]aÆaZٳgYf2`2339q}C4M^z <___Zj?[nE4,YOBBtsN۷/+Wpʁ|xUW]$%%jjӦ͗yyyׯ?@Qtm߾_EEnǧf+ܻwyyy+",,bp;\.Wbit:T}7-<|"'J;!!ސWD^^~~8.iܸ> afćh׮yu̗fQVfӃ ?vN`Whp֯MBQFea<==Xn.&桇&/45}fX֓~h*@hܴiS@mժG5{{"##͛7s\EAAANޱcOOUnݺ(ybݚ IDAT=z$&&&^UVV!;;{Q /OKK`oII6NiM螒YFF;W@@@;v߿gllk:͛XRRrr޽z}P||Ÿl6[ҙs 10!!acs< R?i~-ǏgҤIf*++II%i-?_|9qqqEnn.<˟ҩSxyyQTTDӑCeeg~e˖qAbccIOOg˖-Ui֬ѬZ޽{bA3pמ_JT 0g|I|M/^LIIIOZ-;M )[j],A^^G$a0,^w-ZCH?qq λS|Xz 䦛:Ѷm,駫̥DQE9(\[oFnn!}KzzK~ѯckTb󋫎qf{WVkN,UU-t}֭ϲ1fZ\D5d2.<_Qpzfv)ݞ#JUyyy;M&SG\YY1oopXW4jdee-s8v=5??XHHHw-6mzĉyN֭۾ zhV 8].Wܣ7[,HFYº'%%p8rV]v(**ZuT[4i26!!axyyދ|/' %Nޭ[7ݢEXb~;]p8HOO'99]v1k,.L׮] ..KrY`:uizʖ-[7ow}7RN:vȒ%Kطo'N`ɒ%DDDШQ#7n IOOߦY333\4L&wFf'|BYYӓ e֬Y޽}( u֥M6|dddPQQʻ[( zX̙æM5jf&` Az*e6afא_|'Ϗb̘I*:'?oKg) y nZuk^cԱ<F'zz@<<~d0h޼&@&N|9sg'Gh?eeUf,3FUC =11XܳTJ\\C<<}F<<>LF\.? 
=6T,3h˥>A~~1!!YNyyX,Ը$_|aÞr7Юk[lnBB𢢢sƿo֬wqᰶo~]RRkDFF {;:t$==}]rrgWŠiZYPPu͚5ykNg UW]'99y^NN>>>=Vkbee味k:u'NԩS EDD s#;ܳgϐ=zHzoll Chn6n޼odd:w9O/WChӦҢk .--YӴ?w)3#!@hm۶<oo68fQPPp~__*,,~x\{ 8cǞ(++#)) EDD9رcFׯO~~>>>>UY3J~~~{-<<9kCHB$R(r6ţXX,BllyG`og=ɛ^;DUC?w&_\ s CCyMUs^7 4m}~9|)xF@EEe|Ҟ`gӦM'=z}ޝ4iBrr?(~~~7?\!!!7,ZI~~~BCC4w|jxxÇ?bZSt:)00T'''ӛk&)ZnUU fqPP`N555UUlӦgGoyy1M%%%;9rsΛ:q˗jd2w bٞߑ.77wjMv\cxw#d2ѪU+}QO/GF6ߟt:ٸq#t2dȐs:E\\yWU&MT}t:+9Sz6=T( mh޼9SNeԨQ;,^+;;Mn= {V#L&S#UnS~AUU/SQQiYFMHW.NQQ)Ьi| ::/(PUULG_zz `˭Vkau\i֟~12˫qNVС r9Ng)g:RMlU?~ ֱco Of?EQ}=^ZZTn 4!<<|x֭z'ON?rHFddeee'?\6mkn>//ollŧw[nNnnEEE\ho4=BCC pLݙ6##cnOr\b?UU\PHΝy[96UVѠAy֭[MqfXѣ;w.|򉱠iq׈<nj%<<mQӴ3jddz ڶmuCQE2Xˮ]5sr-^;x`pU57u:]=M.+ݶmۯGJJJtzN38{EQ?UU8Τ^_j.lBNgZ ۸NUU].Wpf*!%reUۢgYcر?{r$LIIp$-u^^^?oFO*ą̙3yn~7RtΝ;oMNN^pԩݡM68pႂE TMӲ8wF0EQ?M jĕ6}65}_q*t[4tzj'gM1i14˕=tt&v (BBB *..YYYK222Zpb+VB 2uTJaaa4<jb-Up:PΝW䬰Z'4i29;;ۂF1M6NB)))`4#[j^q8uwq[yynjMY ^m @.B=Ӫsx|zMZMQݥY^NSq\={PVV&55b$%%MӦM1].#GpB<<LFzyyEuWo6,3K|Gpp&cɀ޸q㰋}`t:-YW_}W_ͨQѣ5{JjJQQ~~~ԯ_ԩSSPP@~~>׽iJee%!!!*Geܹ\0{l^yj>ˇѣ{,^/VEQvllϸ\rM욦9v{EiiiFffߦ-d2E5o|vFٻw ͖TMYEEE[JKKŽҲe6o޼PuM=)3 6&%%wߝI=K||SRSx)9rM// /x{_ͮ]y7ի3$?̝;:$$`СNή;o⦛:͸qǽ$&p8F[ (,,כ[oʳώGLzz.zJϞy;9o韲uf~ @jJc b2gq,,LTTԋ}<_PPp<77wхhۿrV5"3--N93xKӴ4MYQQ1|ڵaׯW;vȘ1cٳ矮!ʘ0a+Wnc40`'N޽{3`6mDZZQQQtЁ\~~gv;< 8JϟO~0 ӭ[7[ݧ|Yp:stm nnzDDHM<­5nq?i-99֭ۻ Wpu]={xޅ ;wn8NKa,X kl62i҇&O|֯ӯ5\23Zm{BVΗ_kO'99s(u LF-o?_cٲ_4 V`7;z/z=B Mov6LQz^^^-}}};Pb_biq!w\.f+-..>yС #7lq˖-/|t:qY+0pdm޼5tPzŋ)//?gp[˖-cŊk߿^z>MHMMeڵ̝; 6P^^ի6mNo >cǒKV;0 UYj;v3ٜN'NgCL: `50 X|\JQU'iѣ/::mٲ5z.77wiE+okӦW-ZX6011__k4M֭۝՟"H2OPP{ƪW(€U+wgUepΝ[KFF.M63wŽߗ:u|h0˵sXzkn} 410Џ;>&==Kjj$0ЏɎSnuq|t:V> BN-/EHr՘ j((rݩ5f~VQWӴNUGQOpTUiZi-Lj!E6nm=vz^2//k~Ywyg'4* xh]^^tQ]n]nOŽw>; 3&LMDD=zt@Ӵ@$))'rh&R*+me"!OsW5zPkz=;j\={mZI]W]u"Rn9Ί={(..8e˖Ϯ[{tMI6m,!""񸸸I%%%zޘwΝx<.p86m4PQ4ݻw_hZEqedd,mٲ?Cx```:e˖nwdܹ+ҥ¬_SSSٳٛZh`bb /O$/l߾}M{SN ZTTTVPPT'S%WQ,< < ڰaCĠAܹ3ƍ[n׷V4Dbb"QQQթiڴ).ױm;4bǎl߾AFƱ N:tr 70|pƎ;6 IDATrg}i&m۶;0gn7 O+qB#㩪zڙ_UUVYDBTB}t9lcIJK]hˁ+nߗru7:ujG)sΝ lVFDӦ1szI0kOx 
Nfg5j.~ap@OR@?~˖}5.cINNټm5BMXֿuOmܸuu\ڲe7M&Sd2t[+Wv^ѦMff5mtΝ;_\vm 6y鎏h~Æ >:tx599kvYjU'[غus&MZիWo2N[T4m4XaÆW\y'4nk׮foݺ-[>l6Gu-[>>}0w\͛WuWV;$8il޼>}0sLnw) XON>SRӕ^:3r|2RR2IJJeƌ?}?:dUXh)ޝȢE+i߾Չ_x| zh'-Q;~Ʌb0|ˬ^Bӳdgӯ߅XmwRX,f</;v$X$$nr}xケw-[CٶmsoߒCvvo9v /*))9XXXBQCl233x<^7+==61f::&>>>G(JX}Mw^oZPPP{RXXϯno=88lxRRRf.? NFUUAAA=v??x[zK""". 衪7++k(].WR%@2Kqg֭[K˫ɵQL&]vg…xbׯ{7f֭[fɒ% >fѧO) PFQ6nȰaø+/)**rk*d95Z}ZUO7g( 6ELJr}X,h'2jdfO b0fв~7֭(PMh!SsB)3 ?]@JJ&~DE 3ӴiL^>y3p84kKHH &6m0{kM&o5 @&|KfQPP\x!31|||hoq` !? x8|8`bc#16~}8ݺ׷vj HNNΎ vOLLl6,**Y~.rAAAMZvx_ l6[e#fp}߿]&Ǔ(R1p8].WqBBkL&}V}-00b)ilf Op```{qGw1,0GHH AAAfMQL&?W:EY";v9r$ӦMcȑxDEEիDߟ^zx\b N2tP @xx8ӦM#%%b4iBh艅Ft'|BhhI]x<o|p| |A/!ESRg@Y,6 aS7 _޸ŝ5nѦMZ}:qbmuJttDe_[U\}l}thh'ntf&̀ެY 4m \ukʏq%%%;6l8(??# *..rl6[ōX,ŇEq IOOϯSDDDS& +**rïq8E)mҤ#N39##󌌌qqq74irW}N___ E) .$$MY3'+[nLppEśōNHHPTTd2"##ʚ۠Ak[X 6p#l7rHKy"r7^oF+f6ʙ擺uחVZq:vsl-in[`7 EwQkէǸT>}/jj0<99nݺ}dZwj(t{}k׮WכcǎQLIIYr.pyiii KKK34MSJKK$''ԦM5bX,L&I4Ms{ˊdڵΝ;/::iiif ظqzȑwv^XU{@EIKLLޮ]IN3p$.R]I>[jCBB:3JIIr%ݻ3L%ˏ[Ueπ᪪޲eK#,Kn6M""Mx<ر~oz[jU Qէ s?-6}it,;  nd,&)l6*|/&@U4wl6Gi,>@PGOrX8/[,KcUU5Mk HUl𪫮J5M+{ WV^ү,f7U{}e-l6zYk~0`0hڵkرc:t(v]qJ3ě7o7`x$k=Hېģ>7L' gϞgee-߼y4M˯?Q Do~eg$ ufkDvJDcZbĈ 6pC5Gyfz-/^( ?zqup82-[_N4>OɖasҪ3557YU#Зe'$$O?4]t_'334McŊtM\}|(R.FQh)$5^}J@LJxX~ `x<*5aH]z3m6pPUusJJӣG&NHzz$a 8o p82i@_~`I!(,,QXXLǩĿS6י*Xi̛-q9ׯ߅׫8UU瞗ذa7 X,5 ViVrD ,nTUudrrr[OΈ#2d7="uXII +VO>~ ̛7[sH7yq7T;VdD'":>Sӧ3|p B\\$"uirJLʕ+q:`7e,;yyy?yާ}||N[vgθ٩O Hx;M[@tt87゙iӾ?vMbb o ݻ7NG1q\pA; yڕfgώ? o699ѷo7^|A+o!FHFFNY⋻`[W.X,f4YXSx̀Q! 
=^d}Ǎg/ODNbbb$4MtVl.x<;VۍR6rrr67liLY+:`$֬{c9 ԏ3n/v^yeIa^+>f9 З[DQT^xa*))G55}5k3};5'ݻO3g/}6LGtt8;"==M멣i2J Y`9<]o?Af1ܙHέ+-ﬞ_غu\ ǃYzBLqz+}&\|qc}22rc |e\vQ_ |ꫯ2j(FM\\Q ҥK4i+W,)x H3,@Q8΄111#.KJ7)#/V;w"~̙3zbӍ- #Gd9r$ѣ'K2s+nǶm޽ȑL-s$))EV 0l M'c>33E'SPPcҤIO?aÆҢE c\.}SNenГ7QGO 㨪ZT HZZ2%8EP`= & ݆qn[/iiYٹ3Yr=4 >Ell$:b˕W^_/e۶$%r]/fv|}mȑ >h>xǪ(&o{p|b~AA} ${@ǜ!;4M[uQo%\3L,g>bt,TP?˖m믗;:‚5o";;>}p&M>{/~7S,]3l7oĨQT>t!˖min -ZItt84kKnm#~ZK8pホ6mTİlfk鉯߾j{w"-#DmB/պŬYƺ`rrrhܸ1uv&c}Z֭-7it,\~~gиq;))qEQ/f!,,pH&.-Xq[q4hҤ!>>=˟&ӠA 11(J^^~x<^||| g4JJ>׫ФI4' RRBC17l.08!j3cϸ7k֌o{:T|NGҥKʗZ@ ~@tejժ+ڵ{7nifYkԧV`eӦm_2d<&QXk@@dd$ M6%M(**bҥL2kע(>) }aVkkfw͛7?1nn>:DDs,BOBn~*:u*zG!))}iiFqq1 ,c^ZQ%> |el賟@I:vqݔTv7ƪOlQI!X^5J`\AAӦM3͘1aÆ1vX6m*{DNYOXnUGїZ7@S Bv"`GٿY@z}KO#]}J@d' #! q8WL>ロ1cЮ];cA4M3f[oT~aི/i@ބR,@HW9.X>=Yը>% 2RBTX>UZZzG}d;w.z+=;qNWTTČ3 0Tpv{WߟѣGs`>D,XiӦk.EEs 7- LfiߜoK|4kHHh!TU%?̴,vmߖI:pW}A7}9P:DlDÆ| pFy衇ڵ+vMd|GرEQ3!΅pĹ`[a mR1{o&Ҷc+f3!7קYOoaA`4zςO^oy߿?^{-K.-/_kMaĉtԉ{ m8ڣ/thڲ14߭w\^;K-ptW'1ߨOr" #{@胉nЫW/z). IDATRVki̞=Sr!4MkЗ,Bs+ <4jlr |Nb"dvezQ`O\ ^A%3P~sd0ֳc@c=F޽'43gf͚.Wɓ=pf||W;+UصOޙ1O賓8)aΰ]7@1׏G}={NHUUϘ5kV'+0xh(30 48Qdg kr4# xjzDf "g@ JJJ2.\^~Xra}Y.r^uKMޮFq>3q_vo1|TߣUaKaG^Q @dC~I ,Dݱ }e9 .TYd sE!55^x=z0i$RRRu.GoV\EGo-X;ꩻbdto/{=B7x{ D6WO H(-uSTTbt,4N.[72(cZ߿?K,64G /ХK^{5-##CC_7~=ͬ~yn#T;" ПqY0&X PvҰ=:Z/&D4tyM 5`踄U"X |ZvSrrm֬YoѴiSVkE2RRRo<?9F銪5=|A{f,LKSa2hѺ ǎM{H=$; 0iNLooѪU1:L422r2dxsS^ ūIe_je}K@@Z""xnJ& Q񒙙׫h. 
U,@ l6_;ϙ8JU՝@ (n}{9neGuxL5W =!7' c}cߵ@ k/>b#B_|ⴴ{in0xf{a<=}FU(´䝗?(c'3!>F`T5wЧíƆ#l/L&ϯEo'-~3aXm22bawLtר{BkRN2B!DT_W!gB߃w0`c⇾R FߦO}@B|ԧ!ISL&S\dz?,G Q>#34I@B꥝{!B<0y5L`$5bn熃zUՒD!^2%ݯ;/O~lv[$d.ب$ B!D*DDT% нYƦ'?Axd1˓A.flTUC!DT -NGVF#*!,SfL aψ)!C xb1[?>W^(>j DVq>u<~v3p?05I@B%IJ!@n=#>jko;1K!ƐS\cQ18 &/kxVR,I@B%}@DU < ,pĿȫSƖI@B%}@DU&A 7_.>硧l6777:sA!zɨPTہ"By1F"Ρۆ ࢾ‗ܐD!^Dk-',e{h4x9 ,~L-I"BCs~U|6~YzU]xqgy-@yUZ]L!zIq.zBVA$B!$\htQn\޿ѱ*ԼU?v[ J!Ɛu2j o1C5:Qf3@l\Ct$B!(aӳF"Ahx\~q$pΚ$ B!D> \X̌xN6d21!G\\apHgE!zɬ8{wkF"/LY蕱jI@B%}@ĿeC￉ 7W<t58L!D+-kJ+{Hߏz(Q `EOJk I@B%}@Ŀ p jp8&[^K>u)Ѐ!I@B%I7f[Er:ѬU@ jftB!!fٸ~q'jbt,@6Aw/xh$ B!D> lـ-w}IØvSi!KCR^/5un18JD!^2jgұ[["f3nFOH"BT/"Ά{쇨 R$B!!{@?sEc5HMiW zNH"BT/"Fm@?5:QӵGH" F=ry USZZzF&5(miii}vu:4jmt q%( NGQiǿhX>0--ƒ^؄&SU5[49Y#Άhd2ˬix,>LTib20k_5Pb @#$ue˖VkpBB}*0qbyJ*|8+ZmU7cC4ώihlF4rhPIn#<2CPLDZqGC ɇBQ £BI᲎C17)nb} G@=.)9n f/ zWʺ[ؿ'гMlZ{%h/LUŌ_]YQ)<Ofii~B!n_ϬEmYwgN`YX ūMdvSbTU#걾Ņ%t칋9(q:xNJ(E4Py7GޤŞ b'p?3}#"$.La~!>KϾ8x0O7-U|8iv&p-O1 Ӿ=kow砪*/̄OVُ2h\?Dz#3 R4MӀj](Bz=ղװh/̛/=9v0kl_GL7ek3 E|p2-xB~a5l>_}6Gݬ%ϛ߿ì%7'ߖtL}F#4nˏk_ ]9jWJ +^zTF}@[.?i.ۍ^]Ob\c㞳F7Q)}剈B!E$dT n?oP_E}Gh/]K󚒕KVF.f @?֯J|׶`[ホf(J& IwMCUU,3>VCN5-6QtDTq3 2"BT e+V<SfLӖqde3LR)*(!pF]Bk[,f, ^HRq+D4 e]MZf~(v?Z6Hb7$9) M|LBCޤ c#Oxgl_\C"tFjw\ZW9I"*Ef@BCcj|vk\⻴a4m٘аޝ ^bTT:!n 4 ՇQNJȡ4LfA,> R2OyoΤKLXxa$g`ʿ˸Kxz\гR&\^ùTKH2"BBe -$/>L&7;c8'F\f>>E 2fO}H;ɣȜ;.a1 UboYr sBnیcz]C **! ilkxMQ)2"B¸\Nڇ_N~2ˮŃ{^rhq7nz#*:azE]BF?u>NNbGq[Ox穌w7}~ngs0ci* j.&j Աc mذRB!A7ǒM3m(z_ MW{c]. fo%NBՋJ0B m-{@D=zt˛hh!B#@ -umb!$4R}Dy"*fb芴TjrEA@,qjV4zVkS0#B!SQF-' 8noճg۷o?gl۷3<<bB!:??8ۊi&B!DR?_SF ,Lui`9'JRcz~qf͚Աc+slaaիWwPB!'߾ɵ7_lϬrP2RJd 8IVVUU+5\QgU$B@o27`p#ddel?" 8LX}4MrrrVO !i+YOrb#(̏CQ ?! 8Eyyy+slVVVB!4R>O%-:B4V?w%ޠC$:LWU5:B! >\K/pRѱjd2>k.B 1I@)y<+TB!R`w j R-رy/^zY@D999mh'77wUu$B ^࿀k_HO5Xſq{(^E4:! 
8ȑ#Ow(NI@BC}ܿ'og( uil۴%^ m2z"ZH"Ns*NBS4Ms; R97sj$I$ 0H^,H7'Yx5hC^ 'wedd|WVVW_$I$8iؘ"St2!2~WqqOI$I$I`GFjb:;&oB IDAT{kٳ'І^Dd"ŋ- iEEE$I$IE?؂zHx'جrQ@t@0x]ZZ)###8++˥4hEԐl\$Ij+]GnAQg%AB xtĿx`PcQ53_~(B5epW$I$r]n9vvL*~U !ؖqC\ _L a"SX?@[I&xb:vx!Y:/// @NHLLTRRR@,p|ߐ$I$IgUU=xJߤy 4pvLpaXrX$I$jV`7к 0c fY^e$_Lau%ҝ%x x Gat WW׿Āĉdee !‖9 8Q$I"`=+#mc@,&Ydٙ<5E!s?;7O& 5xxh4ѣ>Ǐl6^iӦw}rq}iiiں"qjp$I$UAh=!9wnx$)*'O6 {`2p)AI$IRlӔXHuS <@[?m45E,G=ogʕ=///( M6͍Ç+к]@$I#虑qv:t&XΎuӝ"ymu-΍֓ HX4T,X@||?0tw_Qr8M*ӹxÀ΍9dRy=cǎwoh4VN# ҪU+Ξ=B[=- Iun$ITe?M #ܼ_Dwh^˚Š'_&=%KE+46;h$Ks>0`0CYh<U.񸚢( ZlɠApww67G[;bF*I$I5ZZM_iIY][Shߵ5nU^:ca(/_|VT? mIDDnټKhzV///zAll,)))\|fd,I$IYJMEm0x(wrUL@-(_LL K,aԘ.R@tt4w׫N'(I$I5@KX4/)*.]Kn ٫o BR/g?^SRT<lG6:oQB[-00)SOرcL8m۶!8}$)< )IUO!5o<}qPF'V 2=3sڰb̹U=2huFQ=z43f̠~b|r̙Cbb"hk3HHFGn@c$*j(v89IZ/w(:Eݾ9S_z}:;9AUUl;Ă9(*TG {=V:3uOءCΝK=0kgo/jB΍PnS@@189$IVVVFzzzE/}5YjV'0)@+≩ch2J5 ̒>/PTP ڤ9/EΌ HRx O3zhjUbZٿ?seƍvp-NjwnnnÆ ^z9;.IbHKKcɒ%|[8;bMS?Зog䄻jըWX֭H ;# X62CzyG[5xt61c dqXyqE~g&I7ZB'dxxx8;&I~CYYo63fbΎ&УM3p 㮡g@Z4h487›bsZ_LڕL=M9I&lKA;AƘf͚E~jp?#11W^y+V*ouKRu gҠAg#IPTTСCAkT|!LFdZD;2D»N5KAA~G%_s>r@^nG&ZVTOOЪUUٺu+fb߾}XV; ms#?uڵ x$I,Xi@kKͪW ЮK+zZ$#B,+3z&6}'-6`My @`Lsw/VSqq1K.套^"%%Ц1Nwfl' 85kPIFlB޽6è= >!лҽwzJ{䇋J!ݺmlGnCiIh dX sf5E9C4ZBӦMyW0`@Afr sޫؔ6'H9..իWD͛7sS{В@oWtzۖ^Ҧc HII))=xg/spQ K1J<mV 0B{sN>3fFQ?p@yZhQ+sEQt֍O> 蓓fZ\pnۄB/9H%#8e٣l;g!tz&w\]}h٦)Do>^t:By P*W().%+#W8y,YXʭ\p؀Sa$ȕo: pm۶L: OOOFWx{{cѧOYjxSԹQJ5@&T ]5ħ'V\ !!@36ծ(*((AvfJl4pu =<ݯٹbRn勩{ &8 eS' Rdr 󟄄 'P&MxbuvPU]n`.ZQ<<<|/rDE)hpXzLR<==}|}}븻Et"jA=nh=h2'O̶m?~: Sƍ|r>s7o(Q*M,d$UGW"CW~(ʵ!R%FGu]ر ?LQسgsGG;rd$USrqq1s6&Mk |6w-[ƚ5kqrh_ԩSٻw/FLA+6Tn9$ZUÁlEfsww:Wosww7LΊId-P󠷷ˤIػw/#GMVVs:ƍa>ڰ@{D,ILQcz_g21IL@~ 0@y`ӧw߱pB B ƝwIBB?"&舘ErlРn̛ܹ7MܼjrJJJX~~>d2_kժU|'t֭JXV̙iӦ ƍ͍ MƍӿZjUeWW777n6NҔsι !bцd%-aҟ<ԤIZ;Cjj*yyyrnȑ#iӦ ~~7Gk.Ν;oo[v,V+c֭\r!7WҥK;=zu[]x? 
d%s#f0//eeey?vb0LVJYYYnYYY^wrϽJmz#g<<쳄ģN#!!˗}vǬZ6w^֮]{C^+11conNGϞ=}֭t ®]`ԨQբ7Ņ۳l2֯_O||ɓ1hhu"'!X@rr2G&** ___Ƞk׮̛7ϴinPZO?ij>Knn.l6iݺ5s!""↽dff2|pTU[MKKcL4 www>3"""ڵ_~"(--Qr֍aNNWo3}:t^NN#GLBX~yHUGFmI@t@wيn;vdƌ;v,| c޼yzmYhf,.^f#44z( ѸVg*.\ 88t,**…  "##!ɥK&<<777rssrh)))ԫW777RSStc>>>^SAjj*{zzBFpuuB } $%%h$<<уt z .@XX-K*+x{{УGx >#Ϝh߿E@- J `)//gɒ%DDDɓ'Y`Çgڵ .^@xx8!ɡOOOΟ?N#22-vdRRRpwww-z%":$t"5j$4i"7o.֯_/TU_r쫰PtAl߾׉ڵcǎ;wmڴ=ZBUUf+ڴi#:t ƌ#rrr޽{EEnnc_+Wݺuiii"!!AjJlRlRWlbΝ;FEf',3(*' DYYStqa2Ĺs*mOLL!!!bBUUo>+5k&Zl)u&v%TU-C [#G"//Ox{{ &(ѤIѸqc1`$֭['bccŋ~(+JJJ*ŶrJ+.\pIHHAAAbǎ"77WI&:pѦMk.!)))W^"::ZnZo^Xq^ ?֭hٲ{ErrGAAA"))I*jlQVV&̙#D˖-żyDyyxD.]DLLԩxq={V 6LtMƊ3fJǧ\̞=[ 8PȯAyfֈyifs\۵ks0 m .|Aq g*0aݻ>}.]bذaO>"==]L4IjJ5k> DXXHHHfo7n!!!B!F!Ν+V8vhٲ/EAAhԨnCqƉlѡC1n8aXDffĈ#jTUUlذADEEM621p@1uT!{O*i&q!f͚%233ő#GD?.l62d ׯb"<<\p"xb.E@ _Z$I^qXi{yyG_~@_]tv] N'.\(233իEppXzu"22RNjq|N>--Z$ڴi#{#RA& 7L@8 V(ҸEʪUi޼ë,>>D.\̙3_>9rӤI;0< ;7B 뀀c(үwzJKKIMMȑ#ݛgrqZjݻaaa :CF駟*ܹsXVf3-Z_~:t]wQUT~&Lp0t~ nnn;3f͚ѴiSn>L~~>?#Zٳ-b̘1L2۷Ӂ-ȋE\{`Z9}4єcXj[nuP$vʊ+8x gΜ?A4nqLHH˼[_yThs%>JJJHIIb( <o6G!..EQ },YK.QTTs=ǢEYzp3f@Q8׋w0,[$~GlB߾}qssԊX)+cƌa߾}L0OOOZOvI)ZN%9SN8AII oO?KNNfٲe\x/3gн{JղeKHMMeǎ|wwww=z4-ٳ 41wЁ8>VZErr2gΜc<裕f\lNѣY}b2;ٽ{7EEEXWu=:]v1vXmFVV|\|ݻb޽n~7xO?D@@;wwѣ>>sHϞ=ݻEjAAA4nرo^OӦMaÆ;'|///ڶmŋY`| Ǐgذa|tqfѢE< : f3=P!;wLJYf9Odd$W)>!mڴˋ@,X@||<7oFQ5kSO=N~TҦM[>( cѢE̝;Wm۶v}!p?09,Rl6?$(>nBdՇ111,XJKKB30n8_gԩ7ٌcASۓ<@^^cƌgϞ̟?x~t޽RGbxzz^7fx -ZkfWWWNʨQ*Ν;3ydRRR[.'ND3ϐSO=+ތ?ooohӦ_1gMԩS1 l6<<; /30fڵ+#99\]] %>>^x;zuj#9K~_uy 8SS-W .)) !(--%99!uqXh4B^^^^^mX0(x"qyyylFcGAQQ.]d2Qn]\]]a̼t:|||(,,DIvv6V/33ٌht韖1*7v\>S|MΝ;P|sjpҟf0bz˗𸪪yo~c]6\Ed2]֠+W( 51,X;X|9zիhDUUrrr0 r ߿￟>0QUl2331T:WΞ=d"77ԩGyy9)))\>CUURRR(.. [,1 fJKKr `X(,,tHKKl6Ns___z=v|FLj[2`JJJ6ÑS;wYzzz"?j:% : x({ӦM7owucA;I΄={_~+VPTT֠%%5n畗'^N6L !_e0bjjIOOߜo$##OXj< 6mbڵsgʕ,]-ZhѢݻZ@;wQL. 
[nezeΎ&|)**jzRRgG ΎIG0f >}tv!CY1EI&{]W/xXv1z]߾}yi߾㒤ŋO^^vm Ϝۍ:w%==}Cbb @xxxݲgϞE1`EQ v=EUҊWŠ|Ecߒ/''gSrrg!d͚5v$UG[lw@8'...m۶2(([ff l_LCKj/S@@W_f I<<|8&Mbݺud`95QU'O>ۺuo(--=ٲe7.^Y~~fZZj4v#6ԏDy !,~ǎPPP 1**깬.\HMKKET]UWZO7)b#**ލ3q2CpܹL>ZA<|W ݻw/.))ISN}gw$%%}}ĉyyy 6ogΜ q /8p%ZlhΝѡ!ԃ!(j6[VѫWҘ%%%-$U7 ڶm{LR (t:}nn[^^~AIMuH@6n߾wճkHcǎka׹j](J" ڷoqƏ$u\tƍ3dVZEFF 6w!44$IRe+  $8 VPUU̜9StE "y2={tQZZ*N*:w,DBBgN "((H?^!ą DTTXfؿ0Lbƍn-Z+WB%Ď;D^^xEqq(--//nw'|L1g,z rޟۭ;{{Gmڴ};屆:uݶmۭvi_cf0Z7;66TÆ _ ť>8}$ DYY?AAA믿bٲe"<<\|W";;[gϞܹs"##CtA<ߊ+D˖-EjjX,bĈS8p@_|QtU-6n(b߾}"$$DlذAdeeN:^zϋD%ڷo/~gqiѺuklٲ[iIi&֨ pIKլWVQ`6oެiӦRQ/&9(`$JuL@*+W+իW3k,|Z];úux駉W_N:ɓ7DwxyymqƱb 5jo*u8V 1bvk2ebccyi֬-d!IIIL2/pxXFŬPUxjuӢ vK*L>nNHHcs///())q$;vt/xIHEQ`0`08<,\xxx8Vx!M6}S:|.OJP Ih]pYt) .m۶N9*1cƐɘ1c={6o^^^s=L<VVNDž םwʕ+ټy3/^CQ<==⣏>˫^[sW1yܸq3x`Ν|Pw),,dżKђ*u#-9-Bݺu|26 HAAxyy]71*EQ~qɄdYf۷˗+7//ZR,[$:jJSfezөXЫX,L4g}f3QQQxyyLn/9x L:I@۶m1L,Xڵk(<\|?N>͐!C8pucٻ(wZz$j DAW EE DT"".HQYZy!Y$sZ$Kkɇ:`ya 87gLZZ;v,ݩʬJ8_KΌ|%ݻ7̜9<իv:oߚTX: YUSS ggg}gL0/=<ѣ4WrLVV5jԠFXVL$--5"t, R<ñX,Ijj*ǎ#**j&YYYxʲCDnWckEvZv kSJ H(`40Uzu|A҅J] Ŭ_'vM$`Us0r2YpiIRbM28JUaU>,H_'N<3ٓO?"Ue C=D>}#n{0g]M> 'tLUV`$n߾2dÆ #118"B~~>gꫯr9@דյB"t|)Y]9TF+tRW9xvj,q?tЁ%KuDFDHJJbȑуoWj`3%8> J_UM@_(frI[nʆ\Qz@tnV< l7ȨQؽ{7nۻ*wߥ_~뤧g4T R>MB9sH=xWAAr馛JP HnAR>JJ& (Xw!رcիkِ֬JLD'kaŊv],Cdԥ}@QDlƳHAз~KϞ=9r$Grh9NϟO֭>} >p%mtR>J+u~\8'6v ̙C\\?׾NDزe {fذaupQyzҥK8ݭލR5 J4F: M@ʎ Xƚyhڵٓcr!LrU[AAK.wL<'NnJAR>JO(u~l<skҹsg>C TL_~ᮻbСl߾zlEK^TŢ}@QD9 3#6l}rhUǤIh׮ .tY\!*u.])Е:?M@.xfCYnݺrV\*n/m2i$]g@')tw+Ui|Q4)xʲfmۖ_x9CDعs'z+7x#nu;zVJ-Rʧi R H`/p3͑#G_!CopHMMeʔ)ݛ RXXgqލP %XJ(-Rl *^|y-[wuGN::X|9SN~4b`5u@˥%XJboT3 sx RSSs_z%rx{n>;iFR=|(ߔW3J4ٰax{NR²7f^zv:i!Ԯ][賁@]/*+x/b 1M;y)ҰaCxÈRJ]j3@&`ʤI$33S '9NYfo߾$pnTea^e˖o>=(U)k$z(T *ˀg!͛7g <j.9“O>ᅬر 60fx ӝx5ɾJ) p4mO8ѣRJ dlA[HǎeٲeRXX퉌 f|8tѢEƾ}hӦ aaav6DN2gNVZEqqq!p7O=RJU իWAObYYYY٫RJ7;}.`.p0(> >>^z7nJ=9r9rDknJ)U֭;lWDEEFw7SJU!Ti8 ,77Ӷm[K<;z_rr2=]taݺuCx^ؔRR ilׅ]ФcRJ)o'X$q/&11-[镲\Νˈ#,.; LӤYfK "͛xw8~O3r D)0AAA5qQ\\̉'wlJ) Tn9 ?@Lfff5k֭[Gݺu_>V n߲e%4bbfee^J)R -IOO_]&M&d06m:ȶmnܺu렜-M6}by)f. 
b]4Qg0M3Ϟ=O  p8jŦ|RXXt>zRJU|.k xRiJZfUTT~6..UZrx@@@xNNfggl6Rʗt_CRJrk@Թȑ#G^nݺc+@AAAFAAAZHHHDDDĺ\.gqqqW#VJ)`a)..&<nG+*tfdۣ Jaሩ[_ի7bXvÇST))r\4<43j:Ψ+((WQ*;`{~yyyOōoѢŘc4QgcCO, .j  iqunY)DP¤իWPv햑4=3ԪUkD||g.$$^Sɿnܸ-Rʧiҡ)NOO_TVQQQ->l ˇhuǏfh?~|.1)T%RRepr,egg5M4 #px;hPPP/%%en'"pCXJ)UỲhtVh`fNFFƺ ǿ(Tesj'.&>FuVG&==}iG#KID5%%߻͉'V_J)Gi ReG?G>HuViwwo(ϘRReGg@|& \СCJ] RJ]4<)M@99iii?=i&wLJ)U hҡTE ]EHjj~RQhB4Q ].W\oRJ8]T> >Hw$zLK!)> J-M}& |$99y0MRJJBKx M@y~UXXY?RJ9ZTϑDW^^޶EEEY?x;&a:`R D]IJJZZ\\L^^^Jnn/H)* <)P tǎka5M|, U)|&JQA >`nD$m'fR>G(Uv4A 0 8Z7nL:ugvŪUJ8yT/]}@|& %02qD f0 m4MRSS?~<Ϸ\g9@7^RJTd"-O:`PvZb|$jRvmf͚Œ%KСaF5KS95Z$iR5[& 2/_ΤIYIǹ8 /@:ux/^\נRJK*;9AT->Z5m:g͛Gv\U^5xXlÆ #88:(Ϣ2z J)tT HQx XUZGyիWsmrO`Xhժf?$>> Ϛy@[tݑRJJOJ*G>^p8uvҥKy嗩[[](Xn&LZj`0x*'TJ)ߢIReGg}& [M$6m⦛nXl <(-Ri4}!C Р￟+V0b/Z?jҢE ӺukaxvnYJ)UYi ReG?G>Hee3-ZČ3hԨQKɓ'o߾tvxcZ^Eymʕ+0MSn!+X g^/M@* $y:uV~eaÆѫW/^u|M[- ˻Q*uIL#<*)ΣP^}U ar^}qmv5YbC)z fEux3f 5jԨҳpкuk @bb5=xjrEta?])l6|ݻD;xяcRNi&ؽvg}/^z}VOyYx1ݺuϯs'dD)_hтHoǢ@AAAt֭䟗'z˃.@A`Yv-?O7BΝYt)o& 6OÙ@=FT@:u|L۶mK7 g~A(_ml;h˒%K7o͛7r=æMx3q p?6TgggRb93)ʇ$}& #x"""xիRڵ1c[lK.Qx؈gg!*/T=1ogVǗ$}|/- 0 x뭷yf{1N2 6mڰ|rf͚E x1+9YkT#|)3 z"|t\bnշv~SLaɒ%ߟ/WA 1cFnY,n)p-ڔSU@D)k@ʇ$}& e'g'Ȋ+?~<5kZn/[n,\^{[nb`PNqC):8 4x̳X,WpBܪ+k׮宻"$$$+3Rޤ|4(wzA:28Sx?~xVZwܡW`aаaC̙… iӦ V)Cy_)4)> `9`0u1epaгgO֯_?OddV`F(> N>H?2#` Тq,ZEѪU+/`Ǝ˶m۸{la`/p7J> N>H < | g˖- <M7Q իoͪUh۶-@MKzSCΔQ:kY A &3֬Ys=GXXdv;]taʕ<Ԯ]ێgK@F> Ng ȹ@,09s0EލP]2aqXr%r ՁGwAލRUvD)k@ʇ$}& gWxX4vX.]mFHHSbвeK̙O-VO+DUY> J(R 49 xpԹYd ?< 4@ϠAXl'NZj~@`ky7BUE)rK 4qULL o˖-ꫯr+@?5kx xXʄ_)4)> M@ J(R 郪rb] z׮]Y~=6TefΝ<쳄YAT ܻ*}@QAU0 hݨQ#fϞ͒%Kh߾}""_ݻwq]NNW&++\cRL`` ǏgÆ 2<OB{/T|Z;}}PUK@B1jzu1|prq >#G^r8q"O=VxΝL֭-xqOiVktP.QGir*gf@wu`j֥Kz->"""67 -[u]baӦM<4mڔǏm6ߏa?~TBCCX,\.~WsӮ]ȑ#\.l]vIJJl6y0Mbnn.Ϗ[Nxx8رlUabaΝ߿1Mcǎk׮p88~8l߾Wbb"iʢEp:lڴAQ~}"""۷/ݻ{{ $665kһwoܹ3`AQfMJZjΝ;4iwu7n䫯"??:0{ҲeKٻw/}믿FDXb5UVrwf~aBCC;,ROD׀(tmBU0 X t aڴilܸ믿7Sw5kd#"4mڴv%kRRRH J(> M>$ v5j0 233%1=zt1$## 9ŋy?~<իW?#s †,YB`` ]t9[A۶mYb1111m1WTQԙqUHӃ UJ ꫯf̙3Zjy9g:t(>=wfƌ4lؐ&Mp׳{n֭[ǯ{GffZǖ-[?>L2^{0 
|֭[駟={6'66s/:̙3ٽ{79=۷' 08|0f̙?~{&EpAXfhѢ=:uFjrM7uV|ABBBnS5-˪ $49x ~)_}G4Mo} 0?Ͼ}>|8O<٥nݺ.+rزe '%%իWt튊Xz5w$,=)w _N@e@h/̒%Kڵki u֥6 '|3bn6Ҙ={6f͚qs1b[0 :tP2=F[o9x ƍf3P~})aaaһwǩV?:t(ַu1b~izNdd$cǎ_dذa?~>[oEtt4]vOϏVZnk7|3 2ħATTӧOg˰l)xʲzQeXEDD\oƟI&p mܹs>}:ݻwkoY /P{ǏaÆ oQp̘1;O0{3gnvɆ 8p &M:^r%dR> NgU., ;eǎ۱8|ٷoݻtǫn[:$R\\,RPP ""iiiR\\t:ѣwy'٥nFFFk2MSx ۷oVľ*33Sf͚%%;ϱ+K.6lBDDD7yw]NT%%;p (!Wz̙34Mٷo%11QD<~AVX!i@% 6Ν;%??_DUug.XXx>aH֭eٲeMej*ڵk^ٱc3Fbbbd͚5eR2xx$,,K, Dxw  0`ovv޽{$$$liҤkVk߹Tmx? ={v-[޽{4MJ0ꩧ$++K^6mH֭eҤI/iʢESN r-Hzz|ҸqcCJnn|rKvM6ҵk׳n󝗗'w}$$$ȕW^);vg}]|DDDg>`iܸK&Mnm޾}{yxۤQF/71cHVVL:UjԨ!5{Z{ٱcGe 'OK_~2k֬%&&JF;(M@6n(;wΝ;KBBtܹm^H> C;v6:[lڴIrsseԨQ믋Hxx\RDD{1=zrJ oxcǎw^Zn7|ömۨ_>۶m#771|\.-"11$~7m۶Ѿ}{tBHH=zzر㴭|wtԉ.]L^*1\p8 6ŋ` N^8p#''?[ok׮;wd:tロ!CW!XTIltEEEYf̘7ވ a+ux0`<- SIv0M3ߺu뷏;$??-[AvvAAAZj5jYe˖;322m\~iZ;wۯ돈jզy揤_%''W> Mi&\.Wi}O1 "ٿ?f*SN?2dVdDjj* , "Տ}rvo;3͛GJJᄒkRR۷/d#((jժQTTwMjj*ѥ߇ƴ5`x֯_ϕW^ƍ6mZi,% G|у${t:裏6m[ftw߇JHyzJFW~;vdȐ!* +}iuWdu>nk̼HJJZk׮V;v-ZzĉZbb8f͚_~41qqqܰaC˘\.W?SDr#""v:ҭ[7JBJDFF^{Yrr2PuՋ~͛7ӽ{]./cǞv???Y&}a/_ΦMhܸ1YYYg}mơ;w.;)95kFff&nFVV)))oٞf͡Cp\8",,fV+> |)))p à_~L4dn2wy'wy' 2W^y?qTFU̓@ڵk>}:999ގ̬YÇSPPPX~={Y9v~;[l)r:`̙ex8q"J?[negã ޤ7*MEPlpEvZA/Q($ti!BIHgK *$}<ʔ3k${:ki\r;W%ǎ{h47o>zO+Jyyy?w p12336kם:u]_lْG}xO?$oΫo͵^[FYp!%%%lݺn#Gl;+Ƒ#GDDD믿Fڵ֭o6';;~W_}׬U%%%vZ/^L`` {Q7Ȗ-[X|9|gгgO]ݵk:͛7_p)>3rss.jvuVfΜɨQ<˛k׮\.X>M9r$SLdN IDATDFduc ۀEEE_z%s\\JΝ;q\pdzk׮ u ' 6$b߾}߿K~\_HOOgƌ,_lXGvNN 4MsF= .iQJ9锔O۶md^^h0׿jv0 ƌJRrxÛ4i϶m۾wާ7fs믿~>pXrss.))9xq0:wd ӦMnݺ̟?Yfa٨]6sΥgϞ ڴiCz<ϋ_d֬Y̞=Zj1x`7nngĈ<ԪU={2{lfϞ_|駟7O>aĉ1P6lȃ>x^~;{eРAԭ['|f̘A6m+$|M̜9;b0x022-[b0QwtbL&׿x78p v K/Oj}@NγC =Exx8wy'< Zqhݒ%K}j׮9F@@rd2aXHOOl6]uV\\LFFk&44RV8ev;%%%aLLJJJVZvfQRRaYe\.SfMjժdB)IZ, vYLlIKKKtҠA=O ʆ쾍j߭[322w=(uLi!1ݡ(((1o޼ksq=˱҅ȇ~H˖-1c}ԭ[@/=N_~iӦrJ LTzϹ'O\ٰaśKKKOFEE ]u]^O81[ӴԀ>NbX,Nj\PPi9=kh$22ȊMhؐhN:yg34n\~sNn+/g$WQ߿~~~ԯ_cai2U ^us_ܸ}^PPM6׬Ω\V2TFR0l]'N8z! 
ƶm۰w?oqFߏ`ڵq}wfΝv,YB֭IHH >>=zpԩQ裏شicƌa).STTD>}gɒ%\D\.YYYJȬYxgHHH`ĈZ ZXJ)֬Y6mqqqՋ)S[El>H֭Yv-ٓ'i%%%yddd0sLyygٳg*HOOW^a,_\9U5U?r}[JJ+:uZԾ}ջ -++5k^ӡCO۷oiTTTÇOՏ߸,˙ڏ?xw7==eSq`Fuh~* gs➘xxi͚57o6='Rn UF <5jЦML&tԉw}cǎn:RRR())߳aׯ&L(wuL֭Yb)))|7$%%ӧ+DGGs_OΝY~=;v<À{r52d?ym0K۶mٳ' [ٰ_~mFHH^{l6{^~岥-z`\-߶m[y6}6 iZ&}Fi4k\4*4әqk].I|L Y9DCγ飗^=1k,գGϟ_i&v_HttgG(V{~z;(j4y'oI͆*7Q,?<6{_pB)EJJ Æ 㮻b߾} wop+,*TiTpK)rNQITJd!뤑!=' eN#7^zsN :7|æM1cO=T!!!6lHFF'yaƍ(;l6=Ԯ]JKK=)ۡT;v̳b)7\*,,dɞ ,=ϣ3Jԑ>C2-뤑ԡInƽR֩m۶q 7SOEz=z3gRZZʞ={f̙3B5]gESZZJ^^ׯO?v3223g|:t{lv0Lc~ǬY SN8qm۶;y~N СC̛7vv;˗/W_n+K:lt# ,~ CH|6KNg;vWXAhh(-ZpƐ!CnO?ѣG:vHqq1g&>>VZ1n8֮]Kxx8,X?M㏗ j׮Mbb"СC<ㄇ@Ϟ==>%%aaa;;{ݻ=Zx֭R֭[[=֯_ŢEXt)gy ?ӸqcvJbb" .d儅1k,&%%cڵt҅PnZnMXX˖-믿M=Y^Hپ'>,:J){̿4L`T˖-uYp!@ LRѾ ԯ_@IIɉ%O XԀƍTjUyyyJ4R.K+ͦRtut:PDi ÇUJJZ:*99Y*MӔbQ999ŢjU)))n{bSv]l6}vuAl6ԩ7o^p8TzzJNNVJ)剿\)))*99e v'˥Trr*((PE4Meff)S (IF&&rPzm !*>w_D\QQQ }s Ųk]x6o}vƍ#c7o~'W:t(?rnu$H$NERr\{{[R߿kaZ}5G;`˖-r]Iیl$.!"FR$)<o>ѣٻw/.˷ [naܹ}oo#U"N> ^'IRsf`fiiiΒ%KT>} ˪F[l;'$99ف{uF((i`)hu^$ ,x5iii_|X{mSU)'o߾lذAs\is@?`L.tkxg❺{numq&*D)ng…t҅ٳgR,3bF)D}@NI9@g?.tŴlْ3gR\,}RSJsNnF~N:m@o` eD}@NI= 'r 7'ӧy'۷/[nH=> NTG*NI$ (Mӎ޽{wQFqA'.baܹ\YE[@o#Ք"N> ^'Y${,Bܛ.))]x1 `̙8<{\.۷ogȐ!<#=zԥi;q@rUEBdwH#C\ǁǁہǏ'L-BBBvݷ 'Oc]V94`06J!d!J:i$uHKlivt֭z BuH$ ܝ݇M6ѧOƎKzzC\.+V}L6 ½dܕ*!*+D}@NI;e] |`ۋϟOǎ3g륢bϞ= 2ロT=o#:%UKCxNOMmB5NS!tji$uHYf1p@> |^委b޽5ѣG?4M Ч,C>BB舦i$&&'$O$C^`> CeΝ\vK)Evv6o&}/4x,dbг@ټyTAfҥe7EoR$*ܝ\섄u 7,ffǤIʆ[}{3qw܄л"~CI D%"%%qƕ-~ CUN-pogh޼9wy'&Z.˩ɓ<,]˥tI`9rIT=@ ~aH |,UVo<'pǡUiQQQu~FFFݻ%I hΝ;Uui***RSNUsQ +ȏOр@;e5~Θl}] jYG'ׯ_sNx&MDtto#̔R|w_~A4xwb&UQnF݁@/BT(ły@cn^ˣOFAF1e˖ u]8KK)šCBQ HDdQ t^L&իGLL ]v%,, ???8s 9st3[Ǹ' !B\6QQQu~FFFݻ_LSa٬ƹ?`}pBQHDde"qBܕoNgk˰ C^VV `RW0`aB!$ " g@dd]tYVPPp,++kOB!p:$fE1 \iB!De!D\ģ,B!_C"!BTB/!I@EC!T@tH&bۋsrru,B! :$ (vC)u,B! 
,Wv76=5k.˕ЄB!*"DxDDDtkuah 9zQHHHg_&BQ$ ՚_No IDATFu}]~ϯvpppu@!'9 C)VCݾ})+Gj}e }UW}t:맟~WXXg !: :$QVΎҺug4ڴi3poBBBĶYYY[~h4F*v!BTKR!I@yRC=[vj֬yo)+9sfiJⴴomB!fC 9IIIo?&VY??سb9i6j.b!BTCR!I@9ӧOl7n< Y|՚ޢ쁡Wvf+vB!֤C2 ]xҕRʾ'ue```mqpO:u-,,g0 QXXn&r!BTSR!Q\\\|n+..rOKKKO6 Rޮ]ڵklJJJz^Ӵb/BG* Bvs:uׯ_^L&S#^B!0:,q6 Vp`08~^6rNx16!BsIDd(eCΙ.BK/!I@EC!T@tHqQ]!K! :$ (eIDBQHD$E!~*XBsRr2p լY3g)vYs8p@ 6LըQ,9DVB!_ !*@i P~~~gQ^Mʸ\.Fyv500%B LNZDDzՉ'*]q6MӔjU+V8{, ֗'T!T@ 38U~ԦMnu~9sFM2Eխ["rx‡W!T@. ps|_RvR.[-5|wBQHDR}QZ[jҥcǎ`0(|tL>BTF S@$* @=#*== 4M-]ThѢ-}-݁p/ΠG~R+> ɕ^0wޝӧsua6W`0p]wq 70k,>s^^TC܉Ud _"#eIii)AJ[E(_ כܚO# 2i$"""0 >w W\q C aԩ+W\)ݸo#ⒹXF Ç3vXիW^ G.L͛… Ci/˯v€C+,,L*))ʬnuE=,Uv>TjG=SbONqVU;* lHV6#Uooqo6ZìY׿EڵusUW1h 9_ZZz0p.7~2J0tЁ]v{m R~%%%'ӗ*t)Pn[ߔ)SXj6Bhܸ13goᦛn" -w2"4Lm۶VZEq馛y5rdI}5aF`Ľˆ 0a5kmt:d4ի+W>iӦ&p-I>!cD4h@.F3111eAuH035[iӆ8-ZDVdѣٲe >(q/i ^|A)'7α7H#CxGM`3[fM|M~gbcc1c֭˻ݻݻ7._+ QyP„r߬t/?i$uHz'x, {a۶m<Ӳed0ܹ3Wfܹ4i hBBjuruHts_s5/ӪU+zxIٰ 6{6^JFtdУ* r!; ^'Iq@+˗|r:v_3<Þ={=z4fxHǽ"SI#C\&p&Lݻ2df,T4jԈ>p0iw:%UKC\H^}-x +U{fݺuL>u^-k%o#Հ"N> ^'Y$} 𷟫;t`^`y٤eF(.@͚5ygYnÆ #$$$ xm9 Bi$uHEkj ~gXr%Ç'44 o1k׎ gѭ[7dj{X@'@2Qq> BuH$  m}%..ӧӤI TS  BuHPuN@ @o'] O>$&&2gLP\``Ȑ!۷)Sni@o#:&SI#C5i,W5k֌Gn|8vARRya|HPP&Mb~L `wB"ğ%_BT-NNU$ Rgƍ5Jqh4i&n6<;Lɓ dC,\KrUWq/ЬNx:%xg. 
{L&_=+V`ƌ4l ĨQg8N6le˘8q"ddde6nHJJ PPP}Ϝ9ñc)lYz5/QQQA'mBOd&s@CIQ)5> j֬zwTNN4MS VZj˖-*++KK=#n;vպukվ}{ղeKl2iZjjժXEEEs*11*--Uwիڵ޽={RJբE C.]kתT*եKձcG駟*9fVVj֬?sNo>u릺vv;OW^yz衇TNԄ ԰aTIIRJ)ݮ~a5qq/MΝ;ՠAT@@\Nd,qa7[oU6׿B?!>>^e5~Θdc`ݩ҈cǎ ]veɒ%|ᇴlh45hE%WЩ0RbbˁFciժs&Oӧ5k/ 4{r뭷rWKÒڶm˲e(--%..TΜ9S.ܹ3sNvѣGٸq#deey~;֭b`Xb#Gjk.ڶmˎ;غu+y^388G}f͚ѽ{wիǺuPJm6+}R&((￟x '?;YoW^X,Μ9C&M?`2PϏ3fPNFv Lr5jn',,R\.F#gΜj/^L1,^oLy뭷0nxbBCCiӦ [&++F1h &NyR @RRy~1c -[FJJ 3f׮ 111]~ӧO?KF)*)k~BǸԼzUuKCUaVWg{իY`AH>ʜݠլY={xbo?6mJ-hֽw}lj'x뭷-{aÆ ,]N8q^"DcժUtRJvvy:t( ̝;QFa4|l߾Bf̘=ҹ_zu֥O>k4mڴ\GL&wu?>(pzUy 577r?*SXXל9s&'OQJqax^|E ~֭c̘1_;<^.qqq >0lذrs ŅI됞zw@z͜98郟_Xx`0P^=ڵkl6/ӴiSƍa֬YDDDФIy^~ex7oNllyCʎwmƻC=Ę1cx9u]t!$$1c0x`^~eâE1bD{E ݻgG?Oyׯw}PV-bbb΋sРA2|*qU`0[oի5:T \-Yf;$^_~aذa|Ǽ{׏1cPTT8Εݻ=ˑ{CRRR*Yغu+ݺuy>|8 4EEE1f̘ rMӘ9s&v[1+Xf VZ… S'> ^'YxE0 F[nتb|rCuUWU%|5w\զM%@k??ccc{kӦMߨYMFb߻ߖZ^[vjܸJJJRVUիWP}vRiii8N~eXÇjRSSɓ'/k߯UjjsQ\sJNNV[nU[lٯ_&55U:uJiթSTNNڲe:|p4M8qBݻ\ƍSÆ Sj۶m絁g+..V{QׯW?Z߭[7u]w`MӧUNԜ9sTIIڿ*--ٴiٳu~ Ðɑcǎə3g0 IMMxٶm 7o@>/߷̵-""⑿ooBU E&Iڴi#;z7Nw.;vN:ɼymXp4h@z%%vgƍӧaw!vp ҦM1c֬Y#;v}J^dʕb^z-"]vXyꩧMrsse̘1ҰaC ʄ X^|E uRmÊa2uT%//Ok׮mlٲEbccer-Ȓ%K@w.Ǐ^zӥ{(C۷ˠAd}YFz)bX>}ED$55U5k&|򉈈lٲEbbb$))c߽{tMv*:t]ʷ~J :t ڵKll|g""m6mرc;pKJJd2uT)**SJV\7~x),,0iӦJΝeҥYݺuN:I.]d޽~9okO5^4hѾ} @\iH]ԬYS%\'3f̐k^3ɦn-[HNK;WYZF^ԩ.oosǞx4l.]𸲋\os dԩ&M#\qC. 
,UVɲeˤK.2rH)SȭzLNNѣGeҥҼysIOOCIPPPR9~믿.|.\PfLY~ԩSGVXΝ;]O8Q&L aӥza),,7|S:ɑd իWKaa\Rv*III2~x1L7HQQ;ҲeKWrV0 Yx4nX֯_/vZu։aҳgO={v<;;[j֬)~dddHNdҥ"ׯ9rD6l 7mhhlܸѕp8dϞ=ҦMY|!!!!r˷~+yyyCDJJJd޼y'Tj/H\\\i.4\ըQ#uРAҾ}\eׂ0 gǓoV6l̙3bիg[nGkq# dƱc^0Lz_:6lpu8Yf9ǧOKOOϐso6{{{G{xxSfئMNիbcciҤ+=<<cܹ.c.bbbh޼9gϞ-8E\jժxMձlnI<==]j(5L۷ƍ <[v=?&鼉" w.QNJJJ~9BÆ rSItt4̙38q"cΝxyyQ~}.\H֭iذ!b駟^+Ӵmۖl|Avɰa#11O\\;vt-zBpp+s :իyiݺ5:7&''8>n"((ȵAhsTi`` ÇgŊ 6UV1j(2)))s\fM]ƺÇcY~=SNw̟?ߵ:de8u9tV]8g'paV6Z%BUJ+өS'nvM>.#:=֭˄ OjߣGDhhh}wW?~xZhzFFw4i>xX0,QQQ[jƶmZ֮]N^gϞ"Rzsjl6ӧOJBJ5+((hv?ۛ믿8 "''p~_ر5jwy'7napBZh~ ;Ö-[.Srr2999T^"N>Mƍ1͘L&JJJ())!99ٕ@]J%KP\\ڵ֭[w5hҤ ǎ 9z(͚7Ę1c0Lڵ ޽{]&'Opn۶:߲e ԬYm۶qFN8App0V+WұcGv;G%66pF]D0͌9#GϬYXbjbȐ!L6L"]C[}aŊϺV]6 ,87MD\eر;L ?ҥK+Ktڳ]y݁?*In6o̫ vٌ;w/[n ?/\]"ݻ3gfs;vllhԨ SD쐝' CjzlL ҶFFFvhРsrrNNN*..ѣ$&&oҵkWعs' ,\Wiٲ%'NగJJJ櫯bժUX,?^p4.]ѣGO8x =z 00FvZN'99Kcy=0L׏pI>#N</zKJJ(oVرcg_$ ᆪwX,NVZAzz:ׯg֬YDEEqua0 N<ɜ9s8{,dffBF\йsZEe7)S/#,,@n֮]K||}p}]233ٻw//pZ=~<|}}_>xyG#wI&m 6zꩧ<׭[3}\].StmvΝKy/uל^~eV^Mjj*@Fff椸a7U3|Av""6v:))-Z{F)X,o~wtt-Zo߾)oڴw\ <==[c}El=?n&T___y $44b&N2x`z!ùsշ[o%..ᄏfƌ۷/QQQL6͛|r5kFƍt}ȑ#{L&f̘>|8Zb„ xzzb2W>>> @z|||fٲeˬY(x hР}||h۶mÐY`:<3tD]=2LԬY~bs###iҤ ͛7o߾L&, u֥M6B.]\M֭y뭷_>}?dذaԨQo,BCC޽;㮻QF3)S7ФI֭ȑ#,\^'xӧ3tPJJJU3fӓg}Gyjժĉ&((n1n8͛ǣ>J^۷oK/[oG}Dxx8:tpTƺr2ex   <<^xR瀜s7E݁ 9v*888;SҴiS7Vu䐛KjHIInݺ={Hפ4 ~fV^͸qHNN&00f͚Zy7|UWQQ֭c8pWLǶn,ث_~gbJJ_o0L"R`2D^^^Q͚5{߿?p#pQ8Z oۋsssZ,cOhb9nF.pdȐ!|,`w)8^E```Oƙ3g 44!!!=/////Wq5kOi&"0sݿl&772pAPPfl \m6AAAsi]ðV+X,X,t҅{S^bfsV5Q~~~L&l6-$Sz{```0 "$$ի=;;"z/A:uԡ7ownhrYҥ˪wuPTU<`ГsssG[7n'`Ĉ܅?hͼl6v;?bٕppRJHHf͚̟?=z;*Zj̚5Gbٮ/0oW^Ƥ?׉\%*?0H ?~;vR Ue|DDDHFF9sFZl)+Vx1LRTT$"΢Sbd,EDDN:qF;dΜ99*~ҼyU>*o+'oL&Sl&)#+ha Q?7Ե"Z~*M6a*KթSG>Ua/y{} hsQp΅}֔RW4h+R帚pn]p!;w7"A]z{{Ӿ}{ СCr-YFNΝ]Aǎ]l߾~$w͘1c8zH0X^i2tRURvoooݻlܸQl6[JK^^!""C򤤤ĵMvv?~\]ەHnnk0$77u^ ]ԯuaHJJ̘1CjԨQ:<x龏¹&m6mT]絈 tzVՖ#lGwѣ4i.Vx{{Yl6SZ2۔Wb\&Z]{ZeZϘ={60 /eL]7Rچ`'gA999y: ߦ@XKp~~F} 0c{] ~RJlDr\ H`СCq1|p~Gl6SUJ~KW 4E*J k)g/@_|A^9skY(W_}E~>}:v`3αO\.Z: JUQ|fuRp% BܹsС}vᩪBDHLL{aȐ!ٳ0к3RUh.ZM@JC]Ǐ{o?CSW: /@vXr%v=x 舳RQ: 
J]ܵM4Ñ_3`LBZZ^2l66l[n̚5B;gL ӽRJ!XJ]& ' x,)zW޽;+WH_DƎٵkqVÁ8)W,(iRF>ѣ9r$?#ý*HOO_f{gƙ.腋RU6(uqXeZϮYF 3SL_~lڴIEOfڴi 0o0Ҁi8|3T!XJUi:K فaݞ/~;=tRn&^{58yzRU fRULQQQW }r,b(777筷bРA,XlfV#F`„ 8pa.`խ~qoJig\_)QCMLLM}=z4O?45jpovZfΜ/8pvRJ)uD+C49F@@̘1CaY6Mn*7pCia)RD+KS ,6y8qB>MN^n/, ؍I)RU& J]Bp˒Hyꩧ$550dƍҾ}^ -RJ?AlfղfjJVX!V&GcJpppiQ`H)ROD rwHBB8K|deeɛo)nčǯRJ& JU-uqV$**J̙#gϞCeӦMҧO1\ݪ:J)R& JU=&ƿYq\nϤ'aɓ' RɀS)RW#M@I[>\ސbyAí{un<.RJ]4Q @䥗^1 d0 q8sNܹsiaqRJ)l4L@]1@[yo&ڵkGǎ ӓRSSٴi9}RJkApp.]JKKz׮]wQVT/0L[@C4UJ)T%MW&RU,;O&)355u%$I8 *RJ)uQ o'@XXذ؏srrRSSvohJ)R2;RJ)ԵCU!&Ix(RJLU!""RJ)k4QRJ)4QHiORJ)R& BtRJ)4QRxhRJ)+4QRJ)4QJ)RJUM@TRJ)u)h*D'+RJKAU!x(RJKAdtRJ)Wh*D`)RJKAU!ZD)RJ] C)RJ] (RJ)*& BtRJ)<tRn:VJXQ*2U!x(67SN@cQJ| lts,J]4QNFWR?Q0搔R***a zvLU!:KJ79fƏOZrw\JsAFF-bž,wǦԕJU!ZDJ<xN4gywǤի3g7nc=iZ+M+*D*U LJGyDoooFE޽|nI+& J)uixhт0wǢ S6Hʥ TSE)m۶YJK?BtRJD)U5gY)U& B4PRM>Ι2ɇR Ct2R`#TmEP !XJU>TCM@Th*R!XJ]& B4PRM*J8M@R RUf#JCU!:CJ7(m4P4QЕTZD*J(uq CJЕtR Ct2RB(UEi.NU!:KʧCtR T!XJUQ:KDU&JU*)UEiÁR RJ]4WJlDrh*D~(UTJ]& BtRJ(UEi.NU!x(UtLDz.IJ]& JU^DDHJJb|g>}qYUY>Sn˾Ç3l0&OLqq\(,,<1|W8RRR\]Kf|YV6mPZDDUR]!X <#F'^z1a .s=wɟ駟.W^ѣx=v uѣG3f =ˋ#''ڵkӬY38x ˗/f͚xyyQ^jk.]6͛7LJDN8A-W^j-*$$~;3%Km6~m/^Lڵ)..g!..KҢE f3gΜa„ ̘1{ueIJJ:u/55ciذ!5d2CFF$$$`_=:u>5kF l$%%Dtt47DfHTTiii?~֭[SF ׹*,,dM&Mbpׯɓ'WAAA9rG@LL !!!O}'%%%>|Srr2[&22tR MYYY̟?L>5kb0ͼtx;v,)))8v;Çgɿ{1ǏJvv6f֬Y,_{xbx衇<677x]vANN{f٬\UƛodB_111d"11˗ /z\ Xd 999&ij*"##)**瞣k׮3ydV+>>>ѣG}v^~e"""HKK#33uG_Orr2<̞=m۶a2())[n̙3Ƅ 2e ]taܸqk׎RTTDqq1/#%%zdfԨQL4Dnv:vȱcxwXd ժU0 |||XlyCff&'Oaaa={3o<222=z4:t:w?C (%RVaC ݻtw,J]z2dZr);VݦX,M6^rrrdɒ%Ҹqcٻw8ٽ{Ɏ;H~aywnKRRDEEɆ DD'}HFFmV&N(6Mrsser-Hqqy$$$ȀdRRR"o\wubXvru׉a /HeΝbX?u_-YYYO?T)?XVY~ԫWO֭['%%%2by{Ν;%""B>|skZe„ RF ԩ۸qDFFʑ#G~5kH֭"/G Ðݻ˰aĉ-=ԭ[Wdݺu% ,%::Zn*aH6md"",vʼnoV%99Y UVb IMM Jn$))InQFgyFڴi#N0dڵҠAeΝȈ#СCb٤EOᐬ,ye߾}睃EIttʼnb[l 6ȉ'$::Z  ǏL۷L6M:'8n8p`qף9 BtR}ժUm}ҩS'#""dKRRRQ[lח^z۳~zVX[l)|r4hƍ !11bi޼9XƉ'.ngϞ=tܙvG~'11w';~:wLᦛnz$$$y+{_4mڔljԨ]7f ~Jm߾ZjѩS'HBBCK/L& p8|ٳ\ p\[neо}{ 8oҸE"ׯzN-on'::u'>>>:u/k.^y,Xy;v,f0gΜ)34.l68r5r""4hm@@iiizWp]wB>}(**yYf""XV .>FTT& 
___jժueҥ>sAAADDDdJۮ]q%_"BXXO?ĭ|c=VcbbسgO?~rM>Ѿe*eҢE &LQx9r[neԩ,Z{a6tPYp!l߾N:EXXfٵ ֜9s $==JPPgϞ%))5jгgOx ~g2330a=\իW'77UG> ǏwnJAcڵ:u瓗G ,,/Ryf3?=˖-#%%^{lvVD\@/d={6fٵ$ȑ#۷/=%K0}t~G8ܹs8q"-[$44׏SNӧOOh"|||1b-">>3w\6lHƍˍg^:EEEc$x{{Vdذa|Z §~J~\CCC]%&&?",wCr).\ȩSXx1 6uLJr/5ŝR lugqqqʉ';tf9<,,jM/M4a.9gyxxйsgBBBXbK,/ٳgӣGDZۛ?ŋO?1rHzI͚59s ˖-/uL6uQRRB׮]h߾= oeŬ[jժ}Dxx8;v7ȑ#L45kn:tR̙3dddpwФI, ˖-cѢE2c z~erssرc_j"$$zEqYL1L8phZn]&|~æMIMMLJ 6ҰaCG@@-[qtЁ>UV{qI>~2sFJթSOOO{=/_Njx',\kkFڵIOO̙3=:w}&I&HL$wc" Z%:uimi ZjmQWA@ b. I%>I&L?0s (~ g&7oF#dYƑ#G0i$@Q۷mmm7oN>˗СCpc˖-=z4V+OQF8qbhyf8N̝;0L7|N… QXXYqI̞=HKK;#W^y[nOB;tXf V^J 5kIDAT~?:Yf!55-Zfͺ/ի ̬;3 cTKK" 3s{&OlϞ=S[Dj'̚5kmjllno_ `ߎ:p8JB\\B*n;TKQtuut"..f9tWQ.T*8t: \.!`2VzV Nw> 6 & >>~>/4^etvvr!!!}~x<hZlp:X,ұ%jz XQA9j4T*Ȳo^M/ Iǃn|>fAev^wv󑐐z n7t:t:dYFOO4 L&~?`6@ jC/ nw05 6  MQl6x<DEE!!!!]z( L&%/ڵk8࿤; @BB¿ںy(BCr>!II8!}cȤVfGn-BW|Z2l^BZ֨j,ˏ\VuwGGdžXx$)4wRHHHpw|{+oAwy//1;phqVo0Ro}=}ٟϵJ=9{\p6?v^YlϥZ`ecXH|]!q\5^d2e$%%x(r 1ӧO4~w%I; /MLLa؂4>..ڱc.L6m[NN PrɤIt>qT*הC=ZXgf4MP8>11q1c㵵mMMM]^}zhU*U s7M!STщ;d8Ç?(JpF  Y h7n)))7W~~ w\9wTWW$IkK[[αcra%@J2seeCO+>>>tf555]1N5Q;_H󖔔4nԩ'Ϸx]OVG%''ڵ뿅~hRl6[0QT*>>>>nJ(ذa>,_|Qŋq뭷b|Gtww^P.{gEMMdYlzJ_0 ^dgg? !}{L@Aj}jR$I!l4[QƌKQ$>E˲ܽ5oAՎ={k@OGGUY__zebYVTVVhRhkkC{PV7:;;_+V$IqPGlllߧf=z_A:^s` % BpA.`0 ("rרQ$w՘Y}111vuu`V~-j~U*|ekۏ㍍}>a ]җn>Gff&ƌJݎzѣG1jԨK&t7n\h`A||<`2B {3f L&P]] ^AύT}IoV1!: "u0t<;eʔ DeuO?~ *%%!Q-@eSz| fXbؾ}3f~_S`0(yd-̙#/E[ntb1bQ^^.իѣ޽{(TƊ8?P !D(,_#^|E/EMMMb̙"׋QFf EAAӟ4`@@<~_!o[1sLaEmmlWB!w$Ir>L\uU\!?,^}U!˲ƍ|p:"//OZJA!˲7oxE{{0ax'E v]<K۹s$I(p'=]XOD+Sw޳/^صkפOVZZ:z(ʩݻwO/--;ͶMQVCq+;;Gtt4 j/OM6aÆ eN2sArr2`4q7CnC\\ꐕ=zGq뭷bϞ=p\~#`ڵek׮޽{ڊcǎ8SBs=`„ 0())Abb"駟ظqzL6 EEEhhhBJJ ֮] ǃuap\8zyϥFslٲv^6m׋ ̝;X,3g:::P[[;h|瓑s"** HHH@YYxWq 7`غu+zzzpaL&yزe jkkq ?FKK ۷c߾}HLLıc97w$6XED߷ V/Qm?KOѣ(3+#>> f3dYc8%Zv„ hwj!eeeXp!QXX/:u*RRRi&! 
^mmmCMII^?M$dddVt0e`ӧ`,PTXp!&MNٳ2e N8EQm6о͛[$I}%0!"#?ZRb۶mGssshG@y0ͨ(PTpf<{l߿Xd rrr+`׮]HJJBjj*-[͆:T*l۶ oV}t\z1;ضZ~=RSS"::۷oǻzHz6l1~x\}@zz:N8zSa6 JLu,}CaΝGzz:G\lܸ6l]wٌ(j,^gpή};&D,"?#+$$$@$رcxWR7``ZXXχuuuxgCj5k6oތ{bɰX,|x7`H8b߾}x`0`Zy5o<߆fCii)LcXQ^^Y!I=#F VwƋ/'O" b/@WWנ /ěox$boĚ5kg9rFl>caҥ-}n ^G}}}}l2TVV[o\uU(**²eP]] ͆{ K.l4\9%S{l'?kinnܹsh( PTT뮻.tc۶m/Klܸ,d2!&&3gp&aڵk~zdddYYY6mz=N:LJJV+z!$''#==xPVV okv%~333a0/ 9Ʉ ##nj:DEE^=ʕ+QQQE!;;;wرc1rH|8y$ 4qa2SOACRaԩ8|0z-|Lj{ddd ** j*رt曑oO,˘={6Z-dYƑ#G0i$L6 VΝ;_O 2&O4`ƌ{hPTTزe cb\Wc}03'" ?3|vc_r 2ۡh`X INg踅:q˲$''J !***Gkk+f3bcc!2n7F#~?Zm(A||<-Kǃ@ ZVor!66$󡭭 qqqF d$IZd?Ӊ"<XhQ/ߏv!`XB,;lZh<^>@ AntP NrBsEp8j?!^/V+ , ZSga֬Yፈ(\v"+MJ.-YG᭷B u됔4a7JJJP\\ 0!:'NB'" cWx,Ӹ;q-0oK4& DDᇃ:'%K wth@ta\(\q}@~(|gL+ ]"Bta cBDXEXEtL@K"K. Q(Bх1!" cBј Q᠅(Bх1!" ?B˜NB'PNtaL@E(!0& DDa%XD%XD((B˜\"/]"09 D Z"/]"> D}@. Q$tID(Qb cBDXEXEtL@@w@"_^s`BD~PUU7ܱAڵ 8|X(QbBD~XQRR» D@J}Sa (l;"" eee1c9(L !PSS~G?a(,3"dP +55Uzqm!--m">n76oތe˖ӽW3Ј"u5w D4$P5܁3& DD`!`ph5> (1!" )!DKQ#B%zҦ[IENDB`rally-0.9.1/doc/source/images/Report-Verify-xfail.png0000664000567000056710000040600313073417716023657 0ustar jenkinsjenkins00000000000000PNG  IHDRbgVJbKGD pHYs  tIME#[ IDATxLww-I N&kVZ-OJuΉst/0GaH1tvLHqKJz 騐ކ֬lW1{[ i ̓v0->1wD&@ɜdyIH}c_gK4"B!B!B!%!B!B!B!;d!F!B!B!.!B!B!BDbB!B!B!YB!B!B!Kd!F!B!B!.I޴-[ɠ~,|gk_N'n߷I޵o9q:;b_>ϡ tN/;pe'y(sB!B!Q'?m [ CX-[RxGɈg(lLܴؼqF! 
Nrl!ebsѿ?v^SRh!lQ #<;Oc/ʼ2Sʍ73ڢe [*ifIGtG[0];8I1Q{[sZ>6:' ɈuϢv -ĤdPzfkjbBEFʭ7Й^?k}"|OsBOy5z+L̏ _aW3I'3~>rMuQe OKw$}WW4OmE4♔8s }$1߿"'>'8TnN'zO*8x3^<‘L%I{{d(zN';8()'#e [RLrܴتPZn-yP[<05[zL[^Ɉ:]3+J\2g|ǵSƖ-i3[ɧw#yK!u7|{iH5+d5wܸN_˷sik8 'Nq/"_n3K-5Ӊ0 n+׀%r:98]0;wޜ|ud/X_o3~}{~BK+ڸ?o\B!B!o5ć3?R᫉vwϏ3ygh|0^Z*kXpy7N|؎%ɶ'hm'rnzG9`y`e(3X.-$H֥bٷ*jh>{vg+}g.ђWE2CGG1-O 'Qe8i؜ӉӞ̄ƮC-RqcܤӫN}$S|'I/iHX gƘ`W{~$>u8xvO -KI;i8Wz*0sv?eHo.Hε۝\bv6񭯛/?XFw0bQ|I3;^\t}r`%i[zd',|M /#09~1 ]Mߔ{r\ sj @ps&\|-[-?5+*!B!B![)zM~/䦭0315)df26x.~ZZ[͝G +]NٳC{q roW( 9ϡ&zZAKaՆ(<}3&gRNf> A~03q{^/z}8<{ۂSE^z13c"?y܄si&e-RXKKQGgfiL@!>dB Cn츇-t4;/g$?Sǘ 3>F .gi0J\c;׸~&p[͏Rz7bvxճ(;ɼrS"!gQdo~.ػTL}"oa¾2b!5avhXؚ1Wo`mvpO|;W3O[~mV !B!BqK'7M)qLHYNK$ .SH!vkxv{m"i@Y -vy?ŀoV| g Pd:3\* QG)l"Gˊ2L頖|][uVKWvRLR&i3up.N4g 3N{M]'3h=N)p>)@Z.&[=\^tբɃU;3ϧS-li"pF?Eg &ک{r[gu}8]v2З|e䮎g(@JW}!(N)&뷏8hyֳ}ՎPg8b_7 sa/d> olX7K{c?`!fK?/luU%\؂J8ߪlju(#wy%*$V|߅˥k?.%-H##xA0 -+#fuBNls6|_VU{Jd`gJ,;R&ߎ۶d; m#dmW^`cK_|9$o@<}C<ۗҿɃSas?g[wV]Rvl-mςaT-b3B!B!BR0̖?& 2wkwr)ߌȏaJյd3jӿ 3k#@])7|QHY Ozu<n>3&h3tOĘ$`Jԗ;￀=#D<$ qIN0Vٳ|(r׉|5ٯ`ԕ c2ff=cc*aʅ^p1>{gN)#Khgre|>Dx,1?3v`1ue_t5£<0A7W 󽴶,R&^~Cu壶;iU<-w Ӊ'EayᡲǦQXkA?4NF<-Lom'S.ͣbY5L>Mzt=NbіΦ(̟*PRAʪ^%i'\&My,x8>thIޞ׍&D7i7AtסD^LNѽ/b4]MF(*vhMNiEWG9i*twtDxbt1:ph_\D\}D{Po\{Qx1zq1FX<<&5#G^Dh4c#GD9=zL?^c4c##G:Fp4zȉh4c3Gcǽ\h$~:}ȑϣ}#ё?D7J?s=ntKgCtHwW# *O4EB!B!.G$z~..^mW` ϝ^'h.8@Nrbt:SќCoD} DwY#sk$\P,/{k#5qNheWTOc9O8mFQY^Ƿ8{#֬[ۙr/ ]׺>ȫ+saV!B!B!W,$>vgoHewEE!B!B!v`~Lj(x?mRB!B!B!ğܚL!B!B!.1B!B!B!w,!B!B!B%#B!B!Bql !B!B!B;l6bv!B!B!B!öDѨA!B!B!WKg=)ψB!B!B![d!F!B!B!.!B!B!BDbB!B!B!YB!B!B!K6b#p`vJLơ 5ݥC ؋l>}m IV200vWݕri7Y((qаjfh{XP (XGqr=T+vsaS).(BUcKSF[u9?!,.?UA׈{P.͟=jfOņG0iW?3͊xB!BSP1OX Fq\YE_a-vRPK\~Zy X~AA/,yesRPG¯\#0>UJuE5>qx!FكzOv?ʜ׃-ޯnɏ>AW.:zꮔKL=ꏤ}2=Esffhx}99Ge?ϕ˲f˲[n]42qf|)^v~]T53]GG|i?u&ƏUQӳbZCl³z1?~_wx!B!a8p΅x{?1r*O:򟧱ۀ>w^̭}%#|2pf[[b?*z }*~6ewg?/ 1z^v04=Bu@족ʂŢXkpǿ֭ OSv數NE8,7a6檡ʊREӍy2j`Q,& Bj* `e{).6EAQQ{akfb6jgaS(..?6X,(*\7D.XhZevlTWcv3X$oh~n b50LOcMW51Z=VlD񺝱ny OcȳbNdqӎlhOu5PI׃>&3j۩7L]uz6@k$][*X<#'ӓD/υΊRՅJ1V%YM>nͿ_YmԸYXn\ ( 
M}𹨲((TΥz((AW|c3Xbw+c݌16A~4?Knw OKo=T)v>K.P=,kh l]:ˀ]MNV߷q-4@Qn/ot_^mFW>1p`]l1iIMs|>>z`s'E> .-"R!+G,0# x`ݍt+ m2"ǫx#Ym0\GWͻ90v=%M?Ql}kq}b, ܸzp(b'P7v~ IDAT׮2MO: z Oϻ8 ʹ#Qng[,K :^n7i, zT|y637nǷb7[o\#B 9Ñ9av#&ӓ$0=B4*LDcmc_xb((^_BMEp42mc^ 92'm7|\FB{zr8m#KdD3c%>[Tc$n7Ӡtp<=7Qg .&m&(ٱz)|nMc5]x|3Sтɿk;Cͮ2JLfb˚L1^w>uU2creЛVѭeSA>ǹ޴ v4It\f}|QUd(۹ag N.V.7ɦ&JW@GfhTzJ f*Kt*H~zl4 Td%62BFRzc.ߨb/,R3(6ZgV{xi3֭lFN;س]4O(T5v?,   !+ïh治3f۫. }IftF7a9јiI_*.81=>:Zi4c6.xܸnd)ų c:E63 p&Jt@Ik ӦJX+*}SC\X^Gh07?'*F\#y=}"FT5`JNegELTVެ8lQnLF"F 4ݣ[?<1ӑh \=<'~`$BĘEPʼn:_r^r5`5%mSth`*m9FrRSѳ-Z(HDrZAZ$DD,0  cӵB0Q<+F);N@ At@Dc!hЯ*r7We-M4B0D1WZ KOi|WF]4͛o;Fd'"63fb\.:1$!SACt=B@ӡɹ>qyj;0ԍz_L2cl#9f`$Ol4rQ! #- e^6ݾz NuzA[k8tPL]eFZF&iڠuuo7PZ{OtmuYB0^w#uuzuh#7 @$H0d ;u+bzWw?pΗ9uX# 6r6]cz~_ZJ8؏ש-efwA"LOk.kGg Ub2heL\qE]g$Z}ԥcED~ج aEí݌>+I~r@<賱Ep NR$9h[EB\lV[1!ӎ,o_3mX8J6`Q^o"Rʨ6*\#|LOni!Fo0aL-q{Hʉͧ g0dEW:(FcFfkp)%/hїGGWy رY`S0 A4]VNSWLi+ xk{֏G<F Fֹq[F M-&  @-XO)h@gvPX,{B#Jy1d:^nQGr E2m"?azp4Oa;}==?tK} ŕ򍱝!?],5M?93]$ 'U` ץ TyM&^n}T:ri|t{C t㭁*j{x>m0iيc7\CL챐5dL0ӔfV( Ti)3f~5dq8 4NW]=fz7cz~.Q1H㿫8mÝ^5 6I/ʢ3+73#׷`Pxcs22K0bXج``3-46?ݭ|Rd'>b.=VĂTt/ܙRy n#d0ff?~]lีry7nB!B?KL3Vwg8NBU6|/7z &V!R!d)Ffm|5_昜)GXe3;>5q~#Ľ`&A$D(dQlL/lvw;[k$ T@vyU{\4a {]W^tP|"'4ɀwYb -4qz>"nLE>?${Zg`|T( UCUR ~8.+`앍;sy&Ä1x/b lDn k 9;g-@S#q }fwG݁7[2ec" Ve3H7#-KXQv,ezFvGCiXWl܊}wfIa^:8bkOK7Im` psFǛFhm֘bm?tI$ӓdf Ǡ?ڹ@Pӓi2nFc鳊2=POxeàğg4ts)jw<.nRY?tl6'*_km(bc2od@Y)ЅG͢tMg\?kx7`!B!]I(=ZQףL5_w#3`F~F?W]Cߤ=L9~&cs4>㝼>$BsiGĚfYb(Ϟ'l4MxD5΃(RnqAg4c͢tsң!Mŗi(("U')VRPJ:(FF^`R!8ȩ3L P3O+Vlu=S4NHd bqLoAO:=;Q S\r9ԝFBQ,/174aMCPAk1Q>k,u nz 魯sl;Gb[^)^ /ab =#?.(:O[6N4諱( 2K" Z!< 0E _}|¬&+դw A$y/zZRF-)^}K]$aSBiM4)(a-KΗGSe>hAm~wcIvPm71Tcb=]JsF/*03y -Ӭ*Ʈ bk##y =nX=Y8ܕ~ &~[ (x&lYIn?Iғ\sc66s)ϙ9wzJvH{~Eƫ\5~EƭDcl\>USlc`V>d !Bqx[/~s+|fU,DZ9l%V[7#f+9Vsk3%aO;) ߞgi:sSq }5#9y4fk8);@i[lY0; 74?`$^C'Dڤk8ȿ']6 Kw^X2y^QzKwto]w8{FGs]nZPTlL)Mδ !B!XB =ܚL?W~Kc6^:HO =ؕ*T A6cS3Tl u-rf{4c6H b*ɼE qiC?B!B!qS;VttXKz/5Pr#OG@IQ],R}zS!׉n_d+Lmn|fG+vk<8YzT%Υ/J8 
ȩ{hp'l~v']Ƿ=vݺY.s2'˹;D!B!B&B!B!Bq'ɭɄwɭɄB!B!B!YB!B!B!Kd!F!B!B!. pB!B!BܗvKW{Bݲ#B!B!Bqg[ !B!B!B-#B!B!BqBB!B!B!]" 1B!B!B!w,!B!B!B%#B!B!Bq$Xx[(N _1f'iqQP[EmX 5QNȸl'&9IA Vi_dqRP x;)(pR`-[bOv8~IlqO>z8)(~amzLޖlq^/4c-(v %[!B!B!ğb}Bf |GqOӧV׾54|Ǚ+Pw0=>}'C;%Ӧ}|/y叙hr|nPzʝ8\59pd^Sc)zFH-{.zdݑN!J?0N @.ƛ^cSDt>zg.08Kҟ|p:E=Fiw +ג6-B!B!: 1y$?C^{/Y'`Yʍ03,~~Iq-b@"Y>WN[۳9^~td9吿*̮% dwD3a2ϋ{*Rsk/PW,"  pO}\l pۯ*9>`AS=`-vRP|wmXꞢ)E&#)8arerQG֋ڳ>6SO9!^ɾ鐍Gz }" ! X`+EǰXh+,Z7 }2Y4Kd!wwx !B!B!BO@I*_ߚ=Aw{됺1o׸kg00 n(46>P~zT`lrC=!J*9u|>_,z7KȺ[~E]g*WGS?^v/k==GS9%9o<- {po7wPsp╇ Oݩ07wd%jⴍ"î_"kO+Yy"dr)7N㟺Y'Y׮0Hða{^/B!B!B!~ }i馝n'y` icD񟿾PORް!zX-ہk\vp,b~Ylv4=}^Z/8m3>H[J3jw?^:ڊ2Gsf]Q9\,nO 5e\?ZI%0v`5cz vLEOt}>`+3y0&qB*MOdem s3P6M/ %n:LW M@o[1*OR$n!DqAߝk Qg$ ʩ}%=Crἂ蝍& 3B GRÅS.'RQKGM^\V"%P"p#DHuG$)x'L*Qbǚ΅? 8{{k='^?n*X ES^uq$W53&w~\(~d%xfs}r $#avgɣ닠[N{P33Z 0[nX۟} L4>5_|@Z">}rf)F[0GFSH=!3XъI<,$&aoIGpP9u*iCk{><x7l&5G*#WS~8^g;MwM2jڼ{<`QV}8)ZB!B!B[b"s$tpK1YI.[w,&#zqڽ&q$#EX .$ :+}/4D0ZFų jDKsZt6Jc<‘ѕ-?WYx/?;V9ĺo1S4"c޾-ts,y[SN\iQn=U ^񰒫bQ3^)P!SLbNDMٳo]%J@BB'OPCsuYʩ{ݛ'֫u-v|js}' 7r@[yWv֪`Q( IDAT[6/  -ms #H˰80ԙH|G:9׮o!B!B!6ѓDgSH6U^v?q/LBb8u2ω&.qb:4cb>^|=/X<9TM2~'F&kibG[OL (V'fhgf3 EBcA ^} !B!B!DL6wÜ.cy WǸJf.`{-2=k &ێ-G _.W`0qUA24?y?!=OVAH). םB!B!B5cw}1>GR_\$ ~/e/@[>52g6~cs/ fYW:M(iNX$B!B!#$dNqmAI,HIe.9by4$PB!B!Bק9zqz;kPIz5q9<GqV@Ff"`CDžVn\4o^#sBCF@;>O qigTPq.t 2`|qCAW]0-4Ns>&xÌ>^. `uqZȫC8iO V8'hHkOAwKs(â&pщmDFb+S3y W.Qه ͐3.\; e]w8$ K/gt1v[Дyb P0w|npb7hO*9MuӹR'B!B!aDՎ0F#O-Z%q)Al(]i׀|@׼3^w"Dg5(]w09͘Dz$ӷpn;Ur&X9ǺpTqk QkEahhscs$ JSIڛ9tK˳٢LsD"x I[i}BKRc)$37qm-cE'rPYqOhqx>sۿ%j+MzG4h7Bo=x<4*IF9=v*PG EZ3{T+cs[4( $3$uPlŤ@2N$ը@0i/԰+jޒ'\(]|e,:yfyѱ:wR QA uDN.%)B1k֣jT*7Q?ċ  ,ش/b.T5޽XPj5N8ts۷ϒ*qߠhҋ[bO!B!B!ğƺ?Wlem8KolfuJrQo#2 "zB'xM8s-n97O2@{f0:;@_`t,,V}!Z.͑ょr T+EUbSa"_6l8 K|mT~ZMRta`NSfPrpJ%ЙK:gl/! 
00U5{8MWPѕ%^P63G:7E߱Ob_qtr[qٰى;gg*vk3ɘ'B!B!Oc[t 88@G[whm\Y/9е#z'έB9o޾!N7 ΁R[Mԟi&)bVM9Lůf3H;cv3{uvh&{[pmuB.4sP UUe4۫1ou]淨0ֶj:zh+^k4cd2y Ǚ4ϥ L &=iZhmv}́ ts(R}m9Rl9FϱT٪5.}8Oke qJaE\sd !B!B!_~-5j9O.B!B!B!^]YLwHV㩗$B!B!BD<㝟p(ηp݅B!B!BW,M&B!B!B-di2!B!B!Bm#!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!Bm"!B!B!BmHdmŗ$M>[Z_vKȤKOؿ;Ѱc\$MowOq6|D_~ !B!B!= |k2tmEn489Q[/ft- UN"ӜSEk!9FsnD'DnKZ3bs::\B!B!B*4*Әrq0>L]((RgDyR%!~Kj Fp>B!B!U?(K EY=c:9e-h(}!G;C-~5$3D`4E , `!7E3;h9`#c]zV`vgM-D85,2q/ 9 bg3IEWh8w8 ɬ 'T-\z5q9<GqV 9r<ȀѮ.kyQzaF/H=.{QStxzLN6E1P!>m8#_սts523rom9z4B6<:.yCsۿ%jK0{$yi4Dk5W+4!4@ U"yr*' 4qfI "lg>ȁN|;\x'BtVÚz2,#HgL jT}XMްll7+xEsNOqӷE#A9$tux4c. 'Oq(o Vpf.[GHO_\#poh4o W3*d r$ti7FZt- eYHDhz˘I"r8{ny\{wAJvvԺHӚ8Έf򫶝IRYg !#}*ЅB!B!Bs+_66]|7bYUu-MՅaJ{9}D%fTR۴L%ûJ Nx|26  ϪLbۃ6d;fm)O݉$<зi+شT sՂE5a5.l,J|V T8-uhr黠'$fA5L/oݢѹ4H5aS!9q0^Nʱ:g41eʩr d&8=VxZwMzn Pqԫq}/ %h,ԋbI+R{ؠ<edbJU56*0{E9 Ϲ7:!B!B!/WVj$b/L0:Geg,:Tlֲ9ҹ)}KUY-[͐K͓D.#}O^2ϑSb^w@:Oz%'ٲKs$3 ĽiZ*\v-RaUr<3y-IͣZԕn6~ʣT=B_`Ks,{ࡥ|ӛ 2E!B!B!^ae[ƋDTredV/9.~T@<U|hl~ gGi=_4($on}g(~ՠ0IY 3o~Cn>Ƞ7Zl;z\%!ͺd0C';N֥BU-Z:Y$92DK0Q)ԗsgΟjS1jG~ygPMh*<2Tb5p\ooKcT?Hl{1l:X|kPt{'}r$U"!B!B!xe\=h`UlʌOd$ق!]-YױƲMN^![gg9fJ::Ey+Q5a3oZ`4l.PNuʳH|tK} 5>4F6B!B!B!E\%g {i UuN_޾!:2L2,p3po#G}+~׈'*J̡9?#=g^X?vnf90s᭦t)9 ';OAckԬCGhyp.>-ޝk)!fA.B!B!B/mrZmRⷑ ;̠UC!B!B!^Q%R:*y ~#YJ !B!B!+^ߔnB J*+ !B!B!J!B!B!Bl'3+"Oggo]3$OqO uzH"yK.R,˫d\ncZ['i1O|x5{s{4ݶrTI:~A֣aZ' ۘ)/[[osysEޣT;\wnOh̯WmSgj0* NmxmsgwS[wnb*n;_ʈ"h¾h_H(s]' 3l,kǸOkkpu|)O1iuwG80H,ЏxǴ * IDAT$Uu}&{3>Q$_12[8DxU< v3<)=mڦ$_0$6mOur N$h6SFo2.CT:cSL9IM=}BN5ii*aK|'Ol4BkOr,hzEoS铴7B岖:3?^ I oaci6tT=e'MXUD=PЪ'o38gV Ե#\MuSJ\ZerTRۤuZkkի.dž0vɿKg{roݺ^SuߤaK?߰oː4{:"yڑ4%O:>>v}"+\?y&Hΐ6A$ bD5e۵aK.%hLnC} B 9rε%qD50]wy@hbO}A_K?̑35X?QCsc!zg0ahnm&Ry =t\8NKmN3yaU$^׿^.v,= ._~d<'8E:WC򀝛cKGSd5t\xVG%0O,8.ϴ4D>hGT#K+ϓ#7˃  oᲖfw%o-oHyu<167~ xms v:"+-\tu­ . Fi}ථ{A !. 
L >΅8)u _(' 7a 4_>"Cjsv<پDSy ]ϝP (}!G;CWHgNcJ\~+sGNO>!m܋qU dxgYSxI4&$/4s3tpJpk=]?MW`o ՟Qu"ދtΑ3"2lmm oK _٤Søk+Jq]b^Rm̎l}x*I 'pDa/B$oקp8L6Mawq}+ԧ2tYu֯HQ<_L ıvzim(Ic.]Ù}\8㽒(ϺuP8W f' wݹ\ ]bU@v հ*qV T-J䱴-ԝuͶ\J@%5uOgp@( Pk ;U@65MXxS7`Hg0o9k9fApE${<t~q!z/pNDpDWϑSw@6fw9g;ϖ';EkdS8ʁSxS*6k'2aи8x47!QWM.1C:SF]ۻmvxžwxu_z/wГc!Gwqa,q/ڰ?J^%Y\oX4A.M̑ vE'VK\ 윧w/,,u.Zȕ7ps毨5tv,9d4ZF'.K䡶 (wu!kU^c6v2 Z6|LnKt~I_lL.O~Xm\n?N |N>}DY*7K?]] p>k6h:T?y۽ꓬ[^?aNGId M2Pj]0qguE+_>ewZSxçp^:xy.9?X*>/ z 8I6^Ɋ-,SdsH |B\ A"\˯K0cmnl°:SG4~ug0a^LN29c!t xƫ |-#BCyCip-b&bHSr /\[ROȶ}ؽ𚧈IPݯ;I8DFmgD>) BA&jpsȐ+yΑ7[b\P&h7aH\.%=E &sV/'+w]CtV4gsdY> &:u;" yDba#Fۇxp9^V8//'٠#C2/޽pe6WnP-cVqy'w`[gLv nPްfD>*Jo\U;{H'iYΙ\Z&@׹+HŶv8/?޽s f_z_+^ /ᛧhݠgZRJ{p0p 'Gןn7.Vj*} 2q'Jڴ91M&9@C(hsV,w,Z jӹ-][\kwzH&۰2z\^^޸qkX'HXy>Rguը)RYp4q,8,[=ކiZd2-ƿD`۴4:qfsCV.lftnc`Ylou er%_lHg+0e˟JrQ2 "zB'xM8WDD{:Sw_̆Oeyfy s`0P9r"*]*1ŇEa2y2 ^]E*2Fsj&P[1 ̄\5|Q&ҡ)#O?|n~u +]UΈ0 d^viA 2qPŠ^D앨<qy*>VMqnE2yTvrv0Y Y W>K]K1Uꁻ\ ڠ2voqj$uRF+#q(@F'0V]k M/Tr,,e.nRΈѰ6n؏`~_4o/Bl!E^Dj ́]0t^aw^(=vٍ[Ѕh.\؜a6[h.z I]H5(Eߚwv;/Effygyn U倕O+87Gqh@' mfd j]ǰe:v in}۲ˠvjxd ] iԔDQFw7f}rcJu R>%ʐnYn/XQ%)n-= |;\h?ZZ",l\flf^ز?f)`48kzirbvo|G,:7ʒte9(5b_#Wopk)\ovgfMce^ 6&~Ge[ܾ_jnrlp)YOXZ{d kRf3#|& iu'SOH4k<&| v/#XJl+Ƽ֚OMmuZPK^[Ky5k;)vl+ݾ.!H̵ww(CS3j:[нM?fXSDs%8G%i2uȰҽ"RC@׭c%:F s:@0ߣN&_vȦyϋQQZ ,dird~[C^ƶJp/Ş:h.XF6[K@6BMVވQH*fH*X9aVI4G9,hKh.ף.KC>u |5"*;dXHѱ+y:?1Fwp:OC lOKMRcBSbr3Jн2Ub6]Oڹ;7UOtͩI3VcK)Hd`&kJ4Eܩd4E|`5]@.ў%ki:,)f")f.}Mݥ߾+|r"VM;:'R 吱@EmMRYaw@VdPsv1j}˚_n /ؑd %gT4uo<Cfs8PR{~>3AnXWmFb&J9VJmQ, 1P|Gm9()JKEqs!)q*')h*0WU`mݘ:O2^;J9r$̭~Fn@6m>l>֜@reY"l./WR:͐4(s7wWԴ{yK< UhnD0&8O=X^mRgh(,en^?_Gje~^H(B2YQۿ/hY߿|Ai/M4y@\sZs`,`2zy=ٺ^tlj 㷶0ڱ15O0O08tlkYQQ lp`$B73ƾͧKhcZMzo0=4ak,~2`Zӭ1lqԖ~{2Huix^!Hh9H"ӄL?l:` 3i5>K0K(%VS.XƙMl()C6B2Bbv徛6h[H;[!dXPSL&kKDrinL*6WtH8μa>棹Ӟ6hd 5E;_xiXc c$A0Kauks5r~ѣܿŨ/a)&]Ce>GChp< &4s&5d َ`W32BeX HTOo*Jhe$7 0BL( U_鈂(&2*؃TiZ'Gs@4PZIa7F]W&#`X!1 X M!0H$pimz@]D%FRvi|1g ,/;I y{K Os'kNέ;A² 0[q_2r3Nk>xC%G蒉5R3ùJ1vCq;<a""WnDyi_vjٽ5csd|yldn4Pb"6 O1~v8Ij\ NCי߄, 27 )sr7ɶie%v*g .;h0&ɑm.߿ 
L25SZkc823ߎԽG'xC[~RsevbSPW :uU]il':&\=U-΃ȡ"PSGgbZcدx{~~-o]=]%&YI,SzU4 N#ψc6$0ǥ(7'_! y#XXEm=-x>N;IAL\A6'DžƝG02[MK3BͺHp$M*X ;AۧX<(mbP@P{F~\CYlA/mﴮ<xtK}\wU{<@Rd)t0m; ƇԶl \9vǴu8wtNc 9Qe~zqstzd"p'!>_Qj੦b/uQ2 Ȏ;}?z)t$kLq%Hk2w]r. *K5w%{V鶽ot|M]8r)">'G;C†ZM$PrNz ӴM3Qtz3Zz-NL Ƴt ʱyqidj^9sD:_ЕaM2X zz8ڹθqPI}W30Ml̀.aavsYpxmP?̹8jvlv3{v7Ķ+XJ=tkZgil6\K]Bg6xSׅePS HoWW`A_Cz;%ί86Hk>֜Wc9Ǜ/5q1#QUq}z6KΖz[sL-:8u&+WK87!P-Tg4}=KCfڛvE o7z\yö^Ix J}.8{Kzu2ˋG.V`D8 R|N1qB^GN]g Z,bVq)YnljEsy^ѻܙ?2QWibƢO켯~ejEmKfG\~vNͺBz;heˡtLV #< xhn5K9_diY/$"`1|DK3Pr6;F)8bj<8Ol"!Ƨpy@X.fkY7ټPp=e~$c::l_myYIw7vC$j::{i">TgSucv/#ZʘnDrC͇fK5dmi;yڑORl!R(.!Uu$~k:C~D{na,\bVƃ{E 8,Xfԛe8ئh4͵,C?pԗgHQ&S{'oss ?vPqcGz@8w|m^k ^m&0z=1@ed;b{iހhNs-tG/v~|?dnܮȑܡwn,~EZꆘ&o5#~k h/J S:X)F]W5\G><0C@fOҺnXM,7ν'͔]o̖u0ɟB|`jӷxtV;mηe:Cd_CT622p6P~[tC3ݳ()C_K!42wӠ?KSWS>BiS3w=tc&t gi50“Y"EjsmFRg>#Y8BG%3Od`&,y pQFeϚLg}B 􅳂r x =>qh)(20у2=L/ }E>]il"E3ƙqkzPc-'Rlm jqF{[3R^8tksWoĨs tO}ƔKUڳL3ϧL{3_ewG#2BWw`)B -` Fۿ]'{\d@tνDb֖_Hh\m2KLAd2`$\;m4Ui@R:x r΄6!;Ƥ4woK6C >nnݓjebĂ­opFR.hPC{:Yض2.3zBl*i}3/Mđo>wpVA|6גG:E D/hL&ه 6b@ @ oVj2(8E \3 %j|bSLc)f|sXڜ8-@9U4wM'8"Q᠞(+jRUK)ô6US%fY >A$3(XƽHTb8֭tdpP *05!cB"zP Erd| 2X^n\ӽϖPJ˷,tekĎ3=2JCK$2hX-?J VrusA.#e6&@Yd@fxV[Jڌѕ-@6e Aj lRa7o0&VZ5[LyJ$,NWak r%!f,_mN~Gݓl'qĦJ,= |;\h?OW=-tk ]}I̚U-\s.%+8ina3fr,:@ @ w7jEcrǮ7`4GϝV#kܘrP" ^S46IP O:͍Ø%L _S%qdHBz~REV)*0TFq%-߂ (2HIM\.11-T `ĵʌ?7sÐ2.Lh .sWUb-o8ިIo Sw9w!cN+vZi\Ac}ߎ߽WTP~H̑\VLrҲ1?UH{ijW.ZzZKY.s9@ @ ,C$|.@U @=B>,ߍM?f:e]`8?t^'0[LĆiOw\]()BOd=dfj֯hέ0YBI/r~q&G,77 1=Ϯ|&YKsCʓpRfǚ|HP2n'f:ݬ%=-)΀ɑ28q)dDKA%?o)BV'cɧ&yt=.sWL8m T@<#5n(s1ʼ۩dcqf l'qw7|Ћ\rd{30&EB=42PVƗܜCOh>Tjc\]rrJ0~FjxF @ @F݈IN)w{>߀KYz8U"KCi^c j/RGW/3Pk%E"!nvg09Ao}Eˌ97G.rqШZ, cwu($\p("` csuDJl@ @ 1F`dǖ6U)K򏭢T1ދ) LvJ&tx7Hf?@ǚTVGL6>' @0@ @ wL @ @ `W8Cʝ=/z?GԵJ[Б)F_dW7^ňq9~dZ}U?հźhi/{Pu =xB#a\6moܗ1  /iyE}zq\#ӻ"S MD}cz\_RW ."ឯp׍ip?r<>]fw8_RWCw!}eJln|k@뗸'2 *|"@ XgԭInϪ38C[;WpQB?QG߬cK hh=+M|}r]uocG124 Ӱ.L'寱2y=Xqߍ2˖t`Mv`x6/h]\ѣu-3Gę~-E"q =TPrzoqha_yRu_|N~q9rkp) Oho}E]T WEƒ8kB`d=2c-ˣoSh IDATG^AL 72dMe)\?;3^ju_PVOH [Wl۬嘜pѰto?oܚ`9a6s&جrj= 'mo 
R~Ew(C{l#عx_pv.~~w~m Ym@ .cΧ4W#iW$ɹ Bp8J&#b;f;[An&x۾R\{S(r-MQImwn;\X~侙00Oeۨ}f sWSʰX*0y-Eˍ˸- &0xl5r΁4]4K|tq"a>C9[Z+#2LQ_"Yw}>8Fng4*I^)5`47M\wu,7ܚf}v6ŕcrlb,C>G_%0ç1,>ztTQ2h3cs)JwثC<خ^bǡԢ0qGx;SVuucsNW\wxIi;iz[?x[d Fvf6nc7pg '?v_blnV׾ƑT7ސ5@ mS[_m|Yv\:c &G/S.J{_c{H ' ڸ?1Yx;j83Äte<^Q3YF%3%t\?Kk9F|P"x<|ݮ01 Ӄ>z}$e?ތ+b;'5\< :>ZvEB0,X oTK|5>Fm$ zO_qQCev\a)u3 ?F)uh~{N<cn_d n\cI1Ld6__qZvSsz]·pz?p ^Q/qm7TpówV;k xs)GleV+v b#> C_ pOPb q4TBIOs iNL/A8eH'1>fYhtm/$?HYUc5m"_g:A)&# YwZsGHY mjىܷz~>dƃ]]WfɇWLN/AueXi j@u_F DhzWm%PIca{HX9He`~6J,:=X$6͉VA-#]kYF崌8qXdlt'G{cBI|ëߩ28p5UX֓cxbnɤʶL=Tm>e5~|t.ޕFF}t^{F=oQSZVRwɀ[qaМtoDl ShZL*QL,Jz8FʐlaIZ/S.XI} fmKT,8o,|n+a\fP7=13`I΀HI=)C< FqXgkk}h4fZ-yX^O `GKx6:}G p)Jc/ 'x2DSg2su, 9zN]'vl%t>~M;az ~\|Ӗ{amZ;H/ \ǼONFpfCG?Otz>fS] EoqJ`6YsG[4tOO 2F@ o12/ 9k/i47L !9X)[OcR-n|.(]E[Ż\Y#?ۖwj\ :B9"0q?y˧8\\{U< l:M?նpj}\ѣoe~X> h-C؟JZu0)K#_n`H L$¶=^|tO}ԣgf lf]GArlIO%/s0@||~/Cv3i5EL)hUB. [ (?Co5 jk%FsxkSk, ee݁fp'Ҹ϶,ucF+hk۠CjD|:|12Pqc\zuFBnAONqyy-w{/-آ[ܸ{u_pٶ6., ۠qA3`w!xJD5Þ 3 _mMub\!g$aofRdx2>j;UssH~gqXlc)&~˹o`M9:zg^:`=O.2s}>ܙLb4\i4 ~I0v>C0D2-Q[qZJ<ow}\,+"vO$+ 3Pڗ^= mN\0;OiTa^5A_~Q8,9y/ir]g z-Դ%7֘/&cry̻7P\mLO}>)#МD@ ށ'׼!#'9Y'6ti\{Gm%RU MMn?cxZw*ĎEt $$0-wXGB\;u26sCzf&ـY@2 M'8"Q᠞(RɛsԶdj% SW΢W'h4lFẙ3nFK l>\`ɝ 2.x\\ p\H6QoZ"_XLK#\ʙvȹv}rK18 e_\pR!8-'p[0X Ȁdzڪ\}\aϒaz*1j sK,.`f%7|K8<'rljZ{g/7k. 
<$ak-$s-M.͇imJd3ͮ=$);)t8,'[U۠ʽDo8+D2jKzB̨55EʏMQ hi, @SQ@2Gbel GP^09$K$AU ps$\KD"i` UD0I*e3EuOUȦgO?;`%k0:8PUE&: nhˠeP4 .'Swbs9pVD3F5 @Kp_i68s;$tdv3_Jl uԶ`m]\$- $ rgETSPXD)ds^%Tǚ`OCS;IG!aΗO.h@VRdW"2tL~H,0NS"%`t׊XlHcb:Q)b2v|&\YF02 ij9rՐ ` 8>2桚-!3]-wM=˧Y ́𸤕m)\cfZtOC Q.tSlnH:A6ɛMg Plz졥5ur>RlN5mʽ4TѪ};Ճ8'sc,6 H+nܤccS,GqK/d_Jw%I|tr-bǪŬ+n g: fKs}!I¡/duk te0j;O9q@GmP޲jO P}1^ȥKT  Dl6X)GցIc&(M2ٌ,AL'n9~`2 `­q[Vu}sN@ O1aTȠ3ӬOnWITN_njTtOas%ޘ0~7Vip cj$q s|,v]:2`34m^0\܍*)Aҫ{'>Ia@|~DxSe+߫P]5VWWQZ$,^fOB%fK%gsU)ܑteHyXGn,q,(t.a<=T!w7[{It(ʚqpݣHL4zZrv4XN7CC}9T({%ɦ2P30[Ic}$Mh?NmV5 l%6cq*AY25}m,S=IRdMBkmrwk6{ԺCv?$ #莟/ic5Ai_C{p..89f4wh}g{z55W|I}"Ӵcs @lĔa4qpn sg5ep;GFiGPt"w9EFo?ͭBQydc[}bOavސ"W}Qe,#[@;fd E6[y#F!,!Tbѓ{8o琇![e.?ur=C.Nj%vivOPSDeǘ 8:;{,NjuQH77u~bu>+=ٞ*r9vW5DŽjN{el4`9ڭsw(GoDyX^~'D.ǙNr/NWw<՟e$576W7$8~ kY- @eu>O|ͩI3VcK)Hd`&kJ4E\d4E|`5]@.ў%ki:h33o_>u GyZo{r_wvMf~?Ҥ!UHNE ڎoi+i=q2HՋXLi)gLGhq =LalH.J@Ћ7&-"G?fɻz^k^ZQF1 Z*w8fdP]gs3RuÛM:4lGҊٞ7I ؠ8(n [XRi Ŧ+jC&5a^q])U w`lKU (RLBҡVb$R``Lb,/iId<+qB\)Z*Aރ$=EMgp(5Ob,W@2a˨`viFuP?sX{פڕZxNw,w E&Ǩ;~!IpW䒁&Od1UY)$kuhRӪL|z1I#Xc-mP*yc$0.a@a#]}+'˵sSƟ;!qSv$cd {9G޹/_'cX1Z7 f|9 z1hLf'R; r" @ o5H7OD:D6gp5:7ۙ`p R1N>c!L{xݹ(^dʍĂ 9>)l?z+fY7tԳBk/G6[+P-di+Lqi'(i4@Z!0eL>b"%v5O6M (1' ("/eu%F/KFn?# @馩-~vI+0gt+8x^d9zMK}؏,p Ƃʟ6f0Fqgu:"RE"Z!LVor6hK%=Иd4ӌd =B*Del@1Qc5}o7dp(ƌ2LngusCfwe^H"`Ζ%=Z\=CḦ́bU^<*fe2;nGu+ D @ 3a| Bӧ?݀^]4AؾZͧO/qZyT!ު#}gaf 3o qɢ IDAT,6H ϣs޵9]Mryb6.x]t̻3 VtV7i؋*1aiƸؘ+8Ŷ@Aw21M(6 eP,~EtXt;"'3'Ȗ`Lrdsy|Q^(BG>,OIɉVCi]NxZpX95tXi;YҨ0Tup-z`v&o ;Nt(i~W} [c~܍ץr^6 ZtVLTNGf@]s8xM"Zڮ](kܷnb#HIr^f {Blrߪ8ƪ.CoڃA=&!Xwɮ-:KL u&ZXL6 Tp|]]Wj%$j1{>:z.BQe$.;; {8*r9|܂*mS[dOt~$~6 @:DWE3E2-BxLP:@č(N= i" =w]=9=ko/%;JH3^{7\d,-}_i2՚5M9+soI?}i T!i*p 9ı$dmܴ$(0Co[/{d;j6;K:AϖePD{Lp+ǾW-a%nO^Xl(qtqn*'gwr>#pA9_10X$p .fJ#M~RIn*b"==ϓhpWeDDޞ2cLϜ'WM_kc7oܽjbqGNh='1.Ů7';$,F:4SM&m|=w?Ú :/Jn>Y=˖n;.k2G>R)*@c=@ ,nkzJ1ڹ0蹘KLkv/D\cJM%w{* o|9p.,Of3% {ٮ6pm)MyNcsșME/Jm4+h6'/w |4h?Or7{~]>5U[A^XИ='(rNq ~Fmr6Q dS;ZueKm(&:s&FZ_kf~y-B+E{V:c^ IXۦ;If'Әy*y.|Vn򌶦GVoLZf!L:.lLڑ?)/贎b޴ev_9n;B@ |}SGwϤSYe͎7b#7.}0& 
X\jԽCJ;:l_[g@ dLA= ϙP8[h#,-LB00h"Lg $7`۬RyxB@ Շôf97YJ]w˄hXیLsg?p7v֕u'Ƽ@ ID{ާ~ `;Ls G\U2(L1qs]'wJg^|ꎚUug.;IM<#}bӳ2 c?lc`&_`v{fvX]L @ @ &KjWӷ ׍1Ep31'ށ@ @ @ 8BP]hDc{kAbB?@ @ @ x'bv!Cȉv4@ @ @ ?ծiIjD+HG_Hnm/  3rwa2Uqmr4* Q_y#/pٲSrpZGWg%@juF_FJMQ3Wx?q\c^Cy,BK@ @ @ xG]'bRD9Ց h b3/P7c~X K"@ @ @ :BLޑ) >W`o;m<737ߵQyrQL @ @ SL @ @ (vK6ߎM+:&bY+6g1qG _8ۨ۠.I*9~ P;{E͋;)-Z!6UB+s2︹zYl> (mPE~} wQFsk-Ш%mxs7?CrY]o5bցJT??ph2κ=.Zu_.3mhJI@ B _;I}?;Eg^x`v/dݻs;F+贮M)Qں=%QgoY|Z˗%"01J0@>EԆolm{Rx}sXSqXq:r05D~c0N"]hUhqcxf(c?tm/W&=sKy1_Pym3c i~&(d\,r]s|dr:.P0m Y}} J+;m)QlUe@v !R>3w`|g$jՔmv>ɟWJ{XL5t'OG3 o*SxWٙIocLku~#M[{I*A~ fŒD`s]noxG׵_#)ZnڠCRZ8c<"lõL)/ `m b+%%˞;  3rwı>X\ߞ=b"1O2 )>S.r'8Df16'0:BzD+HG_Hn-k9&B~Op=BN&tK9uLo N֔0=#*'O|51ʹqB vǸO~n{GIDTgFf50~&E>[1Q@S 2F I͑ŲnV=7+yDPs7-cgKE4QJ{zl\D@ld/D͇9b̯{e@=H_׽x rpu6ggs2vyjs wNb* +OTkz}Ocߣg0{ֿ3%NVMs?vFt_ r/-Q>*cNٲܷ2;Ӹ{j?p览s,ljzƾ3恋da}2$uR C+<]уB]D:q M5P~$"3,n#˯-ٍK<5gW ,)x>:E f &j5# (:jDE$zqw \nx:vYy/[I =%0L'mO^8/VNs_2;Tx{eK:b3h^ۜ=ax8 Ag*r7 sL!mӱ,нē6E|TŞeד#1pH E,R=DF&S6["B%XE1%AZ*(fYy`?2O5uJDYk6+"m([q/騗 IDAToKoIR JɎMY 7y?@}9hn)]NJH(|FEi ڠXL'i?E$H*jx 4 JQ ^J'b4y=HO Co`1@ymvh,#czFm{%ˠ&uպCQJ9{<ۂirق2H*#A"/{|9wi6A|ňo=MJl^S훻/|s{?;,eؠ8-q}5 klf3x~)aTt}PK_Ǚ2EҔI 2]DX\<xj?ʌXHl'&g"HI7CA"FzeOˇQ"ǁX@bm*rQӌ>+=Q7F􆽘RFJ"+[M)6f|xP)"W(P^UIզ|fY0mhtXYNróL1[ʀkRo^ۜ.0^ݬ+(E/}Î]ގX"*U9|T'}MLMyLdj@ wd{834U˓)RY)>-'1v9oUHˁp bvr=O"U^R-&H?_ Mʿ_NP (QQN7~HJ)ò*;M'ԥ3g$Q׬^̒^I.C/e' Dy_N,$|ZDzj`]Y&Aƣ Te5d6NL ΒyՋ|fSe+ѨH3٢܋A*f-HIAg?$1BH*4:UcsLԀXR@2H+P[Ssv=\Kk2fFua~h&9N.b˼ i 5qz撁Z5|GO}>OgU)BkZZ})( 'f%6!|IuI?Rs̩RNPlyeT=@{IUØ1i$/fZePD?T1JWʨ)9z*]{yۓ2m'p_asAq>SgswDwu:M" ZcC`Ti,hOt/-jj@/30!F*ƥY7vbvo" U˿PA*k,_uz e5IV$a`2Xd{6AڣPFB dRWbAz \a;l_kܰRm܃*H{P#(ㆾ15GTAI/^z=Ugcܼ 2,w%6oD0C;5wf |U yRTD0fIsNkU @ zlWuZcwWJfdGPs%v $ n予z'هn)4Fۉu2mY.*dH(1 e9J&-8)`oĻ܇s4iG㐕* I$dkϢPMfKDT JX:ߨftS'Jv˼rLd[ 3Dx3唁 ^G 86ϷC ZCseBRb#w 72I|36|Nm vOYBʌlTYܵZI\&恲R֌3۲?q#f}1sDۗa@e<~̥(Ydj'"W9dWWGg*{.,KY&óL^Lŕ}+}s^n8kxE<K^b~ :Up@S+Ƞ؅gҙth~َ%+=oN-AqQ^ B݀9Xgf)?bA`f!0،dBsL;b J0YBJ)&Sias!P+1)fLb,/ʝIqB\)Z*Aރ,HK{jh>cIJY$d,)Qe;(x;qE~; *J)I+\QY 
ƓnhBzP{nuE"By*:Y 66Lk(A귳 I3O$%ϋPt6} $V*ɇqy ^r@ o5H7OD:D6gp5:7ۙ`p R1P0m;)|O ;e؋L1X1'%Pu,&Xz@hfkJ>㱅L;TwpI:ח96OP2h*BZ-a |Dr3-J0kumP^#cN<P975D^dKD\_,O夊MS[`&$@_<[gӅ2ܗ& ȉ}kԽ\2~dG̈́_0T1[e=!4;ө,$њ de*P|QDv[۴A[R=Иd4ӌd =B*De`1Qc5}o7dp(ƌ2LngusCL^H"`Ζ=Z\=CḦ́bQy fȜKU$̄)+ OfjSy:Pz!7>ј)5GCȩgxgI@ GDF2~*=hߏ!db&) LhJb aEQT![)`|a397F'Hf]gI45&/'Da֗Tg5 _39mu.@IJ^S}FͯWcۋIry^IlgjicU9Uq?@H2c1F3k73%fMh.+=I:_cONf*G Eu4ʺvx&XU]_!;?bhB, RrW^ߨn :_b&"s /s|p4R4BW{#zREƟ[(~ =so*] &ssx- qYN,tLcqº6mbll+}L%'!,|R>䬣sԢ%<%H>z^TsUo9~km2Mޢ+x*J+^쮣p7\~y$hnX2%RC:69dܷnb#HIr^f {Blrߪ8ƪ.CoڃA=6}4C .[+ݡ[t4=A0V2M&-՟0Й9i*8{+XHI=FY]LbnnQVP=q|A{tzb>jnc)cAw':?}Hl?u@:DWE3E-BxLP:@d(N= i" =w{{8r Y{y^Jw8#ёfQx7\d,-}_i2՚5M9+so 8O` ISc}!&!k3$9GzzMfK%lQQ^=7}|,"c_= j <.p{RdSOG4vS89˸ŗY9k q"gp1SiOrU{}D[݀*(3xøAR`{p^AbPޱ=?e~9O9t/?eXtd6FX şsy=^fIK̮9{/ ՛1g<\1>mfX1F]&?=/V˭ł DO$trs鯫<̌~2Dku1 q5?'0l'*ɭy磩9Bs1s,ڿfFc48^T,@ FwAv>E\w[vVP^ఎb^3gb\a ~-TYs[إD=Wh4u'Ƽ@ 6l)M=vz.hO^1-W/GU'}Q51mDg/nHnSpQESeJKPp!_Z[g@ dLA=P$nY,¼#paH/6}2E^5ҟ#@ (z!0AƃrO.':/sns21bm33͝7X79u'Ƽ@ ID{ާ~? nxߕ,n5! *"ɭ 9@  |Х@ @ @ G?!]3:X꿦b9joh@ owxAMCX-Nkꎑ{ry7 VD,v@v?:vd~w'DZTGDI@ @,ĤB4YrAbF=W$֣S[83#ӴJjk^So=|gO[S3K0cV#6g Q 꽲7W|kF3ȭh7Md? 
c^ @ J#5GTQa2~BPx0Or *fvs]cgq+<K@O#jcT8;l?=M",v + ֝v ^ 6@3w/ @ @ ] 1i\o<% ps/a$$C .YIM-&$p)2,8h wdʁ|>~*$Ȕi`k+e7E\/q}>˺|pt3\m+b9&CHrF5&'p4Vcf3Cw;ڄ"ٲR=9G)bOt薾ۃ@l &L<oX~6]aP%י|x%0@Oj&>Cq}F˸m&Ill;#+t4꾆ddi&?COȑrOCJMxeDAMT W ͠nA웶 h+7!4)LWR_CǪem4Ӊ~Rs dJ0ՙcK9L fdP}E4(K [jin 9}H"e׳-{)fEQgV=4l[ji2t>O?\9NU K6ořblb5d|ÊNX`&Y(X.ݡ tKzE =Rt +'erRgEEb"@ @ 1׭mV2'7|oj5%qJe%U$IJ7Zv,G>iKSe,URχ9_ ij KVks'$A %hKi 9Y@d.M4˩Q1 IDATTLrI<4ފbBKgiK*[N퓘behe HBfs'& g-p"L.5WzHI,rܠ~);\@k؊ `8<̵a.#ÙGD}5ϊ]k2_Y$ӱ4&(泬 b̺%}((kb R.Tsl]h?Z=ur)vdC 䵾M'$/I3J00K'PnnV7P؀zy*[tA, (9jKjWˈI&fdsi TY_^D@ @ ;٪ə2tpkDjMXٰI o@ %TNc; ,}-"eJ > -Lp_QDPJcjXjR;Si_1bLX+arWq@e#m]O)qIW>'eK%]/d;Tv4B,d>$LnFK؍cx Pޚl,\cþvd%teo^Q}| 筣o~MFmOo2[ųӾ38f81@reESm!*]E+DFh&`@np%\reR6c)HJ]Fg+ b)3U3Fv D}NA㡷sq 9R^cdPc @ѩK"],(@ @nC#ԕ0<)WR(uGV:r$DH/ᓹiF/2gٻO'I G1N7 <#\f=XiFlwlE;AeiЙebOȌI$phi^IW|7sG"Eq#D$ nI#lX 1?4Z\wdcbTu'@ dca &9o +:ܷ̆r{}O,i`g87EN8OcyqVYVIKiЛ%"Sx3)VƘ-M67Jhyp$Bm!L1=Ik%Hk6T=8#ۺhk6GygQY$|sgkx-_AJ]{rr| f!|^#f.;1¡7\Q @ my?VYܝ:'4c.Z~Wtje,˸"$5[o`09TUv/Nuy|(f7|L{Nk[m)j .ץ* K/'v" -$sg/' '`(+phrhĴ_g,=t6uҙJPm;R$ KLRԤpu`͉'ir>ZV.@Ph\N(W5K;uf}<{i| r>R~xɝ Ǒ{AC^gSGaI,ݞ"̎f]\&R}%VG]KJ pSie1{96>&A'hkTF68d0~x֟>s~<Əp4N!reգDb=%bIRy:~B普ƬLg+iʽ*,{`x|CP`c xbIgJMq`ョOYsk%̓ϴ?T'c4#Fe/6ܤ@ @ I򓜥8z[܍wѹA}"5`^q =NR]\5%B ž4y֗|ok8{ itt[ @ @ }_N*p]`9A uwd]k 3Z:>MwQ4}(޻(Qs& ^f@ @ ܈1yN0x׿=ZBN v_|?ptuvz\ņ@ @ 1@ @ @ C-V;o8މK4Fm&egp=8Mddww\cx57)[.S/w ڧ oY_0A͑/9^cuIx^jl7mܗ5MMh#Gnύ1CE[;Eg28@.pھȑ/i 0'N :ꎬi <1{n mEc|ő#N7#E"rC@ 1^FL؏ȗYߺFǹL(?gĉU#sA %&:n%~h)( ZFcW}VBBIo#zFuo#˒ =7dg:/YSKYNM |;r'\wA 9fozQwߴ'3o׷kX/2=NS/>/9HfCb]G2S2zKk736KF~5U#3- #bhWͿä"AO/lܠ3sxVɮg?uʁHUL_¾FXl?34*q4 w33֟ d1X|ɑzc#e6|^o~wue.oo{ ;aύ7"EA[' L}oDhz-/Q~%5Z%N:@a@ٝshЭBPKu;& oTI.R~oE,@܉pY=2\{MKEr QBUe|r7TaĖ=o7ǿ& YY' -}nűɨk |IJisf ]>TA,7 t:k yRrC蛓ѾE}^w5oh]us A2Ջ|Kyʻj}_cY[]:E-'pYu[t\íVv./d-ǎchkҩ\c)Y֣XxlV+QM'8:g0߼FGη=/T2r͋Ow{c&ԡ86Y $LSlutÈI>i;,Wߗ D?m8o!/a3u@ 7f~X] }$HC{] R{s|Nu{'pֲu0MmOx+Fͦe,N/Ii˨Z:K@"N~GUQ0jO4Z.vdm{yt@a8m [O/,=^:3Rhxeѕ3=QJ.vN' <Z>6n) 8(8(q+$?pp;yd.]OxRS"~Npl5o3;EiP ԡ8u MSDMa} 1N;Qbl{\b5$k);cf\ _ 
F1>;Kwg rG0@?8eMQ$@K_XNtq hn 8jJ +O(fZe@KoX" mb) UvPG~^L/<}I ΞXs|^A~KcNx50㔗@B+6o[rGbŘBj+~]# T,tݤu4N"ok5k 8~ JL+oG3ZlU%@ u͇u A 5r.sw5E k`1&s iqfh܉u>>+#y$h" g8:ǓDoU㵹 !2FM'w ahuTIyɨs@c8qJVK(B{6A~  ڻ~/m~'N$SX\v Yj,"|Kfdg8@.*P:3ggP ҧA2_Ôd/ 1_4Q$J+f'5S]%:oXH RXke() @?dx )m $4]=yt \a?b¡8 ]lpN ۇ8ۓόH|}-%Xܗq*Ef2 8& 7Ry'‡*H_Kh89.[{~^PX rߜd.SD Ksɱ ?]Q\ݧ)m)l_(@Eh ]٬L<3t_i.~A1&plz, rB2O\y+귫s[k ڽܚB^sӂ1OԞ yr<1r >\Rqkq%@8@h@9E&c 7P( џ8q]>!clg( 2O1RBr:O?7eN:S&s/$摫t;cʺml.t"UV;MĜ1N\ ?یP~( >Faf)O>hS2=]43Nz`5sՀ؃ #c|-}A_4c9J p!-Ѐ ]$ QWK3`-&`q]@y+wUF{lgט:F}Br:Xֽ7&`JPl" r+ir92v=g˿37aqNr^n,tu:/h[Wm~?Bψo5߉ߗ 06͎`l:orHJ/_po:Fޛ=c_äP5~'#P?3hck3C#F|nDGx2~7<]~ p8uoPVbr߉Zn#|+8t3RlzgmGI"#3}) =l~&*= ''W+ uW\F,hs!LfJY8V_K &ybv_|e3S˧ԭ-`̈́0EW1t(g"Z1 ["[~,I^[V'nm.B#d:Nݒv+szgshDpˑ/9vjTs3I}PxS+C!{ƴ Ge,J&$|x)۲0ks#c k2NH2F7'(|[4q~% ;Ky+(yzI[A^Gǎt+QL*MTѡ4Z-$t~w18+3b7 $ߛ' !['dj -;}|}\9Y 1"!=4@-4f~:_p{6Ѡ3! {1 XVR1m8рZNB/P)-[g~4 ai:zLrZP#s aVwQd.2*ɳ 4T-(yQ eg;يXQn>Lagkm Aαޚ@ 2VIRQX>VP{6FF3x'J^^:YBUi]{VCb G3(U&8!;ռ4s4Zhuejr .[z<], XHHj4˿+2FqƣFǨi2aTTFnlˈ ͠\PgACORBヒgt6[fv~MP .[ Caj!<! 
\@k覜"<^8i:7C_o NH%HJoIJV#iH2)mS :SyjzGDJnO$2tUGeQ ^^/} K;$^[:*E+3}.#Ù$m*F_ IT 8 IDATvn&-eؚc8O~@kU1ԸOKuŠ)܂ r'{2`"d-,Ԕ&s~4j)҃47%{((F 9K{&ʸ8x&㊮o, $c *ZVG+]t/g{26_.Pn;c[6iSo>Υ{*#mxvDFGwc`ciql;h2/(Z ZA3/Iƥ8) :Uds@d.M4˩QTL-A'-SIj_`ULB F&b۳)lX%c;K}\5dP#BcЀ@ XSh5ku}rΞ~>uV$fv vRMfKD UXZ׾u}@x"]z΍ C.djOeNggp̛Sަ2Ж`ЕayG;Q/A}zz@x$#CsƷg涿l~S샊ތީo/ed6W7$☚bq&q[J2.s|MKǻ5`A5^$|'o5'E@,'e8D6?ii",'LLǙ-GOnpn2ؼ=jhϣ?>#Og/tMJH䖁N_F<*pڊ u/q>\8grcA&waD; a n28ˑM TF#$j,rS&(}YT > 3AJ%1fM 0RV7["j}d޾(͇7I/po ;blt3"ʯH9y O^L>'ztN3F[*QaYNFfU HSjgu Z bٷw+Ab@2*TK e /8Orn^oRLq1GPTJ;,4^SCrv`r2ؿVrZd9,dҠ?wɞ*dʨ{gan:CLH()F$Y$7S K2Laɯ ~c|ZEUU#q4(=Hy|P8Iet L 2@rj*Yt Dzw,eІctzX$B2L"+̬Hri&iddHIJ^:t:wK+=9;I6y^IW|jɹY#UUqOP6aKdJ0pHr)Xt厩ۓ;7ћ) .N) ;I6IƤ[`:'GsA\1貔3|; wyF Ë^O9pݲ1҄_͘u81O cIr^&/s!^~ab%#*A/< ˲M>,9+[I-HA[Ry=q9ia\߱swIA0ͮ;l {#T5۷X^&-Ey1|L{Nk[m)j .K&#VsvӻO$Q,O^Q*訠R'GP}OM,V/I''Nݺ&VH'3%zǹj37)7VPzW^ZF.nW9HJMq}ldJD%[|nmmt5 =Jz{I,\z+_4ͣ^jX)D.-Uc +pis>LE8T?L,J _mYg~¯I$-੗hRt9֖FGC,P}zN.㐹 mx]| CۏRS}cMo6ܷ"CDHHf8cyVГ=!]oW-ikwcՖ3A]X =[w!?[#W?RRѸސDD(ggY@3*xXKedCF s6{^Z:*pƕr/ 4W-A\xe}ϹO φ2pXgDt 6A~΍sh]XK_o[Z >?{JpfANk>9S'.}2lεIvrn|"K]ٹF[;NJN8ױl>[A1~EtNsR=9b11D'D)+ ׺H;1x`2ȹ٥O.ݓ-gq\UҠg-eHOSsmPm U`|2xSmi\ξgA}:/f&P |+YlT|Mnm7x=7h 5~ytOؼ@ lgRcmH@{rN8OTiO17%lXwr0Xd`lK뢂t73 \LzǟQsCsga00"@ }'b޹sW; 5, 9E*X*I,V:#%oxcmwN `1ݞ?Q?@]\g?l|'4<@gވ)9N al}8x\(jIMs8:;=fs'l^ $=~Ñ;3f2%֣tß@sx =ӚFy\Q市RB@ K @ @ @ ߟ4/p6iZ-n#o'qR%GW?D]$pb{OIe:)d`Չ/3'Ɠ5Ol}csGH"@ @  OI렌ƾV] Z Ì Y@ @ B@ ()w4.d!@ @ ]i:ߥѩQTQ,)C2K8E0 JP^ g~f<@"h)⾌S)x2%Wp]>2r'`"F?4VGD3{.n#F~ԝ\4Mq5U%8^LDrw}XTP{SO -;qn%{{.h&1fH,u[ݦ1^ oײ_~u&'fuk C|vc_A_3oXc\rktRY24ն};ڄ!9eJ#&39rI nrOl {=XAew&LN/ƽȑi$@ @ @wj#Fk؃^.cNleFUUQ~O4)mm ?{gֵ5{ul9 $Hfp @t!)Xjrid`CoBeh%2 ҂=h\5p@UHA40 $}dCؒi~Dkk@ @ uhހ6Kw7 B6 7յ *Uu^Feٱ*I9;yV$T TbqiueĜ/-nܙ@ @ @ ؜WD { 3‰ 9*0W,p;=ǿG'|FRn;^.4dAo}XZGUc_sDǣZFyoutĤe/z%6B㬷s=4PA*=wA-B2wv7Ah+ri>IxrڕRH@vb yO=Y@F)jϷ1t%9Fog.1lك^یl Uu8NF3Ȟwq JAF;Å4zS^A3Hω LGY[b,+>Vd|4 q˲{4 y"@ @ J?ᶐ>mlbK|upsz%N(֡鴬H^s7iy36cþ:d1@ @ {5JhG$,8$=^J%\emø,|I!Zwҫ" #@ @ 8#xyОG|btG 3Nt+L@ @ 69&1@ @ @ _Ul6J[D/hh# 
IDAT_--I+|S @f|]Z"ƄMJմŶm/oY^ ы'>9'g M;۶NK3Fô9Kچllv8h(.05/)2x~jS#8o^I?KeVx-0 ?f͎}Okk1w8_{3?虐@ d1Ά/hXo'UWspow/'QpI/WgeA }IMH^_m4 h ƭ h!gɒ]*/D6k.<󯑻l9: riMmeGy/pW {sL _Y햎ؑh=(?=!|B} JKAMq6|3dbp9krzf $($s.*cg Lqt@l1d?z晉-"7L )a2h|_B" kb{|qvԊwz.hش}G'¯HQ?;*19lUIvzju}r'eFKo$gّ*y?%9ǦwdE#t:Wc Ý~Z >RS ck +k@ vYo~Bq!V8dP 1?&jYb6116 Ex᧴^ǫ~m:m̿\2*0m؎jl- n>lù\~侑'ڒmT~sJH#V`TmaߢJՋx,*%8vfE)i7MAP-C.C\ 7H_ ?Bϑx̖\j$Ьn'85xz?^M6tEe% tOr~ UyWr?q,Y; 9%(N7cjݝA.Wv+y9D_vw`IGj ң\ ^֣tbݰFm^Voe"{x{_pSS8uP$l<ͻVЏ~Nw\ͷ8ӛᓼ)>FQhyU]u s5Տ΋܏&Fo;7 |JXXR5v)0ʐgEi@ `1ӭK6wGdsC} B,9rѹwKa(QKsmNtv.{g3 \5CNg}:d-IDu<~\QxwT3F#tn3ZDgS٪׶Ga(8?5j ?>k,G~CwTOI*xh+ls{{%?os܋ڠ&:twn{"HT'kjfَܟ[_6OW:y|k%Te:3d5]v!#}OTGuTtz  WNcJ=@\ .= 5z@bixH)WTrNv6-S|W~S&ie\epFø8n 3Cq̝^ht<n3|J%Y{7%C{eޟ49`@ v|G3%x CA"\ w`ڊՀ#Z :%|{\ɇ&|7H 2?ƕa t D!S_'j %|PsܽObj m"#xM\K#2pXghhcRrFFUp?'8ӣ[]O:btr}ѕp_G %TI+px Ϡ< s:*c݃k0$߿_. @8fhJXVS{W.~f+&2WH1.J{m\LD2hwf'#a=Qsq+. 9k]yYd|}7#>"|2cxHDcK˹^#nLݢ{0{0 /[%K%tr0Yr{96hX>H+~z$@ߠs};zP,3.Zgܽ9^4&4P!hPKd3i@$e> $G雬72̒+:Q3$rlZ}=.">l5NL z]|Pi`x0_Z]~mtt^Oά$M6WMOkortsTr=T (=Ï%~g4%PAF#"⭳򶭐Y3|uFHD \|/Y9$G4.~Ipˡzt X%p"B_^#X M7F$@~Ä\.v/~y+ٵ? 
x.rZ8a KA$83rpm,:T=,{ř#=H fSƺ~З\?_\t]HȜyղa#|^QyG7GHLfM3xD \z=#飩 Kye0O,6ß3vU!.O.$tӜ'NJvX HĤ9^HG(UYH6*jʶ8lH5u86xkiw'd>ԮƏ,Ti@{Jxr=dyvDGS~d*zf&ـY?O: ;LuEA#q7־:Rf%VΒgh6WlE|AgEK1uОjwh37A3nxUWT:ho5#K`s:x(@Ƴ^eE7A8ߴRFMM͡3W:+ڞq;5`׌ *=A77`LwH[D+8+ sڗu";B0_?@& C} ɥݸ!'(=klnI.xԢV|UcH hdYPEbkTK [#?w V`AMΕy ߺ1*W)ݍ{V^].jqpmH`4鷸~ckj1=(scl|ͳkcP⇸n}?t&HVUfZuNP[ìbЛ+V ΣrK&yh')pe$d=uHhz\5L$\㧇ݵ{ {UkV5=Ҫ^F.w @ 4Fy\hzFn1X;OFiGPs茕%D蕓\"Uw﫴W]i fuo۲~,j͐Tلni}38 ث5y[0 [4;I+PAx-ʍ0#qt}~[b;JqvdJeQ7tQ ۑZeަ2Uc6_mk*ћQkKh5Iw3Ͽ6r/U˽]=KDQd$-?7 QgZP(@oVy:?^šyb!Y >ĸ Ғ v"1^$t|Mo 5!Gy:!g޿D.^?dWl!,k&c&/|MÅo%>~kQl5>)>8|w?~K2zc&eh2*6wZz#л]x|^'lrSv$9C 6-A%䝢7k>Ygt82įLJj]8)])X7$P̵hr SjKUis!ЩI޴ߦOݫ\Sӕ;!VZI\ۃ'{j[hђkN`p517`Z F@i5 ~a"ɿ!:(_n epʼ'XЍo\ulZ{9Yxr!DAnR#)SR6W+W4{XTy(Nki"Sj~]Do5僕ZԺDI҅w{=w=_jw@(4J3 ɓusAe<]W(-X؝&ңsR #6dVc6/pLQH:WZ(|@pjqXZbl`'5uBC%&i_ x#"1T"a:.9R2(ק˿yEg!ci >t)&8>ٜ/5YD  $}4 m$g('M1%L#ty% '~c"*[(=H%lPIy&C=P`t"l5|V `l=m}ŀ.9`D X ]%<;ImX6\9h hHVR// c{rl>w Ni3Dƒkp)ي8O)_h6rѴ#ztSexsr0_Vp@b(mgcE䟡V%]ey_);KaxB-CdR(Utm46 .隝z_o,Ѓm|9bGfJtH*$])+\qз6V%ot\{blE IDATy]. . 
&s!1[0V";y~_rW±&g?I*'0zD,9f Ҽ27%uh^>C'XĦ2S2a@ ]E>L{$sdsC/}-<xt }\5{ 3‰ 9*06[$voWx?;ãtzG6UV~y6:u]M6D!#h;#Rgt롁 /_$T8E3Hω LG߹΄!EX=#w^KUXj=O+i/c75 Y[1ȗ:j6a7Z)锊Z^@gIǃ|`0׮f^& :"x״m&Jv}#k% $..B8tJE@JEt}^@`A?Ġ eOym=+p`ڠbqo/'3xh+Kz[(<`(n[dw;÷9򶴷ȴ|&5‰A0Y9ߺ<^Z,̉/v2`Cj7,lҲ}dY8@=zqit?w7;K{G>[AY#JeU(8%Ӹ-e A.&vziV??Mgg hlڋ8 u^%>U޾@ ?ym'yjK=#Q,Cӹi`\%vsoZ[OйIDx+JNĜ}Y#8=3&%hOEc߉k.NՖ6&x\fǰ%,^s7i~f<|)U?<7@ F{'b^:U<E6ZCZ6KM͆WHb Ρ\\[=1@EnkgmC{" åY I!>ɼ7f{DrKF 5N4ej ˉί97YE7s3"qz^9f76G/9/}ўשX;# YڝfބBåțo6Ɔ@ ^d@ @ @ 1DaW8afs}IC4y:$rȄ Td<c b wDJKk{8^qJs˰Pnω;8\;wPℓ@ @ @ xx1sL`_BjgO~u݋RW妏,~MB;"HW!Jr CM^$5CUA4ć\H$Ȃ ֛8鋤2[%Z j6cg9TG"9($b`R$?=RrOD{$cDR2=& C2JWqi =[oľܗ,E_ &˧4I`1nݍFtiHm x}=CFavIp$$|ZRb=(& 3|iD)U:K?>pehoV4X.ŔMCu<]*sDzF7݃&kS~6Io$G8j^\ɀb'@ @ /Wi2$ C>QG(UYH6*_N8Pjg4k7)t.b XDMx.bdHzk1Z3>\{(yoɓ=;k+b^ jVruI2hQVu2ab[H;LuEA#q Z]XJ%5 *UoXxFsDqdg+.[5 91%^(I&TAE:#U ep2UwH6ЃrS*X5:{͖Jz aD9et&BUc6_x-^gJ!h>GB:ـNM N MըU68%mݷhvb*-2z$OelᑌuriPȽ sMֶOW*Yw2]}yH'7ybAT͕7K黴Z~\ 8 erJ45ӯJ!^nZE_B/uuՅ|򧩖FqՒ< *="a;?EMe*U͢Y @ @ vDEtuH*$HC4-|71? Xt"S[/o4#2@ڇ+w 1rUf+S~Af#MhJGh~ǿ$? 
cIJU*%wQ ["SeP Tbqiue 07Ewc+PSӒ!/wQu$&(!2,!T I:`:uᄁfeuBGe"0oLrzDb.,)/5&T2D(%r)3,MTKKɄ>Gd4ׁ'HV#bw[-&l'b@ @ Ku"&=_sq>*KgN4dQ WX㬷s=4PA R|m7¹Q9Лpzkiٖh%2mYMJxNxCtzG(-ST^]hx3ߋmA趿C85:=Fr;d`(ά Gvb yO=Y@Fש 9/H 9@oU8߻崎TbQR8=۲爎G-{.'p/H<:SVt]go+΄!EXإRzY㢛hW =kJhkH-^٤bb "}|| =j;epoCq a&ub|$dm{K\dnmKf߬Dw2ܥ}I, ~'?NoU`cTm_`ȱQx_#wr2F^?qu]y;{mF*q˞sL }wR90*G;PqX(HwR*'YO{Bo+S\B)Dp%z\2OTO߅?#ܹwY~qto)*< fȡvm{wIp~87CmF\`*p4,bmwKW\'XDˇ-ߏsۧ~l_-5 $#tK< e+GF>GgS(=%Ka0yVOH'|ݹk鮯] <^OɿېwI;{e틶?ϭ;)5N\\M{xWE%6DO8I_Gu!ð>^Qil;3/L@ ص;bL XZ-J{iq}Dct@%FokPԍ7~J[uȯxOnSdTنivTckQ^lv/XfM'?іl:M#{V j ]W2^cQ w@wO-mʹ-OiHitjrJѾA~cP#afet;ОǑy}62BN[hE{ӥ^ö ;Eش=9gځz ӃV`.zs!ZI"o5nn{^P̿~~:2zn܌;wh RĞz:%Lru?A}#f7\n:܈"uG}}I,1^BQo4?*?~쯯j_q2jz?&;mߡ @ b@ٲ[Nl9%Sgx#NïoX1JZ훓tjhWfe(WVhFG*tK5%<^YHgTAP!:<1jo#M- Ugqcܹݱj^`*0ϑȔploO3u}IXv.1 D;Ghhoæ#{,3:8c:E6IE_{=6z'$AkVptx ^݅Ds|ʇ\' KL?E!J:0=US Dpܹ۴`o'$2ti?sv7߅0 qg+(M˨~ގ;>%´\2H.؂hd3޻M(De N" g:_Ip.Lp|+k!#c3I2q9<3Xe~EKDr>æ-AN_Zu+oKxUr냖h9j36nBa?a5Ep̆ ^ixD8Q8l:q ̥2ݷh[$\iJ0573tҋ{g%3/'ss8,@9u'} qj|>&q)x*wbU<%a㲖#qSǥ,1Rvl&IN?)mhķ:ϗLdžydP 0f>?@cWmV&U/}d zەɎ2₅YC8wvO퟉su2i~tȚ gy|y|"]t<+;1LUrԉ1Lyt @ "?֓h/^5S$Us ?N" d1R)QsO Wk)O0zvS #3I2ɭJ$l<=x ao c)#Y$=aAƞ@g[ۓwL$R$3p>rlޏ(N2 E 'Ljn 6RsՕd/I$Kn~nGKL^Nn؍ǿrߙ=ZOv7ɉ^=FfRhO{ӇKȞӚ{75~x>36q/ؘ6G>}JWbIRy|޷ :Fi Lj&dfJyz Lvѕ1cZxI,\cϝEG>v'%w& r81]|e}g%ٱpW|\9OH~< IDATL'R4rw7 _k2ѿiOtt=T'^eqT6 FkIͶeV)Ӈp9w+yLXq2WpK$ޞ'Hi x= ?512~z"Ut߿+(!ڂjϙ }F?;pC(\g4t ۬FCf|&ZZY[5;gx&Sџ=S }BBF7Lwd1LFݴ |~V ;8*輻`oի =}?z/3ctnjO}[?G$q*/&4!4]'<Az&} $HT-'H ٬Cyf/٤v>sD`?'?͋; d{X'./&-}kl3XFsJ/2/{C)4eFb2Bmٻj0.vqhgq"3ԭ|mw:FHfWN~ Cd\K ;~D*?3)$\>Ζq구oZ.=;ΈZ3WN2@~!8H%fFH[O`<1T/ x$SD"x[Qel5&)-$Ni/nϳ\$mM7]@}@/olN~?푪zSNsgC(Z"P{d}LjĨ:gsN0z4 ? h\{r xSh+,푬 נvA.f7SЮJGϳ-wlǂ@gY͹$>r/Eğ(ͯvlk KLc:מѿ/нBPG?M>N.T~.[/2Vxۅ^8.De _n]AxX:z˕4]l>|73Z{˿ R&ߢwF(X)9ll<1 MIB#+o䮻9~/7M.r:SNUT`\7oh{Z:Wa3@gXuSNijO+>QɢU%8Qe86K9REmv--LQʨ@$EF%/ NP;%[4 <>Fj,C_Ek4`u)IM>fp0m6j0)^dy,YQdBa$qe"y$9ꌥ q*Q'i Ȕb;S6.0cyZ`SI"@ah,):볧սSqa:LDM>g1şڂ:fZ i3=)BZWw:L2 q4RQm-9,OW{B(M\O^brlᶝAc+% JCDs'2=b6-[\2Z_fiκbB18OS[-mek#R ^cOIB"CATQ{-$ᰬ0;VHA)! 
326O24VSds$Oqg|O캏l،Pa唉IJr2vޒ/B>j}& q?n49DbA$20{wrDZD"2(9_/bu$6Onb75 W{c#j/?=;rF٭̷H+wio?BsN%VLf H8c0c4H+cp7We-VoDf?1I>DA7NԺN9#;VªF!\T"s4Y*=P7FØ2DNRmQ38>ad&E (AkZZ}***'f%w!|IO;{NJ).ʸ<CuT啁F.qXg0g" HQ5cIW~˴ˠ$h]/ƗtwmO+r*q}6" tOepqY-jrq86868 rٳN'.+0mDєa } a\F]$#G/RVOsad{U*IgcՔIˤ5/I!& "۳)d4%HXCvVq=,A/[*M6z)};m+ӛ}=xb/I>&A08n;67hw9狇W~{|zPRDWBP==unK$* mSuPaVز7ƒ B )Ҁ\D<|A>c>\r,>L",cAloH/GưjqT_9yQ^BŽ=^Z\đ)˖6pI)fq4#ደmK`(0i MolD =`,M#לX?e3Ge²:S)dT2S4c&n)PYzBy@Ux@HjV7W"j{ֲdyj~%U<^]w&JY rϽFI| A^Ig ~lgf-SGt:#4+fC6Y162y,AC"[M^&}޲ToF$B4O wWHOltdDU`1cH<ڤ G Ybv:ƂB)^OMlə>hD9W&ʚ15@?% 1;(yRQüH` dO25 D(D( U ȹNTq' Os}T'U`c,訵 ݓt)&?Pad"l|Vϣo8iޤC%`h Qt WU%32LJqlLإI#)YOKg~ƻ/ޓdgWU$;^'4ېwCLflhƦْF $ 4TI֢I*=ٲc:]a?0hE74䢤)-n+$2 N^$4+wlQ4M0,;s<9X.yY}K̆1 @Ҕ!i ^*5Kx߉?lMb?R9KIrø00:. y;3W,373'ؙ<~oyx+(A6ݐvm"`ϔ?-OeR$3VA+/,6ܶ~:u:0F]C\8H 'h>`?ٹۥ(is>ݵ.P'6]_r--ݶl6;0NqǻO$)lR5(Uq]I˵.NRBͭN|ȶFz~..J0=OoJq*ϖ1]>ǧ/Te/81[wtwB3N&Ak~v }-n/kQSv/IUҫ %:.-OeL$FZ'8 [hkVF66fꧭo3e-lt=Ο48~fJ9mhg)crG{{ZyDs6 @>E[]+h f}廳 =8a}[stk0-R@'t'{g?B{Aa{Jk(#qlz[)=(=7KHL>gGxfJN>ȵ[~pG%䋇5r?ǿ<qyv}[b%caFLon*jN`?Ü|'︐o<~{4\cכʅ@ Dž_ɳ>m4 ;&UMt~€]Jo/t`}+@з^鞰y@ :&ˠ}}jО< ߡ ߷$cct+|~S%x,3څWqpvoM>L}|{{?y- @ 7Ź_D́[C|~L:4xg3j͟pzVfN;RYFP:7!8p'l^ I=gl\Bf; L4GvyQQ9:"@ O}"6hkeԴ78%l_quNOy@ `z"8#āCw1+4ۍX>9橳#ѕm^+Tga=@ Eg 沥z*MpN,@ @ @ Yf/iѳۢNSxL O8npԺ,̬Kl<#4~EH}5xpe }ľG%t0_!|@ @ @p-Ĕbu?mɮ ;deeYڛƪ1_b;)'O-Q}Bx8u J?"<"G݆Yz>:ˮGZ19aalAB @ @ 8BQ}3F QV%5^!"/黗ID[uU:LkHxu9V𺑫p=I, @ @ w$?PPa)P3"? 0@0] cS?}6X&N$明.r>oVw\CM;M&K96R/ w -bՅh+UF?G\h:G}!%n1$X"?Agڊ"/XחtI꫸Ś;IVgۆL }BA :r a\c}- .3=s<glV%#7܄"-1Gct[aPG?9|`4 `Oq\kՍB"*{}MtWBGڱlld;A޻Qb? 
Sul%hme2)folD3#.0h&Cl?IZ"~pq}ʶ5^zY[3CÏy{ENwtg\VJkg4]~HU0M~zX{Zetb703ҋceM;Zr=OKDcYӃBr$^b״BۋLk)|B(g!)@?Mm#(ÍX|}Dk1EOom+rrq~#0QξY+~1!K) T -}4YV|SI{9=g<ɤInt(8NE'\M~G9;Mc @ @ #knR*L:$ʱ:ަ֔ݝo9"/2X.+3* TJTaY݉>i9Hb٨!X?}.{C:D$0 {|_j}խr*c?=E2@%, XUkџ=Y$#6<[~a:LDMQ>8)ple(*d@2Y0kT+D٨JrLJeQ;d 4:Ʊ!\$s4YZ`@ah,qIY!)C@ @ &%U-{g{>k9,ʱ8.<-t&r}w)Ǩa noY"REۧtWfL)"eؒ3܄JV5OD /@_V/U% D~z2">,}z7gZ=nxy?&[0$~&˖%|-$C]KUWHRv!O{ ]ҡS<8̫ПiuUݴ#u]%j9&m>є׬PWH7۟${JXzFr&w/bL0Vanh^{ 25m=n wJ:EB]} qdc)Q74ݎQ?~lX ը`+1@ @ ~̨d42\ɞL$,Sl[dƚSf -f"g8IGp]&x1*2 ]-/v齈1Ε֒p, *g YNQckщL [! -,#*K%Y&'EVs1!X"#(EZ~Dt)+;(ݒ_&R g4+ ^X&.MF36K͖\[=`$a5(ٸBd.ct8 Ds%mBrעDNJBzPP L^$4Eɖ+hgq,{@X6gKɀݺDh$NHLf+Q!z|/:_-KDGB Y8H0ǧ/TeR!OxL2E ׾*"c~?Oe={՝Ʒm)h1jP)Vw3!ՏȀP]I=#RwJ9p\긇nr96Oli5E2`>As+?(hC#>4m~ 426t+\HgRd2s\=@Wc1Mw/y>dhTLĞrO$5FH'^[ʐ6}~Viz2h5w;nwrv߂X&E2m!N +CtAתzXѷ)$炽F}J*j%T[yuA-/YDvkgH_].5Fz t\_$h S2SwF2NLtړCcpFG@ @ ?x $qYf & qXb|H]~ 'qE\f9 <۞H[a)VD{b~Z1:P-@ @ V9;9#)̟YFP:[,HCUENd#@ @ `B@k ﹴk!6+qNW\}4xC^ժBFb@ @ 44@ @ @ g~txCO{c} |v-31'v2#* ϩ=)'m1ZV)'~"+)YZj L15cnL/Pn]ۣ4>4muZcbzfɓ:~#_bs~`@ @ Gޅ) h4 ]N]2uW8 P -.d iO9iϨzt(LާV*NB'K1:.D^Yd)'k!l?G"`__-2 q_gj_ o,US(PDEн)&@ @ ؈K"dp@d3J5d+3ETقanT( k9'8mۻY~mt:_{KDL_7Z$D@ @ BLz60d Po+ @!nB2m8ݗ kJxNϨJ#>c4)hd$FN\\xdsG0r{j+st\Dd.ׇP1e_2'}x|_r-O 'hC#>OqLWq&K9,<軯ѡd} f{a"'+hGpt^][s /p[G%I :{V@t+Z$+6m -`@4E2a7M"l!!~yfF{hGK pViǥ6{ɿ@& c*A=PC#ڞ0(C-WV&q)x 6ٟhLHfV_Я;Ƃ3ۆL }BWy|P>ۻZfc$+!M fCz?a5Ep̦ȫ{y/|Gf:T^,Gfd(sxKqg=ҋ{g%3/mg/sNd |?g+vef}h9`Yf{>c>i\(h<~nO.r-&Iy@ @ ۖ&,vC?С={ѐjqio?=*dm\F.&GGCDx!噗$mM7]@\i .rw&}HxXbӵWx8uѿ'*F }EXJfDxwt2u=Ct3&1kOWkN'tgH1{s0DCf|&ZZ)'@&nS:zfQS }BBF7$w2︝8͝wx&}x Sk3Q7gxIynL ;1.zI29c3|arͬS^d}2?w=BXoK_dx&?\;VJv ~Ȼ- } nd.wCwt/K &:뮤zJ f da3}:wqwTr4&&>-NF陬M)$&S)OXh1G|}]=\}:iYnQ2_ _lu|Uh"@ @ _`sCJCDs }рAaԦH$s][DQr a,>!8F $VD.5# 8^?z *,2M>fp0m6j0&ڋt+s<7'OgO[p۲.-+r>4GOQg*ʩ[fDSh?d Y"dzfGkIGQȨu5)E,ٻж|߳뀑q_X:v Q vS%`ou¾6g&"Z;,3r^efcMԆ"&(n QR^( HK6}!obI6>`H,KZYJU5vK1w|,KPM, >Fq\ W[5}EnfRw{MvG8aKl-=&cUX&\3UcL=!U(1B=r}݉RRe;@v3Rpؤ1G\4BX_QH[uysi`lY"X؁(*=;T4⹉p+Q:%.t`G[vҪqfSS/_S[*QTMf]{HE(~.Dۙnʱ 
֍<`S!@1Aluļ97Cʝ6^u}~ksJ%5tʱثB!B!ψ̓H/sJ򨔾ftuJ)Q 6*E '>Ֆ0[Jc$i0nYٚJ KW@ π"Z\#hSM7r[BqԕS-mUVߛNΣW&|}ZA{') ʤH)̃A5߉RB&@Qq{B9293BkmʨPHhKƿ]b6zM JTm^xv)B2u?1HxpƌZ j59Fp]s_4*Fu5F-*Lr yA'TRkW~F* {1^J]\ @fP)WrV|KS5̤0bpeIeʱKA " lΖ7_a?hxȤԽ3*TL2W7拃R+Xj*gefJJFc: i؋맘kmOmxꋜrU-ǹ|SΧʨ{ʶxB!B!1c6Vb> 79"Ȏe4Z)uj)o๶ɖPUtjdZ ;NӠ@*%ܩS JQ-* O:)UOʾT@{~25f]C3TNmIFeʤ"ZjǠU[x44)4XKDkِ~%TKȬ&j PK/ ==`Xpt&M:#ѶL{j%;I RF^NirR԰!j%%O{h3Jn=PuRmbµyB4c4,sy~~neJ%^E_2 $s"|ݡL\neUR @yog> ?%fM6r YfC<d]]Ԅۖe/M4A~َ\tZ~dZ!B!?Yy&0'XHe- ^MTXd6pCmIGH<"Zq@}Ɗ[T"h!APfLQ+0_x=>BWb3^2R"Fհ6]M4׿%z(bm6aDbX=ʀ߮<ã&8{ V5IfxJ}p 5%FHVqECS}0Q TĀ$w+ _[:>gjԷQ&xg3J:B7L, 0( c*Fd(uoѨ|o9LM}^Dta?cr:}{>ob|fzv'o0>cc4`6t"&7/TRb">$.j60W7&C` 3Z92q۶X")dmhdD >3p ԣ|CV`!7j -DՄY# *VgEg1_csH?A IDAT돧ʊ˒3[RZާ<1G񅋉fo6|6MRt B!B!vV;b90ʱcqpiT*L584U58MK$+;8GLuh 5+Rw<_l9gwC *9kޭJj07:i ~ɱQ,c<' -N/A8 %غ (h/S}Ok';׏;?g@su-ku*܃[p3R`^WP>{O51ja T;Pf{!]P*͌ s4PߥgS*ûm]9k<͘8@o}N\1֊e25piKb(*o9I{ #B!BW_W|[<1ҠH_fs,`b$!hhAop%9xU+낁k's"B!B!:k2! Z;J,FI !^Ih!CXeF!B!bהH!vl{BJ?q!#'\'nɀ׌dM!B!BnM&B!B!B7ܚ)~wxqWlhߒ(CDt *~Hcw؇=CM)o!ߊL7HG6!BګWH[HN_bM]H9LJ#0n`8x?P]j)#GDC_0] -Bw/A>޶1xRN)`$t1GҏB!sG>nc47h ^&; UJ-eJ0q:<-J_UN2vw{TB+&a8\~=gW 2l C 8~<#;}?NXNܹ~zvdp /r6:_SD\1G ֓\X{ +LJW-_NZM?BC$Į̻Ju1Lx GT:~\k!>NÄ_"/ȆIECǪ;gVbPip=sP{6oέcWc+ͮ'( ,,>aȐ]rӌ?egIRq{%x$4g/Q=fgsd(d?빀'&l%6 6@^`P 9gĕǗ};Bls] v.A%U4=4Iso<d(Gx¦f)IpY%<*-{ 2s6Cϱ+ߧN^gRybVki$n:ެ Rqqs3ģ`h|\h3^(o͑1TRrog jLGbZ>ZP 俭 r#L<@/hrRg]jt/_ -:T09CVs`Ώ7&uZ+DWGi0Enp(_ ='d*@:(;^ |#5tԬv׾C4vp sl,V t~Gj9OV~Z*9pvk1(0Q? 
x7TPzAwu<ifS؏g$mb7?[5147irvLf~A=[By>o+ۇJx-&b'g71qefd΀0mn ~Ӷ` fmSOWCx} -M=r$ -g.r@htw<_ue?{}l@̵[+o|t %Ic%(G4s;x>Vyjw4'Z_rU|5{4 J6ϝ8ZSA%\`܇4}|>+m-\ZnWJm{DT^ǬG 5dlzt}n<@!{FL:X: {<=|4?$%v΍y<4zPGpwܻ׋>Cݕ-j,mfb 8$&r1u;py5Tlh}g$s/MTj՗#k\CHdlu >O!y)L w$x̟%X[ڝOwg:=CUX;s+% OV9n{5jov'J0;n\ o۫@'ʰn[83 1[X7 +؍ b _oxN=;Yt;(O+ksm޴@Cv0s=U OXM7DEi˛4>T/=TQ='Z>ϸl܁ĶW+ONϡ3σt4{2l 0GƲ pKT3X 4t~V*;Bƽ!:vs'ܼt:e`;1o?P}xۃ9≗eK<&Au3?*6;Hz]lM*Xa(BD  i?atr *@mLŌu?U@9v3$Y9RIq r, +aKi½ g.pivҸeۊe?M-*hD51bQ0KRKK YwvRk=FpjyMZR fR92@b*LV^mwYk8@q^8-ثJQmpZBK-aP7iIlhɭp5UcL$.ʮdR,X ?#8N ^\?=7s[9AOh{U#4Vc6Lbj9DC(7hw?Pc{jTʱ6ZQ #C[neinj?/2{2H^CӌB9 |&l84U{bh ٌPih{c8LL/A+w37ۙ[= qfwܹvƳl*ƣ?UC PǤ(Ð/~0πɭr=uS鶗NBuD" "bPիh1mM\nLPn`L{vD5QoZ"X^{2Ff.XWAلI\@* 01[95y0}‰Hnm)UJI~rϠ,?#RURzX&;+J,*$o1܋B)fzbLQA-jw %4ϰ;P7lRiT3[Pfn@r}Kv, h=({p535SىUEyHU_bfm"|fTUy1f4bhRU[Cs+KPUдqKՌQM7J\sG ;L+hyAYyp*ipbV["h.I[3nG6(l?o;1?`s<)_M2_ePI23_` ՘bѺ1"cIKLy@uB)r" j)dI?IY(2X!UҼU=U-ǹ|SΧʨ{xےp=܌iSi}P %(7 9RO|g+ %ł7 fg2C22uU;Zrb*syBY5lM`1f5pCJ(R@J/ø2Sj,'` lsH<8NC%`tw22،+qjQK/I]=S9E?Pg'%|5dkR{eL5>峡yZ=!]%[|O}AƠb؁|x_S!lt`Q AG/qߠIL4[r[䏽llRB&KL o_8(7bG)h@&VJ%* $2_mI~rO=J Ș^@ [-{Z\#hSM7VbDqdSUVZlWp5i߁k6 ؋2~rv}kLF#dTv)j]"Z j ՕEikH(7yC)E4}V9>0`%4 +̋i}3cTIi)PȂ9ڟw߷~M`>(^Fs+4hPj [Ɣta{âʃlLk)^T؏36I-EA[>a!ڷjXo`rŻ{ZKύn/Z)gH(/rPlŎS3ӥ8.)zBH @%c럋Utg y>ح.uAq317`=qԈ$KT\vh;Y4ߟV˶FR+NY _Xk7s}-*7t]%xw30 f2̓N ?8[b9\R19#ivМUBE^=ѣ$p|l}FcxGB2Kd"qTh/XhVZ8X'qԖ6GD+b)=ɖ|->C$Bu\a-1HU&mRPrxDs{5F2uLƃU4y0VcV}/1re00> IDATbD6>yT|>UJY^*怵eqC'M% 㱥m_ M+bfFkMwEj\ mw4}HpXK( m- `lMeDuf(fj#tS1Xr[Nf*2*6O(ࣥi"50?O×L4ו}psx˦A'f <&kco!1'gHy`%T`H%kg~s{552YIlvY 3N.`i4Z~A'\;$CؔR,0Fn0G24:Un;З8_lxKPkhkQ A޺hh!APbfmL4[qCO UN]bo;y~7TXSikrq|)MJ{s&瞭[ pQVb1_֚?SmCvaO?b) `h'-Y鞿`j^"6l};`{x}:h +qڋPB,wU#rw Se+ o\Jxzؽ-Uy>t2~۾+;6Kj^Mug& F3j"FLs/.{q;h^h,VA1od뱷~і۟y"Q9< }H`)<0 %1ڭ'oPw9\QF1RuCf--J}],Kd2ϵ;RNjhc:;lCcc TSL]>DM=XlX*E6>N NԵie3ʩCd [Żv&y!oRXޢ%}'>o,RMmjBkc*{0KVVͮV#_t R?Lo.qx[y"V`yQkA;vZDoy ΁#tu}ơ2`5~PX:N2J_[}ib؛ 俼q7?;~HA?^;I[zI=W8z(Z-j-A<?.LC }֭߬.&y=}7VXt{_/@ጻ} J 
o>Zȶx۫XCktqۋ#ڂ^b{q6sn!ucFd@JtQ*^{vۦB!X?+ܐ.67ʟ/iV3p'LO[m\ž&O!?-L}m"vN|Hst(*yz.pW :>1 i5o%}F(*3_>C<1ҠHXW"CIxqTŠݧ*Ɓ_jqJN-O!;R3bUi'#dz~2N=CWcV4B#3#Eb$#rK1@DG8^ˤw(6ڕ҉x8xo+)7 11^LYG<8 ~K_ʺD5ùϛ%hA643BT[^Acs:MWG v M0ں6uī]c8/=i+?]t\\_n 3k5;oV|Q\1zWra!&;tpJ0.!FéWxZ7##njߣGU+?`tH H|254; !tz<6<:{wńa]LǦw{4/r֗IO;ܻsN V:k I/Ō.ox也yekm *H!B!%52yϏ{} }Ŀ>lE fB7 ,F?EÅ{/?zG(rY7i':Ӭ" 8?PqKU"q Gwsۄr'M=t,A*r= U,SOPЎׅEفcnXg 't@sщ3&E^f;8L$){q8}Gn`բ#xzNf0}8- ZdgpZ8xvZ`#>#5V-Hg1\F<&/L S7 x[U@3QX3{Q]00փhTf'Hdt},ke6Dox\lG sqdFs^MfA.2Zg7e{@g:bih91ĤkaBIO ZhcSAFz_  Xht$Ͻ(] hhP 10+@eulvI [ x`6S."zZ1bg97 仆ch,N6ju#kz v36߿02o,P̯z(!'.~&/]s< (oqQ^CCf[ڳ O& c?RjFz_ϔO-}>]_ڟPӼM>PH؈S3;V2F#D oS=&7yLfd̃ej`Lrd##Az-HDىL`"5PA|!B F BI,^ZLFcL&默5_|qC3̽C!B#z 7`^"ܻ~*BAhw>b< %2|+=Ẍ́I=XLۇU k[+(ݍcBRxu!D-TPV)< `R!0ㅾ[v=8ms'7?%̵"5KQ.$NnB(͐Ζ:GaLdrgqh45ϼ{sB!B|6 7[q>.6_n4~S/sv/q}uo˻??ᯓȍC d _}c69bxVk :y%W.eFק) JЋ a:z(JP?Rw&j(Ēe:7HDa Y&_dd $oFY[JuLa`uW]+hw*0A4Ϣ&Y-#J_b~>x4{Rm'5*&A$a׎(=ǔNJ0A_ jHM8K)8H_HhF)ϒ mPXv0lvӨޅv}~jbV >f;"JĿAoq9^/d|{Ak:.Bwk@hBg18EVy4B}IJ^FDIvu1PG Sb4w!haN(%x4۾U fQS,ۨkOo٘}ATT:SǷwj~D _cT{_A'um=a4yS2LSֱ ]Sk6V}0ɆͶWŧCi LJɒYޠ9 _đ=♃gۏB =LX_d;zun]7f!Y"=Nik*~̾0>@0j{(Y3 D_ǰ ,6771OpLO{B!BgwJ_X}/g~/­  ^W 7=ߝ:5#ATô:__݂/7ɣxmq/oۣe8I4q^χ^5 뮻*.ʮerMRuЬu\Ŝ躸z[:YEaxth,^grwp1ᵵ۠ޮZ?6ZW4t lƣ鳂<ϖ3WmP`}B\uWA>걳N#[6EzG-uUbWT-Qƒcߪrk(mmDBS oCi>"eGAq-s?#.˷f(2ƶivkַ?M:`~κKEhl,btY~tyL2{K 3i<; VY:+diw^bA|'u;i8P`g3R*7vk6%ݕ²5m;ks3zjjS#W<˕h # :GNoЏϏtť$*7{瘆:\qx5kl:' !B!%#e>9ɋџ9AlZW z?\୩l: >;MGxli &3N}~8p|]vOٛbDm'e jJ帶=*]o#xn= %L$P"=:OTwT2d !NOK^?Hc5n(UQRe6咃^۾^ݲAKG-Ʊ=;zLma<Ì6G>ZٚԦk(4VfRdlSMD-jVtwy.Vi"Nѳ|9$>I i@3.V W`h܂mՎWMתםMһ!rVZghfgn s]HCw"R}fkf#T Į] IDAT_3"$\Z`̐B*@ŌQ]̳45#zr]#Xl, ө\\OsQ 1WGri?0q~{xXj])]7*[ǧ nݾY?ynhź[r5" >Mn1 R_qhSJ7sR,B!ߗc7_}R[?dC@ lXϝlq_;~?<@'>+,~KSxmڱvkT6J_w!X/g~˙ڟˎ9T/C[7TX Pp<Zp< F&&S]MD[fP{n@6eaE+"r0a]#A9-Pʍɕx4F25]}!7>:`۵({-A`/xױՏk6)!M`G筰47RtEf?s [+2U{I#g ՁAwֶpqsT _۶\ڂ>)2k;81Lâ0ZR4QjX"_{G4Eft2e$rh~@-7]o?ERVPm%_])1}!ĿA03שè^};.R"Tw\=~f H~lff.E#Sߪ.S.0u;֪A43ً\YT-ZPq(I]-D[dW_0A*S$4<ȞFx++[翾7wp(M Xƨ}{ xQW5鯝XQraŵjyj 
vO륇Bh\.)䊬?EsK38g(O䘆jxǡ5;E- rB!B|67坉lh{lh9dv+U(q*L2hQvj37rXwRq8B{:Ͳ}l*}'G |R}Fdqׁܸben,,stZa nd>b lR)|]o:q? \ V / N4M,(}6LEc&395n0H ؟,fBp0IBedi;H~̈X\Ϊ h=)3N"CLchl h u2 n,IGv{PC. d/WI c=RQ1M"1"$$ΠEՎQňvRs7O.aI%P'{1c &!y||L#!N1l9}<t[q*'4b0;4ME&x/;R!uڨ =͜d,Nj Q{YbI?KDdF]3;HW9Ob8/\dՋ>~2=Nйk'2âM:b*bJ!\[H`^3FbpScK$q⦉;n̠ W΋\ֺeOD0f G\D01﬽xB0K_RC#F})m+6uK0/\$'V ]R6|c}?nb*'1=d;[G?n{$xE{k`PXZunϒ#aVΟ$eduk$j}-ϰ2+.OB!B! pF5lYŔf`&4R`3<LVdQ.GMX6!B!į6YP£y&R\iX:DojN 5y}# T./g c?BU|3| $ԭy/.Cx?@Md,.f(Yk˼l~÷B!Bi#J)D惻 NiX-ZIUE ?JJJMʮN`Z0YRJnQ@%?`k;_B!B&I`S=I鴠1tVV ]PFX4B4HӜ M6'V-N$!qqނn&2.G*ٹnL$LCčdJMӊ b~HBlfzТa Fw@(F2w ӃbP(qɺ(0ddMDb 4t'F,fv`>euNc m8iVL*A$c"tTܖ!Æˮ [0O6 Nw*n(tt7Z"46 X$ĸO !B!>Vn^>fžXx,BÝWͼd(جCOpd #ɴF/7w29{qLdg\.d),E{~?y ʚJՒxɻ nb?爻f ;#m'-JID Gzsܱaփ}9}ɲYY㩲\ҝv ^xie6|F&dٽx{fW xe9]gX\DÌeg|kdwc|&_ 9~ d O|>Ū~=Ύ^lE1Iejt| C'>ɮmYЗClB=ͳlnչ?/oN@O_|e +5=ãdh D!j10ff~o+LJi3B!znZ MUPB"2f™GUi*={#(M͢d9uFj~ j(aR554gxiY.:c 1޵nZM/ Z _xR>=D6Ͻ~ =_؄BaONP~Xr,{xjM~QgZأg)s= y} ~l5;2 lgQ7x17;~Ǔ8j? ws />Uų# OJ~sw[9S|]|?}=Ô޳Gf +>f7ɛWMW Lƛyos7L^#u ^NyZq/w} O^o/O Ͽlm#O=e?ajRaZ=65.c /hϟDlxqz`NN!B!NqMNjF,tp G(-Lױt{ ba+ucwѕ?iS/'XMVz6v}5yv sbœ ϴdfpbv)\[|<~C8M !=B!B| fD1M1C!ٸ {1XxS:̞C~k"xQy??/]|p"F̖CəNNF&7󛪵Ba$@Ӓd瑧Ӽ?_!B!f}Eme ]tƃDM<yGќu4m2Bm&D`q*j+3. 6X2F{.^ڏQ{%􍢟 ߗ ަ;m<ڒerD^S^ 6h/zġ8ywsw^#}J)6d@z112WBzNP RKYaw>LEa 5=oeB~s=+]Uޱ5>WB󨙆Տqb'8K ʇb7N01&X mC>z{&cb>6LF2ԗ`VJdUssXzdTX_;ƈPEw-B!B?7/@uł#45 >S9,Y"A?}]'iNYL#>2OڴDa5hZN~}*`h?~M|k)<|KДpi^j$*ߋKͷmˣ*wu o*Zn~|Qi ڽo<7?~h=珖1lqKaIAp^̢?|/=Ss;5ۥMܦ=Ώ'y 576?XF|us|A_'E|Gwj;JSYra{m[|y6_;}/ٽv_g@L6B!B|lhsν)!nchB!W䠙X6LFp4& 6]e$Ţ`cR18& 18&E#`Lk˞˽wah+bKq*" 3vMDC|Ŵ5GJ NGCAqt t*&ogIsaQ ӎI#%X)f5\N3q/q͊'A<'а:p[>" t t|Q4k ʈ[\8M1|t`bOHnfwI&Wx) ?caT?`bfNNX` @8HnBgr`3'wK"~be8R5E<"L-1?Łm%&,<-#bXi[.&$:B!B||hsF1& k27sob8B!B!~e7mͧas68㑸9B!B1`?'hBn%B!| A^:[B!B!1AB!B!~}H.B!B !B!3B!B! B!B1H.B!B !B!3B!B! 
!B!̩ B!B!y9B!B!-'S܅B!B@t!B!b]!B!$@B!B!f ЅB!B@t!B!b]!B!$@B!B!f ЅB!B@t!B!b]!B!$@B!B!f ЅB!B@t!B!beR}f7u܍|MO!B|3UJa@h4z3זf#%%] .B!BW0H$!c64B_KJ)b6tL&ӭ.B!#rSFx͆i )0LD" nVI!B)sh8&Is!~ES献b}ʯgìMWݧ'CAWJ\;X?g;tWmnRr(-+!'ՉINB)1qG'lT,YMGo"9XtBKx9(=8U_:co"c$][ۢ]|NSQFibL\iWQY=*@C)Eh-K蘝VϢ {j&6}; ׬iD'8{/CbsSZFMeMcŵU왱BKP.ȖW7zfN̼2WoyErIa4x۶n?]+%@: pi?ێXzJ=64HstVƪ[]Ҵ;_g`c%s&Fx{mq:FٞBFN>>b{o9 |m;+93/!@:*` ]VKS}˫yZkg oxahf;YT/λWSvnT+=Ω7 G7PMb)kЅie_WK apb_EE'Ll~Y~qٳɄd4mŷpf2& Px3{7+{'ȩ[=Kj9;[ٵ3jܰJ;O^|rkX{B AӼc9X~uJ)Ao}G&|UaNd)π 'x{i|@|b.4rʝ<~TW ']\T{gb"SCtɦM[83ORJ˩}}l߷ls(yy~voM:; 5ɑ%+YZ<h9u Y}FԺI{Yx[l~|]!f[v=V9?!>4yky{tk~#ƍ~>M4:(7+)G`[9f3vGǛ.qkg"n\HPqmLUl\\cpkPRc

A|⿖[3^L0ְ:ө-c>ݜC{8q83L'f-B^y5uUe;>ޞV9Ű{.k(Nnq-sHj1.iP„͝IŬj0i>vYVQzOp;ÍQ0Z[h&K`MɢvWN40ٱ8,h(:y]qj~BTZ[ Wf1uj)LA3bt6q@evRX1RSN01]MP1v6X(fqYXJ&9r! 'Y b5A"<ʙ8ZKJMi;S]F^^RhDnmBHd%%-z* S`6v-9 P) Դ͇wCE2J]=݊D4@G+mzGQHIϧIvNichO4a`-cY*]7F(f`{(h`Vm>@C!lйN=|3󨙷nK.O֧.QcwgR\QCMI})44`v((Kc]Cmg&\aYXF}mˍOU;S- j]Bδ*1Ωý$yT^ ^cF̮ ]7Tbp0:_ۭ3* ? $p1^} ڽYj~*Mpmٙ4xOAA}5h׿2F8NW8Q0x2 43CtҒ:=9(k)NB1ܔ]o8=Jnækq̉!g"aށ^bMjwÂ;gò 6s6s.|g7.!k2ufڻi͊hq_7[< 26#M$"-E`Â2=;i8`8JhR< w2`@E<Ͷ;R<þ3C LV<lw[BSG;gh=9m,Uhrj&q9,KwW} /k6 JԽɰHys?G?7ơCgMfpUg>B{0u|{'% u P KV&9yt0b~ FxP]?.QF㌆*mv<9sH ŒlD05{[ st^a)+InmQ/L2r ';^D;xsn#:\ddϲ~N`| Rim:xF!Gwj`عk?{34'PJC7ZLDqJâ+b #yeL Jœa2NT 00YXM&T"J8u 0 8qC,&4V&@ӱX,< 0icH04V>}cL383%jp Lf3EĔfA7DcX-fLĢQb+ )Bf&,NC<)6x&Y'#™Ɍ͢ň%L#Ȉ=l6~c}DR]|ӇIKdWd<ɑc ~w%,s-9gDw捻ŗ4=VO ֯ %…{8p5 y@Ǐ41&⢪X(53.b)oO: mKJF|m~mA#5XTO,gX{4w#YPѰ;+?ξí~gbId:c4Iqekfe^vq?z|;Jnl8-pkco3&,gM]9Da~ekq[R(}J.z*e̩! W-KjA[3X\'%3KYVg~oI޽b8'EWɯZ'ț;n}0bR !բ!IfEp&G9JXې*6#x;ϒ:o#]TNn|\^`{o'gY/h[[R29Vԡ?q`t}GW4(c9 ^?^~{59:F038Nl[6Btt6^y7`Sv-<ka8_<"{[Gnί(&[1=IVŪlŷIOIۗgF"X{څ &p͹]dۓ|/p;w/i⋩sϿP1ëw[ɪ4scuj$`]x5nk,'}9h&ێ a+_'>0 2J#U,*v- ]Cl:dnhgvή#x,y(pNO0xhAѧ{a %*r~c^?p_V<=Y|ٗRkd<^Sxgi%F4AGn'c37~qCS_WtRrp'0NZv.AkxϏqCOpϒи~^:e<0{ńAL3S1>qR+ ?}8Y%*d֗bw ʥ0dž 0:4Ĺ}/Ͻ=|O2DH$HpXkbO 3k;&?~f;ذn6=hc8T'eG̚†WiξfY6?Nv9!}_>!s/mM8!4 zϟb *YG:N ʮbj@ p惇8>Q=w.kr s7H'4ΈBÂr=c! M[Y@E>]JkXlc];]#aBC=eF:y+},vnQ˜+Yhks>N{ F'UliQ<.EFVg=wۂd|ňaRRF翾4 бN(aY78ϯ6k&W%Lyu8&jLRV `089Jq6nKLsRRmSALSZ3uS9hՑj.?\kV S{qݳfsSL .9Q+לmLNiN%T'`"dp(r@|izJ* u&e"xd3dR_ZLX t)+SNo.OrD kZ! V6Cxx=/{L6qxF^"7NSiѸ֮\m)$!z: ,dʫ9gU`y.B9h0vo*6lX|18ORt=9fC N_mA 1ѽi6bpC/JCt;hLe4k zJ>5E0aaHCGc"'ufX!7|W/~'_MEPQs7c*඘=3Lu}rծ&,dV-a,_4t+g_lts2ya{ŧ3?mu64 nlvp/=oO![2~F9THvDSE*zwq~?ZrwHazBjkqtvxq,/ qn$qo9H1Pu=mxѣIsٕnA7膓"=i%Q@]KEMeeq_aA3ɩF0ѰraNMNΜ@K좼Ɏ4TpXT~ɢ4;ŕy\V|i]?qRHI+[AlfkKE'3H(UR$@aE. 
#L^ qLvٔ(-(J'˪`v`7fl0ʈX-flfݖFEifSNL0,8 M35+躖\cav\Dff/^Z\dZ˖aH[Fmr M0DÔJ :vQ+3e37DwGRAI<-2,&N39YPlvyOd >(bvQE9*L\Q s N#6$a6D,ʹТL ,ج)iM Gt+65CdVRtqlNuk:`n\Pkڴ/KiZV7sj VQT |D ߵB)mSWaIZb|k~Mӏ38]4mV [4yOBӴdtSӦY,.Bӑ1zO`tdYt=Ĩa*w n@Hi?}w= ?N]2,njWmy G!(Th<;@7[lG ǒccmuLNѦ\0_[py01o8D"n]uI) 5')iיn!=?9E{LŁ:]}4= 8Pe5_ςiX,dSSSOAsfRr^?C8}$%@L&& ]JaģvwM7aQX2B]t='d6EyXzCko{a;III!ŕNi֬[IYڵ/,3YPqB B$| z8`§A}aBFӊ'׍pfRUlmiUnʦM]HN1-.RFGǪZ)X+/^}͜43ea _ Ex~b2$7).~tVb8ƺtU͐ҨL_Tk3"7N2 S.BSJo'2,Zé&F9]yX=i'G)b'h @OBۄ% c"Si11%Sw?Y;wΨ,dwu/)q飻ѧzD)(r:q6p1iPʼn9{;@sXZ#Q1pĎjfcAbF2F$4EgG?, O$$S &i|@2L.ҹzGGI f⏪sq۷h}t6oN}3Y~TjiS(!r>Ɩ_^d?k -l}Nz]cpvx⺓c#I> 9ÜJe[i F1Κ'=rٗ.p3YT/76Jʻ)/E D*  ֈd2[菥H2 2G"f0 k^޺ Y?cLt$':Tx$Flz*8@уWԿ9dX#BQ5B\wfgtDV~!W8ψ5Bf4G1י)l`4f̧, tn%<3nTl lfgswhDf~196BlwBnY!ca)% UgnmIEA dS2ܵFP2W@@ǭşIrmfjW,Ԛ>yj9*JbSYg"eb"~]ɮ,/ft2IaSGh^ ZFڤ=dѪh6* X5@;g>~vu;J3|j2l@ +\6+SjX`e/[tMigNԃhc㣕 *FAԤ b{rX IDAT&ysq6Hs 1ڨ-(\Ee¾ MF ɝh1zO0:Ø(]W $ R}A"ƙ/Sݜh=$ȩXD;lJ 4:˔EFW|Ns.KƢ,x~=q)ckߴ,VCQd;H2xxu ˯~nORZnO3tHF95щ99ܽ O 7714>0X#y)Mb95tp[|3!z;O.&ix\Yd |5 Qz&~bˡBcS?-]m#%OK`QI,~' ETt3VN6%Dygҋp7'1i^Al}oQ" ӝg`at lC8X1YdPXªhK JR9UP$vh'$9fwL܂j{sDwx)biU7uT}Md:76Ξ Rg$+~ tuDW_x-#8}cNU4rjWSEw4BJR /bqdRoifpI?\ 7MiNO6R dӦw3QvǽVG2} 'Ov~^~>9_xp<%Yln]_Ν '[Ć{C(-# QXA^nƋ)콜BQub+[ߓ g) 0Wsz;W[6sD?qcX3K+nkg.};'ߡ],έL䡺z)5>4!lx]!,%Ъ*YΪKhjo|,%ws z8?cWxN.hmf"aVj6{|{Yb%+mG4DdqۢJVuuc[~ccܲ2LCfn"dbed9'* 5Q^@YUZN'}}ܹ0/7`S? /Şn/7o=k2څbqZh$=JfYൟKװj~P+Ʈ^LYEAV~yP,Y mhOfwEi ;rŔf:cQAwfPh9 _H{=?vkSfa0[~cXܲ|)E>3b]q Mn? q**<(!{wuX=-!yo/HlȐ42Bqݑ]At?U3aCpIwzJX=ɡcl~(;J-y1W,*V\b4FAr-jIggؓO~$/(͙8p{xi7(*[Yb;NNx1;}U-&mFR&n?eo˧(ޢ< WWWu36G n]n%Ww߱̉䛪܅Oo; O1wX_Cwp!'l?Η;//-6i! 
67XluC4˯3[Q1[YRv?uԹi?")CA8rYuw<ܧ{|49Wаݛh=ǭPjӬ9_Fq>́Mϰ{C PLN7+Gn% )-0zȘDabyM{O~cنWG~Ik d4E[~;06:N53_.VLJVyL΂<<:w hv2Jfsͫ?LM4ʼn4݌gs8 1Akc%aUjBqݑ]|JsFv*6WTMZ,/fVoaZFHa›UB* ]C-#H'v*L~rVm楝̒O%|gMUT3u\Y(:ϥfJFԟ4E$JVIYjL.#kJ\E&{r$#QR*jk*0wa4q弿IrayU_l%VFzrVz 2&:d|\ɨYUMd]Jw)}tO& W`qiT.w:i2Uj!+-Z?WȢC9|h  ;}BrKVނp.1ε k=S>\܅/2>52cf\Y̭4{f|̻SUa#tO#7&^>wqP҄ÓNUb*ϞapƝ(fA)]Z'ϋ]]}s z"(GZs* `'NQ0+s:B(nJBC\Er8;)O/eݣo,qS] ǔvTsYr%c(LwO aK\~*jQSyQY(4Jos5Zɩe׮=v IjvS΂5d:pY(XOr`'' Pt Jk_qo RBq)ly$IL&I>"bL&f*7(7<Ó/laa w༁g_oxG܍sGWN)Hos)|zW:{CӹE{ uɗ"r4)R~x_&16p0jeWqQȘeyiDzPtRqtQ)<#BLqBIP /azZi a+_"0Gشe7}#t;J`%*>c)T"J8- ͊.f{8c+ &Ϯ^7T"B7y͒/,=ׅB\z>rߧ{0sqh]J;]5i8̎nmxrϽ+\ԢYq;a6K[(YΊ{_}/JE^~ďvUܷLϾhc]{y/{8!WU=Lл 1k$,2ըV%5 [Ԝj|ކ5s,~JQU~Y Ø6Ͼ 奋S8Fe5)쉈GSTÝ($7Bw]TB!B4;S'3 ;MGIR\uK Pv&ě[Iب^۪g6S|1-#:mKDW#]}Xo)B*>lXT^Hz4Dլ-TT&68Hc_իδI.B!b]IR8#C=4^VF3&1:6J>%%bԿwGI%c Qc"܀ ,lJ &^Y))ç`^#a"ɓ zZysS=FmT`jM9:@v{B!B1}͔rq-& c|hb+%doԵS-8޼ԕa9U?J2ޜVcקi#E@?- nN@ްB!B!fԬLq6UMX&t n1Hln`M* u*"N*c1B&_!2=yMw!B!3iVO*=$CPφJO$I`+qP-b1L&iii3Z0X ǃd ta$IFGGI$x^t`$B!> ,ӕ5cE !1<fYIEAuN`]!Bk:"RT,yըvlWF^*-d&%B!tb#b`ǩDq@O!Bv&B!B\$@B!B!3B{je!ĥB!72:dR !.aR)I&B!MhƓ) Fu]R q H`6q8fv,0i#!B1JwMHKKcllx<.B\$UUl8Tuf'HGمI !BtնY4 }B\(;Ux nT'ץwZvĂAQ0JFB!1\ !fʕGde^!V/s}{nGy+y9cܵZ,}j29$3@&16njBnGeQaͭQ 6||wPBNcIvũF*pt*u8䜧3qPeSa:"?XK£Z$feE_wԇv v>@F*@0Bԕ$5B!8/ ЅcϢ,M˨.o=[;J̠-a,4 "}4*eXǏX|9T`MI(' crYZ()@~+^*Qz[i!(, 'ZP KCػTKŧӣqh 9Qb۫?eOL[F|>QbOql<3r+wwfo`z_POÔaxG#?!X YN+|+_.q_>w|+_F rj5{v8p T٠Hk5^ű\*TkmW vlk]a`$AAEuҼxרXE]|byN1bAzV䳒Q~%aWgFkM$D(<ӄǢFQ"㥮Fqj\Cgcko0Q>yW%X~뻼v>Z FhN`ӃR:ɨ"]1u0Щ*[L,[Qeu٬2ch 'ɼ3)2|Ao O Z͊JvX&V-aޫ? 
Q;RˤH:XSd:=ݷU`iZ4jsXShF錸>$+=l j`?9!&H1Bq]q˓Ɣ\Fd*is< [Na;r3OV<a$A<}呑@(o$É9.'ΕbN?j@Ά1Z^?'Ҭ*H㫩ťœdU%;`IPuBi=9 !{82bឋZ<%Lɴ봏pm1T!O鎚Xcw+M6q{ϬT6 OasR0+PS`%5޳^ğEUt1YYǻ0M]c ى_howV9!1)+!ٚq Bʬ_ !oZJ,[wK0MĻ0gsn!Kl䒓&9s~v*1Y:* &<^oϛG`Ql M,}K$B"d,5Ͳۏ Vge13.rzm}0txzۛg~m5N l o$=MWRy-I6<FSΡY)(ٷR,Z{f|+GNmX`ϰ#şI#Dmd/%q1/>7Po*xs}tp$ɡ(cwFU4e,Fۨݢs y6X4ߪ$] vuE !/CECדGp|CTMEI%Ҍ c8]=Ab&>}{T8HF)R$;wNX4??ՏQUթJw mp ~vg7Q;!iPIŰ8,$[N{KN E^& 6c4 ^%SHilMa0:`4 x֑$wj#)>^%I@idY4Cpb&ݮ9>lf%`gB!zf<@?U\"`pph4:[ qsP͆A/;@\i.._fH>92P-BЌOqWX,F__`QϚ)N*"G d2]$e&m4L63sB!UwU֠B!Eb4^!nTbXbB!<ό- 6]|R0۵B!=Weh;Li2?Qˡ( LNMu~edt¤B!lse4JKw*@6B!I !B! ЅB!B貆Sq/!B1f<@k8Yq5t!B16k0t+) .E!9O^:(4;ܺn\` !B!b1^;8H(~vnr:YPWJ]36lKs }='C?s+㔅B!B!n8kx94BVY1K&ε*~*A6ljR:{i ڨ+ғ4isSh@|nPFiAb 4d;u^l:0DQX )~r vp8dO[cvt늮W:Hx<>UB!7 ݎ+=YZpWe v q̩){/J. sڂY:O3ϸ"ԿM|1M~?QEuTMr$Fp//| s!{CL(M) "azB|c57*Wǽ4o|~ŁpB\&;U9NBdr"B!n@W;mKfnUi4nds/֋;!ٌϤA*F‡$u9|(V?-4K)3%%?'(>Oc?N`5SCF f*\t]vv5B!MhcH?L^7`M%KZ*E4ib4nL?ud1S ?Áâ3uc#IxbMfFAa,`ӓ*Ti8${y$YB!B7۬)FjǏf9מO'24LO4bwm"JNQTLӯWTеE05tPZŦZ̯qc\!B!%Q`ecXY@bp$th?Emd[4-VL9A1 {ꯚ(Qh㭖$2~ !B!\f2QV~N{}`j2w~]l2(+b n>U7K^zɯEs!B!J1$9L&1L}L*c$f`!Ƣ)NU\u<;^@2gp8D0j`YQ ^ BZG")L7 7pa1:Y {F*&3 =Azȶk$s{x_ Db1L&iii]!B!5ro۩$ǛMb,m^.I.B!:ZEAZXX[rp7B!B1f|0 .c]q !BqmքB!Bqt!B!B\: ЅB!BB!B!u@t!B!:pնY DѫUhTJvAB!&3]~?Vu H$] !B!54m4MCׯhzV|A4 T浪]s^.P&v绌㦼s8WO6Tn݈#B!>X$f(Ǝ Q~f{^fG <;w>"X]̕αI%82 . {+,x0t osbXh-Z<4zE#1JmH|Q%vQ @IbIuzea F|_)2.cjӔp77^`no1+Je;1p^h||M>k`[_\U(d /LnV׸m;IWH a`Z@M/lò.Vgg볯^{?I,Kp.8+ Ѿ}< M_S>4wF̳ .03`gղZҬ:MOюF6*͡{4;fu5h{! ЅY~߿Uw|:&J8~|÷^s˕A}?f>'ܓǾhw=+)S(cx;ϧSt*Z[M0&_)_ߕ_eȎ"l8?1jK+ˍ$lIfZ0`?3~e|scw g)x=?|b͹xΫỘSwe: 7l<xasfa֕; >7OMo،+??^7p} x?W?6`η =&R6㯾#kQí<%ZKgcc~_xvG7o GwY}5 ГÇ=?D@qWuU.0wl?Ƚ~\c90vЖsLJk]H~xg–]ZT|Bq]moJ+/- }{.{#ldfN8n+_^nvU9B"۬ !0R1~TQw'HGIV-˜Zn2D2>ž_}\Ewon!uVѾ~ϳzoOqbѼ9Tծ?7uz/9oO1ڹϱx8`oV̟'4?}ʥf.@'Ƶ[b#Kwvqde(w7-[]%L-!@N*M Bӛ17{Ml];]ɲ{,̖[og1_]@tRS2tÏYYރEHfp lf#k{~Ϊor孿& uW~}73 w=<-㺉jsߪ~W:zJ̹Okh=2qE_YԟYΓuvdK<߉g`.[7cgs_W!T.gjA;T[ٲ|<&mHl.Ec߯6MO .8u__;S /~NF<uy/uoy'))~zM}t-Rj(GQ5vښen"ʞ}wh*^ez p%. 
ízmzo<2v :x9.F||řNo%%̂lcjsvbuv/ߚ2'gfV!ޖVF=[ V^g'=_:DƴXv~.z%+^}zl;5U><^ZNMVuylc7h5>PCx=BZMQ5@q1GxyTDȓoͿJSd+M͹G0~w =?FZx-ۅ+`ym;y5=s57[ n{Yk>q Z 6z~tl0 AO'Oc?A%4RԽw8fLE֠J/G^ҟ>/󿬢#ul#M ^:[X?koO׳re!,θ;.Aep/!cG#Շo}-DO\[hm\tz_ ƥ,;O,fK@K ,Ӵ ^/! .>ȳcxo\S~j9{e< ^aO C${=σTnv0nkCL ⱬ |itTWRǍ_qHžrh>.Yp+G: Tbcs=x\^:p68h?WS{1 EK$ +!L;|S-4idN71v^}a %r˥yrvcb,AJr<>hZs_R_;Y̢QhƮSjfNT_qJldnI'EįqHi|MǕ_0ِMN$ SDC\O󶥼^BnuLj,ROMI~1IIun/VmWN$"|Nzٹ|1Bx,e4a!_Nh|iqѹ]I J1wlyT/fʗ8F'{WkbzױW4k:<;XsYpMiiʦ0!K`\o}iXkwsi>x1AAm5AWh}>ٌbFB`1aq][!98U p&0 4 6fjx*hz =qiȇO/JD8$˘$ -a}Eəi$H:YT+ ^# ހ9"w@Co áWd=*q9M6f1a5iPC!=~G ?Q9F@UTB^|F3&k81F۸%KVo_#<*|r2ÂHDQ C\ IDATx|~"Vz Y2JybOIgjs`ԁ$inz$.3{dݸ: #kh=цլC4tF:l5ЙH0lL+ܑ_Al~T }7tT ` wܿ]q_or_a(L$1Wz{q_<֌{5[x8v ɖ%74۠ Hj=bLH#5tK M0'bHbR POOEgf:IhZ83+Z_~^f+Ag}8HO$@䰑L%*&}8#Fj \HsytYg%R QS.k h:v=q%+?7m.w/_BF0:q,x:A>'>}е@WM8 |<4MCUXЫNڻzPTOwMǽ8L^3?>U&G@,hFjJOAN-e8*i_y%BUt8h Ҷ_K~O~-f*Q(hjhF`\7=tWa2kn!51(]ýg&cEWOܵGA|vmkh(1f n4+<*]ĥ=i5e]~$=bcc_ae !yA4U9l'*N>֦*z@2{|jn|@9$AmG{k40D4i6<,YBtS܍ިޡA(*FAa VBZ\z]HpL\tⷤ2>Æu{5R^_OS6JE|Rnztz]PPu(Njj-'ϼɚ ox=i`qرG0vBky% (yƏ.z)ɦ44-0:,w+2Gt>&+ǿ~=?G;0 Z?k~K&‹pvSH޴@֛pDޣlhjV3ϽwDi嬭>$J@UUmmxDYg"&5v=sWA]O5o!2ݼTTEA=D{ՓdW;i5ۈLF.qk=t No΀( Mwa֍Ut z%?`87n"J(" u@MiEg nt|gRgaX_X513K95Lg3sY)rȜ:0P~Ë\_G F֗x4jwf]sxYcnZ'/cEwQ{\.綧xpC7_6u82Ǒho哬okbcc ƌ ᅗڔIE:i+."#Cpi8ZCWo{ߩ21|{әDZU+>=7X"qەG?R~l~{;ƖgOSy諨j 0=sZ=ĝ|hh2,q풿;f>y B  sYdI']   i>BR(.tAAAADWlAAAA@   0AAAAO=@?iAAAALԠ   §yŽd3b7IYAAo-uȲCUn=q(   |B<&pϴ`   '!f\AAAs'A4 M81H=D~xyOS-AAAi4@fh{>D~  sޚk? |p#  ?!: ‡$>;  sk.   {;z qgkk>} S-AAA9oסX FX0+A^uĭkGOa7aCݣ1,BW'YL 'tAAA9}Ͻl000M!6ufǓy@/aTuh ~v^x_2AAϝ6@INᢜpD\jL0'Ovc gV&ajtt1HndG'눰{U",A7ݭb:˕yaPiidQ7)L:x ǧptJD`3.t)A~ ـ,z%AA.= /7e 8:t>PHC61a&_}{HNfi/t2mM٨ .=3OUtk)C7Xb=@L|UTlF̒#)><Ǎ1. A6)FAEiѣ'#̌(x'm"4L;MUv*a7 >9ѥUCJ|69~jֽS˷u??:|޴ޓڽtES(jo=OqeJJ.O7e;y,$rjAA>m4kg֎fK/SoxFGC7M|a5Nd}ɠ\dZ( fu=RΆvi઩$U62&+ѴJsp(-Jv?p#U Nv~ w0EX147S":63Ijoh&7t W/LM掤0 T"8ђ`C*Nusx2^]90H!]Tdk5T5SɈ4^JDn ibҥT0s~х.  
"繉Ǡ͊mǩ6a`&:^:N7PϬūuhVgrqN5:(vע ׏^'ofmr ʐ9k,iltz j䟇uC/@槮5zdX Um6"Ee_M!.*= F4RK{ng=M"2_ 5-~tB_!e:n?? qJwc%[EId󙘗NÄJ립=*GodgR Sr\@b4M!2FgF.tYAA9k}F5@_v=h AU]'(oPKXȯVҹ%OUt3|t,у[jAh䭣=v )s-=4x'5COhm''IBgW/uP˞޾rwz$塞;{9?v ǚ=ee9A!ZQ'vx"ٮG:QG81\~-I,,(ABJ߃.I :AW~A `^ MFt?_Ic21ˆ3ZA;>'4P/]UCCB1臤P`PA4$IB0~  N/0}k}A4 Q?0} FFԿ\ ш^>B5%$0 !ik*B!M :T&a41}x|A$^#1 x!NhD~z./ z#wJ9^+6B%t=>|u'3$@J%\[!mi*O7e;Xq+++2&{,GOb֤1b$@yx*ҦLw#VTdOy )k(L]kKD;ENqclyNALtj.a|0-:$$ԐlݲrLvRrF3i$0?\i(^+ivS["H+˔(Jª Q-{~%k3:XӐ$NͿkWma϶ lھ^B,L΄Q9$[fno'Gla㦝׷W F)#I7JCͬYm(:3Yc>-Kkg7x=yok_¼6VJCL&yjdƍv.d^ٳ(O',U 8öUш+ǒ]0&ŨC"D5,bwf*{l$eogϺUv&gcD:cc!'ٴe\ukQ|Tl^KKwⴤR2.@{}%o\vA:붲#Tst,|4mWrLިɌ鈴Wv ,]D$Kٚżr{*9c'0 ZXϢc̸jb֝ν+Y$.;:\ G)?rMmLbxkUo`pbv=]r1IʐkngnΥ&ܭǨnĤ3'2k=Цԩ9  p>U |0$0isѱdeSyp7 DDOrp3 SڪwzA\66T|Vʶ,{Y|S/#tr;Gތ+Lw}[_ĔdLhF3'RRN v\`;{7mdqycHuS3O-c OgLT#+^C䋮`zq&6Zɱx{j\I^~*c#=vfŲMtزyL.J'$vSg Ko`+)u'SIMKZSɶ.[]톨dF CV{92Vșz(&%/@/ݷ~fe65KhvT}8󮽁#bt(>'xwu(b8+ذn7]㲰t%of" >K<]ŤuճcZzt|O)$jDiE/w*RS[Aq'oމ',yW]Fiv@~d:\u+7Sv9Qo҉toYc2+Ue߻*-ϑz۴Ӯ9Ik};Hdk=DI̫h|qFMGXeVbF1YT͔ˬIpAA>17k?]K>d$d;nʹc{)ώX-^}iӼOoU R(FN$;^I),QSaaڍȒΰq]a{&7Qm"3 È0$51)LOlq)b KMR0nHL8R2jO95a Ocd90l$*%Ņط8ސz^$Mɼ  =g#uzBhj֚픵dcBFYg"&m$f$&8$0G0rtFfg2"'(=8Tَ`4m5":Rf&u6S}O=4$fbAaf=`tĒ3v,S^ĬED0DɌO&l`'-+GЯZ7ɜv5`ggU&:6)SFmà7<¥m^B!#  gy~Kvz%gZAi(ӝu貳-@f 2#!۠t& fhJPPBvk6uN׺{0'$=& \ 9okAA> (2BAq5mhgJs [Ҡt>L~N]};% wWo 37 眞ҁ!%?ms.vE|n7~UfŨ703(ϠduYldǖ#7ʋ %$a;s$cρ$3a`J8r2Fo5Q8JxJSlHģdIt'9=MaJ!%|X㬨Nmx{]Cӌ)D I%!,sfI2@B>q]s꘲0r.lig@B!C i,Ѥb<[LgH(KuN/ׯejtŀ~PkkM. 
?hܠ zO>KflA IGL" Qg/IQC>'gh=j^o3%L$ziӴsMQ;'Ӑ$3#I  8/53Ik8clxVg[\>hvsMn^YTNf(>nJ]o¡`2SfM#/qZ?CX S]JOӣSSOIvA,Xx3S c8{7 gZH|0"h:# 6 _DH_ӷRtT!!`H]MC y iF , }_2 ӐU!A&{l%[ g 7DP8: k JFIvdNGp3]_QF%bד|J=~.DOxq !s8FG#2:(-N": ;P%HP=yN _\ёKlO4ԣ%dܹwp؈uAr  |4e<'gWQNFc%3"iJ&9IJw>v7MF3Ud#f 7gBThħ7l\z]E2(H8;5Q]A!YGT?adףuAQ:+)28 چIqsdvTw0f6ӳ"-ܳWi?ux T@O zpISWoN*$vvRx테vo(c CqR 9qp ]5ض0b.YO#n*Id \wS4{p ,H]!!=p.e=D,Tp^OhjٽGd30$t=gt1AmemՓ;e&x5)e9]8FDNMAy Ѝz J}p$na3?1줏z [W.#:XkB lcJ 1,AG,Gع 9)dZmֱjMqnчQ4:j)_OȈ<I&,%8.n)诹6M YjpL>}YQ{{_v齋QTl$kKEc7^; Ham첽;s/"(5}<393S,*x+68w|7bI9S~@n:Mʥʶ, mi*̊e RKI|^/YL$b/*شr#Y''y }X.:-M .3AklMhm,"pcBF ?wF~f[??Z."I Yɖ+Y'a ^ȧs+|0Í sc07RV%O6CuY1;SH3pRE~.@] D?oPxno7=KO !Brd?0 "}ͧL/2%߰w ׁ]TP+ ajsfŽ},9m'/ZGۇ'0$w[m9ܐ,,YGؔ4]6P1Ҙ ~ 3wph:h2{9q2+dظ=:8jtWe"iLw$GmP{,;PtPz!}Q2ݍmYD cMr>Ͽƙ3&h"FCR'd1b0gϠNgTz8 >fme'G2nT"^k ~绷/zdұo?fo2Mf@X?TsezRϞ58ku}G]p8q`z cpj"I}rB!~])Ò8W(%lUJ],.䙸eLٯw ?*I'sm'Vai0^E3a2g1]ьħ&[%ÎԖ|cY78Q9zb| 'L.+weՇϲeNڐL?q!>?wMW O:v*gG(Á_H$#Gq9LAw%2|P;I"5"o|М38oVsm,o/ 8}H裎"'!8Ç] =4qEĒ;u:M+l]5--X:g` C'LC<^,C+(2л hWN߼ݐ vj.V$- /rL2 g\60Q! 0 1ւZ_*oS&1{OXi C !> !ݭTN<'0tb#84ll&6._BcSW mZl`MQ,YS3,l vWhF :]ҝуFēACwg3쫣BN#III$"?||X|FD{4|IAL7^m^ޢBV6e(/a$_RmM[X6rq}VylkNvOok:*پn3Mi N!wϪeUMXM)?zFܵEsu)۶+:uEkU1;wS;$DzBNt!v/bJFNb>*hSckf%sB!_uJ!)-PDkLʎFT^jS8u 4B!t!VdžɊ]4468lNB! ЅB[VW+;SF][7ó>?si!BB!B!D B!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !B!}B!B!D8 BMk>J*ip@T/j#;)۱*KNV(]UT[~ć:j*e۞"S2 q"je8 !)1`_aQctR[Yc|s#h(fGI'1 PSDyW9bq,W%Brw66l$("h\ o? Ѕ]y{x{}-Ki2+S 2zs<~,x6f;N籋O 3hQJ\ƌ+™Bc>KkT{>~|R!qh181y}3_4xƧ =\rH|^MOx NL=zwbWs_B!m _{^a/~u3SǧcZRk|o{1 Ѕe,~I]in,6/vOȧNwKo'V̳/|Z*O厩>zuV64_{:5+xexk ym".) 
d'1up"ͫktPӹG#IkfWko=z?o_f/+sWVq$_nMSAk,Fko/BٝU_1+jbk9*}啴T8\gµҸc^ J$ =?\0g}1+?}kigo/l_JV}{-)'s4>\LAEpHN>DtŻy,L<ӏFה5BzKٴu=ƈ8i1/+;а=yM sP}`lCR\csvDqݽ)*<Μ8bpv+{+;7tS a~}m򪃉u3[x+Hg),# ~6N0=y쨮Z[!,:`3&>xn>^C{ хBK#<$W﹑g4n>2 wj[o,E}Mw]{;s64ߵgVO&wlgbR,qw3O [MrFS9_nCq=ИKw3?[AiM$d]hr18  ӅDVo朔2}HrNvPvÌpE XAUtG'>WP=1 ͉N˥5XUyb&?ȋNHHb9V^0*ݐ}3RNCU[/:{2!y 2:e+(lfʑ!B/^O`׆|\^c66hUP Acͻ?dn?x7M N]'t _MCxs,Y2r |M:{,&7$޳[&38@wgI?&!ѻS@6MvFl)•92[VfOM^a;ߖVU:|Y tVndZ҇#+mk׳?CJ p HB/AusٜǶ,Ip\&CgMwk%z̨ Év#o[9ȝ@t!B!]!B!$@B!B! ЅB!B>@t!B!]֚Gb?Oq)%#~}_~?˿~~) 444`NB!p:?h_ Ѕ˲tRB!tqz!DLB!0~jrAR ԑNB!0#qB!B!D B!BH.B!B !B!}B!B!D B!BH.B!B !@w0'1bm6axρ]o9?ZsøwK\k\q 8_ڂa>d uE8N}Jn GN?' cx&0o 5̫CߺsHÎ:DZנv食AYO1?M`l`Êw0=cv95tgPW=I'OaN9 c^xxg^*?~ʄB:3n-ƭc8?54Lj8'jy0l7住y?P=vmܳӆQQ0rڸJÚ1n}}yV=c ZCw3j8&mI8I趡8F9z&ݛԂ'q=g_Ƈ` LtH;lۊj:)CйSAmjh=? ]UT0 ^bTs7FyB6TY8ht` ") {|"L U:*b#3AnhZwPM`:T~9tzۆZ(,EW :5 (DC{ա 0h8:0 ۠`r zPB|x_Ƽ^􍹞(?}@Eu:9Y Ugi6.>=`4CF5@`X6"!Q]!T?/@iܠ3=GY ^ RўW4ϳ爆J(*C[NJ`'{azyq@#wag>"ܨlh={!1lYj/:% PUyGNto@dC=iBw'Dzz[-QM/xV`%/xM82٘w܈:: (+D}/D5 ?'C/c|&l}(1XW_rTa3/ :PNI`4Tˡ[#!>{H.Էł=P@ .Ԙ_@xþ*[.E#);7طQ?= 0xG}q|t(tPtB vWc oK);Wห=8=u =kAG߮A/1}a7\s =<\gCIjس#V;jE& Qa/{ >uf uceba,ފ0Q+gkfw<ֹʼ PѾyǢ; Py{f'EVCOjv!b7tV@t~&FG0׼=~t>-\$ 0OXפc<, `蹸qǩs֍ԑAX3>|BAG%$(/1>{@lԟz%9S{_4XyqTq3۰&*n7dzxʴUc}|vp~usO8e` ߄y^= K1WOF=< :`Ÿ06uB3 )^/1x܈Z٣U{75nQT7q zxt6 ŮނI˯zq9M3肭Qu91'O`޼{30w%`B-#&P# BkQ+֠Js#R켷1?-g Gc4v*gƞv _u֮Q}mƳbKP8$= {2cK'Y6q#ƭ`nA'aϸ_XT FhMǾRع\z>Z0 l e"7tqw}qpqەy*׿FGa|1vu9X ANp'`=8:nE} Uؗߊ}¼Nv \ݐ~8_3ETck^ 0'2ݺȝu)̗w`z)i诵6:y47c%tA{F˾ ǺF98Ұo#:gc_:Ϲ{|z4쓦Q74 Dp1֝Gc|8ܓPB瞃uەbϜq'`gF{ ԇ`åk0U3|{>7x]{GO%0oWL?kJ0ÏaEб60> NZJ0^{4;NZ1*zv4֬񽣁S p`,wۆ.pz&b]+1gb]sjrD{Q;\w@SSi Uԭfa'0yʴq'0P[ڽٙNC ([n@GԂ+aG78'0Qc{tV<0F/kFT^6qͶg}MCQ뱯{b<Ƣm۠xŢgLEgO!k3G7c,+y~{[K~F^4ۜXw߅=?zXSf`MBZ=l4:$ q <3BvԂW0>*9c,݋X5Qce}ոo<Qp?|;8ذs0yb_(UdߜNMmh\&yq<øӰn< 5gd梖>q˽U?ڸ)=m)h ~@g܊+Q'1Űy#ՅNBhgW:>{iBPoYCOy3e:$a茱g<衡9p%O<j:0 žtGm^g} ȃfX$a~/֬Du.p*hGul_ot-[N_e@qcп#v<_tb;ƽ7c<1KDO UZ*,ĨKa< {DnAm(,@UЃA ,|cTS}y;J_Aķ+op!o.jn !p=_vGeY02 4AJyQs>AU! 
eA!Lfhåؙ~aFgJo /Ci) {F1|ZCL@HF;[Pt.Cģ\hW :%=gW|I tR<󠧗>`: $ʗ`bCD2XM߲g 24:3*_v4F:c |'s!m'x; ^nTk 9ho!(  ~UDT ~./tJ'{X:\老鐖[aTW{: AgEht]/^~]`A pl Mu ѣOnh.Xׄ7a>SVt|'?FO?iWoNDgŢJ!uŗFpES3P7 q9eOĺ ?GġQt<0 pB7{ D''/'S ?zJ=`7'=Am h?'Ss<0 @WrwW,p'BL{<hN.:~{PW꿄ԩX z9t֡>_=<鋲{P֡opA`g͍rvNQy1mFCxf/~' = w/k@g@YݞQv֢iEYf=׭ݭynDCt2 tO EїB(+>ycxߡD3+ō=8:BO6 fC7c n0t'(+=y'P7} <]s·_ 9j:s^*.xN9y`[WN=u0: cC jkx UoOVNOD=OÜu'䠽v>}ePao قjtA7^ĸVTk?PƭA9Cu13SzjZf;H7Cqt6緕ijoS}@SMe2}1w[1~uمZ( 0UZsh[boQG)OG2lr|Ntz,4aըտ:$k+j QKvAp?C>||#!8l?F-u`;T0f4vl#j<;ATz. QL, |]Ղ٧2;Mw܅Qvƹ>y=j_TncYZ.opY%0| _&T{,:}{WYuAs Ƣ%P1l3&و-ϮC {Dp9گ|<`z?toBmij9]cG@ IDATû1^F~:l9vjZ0 l7CO:/lF1 Z 06/CEƞ xm'z(t.8θPP6@H$گ0澇QfԼEHOly>vsS&y(b-wp,ChgMAgDǗ`<(}!e8:Dž9{_&03(+D1nx^ $Y~anE  ~и TDLawZcs%zH7CtV ,zqzG:Be} =}`Š iaPAZ$:$1 ?TQ3zLX.X R&ca8tP=}ܱ1Vl!gb{D)UQV*zhcT8HnۃЍ9{Z2j2=-g\}9w GdGD:'͐a׆;OZBо 2P%%> Qq#S=& Ӆڸ 9`dtJjWNBDv豱Ud ryP tTv7C0(TyE@؍ LVNUn'HWSC߂0QV1;cbFދhsYd}#<`1Bz/G @\Q}詙Ji@4:;6c/Qgawztl) 9ў<PN@;Q7 ^&1аcw9W`1Wc_OD 1gaiVR p 9`a_y):eZ zt0h-Xޛ]Ti9JH1Sں Q0(tFMDx^bԚ5}$sV`މw5`*ةSW=jodMEgJK Qzx: U퇞0":Q*ÏCuAU+L V%ƠJQ=a'*TM7Uӆ&CL"zD)F * A DUqiw7k!(dX!X?Nwm]E(#} ؉.JOJF',t5bOގw3joďDK@cjTS,:@[#j{-<{TjJ=&y7aqn `hz}Q} J˰.S}$ψnL :>! ҿb5Zs\}`OF[c=5˜QP].J8S{%Z{P*m]W׵?]]mYWQAAz&$$!LX@+y`{3s{ι{jX!)3݂bFR}׋'8m&aIಮaBO7cY}1bm`:˖ n pހ_,!faT9xa?u[c>:UsƎ,2ys13bNэSYXSD EkL^yp̖ {57> 1# O8Ε9l6,;WDS Q1]+r AAv7̀t 5JE !1۫3-,C=:/ w%pc*IXFG*""""""EDDDDDDHP@  """""""A@Y 1`xXι5 )"""""""A@]DDDDDD$(t .""""""EDDDDDDHP@  """""""A@]DDDDDD$(t .""""""EDDDDDDHP@  """""""A@]DDDDDD$(t .""""""EDDDDDDoiǻ 1 ȯnWS2ӧS[bC//TRc'0TYW`>dEb1_c ӒIH(E?xBSHطn -];'1xRDDDD~m "!LA6Ć4lb8?_Tc}S:ԉqGQ =ul7ukYS}vDY䯙K޲L/-gqtf) u$ɎQq!SfcSTÆٟ3g]Y=OgDl-jW3}~@JNKNψ8BDDDDaԏYTy3Ɉamd?hf-:]k)vmDi!~bV,8KoAvf(6{ Dz$dX_R{4LJS3瓯x%d|{|#so^,mkym. 
plbB~<X@ l)t_os7r3Me*ZwIzHv"sZJcIlOeXtϊj1 <ƃurf]%Tz8vmXmwJWtEAhd,mfa I\z9@d"Q䦱EmSMoz%~z]²uuX;ҾC'R"kw#}zk+ױdͿYBTtJv'q a5I5E;X|9lcYWl]}."""kS@ ;4!@HLd#<(lTwO=f˟nZĦdkFWu5`|8!pdw7| TA 2-lQƑq'2zjj]M4RY|ys߮ S7 ³"KQ[鯧b@f7Lh|=o6[8VL~ib^_)hՒf4KM#3+DDDDנ."f ͂(Ȥf|M^"j"Oyz CY_{ 1p-0fy?؉a1|i& V}E޲[IFtB HҦ=̔xC nW3bm '7Tb0b㰁Mm-1TRԄ_`6{2J"?lܙoz(L?<^6v43ioanqJ锔Fȷnzj+ړL 0~9q(M1u!MIз;)Q$Ѣu'Hc$Z|ԑBDDDW.bһM{ٲbv `+vlrvhޛf@Znab1MTU 6NJՀIa4kMۧ3sN}f=7fW!$b|j=Tn`T^ Â5:ܽs7Pan  \UԹ?4ѭw6!uY6*L̀m,[ɞF1ȟU5sb[ĪE4xSGIy% t"t ȯF""3$[+r_eKfDؠd;`vaE)Y'ciۡ =rrhY۶2Ï<;>+ٲ%@B:aE6xzH!ɹ~n?͜c6$[i,DEZn3"ڳ5̞[7e4rj:3W1)Q|uF$6@} 15X=5[srfbXay =0n/[ZGEV(ƚރa"91ٜ2#S3co$aTŎKdiI8EDDD(ϲ2ˈK_‚%+X2g#>izIo"AXF.ojZ/[J=/j 7.a&#\{w0cb![JiCȑTFvӹYsYd6|6S[oĹ4]saFh+`#ؓbY[k6s3bYz VYF5}羜#/`\A%*&c#CNsdolo#mu' /#qb/_˼V"2i7zFnJؑԷp< ,\y[LpR;rΰiV"""""!TEyё-lyݸ]4y| 4,-POm}=4pFƺZMpm5pW} q1m}JٷU{i0 "#W 8$ayzn_ð 'z>ZE8R'ƨeZALn">xY^8MWBvL`Kq6kMzP&/^|UM:rmǿ>Am8ѻ[ wvzA>ZYLBFڎ@[Nbr3҈p}#y+&P_tsh{;Lŀ VKOyy o|4rڥ<S)YWHvT>zdV&NhNfror4}TW^`E6- 6az >blւ6AN\[5m$RXVy2/-IwPV3͗yOzBJ}Ѥ8QBǟix |UkJ]l~ 7QiS֗B1"2M˼0|"jCFzi&,ځX:l߿d7 O%}/̋Ͻ.+gywXV҈aX[X-ߝh41 yˉLp╼GAbś<;W}!{Sg->+1 e9aE#I001nۿٓb1{Nc "aֱql₞&K|nLz#p6=sҡ<"v^_}cϙ8{7 {Ox=|b)tZW_'FH0rZ(jQ:I*ٶm8Sk}9E[y`#;7Lyw5=/quq,).؝v9 qh*a+㌮e?3j7M残OMN.}»sK80 v<$93|/'2K8g;}Tl]OPcc]ذ}1=rsx\>7>5ř&kBtj/sөJ./ū> ѥ]g֖ɗ#QHc@&evAqm_JTDl]wIu\tڶeArL:qjhNɍΣjO3`7QG׾nm#zӮ?.aݧOpftҟMT4Tw~]Ѻ˩Յx3ӵ5S)f͆/@4ga}îQOкt+Ư^f?q]{ sЭCzje7{V}ʃWJ9 >[]Iw3 :_-'_\w/΢9ؙ7?T/>=k%g'+2}b~!ޜ8S>gX?&f(+y˵ԝeXQYiU<0YL.fl+ƆBj2} Jh[bݿt=/:gyЩPjO=[3z۱3YީпKW;o(_}/OγKntx9#ǯ[Úqޟ?.[ʚ(⏴Ї~;tO %o?, |K,Z{@gIƯAtXy]ۨĥ=$w_џ cU1KpsK?sZN5y!tЎCc]46wHCwԔ{бrqOw'޷`cyu$^07_{Q^B/n#_.H96_Rh 7疀k;ʘ翟k"Ofl)E?gḋ)=F# oL5=KЦu`7z+/"aLdkrKU@֙UwՍ?s}MJwaoSB\<(^Ǿ?|q\ԡ+SRB1eEߎc~K=L̼'bX*͜ڵZboeu^{1M/#@JĆc^f }Y,-;}-HMJ6R.nUcs͹y@b)7}1ϰAmnۇQg%ey્?b3Wu_>&R\WMiv\I57͟f"LyUSYړ5c5Jv%~}p7qgڋoBpx@ +w ۷oMxYq72,a-\{˝FcSZRJMS3পz,z_HKK~,Y͛]f01Ot>Fo0Ϩrwv%](oԏ[.9gŵuȌ":5^cyQOF2И>ČTR^44VPZU祲ь[G>˹I{DgĹi]3PW,H~"rR0o˷ɳ7ndV^]ІIMJ]dp">AXR2{Td%t*QZ@J2205Gc_&.-^o :!Y:#h3 }5n+Ql1;R־5w^_Gx\\^JiIr:"Q|ZVI9 Տ 
ؼ6m_ɼOd[,.ػrES1bo<Ǹװ:/˦~z{rJA?6ৱ#$q"V*ki5Vn磪ùYUEKH30rΨ':Ύ,(?OTN^;yWD:޿Y=4?۷Pjұ|īS\xD|WizUOHJ8!vRξeʫB| #zS= ^/m.}1;\Z&S1c-6grʰЋ5 #qA?/5 +]z[]odZ\MMXiD;p:L<~Fќ.}^NsGlXW:k_ D3ed8tgݗ|RX3r?C.ut쁭n'9I >-0[)Gհb^ G*[᢮ 9qO#>a7ep:Ll0B3jM+q/'F?D;,f's:v 5jQbTnI#h)k K CNf+{?3l NɰU$?bAXta%ˠS?=M&^}r$ʨ ksҩ}o:\\M ]ɩwRTVOJĉv{L4_*|aײft#rhݴ6θҋw+fO]G=6۞mI8J0q$ƅtr0;739\-LϛAYψjF])571=3y4>1m.S\]?YJM%&/7w6S'/Fh 洂K~%nRӂ2~#< +?{0oĞ.'yFLQvR]UځNHJ'rb;sMէW`WO] G ' i4sV<̡kV}km<-z\Hbc[ضy9#`Ш8,nM^ui Ioexf#~%rKm";BcLK"dgd#EIrlDnX'9mdL5}oCXj+θp}o<ƺpSc 4` Y8g;* XD']33}(r| (?桑O &FƠKL'6O UU^>5cyk vo֊rϯH1p#2|p]bI)Ĥ{3eyf3y|dH#btRX͚;cXV,X0Xm ؜_ IQqgpmqgPS{<?܈?,?JVl:27{m?ه[z3[o㽪G<`!t'Y;raQgOLf|l7u{̙'ޝ˯Sޱl?/hӟwʆFrMJKz N} 3gjk?|;z'cgW ^kG~xwJ#^:hg4_U'og^nw,ؾӰb&lXwA-Za<76<2^;k|On{5]sۅ;x{.~BZ\Luc lw\c!p.ݚG`vZJqL-"V.'t}OmFGtH >c?%`~rR#q z-P͇_z:|}LYJo×s3[S,I=D;HkO=#〓.OOh J' sĶcpF>_Gp[yǴFzbEJl_O42#G^~y wѝÚcwR\cOq5=G=k rY>3C29?zÓ:!ݸ`pFgsss's0Biޢ vA?0sh9~JaW9%H}Uad>% hd3.ɶmb+[b~g L­z3W9TNM{V1K,mD]O"?砋"%l*%ئJ7lc/^׏Rv1l4cʵl0?råH:I1Ą(n ]D:9ՅlUKΦS93ΰ("h&cM{Vϙ9#X!k2qA]TGAvH1MTUhދ]Zu "rrHPwHP@  """""""A@]DDDDDD$(tt8<r?>-.w0Gi}^^ph[Fɶ-od_`-6ylRE$(w5DDDDDNZ)))8ΣZV]7fa񮆈IXη 56/"A*l6]MkEDDDDDDHP@  """""""A@]DDDDDD$(t `;_inB?wgs7QR@D\4Nb|TSPg';=(EDDD$]ETMcJ"zkv"31M)*`cKIۏoՂFE~#Y ;_]IA`odZ+i߅_y_ggj痬sr,{=n99 wNhfW׷i,.? 
ZA8z`FOx,v._ŔytލL"/WS`"4.]u-CUEr7^MYS}EՎ k5SɬM Á=4r~S8=Nx JZKy^^^HXhvl6¬)) %l+15 B àz2r2ز\VTP}ɅrX W5Q1ۑVZ|oALb<[$ׂK~BvgrM=pY%J#)‚*ÖPbcѽ̎l cޞͤ-ՔU5u+F-#"`uAHF^Ҝ%|xfjג!EO'.#sIDfOMq.""""G!$.3cx,OQ|+ڜ(?K͌~0.\{ ^s;*)abj GEnVR緒Ѽ9BM%OTJd m{ԁV;7p&"""'wM9 )-mҐ1 P[h:i:"qZO޳D͆rt=; "!"""""K8 """""""A@]DDDDDD$(t .""""""E~#ঀ.a'&EDDDDN]X]ڱ 潊w.""""""EDDDDDDHP@  """""""A MsBIT|d IDATx}\u038Ý2bH7`RJIެ궺쬵ݚdOmp64Âͻ" r 0c@QQѰPǣۘ\k`>{}wuvӢE JRT*JRrT*JRT*J^RT*JRT(;t#""""""r|JRT*JRT^b Q,"""""""XDDDDDD⥓JRT*JRyd~i(XDDDDDDn V*JRT*J /nZh%+ˠ^qŝ_Ah;G){ NϘd=1q􏽏] ycu ۷og[ǼǑt2z{my3ƺ ?>g̉,Ln5b>Ǥnζm)sJRT*J 9uCny.- }Э; MCkN,ὸ{@ XwY7trݾrb;w!`lN7-|ˆ Cucok˯fR.^{1:o}iQ`Ÿ|F, +QL-TX9GC3בWu~!rT,OcYY ya1/e|s<)[~ι3\8<ǜLxs3Gp^u&cfGU3SI=}3˗0mgo?"w8sJRT*J ˭,=F^@Md݀7lN9@F~;ɾfnj *Qc>'z5hk0! .}Trc `Y[F3n^4aF7QME l-Լh*OTyдU7<+P`Ɯl=76[4WyoyVPT|:ӦjN]Ey@UTPm War6Z(בRT*JR˸LpmUޗ|~oKlS2kjā]o{3іVhexݞ"و}b?{NN\ >6O)rq7r3Z]<)^IdH"[,m:S)/όV?ma،YcsֲrmU".)+F ER5݃dWxw#o(7=dwGG)oK׻AZR1yDcn7Ѽr=li|e`Hc;aCү$^2gmx/g#V\yNϏJXϰnNy 6Mڲ~ >}{F+?k:d:gu7U"%C3o/? GdW]^DDDDDT_V|vZΞ/eB?3eQݖ)I?y)h3xsHm_zh:%NnpW׵e$r$z&l&"(66qk_ɤVCP(G5~cWYD[ڐWLZ|1;2ߣb+/X3ֱ|`~@U5M A0]{1X]l | dM+[Uܸfu=lCITasڴ4b';h/:tW" n z&n_9UDUA9DӯW4AAa>WƇ>LUN_d椁t&z ͐p(H+sph_ %3=U9_ _t|g,bjp (?&rfMHnm0SFAEr8 ^3{@9ZYCuU> 2ezg0W$7u!{_ŮjT*JRT*uNpuL[A {g[?V3zR:N7)|`Y#fݸ\ک'>0g8N_wV12%}9~(v{m~` #|iO3U}4K]N.wya/ygL} ˪WP^iy~&U%߲j ,q 7s]xiX>@9> }O?-\kl]:ƞ+Xņ?>.}"_ʌ%g}zMe.JRT*Jr9.[cco(2JRT*JRll.ijՁx?%;D3>RT*JRT^6($E>(RT*JRT6YukH\T*JRT*7Bk$\۴iV S*JRT*J卐-u릑`!a"""""""*EDDDDDD-^:9ZT*JRT*7JԩEDDDDD䆠`RT*JRT0rP*JRT*Jzn%""""""7/uQ,"""""""XDDDDDDDE`"""""""*EDDDDDDTQ,""""""*EDDDDDDTQ,""""""Ҝ..v;.K'r///, xyHsv{碢"X,x{{DQSSnLJPuH3sYCU*E.Bee:CDDDDZ/]. 
`F:]@DDDDy2 D!1k|0b1dH]곦bO畧$PF}WB$>oQ /eLVWWIKN' hCȱ$s{۳SKslX澔H]ƞš5.l%:.!-2c9O3{R,DDD΢{D+N*mLMbY,Q.S'IRF^vMV^t &2'+'e2XY>l\SWpV]e13$5JCVsIe|ϱgBb(e[e׎'""r\ >aZ;lNeڭVH[KL袱 `C:q1͡KU&/fe1f "7y%)5KX k̨gʟb.^Yr<'9+.Ϡh?i>s-K^ay!6DIdMбN [%qDhRi FZX,g $8"I<7qH]K~Jr$4sl $琼xsdTs,V- |ڂBT6E$΢QWԞ_٬N t?s\cRW7i"= @6d%C`ĊEDDT\r,$P5)?$m-s)5wIIɇIH\,'X9^D榥hߢL/';ԀFvR”.l0N'ܭ,oyp^-#׺ "R2JH< N%;j7-<<=BE!^sZ |KmdӵWH򲭔Mn{>)˖ _ڳX1grՊ;6[[ޑ0 p,]bRM09wrRuk0Fl׾:7_2$J!ӭWM~;{ᖑ]OmeewC@{ߗ=A&v"0;a03džp03VS7m:Θ{{ЁM_c85<͂_,X1j'R݅@;Y ewmy!Dl`{c3ZwYUѓ!D\5FQe4hLFl1cyzb]Pβ9o s cO$1*'-T-9XKӷgD! g&Yvb~ Ӓ) 1\.!8/SN .8(ܲWgPo-4BIxQƇaA~+Oʥ2c)2 4a Sl{~2oO"ۙͺB 텑ʆ5EQ8Şæڑξ".},v""Uƈ~7Z3j \h5l^j~Mמ}jg/#iG.lA}yxꨳF /0QD5\_ Y&~ ̂Li:b/9j/""r#q >uߝA<2/b6Q~~=Q0FtxN9?坽D 賿R8歏hP,cKO!Rm5 3gOA]dט~G{wNa=<229[{JK_*gbԀ iH*hg g#G-k9-TE1; uiOvloosͽ2%S1lXhݍ`07ld9N{.$ƙ'^ QȖT` fP G0cN-=MfUyڏʄHDḨodO `H`dF[na>5E{iPʖs d9.DDÐui4\x8ˏ^ ;dxF{op^IΎ4 `഑sz /L ˨IC%Ar>ȏX`ĘDH?@Ap8/I[cOza)mӞ9'Gn=qo4-)fL˚# g,>j|)o * lT}En &VW1t#mP{UDyW9`;@N~1ӽ[+t@^_ phR53Eߨs*g[W=#QqgV*5pg:uaOwEPDB7C3ܪUB{wNi@ݍ&j/8Ď6ΪKLLn?7J ='Lwh0 7K#[2zu4w:eY5{ޣKY%zkzgQy`5cX"""c]'t{+GCnt)(f'!{&%jv~w_;ch[P=붕50х14^l޶Z<'Iyi5--Oާmo%%kʫLt-fgk=}Nz}o|_7^V]Y>*dH`XCtlcp:j{ƈDleSY&əv-49q $Ј"xj26O_z ?F{$Fll[S^{Lړл1%ʒ6فs۝ >oa4䧤a% }Â79b22֐?.F#Cu)._@rhCLLoz#C쵟Jlz{ȍG v87QrWS7TdV~6d#c@5u2_m{Qogw{XY6O ;mY#OOw_~E%}}8}ofy[_=:ąIX1ehڨÖ[{k@{軯Ҵ;og`@_ϔLJ(Ov%&cӍd~KH?Y{14z~C'{qq(q\Q8iςvڲ 3GhƎ ϔekB?~k24+@e6iN`>$gD9=km0ƅP>:s_pf %.֞9A7Ϗ(3`"UG`#;S:qDFp\|vhSM^XBgrv2I&&!xэ+m Wr(GNK4L 'qOC5'nM<2-{ n~H9v -gsRH\PNjJ*x]. 
m-˥#7RXP{e%H7-կ2@ȑic eKI9$'yt-B@kuhib66-xMO( ?4F1rxGҒrqf'$ fJψeA8R7yw^U013=cbٗqa l`u$!n@Bj`ߓ`a:ޞ!E^"#cRYYɡ xv;bedžkt}nP[Hœu B2 4v ?MҜX`QV{С):(&&ڻ?s^11e6Eo23LՈVV慁CaJ5a2jd圧H b5CFL̄7Psѽ@`댁hz_bBlC?0{z8+!}OLs#͟A1GI/K,ff޴0PYfcL~?e6MGpR4ٳcwj/; `zloKH|fk߫JHWk76MOBL(fC%e2* $0~<ug҅ A=h5,QTdJސGz^" FEDDji$XDo Q.3H•^70}RuA~r2E@aCUT|"ڗQESi!%%fliޛ+nHȑDkR.>M-AL׊X"""ia,|_D镘?\_W4}1V[^Hcdf%g՜諪0u5N3's ""ǏDy1Z0] ;a&Ⱥz5" +E0`ć kv&3v,V%_^=[Xޏs ""-%rha,I`"""""""*EDDDDDDTQ,""""""rSyx"0 =gO^\C˛z~(QØ5{.pt׀ܝlε9adg 9>bm􅸈>3_7^z2ؗGF 1.xm'q?7C:w8 |D$mã˩_1j7ߖ΢ IU W_BVg˶=ZoՀo}'22_{ ટ|[F>T~ j5r r4}+ Y"0}`?:0qB4q]}.PyW|kƃaLD9$B;&`APv%59}[ˎqjLԭ}'p[`MMZ6Xhjƀ7!y71' (;}ʪ 9'9 vsPGW>mgO xroADDDDDDEUܷuС`'5͊fdj]%`lHn};2з-Q=kwsg/ aO;'N!GS>cUz9m1;C$gSsx4Ѿ5l+lfؘt=ºUJU ZnwcC֭^O ̼/T1ljRG43u\O\.6'Or兯/V///""""W i$S\\$88t]jJJJ0 Jn. `Dpp0v]o`t__oӼEDDDT`///jjjk"QSS{EDDDD"l6cU\fuH3tY nӦ GB 4 L6m"""""zEhhzMDDDDDDI:qQDDDDDDTQ,"͒0O;H }k +jOb &گ>ccY]]ϝē_I-{'Wn/ G)Mڪ>0}븂6XXPEDD"׳*6|%myzQQPǂɟӋNu #T_A~HDDDTUV]p%%K^]E8L $,ɗsgd>I Y&Bn̴q^wBiKªTt@DӚcw\a&O/S.:} ;9]^梺MPd^_A^F[Z73CmI\qGp`i@EI*O)JwaetW2j"(!ԃlË"q/:.LxqGtezgo@dg=`t8| kmtJ-t-l{!޵q^Ku5-.r/: z} {q NT=e\T⠳ thZxNlb~sMKn';}1!?s,|t) դ}eɼ-m n/\O}o$:IZj{6K2:@̐3XDC\LRΜuGsҳnEq1 >,g06dx'ELAPZ{N{Q xP:j7X;a;|Z=Џ>'YwF6|#iO<\ Tsx?kOdhbL|ótuF>9I|;`Ud GTgc6BxIDQG3{\]~'pгSK1 R68O&Pٜ_C\M &NFY 8OvE +wTj@Fm_4幬`EII vK_DDb!88/pɇ!b s>̱*0X'EVC`.P?Cx;s~?"0 ws[]NagGoN `3<6r&G#8&ϸv&㧍a3 vI`4cɅKHu"cG>dcxmr^~8{<l{L>M}`"-<]<R>z±uxnϟbR@?zA~ b.~|gg:\|W1v<O/ڙ""""v7EEETUUaXVݎꐟ@NNQQQz?g9>EDDD'5RYYX"X,TVV3DDDDD˞X҅Nk2u\tDDDDDDDTQ,"""""""XDDDDDD04y2\ʆk6=c{c Y8_p/`rO_K""""""L`/BhF &&©f(uY&iE0DՃ;a߹qt#B.RcnEY|kYz z9-[CO0s7NUܧ+`cF1^qi_mZS)0?RPFh]s[G2ZwX:H؏coKd#wp[g^~0VdlN΄cg2/CS,"""""r3\f (sgfc 7em~k݁,6^l9ǴC5rrr wݿ6mh^TTTs* U\(0@1;K.>=KW}upo]v|g7A]6h#[Ai>p9''֭[_=Ry̜b>YC/OEQ;\ǏkH3dPі; `FE4O^Q,"""""""XDDDDDDDE`"""""""?˺N%{c"5%wT釖&#ؙl\8N˩hh4燿?yWDDDD" 0(-ZvJ򄭨?naն1| `06~p:8x+9o,aQQfPL&S%Z]]MEEEEE^`V^.kMv]J>L;heK ܮkmeK S\5c6 & `DPP5^V\}Rp6J̾q]tF FN,k 44eKQQAAAklV\XnRT61/oo.7nTzy{r\5:kf\&Q^kKDDDZq#.hQ;£T*̫4WxN[T^"j!| 
5Y\.pJEjq\d Mn3K>ZSM'Y]Q_lxr!Y?"""rk^hOFԭMӦ}lj\EաݗLS.p-W*:\.:.ļ[:܏Nt_ oucWtgFoͼ]UTkt{;=aXqa:uZu~ݙv*LX ]/ݸh.\5?3SNb$tĴ\҆saVL[BOb\Oᤃ1Ә|{[,L;Obڤ6xlgjә6-9s-#H ';?@LxΓ^d| |G@V/fG`o$ս0fpO(}ؼs{O|1s[;8\df^c&xOfmnZHޘIX6'h `ƴ:LG5pJ٫X7t>ܘGA_NʳQg_AQ|/_lc~ahŻwyf VViα|N6mIZQ@_]Tпu_ogݛ>#ncLV:t22 M]7)6#o=ђba֡VWZ ;=ߔT`z1mYlMչmamոwȴta"uN_ b! |^V.S:hsxCa<:ͤ}c52y>-|eVčyJē)N<-cn&?[so٘G-N j<ӗUDs`y7|1o`B^\eH'"""r㹂աݵ#? e Sn~^YM5/za7Tdͻ7?O|B+o/%{vmj/B+H_}S*jeplA.AؾQW+.>TٻDQtUǘRnEv{Sf{[v1ڰ_-ֵ"5ZXK͍R[JfdF& B :38? *MK܇O˜s|9ýf6|RM "ԬuZ9? qpU&Gxm,;Ol5-DZ:<}('Lc֬u뾀NBeQ!WB6>ҧb3C|-u^Bd6]8-~әLLH$2nJ[/t_ rDBcvSyf:IKKU5. dfNdKtS n4o3a6g|AFS MhP:0@FDF~RA԰h6 uD<;=|z= olb{Gh3xF^wٞ /e?gXT|qx(;~A$/{%O=ɛ~8uy"_ :{"_ ǐ,IO3`sȜ؁Pꈼ%|-x)s"f< W,iyᅄz5ϺЖ|-g=DļR IDATK[ WB]&U4-/晉1;{YYYw Z%D,(""" ߮D eqsaBÈV#/"&i:ͧ_7}n//gmH:ظ3/J?Py}`Y jѮEp 0EE;N7fn'kXmv ;vi3sǙ6.y)Frg;L3gҗdz[l⚗gMk^n+xh)qizצfrq˴kOrl5"""" \R6$]|G=B=UsҢeo ?3  5( f7QW+C\Zt?- g/0aG*{(3|N<L|{*\@ gW=Ib9d>.Wk [Uqnw!)op""""-0'c\34&bm2K;_6ט:tpޭw]/d?Jֹ.rTyvhַ0÷mimm-:~9oCpccc-*Ul<ha4b Α#G8t@M4@Cqa.fOuDDDDڊhzOJgC >Ѳ]^x7^=:wW_};:JΝ 9oTKDDD8ck^>R[Ɔ3b,Nf?ܫRJ9=n!L7N~VʖK0ΤEDDDDtJ=83I*U< v7935G>molT퉜 ];n3Ο~~[UDDDDM`@DDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDDEDDDDDD!XDDDDDDD!XDDDDDDD!XD~DD&"I.&.|6ERlQșh nL&Lv'g.Oa9.h'EDD!XD~dynvo;9JIi_Y*d{~6Uvj wRbyg<WH{qINA6B%Vl8\ \dJq줗;V {B"vS$)EUd8铘v6gq6+8 ?,"ߧrA-"!#%>*$i b]{m */AJdhK)8V+i{|$>vҊv؇l7>ckgBF!UGAɊ=!EDEǶ{bLL1z}n wN ~$_VBM*J."+ۉ)LIWÎ,߷Fd`$| 67E#~n %9C ?nFpUV5"q#p[.[ IbTO$pd0Vf3W)XDM Qz3~M>)v+6H+A}G/|HDE-bwI$H x윈|?>> nŝ_e%AX%"kd \|-%cYsPpt`e^gXn7U֦Ϳ)SVGIvb_"N;NkHnFEDMROya?Ojł wQ!qvpSP$?o~BD4/ ,> #-3"2SHH#U+2C8'גl ~ q Hs|,VvWTM#m|7U¬[\yypPPA֬Q3Hf\Ic/x_GJU/hOC>דR`c|~I6+qy3 *vbqDq^|1W8NƖ3ݻL~d"oHw&qo&09YNV7F=NKdžMcqc~s>$DjLa,2 13xwqECu#"m{.q3xu㊈LOX%i$u吢,""""1O ````s~"IU """""M.O?$FUH|ZvhP``````9)R?+RI^X*[ O1y'ɩ&|$OYK 5kǑˋVBJD DU """g[X'9+pdz:{Y~z~ZoPĭ\m%","4B2%`oMLԓ7R2h e4_77ofĠeo̔|1!)LaDU@9  +Ryr2w/JAd̸ CM9'^5EdgN7'sk>5f Lz̩Um+v?AkIW_4?}W2jr]@\DDE4"0e;h3wZ*o+STAeoo-eNv4w8Aa;Y5*xoj>Qgr?([ [< 
b[$9DV)nʃT5{'Xnies,NLWo7N;ߥȜo%|-~6~2#coY+D&YCY=ٴre)cvTo~DeO i, n*CꝣX@B_]Ge~e)7F"cwG-f vVDqu{Et#>2q^_.",o *qRl\tO-ٌqYz~Ĕ;I~=^*vi ~o$ 9uXVΊ6B ~v cNQ[^XUꅥf#k=76+W,kgt%i/wQ<F""3aAZ,k,S͓yVTD䘲$ObƬ[Q("Ҷͧw'zE;5MIej &lO[/n< {XU,FjOt{b,P```s-""""""? ``````````QQQQQQQQQQ`i*2znj) Xǿ},xu 4Eq09G<ʢ[wX4z8'0,eG(Hkٵ^@e~ԟX_Ǧ*ϑ1Ge''kxfŸ3$ Dڰz/Kw6rp;egn+pÔ(5B8`ofr0g'|^Dq!,}u@{3yPaϚ՟`w.≻afW>x{'Ffy>/b3|,xs. zs,L+ɜ0`ay`x?̎0ϋx4j(=ËwPA4G_]f32#wK~I+ /gCy0}C\c]`ǼO~M&eCzifczѥ (ण`̻B.fFL8!] ?LrؗwSb̾v@#|ƲX42󾽌YTKӫc~Fvҋxa^e'/vWYZۉtFk^a;6} 8/`P?=*@EP,Ghykb rF_̄۟eh')v3"""" "mV!GWz#0b̯Au4}a<#ٺ9:sź/ǚ!10.qA5MIP?c\M mu|sȺeفll ?V~->QRyjgUGט~Cyzmt9U[p?7MAƖ1D3i?9X0|DS7q>; qf[fO6.8G` T:?t 81[ײpzECk ""","m@uK߷75eM8a࣏7~k`K{nH]DO}[ӻs')a@GAZr[}( [mlc1ye9z(y'1,mxYhijk?iM^@@%*n冮&J lW͋[Zqy{]' B9@5YK?|u' 5M=B;^.݁O])<5q^pi155,- i^T,]`.\TK {'m=n]q _DžȷQOH[Teٟm}/u+ƤDOoubLqC.c2]t!Qv\1'?90c[˙JXLK㷣u6zbflt58PG BG~w%wtv)wmڛ<'C|7)1^sΨt 1SNgĄL ~90pL>al9:n8y~wer"& o-|h8K'9ӂձ!"""-LaV=rm(xwƘ2#3cSLFt ڃd`X .md_DDDDD!XDNL_ǟ]l!ޟkcBȩҋDDDDDDD!XDDDDDDD!XDDDDDDD!XDDDDDDD!XDDDDDDD!XDDDDDDD!XDDDDDD,Q Ū& 8La>)"""""","""""","""""","""""","""""","_դҵ^45rq?&#:񈈜';"m#yz8b]BC+zz<:YX׏+q +-dC37LE@mе6c 8 ]掫yv`?̦מޮu-g{-!NDU|qa3.Q@ɱ9U<$ZUk2u?#Yl3SG33}k#pk}z?fm:08޽!:(ȏɿe|=~ +(-=ȴ#sL>,9NQg8;ʻDYW"󹥔 cC/}B1r7 OG; _/CPyvúcx?Ǝ ?Ǽ̳ûߜ폙4 ]FdfuaxK+cr*w)5~1Bv}Ň6jcm1?ȃz0[l3w?`w.≻b870e^n|Ie,WG=W\۝I]yF+,uTD;qV?^̲k3_f>>hn+w4֩#֍kCȮu.\dV~僷wmDo9fʚC7 4ۺ3,W]|ڎ]`Ⱦ ;TQ=1{W=]<zW+eې1{a*f¤mtW Ԏ*؋Q`?6`e7޼b5s}zo оnG3n>wcW[?;f;Lr)+kО=ϋ,~G;Vqǵ+Ædlz xk LLsW~>Xws=_93!!?CMg5b͛6AܺfȈ>}2nn o;TFDB#S6b߫sYu9o"&>y3>}rΗa|a|_c1F>P7/j?0|pMӸϻ 7w _O6n;st +/xdm3Μ#qn|Tg5[y0KQ3 0ꌍJQj /ӊ 0 όoa}QUnelk>:c;QVaJ#kcu>e4bc@p^QQwrW3\ƛ_Fck=c[b$_kuy6Kc<0 b{1"gQn6[oz q19i OyF`l|g1`x'gEQ}12c0|F_Q[c J0 Ϩ6uƢƿOik >{J9֧عH^`Lo|w1,(3̦uմ4ͣ֘99)3+Qt:[ _)1f?_|f IDATiﳾvg5:;F>|<[TmgzE'Y sƖ ?v @0|t6XȢRJQ᷷|eM*ܽSnڇ{~k`1^.f5[A`<TN#{\5?%c׀x}-28&$^Fօx/fSe+"7$Foo `Op9*+BxkC-'k`Ȉc7\;3W׉;˽P=obAShdO}$ЧihH USkihu[\a0pr,V_(3limw4;#ڃϘJu9ۯ{1T_ Q ۺ_-$~{mAJkbS}88XΌʚ '>ݭ^0W !4.1n#o#9?}[{YZ\57G[EDǥ,r Ex9X7` o&G0 OsSfs=GúpSos .uDQ ޝ;IY] 
:rMz֒{3mkvcp\ǞbijQ2󿎆z*k!*hGX=?'wK`zܠ[gn(Yw=mbV=a43jH7 O=MЋo !SY{ ^&sU]RAO5;F>մ}[2>/NQtS9ii,\ߋwy/Jy#ȍHL^\^Ly P&/ڛ2ḡng>`'NZ=X~6b{a7c 87EEEңsZb˘3_@6`a/0;":=w1U3P } #` ur9×fnc@&u&V.iXts O嗰̝.aф]'51_62LY)fk8c茍Ӆxp~B;еS(8Le}Gl=/rfгv|s\Τ};`&ta0.V#{ֹZxQS=/7`nߞn.grק/$yg<8gfLt<^;35wK#:eGaµE҅&mܾ!'\ΰFIycZ|1V+":1iH-O-)aqx"vnm~SD6?{SߤOyQ,g0 C """"""? pǡr^C7y)L&L&+vg:(ˡJr0a2/_M[#Z-WB̈́#ۭx޷'6%9}ܞw_Cr~XOh(ņ)n7Kvu}y7o>NQJUA)>R%)x3VOtFeX w{ ݻٝl(s.۷പoNJW鬪vHof;wf&.IZNJv x" "FFbEAN%p8'q&νMd%`o&ڞEIU IT{ )!a^WM݊djs3"fpi"8v쾹8LٰRX*>}R(ϕCÊd#>!kS;?H8Ǿw5d#>> 8V[|X$dZ*/AJˠy.K#2ػԋ =vP^N-8#ǷCe; vv;6 '}"nL fGZp؇l7Up*i3aƑVPt :w~DƑlmN2[:{I VGƑ6¥( "r^s)DdQ6zđ&219!b2:!5aX\DП9|n24u%sXI ?NavqN,Ɨ(AǶ{Xbd"2lB2RQrW6OфJJJ9=榐q4V,qP0nSDzdgM"߽%P> X$?~ r(?THIJ*$%7ηvIW}r{RȡyG%h&t;td-}7XE 761/~6}9's]yYx Iz=N~Je%Y8|sOl??ǹ*6(ȹST+2#0Kҳ(5]MZ|60.%2 ~+Oѓ,m׏"HF]waG6B vUm!>1-pd&LcʑaV0;wrClBHΤ/| NvN$'ؤ4hvaHt:۬G )rCua "b?v"NbOXJsSX4!Bv۷$;$[|Uα98ia^j\MrY5 .7nıDbyCL*>|4o#DE+QIr{T!ҎU|C8r\<V,xppn]T51CY|Gogٽ,`oa'yO&P'nqomza{ON1aTw/Nd#qzzN&c͹,<6KŖܹTw炘r<6'Nal$ΝK'x2ܑXZYX[)*㹹8[B:NK5i$6>&8Ik9EEI\НBvG$ָLrOaÕ'vnė@\|&nSfP.E{ lr}\ects֬tQqǑBw:NGU=fmDK_Pydj9(NgIFf"ɊCwlJJ$/A5OA?2VrH_nl"""rN Q"qy?Dfdv&VVxAVG+L>x \7`9w'XDDDDDD~2L((((I"mJirex$6"DDDDDOCOB((((((j֎#uaPk)fEʗ0v *򕯘Bjr*ɤNYO~m %Jr8n5T2{CM(ĸGVکcIMKjXjZOr*o5|ҷLjj2Icii- Jjr2㖔XxߢNXF-allN>:Na]e>RO%ul*Z^':D!XDDZ>E ,YXDYy!f$fJ]8,ޙ X`&wW+[hmP.X=yx<>s3naK )/KeZE/` f$[<`e/_ZBY oʜ'^?m-`d~w)]83X`&[椋|T-]º~|*{wcgd jiDt`A?]9 IJ*5u|PgXC7PGk9 :+G2Â9 r'|;ay"z>|}&ҍ`yw} Y~ yofV5ǒ<%lܳ9s6PQO5}{ @MdmR̿;6^f3k9X ݏx6 "Yؚey)cSIA0@ [&ޞGh~ ċRcְ,%Bҋ55fgv2,<>a6GŊ$MBw^צchoV G:^XSQBt`1"0>*~}~M|Et~7Ulzgbe!kʂX?[ͯ'q?7Qt^>ЙFLbhyX_ y2UlH6hši K.3z\+}gY,q/ͧ[cskW kc=z_aZ/|$E7*ҏ'p ta~m6hX3S ^8vqF97D_͠[^Ίxbfd'noِS+b.+ 4/ a5{oZJrR2*}ONZF +oEޤ~p J>@,|R筧y0?⣴{?,"?<+9CA+ƾM1w,( RFon̎16 [؍<>5UC߼㋀J\:g.Ko<Ȃm )?rei˜@9+:'q1TzD[([_⑱7%2o+JEu UTUa/+afa*(: b֭h YҷQ3Hl{kSF[BLFv=;o[>,|SN?_̠h(_lCQhOOXS,e#)܈*WX6'o=!St)DU Җ(+ۄڷ+zc*,f9,Ǵ^/gW浿fOwyl\6d &Ɩ4^׿Q O'>6;b86v:߽Mbf]D!XD>&I]\G\_'k|Ŀ$}:Ӝ'G~tf'S({9șs5s |GaN>Ÿo9NV$sWڎ9lPO3y~~ E 
L[|/*n#LcF%s6PG$oW'QL/[6}y?0es)F&__O$~+~ٳo*?u'1k*?0}w;y仰gŗ]fn̦ iٳw .]uܹs[٫v]Lv}}5&XDDDDDDn """""","""""","""""","""""","""""",""""""QsEDDDDD䶡`QQQQQQQQQQ``````````Q[]+Gӓ,{GyƉOޟx㻑}o9wy@7,|(c>ʣ>Jw9}_>ã>ʷ^>:3>ʣzͮl}Q+7[""","""=#vsj* o;I8zÖ>jQDDDbP| sAȌYStF9=V(G8cʿ0QiZ*ɋHd +6n- a&.V<E-v᭶wyrV,3{|?SGq`1NLqsaQރm;9Doe+VpEC'y`GF˴d' Zި}~!y%XB!_5+@za?7,#(YҟNQ:x }D~c#03s%.f6!o=ov6Hhd>C_"75O׼V߃d}(TSɖYd}~9+-F{gXX/ސU&1Z IDAT0'G[h}EڎɵKW6or t(ogm݄`պ3>c \ +H4slYd9v-Oo/=bWGgank;ﹰ+ ]yN c}F%VR(gX2.Zo62{ta<#iB?9Vfqlb`vxqgh:J;^aS̻%Fl|DۗYqfb ޛ#Z:><Ͻy?,""" "r-F rq&ycoΉŸfnv&ljux| 3,\2/OBO-'ZvuYEsjv c鵋Ϸ0l[ٳe"+^_Ms?DBz;F`OמwW`<nx/oy6%w_yv"7ipk "!*_c?HCj^1?Ɣy _Xb%τFhh4=gbHYM2dk {mF0Њ `F:+|ьfEsYYb YsX@ʅk5>ɩ5r{s|| }S-?aW|&wۅZEn>rIS #bAuGÞW&.6N\Xr6jO}m7slds/3Ξ$Tt񛳹_:46h#l,gKBs|tE^R/97q"xR' 0p̻4S;d~xE38+h81q2.##@xlI˷:355Z/ya@,B\siGND&. {7]iӌfSa=ywwxs̴}/K[.H1S.TZkO{Z9>.Fϙe& ^n73e_PSHxq1F3Y.t+#u-}%L8z8Ʊw~FB܌f C*2t%$ĩ[ߢM~g\ƚ ӗo(t~E~" <3?ֻXf3Gb ~q~2ıF[ϸ}5cǹd5=%M%"""%݅~wi8 usWxw9z+կHr7Jx|׹'xB1[<0rc'/~kO}G˷rB)Z LXUtehK)'#Mo])рw/xkr-}u+(27} """ ")`.hbG|{~®7F[8 |Mv=:10>`fU¾dpko3~KZp$sU2שz;+S^Nƺ^fmW=_iF0yPk)Yz^~]""";'бݕ<KR3jLgWGyGߡf'b䔯F :VNbtCKdel}=wv<%+:WxBi%xXw6E݄TB&+VfVNwABϾAP 0o7i{ ``L>"pkO|-:|c~idkuܹshL((((((((((BBBBBB\YTT\gs/c 6vZ?J_6:?Fhpݻ<B=*xP۵rg|]xTTlqys=]^\ro[{yy6Tli-}>(|Pf kF/khQDnjؾgA]s?a0L;:%ed5)|"YyAv{ ""@v&̅{T Cto9@SWt6N #]IWt2|Y>Փ=`DG_8 !] @ IYn3;15t 12?s`hh 6 7I#t~az@L KN6#@`fC|&b'߬_uWٿ]<On${$ _>.}߾:; $_HP#R(HH5= $[@c_~'g1MH3ͽ0x=RWHo۔Wlf PYä]&0[PCš:'hH`B?z\364=<[Ȋ]-M,Xﶯm xnOjǟPunp^rߵY7noA0f/`Y"1`9E$P޺܇{ "ֱ:?::ʾ4uĀٶB^T# s;|L;:`hn;l`})H0a`[pC>7;j#}?h]&_0&l9,-TޟpE6ְڑ8+>/lqkk s4vRTطy G/|PCsriZߵa0[sXVرuaEKӉ$pb.ea)%='ya]:M&`a`f*_$R1I,pP~P4u^LX| Z-Hn?ћ׾la~Z֯_ǚ+C5/&Owj>? 
OdԁC;x@?5[[Dv;غׇ9w5k׭xlFzmEL0u֐ρvp( 7@Bى_*2g0)LGPomeN~cK(]5 5 q㍻iH`u]H[GkI/k,Or=XY zط%GSv=n Q M5 V_Cl̻FαF.va3n^|楔_Yýk?=+O(бW)kc]㉌hNFSfJ8K[D7Slżtją-N]Gs Df\Ź$wmO$gӦX8Li؟ھ!wltR XWmw'/g-8GtD;Ho6[ ĦgyL; Yv- `vK0zܼkZ?5X:=`7{df 2nYDD!XDnxXORY\\4 s6,MOlN$5;l3z݈lXB}x{)&/;t1HnbrVQ(d2-0бB'˘l 3&/kĘɂt] oOa FqEpWmp#>!D<6nPS|6XISV.L6c6'$!zB!ҭ``F5"ɘd./bal7=tYI:((!fs0E$&]PDvL/>fPGd©PM$dx]]1(pGyBtj#[,FF'tQ4ovQ\HjrVXnGS#d&U)[=t1X3dK)];:6D[7F ?UK#hIMxzCesfe?.eYIOO%6lcU~6f3trsSawtl˳RINaH~Sx9Y$Z)xx &B>g& K)]4_$8/$Yx 5~? pY#aL X(CxߡzܾD\VL10=>ž@#@m 7C ',Ei#] t~FC,9f'8ɶ?)ׇ<~IO6P_dPwʟOX2eĞ]%Ta(/f!B$^kLhh깗L0 ͽB$b]4v/C,j5DnU7a5EXktk/:¤؈}c]jj|,NaH[E#U=9M5!z\H4 BՏњ¤>;& [z sؤ 7ф *BܐvXdL۟aK>^J9rֵ6 AVn 8rQ8-d;ڦ׻HZC7Fc41 a}~sC۷?6 pس}UCR^Cf˜XA0.9(cSiڊ#@BW+duB+4v _J50(*:2zn%9[B9F0QT]GZim;‘:=drǦaH2LhX+Ra_+F 5:|6lVd.ֱ=tQy>Uj"V=Ս\w ߰ěw]8|}dY`GhnxŬ] 7nHRT~#ᦦCk&}니<-fQѮx1Jyle>Ke!E7 !pBst,="u{:)a-և!z:&n؀Yc"fML$11D3|f|Ow 9aURLRL}c'=D~0F3F $̶8c9Y1:DSG$)kݿu CW#Fʈ9Jzzz5y\3n߄`I ӱDLG6`NC~̦S{5d` :=> ,xlzt4hbiQ#;(XDطgt%&ghCuu vǝT"r_룯χ 7z) 7L͡pSˇXs<t&Th}kkICskh^!ivk.o/`Ȝ?o60]m jngÄ߳ FFbm.0ݰ.hݻG1%@l:jj75 _GSFn|pJHs E)Ucu6:8EVh$əCBκf|}}t:?_ni#P_3{|b50:hs@_s=IM8Gos?Nr 6\#ɱF9J}a^+A+~NB!~ Z\<֩-nw졵fwߜM8W|u&;_Br3-XLlщ>搏>޺iZɽdַ`4jbo֞@>$Rc$/r4HV}ty,딏5_M iS> hk!{sd XC!}}G4 ٟEIݡE>s X[_kbwg)| ut%fS:RG{;[0};0k!&5er#8y'ٿdbrNISh南Ձ60?ls#GGffM;y`H*hyJ͟7/hY;yVr\,tHW$dzx5Eױn+Mg<9rÛ^n/&=;gq˜ZSpH1!ls# TTQᤢOMUR]]F$ۃ* ϶ȲdmN=il*ꙤQ> >Gebo\Z&^Fɶjb-ei_c%&QSe C[ZǹawY{ -dD8옪;TgԵ}ea;tmNev,~jK\l~?A;wm.ʁ`{5U,v +<\qh?@i20Y҈7826X; Z,Eڨ"HKe!qUo돈ZEDDDDn3Gg@Ga3ŎeCK`۱ǵ팴Y[Zf0u,c#.oSY͍]MĔ$i^З]#)/dI2:XB?RO2VT^ҀӤ<i`,Vl%2\eU+w`2 8"oBJ%V9j @0alDN١EDDDDnCm=|mׯ8v0<Ige@$GE16H;#>B4*_+PH=b(<z /}G$+ a~h`?:wk<i}4#4: $XgN㏒~Haϡ`kُ"iF`$Wە&`` xpa"*Ew.w`"}mv8HsU>`*pZJU5N 30EEg/E+<:"""","7_{e[7ؽ]Y5U??@ɿgK5:.)ÓQo~ ?og[f]7˨ÀW|( #e^Zʾ%㥷wv Q-cޖj2,čBQIN L4Bm8%8ƭhQ:qȠ:-pqߵi!BWG=Q% UQHɄ06W32hi$qݘ 08La` Hp`LșL&v7 &L&F 2ԡ[AU ij72qЛ"0rXpWGg t,q&Lqq )ęLO0"ALZEDDD8Cp̚a EE`o^iOFy6T g˂Z7^x;atq8BuڱSG3{p{Ǚp07̈́ <-X'CY 3q| 
Q[٢Ν;wNHd\xi7u{9兔cgeZK2¼oϿN֗i.v4Ⱦoy9p|MCSeGuo#*of尾M3NCDDD6qt~SDO=40sIwNeʩ,""""""ץ7ppb,|F&ΌwܒeV)͹7pQ$93}ݜEDDDDDCR7I,"""""">8lj>8,"""rc4̪ >}9+j'*Uu""Sog9u:̬fϜhf>MW!XDDDf ;?ؤȟm.TGnp$h*6snIO~7g`[-gD^ e0!dO0D# ۽Ԗ:oWl61{ִOU7g`[%GA֪2 ±ɪ/H9"QD)J36ݜEDDDn98DpBMnz~>-O} g'tsNs: 36ݜEDDDn<^ס8ak.X;8RN %z0kfIwNW)ܢx Gǿ0 +\sҝIӤn """"Хݟ |HVhxG@4NX/ c1?d`E|?HݜEDDn]h>gWBܫTT=ʭ4CH;%Q[J[ϩ5ǷZ, REO&U [AGyZ(S>P+JO ngq,Tؚکt\~a{!Ά2ۜ݅8kVvdM:rMtHր}卝ʻrucݑ?|q %'|ӷwQw_I$2-be,ס:Vg+6ŵm_OqK[6[S>6Rakh<e+X `2  $WH  dNa\|99h7H *:6*(?<48^j  {lV _;46ކ0%fZdW8tQ fQd 6[Lytdʣ%0{alh\mhll<שHg/MuqeηΟ2g`Ix){/4?5mm$gtvv%oLw/tA[Wc2ǬI c3::ݴi񹩨,"5 Pѿ)* 8^ xCԪ!0B _1 =yEH} c{*[>D[cC{[,VWPt+9O /j4Լ _ᚶo7}9yy^yuۨ^|Jl_iM˜r27ޟٽiBDR/U(us{\A}}vu)u6ުЁJJ+8 WP=D--&jECy~A 6wmt5AJʣ1ؐMZMyt 0QJ ilntV.v'l9GnZۏQlZY!XDDd$7.̑-f3( 0l&ph6A VN }1Fn&YLn ""2{Cy9iT ,5vu_WxDw5[ǚ:pQg r2)9{1mE:qD!XDDFIloaJiYr<_{2n'T 8>/Mǀ 'spᖶnZW(Hl6%MMggh} ce͇2gQ8ZUyp>aQ_]wۺ1cKtr8Muqqc6:~EEDdM2sM,S;[GT ` L|,3V_}r;/숲nbʜgg7Ne΢,""qZkfF܏d//*u<NnOXx9>y'9ydK{E!XDD(*)y,3+j8E6UͪV,=  j\o x(:{j^:uME&YED$A[h1|Sao8 Typrh;6̛M!8¨Sr(d7<8hiBnP ?}EщE˵|:a=Y/Ig |ء̱L<)-s>a`YvoZk]y+;RG7یQjza5SNt6¾Wa}'<(uI s-s6yf߶Ώ.P`uljdU: y7@}:i;O1beC , ""2o.yΛa]%kndL,ݐPNd}™ٰAG:{iz9K^N&Srq5#6WAXEDʝ=wJ5JD*Y]Ἡ})9v9GjʜE!XDDFGloaJiVh_V۞ao7x$a>ÑΞsʜM7nXʜbC,u>l*|NQl6%jtG< uL$ /5Ic$6BzOx=ѐ`Hg/=}졧2!}6Sf+w:D!XDDfV]R)^g+a3_W.WHh6A <;O}ȱzNE1M$k\2PWAXED&TU&z|6Մ0ax*'Vf| IDAT.ށ:&s$Grd9&/' r >MogaQIsMfhd)y^RMR] 4+#C O/;]l,Y7 XrƟ1aX&dm߮(ql֘U.۬.^FWvvDYCEDn:]ޕjt%ñMy7MN.G[1f<z@ \2)Yw"PV`IpѺR kX84Rr*,wzlUZC]k`ueeTGCe] %npMeQp,|uTҨwپjt%jx*uʣ;aۣ]B++Vs$G^d`U'q25oWH(Hb_*˨ zSPT6 S-U߅~4\`_]9(ՁJN;l+JG? 
\}crGpGY=yN#?Ù%Srdx'{#m9c\|z?}!Ϯ?ųA`4_N`(_0ZG'8rc;+A5 ˉ زr;Mfod[0-&r_6s ?Z 레BHr(ed?*MrlLVup\і9qqòcr5QN^۱pB)cCtyhJ{̪pdYNw`.^2]n,\99q|%=[~ y #|~QI3N O@ .dчp[ 0B _1 =wQ/$Rp؞ qD/EvT's)tloh-0A,,Uu% 5%?FWeOkț >?+Vdy4ͽ׬m@} ,JݡEdT,u]滙1);r5l_Ie7Lyt+1ʣ{G;4#e9u4}™٣OX85߶߮=¢," 9Ζrߢ|xȈl674~ئ<߹&DotEg3Jc?d9Gz"4[x8D+/n66Sf+K-``=ںx1ʆmI5ͧNdžPIhmjl%[bh{LMV12f0oU\ryƌPyךk2UMịx}_)qV&w͏S5 o2%ϋW51O 4k4n 0}&gV0TfU8{ϰ3)7l iE!XD*os(wէqnvОx]7%Ϛ+r.6+» #a2X'3p-LK* +B$Cvw͟z鍵bw]RD.HGh>h97!(<^H})@`5{͔KfaXAX!X!XDjö}ouwEF<:p,mbJn_^iؽ f Hl7Ajx7M鴷:NBh~]kM]|jtb˹@jiSߟI~SDoG:Xuǒ=>Ʉ&A>vDY3E!XDFɶGMi9f>Sb5wۻzxnoŽZfk\VUWxo[-8} r6?[7}mvn=K l &_t[~ 0dR& ++\]a߫?.kfSj%2DͦiVfUi3lKa=9\_0z]S *ay3Y8ؖH~#)EDñ \06=y8;XK2/|-Nʄ ic.塵8~|l ""Cg7A l2w k%hfVWgG+J<pL>azM5-?57p}!IB嫽]%E3b`=IQ:Z" >]]pB\NZcތ6RM<ЮS=荮[^͏.bED.Ճ7r̛M(7Vc-x}cM \}V12f0oU\ i7B P|3X ބ0l%l_$e^m?__-]҅*\=c;M N"j% U 8?nn.zo6wnt5R&ss8m9MOdrxeܪE!XDUo)lRRc-I3ѝ'ع-&dr{uRt1~sNkүha2X*+ H-0/|>j%I^٥G*L4~v1%7yӯbJ8F C> yZ y36j)2r"paa`#.kIBPt7 0|҃ynf[\_0L. #@{0C0I/°BH%HPUc-Isٹۏ33b?V u6|c/OXF? x+[pEDF^TTkuW:$2< 2:n@s4>_ϲ3 °BHܤHKZxǰ^kޛa޻̙>_ ?~4.Ӡuߗ2 ^is=p{WYqy`z{(t9ku85kvZV1u`CsJRhQdJd-{ن3H"YN0 dPܫb`#.ٍLj%_&xsQ/@ ߄j_e PE676kI\@MӬYY G8;6پN2LNϗ{Ȭ+BH_*˨ zJT*صKTh PSSWTy}4\`_]9s NJJ>X|DO.9d7efeG c7F}('0 "U4};"E'H)DWX=6 ag`DcW8ȮNCmV>fW̘1)2g|o]|9ktVn\_[wз=uω>dǾO|M~E5N|&o7LWބ ;`ÞEL4y`bQs$rżoR6kkCTTXl-Z,Ʊ ہXA=vra9Q8 -!I[q)H{mt/ϔ ps4~Two+u6;ۮsqڀ+~}D3hic` }FP,2Y̤g׋/rTxm9L(G/~o'g2᏿$9Sx}>xFjz6YK:Dw6N5sx NoGE眒Q|i|&Ss{ʧMV`4a\Ncua9s\i]. ˎ ʹD}~+orœ0f+\i5Rb̂o5{/4Vge¢帇aDotV12f`U\;\yJupeʣkog;\`ƿwd=,3N߇[u5#q)~l_}r;_z-֍k(CK. 8@4@^7z[ #A=. DC8;`df|zG nb^6}VLdvd7%ЧʞJN>P8/z3s_<^KTKCkSeORW4-3_Ǐmzeeҵ.'^욭<5IaNoСRl,,UݡVp9Rԡ&X{ljP[ZJMNuBh\duwBuk y2=|"MӬͪG^ >lw%ܴfC6ر4dD/vB_,ٰsQ0yaGT+ ""qJv(/8@]&5PXŔ,M)snx4C*RHlJÛڛx+ͰqȞlʣ5O8𳕰}2amEDdH0]\b ޺7Q:^?T ( o!O{VU]~H'e {xerpL)g7Q Xp^`8hoh.\fiJk=xw.5Jpe_'l(YvD/U>:Ufw| eU޾k#{DPVVI``F́`l]lضCl`~A.w*J|8ot|k/ hzEщCk!D97ݟگW$y"=Ll "" b:HkQc ۏs1gfżWLCC*Nl>E'LMX2}Vc3aуn0 ,"6xs=HD0W@3'}H 7ӈkz EgGoNn7LWF4 ++$T@Z#`pJ߁GW'_$?uϻk)"b]tIh{^4FsD2l6+#9^̦+$F*ju%b/:rc'6? 
rR#-YĄa9vkB|0j F*ax_)]xo`J#Q5ֺc| {[v?ޓ\_%Y0MyN" o27›x;g7S!}^LG"9a] a ""I#FEQ^{k](gKZ߁۬zFu;&iʣ wƔcvb{3])'f% ;. IDATtS*Onw{S!XD$i%ߝq^n@"JVUfu :`Un?f%up}o}#:}*$LJ^;5R)g]=bۏ갷#Eבq|(^$r2sAZDrG֚MҪphmkL aa`$JIOn>p\pl?8~œ:PeQm7AFgUN]6AXSJؾ o0 ,"n15R)mw{^&('D.o2{}ÛLtp>`7K ""I#2?w_r 4ۿʣbѓ5C\a ud6|ohXN Iyipa W[W)$f[`:& v(/80h څ}k󳹾 DŽ L -MDmG7b(raJ/cp|Vl,]nS,3+ÃZm?__-]҅uEDF*D {4z730}`Yc3NL Ml!y9~܄aޟz iAmQVVI HIMHաq_]O֡vwzB֙+ ))vLydJ8W8 ^ip:=[iS ""O#*' +EްWN&z={rʣ{Hlp 憩f2vVK'}}}\_0k>y?}{^OUu쒌Ѿߣ~kf.^y7 rAͧipzaj\ "")dHwhp-ṟ̛q_Eǡ0,ZN{OEjIaV32̪f.^1lXo ma] rtV.;_!XDDBG#+4nw|)]%A9ٟs2&zbf.^9ȞM?9K`:iÅvw ""mS56ׄSΛI1 r2uF*%GභްP NR'LT9" Qit,ghm4d9zbAn4MVCw;şl).BHph()4mUXN]C`e؀ րueeTGCe] %npMeQp,|uTN9#z9qԯ ?T ?wyl4b:OVK'{[ț &,\~cLhLs !(gTJ%A]m;I=|VL,0F,|>uR|`I%ax^90U.R|k|8 VEDTk񗄩 T᱆.:OukSǑa>KsƲf7p9'qW1g锣J~w?.f1oULpsf4q]ncTh <ˣng*fc?;yllU =/Q|VT!XDR]ȍU5Tz|!jBU*$H?u<`UU~ kXDPbω>;3 |:LsNu3 rRnkRba Ģ,#mPզUvZz_cǎaRߪw"iEIe5r?:J?DZV65bѶ=vra9Q߳[pYsOonnqӿλ@aG5'bYq͠_t ΞGd1v7`||3qw2~<\^ #; >Jg^=.cLSaxG;:P\\ ps4~T^s߽m\5n ]2?fMCv}OaGiI,iƂ&NA؞vkAa3slj\z鱮:ɠ.~W?s|/cH眒#Mύ`>+bor~JUޛؘ8`$7˅Dyp옠{Mөv,>UFڅJ>C=^]>{oJJtƕ~[pyW4iju|[o̮pl|QZ13kE{1} qYm?],Y+r#g킎ƟZ5_Oh8|JW=d`4Tp!BJ} J #.ۃߪ9qB"!'X`nW8@RdhK;͹.-$]tSw!n|l홷G5Ri@ݹۏ33 ϞmвmDJ/n.v N>"C8-JedSEvjY7T;ᷡu5NpY 3k1oB`uʄa IQ*JYˁ:jJ؁JJ+8 WP=D--&jECK)VZ7;k%YaHgϑIVlyl>@#8o _5| Ӛq  *h7ADX{[{WY 2՟14vqo8\!XDDHg/=Uo2N?Xw:vqU#S7wl@GKttIlg1U{^r3GL.|qt,""㬑J]$ǘxaYuYա X&XXX$!?jw_}r;/숲.±: +p.Ȫ&|y#:' &̊u`b`Ua ^Z A-IED@zu`P#/T}=>`M]#~S0++$''aVKw y&/aG*]ldU#~ ++$N, < kED$vJ-H]hm{,I@,ԟgZ5 |v,3+ EDdhaop}I O=st(${V//?-]pk EDdZM&KQ 3o6MDXX$uh"Mnz%!°B\W̞3aؚ~8>/@,NZ5p# ""Įppilngö}6͌I*H\q,gAro\B$Dg4 ro "Wj(AX!XDDOG*5nwED.32V{2Og|B1RI8qp!6lΖQ "2.j;oWqEDD JM~96ɈQ "^5^\!XDDFzR,/\>3EXXD$ED$H%`S VQ45RIXEDEDD.ϕTR8b``HJr V1:"-RWǕAFF!%5!quEX,mlY>no?mV33'HYpe m P}AyT{W/T) (U[Nw+0 \@yOHGYFF*5o`CG,2,`8!IcڈcAs<^^0j\.&PPY(q`SDC HCWaEEExգ~,"#*RW_Mֱ8}Ԇ   VaUW(-\CeC u/uuDuT$^͵T%ng Pp8L"`IcPƫqI'`}nqt2JU SP$ZGeJ5x"aJ|x}%TFBdO>g%g8aj*B~*?ޒʼh!}P_YCa诼梤چRԏrBu_* Z+ćpkU~+ TRmң; 주O)uGDF<_!XDDR_FJ/,K7b$g! 
`%~vjʨ31XFEC4؅ G]QsEd}ACU*QQn<,U uUFU2`9ZjT ~/ UA=: e?i(eU,*e`I*`````````K_6h=Lv} R)%9|x/h(Pr:\@H2jk([(+ӹJ 訊$ .]EjjJ|R'XSIqVzVVqB>/ޒjηX;D+}%Tm&P(QR _#܋@Z_@[E?@Yi Rj+=Xt..gg)x =PYe~*}3>:pB(+s 5PUXFZK!XDRd0H:Pc,QQQQQQQQQQ``````````QQQQQQQQQQ`I:"#S;}.;>'>2=o<?ғ3ߞ}O=?~\F=&k㶏θٹ% d/"_y9 |y|oϜxe0c$~.GƷY"""" "i({*S:~zz:ġM3I,rU~79_䶏θXzcϓTcdOe;ܹSPQ2/;Bm-;1VKOL2KDDDDR<ϧtq&pWw1C^^9ZT9r\c!""",Rzj4wOpOɍlm<@;{=LCuVSYe|svIq?OCˬ-ӟ(Ϟ~ kC]RW3s͋}ݛsh|qV~T#d3ヹ5L]CѼsolwvW/~ bVeP[e|Ke йez7~zp}oݱ~]@6SKY̓O>̙s>yчX?%ZWˉﰽˋIb4ueEoֻ!U~ϲ#6&IKaSrjߧγf8/4{.[e a#9OG%;#i)t5ש 5ŏaN6=bnޯ}󎰬Gx`:~{z_1@[?b2ziD]公Ͳ}{'a<`Q;>,F<;8w^S,g_ !|y:Mul> ;MĖߦ2VgqT>H|%Y@?8z!FM_w#;u')KԦ}hTYOFFZNl0uC ؖ]t əHҴL$l?J ܓ@댗MSԞ՟Y 5<[DDDD!X95A `يtH9꿏(n쇾^z8uN2%z]Rb/3<~!vhn_~%K=1~NVp_#ezjJ(9y3^^W~D~Mޱ!wh~99ijqmw~&eɬ8p'=&8^!:t܉ܡ"7\ gp$4P{ <)i?·vw&VDZE]M]qٌH,=u=ܱ<IM$.vԉG#߾彗$l^:jh<;4suٖ3ֱoHHc:Nz`}扑Wfىw_wNRRrv}""","Ò{VPRRΑhLKYq 9Ǿg8E_bԻ0uty"i=3.xe-*{${6%ˇi=Ҋ7)nꐹ$6%;tM+S}~K(#1ɞӸIhcڍ7n_y;.-fOm+>Dd]d\)Z'Xd OG*+S0Ug#-$vVRv5U|.<-)]~|˺L6ng({L~"I8 x;)-Cuk?F66=}YS\FU 5'./%zմ}P5~o=AsTۘAOVRظiVhz%vz~0>V2LQK%|Uz9և]cٳ};R9G` %{nQ$O#5yTp5Qf9U,\Nڑ]w-%o了&6dn$|:ߖ@7iC"V7_fg=IBxJxeG-яo_Ji_;R^*$%.lzRtzҴM3hK!=s>װ*]ؒ7!yfJߴ~תL[*i7BzJZJ9g 9i˿5T7iɤo :A=:@_v;q=ɄVYFZZ}sS&6$X5'ΛhO3^e"%MqBדL4Ne.ʏcXiϩȕx۶m "刽;[BqGWBYk08xGBjJY˒/۹;q5*oJs.9GO_Ww3d#h#[ : uW|0bw%;ׯ<͍|[i}]:!H[(mQmcM_Lv&GNr3!Ϳ>!>6kWܶ.L[ |zpD+Ǖ߱ MnU0~cYso0ޏquRb2tX`i>w9G⃫)|ܟ͟G29sʶƒ[+Si))y9?č9>ӾM}B)>01.bKitG}2}?+o)fй`ORPF_f|ϹűdվA/ZlCğ;F/1z>q'|r#}qVfnf}B$aq|5TTL}Sl:#IvisS4k7dFJ-u |n7%9CHEg~WT:ٜnFtFL_ƾ>ȸw䧶qF3h+7m ʽ;ߤxkq0þtGrl$G11 9[ e\8Q7[Iܸ(wϧm@Պ5(vq>0d%Ir<t}IDthv:څ\OȵDG/;+}t$ɴV{ . W[;n,cxdkM().N͡F87"B/LaxgRHy#/}-n|+ 4@[T {=pikw6| ΐ-o}wS'd"Fh4D[;._ʥx|]H-X b|~2:觯s6[ԨFFe1hsAJb UUtaݚLfdx<c[ˎJYWM;u VfKŴ {vͨ a@ V8Bw )r VZ 7DJJbY*#5QakT6 mňZO -Oq~B9 PwFw0a0ZfM|lmR{8եopC|{뱶 MfgT*KKl&z!E>ZIM~4+ol[X֦T3vF><&&|3-L$ ߮$v3tm7 s]IU57F\sWUTl 3+4}~wf4ysZ0bU|9}ID2h:aa6waaaX^ަ'y]h $<=(?o7f +.5)[` N\M[x|Tx(,ŲnҟQRBgg e;);22`W m0X V<-ﬢdO`JwdO]mMTPkuN˩tbבlmԵuV/^"Yx,wʀfLnit&JvUI[,i6)V7xʦN:f1DM?s9ߟmfӵ$/+qx/ Ok #1%wU3? 
LGHI~jw4kcg8Zsz]hlboa[̅KIe'^(>d6klW9mnU6aX"Em e\7wUgCvUjo[9FrFb vL觥 (1a M<}4مyǗgmH!]j:;i͛MXVcet[[U&KJL O%A&vҝQwr9JvQFgK-%;qGO?jUZMۍF70VuMqe 6DBL6eyy͚[D}MT4&LFSЉISumX\;:,ĥo%3ůQƚl )D 7;uB5dnhag^ns3kwr iS+ [I/-fWX"Vakkc3s4i[6%|5 V ZN2UJff'8HKb5Tt˛ǧTRݵ7^-o?%C#lRc;> 0*ɾ7Ϻ?pL n\\KOoY;fz5a e,2&ךCIPbgJiJ()0ԭ&q" #Jj\'7zW8b[ұ E{l%~UؚH=;ʖ&qbv%lڿ7UǸLR,f@"V9L%Mt4V0ȭϓ=~ox ~˥'4,"rxZ({c/VfR@V,_"c۬W1TDrj*ikm,VBR]JNgKjj}klg9d2Ӯa;yԖʈ`N4fEX륳ܣ;Ulynθ6ޠq;"RvPC CO```````[ IDAT`i""""zP3M]mxzM K׭L=>?;y"BoKGV聘@!|m>" t%Xm Fmq<0W0΢a5,VβEf"4k5ۖ]\h47 O%0g6K)5d$YfUO\~K>9PSJ7 ]w<`_<F8-0nrm 0p ZZ8 S /Ylr_]<04;uP``,,qEpf,|H4\b2)"f8II8r+  g^cͽL:rTTlRZHANFId̼Lg|fOX({;5}BaN *ʋȉ9_rɮ̠ry9va@ja%C~l!/  HR; )mn3>`XKcƳڦY`aٿGuDRR;,  Â3B: (WWr**+.|B+(ȝD~E%E3ij33| Xv4SP F%5IYХ[DNs6 w)z0,f%xK@UQZO3Ke."1׿^Tvǐc,$9c4Cw} dbH:Xbe0h !!Si!, F} 01,!\e\vR^9YИ_X‰ bPoĐ_ojzhnnr[`ށX“J3߃n` );rCJ`^1dƓu,h$XDDDDmɭa0%lg7y 7e_OgiF.g>aH[MӆȐ%3xj-Ci'&)RJK +ИO~l V|"5pt4XbB4[C^y9E۰L^K讯&eӼf0LTb"Ƥ2)&g^n"#5emn%%,y+ g[^S vFEDDDdAc}t>-y1Zb,$= FlVrH͈'B 7]u}y7-"Ȳn@V^*vJCt{y1Y: ̧J ot=Lؚo#'d BCAFx٩Ą` @G8&C={^v{81Hhc_惦,V#},4fOaϚ_L$|_Y2?l CNae;$=6_| cѨ 3"_12N|߃(/J%# 姒?Y~"7"*rBg8*S>"IDDDDM]<]xY-s#/y1<IdH x`+Mea)fa1̱笠y8OAFIs<7ugpwڼC|߾ñӗާOB'R """"ze-[kW4|33ӽCKoKKfIȯb#x9gJj^%ݥdeѡLj+|*+ JcVMEj.59SDZLDH[FeQp-J"47cwPlB,a`!\D^w6I@@ wՑG6q=2TZHȶ 3ȽӨ)$NNFIN^""׎FEDDd("0\;8jI"7# "fy_A!kI- ^Ķn?`2XɱPGwN6FA9, YdRc\&W/U}9b.dJjH͵b%Ăb0 BkHӡEDDdgJ8+|2 ȍ#/|#l(:e$/#RF مd[FKFEEX ) IDDEDDDf>{VEČҬ\ElK*%;ʳ:EDnTzEkFG ٗnȨ̧0>p  EDnd %"""""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """"""" """""",""""""P"""r#e}C}83t2tvӞӸ\ q>41ʒMܼ[Xr fѢEs0w,EDEDDD-A`pJjZZikCSѻeвWqo1D-& ń|Ky(.`KX̋CDDEDDDσ{Ѝߴ>'*c7DיaH[XWt;.aY͘noQ}T8g':ÿkKI Х٤Ƕ(L=G:~Tq͋npSŋa [B7E"" """r\?9gu}, Cgb;Bnv,7/&f( ps>D/ _A9Z\-.[- "5:,",""" ]P=E/??WVˆUt'q?uxS[LtﰈB,(> ҿ̯?=ws{F6wzhy-/d-1/ȰLC{u V.ت2s z{m .~܃!9`wn r6XO- iIp];? 
CVQ9o\h4ۘºDʿ\^P:{>zN .Nt zTYDD!XDDDZ93..=ǿ_@A Q`1/b!"-","""s nrЮ?TBւ~h:{z~M`+j |ꊯj=-G01~lHc7g",""" B@_aɈ{l3E8 +( 0ʂl /ޠ"rG.ΐ+»rGG C)~ŋϛ};p =g}^T","""saI"YX : 01`,0 QOa~9Y1 ƖvΛl 퐹3M`,ħ:33'RgIT*8-طeќCaLddб-ۼw KP“+Mefp͋^wSED!XDDDrr4SS_϶r* ,1RBQQ91EwgSCJfwZݥ='? ޸*E2lֈ(|H0tN pSG4ӝEbb`1fpݫp]'k[h M7>'z}(J~v9\#O.'I#K|md唓m2{@89S9>{9cwvx^uOi g ah"""2;?@sXG~QVYųw -.c^{+f9V0)0+j )0³),"2f8+&LaFW ""h$XDDDfk~,623dپet}G-PQGn}TVaϥRTTTP즾*KDD!XDDDfmoNR{s`iUݳ?ؒOeiaXT lKpo8p_Wx(Ȭ:sTKr:}elF=Y9tdo1LG} 9ETTEn W^"" """rYtwy枧T 1Ӻ+sRh/4'KH!1CMUxB\>8ΏDqZL>[0ݡ{SYdSk' ĎRCe@5W^"" """rYA?5ol9.;7F7cp -.1OftpGi۪:$)> Jؖ4ÎÑJa|>ےFBx$Tw6~+yE^Ko9:X?.p-:FDd^HL$Ps^u/NJ97 ?3tNlj(;yg}3]^"}Nzx9@sbqOt(B0'*u,({sҝt797," """r;;-w~7@+_'ޡUmwBh9 +jՖk?r7=M2uۇp8fr:Tj{h(|,Q?tW؂{?T:{tB#g` 5zM6l[L X3x}-" """2_ h= ܸĿ&XMg }݃ iJ(ȼp;_4%!}aUߔpNnn>'EB KΪ쫾% Hivy)Bu"Y~m"cu]Cܨ1wvO O","""sS!\{7=xCw Y","""son81?^'LWN`","""EG Uݥwsngh^rY","""s ?Xu>sE<+o$IgpDD!XDDD9uU{=,qW]Dp}-ceD.^NR(]}CSo^O !"sIM ""rB_rLqyXdfM"2g,<`TNߘۻ 9:f&s& IIsR`VDFz-׈Un(Ovk+.ݸniEZ/(PH0L?A" $~Τ}~R'k4GletM4Z;MP6On4OKC:ax'Qֶ5N8P!X$CVt o$n6QK*:#9/)p7gpr{/[ĺךphI*Y׾m{\8]4%[Ǣ҉3[R%x~~i"ܑ!j-,ImmefxDP:ޜ'd+k,ù<ɘל I*. lTIj&em_FhKTeld$IEUY ~RE;d<ʕPYsdOU$C$I*Cֺpm)$QV̯Ţw$C$I*|^Fxd(Qe:+cyޡ\o%,Ii$G+dVu2ƊT';;8B ,,I mꖒ=tKԅ4dG d d$IE3t(W 83[R$%IRw50g] TWEY4hȷ:gd$IE χ}Wu+7:mPHC%%IҸ X3/.Cڔ$Y&g ,,I oDmv*ʌ Ι%%IR1T}jƪ@e!/ek4?vI3鄃%%IR1ݷ_ Kq>5&`px|'YyN*C$C$I*> }2:ieh_:y>w=Jl5WNJlnל$C$I*gr;e K41tv:զ_m7RXvzG<}2NKCg׭\Pל$C$I*WN'.wL<8E%0]:5>XPk/8 j9ZWd$I}~ ]Y7|-8m)4 <=)ͺ$j902] ^ -,Io̶ g̽ESXHgy ّ7մ͞&,IJK/lei.\9|QvCwAѤEnEog(R36A:H2K2nK>j[I:H2K2@Z5σexd$I!вquk.n$%IRZ l YIe'R( A$zB~gX IeǞ`I5̂ ;4K2K :T% $)ҍpP3ZTɋaH7Y Ie9$9~Zn*O@4 ,IޘyPiU$C$IPš;CXy]c$%IRW $-`-n;\ $bN+?,pZp5j'[ IX$<_,L>ZH {%IiLZd-bΕN$C$IkjoWIeopG< $I6<]fjj\`I *K IΘ9W֣̃XkܵB!X$uP[r0m9l!15 Ni A$ltw>Og=ΦyPuӬ$C!X$3?zFsLj+̹_h-$,IJBdzap{Lx.pNIC $?x3pA;\qa=$,IJZ?GaJ7E@u[I!X$n솇> vo?n,}/,M7J!X$0p ~ xkpӚ4ͅe \_I2K8'ÿǾ:LZ+ӖBì`+I!X$T_<G/[ힶV I`ITQ= QxsPl0 f\]ӗClϹ$%IR<#ppx]* 3V̕\ S ,I2K$n:? 
r(xXf GnGd$Iz^ 䆂9?*~(N7Cg^h `ϗ$%IΠ`4 =~]ȏpoɱ7jncc-0i1SFB0Wd$Iw}`,DpxJ}]fAHA>x DC:wh$%I$I*%$I$%I$I2K$Id$I$,I$I!X$I$C$I$I`I?/ ټ/sݴ;y` 6}NvobO$C$IRܹ?iW'}\}'[ˎ =lcϚ-\`$,IT.vlYsj`ǖ7-%5J*LLRvno`V'I`Irý[vкm3 Oys\޺mٺkokF7nfWgRJ!X$#[؞ʖw˖ٶi{Rl߹صk)[wg;c^NU@$C]{ܻkWg(=7wMk%sʹiv ܹ;XJ!E*"1KRXE B2H=׮f϶l_Jo5lͭ ={ش-6Қ[ٛ;ٶ$,I$IRsN$I$,I$I!X$I$C$I$I`I$I $I$%I$I2K$Id$I$,I$I!X$Id$I$,I$I!X$I$C$I$I%h4)IENDB`rally-0.9.1/doc/source/images/Rally_snapshot_vm.png0000664000567000056710000034273313073417716023556 0ustar jenkinsjenkins00000000000000PNG  IHDR 8ZsBIT|d IDATxw|E׀%M i:)(U@"MQ"FQ "(~|U^O.=;{vvΜ3( %4DCCCCCCCCCC㺡9  иnhuCs@44444444444q hhhhhhhhhhh\74DCCCCCCCCCC㺡9 Eӭ_i\OέoZ7a n̞={8rYYYx{{ɍ7HfƱcطoG%++ ???С7.,"BAA{a UVnݚpz]T!¦<$q!Dqs@:$N@Ury%Vv*,\:iĨƇ5( h Gee%Vbʕ_B-7n!!!54?05 [t^6 ;^3'a˟z [V/1};^irVX?LIInnnmۖѣGxeU,Ypq 9t X++ ^^T;*Ge9r䢣 4]ռsŤ|6M;Хmÿ KY!kF=qBqZ?掷R'1`p '."  88MOyy9PNyBCCϑe-wFz/G[l!%>[S}ۂ'&Vg4)gcfϞMQQ^';;}QYY^glmSH=1u>xhP=э̞c?|@Gb aMB6M$JuUH՟ 36dp#+-f}WY~jWծr8"+pv9T8tӺ&mv@p3Wbɢz9T`̰^xُ_m[q-Z?źuXvʛy Vmb⭥2XL`wo0.:Xij aaW^`8oZl8T`tE;l_RU6Eh0pEÎCF)\%U+Wd˖-tڅ!hpPZZʏ??LJJ ,`„ Xʢ_3ǰ.M0xF·ct ~~28VT9uV..ΟqQUvhDތ^ٱ`4) Î@0.io/xbccpΞUVV/sq/^Lڵiٲ~]P $nCafw>_jED݆ zP. 
6 UEDjEJYwq>Ua }v+Uɕݎ(U/3M۬g`Dw~Y.Ӷ[VL?OJZ*mvT ( F™\Ufa0GGDu`/)OU?,Z!"έ q.UNHY4N΀xݻ3e/*zyeuہR*;P uCl36]V6oىF[zy;3]f{XmsEY("*6 Qy/oShlfŊXc2gfF:ҥ˙ 3gx:;u?/o~?FYi9~%;^ Q/>dX*z 1z`ЁRv!i05;K)+|eO2a9'9_w$Vyy78B;q"^Xr%vX&><WWtÇbd%޽x:tpva$Z!tmOwE^<ܓO,>xSi9,k 7sA[v=r-<,n4O~n"k~S<\!d'cǟq(9W{EZ N74dKBX^4_<3/6&O7kגǏ}5dyL8z ~G5jTC.sst^~pE8u9/-}'@πtnMY:TnA<3v&T[%[V0WZAQ^<B|Yl4l؆(u0<4ORV|  Bei8l *oТn:zzę$^˴O"3Qtj3/ooBε|r*GY)ydft21w͈갑zlO2r_`P9\R fѦneul.b}g;Sҫynv`4))ڜDnτpN3  Nƹ(|9Sl+A>6aˏ^VGJc(z}tC[euqBhL+~x"k-oލ k\q}!a~lћ[eS/F\ĪsY*AǞC|^& xQv$9 L;/vc(G0dʆ/_d;Ξ_`ѣyiw}@rӻǍ,XwЈ(|]y?u*s,z2e.뼏vo>JKKat`0ӻkbcǎ4ѕfM9o#͇-`cWX3̞6uMnxx{W{ZF1 cćDTxkC>S,,c/za MX{.eRPAeQ*s{1<7%>DH+"$ď=1H-^Z̾}n&:tp=::~p N>}A|rxj,'G?¾P'|'&C凯aaqrk-(<|+^Zز Xwv9'q XL) v4z/AOME8rSxª2` %wv|s"!QxJye]3c@9t -nƣo~p ?0^Z^#aQnb7c>S`ҴE;{["S坧1d'T ҏSxXlVJ *]P+68ەs͡Bv6Q6m_ǣC{}DDEPGX9|8MޏX6h!6dɋ>] j-cƗ L~NZWmSϯm"š/ eN恕toӟCYf ҏc9mPVl!n֧[*BWBna}§/GD $::<ٚꉋш;W~dI+QQQu-|KWFqXܴ Qmܴ Czq"9r.=bc9yZ+Oܔ{a1a'gW#3gk*֊fS]x Cn>S67 V~zMנ*W7|#}J߾}_~"6]O^u<VHI1i*=.e[d*g알]%%"C$-ceeJ<57)R^(o>~S}_'%Lڼ^O]+W4z7ɦrv-p fMbʔ7_xO~(STʳڊwh}ٛY  L]TY>y..OȆINe qX* 佻ocv5%_/?GRnR&+fryo QVo=!6*D?ѻɧd\X-pHEi<:F7rz ,s+ops[J~TNX-m;2pxVz 1'!6K|71K~?)q xȴob)*,"~=%.B4JwIIbm*W][e|71QܼeSR dv).)"\ϞϡS%˾Y]2sl?ZP-VQ,ukS.I[WHZ~gqr%RU=Tٽ$&O$oNE>Q*9I`LlcGb"mgKi D&H[iiiX͛R;BFA)6^8n6ӛo i q=?ngUiL[2_},wdle?R:>&B}9:e0ps̊^ z5z1O6!۶sH<Ϧ>Ŷq|;u]t6]VDAT:u9)9'bf&gԇXrKoN_m$n߆H,ukр;=:4v74m燐xyy]1s-bh5h8=41\>]G>\||S=iKp3n~'a.hoL㶼lNʊql;iR$nh Mkc+xѠUBEI6{K-}3Y;B&p+3m'VNv#)3Rjdq7+!vU0Wxv2A}]:jŀ7:"Q+ؙWL!} Fӡ#Yj*t3V̏{3ޠХ8jhS;SY2g9knGKO؈>káLdsCX:rԸѭh&d;5|8)={nEjAٝtg?W"q|E 7t$_l+NOAZ!;V:"%'RJ|j}:ns`yoە̈́vqS 11'-K߿W˓O@߈NmڅkqfC+sn±Z] BSS!Coǯ58ar8=ZMӾV +/LoŴ7Ĵ?ζ_~bܿڦ^p=e9(*@V=Y^~ĭMQ4x) Y'mV ʦf^U?9 5N*_^]f0˝ 1w//|sya?:BkȰ3}]f;вi4>~9 X(@dbr՝36Cnf폛5w5=7TQO .%śZ0I;ܜLyEApAAAs{㒒|||.j,cDe,=^#"=<ܫ,zy"fݦqگm3ܪ]u80.:Ё͏aGռ;306-LXpAPtVv8-+g-?fFξbT<<< x.'y=E Q;~>v3Z-l1aO=;4"4/oPdj-OSPU| n&\}+\ٜ W/D@Xtè@euȕ??;z~&+g>/W=njj$+WOOt:nStș (xMx`D-ih&"JG IDAT.YL~8($FMαT,;A(:p( 9 _GUY ZMÖl]-s?I]Yt.Pqs5`*738oH]ƎGXa4 
E/i;۶%ePU͈[:ꌼ ?g{dl܀wk6U}9m1yi[Nz תk;cm6A 8)|Si1.ZG`/y}ŶY6|T^}Lo0XIKj}|\ї(ιqK^6 5Vn$IkZ5/&%8# }g3Y7ՇnRmSPtXe%L8Wl#y[UY%NzN[wqEgpbM5W KNCUh( }^^$cm1x(3DGנ VF;/o۞i#4#oo_NҪWCtBLxjƿ>/›{0s[[z7`=!/3*~|2iꃧ#+9EAe>p"͖M4 ??x:vxɼyyy=z^-e̝"=ZgQx* b;+g &Ln1YF\eo;lPgџ|=[f`GTT=z[L&c˖-t3Ivm۶CHȅj*J?ϾLVp(u|#광`K:Efn C=;!!;K4_>y t]Vy\\2rq"耢4Op:5gF+ غeJp@ouZBуTV6ɞԢꝮ.5/gHE݅PoStE&S<XlL H$K\zLuٵoXw]@6D14Y3Ҳ60kJ_~wrxBN!3:Ѓ4t(Š>f$Ո' i'r:T:= H$Os:tQ-lڰ1`y ~xۏ$[`&=&7D;~qVX @,}w[nfʊu|Qk 5\^z>5X 3\'<ʭzUn\uQtpD.7ES0"v 4iןεge0b]t8{p۳1{;6Hi!n(c5}vq\\oٓ3pݿq̅ i6aFCщs3jS\UtD͌ƨ6HD;.NIQ\ُ !$#u :D:|fAu+ SR0]ĦZK3X.ֻr߱P/^zDDD(qwwm۶u.ty+G}̈{cߑdqpOK)i)E3/h$$$n:"ѣGٰapVs21>8t`՜7eCON_Ċų`‹-wzxRnj^d&'y+3DmܕfFT^~DDàXˉK.2>g@]Ԁ/ޙʝstv&9Cӽi-RQ ~tAZhC=#<>e╗?fjsN'sc#| cNڥ1/a썘-6GXfZFy\>l=|n郯AUaEdT8~&vĞٮRvwȢm'CEg̉r NZ75k=Ƶ*J+dČiӰSl C;ǰm>l;+Vg_bf^N\1J*/a؋y'T£" xzqO)f>z ߨ3s߭7g 1"JEqz(4C3tz@ Kӂu|x}?Rʂ/w*Z߈Aр[waɬYTЯ+nMRY>jc߂yL~R +йЩ[sVϜŷkUYWa~aD~YY[E<>&r*JͫemV"ÃOCrzb֦]%*"vٷt4*.Ҷ},_."b/v|4zu97Vz}YRjH۵P Ȟ6,z^\b N>v3:7JLxeYD*z@mC궐-JCVOc@I-!n)'5Ʋ`9)VRϻ]߿!&C%*jȵCUU(_\a޳ߓ2qN"tĜ{\tm!; _IjLb9n+FvR+Ræv3M,AٓR("(j_}.7/lsȎj8=Nvqʯ۩JT{7xyK[JL(11ɫ-/FN~a2wg#Q-O7Jl޲-1"?>\d+- ַ(]F~-bys-jp+]o'DHI&96ȭɀ,M#Y]=kS6<joA{Ɵf~zϟljuwNG3f ȱVr:'3츚<ߨ)!b] s8p7M6d+boaJʬxӨQ} NcӹѨA= rcI)T:EDѸQ܌z2N!KzPʲB?ZrOVa эZPQ=(ON[,eZ\o L&AAAcX(((^Oǎ;v9/w<\ĩc9SI& ~QZQn:G7_Z4n@^qCPkQU;8v$p5n -%Өh &@)@o>ݡ#~ ME9 *9wbD4&:,Ky W7ObZ^Qp*9z0yemFȀ˶wFVb" iiX:J<}i܀ v=1 룷kbnX(/%@f4hܒ`+OJ[H:vC`}]H:οnN#%5f1T}"灹8 X qQ fFFa--f-w?4;"U65I"h(iS%hƟpg+N)i80Ѩe ==wt̿I36 ²K@pH0qiT\lڡST\<| V_vR[+u^0Fs@44444444444+q hhhhhhhhhhh\74DCCCCCCCCCC㺡9  иnhuw@CCCCCDDnD$e|P;ChSpޗqMAi(D*P\<=냫N{?Eje[0Cԍ(Dp 'O=Rڷjx1xNaolv(z˝8AQf^ۡܮ "BBi(/ U F'5Z$u7#TȹU;E#EYT-|Ac!zN{!ϡ&W ncР~)_ 9IZt>/{I\*^Q~K<=w\[ rf$O͘H_@s@44444He 5=vEȆ7 ?QUTUu\,*r˖o/!RE2+\~<;fJ˭]"]}>J\mY/ۋJ^Uj]\}>987#NG Iߍwj"B{"g{_۬V̕k.\\s.s@IyFݿx"r>]AW'߅DCCCC_dB98?ӑBPFJ31Dnh?ա#8 L d_SXTh23znt76(y;~Js9s䉓ꃹ("m)gɒo8~" }Co2rYֳ#C0Ⳉ$;r(MNu(fɼ/I+Bg4ҭP:kQpt]϶C4؋opYȩ-pCqnRϹ= >Qv V.ҊNjmo%;^JT" XPaG]WA\A&!H[g"WϜ%)sSo\×?rZdƕ|3s bb:K7D6is+Fz/bd# 0DQEQ 
HWRS_lZN\;E̾_uZJP`\rd\ܦE4ݺKVB`/2oN߳Tl<") b$iKޣtFC^W?m\%6%(6Qv.JXPX/J(Ȑ7Ȱ$ҵtlLbo]2i\7IJݷJm$ёЦDǷ?_&tj@t"weIEA\3bpЄFҥ[کFF>#.oNb hiִp%h?&VV)Ik4dҭ6Eb7oxM/L}'y;E_:Q%E_Bgz-ydN8J%("ZnݻJP~W,^WL& mҤ{.R?6\uZvd\* ;U""+[/ie|ۤA\v,=zt %".YKϗ_^%WWlaRQS޽h/mdѽƽ#[ݻJQ4e(HVuR/:P$k{UZ&˃ IDAT'HF)ҳGwi0N5,萌Z"#%QCIHV,?J@EQ:(:([ΝxzvItoХ4[$MlIB1<'%$^ܹFwI=ʂR_l8^(<(tbJQ%\4[c<^)=G*-INWs%r*E7ձg}D/kD^2[\ K7JviyZb?iY&NA8 GGJwKp7_!.h̼&Ѭ5t{ C&*]yh&kur$o~<%9Kd˗'bO@bVm%g, OʁsmD-?$ ArSe˼%9&N 1 CJ3ӥ_\ )/CHߋrGΚ$M#ޏ7DO@Bb䣹ţ{eﺙҹ 9wt =Ee̷qL\um7*%0wb̈D[\6KYK.8?7wmFp W{pXBqC[x+8pvrqIkŬѤm cC{Ro#1SQ9ڹL۸W`&^ݙ=S97 #,:6][XP6j=ߎki#/Ť0. }v`en9ӾG ^C,ٷ0M$DTwptxV܍M0Y,5͚a @zHr/#DQrx5Se0V-U @UBٻs3W&ZzvlICcR1ܴYGc/E/`ځ:]²_l.=;7S\5nkϣ b]EQ1Y_>/XdiGXhG!Kp@DJ43~Y@vm="Q[GHDHIynZɌDX$b;7~ؤX{r쁡`3Bp,۲eK^fv;COcI!Md٢,S|^ +xq X7$+n#/Y8w1EdeW2r0,s3C;%^֭p iٸFKګt?g +>lc;6?{/fp|;]_mdc#sSߓQ0?ElatHT3O¬hLYF죧M㣙,$xqT.,„f2#Zͺble+~z{IL|h&<.n1[s"4!u|}.fHՋeoN?}-Q]/ q\CDuP؊!i-l|G-ӯ1xyg]bǞ퉉fpZ꧁f2!n/ X1(<6kL$t!RX&IEgN\`VW~ 9YLh& _̬^%v0G_`VuZ=zmF~+$/Y'&݊;܍+&L _̜O_#Է2YQDNa9 I, ~yd <}6M7p#⃦b:h?`-)ӦpF_{/v+1q? YŕWtǑVM*c;}MbǁfAÍz_{EI|ӞguǺ+x'yWCh uM@SO\PM53FwݹKk9EtQdWه!ZޙE['kJx{LnNKay|t qGѶ7mΖ/dڏ^U=c.{^ΡCٸܜٳ6ޒ} ։? bh4@|cm1jX2s,{&DŽ/۷fY|* U̓w]N_~Faw%LQ40›&&7}O{}Y?~ҥMkf؆)\2/}6FŤrV thAɤ/X߁31dX@Ac?=gҼ ڷhE ^E/,96mMT:jCsO*V{g٘yn)-ofS\^n%F4}7nz0u.7jŰK2:j555k% =>e["Zpyl9[`3iE5ƱxM zAGn{}*DQE4RG!BIf_ޜj'P1hf[`̀ɗ<ºp˥|L x_}8I'v?tArMώ}v2EP*Lvk,woכӣYi#oyy,Ͽ]\n44 [V2mr4r/=cfV+Cyo/zQr &|aG!6Ha}oxܺqc5)¿}˾f# &pJ铣?޿PEFhl'p r>+ܕTسFq_sm%!sΩ]jKH"A`iOWzYl<-y3.-& Ҹ:jp} ֛WFG&&Jk䇮cHa1x=ޑe~mHmߑ]e=?eB.{$:ڇύK R\x&r(B&ӯty.OB==iwۡt{1֋DN]ĆF2'wˌQ!1n?y⸚l\>//OCbիzi7 sDDSVR絇!QԠFuZEQF/B? 
P{b]$v y>.ҟ@'6FEQ_| %XmzNtm˺ؓ]/!ķEt g`#-4ҟz>G\uRʜi})}ZĢ|~=}2&ѫCcV2qV1g79ZfEݦ{~=Qy2ѩ<1(Jqz[J~!q %p@N/W CΞ k1E^nGZןJ+>HaI/Y3^g%k׭!+;CDmz%v]dHjD۫*ٶfĶ%q2.[NfQfVm$){hG*Tbסb[c܇me񊇚OL4˷M-zeuxQG0Bhڞ cˈؑG7Dvk{ӵkcgPvdk:-Qfo"}!^l~A4kіj)rh6m݋!av' dM۩Fѱcgj$bp(} vDK&C;}\J2bݦ-OMfKfhF]S۹ ;54i]W[7%H..o@:v^telڰ#G>3HЖP{v {2Bfj-1 C EQE+*\r{W]WWW]j}FJnE8koxo+~oy )&+(viq|VZo x"^YVI 4o LӻI``@$NX _]Z1t$9A~ r$%/]](<Շ7#ѭ{GeoHFPO%+N ÐsIVcILJMk_X&^C5_Hv%/A!CzV*uto"Mt+|( %{'79\ro(14XmrJYc|+9BZ,/%VH6ɒع'xҾI.{ rrA\X 6 y/rOnQb"% 0PBå}b4,')MN^坩]wLuREQw {J-:u]##Gjwfլ3229k5 Pv0'xy8Y7evǣ_C>jNCMӿ ۔-a(,)H&E blG ]rX<#|ݵ͟2Idq(};(rzW^g/{y8ZP- s=dj!.C/kM%ے̘>ɬux}%?:"XB4a<ͨ75ÞHn!Y70[w?4YEqnsDQEsܺWɯ~_2wYͅZ#n7`BZ2|H'J/ȑsK6+q-pbM\U_" Oy>/Erm.+g1]}ƭ{ yzDwݚķibe<=,7kEl\m*й Ysd,g.\MxNV+ˠsYJ||h/&OuЖ)g}l'Ї=};LeÌ8US^&{36}w>)md G*pg5vЖ)t+qAZ8.cCVq0[x;)<*7R89 IDATmYkjpo|,~IU:voeR8mc{CR?mZM.isc`2Fh&~1}ߪ"6oDaafj=\%{}7=8-bswra\u箣ٻ%##,Y [DsBغn1U =yO=GYNXKL΄<ٰf L޹b29?-+}i@R6Lm˸/K8cDL9G^67lSjxU67`5Ӭ=op+6 Nd[ɦ]Gi׹zF;='Sc@EQ:Ǥ zdfOtDuo GBtt0V˱qĈYhY3Ӈmg⛄ ;?>0+9C 00zZDձ fҴs[t r0TKl6[8Pvwh <9OpY.oN.ޱk\ZKZ`k nyqvq#6?rٕwUsN, i?>EWD~2y?#A>1zSqScW]^3"'QS~\nU;}OpfY[H| ><M#,:xKP$|YhȨ|K?mҷu=Lf Uwr8;}PXVX[ =^@@d;aQkp;*q^ٯk)`=O)?q=x摠⓳_:GlWW̮ C M3owl ϊ^}]D^v.{Q7ҢL||-DT=4I 0LSrR~x;pisb1{EQρ}`5iV|کI 4~l 4O.[vw,\,$J޹6cㅷ.QXf?s9cr00J,=MـQu"|uWQZ "6iAhwxAnGNq,Tsh.og?6a* X8{63g~Mj{lL'}}陚E%7 n&'Dl՗AW%e.""X4~Pkg ?m'zvM?Mql)Bko!n`9w3vDPth=׌ W+x桗 kFF^u5M~A8qW_X[5ް2 ?ּM5bi&3-˥MB^JK_#v]j6B8#'(y7}%71nW%y"έw$4+ :I~Ǿz*B| B~K߉ꂥ(9`R"ڜl𞺼~`=Bdg\?{6g J9c*H d kx=Z 2SpӾEc\{CUxI_hf+'r`^f^G7(k.OC|56?ɐװW_Cűl>X_`V9eխF![؀=!ٟA)ˈsJhw/*pj)u\Fwʤy̘A mP=vG@6{Kޒ?.to 7 > 罍z4__нl߬>Mëo-'wWť Տ^b"6#ٻb1kszq}Hy}9ӕS> 7~A'Jd-&_.y>]-w_;YvTUn=`O˯Kѳg_!OI偡ruWq1{gY&1{OOƪ;~hݠ!Nh53|bM\HGY^̥Vq51bE:r_y[uLMXHktjN 5ˉߣ|b=dW;qw,y.U8r8f9(154?-рXڅ `ќT9=ڀ[OaSs*L]h6_ND?,2! 
My]F B(l]E|ح{NԻ~ۮsn|i2~&9.kp*GzkΌSߗ''~uN;5I I2,҂},#.#J]2Qkw $bhVv+|wHHNV '| ~l((paaF)Yǂhh4c[epҠ0z >W` b]#7g5qy07b`"rTGۃ?;zn DLdlV,'Gu->+JAin9XZ&FѤgop,W1WUFh^❏roJsq]N >Ѵ L-7Iн^ ѰX,}Aub`:10"רHm:mcu 0[̘MgKNX-U}U?>10p{S>lfE҉fb *((:COHνTz5&Щy`o^s/Gx-4m՝&Aڲr/3 er"BWʭc&l&5m$/|+~jН((RwDp[5jE7Ɏ]9+*s[Xq?I-aWVYX/ ƿa:Ouk8Fmsiٮ;7qZr*:ya* QEQEQ:9$;c%&?} 3g|B(BƑtV#}J :3g1`Ӯ)=-^~#;w.se9D7#0gv̥ՠ~wL~1V/LgΣ+((_hkӝKSPqdLj'h~h߇IQ, !vZĨ$&, MkSLb\,!Il`סkgEQEQE,V_bMy`WOO0lf e.aZD1dWL]Cog܈Nhoh4;f2ʊbʪUuEQEQE' ||X*rcWP5^;>#Eƪd_ډZÉ+/&aa PZң]kX$n)"x@9Y'ޯ:`2qu "%"dRUjPr&ai:/ kn}qhv.KjºxON4ɶ=X X[|OgsY~ ӱ/oፇ'1?y9D$EϨQ\_p8())"B' àE9Nu j(cDbN_]uFL@ rY'0:UXE LX4Ҵ]s2 j)?$AF}bf+t|}B0'챢(4C rY_}EQ|V O_]OP'`ޱ;|`EFbT0bܝ"x uH `?oOXB3{؉`lq;Lg39{W]oRW7L ( d;p sC ̨)EQΗ!:G9Xkse^\#7lbXϔ#0f5t f{S:}978>u\ֿ%L|'o(?*8{#='7i4lל"lBRZ7SYQ?pndsvSw_]=EQ bgN5dS֫[C Mw9 3y5l pߝ<>XuXn0YlН ѢEͤ"(#S>z^nL~! "얿wSelFz^T]EQ~W136`18daTw3. ]4|}}9mRt"y=8Ls\ƌôi }Svp:UQ΃t+3p4FQ/0Z\Gv#""0U.$Kaa!QQQuorFaꪜ7aCZf&l >_?qQ'[@N09ۗd￷9?埠Yƒ9Zq,D zƏ(#eN\% pur,EQ߫YVkELë(RK_rr N% ( w%'FQKD(pPB\믮S ( TzEEQWz'(4(^bEQ|"NIN-˥5?J@EQN㯮(umF4Y@Nyeer Z)4(.}+ܕRMEQ 9\v) fGS (oP:zj(4.Sc[wL% ('A(g'"J)tX] ?us*Wwu9νs'` PPQDDD,*+.+?3*3n9wuN>o}cUOu}D"lD"Y EQ(Q3 ^WMKJi"H$@N"JD"Y1)`/(! 
=4@$d0lS.D3d~jcR4d^F Dj'[i4,D"Y!Rb`Q/n$[4@$d G[RFR"H0F⨩^J{iH$ӫ,D"Yժ|1e#l%H$Mv,,H$eIi "x 2kO# D"LǪ-]"H#c1S\Ah5-Iv/H$MH$-GSv3XQDN=g]!J9&QlGN (sG LL.?1rgPKLM͠ g|r~?FkI$݌TvC"3vþB7u2jnc,B4JKD+ɻ".BS~7?}m\}҇9u)4vt}~xQ1wF=|Ǔ4nN>{{wS#\LB]/5nD"YH<$(%d jv:w-<02D_6N<{274OOR_x'>0Q3~?_I?8ŏ1Ӱ$/|^&^)nQ5D",F 4K[˱핏^e$=3CHiwydU<[=g^0._̆yջSώWܾ|gG|_Q^~ ?Cy/(/s//<=B"l*rH$!pjԪIٔi{}9xC|w`h,>0=$_t7.EAqy_& % N }](3{1 c&#o߁(.(\DS"Hֆ-l$ٗ&W?+?=~Wp#ΖnTgxM(kIEQ.w45ryB0ZNa<7,Q)0']4W"HքXvCI9śV_"WϷ^0^q4n"R8[}pi ־B 0d~ò_qiY8.^\y3LĶuKq?!L$ B`Y֎]OLqshcnd'!B @Q'+o> M_[MBPTP՝W膆aLKG3c$MgC8L8ӊ#8v@“) A.dqO' PNLchş j ?=MEss\ ɎRi=I F`4H x2ilZo6Tn#{XE:KF@$ H444 L#xVC]>lѨ>r]u5K)~wFXOH2<8O IDATx4} 9_r->gx㮻a2S]*`I${!,DH$5ew/# z#|kF=z#?Dq/oy,o PI7#<ݎk_G: ]gxۛ_t9|m80Mv~_FK/&7{>%>ֺݗC"lP-fvE"Hv^Oƾ4@.>'_ o|Px>;3n W#G䯞x'p__l??ɭw]Vq뺕|1Tş{ ~?n9q/%h?o~oxOK>T^E&{H$F"4l$4@\~gzo`}o5Aqxnnm󹽼? 䧖=k9D"U888Մ) %ɖ v( SchٷgE5oyijǯt>EY$NĴM,ZVwcz4fKD"l;=]Inoۋ"tE-vεdob Yzq ⚕”D"Yau[28v! ɞB+vCppO?n4.xMM1@ĵ(S=ddO  y zAK66tyۂg7pUe D"\GQ**XGM{MulH*0\Fْ5&ہ4@${7@,pdS1 V0*B(smJD"HaVY νY**TptFKtʄ̚"!/fk?TKRcrRH$H~;!ly֫,k%&/dddc d=SZ1+2"VE=۹tLdD"l ;ݛ_Q O#MhRsNEͩ0U?1BF1!wukHDUfOd0l}G$!]UeD"l!r)|mxqR2†}Di$bp!Dq JadsdWc:&ùymcRS%2ۊB{6pL4{KD"o(܍OhNFTX/!3dqz.jj$l!j Fe_H6 12zMeCH$ 蜶p>oU3E5)s* !0Xy0eA#3̞*"ՌI% J$ˡF Uf3eD"DTޠŝ8 -x-ᙗU4J E!FK_TƳdI6iHv-1Zv#XdSԒšߜPF@$I5 `47FFsrUE͵ BJV #鉪Mf.MJ-B ]l8e}9KvMWC%I*VdcDϑy~ŅgR^"LZjjpq]gDIJ.Sda ժjW9=fk53H6o o^rA DqiKy=GNӛ㎶i6oX@1@yshZ`:&^wٳ9¡h V-a;JG NqkۭpK[/ePsMwom"ٕ<#1*Q6,U Ms h)%ŧiul2SPT&sӄ+|Tn{qqqATӱ- or#Ū;{ VB-4ˠdɨYRl]ѥQb4?-MҸ(-l=>=!n܁k ɮd"?NTLJo(k:\p__d IHvҟn8EAr,,j y|,[ɍ_ mD5;HV}3vyWUC* _$\{aFb43nf&(bmnF ]*cPYL"Yu˱!(RBr(\_%No=R4Ċ7Y5Hn}KwHusV;eR߄4o0؎̚. 
ENmX@0e4B, X}!@wtFr#$˳Ff^/ҟ$onNnB ]|rXE1z5yzQ\?6r7s\N](-X,s%;p$=ӑg-5Bj c \K~W.߲)Bf-leDgLap5ٽbO_JZOZFjXh$3Nw߯ Yd޽d3iF}]ŴM4kkxM i*V1f9la3'ѷ^C*s9qռQ@j I[J%<=}?m p߱@-p1M]ݒjv1ƫ>Fӓµe2znál Qs ^/eR$02} "tɞ~Dj6%W{ 7 'jBw㪮SN\2 \_&^J.O_p <7 cw,1R8Rv4KZ|v?nLd8uK،fL?{JQOD5θVY-S36qMnid #Q:ۗݠbƜhtz؎C;H.둠QB P;vrf%FT%Mf(5ʉ֣$vU2lK77Nq4.x,c!j:ŽU큲[ٹ3DR11"p>b87D7mwlR4fU\\jcZ&qlG Vz]^|n/^<56p=g87 Fo!ۃp}4xBt?nj-"גhsGscd ]:o,қ#q).<:8:ȽsF| GPUT4.|.pțY [G0[87]Z7sWϵtQ >Cc!Hir+.\_LtV7'T2W= ~C#To#psS^uE˲Y5`r87DzpLF2JI*ƝwrjYL2]qޏFkNh81ScLgHtq*tfӿV" ɮtLLv:K^/Z*Kvp o֖6eLce8fMh9i:׵Q ^2=o7 +n5\5Y0s<}b |'@׀rs{9|:4al$.^+%fJu 309= BńV`83Nڱ\^%G9| i3mfJbG =24kAH0/]_߽ E!QSY/') BҌ`3t=ц1 ?O^E;gh6q-[%FuIo%֥̅rveu[;SQ%&9t[h(2c85Nztl:;uC#sd!;5O)F=A^xE7.~yCFE#QJV;;K1X-c1R'_S%RLY:]+=c\35?_4Elo*OGYA,k>͛^IxTa [XG!JS%UIs;ĩl=ƉiweH!ϙy.r.g9rvSɕ6\#ɽooh91S?5m]M]lT\A4>s ׁE`xo4uTiȦ^ r%w(mw?Xqp]HS\M^E`EH-^cr5ǩSrSHw ;kvJ4T<#hhj1uh2P5OŠB^jXN&Fc7oJ*6>ϒID 4: wpn`9݃l:B0a9r_ۦ噻W]cVtv$Z0s\I\Xv^ztY/T+^ {m%D\sa}Q \fL!BXyf2EWm[^ pnh"[٬З"RkKp?OstGJ"Ɂ7i5Dq:%W=uo~F jUW 3Lhnkk)[%&ƍ6K 8'Nor̺^s^/\O${nTK|­u`8=Fc 5B^+.{\٬p!z }M_(}}# Cn A/L< J-|죿.UQQa>_=}T,[~<|vvS-~5r;|[:Ӝf%cl8CguJ5I@1l̓aeqv|ߞ?a'x[gĻzv(u~板CR[Xwx* O&cxTz?c nIӜ.ul&rS+ND}рE\e3( 0e2+Z\nl~aoy뇾/e`='⁣ad&Lœm?qNn]Gj/b&f i-#-8Ea4EnނVt6%Xԋ [`uf#Rrf0xpU.T7b}O s0tw{SW v!RQ4 ҃c0Zp?,&^NP0",㹉Go?f`zcc$Oo-l&J#^\U_ҸHñ X )OtYBE:8xrCTϖj f8xouհa"_]KuCt87C4r=s#Xf^\?*t?YlVN \Zs_Wb:[Npvx@Л[:6ej1MdwZM_As1Bi}kWb݄+їs+kOfi$i6 {}+J XL(y7Q(Fs`րLDp]ݚjKL«ycc,OE"\>i=Cɪ/AXU Pz؊nj甓Q1U# [A{a-Xn|˥wwpHp-ӃlF 0zOqkֲ,? 
Rjy 3~/%~e|߻>L~ɸh'x͸^j3ॽ%( Ld:a"?ԌNTWFجn5b0^2zr͛笖^6$Z7g)nu((綛U" -E|3rL'K.s)qy]u0u)q'@jdrMZSjjMwzj#pcNSdLGIVҵ΍'O,}N0RJ5J ƋSVC*r>, =+^S*V*9-Oz|dhMeJ vޛ%U-&ScW+4հRJ[+~cs|o7Gy~_7ғ|_8y^u8նNpLP}SBgB=Ggkժ[Ο [gRh絛 3Ex|q8gΡ2%2븅C97Uh|qg?ˋ0^r~a2Q 3E&,"8Ӻ"膾Ms(px=_&Hi)jèd,T˿Xp4ZV5SBѵ`x.z= IDATYV5Vv'Oҝ#Stz{mʦ疖WIk™ k9đisw%7CQ]60rs͛ʕ2+QCacaMEțZ¤;ҍ/yOfK#S3$ O=KT7\af&&ҸI-Vf<] XԑN7R"_׺'aq41䈢(wd4c _Lgy)tzFJ0W70xzæ00m68;AekgFZ12\_ HsK JQ7 dc5 k2 qNϮjzwF}j2SyxۏN3er=#ݝ(bV_nTT3Bi-2XPx=KoS1ɺӵ9rbj39륜סSoiRXLg9r&Os㘩LLq{ÞN"A6no f{<>u4~~BrĴZqUA$FSt߱h̎1Up$<| Ü%>;bF 3Hr!pIi*.E)HGfY,1&mGNiooyBzWi-X/vQt86]wu^ Y#XrbU"fxQ{M5[#Yu.Q""!bD q$c( ^{i!,l6KggbTG8Ƈ~t1RCtlE^ڛXDt h"݄Pcw7T*;Ơۗ᪛yahu~}?ϼ0/AmO0} t8|ۭ>?/yӋ?DO? E>¹&/~*O_[<-O}v(y8 qƲ #Eeǩ,( udq{8˶F_j@rk!P蚻 ScI|"OM-iz2Wt̹GbSi[67l@PTz)r=$8%\um0d-{_4 ( [..q0܋@N-9a8~7ZLww3ZLup`]չ i3q頻'dw,Q*4W6|سbMO2\ h枇:66 -yV4fsc 2F>?w覍fkbi./x0-Zزz$'m6q-L19}NN/0Uhhγ2׿i66_D>LDluq;iؗ_'_׾WD qc/}7KQoN{{_]~PJSiS?ԇx o{7~؇?# ׼Wά0$gd7X$3)RJIFMRv^t%< 5 R3Zh)N'JНZcIR  Mdܮ!DJlƃ8(9LӹLǤ'}ǧD±b<>ZmmmMnM]#r(Mj3L rK]nkk< G;ٳf,7 UB׋G8ȝV`qeca7li,Rc6na0:͍P0VIIV+QU*X~f"WN5$:1O[+m Fk)"}k8r*ֈiF Ü!6k<~^zBFk$b(3ʙ3r/2YWOaXLyK&AOi,GA/ғƓ5U4bMW`Ejp>±~.?@ῼ{~SApl~i@qx?_|ۜVx|p}9:Z>y- ]5Y:30yCw#QF2c˴۹%^euf ESiVfJ!q-Bz~q(r~*%e]ײ $* G3'rd骚i5ÓrK+q$]pȪQ G8FQ9{&oF fq|m5$3i5߱>q3D˱\A?^NR|,c61V/'(E!|Vz7XvbM?# |.{t & ksbXjd" `8;:-b83]\X!ТTIUR'W%as3i7@#4zB(B]wL%pf 'i;s FXUNá5F I"anm}*vҮrxk7_:u_@upxhM[}}/c:Y?My>?頸}e/d ݱf/g)FnsZ3) [R!%b` H)FۼwLqfP1UcMt;>w99#Kr"d zK1OE1|9^~ unB)z2`̵T]f#c4yiύg8YjiL9e=}p,[2̔fjiDJ3K !u_;Y"o7Tq6if4JHs4N<.Bz\}-EmrLCuoGs<WT}5{cLFihTE;V1f@ib80Jn亭tAO?W-` ڧ"k,H9BgSTG[]+4Ki562,C!5'I" m$3T,/H[G:X!~r&H|-- or4-x<8?x:n%U<M$&1oqsaL'9tfԣYZjn:&=~z=mם&Tj;¡'ޏjRWC-˱LgGwy.1],sӕ,Qhhur4-6$e%^I׳hX\4 ^-r,Ʈq$t (B%YYa0k>\5.ꐮfH!Fzo6B 4#Ze0;@^-/a d5 0)y}+GU8BL^~q\KpGݺs5X"C|z'MBrJн}Ktb<-.j.f&/B+' exVIӤV:t,N7E*G؄/0XUZt\pN4iYݪ'$؆ !WM!ji\^=S0 ''R+j0s`N./nŅ#lrzn.7Ubܩ=QIR4s=la3"\;l$ˍD{T~X)IgC'Z52кNZK5\Uv&2T?ϧ%.ykȻx]<LӤ?ηq^o|7a}2^ d0o L&竧~,r,~@;wݻ@FAJC吧SRBoXT,V.ԆcПX۝L' ,Y0}6ct fIIBnW=ֲFKq.'p䡺 ǑyEҪr-DZƓk0@n}.En xq}}ר 
s4xn(% o,Ǣ?sf_jܨ{ 01ʃ !crZN_wIV[XHifHP(.Ns phAʄ-,sCu̔+DE!&֔Q`z(Gǖop) k Zn_֣" 1vN6Y`*ͭ3 jخe.KuL"^ gNlHv3kOqwݫ0)MDQGyZjAg2?k${'RԌt o渘D0;9B[:UԋqLb8?c\;d&QIy/zTu~?}?ʣϿG_nFkWT6#ʏҗ\`5TKZY;BVEY`{p`s.%&jqF*3]ސTHKL0}4Kc8bS(G8ڌQ3jWZ{M槈UcLӝh/-Uz=NabIJJg@2Ey1cWƕD]+rt]u,i2kjřg]),{T,hMcXafҗX[}@83(x6i4C*[m0%jH ʷx_soӟUuqNŒ3]hW;0?dp1b4[k1'UWJ$ԛj=c)Gj32NjU6LHvXeNZM^A+͏w$gyr瞜]i]e@d2k[m06` xA@eiw60;ysU?zv5SuU?驮{70<}7 8'dn8ܫ<]!+m:Z<`45ư|u>0.Y'7">8zt`<3һ5jxLdݓ.ǞѣOLSWz+Зx1NNgtiwqT9\p yCd([ܠx _JƳF%{3ˑaOy o,jVVļI>zYUg`VcG9{]\[Jqʦ 59ϙob Mzn+:#< n^wvtͲi;L< T}4=s_8EI'K6ĔlDg$GHdXXz$G" t/ an9i!Z,ݎ뒴K{|Rg<5ѽ!i'N+%I'AIc6Lxv#I?15?z]1?6ٙf7L'yu̜k"P,+]t穦l1؟axfXX&llYt^AeE;.?irަF<;,ݑ.NdNb9{8rUUM8l@s\S&S$1vK~~~r' vv=#{ /CW i#opL:#_R%IUZ٧&y1e<qma(1d7뒱339}j * lBaEf#q'Á9/rԨ'AEf-'<;FN`;ثȈ8Ejjs"VLFy&avW@56qJZ5ϞQ).rp̘dwzjl%j;T}g&*՚OYsKp ͌;5DrʽV-w/ Rx\`0S`X)&0dϟyjNXY}zPVoX?qs^p2TQp\ 'f|)>Ч1 ',"a'<ƇXG (f&kxg#A~zpy=Q suAΥofl#J %}t}j@4 )Bjͅ%v*(oi׆:DׂY%UR9:v=Q7ַ?\ʹg5_-붞_3dN ]? jSH! LR $]?hjaFsKڮp'tUUƢ@+|mXSYWMJq(=kED0esV3O,(/n(iTrYɨGrhj5:Q7y6z>,@+b,7ʻt;1ü/>7>AַV%;̝q29J{EUq皑(.'b'tjT׵_ $Kb௲rrj`&8uΪnLjLSezHU, ('#BM`$N[5*[!v^\C+8FH1<% Ѹjœqi"l1xTSG3 'J|Sq HDay7'."*ZMF~&G뢋Lr.Ir`s,Pgbkz"E[A@1TϳGʋUt.*A858I cjrO%.gKLEgQǯ% d{󲎗,T4)^sFLh"4FafjeWf]Qv$I+g,W9֬\hp=13N'bep pME t tqɸ/ƱL&[C0W[XSGp[#N!; NfbE|ta^#8qeo2'؝ओf0_l* W:XyH8iW,Ys3d\!;ALEmfjp]vG͡/jN1İ]ؤ[Uucٔ|npuB~;c5 (mvU &I(~E?=ʼ罐.Yޖۯ0%CUX!,9zMZCf?b_z$ Ѹ׊'pөNQ=kltr qp0aw7}\v]q&Lu"p啽E3R3 %b(9ޑ ts̠bT5ߦnՏOSuxřWS4Ѣ(:c8  v؟,Yf':Wh,0#BB- g[z',BlELjp2^ybFum2i}A_ŽbVN⠓(;WT7ZD\71w2q{"8ߨ/Zؓad=Ҕ#I4֘h:9 ]T:E'Z@ʋ|,S *"DPhVh|/nS`uV\h"4NӬ8e(S+* N+mԣ<?^g.DQ?|s{ЦiR 4T}+fK -=HU6)* ;gxMzɓ$!!pJx|c|wUbaִ(Ϧ{~kAO^j40uL5)UUq2eaf+biBPhbѧIϸF%V6qϚ lO#@z:ymt:سsQ CG5nW[ Ѯ6xY)7pl_0뉠Цy4J9FA E۬,I pPpDEjb_[OҨRb蕈h/<Sy[U T6u"lRf?cŬ@aT}upکiS秅Cq- ɮZ |s//V<SEXg֟>gCf֗!(E+¹pl`YG. 
l]T.~'eV mZ˹+r7y lЂk[ЀT Oy=Oϑٿ0_WVpey-Ib-@DY\5aD|(/;T1"z(i+!W;Ж`V3kg >#́v1tQjƚ]DaERZt1)FUfomSWCo:("%?x5`ZWDh6:CӠll&^1 ]F&G (3ldC|D@7+Y?E@7jDwLߌ*pTqu4y&[".RHRFAlb_{YVa?Z-5lj-֔lQ4n T8E.;V,V%Sȶ꠨;ˈQWXz}h~:]Ux.USs0۞M!*J\sSg{犊 7r*}%(:`QGQ5pq%" 6ӕmT2NxgdN۔k4( Ef f"ev˜hP4nw4yi\n6aTIa9˙&9o>k)+ߡGh+1ڣ,_DC/ ["%"ES1DXcDdC٦֫\\hԱb]zHV>zJxjZ d t]ʟdXo5[ARGB>AbN /-]w'Gnto0{rLJn:j^s͇v?~|n\mrp(Kr~ ڛx˻>3Gm=~_p5p|cce !$ ΀ RN OSD&jb)"xcfz-@WZvf&TYۍƌj>pYu6`/]z}x]6РzhCk0(4.K>T%'M/ :YQ{TVz-\z@7*%,Qj UAWE fC^߉Sh"lCb8Cʞ8KJV2;S1D_ s83T¸ O ||g5{ևS3.üoˬm}_:!8y׿|U_?r>@rv@qxy?}rOwqң|?u6M7Ss;UX:nπ .v bٸ1NeKo6Yj"9~I1i1 Ge3ihK:_{EvvU^z)"thA9N¥VKMghtiނCT֛u.8(fU6gўRvZ +:W0^+"l*͙%:RN4nTQ{U $!ɲѨr!e\cҢ\\rT*L կYq|O_CW4{G>m©V ;y0Tasgk^f}\n5=C^IPO<.9u\>g\{BTз7q}|sǟ={KqS/"4v|/u=xǞ9t>Xjz#e- jr"kgNqpb^ ҥ"t&w:٣d?RC%ί7skFvMOfR \MZUs'zLγ· =xMcc9%H*"l0x2yc!nzͫckιaф#ws|_?q;Q3ݽwg?8F 7_݂>n{]ngsK:o(\mԱjH&}fp0AD\8MT4Q6Xk ns8)E%>>7g8ǨØ)DXYe1vYժ}IJA 2s>-t+\`-Y|4JX5efKAj,q٭X4iAK؄)P +FU4/[fDINzخWQ" Bjju3?M&`&F FU p͔ \hŨ{_ ~EV=HM3|ybs!L%vd]AXo֕|҃N1K6I-5#( zGԅ7*6"\uEl7ؿ}]24<}_6VZ--=ƿ`S{'>ˑa?V]0IS!\v#Ԅ(j#9z_8H cth"-^s=kp_; .֩>ut/ҡwΒhlaRr)T)zzXa6]@}skVFNTn >F:<A{U2+Tw(2 ڬ/X Ίi|:gPl&z*Qicէ ,i.3M3}elKu@TJlCW}1P/:*J(\h\|)# f#[9o5V3Vi@bpkJ@1h.A8lĚLfJKa)ŢEw&/pQ72sE+Ww͍7WvM?k.8:g:G8O|^j^vZ& ,S+ZO$ASX4{r]H$ X:7(Q_d!,5R-;yuhX> )Yҡ^S_wmq^:F)qX6hz+lv{ev_7hP}?CAѹͅ3]O]Aƅl31W6ЍO=˩Dglp{}7Z<4O>"gEe d1DXOo2sjw^R̢|>Py1+K9SeD٠ Zɱ''>6@QJ>*z }ñO_bR 43<}?>Y {*]5*dNc@1'aQK ^uRsSeR4^CzUҩf#r=fۚlQ͇v3%%2 |j_y|}9ׅ:v-0º *! 
IDAT}u_Oxƿ'4ts߾n Ono19½s>īq>v!d#CG8@t<ͱ8}"L}Ҏ¶88Y'ͅ&*wW҉ )4[nZVz|*ib`$GA/!;X|Zz8I& JfXu zMXH&݈r_'mq[ (K2DFM{LLɯp""Qi⻩5֔sMiN `'RE+ h|i|c3ZjQ%jklbN;~̙X&8IWu\3H&N[>'k8\aԑIX: fZ_fL"݀/0PmQ~>EcDϻ߂$S{ J#b`ʽY߈Z±~סݱxf/%:f.7A2>2WoO9YQ "L`IR3NESd2d2=&3U#ECCC8I{n -Mڞ My39wǸ3{/?8omebp?̷7N d\GSd7Lɑ~\GRgoӅקLJp2ji):ORgpZ/W֚MtX Q:IM+a&hMa(6эKMMvL`ӒrV"|N?~҇4Q"+hzo-7$vϕV[I}L`H΁|l 7 , ֺ͌ďRY鏖p0tn3V]fR~Xl1 Z,EHG'`AQ=fTt}ϔS0>&qt]?O_I6fҢԃzϑYpj-X|c[1zT o ]fU&EƬcgt|l4 WQ*j<kaOx)s8T,䷾H|-K;̯.6GµTd.q>>ln/.̇N?u#ć_vlo/;ċnw|i#lh>s]~jZ6nxy3 cq"'<}f=ŏD5U*}.klsz׍x3D%@֘ d6AUY>SiWCNbg]feS-٬H)Z),͊AQ"JT^l5{6RDT#nehVyC[ Тq+uqɸiϹп ͸MN^=*횏Sy2_+%jĥ ,7]Aq)F}zk«9`Op<=Ơb̵8z"9W*mI9VsYFV7+:K`>k}-.vV +:ZEC) 7DT* 7NԽKNe'Dʇ[:.@ʦ5`CpCBSqgUw3 Ϲj¨Jhes׬Ճ?`FD>1|Ἳ=N>lZTx{d俿a=AZ*BObh??pg-YAXy܃T1=ɤOV?Wu,ix )cE33DaӪXk~I@֛ E]-NϱkM;U(%4갦|({=xrY^&eV3nNW }i6tO+oq@ cS濾{cWED1lnIcMrpģUA$D#0x4y"mgJg=|nzAutAPB_6@Mx-9Ym\f}kc bpRvàʑ-48ZGHˡԳ\R'}]cF2EH1W*C ZMH}rh\H6yh"+.(guF}{Kd~)ful8bSZTsAl^b./z6+̪"KG T%Ԃt st_4F l4<#B_B3śSՃj3(7XmvRVGt%ccU|",YV3ρx\PSʌvѯл !RͼC~/s1|R Tf}E *! 
(h""("Ql5yo[DH㧵z$f쬪ƈ2E28D#:%iU\#{( Mk`AzϵVE%ethV-HKn̕2g~9ů;T !UA4Ql͹^KiHdJu\T*\UN-@nnFrՌqЩ tNїǁX"\jvJ@ek.*WY5eEyUS *c"K<ȅ8Y7m|$g{{.tYjąi@uf#%4کZYgEx5jB9mmA[.|cx:AMinl41Dײ$'* @\?mwO}͸Ks^ʫGwTLkBՓr-89"z=Lc|*k6t*Q+ 4Lڍo]Ti vj|ukA~Qc*B_ 붂pF*nHT)2q]NCp LU}W+ @l4N>r~fp!Tş \?1C^Q>"r#J֌ySDkCtyqJ ȌϾs 0Y9iV|e E@YUMQ9F%^ [BFWrՊ^|\2?#P23PӰ2sŲ *j5$| :ηΈk dU6E>[MkLmt?.2I;\'GDBiVW8dr]6ͩ]@sID Qy^,1w==0˹hUdiPQbJ=j2UV0nlBM z2zLѨWA fQ7k \wƌCaQWBb7(97/ŀ.BӈR>1%:WYeQg= ゥ$O=O3 l=BW-u E;l8980 k~o>;x:qclrsr5V[r(dBmez#E|A=+ .dgq]bStP|1_+ 2>Bl+B7=\;m|[q~x ȓ9KUZs&ζZ"-U&DswhռGmt)b͓nXNM5j,aDj2*'UKul`fF "S߫msx3w-ƗqØVnZ.ΙXEgΟٮQO8>.S-b}U  UW5j, `yXLvD[rKnzTNSq$3`vb|4m>+ڱ@H ȏ͗ۜh'C"[ZGTT7A*\E=f]pxvrTjԨQLÝPHlDjKjSaxGU}=ѯ+wv_W3qF3^BvS/: -` ?ƈT@\yW•;5j<) Roc.v)o'[~w&5)ުBeSa"^|'W?t7{yC?>19oEuδo[aid%;JxF)8ׅt o5jԨQ%gFC!|ރ]PDجG /Ӳz@bC8AKC"O{q{&b) 4"oη5|5ɯ} Rb(t/ !qUdһu5jϽ5jӅz՚OjLaePFaW?/#~7 fDQ ױ8oMz-_U9gv\H!tu\* ")>Pni"5jԨQ%b EŲj,Mʨ(奜;n`r t%t05q~_ε$-=9nM3w"Nm!dI#QK(-ddd(Bqrؼ;{sWWx=Su~>}{~׹+ z\s!kg!0X#AFna nOQvѣ3ĵQfw";KDܼV"ۧxGwh+kp0?~|'~yM^B6?垫woI^bUb4 WWίm}о"jm{ErX#c#?|M F{'o`s@Nl[2Z5zG6ne=z@DҖzڡ,7̵K`~s;_мZO3"sswzh@f0ma BqDؑK CiR!wcq3@zQ VH(#箸2rz1P[T>[LD v! d^FJ)AʱI(Gv0M^H=SLq"̱Vz˙a9$=r'=ǖEi>s!;ctKP(\ik~ ބӣG: jRmxIcG{ IDAT:yVZ豁JߪRuɳGJ]Z١=FRJ/}#;v.ǩ(}ZYHs.t))Ymǩ^CafQC+.f cE<襹hBH PMmS2k ݣGy$1q%q*k/@|ͳkpNgC kMUp"N{vHOֿ|WS>>_%V[?ޏn0=.`bFYPc՝]g5cAmFdOоC۳Ix.,ZG > `lbZ3a= X[X̀S`} d`ݽh󩚗k`%BF42s0|IgWS0,5s3}e={K.1d׵sݡ"<ŏx/G^x?/jBtWܭ7QRJ6)[j) 6ԝs(@!kui?3@P/6N*𓿜O鴮m3.0^\G!Am!" 
Ǒ.s<wK a7wv{{8ڨsCK&7u'?74?/0D;F⽦t~X eK0rs7}!csυ' "{oS&8Yk VҸD:'DţB--x΋],O>p9Av_e]<,@ #18͚h/$ƺ=+g1&A<fRs=*؊RED9Դמ!1y'OV-;vʦj=/%Bf9.Lge kcT"/Bz51$6Ts!63OHya; o,```܋g7»?ȪQe^*D)}Ni7@鍎^}V’6TKoN@X-N^K~#-_=RC<{s*!eD(:ŦRk,8"# q[0_ґE=5G"ǜfc 2gei G!=4 *æ3Aoܻ{UJh>վ\zi^1 L^ qf7s_&;k^bL ~سOOqťbZNÒoΚ&P=v"tndF*lJ @16,n7\'ƄSMfaFsy88HKo"ɓ՞m]ub /vƪi"Olw :MrX9:edVsĪ!ǪwmvRp[T{n5;6LjRSAvaҶF b[:EVGaᨮH+_6o=GG|Fז On }DX*k bEԃ i0LdZED3 L?'.:DɽU0D}mO 3 .Jo9ͅƲwR&J;:ɓr0{^9Yٮ!瞪ݣ)XҕcI[;`YEB H>Y~f_=SYOGbH()u`sJn4JQaܢYi42z昡Zs;3B竃L1C6x[~Wv]$Lb-oy Ս:}t$f %ȧs\sm':RW\Rt>c\>^# hSsQX<,D eT9Npt>/m3ˋ$աKjx'x^lz>=zqk3C-SbdjDAV^޻|v}_Ԩhy W6Y9s{}yS0ܺh:*X(]g\K{;kktrea,RZwerͺs+sL> exe:j3ư1+5"+&OjgPǩ=;ct>Cɶ-۵3E|waYwnp.tiMs xK|}]k)wgg2+G4|NՌXr9(Tkc`>lo瑙'V1x^5n)w+X.},e/O|マXpՀ4bW/+=Ā+OG{ݺbq"ǹd{XϲY9488gEV cP&uEw2%r/)/fvJyY=^6i.eQv gF͢2h%^bt{)*PN^`} |28j+BbrڳsM /m8h,{$f|bx.<*5,$PW)J+c +s)*>R^3jzTHF\5v0B>$meT,Hlİ GQ&[ZGPn] d2 j]jx gBRus@3fX랱gP{nؔqB$yX3d!3ϡƯyx yqcrIN#?N/x 3 72Hb R~H5Ti%u-%LqHV=N. n./ dQhމ@ D0 019hҰ(F)Z٨֨xk"'yב_@ ?ۑR^E@Z9${@)YT9h$O,kx􅣘a5,~)^x1M`SV'QK%Wv5G.83A޷YxB7" 5NGQY=t)@Yl~}+: |JƋ8ct'ʮNϥQbmu\cC|b-jA=CrTzi-Nx'P}7#(.v׷uD0N|0VCuPH B`ñu ^U|6 s_eO4jd鄎B6.SX)ĕ&oգë= GFuG.羻<;?țq/SԹejxFObFozaCx$cq 94FJ)GI擞)P!|R?mxxR/}~230Z <=5:G' Ū_*TL}ak + ]@p(\wl LDs[Y^U)U^(m:(uS:BRK2P#Xb;b2^s3Yt]\7]?˸u};\yߝOwG_|?ȑx/7>..ۥn w_^|o?ϯxѫ/oFJ\N5 `J90CC\Zr|C@.V}Z] tJ@5in& 3OVPHqsR&';UF~xVי!yp4a̠OhE-88 a[Aֿ 3;zCNd +k9_K >Hyiv`Qti=~/e3("׼Na.Vn.n؈bSzApBsu5+՞FtK>Ex``LK1\ VF!SNӰt,ؚK 7ci \z\2 YVzr_-J9=ŵqt~{3RgaH;zH5S0 kO"̌݃('6N({UWU#"d;2ͺC$%wo{[0s-i_ڵ+MDbʻ?Oʯ׾#8̜< `y/~_oB8|-07~IWy9_`4Wzޢ9'!܇컹OWAetxKh+֬H1~ 4kb| he B:PHjoLwVp-]z4d wbD wwr"Kt-oRiTd[hi"Z!;]<:ce~=}zܛ-(|yښb7#5J.&p^%KW=NC Nlf/][:q)U ,z,eZ)vrsב5(6@ g`pgeU5z+g񗿘߹~nfS_'Ϳﺵ;gI>g_ 7?|uo$D?Z<0ğά컚OWh:돒IeHg4 kՈ^D=-ċ)M "D*IMA fQ}w(+w5y!5PhozA~A ]߰H&O">:z:2zOR}WkadaaU/XoX)RgzFUG zl%)\9.ri`wMkLhx۱\ Q-?]JI`eڋ:5+7.hgHUe Zjѭ2.U&XCSkHv1<=Umi$rmW q3WK9!sC/!g6)3f3ؽObM#tFrx I k$쫙I m= T3Zb9Y:][M"slZTu8 "'ll\VZ뻔//;DnPIMԣHD 
K7Ō6@@]Xm]0ͱCkx+1;~Q-*jxz+gpk44ltR"S^7ZA`ly6]+7*=}C-DGw=~"Rʩo)ώgnY&Ν>cxJQ TKMjvg$ 2l#?m0 nˎ Ge ў29Б8%m6Hf GFnZ_m|E EEƎ5ܵk/K h_:]>ON 3إtc {XSPB,Keoh` .Q^*Fa[Ʌk+;ف><3֐\s֨*yNiF 4Q;*|84#3uEx n r H8VV7!k t_H̦'wRz7*9[7_*M˔rnQt<\tjBv@Dt#W#ӌEٚ{ch{.*!J)T_os= ;2]u8 6L:VߩQsE| urk\[: KyKrNO\';W;½\Xx|CaY>'UQ!yWZqKݭrzU^返[bp^B8Ϯ9ow Mﺆvw_WgK+oZ΁se%F#-+ߔڽf$O!CZY=oA4׮^Qr3LaQ\;uQC.i1VҖW$=yua??{90wmM_ qu*CH| O!P) Fڗ<׿I)ɵy hUR /U?1š+ lMDB4ZR+jt] RI~aqk'VXx`{^Gg >Ʒ3s0zv~\ 9ޠ$Vk9$18%n-.`K1ESmx:]8ҮFc| Mmĥ'3(Rb/|O vsn+om`yԉ8qGՏ|g+?|+?*0/GmO6<~{y Y<,A3Hz%J>nH ͵ŭHu4rD:Iev a{%(,#m8R@ޮ(t- hQ+x)W.6₳1uKRu -tc+ :+\'?;tS^mx 7BvFi i`uSHbTv F8N=\;:Vsz\w!9fgavJGmzNz` %x{ }d#h=Pl4$16F-gvDV{Jqֻ[&"O@o.p]sy^;#`k&_Ӎ|FAZZaUlknXLGz"k%Prý,X򋈳o}CD}, I]W@j : B1мާZ< ۝Rf[ k؄^;]Q^ uy.J__~aBnNJǸ`3vS knPJ4d+e@OF İ#{2s3&vt|~ʣz[cRIC/3QM,2~W QWy)ah_mU2J)4"ETʹY]ׄtW7pƔr( (Rnb~Rڼ N{?r0j6#lu-$+OJJfR0 =v:(QXT'+9@g|l# : W:mCDe FČ]G6Ir0@?;dz!# ]ٿ}~I5;ŷ119ȉ'fڍ߮ij\J>/=70XxEx 0Pk \O[Z)) >_f_᧸o~t4c_!u1Rmv2m,΢FpRj [6x6ݻ s/cm“זr]ʩk>o1N(殺w(Gw(eWk6["-@5̨6}F]#L1~_RS0DSTvO!pe `[l!c=|qVrK[v ފXg#V&Y wU` Ҽs3Yv'rf-va*+`[;Gbc2 0+oV"jG{-XGܦ6}Ly‘[_w^[?ߍQ;t֝X cu2 4=E账zf)I)%PSMK'RSaA_"|Fl9\t-l^y$NJ?1zvkۡP9]2ZAO_uG^g2 ]ޝ=+Q-^g*u!3<-ݸ7gb,= e0~Eo)8:ȞȟxgO"3SRxhS-6_ Sҩ^5 K"veruZd[ee?i=.XX0Dݴ#- Cu.湭ݻG?V&z'`@i& V@.8Ry' lz=6޳sQX+dsOl]OD.vn˰rԯfkR βrx(3h(G}mףvA;?NéEN>mzuQ\O"4y0/ ? x]kSݐY.Q5} urw);o vr"я Oîk! 3+u`j/.nz@0 ̠ [|tCt@ԟPqm\3ܼz7ԇ RJVJwcᄇc"J|h8>ET"tcU̜[w.޾ΠxgsW5擫:Tjn*<>pQM 'Or7(VYf[: %CEMբR`Ϩ_ԣG+8H)Gr AW)BM V>gl BC]KO*DiLgߨ׿ qcsLO2;Ν&[f+OWnEHijW&:I!<)/zn @[%3ObZְƆmP35xZ2cxm !&[K skeLtCCַ+wRlbOeq fA={&?wuL{\0+L"4r P Z$SlnIAPtȪR \(F7?ݠGbA)Al죞#_hAvQ%|#]d#{YmGsEn+/~fdyp;=D B0`UjEV\+ <-tOEk][DͻN+5*0dV&'<:o=zhQ@>E&q)v,]Ζpݖ53NY;Y?B2q%( /+f0 $nwz8 SYJ҉˰Z֯dqmR^x0na\)XHO˾G\hu|H2l1:L֩\΁@@)S9z"xKfĉ C5TeCl0r^ϙcF?;8Љ~k%g?82 Qu `TTе2p5,s PkL)FP^ ձ(nc{V+ܡۇv|݅hb JNO<~1C-Vt;{fbrnIN>wϛ껂B#knRGO_冫.=.h#!yv"o`رɕ HNzu\"9ҾόSLl?ULM'P tdvw040K0-umAL0SXAcI6DGKf+pq ".9ix6bhْoNv!b|?Aݸf#$gnuQForT8fۉN. 
a^*q׮{x3}dq!c!=a:=7V"#j)ߖ/k á;fe40Af֨`yRK=3c(bFvHضN2qccȇư"ء!@…YJ g:qQ'=$G^g;>Q&A=3eb颡X9.ajefԓ5}_bUAČa<3BN_p6'r.@6\)}GO>N scw}W1)s)U(JEȬNQ )T &a>4rT|e ,|iy+vjN{Y{R{V WíUR󦃢 уģ numQ 23v 3c/&;EеcDcaZvv Ai~Ee - sf^6_"pnwϟah>"n ƹ]`9vm d$4H< Xa|sr s|.q`oMAfEy% +>a2CאXz 7O)~ ^rp2$RO7svIJ\J딅0?y?KW\8 ` te27q Hrk`d*@6z(2?q@Kq ř8=IN7ygw_lQf`MX|13>ݵ M>g;1"X5&K,,A14=`~&'D&]{kNjRk(٘,e23~/R? 1NN&ã~8dMˁI_2N?da6vKԚ{ˊN25V>Wq1ꬑv"`]r[8z躟cǎ3==Ƿ\72H[smSV!aG'DjW:FRqVh\dfhTHZ&nPލRr`"}WՁൺhҌp72?⚞]ݜ8fRq]Nz 鑛]x#dyNfF1ڏ2_1R#/d)qeMize底q}Յݝ70?t>!\5G0Z.ɗlzc4F@18]?Qm|TEv3m?pBCf6 pHv:8S{_k\ix7u,v6L$LJ]h;4ș=UzF{8v \<--R#,LWs\w@rvɗ#LWq]x;!F~Erz ?!za|89Kl1Us9u`[f973G*5~R͐-Xl+Ϲ]s (bY&z@#hA'%wi iF)؏mD{y v*ff2B@Ub Z,Bى } dnEeXKH_Wwf[0ɉ9NQR,_-L'W[Nnw-x?e4͐RvmL~7)?˧E1]yGbxOw_qm/}w~Bq\u%b~VNy#!S<-C2L,> 󌨀VȨ^Q0xDPa2® Vb?(Ld/ `^7L08f<x_D,NP`z+8x(, l)\DHY p_[Ofyj  Oh)0p9 kF|) -KR.K~N}|z]q1ɗ㴨U}8 Iuia}2Xf|W;:VqU/?$})/~o; HU fvC] IDATO1}pRpwS\3_s-gv~t- ߧCIRt?Hś`f.♣ I6YU*PA H(tbh Z]Jf'^ž3~NUC$o w@}U-}W^},'.me c2<&>r}ӧO3u8_W*PܦҦ"d?[Nn.pe>wz7+? 3ũgg >? 
'Gwqx}1]d"{IwkX]6,Q]p:RwAXۆJ!U'jeM|nSJ4Gc~IQJeRiML_ پK?*h+ǃ|aFWe2,L޻:%Zt)FB h|Lt]+NlPdxFkj&|%3^Cv:N\FƲVp{߀Ւvxb V7D6vIWrWhNߒ'VBh|˙zlx#L)~36;GW<;ɱ9zO%of~斍>cD9u&ַ_:gK{O'_t7g?Yi xq&Y$_!7{0{x=;ևa>o7-}gxc|Y~g_{/Nx4s|せ$c1kU7`v^WjoTNUW..W'z$.oA:2ȭ4f2o܌&&p{qUV@K,D!A]٢!hŦN|29ߡH)TpҧY8n,ŅOx[z;79|c1(^4LeN~k~{x[xƎ딕fB>4Ta D~ eЂ=*}*;ćoؘ N|cT8bWyNG lTׁ{Ǿ'a~;/"tf{L#SE{YdX(l?>c1+"~2lagJfgb4Un] QJO#h<>EH {; Ɖ%H:optݸٳε sɔ{-n`ߢ*%K#'7{ATq ~|*R2-<'X W~(A,GeqV>PK j.*PF籼^Sc[ԧ"M#(6iʱ1!6?6s^S#6f{(}| Jb 7ع+-gS{;MN.ј41Uz|gZX [Ea?%QhBily8~ JQL@=g>NlPLf񳙿>M fbJAe9 ⬁SO@Яa׎B|ԡ۞չOsH3尨>luM 0,4` ʰhMGs-Xl~$.q+{9Kcsi#AkA0Gfkk5;9ݴaek%J.?GꀻqiԠ^u0L :Pd&27*Ms@&L3oyH)~׻O븬Q܃ ub^D6[O:E#u ǷFxPKn>Ce&9@^3_~[45ITܳT"gPJJoyhd Є:E46}?ڌQJ#wr^j{`vK%BlbW#?0H݇--ځI۟1|}H_Q&r,TDwJ#6MmvYcz9<xhfPfz~6f?B+y w5RJu.㙻p"ht^Ey<Ogn|4r2Sz*Gp <7}?{Dp 'wcGp?'6~?8 a\mm/mmkSLSuʹҧyq3؀Q}?t+̼/c?˝:xJE3ۢ Þ)DaES՗RH4Bab]ceaZXgRjRq àJ-|7P{c_x3Q.,m i֊MSauw}ihk]ZAzl5Vu\|OejffјFlX!kUb>J!4; MiehN=ss;qX]zjaP<?0)+9=~67}2أ3Z4bT^Ri)]1rai`DLf1PPhD{ny71+M549 av" Q;k鱏>ѭ;~ԹCxMߏZ_\^VT74sSYNwgޯ2`*7).=!ɑ4% &{rь49 ♁j22re d*cc=;}347,^i4hB cYG,4=Z9S 8 SA[ZkWKNhz[xmƨt ;<9Op3c@̩I Aeao>Z)gXY6,ʹ"g+A ͜hxF,˧~C װ2Ɉ=!(&W~<"єگu恮{|@#N3-Zvϣ*rWyPZ뾁ʅ*=ue3s|Vk(͟6 XWW_>|1<\Ẽ|ϟ&ocDwO pw Lǎ O(ګ*/ |U_l91-T|37ȗ^V>軸g/+Xi+L2 t߆cOL$ i$p2g*+)gdrٟӏ7Gsa>8\)O|cWCx) .iO;yV.}H 8Ǐ2 %YK<+TR4bI ^ HLS){?:E/N~a)Ts'6Cp/>c0#Tr"1Ml1 LG-ok <7H<8rJX84wIejaRо%&mf c,lo5~| jS*3-RZݪ[B $˵Ή3؉4R /ah+~rf\'I#7B~LP+<4c=e8>q`F><#z!0#)b$ҙrTg7wn҈S{^zhҨD5psySa8Bz S@).T:jN'N‘Fg>t2樔L6DLa{l-LfܿՎ'2S|OO|ۏ|D=VO {_oTLVV:A3o$40T.DVrpfǏM%I{=36rRRlzfF5Po;w! 0bCͩ>pkV3^|;9I`BPe,D3wc=oʈS+<@jr?XA) ǭܵ{$fE\|̐tA(-{Bvȫu;H5ŦQFjfFX8cvvԓ]xf>:FjO3z/%FQcje86Q'adS \k9mEsYZSf]I-u/}xkGC1ZɣaIj{k.vwIdБ|Zsb=|(zXMA+yFL"$R)ɨ LJuɑ^?G#/GEIisԳwS+<@0N|Zk=2Zt;n(mG0W3=h+7ȭ: lM3qFhǴ7}мzJ,ИmX>˹}3'yoV80RԦ} ώa[I %+T:~X90p{[v"8-M|+=Gyi㝄qtSfF֍M𢡊:;`i.T>2 VxNG Z;"Zߜy#2>D! 
jcI5.Fd|?.PV+}9j>H#AiCwq4v:Rid0mBU]Yנ`$veDwOL]XAsaz~`^*a65fZC2ǁ>S ,+A (Zx1p< unWm JkVk=%FFcm4h¤>s|+ơK1H39VeĨ6G \x9вf}TAbm{ !e[淃z$~j<;[$˗JjIk7T^&쒾WfKny8 w(ca}V/tz8aCx=$Q5aiz V~h#¤Vx'J5J 4ې '>F!kt%w evi3NuP4Sv-,2@0с,Jbaڊ`"4l? {G}$NEۥՀ:3ŜQϮPXW:[H3I=wja`gŅO HL5VFaG,2GiGT %!UJʩzE @~+7ʃ}6ޑ2;haPpǎQfܩAG@}6Y"-(xh6Iъvܐ3BPMcNR_f%dX&3mU}{ BDުm&:3d셡ec6TfAK$da@{܊u3jfhPUQ~l,EbbSap߁zzzZ澆ՙ1 w;S!cxfÙ IDAT稜807#cJ dJ^\)#Mء3v'L+6KH23՗0(BsI ٮ!F T|g,;;ڋeh7 8*Kr-LZ|#Ecj'A={O^r"2%HLSKܗh3"tׁY=a 5N-(< ,qe,wh9Ҡ>y 4eĨϵ(E胗\ލMm}.b'hnNb;9j/[KZC_iZ>{GIr1֞he)aJ;J#Je:= ~<P o2$DʫK3A-ߑ.Bs%3HP]tn|FGYra)t#j4yaE`eG78DNr/B? ;4-,*SFwY*{˰4঎:<&0w 83,zIՑB1*B#hK)02^fȠ p|Kʭ*z Gsۖ;đZoA-{B9tMȗ_maO" ڥWHֽi4(`U`Y*űRf'}L v$![T1]*H/}Uu3T_ њ:>Ƒ0 F1r1FD8ξQ%|CSB$2s1Dah#N-TsN=9{ `~n`gp $k738m&>*sH§o~@߸`uxTnActiMF z0F8 ƉMfҘzF*oU[uv|[Hk>h%O$f΍ wgO|ܝxO"ĉƸZV(aQ_:n_#'Y'h/6oʍW}L쀼 L?JpƒyfA8P!Q݋s䖋; xSxє\9{WI]w 8croUVO6Cp<=ּzYp ?P*z_G4T$~s;d7ќ~)sK$UAR{Rʈ;*1K#Nm1ܱqz)~+wI{wJ RѻJ~r1e2_C`ױj uwmm]>nAk-^b (/".ybSɹXI=5 uqrO˖W2]b=農B [ @M+{u=pUx6V*9S2%,<3C܍KBvƎ`fB+Lb(CЊaei/{;=ܱ5fb}x-^MRz77j|yrؾv1ngz' 1G2+dWHU^߾]lBfe&B'[*FӦb;<{/pg$& kl[5#pPze y%'rܶ@(#1ZV9n&,+hg(jE@&A5əIeJX2 a"fy :oZa\ Xq<%&֚ՆC W7V$xEAIX`eIUBэ9Z͈֚̈́>Je>s甘GEWye!d>E=$V )'(#F+jV00<)Yڴ[aC1X78fOs&eNaGT0~ڞU2^C[T5 &ARS7VX1?VhA Bk PNh"5w{Z'mR7Pؾ雛)ϨI2RRH0 ^)c@>; ZS}6.Ŧ$Kt;B*\;lK~7r`AݫGzN3h6OpF~΄ HZjv _R=+vaGC*@ -̑Ϻ104 lt;YѲ&ȄJ3>J6gjeZHixWvg4;ycۈ42F  }{Bh9R&JX&Ld쩨g/]/F/H]3P G:xG0WjVO)`fsbw}Rk^XPw_dmF$:3ŞJRH)]Yb{˶Gj_܋tI26#\gMKA#alRVaJvǧώqp;IBd<# @ ܸՃ K2XXIe„ (?<ӽVw ^=1UhENH'0|9RItwWD74j|Ο ~U۪Tn+#,Dࡵ&HtKJq] ( wQJRX G. _*ki6ba"]8@Pq|@ |\/p䃧f&=n4;T;7a7 L#3:8:<^ӈ3W6뜛֚/Iih{hq۾5cpi>?qJ{ln*G͉^ʹZY;*>VkM0IaF3wʣS¢)AI2*~? 
T,ay۵R OJZݥTs($]b+rOa\+79Oq~&{dq2 @&BOLqqO/B-[%-]Ç _u<]k[}2 KMt[[[+U?;rK[*QHŸ4Eq9 27<(<^^ l6]-w;(ס]}SC{F}_i^[Rs|^@ hx;AqS0M[-TlrEZcW*ۆØ3S8A^;`r=s3d(@ ,nrq>L9Blָ<$N<~f3S $_J!x֚rs3QJbK+e87Zs2R]\sQd\٬Qg1 \-5(E];^}L nbVnN)g`Qb+$J!3[hy}Rkው(@e֚w6݃ra@v敵Jr}e0P]+36aNԚhJ 'P4?>PWk77yemyq<:*mkྨ:>_qKּS.a'P|澚y\+7|nTZ\{֚jewRWol Rn^\e5+M^^v,|y~yp֚˫6ۋv Vc>\+׋l4FųK}Zk!y7W6&W;@EJwsRh#C͞Ra2µ7Pl: òjuu*>VɕR\'Õgnn/lc ŕ*т$p 4Xk^_Ev $]Z"i/w?X (y3]^͙M3V$J5ɼVnrK+U=|#o$Uލ  ޡ.V[4\e|H+΅;MȂo+l0 ŦIxT0iJ^^d;\sRҮ,AQi,<}bf{\.(^Zl:>4RkR"U&wvw)֚+k 1颵h7w#Z+ݻlVnX=<$V Oyq*^ydhu7ϥתw.6xgW֚S7K>(Qq>wJ8qpy:tlz 6}34bTHuRC#7P[k.KQ=>e/];x R'VыHabr]r_&kKUY䅕2/Vyc62 @7j_}Xm<v\Ʋ^$=} (;CZ" >N˦',nInּ\uVwX>~5TGf͎“nVZ1xQtXwF4h ƤO~T/Л^lg׺zz^wymDGU{[ 'P<9KNakX k͇ƵR*A0YK"lnseYnƵRVO}ݼZ  b_q(68G7ً֡L5-^]MVv[oh=MF)( x,~FI X,4ZJMJE^bڦ'yegfֻlw^e_(.jݑA:+zOiޮl/Pcn3_l:Mgm4-?ZFc*'g0-Bl"TnYT_yڦ'q|It{`Z3B$Sx 00{=z'ZMsK+8;%`Hcoڭ8GA{Ri``n4zlU]?G$܁$3'bv~4ni`H.PfG(Y^c d;uC庇O,{u TW*| лS5n{NLf(G*gjpԢȋMOj^Z0I`-Z~Xfss:KE/%.69sk?9~ OR ׹t*,-]_?$#_~or"߭5[|H&MZO)l_R=n/),f2qXE΂Ra9MVjDn6${619T1 =PC&ǹ}7-.w ԚU 8>ԝPZW2qb]k8CK:vsd!Ņfbڈߠ&RTCǗܬoS*{*KJW)ϨT2t0N0 i 1CPHĭpNh>K5JfޜW,U[=t9j_j134KZ a. 7KsM77j1]Pz-ggQ.)zxru`lb?2ֲ~Z$c=m<КJck#%PZ 8 qklykIX$,#7-0Nkbe+m'{"Qgi~9 Tcn'ן_|#֞F]%m~(?_CO6xu_?|O}m<ٟ+L{l<:<=e<~O)<6TJow S@2 A28UH5ǧjmp9M3}]"PM.a1I\۱v䅉jRJA ˴ $˽r#)ql5JR*Ba֚ū6\h[wy%l~5Jr![kTv8nMCC]8P:, @<32TS7x wB>Dj_˳w]pɊ[k6x-w-#̶olz@Ҽ3!r4}6? 
xUXcJsmQV[,y"Z:Qc0E>{2/pi>U %8xmX4^`b@k+}~nMKo/֏0̩~ _|?\ȏ+?|R}rgf9UM|-fp#ef{neȉųa)'adⱮe$PF"4T;ftJn`Jx~rh>) / Oulmol{|&c6Vm.*TaB Ex z 2;DJKEI\m8ZPHX;֡wQwos@ͨpD5ߪ@bB7QKHI9<˩dG@ְ2挴s=ͤ ڭ[7*wWg>:mWvO1'>6 pWo`SɿŧyWOӓ;7/\Xm7!ه* ZgK]0BlPsMڈcijv@h+^hm7JּV(͋+Y,G57+M~q LzI\~J;vv6҄&0B 5.ûE4KfWGw„ 6Tە ңb%HL4$$As׊|W?Oue6[݋]j NidL18 LŘɲ Llc:ܵFȑjswxvL1+ Z۳O @ m;Q/G.RwYo:c)݈FgK۲RUoƁ慽ܨ4;pߡ(MGiܬ6Qae„ w>aG12S@CZi0]|ûkvLT׮ے?`g~o'ۏZ8.28z-d1NM 7= ^ 4_z4^^!cv)HAu8QW |v=<+U>xr*l>_+u?Rv]8;dz%ltCUX>g;*'upNn%Z x ptqkH@KquZf#C!#^[)|Ox:~ eH)ѷ(krh\☞~@Vl]so:u|&; =8+Y,09w[ !k=煀LY-o(2/VoãgfK6&XV.* b/~^r-vڻbŎ""*PBz:Z(^I<2;;sdvSޣȭcB)}C9ˊ6wrtclZS{pjjTlƮIJ_˲⪿ut.)5RoJWcEЬ0ckuJ 5p@uĢ:60;dZ| b"hlU Ih"ghyFKZ@Pk7ҡG2c-]OH{ (bDJpLGۏ2m*7wlD)>׶y5.nw 0HIhq) x $IIIՆpn C&qtƋ[}ȁezHAB6>(MrRIHov"Xa3 a:%Pu~sĮ9Cԉr,Xbߠ1 e4k^0q9yP53[?S{3=cBR 3AN9g@9Q{:*;7 R,-cP"NM04x}ʱ-l̓]"=obYaUU7Y5l݋;5; " RT+Hi!.5F0ZhM#kc^"ګ3)=KZ@0\T]OTXUmcGXl&H]4o%uHYJBzn WXG"h۝{_}[(E|ogձ$Z30jG!Ditqy [Nla>&fQ+ؕ[iͷJ(,w,8) E%["C<~*w;g~H@R!&aiczcqEYD5:&!/.jJa^GP6(2 [k;Xκwk2rFB;l ?Qp؜ \݄e؂ UL"53 2cr/\ITO@p3}Ӧ+%NcQP)`K"u ųRi;@#M UF,5q莍cGkgu"lB,QGr0m @.ŁH+°[/_YI"M٭l`r˱@0_-]Ӓaj7ZM'dژ^!T4 C96cJit[D4 e5EH@&t4PD=Q!b8 -\[ڮx"’B&nwkiQۉ͙NQa;tƒGb&a}Ebl+@K͐Wb6Q+ Cr xI~ +4|ARDmӛ ӱ0MCb+&!bjNWJwB5YTX%Q 4аW: ɀ%;aU+ 6=ZfD$h,[ˇ{pk ,wJ) Fba9v\eҝ[$J!lI!DX/ˡ,4`o]:ZM0jIA @Db;lI=hR646VbZ=\.`EP!ꂥ ؎EBGq'2mؒ IXaKDp b*7{̦ljʝ{r KwS.B3aqŃcF#NqX-H"ec>lEuBS [sa)UmFkceغw F'b$P.XB3aiD(pb;&2lb^lKf2{!j$Tt$ -- B %(ñb̖c1؎ ɨ$aرS5Fa-mlwXBkp,Dp0[D4aQ ˓Y5^s [6=QIȕi+AlM&BT.4͍aEДcJ @DRbq؁b@B8K^brVGb?v1(bG YJB]*7 lËixPd ؎ I) F~T< hڴB)ࠡdz,FiBɯxЈ87\lCѤl rNб5OZ=(bX;雬i)`dWɲB!˕a0 H ؆ I./c KQYŞU*[3v*B=5&RDI8fX82]AѤ"&aG'ɕHiMDsK ?UDÎ+p)h2DWh7X6p0FIQ @DR2 :G MlW}OV шzHBT(wx574:H"QSCCñm*æzr4*~iUEtABiSW) @uD49KaQbK eaEd34dBf:mrUdۓD49LzH%GD8AvR ab!U4#%؞ ) FXD,t 2bA蚦Ѽ D!v[I}xqX&v݇XT8.?\G):r !XmJ&NsKQ' @Dc:rܔeQ?L!`@+c*Ba;ҋZE-a/H%Q-Qƀz*n;膥iUẃ!u)0Mʍy8 D49JA%`gXNq ~BrQ2R( SA)E_/o7Îq;f/_̤8p'q1m@8^/o|d?$.4bzaGt 됃CA8fp'<)8@|3g~^(:0zwhB!H:L~Q?r0ъʫ2TLq KAKλ`҉#p+(.ϐ<tJÕКi=C*/*̐;o#CfBB!qbi$R]lUX?! 
.5PeGOLXrlUػ=!B!-!B!mʮ8D蚶E%B5]GC* +2ڴ\:/]w)}4]b3u}Bt=#\>D ҋՌX_e^>|y0.N4>[΢4簡h<th ĎVw3(9xpKe~[KbfGN?c9tk_/:aJ)~_F9x?< h 7܇]Nڗ~KHc_ԣHRw/8Q&rP+G;Nv-Qc>{c/@zEE%8>EW߉egƌQw1iM 8rG !_ur5,íTERJ)Ul:[[uw?Չګ#κKqSͳsT+]1K5jCu>乩*-)M1{GJroV*,SG2>Vm(Q>yos:쩪2b+DOݭКwn‹zܙ/ڪV)Tr2j9zʭyS7]R`:WGuY7i7bŞ(QtJ1tc&p nTGworJ9CFns IDAT RX"kTC2qJW+5VfiiLz;L<8)$yV 8w,n4Mcch1#V.ɗqyMKf}]ѣ_%6T5bEQTɗ+\y  Y<>#-YRYFSOTx9a$zt=tHS6hP:¯gH.$4Eg7gJ>,7zogfI꠹uα#|}|Wc٧c(Eo34M]| >N}BFWfoo2_@9T =jBEhNrR4o&jĂzѪ y{)W5s>'7; {ӫNJێ7jŞѹO}!m:e?^Ex#J"%ERe7knKrbJͧބ2]PUzJ D9㯺N+ hٙ ]<.ťӞ䢻nd̑ݶPE1bZ#FA3]Ï>+HMKn좊f+Qa5Up+ܕhp(Hrb_v47^s*Ǘ@y/k3#Y+0Eu|ܲv~LD0<}LyΙ?HxO" ֌Rʡ \mhxrg[dOsֱmF͉y):2$p3cr.U7?n JK&_v1V-)"GU$Lԁס|:d d?KD#@Uvc(ho&Η vU@w"6 -V2 @h@'oeSpw“2? h:(X[tn7ťtົ?O=SnePdK.WF"mz'2/IH&@Lp٬4lخWБm{I+ak[6,LF j/?(0$5"B^G @K%)1=Z0U,]U4XćN#I-ᣙ!f1smړޮ7Ug.kjYX.D*Y3;nzjNhrFN}3ns*Tw9')x='1v;f?|F 'yÎ8墳Na3X;e>f^/UnLxg|?g\-XM3Ɠґk.uOq 4ybڿhuձ/㼓N/$ݏj>7O-/_[ k%%•ܞ+';oC\<=Q}{M0yd\z&COث8S]dL:`څ#oL^z).6`ɒ%yS[0rwZ>_3m]?HtNPϛCN։oނ};3wxoX>?Υ+uI*Mi`[ ݷ߳:{_|A =jou;no>J4RuV>s ?LIxZ0O;% k6)SxoR]KLN;f(CM#k_를̜ z֡<Щ%}@\yw5Z 8gᅱ '֡XRB4% ~V!zy]P" 6mx c  |J#Imu7>m.mF"^^RJoPM( >އQ}%B!B4"B!B4 @B! F!B!DD!B`$B!B4 @B! F!B!DD!B`$B!B4 @B! F!B!DD *X.sT=Ua;J)P`E EXN3X^ˉlqmِ@#kh`%yXywN)Ei~+'A!ʢ7gʢp:ʫC{l롪 6e/Ʈ׿)eSVPHaI9cS^XTV.aUna=m ! BR_ı#gQAPX%d8W-e|,['e'zdkDXVH %+op V١%9+RT&99ݻgR^Ϫ)WMy:tpR\u'xf=:`5sUrzmymjȁkӮj qYU,u.+]:*ε| س.S +*M Q}R ]Tw>GTDյgWRUoկKfmRVw8])Ԛٟlܷr3ƨV,ZM6lJ{< ij//?|D8=6ՃOPr*LÓ:SGu9W]pEz7J)SC=@ykCˎӟl,RO:[H^oSQM8Ou:f+@M0\FM<حeF;O-'.݆?SITCP)z켶h8w٩򨡓TֵؖMNzq2+רA.=1ej/^4PuLٞ[ 9._VQ&aum]My]e)`vd8[]Z62S}2V5fcVJ:i@^u@tVJVzsR]Ӫ4dճq[V]7?]@: ש|uqCԉS[)娂?n;qBG` !jj]:6sƳ71!ʂ&y-KX_•$z ~y$wLŕ|n.X1M|Wr#qu9fw05>e0pœYS,nz:r;0|"ywKÂ>HϤ Wi|}lcX*䚗/a&^p##,@ %f?+1iv("RUĕGwދNcRȧ~ۏ!WW#z5+JZTo{1kY^nuWMp9ޚw]dt`g/]X s"~\U@8TδOgws㮻Wa+ճ? 
=gN hdDx_NB4e!jv4˅0t~m9a@6^ƺ0'rUJmw&wփʛO,?yk(q0|؀O^|.V]v@f~Apq GxbVu{jyС\}${9`:th]ee W$`RF6e8 ,jd0j,W9Ar&yLb*RIoۇpl' IQ)eEnI$ߝhLaC:p^wg3![O䨳Nu[[+/$-G~kᰱ3bH6V}Ʊ/_bs#33:2XE2~ RYtdN=v ~eRZ8)Φ:}r~-=\յO`Ŭ-ucgqvzW0s&i{0Wi$!)Z{L9s>a uV:Vs7_vm]pՋx_ #.9.hVT(M7\O ?qI-f5 Y0#)ø$őw_0M'^x9Wc|7b5U0dq΄[ܱ9޸d.^1G^q o&v E ß&+Y m={g6!Bj1CUTAY)XMX{ǃS`<,bEn :jj5O cѯڵkDX@ɍH{hK;2,R32p׌\d˦!]k"\@󥒑dtM؈JNѯp!ꀦidgnĶd Z'5_'xi"1g]Cwk31btynhUjtEP b>5ՁXҢeu#P88zme)ÎG),hn wF[-)PڳGsV[Q'3)b bmX%%8sï^-+Ju0B֭x]~ PJrҋrI8V'>i7NX'ʦ^F ^7ndL4 vBY eiviC"ֺWjMˌ(=GgOҽuRl;MuL?ie:{].Q-"Ev! Rr[k.2rou>-= SiՒO_x8;:_ym;Jf֕{b-2CBJ v)wtΝBh3T SX«kiTEC5-c[KmӍ_~97UouɐkBCJ7WߣYv7^ksUS!Lsq1u:(SXQ bmB@7!l[#k!DBe]%3[InׅE}9\YDQryNt'n>QgscDG8[pyҝ9oKoۆDݠgj3! bU3Imί_º*R_ʳoHJv7]۞-Yg|C(jcEAm %dUf qgvk̮d&r叕XMuFzk5e@e!  ztّ~+%Q2[yr3}WuܶO]X9 ~[]O<6& L ƫU$g4U/+]ӗ;3 jZtZ荶n̞Gش(?~5?@fg'|2we, 2ר.J4y8oZtKG參Rl\[%Ɍ/>:l9>/Rf z17Lf_ByzLye@YA/uTLc!DC!>`ן䖭P~$_O&>T·ϿBw`kuWob:ɭ:s5h|ٛ,PkC1QKҦ-n5(p8wr%{F6u/io++u6_ ݇StzuH!u6Ə 7syş}k*?m=NcQgB>i^kƱi3_~ޮ<9߲I޷LgEf\>GFɊ_yt<_9VMZf5w1%kF)ٗ>mzU<9\ti>0B+轏81dyK }=Bo?zdǤѺɜws<=V9N:?6vJ|Fx ˺;};&N&W"0<~t+N??BZvVd֙߮v)lX9?\{|v@br {Un è gF'>?\M_ÂrKFw>#X*yRwEb/eL:A-BBel,814ز* mq1#8KX2(Gw-{%Rm8u\dz8Inq1ŕA1IDAT.1.ލ9 s w+GQY%G[Nğ# K6'3ҵXf$bq9skT2Z1dFTÀc֜x~AhA2О,[ oߗ{xchsi֫/{umOs3xHE} *C>&LtǜD ?u4& /.COWT`y9HώYft -j+(,riզ erOt>_C:JÇq0 cЩPQZ@LTRM/s)V7n\ܤ'Y+s :>Nt 2AEv!mjh{rS>Z^u\}VU94~mDQTw?':`XQ'ET<Ή -F6iticRtaGx z;Yll&wE.>m947,XNgv~8~qX YS@s9$z6_N:2oo5'͉.!ACֻGqi9-K>Hv z$eSXHQeoFa]D/ΧE@h֡;)f|7NIB4Sw@!ľDiZM)hDt.;8i91Y>MԴpܸv!i7̦gqibnwܒRXnO`&Ji=]8_űmLvc{[aFؚ86Ѩnp]ٹkmk{Rfu'֮SN캢vr\- mYX˵{n(A84}ŊTqه38O_$7D!y }d};^>R RJ7xTNع$IՁB-{#rc!#Ds1kx:6vi!B!!B! B!B#B!H"B!h0!B! B!B#B!H"B!h0!B! B!B#B!H"B!h0!B! 
B!B#B!?'{IENDB`rally-0.9.1/doc/source/images/Report-Task-SLA.png0000664000567000056710000006710013073417716022632 0ustar jenkinsjenkins00000000000000PNG  IHDRbKGD pHYs  tIME -&= IDATxwtemz/$PB ("HQP|,(b{}X!*" VPQA.%CMٝPd7"sN΁r_wٝ5a """"""R N (%""""""dRDDDDDDLI%""""""dRDDDDDDLIQ2)""""""dRDDDDDDLIn\&ld²e9x8JXh8D w7]㡰rjt}|@?^{e$o6˷|4EDDDDr&F&{~Gsb9oA>LjKKmQɤȿ>9915ⓝ(znWakѴL~k[1{5 W*~O{Q̷O &vw=/'[w'[{K\5NYGJ2Vxr~j[VK55O^K|p8aݳa N[Hz~6,t,F.G|Ȅ"f~^NWTLOS vw7)"""M&]B[йoLkwyP'vlEz/c KE*o= 0cf)xV\_qyErSTFuUZScvw:m[wHa>IՋks^ޒ/&$N3ԫ)i/[S3 ^ lV9NWlF6~ ՍAkn Gw.i ͍cޯi7gu',峩KN۷=u*z}d'fO5әhvV~1Ltpy0iA 4w?IyO'72s8e#n*CEt,2y Z ͩJn~sg?c42Ss/w?߿Kh ⪑WDDDPiS|FZ~6#x|ߖ%vGAGpoڔ+ >KO܄7g\8R7GݹMm]*ĭKq~w^ӘvU݋)hag.z+ύHĿBnIzF$jwj.CJrme-[Α ̍ȝ4O7с 35<@|?"%fKzz$=+"VY~9bfOq*0Pt 8\!OE/\L&̡MsObrnƈR5N5C=({rI\0wy>_?'\㶲|t>O?"/"T*Y]\=RɳOU Hc뺣\ӏAtp3U.AuqVUhF{V5mKes1=M)Q=~\ùs}BqKMb\&,\6N<# ֗ճ 7o|lg? +gfD k2WÝp\Er̵Z|^p .On4:'7LC# 坋ePn> ǃTo54\.} Tnzƽ6 svDo':.ͣm;7\ <t~cfӦbR6d]Ş| %"53`ΔZ610/HU{Keit9ꭴNl`:xl,L@܋ޱHnQmzbfrr1ʶ›]Iɲxl*bMVoJpsW >L>R23k_DŽ-=.~>wv.؝CW7_梙lMmfdFh.<<횅N~x)t-6(AJYֲ9WџrWizI,exC$^97 ;ﮘe綋|'zsj+Vʒ#(EDDDn e?M/YN1}C"$`,f1lpc| e'&Hz@{3Nf̽pEf<\\]Y\[K- rwXOʑe|3nم5{gr^JV6@ZqHLniM?77Fd3ރMց|9ptZa6y+d[)5g_"|v}<JLcO""""d0#h(w\򿷰h'N$%ۆw!!!FTqv49 sD'ɚ?`ƝL`8yX5Zt 5_½'5FMnPoStx|u;h1p |:ރ6U.wv9W8]5^¼_)86\|"ٸ ށZ&NU$xԠfل\_.ad.VjD}ZVn*x*j+VpQ+eak`EDDDw&з_( ?60xWr#""""R. pliЩ7Wt+-ٛ xEDRDDDDDɤəY-sHgrxbᦐB 7^2I͛d/#O9uc|hѹhڭL ɖN1qi\2{-3=|tIEDDDDDɤYF>11gZ9Nb= Rfqk +)""""dRDDDDDD}ODDDDDDLIQ2)""""""J&EDDDDDDɤIQ2)""""""J&EDDDDDDɤ(Q2)""""""J&EDDDDDDɤ(%"""""""J&EDDDDDDɤ(%""""""ddOb콴ɄWD#:l*-#b2QMX,1|U 2mEyCdtϕ*-oZ%%3 a2QhU5B^7 9]m LNX},P1k+_Ea9is0iԸ1+y┳GaWʔ -ճûw ^ >OI+nRDDD}ٛ^k1@~ҫGM`-YtM&swƈo!~ߋV^ǿ/Gd,DDQ%e76ofGN^i܋P"ҍt~>=0Qu"NcG Ʉw $u|!Bq5y=6# eW0}2~@S~o[{@6^p?cdr!nƬNXDDDZm߯aӘrknGhGM&Z} ,O( g8\׏'QpO29,Ճ('L&› Mis Gndp˖`9N39`،`b2܂k}9d}k{FGr`nF%0\;L5lk;hめgcSO}j9c)H=.F.՜ h!CD_Ǭg^7<Ljr h#&0 Ʌ-"""׎ _0LwO}0k#9vvyV06T% X[°?"OڧZ! 
Sp FO2D {Ob59XRvz}X9cF6޲xWs WdQo?|ۂc҃p]+LE=˜{DnވoHڌwU\߈wW v.mf>{5c3!W |؍!#e폳uo=\wvNfO'4z +mG>N}w_Z;3Y^z` c L=pGDDP?WE߿ o1Bom|7#*a?jTp?S] p5š 2ƯK6%ٖk]0ʸn GDS͉yۨFֳ?Άgc0.3Nظ+ll4b%?WSa ,̲)ڇ!ZF_p接s9jG""W+_0 U^ \#Ō7[Z:YY^DZa0Mqsك֜5kbΉcMjGBbn81D_y!ܗG~j~ف~*T. 5z;Lo`<ދmôpK84f.NGsx.LxipRf?j̓גd8#`z<ǝ~m9Ko{fw؎ۉk<ړlwx x'jl5 @ax$|x$3"npMF ޲8@ IDATm3#{ה?Afj/AbsPf`o=v^ڗ_Zv>a ;1(rϏoc%l ~ۛa=;c_"O"wJQmcEm0{E: פ빿f>w!v'MfL)4VX rb5:u㻦2|Uן{G 1a)gB=0$nq;63^ה i]ܶ0} sHww̍!;#Q8-NUm9k% m)fК(w?O6Ap^9 qnVtxfQ'qXیR,Fi%(oQrN~@9.1}k f 6SYXe[tω`J2;:Q{K#w۹9R|c%@Fkc^CgɆ6Q>yQɣk]j2jr'/pϻǩD >f!IwRw%?l[z.Ż;:Յ;??7͏ߑ@u,tʫE2#^^+[vr& 8΀k%rx$ysgy|U#[Z=:NƜvNjO?PZv2m6/&y3k'nh_<=Y9al.K4)E1'6m1L<¡$/sv"͟CCcr%/A9ͻ3u?U|H挙m\|l$+s6iv{yP3(_s$u|p]1>TPog¹aݳ Og8i%NqsVcm0,]꨿砿v_s]xڱmt6N@y[AVc3o8S 'f#3-L<og_Μd@ ;~ړAoיck3/dQ#oy ʍdSٛA@`vI>L䯬n8oojٙN+Y35M.G#ܞk^}IO*DjCZw6dfewY:k>t۶DDYiqvg8GqǓɊQ4?vÔ l㙏xdlSzg.)TԘ"//䓔Y&٣굘o!]/ cE#'pV + 'C(gX NNDxjj ew4d5[o`m&:|:otm8~fէ5NC-8Gң'tqr"2~I=mKDe?g(sk7<GÖL65:9:fiu!,Ng29=@(1Yv# 𯀿Sl1;Øpȓ'V$Fތwr[_o_ t`>- S^fcr6z{Amza%%#Џ m\|1͆ʸ\%?w cOX:;uh 'cky`f#0پxO(ٱ%jaS诮Gٚf,HS+zȞ<2Y T/]0)pFyl](o[_{ !㳙-ߝ&>=iغ扢s`X_g=KnX9ϵs,.]Tؿy6=<^^Z{Z>Amټ6Vdd2зp?̇/NU$fKovf|D~[05m޲NI۶DDY&{sr&'/'eA3=8># }yy= E7}V<Ü9=yq+c?f->y턱jX#^R8Y~j~ރOP6lm-" Fݦ' ʦLK0ml:f ?酹Nvn &%+c(.V.;ɿ3x[z][C]['/Cx Ǘ Q/D#Y}a,_pY]t{u'-J+kW~4f.Ȩ>T]>_r7 ;0}%:4ČYpƖtOP٣4qskswLcM湏y1/]8)۟:8SJ0 ţ|Ywytm4cxw,j'{ E?;>%z{`gt/5\3N`ӨK|T[oX2NEZ!!Si봚g?!ݝ *cy=ʽ/شi۷P^wNWYmPϴ7^o:.c𤙣X5bHH\ҿ[{ߕ?//ңFvKsQV-砩][~˒} 791&lMHvLUބr+k]g]7՝WNxI,\{nDk|+*3g?"L`?Ɂ%dfytܙMa-ƅ*eF΄gSTs66$:9k9zӶOcٔ`=w*WC:_?}T +={,gy$̏v=ߦu缱]|Ű QZdrVdǾKaVVC[8X_o[""sba~Qn*-7~F=^~[s5NkllS 7DDUDDDnn}>!G8^ije3. 
Os6dOO9TASk\tgFc;eEiI6xqC|W;J{g$""""""rc'i(""""""dRDDDDDDLIQ2)""""""J&EDDDDDDɤI"\ʲґ#G9Hdddo2 PDDDDDD41WQ2)""""""J&EDDDDDDɤ(%"""""""J&EDDDDDDɤ(%""""""dRDDDDDDDɤ(N|LkX-#2DymK ˽ }~d5'b ("ia%LLFuoi u1F8cP:[7<@ 8Ji03%lXww4ʋSajB:-?1 cm?T9}J.uKVgx VTſrvm1qUIkEkLc?OzFmu'rlR-0NߞbAWq ĿEy_֥4g 8O'ڳ7݄<݋٘3`7̃ \2gJ哑ȄDe.i<.[ j0Q•ώcXյD- 3zثfp{EQ>x25ܹjLLߞN c]ԅ&L\c`12Qm&rW0t 뭙T̩8e=z (?GNٺi->z{2Ϝ3[&<ʅ1pF.-$/bs rwy[^1Z]b:ZmKdLT_ͩҌu7uƤuW1(}~"rXRL,/c"Pϛurs" #Ljxs=oOtpqGK&سg#n5ulWk9sc8AJpY!<Ӣ'!f쵹S֎ÿG0%uG7uSܵt.6wS 3H;_1j*]g~arOM8K<7Y%Ni ns̷o塘L{->=#}cd5_Z=G=Βw9RnzdպKkL:LȹOKcrXKWR3n& qWhWj;T9ؔJg:f?F]J9F]1TǮd2cй yL:DP xR)$8I\Vቌ+U~͂A?0< bss&'m#S̙Nb-ù$/mOLm˩w~f5,dfq_& 8XV0GevK=/ya4'waw q˲}LD.M洎\+zFYС*EjI~^2rj3Ӥf{Rr4gvgp Qn-ߝ3yi,ýL#5S0׸~Yf4dBosn$7廤~t[ ⁾ǪleeoVټo!Ӣ֧͇2\7-`U8ŏ֧r0! Eg!S*{yg-*8KFg'3o۸,%+ Wn*iMz\ ?s5ُx;l myfeqzu٫}b/kc '8fdi2A nzn_98F]<&MgN̔RcGlz\*q-Y& "9z޵]ߠDw'W1l1~d%j~ѡω ty3j N~}_e>6] >' 徝}d==(#}.)As^g=;|ƭLg[yr+O]'iJM©m=jd*%@ Lg]{ڬ$<+`Xl9H9 4e'pYq~qkd$d|9gCW܄`%T(Ez'2ݦ}]JԸUۉ*>"cFc.U1*ٰWr9` -uјW`Ljc2VcNx{9dX '70RD 97~ւ2]Dfs嬛--f~R.mKZOyko2?h k/M eJo2S>ܳ Q].+y1 Xyt[ U / >Ct7f-8:/;TƹE${}LD.Uŝ3L^dXQN$!oY]< e;h0Nb,j5]ٶ{>]!szkhe5L aa>DY ȉ?yj0#lHJ–, "N7?8l$-q+aL9bwXYT˲@@ Kta nOq\yQ+mM-”TmAӹL8K66 7(i@Hu0Fyy3+$rxLbƊkm :Ƶ0WBC#lN_q `jC= >~mX7}oY57thGFVss9drl +q=Yߠwߙ,Ger̩e ~FP&roGmQߌ9l6nKUa|]9CdjY}/m ɀ9yyw$`bL[|4?;-^I>=UE {O}7| V~'B ߲y |'=C!{DC(gZbXtLdO,o] Tܝv-]ıB)mϹe\RHDG*eq="~nj^BqiHyeQO`_L:~Wd Wܩow_11^z1&c+TݗcSo,-.sia"-4oᆪ>g&$CPB E"]@PDQT,X^\T \&( X( RD@齗$t-O=2C}֚n99gdf[#{jnmNn<17.C7x?{2Ewݿn̉n1j\K!,J; ;;57.yi1)JksOA\̭e6McC}̦IόSsDUSSyn%|(!0+ӠLS^j@es#X1;u; N.u*""""""dRDDDDDDLIQ2)""""""J&EDDDDDDɤIQ2)""""""J&EDDDDDDɤ()fv{I]}SDDH&Xk 51ejMxJ1f*c/Q.L?vY\G,g{)2ى#cW orbƕ`:*+Ja'<δWUoI_>ĝSâ\ιOSadGUߊSrP?o^YS=LkEn;^9o&LM&sv1m*ƍv2 Mm_q4w/W mL &tMWTp1ܾ1Ō,̘Zmdֺ慺Q$B9l~bL؎x0e}'=w/~z1p-ԌL7V6옵bNDwKٯى6J{7ks2ED{1D %R1DFE1W]g:_g;_. 
n_%z:3dz?{Ɲ6JI [H}?Lሽe%;h"vB0e}ro^X^O{'=[q*ǐ+ߛ}>fdKYYU>:S,^o~ϴ_ n\wk*\, ]5ٜN<P&(8EU2l{^'PvwyՒng1p2&2I=<VҲRIꕆծGnf&!1te^3 Of(uz*/xuζIv22O Ӝ/XS.U*{t*^r>ܔa 2ekn7JJw{n>v=̅27PgdvvJ!_g}K5#rovZnɴx5e8਽T N5_X<2d?/X),~1$_1VؘJ =;NglY Ved6@Ʊ- usZ}K#mdX7,&`lvI̾Cq͆gYbO%P]ٙq30ӘܰO"OO ?GOKVI~oBG}yoo[2{w&B*놑睄Ǟܐ.lj+#p::fmx'œRB?Жl;ܖšuPgZlx+IB8gb-pڕh/gsX2O {vG1 7ٖʕCs,5 ʀO=pi 6dB>KeEPQf*Ǣxe)v1ƌ0x%"FCy48+ˈ%_}3dkdfef\t2%VUD65cO_⵿Ex\EZL1k$P>=0i3Z8fp6҈L< ԤA0X0 "@:_/d:V?C:h68YCWIge>_}s%?8ʖd+2 4v:.f;0iDi{]p7?3x(zE(c-!7#f\UWԟZ7Me;Whod̈́Gt殕ǮXK2@6ria @ZΜ 5f3wRƥe,hr: #xo<'=9 224`x)k\ ownJ2U_G'.jΉ:jƏ0֪lkf"w0u~܂'{xl>]FXI`nzэEhՉ`'0-;{l bc  ZE^<^-ya6܋^=c͉/3ó43bưGp!+8I|GLMLvvʿ(i[)`q^&7,3|X$&Su^!M΍-'ik tk7Lf%2ivcz*""""""J&EDDDDDDɤ(%""""""dRDDDDDDDɤ(%""""""dRDDDDDDL(;3࿛wc2>OA*.N҉)?@E&rǹ8 ZB V6.*a WZK*tX IAl:VUj?łM)WrXRز7Faz?)q.掣m?#ƹRudt΍օyt5"Glqu=*26\Qy٥Wy^?@d^{ ڼ/'F2C_nᒎ\~ MS rv?9fnLJs <ָa@TCbGp:={?`ig)üFրjш&ܕˢĎc(HOгVOX|zҳc4hd2Y(AFDsj/_-o5qe 2Pqlבe;SL\8aMHN [6>N $-ԃ=d1MĒm?o#b J|ѓ1Ǔol ٔa],: nmǘ1 \3CÆm0@IZN kYWŐx21omFY0~X+oQvay#48YKs\be0 },{ח|#ûhN؀ @\X3 }]dS'0׌Fo#q6i)hs.:.Wo,)t+HW>.8xJghӐkֵwl2JGX.nMMDU9m9C|E\>?Xrf{?Gu Y?ӥ1lO9[jO|˳]?~?u~}]@Ijx eCSĥ_$$;y'Ivw[IJ%WfƲ.R?oRHr2&9̓n8dץ[ZGD8Z FHܔf͋1%"[R |!LXP|1ZfNP󼟕Y41ߞsOH+~ǾS`ئ8s*>JYKAHqcl@ ,= pSYtaA;}{nGfOnNB[jߗ3we7y7RYx]lNuJO#S6秜J׸tWBVnTL8NOw5fs+͈,\39W֜$/elteLkYᖯuF5pb2Ffw ܿo1,!WLnVGdžw1p[ }0]#PΓPnb:)WU,N@W dqK&Ϟ;&*`T֤$dp<% \yߐy^kru 2 1ޣT|H1JhYƒq+Y{лDшŅ^yG1 x\:ǯ[gvws|LFI `µ [o jy}eKW0W/,L~+8gm86]x{;Y/< Hj^ԏ_ר{qg)6?>ڇ+i0S}х8tOl v]o~|\\qHc|=itwʹrPfugR/r|wKDU“`H["1,lA-ۃvp?r9N|hnщ;`}v|"W}AO_Qw-'1q}'7I,W'x+61p .P  eNXˤ07.]}h.t݀ucNt1U XJ aiW=K[35zM77fFzmztC- !^W|Bfyƭ_%7K!c-%~KjMc[}!v哌:UWuu}"qh|BAXbh\<2M/0e62swJ]f<_˒f|}ۏ̕iP)Mz5tgcp2&Nd=Jük\Ť-(Q=a q"rKO8%7`#"ʖY:œd9s5Ef<}q|Q_g{xU\6'D4|ě :U2)+Xkj֦EHcd'嫄>o4&6.bU7?}++g n& ɬui uHs's8Obz,D}6klL2]MthzhBdہd݄8ko˸QnQt|S>'ū]UD)V?NthW8s-ЭF4ѵEt&)c&raUW, ՒNڶ،9Bv6l_Ys*K7͘,o<Ŭiޘc1.9b׊cx$ő}q'XFԣc;tyܼc 3ͷdx e981fj-צFr}َڞw&vC$%lfgzvI88Jc&`LlXY;^'r~U?"f gC]3@[*=_ ?|s"¶nKO|1)A,=xA\'8(bwƷbȘpҖ]o~bP1H)cᦏo&{fO(OVqjj؅ ϷȨ`J 
HYsZ;3-2_v"0區KWt"r;_~ՇN$,۬21}v%77:e/ed/2v}ol<Y\pVӶ9; vcbۉAJ^clۉY&_A򄿛N Mqe+f?>z%'K5R{9i͛驼4]9SҤoR[6'JZV*WҰگxऽ(h_r2Xwh7)iN₳o;M7'D.r7 Nr䂮-o7pŻb~lZA검O:|Sv1uWTs9Gd.Tv?>wN|UwaF*EcR@]9 w{67eݏmޜ+yf\l .W֓RqΒ|gdvvJƹmyP90s8|1],!cER HZ}9ka{?e0&I/?@i)6g2 {!~Dwk6pfݷ$Pm?Y_+fW喑LK|~kYv8uRϫ6.: 9 1NBnlY+d\R- ˗$uDz26?{%3aW[\T_t 1݁tsДpk{T*.>&ҎŒ#L\)CMg>RRd{6drreZ6Ƃ ;҂TBӸ$.د-gƖcB՝uv=(=F<=LRs퀝̬L5>8{YMl0_A^g3{&fѴP>=0i3Z->{ ۟Թbnɼ LPUkob a{a./$/_⵿Ex\EZLѱ`tRG Ύ4"o0N]7.(,ns>d89K E?d?mY+wC4ӲF# fzgMY&xz|. {/)gqِA/ )g #`de#0_+i?p )Yw#+1VG{zLl^Orq4 Ihi{n:i.xɂ5X`&Y=*. .ഌ¾Q_' "<neK|҉?Tu|1ǻၵ|Y7 'C[VÓxiM5٤ "jLl?>a}\a^qF|?'zSl ߓ!]ޝY{ݜag_I }P_FE!9Nm+hx✓8!"w9;Y[.LXc lYs;`\_qAFޝMߪYo(D`"䮞4ƁKQWe朎f$_7PVo :`֭^z`WMJ2-Kۑ]w#37KT, `ΜӘOY7#]=iL;Ơ7دXy-͎u?{ƭ xf 4לO:'XWą|ŅÞBqk|]&3B|.Ƨ#x`,K{^kvLa<`^+_8*W}a{@߼w>i5 'zjs:/ddDi\K,Rl)D_f𥷣zSa6Ng_n9I.':Ή:M )7k H~Q“ƃggI(4B:HJdßOE*Tm} [ݏ13 YZѣ7DŽ݉4a궍-_(ȏmzPǰ)6ٖЋk3nԄm,:DNnyZ8.)_ª-W=կ/~>f45y|>Ť\n;O 4VpL %9F+dv%XcX`$ )qe}߫ mp# ߃&u|k;~`Vn4-Rs0=q'k} SMaʖ٘a|LkԒs0iV?ty*&3g0Nc2b!JEy.3k,jˤٍ(9\Mhr+JfS!MuVFR$'{;469"R0B-/^ee3[RkB ͷit#)?K*[73/&N> $""w7ܖ3ȋ45x~afE}ݨ>sUyT8;WN)ǂʖV")^Ԭ73ѷDDݙ~76/fՆw#5\;)?PAL*'֩IQ2)""""""J&EDDDDDDɤ(%"""""""J&EDDDDD0vADDDDDD Bw&EDDDDDDɤ(%""""""dRDDDDDDL(%""""""dRDDDDDDLI%""""""dRDDDDDDLIQ2)""""""dRDDDDDDLIQ2)""""""J&EDDDDDDɤH?K~!-IENDB`rally-0.9.1/doc/source/images/Hook-Results.png0000664000567000056710000011344713073417716022407 0ustar jenkinsjenkins00000000000000PNG  IHDRYzTXtRaw profile type exifxU 0 C=EGe4d_Nk}H!PluQ Pn =S8, vŽSѶn PnKgu=M̴AN=gU*T!o1V7W",@r8 iTXtXML:com.adobe.xmp C*ZsBIT|d IDATx}\u03Ƚ:h:h+dP&Vڪm)jnuJ7+-֓mnzM;bJ)BŘ`b(r 3? Kf[uw{}/kET*JRT*K"Z/"""""""={> j^RT*JRT*/6lFvEDDDDDbh|EDDDDDDT슈qN^V*JRT*J奕~KFvJRT*JRyee]:D?O%11埁. 
qyo S#0X7y~{ُ6M/hÞЎKu3$5~-ַ7GȀy-\PT*JRT*3<I8JYnׯg|\XExaN_;mv>t{3~X;ϹLП_qnKtRT*JRTxСCTTTp[܄#](p'=*'L<1YT*JRT*O3:g?w7ocwů~+u$WĘZiD{A7r]cj~&j"CO_!CB` 2S?1'7> k۷c?Sk=&{d[X?ȳo;3eG_OBN~Öe kٲ};*B^:ߍر[>Dž ?ٱc׼i}6m'<oFL34C;O?k6ݾ?%DsJRT*JY<O<7 ${,pw< _y|B08ޙ5;kZ/a\O=\F}W_3kn.nù'xT/nOO,lL} lh?~xۋ=12^_̊~MG{w1/pd˦dn#Ǣ~̝XE+JRT*4nSR=~hg:jع o0t]e*ssCtسG@g.޷/z'^|!gopJ t8s N}e|,c6Q]',bδ0>XmksdJyU|hu&%lw: 2`9ҮaP_Cei)eexg,J{[L>{zt)+a8)-7+2kz}T*JRT*FvOZޣx3eL\2e/Mw{w==-\Ց͘#ÀR- 4cӽ\ڳ BuQ7l L'.q=-EBɋ/ƘGs("ױr])^0 VV?~jm}~s`$tƛ[tRv'crv?ɶm_z/-G})yz[}fҟ70r;wnfݚU|Xsq""""""_z9ZQA(vs:cy4ZkZSޒR9;Z+wW|NI ‘8LP;Q7{zCU0 ?mxƐGcxc1m GedF>LXϱjض`I{VvoVz {IM{;޹i73dpFN-2ꖱ{Xy3W۽-e-X212'Gy}vQ{JQw:Qp՗nqܜp)_/l ,.X)i,SG&[wsA 6aQ!koFdVT578Iނ, 2ހ oxB!m)\z?Svz)-kav%CO_9y-]΂w w`PQSGv kOͶu6^#1gV\LY軈%tG#$"N0t$[Fc̽ƨhf>g >i9kc CR5}1mᣤ&^o5A73G;$}ʨ)R牑dڝ/GNk.#;_|b9a73THim _OrȁYH;^Tya5}_P20YNVCF2|:o7R1G 0~W*JRT*f1玾yW nv{7|e?FbuA&oSH+?cџ= (uKT_ABM>ؽxh<t}yX,)fۋx1s}{ ➚ţKo&)xMš /!>ZܵO1wpr[7=R_wτ+j(E}|o_zEaa P_YHz/ZxsyzJs΋opˆsob@n4s|ԗsNP{βy!}Z ,YWrEDD;idWbcgf$u٫./"'k=6kynIFzL+%V"Ѵ9YgC*L >~fLHpes,l5)ѯq?-ci 'i,F]~%YXW|n;>Zɶ%ȯC<4k4mKcE~&y41~<-٬XNV,""rF4+r1Lp?1Le<6m]ɲmU/9*&DfVʼ7Kȶ ٵz @*`m1@/h,t1N3P y| `<4YCDDDŮȿs%Ll X|*^EQzIĜZ G$nbjʦ7艉iEY[vwc8g0:9\?Q^R{1)ڶ 츂'3f|*-Ic"mJ CL/ߗ.ōӭ#[cx,dpc߉U:eCLm6d xj3͢|,""a4plvV1`:x<7o@T] nfkS&MHGfySʢdtkul~c6謁]0iTo[VR1k"H[?!Q xKx)D|T-E h㋺f׿fa{c˧G1$>l׭_I< ,]]7c6@}r]31`j\? [.DvL&&`EIs,\]#!:m&&i{I&K궲>ԓ>J{1E-n9rg/jj&eX#b,Ta8]ye2ȁ5JPDFo]]A%SIWyeI ۶E_N{$ wUhgIؖfRZEyˊ~h$U\TUܣ:u VP93UdpG7}`Dx[ ouSpF\Eky3 ub\,e@ImƘ(. FL .]7"wW:ۓ8g'[*&MȬPJ#eժO) =N*tذˈa:4#ZA$ Ul(p*<"*OП'5Xk8¤S14 ekjKB k}p C_RS[Aaz)(vS+58{'v8)M6MOb5%@iOh+.oulNw,TZ3(tiY TQ6tB b f[l Jbƌt_a Ș1MCA$͸W#H3^rծs^p?7-_3W9۲lv\ǹ k4[bl+Xku2<ϟQمdNj ;ppnMǐ{(8ݔVbMce⯫>Q 7}k0ҍ}8)==(!k#[~Xk R+֒aKʰO.FѤ>ih&6']HxӍvW+ =KE&޸bnnjLbaIm_^Bv LLwjȨYhI;so! 
6nPs՞DwaV{ZwotP"eWGw:qiGc48Ys&mbm^w-\̚rQ{-dɚ\-isbmLQ&Ȧk=)ռX1o ,#{ٹkE^v Qm$ݮa$\Օw [H@W ANC'Hu5FX7_"Н}MS6 *f}NmӦ|IC[' n鸓s~ >KjvS h/t ,bBiUdZtXZ-ʌX̐{v2GĴ2 [k4n9mNJx6 , (ʥJLoߵc]ݾB-Gu'tgSi&& kxMQvISkxnqFcsnk| ~sQӗ1M`B|V;f, 0{HrR"O>58WkdWDDȮR[ޟn MIٟϽ GߡzzGΤ WҥkT,n;}1kofAߟȀbDpHWCHާ(*a۫yO?>9wI m8jMMڋ̀p8rpF-fN=SWc1i<E$W粭ۢ*/ߥP[ oG¢,tO|:jݒKEܽˮ w=j(_KnV9.*ZF'pAg}(;b҈fRPL9v.y.׹.Ќ'qwc[$%-kL9lm^f7֧CK|5bHΜEgf3c(R,ѾQL֒ŬiخƃEDDp76I};vƞ8}47Xn8؏<=;:FQ7̸eXF6p>:GRIz:DiM]UjJ/LDsˢ$;YC,1";p((wqj!dZ:\S r.L"ZV(&Z9`N0A4#gQD3EWA|_= }cPLLsp]oy2'̓ӓ>Ǜ`@5r"O9ϲAy IDAT2HMTUc7a$82a$UXdW*g”G]8|s  W\J\g`u1R}h&qVX~藫$̍$ydmW;R2Ɠ8h̜ۜV jJOah{3Dߪ_gfeuѤ'5of|m?BJ̢SGw(pA$(&텫ynecv?0H8.\%df=8N޲Gb^ZʾEēs=|jx.""bWRgT؛,hziG1%1plv~]XR; `%=)&{VWnQT摹a k6ll`ƧaKYDk/!s[CborL7+ UmyV\ԇYDB:MV|FčW|㥢 ®mXÚ5ȩlj 6Ē̢cU!k, DfnEq}<3IM6ƏbL/++^ZCaӆ\,M6 (ɑǏ1ˈ #wsZ~TkkAXZ9'^N LbFvEDDˠ.X1%(a"H>\䉤_Lv%sƀos`FXlʿXwK)7LI3Y DcNJ%6NXJqb Ujn INTTb}00}i Ŷw%=7}Wz"gz$Y0d(F<1gcEd-"+>qDt SYYu<+bW:^kymo7|-h\H=W ŏdne/͈SZ*8i,˯&w#UAwN93ZJD.r Ulq LIP|oJXp1( "i385FO%)erWgQ>p4toQ`1tuȹ-v9 JDlۻYO>amqoG66 $6m 3eTQ{qe떽{rkj{s0ظ,CZ""""""*vϒWxKW5`c. 70>8^`-9k.k`ݺ<8}(Q}⇔dJ?dØ{g?|U0W0^^fb0ҏ.-Na懼{ZJnq=: Ԝsv(w`Ǝ̽iv!PfN`lj'W` :c ]_#}$CB &3rX7ܣyHlK9ʐX"$D zhBẤTJAtj%LMt 0\%@ٮ24RD]Q{:4HCܸEp7쏺vߌ%*'"1Q{vTό<:%s{-X&8XB~Q J)Ƃ9G~QGw<"5.Ͱ [&VÊ1iGĜPȪX3f| = X &6-k3\E|Ղ a ᘕ[K"aGr{bMp0"""""""?m釞t=@nKϑ#GWGGu\*<6cǎx.OHHf""""#\4{cg|Zu_c[2koQ> ~ʓӞ$y>YTfKSr]}?,/V"""bWDZ G4r_dߟ;W_ɱsx$VZU |R K$""">\\|UIhZ\x_CWe_l>;u"`α,ޟ{YQ10"02#/1uU m@9Wp/xq9WbgYqo佛ǜc=3#>ocV{U޵ ︙v+,߼.x+9˚?`9s_semCN' ~n3Ug;¤#ܷST@&ŵzJA<U̜ɺqzn `U=kowJXxe8w叟rI0aR7fEg\Sw"zzW*n#"""bWߗx c턎B̎ĕ:}W x'Dže_| fUp=I\a:$s9ǂ!@hO+$0<]\z|6ӣ8zJo^Ή/1pRtˢt<+s L? 
/\ƣ/reJ X~ ݛ\lxn}p]y,wqڝ b {;wڟٱjr z#xr>ڬk;o㷷mpǔpM}\èaš,Kw5WX'l< ]Q+">^\z!eh<{!-e)qq,rII^ j Թ#C3$Jao>?H(M'֓j; <gV }blHaSKg^[t5!M?]~=&@G)Ģm|}l]% q veͫ vcL8`".5/>dRSgMգ￁ΝM0Fu<46.mo 䓯XuNz6`܉u{c\ݧ3W08ࡏ yLo"""bWDC;%_L(F4(y!U&X聗 z zp%_M)*KF|[(wZ۵mc"z0]:LIU0z,#`of1qMFm伻ʓ}Ey 44^o<'r7٦y/eb6 wtvWt왃_|_慿|Km W6Ӹ\WDDD쓎\z63Y_W_Ӭ=r8 иh,^tX?U UXn2>*plI#oi¨VtѿO0=sxg_w}伷۷Zl9F\r7$qhF~t.$&)]J9{yue\}4fNqy>w<3}:`Ͽ%nϩ1NG]7,Ťz3;$ZZEDD 㡲ݎQωߟ`"##?#b/xeW &6G5Mu;19Ǣ:W /S%GBoeS1=Gi9G4%4RAWwDgpf](L$L_x{91Ca~B=/qc­<<ՂGw>ᅷ3or*^KCԑ+?L=8n~B:qQtN̺ c?zE#WvHE-/-Jr<ڍ[rkGb$6*Nv_JL`֍SW׋xdc5!IDDDzsNJ & @='rv;DGGC΃"~:-=3.JՕ"""rkP+rp8"""""McV+E?oV?EO T]"""""""*vEDDDDDDT슈pVxsj6cPgnc!?}kߓn0V @""""""rݐJ?/dOkŮ%iƶπ󙿪ՃzDDDDDD|tc/xlc8&6h$nHoFv [#=^{^-!Οd`l12sn:֭aבcCn84%|S1yl԰mO嵻.#-<ݖwМsv\7'DE q=5 }wQ IRo)3G0k=>W;ga#8X^v$_EEp[!<56=t7CwrGёoқQaw1BYv ] `5݈Eհ8(󮨨Hҥ >><`.;?cg: ऴBNPӬmͷg2?7uS]3u07?h.W>eA- ,0""""pn.=)Ci}ؼ}/ =/N`L v~ cG: J0DDDDDD.Fsl c;Gu=}'= fqz3R^3 uHTB@J#"gFvEDDDD?uQ+""""""bWDDDDDDDŮ]:.ݭi8ZKOuA?@[Z]~IDDDD.b s~~~x^RFU7 E @B#044w&+F#w^߲""""r^4fףT*#϶wsc0!^O.Cp([77g4 (ʶ;fw/xp`2qFaEDDD9^R]yy< x/AaEDDD9]GpJey\t?{U*ߑ཈RDDDD(v=xJ4yx<{_Fe`[d8hN<_#^ֈ;6m었RXiÝ\J;T^Jynx<l=׵ bܸ@Z_x*ʝP2^`O:b[Ev6kxrݍ2fn70žMx=4^NmODDDD)v+ߕgzsi*pV~[j0[ t_ɃYLuiLS\.㾥euM0kض}3_z`V^F&OݯX]hJ'i py ҟyqn$׍7&vpSGo 5a0Q9 */x=bhkpF^ƟǛXF1_e^evmAm70l:Ļݹ!. 
.OVYpVՒ2_zqCƒp=v7)F%TN:`jo/S+""""rjs[(~3͋r"v5>ˏz6ky~5=.s?o}C֑pE8!=!!XK {S*lcyUwn'aXO֖}/ϸ<_ɯ.lNc:kbB~?%qgL_O9Olj7uo)k"z7Czi{oc{-Fsoa=-Kj:ڕY{Yos@>+3_⤵s:6-/)kp5_>v{"""""K{svU.3""y}مӿv}ahrTo$Q6ԍYDDDDY|N4{3uB߂TӧC[95Don4TsL/?UHmQEH]7&oLlTZ}?8K.ݗ\z)qL~ucհ""""BT]rɥHSM?cGi`&on=GQSM5=TDDDDDQK8="UM5=-K"""""ߴݲ D!M5iQȵ""""""ߨncſpUr"'CĨ@DDDDD:C,.Q®®®®®(슈(슈(슈(슈(슈®®b IDAT®|Eڢ:τ5 \DE9) ~UKpD%U"rT]*_MU+ԓ""+"_bΝlZIS-Y2XCݤ^Om[t"yU9 FWYS՝®|K%Xlm6`;`(l@ QRoi(츜 DًK *HIɎi#!ʄUDZ@DSf+-6PGn[-!ȡk)δa"d]`9(vL6.L&+EUEd>ߣwȡ7Aha3EiwSRjɳFaq[LD%)X(ץy`DEexCa V~&_h-o>|&lU7b]JLX, ɿ|3UTp Mlڹ`oq&K<xOԁ*jK9uSM y(ꦄJIlps z˘QΆmZJ+M*{5OzhAQlj *™pNkm6p`8tğcV6&|aAAHo(DU~"&{{.'2"2o^lZb75s9[Q\paÏeԱrS8جv2sPW޴8l$%&T (ץ{rk2a1䡬6~f³sޤDjR뭥$ej4b/SVL;W OZ>h/&ަDjK)SUj8T窤J(SUVJ-V&5g32jUWA?'֓U锉(ȷܟV|\,L%YbT w2wޚߵB;&K4K[-H7զ1;SR܁ 3K] q[rdd;eݓIā3ci.66N/M ]SKInU 3Xc*_"A#"ZL 7zhtx7WDDsԲ+dnS3.]zuQXTZvEDDDDD䢣]QQQQ9E@o*v}yC"@8V!3 91EGaWDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDaWDdMz 2g HD\IQ'EhlFN$>Ϝ>)>(슈E(ZE Yx`6k0}.d\ Y,u^aR^+SX%aăsƐl<†0%sL<ث4&l9/ Sh!#'[K5HhُFxp#kɚ7VCʵĵ4qa*f\{=1G$aQQVJwAbwzMĴ޻s|nNbVVBQ仍~Ê]H{ӯelV"/ Ҽ,<[J -ڸ<_i,.gN+to=yӂ.RIњX_T)ʚ ocqKث8f9$s#,^>[gf-R2Xf̡*U2F!®|Ҙ~w K(dqHs8cIz@FaɆRG6b{|Qc qƍP1iCG+Vɋaw) m͝A8PC|};0CPYsj W}fgXӟ!@"$Hc!w5xa LK 澘`4k!`5b65RHKl2D튈tFzgWjF?Y_[PG$Qs}s_q X*{GhP0y*zm,^YuE|e`s ;jha׶jR~"ZvJL"}3`V pSS~r=~Z#&1 p8n)D`8>uOL$cW 4׬+"YeWŗl>n}`82kq1$Q4{2泮Θz7Nͽ٥I#X^fw"qXbZU"r>Ә>)&t7srOv0)IŰ}k!715Ij Ә> fOk<#X r_W5ab0tZQmmmm*op 2tR !""""!eWb[ä뉝A7aW񟒟 ͸qƩ EDDD䢠]N 슈\>Oˮ®(슈(슈(슈|5YDDDDDD.:j]]]]]QQQQQ]]]]]QQQQQQBlr]_:k2t/|}{bJ:L""""+"_T,͛#^kآQH\d5-l/¨t5m%u! nMgHLNšט76!W<餧;Iwϥ| 仇9|6. 
z~V΄ pSIw\n?T0!ʹ)pG1|VօtEDDDDaW仢Ly̬/S^3ͫ:B@(Јy l,ːW/Q&Ea:fog袗(//[wJiOrӔWan*3Dmφ({8}COKusչPqJJ<]"ZS`\޹[h”;VE3B VnI; 3h5 w2uӜWw(^yB'oox8/%`fӊԶX+5'&blQ]X,d+O}80FA{Ѓr0Ƃ4 zm`g^ͅ q@8SQG]DDDD>?uc:Siym9|L_m׸[֭5o}muW:wU*c"on[q&Ap<\c< в [\Բ+rsȿ<<>F_|eWbۙ9<wb5xy [>Pw2w_[;mLN”t'F5 NM"NQDDDD6\LԍYDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDaWDDDDDDDaWDDDDDDDaWDDDDDDAq0z+_b,F D ql>D9=Ÿwqը~sGKއmmLهc`bʃ~=hRC2=,y8eɌ[KAt_JLkPPPtK`RC[^w!|۸j̱w!Ŝ}8UGj#0L#EDDDvEsxVob2)Y"}O  486#}.G\A'SX9FFO0\}{ˇFﺏy^! OBń,=/18Y-龏||z=.H\UWuRyw 4re2CL^h)&ߋ={?`I/3^{7fJlK?+>:Udtxq~$&Lt?=ݿ{+xx AILNs]ފ-4oa" bkHܼf&mdmgK]+bAPcw: WP:t@bR@d==8g!]-z8+{0WR5-"""+]Qi"A3tDRTyfΐt7Ɂ,4|D4B\8cK Dz/K/cMG.=՗1*2#z:.e_~ *å'Ysp1\ۃkEvEDDD&ҩ0Ä1bur6wp3u:q0t2ǙfˎMP%8 SnKe *$;iA1r+ɼ"CRXGEK2CpA1Ꮐb>m% cv吁;oton'rG{X<11{je|);u8><~FjbO!V߿"Fȗ&,.AyLL~)'n;3Maa#,Sp(+Wr$ZBX2(̄_grN?h㌚8C`˹v lTY ҟ \qv3p}"Loא;K xs_n e+/f]| ݻ:>vO?'sn<5U 3uԿp!;n}?c* >w&/^UgN/=T """"""r1Qˮ\4rw\TӋUݙEDDD(슈EG/®®®®®(슈(슈(슈(슈(슈®®®®®(슈(슈(슈(ȧk(e3ET*Oӹ z()f LQQcv򟍼ڎsRhšW~`B PڵAмFI{љQP*P^DqǦO [/YP^HqmKhLdVQ] ;M+ ?vrvJR"QoD&U4}(ye~0E>L&,EaU<9dlSK3nd[-.V\X 6+VSnjPN+ v~!|/\8m \oU>r[*!8vVYٳ'csJ:>d:,$3p =3'UŬ.+"$eѱCD?J_[K3g:ȯ”DO\.l g /#i'EĘGΝ;)u[Pi3aH xO `ڱ$<\N7MPUϕl μ2M.wc!+" ?s"]HY FN|8{TtS.^'q&6y#^/&e{4ȫM4E;dD%ex=IrX*/U%T/oL+M(t&ܿlϧwǑ!!>r3]9V[5Rf-!o5da\FZWA?\7:-o"""-"ر5;r&=`u 4@o IDATr5 &FR$Ɣ+芈ȷZvEDDDDD䢣wvEDDDDDDaWDDDDDDDaWDDDDDDDaWDDDDDDDaWDDDDDD!N`*v}yC"@8V!3 91EGaWDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDvEDDDDDDaWDDΗP=EY3 {WC_aeRI#!byٿ53ɞM"<-:dU+dgeEv4<̟EvvY_ 0)k fdgϻyO3*A̟DVV6YYX\g˵uT'=;~❾Ђ g߷L9#dXI_dgg5%[;GQ'' ""rc2ax.֬vrxW(La Y|+Ze,[܎v佥ypYƲeV`l *R-G/_3ɺ^Oֳ]/M|m<<.OvEBs<#yWp k0%o򫧯8HA8^3k<#z?e7s<#׷[5ɔB"[d nA_g3?s*Fk^Q'yOщIVv6Y;KͤIYdfFa۞J.}Ile'O}+9g3cj6SWL"O<7#!a fY|r-s!r% &ǵfH4 "ltp{H9q?؁'oT&e˟̧o$ Nd,rrY'­ yIXl @Kӗ:>$[HmOl^ h#m>TС- Eo0n`u{fdЮJ}H2Tl#rOt.nB.OvEk}Ƕ1 O?wdSG{dSW 41n[Su$^*Ku?iNZD忿˳&7=k+~@Pu0'In À/f#`0`$|ngHbĜlx/gE;z Bшimsb1ZܔdcFv_۹,z-;X2y*o\;0v|0NJػ|/0bP@cu^Č$At]Z;Je,ӄ)j|A?n)=o{:֮k^x% kT7䧧~B?GLLf/{MėȎd{QgeE&6:4_Hbͬ Yp:nJc;mjvaLI$V[drd濳Wg?nI̻*t%%`Wc%bs5H 'vMajϭ{$ 
xn~T٥Ҋa=1¡wܔf`x#|-\WI#䴇仧3pvoBCg~ILzg";teNi } .5w'fC4==90k1M&{G3'o8} x&h'S!&qs@\* fcȈ-5a&C;ŋ~d<wg%9n5/l<fsό;Y3b qmy)bZ߷>f9'AT[[[A-DDDDD uMݘEDDDDD䢣+"""""" """"""" """"""" """"""" """"""" """"""rq슈EG-""""""+""""""+""""""+""""""+""""""+"""""" """"""" """"""" """"""" """"""" """"""+""""""+""""""+""""""+""""""+"""""" """"""" """"""" """"""" """"""" """""""@@i˩c㑋[O !a;pm"< {E^vEԛG\=3y)E&5F磈ȷZvE:KIa=z9|MYxȅv?Ͼ:""""]N Kp~/ُOV]'7Pt-s5: u8DDDD[O-"}lj~x&*}L%""""yj$bI1 =s p +Ix.#-ݼG\GTV58Dwg?;#ҫ+M(@&P:},~ ]}9ty >̂JňXf5fR%kd?o>p$Wc쟫=ލlX_>ݾGʰ•ac}E6YM}CH.^]u\.W1|ۓnyoab+b[W[+@W.4]b'-5KOywJֱpoF^){+_ˠWsefޯ*6_]B.}GINnܧ& fruvR̄|۩~KaH˹vެ7KINo ۩ۻlk[q=bپc3;cgձtwP!-Zjoupe@L?{ ,O| l؀_|6.6j;ߤ! W\RDDD[L-"B(ba3Wh~̈pY"at w.Hs1CR\ɾ5tYq97LڽE8Py%efdo]0_wQu%`S=o43dKήHɻvs3#+zQ T_΢Lv 7S=ރѤ Kt= } txݷǐ# y>ЧL+9=ԯ#х+rʛ=qcN :u]+.Q??Uc{+y0DfD=x} ?hFOW;P""""tK!#3d$pjޫ͛i8~l1JS72݇TV89 cs}6,k陜Bus=>[Hˬ{Zr41n?GÉ|;1$wMF^ AQq]ORXH}~@]M}t(LjnW,}z͎GYi^cدSsq}j6UUXzfs\7 ]5S^=7s~3on?&WcH=g#i6GZxa !DvكK5x+˩ te$[pA"""" "$O\4]rC>CO,*f>e!>T2uiN7֋l*$|NWs %:=6Vn3`Q;xd +!.7fM.jL],{v=<\oګ]@^G;a{{_'t:DDDDvE2"<7;xjt#ɛkgcY4GZ^39!Æ3n9k jCv-z:˙AzެfӾH`}#nSY\cqp}?}c52l ټ >ӵ/5{][NU&z3xhFm bڽeTo~}@)\5f5 USNDDDSjkkkS1\܎Xoƫ@DDDD䢧wvE.vlZ_d\v]nP7fMX&@(늈®tn]pxz^7ߌEJEDDDD'4`eyS`k$еx+ @C҇u3;00-a"! ^ ɫ`4568z{8t2DR=U%7SwŮbWDDDDDDD+""""""Q슈bWDDDDDDD+""""""Q슈(vEDDDDDD+""""""6׾,P(kv6~/.#,̳7n->'w_\wX]"[{X #/rJ""""&7{zXwf7#{gs;}L^_Znm3|Ĺ_mV-p 00Qoҗ"}2c^cqs,~2^fg̍߳|ghO C2+7=;mnrc.Cqbnҕ#G9uf]cרNc34f%.]ciBv{+>x)N/""""fvEޔݼR w[ei͍['>]Y!f}3|tSΝeH~?:sy,_`04{cO;E0qol01wOϟHrX{ K ?q"n{!GV¯~ٳp?=}\ʷGg> N/""""]w)uYIfGo#u1G&l/_e)L3>}oj`LL3~ |tǨ{SfwؼyI^R;vCNextg}E?eu{qFh޹nHoR{gMNr h~)aa?3C3c3sǙeһl rãrh/WC{p?c|yyZ,FL蝘jIşl3޻`8m܏3X<0>+MԾSey.;:i&,=ΘxmA/m:}'v+lr)<:ȝ-v+IsX1w֏aN'G?Gpkl~{Ůۺ .%6\3p8'? 
-\bȇG 7F9ȭ'Eissen vzc=}id_vVϿˎ/""""Ϧe"mV"dyΟ8;3׸ċbHO ӻuKU?pyމi okWaY-V7lywdѡ  epܥ/=A6w\W!g;X676708a5z/9(vE].3'8:>Ǒ#e .ֹҝ}mn^ȥk+ll܎Xl0}]ra}q#wi֕+[F_W=<'9qN.-ҕrkch'xʥ\A{6.sw~oQ0cܽfmc|.'f1?l_DDDD~-cym-_gy7l%NL~Ί7 x_k_=4;u텾~i&f4\gћTǤ?8:ɥLzIOW'Bg<4gjg57v7=sLkjs?|{c3ѽf8{KW_@9:öEDDDcy[KykSqk+~{)ǟ -fvEf۫\*i,0oS|c# ]yyf IDATifW䭶Fr"w9`QŮ(vEDDDDDD(vEDDDDDD"""""""]Ů[.R00 f-E\JT>v!ejMk$?\bœeK.bWݕb5t:G rgbNԺ-ʾGJHiljb3E\͒of1Zf>zWDDDD"ҕt00ZUreYXG>S#Gt"f@I~j% @z1,LV6pB1ML!Sn=}~$bZ95tF`%g5(7:>EDDDD+n?l àG1VR(eaAբUg&>zej% }v% :aLզhu|U%[G1q+$COo [WV26MN++""""]wSws$lo)2Y<#7n\li$1QxDOʄfpSA'YnViZ9kaMa;5C߮zD,\Y/Sn Rp i@@.;.VHK=F1CcRSJ%q?FB'jWDDDD^=슼̠Dѩ/.ՠXnId)IHSkds&b v*WZǤn [cgCw]',Yr8)}""""l)OذWKaeZXA[1i,?E.oӏShWkTPZ&R4 R({SNLyj?uIq ܠ LY3M,]_DDDDU """"""o슈bWDDDDDDD+""""""Q슈(vEDDDDDD+""""""WӠq0zz L7C-)H(!Z 4*9<ˠt Ua; sCI`9D_(vEB@Qg5""""y7Y*qD3hvC  <+' ܟa~ 0wQ\yM""""y$MJ<l WR(U[lX6 jލӠLẩ+Q슼f\!xjD^w&jN.O`}ğ/'[Qt H3?YYe2,˼WT|`m%2%b -0a~. Q)cLiuWfHC*I})}""""c1bWDDDDDDD+""""""Q슈(vEDDDDDD+""""""WӠq0zz L7C-)H(!ZyGV(yC _ܟv~γws"""""]7GBT(R 챷סy?O=&sxlS|=x*Z|!&Wp1bqa]DDDD"Na٘F7L$Ɵ*d$L (6y;S)5J;)|E5pϜ<5;{o[XfxK&p]R*=۴T[9|EDDDD"oX­yL?Tmb>a)3,0Yˑ|VL'@˿hϘ&ZC[HbZ^VAɱȄ- quhTXӝՍdMVLTH(dʴ|((U>zD#U'zZ[RRr=R#'hTh1(vEVJC\/ ,P%4f6c)|f@3?9zˏ úf׳/M6LsVFl`4o)6m#:qD+ 14}`6$&Id1Qp;yBЪe؆ia 4+""""D^sIR]*xM^w_~5kzыpcHۏq Qn> O%5J!(!7fvE^wF 3UnQHeXRCq+; *av?&p28"NP Q i&@No8.;#)YrclX~bDDDDD+&0Tk(XknLTr̐42eF`YXnu pnR>FcE>z5l 'Ъ6!jfV|Y2JC!Yr $cWO+ g%ع́n%z1ϒD"^-$xLhfH`!W!g'$KEDDD䝧] E+n;Oj$G/uG(V ]O3""""""̮(vEDDDDDD"""""""]ŮbWDDDDDD"""""""]ŮbWDDDDDDCmpjIENDB`rally-0.9.1/doc/source/images/Rally_Architecture.png0000664000567000056710000052700413073417716023633 0ustar jenkinsjenkins00000000000000PNG  IHDRs}DgAMA a cHRMz&u0`:pQ<bKGD pHYsHHFk>tIME!&'IDATxu93nR*b| [ Lv|*݅ 6  t}c7؅EXq̙q{NB!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B!B7S ]3f Xk)))i!XkСXYf{4tJA!jjjPJ8[Ci6p9j((;G&jc5m !ظ)VYk}qd2.EB&Zm5H(bwn袭Yֆ.@FB͔qǍ1Bvۆ.BW_}Z$  
н{-ԃB!SZZJ0GOw1F)ew}YL8qb;ZkR=LCS!FRT^N4CK,iL=hzP!6W /0 cFTiYnC CkM2T+WۼP(f:qB!6vV)*juu2@A˖WܼP ަ:ߤA%BlZ_)pyXV~{-څ P(ĸqu]r9zh"hXkV]xEE\T0Bƛ!T<]VGj޴'ZW5`)†rQZA!)ac+z cSk}Bd2i0;CC3U؆.@ĉ5`bWkrKtNΆSF!EGkn-vm eԃ(vUV}\L-ѡ$J!ؼ)l`(toell^{5֎!Ơ4䠬+BANi i:B)Ri`Yz0A2[KKk[[~FqBb„ l(6pٮ];rsWX*iB7]dޞTmWM.~eA`nRH!zR"8 1Zo0.ӬjS c,%X ![eHwIYV5lcMz0=RB .K&5U`[kؐe—iօBOfBn2f7(TLPx!Y2aI$Xwj/ժk(lf`X_lPRb-xɶ4@1ZGl//nq7xN3&u[c䓂Bl2bs4 g,PL4h J)\堔Jn8SΦWߤd ẍpu8STUUqw1z@G7EUUpEasQ*39~8N>ۨsڏync[B &H:b1Acf%L>ދ 7\뽼h„ED:N E,GYEΣɗC6tX|8Q>x4W]5X,A"D)5ʣ['={rluj*4X{o,4gqhx8Κ2km׾Ϻuԝ٢Ժ/usyQ?w;2x }!$55qh4LkׇxRN:.]Iuu,uFMNmy.tR/ڴkQJ !?Eu\~-LmvxMyq-=7KZPcctݑ%]̖!{S*߇~_z?U~*XoՑ78yL-<ϲ%iҴ1;:ߓ4hySջO֤TkO{Bȧ5YHUR. .cܹ-;={[eЏ9ЋA(x8NE@omgB3wQʭOJ}'O>@ uZ研W3a'/}?NWxSexy|t`չE3aw4mژ޽vf睻306<,/_H$|ϾOJ9 8\=p.7_a_c,A %sp[gܧFTteMzM!6Gr)6 IUp\s!m3TNpOrQTcSL9^\%BLQIϣ]y{H*Vߢxɧ㎛ʯ&ͥ:Q?~ǒ(.)b: QP=Zj1w5" c=a4V,i"\b}C{pȑ{w$u7->sMCg]c8a.]ȉj46V&V͚yTTVѱs[\\:e 6yEMus5R7 iي| G4ڒYp9࠵檫U^yx.͚uߚ0[nyVzt2| ĉӢh | %'?E=9s9 iP>lhTᇟMCs39K(*ڏ+R[[E3ssҥ/G}9;x:f؉@dWbSSop%8xTW/ukXl%dr1RO?i~JX@2[/g=vf;3fW(lbWbbI͸ZUUb0/̍/`+9Cbqsfw~~_}l`\pYQSFֽ%0(D NJ]ZkqM qepHN0‚bK,$?CỌ6;lI$sYײC8mzO> A J#gywر͡\t@4X,yg^vrr8l_r#F"$XE¥,E"Q P(ṕko 4ڋyFwrQq 1f -ө''ГD"K/}D2/p58Nw 3yT.٧;sO? sॗs99}׸K㎧1cVTի~qeذYFsy1vkuK2M:j?b?bI<ԝu@`0H  3ghLRu|jj,X%X !?IM ($PÅӜL?- "ǹl-W^_Y[z%hقzs \%Y<C?ϱP4"pYl9FEy3** UlMsy("y=~y^{pI$r|]n}waa0/>&g\|ghۡq)? 
a'%bS$RlV2-SLa);xz 8oIںzhVsbSvݚ{8uN$qb?gy- ,礓z8/ݏdWNw?]I0~#o 8'{}N9<KEU.nSu|„歗>GPMMfrJZ7#H`j˽2YXw>KSO!'mp>z(\Զz]v~[׬{_~=yAtnn?DWN7z91b3 'wy>55q1oߒ{RYYBaa^RMECNN^xg΀wү_O?4n\^NeU\àAн6t܎pؤg]t2}TƎL~~.n{88թoqۑ4(SN^{HEE%%Mz-h׮ P_qEMMޑ3<* rfvtК&鰜 _!?G+MeE5{ Oqe4nRӃ^eēkr>@y#=lC 9[$) 7椳[b˨,bҁgrWO 6yiϩ9z *ҳ[b_x2"9a>zo*"i=5条1wBZjDƐeί x8i٪Nz$}QVtrmՆ0E\ pZvY\ppq#JT#3 Q|jL@)_F>ӷApc|dBb2>kvu6W w0p53n%Gʕ\|].'p^eRj:u[бXZF|~|#4mډfl7Lh6z!2,] =@L8LMf)O>6Ѣu3> *)'?9 ̅E¥,UMS,L;.!77€7L><|ͷtҹL=pM PQe+?kkkӓ W=iݺ-T}Ywbʔw曣С!_13f_1L9^{'qv}{SQBΝ8  }67Aѹs;|Otq)38錣G6Q;J[77FG;y6G/>P\L| | wF}D>琗 'IVݝZ:H-0o8KW:>pe]ks!KHƍ zDpZk|?F۶4J~yo9 q-pA=8s3bGK2'nINZӼyy":vlM dC<<N:7Sĸ9}AqSwsQYYM7pY7gpnyx p Pn[ᩧ2ol{v'bNl>ؑ[]Sg{Ӹy;1*[~6}4oQv*^{R(wݺm%vuZiB^`{c\F[38eO9o&~ϑ{Y^Aq"=PM5qDr"X,Pf9x[wywTUV3!6Ԙ  \ii)<2p\998 ]6i1y>F֢K"v(Q6޽Q]Ȅ>bC0R{ŌR ``0HjԒEEi% [`03V8nzyn7SO]gKBJ-Gb-6TzbKL~Z4o^Rw+ѺX)W iڴy*3˪&Hd߯m꜃Phe !hX@*ޓ d7y9 K_I4'<\Z%B)ꕍB!6[|T,BR5B9 jX|B3TϬVZְq|BRٺ+U+l=[6pZk~ RB JUViHV9@P! 7tIִAڒIxʄBl"Rl %IZBe"ЅX 6\jMKc#ba-6ta^UzP!6kRxP` SI oˆ.,K&w.BEii} , mDcf,2?]vzP!6kʦd 0Tfl2#3.--`OB&Ϙ.B &@m2SݻK*oQZZiBPZZv6B!B!6.B!Be.B!Be.B!Be.B!Be.B!Be.B!Be.B!Be.B!Be.B!Be.B!BenC@!BQ9=Ai6wRfl[jv=B4, B!B?@|4$A*ʀuPH`NݶnH|fj*QZ TUE+y(,X}ZkTfU"S4< B!B,^0XZ rk t?[ yO~eI@~jHړ_:oMZj)X+Rka<}T<Cb8at4m-ƚTKJغBi0 B!b7xez{(jIhE[XA0B oc]N;ٳDᕊZ$w%g>]XeV7le>? +~4o1D@MrыA}pq#X .CIB!,-  `ke7gL3@g_ }, -2%t(s5&rHw4FUkMuƢx'a$|! H§4n>&H-`Sdo*n8Fty>3R]{vw !R!BlV䥂VZjNE9xtaGe0k[MjHGa,(K*,KZ$<`' ϑpw!_e&GYjryu- Z3Pj55Ujvlsv ½G,8h'*}C8 ( 1Q 5ߦ]J2>Vfejth09Ab%\7N83h W&5L|c(Ԛ |RX$%aIkѾE'Mss" mLӁS)lNXn,Dи2Acp:pz^>_󰱼j-J8`,Gx=$du#r㧕RZ7jqP()X'ZRx1rѢE40(= :GTB;.`11XnƞyUxxk ؟Q$6<^jh6V#27`b. 
gYwK]W]Szti}y(?N$ZF7 2 C# ㄃(frGq 8fQ䇨 duҦg?[&`^(+氲c д&IWCZ(*mAXe~ş;b"r#RXk+Wm}Q@뺟wocꆅZV,"DRC,hPJ `8$\UE!lkimI§y§5h),w,+$JQX|X],cuY~E勈y[pP&1%y{>|%\-LX|k$ G[e T[ZN6t_ ֢kd|lE iZܫ(-x98ZʖDaD92S /m5J=ZkT˖-Sb)H$̞=y}ce`)ME QK4iV 51+ D"`g[ 4ѸGs }~֦К*ȗJ1|{GsQI>ͅp-ƤS8Z>%byBDē]զ(&sIUaf,!n-(Fk6VC"N0+0E=$\nҥK`0xB"اk׮oFٱSBgau뭷-0`Jx<^EqB>=0ޒh Ycb{zF'小u~؂I%HxeIġ::fLR#j͘']5Yl`3G) X]-fc'P$\$'(-}CoF,P5pYj-6Ac]lS"bC (\^c1%mSKxnmB6ɡ L VRb,# sVLK\ LdXˍSjm]8餓FqtB%sg;=_|E;Hbzd%XQ؀i5 5d ۹Az:#_Twټ U@zmF'Xt:Q)kQIY"7f\VZvϊ^%YJttV')NҷN,}x%`yO)`m(⯌CNGo,Z- EK+jL D6M1k%TkF)xcL\sЗ󤋬HpqsHB-2BPW$vˡRf O)D˳D5tI["7DM! ٱ*ZJcڌ!DOkQh RHD4Hy.csW]*rR\p|n3qE h5O -S;X6nzG+̖=k%*JU G)۞X.k|bNF@!b/Ą=<tjYkuı3yaEX:萬25&7 ښN.ɖy,+eyEȒ*V(NҊ*bpQpg). M F[UF h*h-GkpK+6.7b)'zo7!Z`GViE -rT7(ByNA'kNn]=[J#/HMA9I/}w46,-MS2oiS¤!y^YvGZ1XYSK!B7n  ·Q@^M,n"}Nɘ ak\&h[T%ٶMsY54|ˍVIiWȼGV뿾p)BXljhVg &.`~%!_&og@#,\XIk} #Q RJ6k}sP*Rݚmo֚dIOZni)vC&R!K0LQm >9:P:9M-:3J!UbqX+D'@Xc?|gq_jʷF([U`hA҃HMB¥B!߬N(Akiao X֮s".lKL<#6p lzlpXxDZTt=md M(%9Xb-KW V:ģ?bǾ|0 t$c-L;2.EfCp)FZ15J)I%q@!J`iPjۭ'=.5$m X(Y&Cll,(Gc2s(hY'BJ!6J l%fBT]*ChC7~ U+l}M-o #T$4$KmTYG(>2Ek<hq?cXUpH>[oo|rΝ֚h4J6mj8ztid]6ӧOCgSN'nc…4k," ϛok_M$acW^٠b 8s)dwGG) oL//>n'P$\ :IacK߲%sbg@)^.`(tAh̿s)?̘Ըo~+WҾ}{ZlIQQfkK.<3uNfZ3]f3XvMm~{̝;vۍwܑ)SpGpGSYY @,c̘1̘1&?~J<oD!v>hIuˋ0M2?DT'q=jR!3a#R|R3݊z 8DJrYecfoӄi· hb,'7>Wb <2 y9R졇bmӵt̙N\ves1Dlc<_~!ЪU+ ׻L}h4p:ʹwq7w}={)륗^}M7ĝwYUdZPRy`%X.Rr"١_ $XM?ήWjReS- o4H,/D,!oE$-| K(>Qi;E'8p7r뭷ҷol9kdZ4vB F@k8|߰/p/k,^Ǥׇ5qIg6ع S27``Ë0Y/_й`pщeWHb=ɄP(@ u]06"ꫯrs2ydFA˖-ׯ?fԌ@ @^3g#FȆW_}x}:f⣏>N`6m0h |I:ts=1O>X{_|HD">gMAAAv :g5jDaaar[!6'TT㢴 JqoPbY%K!'6Ot5~|n[m  Pi,=, q./Y8(ãҊgȘK!֓L0;SU[b17oW_͕W^.B׮]y0ax<;ܹsO"3nc0rH9S딩d2I X뚙gB#->+8)&ٶyVY BoaTk[UhQ%%1^~p hZ5TȔNZ.XO2alȐ!2~xFI(bw[n!f[ǎK6m8ϙ5kg}O?W^yHuje]NfeǎYx1Xmcpw} !f$$bfkE.sAb`)D˼&հ2]yy \cLКJF;Nj< &$\ de.]zzg};ywyZgzb1MƧ~СC{:[lF&4vԉ<_~D"[ořg &@=xT+hfBZ׹_ܙyG2N!&iDq8X y<#aذdC.۾*bznY)>BsKu08A#.$\ dZ3@*]xn{̟?@ @ee%#Gg- Ld'Z.O>d;nF'x"-[o_Μ9sBٮ2rHN=T{9`UuW Skq !ĆOV( .P@iƟbÒY3懨ަ-ǚ|Ad 7QH\2Rdj#n[nR)^?pq饗裏ҥ wuW}7'|+W8J)$={dmHm&=:߾ua%#{] ]N!oS 
T6, QJZ>cn6G^| 8z+;VZQPP@QQ-[uS/D"Ann.'x"7t'to5S={7pEq5d'c\wud2aXkZS]]M6mfm(**uٲ !рxn42RJa*)fY8[;<1mХpmokW@7+@w]<+s^VJ7qD[n1f3|H$D5'k-Zٳg8:uR-/&SXXHee%HuE)E9qa2slYkYlK,! Ҽysh1:Zj%8x<vk-3cM7ݔH䆚@H6t{$U8ݻwo2ԃGK-c lb~vl%,(0 e%-UQ ˅J0V0`r/--U",L9Pݻo% A$  [&leB%^$sٟ!5v3j}j# ;O<ֺ.=P&MhҤIol3Y)En_ BB~ğ X1 Ԙ-82GaV R`E)}#siXKv .$udm%zi\L+`6fEuEAGf㨣3ΠXky 2q/60JB,ʤ&-zi).S% n|)`I-i{s9_k\?0 eJp)zq&Bz˄mV=@9y7y衇ָ>V C"@<ٳ'={d}ib V.ҟg,. +ĦCMZx~M_PZrp< 3K Ν;3p@l;Gd¥8 } //Zk1qluzʕ||PT( `Q_ֶbԪ 0vZ9( l{, BluGJ1̜9ӛ9sWo}΀L&Z5nXcaf1̝;]vم_~{I?Mi4G+Q[K!6=Ja&0=36ZZ Z)L8Kap)D=9?E)e?|f}0ݍV^{lٲƔcVVVBᣏ>O>\RJnZ͝;wA<#4 h>YV.Bl٦1'T`Xymo.&3?謵}i=r7FTSSsP}: ʀ2H$]>}&iڊ@ g>RI&B!v}wH&9Y<΢ U-!'Rh|ke !6e gp5bnG:IXJ`岃7߀)obRZx`gPUJ-ǦW^o߾< f_f_QQxwU-bäߍF'cnI!#p,0x2 R7+cZ9c.J)qNEW^K& svUdB. dp!?ǂrC#~u5I bA~QCaI˥ dZ2V!5k.TرcaR*c>mVje<[jRAZ!ze6SN~3{b|z衸ey>|8#*++r-իEEEc6lcƌA)ŝwɎ;H޽g1|;8Fb-ٳ'%%%̙3C~iI.--e„ tؑGy{ٳ'[m@q;sy<8Ço! ~e݌35j~:3g8$ǎ˷~gرciٲ%zE/P-vƌ^ZkҢE 5uTէOD"z1fy1isR* jѢWUUUޯZk+[lP(D^$dq퀵XcA+(Adz2)VD||T.ϋ4`6K dѴi۷/SNewd\z|7[na 6p8̔)SaԨQN<̝;*~az_|%\œ9s9#(((ga…4k  wy=XsA2e _~%EEEL4.]0bblh+h۶-:uꫯwa]v}Jzk9~' ɓ/fРAwy[|CQXXX'\fB_nVj+Zl}GAA#Fk׮k|78OcҤI&L@v=z4999\r%p uZ?|jjj8C=z4Zk Bƍҥ SLCaŊ/,]{K/ .c?g=뮻~O.}g}6ӧOgĈD"&O GqDDFm8䓙2e Z" R]]ͻϰa8:th :]^{ihԨ+@~R}];|A&MW\i?|ܹsiԨ^x! 
DS}&@{xI&1k,:t=C2{sr9im[ꪫ㏙6mX+_|3<;m]C2d||g|q9K/lٲl3;&L?rrJ0oW\q'NdΜ9p\wuq?gܸq|g /sD2tPW_}?3>Ç-`^"srr8u]7 Ƙ[}IMtRjyo']}7p8 һwozЧIlO<2Ң o>9a/ W!ظ.cI3ώǠЎK!֟LYl8I&7 믿{I'QTT4k֌osxG֭[6fϞ'|駟+p뭷fuٓێ?aÆd(((ଳbĉ/$I<˖;LrGH$h֬??3eeeuY}/۴ig͇~Ȃ Ll6x+u<=؃+;JxW{88bazAUUK,Zˣ>J߾}e]SOwy'-H]LСdN:{_H$]}k??6mŋy'kС?cveeet҅d0'9O$''d2;L߾}y饗3us Çgȑxg[nm~'/8VJħDdkmgF`G)5qw-2yqLyx !Xs-0,;YZ6)ʅcDQRO_/R4̇sVߟۏڋ-,Y rjf͚eod0N;BgɄ 2e EJ)Կc&5jԨ~rKf͚ҥK(,,VYkժ?ey딱iӦ@*f tϟOII 7s :u`޼yk2e7sߞsR]]Mڵ+/B~/}cy-sN3ĕ-2Lݺucڴiu{G9yW 0UUU\vehт6m0sN߹s:3 G":s漴o߾~JJJ̔P T/Uf˄c9k/r1иq#W_a82-Y˄j1\UMM }eRz3l;s:,9͛Gyy9ӧOgРA2et} u6 ۯm:asOJJJi׮M6#>>k{޹̔M\L xz=KӯJk`kJӔRPJdB;bc^zQUUŧ~Ї(6YtT*6xуK,?ĢF*r\G-:7z_X^C3ld$\ղeKve.Rd@VUi[P|ߧ~ /Я_?M]wݕv}V/CflK\f&wٲet?^ڷDKiӆ~"Hv6ϙ3g)ۺm*++Fu+,YBaaa6{曔Q\\]&Fd}tme#|:ԩS&MdǴ>xcƌM6@*/_<ɜL2KdݤI:UUU}@L!bj^z[omLRZpRB"Jcs1'F'ۅZ<4kk␣U{[!WY\ayA*m,Bl,+VZ/^A w|&[h-Zk׮+TWWg?x|wޯϙ [ϟk7+VȎoVeʖ( 2o<>CsO62c3y뭷hҤ m۶R-0Ϲ@r:c 3w}?303j&wu`+C% H$(,,yd'Kx^Ry3yn Xߠ BT\=܌.P6` r)R*"usN;q駳xb|I5kFII 555qmx+$ nlݍ1s߽ޛmݖ?;;q$#<1c~2dpYgpAqsYg1i$:w̰a=z4?tޝS^^?̶n\P綫wQM&kl>zC=N8aÆѸqc˫_.B1tЁN;ߟH$½6lC߾}ˏ?ӧ{ꫯ/pgm]X%3o^OhhRyRiK1|zTTT8dWJѻwoD}KRLSi%)Q[$cqQ"4g&v[~D5.1론b%:k*}r8v2]`-$Uhqqb/ 8u+DSpC,[BJ3~dͥl>SZ.Eqk-t_5; ~)vFv޽;'| O<:3Eu]{,\J:w'\KRx<:ڗ 52Ӿ}{^}U/:ubԨQ#~iO?OFOׯ_v=n޼9rJvL֭zj6^tETUUrljb|͔/ӬY3>s;lm83d7ygq]^x;2zhv}:{rӶm[1ٖs9?xǢEϧ_~z'Z'NH=x5j~:cƌvܹ3zj}:dj?̗_~ɻ 'ロmM bAZ(!]G)|p u÷d"=%7e+}ZC=\$qX64SKEr3sP}tI&?aTTZc|J블R,^8 674ix|Z=Sُ_XR+qǮ XVVTWK;%-$Uyj>B51kj$-^293μ@lY0w)[sY'헛h.ޝ-aOf-c XJYv3[~\)MS/߰DY:(WVa>ϖ*R6XusPݻw ޯ#c d2Df3]5kǜ 5 fj{~粶ջ!weee >38:#8bʼ.ǚ)s3>ϙ|߯wۮi~̜LyV?O ꏷ>Syr|1}c{OMRowb1Oc.H^z5hDK(AiCG)2>U?G\Iצ_ ],] |@EJw{o4^ &@}{e+ŋ懖,I(85*r=UuZm\z9Qmop]lNTP1iO qynX J;:*:QQZϡX\WH梓{u5VNɆHc :>3`Pۢbk<-pzBlAgh|iu+pL0lAe#9lZm3Q*u6xyA՝/6P,<ׁf-Ŀ:TV.u>x&y⒯~M-;oٯ:͘TRe|h*C؍yi@˟xa* Eh((Th~?s)6ZbVYm2]43-j|Gu77ߤk׮ʔ(3MUfkRuZMe\9}kkbf9WSO=E޽yZ.ǺRXc=n}kYn]]w+5|pz]uӍ1+WJ'пӕRsɤk2q h A֠ *l7ja$1xʵQ q-.Y8?گ=w>wjTZa Dr<_} ]{Q]Һ(YYnBD6S'E:uW_;w;{Ip¢&^-=7[v١ZK$'jAwDM7)dnZb 6> v#!}⊵'>|8{ȴbjzZRi`1DxkC3MK&X*Zt4]Y/cSd|cq7PY}Ϳv]p@ʵL]wUU-(曉\5v3OZRX 
{_wׯ↓w\}m}bK(cu)@оw/2ɳ#'ҭs"{Q#ڎ~üͯG|Sn[v#ҤiKLc 9QmM6KO5?WG} oh>g./[+z郙Zr3gG`\rau‹lE FEaw]{a{~^rҤ̼_o.}^Ͽ͞5=kubw?̭t-CvbCܼi/wm7_~f[xrxYVd֏EaZNTS _yv?M͉穂FEɻ uՎ;.~M%.^l٦}졗]ǎ7:HOd*߇rf2k`rq?~ÎO-k-$63~mZ2~-}W.w6?~?3ܼ|NJ|fDLieo+˝f XNc:HB|wɩoMaNI->Siy(USMw۱ pʹ?X Xcp9^l2l2>[3u̲F7cmb+xQ |534mr;L?ZcU߾*X~哩S_i1懲{^* ԧiftQ p)6;kt6k)nOL!:hѺ]axf?#9>@06C'3oxهi6y\u;m`8l/\=Z a5) f-žX|ֻtumbiS&! GVU:7_{5m"ԯs Tqd02qlIh^o͖;V[w߯OEI71U~q6mc޷7-eƴk8jaVn]t7ˌran4c۾;Jgc6 1͚yiQB25Db˜oww;<*=铞Hm k]]kokXQAK !=~9;~kA^SSmW zľN&Cϧ{f_*e۫5v@^&Пy/ ~aSɤɲ,nki[D;v| {+ǎ?d4lֽڽi4r?Ov?%%CiP[#W3ߪaF0cƌ[hf&!2*"z&%%%G.]Tg w/_o1iM`xb,ZH ߕJŞc|"+}}ZlfxG!!i>< +U^[=[V.J%"y5@Ĭ(`iM Qf"RSawk2F`1x3ES}=t]ZJJ^ݷt-!%Ba-WwcEqlÌg]=)P*mے_kWֽJ歆VRVi<D}Fklo@S /6P_:Xc[[%&YͶ;{%3}ɇ,/-\;9xL\"sBA>Lgm&4 0:YfƎ 5+\KO`?8bkV)%1f_o1Q(,d oL8H-Ēk2?*JCzzmSE%?;ۚ6 GջwO/<1 oN{ A.(,v`ͪ/ *dW!3PQP[&[1a`9W ܾU8 v:oun?vlFi8+VtVe )CG-UͰ1Agk}ؖfG^7^|VG >c* t gaiL>t`1f -r{*aÆ{H&3fL{!W f*0PUy~$L`CBx}PQjN{TӨy55\;? ,RZ\V^$ X[ѻ_ Fپ/^lq ^60Ikpzgx].>>x͍Og"$r];T[8,KB%&e2!(*Ht*%JX ?ԍ=D"&GtXN!9Stu"ag^Ų-0U;w3Dbm,wy`!%#vs=Qh CϹGfkzv(Ƀ.,{'W+mRZD,!ⱸ~Nc^$aF4m4~DQض 0(R~SWW7CcccogM &܌<;zC(&mAQ,/ ɬ! (4j-@eN$S)DLlq"B&'Ma~ߠc0k^ǣǟqAm^AJ&-cԀ­g|o6 bfMPkC5nU}-M mhe;IDATy'^}6nd|]"뀺TvsO>0ҋokXS{Wʅp!oN|؟z?J =›.\kiXgxukyE݄oa}bݫ$^yoHƕiԎYnt̤/>=gGrg vhin2ln1l^{\X ?9n±kf<ғ8ϋOO/+&MӞ,h~-nϼ\pY Ol5|.ep.&Mس/Q6evQ(FnrdNdiaF5}t?BX*$3/c}m۞_\E6"~ƅ,{ʜL5w,OZ醋/ լ\;>uw?ҷqSY" ZuW/]uHZհɈ"NAw?ʂyQ嗟|nij{{O9tbY^_J^}{;O?rOLNk{TA[DŽmC!$o9xX>[tno|7_/+._yz>ɥ#O:noOsv}+ʤ4?\{9^&Ñ<%$!ySVv|w^V: vO^{yhrex j}l<V  KeJiH~JjO'4T@vI,/;w.eQ5`ÇߕWasQÆ gaOqm+p.b; ]H$.`p{|Ƙ1c@Dݻ5`7Ɍ;w+W" >H$L744"d/3߃-w~ @a^w@ lCR|kRrߎ_H6aEݪN"Bkm-zUv*w^šC׬\kekj˶m^Z3p(L&e%IUjmRMI,fZ2m?Z!ƺL%Skɀ*8@0N9"֖H>Ώza] _ TE&+ 鲊o"!FlxE}muyeЍiᤉ@@0D:Zjny׈Lh)Qvnڊ7fL0 f̘6<묳tݱzzHYP(ʕ+ҥjmm%"bχ#G-dlZ"Eρ@*(&kӾ0њUt]vDz LaEmqد>v ur175+`}oIADNiJ%8 5ֻb)3!֢D4/AfpP+ڬe(ԑhP(A Ak B Qd~ͮWՕ k\KZKʊ\z$XF~cO]Ν c5yEJĕǘ "lKɀ\H$Ӡ/?uq͑: 3#/=܋Ws->~0! +P ȟ)^%*HD^Y `kS Vm`f8&X6 +e۫02m޶<7߹<q2 ߦIˋ"2O`@:"5 B;4OLpiat3fW\!|>J֭ےBЛDtr" !BHJ&o/hk8Բ`RLbea)Tkktu׭ѳOyI ce lg xm~vzٽ(>&yv . 
0N)ק-_?uݙDtֺuB+esS#AE, Ր&l_Z3UշWfR+D$QYRJlFҀYg`)2&0&n9y.7φalx}t){c…%? !\۶ܔǷp wf$;=%w#`^Ǧ]εZtҚ;ԿZC+~d;QcIWYK;;1 ה>c>ug'+vZJ)Gnvz@ʅFfPDfX6M#d@ZġЖ% ҲXJ,D@8";+fkF0$8@j!@AQ!׋eww[ NxZk&*ٌ !ZkZd0 ZZZrӓ٫: 3cرn~~%W !̂aYFCT  q m4md4wc9fL:Enjn,C+mhBcv/Yj_Y6^.[ n} H 3߈:B3f@|WwQ=r~GxW_{^ GRn<mCԉdžwc\nR)+ ! K'ז D T2]?eCj*S݇Ri@aӹK`o4l  "mN,0{ey5>033}nr,ϖZAkM**;~鈈 W=[nnLpyszw9u]IkmK0~\V.Ovm)dM:tp•W^ƍӉDRIm#;1ZkdmۦO;"b e| K#3?g Bnl pszGjQ\j;ٛ]Jlܤe0$t$"u&b 4oAiXv+2`  G,bHʤAm-, eoӂhJ4DsyuV@k ܠXNCI-%8`QTj)m4O5JX@KMm.g#D "֦ŏJJT\f;]xmsߍ_zęnj}O9jAҖ[hK lWke v]HlY)M|"]dY f- JE`,mG)/i`~m@ Keue&txD|j,f :Rh@&:zINc5ξtoyMpy뺟 !np]_]ve7nڵ+#k/.͛7O-X:)1GEDxw `/9G,":q/\gYQ?~Z|῏S :bAS)Y>:.Mkl G7V`Pد7fx;?mMIY}}+w_R#o7o ;""4"aYh˛{ .7_*S<Tlmm=3 / lF):r]זR~: d6`0 ";r3DD3Zf R @i1rH̚5i\=BfC6֝4JAJɟ}7r{C-UzWߵ}~+/9D~ez;M/ŀT.k` s?Sn8ui;oNݥGg-5rsŅO^|)'56+O1G&y_VE}%#miŖMhiI]ںtO>|;kk>OB^O>K:w~agAɏg{n</;/aK,4Kj}]3N}r3ebJzz5֟gןm}%?~_63/>y?!\tgvWJ_Ktpy 7g[<羬7䝫$ g XC_:sr3ٶm|wGb-cXW0_ڶǹ @3 jo\ׅeY:u[D4p#̻XHDBˍ븭 ]{uk`K$7SvJZk/Enzp귇;\vݦ|Yצ?h+~yWku@O=ly'Cw\_v.R246{#|?|mLI]^ӏ\/ QC^?c̒ 33mm7?_^t},4~^x85#yϻXx71cx݅WK'{j¯k=iO{iu8^~וgѢ1mWJCWVعmnQ (҅+b /ϔ[*ν:ȧ[U_}y0 c{sRKրmzHô'*gh! uN9w_.nm l ~S)Jl^/W&^n.^}[>-8g-R{g4a|[8'#DUsc^4=λ_SU0QFoזHZ!"e|1/r_z {4 g+Z-8'y;˿rQ}B4/l7s/q)|h|m;?հ ;۶m{+m9zw_ V C+IӴr>yҙ%`A_RV;_Z=7G!WW\t-Nzm3w_qxMiC>v !$x M Wg ۡmg[3YPVr)еG3hiO>T ٵLϯDN&-y,*w{dj=7^ Y~Ϊ]±A[[W,Yy M8<,jK@]6FA^^sϨRCxL>/n# q-wW:c>9"׎Ey/w+MӉxIt*%B!w_{?du)N/^upN>b/>?l_dBlU<fcȶ>?hv%[k)- wߘYoVch[s֭۹__L;˧=pW^ >p;OM CG|]V^vzoZZ! 
nC{`o.7$j"<)$ JC%{ .7 y@*ckNʙ:u*Ǝ fiq eYCY\\|8BL&7nMC,k<"'h&F'ڳ o=a0 JzX u5ծXe63$[H4mm̥l5,8\q?tji[Dq2ԭ*ӣWrv([f1)%K˝e 2L-'I]!d n989ˈ@V+NQ~OZQc_Kd".hmnv]o޴ϲN<a`m1knk!|5] ?5}|u8rtGа.ƅ`&RRIDNè{q]sN=сCON5ɛ/;ǡ:cj)i ^YɩDRGkKk\ɐ?pvWtrUj,|ֵ|\@H@9゙JrSS U/_kܡ B{Qd-u/ɇ5iaHb۷cg\xY^yؽ0J@׫Vx_zz^K˻wg?a&:pFjL\`ƿu8c .;\*6 cCs{ӧc+ `"׉f B)+n 0PLl N\kl-MmtJ?$qIͤSkTH)ٲ*ul/mzfF0ѹ=^3{KXeS%d3GZ-+;P5'#/-DPCE%]2yy~M:vw_W jxKe7k^9ksERZ^GrN&~ix/?;KO<Ժ5ocq-wZӟ{u>pړr̤nsECogկx%8F޿6™(?,?oԶ4p6|cETxڟOya6!??_577Kf9Dt7RXJD6ˍF{]wB6]iz4@:#CK׌?ד:vMSK0|ɔ u.kh~++,L&C!BK-!$D9sct*-wD׌޻BO=_Xƾ)&u.-^P'A+h^1בN&#z̚xh^wt?R;oM2ަ֒KÎ?S_{]N>.eN:ɲ$B~żwo/e2s={Kb܄cڢ2\P"V <z?vky,9:I utkV6>ؒm x=P]˒oke0z@)"yP{KX?qk@MR_aqDtKCkyS)Pڥ">cb-YkPמe1T?S|\~gB~V.wyu e/ߧ^!˂Z#l46 0m֬Ypmmm^7wID7J!HeAk )%FCh݋ҊBY|/ң?SQwƳ;x4}gjA$J'^7zH:BHpnf!Q- qd˫Ӟ)^f8MxTk6 5$H$|N!ۤ![X;nmnXh8b[˼ZB:7x \pGDOcoV5֘a}}]aҩdF XݯR+E ˅˗*|N uC5+P ~8͚,ߠm--ֻFNJ8>PQ[Kt"1lH^}vYQ_ H:v]5-s[iqTU/_? }yC>V*D)D-էGzhVG/F{i*,X[JTv;uO?|w[6~ŸQhZ+C45X&޲[ɱ8Jiq[ aǟ^.>q|h&+*Oc2qk9_yI 3xQ}esQu%e%LZD Y3-Y>'_z?%h{IK ΦuW_׼̂I?4ض_񩢊n=Sο&B* :S=P}̩>Wh-W@&g^<>J'#p5vp$m]bS"j**ݿEV/w7Zѫd}k<`1lavݲfۯBe1ί9V.'~J]LyKY[ )v.jX R 9im*걇jןZ17]y*{t$,u,s#vi=cJ0}4ؾlׁH^>׭8jd;LXPc(iHLӕ7 0~s}l #O8 Çߍ9yyyZ_ID ܜ.))!)%g2̜9 0߃?˭o1!.v5.byBθ3+;"*g!q4Zi5QKBҐPP(eY^`'-p^>t "f20t nndurCjA$QYw?Ě;wnd@Mrmhgka?ìY0n8DQ8Q]]}mۻն]@8,2cgF{W_W#"ajKS[}!^UXVZb9j TWZRJc" SUSGD̵I䪁Ӛ e?w?JҲxݸc@Skh}}Hq%bZxky_X^^bmj}f!Z\;ε "̚u?RFaZڱsnBh\LZSMk/HKr:)WH&ƵvBhuk?Dkj{q^D}eon߸ol"AP ^[$Z]0@A-iEO/1Ch?rsbK0 McJ۶3|6KDj_!7T$i!w8z+%nAҐ`/~%Uj"mkP?XD}ŏ=V.`Ncowg~%HKJ*l7u]b,bXSeK3O=\7^sKaGY_k=ҘD^} ʁ7:pG:N0 06ӧC?>W=v.K@qjDDzvn$1V.(8%0/O& .w<hֶBS]׾hdS/ :fNVrE ' 4pεO~b?aK8z`0Hx:o޲u!p]ƆlI@kaIZ#F'5гÒ!~|URV䶵*D @xY *.Ip-@JAEJձa ͜9zk  3 CD1̬can` P"=xOhM#Ϥ-Δ%Ԕ ~|:L&!oΆn(]wlEa0vr1aL>D+V(!w T:ST߸q{(x0 ٚAiڅOxw3uR%Tf@)ZC`G?;$o#U|zbEBL1p8 - yaƯ8hTz! 3e!3ӭkD($RYPVn%e[X"UqtTN0$uU> 5 ,r /r ,W/{/^[r >w!I?ŸP5U{0 0~X"V]f{חhbfED_{C#!8%P4Dg9PJSAtfqwz7nqdk/r}]G?'o  BO{rZY_u. 
8xs)=?5 ZJTKO.y-F>g;L AoJΈv5dyX}Jo XC1Ai&4 0 7=V)RJ˲`Y,>=;[ |wהBP&{iJ#ꕩ/a~O“oa{ H~vP+Rc""@dozg֦>sOcoHD!Kq:^N}3>׳0*7mV)׻ $Z2#7675ymm2\˨y?hX4QEBV~]g0MpiaR >Zk3_]=R)D$=Ai%IkpT>,cB)n̜zw9_}_}Qշ+#RKRmq^ԑ|3i!hԹTk/@\X,]@͸/j ղE(,n( N9#RHJعuG?j_?_y{`+S(y)E67ȷ_^I ȶb-(@8O 应/t8"{e1K-7/_`t~TPZ!$f&!|]8k;XiX׌=]ƙ`0`K0 ^|E ,`RfVe4wfoҎL`r\ A$AAmEb6M 3yͥN&-ߋ8|LZQS#[6- 뗼ٙ 7_ֵ6mZ꼓: U3{1܈!ڪl}}#]WQ Hj #ǠeݓKAb!T2!Rս7G({h!{ *C^>3/ZHp>8#zv2˗}V$}$Wv[A K];U"(ntﭪ7됑ӶA8!8.d}=s-ݦϐCw߹W~+5 ?v. "z3zED"Zk|XGz#߆CD\0c7 @ro5|w#VµRJ&euY[|I BбfiOtYwhWݾ>g/)n]Muo -9ל;WJnn*&{o-䛺u958֖FsN* vw*_{na[D ^mVt"3#5/YUZQ*eqGrNMl7p{B43#㏬3q&Su#xcKBh. V1e$ wj̅laA͞={ vzFhp3וּVD$)LAQ"ATFb]13b-`bp2 (,8hACl7~K)λpqѓXSO0wZ4pO-kN:5}Ηw޳;he|,F{_rv K>ںU :z⹫/6x>[g?qo+}$6Gv푟?aB#S|Ag#4M~⹅'pPݿqշ߽7•ep֒y;Zt> ߖUf>?֚hAϧvKq|S[G!CtCO0!K[|@+2elܹs{LaF`Ys3dԩSֆj-B\ Dtm* AJil "^ q jJlUJl3=vAd2i/˭O\TXT".$L,a;_Q fcwrP|CIJ|#K_x _ڷfjp2{`fҤ6G~iXp~'bW?✯9zRM$@6+;;?]H]ڜ)ljr'yYwݵTq5U0jWW9E`\ ʥ]w|y!e+zzlovlilbHw cUyPeC-%=waV$3s&oaWB̞=;c{0 p]en4}m^uX ]vZ;!Me`q0Y~mLA<᳠] 8KXن! ZkjE*hin2+Ұ%/_J˻: \*-^!t,]8?H@4Yp'UywO0JTdjrDZ [([ *SbwkxR5kۯ[U^ڥҹsBᐭzg]z2qic٫k>,s#l\Zޯ‘zo60C%q3v@')>Ҵ? 
uƤ&\i@3캔Q^`)M0 BZq߼Fo֬Y;v,m@ADS\/ORDDC'SZ-I)p2)Rmm_a" >B^AJ'R:\U, ,D̕-M ˲yv7ؒݶ,ϟ=kF~<}j4VXgexyWyy'ٵPJJIBiW\q^']:5е7\hZ g3  (ؒLBDp2.XǟQ[j/)RX(_@p$J?so1④ld"a64[mm"mJ/Bݪ CP^fYib@iKEHה(u}3x dEמ4ate(bk+ܪm!LFh 0{礄t UWSc}6pؑ6J藞Z-Xb$T/#QpDt<'z(O:mߥϭw,[h~N͇Z&e/Kh XH2c)a$ 7hk$2>H vpS3yRjV׮] Xx1<axIJd2* ]|- R?vmAD^R"yO˲LE `v>d N%AeЮ.vU/q5" ){}3qMU+?O {miijo25ۯZ\Z9k]$ V8Wy5ZМOnx;?޷>sgپm3S˟ʈ rݎo~.jՀ!Res r9ݒ6G4h^w^}`GXhE]ݹv}Gs#)X'4+^v=oE/9K=Z4myY=|q@.~kf gRWWMpnCU=s 6rv'|T*t>ﯖe!DeYW^ uf aZkmYV)Rif>[k0 ˲3cĈ1MqA)c?eŶm P ǁm6{#3[6D:83l̈;>V", 4k07>"V 'yS/-V>=]^y%>_;#5vcL?nkN"KqYѽ*=H5;Wh{+n{xsCB<հf_+Yչ#y?.[mY?| 'ۀh~+fSνlq]J :|x90.}QJASЖRH)Ē kilLZ3l9ۯ}Uz]PTn1`Ԗ&Jĵ }~{[ryK>‹߻$p]F~P}}az]g@zKB!Jo,PXT*u˸.͍Mrv>[f֮FAP5խ/OD^Efm]{fZYl?qksöa;y_77E.{jRYZQgѼPYywt0j{VmRCH%Y0.}ݽ>.N=2d^?p4g}],E[3s}Dd.I,ۤ^sY;."L&ڕ"@ 0 NhZJI,ɤB@J~߸q 3[\f>xf?/7n !Lp-?h Z)EزfB[ՕQi0Y̍PҶ,X38DL F4OŊe S_e(-%cZrBC Y(s C(lN%!tݦ\|KCڦP J)WxQ*R2x B)PXhxE&vP* @P`4p2xL ""B  Jcv i3Ba3u`\pِ@F'» .7 D AN ]Ĵ؜X,H$ff5|>+Hh)e5l=D0 c34o޼Izꥮjr![` xcǏ*JصK.:2XSrFr8p_ފpU>V-8qADdBS""7X|3*\!f B+E$ _SZtJ[@LB`)ٱd4y}Y:Z8 -&% `!%Hĵ4!KK257)K7.)%gRRxx)يdc r&%HYnB#H$ˆsO3+mm .G={6BMRO0 5RkﺮF{vac \`]~~ʕ |z{WugF%F~` ݏ!ą$JBݣ. (Sg& /R!Vk%I>l}b>zk [X6vxŏ$"2I>Q?h ֗L':a&8|m,Z+lO0A]23H@UR> . kVG}H* A'&B؜0@[CflA8BVЧb0 06t:b8CJ)05@DZUuw;]tiav8"q28Rl>J  Z؈1$`sE_YFF^؀t*Xh~T+0 `ED5npcAA̜ކL}|J3 $eL$ ʌ.&]T%Pa Hmm}DgZ0?3{ fZatD}SBFЌ*e]{BU/DkeK0 ȦOqZC1Yq23駟.j*@{Әmp `@[\+99l)\S.J9RJ ppl fm2 0 ɮTP(\5W'D0 4I),1"A1=p@ɶmd2fd34$ B@Xt\|>H;1"G+䧀%( 1h!z6 `eg,zk1mnaFg4m4d]L&q.ZH|W[`, cƌiv: 'Lڞ0iBF0ܴ BXY2 qQ/Y`FYjń^^0K`oX$/١Fg9kdM7K0 h_l۶Woz qPSSX}o0w}nM5sATU @!3@jEMk S_'`K0 h'ӦMQZZƍ˲}xoP!1c0cƌrw$ 6#A T=sٻF p5$uNThuSH&4 0v8sy@zKDֺlplA հ c 8`R͚K0 hGӦMCkk+y,\{キ۷/p˲"`ЬLMڑֆDhdmNFGE``,RHe2@ghalK0 hgRJtUUU|q!kît`3 ~.gFՐe+k>psL5aa3"R _|ӧt2_)5HqmBd2iI;[TJ!4Ywi ']VqWH0 \3 ۶ѷoGp8QDLks`+Hѡx%\5j.Mpia!J?~ szK)RLrsE@* |:MDT1rα 0: "bBSҧMpiafF߾}mmm8Nd/7c` C@,$lK0:j(AI0>ʮwLpia"_~^WJy^n&_W1 AT3fjat :x޿ ka d2bΜ9R y 2ePV)c$A7UW] %-XZw . 
0 cq3L2t]d/7#˚08 VMWZK͝atlD^ 4o)\hm&4 0M!C[3d/R3fQ-TRgi)wtatp @t[ , yE{ . 0 ckR.{9AJ !F&{9u8^*4$A MܐDav*d!mYOڮs4aal2 >c.DtZ<뺉Dstwa0$ RϹhH _k37%хv{n#aa?mذaCJ9+ ÇY"2Ȥ^XAfG((XalAAQhHeYB!m{w{0] \. \G4"2} c'v-ޛj)1_JȖ&/kyV6aalRVZŶm[B PXXǁo?ä \ZH>$&Š-誁-06U PZPp7W~*(|YKaɛ1cwJk 3{$9 d/7u?(¥i[[PaƦd PR@$2pF`0AΘLpiaL&2BXD0-_ eYZkP(C5~%8lH)c钐k]kLش0l魳\ފw Ld$ --`vkK0 ̘1WZ? +]q LLڎ@2$ Ui-`&#X^Ԅ^ X\.ARPHۏ . 0 c3aYb !,)e  ӳjf6c7#ޏ l[0-lB4~ ݭX~ۀJ/|6.apdpnjK0 LL:h̬ {L) @ >, REFrlI&4˦6 C 8f ZLѹߦ&4 0HvJ,EDn":SJ )%R|=Tg=A$!,0At߀>)~S06 [8X᡺8;8HN޼=MpiaiӦAJ fU}@#ÕR[)4 g}{4q#YmJu |tL8AKui3Ḳnn e . 0 c3fY N IB!H)e^nfVжB;Q.."_ףocyk`#I8 u:Dp8OR` ,jsH)d0 8awl1bSNرc٫'slRG16KG#M .-Օi4CP6sbƯ *-Wi: Wo2p ?n .gϞ ˲@D$~?smݶialfr'-P*~뺘3g\L"3RX8NW?~' ?W33p3=sAi R\Q!.a* '*es:Zp[+ 2PژDYnJ ho I8m貴Ufe HF|h Xu^й!~ye( tefʤSd>-^V  JC&7Ptq4@OxH3fF Bkh`"@eI;gܹQ5٫ÇoM"s;`Μ9u"Z`_?ؖia϶r`z"hf\ʬ_d'D$vuͮj[[̹[d2 !@Rā2|:Lfh K@X͌c0E.(쵼kHP[BSFMWl}_.Eٵri$PҔBҰMn{Ũe1淸z %+[]z!P ƑJc+WcHEpMl:~XćVD:`#p, -=z'kwM@![Z_QYy:(1#j%| " 2WɫVcxT+ #f%H f"Dp;θÙh˂0YK0 cCE~>Dq1ܚض}9s=zt{5k0BL&_eYxO}/R{r0`fD =F^m&ԗјGP 47\QQP@i[[+, hY` $,l)re_-X^Јޚ!p3"~&a5_–4 %@/ XHKT_IZ oyQ5l! !vRBgQJA]r4dFAusc>x G6X '(@Z$@ "JBpO!P(;/);; y:E# Qӊ +9", ``, 4$dFI%R&\3#+;2\aR{3# Jm^~exZ떖H)_{NB<²,^{W_m!@nle`@mpM[3 =H1w0( Rh@PD޹&w-JcO̠ՔA5hk)f@ KDxF4i a Mf"<& Ő} B^Ơ.1ng$$ 7Jiaj:TT&@i92F}}X@$22*h@P(ΐ HP E] *--f׀L zI @vX p*WP+k]wl&\k"0 cf1fS~G*BQQQ.{y޽{?b 8@{؀rSesA&@AX%&q}c"2%Dkԏ֠TFҖ^6 KONp*hm6?.@A54pvksC ̺nEzרA~Cn2y]Y˗矀n:{'{Gw0&W%u- 8lP̋7\ k f^ף|$9S;56m8ܲÇoj;蠃8`!s e^ *ɘʱ=s} U.K"l`?ve `OBKdF,h!pWEeό^Hi!3.쌂V'S aW#*@#[xV|3Ic>tj۶?8Xƛ6|hH`12cKAxJ1[nyX%FQN? MxycMTY 6P" >\ W8 ƙ#TmQ"Zo)L 9@$ z[2s+?X.hF❤DCk̀$paY #;BoR 3|X$!7´t?q ۤR)p[[vAD!ċR iݡ2k2?^#6"BPp^ (Җc 8NnNiU6Aj,fP ͍%`19f h ,_{9{+H@jhŽ㤥ݔDqcJ拙Ҵi&NLpilmm1 H.ˋ (^N@Ik !B=CzcЧϖ{PSӀ? '"HxK|T*P( Gxwyc㥨~\zl]bAE""bh5cK1MiQ{ (Uꭻ;iZ{_MQڸ73f C !{aι\UUcǎm&{~@ pUw{Ap%L.tq oܓs=c&ʣXZN! 
)Ƥx {lIi>ZW~KmY8'j&8g .9Ebc ~~+7_|3t؎> /AMIյ-iC1$PY,A 72gȗUc`0 qΩ1&$w+p-p1_QN[5ș-\jY%l3bL>&1ȱD)!SPh_NPD#?s3 U"dDhPeQq}{ݮ}s @Qps3FՎin)g*PJ%t!1pZ,jl@(ySNݭU1l^Ed` Ƙuťg%ObXL&c]Kqq(R!S|?|y=CDRaYp!2a8hANCTLEvH0,%޹rF>1 ڵoQ/2B'W ~ <]w{i#YD,L뮻K/@EE_|n{K.O?w\E!V\c,0cJ0&j}| 3UD\USN9SN90?^XIr-b`|e"iRSS&#jwy1ڿ&8Ƿv=k^院jYg0и|S@+ MM ;TLYO0F"@ΒlI " HZ]ukDvj36duA_hZ/"ݡ:Ә 9ŵt]2Wj9"=/ژ\]f:rQ>Y`K}OoKER@D"U}Iyg֩ /.= 72T#@o!ln N=0@/1cj>}ߗ]v Iιr'O ox_y:{?o;н{ׂ{j>p:Ə{M]vٚڅ̋l6b„gYѿlݶ2|-),^>}zѣGXϻ7ի w{JK+PͰێYl>E,_^ /<;|B.{r˭ HS:u&55x'pN)))KvEȂ_ cs~45e2߇|l1!o&o1%%E ;;vFU1{,\ÀӮ]MO[#"MXUo.W9O[g  Jb+ nPKg8Q0i; MV͍w/}jnyp=gwb7paREu,cbDPlkZ5;}SV J(_/lqe蜵[TXu";qϚb̘1$gH$)7n\k7Mťg'2rr6N.[Nl45e(-[oðah2޺7/NKO竞J?g-` \.G*՞fάI1gjjw>f60wzar%X-[FUyilf466sW_uםs+w%p]{ptЎ7ޘFǎ0vء/5yyRHn:2u ,YBiiG surmWa9ʡoL~`YUW /)Pƽ>/~q8pg0f޼񔖖EYR|\~|y9~2gXzISS=G}'NOykWO\{\{\xTV&[zy? LO%^_Uf989go/wG)..@y׍?CĘHpy.>b/&ZsNG޾.k:ehNZbYbi,K ?ZMh!֤ kI֤}H.i^mЌƠ e*MAX%pN eX: l,l}Ҳ< t'B:݉0@*Չ7x_~}ݙr?ŬYx[y'g>|.6͛W[$YgͬY8 0 pn)<0̆nV^)?_^{Yrꩇr뭏ӀX^y]&NW_}wީbW33IcCreGOdI7i;@y?irAPQ|p?n7OFq_W_}(8M.ԩ쳷q≇RWsMJ:~Yve;2L_=w S<Ōo_QG]8Ly . {ُ3jyy{쳅:m8o p b%""ꭗ6s~cQ. .UR.&LE% CHY,C=u۾/:4(4\ Qزg%_r# )C"#a4Qr}]Hߏz6G^ lw`% xC`~l);nU,[ ̓sn]T9czZ/.=k=[o媫e]ψgGYY g}47?egqXHIIo;,Z1{|V/˼0j8}@矟/jH?0O3g8 ; .tƇ@1 .`ʔ[~x޽"j~HӼCw'a, f&4IyZGj=;=:Tpɿc펦GE^۹ ቴ!pS6(Yz,aө_l;ug//PsEgU_~ix=#$GM9R_4R1N ⽅p1[ZF3tYEx..2"!Wt,e7A> Vn;JV~'N X |tv's 2&n-sJ* ={>3,6`}T;cxx睏.'/A5GIIW86ӳgW hĘˢoߙ_~{o1QDU|py~kcذ}3ݮ\fKՑJ Rğ7&LļyHB>hmiucH\Gu>Yf2m,^y}9)p)8jFMx~0L&C:[ ?`CjpQu|:mDС_,"#<˟|?/;x$IP9s>gZg[lDee9dr_! 
Rbh"oFmU\vٹuY~\zTT&hhp9;ld СH,ʖcʝw>K/rYoWZ/+H&0ЫW70(ơG׮hix2F嬿 Iι"k7{Y/L0 kmhhTD$?ziKà1PVD7ym!k zuY*aURs5/aB[Q>ɂY@` t銉"y 4 QL2ƒ "/\^\zDl6oןi]eIDATg?;h`Ѣ~gsm&YBQQ/L  o]4۰[S/Э[g99@!2naޝ'\T+ @m3t`E9Kxq<?(]JXR{y,X̋/=))I"4R3fd zu#\.GYYOܗzw0dȾ\~rsIB -rTIʸ41`tځg7lЃO>?+hܶ(tcf _4Jh2f]v9v뮻%Wz_ƺT*eڇ#E'" 0DC a̘1\{9Ga ۺEu; ??"471a9-cfg`^*S@W6DJв_+QBc} >w>ܹu@;$OnPA`bgb,s磏>eȐ=lQm\)+,\H˖}7 ҥ#Z 7\0܀O<YQD;-k΀C?ɖ[nL.WB:m .I) [?|zzΨ*鴠ZOEEE%[n1>^x<ŕ@ԩoswp#n= G Ϟ(ÎwPzj"MMtܞ!CvFpr"8sh.t*+p9ǒ˽ʠA??v**a(\d2Y.d|Bg˗/]vXk?ƭ)Qjixg#9VD}휵׊ʩ;7ftD$/6*T!ʰ q,Yt[J5%bEPhXڒ3k5t.t{ Kj{,{zD2*_nC ~N;'ow^V\{H1yTU̙s?]ck"˭Y~@zX|;1ڵz/)++Wnq |0k-Aǎ[yO8444qMбc;>˙e8ݸꪻ83Ϗ`WX#CMW_ܹQȠ]vى.;._ȡfmȬYx9s.l@_r>UUgпqq ˖qMн{ ,y VhjG8H:Դb4zTH̳ryIwO|K8q"]4HVQ\'O0Q CrKlixu:@`>j"-) ¹z 9 +#Qs!ƴO?fs9?]F-)$Ya܎ĽչSӢZ/ jcW!:¹ze?Yy{orn,@VeƔ R͟|ӊWRe;A|DSQ\|q/^s}%ףCQQa#Nk-s!`GuNo=ϰapQRRB:fG jkk7n?͐!Cf TW-3$ɬ|4 ԈsN(t:=AU7ffԨQcaIǩN+;ӏbp[.=k9s8װZa'"X['1TXwLL`m18^g60ܹsY=<:z¯~u"X\]!"f}n$0@\3")i[}4iG-E8뗅i~ hڛùǓ)m%J|hYa-^H358_%8ovi٠.տinn}466MMMVDn$/.>ꜳ555ލ̳T}Wq]ȏj&QU ATu*"/.'*[gAaąwW$IKXuV2nva/bԨЭ[O5}&[A?-q "A_ە?iOrnV'Ғ'E[;aujJI7OJ1xn3f Z[[snzmm5s[Ŕ̪Yl(4JaZ"rziHӭT;3djjj#Æ ۷oo(|ICUk#fyxꟌ?rƍJTpa<{d}o&9x&yk׮NnoxƎKQQjs~ι c8p`k7NĖK2 uHRΙ3馦2^=p Yu&^o]%΄NӮTZm+x"8Gx)//Ib+"2X0.n;2~xjjjqOSw}s")x<cLhjjҲXN6 "X~4l6?hYhrR>5&D$"`( `kkjjjfz#Ç'NB0@U#kL*auקx<65T mdֶ^ p?pSO\駟nѧv|/$qa8kPUO.K." 
Do^\a~Tⲥg̙Ux<M;m3.QTTdT9U555Y@2}𛸬&ob!Jw~ЩK.}駔0jԨ>=.$'Nf%q t:c|5ɏJ\԰K6z˥x/\{Uvc~Dj5EEEFUU>l6Kn݈%K$U5hܑ9'b\wZk/ϟ[ii)བྷ<ߎl6K .yu8c,4=1DQq9YCeP,mNMx< 'ONDT_-Vdl$37"86眉ȉ'cqQi:k 8 JĦsΰoi|<@1AT U_%vCVU;rPDn[[(.LR65s^x 80JtOx<ϗc?NN?w?o m}M,C.Jr~vJvEUKٯ6ϺK @\Ŕ?s0 1E*L/D>4UUg;AЙg ťxCl+ťxx<$ɔsIDqo6v-/^37l6N "EQTwmEs.b=mv]Z i1AMMMPZZ*aJqqqJsN1d`婥K,]Ut:R)c)@>%k Q%tQ ^\z<㡅{k7(24܍ 9SD^x˕1G"r3^*Eis p1Ƙ[| G[NEc1Zk5Ji6J)+CRSՊ5XAIRI=Dshn-YXMbdk/.=IK] -ڃwa@Df@`\Kޥ1\.G: S%˓L^iI¶J֦|iU;9snF.$s\/UU眉eιU1zKEZRR",Ú,Z/۳vb">x<6رca(_jjjE%%%hѢKJJn4ƄcHRlc^7傸 PXk%`sa""ұc=ǍҥKhlllqĢy xPUۉ%$UW"y諪Wț/1:nll\н{3rSD~%"gz"RO`ċKx8|p.a(Aʂ0 5(NKEAE6 sΈ8qDQ"LF0 ]6ͥiŋ˞{ZDU-^">=%o۵kCU_2Ƽ+"g@`7y5^5" &yFUu :Y7n555= ^\z<YdI2 B4Nd@2 zj4g}Q&L!C\ER){fVU Ð1cjd21\.GE2k,z쩀Ο?N:ڧʨjd- `kAd1Yn?vr:kmxkƘs99nԓ/#ιvAř[=߀nq&NuUѣG43+-:FU);ڧÊH}RΛ7Ջ(K6pC矣QlQTըSaE#I&)g-`ر-k&VKx1 WDQTp]U$~Vص줯h790 2__1&lԩĮExg#G+353(A6:~'"&/dlC :\ &S2{!I#~D\RmTur"l $^]π*'L]D:1Rs8Ծ K-#$ NRyAZ]u2}/$r%W\ϺGR2MwcG+**hll6K1W5|Ӿ}+UntѦ&.DdNkdz6ťx<UUUh"*++644&"'jvƘe 6L Hmմ0 Ð aHQQ!/vQSEd9[\\L&ADӧD@t֍vsqTT*U(~2lذ4Y.c\)*Ȧ"sæb@U/g[觪i"rK&9})^MMMa:~8/A`P9u>x<*4K,2PUgȟ#1KU yW˗ӡC "EGډ6L&øq|ܣsL&CEAˎwYYlֲB  1#|.qߝ?>aIA "DQdsƘS;UU?t>VVV2t5ԳHpa]JmT^D&kgUUDTL&Hs/"۪{"B|mtU 2q[5x<0 qQ^^X\`zڷoDa qD9DDX!,Ut:-3gd̙$cL,0U]@.X0 NR)Yd5ХK1d2c!Zd [@!s7˗/wLLȳ橭MggI JRWWXkm<8a1'g1AIIinnvtZkyU}>04ƘU}UM,xFx<g@sVU;Ap/8pvXxq80@Uw3HDsU.t멨9sN'L'p BUСË,Z\.Um a\'sc:w?sa8SU}l2("d2sn\oぽҩSEzAڙRQQ!ymYhuXd 4v@J3B-U5:(r/G׺,lpgMEjD g%x^\z<dzSUU^tu?˖-#+jGcv"`GsnU]\]]-FҲ2guz\CzdɒߊȥZ."[s_cl,63\;P__\1 /.pM7ưhѢCD`\!0xuB)-GMMā@97-ōVSuuŪQUUMxq$L[7<D*BUe{cߌ1cf"A|H^hO^D^.ax>J"oUrzPbW÷mnBUVƘ`sXX^,"n%"o'jc3`1fn)"O9ƘxDDq/"8oq!Zn|!"|R6^ԴX1_moz;S[[qp@Tkw7`~Ed!.&4oȐ\.c6uZ!v*1fxI~{ǟ/Ϟ=KlyФbI x1^\ DD""}|]ÍZ$I/M~%%%mcǮnr:Hii~nҤI[::e ϺE "K x;J/]i|kqQU?xqL Mb!I}}}e}ī>l7 R)1*q[TTK߿yD6NCDNx3(:Axt吷(E`֖e?32 C+ `g(d٬IӛTE#/2s-_zMƎKMMM|g _>$nU5Ӗ_ċ5K""zc&FQ~t"[/_[m;@Uw7?H0DU7>lnn1yz<@Uߍ;`VU|92Ç{j>δ^JFQ? `xo"snC@avIK)p?{? 
âd"2HzF/ڵ@w۷ 6 Y˞ ,]%uѭx~@ h).;'ZUUUxu:p%}j xq0+--XӣGT%Dmk^" B[ A.|QFDdLHR$gҁ :n>N:CfwND6F Us!x֖677 Xhر)9Ūcl#GHl*^ι~aťJ8,]r%_KAU/:>̨QZs.[b>s~%ob:VVVEC%"r.p,sΝoY V;f̘U=<稪AP|4vnWËKxK%PD\Z" uRU;ijΛ70 E{ťx<I:PU1$9>֪J91F%$aƵ.inn\!u*  EEEEZ(ҤMT( m ðLQN:ᜣN1*]vՅ &yx@D 2'j[$uXLȥMQiO{qfN#isо}{ѣa"/UA9s䡇A@gޔ 2y5N1&)))av#ˑJ"Jp9 ÐbwT*jIIIӁYJLqqijj 4J1&pΑJX|444:uҢ" P.\*"b&,z9'"e!QD$ >-1dȐw^xqx<:Bi={Ο?_(Q8Ug 7dɒ%}AଵT,0XkƂuoP;wv"Rf,H1DQd٢"IRul+)))ŋ>tZbs~a&6nzWܴce~Y3${< o <@Rr "c  k/.=YGffd2;N ga_4UZU1&(**:7($" oHssqM9:rFD\&p "23s.qΩkw|NgOfGw_Tk~~dTDw^z)"{ngcÅ1[Ufų"7jl9]W׬Y s8礢Yfz~u qYOTUƍMlSKx E;\EUVD.fiTu`OULDnSէs\q*t60Ɯcu"r ܩDrp3|7TD6XY" P {6MMM֦&zmz-Lr-+ sasbVVUUk/.=Y;v,555"A]EdcL EQ# Xk ÐT*sncX\lk/>ݪz\Yt阊@zG gFDw}DU}^/"ϓO>y7 !A ( N T(mh]@DLn\VZZvqJȗs530tg/ŋKxy7 ZKi yOv\-"n*Jf p >0x޼yG?8+i׮:vcG XZG:%^1cE^U=]Uǩj<ig_E7Nx7("`IXK{~]1bq=1jll܉UjzU0Z[H5kfd2adZ""'|.kk&T&BKⲵx!"s?Yko*yQR{ )S<"bbxa ީ s9L֦v[;`fm-H$%skIRӋARR*).+BmmYd)Ƙse]֪AlH>{*ӳxy+(tΖoy]Ug~۔ȗ)p`8LGQdE$(6ڴRY͋ݻw?f…ۈEEE777w6Fm OUUDU}59'||  t\.GA_^T(lCq@)Z "y0tef*ʕRv/.=Y˹[U=^D~lS:uιJ`_Z/_Xk)ƘWSLeBrA9s8 >'g}ȧz"sUaGn6r9ǀwy*W ι̙m'"SρEd1OA.YB>XL'@eeZt$:k7Nӵ>dȐ.ϭ477o_xXxrJ$&󄵶VDYkt1`tyѶ8p&IPC^dcA8^Ruz1f+`Ctok׎ݻCޘ0RU]T5-"a8cAP&>SԔ\..]LR1~Al]D YaFι9Q][/0qYC__cƌ&QXT\.f0^\z7$sc x<c477R;ϊH.JEQ6,[̔x_UURrsl((..6Zo/ڵkgTU'|,Z\.GϞ=MEg $˽{(0 o DhcdY-..kmDι_a.#gy[ PZ}x-cx44K³ "ߔ VFjb*^[ u^zo9˖-cҤIۗnݺ}&3i߷x< "xx!qw5jWY-_gVX0 Q;ADƊȯT[c&M9C~ ,N _p'=V"&t:M_^c ^z)[o5wy'T 6؀_~Ç3x`fΜYh=ѣWxڴi}L6mdz2t$aa0 p'N$a!EU5ιsca DT*Ŵiʀ͉_ F RTT)¥^Z%"̞=:.C9mkmA,.YFvJ**΄lWU.H]51&^uU嗿%W^y%tОy?gEDH_i!MJVKdzZ lZ8SDR*k'Ƙ yյv=E4R`n\JxqI_E+Mr)|lAĉ9OJ.:묕*<䓜wy<+զ&w_ 9s袋4h^{-qBBs(///Z?-I[}]]qR[[Kuu59(rEEEpvqqq7c#ι*k 1UTTv=7Gd$3W+HӀ$+;zɾ ^ЁI=^\akdRoܹt9sPUUlȑ#)**Og}6]v娣Z,@&a=`s=\{h4< *oVZ'|G8N V x<F*f&;40Z|WSS;Z*oԢhHE_).WZcË6L6:?~<_|1m;w988c=0`uQ_&vСC9r$W]uU!'gϞ 2KIuϟ@~/:I\jU[% x<ϏѣGS]]0 8 1ncLdz0tP鴨ZkO 5jƨQQ_WjTTT`AdY#o$n7pwuZ,Y"vm7n6qѳgO~1m4^2 ˙7oWf[M\TOKUUK.塇/FDK1igiiWTz<ϺͨQ99w*9&5R)aÆ|r&Nx<_1(֡MMMM555B䣚7S}j HD,@ss3tH:0 ijj8Z{+ 7 X9}h߾=90WMM ݻwgȑTUU裏EwzqڣG\Ѵip #eD<gݦ Dc 
`0FDRcʌ g/0=ȶl PU@&˹  @JJJ\EL&c0tAJL*rIގqVf{| l&.~>~̞= 7O<ȑ#裏xxWZNO? Gz:|,f'!";nry<gcĉlT:DUU>(^x<5 p9km\sR$ƅ\.7fCL&p\Nk`f2-ιsC9W\\}{;DaZfMr 0N:믿_~P䭷ޢ?ɓeMVuSmyĈ8+y饗 VU-T-ܒÇs72et!2J`>`N;C6A[8=geĉMcAhkEVU0 )..YKE"&"&" @ x0$"J4J/"ȵ"2JD^M}Ύn$b.~IiwtޝSN9wNSS444O_v1yd>\Wro9\r%tؑC=XKlUW]E߿?W]u*?0+g뮣a2{l^z%{9}Y{9ƌÇ~|xƌCEIɪ&1\.&W){pέ4x<w4:֏hkzk"rssEQt8YDRM9vQor܆}  l."g{O=e$-Inݺq7r 7pgsGУGN#<@(++[jNW*+ʊr!%S_j1[n%SLaРA\|ߟ}rGҡC^x;uƤI{94hUUUxz+xcDUeM7_weҥNt vۍfڵkǛoIee%n-n!)O… 8cWjWWUѣFbƌ̞=={һwoQlȑZK2dӦMc[=dzv0f Bc,r/06 7'$sLf{/y"Œ38sQUÀ }d[?0gq 6B[T`\-EY1n-rEٌڰ-i)[ƒ&a<_ )M~),U{0 9W*"a85ÁTu7y8kmbweݬVNVWBdl6 0t|6e\;4i˗/ZKǎEDNvynGBRDzq*\.0\PDn_[FQt 0ذaayyڬl2X/ҵksGyN8s9'PKfYsKZ\$ZʭUDС\|ڞ8-؅@"^[&l0V2oyuP[e㫎gUٲk8Z:YD &!ٓ"rJ.n_.d20o3(X@6&{& /.=?ͻo`5l7O7dܸqvI& C뜳9vqnfɅTʽ{ׯi2tPȏE BU}@U?6$"۩"2QUj$ORW-ԝsscOxb:v1ZJ,X?+CMZz7vi,]X4i$N?tj 54QVVƈ#_OYYYaݷ~I&1bjkk dᥗ^⢋.O?EUhm/8ùK>}:cmn`M6)#G?e˖\s5k׿ߧO>tM7<@_ts7H:裏'NcO>aĈz衜{W*g&R"bDd^\U%" 1Ǯc w׫`tI SծEp닋C ͛Y^\zɨwi{t%qg8l t2eJk73S3---5"JKKy׼S`EQa^"r pU˜sזL&\huuu-YԴ'xC9sWop 70tP&OLee2&"^zq뭷rҷo߂Xzx۷/"_~s1q<}DQD9Jx~'MĞ{6l_g}u]ǁȔ)SxilldҤIql&}WWE6DU]*(+"?[EэcƌcfZkwXW5ZLgqI?)T*u;/Z-ҳFXGϚ ꛹\ΨO!G"3(tR ~SMM }g3 ".]<@YY<<m|{tpG[rg3}t6tSnv-Zĉfmr-$~_ݢ 'h/.=Ϗ:9@qs{A***\QQYr aSSӠ ^'4q^zVeԨQ 2T*(̒%K pGDnc^U ~s1\΅azFD^aHss3MMM455l2GGܹs=z4A0k,Ν[Xnu477ӹsgȑ#+(**b„ ,]#F׶Zk9>W^7Eq '[o+_|q{l6ADYY \|,r v3g䭷ޢp-bM7eԩу-آ o?_},Y;@;w.ϧ]vlV<ׯΤdۺLMMMˏRDss1rgUUu= 2 QD^P43EHˍ 2Ćaё}ťRVV1ƈsnϲl޽uo8tΜ9ܹsK1w9ɓ'w[0=_"q<#)2T{6lpU=18猪0 BuDytԉ0 O 9S[?CN>dy***(-- Iɾ~ӟO2ay衇ӧz\466SYY}\w[lA޽yW19g}6?/"< /EEE?m~U\`҉G444\0,_qsV7nW~w""@U׋g'K\EEO=TkBex<IqιN%%%pRRƿ+u5]wgill0\g3]xFMqq1gFUVU5;u@VDGr9d26 CA "PSSS$B0>&믧O?U% C/_SO=ũJUUUa/JA?N>dFw܁s:gbeZhw}^{-~ [xꩧ8p G}tA=RնmU4seԩr):iҤw 7d455Ys+X v[-[ƆnfmƖ[nɦnJҥj'wYRRl  t6GK qCUKJJ8v|-Ə_(WaL&c@DT|UKU}D0 _LӃ9wuٸnҾ}{*AZSSceذatA_k+"z 7+ 7PX_,^"~핶LW?>|8z9qRc;o=K.aذaѣݺu^cƌaȸq V$N49UܲVf"D_z%rO(dY.r,s /,9vmǀo~{Gd2N=Tr˂GCCιR`; Ƿv4^\z<3g/^ɓ' 咸L@w盐׭sN1x7pM`w{O/~19G6Nc (2?-UJUZkN=TT2>#<\.ZK.K.!ɰ۳&[o#8#8phY崩Ν;s2azMyGygӧg}6W_}5#FSOka 
7dРAlvaU;޽{۳[ӽ{wnvR{<_MR39V:7q=O2ya.1PZZTU'E~UF=~Sߑ|[d?CK.l2+aTUR "rp>/"/Yko֎1e'-[F}}}Imm-B~e""wuWpwvС+%! x'2`BUwqo:uOg=w! Co?tO?/FmD>}xg 89;01A`{ 43i[os=[o]cema]w-̫^[ntAL2{7믿>[m#FCtؑg}޽{P\\\8/{w!~;>`MFii)?ڵcĉlr-\~L2}ҳgOz|\覛n/̤I9s&nlzYgquϻUB'EEdCSTNDLE^2^\z~S;]JbŌ61 D9"s9P]]$|2眊H{ce@9wv1T*eTUs]vzAx͍7ޘ7x߫*뭷={,wѷo_ XҥK2 mYst:ͤI BsxˤڵR|kEE{JuVɾZ;WYYRmMEUy 厒ٳ'ÇÔ)S8s9S %D&MĜ9sV*ޛ{7tS6tw_W;v,555M203f\.;UH"NYx1{\.ws:}ttRy(! nݺQZZQy8zes""4[k!"CD`7JD.|d^DQEEE9'@*fl8 +"/1Ouuu ЖKEA\d.>ŦsX D`FQD:cqzD(%("NS[[?&L9 *!l弖^uުmM߲ƘyIKH9c {f1P^Gew'? @<&mhy=+3hРBxdex0M5jzٲe ;w$!!,0 q\Av% ~[!|>i^wZ ht 4tB.AHdBHdB<&O>_4kǗ.IpZϳ M|Tz:ȄXAKFsi޼9+V0iFgoVV-z]zl&&&rAKvv6z_sBٳ'-%$DTNN6mZUoR!ht.B+醶 1dթS'_E)u׎Y^b UϠ,8`2vS֔>JќzENx=%{!//)z󑕕aB@kԨA#%OlߤI:uDʕYvv !*$%%IbՀ @M!De*GkJ#ap]L4mr;rAdfֿKd ͉O>v;WҙbZ\j4R$%%М~n=ߏinaeY%srrRjܸ1}!??BTjׯ2(4hx*TvB6& JB[D`e˖dB)?c7n\bה3Nń%O^رcUT!66VN" Z=Z|9e )((VBMMMTh* i,;@RPCH0\yyyEGG? < 9d$9ҢhYڵkO_p-]B gI[,jϋ|?~<3g(RHm˞_.IbS\eYܹӉKɛCff&5k,HIώ92z kː h4'AyKfvZ -"** @[0!v-8=h{< 6DAJ`0GLL ir~ᇆ@-!DKuOtA !hPaB@O)u0fgg *_ͽzbbbd8N Bv@Q) b%뫯r3"+VW2RPe9"畴-y-O>\.,reަS<1RhVҫk:VҒ]HQh&>(999葄[+jy'Rj̧W^̝;yڣ{qD"^/=0ez0((((޽%u%&&ƈ.М!hѢ۷yu5Q4M0Tff TX.j[bVr[$v]+W.PJm6,Ylt5PR%233 J)tPg{?lUJ-B| ؿ欬,YF ,"..ΨY B[Z-[䙦?L*UhѢ@4j׮޽{ٴi:u9ۮXzѢE Ǫ~mXd J)ZjE 4 !زe u֥I&x<aŲeʢAs9eY߿^x˲رcuvS^=nʎ;ڵ#pW^۩^:-[$:: u1~x:KBB0HMM_~F4o\?+^~lJ?_Qh ʕ+D[=|jA#.Ռ x>n7YYYԯ_'rE-ȑ#yG1 ;v'%%`0H~~>{fʔ)TZ5kzoߞM6qA9ϟԼݾ};Æ 㧟~4Mҥ ӧOJ*!6m~;@DRSSi۶-1 4`,^5jpUW1}tky{-ZvZRSSkXx1*U"##VZТE /^СC |W޽3g{nnvK8pguow'O'3dڵEd>R*V {hZ\j4BLL WZ5xW֭[i2(ǙOGt/^ԩSyٲe Y85e2v՛3g ;n=Wx˲(#&&FabR,|E@KBul!Yl [2޽{WNN]65kteiIHHPRJ,˲|>@//c^|ܹ_q T@@i !Er[D0`}wtM 8_~5krJˇ~ȻKcС|w|nݚd 3<øqX|9;tޝ~#F'pJǎzТE ḳ>ʄ HOOn[nL:(,YBy7yyi׮7x#O>$a0~xOJJ _~%m۶ছnb$''s%0{lѣ޽;W^y%ƍc4i҄.|8]v3f`ܸq{X;edI+&RJE ^k;R2&&q]S,8x 2//O\.G̝$ s-"ޓ|R2**ʒR[&c™3gd~:tUV >ÇtRȶmƍ}L:իWӪU+JҜ;wtׯCeĈβg},ҨT~A1j(}Y&L@~~>yyyԮ]Xtܙݻw;q۷oBv?W_3@)E gaq 4֭[aٳ5jиqcn7&MbÆ ҡCn&~a6nyǚ5k0M_~ƍ됁c$"VK @eJ){?' 
QSxO(˲z<3F ]&@sz UV[tޝI&qw:L3UDi*j.]tqqqvOvsS@P)yJJ5B ;4MTI˲PJ)˲r\H)III9׷x?:9M4CE4k,tRa7 pӾ}{ .w}-[ҪU"Gq.\Hf۷/@? @xS>999@Hh,^999ԩSaÆ1aRSS{ڵ+UT!rJ7o@nn./o3Έ,cC 7o/"\.֭[ǀPFN:9q)))t҅VZ ke͂ hܸ1~_Z#f͊,C"<=<DM}G|toJ)Pb  ԩݱcGh:u$ TP sթS'5jٹs'"d64[ !ѻwoeNv&L)%?RJ~ R \WXXxaI@m`!1Rj"ݶ4员hV`0,˒iZ!$ /¹ - @˖-e׮]|ζ,];m[EXv-#Fp1O?y, ;܇v/ kF߾}y۷/߶)ٸq##119ر@oO͚5Khܸ1;?]vѴiS tϭ]H}f͚@֭3Rg=6zYZxN<Ģ~*deei]VJ)njizislIC֭ٲe 重kJ2P~2e;UvLRJ&%%bI! =`DEE)˲B5Ǯ N/!Ğ_ `޽SbEC)%‰}d H1 CJ [q\ϘI&MXt)gi9` `0!774_M6EXٴi~1c0dȐrsWb .BFʕ+RpB˗8qnpw2p@OXa?ƍƍ"1#5[jժX;tl96ƒ@)U/<{]xː!b iK,˚^唋 q( LQUJiOFJY̕R dJ)S1謔J 'yȮPP !]III _)5~o 0/Ai{y4h~;wfԨQq9Ic `0nG{W^?d>3F1Xf #FrNZjqgl2FE~B~JvxgHLLdɒ% 8,iӆj*j֬Y:۴iSVʸq83ٱcs!::˲:u*>sƍnUV1={r 70l0nʣ>JΝ{311+O|Q$NeH&Z\\,z0&LPqnM4jԨvRQJTNNR^Poݺ{N^mn˲n$dnk4M)b !0 +)!!.,|TaopeY+xE"9 TWJxg})etmۖ3j(/^L||<{fю@*,,䪫: jx;G}DBB.^z)vmNLa8p͛Ӯ];-oBeH(Z\dR\%vR^jI-\?mR0 WBBB{A!FQ^r+0mBO~| -,5MB),gf9!DnBB4@ rBtB !R 4)^wa9ᬞ0 ~.TW92a8.S$-6KKezfi;{9r?vȳ,5r?m [FeY|>|>_V /pʐ(baє e/p0t2l8`Oxv }Yʲӧh4eJ?<֮]ZliWeY2..GNNNIIDAT{n#x^)8HhPqxߞH lb[Pm#mw'RSI.*{%1H^zE^.Zϛ7@N~8wb߾}nݺh_vv7PJ+uB`p/ڕJL~"m˱ uC=ZT!)chqY(NkǎEwutJNNbKhN]tёa?Ww5kСCi9ީQR`0( ø*l=OPJYB}4MG!r׮]ԪULbetM"h4iG߾}ׯ>(C3vI/AϷۥ->a󑐐~`R"?ZBeaT [h4GEAZbf@RϏ2\j4FSѣ^) 4m'D#lP|JF X \ORjI!3P˲|BDB[ < ̬"HUJ BLB,Rvr+F9v%Ti1f0t22c/u"3j42}aС 6~Brp&Bۅ?*~UJ\J%Bԝ-ZtYrexʕ !:Jyyy U&rrr Y)%])aݫzc=%Bлwo B"up ťF)Dfu4SViӦG)%-˒RJo\\\'!čBٻ7o)6 PBdӅwAll,͛77P@@UZՈrssB#T8[~` 85`xofĉE=X`#F_IJ,Lӌp֍]raEv}[=i7ҥ7r_/:nok֔/Ba [e!Iq=@ȅl 0ɲin@dggowRRh v$cf|ۨٳgh4e$RJU O)J׸,{ކF)gf߾}TR)m?3?CumJ*)4ල?ۗ}Dγ]zpIiNmuF\\{ASYYY'&&aRf*&K1RDEEY}q=r-D~ڃQu[e޽III)fj"AFܓɠAfԩ@Ȳg[z-իGƍ;Ny+h۶-C !%%!'|‹/Hff&=z`|zBٓ_~{:233#F~z&NHNطopSM٥o߾ۗexz}lZ!+9*Z0”Rnx<=1gQє5.U @#).{M ˲SPP@Fxwh{1d @gԨQՋ;wryl2kX,.\s=ǵ^L||<_|zbݺueYۗoMyر#?# ǏOnX~=իW{nW^a͚5dggӫW/%..z .[!qo0zh|Is՜:A2.B!ZO_CeQtRSJ*4edƌ̛7O[!5MD!$.FnEKFSnFVZ͛q駟5\CNN3d k֭/`8iР}ӧOgܹ;VҴ4vOMF9"RJX;.͑Rє{ //͛SV-{=gٻOFwĥa|W|6mľ}hР[nug ۝}n Fڵk}v{1ٴiyyyԯ_;wm6O!!!aÆHϙ3듔Daa!4jԈA1}tkiӦ~]j ӷo_ʢaÆX%R|>~zz`~~~ 
5BBtR֮]{Az&Ҵ,˱R233O?/B[)5)A,aT#[~E׸,>B֜TlKu]|O<={HIIaҤI@L.uqF kOիWd[8$z@ Evvv}hJ>}8ea‰xB٤I.];;4ugJsT[ofn̙3p9P;n4MƍǷ~Kll,YYYTX;p*U"p81gy&իWgʔ)r-oꫯCϛ7>q۷/Gu =>_5?<Æ I&ԭ[|ΓO>qz\}J*,Zh pVbILL$>>^ci0 ~ QJnݺyB@2[_ !椤,;vۖe]vq<38 sN(,,/(Kh4'pB *-,&Z\jwVCRVf0 d rݦi] %mBAC 5}>#<qqq̜98 p\K_{5=RJ,\)S믿rM7qSJTO?Dll,^{--[t!?3{||߿^zk,ׯρ\{t҅?˲h$nb&\s _~%'+F$&&ӪU+gTt֭111v۽UA!R , *$*ΰ,p-aRRgϞ7ocɜ@nn;33^z,Y"7oެ44鄰,K)L4ʲD(Z\j*EiQ@kTA`ai.0MSJ) )eYVE9.0"OYC)^)UU),R7C=d+|lSlAӵkuys)SF*2OJɠA+!YgU並irWsI)nsM6\_v{{M޽>YVczoV[H-eVVjҤ ~߮R6 Tg|:dXF)JvVJ\h1|p pEAќZDOe ,z@a`~HWJ} !XuiolJn B#5)Bi$BCH3MRj~h % ږ#ͳFv 0 'm g-r?ֲۗ,,b ^x *fϞoͳ>KttT$G;DiGx<pݤvڱsN#\Dk׎|LOO>S#m۶MTTaMLLlPXXXSFJy>@ќ2`!X-i:t`ʕRJAr޽{U8~ilM%~*7VJE [y.R w_ΖR6Msmff+>>C!x˲~# x< Ø*Zi)s@6СZj;UaC}ΚSMIIEwN߱KiκGaÆ;ѣy矹Wؒ8x Yb `֭Wcm,nLL0MS֮]|U4ŋѣ+jzOՌ3JlC~̚53ƎۨaÆ޽᧟~z_tttGT5!Dm.#_MJout~*\.A||X\'PJx2e"Mvs=Sn`HД},v}vBfLfMe-.5˲,45s/aXRJ ^)%Br-.5˲[[hU+` I\"ٸn|>>q)4M,Kq֯O?-{-\.G߯_X!DR*HB $f͚JIIIɸ\QQQ"DAB\w?*ևZ7\rϓO>ɴiӈB ®;xPJI/KjyϠ9uPUV]U:=:8j(|hnpWݦi*((D ^z!;S;e͚Z\jW;L-0 .O)9Ơ,˒.˰,KA%y/4A)%iJgeYeggW 2#x*}\E.˵Kq Ba2I=gnze-.5Eؽ[nR !xaw:B999 !n7 Kӄj]TÝJ@' H,((F;RRR5k۷o'++ !5k֤ytޝN:v?2rH~?@ʕ+Sn]ڶmKݩ[nD4ażyOIKKC)Ź˵^Ki\o/__ZlYD*x#LӤQFԩS /z \.>3^z%x Zhq(>Y"'))HY:feeyfj׮MժUH)߰,+ءC-D^^j߾=J)x V\iDT9kS\r%Ҿ\/ȑ#%RSfPJYe Li493"*T>na'_Æ < <yn~ ZeYXS~i }rY);ٚS] R\7RCBqBX)0JB{^GJ>\@Bv˲fW|ʚlVi_M'%%dzm6|>yyyY^{nݺ1x`8o<ϟϮ]ؿ?g4mڔ>!@0Xp!M4kaʕmڴitؑGy) 799_|E1{l"YYY曬ZtvҥKy7ҥ ';;n-bTPTH (((p;vU'//Olݺ<묳x뭷zHJJIII~@)%].+ ZVnnաC\G KMD곙}۷yf̦M jԨQuV`s#uVjԨQiӦyfo^M/mGqjXEAA ˪(GHh&r9f.ul޽ǦbNJ)#cՄRJa)DAA6s9p9,vQ& !&6 tŊԫW#;7chn:z=C~HHH`ժU$%%ѰaC;7n쬟M>}=z4mڴaРAEv;`ذaaǀݻw˓O>ɨQn,bԨQ= &0bRԫW5j8*MgCI;^Զ-v5 CժUZr%W_}l۶mh; 81c0fq"1i%:۝Ɯr! 
èXRʬ;A?0Z\j^zنRJJt!G(Y2_͚5RJkR*k>NNyUbE#,8 tկ_?~YI]RlFcҥK:t(iRXXiXe曄G ,[͛Yaa!-Z`ذaTX֬Y@vso9ٰaCX'[\ڿ9h4FSJ p{еO 37))))a=bƵxml1xb4h@vv6NiӦ1fի_-[hҤ pH~~>'OfΜ9ӇX"\] my<{V#Fǟ~NPUVex0`clӦӖҦr0Fv{¾} Rє{lw믿ީxxk9o`ܸq|GiZZofܹDJ<]:verr2se֬YN:ӳgOL$77LڷoAJIff&7o~̞=],۷(rdFh4G#tW=CSSFRє[l?LBBr͜;w.cǎeƌ 8`0ر#TJkذa߿n7jΣuN)%ժU;b e{JKiu,?nݻw~V^ 9cd;]#ƌC׮]C1bBy T͎ h޼9ذaEDQIbd?h4T (UӜp &5'h5۶mB N۲h&ꪫȠRJ_8wPbdq^{5y饗3fcl޼YV)ao믿瞣I&X^[|vؑ * M8p ^O\SSSIHHs|FєCzEDtWAxz+hNZ\j4rz}qmQR%PT[M8lbcc4h}9l3f ?0 4lؐuEK"fǎTT鰤?kfӦMN&oOD}2|p FsBHBq• !0MSAڵkWh4'$ @oa'$EhJ$'')hѯ_?R0 7x!P`FA^^111$+e -.5'bp ))ID~/9]4ӘvyR!}r J$ѷPKSD-di0999x{ה"i^^B`Btť愐'::`08hnBnS [3Rʛ1{lhN#"5d "3*R9MIJJ2 Je «] ׀這lA5'~Br݇9:nOA"2 RRJ) mۂwv44`0;#C .U-.5Jx-V|>VE+!W@)v(c)CdeeRnp;}lq6[BMԜ0<U!\ l/*X *,,T:FS>ޗ377]&0&J)S)%[lINhJB5qWݯ"lOj Wd…kArUJK+66)-.5'pGe\c]JEvnFDZo}M-zqiFEE !mZ[nŲ,:w\h4ebqk:}RZ]pX{f@ PMהƗ_~I0B$T"$DVݻwGBB^nOx<A7gfPP2KfiF~ O\c]^N6:uv5M4uV ~NXaݻŋZ#X KFzP5"{jժ(RJ-\ki^x|>zem,];;*̛7XrrrDZ@߿qɒ%F\\]<"εe˖Yg?^&Nh \D˻Ev[zǂp\bל BM}Wjy普?`s hGO%^z̝;ܶm[i7Ws |4j޽{ݻ !'?Z?ݰa_HD&ң'-.K}Z dgg3`v'|V)JNHHիYn J' #W^ _B\?iҤ:uGi;B 7taYU+jsϕ$&&oB|w^=\m϶lT-!E9UJY^"1 UPqժUBp{xb-zBawNJ)BB, BJi +xwO>]oު]6e߸DRsVa)󑝝MyKJ4b޽,X\a(6!LTyT!S| 8ӲtxphHƌ3jU̺uJMaacǎ=ϮpK:eϧ"99a˲kVX%,*@ P.-ZJ) `۶m̘1իWw4 c"B_&R& ҲRJ7E)Cr6m-߸q~?09QQQ] ! !%4#۴0d)e*UXC -[TH){Bs|DU(!u4u2SrkZ۞?\.^/999cYYY̛79sTϞ=r<_*hqy-dȑz]]@;c?0w__aNRLiRJWqu+4iF³7^0M&OFЀ~qkn@vgΜQFy EԔoo'99Y^y܇ X.4=NJJyn6mx'\@iJ=ڴi5o޼>o@AAlUmL(w"0 ֬YjӦMaFFƵBDTҏ?[,V0`E958n1qA/F"eFsrQJd;VvmWRJ -.Oc{խ[70 3\ۦ9ؿԼyRK~hBQxa‹\BRJTRاO1oeEK)-@hXFS0Mu؟iR꘭z;Vr]j{udEIw%oocq*kgHˏveMyA[.5oһwoleE?gk !Ɯ9stsFS&R2~xl† +f͚5i޼9]vM6%&"ms\;ҲeKn#L/d޼y믿RB4h@V袋UVFJaZz;O|Ō3W 6 /YfEs]wQV-~?mFs*ťFM!C߉BEтPKq!q)zMJJJiF9ER &zj4h[2sLz!zɴiӈw,pRJL,"*#SO+ITiڵkY`;v$!! 
0uThժcƌw~Ldɒ%Kԯ_(RSSY|9cǎRJ̘1]:H7d|\}4iq5 ۷3g*VHbb"BV^Mzz: 6z +OIIyG#45F`PABJgdT@Z*U/,k4 3f̠Ek86q&\.<#jժżyhѢW^ye u&LZj\{H)⡇f˖-4j8$y&NH~~>@7rAF-\WGSRsƋ," x{T)]G4II',u-JcOͤK uܥF)u ??s9#77}>c}tB0tÎ;QFMbb"5"55}T˜g\2_=o6 dƌ7k;K _>p5uԩTX욟ϐ!CꪫhԨ۷/1ڵd|>}uʧdddGܬY3W@J馛ر#-ZKMFKqYê$"G:MY"P}CbS "4930`SWs,^ロ˗;[l`|'i};7oͷ~˞={E8sҶLFִOqݜ{EKŊ$:QQQ1d ڷoS$y墋.rRo>z-FQxذa <عm&77[RXXc!vT\eҷo_.R.,X3<Ra)̞=yHNN.&k4MB-8Iwȳ>0|.n7|gEaK51?|7oɓbTZ0aV|[{YdbGy ORZ"GGGĶܲe +Vtֵţvĉnݚ3g-<{= ܹs˜v{ԶYr5 -.5nj-֭[Gǎ]t֍cҴiSg3hk d4ie[p!Bn1vvH ahh4?@dFJe]y'ի:t3B.]eFeYtRzUq@ 4M.L,q$~Go>ctޝv_c…ԭ[ W_Zj\z@Hߟ+{w9syf^{5 dٵ-;d„ \ve-(vu)MyBKqc=\~*Uw^G>bccVZmS< aTRYnmmQfM{z5S=Du^o嬳BeΜ9O?cz-ZT6ߟJ*GFNJTTÆ Й75FTj&T]j ;[蕴O[I)hԨ3f`ڴiԨQ˲Y-)AJ8F<+n1lo캕\oTDy4Ǎ|ꩧ2er9p5j@JI۶m9pB̤RJ߿8rLm;v,ժU#0uTjQNh4ͱg"% \ ŷZNzСC 82MsY#=튯Wє-LvSXXܹs9sXt)M4qFL>cǒeY;v,1{/)))ddd0|p@?5F9B0b.;ќ`qc #G2k,>s,X?3{\~$$$0n8f̘'|dy;K欳΢{;@(yfh׮]6i4F9u]UXIFsj5[n=uaذa:u)c֬Yݛs9هαtMZUV7CKFhN RA'4?uX@5CKqc eooƘ1crrrR@~~><E9Pz1x`<W]uUm5Fє}\Mަ}E^=8)hq9f`ʕB|{ZV-gR~ѯ_"ԩ?̳c(8 8 g] /L&Mڵ+vwh4$Y2HqW:RJGdM6QZ5j֬xMjj*aиqcg*eY/^ n~vNL,RGnc땄єg7va AqoڞofQ>{";;˗믳d-ZmRh4_jW&p9жm"Ͳ,֯_ʕ+iڴ):t`ڵ\0 4ټy3˖-ò,=\ZhY,]Ν;Xp!~Ν;SV-'fƍTVJ*b ~76mJ֭2--Kb&ZQFyK)hذ!*T`…TX6mMnn.}t֍*U`Y/nݺ4hРU?`ݺuhтիMy@Kq?%ًJm6z>ct颅Fh4'[2l0ONz,4ԩSuWLIKK;|\߿4 }ǫJՑRw^/Ǐ?w /?8 y嗹[hڴ)W^y%~-RJ8/̿/F믿N5ddd0vX~vEn4hvb˖-ݻ믿{ɮ]hР?\s5wyL>ir}'u"W)/޹bVZ}v2dFќ$lɓ'3m4f͚֭[IMMeҤI,\ 裏̂ ضm۶mcŊ{\z^!TW_{aݤr 7+fl@&Mxw1bO?4;;:ubdff֭[Yh{.vb4oޜW^y`0ix㍼L<;ven6x'qЊ+رc?x2e 桇"55wyM6W_p;.xb>c^|E֭k{j%֔y"-ZXj4FsEfff&zx_=wq{/@y|Mزe 8QFaiӆT\.k׮壏>b\H)IHHॗ^B~ cƌcǎ䂗^z`iӦEOÆ ٷowc=F&M h#-V)D`uL*P~k4Fs؃Æ c„ \}|y\r%{9%%B T\6m!+dll,ժUs70sLp\ݻ4AvZ $L5\GJI6m8s$ylWYO>)SG9}˲R:bzÆ DEEqe9&~g\'ik^pƍ\2gf…̝;3h#KIe'9QK}ڵSrҥ!UyfڵkOΝxHfʹ;ܣGO3lahה "cqW(*,sssٳ'֭sGF&ݱ;6RÄ]u6#G' E;9siN_n'|>L-111_TThETTpar.=U0wRzYTaa2~+33S\.<~(؟|>1p@.2'FSݵkrsW0k,fϞMٵkcջꪫp\ۗ_k_|0gP2}u];E]/BLL ӦMcƍ\.]Zj1h {9u]G쮶۪syM7K.a֬Y}̝;*Ul^;wB,Zh3lْUVrxG5\F(a>sevK`--l۶E7rj:1mPBu'5Q&!!uԡAN,}YYYvlI߾}#/RʊBOII_~"rgॗ^"//Zr%`PȓJ)#OdˤiRA04 ÐJ) g>lT%)) !t\$%%\ڗSєw7H xꩧ$9g޼y)%}aҥ<̘1s=C䈳Zj1p@ԩ7L 7`ƍ\wu_bь3ŋSfM 
.X"_~%wu/2a!ѣphй^zs=C-;,tK/XG֭ː!C^z¥^X38_XE'xxmԜux b*p 0ڶm[a@tt=OFz`aSO=رcyYt)J);v,ۗڵk3q"~s?$//h."ON&Mh۶-W^y%?0;5j#))ϊ+x_,U%\£>JZZ*4k֌Ӻu"QaOtt4wy'3g> ~!Z;`ĉL>~4gx L+Wڵkg*C9?90!aY0RT&J)U4͉G)u?0RJ)Ti{MJ)v|r B  ,R> BB DSDgUJ-B`0l9:+VZ+im۶-6A PUTo߾ڵZlB=XzLW|Bngy#GSf#+)Xr<m?pllڵvd=]F_y'/__J&ISm<+V ϮN!ڶm[&ڮm2##~/:u0ydnf&MĿ/'3ۤIo6v;K/sw0|p,">>)8'Mҥ xi֬^{-_{~^x~+Wr-8DJrݍ{HJڏ]v$2T`-SҼm^jݻw'55I&9i4-4~HqaIIIz͛,^{-nyqL2~w^u[%u$l7a_ѣGs饗 /~| 2DA< )e_y(B !tj׮ܺuR{,k4Ph[>Ým%+p Axasg qh@P*Z !V!^,jh4۝xH_ܹsڵ+ihBq?8RwQ-mlwƍVZ|W4o\jN+to4ݻw7nHFڵ+~ d{|~~>ڵ3pkAk׮iӦ4h˲(,,[nz$%%X"{lx{y;vС+VpF_GxBЬY@!J@+> р_, T6MpTJI)euRj $ !>0 Sn05;/:K aM@meYV%{\hN$i=K/裏_^\s K,aӦMjJ״ԜvhqyaB۲5jBGeP۰a?wN1c*9 Y&p&--͛;v6_u]ťժUcݎ՜^(&eY W@p6d'~i>r6 kmUI)-! ]9%uTX *4"Fє!\.W_}K۶mKǎ|G1h3?p@"o'pA{nfg?d %%!!WbEÖR8=s޷op(vv6֜^J$0 \oYVR*̾9J~eZUp3a@y4o穧OX{,\{ e?3裏8蠃LüZ \uRI)_R 8> Z IBC[* a*^8AY q9s&ݺun@)EUUGy$ bȐ!+ }nX=z4Zkl:iqR6 !D#CkG!Λ/~p<{ٲYR[Exr =fQJ(|EJ)]J)(mJZ\"i}V[FZkQJ)l N4gkq3+be'ucرaN Y>%~;3f{Ci͊ ƌn0dh4J^Ab 2KKKCc+\ {wit>O8 $-h4ڤƔ R !Z[#Z'xԶQCsx*Bq!ZO!l8 (Rw(|e`s\ }َ>ï8i3gtz`9ϥKäҹ\>ae'}T`:/<^z)1~:r29,\H cx lPku:u]/_8f3m=zP\Qoͧ3e!NG},Ji?Z$88W'RY_/$5- _ 0uԎ>ja%K 6m:\.2^:7_6F۶gj;躮q Tw}C+^>Hlf~L0a0Oe0}lU]^ /_ٹZpBw477bH$T6EA,2tʶmggΜ9HJu]w`"p$DљXlg᏷<+olBU)YaX)TJ kU4EJ[ZLa&I)'+vijr6&o`ƒoQ]|Ŷ"y ] #.;xvbvZf>7n\jz +< =z !E~2q$9sxrÇ]cD jvڕNp=ZkT6}x xIT17gyF>_˧WJϼyc=;Nj6) Ha5< Pxbnv[![׎ZJqىXBUc39h4yAẮ@|=H)Z7440f!A|cFhu&AkM$i "N$n*BLn筼 8 ƌϕv]18#&N'O{)eҥKk^k˻>0`L;_׵%.P `yml]_u&߿ !İaLc`(l|aÆ/MuBJ9])nݺw]mf:Z/E"<"peEEEr9\]A *|L>]6445[ Mahnj3"jzS`c`PGoE8#t׏F6ٓN:iܹsK"L[hwC RZ!t]W'm 7[:⭷nBL/_ωy^ yDZxuz뭧|O̙34N[ú/V)5\! 
`mr uJ=,˺q>x`QS?FBeY7y.]K.5 P ,Zeٞe]mNR,]*ZZZtґJpܭa~ǧ9c% ^zw߽a1cHDy'w=AJy+]GamĶmѣctCCh۶׫j'K)%p:n!F\y4 xGG.\!Ďv^RQ v#Hbg|WreYY۶b|h,Q)biiܺ:t`a9iZ+um/y7(r^[*(++B:#***nUAy˦ Oe2cSNB0>M7>`D"X:y^ mp]תl?JJJ?ח˥a=Aΰa>OE&B֪V#.;dD>b<|W8`WMxGu~(zO&sa+!Hl_Rr <˲(**u]D"(eut9w… yYBRoq[fUdw䭷"V}L& \_Ɨ &555wx<_|ĉ"SLq:~ïA{ߗVJk8]6ݭ@XH:G K/Zw]wKCCCꫯRYYi6A!ʲ"aRJzm{VÈwtI\.,X aYցZ9c v؁w}7N)Bg0uL6zG}b̙owyakXoPJ)qDQl^%q6io/gCdyy>{O?y ڠ=REEEq`Ah ?@wZx1J]bJ\˲֫q˅Qz fxO ?3f޽{R^޽1c7b3ݰ^y]vŶmkvu׎ 1ҰXIkh&袵q#:)F ln躭N^iam&* ?1:Ӣ7 ΛoF ΋ehúven_!9!x}qi0:{5J)leh4i'BKfGWGY[aY~f9C'kx+8XFMii)䤞6mZGbĥ`y[~A$bhii ["'8: <$4٬[H$bD`YL6-P6Vj,// GNcĥ`uy^!NrR \WWGmm:BR`8 ,MMM&%H$Bqq1UUUac`09D"Duuu|",YR8\.=ϣ˲fm#yZJI.ò,lۖZkVe2DA:Zdzb7orbРAn:Hp]`,<駟k0qi0: ҥK Bm"Tx\널\J)$HH$dq#u].]Jmm-=zO>]-+~Н|l]__& B)4, Q Ն>7 k]w~5x\od2iҰXE.]p D"|dَ`=z4RJk};8W^r|u7`& x$---TWW_BeFQJMQJZa#`>;{'*++B5ҥ \p`X : {Wk! L^"W.A$x/x$qx9B1h'i !^Z,K|78&x 0ż U_||ꩧ: k-`0q$RJJݥm:'EEEhIӿ|ca $ZZ+Pm$C~@ߚ:Ak=ݶm< /g~9eR~>˜IDATܼG* !NQJo|0>(vB|wX LRcǎ5iK v˥`XuQb…D"X!fq+ƶm`XOBROZ%!v/Zkz~YJiQh|'gR'|`G^uuu/-C]WWGEE.xWAò,h(띥veBX% 8:g7eʔ7G]d鶴8%%%v@Ѕ˙gy :,r?f !2@_~͚5Xk~HUUUE" ~̎>ъ3 k%B umۇڶ8BHtM7` bV~KGW&J&0|fϞ_̝wމ8H)I$뼅ӧ/k@}?fBFg555oh3Zk(.//ﮔ_j~XBhdQJ} j>@-x!N$it޽i[͟?H$\UUܹsE޽J)뤔 Zk`XF\VHMMMᤥRJ% ɐ_CH@I)R*plZ2jjj*ZZZd2)ڭŴ8x޶ rs)ݮn̙39s&,Yo[kߚiiijv: zRX0 IKcދ5.ZeuRlLJuPVJ5or͑H$fY9@e^nBܓSJ]d/,S)u|}< C4)SN-_`c|Cӝ:u*?aԄ/\.g !?~ZR`_I)?BL&mLJ9ز/\⋆\3"m‰_@9)N2qRng[ASZo۶B9g}OagF^$9q;c駟aIZ3b9Əkw."Rj̩Y $tG) !DMMͬGydh4)0 X~hRh43)@d]v7psM9.]!MBܨ>?|JJJT"8)N_y6~hi:GZkmǟ~)7lذB/PJi5cƌ駟l0UqiXx: p+ o<yBC| @hǮZ:!"|1R/,["@SMM#@3!&(Z ܦJ)]\\3#o?(/vii)l˲$xG=?o~~9c4: :yk3&ZL"Ѩ| <:$rJEӦMkTct:yQ+Ns'JI !3XJH)嫯xt…t}6 ?#. 
L<`p]wyJkR$OYnIp^)eee-Tf8u7JRMX," (ݍ%H\.z>zG8W^)ZoTTTĶnǒFDc+rK6tSfΜܹsh4:#G$HyM0%S>+`HVe))%t!blO?qe'\N۶mnmqT&!Zc)<̆v]e]… %~l 庮RJDPJ/w)6:4d7 >ƿͶmۮB{̜9h4T0sԨQn44Z۶3l#ID"ֺX/xG1J Ǝ˴i:|uv`K)4#΅![0sL:Za rY~ b6뵳\j@mez{Uv-'7 +K*R)7L/HZ>ؕRtڕ<EbޭD3FC>iUrk4XovE!t6F"oeJ+kc1,B]#?9Ꮽs`UcmN&R`0 /F\V?`~7@Q2T/zR0?!t.dz9sOWyɯES]f 0bau^ `@-/MmnU^}p^@p.o< , `0t4 ?0`t0[]~)>quw`| !/HBg-˻`+_}YT9eYͻ) Ri0 aĥaUD"<)߿Q+l{$=l6Rʋb(ѣ>}zG7CkgyKZS\\L׮]q9ٳ'b67t'OƲ,ǡD"naN;n[)&nf|I;#dD `~Q^^}Gqqq7ŋsaQ[[K&!RYYIQQF"9/̳>Ko`0 ՆU!Ԛ7hl֍Fr!Rk@ckʔ)^MM Bx5N[E0й 믿3<ñ B˲xw9묳>}:[lf⣏>c!8䬳/<,?f}a~;\.#<~; Lmn_'x &B sϱ[0bRZk>#N8JMo ?^{rj0 2GEM1Ұ,a|}J) apSC!J)ʿ{NRJMƸq[۬wדH$BNK.CeС,XR|' :4sw{,'Of]wc(lض8tM䫯&L@J R۶Z{sRJx w_.r a} x(P)e)"|Z+VPB[)- ֪k{ر"m95=ueN  (ra϶mFA۸ \.G"ࠃ^`ƌ :osc1tP G)%_~9Æ c= 絯eY}xC12VK_!5552 :{Ma cĥaZhъL$ lFJQGnr̈?we0JX -| ,`ԨQ\A۶eK.k׮ǭQF-SVkMYY{lɓ0`[o5n!eqmqUW-c ֱ,+_&}k`X/?~/* !smg-њbT:ujƈKz`Sn i.(atFK=Gf~o`X%,ˢ?LyW+8Cd2$ rOpS]]N;СCI$78q]wiB ̙3yº1a&OE]T c/Lŋ׿&~v6 EжXmywt ΄R vK {`A/R*SM@moÌ02X=#t%r8N;7ȑ#qH$Z LnXp!RWW;Jee%iHVDsE2~?G}4H$+xW^)&`0Gl6믿#v :|+x7#`04IPB~<D~6 Y.߃/w9UD5*L>}+yL:c97X`_PUWW[ӿlF)E$a7W^!N/z\|A֌1" И1c~G]]EEE 2!CPYY.1 #\SB @`&Lu]#. 5N+)iue].@MMMf)S(Vܳ QJL&ׯ_8}GcqqDZ;3dȐ4'N$H:j@9RJδix9r$\.*DJɅ^ȶn˨QڈG۶yᇙ7o&MW^dYR+O'fu6lGuThi ~￟8N:ۏ . [ sG`6LPJ8n!cǎ妛n"}kyPt SO=Zb1\kBdZM>ʽ;CKjfY`y4iGn`TVVҫWYRޛ>Ccĉ\s5?%6|O?4UUUqDeG_vJyyy[qwޝ~`Xo,r*4_rBKky晎)0c. zK &O~: pyqy-|{(tMyr̟?2*++VW`;<_Bh ~߲~./^Y^tM+As Cg5VkKSLte2i0`0~Ez(tmll,hg}D"8z]e`\ `0F /J(4*Y垹f{ѢEi`:Bs ^WWG߾}d2)=Uݤ ?g#;U˷/㺮;V 5 Uwi0_?{D|R9 J<(iB- WDkWHR?nԀl\jߠJR:od`ǟp<0JAio)ەFjRt*R@ZHF#ed )MzJ+'mxx+`ɒ%tMF"&J۶u_"8VJ2 w}/aňK`XC_,&L`ܸqyxgĦpG4RD"_DRJ%J)./ZXWSO; 1)E'|bK+%4 __O+]ހV6|tT,Lk˴EMu-Bk]Ih@_YV?U&#QtfxTr־ϲ%N/_vWؖXHWA XTpK?򘣏AHQ+m9 Ar>9 R"Q6KʋXXEzdž>o]B)%9cCayqr%%%FGqiX-?qd2O^ Ԡ @SN] kqߑDm_0bla$Y! ,Ga-cڲe*VrBBufH !)EѨ]z+H$bm2o|:˅oU #. _8&g}1~]SSR4O=TG*xwx饗,ÇۆeC;УGƍǎ;選xm%Ks_0h FE.] uQQTTD2?"p^m.oVzĉq 瑱p^49d2,\bh@YY/{7RJL8 .2Ν[oG̙3,X0 Ç\.G.H˲(**f͚ ǂ9j)[1sU5q. 
ˤr\릕5}}t(ē1E1-)RLeU.)/☎ţml:6a4K9qio&[y7x`lD毈رcB*ro"=@~t`ZLG9r$h3}ꩧꫯ^{1rHN9k~+[g>l]FeY>d#FFiOo<"Jq+`ĥa.X pJ[X zZ` ư k𨣎[na뭷fĉ˽СCKXd W^y%/s[owq1o< nn`}ewgw7_f 4(i0tBk݄GkNv&^7U9ҞC!i0CAkMN;dTaPoz Kҋ0i~LŰZ \caI /(|dBtҎ"wy'/o& ./ft5W\q> J).rx >hx innfҤIp,h}ax믿fm;1cӢ<C`4O?ϼ6uMeHBFy t/&u\KHb2Jq$I\ƨkjW`om-XG}}X. |` Xxq{XeYRJyӧOj mw0h 6˔Ra5p@( Zӻwone{1p1,]#, !Zi"sf̥G ̀M6ߠVQM aWҾ\&-p >0(W]s` ϵ)$6.͹3kg}ϗeuQK8Zkbĥaw_kl/Pdَ\ fTϥ4Za@.oZJ:˛>L<1#6׍ݏߋ_?77dQ *ٷ٨]J(.OcD,;L1RX5'O CGS(񍋁KpKHXy'LT6CsC% E,]Xw.bu[e2z+a#^|=Aɤr} F\=gyaÆ6ڈ)e] .󞫨`̙z{ޱuBKb`,(L^KdkrI ِ!C7diC=sf-`W -i/s(Xn=+QAYeTҥ"11Ŷ0iSڷzm0ጂ<~s[!esz9SY-YR0w ho9K]܈sID*Ja5nڛ U1`#4{]Ĵb:5w]/tڕ?>}B#xeYsѣW ?z /A|,BHJvX%[Lƺ^҅ 0o ^LS} YLscH&`G,Jʒt.K1ɒ]RѵbxXN0Ctl!՜~i_N/tse<()KR\{.lЯ=t[)FIi(9?ʴZ_뵨۷/}UVVnf̘1ݶ=<{u=nL~ٳgwtmX : <-<Ѣ])*%tDĹڣ)MPOSCEײp^--iY%ݒŲ$v&lIQi%V,fEU)]K(*N,Dcyj -,mDgm?_ڞeOG,pnΓ~OuDf;G.d]r9\%՜%M4շIhnLܐnq# S8s@$N(N,[ BeU)%ETu/ԿWh\<minՅkX*aYy6eMp]y]9sS@XUTRR"J0u݁Yk ՀRD"fJA)e0:zw2yYi ް G.ҔnIKHdhkaj6NeYC^H%C.bG,l²}jY$E$KMDFm*).KRT ^#F"6oGZB:NLU-TpmCIj'TXљXV6t{٧WZ# &[?SC (Ӂe1ȭZ85MMMZ[ii0(Zk~$]ao`BrK. #Zh.eݣ͘K/?8[2347CKg%K:η4յ)E:Ud9\#;Q-ۏm%ZCIYX").KP\׉nJiJ+HPJSVYm[h$$"HwڶoQ D y!۝SWZk\+~UΏ(bnVWxTs` 9za|6 .[zH)ȤsdRY,ۢ)M)$Ke|IYru4֥e|sV(- }iG,)(²-*JH'(.KLƉ'Dvޚ'Q'G|+~^0ʂQu]A0huqFrbV'gW(4{iapeY/{F`v :1~r`iG˸f@y5Fh45\CMMMa5 n\B4j6ha? p%M4bL$@n]ی,L.her45# ՜)wo4շbuisXǺō8ortK6 g?'ʆB u*ϓVAۺoQ]qB2! 
M)~L#l czȤ߂?OZk:GsMb({,z$Nibɢ8J-eHKEHM8 w( ?E) :M2lbW=] a"_qĈ56Æ s;h:RJ\`Ĉvm83$4`zrK)CqVؐmEFap˲l3Υ@@ջwޜr)t$iSpPH& Uh4^pD#U]yd1> vㅖ34lY勤T4tͳ"up]&ҭ͍tuK8V6k VdqK,8JآQdIM%èy\\$?ן-D$LɏZ>@WQ\tX Ѵvpxkmh{+cA!CĨ5GpDF ,xW_d.A ."ׯ]vJ)w_?+pw߱{sH$BkO>-g}Fuu5o9m755q裏@.]89䓉D"!x˘={6h-ܒs9y뭷KB^{qgӳgϰg}W\[oEQQ~:m^x!8p W]uꫯ&JqGrwc„ bRy 7㏳`vyg.JzGfH<|nS&!wt N4w8mNatv"͖ۋ|M`Qm]THٯoamݏUUMhՉZN t~)m'{PTTOmm-{.{\s5\q.|<|ٓ#FsG3k,.?suqG{w1c?`n5wRQ˲ZD"a+܎XHli~67wgt6XABIU\ȼۧ$t{1WxRp["}S^^Gx8p ---\s5|}:]w_|1guZk?xfvm7=Piճgvm>o~U6=heM;x6|_ETO|Uc1~=Mxk`u.j`࣏>u]N=05G]]tc9y^X>x 7PQQ>ܹs믩W^dY.\HII F[n[o r!}~̙3M6ل> 75rwy'MMM|̛7R/^O?NϞ=I >*$fbĉ\|tA@ ;3xou]yW8s˹+/3g_}eeel1Vl 1_~%]v{8믿m\Oo9s!HA8/Ç#v$?XeYJ)"<۶3-کqG=[!*w߽m0 c4 (_yvy0Ջ/SRRFb_]vJ"tR\rh۶߿?^z)ds=/7ޘT*EQQxߟ>9|pnv}w?uyihh`?Mcc# @kڐ?@[oΞ= `7'.mߋ/Hmmm!|]k}niiq$%%%wht't6 ˥`Xc 09/Z+̙3iF$IsOn&["ql喤R)4 O@?Ʋ,6pC.B÷~3gb 6G|Gh:u*zjX_wߴPWWYg3<СCo&t}뭷"( ӧOwu]y;8սv-ZBk| UTTd !l庮r*0`0 #. :I ?UhH)Yx1MMM+h2sυb x!C6PYYO<}GQQD:n=z0|pyǹ뮻mڋ ofmȑ#o4iӟe7fM7/k.|vۍ:~ 7ܐێZJKK  lZ /{ wy;馛reN;ѵD\rn:v˽=zow4 v`b :I`h>}z8&ӧOg-|$Ga/Nj+>3~if͚G{o*++9xg97nlJ)n;fΜSO='|B$_;#Zk~рrX%2?,kRj3dmm%4 f1ff̜93Ջ;ӿ6njW^y%_~e+$q.B***B>}:O<K.%[s-g̘M7D4 ]$qPr9&MĶnpP__ d_JSSS~ѽ{wn; ,~g}+56 Z~~KRFm%\v;ZoDqqHӁUXj-۶?<`˲>ZK!*//g[3 1r=&h8fY8 Fi\O|{w$6B\ץ: ygٳ';sG㪫Oo;$HnX5N3n8verF{%\y 2;.\ɓ'H,#`vo]vo߾?lɜvfV[BKZ kl6[+HiB|!|R)ubZjʼ! `0T$۶inn N;w.{77x#wqjre2t8uuut[ng}O?`ϟ';: <ϣt:}}G: u}2?h7n?c,<ޖ: 2 ձ`Z^{5N96O o0ض2)&BhSO=u!Cz7B\%*Udҍ naZӝ`\d@`ѣW]uК3|e6ۚ?>'N`;lnݺ3u0UBa6ZFWV˲Xd w}79SODu9-̇Ў31cdx790an!mϲ`;xG2eJ4 8C&~~;qg0 ? XK,F"g9K/{1iYV8v׈޴m8ST*e۶Z6z 5XX,]REuL:pwni=3./ ˥לX|=;t4۝L*CKs)>ϳO쯾 N%s47ăϲ9ѢHiH4t6r-aLjND6|7L&9䓹{0aBci4662vXsO?P<9n> lxHBh";i}F-p*  jARk5pGyDn^.]RJ^,Zi} ?;ҿP566/ R(\ץGt֍fҤITVV?=z4|ʢnu7x#[lm]xGn8ø[꫉m[&jjC !,!ZeMOwt:A{f-zすBKkm`-ll/RN. x0/</N{=6R.Ӥ }s)RRҒb3l-(OcǓ[qyXykf=ymyiU2Cؠww*X&s3`X:[CۤByeqײsgs}O>~m466r{wN; . 
/J,Y*}QZ4^ ˊ|^|EfϞ7  )#?=Gum9ޕM1/_e<B,]BtS߾}GTTT ~W_dYVpf9\bRR)+!p]Wai0VmYd0p5{,ɗ}7<ן_Bi8,~F!x8385d9*d={VS79ʇb6o0Kκ\~w>y =-&RH)üCA~߿OھRj9e/J=zG+M74.uuNaEB:qdƌvqqqfܸqH)w ~"?,z>X /\ p`QGJ"b\9؋_p'y&s{މ3:GC2ly7B F1la2 Ӈ 5;+';8KG0ⲓP=IJ`{_a`rmʮM mX~5^1~xc^|E d/^eY/]ve dbԩFp NGV\q,IiFD2;Ji t Ңb\7 q9}xЙջo爠E YC-o i7Cׂ@fmvw]SkG;`X5IRA(l#-h6H۶,k =, qUSo05/Rt)P5I)c?4 kI%]ݍ9Oy^dȅ*Ӄjz{RIS{ZC~N)v}[zi\@iY1A -X%%SR?.>^zvwD*(3ڍ1㝏޳*Jk0V\ /`\B| EKTJŅ]g`(]O08./ǝzM^ eVisxG9ͷ_㍗ޣF9}h&SGE%g?z!K4:Ӥ9 »|xG`0t3W, T6' X9˲hnCHa0KÏR҇ ˬ\.dyZo}T#`57p,8p \մ6]4@`0 _#. +$.Yŋ3x`.]Jss3唕eQ/cРA۶=1dmkY|'d26tS6``$ ?>Ӈu47t~EZ  CQxg:0 SaXh˰0& rK]r% 6o7ߜޚ={r7夔ll6xۗ?jcc#~8 CaРAՅ۷/cǎeW^}ac>2tP?|&MĐ!Cr-2d>,:! X̤X2e2e SLlʔ) ' !{Uk= 8[kb~ JZaê^ɨgEX. +$p[;w.T^z{^zqꩧr 'PZZʄ >}:cƌa=੧N;7r)h96m;{/Gq㪫SNa}7J)&M%\¨Qmp;33}zGW_gYTI)OZ_%tʔ)Onĥ`Xghll,$i/ohlol5DEďzre(F6AN;! Fy F\K ̲, o1wX :n3f0aH&y卷[8犓u8kx7yF(EXB@3X "F\VҥKYx1'pXL&C<URۻx)̖]P;$rX 6[ 1јh,쨱GbDDņ" w9?fw%&ue֙ٽss*Yll z*c'qnb-V)_zݺuc̙\tE|Gxǧs)Spq^&^cvX[- XqN|G9B s}Zb7 G~Eld}S)3[ck-Њ~ˮR~x($oOsN䳏!kӖ/ -=ϋ7 Zgq6mqiRldz2V@37 Z֙qַ}ldK\=P'M~{N]:!K.g632<'hXȱ' f u [?E s+N:v t`\~TWWt—`G?z(aop)=\p!C_Ctԉ> cL)Ξ=:}455EQiYU9㜣k׮Ξ=]kW^^^,, e# |Vnvs(g͚ѬY-tP[SD\=w\l6Kyy9O>Dk\WKlMC ZܹsK9\ύ13/(㏯Zt;%GDDhVJڤ4d:58 4;:]4LPU$AD ŵa PU$@0J 9rktΑRX0x??<{7>(7x#n*ӁCd[[@|>O*|>Oǎ;v,/2 ;syݻ7:uZ˳>˞{Iy w?t_1& q9Y5Ɇn8oܹRT}mmGιO(Z4f2kmH$#x.g]nKk2h99bD-Q*j6i!:taHS>6Tz))],[6!0q0$7>|y"uMlF9^ݺb[IM-ywp6z#Nu}f9GgrYgpݱ;=gD<8|,n}t0 ;?\>=*2s'<гL6eijA{2BByEf**9k]ɺN,#`qW)m^㥄47#v4=vɶPnXl9cv/>ѧ0ws>?ܚyz8\ȟ;%&2rPUG3yǙ z߃`Z0S{>W1.XI&Ø1c AԾU pHh^.+u*Tpͷ1ƄGAXY!CՠA8蠃}W s?:sƘEDQDQER%֩F۹k9KhX#<Ǟy#ŧcO8 y xe0XH [}9q hko޼7C0뮻r[~y>L/\FsS 8\p5{0[;j˝,l]̭ȟM'Bo&$"Q?qO1S8rߞTNTz85? Wt{`UpҙøiF~sUd9Hs#r r}#pCq P6;lA*zlds=Xk9ChjYG/ I\y;xR'S4\/ӹK0`Fmن㎄aH*GsaȑTÇ^p3vXƌCN{91b| v//_=7t]t?0o9;CpO%5NsI'ZD#TЧIqڵk/^6(~m׿ DƘnW^L<ʍ1Dds~@\t+`vL|TXUUxȐ!V25A\Vxl|>0 T*%Ůp! 
@<)O^;8(D&"75}yιE$Aܝv'D`6DTc,0:0ܠA[:xܭ""|^Dc"rqxwΝܟf]{s46Y)uL/9λ6KXOݘLY5ɰlz@M2ifӯ=,ǜ[o9RƟ< ɐ;e6'G̢h)$|zNuu%ů؍Ok+1סe"8>˛y9u1/,Ȳ?`ْdi䳆q{lw=v3=xylFtJu`M6Jfhd98 :hO]Q82 Uݍ}k=?ފfy)HqJ&!QYYY*"S<3yW^y%_~9mmmTVVgee%W\q]v"BXk 9K;^!nZۛk}uܙ%K4TVVnݺɢEw2h (C,nvgEXyVDDoK!Аf~Ӈ>L1iH?ܦ@B !qۿeee6L9w ` ^)0qKb?-:r_:T5z!qBe% >ei|.4~6W e+N:'_֨$ɵ-ܘ&xqA#''˽+{@#qWx- 1g\ӝev/?7ޏJ*lYo^TR·fƫE -<]8𥝪*pP {;DTVk,VuΑJHR@<γܾek"(V,|T#ߑGV K,+v-njҼ*"nѢE. 'uuu/Ǧh,IDAT"úMsǥ@C>??HWXL."P!sHD@sn0s3Xk8Xk<= iae1PPcE"Q T9sHmmcsL&C>}ߨ0Xi=paI.󬵡H\ʿIRsG L<ߌ5F9B/s,wlN&8Vة}{0ӅdïcwvG<{c/prޣ8a\wե9]:A_?v #'xp >r?6!K|7 My%nh䈋|<=ӻ#/%Hםx̦C&a;q"\=W\+Fkw _L G0E|~>svN Yr=}i]:2d39l  L&{S?dvfϏcgA" LЫOwz~61}_N<|6E4NnEHDÊFZ[97Mq[tY:RkK'HЫW["k_xw/OۊiSwm+V5/)w_7qDrKjjj}xb@ouָxτx L&s<;9we2:NLib[)Quw՞wywy'W]UƘc^*T^ZWJ"="+yϪojWd[l6=p괁tDb/ه pCEH[mmrxyG.1J/iζ0e ?0I%K+mtq EK؆5>$ C**~-|!>|6 r $ mRdQVfOeİv錹ݩ|I\Cu5KEt#cwْ RuЕShȈ瞜Hnظ,]0 _DQ@C.Tuρ"iumק'O8 8[z \ U%!|\Gt :0TR[t:hk0`M6H II$i C mIŏI>p: <7޼=n-t2Ksw^6b04NJS|~]{ ͍-I""G^xz5ջ;K[;39c-q{lɗmzexr&ZJӢ(U[W pwu**}ۿkq]3¤Ix' &\.g?è>yϟ/ZS[[y.x7@D(\cO W3*|b9Ò%K8swz:( ! אb,-[8)(J{(6L$?=siyKDS"r.q \f޵[nc2aH,R_ʹx\K"AO60l冤{;QW|ˍx}vlȳhs,z/)#MՅwa^_@*dG)ckfwcͶe]|Bff~yT;ݝ=oxF9izJ"uH3ٍm8so`8k X]J U$gHyUzיd݂^)gk6ܤ3ߋZ*Š=FA"^_T:>Yt3i/%_ËϾR]:yEV'K*8z׍4Pf>ۤbע-j w!EQߒgy3f~4hZ(q"4sSS17OQ|D0Ƙws]b K(駟.N!R}yߊg`Na"ylc̟ ~? 
N&Zkq΅Q裏 q?~ Ru՗⮮wݚ62$I8$>K/7]H}ב.O /,t"qHztaևKS9STށm9#/Drj:TQӱŢ8o+/ܵ"ī/MevfƒSٻ+O<<{ڕQh X<_Gx]oҗyU5<<:vaohcxʙSyvgEΰ/afi˒Hip+ڨg{SDD8kIs/={>9io)ߞLNq>'y%g,g^eɇ͖hs1q_ƥO8&W7KNLYIN>so.z<KcB<5vm[ڨ7 6p~ۓ9c}Ϲp!l=ps^Tf=e&M'.?qTy3A<@%q7j`(v^oeT {4ϟݳgϣˊUs[lŖɎϐ!C8#x5--- ?>s  i&0X蜛 crA2~xRyJaKK A0a„(C9߇.?Ml@<>>xfZ㪜L+M4%'Ļ~{*ݪ"t"cyz%Ѧ8wǏve0#S^~4Î;=:mv@T<]̨sG?5F9 !C<=jwOz%|ጣ51gGѱK <dWKMU5rve'̙7>{Sgǜt#N:{-ㇻ5瞜HM#K3;-~1=,'} ]&7K3- >tO> ҉$yͮ}/M{ɬl=ps~yi kZ\+"¸ᡇGgтetՅqO\A{uR{4Ҍu^}54ٖ51ʏv ׷O,l~?-a~Oؒ=:Sa׭ id~x}fN:<֖6r9ԟCi4eYN=_T@cuP0PM̈́.FX]n s}mWyg’pyi#pIIZ?1 #:~]{x{e1V8 )hhpM.3HJ ʈ #z$TJ9Bv>ƿÇѫf=gwظ9s`4ҴGZKڤ9m*%j Ax4f(}Mbx|_l' .?^4=w%pefY{ UD4ьue qE_RN!!Ǝ~/9ܘ.sf,hNo֒ -ܢK$JaH$LN?9 @9w'p:r9G\x9bF`N#Ey8rmDxAgEQ96ckDQd( Ո3""\FQdABJSԌĻ*AȺ<+E, |뛸oPQUؿJ;l|{;C}HIU3er\u] |Lzyܚ]:go4L.!iM7R\!OaL+kA6~}Ɵgy?eާ qtT)g(<\1b+"ʔ+"~uX>Ie*눟:0!0{tXM.8m Mvgkj.&~r ~oR)E_.E&M~ԩO?JboD_Ce`w/>zl[̚c<]h|WgI>xwV\g~q[f 9ٳ7oaeeeȹ"rXV:sn1&XYEx~к)Rk1Vιd~q)T.}N|ߣFNp?H**KN~"\WkaqW-vf6bhum w'ZPHSTVa宺k(HJ{~MåR+9\ḁ>k7_r dv5w\իsӇyQh<9(++UTTr3TKEE[teeeeRME>`3k 19f. umR߄TaZVۆo,H8UH!,tî]5#UX,!k= ^"B]ZhTJ}?ZˤIL*2dA1dY|wZguT*5m;9󼃁H# xRG_zu)KˢhTJ}7Sc|uk([ErH$NfmW_}u^H6u1ZF\)Rj-pZM0!C "DQֈ ciRJuK:/"|' CgqiQA` SRJ)ZR)k?HSJ)Z|Kht<8RJ)R.}ZKyDRJ)ߡG2ɫA˅ .J)R;O7|/#H& 7ߣRJ)҂>ߞb$`n]]]2Æs;v/" |APJ)RY.= cוּ7 >^wu3d};v= (xVJ)RY.]Q2t:}sCCv1Ǹs9G~{<[˧os s5^ZVVvc[[D:W)RJ}ieY眓ZZZ^N_hf&LHeS{\H$lkk3QiwXRJ)r-ryӥK-Ztwuuu7zcs˛DQdVsRJ)R.s/ rQGo8+RJ)pvÅK,ZG)RJ}h\7RJ)RJ}'J)Z79p8ڣ~EuhTJ):EDpk<Y?SJsDa0VfiTJ):%̇xʹ5g0i kVRkpaBeI֜aE3g$̇k{W4\*Rj ";~KںDl֥M2AY+uB >e&E$dsniԅ+4~;vX}RJ)N;v#>ms`یIױKl>r}7UVQf"!_NJhMv'Hp9;)F0b ?>>(㖯htZh\ 3[/ތh?⤓N2cǎՂ)@åRJ)#Gx&عì\^>ZwbÊ]&>N科J'ګ#=v:w`۰Ti*R$%!a"w9⬹2pjTj><(J# ^ yBr6gXΝ.[`}[`d3ydКL'^2FljhyKasSvȑr-k{4\*_)18\ÊF.K|R6.[lLF9G}:ka /N]{vKCyصgԭZ*kM,A"Hx,nK5h7 J ՟Kҕqx,FBkdrH.$˒i͹enFZ. 
fs M^r ܼdʧLZ[2b"pMXeSoRߨI&Ug1cƌ#kkYQdg=|37!Kl&ah6sf.ܔsA|2m9imHS} M-fAY!QXFxe3'1Kg0CchMι/\I'TZk7Z曹 ϸNZ^|E} tJD.N6R3H$ιSE&_dI8hР+wq$ ^a25Nヽϻ\>a 7\Co}|s'Y&SJ+k כvŮ+t%rFFs!Z|Вυ.ɹL[\'ӚJ>zٶ6mQkm<璌B[Qb$4dgBc$Dd1Yf< |bL (ZJ'[}"Hl[N;B psB܍cj2 :-Z'%ɓ-1vy0QYmTJ)zI$"".]""ڝe""a#F`ܹtܙJ1Xkioeo^89\VږCa$Ka2]d۲.ӚsIKSkmye.Ɂ@- mYPldio-x:ZW "8ĘYEX&"Ed1`<Bκ2Ƙ,u\s_fzj7wg5)r:ϋ&0n.c̙q'G}u"\N8d2s9wh~{X!:RJ EdYk^ݣyc=l6L& x[ QhY<a>L I(@/E H9: k]ob (#U  7 cH=/~Q:H$7I|a)v+RWlUMw_F;37EDRFe sT*( X t(:Au݉κ>ٮ"o#kmKw-o`\a~UmI/ۇ/S gW E3k_־P5Dt+FB_s@BB DIcBD=,v0k1&&j\ޚsֱqɣm8G1â&DYkqC#rO&! L'a ='p6eֺDaƜu]YX4LP |z2`~ue2"Ֆ&F(1#mnxdJ.3qi0uAB0\ŭ_ǩZ(QI}w&slgouw)$疿od6 ;jl8bxY]!%o 9n5 DDDDDD xեt$zbeH/SOcfK żi5$bQ ^h,o x qrm _C0PthVé6Jލv7ϋRoQ;L@7餩ָu W7&(؏QqW%<ޮ֖m.zP7kOD{n0pxbv Hٲ,m'H[' AXH,M &t|C0&00c #DNI&`] &t,øbD"1{tC!J$7|"dҒ2D;:ZI*6 >юvLFn EE"I /"""""rj:n_h'1=Y>Q^'?9>wʹ a|Qufə!nbn&͟bۯm\z|]?ŋ^I7Ǘak־8}DBIfϼ-[Y|#39\ش\?D>^<<67o猾` nV^G>=)لIcշ•lٲW9Njyu$0DCkX{Lxo")\z:~o?ej*kmJէ7^DDDDDu1iq')O R`30\&d6~kt0t3 yYzM~i.Nl8&Q+1P8r8x +:q$~o_'ߺu{)<~%pWDRl|M'82>͟}_-o}/_>iX 1N3?-I#({oԶtsy/Gu s% |f&}SnQs|/VXkY=|clױzX.|?;]O{'fp_G ⒫oo~9,K]EDDDDDN~-;h9)eҵ}ϔbYB^/-UcNnl ih}uDжl2:P0Py)N1cY\ulݶ6?C=^}|K@K~^M*Z%Q>ﱬ*BcrZޝ0[&w?=kqE c//[= vl|r_3jHKGn-;Z=Ryv=`:s88Bi*̎`[Ӗ}5suGYI,E>;- :`)ZΙm0$:!a#+ITu$TU$P1O'(_6_^3{o+˃E4Uw!{\aH3k ߼k[]EDDDDDNFcЍ1XXضmŽ8yA0^8JlV-xmO(s4#dDLs~Cm/{:&g2S]6¶O`CA3^hz |Wk0C?a0!5#%1 ctnяb,7DC_d"'<9L}ޙ㍕4-ZI LdT^HT6g;n5/O?`sw?Ǖ?Gm \El)0;9CH'4`*+a VD%nċ??8~vW]5;8$ 1."""""rj7hαNDy):{iFMlf_vpn;q `Vxvywٵ-toe[6{]|滏ks|[?G]%~u6p(J*Ri uq/ʷ{[؍qCqul9$xz kՐ5zW)bɋ1;;iOlX=/>:7~3%V!Mpsdrp]<|:[(Rtf~|Ca|keԹEߟۢ5jo۶VSYJ?8xq<'KW^[n!jvEl7V6p\b UTr]H€i (˾+ɉafO[N:شi/>}101-?uNl9//]!Dܴ{$%աf2Vw ei}2Xb7Q7мeK0e;j-'nOpO{ InMlTE YݿWc.^f;N`M׿cAٽqX5^UJ[$NkU95lܴE_gwcN0Hu>W~'L8ދrc7 ^<ŶzI֎qL"oٸ\p\ xG2xn7ah  G, 8&A4 %^Xnuq]C,sB[}b( r( C"!a K'z0`@z/kC"#^AB&'7CO0x,NbŲ]z}̘1[+؇qS2t('"EDDDDD帓Ľ;|R2~Yx~fZc`YZ[DDDDDD_ѣGicqvyy[,`pZǨ\DDDDDDއޖ)}_ """"""S ).""""""" 
"""""""uq9h8SZEDDDDDDN.""""""r*D"RyW_RyYF}EDDDDDDs$NDDDDDDD]DDDDDDDEDDDDDDEDDDDDDD]DDDDDDD]DDDDDDDEDDDDDDEDDDDDDD]DDDDDDD]DDDDDDDEDDDDDDEDDDDDDD]DDDDDDD]DDDDDDDEDDDDDDNqmXKk0ƨD۲92IsT"""""rL9NPYE{v],,`0)o>:q3U("""""rb7΄C(V)hT¶T%""""";n {I S[!EDDDDDN8,UJ"'esc'DN]L"""""61F]EDDDDDDN "r2h่ntsW/wyCדnA$q"""""""Y@7NgYyt{是tRܷ- GԳH ;8cOZ)^k^*~f c~Z.e|GB n*:r*v-z!c_AUoz,%7-De4>]QmQ{e00NSg+!T⳸<:-e_*#c섩( sQTٶ뺨۽xt4հ{.4"y㵠6+_% _8OvJ+C8q,}GwWCkc[HxtżG1=Ǵl$?k6$|ϧ; ,8 p\䐃OorOxtuR8!M rIq-87qюtG<< X={]Q$ s^^,N{")?1dn")3_8Kq}uvto3nN&Ko'4bzZ`j YMH!ʽ ʸw}+99YT_fA~i_j[ͧVG9cz هB:eR奥Y~f>/=؈oJ&9|u ?_\GNO%c>i_;y0m`>v0Zrjd<U18;LZd Əftncq!|Îl%OeF˒o@I;mCgIq"1~E4nɷK1cz(eQ>0ޥO^*ze1 >ٰ6"CSDT5A09t&`_C+nR*Y 1b5K6בdC3OjCԷt0 a|l"6Li>Rϯm$]Ą(Hե[([G{$H&=!#2V܄6G4bRLSODjRl'3f7t7YŲ՛il9֭lw3d¼8EDDD}LoÀev0&eĠ $2$|pY҇.kvrK#mvsdEڹj|$`7p'_4V%lhXՑ938_σHqEOn6WZdG-3r)ÄTF fZ3K- ֩YV{]{on;idɧi\{ݢt?zZy8220˯ oޥ>VI'-~_DM}-]čE0)< GkQdc5zj'i{wGceQO1&m*iHB L4xs~$tԻk޶g|M9\;b~Z4fԾo/ }J+k+q$eӧ!嚋'eԽ}U?t*cFp`14?[HVߏ..|s>ϟk[DDD~s>Nf@2ݼh$=6'9hzh9}||c0vN'5sdL>v˲-CuD&ค(V4Aٜ?11 O!cÆ*^qPlĨn7$YGqd.u$>$'J7V2&`YPXǸa}(IU ˼HmicgsQʩVӸe-$aҼy /HNJ6SV /mz$J+A8(g5~YjJYt9{64G?7sm{ceYI>w>w>"j_߻x~#=g}Ӻٵe#6S_KZ0A#OcВ v_ɞz/$g2x w ?3`t4UlZm[\8`e;C8WYW, )&+ ~󘑳:ٷeS v2lYAsOh%]q,! 
=9}QO˫3wVrpA]Źx,N}]&0&s/GJ.M{Y+M.S 7-j_;솾6M'lXFet) !nlr79{W<]>K{ӊ:-~?XTɨ  IDAT/ֶeʌMjF.c {/: gMߺ\-O!W1~v H uFzBΞς'ꨪFeToaמ%~xRsx9^VN C?9o\v x5_bagӭ-UV9 6vo&c`m`k)WOc}uaD lE?zr1Xk;HB\3y"D\f]7OG0;ɣMmm}bD(/K响?;&e VOfٱE$,'̃[}(ذsNV\.4ytxnJ$jl ]XMM-,IlvnfoN8-=adO;^`{S;5]'Pz5y'se2wxҎFnLOru7K%w| O< =x~#8Cymx99lzZO?Heâc;/Ķ,2`b&кJ[yOdr՟b@ȶlvTpN̼KGE3Φ{QYvjkG&g 甤qaDr[)|aἇÈ˿GFtTl']'awAs,GcؐAjVlk;?օ6Ū`9t7f8[6T}1@|;03s,l=/}d|-Bzlla*Œ:ia6_!tdg25)V`tS}ͼ.X>k_)?Z]qǐxt>*[3:Hg'_MHuYyjcvJppXmO,,6%m%Yz%c&XN2F먿ZuYW#wU؎uP8/mrj}>ѯ0d|-ӎqD˱mO؆n9:~ioe;A G2Fqu>~^^8囹u6ABj(-MM}#tuP_=G&r][_k `@QYY+#|t'`glh o> ,_ic3"'tgJU'8웃t!LU+LNJYIgbfs367tY^9_8C ԶDzi]qtFK5l*e -&Fg fKMtyZwďsK b4uKHʟج'X6feRv+42 F3h߹ 1} UtӘwkݰpzwt58X?lw\爣ڇ'tl?,~>{m<ƶβ-Qq$}\"8XE} kX(ww5/>6WNgW78$gRc71 =RΙ>mmoŵǾy0tlJ&C~7uJYn%vk?r2M֙7 ͊qyzz~^>ۜ[t"/Lz´$[IFh| IL>6#V:2qjkX#99@̧pp2~ ǚ[YQj;%(ʲ7H=A (>^bf%{l^\" 8YIlond*RY kXî.i~8zǩ$ָ 3s7^vϰ?2c4VWRYUCS{˶d N伫?\}V;wUQ)1 X2Vļf62'^BE[  @DYZ="E #9#TTtfbp$R|}i#/CGwqtμc .ˬ%a>\2Xv>Uow.Bg|aʀ Vvִ(()'7f/O/awȍ"#~3kW]8Mư1ڮp0Xt]kOlsgq?yN@JN!=/ ^ZEy雙mr'kUMXIAݭ44&̘QⅭTmx'^ 0mDaxWekXj-.%SK̹p"4Kgf^eVf6}RcTn[/,-by3dO}ٶvOU^ f$}_̇Gᑓۙ ),&+z`3ӬALj!d;EDDDՀnȿ&FDcqG-,}x{'E#Dcމ /ZMCSL>3}/A,%; oU|dKy%P'5 >r/")p7iQNzrP%$ruc ,""""";n z7.r2}iDDDDD\9!PD@"'qK+;@u.RR4"'rU"""""rvq$ )N5Emm /9NfxFӸ4]S"""""((((Ȼ}'>t-X4 v lxq x,%EDDDDD?*[6n'^OͅM#B t,(BEǾL|NOM3 f%?!r-0SetWOn<+MOVr=[ """"""1>1s\y.Aۧ3,0>n0\p:Ҿ78 2X!3/4h6&5' &L8;CRBAvۈz6ofYO@]DDDDDDCە@yX! 
sȈqE H q]t?r?t9c % 1nYn LNa>L,rR .""""""di5͂zZe' $tEXGUy1&o3L"6˷*H3Ù3c3H4]DDDDDDSgfvs C[Mlʋ7w31q#I[XS r11ɡ Wi x#;5{ϋ A) ^ +?B{eN+mn0v%2Yy ;<ϨA?}&r6VD6sf;K/ƢƂ;$%S0˥G;PIIY G('16iYy !58,|u{;x_ݸ,&.>h_qj8`D&ctǼ#|D/|hxm܆D4J7'D;p_"""""""vDs|Xpnu0 xT8]9咜쪴EDDDDDDV(Y@,K#rЃ HD%$rDQ BDDDDD2sYH$Bcc#gë(ԜB -w*TMU@@P) {C$!rS Z.^y93* :^=N'N\\\ QT*JRT>@lfNS*h0LOhëvit)[=a/o-5s֫N=,T$4jO"`1ayW}gW}ɶ5R< Wx Z(ͦ81a[w635V}\ lv zѴ/ÜSLʼnX;i85<~9V2ni>Cʾԍʠc61q-DҿJܱRZQ/.5Ytx?1M!v7N5q]ѭ̓/' k>][.{ Ĝ;C4 *rb0үW/_RX7s1'`:@k ~^qg^-tbSOmb<<]#9- ՁbTJ%7W *)Q%|ܥ³$>t]:UomZ,%RgQiϖnerxN,FF]ЫTVr߾W&HiSdkK%WLJ8nRN-QYvd| I.{n<9}_oEDַ$W?I mUMz>(_(=%8iِ(ҶBu/J_VU%""r~R?&r,/W 5kJrˉߘWWAlWTԬQ]*&}?p~pRbIdTuUnȴ!eK?K,{x+yWzU+&$Bb,M-~VOtVgJwŵY]Wnm! w6WbR8IUO|ۀOJ6x (`M# ŠCAETyg"7*WJ |5a^{tL)(qCqy{'L#=%{v@iK_vn"!ja- u?슍axEu^̀5:uȖL9Qш2tt  )GR ` cvzJPP : CSm^ pݯ?ZӬJL65"2϶!Z8bT;M@h9::Ճ(r WJ ?&#yKY>X _WWjvg-+ӲesR\l_ 5Du]c'8~4ϭΩxeE%?k!j94F#F`Ĩ|AJ\76[j:+˻T;,YY]ˆNsLƩtVOwO6N./˔{IӅF< ooʷy-7(KQ3S-RO3g]?L }㪘4}ҁ>.xo`vow{sN6-z w/ӔW>/Oyph*+P' %s*-VumwOe<<|}g;92mf:Gʆx6}bR3دMzZNb&R߇F#й?~QYq!rӎq3:o/;E] IDATaN=˖𺶉YÝ JW ҇H֕t})xnYYК"5ZҷW;Rlg&}% (w1=1c=˓YKW^+P_>yOp k ͇30`({,yY]ȎM g߷E< ֖PпiUb 56gAժ ޮl؟“k!ogo"7oZ_4^eEE?H2F_`ޅ܂Ruق`/7Bk[vݶQxy~PCi_5wjtьg4+ㇻg0OŜJ8ÊM4~gfXϭKm{Bň[@qIWe>[Xg`VO\}_S5T_?+!_"_n:*w z0eu}%yR[#UK\yY #J8:9xyt')^#͊!d9R3UB[aڊ'2hURC/ ڰ?yP^m-/>p'H7~r7Z}$4|sc#? }Uղx:YZ~CC%^lrpf[iӣp4:irJ&+nĞ&!xJQ'TjŧPV)&o.#?MJz]jҫ6%#*S~^z>izі+/lgk$FBzΖS%Pk1iپvA=}E)yݩt+O0îȫUGJ,9)Wֳ[ eE$-t>m|N)ZoTJ u/@Jo+vm4X{䂸6_"/S@~gEʃKRA_NI:o'϶M^ 4VΜs32-r-0RA'znTijXMy-YNX_9DŻHy]īRGx\KKpEub䫻"T0!?*WV-AKUqW˶Oo tb*>Hn˄A>K"4HDK <1lbO}a$mT DD)eCD:9$yWk Mڎ:t5U\dކ[#1RXRf<)[m*\r-ǯ.a@B:g8GofgUd[sp /|ҊDd#wؐ(,| AٗHUaCa3#'+ (Fx~^ .h /B_ mnduG󮰨W$gLIٕ/54 ' WO"1jp 1Q߲G# JƂÌE¥Dxsr4G1d'SVb Qy%糠 VK w8Hx|4Ⱦti _v^$gP#Ӛw$/me%J[% cq?wFS4$[;y77uzm-U?̹u۬D>A֥o/b&uBYKfaWZ+$5n$C"ON%qEoHw":ŝ^{>yvNR- b8qk9i>8;6,VרC8\4z- q(M3ki^w ANO>Î7`KxzYbvP`J`'<ݱD-|ӷP'X_8 5x^:^sr"*Noky }>z;Q凴֙w7u| s8J j's+".&ӎj%=+q/p%8se!f C,~ʼnw,N3I=vhQl2 O}Ӣ(ZEhv`͵-Ն- azO? 
A=@>v ;T-VXJت̿Hڥqh4$cOUnts/x]gtM`;w_ `ԣ;FMcw[\}AG=TG{{:,8!?ߎVrӠX;mB縂8!JA:ıgd甡q"FAQ4wyh ly6DLFIRn өE塯/fcәKLywInT.}^+wYG+ǷV#Ƿb :4&8vv;4{i=^ybSqx.y%8PjAgc^|\ e1x͂$Rx>i|x6rlE?jYΆ{ %pg'n1ن )vlNk}Ajew.tjE^sXpc2>w'^#~o|~|>ˠY CK';{n8p: n"htZRma8ꎟSEA#N+ӜL6sUG!O?R!•!H"W>F^?LJ56Z)ڣYip3FrКKbt³9 S+qlXăG1y*^O~Vnu>H>&b>Ty}oɦ[m;( ϻٕ( ;>kߝ]&|` _.&<2`1lȬG>9p,ՍVܼLU&8, EʮIa:6ύ>#1Y׃kUtx=pS 88cک`ߕ 7 {ym*Ų;\5ZiFABPWo ;58=k&:PDZy^tS#'?'po| P%@5k;;ngл?vsN ' B.)wnn5nUӿsWbxe*!,|bkٹ:~oMhQj|) hDLZü׆5$g-FSw(|1I\5כyoo*|0c;oѷr;>UЮ>daoG|B!g/&Ů?x'nwX|wݏ2[tߑusؑX::  vƴE:<vvn埥ei\X"Ny'Y7^YΌo0(Γbe}1zlW&6J\lY0oN8=w,+΢|ò|RTYJ(7.Ԣ:\_63u/.$jOzZCٷi3+,r%^J(>d׎$ l@ \샼3p*>+fڸ'$6"#}ђ3dޒhVTQ'X~şvC>Kr9藫m7CrX^1P{pMDwy?dȃ+{6D&(uDh$RYx&K$cG87]CY~T]Kj(O }%i'/HmuoOmtz Rx=?wf鹅 C$90URIpA_ٟ.G-&w\϶ɍ/I Gү@̈7 K6uRG~([k+> IFD'_t*&Dԫ/%}풺p%꘥*A[T}Qʁ[,.y ɛ3OI5CL"RGr:M$yck`ਈXIWĥסGn"C}m"b{K󒮢`g>YzrT1DAO-A =-Z hrE/ Om} n]IZOW$9=_IZBfHYq 9df HsoK0PE9!"&tO׽reUk,`q\M3Y@"'tY2PlI,|ֆ-S֎i,e|""CnbA1Jsɹ<qʱO+KX SFiU 1[o{)ruAjK8sqx{ mY\yXr&3pFH/jxkw!]7f~:޺x>-wW7d91 / O"@@J,Y7WIq /cD<N yZG3bhH/aG͓2F$UһvhAedЂb{G^( .6Y.HꆺFAS8vGr=m*0z/xgX?~peUK^mǬT*J/soǷ[*ųT+OiR$MǮ`!sYP\R9Bkѹvl''{;in'JRT'B1?=7W5{`_Oo59#yzt+fJLTFy)6~k,HNqWT*J_POxy(՟Q*JM*f(\PR6Z///\]]PTqjL7,TJRFf3L&t:il*a# @ U*JRT*a2Uy) 6< QT*JRT?958W "NSoQT*JRTy*KQRT*JRTyRT*JRT*G]MwNSJRT*/p^WttT*JRT*߀2JRT*JO]RT*JRT*5@WT*JRT*J5:L&#&;0 4ʟFdĠS6T*JRT*Jw -# 9wlL__+VA>!#q0!Z֊h K!;#e8loM0ME=U*JRT*Jo 45Z'nPsj&?wi)()_Q\ڹ!cG"N'y?lw?} Oޥ>ood|V##菦7؋1ⱊ3JRT*JRU{v qYBA~w5hI3)҂ݜN|| v|\} 1fm`Ґ{d04((`!>6<oxQ:ILL'lCqqHF-b":6'|Z{R,LjjdݿOB胛:b'51\+]E - 9Aor#87sj2 E(a&!)\;xxN~e^T/!-ͣl]ߜB`T.cmL$!MdӧS)] _B~Vq6 !N&$ND{oPBI\ԃI&^MX@]-q mx|T(&G&7_!!GKH Du.EHl([뜿@Ɠb(HvxJ)_$7onvSlܳqm2l*OPO>?>Qx.epzB#+PGͳ1V,T19H'Fx?:2q sDV 2]-B3f̘{###■ђ{ikj 7}),˝9OD-x ˄#;0/N>Y~42TrKȩ[ZXE=+s7RHĩ:}DBh0 y\<|ۏqz,D@Lyט>{bqG.sn.dr7[,I`eއ7%ܹ}wfϹh]EC#%1ٲz'_[G&E Os#ˊ%φ[Jػy][N$D߇(~=s2%hZvVOho~Vc- I.~G`pVK:Z jt//Reum$VoˊQ4!nJ~a&Xz9 IDATK36U^Yf6@7u*cp[:y{n0SgHR]:i;NƁX0k+&0e[4_bJ Oc>ό9C8*u.ppg2}{ԴXoC^Dcv\X.дINg49q"5(U!U͏Y&xaqs+]Fһ@fp3y~{ Ǔxpsgggo㥡y1l%;la:q3gL 
lvп[gz{y`cÌ=cpdcΈt*7EoIJYv5qk\ݕߟ\ʶdzxoM8V{n]0z,$֎#7K&bt5aFA1f9e Y+ܤL_WncSf|~ =|?Cx8̏ 'X^°#b6m% vc!4)îr2j4#>İ~(jrk~${/i(F lB~ a˞˔}?#Fa`l.ڍŷ2^ɘ12f`W{yxkpX85SӘt|<] Lw1r`bk8Ԭy ez}{)3j =zoNwswq~tۋwZ4d"fqsJf_eƛq5/S_K:Iظ}f=d0-w,.k*|-R;uߖM2r4.ݗ>]ݖ֫Sg耢hi ?'X|9Rr>?ꛢAyh\©WڇpMTCFà+S3 5 ص^T(SaaT(!K'O4d]>7mlj)T*F-64Z @o4`xd_ d'$asR$*TjЌC{Ӷ Sx=?_ XZ8Ѡ7(1$Hpa/Ѥ<.:Ѫū~xNh\#h7dBR2} W E j/jyw;W{0f@%|L;݌jz$ƎZ};Xӯ~Vt0yבf*Fnx~sZ,JJ\ V3_2{ޣʝ ]KnfJѓT1sc5ЬWhBzfDBdxwLq?hH"OB6[nÏbb`0swǠ\ѻmYRjI{$:W/R_*ih1Pl,_SI6Z\p1* &ZAP?uG 'g8y|\HJl7*دmKk><~ZO櫧hyG~̜Q OpFZ,R gwT?~86>%jTqBLbtzW=b\t9vT1ܻlU+ռr.pP?6.-&sK%?\1(޶LvLy)>r 6x8e̼B' s4&? JAϰ0B΅5\n92? y0u?I"F}ĭh7'll(, AǨ/(&Ʋp]mTV@>|wjH·RU+QTgUZ&/C]Ze ;Vn( .mYMYGrhV_]9ޭl`h:ѾWK&~֕"ۻ!^Vj.áX6|xаӻTorF;AA0]+d 8qj*)OS|$ ӄ2йֳ z7c#5n&݄*7ezp !LQÚ-jp| %8HM%>&3jhh#-5${)[D eT¨Sz`ǫj\{HWʼnٜ!쇻NC.WgY?J];RZ 4mքƮ1,:c/ ~'J3c3,UCބ9I\ݻ]79NAp`k>OAwcD((Lnn]jڝb(O\ xep ,HY)R&^ )Y5k.e 6Fls!?lj3v9و" /Aq| g~6 "Xu2xc{MPF8ɡɫ< G9cC·Sx?4;bɱ(M`pE&@;5Zizs.,EN7W#ÌD2 nekbHc4N?F%@Ykі~o/gθU^k[pǺ,-n5`3SPB^F#ܜzVeq7\??dzj~u޿w:њ|(Sm{7N k秪+ $.%$@W]c@/.gz+G2mNgٰXlXW[s&[A#,Ʊ8l[mlLţhR$ګp-W\tZ-8XXm΂BX6,yww=uvfCPg/f`h *R+Rpfo7u\;$ (\=|r \F!z.ZY"wr4*oyw?7<_dކ[5K%r;rIϱҾ G`{H+/ϡɽ-~i3lh>>zF7eP3vˏ3Ӡcv I}Эcxi̪Fp^{3}-wՂ%Ix%WCaMڋL ܏ˋbDVxMe<<ᨌ<4UoyL2$&;4.MPTFC"<Ȯ/26=2Ӟȁ;胧Yg[D"T_1ϳh@C\Mhj߈o=}4E${%d=U$vwKWA*i+EGttIB(I_l*^~y\YT5 ﺮ]zwy^nE䒧-]HŮ=z~Ϻ$\*VxK[?pƝBATBVzNLH@x?\ҳ݄Dȗ(Aw?4QWMzv!l#<:#DX?ZLz2!|Ns0sʄF`FÑAV _(Q57i)Fà m󑙖IM!3#‰cGVr*zdJEZ0a& R +"#ODH쥏O/.TG{jMqIWJtDCNz_]r"B5'nBB1K| x HOå mFǙNf h#42 ]u_H l˭If{0Q!4Yy%B# lC ,&Lu撖YOAXc6z"ߡ` $:V\(Efz /S:E"ҳqc!$<`?RV"Jˤg 0,H#@K5\c|2)-(P;"3EPdyT?B礓]Lr(ifBƒPsQc )Z !)ejD|lфn EVZ>(ŵ! }t&%?C T(  vAnO] >eV  -pn6т.  -    tAAAAD.   bIzAHNNFUU  7*˗d2]ʕ+'AAAf.I$DS   tUU)(()$7.  
eY_.7$I狄AAt  AAA~3I    "@AD6AA^ׯg=W`:1,>u?~!ü9y &KU$aޑN1L<)s py&q[je~ r,9 zj'̊ճyXKg[ЎSPJзUc|rH%OG yo>ؐ<ʔyS4oVélV~:6mWthciݨM >`|gWi؁Ư"犚JG0xI%Oxl-֣ٝ1$e4~^~Z9~|%vO7dd@>ףЬa}73Tg61g7|ֻ>OyksϸqnŎFҾY[FC  %1w Is c6 IDATx-O>='kK ؔ2qT$|'T4nօO_'i6N$ו(,GHhу~]I~^Э[w:\G]#x~]=&=Ý-Oٗ.|5NMhKclI=fGA>WI}sN^_K;>]'}'3qn퐱g3Зnlm/gW ]HqS]ǩ'у*~}DV 騊<5*׻n/=Ə 8ڍ痟%sJN(x/m,W|IH=HQsY9K_ ލ1>t/~>S֯ddzޘ{G22kI_oc^Ï1OO!il[5IS0'9It pjyElʔ-{z>߅&WֺQ ]8}tbt+I ڶspv>iu1^5ΨW_޲xj2Z3ĉ&Ho8K#lr P/n,0soK3ټv$,f30%kgݶkB<y 'c;]8bnQ3l.Kh}mpw >k/nH?s)[jINIB̊9Vu8"@9y k^Lxg>]zdy'ц _ YQÛPLv!VJlڐLAe縫{O?Ô ORnDur4XDOо?ϿBA))NVB#b]y'V&j2?=yxÇ%t7lY=Ͳ'3`;#3,X{d)ʊ[ SFg?8q-3#&cF|y/W򋝨El:OWp;ot3=5({K W)6 fB#ˡ_Ʃݻ=g8?dDѐy#E.M)ā8zVq/_JiEUc;3y0TC'uL2ʷ~a/i)Y>d*[*@y{(]^xz;qsNzQ3q4;dOΙ"t4AMNv6vBjQt)NF^(sbyH_0GKMFὫqa2PR|hdɱ>l31VG/d\qtZZv+vrxF424>|F?,77:7_&뭑T`4]y,HAx>'`(4[E d%meXx"wO C/UdI`4 {mȫy'ג'xٻIhɀ0i@r}u1rG {<5"^<ÄDTnyadszM$isjiڌ':χCEiz5 + MS~GچWp#-bt(@\ړR=˒4O )ρGX)iC=d#MCAt|.Wqvl@QeFU?]1YK2U-i+JGhY-9͌dkVO`0\~OGoN d(G浱^@TٚWvqhكjms0UJ_QG`.h"j8Y[)S QF#Y.}GIZ< 9Q|5U>7*nzo4c+<'?}`CCm _lŦh$R1J'SH!*!Ti*b?Usr SfG>søÈR|˩ϒPzi 2eмIMe&ao3+>}d)[= zaS)_&L}P0s] $MO~Pyo݌p40l\HgUl6]*Xez -((MWA4 ]tgظzK%1T'k&KLĪ`(w`0aƊrU^v\\H:g.%?ac~NgF3S(&&Ղ͂l9an\nvY!qRG&tV-+bB#yBGv呁ϧ* F } Q q#7"չa@JçMlE]o4_\rz$ϙApz&N]5ogA>mSt<Я>oMkؒ3ɤOUV'yTXDv+E$N?S%*22 ϥmͩ{ 4<~"i=7!*<`?BB tO"(S&toɴE.B NEy)~h4K$XbMS9k} Z{$jnyUA>PeTm,?}"7fxz\UU,p6+>x'!;]'5U6ݺVĊ ,Ȝ =\zԨ5-]qc"H DdqR>b/< +~7Жql:^|k62$>|[Zz钌ld$dr铯yD^|wL9dg/I2f xp9gMuˢ9)$k Ӧ(N7HYŅ;JLzsJ Oۻ#kyi>x a-4zq{uۋ;%W^Ͻ-: qQdU/nkNxݚq׋nՒ's(كS:ϋTm(cf:cVjؗw燡Hly#['Nm iOT!Qfs@{v e]hѢ3Σ9UiŎmfk6ԋfXTRW􎸙zً}ӠrY*kB9̘GGkUVy- |}ӽa$-Ztez<>=8we4Kӟ-U#?xu]ٌx{/;" v׍WW`46]ұ=NdχӼ])jÛ]R|^Ĵޓ|[1wmlrSMhσj(YAdڸYUcJT2R͝aEQn?¤I 5,&IRbEQQU_I_>Ѡ.cK)ÔYAv'{H|fB@f=V=K^XmO`h-<'.<XlvB躑UQ# k WQ1./.c#^2vSWd1Y4.E]s]A7Z c1H|.E`IR))k08sz m݊Gh AVͧ`ӑt Դ.M xdg6` 8ss(a7(SQ`& b@Rdfl >daA6 Ba~1рt5)J͢]\8 ` /۔X[??-5`'Og}uh&лe-b3LjeZȁӳ_spr$}^1elZdzq(HZ3H?!)<7㕁1k "@AA Se]FRBԢN ի\'P\.\^7jpMbM*aE8URve"u%!ͦXHm0J&jX$#Y~3ַ-6n¼5AT)ň@5ۼ]{mԆlްmdzI4d0{Iš籚nªP;5wQj֥m(g0{isu37lbMTԓvLL 5 
pgzrv4t$9fjj6 B,]תNrvш=cqclOeV [SIm=]ٴUƭOi@xRd_`H o=Iԋ"Up˘Q&RBAAQ: (M@^\:4Mp+.*RKUYcP*YIv^mpʨ(PPnM є$pggZĿZ.Y 83/ ,[= zȞ8SH?zA&fwŮ 6L`6cdMeH?I.*F-fWBAGŏ;գSxLss:TMVc3mPxi-&ՌCIM[{GSx 9;[**an<Ź",cY0J`Z@㧻I9qHbk1SE"Ps   藃d GQ>zM|U|#lǘ|E PeLZsz;)0nJ? 0i& ` 9/FehZɘdtt=$ի!2UtShŇ ]l0Py+^eė lZ$KGV{Pͪ\:/٠"G]4gany*Gh`+YJSH69c%YM&ɖ}<2tAAAtMɄ-,viU9ݧz,Y¹#.I,Z޵ ٺR "0O ^5J卤<ןX%lm=v‰eq:9wx",FpD)t2bB|))OPVoDtI B:S!ޟdg]c\.rh%00JQ:U^ {ě5!v6#YN3qq1hNJG5~"S2 {@?Z3ӨG)"AAA |vb*Q^ZՌƂTnڈ>$726{eOe;ƈ;_cU'X#F37MUK|*m*8WdVqOU3;4|/G@Q u=3v"'2w0 9rY'ֳo<㇣ȉ5J蚆ϧ^BU|䥝fg/.$btctJԬʪT,nɧ"TqTV^͇ib[2(Jl%;TZlqpN6E=+FEWeggiAA3Mӈh~(Ad|9'ٝnVX,>応hL2`ɩIwQ GRBvYI•ʡ{!( {HM5W!Tɘ[5#YNfTMDR-.,:.y_AW)8P~$%CX|4M>u+z~4uz~{[㕤䷶UhʕuMCXAAA{E   AAAA    ]eA/:~ nIA/" D A{Q9<#4sa~) d^ɓfwke833rb>r}0$?I5}eM$Nh,'~.̙{{L㹗P5?1qz"P|b;f-!9Vt܏'nmRoFνxe I*LNر={b,7O]tlߚ| Mx?:u̠&s]\С;sLe:vg'KD*Lo>.H?:gKPE"4)* haϲL<|X:9q ׆x"tAn6O2[FOsc(7" J!:.&7'o`C GyۃuSx L#b y]Bww*ʧÅ-$3OQH~Q;f2KNg>yxk<ާx wRO9-*wN:˛4q">}uBD"T;eaLvS>x*a+2L3o'`o 2 ~'{ IDAT31G^`}ه)~gҟuh`ګتx_R=hL}?[5u{!scƱpB~\s[=iӮ ߞ+W+/%SiSq:O2aTڵjN.r{6%ŏ$}G|x/ڴ.'WёMkՂ=1W.>ϲ\2wvu>2e9߯83жy[ +_:/EVymY ,:)'`ڭy`u~=DFKd|Sw]n m>m GܴXFd"]$/ [Np)luh5h0,@1YـlfQf:?yN1V'k)Rq#j=?̺O'sȡc@#/b7}9 x9z$hHZk}}q-cZ3s\Sh=7w֭A|=qXdnP'b1 'QN-eyoz|$5{fx󧷩]xXq3 f}|EBVmLG]-iI砄#D.6cъ3omIDn*yy:Iv0k$ZD.W횦Q\\.fB$4w^݅$m^䓼Ljf.J!*,!çx.T=-]L ]H۲| KK_ uB@6T&z / x|JI{䝧0Ry9߂54u1sJ˅Oy`J1f尮q:/)Dl&ܙd6[^5 Z+Z/foQ}Jy\XNRx{oo T%+꫽h犑TPyx}^.l]G2&6~.(.N9)=^zk*u,K4]%u:Na1,现;њF2i\P@vvqݫC<>t:Dp={jWm%=F Cwt.cVҸwiep jOى#ʖ4ūP_ }s4gթ؄X^ya:7_ Z_|#>0~K·O'+$I?blv\.UY=}]K gJ(/t#*ʌ)a0YJW\*zd[Dr =ԃT2^~I0D6{XJkpAv-[H,(!S֨s|L[ 3F+"s)|.|>[gMAvO J|%`kVSŭӵK?c~i0JCJ+u:ӧw4:( 8枪;tً+-qsaaF .ͺXsOFp`lJSΩBpԈU#syx5n Gz-$APjFu\բ| *ǔe4" )@`8Ql`ֵ٘#1SV56ĥBsY.PKlL:ntK⋵}g~*>4pixzPtYґ6kHT};72[r]ţuJVuHZ03|~ _Õr|kyUPU^/wJNvUџ&d{f8Ȳ2uEŀF$e}H-*{4λ YLϪ?:ܽ.lc0;An l$${cXw? E>ι}+:(' ^#i>dxqMܑH 4*sO4dbrx#dJ[YA i\͊y{.Ë KJ[blElG[Ç4mqloGJgؓl 2/WQ 7$a2~"8?o5x'X[CeϬ-ML^ލ[( I"^9Ƽ{pyX4똾Y.!9BMI HE%cY߆ЇU|#e>@ܖ|M|05RBWYqg? 
@=يk|m24 Z]: վ4 nnJJp"=7T\0']|6&u+Ѻ~4R\W(Ѻ<ֺ#6QJ;tAέbDJ#\߄}س"G7|"~tl̼ǟUaGAŚp%O+rskih+_2kl>E>ruNn;ҵIzI)O3hCdu1zQȇΏ.x2>noc _ chСKo#h ,CfVq`*'Վ,4VqU19 6?F3iTZbLJ/U^ޞܝEw4`,=C+>>i!ja{웈M=A$XfT<C@Lh+%Bd ڄ@De-A1ӥg':'!Ɲ8 :$HAAAgtId*qȐs=kxJsKQu0\6OtUdEx>M?AwSXPLUts#ȋ,kTtKh\>{m;3~h;Be   =Nj4 ([ f+ѨmCQΊ?pF'qc[E3ꆵۭ&tL4nדZXvp%ۃ۫#a2Zꆕ{UM&dCūKXlV̆Z ՌIp`9 L_%."4y 9| SUKMq$ c}@7d-&=]ޟanV.ZrzPML8Lh\wy?:հlzT( NAy5 ӂC|:OX j0}ۜJ   tI6aS$ugxؽݐE !oƠ>9JZA۞W@y~r^gPe2aa]De"7D`cPrj 9ʡԃQ~-^F6i˨ԤNp*r Ьh(Y´L HԔ;+ٚm/vɠj#DцcTLYW%Q]B,dKF6m-/|4*ؙڈ+:'"    LZ4OO $/68ZۉN9—kSQv[C-Z >:g=cq`54$,f /@N\n`Hn,Koي΁:*!fBS000ԪȊLL˖t8q,~&b0d >v)&a)-$KC yKz'>64n1<=N lޒnaEC6*yу]^ )I,lϵ%GA  m @#[*u/J` îoO00la Rļ68Ej Ѵi@呰ϼaHuبyuc sݵ&:м'ʢs"Ske/>r7Tr^1?]M擧_׎t3gnI<r#+KuJ-.z瀛ͳi?QF{)-Ak*Y̛{Ju]ow..[ƌy,Mc׬[Tl&^1pOp{"zeYQ%g~%ٞӼM30:?q˷hߩ'=9TΛ:ӱh&~Jyaܥ]M;ҩC'.y25+ڵCmݖ>cKx TD>w!|wU|Y돽 x`L?:o}IdL.6|;>]Fx<4?_φ{Q QtdZm% ؏\L$wkOZfnb(2fKXG .4k-c:x kRP=7d!|;Pr}`/ Q_:t W#U;ZټϏ2<͹YCdJlIIa}B CZwU\>WwRQ\ܒ{70N?hA@?vL\?R^i=k0P jQC=rѼ-wlP[Z/ ɉ'Ww~Ð 4SxQ[sÝ$$u3y*:C WG$ԍoh~r5XBᇄFuqY`%.!ZCYq1%%9Kf&4*@;\ kn 'C X_t%#"@ӲrNFXV1^s11<gRY[AaJcٔ:uE]ExN_lf%GPif$)4| W5J/GUDSՒ{$zq]SDQ)@Pr,*|!Ȯr*DSvs-)U!#>>ᥬT IDATJ"&1gN:9n,4U6n{Ne8}VҴ-xfQX}qc޸&.s/xf@ }Rl57&¼\+J:_?OT&A_p˻ӵx3bxLb~'δOnϻV KwY^ݗt&>LgCr|E>M侁"曏|'̿4Ҙg5 mxWAC[O$\ A lO%Y99/>1(zy^T"#&vjs<GTlE8 '93qSxj kq1X$UTTxTxDPVTLqu%>o9G>_$>MAC"=sSLDb00п#ڷI .xϿ#>v hq%NDpd4m MګmlݗM m@6qkn"bI=@ul5&ãдOQ?^Bf!2>dAѾDhB]^A|ar4ky{:9U^MZҮM& L1,GmT{iܚm@];Y#)v[@h!nݒh3ClZ=;9 1vlOxSZI".ct-W`t*=5m$.ptҩფyQ"qR\',:Eح aʝHSMJ a\8$Ժ f15݃A /}[[dFzAw}_ӠS\9.Բ5;^ڞ3&&o;;#rb/{ٻ%UdfrwèEׁ;d\&ܸj֞kCFwsqMe1I6o_†rrua Gs$Nyd6`=p.q{Vм''}d1w$S5k쇏ɼYڕm|4r[檎Nʳi-}:6&-xb:{6 ^Sy4 ĭO_fVNԠv5qBd-}CjUZM^ ?xPp>|"~ KlyQFCͬAؑ#lwI1NG4QB|X*Vl9 uB@V]Zmk.x뷢yDs0(Np>/@y@D!Tջ)2&ZOݔ x?WNg7ol πd~LEKtSMz E8=~O}r(}4b遽wx٩=ը .6ZC~!wCsi> wqq$4:/NaA$L&&ڟ>,!' 
"/&mT^K Gv5qt^p`1AF8zV̦lƼTE 8ٻYF|*RY yc kʐH]p{ߪqиMJu}\7d`TTʻj؜q%TnD2ld4h^MŃ&-k*}^f0@g蚆5mב4fӦKQG$=G$uTM.Ltr;KWW|/wkؑT M0, ]wӹkwʠzUV\/;nu_M $6m"aΆ=}Ouom X}5 Gd"QFDtV-Ne\(Z5>j>Y)J`53{i|2=Eh<>.i|hO yEPw+Sh y1I>^CAE2eԐwJ95LI3;VwWȘyj#E\s'F[YV(1jF0jr9u\jr԰ǧI m߾ȏ Vf,5]:624DZ} LVVLXVgg⬗M$uj@Ө}3PT7MpCy{}}^u7 R5s^ʵ3 O;3I$H~LFx C^6u,~=7}FbQj(تvT3V^}}-`QQ :s-'ѻ/w$vlȥrk)- ULar (,-ޯ)(|CѲq_X;|ŧhS?ئ&?¶cƗKx=~X l{y9?12ظf]$x;};t <իeeďGP/E6 򱛉L}OlyRӈGp މȉX r k l{c8<ϲ$zz]zj[MUkxV4<'}f3v(xN9VWU<}izgk),y_74ܞӯU?۱oJSK6I^uW!URɷouExe*EI<4 w ZǺ(Ů $4oPkP-2 5zlaɊlԐ MEUҎR B2Tsd^]֚I~``hCayDXB[F]^#>D6d2\⥹׋f˦<'E2&_CܡЌm 5}A35zPqރ[ 4]3b%,<@?[4'RG0>V(=|3bSZPE;Ah´޵5z5%QdVHpZ6kL l!us>n.ݡgN`Y!f|r}dydz-0t|wM:[^}'>nj{`܅:42o>aWw70cU9nl*/> Ѷ:_$LSF@#ضV=Db˧y`"o|r=7vN_ˬVKT$#x e|Z?{Y:ʲ,_b3.ƫkmhsb fZY97p{HAwygo)IfW^ɛ'ps .$ YOiCCinD^?bݸô祡1 WQ+qMF3$"ןyeGՄwJًv4 OֳNb5)XbN̘Z6p106?_&Rjμ=,YBt]?" lo KvZdj%3,r'נd4<^c,]CSϱ`G!`ƪTU4߱p'׿\QW|Ξ />m7ap(h,]q9 R lTm\_1x ml۶CYeMxsnPwh,zϣ&6S2Hy1kh9;e*ѵw}v<Fos0)s;24$ =m#7Өs|fOYeܝndjwj}wjtNbR>Yy(NE;}_OZfXNXK6ug(ԴL5ViXq`Vc pi U߷i爐ǝƯu z~Ø"*\SՇxN6jCƯrg|.8&%+iD#1AK*eeķAN":vyBj*̚+ONh3^^3i\f`{ Qe/c$lx(ɯ"IXJ'SSH`4iFK67Χny d8u3GiMpt|b?]ֱ-diݥ ~r6oL]w3cqoIAj6f-`ر;vӬG+QPa[e&.ŷki=~"Rx]kKVx&߭ϣ3}H&SsW,VJ+g\kM^Jst#,*rYdLeZH}}y]s #5c۾MŴFlf@gU2ǐsЗjqwF$ S|"/fjD1]\,F\h0<$==`  >0~_!޺س'كg?yQ=i,ZYӐ-NK{s!V7R…"Jgp*1Nv%>&qǍWgx5Ժ\ _R9|Y zxt JEG3\{հE橥ƅ.b3*\惯|.kfBt2V/"k6PsRt(LV?֪j {+aqXE?8"DD.yt-`ƷIɪm(D\ACYt5O<3W LၯZ3mH¼lV7ۇ?wl}.ߺ }[.u7Mfq(^=wlޙShk9x=<>Iߡ 0p-t֍( lj^쪣,]b%'ekˈab$^d#ecQ w`yYBطe+;K 97}UdҶ5C5 AGQLCyx"vFT]̝<9lp~[\ڿ-6ui'x Or=,%09`EZ LB%YNc 2p&~&LImbfS& K5Ed̶^\ɺ>oo;%k$a}f4c.kA,v slFD0,Hc0Ӹe i rv$`İeܚo|+"}gH.ĖMلY?ǨkzP} 7d`MΘSZ/I@.aOْQKd!tqCw"|DӪuy, -WO~+'zL+j mB*YK[ʝIxss ~:$%鯽ϲ͸apy U|Qr9ԫ}N6n3K {}n]WF)5{|ſ{]˹Ͼ;B7fpMJ>N˵-_Tk~lG~)ytyCnhX´ay;t'XY "NޗЧY(Teķ uJgtq2#a(عADVacL 5F;9|Lh˃9z-Y߶e҉.qm㍩h{ܤ&>wKeϳ0o/q~Ͻ>'sud*װwFG>j 7S?3Jf>tʼ,#?l8[9\%Ija3Ke4W Wᳯټ5,ߛbA9/Otw^YTdơ*` n5Ͻ=};pP%..^A)eT*U9,͛4B|_NU 0I|f!.ɼIBV Kp Pïo@hD8"ô_d}m1>|lTf7NyWcrX1P[ IDAT1@1W'G;o~ɑJ/>J4|UėYZO\=埽;JP~(aj 
53cM112?:ٙq1t@s9Ȕ9iH?;kCWyl.?₋XSs`L/=JX\?33A)gdUֵSem&LFṼZ̑Ȟ*䠖na_P#zu dy/VWcbk/1lC 45Aa[ N;1%~#@m臷S҉2[Qt*<,8yDwD፷n#uftjݪWQȢxGym:OV3e--oC=<|cs_k~um>ρ\q-t~t'ǂ{2Yأh6c)BO1ª;q˗tm%_䍕UsO 0"0eڇSՏFqxW$b?=/NߏN޹\J}bϙHhx8=I= ưf`ud3RxZY{N`GOgޠK fOyԸ蒄avd%ZЩ,)`BH3C?2Yu7.?ϼᛯWR4@a߁>Җ!TR eQQIt$_*srF'Ԍ܄V@O,<OD`. ^^-1S:$j2ֲ4{'\IF>L[> 3n\rG0KW:6/an !3s%&Ұ"2W ñR5k)qFGG-˖lz zM4aV1ghf,ʒ])UTtdaȵyH-'pG`%qc"c5c !$4MuA( %%]BUvͼ^)g&4jjj f(Wv f)ӓsa 3 d 渆U\(AňUAY9 swUʻwz|gT:;1#^ފ4g|"kX k:hUPd E35(&@$N?uMǏIȍ/:$!m MȲxcznjG8K9''5~9Y?ae] vep\s,غI)\) =#507iG yȲkyf(U_'ȒcL Մ.,YYRGd M:@X$U5$$@q`!lvgU6 a| n:>yL4*MjQwнsHfmapipPQ-{#Ksg#>YwFt퀣<4r8}8FÒB=1O见f'I" 8{A&1T:tIawH h$Oѿ8w Ƣ?9*+x+RϮeLr>ĔQgd C]}AwrK Bas8Tqk~reZF_~eH3Xp82&S9Q~ :Y%$sDݮy :#z la5؀0̿5LX;ɄQF?G~ T5$GHȇKnXL`i6%\[]Vsdzt/$#C{RYwO8. MVqtlWF 5&咝T%sKWJ:3jr 8~PaŶm5iM߽Dcxd\y@YnyY\8;۲i߮co=%^=;!Cc_e|Ж۲in`GőOhjBys'es>F9y/!T[ 8xp7U>>~c uguiwCˉUl_mxKw[ÆxzuJ`c*a=\Ǵ+VJsNt+NBm]ѻ ]s@|H>a޶̱"El$/;c-7߾"#쯍!ʹ_bU{ z¤eo'`)i§{)d#\_x m[4UYjh35W|r?L@CLXIu K\<))H@9][t2:cLݶy8X~ccr[ؼ1i: 5%D\$IFbsij9nA.@`oq7S_ =~tTB02*Rf_A:ԅ0D 7P]1XLj.GO]=9QT ,+wR֪tt9zĞsǺ38\'C1ԇHw޿ %X2HHe̽KdqdNHf[棏:^މE:mYm!+wFHLp~I]wn޼?nwRls1tYv>hy)ͧ(/ 4i> VbJmʠK@pxL6rj^Xb2Zu̶!RͬBv15L[ XMĦEHwetrNX6 Gi'+U0+{֬Ïxs:;]. 
Ңxcjŭ$Uyc[{p8PINZi|g0EIE `ϡ,~s|>j`*F  {3jKפk捏:`4p*i&?/,VɈTw?Y"OvfO_H\HkxX JE,o`B!]ПgcQ6&~ncˉK7ooNGa[7 _kVWf-Ih;5w`VN8l썡:dcW[),W28;g/`Oρ Sh>=ns`$ >W)ZjsX%ϴQ<<~ّ]|4{N¡ٌ~+)wA d?._HDM.Oc0T5#l>#ښ#h@l+9o8F;Fv;Y| Y~X[cڨqCX GhƱxŹta fB'6xaLDs@ MKmʙ^ޛ2v3Y$V}ZHYtjEfLܟʼn hzͳWl[fppe [t  <*~H#P }g&Y䅜0gϥ{tO?}><6-̼7'̦wpiYLp y&}om- _g‹ tV!#3ϛ~!vpSlQz%VDd~H=3#4h5M8e =H9]ʐ߮%OG˓(qؾVW]OT!Є\~:;Sl=D 'ckӓ_W^-{B\|?0w1ui2;[q+Oc+V%d^z2wkE %:HrVYLz}#=Q琡"D ѐ[alݞ6)0lޟ{ClOZgx8jӇ28-^<2~G{_CMM VY@ Wd:LFVgˮ;|N'N˿HR5[vs4J6XQI ml1)CjNQWW%5{Y+HiBRM,]sz.ꙻr;1%2GY,Xuf`l^%n=-,TZa55 ;Lv*VlC)CI*2︆jjF-lEtEkjhֹ3WkG;65N-Ma,&!G"h߯f98mMƹS1غm %qxq DBj*먭9=|-H=ԥQ}wRvpW5Ŏ)쥦GChdèX; %-&j IU,w0L8cKjFG0WZ[X(t1ÈLsRpݤe咞HQb\?}QI~n2V42q[ R3oCjJMb-_7:HN"?'J'7D'BBi_-1F'͚DXZ Jqg4%7^'se 靏 3 !T H4q'Fـ5)y /Xl锖mّ!Czf1ߤgoD޹S0T'bVLlV'͚`%R hVޜtKmNE+U4(Cu%-ڕ`֡Ah'o?ڕ0&3`P?HDzT[ұ8cj% 8F*&'Yb\.' Ӷ%W,f?crh$8C$k(Is e[E2I.Z7h l͠녗!'Eç9)}}[%`!)5%&L2M/-{SvS(=6/C/yMH 4MO"-#DG̥ 7F#qY$9~qF[$)RGJӀt"/Ukfݻq1 oL6u7ɵ\uP(9te4K2>[r}ieՈ ۙl6liSQJYC2Pd|Ƭ:.1 ^FGs"[.xNE҂]<)|@u<9iٝzqc4|1lki>gŦ V m;2FC[Hr^NM7Ӝ#k;#kO Ǐ\Mzӷ" r%OSA |$_Pa+b T0 RvNy5T*#ת Y&1/yL7Uj kJEFXtP%E[z-LTɼ+XClb!C.@I?c։]<¿jX>f94΋kt/Ii$ tF蚊fO ;q^hɿFwD'DZa>>-Ա|&VOVshl#\k"5*eO,cV̾m;93#Gm4zr_otn;fW.uPs0;peptK]nJVvbNq" 閳bL.?'&KlS -3Iye /UX ]WKVHhJ&&*ewĘ%N=SFDL I͚IxkٽHU٧Ҭߕ cާfhKnLHh&wʤ  $+X4oݖ~mp3jVj`<|dEd\ĒЊ5˘glq;T1iztVȓu=7Sn*m⹱I8VѲkv#u4]ɄVGg2k$p)VGW!u+r.e'sPRRRcZrqt]Gh Q5K>cԪ sݵ9$;Bl]u[(J %&.qF $Y]]AAd1Hhz.+ \Ȫ?]e6حIE `tPCOд5+ŭEz=xe69v"DbfyinaWuOEn=h egjWU`'0dCzzL* h<APVk g&Ơ  $fƠ c4hIqm]S 20MQ `ـF+eT6h`1)h*&#FY" Ah0 + ]_<VQQ  -!jH?6l IDATlQa§$$ ʏ뜫U%tIԸ?M@%`HEdYhT~.bd"$) j$B8ݏѨHRtH䇞5|~t1]AA g."@AAIH;"sk*BAA~4M#++ (tA   " AAAA    ]AAAD.   AAAA$IHAAA~iYu`0(RH M~8u[U68-mMQxA :ƃHE9_C{ Mlwl#҂$Lg::n9;;vl;DLFI&г7j4'Iݺ*o$YlfEOOHz*Vc)vWA!YΟch֒$H߳-\嫶S&-+hddÚV%@2`Ku9I:XͮJVTPe9+V@S(. 
9v6dO>rXl5-6f ,N߽܆7wq ͓M_ 2r*=G'ziO70~>Ct$ufW[ʗc+Ľ_J`P[t,g&| W;x:ZxاpE-y}oeTċbן1{N֜.B+Jn9:n1o 0xJ 2G7{=]-cϢdgU^Ֆi/E?Ϣݕ%d rl6}Co]BQ||{U<<=o+J,{nl9ckm*>N ڑjvx J<&U{8]Ǵ`Mo{ro˨o2󮽏~1pNshb|QĤV`*ݷ_N]cg{H*{o#uT6m`gn<>Ʉ/6vw5Eaxe߲p W?{|]GF"$񸵋xsn˞I~(殧Ж{#}:7"cWE[A83~x >:;P9)^ފ_zMC?K{ zG㏾>^[6R/Ƞ/G_l|3ohzٷW 5-_­t6լ&;σLs~nL߾;[EupJ<:S GwQۂd d7!ttH]|{6ma¢.0# eYAQd=[~p/M:AQR|d  |9NK.*΄.x/ik۩tW2 X2srۃϞX<6=l䅿ϾMyPY1܅{"3ɱ}L yvlovU$a*Hǰ5ŧŗ*Iц1] m`K_; Ʉ4塪cw71zX] "`Vg8MjBLR Sd{j.NA[ne_dMe(mIcX^yeJ8!h-7'X"sфѠf ?|JhXBb$d;Z8H &YɑFC{l3 /Ub\c` d2c`ARk5x1ź)fQϽxc^Sh_5ELѹNVvzlhJ2FH$.bOF w5ۊTṩ3_NXčt"m =ՄT@hN`n<,)f bXÉf<MӪ]m`wv#Gbh4a20JqbF1Jh c{0jNYvFYCB^x]͖&u7J:^ ͳld mDԟ!oC cA??!-{Ȋb=*c?15 ƢNn[ x_bEm7m{1i*rRŐ3kAmh ɼTK0& $$TFc'ݍ*rL2. rc&O)j&]//N"sh5S&}ވt'{챍RoҍiF6ңd_lL;6,oILډG͔)St*kEY69]gv&Y}ؾ| $'ac~I1X҆.;L1#}:!5BbqS{[0e$a0]PS& VS%?nҟ4D&Sg# $n+K7" _5աBdT4U"9M.a[0孧b`f9=$_k}ĵuL UR}u$>:ecAGW -].cOx3ьPshb@ Zż-8j;\rtSM"e9EC[0嵇K"kT͛̀,~}@d3@TWUQoȦS#NoR=Ȓ𧗑] yرn¿ßޗqI=DxTsmŭ99gV@Vh;Kw/[Ҥ= U5]6 ݾ; o^K22o_:vlރ#?&iDfcUGXֻώe𹛑h9XMB1h/pMS$gxV䲲DH nܟ~sQE Z+ۜ/R=;]Ζ$l)7bkHͶR3+7t?~oD;l92/⑻z` s]L|f8obs0#;z 5 $dxLm|KbW6'{Q#:hLiA,?ChX(x?F&V1GrO7xh*1Msk 444Ζ|^~%n$ɝMFP]CA土sZ?8Q!I/xhj9# h5?ŭCgii%kŏĜ7bP4 Jp3/"m~- hhPQ,6ʕ<=3 ˹}k:E\\k{]Od. 4.M#;;Sx-Ds~<_] hsU/a LN~">! 'H7m'n,;7 ]:o1;"x:#ʸ\ C $N׭a}SK\s{CS\3h~}sF=u WvW-v]W5x ^g?>.sv񏺅i0Key1{K]b Ni[7pж<-=7_w2W+$جrH3$l3l=s _K2 2GPԗgb\Ms}(X չx<~HЏ/bMMKfN(NKboq6{i̓\=,d9_4v$׃4xhH$ƛX1 .⤳lzvE".+z711-iˀHifڽ{7n[L'g*IQAo}- 0l"8Q RWS/a:sQ 55AqNp=u>.)⧦@DlwrZOֈ.lFZxlFZO 6]ePC>jj<]8u%&$KU+GА1Zv :ŁŠԍ٠Vŕ `u80+Iq`'Dz?fAF S]%"aR@S_]' 0ǻ}> VOUN\-ࡶ≈-h!/55b1%[SE_E2ZpbuU5L6fenk豤PS_Ǧ +s`d au`%":kXw61Xbp90pH7 EjlA`OvTS]™EׂVHb∵2BYQW,ٽv**=ދtM"&1cupHS@!kt,1q9gš%E,6+FZ N&*asXa/5 T3u:f{,.彤7, je#1.*j|JpƯp:FDw"#ɤk*}!X x? &ZAAAZ%e:^W5Mfid![YCE2|9c;&t*Oäj*Id1!Ep, gdoDF.Nch٪l8    _)87F3 X՞M| iU>}^yx/s#u)r-Wl¡&о"8AaHIk.O#j Yc 0D;s]|.m3YG_fWmGzS.݁߼[0"/YNHaw͸w;J-T\ou<x~)*5tv-1s)v l}'iЩھ^0&g2y͒0:1e;GiskCgތ/>n;\Ķo1zE7\ٟ. 
yYԸӳz6Ka,qE hul bO-_TŲ0HAAA4,FYУ&6ۻiVB|rHpU&>|^u":誊%!CZWu31 Md5Lګ_Ғ:֬[Λaw^PSV0tjk+I _}֔4m`ۊ iҢ5ؽ, )gSw2OcW*^D"E"  _o p1dςDGGҸ~ft]AAV~vUAU2JABSX Ԭ#$ \F?,cϻr]xKKe|Ffe<]2ӖnfYR.7h}s4v2 d"j]drx 9䃫 v=M>IL0\0d'!$P5[cdx˗TfjɷJSӟ-Zp%(ըf7\mi$gfPX׀ ]1a6а,)=z3xׅ1BquKGF1#L*Rw nRֈzG1}4#.HdTpٝo   zmb0$9<Ŋ%"hp4ÿ̉(cPQ5j uv4ӎ.|THфnw :`ݬcs HNEtFƂ|y8MGG؁"ٵu6&xh+|*:.aCSϬQsER8y(uT߄AM]b `(;w`ڍ8>թ_T8nFWO7^L*]Bk1Nz)TQn;ŋ}o 99v؎\.2:п~ 1   Ÿ'@ќ.Atہ p5*l՟FI7 >j);%ao CZoQBTVtPx!e>fk][1[L&'pdRKzSe<]R4aP+99TiU%$d<\Ωx5eP+Iɫ*\ܟbL-k}28]`H8\#k= ԋTxaH&t>3ucvlOoƭ`%?#{8s MESrUȘ'9ieHOW50[0)HɦW(4}eg:S5 7ǻRGjvnʯ PAAAt ̞4H< \aS5ѼK+^(-V#8*3"_ 'I>זթv}]CO\Q2TUMNQvxQslN}8[?Maz:YDؽHNVƱ3gHKDe)O/0ވk]j&S@C9{x3sSpXL~}jر8Ѻ>8r<uqO'JěvSY\BjCJryFa9='aE`w)8sIg! k3[ef/;#yhRAAAGʋ/(**.VobԈA5nh&Z Ms2&/4km`08LAV9Ѹ c$@wPTTHVL&u3Us.ϐ0Dՠ.F'eTK4o߁Qx%4[!gҫ a+%Y:LD5m@b#+; K05}S/*.34Q"#L,A !&myP_LVQ5=uMU?Oj5_TKz7ɮtQt8A.U$aC?dDqPVVBtbuoldT܀f,I ooo   ϙł﶑סy}?M=\ظNnTZf7S`w?X u[f} Kf`ފ=TzԧaH߫C>f~޴9&:HNĔ*"~ɲ~:OY3]קQ?;o?h ߽.`[xW5_se'Gч0Tat)}4%٩=J[V[L;RNlC?9du!0 q^*C՘q$iEUqGÓ-w?ZM^zENc{r.wFWҧbYNY|Sق[pf|)7=o0p<7G1wRRA^+o@q7_K튵Mы;oN7yc!JP,nπ8 ԰gs+vٽ#6_8<!q{S^b14;a\_u;&Rhkݢ n`XF-#Fw+,=Z;rDp.tA~OɫY~Ÿ{~11^;Kc i۶?4 R⬙Ped ;m7WKH;x,z.zgOѣukr<6yZm $Tg%x >_̢=;%I!A)_q/Ͻ)T[]sZw;@)oΞ$, 3oٌb},1ޟW{ަ 7 /cо/S`7Hi{ڶ=5#*KN]˲J8ʧcO~'3=kՂ|X^.gՋcв5=ƾ2o@uI kJQ <+c>iֺ7r?KfԎKm#95:g_z>=EB/XLڜ;o$Aɡ;gX{gɱ]|8}sڷZuc6dl]GK(:ugϞtlʠ;صի5-}'N"B_/ NTcˁF&Vd>wN=TSÕZ̽;Ӧ} ӫxbTڴ=-'Kʷ乬1wlG{@HTcփl׆&@fgfOzĒBV2n.8~)zҦHf%cJ3;|Y7{+2xξw8k?+[ŭrֽB N@j0VW 2_q.@.&kf )L}ma擸c> μ``?c^ewpIQl2ﭟP'e9g;ls3Gѧ0ɏ[Iv }R*N}Ł:w2Qtjm:"|}o_*|a 11G)|["u7ߔÛ֐]t+5m3GK(hZ9:to3"ɭwNed#kՎxdw&ngJ;T:xt:*[MfDjw7?w 6gfML;؁/$rO S|W,[RN[9v)WK Ϭ wy{1u[byfy=^z=[MȮ;M̆bQL]HUX懏{, qk%S־ׁ4G&|ykH(%GXi&RK(Tj$w<4zf5˓I=;vRRC "{?N78k͖~JXdPx^I?=J3hJv.7nTМ*u#?qsG+Χ tB'zА JM ,V]G#2NTMCzaU54M݁؛'F";_̇}Kf$IH$-E9^n65+-05 Ffw_7|*mO7 ְЧ]? 
7B@aD/͹ɫVÌ>$ͯ Xh73sTx4vV ص{u0kW/xxIҼ ?[HՏגQ@USZR]`6@5@ϸpdQ^UHlzL"w=̽I?DVy5YgNrB6!_rZlbv%$)~1kxR&(:8ug_"/edF7U,@U+9q0mFp[FOndTՉfut+<{֮9t2r,Myr8tiAieAwr>9T| V*Q]k>gv͛Ө~0n ÷Ig`R Zw#p4BL|@ϢW 8KI=JV֏=yPz'ܵ;53d͊Rz #C>_0/hTCk@ߑ%[X|lN7dkrd=R+$ xhv<]߮فrD2?Nw>C{d狜$ >=wK7 YUTS?|Nל8dÍ=*&eYr v;%ns%Y L 7KP+yYi4[҈Gc«;v6:kl6;Kk<ЙJ|-Qa9//.ͧQ1v:V20 _>N;oSǓď_h(stmI4YOlCAM?D#TK)Nyzdxmych{5^SEHo?~Z3ږ̣ox?TȧOb C\zuV4p7d$h/W\[u^o‚Bݺl|e˽vI1D|2F邧kq<3=|:c|kTHSDˆL~iZqLnқ@wQt傻7Ɠ7፞fNrs}pShG T6OK$zBAA_?q:"@0@ӟg0}υ;/cXvẊ]˘n VAAzwϝ;b zuu5QQQ"14{1Iddܨx[  !腅b   tq_G߆AAA1pSAAAE7p8D"  ŸF$I ]HAAA  U@@HAAAϠ   AAAA    IϠKȲ,I h/-2,*.a0蚆֘$dYFOu4oފAFu&r b L IDAT  I$Wr&T ^4" X%T.ȹJb[5 TŽ4L4Ǡ΁$c2@Uq82@ W xҰ~0ۿ !kٞómcnTǀ    %Z5y:rp"Eg^t=+_/alt$ylٰ5 ;/M݌ñJvoTUYs&R<ݖq᮫hcj!;6""f-B]I$ +IA9_t]0KC EHٛGSш[ ss;7:͆AIV0MFL 0(&zi4]A0 T[H(å 2րf#&XAf pwHϘ?> Ir~6 EU tl`P0-HYuzxzD^{|78w,  2oV.+7d~ H F,&EMEI`00(2K3`P;Qjm6PdQUqoxtBNr,OviOCH,v%핞Ţ+ȾVv8ɣsK^#y5Ss~<*2|KX=D}#͸~]_tO{YxzC3wPՋ.]ҭ[7u)8ѷg7uJ>`Eȿ^k]KTw [1$NT_N:oNŽV}瓥;\B>xs|7XtY*y=ops߶LmH˧ړ:[B]ߔBI* +Qxl/6%æP'56#ؾ|Q ۖ%^3)uZlڂA]P݈#{7}t|l)5Ц_?V&`y^ ҙAnTec$kȰ~qxVvwMH [YƝBea})vјRH^6ZJTefc+cCl͌Q3ӥO+uר.cΣJ2ۂ%]yWY1ElڟGö 5-SE2q=WWO6.]:ڢi}1O'چ4HYlظS9]hס#}[Eԃ,Y{S6tՉ.Q>E/Gs"xhcĦyv.HPU YuMQdPU Mk򋤣i:H2EF׵u EjSIWQ1P3,]SQunj2R +2¾d泗WkkNTc솹lMgY//_|c1)w>i,偹Xfc/>Ǭytw%װY~,ikybFYgCݗSrgIpʢ{y|їӍhD]4`er4r OT#>"J_ۀXכߘ̛d,xm.8!LPkC3_dڛ;#KzͼOK?Z}s٠ 㠴J T'rmMm;@CAV Ȓ*| OL~*s?dR'T6n]_F31k;ćs2{AdRБe:VYJqU9z4PuU kds&L m})IM䓯`GT;S((\PdF`SўdμM)H;JDXEw(Ӭ:IE1>T|iӢ>nǘp@$qsz6P|>F@Qf=d;A$Lz9|wH4pj^ 'qԅx99/-' mHH2gŶtY1b,;ŶtaAV yb0ՄRwpX{.Ww#wȬi0~{U>v⦅&w}& 1y`ӧ=^]2rJR|y o dd10H| טg%{#?>qG.s̷2_}cP/3~aG]m yFH.mĩ B{0W\u 8m zv E}S1.dQ"8 )\d''0{Jt}g)R蚆P:K*QEBC q1xB0 @di/E iC3nX/#2+k꒑Fo"p{eNX/P5cu\\p( 0F2_/[8};lKul1*PrK,w7#yww )gڀ&֚USY}֭TWX[:ps5#^`F\>XX9 u4+SXF`S0IYV}Ф%иYXӇ1El!PqcbNԎbl?< 5' SI*m%*YMZ4?I}C7Yzw$4`B/5seeTᎧ UQpi M'ctpJ3)v[L6EB{2t7b>ej>xQs<zzř|ky֎Dof֗Gg_%ܝ`rǜDJrwLj^dԓ'Wo`gh/Ф Xy+_Ͳ/ԌsV{roXzv]Ϳ Od4m.D KAz!DLyN䥑_K(!E?7<ь^=9y cwO["ł%'lI)hp<50bFI : à/]o"5{ Z'V;/|*ꏑ<DLx[t*(2SVdC4[1i% _\3G4B; 
U\!58hd+$*sH-Ъk/zuv?Kg-4rNi3V ᷺.jvlY6nVpR50F];/g:U۫8l:FIk@F]l1d@"5 B]v#_4'wKz5]#e,ޱR"\W A9ğ=7 4ZPXt%=u'%N3DE tNI#WE5&Sgo9Wt ѫ -כVĆN=_6,4 d@b< Np99΂zuc$Uspt($2VSg* O/d-U+nGPkKgOJe!}fDYZNH9*#YݜApLMWO\&?ڸ C͠0fSuZ#iާ/?OVugKu F@pџ;:-)݉aa%株\%wf _YIS3zEU\Hh5yLy~R`^5dIɗZx1$_]Dݍ/M{m^PUBCTY9_@jb%Ik:gNjQȉXiSd O㉩$%'`^|k`na[}x߸Pŏ=߮k*1mha(dF=^sKOKT,Qy ֬bƢ،1Y"^sdFdץ tDc?+h2*CXMI:̪Y/S`c^tAu,_$3:SZzşi1\ZEץw ꩬ*zq$ڃ`o~](7SId׹\!=.miM,nǣA;Dp~[/,*Ź9VCdO:xk&Β$H#E˿}K[srgQWT`;`TeQLuLxb)~4 |:6:YTZCu+fw((̣?qU+"Q'[3qeۖ[7?Mb͟1o )VcwC,_^rMS֒A6oƏrv~ _$]LiF׶MH^F3$b?w!P?PLWҾ f]]%3^n5P{m7 Ch@`>MA`f 8;I$Bґk_AQB!ӶT\99gwffg^}5U}|MV~`oGy`j,Q1t,O7[c uŻ+:Fp٥/,no+Raҕ/na`4:a.AIq7OhagX~ lI܌+waG鴮v\~¹c"]ǧ'O&A8Ϡx<}z*,;i,YÙ:jE"2x2^{ev~Qع dv p)|y/:wж}>IۖmE:ۊ|z>)ӭmWyԝãuG)+ IDATsAr*Lã:qibX].x2qF[/ЪOy+};_Ihl#, ʃԆ +9T\&j;XQ*͚.؃,ZiRrIjb/yJQv o=GP[T)fdIB9HnY ) HvHJk^]@AUB԰ݹTHBj *wl.`8lR8Q yG$fONhtg=v<>q#W~.^f[_v$S8{xf3|̜,ھt#~ בqwg >f$JxwL1_h4G'|]f''Na"Ϣ]s&o99KE2y{7dHfg[ټt7N0ep:VՉH?lRM[=+ݻzF$DžhUtff XRbp(P[=ѭħT{:ٿ}7u]S"H=dBuWx)uiF#S %PT%)@=gS*^~Iޟʿ.]uϭ]F̷y09\  pVt J\m+kR6|?p']8-^Y?723ɄOR]$@*ŠL0q[t@=mCdQmP=ML~m6HD^%i ;_gPmՃ)/ R6{䭙wYYNXrٽ :[=J+ xgmi'^yc'?4xw b틷FLB޽ѷkm?-GҮ&4#vWE.Rv-C,HTvW{G&z$c;k\AUI(F70s-rhA6Q) HB^Y{5w u;߷{MjOﮩxp٣I I$+VSB+5;.?o@ēduA_55g0WsPOƔ@7BmL[`Hf{pq!琇LdtZ6>e~h%ƈ;Kٶr3b⩷lTrs'yaL<ӽtMk_D쫂Cu$sݷm~xg,)^}3kj/oq Գ~ ~^DwA{' inVv&wSL66$b<؃t -ǔv\cR,D$a=OPoX[m=c:QwA}/'f<{$?Op+Z%{yLl: l8l lϊ H֌ Xbma3n}q53uy;Xe @c^7=DˢUh4kHٞo)\?-+Xc}s!*.)TWp";)?~ymA)۲)1fW)@xLv#bk8-@2prI}V%vOߋZ$o;UMuM!K]R->vanٗaMKxkẛxkE%.6Ps9ƒH3!i+zAy| ?3O?ˆvWu:#xL|/Y8m.nفUMt'?\η;D^~Qy^4eiDMygylDk"!۔|MЩwzG& 8eHLiGwVϽ'ٽSW[>ˬ$ÿ![f$"I_';+߶XȺ<~z49h;%5%T:]ЂME1Un@p}IqҩȔ&X1cJ6mw;5W1~Xth+U)V"h/* k껎*(2Eud '[DQo=h!b{8;9z* @ԭ:ZU[ũeGH8IGϴORCJHPPF|o!c\ d .{9m uu{5]][aw$Zby 42YȺ"Y@21!f9ڇ J,oKL#U)py.+~SVp۞z?J`?!D)˾tiC:_fѺ+Mla놕~nZȵ79}m`օ|uW,Gg4 ];޳f:~4u]^ÈX9ERozGBY]K@4Ĝ7 00ٿ%[3 ybv c=@| @MZpTR|?-O^ ?*zh0e6\i&iAb'4+",ƲKw&=ȊcHf~˙gPA45x6~0kضfi:M<|.ԟ1Gtr\;Mgqf)E#is.o\Ɔ>9^z @B_$Yگ< $Kw-dw+ٸ|&o1,2Lj)7,a49UӏC!LgݚJ2AA,f::S"KY>DmHpo@u $&QztὙر!k.;}T 
ݴ-Wu}REyDy?TTT`b*c.`~*Qe혤 ou*KS%(5lߞGG!$؎|$ Ōp>I|*"*78J2 0~9/OݟΓItrn(FUl߲[r(G8 gf9o׸$4ُ2 kj LhCBTTծ=1R ns,͚FbԖRҒ$TW58l*M=1,_^T:EFz)δ,.Tœ[nvb yD'`rW92[Uj %51*m2Sqhj]:͈Ө*#SGb-֐ }aaIFJȀApΜ58" NnIa9厛oBsbA1k6ॲEN6.+_0oOf"SSs ,EC> <skذӲm<?~ÂY7?Ȗw] t"i0ʢz:uhS=5S]ۆkoØ;>m)\=߯%xnYuRVa&]SMயEE׾470gRv84"^y{h7j}MtZ:z;#mCݨ8Ғ‘ZJi)pƮ02H ;YvZȠY|dT ^4$ ׹1NeUa˷WZ0E1K,SF\_q}W-]_,Əl3ygwz64:Ec~$$Ŗ nk>u^ CAzLKsx?6&C;D:NY IA|Ni.J^^^NRRo /=o6(_̄+q8[;ELE P]eA8(5Sp30I"y=S\yks(l57UXUK .&f߳.jIЋUBXXL2XYd6aRdM^adnS|QPdELXM2: = $a^fHHٹ~+K42#*4f˒mD6ahXPf#ؓ:2O*F5?͚˒5ˈl@26N6tA% 7p(֯ͦMt+=zp~\=aß&q6whL=҆gZƋ\8sHA2J:L蒂3w1u0h@{5`KȞ2S di`ZM{EEuMZˬ[96۬ce&طm5SW1Ki8nu ~dQv>^;ɭ0[:ޢ<4%4Bݹl+6KȬ[G NbdD-ɧ߮cg @_se|5Ћv0МO4G^ ]qaQDDp$1a" \f]MOѱbf>@W&4.ˇt&&9Sm7GPQ9V2{~<&S[2t`;-XbDk$\{_F6[%ߡ`6+  ?7uTޜU.t.IBoH2Btt 0r57gQ}p~R{$1v 7/϶TuTUYﰄY/a;Ii{,ɡ`yE^YdSyyi`?|)gEoZ+)I0mH.#њJdzs'7\7]z;)2uj :`D:7~c~Ɍś#ܾѺu*^hڽ],\'s $-AM|.ڵJᅴ5T~'(8elh rl`PL,fEAAuFAd+n]sن4 6C1tl=Ys]7$M[( g ZE*YiDZ,`SqFT&҅0w ʹ8-0L$6ƭc4f쪢ØY´i3ő T$5oC$Z=%[cqG%4"Hohq6JoJZtK4{3+J:[5m؋[fU 43olZ͏cnƨQ8@xhbUr{v$1%!~2ܶD7 b"".hđ*?:6I%u(i{jAAA7(f+Kyg ^vbAhjfSɄuTIƨ*fcn!ZXZnT00&1^q!:7Njj d&84g)jR fX w˫ KU.`?H ڴ"D·n`SUBA*?ڶ!tGBL6wY!%F$}/kZEAqvn Y2+}`ZK R}MİXa!ϯH̆.  7 49Y=Kt9~5 sV":6l9!f@:K U3B.auX1Y* Eҩ*(0a?`q(b! Zaa!AADGGc2^nnAZn Ld I>mE-ټo)gbr,&*[quF0aoʏPnk!jZsύQjcLMFvg;E4ղkOqg98Q!mkW{\>@.&6>ɫ`Vi׶? g\>;La~xXNJ#MPg CeM ^F.ldrsNDCU<5l_{K(hm22frhsWFxz9E6`  p3 E9:@$34#iatM? I,Zy%T[iޅz".A|gƏQjI@֌ẇ|Y¤枒QL ^Aa-Sa'ko]NH> ) EV0#f|Nʡ]HHr(m;ijgҦ]Abwذم&&(dL&ŷ}C{4EkY{JDHT2#{us2p~̶ ޗ ޙ\var%&6&-qkvlesfFÕ?`2Hdp^kc[\AA qH:77 6٢:ǿ*c6+(kȊج'M1%<. 6 CUx5߳Q,, xTdf3fTO\IBzqzÚ :^fAW=^ #IZ CG1M cxW$ͫɄ"ǎb6(ۍG?w{t$rDb  p6tAD.  
Ÿ%$AAA    tAAAA8?D…M5*4L'm8orzlX.&J}PԻ5l$+ij+P% ![N^}[WIE7$c%ߌᩣ.c&ς+26@m0p֠*6sqpQ]UMDh7ѭIee5.l 8 ᭣V'0$&VPQjK PDW{*U G`0AHA %ԧ֙D\뺞D5<~[ y­*p`WE!91k!F=-l:|6,łC8Z,NZ7/<9n4~^߇0ѐZeԊBJ G;+ɼڟG&}@ːj,|U?QyYe b<&>UsX6!0O^&3kZ{ IDATv `3W՜5 x?`ҌZ3]\ҫ\;kbS`JfQN=j8hb m͸It f"_N.4_n/sMj-=o׉F6zxws/I Y$wR:@"("nyrtJ=T.g|壽!g j2Yn+ y{#T$>'R>mgF9BO}g@wyy - ˛}E5?|=eQo&3S~ŦԅA`efP|m;&e\"@'=& [!j fuI6aR$ߧ6=.5" ^>nUk&?Z_Ā0[aۛ>2Q*`t)sY_ % iKH~?69h`6+CIؾW',{ VՇ.y:tO|z))A_rMϟ7+$$&d^&ch 9Y#>}8J31H?>comBns-,]|q$Qf0xۉZϪGzV|}~Z'vA)LR:g-<|5'&2 z~ _rĽnjC;#*k1[ngɳL|W% Tov+cЦ t _$loz*:J D]@=jJ@ ֆ, tO W}KhǛjG+QVhj=@"ϡÿ# ՁK>?A GCRkՋp3U(90m-mdo1(G(@Iee-f?Y Js⊡njʩC=bpջȥ|\o/^;uMQsy饩= jk^M<#*!߼q{st佗y{>ukHi@q!\Q62)>%T.N$ xf /Tla7_gM?b]Q^dwn?%_TûFvĮ$z+Rw_eM#{h=a]1$mR}bDR)JpIvPtvCryͻ"K.w_Ϧnj1 O㛏*Nd;.$=o+KޡꂟI;4)EU1 Y<-/YV8IENDB`rally-0.9.1/doc/source/images/Report-Trends-Configuration.png0000664000567000056710000005122613073417716025361 0ustar jenkinsjenkins00000000000000PNG  IHDR67RbKGD pHYs  tIME 66Z IDATxwtT$!@B "EDTD(^"xEԋ *zT,ti"*$$$$ H kA̙ucz^DDDDDDdT DDDDDD EDDDDDDPDDDDDD EDDDDDDPDDDDDDof@x.,JJJp*9d2KXXE9 I!r| ܷo`6뻓sq ͥN:CBKOOLJn'""B9S(rbU@@@*BHy<L& q0x<BDDDDPDDDDDD EDDDDDDPDDDDDD EDDDDD UBQ(BQ(BQ(B9oԎNSO׵XS}0'b8 ɦ82χԤDg>~Fjֆ+k=߷i7ј}oaUqV1̄7z%|˃ϧp+ep=?\>1[[ -^"g/{݌ף4"-Ԋכ1evec.3YUPށFMh8)|L6ܙ?1i#яi _LXIxr?!݂ m{\‚}|i"QA >'9\{ٟK)݉h&QmF]´r2${x{Sk)s>.Ɵ2bE%F^4Z5ym}2@I6#MnsɋSXNFKcx?䓢SԻ}41cɘ#pd34:AjR3pu{^^%-=Z9[.L5>җF~GBwp= pfMcٖ4mu~3 jGM P~ݖ~gnq sY4A> 3֌6۸y5|yc{+ej QZ׬%cyxIw^{'5W2t}[~Kbz\grW.NS qϽh7=—Iw1lxl3Z*5WuZDDD Ǯ1↋c"/Cnm-!F_ݚ=u0e^G0fP3^6Q))<[nṇ8$>Ż/E@99Y]YbKfwM8%w17[b*~lKry}S1C.|Mq=٧ fcǮ~E">>6ӛKs[MHX^)̘?JΞwD.mcm]l nAin[ƽROG{'Ǣ<ɶK:"fˏͳ)S XMh ͇#㏮sF<.jeO2LlO%:˜wl{.پ/̦Z} /N㍙q0ॗ.Bqٞ諈YLaAJ:كy:\` W~-d Lz4nS-jfl3.ׅ `ڭC2A+YIB3 X1b7X Ȳpw0Z۠UFԪ ,]]Mџ7h\Vއ14!G"}4CЪѿM8Fѯ_#}JFM\GuΪpO{50KV#vy=kbfb{ɥݯyq<& Tڮj꫈K!5irxQB:Ȯ"/~jW[ RVȖ<~wv0-?Vz.,J{~n'i`4P8 |FjX/\bYKzZ :45mжvޞ]\qLVGOc[,<4 S\8W{?h9@ ǎEaN.N HE0(u*0T߲ň_d$ICӇ1-kcL(4Ӵqr ~tXs'7zm>ڑ髈쓽%\o9*1%']39hՑ®S0{b1v^^})q5L\W3S t0ؗ}Ô7x}[Bl&Nj4U'dD7z7h] 
1&jY^砸6vg7iC7ښ.5͍=k_l1n+ZhҶ?y+ybL<ĊW{X7DrQ~:nw3s) AZǔG>838Stj7>??89-۩}}3vʌ)4a\_yaU ⸯ{Kt_xy$o KCe8 ` 3?3g1:b}j(q_3 |u1V?,%ߓ7x||);lEIw- SgsYVrc .k>c e)z<|q?}c@?p rೌ}+|o1 6Ƣ_?{~ߒLӀ_ƴK]j )Ώ?- y$hp'@౗DˏNObǹ,P1. 4Tr̋HJ<J.^xe@ŶY7㞹{|DDDD` iLPH͆i/ >Od~F9qW17_d֨C __qY >b >ևvJvo#Ք:沓]kٚFbR$masQcKz괌+TwNjwH_.¯8c];̰67m)S%o@˚S/! GBQi GֱfNxFof( ~MpN4 aߖHbmt7[ى~Ѷx,j_ƍ]APjgK+ wKNЕa9Ә>SYrZelHǻ;2VߞdWl>Zq?OtϷώgv>ν>HO- zݸ.\@q~)KVN3 scCAcE~ 治28rƎʏ EDDD.n-<4d'y<@X󦄚؈֑ߖˡ gyjҼ]ikߖ\ӮA6"ЖEg"u|ri8Fs }f&~kAL}ݣV\mbi{6& >5ia Ç4,}T ܇\N.@TFn-cLd\·kglqz!tnVP-}CHUؔd@}{'bC}0b7bTv纫~z>w>@kx$[qr݈}~cNpUi̫9B'9lYh"0`ߚ$ r$R+NֺrmÎDxt?6Spw'vx|x;ҀZ P#.<չwhƝý_OgW:Sq)޼D~ ` ==\2JjUftuM`5W}Z"Ђ!!Xs)VԢ{_ֈz~DžQkÊEF17|ҵ_3m]UƼj9B!h z=–\C:n"g5{o1;jגC/u= &k)&qdl8AX.yKixs]vF\T\eNƴnۖ6Mj5+)b׺e̜qk^B91~Ogقw""""rB۪.Oͤ[R6Z-/g>[7f`Ŏ\05 硇Nf4y{nߦgdOt}չb9mf;oMb$AziYubƔϏ92Lz~cu`3`<<ԯSJmz8酓'82g?9>""""rB lOM.DӋށ!FGŸdnLdm3 h f<dVy3n 2 0Vi;1)F/W ĊVg_nBNF;I߼]?tݛG53Seqob Iϯx 9|K3fhT4V,Nz8R=/\R1?v;[>M+^Sp}}z).Hv=cs,(}>).V%JrWl[K*y#""""a(Is 0kCt9OH0lA(vz䙹& ٬>v}DYbi&@ZWA'emn@#P+ᔷU;e;7 ])IGąѱ(I*>&긳Y3siO=.k84g{ӓÆe(, 7cS~ v/g?YCyFs!`عncI+0>""""rB/Z9GQPei>Q\ȹ`ƅsbj&˵1u[[DF~1jΛLh#vZ>M5;1gKSb-5hX iX5"$uWo$~,mlMɡcB(@&>!K'ڳ IDATCڎU{9-\ h^3xigJN,&lq\~UkcSi@_E$@l^ܞFnQ>b֤r2> Kg WѴ)tE̙2+(yiƴ s`|DDDDR3F# N|ݗG1Q:#b!7C.{{I)63 W7lBC|k9_9ɾFc ~$+{3^_ŌrIm:ԑ3a#/^x?Kk<̝83~cۿ^cd-/+wh ?M4/cSy{^KեGusɆl $m{/V擳jϯvl|߽ǒ|R17 r]l` B{'2zK `oFc(΀ hx1uA @Ve4 d̄,v IZGN=ӵ)8aƖ/؂/4 WlM0wIJ#؃906ӣwW8۟L_cji|k$b~`ٱB߱7ngɺJqxxNK}7"ċ%(:/Sk>0.m0ۧauj^KA\\9W $rnqaƸYm/-)""""R%ȹǓsYent]@(""""P( g/Ʌ%qQ"98H^akL2rBqKwWמ9o K """""r)s'M_̛?@Q_uŃ1@}!""""P(!7U1hwEzYBB9]X'"""""P("""""" """"""P("""""" """"""P("""""" """"""P("""""" """"""P("""""" """"""P("""""" """"""P(""""""g;6\.rrrp:x^UKDDDDDb0X,cXirq|}}ZT9x<%%%ԬY\PdW9x?n N'VU9YVNgN ^.9F<OB\Q%Q(BQ(BQ(BQ(y)cg<*,"""""r3WJŧ^E!EDDDDDj<8ՉĦ@("""""rBzMF ȅ =y&~hu_*""""""g@eM oq ᭞1""""""gj)93G7L^qj+"""""rBs6tk%eȅ dpUmEDDDDD.PGPDDDDD fSs;~TDDDDDB >4_|}Yz˽355PUHDDDDDw0uXFdB^&#"""""rNBK$ >#ڇࡌlqɾ 00[c(LaZpї}|;e_ oT,|z$Cx髷iiKeb=MJHvRIwJKdn7ᗇCi_DDDDD 
4OQ.-E;ŠKPbzԢEPMLɡPx:SnuzZԕh_E]╰;yd\1Cxg9gBaV*׀5MFˍ])Ә9FJ[n;&8xgn-rqv y%T:Ys)\aBцn ΞDs)z}ؿzyob/8k]E*9^}Xd3yj};VTqL&XDDDDDΓHl/."7!u6EaXs+֦^x$\ Y i!Wg72I5UiɀףH("""""^Jhh*tNMܿ>~k u-&**긟k- o-_p_""""" """"""2_t#|ήlW`5?7jIS߸wi9rkT*q;s\ATJم]cMpioI哥vوt VAL1#yw^;߿kS: zgowJ>x5=bs(M{؇fw*3Y۳{Sɭ՛O|as)^iCHnm>e8 <ůl_FC=?+޿I½!8x(##[Cx!~럵1z=oH4f$/KJ븲<nғ0axfxL߁@O d^mZRY0AƾXijj yBڄ[vwȊ8-!8xA&ͮBЋLՓF6ø[\/`{1d`V~1\y[lk汵g,N3hlF8ٌ#xֳ` w$fR4&뇥NUQK-Z `b:N M9$%gQ$ Xjᚫ"9q/Eފ|>>DDDDDcf4t=-znn|H&lam ŷd'np妐NcYt(DPSHFk܅z 6f%>{b4xB]ܝ1=iL ؛iX;xÅWphb}8EDDDDD x ƐKyiz{amEnj.±Y09A>y FFjٝァ? C..6ikA،^(/< V)I㋫c3:sWDDDDDNp.A%l;ّUE@Z^Wxu. 8Ns>eyOv|S^) 7H7璘uoKl&Sڷ38nE5?.!Y1~L?Y]"0w"x1<8VvF}KcO&3>֣ȧL&،YǬ4"[L~/}; """" k ɔ[{ѣgmͮ4ӯw[%k{o0>ri +;1uK?Ɏ5 ;-̢/4sRۍH~{v&'`LzY>]>Y^4Û-2sʆSᅱ.vaݧ6 ыGeCyb|[GTSCa.cJ4(""""-755 ~3wJ{;嶯xgx"U 8y%;;~޿"f0-)ie Pe썾(ȅC8v=w2uO8{Q*˅(:ȅE\t(BWО7cºҾJ?W&3Aܙ]GKѯn竧owtNB["""""rB9T(.8gG|Q-d[<3;?qrBaYpMFԋ;sE<& fY+ȍܙq sI*:1'0x ד/=A;|o Dg1Oxi]Q{Մa֙Ν{2d>S-w$Mᦄ&J;n8+>Lz=!۴`ƒ}&t%4oדџ1kY*DcIҹ4+Z*PgVJ:V ;']švqkڰh iv]DDDDDΟBc0z&܂- ;FW*`Qa_h=`ozl#᎞ڌXjcdé=hd3-ڄr01g_q;Š]bCͧN0&Z[1pm=֢jhݯ#mcd.D*R2񌞻 P}CqA.{qbp}"""""'e z)$#E嵎*0\o0QYV3 K݇ds8EM<.7 2VIQvWn 4fEBN S~#5:]O˷e3yx[+v r^r6V~EDDDD 7;'8A\Rs]m<8ւv0l~EDDDD.Ph ہ䷇ѫkgnxY. ASٕeE#8y"3l/."7x;>NR#SamnǘѥKdGԧٵ\FZ@w5-^ifUz:!~cVp~ $\,k? Wd#龑5ey[B5)M^oc _<=f>05Pw5?k^*~W!""""rA&**긟X*(0iI ] ۖ7&@(""""rY%Sr{dp:}&9.QQ((a+*T Q(<y~LXW@Cq= h?M%'ډ+k5+j2cDΝu ovz6zwL/d+]DDDDD &Z_MλeY~Q\2pώjQxs{{ i8O+}IBYϱk:Ǡ'p.^ߏ~2|N㸱__zuLc.-z}_.34Wk<[j0uL=2X9xo zvLWos,\G;z yJ" l~5{wyA/hqʹë3M3&}"""""rn;&,$h221M!V?ژh}|%l~&9~S w7#o<{t~9ЛjeL᥯ަ-d6+}"_Ek-f-+üvRIrӝĮPF85VF b`|mX4;:v9?3`'M[lzw ^T05GE!yn?2cu0FDp+F5ZT:&Μd(Fhl\}=w9AqA.{qbp|*Afll2`M!םQ7T|c ;c{*әb; ᑱ7*F?VÃ.| ^kmos+[E Cۻ8Z= N~QViej)QG"9Uwn;E ['w"D,X̮=.)活z>f{{]_^߹yj^pȳTvçpS9I_jњUKk'JuUK{ewCucRd"@WT WP>=鹼*4t٫;%WQڑmڰ5֑/뻃6 S5@`{ gnc,v\ns}ivΕOWc(Z_훫(9ɟN^ڤ= C;({ GF j+ƫIP AS^wa]ky~@fYThh(=eRTTyy=1B! B@(sFOŵUcdBa ]+֦ oBa>G5 (#5K&c~ eŐLI( @Ep(yJv38_PAjwn5I]c4"!PѾ -Y F5ShfTE4oBPUW2lւ_ #2 iJ ajxX,O e+2h1+X/m%Sw+*! dkzv]6g/p0uÚ"Mq@dYNS.KQQQ=΄1B! 
B\cg)MG\LP[kt%}[}%i/«R_~ZOm wkZ۪\$BO)UT#*(e˲Izi7h|ʺzzX;LU IDATҺSj}t:`=yiC >U%PS\&%j& =I&M˷$)KUR&=؈btQ^6cQ35MSE& H<1Xְx6Sp9R냟֨o;nl#YFƤzQ:KZ2zSιK~GU]gOoMY=4wm'ձgZRΔ)Q.ޭo*yhiI%DĴ{X skg}Z2d? 76 UkN>M+nn^Mj~=b5IIēWE4QD@Oڨ}9,OTnz-6"~Qthtz0.Nqq5`ss5q+_W圙<: ɏZMsE ozD<zZit~j0l֭xISk@(;&]Sdu>D&Hgd-_MgCfjroB}>M קKFO4uE"!0Mt:ZFO;SߍIaԕݳ֠: ru~EOw+S9k5uޢ4זC T[ hQ#$sd'WSԋu8G2Qjz`"P]hOoB7 B(B!Jl0FoVfY:D-~ܬ{iuϿ2!gfumJZn dnvS3B?aSdu%lD$h'T/]أezmtc:k߿։nig}zp.@@&VUnt#Q73jVbnIXYzQl}~s/G-W--ӾnmrK/UNC?aϙ9J0[nuO4rNe/A-q:!{g>rb=1^]GmPکU9^}yT1ڴVl/zO~#ut[ƫߤUڟ_שƨ4npou}gSEYJti݀=4kzL]:+zy[fcE٥Usԙ,~Ywv[ BRE/l~Z\n{z,?xYgjRšzolE\{ތafnnb @(,IlybPH֣|h'ih_A_znF|CʚٓNoUÖ ҷГQj֧KVfckkok{տr=?;Nw^RK_QjڲZr DOВ*xB?ZxEOۥ_w) c΢pGYFFיzoJ`ֲ>J3oN9i/ْQjcF/v|!Wx񅍟X_~TZ7*_1q1%P1)u +T7j隳ᄪ4~TRNQ[ՑC>6VvRT7PBa:ZTy{_c_hue)W[zVڕ?x}MXYRF(I%*=5d1r򖢵/YUM2$KeɺS7OoךXYdDuA'A~跩vl׷USCuj1}( num"$Kp (b}E>CQvY=I[,Ir^G R,U]S;‹T׽nTJtKrԺ5'TS [ ~ lWmƿVh1f| Py)wj}<eO]Gá뻍Ѵ5gޫݱ|JV' PŗrS+՛ûiYnRۗqw7#u%Xg?w2td*5V# UAϦpkPfSϛqBfBeLk? /;ξjȗUB,J|]yV xU`r|RWǿ^Yo~GHy___}XTκ7XNߦ!vU~+cˮe|sOʴG^pu-Zլ9RFWo]PX N2*oУs}>\zd}SbׄM5{%OPڿh] J9E/WV2졪wr&]j}dq=;7]2,VYU%yT4}E/ܮϕlxugTWuʹP|7;d[G8&E(2fL( bkx uiԛWiwJLӣ#۴a[ &WΏ'͍5ꅧKݕ(c{Fk_Ԝ/铼i?k㧻+Js?1-_\E֦mtcsepޓbQ6%SNd9Y-ޣ薪\XxVOI絟O 6hrn*'4BTHY$_6}w^"_'!U7Olp3*N-dm0ԢC/n&٣;ۘ/Ljײv,|W[]eXkRՠ}sU޾/G]I{\8@3eַ PǚM3li:6VOM㇨LeYѯ:3"#a_3UBxڌΔ%8JwtƆ$?æZתAm5.T=4~ğudN{mI3{6w7Q^4Bˏx,=NqvYҤ9հ\ B@( er IENDB`rally-0.9.1/doc/source/images/Rally-Plugins.png0000664000567000056710000007155213073417716022552 0ustar jenkinsjenkins00000000000000PNG  IHDRgAMA asRGB cHRMz&u0`:pQ<qPLTEāYYYDDD...nnn..$߲Ҩ껁gYYGuĜDD6nnX$).ߨꜰĄGOYugs6=DXbn(((zzTTỴhhn@@D++.xxx.)$ɲҽӻuYPGncXİD=6tgZZZpppoooaaaEEEui]TTTů444).$߲ҨPYGĜtgcnX=D6u NcbKGDH pHYsIupIDATx[@ԯ &,1 #$$! BwC` 6=ݛ=!_?uN/Ȳd%UIxsNU |ͱI!Ԡo- ^Kf}cP1 `Q79>F 6{Ƥf?(2.760%3[h"7o 1>A0%8=pG4FB` qgajQE7}0n0I6B@2.fFI< UBBȸuc9]1]̠}Gf@ȌmDHXtvA85A1@"7`"h4€O KFI !씤hL'˦LxbZJ#C2X\#ҀR˺>! 
@E29!FH./h+ آlXl( x`O?\.dє9^}$ʆS 340ɂÀJ" =un!ͤkY8I 3p6B3|6g3J(0 &ej\SPh6 YFUZYy\!v,]oZƛi:^yl4`BѠڢ}|o; @pCN"C#*m's3I$,/c8,LHGNJe !\.+T?8h&/, =^8#AeJǤirzE\1.,Q2YuUQˀin7PU3`^t <^N.IE ;hb< $)5D69ܝc AdVb )q: H{fyy9/(E67KوhZ <#sA%Z}<5NH[|\YP|Ā7`4ip얢(tFg0jf HYU6`SߦYpSR{a"FjM{ >OX~<0 ytMQCR jK8 H tX%FxP !q hQt|W*[&^&x_6BvT^ Oָ|,uPjtsƩҌ:W4m ڒXIȸv-@ UOH*MZogYkCuZLnZw6(>kdG< cz_ 97[ CgA@`@ 0K˵qOo?.}7`HWGd2uD{NL?B3vŰҗp0* j|qڅhRG(w=H|!# T3}U.[.so??.1`,nYa3|OvsXH@6b)8ڀ Bs:"Ÿbv7݄]*aHȅjZɮ`UhV|`@^X*>Ko@adi6h ! S̘=<rH TGN^=b0`*IRӢK\7M ̫ 9ㅚ sZpZ)ޠM1Ï D (FF#Mv +s"'bE6%'/K_4"͇W̨f54s`p" ۑgX$MD>FL꒧ $-E+ËBM|T pWbqЦV~0C@XaˀiZ壠4 +Q`%XH'ȀQ{-skw&:2jӼHVaW5AȀaɬՏA3 4OͧX9^s -,PVTjv˽{K пh"3Ӏғ26 /֖Q3?[nnKen'm.TK -SND`sUBHԺwLa@ˀ*nK7C7U]Y3Vf =>lsP,WYקxh<_?)Nr Nyjl8̙2j |(̛CclE RLu noA `@'FKhշ|6`,3t8Q&ASmVl1aCL8jPo&/TE {E.NT|>;c 4u4Ug< z/P͂H|,v̌yMe0EL*kDI#2wB#Ik!mHFL~Qd/KVa\A⭂ǘU^㲮L<.oGEOÀ%0pv +0 h _<3`47%0̀ Z e KO,.>п.6'ԛO6L,~Q-]~%`,.Ǭ՛˰,3F--KJvu~? 0%VI- ɪ{gu:&:h0 ht OxPPZo+r=?]xD 'KO^,Dܣ ϟ<2 OWLhO<]Oˏ>]xj5Ud{/-?~>4t ߽`,s `eGv@2?Go8cO[oջLWT>ϭO}h}'*'W}+Xv/0KX s"]d'X:)[Z Hʒgg+Y? (?\8 (&Az_ƤO<nLrrc 8:.6hD3a-dW:4 --Y ΧI \!х\t.<⊺ DŽK> À 0`# NxТ2,ˀݏ(75 ?7]7Oh'eK?r> gp,SR`Y%OJYM>|LGX}`@ x xDR6X" >iғe6H=]EˀO?#ȴԻ{voqрa@yGIyc*|辤WV^,/# 0ev5N57OU3lޯߧ ->[g>цvgW]e@e~Б+ 8.6>{Rˇ'2ʎi߳+uzV^H/={aFfR\+/pKOeeTc ^e;zi?2r[&iǮ0]lú^E;lкbqVw.dk~K.V΀À~t 3xWƊO~7yq0_!z蓖|F]Pǵ+=02hSFڀO[[zw _68-{ q/l#m #Gq0Pkޘ"Z9~" 1  0ހ.1 ]  _7t`͸׿B?}Oʯ. 
,?ypI$yZ.?/K/qc*x ]LMx/dݨcb/+O?OGdB=XdW~_My؏4a@қsWOcKz~$sK?#RϗZۏxޏ`@_e ICUw+-9XV#Ǫ|[rT=)M"ďG^б/ $B?ݯ}>Ot2 \]jAM}!Yy <5c]̂o6xT)X|h} d3e%zPn}; x$nqA<{l 0` ?ܯϟ,=ˀxRI=zB "=&ὐ%ˀ{xg-ťE|qx~}k-'Txf~K[|&k}L.g d+KVpI` HS2zV[ |Te<Պ#6٣GK+kEӀK|!g8yuL 7/g>.^l\ |ȧYOx"Wd Hc@5#5j|J }vZz&#DTx(Daʀ+Bq\,FJ "wH7Fx/ 0` N55ӀRz,t /Ds>fi&ֆ yY+"=DF ,ʋ-\6ಮtY,€# HeԢ4ܵT/|Hn ,p>Q1%33]%ZBI-.9 xϪU`@( qLfed- L]v@g_6i--Y7u{DY *>66 - 8$PmZMP*G?{`XY H'\Ez%[kP/ o,ܣ>gӀT@,\; He'BsZ({_=-<WHYϗWS{<45-9 DֹBEj*:OXOse!٣ h??S"l.0` -#K^=3}k^<Wx*ICh3#P+;~W/?r߮jztʽe{hYGh͌$/Cqzǯ+6a $1`k8}`@`pIO_Mp HmEn|!`@`pi Hm/0`  _`@ п 0 (`@0P $0` H`@0/04.@^À/043hoa@ (DHX* (`@ҵCBA1a@ 8.ƒ֍ q/l0`K"2hS`@ mH_r0Pt b /0K  =oH@7.xep Y t 7`HVprI]" Fj 7/Xpy 0K߀xܱ_x$%o@Ly<6t `7Be o`@0b.@`@ 0Kƻ?/`@w,RF2sM1e ȥ&Vt: ͫQɖgDD푴cO_i@)˞3gW3!c?ҀYj]ފ#&SW @Ȁ9gT8Nņ1-C)Ii8hS9"K#Œ 4`ʪ^Z)Ȭ;3`4ȀisZJ2"XfVuZPa'3>RNF9 (c@ 38eytp` 0xd D"a%ae4#e`" 5d$1oqˀ&9 0)PId92K)#] DC΂ )t΂ť,غCs L_2qJ~|VDztK ^^d8r OQ[ f8JQ C=! Q[P֕-\"-2`$RK"JAQ i=5i`0zZ xeVqsT0 d7y>z(2<Fin6S' h7d=}%TpO#a@gO$(#9nLڎ*; ӛx&KR?f|ɀjL:M0GH0/I64BZƝޞPH,~ӹa OKtY*DU|a1R\ 5BV 1Ά) A7K]}\`<1D,ƒpBo}`c]Lxc 8?3 י K xSAbX⦇+ʀ$[%ztp}О^^# /0K0]!0P>C ަ'> [ 1 xh@C: bUB5hyzt 0sM׍[Tܟ"4 onSz8 ـC% VGSr4s ӀM6`0[T^OS*ഞWZ})JMVH5Vށ}L 5"uL g0fȋy)e5%8jz6 *VUo+.{FUalښX[n cm8/ˋQZ,qϤE>.i%x2x>T c4<%>G 9m@Mà@Q^5|f;S*vԎCb6ڶE37 QVz{Vmur{N&v&vӚodٗ~?0{ܒ49>T{EV4Sx:¨vB0 NRt*Cb~uhVw0Pkȼ UY瓇Հͼ`@w=ۂ+]ge^]mؚyg;WPDV٭>0ePWF2R f i#KÖEn_Y] Uа\ :=tHfT@'H*f*8s(pS@7?Ͳ'n :l*vpk}ʀMx€>fcâqaX Uwك߽4 cm)١H*պRMNp{mqUku[\|Y}GvW߿|ڊ ^6}I=ymqn,C7tLD)5 =isG|1黢}fT^ZWgn_m݀ Xw $/eH4 f[nۢ rg76)Rk-*9y*vv-]od ~4+&MiM򉯭k\ZXzOnz}Lj=2kSZ5yd.o[fRLF`e&*VVYL+#76V7y{~%Weoio5|gku(qsnkWnnnOTx}SN^ܑ;77wk&)U\{Cȭ7;´oM1:UI;oCprԵ[` mno(}T+޲ mՎvm upmydin; 5mUPKeV vVV5qo8t2Sa;4^Nyk|BvJBK3=7h@ś Ks/W[sPӀ4J_m(֛53 B0`0]b u_7VH_xVUlnZ'RLBUcKKLǎ˷N]͐ܢQ}klm7l:*㾶nW)"nKt7iÌtvL`gׄ3 ڮeGUVw`j.bur[̀7Yp]ipmNo|.10K kmַV7 P\6 L/wJl _w7VwdU]{рӾ][BI˗vdՍ7ʍܥx67piյ_Vz&}{o 0K w9ᥴwGf[9aeC YW[V?IwʤjغdUNnlxy0&Nmܷ,tɀxJg[d{Ŗ,fMZ~~0o]b ͡.m76Ԯ5 _r@U-xRǺYhW2\q>񖤥Fgܠ7ɤVڀ;-~[o^5 Vv($tx6[> %ـ gYs7v/o\j(/oԵ}Mot5M̮U 
|i븘,hl.%F€Oouja8of:0]^%hkW0]#mSY__Ԩ.V>t 10K0]sƅ  c`@ڀulk7׻] ^wTYgg91*v'0} ^P{ȂdٵKӃXvr[اQeWdwBK*AL M{n9W+N;`Sn1UvFk59B072n t+x7@D)Ol 5 ?⼦4w옦5mШ)3~l> 3⮆zD m@Wo8kx9Kv%rtmq^4'w+ʀjբ]eM-Z{MOsihmmM^_RBiW/:y5Za9;u|/w^[nh=O&Xד;7sBt[dFp$>hx$,Č?AP:dc FIzmaj%p;.kjrWٌ4{: h5 hګ-ڢ2 e@̶d*FSu˳Ve *uμ0|7+уК#ȮU$,x(g83i8htwy>=e][=YjX[T!vZ\պ^DDJ-ߛI$ 2"/-gJWŻM%.eݵj15pxX܀3nuwc HIZq)%'Tׂ _0Zzu/_:]U;]2ޛEwUќys.>z%}07v/\6x _tR5VJ|iƦ^dhkݞTb/9#lqtز PзF7 J g|G QS25y15/ܝ0{`t]]+>04}4[oM3VbZEJgcˊ )U2z~[E|wuVMCFuv, IjpDo^2 l-޴ Í0`_&S 8#:"[;y%M:J)5 rv$5Z*]-M::y-Pnlnmhn[ \7v%T&nenlZ攛伩Jq.q؎V|#:| VՀs=&`-ep3cNi2x'Z_@j[-L)кkf8Kc[n-Dlyy`UQook[n]F#j wVy j# abuK>)?0=ڀ3ܪ~s\ijSڀ2? ~2e\v^v̌@ϔ -c7oJm|n93K>Kh5lΘFGW 鮼fy=PkwW36lnl;ӳ%l͍woZMucs}D*J]DpNܽÏW͈;ws#[Sx/p%/]ˣv찶vBGpИc7nGvw&hqI`/ 1ү3\5xr]8Kq5e⶘5}"_p]t=9S&mEށۣ x΄vwh*!U>\ė~ۮжֿzl2स5-' {\$% Ҁ0`{؀!1yF)K:Uv=05 0`{؀q'v\|K-'^ =V<.n͈[7''N#kd"dq+C` G%&ǧ&nML;!="zNLNY{\@f 40`;7qέv1)@7{A7:A $`@1uWq # $`@psV&.ϯ 0?MC0Hv(W #` s9 $`0`pӱDj! 0`f:IGuA\X/=! $`6̊ ~Tka4̐  j9qK0Olש۔ŪiϜzcU1"z(kjo^uJl7oz iʉަISqqzũ>pkО^,hN t^'D6rrvgN&gzufF 7~zu~!X ʀ7;:J~ Cf<;^V 0pkq݀?Ab8XG,nn0Z"3dG[3u[ nb󠁘K/o@J V\ 0ksѵcVܚ3˳pZِS_ r)"k%htvtq {]-SmGަ0 pE#%&ovjl'"u0b: ؜0 рcBLN֜KnfϜvlO0`s`@z$4khwNݘ^1 ~>ƯSsw'^/njt יGxՙ>_- 9d0Tl 8ɹ>uW tþ32 k`BjcĹj90 膩PhP‚190 蚩;d$́A]g&p}ƻ ^0uX́AOL͈^:'&~1`<-8TF z#0^P}Ն`p\mvEppxݧfP7הir-gJ~8E'R5 (^kҰ8(u;m0AxjX.p=24=!DQsIMHL DFHv@!JRnGx *Q-Yv&o^i@HJ!aB8"})NHf W01/6Y|̐BUt>L5`LH0/I64{*"1BX+Q4fb+5513Ȁ.Ig Z1`<kXEETNF!oLa_0%k$gO'z8`qR鰇C^/ O% h(5'U]*Իխh x÷YQER-[Q%ХՒrL~*MWՓ+nE`@oE)[rqˀYUK&l:D氧FA05FVQ=ͣJ<5DޗՓ"hHOsq!-"-+͗_ܭ%VtC|׺B*n6V4d3'iNPdbI0þJ4*LD*X4M_iӀ|:-Yu2ɫz}%ӀY3`8L8)h$/oQVuzj)v.qπ`׹pVk^iN*VG'3cÀ>c6o6yZ()b92b&'v$A; H"[q.PGcV9JcIӀ"!ccV5 PS>j?+Q4(óeãZXZ:U*g{yetݑ*z"5vۇ*Sg+y/gUR>9+V{ǵʫ.j}L jGe'v$,EFIx07#i/4k9 H u[R^QI^}v7`֪܊|X"Gc xP.JEk{ڀGp:X9U}~6\T*uZ*<\G71v^gRq3ު 8NW9 H pLSdA˾>S\koYATt:VhUdn7 dTNX2r_؀{Bt?NȁGc,ГcdI۵cKŪ$`ۀVNP,XF͜i?o N]9y*/D*"G wc@z+BjZ0ev 44W#T8;"WO9:+Fk\# ڙ,ӀGڀY3 GEytsA48 8xc@nU0#"yòcUf]aY(;-4uɀ v2[Vzti@ijLvm*ݨ(/XnJcʀJ TsΝytPvWsPƀճwV*Ե3.}*0ƀy\Ҁˀ^HF0]i|".|"G}p{B)}r 
0`rܽAT9P0ͷ0`B: j3JVUg-D(oTJZ:ۧOE%WU'.~tɀT}u|`iΌ9lSyH?R 9)qĕ@xc- {Dt{6x;-)i|^d"/FK!YsI!t g"hOSh09U#yp x-pwB]UY/ǀJ8)6c@mNirɀ[s%;!.5P422*YqՑ03< Bad9.!O≮ JiA@Z>rn+`JaL:T=<RHwмK=!'ӫ xZ>hQ=JJص2%Ǖyw? 87;VwTPO#K,[S{c'qkÀJ] ȭ- ȣZjw,- h7=tv.Kkcu{|0`2N^,ʸ8?9. ެlv~$##vFD\g@\ W5`n|Lmh,E;`_="@ ȀQ![Ԩ/M94(ȁؔre4h98w%&`@3p&gn;w8K [0KKWp?6{<.^Q-GՒk٦Yria@ Z%`@ .V>t  ZC c`@ڀ+G'-f8qi ] GM.vqbOIt rEޯZlM+T5/|D%<7ˀn[V[.M.yv='+} XLW.5WtU ϵ3vq>9 v@.w+8ycLUQY!MxLsVDUTg-|ܒNNExrDS~ΐ3ZLVx',OΨv}Qo@#Q׭mڙ9 3 c`@ITURx XYyhyt>Q)"42`gչ'e*_gjV8}v bTU`P͞_W?{:7 iEa$i@_3n%Fр5aIFUˀYUZT.GUUAV)o:YS9SI5`afGkB]4ྐྵ7 YX͓xmҁڪ=W%FрFlrDC:QbJ6%O9Xܗ9/{fZg;K 蹺SˀsYuREI4>j]0]bT J8rPܜf"2Pj:q$IqˢJ73`ӣʻmm2lb]ap8!>rӀ213p> iM%=:LX36`Vr, :J+q^>1|tƽ XiV€%t+OE8>~ř/{~6ai􌲋y8={wM} >@gF`@>A1 $`@>A1 $`@>A1 $`@>A1 $`@>A1 $`@>A1 $`@>A1 $`@>A1  0gH`Abhk oOnx5A_(]KʿB^ݹVC ߾oQ _Kgg1"oM[D`@i[K^^onYM6Y&p?}M踰|;+_nswt^Y0 Pc@0 &-6 8%7e+./w 8N 8\LMn 4ZuZ()j53)&n*;|SpHo3nM9 (g6AƏD>*01zl1;f6ݹ3}Sll53Ĭ>E TNgMOO}xFm?6 ;*rOy%`@pdie `@9V%`@pMe8sw.QwE %`@p]n2}oMܙ<t \p.kU \t \.{BK.@a2+ ?{BF%`@p}h`f.kpSq7K2>)nc2> zi~BTSrkWh|1t 0 qڀjj|4!Y=AVHjpҎbf 0?0 蝫Vlmޤ)'-٩;bouxg`, #&oEi=k Q!-*kz(#&e MYPHpXIy[^peGLȽQs|2q(3vͲa/CLعӀw`JfY!2`A/# & ^Z/oYRUlW9. 
f9nի7Z n2>xkw0NgX"%g(-Uca$i3_\Jʘ s#p2E,T8RhNN< eJnQiv Ȁ4}΀1!bIz·9l@WŔW8d'iU}qCÀ %ݍo ^ ܺuɀc4Z>A IQg٪w)t0!wf2yE2ֲ`87U{S/1싍 thvc3s,ݚdqqY̕2`FEbLE6`8ePd6ۀY"Ӗ)%8P-SgPz\ȫ(s (X++vtW컛bf.tɀS< 8 -PZ-qˀ\\L0N6 Ehl I7Ma Q•?nո0dROzݔ[,S7iGޜ:d#aRӀYxˀy7]ћ9H7`U VMQ6rqn-e[eT)bۄBĝSw )#g8 XN-?9mF.gʀFz^Rf|QBMPAbhkto1&fg01P (\b>T(9Q-47 $Fxh&`ӀIYi/BAˆԋTM1HҠE1nhc3wǵc68BP^QYLe/[D73%5y],O UU`]ylT;` NS% ѸeL85`F?GT.Ca@0dOH`=AĬm~ڴ 0 h c4;V#4B=3 ZМ_&2``@ ـa5uA+`@h 10K0]} ζ4 ccO\})#hxj@!f` 0ʏu4B^Fh't #m!oLa0j]0,tFZ0`KZpn xX*5D# րqW75`F_-a^4\u15VT>Ov< x_YYrZGYZ=iw^E&ΒU6 Qi^*| ] c8GHgIz4OK6f0xiѪK2_n:ۼLDO@u0& k2pZaS6Y첂n,ZQdmDyԺlߦH} ҂T>l0|$S˝LS^?2|_+;|V&͕#:w!Uр҇u,k.ddW2ttam[68UH ڀ9v]ix>2(Ȁ^k"I+E zwB 6VU&i@CyBF׀3J_+2RTe2Cf_ptw)U&Ξ9L#r"Z /Yt%?~)nUzBj|q#RPӓ>fLɈyy5/E|db[YuTlU@YplL2<8}RsE\W3['԰w >??0{oKs{d"XɌ(ـ4 V>3vfYm=Un<#%a!d,mLGm&|H$Cj2n̒:n7Lt,EW^;H]R! w©5k闢8=t!}TIQT9Vce$6:Ɓ47˂{ =9T  ΀ No"it13B>oӅx&rk>"=By #kCwGMpGSgH51<(jGuձҢМDƀey9"wRjk@ua|IG/$S}ճa@??_;5 Q+t2䰃ΆXg퇝m{̀ πδ/} E_ 0 [~.:>%`@ .V>t 10KxnxjvŠE1>t v- xR9_MU*͋2އ+Z+mm .vq+નʯR?VʢM2n/&ZX^5d.5 ; %<7ÀJ6`&NuHG VMv€>t ϳ`+S9?[*+)55`2>8,?yw|yЮV|\gCʴsUݙLZ=VjM˪u6w|\^{囄} ^vOEZZ| jefEG["}2#)eT>PR6OT)lӂ5#v!-y3tUu,WC 8F+spX~>b*ٗ( mU7rBj2ǏU.|B򔆬CT40]b vK,ZH.Y9lRWOg X92W*;Ե—QUMɼ+|Z΀ShvP|Z+'$.1l<C[ H v K%3;V*yZ;frblf@=rN)ߗFepZ xV_[1<{ҁ3*HUGGҩ|9ջsĽ}6INU+O%-^sEKgDuluZjvɀgqb@(>*umUn4}ɀ\DƳ,rZLCjzΕw|7\D&g1(1=02hSocirx˾ !j\U=$8U8gѹ)m wiGeͣ5lDV> Whs H?51AV댿SQ5ks}(%j^68-GrZObC_JENgv|?qV*b[ H+KgEǥ-HiE^t#kZrW; ϠP>|"{Cz:g*''إ '`Usv`q5UbKGrL'Ҟ9*)O?AļMj}5nw~W-3#)'|\ҳJU9l}KgRA>} L; $0`} >@ $0`} >@ $0`} >@ $0`}qgз 0=0`}̘=0`}m!n@A33ot $`@?s=A3sB=n k&ot $`@_348x 8`v-ipIyuۮ%  կ cZ0`A e{ݯ W`-ipLM[ȃ2}ZB 7jm[*p߀XOtt<h 0 $Wx8A(c@0A3uKѐS3hcL=51;pN0`3 Mna,9}| > :!Пm'v ~mA*p?+˄tg0tVKP mQA"ħA%(|E v v v v v v v v v v v v v v v v v v v v v v v }C`€+o_> UCJ}ʀ/..˅Y: }CZW3X7?~ԡO_o0lxd@TO}WU?/>Ӧy_!>-T:}C~'(ҡo~zs0 #&р>4ON HEzIm[x tw*-R (__pçe$hPK*_I~C'|m}}a@@x"hy4/_>[r^ӧ_T_YK?/.\a@@xim<Ly}PcdHKz U׭Csi.`@G?jϏ_؅O {B͏I~~7\|ۭ41WS M.~QK]'>PR|o=d WoyA_~]e@6ohsJ51׏~?GPo|6u/tDw__%fljjYBj 
Yo|majAa5e?y14fY~-;-r"巿BO#诵%O:oTۿzgk ?~bA*tWw,*ހV,w]LHK7zP|Mଳ_섿j o? 2*MxgݏAy?~QӒϏT^PE<2և_yрĿy}z~5..y~Y죥hᥩ)[iYڀ u/*&Q|Ely_<=Oϳs`*p;mYBD?7ۀu;FY?oZ`xij@ůBqGR|b\H\( |</n)ui@ev\2u=ۀ8:=V}m07ֺ#6! Ҁ8 Q 篎Jmq4guo4fĀ87 O{Uz-~ңcNr/r[ya@Uv}?**$/S>Bk25Bdǯ`@FVu>gB>r-\.|U?i;eJ1?4{|oe@(oƿ|ʢ'>~̀ uj5esɀ_uw} ͆+ʣ?9 ȣM{مfRPԻo 8ZQcYu}賹5>`fv MZ={Ae~kzzЧ&s\ZGb0 F>ʀ2)O- }iYϟX XШ._|?*BW)_w0l`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6`΁6` 0lqf  *à>A ># `ǯ֋3Y&$:a! CHh|n CT?6ױ%tEXtdate:create2016-01-26T00:45:53+00:00w? %tEXtdate:modify2016-01-26T00:45:53+00:00bhIENDB`rally-0.9.1/doc/source/images/Report-Task-Scenario-Data-Aggregated.png0000664000567000056710000015347313073417716026666 0ustar jenkinsjenkins00000000000000PNG  IHDR `bKGD pHYs  tIME PZ IDATxwxUܖ{(!{# *V]]׮kYQEz{ ={r{5~!ws朹y͙90MDDDDDDDr,BQ BQ B .dffRRRR XV<== nCDDDDb{/0~fSVkAAԨQC*"""" r4<<< Tg PZZJDD:CDDDD.zQ\\:Oחbu(n7VU'epQ BQ BQ BQ BQ BQ B9v! 7Kh?v-ۅ/Mp}Pz`2s%Z|Y)ajҋ0|éۺ/7xMl\scOL3yٽٕΌ>W{@{m'𮝼'bDU,h/i?d ,w1 6)BNz3mpS^Kv/[G'5KK2A[䖄@\[?v4/R]Oǥd_z pvw{xqQs~n#WO塰lcK!򜹊DVwi l'SeEDDD/ݥLQwCMy~=6>5+fWx>=9%ۙ<}}ðԍa]{);FV"`m-Ϋ {`m߀RW3`j!,+Sl !X<G &`Q'W`1wzWS\ϳ0vgF yG8$G8@a̎zu&}L[pKteJ=r#5˼وMD3ZlD0Kd(qrW.(A7q6&eN?U部e9}CΑi[.OD_,d{r-9B-0xe <=! $:9WSxnF6cygX83e/+J/]<>I! """"!,*tMn'&$(&x~榻im!lm4-YadLo_ 99{=;ro!^BKٛ뢹WW5ˊ(v;T]bu8vtv-ՋfټBIg9 A 7Zٕͤ$&dW9 뎌6zZ6KB10;0M.܀UEDDD/̗lfiqrh*Lfۓa0,mǫInˌd+]M,>arC4qZjc0,ڎ;0-Bhl[1{ IvQ <ސ6GaQ$\BK;w! 
`궿t$..ua=}¦ԓ^˂W 4u|)Oi1nZ )oǙyNfgI5iZ8.U/0N Uuuϱ)\>i._]{}x3;s0tdR\3`JL~sDp `Oq;a~g1rq6ӇSWqJ64 f8hf%(:LouK%t}jX\2;NOf,vq[|d|,ϏލF-vsHHH!!!/aOrWv&~fFn濸+4K~qHlדTpmnvl$MreloB0Y|f2ؑd沭A#v&%G7؝\zMMaBQZt{'—kJɽ3ufrLpre}/bpO_,#/lvazqla/,4ǬYk!iV@zyDjtt+2~|6y^Qϔɬ680)?Myǫl c[O}07Ãk;.[5{2>JO۸P<X2v`N߾'o2/gT8~G?a565ylqD,\Q_IqVvg7n8Ξ/3ñXmM`r~>otffV7=3S60>D J*ka)؟Lz~=|+ٿaa7(c]AYiԎ_A+i֋¤ob0,&؇/IˁGq>ntvRUȄdYk3 J hp1v a{ݪ DZ[[DDD:ۋ/I޾lN3nmy5xaPMZa%,_vY;GY4{o:/>%$AY#y+*)}+)zXر.}],rsw «1fggtrY|Б:bL9ݜk|A}:"h*3v"i:% -qo/c3c~;㷶!f3y7ѫ/( ۘzDfx+yচ$ϙNb@'0_fHMʩxfm%<> g|j̝ܹY"\\SЗC+iWBDE\ѫ5ΰxoO3KS|ǃ--avt}Tv4'n̓e,]ȕYl`;e) l惯Ղ(RSXGſ&Y*n<%9]/[ ɮ^p! lD+R&%:t˃ ,TN6!԰G0bM|ם""""0MS7b]SGh/7SVDDDDDDEDDDDDĦ.DDDD"((((\lz1))I=scϖ^Nz0MT׈QBQ BQ BQ BQ BQ BQ BQ BQ BQ Bٿ$oѓ |d.EMaʈ!Dceڗ"fy7} :P5_zt&x_GY~1״XE߻~rf>&. q2K3p&N53l:1{S#>! m5˙2c'8,^oI 9|R~:*/"""""Y4+ӋE(<$ADm,Z~G_?WVnp8]P\gL 6N`iQ4n6LZN⤉|G•sYy[jul s?oNmh x'\gahڴM^\7sCnF {_^Gq=ԗNm:T.VlP^fﯠ{k#G0⍛\}n*Yxx|MZ!~j:h ? Js_aG<էQ$6q,ua>dYh 2$mć" ߻Ǧdy$azκܞqb'7Ҙ-4Ђ1}|ϭڀڐwD̫ˊp㏵43Gkě=pusr١_Vf&'\DDDD L\RDuSa.݈tWd 2g1>w#q9%8#%.o\(1_js{4">p?CeY=ien= ˢQ""""" 6NdIQ8Ws9aUʁu{0cב$K&8+~=MB7y7^'v<Lw9.wMR2 )kgMlXs(q#"B7bopGE\xNQ'; 0H/B2K yӕGÞiL X4swCÛ~JfҁBXN/F}H˳UdP}#lGvCϵT_ ,`%Rv7.ܘ'x ˑMg~|=vXBwSQI쥼w6Wt):KO}gNT/&0^! #lc0,έR(`˗i&;x* ֜`G/OY$JF&..ĿzhFK/[5D4ܽG3 ٹb O<8|JDD/pYI&ˇ36 >1?p=}`-AO%*,`j57m%bNjmz+\@Xi"K kO_7/ɪ q7'k?x~:o-)g~jލoDLYճ'?b q++#( UL2'yeq)Eֱ%݉iaݕā'^gF^7в`|~$sWeܸ,."#Wg1so!붥dIO$/b7PˁGq>neSJ.3]n Q>݃q|I{.OBok mE3}EDDDD.x 4a15qu 7)z&ph&҅>M]:.z0xوmAKy۹I/>y5U(ӻZa|5&^I|6' -=7|gxV)=$ջ縄FÝ+Jiiթ=9Z!o_RU[XY4kiP^.Fio[8?T~f㳲>(;4jٱ$tIje(VH^d.&o Ձ4&`2r3ؽ~>f@fƿ7͕_1//g˫)VW6{ٙ߅h ozlq7O]KV/0ya7 IDATnMB`OgNq>AD4u|H6Yɴ6l` $W)9-6oBC g !xs"""""?݆MlkTeh; kf\T?s(?ED]bp*2 O>R {V2k\V'SDpbvw~.eiL-+IƗ1`@mug0k$IF+&5-<8%z4͞HJ+MptuK. 
_Kim/p u0 FM摮XϣOIp=Y0o IY` E ,b/|kEn;~;:h%l >\g򑷸f2'NbxQY'\ٛ?81Z} Hc=tR\:bsǿbF=jgspgh B=={$sp':jSkܥgcl\!z*im痷a ,03 FyJumOKI/<\1+' e =t/ NR(IDDDD䌁ގo#&ҙ }9w17@ǿ73f̡epGcw?q?}7$Le52H0hѻE bҶ-bIl)6ᡖdٷN>4t Wul@=w]vGCt $' [-p 7!u+/k;;tQ7s͒q970)I̷5v!Z}}1hڎ19$N:F7*Zƕ| 7I@!Ԫ3=y#;7g{c[޿'3-*c ӝw/be.ce BҸQ7nD:xliJ-A49|QF\!ɓ_45W+[ӕ͖ ϰ>yPynnO=I+ky|gA[.x {y,IML6ka0u DsO2֑C~RxelǔV\얇)Z6/|~5N[?g鷌"7Ig9daPgmtUsԹvt܃h59tt /E|\(I+dcIDDDD!` nōϾIuKXd6#$3i ֱ`2` AۍgL-;wӷ6 .^؀rw9n*%YX Hnm9;Fϻ3} on2l ZDn"߄<^'ï`_y[XoέW.Y0rv05 ZWs[ȓNS{w ląlkן#<`V.;u~#au]?rٿlٽzjq^~3OLĴM2n#q&6mƤL\lY0--σ3_}ӢɯK2ٳe=KllLfO c[֝wMB8^0l^xUOg}7#┡Atx]FQv8} ,?`'U-D7i˶k۔6e-hi;?P."+*(c*u4lLxDĹ6}oɒ7g ^5|0,gCa9Q <8YSqY>NeMݽhv&MG!JXxmurS#'s6c]>=W3H8|q;N"""""{ 5 !}khș71v! 9ș+%i'.}pC8xJsN-v:2 #nvS뗱`|6`Q9<#<ȣSޮX\Jƒŋ@/ ߇mB$2 <0)=</p pCM%拟=N""""ukZ~dYʮ9-xxRGV4NhZLy8@$~13{IvkzcjupZRO٧%]E5"LH0Bv%:Po3od[Z uНG ~㠜mcG"}χP}i''l ,j'=Sj&rNwYcXVB,<>K)(=_bt{`3S&%NSuOcѸ L7[ N~Ŏjmyy]xYä8i+ ^ք3>KBzX3rM||gLq]4H~})xnCZƬ%]C˚/OMդ4y?9vQIDDDDO#v׷L!O=!cf-f;žYx߼6/^yGV6'PrAӔi֥u-R &KvQeEd'od3 @4i_罟e69wnH^s*)!=M`%mz0C%|GLZJrI85%@dOls֋Kr 9:|b4~( +ωǂ]$W`2d ~& x.t$""""ri}?(|Ĩc714om)Ot [O; {G[nH7f7알y%c/̈aH3-S-q2 ~;/#8>vۦi'5sM$""""R0\WFԐxo" DDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDD<ʨ_fEDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDjlzq7bDbyɆ_gyO=ԑELg?k]_q-r.afZ>{ {?ƣ=ñVKNvyϷ4ާ#qlu_0t )<+k=7ߞ֑ǒ=<o6|\]\hn w-川vumKC'[87.8<C[c?n`\B[+_*\ksV|*3)LZ̜Y>wlCVy]΍ =:DDDD3""""""Q+MrIL\۾ לe ,\rķ >]˪}g2czvgRn enΙ&cX%% )?^9ē~xx)Xu}~]LAlԙ+HM '{靈чjAgǍg>i犢^N|Ң z7>M^ړguSs!fdGr:E bu`9vnxO3ܞ!mۗ}:O: Nӎ8*[=SxE=#k:b_ ߯>杓ah~sHzyw4YӶ=;7/}5S pW]dM4֓Z=/F#ƺ6TVVLGDU~e;6y3zRa=h_K?;;qKcJ^Eay-Ԣ||^HVNfH8U )ͬ)GP초ߟ܉ki{{쟱k玶AXm\og$?6NWzݓؾvWy cOɍñ|jR}3>4hG(LӉ۽q;H+?4ScKR汙8&|8 Its|֧gPuU?.;ѭ;|лfZ7Gӂ̓*k8B &Wo zoET9$""""r&wI@Ҍ3.o5f@|5w:+A_>5^O{˱^L$cF Q>5j>sK1qT\@"<*ne]~,K!(:倒[XݔdfPB=+ixդmS@ϓӜN15T3f]Gh۹,ʮL"""" . /}gZ`ú lڲsY>g hy 4%wi2 -A h g)4Y_}WRhVX=j VwQUmtR ! %` J*E"( D#((J" Z 5IH! 
~={# Ot9(ඞ3PQguh;O: ޲EOP {b5PxSpv^Z,I;X#GlĄuG.\<y1McQ0qݾ}ֽ ~mb4H|Q*ȋۥcl]4-剕 n 'ʖ$X!Q87BQI1vOďQ <#ؽi/gCSVuͱMbr.MdZl|)-(k- fqeC<;Z6w)[-`8X ֫Dӿ{QZ>Yرm5,\:W}4~Xp⨓A t GjZ;g!Hgy2e:nY͂Þ .FkB SblĪ(wԭ gr:ŅGsj7:%@eLXruEZ̛KaFh޶N7Չ\՟5?}6-#͛|Ҿq9[vY+m<_Xy.+gbu9s0& ض#b>&ȥkIIK:lʼns/o_lT IwӳO*}ʁY#qP [,~i-QR}1)E}8PHVMI˿ȶP`)٠7'؉R}*0_ԍGSdmވNÂv^x0 9OJ[id] n.;VAΰrt'B d ޙK{2}>2.fimjN7Ƙ'ʖ5?c^6y.פ fI>fWH{ G`;$sx|uR禂i;g B`rD9ZLmRqkL6~&cW|`7Gb>ƹf/̳L֘=f䣮[M|>I;M@]fxWog:wl:wl;ń'.yjJ9m~;N *u"S떧\s:_z2Dr  KςK&|NƊ)|9bf_^a[iN]qJ43|ӟ1璍)L^3#n&fCLVݝdip5"SȒh:\\k^VǼ;qSrvsɨNl_8ŜqԞrʤ$5̉/L%+|Maz~s@Z++ݙTvsTٷ9kL2Hm3Ǯ<] L5G)*,`16rg]4$l6f=pӫf"zA95ɆyGzJeiä>j(:^}Ơ̚ןEz޵<龰1)?ZsM馡3ƭ,sΞ`v/b ԺogUeb0U:sbL2y[= Me$ɻ>燱De #Q@.qsfϛ@Ι,7|q\"{:pꁎ\uRrOXjЉK&tyAP"yS.89,jQ^z7fM2zbӼq㖷yk2ZrhIa]C%]~9?ϴȆ2ñiw-5k\ wm qf{N3{5Gv%wyVsnI>kV})ht}.3 bN|S顽̭Ś]rsM 2z~ϗukv~ h5P ٙRc'AL; S)G1f˘.jwTw.5q):׾o Vn61L.Y O$533!M͜ ,tu|Y߷La;-ny=ͻ;m]v[|Ԍ}չehhJX MӞMn*Zu풑mMoc㖫i'u7 &Yзw8l~u硻dA>7DI5"PE  J2Є)2lOXN”RnoT#Yz9: Ƭv[ȧkFUkQ/@TχDq~oIN~`/ 'kYQ67BLOR]fc V?QCZzAz>Y9RfG*q[L]Ř}//J((((ȃ152""""""B,b1?y8;;dQ """""" """"""@("""""" """"""@("""""" """"""@("""""" """"""@("""""" """"""”3,މu_dɔdi]z]Vb?ÜW1`AjH_{{ KVoDƟ59.Z6M`C]c@I$bGDmBCkҴ{9N\p;4|b5bZe|ܳ5uC~8!IIm߀j5ddMBPBל~U/9u)f.:ܤ/hA=vX7qC  _cSM3sΗtEQkALcG9b_&bH>rNbX.OMdoBϡH郫L6^~=0ɼ]c&$&f{O˻==>_n2ɓ'_{ |&] ,h7+Nurܾ>n^#L|s K29ϱ?GZvM`Yp cd7T%F3!l4:ӱ~UBCѨP~?I׿N_DCNr:k@\R>kAN4)Ԋ-!vN@G}q.LIFTAOKxy])m3y.E,)W Ϝl~garcסy}|ݿ*9,׾.>l65f]6/$yB%S<Lp>GNA~/P g8lLB+)f<[~F a?J?ǜ/yk6F,#I~swh.O6)sEG̈wx̫,>50jis~ofNA׼6A7RQc`ꭚ/rcۺ5/y 5Z%󿨃Nb!.9kbhYi(?nN.Hz:f#YRkx7=^=*]%.쮬y6kA7p-'ٮM #)wIr9>PD;+3ty }3L͚k:S6k@dN, =Z5ٽ 6M3y'%x.$~I|~P6q4O Gv|rp>qW?}*`hԪۅ"B Պj!fY/o؉A+2xX4L$+V{j nds2\ [I@%rጘ}$,8ySY]8?0,ˏ]sUơ9ZԪA7vo4mG;m{${~\ӯ-8X8X?@|8#{ a(D^י lfNgL1y [\qrpѪ#ww~Foj5 /6,'JBvLK)]<Ma)^>p1 $Ǒ肳Ė9|vbu'Gg~wO}d)cIrRضt [YaoX|HOΑ\)ꩋW,Ӛ^-I:ADDgOg\>>p̜+Np8{ţTG|?&,CBي52[a~ |GN>vʭ8'nˁ#uqbb lL}%twQ9;O1lD&u$b 칫K _>t2[MM=xInZ$:{bP^h>ө{o}#Vz^iSWԋW^Λ[$% -G0} N i0Rޝèjo0`P>zC/hBgpbGo=Z>3R}K+}P95m|K\~=b7^8[]Iv>uQAw+ۂß$nnV#0d3@`GIҖc;]i{.{nזO ؚ[_g +ec 
WL&&UPH@("""""@("""""" """"""@("""""" """"""@("""""" """"""@("""""" """"""@(""""""#nj<~FFDDDDD!rccЈd=:eTDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDDEDDDDDDPDDDDDD@x~s}/Owgq]zC/ G\TP1`w0l̾Б\2axlrr.M8Ef炩// >'Oez ._pӣ yz"?F%r :Z dlk^W'n =/$CL#y9hmpU|-p IDATnڟv4nt-6Y|Z}ngD}pyk~yx_d\=-vcY(ɩM}vߟ Ïrk@ibs" 6zǧ> ^aރbޏϓǤsuv"ʡr\Kmv*$%x >%F!]:Ųk6_rg^<ͧk澗&D`x5?*jX1K^nM=gt胴6>_v&v%Y]pFl,8:?~H`\ k Gb}D[}E՟sȑ_ׂOAq=1ġ8 :< UXL"ϴ~6~_)8{8kl8uq(\R[$ϻy"iS3m0/X0vO_[|h5>53tղLNҠyʻtNR ly-j >@"G f$a;'.bۭm}<ΏݜJ9sBny&M,$3vsQV(؉8R]p6q;cFLY.&g9 Op!QZc#Ɩ|e1r^M[z! N83[.Nr9A5_5_)غxuӰa~[\kܕ)-}~nj_gxJnyCv5Q }wWY z­xSՉ`'Lz@m^>W㣢7[[ӛf9xS+ i$?{OfwhhP#iϩEO> 1|-›s~Z ݯlPN~K0샄8RޯB9_w 50K]r%Wfهm NzǁZ/IRYONX1""""""YQ BQ BQ BQ BQ Bo8f`xxFFDDDDD!rccDCtʨH@("""""@("""""" """"""@("""""" """"""@("""""" """"""@("""""" """"""@("""""" """"""@5ҘN">'jZcvx\e13G3H`l,ޏg[y9)` s@[.Ӗi7ӌ/ioBߘ= )&c&@WY}j$,=y]mg'ʳP&ܜ\HzjrUJ -o{zT"鳌[ w%v$E&* @&oH]o;zɿ]}#{$֜Hp>@}>bwLeks9gm.eJÑX=cጻc2Ɵ@oe쓤LjNN{aYw™*?Ĕ0uw~˯`0,ˏ}ơ9ZԪA7vo4mG;m,;_XuefqpGhivt*c%:&} 4v&)>GM'ow;~[p@z~22DDDyxS>N!ә97~W~۝l?q mav7苔(-],!+Ήr~t~{S__\N>P_YG[.eqbx,v6?߯z}BMs}e8"{v8xdeZ:Kzz ]0^Œ|,^ST+F,ʨb838&QIFuxA_Œ'򧽮7>Cr3É9IhaVtÉgo?np狗Fa|ͤ/a[95sΈ~m4ޝ8uƘ@}pdCq^WQ3͖ᮚʼ1 C?Cax{2rb;b5fYONUZ>oUDDDDDD(B,ꮞ2*""""""'2*"""""" """""" """"""@("""""" """"""@("""""" """"""@("""""" 0ZKtv_R2_sr-snpRk00 # p*ܞ&wqWwor||5{vض3cq>gW@ -y3VlW\m=dZ]s]DDDDgwv [W& ^(}+q?v!*ExWX Ԥrw8p#)s <],.¯L(?Dr,?͘0 ,}e^>]~bldl?Ź$[]'T5իJNtŐtvɚ9/y)IMy%t MC9M&7pmxOől͆O JWk@jE$s>l9sdHb-xRJZ4,xf쫣8M95JRY=Jqa:~xb#nUG:6湞P9}, oxX+ɻa=gsO\V_A'\+?_LI.خeLkx5 $&sv|8u h>t. 
&Gmd_\'nM͊ yOP#]16g$)]M>̦yز3=g߿ cjbqټ5l9[2;m4%c}>?VctjA?eϿdw{}fǞйj0(~#?oMlG4'l/.DzI#8jur0Rέc·S$gؿvoA޴,~[+"""" ?_:f3y ~iX K>B[ap@> LhZ˷_Ak@ :I@,QXdfn;OoXXmY8sadFOL,.mۇBrBꎵc~joMH9:c G { ִ0WL Uh׺I:sbΞe,4Ƴnb5)vT-*]%!5[RA.n MbN{(fkr gh~>)3%qlsaEкsknŗJO*x;1!Lk@M=rf>g/tmx>y ؝ӑU1s2&cW?QpH7J{ba}EDDDDF֜>?CLR~ [/83%D v,od_Rݛ{ή!Z };HbSJ f;O(p0`4z>'fPfE⯿NШ]Fs]M5ϋ\9y foi L(0_#vt憗㩂.`b\ۘry>F0ayʰ$5 syX,nZ܅-[\uqӃ9_oOЕs tǵ}k?䕵~]Άo3e5u-Y;7U1 _BZvf.ǻwYj?TO ޔhґZkcq4Rr]G갉 o})\VoZ|S(ėJxfiN|hgEDDD$K49߽5J' sX1g2c/ɋy6 wh<۽;/|b,7t;{U{2 N<+Eջ.sC:ˑ ̍;͗ 4RHg.dbض cal<֦>/їN}ޕ%n^(i7 KgZ6I,:QNw\<,8ϊgI;-R; xũ$]} MS?b?{R Ma0g I8nL+""""xWxXn}޵];w3I)[1Q+,q{7tqQ2o|n;Ǒ~1yx4#k2ëeKuN/>&*4{#R'5(E`$1`3!38 cAn^W9|F;]dpvޭ@Ule[Il>Cy2.k0lZ.>nw-8zd:^SK~@>wCF#׍L[pv~NyߝtKhhى7x5{c1v<01sTz\ͺKr7{B|u = H;*4k S= !45lOG;!Qv,O=ceڟՎX9m3fΌ~cG9Wsbvݙ|ࡘF>gA/DL!7#%Eq|B!4!(F[aglvʀ]9,\&?}.vo>kKoԕYÎhMIެb9N=Ysp\rCC|(x\?]6>\ {w53q D2Ōn# Rp%wj#dk'=Wr@yIteX)oG;RݵrLoYLC%цl;+B!Ġ ឵/Rd_u Ko<[^9ٹG0Y =@7)8p <<찤ϔ:4SuF:M)_7.!dpj~ f}~4w=3p`TGM{y'U[/@J"sֻ+kDVe=Vc55PH /m_~܃, `Df+B!f:&CݑpbOڔT{Y)(uI,:r056[gWw4UH?zn=-R-l-i:'T/{ \ii.H~3 4{bk8~R&'tOrV,O 鎴*+2E{.tP*|'LBӲ b-o&=Df& !B1hB8L~`Ҵ~ y>lNY5F#='bRkf{ynJ9ONBTqXrP/4{CU&s7xvz]3}˔IxLuTOtW /x0e^=Z" IDAT$h@җ_j+WO+S\uvS@ PY|_a[M5ċU!`*+8a:_Wjt%$pǞ❢F]*!IGP.㺥{0eq1<6p`+g~ߋ}8ڈ( Wd2!r+芰[eX$Sl؆4HpJYR1~9[8m\puE/Fn  U9|+ڎ>=Mo^o~:~&`0MG~|B!bb,w^Wѿ+hsκݳa><ĒO?'6W?)ty` {7|X\_hqm'6rsHP%1;Xls7ks2v]y Gw}⃟a~'p 7nh7'ԝǚg*iUܽw1,cz \FБ02?VGK"YSaO?QӢn$͹Ư91MUY).ɥvsn{W'O6 !BILE=ʩbۻIә1xfa940M{noSHj%-ཷ6PhϯcɜÉWrOd=g^FW ]obWzU3;)\{+dD<0~"ߺ7o՜ʽKQPHs _{l!;׭csaͽ*)iq9Wp2#9R؂\]NaL)+Vq5x`W1MSwTN9gvm0w^$CDSx1Vɋy^~(`\ :67G2s.7NasI*(9,:m%W]wNO0B!G-0 C XS|\RPC~o`ȅ蠲/!B!fl'yv:Wqu־e D)o8-->(B!8Z dTz6;-y-3>ʜ}/ ӵYIIKmE'nfi Es=߿L/$%up=v|n&ǜvx{1ox}w} w-,wX0I<|A7GߧK|tGxk81˄e w36y*S~5%r^)Wb9IJ qYīeqB!BLL.g>;ٟRR.3\ܽZ+sK{x׹p˿?k7趖sGww7x(w-[F_$_KzO;.⥲y@j^yl_ &<şﳵ wŞP"X'n+[~<vtC/axK 8 !B!=!¬+e~9_9/oOT?Ws's2WU2[zt3oޥf4=`-ϣ0on:>iXOW/E{4~BfNy7`7Ck?eʅ 9oXż@'jB>ڵ|3/ƝocPU}$sw[/x=~&Su;0`:u|jg}$ ɼ7꽓,(  owFHּXpޟH!kn}eh50YC-3q8!B!3sc61ۮ=aI-̻>o^~eyA&FassZqt 
>[6I:43(`4x3C@>_ ̢b{U4'ue7\.8qB!BsMryS;>߭$[o +o|?#aW;ӟY|kdU~yΝάF8#}Y19OZkݑ’ %)ᒇ+zIL|2 j膧B!bbP 0$ B!Bq1IB!BB!B! !B!P!B!$B!B!$!B!B! B!BDd!b@Xͼ]ACgLp׹ILgɚnO[ 3¼TɤLV9xlY g},)z=˘3s41nyZ5װn:],Uǭo!ٜ<1O#Lzo5I,X1+f!!B!V lVQqa[*n|v;].;}q$gr;v6Cgb:g[\iM'DH3ivO&7+Pz˩RW9wE11m<]Mqu/u?P]?m+mo9 eE(uuZpdqzk4`G7<:ot#n [Ƨ^n#=RQAC~xf, @)?yS9ՒqpB]yu>7cUN=<<=J^Ey/arm6x`k]uZ n<=&aLv6¬~`؂]z͈ mQyt=0$B!^kvZ浮36zg̙ά3VXZoxlفIlŮC+=]~0M34Y$[9kS5<]ïg-ƯS/^_?{*Ktӻ+U99zhr`ӕ?cbۤݯQW;zj^}=n<62b]Boglneq+g~ 8iIi5yLv !K:"^%y2Z??k -XlZ+= -CzBjSVΤyWrmge8bqZ Zz;nL]p5w]w<@QɛO~ɍ,U9;KGnI[@xZɓ?{bugSq.!i)|<+ȎU9ݑ_9Cu97raVo 'U<43|ݸ}Mo(Ǧ}{#zTN݁8/u]-/ B!b CrNi{kGd'{vXT}..x?x/~rz䝀2,q,g5_bկ3IĴ%;8S^m%'E_~Qe2fD)nc뿶R=,L'=,~QU\bREY"^t< El+e_da†dKlީɑO35);ngeP;qI$B!s"^W3.*J*uk|\2'eec;J19\76 /~~I}AOeƢ&WxauQsObٌYr4r?>.]ʏ x1)5 庱9o}?+?F;g}9|BQrr77Wji*^z9Ws9$B!so^Ι;c3;_0K^v 寜d]LJ"oAMiu4DPK8%q3TNa}VZɜnIXͻx`)$I™S9yWgfb2MG|'"+F΃ï96n]|+-v%Ӗp7x~YVΏNMǔ<.Ħ'gNv38)y?$ȴLf39<X:1?0&9B!Sd.SNɉ}Wj1\YI8nWnZb sN_,N(yjL )].7<?1#}FBvMlE8=y&Wͼ $ЖP!1뼴wJ;hM9y|R,q֫;hfy9,٤LV!P&cN X&HU޽è`޹`MZv{>) Yu*>wH1LS.[#d .X0+tB@IB!B%C(B! !B!P!B!$ u|^2EQXZ>n4HrE13kCgdxP~}[|/B!BIG-9j~W?Zk|?s'w0uǓ7AgyyjV^ͫG( !B!.&cA c[K M4ݟ*XQV<;r׆$}s\wN`LQ-\ղ[49B!B[LjOGC YтlN 2{t+#e9ӿ/cD O˂rB!w $ IiI@ X6|<#Vt[ܞpۈnB!B8' Sjt ʔT397C(>(joۈn'ž`utt+1xJLTb*#J|cqN8R֦>ˬr6_H!36&E,Y:!B!P.G[+b2֓{髭 E+9pŗ|{ӃiMm/x/.k9߿);_@ p eB!BiF䠿8]qT7M}`Μ9rJL%S!1J}eF B! vFnjL-9,[>'sb-k%mdYNYgel# !B!CTJ#ZY3';!N;DҤOȔ¤ nUbmD}!B!@t5u4;  ^w&R$lPٟ*`2ǥYXf6"~>"Ub*STH<%ktU8QTQ%1! aO;{j" n=ie)1F'a!!%OJL%Bb:V7J47@Bp44 8`]!kŗYOlwwOvjdlҢ0B!G |qͣ 쫧Cm`s1Ir^u:x`r6O71͸??ww#5c1W O_K!㹇u1B!Ąg`P.hL7xbrW!M:)as0\|&ľJsSb*$S`7[29} +]F9M2lW@yuk2zS*+]*׽ԕ !B տ*u:@$nǶg@W)vrb{qHB(bdxq!x҃t{㭕 0 eqI!;R 8d!F@Wmt-8C},X‰N!ƍvT09i*93$@h;#L/P!D"Ҍ0݁Nj+=΃4zw<(!ĸж]v9ɦd ^[.gJT&eK i2*b$P]m =P̣+qX_g !P/ ӍI)qIT ḿNc;G. 
B7M=Q,'9A=@@(8*PYA(xp{:Xz׮Ú IBARgC;Jvzz*%B#&烚_OA[AߟvOa*#L ]s(uʨ !8 lnJlyBv:Ԗ'j` tIGā8Fm^[9:wգ6VoD{j-dn8NFn9a%!B1BTKYK~V:BF0QʗB#ߴ=̩W~x-aP5UMnɃBF=QC@ !7oMܞ쫏k¶&ː@BO`2̴:/ u8 IDAT>j *]Elߊ 4ęg@BB!1 {Tն-iƯUr c& %ޣ?aڂ=hY`^tJ;F69wP!7nVGl?bVfV `%GIU6$aC]p` )@$!BW{.{ޣ]37nO !Bڊw.=ކZ 2giR Ժ+GodX(lgk!J [:zY$BF@;V`sل?9Y(KQПSB!]4k U.US)_BO,PM_;񫃯`vBĨ]U[}Os乣Se4)[w !ѧ {FZ"&{T7ZJL`j#o t4GBMO݈^`oBn&_aWHB(^{6X  ;p$1(2Ā%4 R׮OU^[Y[KcVKKB("bA=nOQQRgQ1 "~LFהF`X92>{|4_gh&u{{l;RSCd>[I(P.p# 8?"*ѡ7gh. vݝPj5Qctw dT@W)v])$!BU(=l]2rŴ\A8OX*5>[mC;)uHI"q5/J !?b(dzyb`mWkqD5~~q۷agFp1P!~i2O('7ݶ؍< p"̽'I82FфrwgbZO%\ !Dbm;%B 0bqΘO~kq5򨛯z*N}޲ 3lԹ O%$BBz0!&jz!~`2n5SB~ȶkŎ cӟЯyFhwPcV_ŪE3^´%qk-ou7}rRzkY+iO$k5fr<)<)Py\#/8"j D|5؃=Hgbմ t ;P5,l)`j8@S. ǿnOe7CȢC)l+_*Af"dgē?}s Z.>,A;DҤORS UCt=-ey!Ę]{$~I1J@vPuů02+fRL$RI3lJ%ŔJ9SLI1:m\B "=9#|SW{.ڑ$tO68bڥ$R": /\X:jqB|Kϔ)O.-]L渴+.M%eWvtt$K}J<%wWUPe,PٮؼLӣǯ{Q ?*~FŒdIBɤlJ90)&9Gcϰ >)t9"x4**dM5-df(sb SF9]?~ʭKN}N,,~b0P̌;aрUibZtfcV\̊e|G!P#1Pym=Wj:vbde?7t6 .-+p'[lDVl=b =jՐ`znki̿~^xZrLׯ XoF˄bTK D ') _Gs-Az7`J7%8@π L4j-bՒѕri^J83kŨTЍ蓧Q>z^+kH$S2ӓg1;uӓg(Qw)H{\O0_|1 ҄Db*LCmz,Z)~ڋHl3&+N蘪*i rGu, 6(C ZRlny/XpYG3slQUmĹ+כ Vn@Gy'EQtd' O3g0'uS!2娹2!(wʄq##)Viכm8C} #L3_@ @v[mT@ץ$B WGy1ynulWؑ87:{) Q6L%a_3TMAGv3n68KnlfktkJ0JWɨW-Tv{ףVk[{.M8NB!f_Q`H{hH └:6 '>+(՛y? 
ڸ {P[s7x=4c䔗nϣOy } ~SY0~GKA4vzv=vsi5&T3zI"ZOT@Đ;d}SB%$&L4٬l< 7}~JHvGOh[k 3sNp\죦kt9$F7tJ!Su/Vr#K=QFڌ $!Bt*q1g{&宂!׸GU㤠PnMm% մ tR,`C8X讒 !DY6z#L憦ԙOyD/څVg;u1h$޺Vjۄ?Jwcd>gL:\[Is1m5'؉UmafGk::\Ѱm:=JtnI"BzQ%bG" ;ֻ!je#`oJT~ SAـ,&e |(MAP^:>5kF((HyZw1'U A: d{*w)}qRB!J Ƒ:O6O8"Mzv7<ӚܺD?hFyWV=!j zOل>{Q7 $|fTBoF9%6auDz)q:_BHs}aX|DAj_ml}@Ukd}#[mivd216tCY$HA5abVHi\  ?!;%T2h2vuώ޲ Vrx 52+¹a׻Ԝ[a]pjj;;\% Zwۛ1I" a1.t7߷x47M Jp#Wu}='\8]>&}:ƙr00ELӇPeI b(| V)ݎ^2a OKۍ0ʝ͂8B_z:OlS؎=dua#D{7C Rk8_Rauߐy|5 O\B )r]0*(l4~ zx[j5L4dX Y9i3#~˓sb=o v_ւF]NB!ƌWP.f}գ7vQ`S8|nTKb}PmAGj7gc|X8Bl@W q0f*޼kLkujsA7 1A0}ᓭdSgc Z~D3abWěR5ZV$qMX!"׌nJ_8kW{bMC7a kEBO?ξͨz$ZHaC6mMPuO׼y* oP;t&C\PuX=jb"*V2$c(I]}[8EjSGB!%Avc;z0[M&\|P0.{k:c^; E{݄C%NuLG5!oTs̍&a%jL_<+yڈdCaFJVRY~D70Q26LZ8e꬝PifӧYbt7a-=#1)&l.Za2xHkhShS0ObnI[@)EP_Hּ0aޠocmF>|Ғ=8dJbzlf#'y&&eb># vSܕP 354;Z8T' `YhH àԕi%}KPh _5ْ(:.\+ Ģ:}̝6}LpǦv'p}8]YYc+ &Ns"ĀV a 1]Jm-'j@cSN7:ɭCS}0z=Nj[L -6la+V{/8 !: d{Rg& ;-z{oMowBj˰5 asv汘|3RLJYJ$hL+՜̧p=91b%1!=î-=d #KfDMBf*q].1چUm#՜c96}&{UCirF:\GWGŽRsY~%P[P[H513e.S15);!cUJb5LXl\$'qCo 3S0ْ0?܃&L{~y3X0pBaj2/{ܳzܨauL)'iw;ǰzK AE߇ #]c@A c}3]`A?f7ÞU2hV,8=~l0\1{!=ĺݛcL4xw۳Xնc4<1Z[QG4i%?sMKO{7$T2h!qM`:kEaP*Lΐ}d@hNƼQ~u]hbipy='0 $ R"e,˲,JlŎ*[˛r7[{ekvчMnպUآDJ@A}sw~A$nsz7zH'zEM#ψQhĚNry90ǽ8/Lr]ĕ#g%]$87p5򖡚g{(ibaS-mBA]z/5}݌k >s={O {/Crghϐ7kYݤueK# Nc“Q凥GQ^ Uu-0gﺊ m?Ft`ƳD~uR +<^Q˜}:sqgfѲ NFEG0J(B̹33$\s]R^% Kl6SE{zCME`X2 /9MavО9oyo&4K汖KIobeIDAT@MӴ8/2+jb"8ej Dj"9&7a,A;:bjٛ8<@//e &G M!'kO!jTO^Mk-H\ːsPi7U\ ק;c C"ޘ;7Cܗhp҄C".']4Ṣ|Q!𢦈^mJlwR22&#Qq#r Jv>MNdp۔wE8Zˈn:n9eP5n/$"iz9 O!zɫ=UlSU0I{kW%S-:lj~d9UkN#38F90Z]pW]&m $8?xU#nOK|WWA]^;C.>lFNnݨǡ[(`Я*7!rnq=H-%aSPp:8o;E23  V"BLnGw9zw[ݔ'rL+;s0:k30?WórUMKDClfHvWҸHge0J"g M@\qՌ=QxW R$&9;Դf' wu8 tvݰ0^x?Zk?=NJ H4 _%jxgu]JH( ly+CUkM}HMX̀Qҫևn]}~6O( Ӵ8MK- eCp}?XA+Y̨(\7eERQ5ge-SB뿙21Xީ\=m0VCb2"VɭovQFp޷@4˼I>)@&A  X- wp{dAo7u:5q//,kղr $#sc(UPaDWeۡ,t >ttf+KQ"+8ss;JT{Yܝa *K(t[q ,Lg̈#HNyP~%Cٔ!p~2?746}:Q3YW{uGZH:%>m" W!*h;' XQGAMg7c1C T0t07![A)Ct{ڙO`TSDb6ͭ L;xU_~]Vx0~Gy"Zxz %6eFFD#eȈE[h=^z)7XחGkf P!u8SGQhlXLܚ L̍7f dPvNMgÓx;2aS 
ߏ]Q:Q+E"Xn |XS] D\ku!gQT;Dqr2+g2* "{TfE*8)xOpMciRڦ2򝌒/h Qkՠ2~)jV}[&ɪ3? MrŰE?LA:ձư y|'cvgŇGN_]O`0lޒ"#fMG͆x;R6:BWڶdžW7-Z˞vu)yHb6*Քe=SG ho Y,S ‹=5 6űJNmy"3o|=ǔU)R>4( '̵w6fmڒ*d~Y6ola&EXR_iH.T;!'tSUTJ9p~‰ "9ƣlBT`.<ޠ .擠O+㵼jt*4*/]J×x(V}(>AHqid48.UYAXJ.zACganZQ~Bf>=OnRPGG .3`Ju~XR # K-*(nNk8R/jnyo`-.f_8;jTB>gkJ}zgAd՚KoV}u:vl7n@F  _^uc-21X{-~3 KZ0ǢGV#jjƠYw>H,[xXAeF㊆[xui IȞZ;K34g$0J^R2O,*7#gYa{&Z^.:>O5|M*QEg †I]!7g/"$r$.g[yREk+BUƚ00UZQ9JoXGJ|]C5E ey"YᐊmCc2aؙF+C,WYEp E!owXioe&?/?Wݗtc.w*tQ_8FN2Y}^U0T}Pm6%r\d5s=9GgEGNRs[';x,H{ 4{f c,-ї^uMaܳmgXX ]v?k;6U#0a}Ġ- ,14?)B^/ؓvcR1^sp׊x^N_ TSHwhs_f&<.""' tMgdN$Qδy$?+;O5 + Yo .s -/NRoJj ZҌ؂D gD{%yI$o w?5{L'n ka8b+n2FO3>gɯR}XwfzAC+uC9;{ Bgǿ~_Zf{C _ Iw%D=&m_X^|_"kM\.Nrf>Z]M#Vq?π=5Xe9[Wf1 ^PK,6.:S|7IZ-2N_pI˯C^v^8?o gAjNͰިSb&9Døm|g:cz#O>LQ1Cg!3I^6SU/<~P({22C8Kg 2Z9c7X%E9u81ilך aXh~Pz"-mASFFy{_;T[piVs:#{;roTGQ\VOI8~0ae$nvv6':QXgJ"~DDL'`ha*KJWew̔> L_' ftN%A%k#I>%8"__wwq4 3e?ְ mkQ8b ,jc`pe9;" '9=*밺zXx{׬j]Έ<2USܘ{]]D|aŒΰȒ7 ȶvK&%=G *9rHHMw1[ !ckߤփԤXa:|:Apr3êkm jK}yN:NwعoC2Ɯ~EJOf9=҃e1U$i(9}?~NM-w?X,,3L{xe, &\-ϥ_* I^ۼ\s5,%"\vKg/潾 wT|pj ,ܮn$kL 1IULsuQ޺)a9Dp΋\vc$O0ڝ@Dr8rE _U6Lǻ^<'C0 n ]p3CU9.Σ̹?d f|ްOD39`sB*Ҡ^,˜iFDhx]S2z<+,Dvp\+GLwF~ugёS%)䥼:{FdDEc^âχ?RIENDB`rally-0.9.1/doc/source/verification/0000775000567000056710000000000013073420067020536 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/verification/howto/0000775000567000056710000000000013073420067021676 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/verification/howto/migrate_from_old_design.rst0000664000567000056710000004040013073417716027277 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ======================================================== HowTo migrate from Verification component 0.7.0 to 0.8.0 ======================================================== .. note:: This document describes migration process from 0.7.0 to 0.8.0 Rally version. You can apply this instruction for migration to later versions, but check all references and release notes before trying to do it. Verification Component was introduced long time ago even before the first Rally release. It started as a small helper thing but became a big powerful tool. Since it was not designed to all features that were implemented there later, it contained a lot of workarounds and hacks. New Verification Component, which we are happy to introduce, should fix all architecture issues and improve user-experience. Unfortunately, fixing all those obsolete architecture decisions could not be done in a backward-compatible way, or it would produce much more workarounds. That is why we decided to redesign the whole component in a clear way - remove old code and write a new one from scratch. Migration to New Verification Component should be simple and do not take too much time. You can find description of made changes below. .. contents:: :depth: 2 :local: Reports ------- We completely reworked verification reports and merged comparison to main report. Now you can build one report for multiple number of verifications. 
For more details follow :ref:`verification-reports` Verification statuses --------------------- +------------+------------+---------------------------------------------------+ | Old Status | New Status | Description | +============+============+===================================================+ | init | init | Initial state. It appears instantly after calling | | | | ``rally verify start`` command before the actual | | | | run of verifier's tool. | +------------+------------+---------------------------------------------------+ | running | | It was used right after checking status of | | | | verifier. It is redundant in terms of new design. | +------------+------------+---------------------------------------------------+ | verifying | running | Identifies the process of tool execution. | +------------+------------+---------------------------------------------------+ | finished | finished | Previously, "finished" state was used for an | | | | identification of just finished verification. By | | | | "finished" meant that verification has any test | | | | result. Now it means that verification was | | | | executed and doesn't have failures, unexpected | | | | success or any kind of errors. | | +------------+---------------------------------------------------+ | | failed | Old purpose is an identification of "errors", | | | | situations when results are empty. The right use | | | | is an identification of finished verification | | | | with tests in "failed" and "uxsuccess" | | | | (unexpected success) statuses. | +------------+------------+---------------------------------------------------+ | failed | crashed | Something went wrong while launching verification.| +------------+------------+---------------------------------------------------+ The latest information about verification statuses you can find at :ref:`verification_statuses`. 
Command Line Interface ---------------------- You can find the latest information about Verification Component CLI here - :ref:`rally-verify-cli-reference`. Installing verifier """"""""""""""""""" Command for Rally 0.7.0 - `rally verify install `_ .. code-block:: console $ rally verify install --deployment --source --version \ --system-wide Command since Rally 0.8.0: .. code-block:: console $ rally verify create-verifier --type "tempest" --source \ --version --system-wide --name Here you can find several important improvements: 1) Rally team introduced new entity - :ref:`verifiers`. Verifier stores all information about installed tool (i.e., source, version, system-wide) in a database. You do not need to transmit the same arguments into all ``rally verify`` commands as it was previously with ``--system-wide`` flag. 2) You can use particular verifier for multiple deployments. ``--deployment`` flag moved to ``rally verify start`` command. Also, you can run it simultaneously (checking in parallel different sets, different cloud, etc) 3) Verification Component can use not only Tempest for verifying system. Check :ref:`known-verifier-types` for full list of supported tools. 4) You can have unlimited number of verifiers. Re-install verifier aka update """""""""""""""""""""""""""""" Command for Rally 0.7.0 - `rally verify reinstall `_ .. code-block:: console $ rally verify reinstall --deployment --source --version \ --system-wide Command since Rally 0.8.0: .. code-block:: console $ rally verify update-verifier --id --source --version \ --system-wide --no-system-wide --update-venv Changes: 1) ``rally verify update-verifier`` doesn't require deployment id 2) You can switch between usage of system-wide installation and virtual environment. 3) You can update just virtual environment without cloning verifier code again Uninstall """"""""" Command for Rally 0.7.0 - `rally verify uninstall `_ .. 
code-block:: console $ rally verify uninstall --deployment Command since Rally 0.8.0: .. code-block:: console $ rally verify delete-verifier --id --deployment-id --force Changes: 1) As it was mentioned before, Verifier doesn't have an alignment to any particular deployment, so deployment argument is optional now. If --deployment-id argument is specified only deployment specific data will be removed (i.e, configurations). 2) New --force flag for removing all verifications results for that verifier. Installation extensions """"""""""""""""""""""" Command for Rally 0.7.0 - `rally verify installplugin `_ .. code-block:: console $ rally verify installplugin --deployment --source \ --version --system-wide Command since Rally 0.8.0: .. code-block:: console $ rally verify add-verifier-ext --id --source --version \ --extra-settings Changes: 1) --system-wide flag is removed. Rally checks the verifier information to identify where to install the extension - in a system-side way or use virtual environment. 2) New --extra-settings flag. In case of Tempest, it is redundant, but for other verifiers allows to transmit some extra installation settings for verifier extension. Uninstall extensions """""""""""""""""""" Command for Rally 0.7.0 - `rally verify uninstallplugin `_ .. code-block:: console $ rally verify uninstallplugin --deployment --repo-name \ --system-wide Command since Rally 0.8.0: .. code-block:: console $ rally verify delete-verifier-ext --id --name Changes: 1) It is one more place where you do not need to pass --system-wide flag anymore. 2) --deployment flag is gone. 3) --repo-name is renamed to just --name. List extensions """"""""""""""" Command for Rally 0.7.0 - `rally verify listplugins `_ .. code-block:: console $ rally verify listplugins --deployment --system-wide Command since Rally 0.8.0: .. code-block:: console $ rally verify list-verifier-exts --id Changes: 1) No need to specify --system-wide flag. 2) --deployment flag is gone. 
Discover available tests """""""""""""""""""""""" Command for Rally 0.7.0 - `rally verify discover `_ .. code-block:: console $ rally verify discover --deployment --system-wide --pattern Command since Rally 0.8.0: .. code-block:: console $ rally verify list-verifier-tests --id --pattern Changes: 1) No need to specify --system-wide flag. 2) --deployment flag is gone. Configuring """"""""""" Commands for Rally 0.7.0: * The command for generating configs `rally verify genconfig `_ .. code-block:: console $ rally verify genconfig --deployment --tempest-config \ --add-options --override Command since Rally 0.8.0: .. code-block:: console $ rally verify configure-verifier --id --deployment-id \ --extend --override --reconfigure --show Changes: 1) The argument ``--override`` replaces old ``--tempest-config`` name. First of all, argument name "override" is a unified word without alignment to any tool. Also, it describes in the best way the meaning of the action: use client specified configuration file. 2) The argument ``--extend`` replaces old ``--add-options``. It accepts a path to config in INI format or JSON/YAML string. In future, it will be extended with the ability to specify a path to JSON/YAML file. 3) The argument ``--reconfigure`` replaces old ``--override``. It means that existing file will be ignored and new one will be used/created. Show config """"""""""" Command for Rally 0.7.0 - `rally verify showconfig `_ .. code-block:: console $ rally verify showconfig --deployment Command since Rally 0.8.0: .. code-block:: console $ rally verify configure-verifier --id --deployment-id --show Changes: We do not have a separate command for that task. ``rally verify configure-verifier --show`` shows an existing configuration (if it exists) if ``--reconfigure`` argument is not specified. Running verification """""""""""""""""""" Command for Rally 0.7.0 - `rally verify start `_ .. 
code-block:: console $ rally verify start --deployment --set --regex \ --load-list --tests-file --skip-list \ --tempest-config --xfail-list --system-wide \ --concurrency --failing --no-use Command since Rally 0.8.0: .. code-block:: console $ rally verify start --id --deployment-id --pattern \ --load-list --skip-list --xfail-list \ --concurrency --no-use --detailed Changes: 1) You need to pass verifier id 2) Arguments ``--set`` and ``--regex`` are merged in the new model to single ``--pattern`` argument. Name of tests set should be specified like ``--pattern set=``. It was done to provide a way for each verifier to support custom arguments. 3) The argument ``--tests-file`` was deprecated in Rally 0.6.0 and we are ready to remove it. 4) Arguments ``--skip-list`` and ``--xfail-list`` accept path to file in JSON/YAML format. Content should be a dictionary, where keys are tests names (full name with id and tags) and values are reasons. 5) The argument ``--tempest-config`` is gone. Use ``rally verify configure-verifier --id --deployment-id --override `` instead. 6) The argument ``--system-wide`` is gone like in most of other commands. 7) In case of specified ``--detailed`` arguments, traces of failed tests will be displayed (default behaviour in old verification design) Show verification result """""""""""""""""""""""" Commands for Rally 0.7.0: * The command for showing results of verification `rally verify show `_ .. code-block:: console $ rally verify show --uuid --sort-by --detailed * Separate command which calls ``rally verify show`` with hardcoded ``--detailed`` flag `rally verify detailed `_ .. code-block:: console $ rally verify detailed --uuid --sort-by Command since Rally 0.8.0: .. 
code-block:: console $ rally verify show --uuid --sort-by --detailed Changes: 1) Redundant ``rally verify detailed`` command is removed 2) Sorting tests via ``--sort-by`` argument is extended to name/duration/status Listing all verifications """"""""""""""""""""""""" Command for Rally 0.7.0 - `rally verify list `_ .. code-block:: console $ rally verify list Command since Rally 0.8.0: .. code-block:: console $ rally verify list --id --deployment-id --status Changes: You can filter verifications by verifiers, by deployments and results statuses. Importing results """"""""""""""""" Command for Rally 0.7.0 - `rally verify import `_ .. code-block:: console $ rally verify import --deployment --set --file --no-use Command since Rally 0.8.0: .. code-block:: console $ rally verify import --id --deployment-id --file \ --run-args --no-use Changes: 1) You need to specify verifier to import results for. 2) The argument ``--set`` is merged into unified ``--run-args``. Building reports """""""""""""""" Commands for Rally 0.7.0: * The command for building HTML/JSON reports of verification `rally verify results `_ .. code-block:: console $ rally verify results --uuid --html --json --output-file * The command for comparison two verifications `rally verify compare `_ .. code-block:: console $ rally verify compare --uuid-1 --uuid-2 --csv --html \ --json --output-file --threshold Command since Rally 0.8.0: .. code-block:: console $ rally verify report --uuid --type --to --open Changes: 1) Building reports becomes pluggable. You can extend reporters types. See :ref:`verification-reports` for more details. 2) The argument ``--type`` expects type of report (HTML/JSON). There are no more separate arguments for each report type. .. hint:: You can list all supported types, executing ``rally plugin list --plugin-base VerificationReporter`` command. 3) Reports are not aligned to only local types, so the argument ``--to`` replaces ``--output-file``. 
In case of HTML/JSON reports, it can include a path to the local file like it was previously or URL to some external system with credentials like ``https://username:password@example.com:777``. 4) The comparison is embedded into main reports and it is not limited by two verifications results. There are no reasons for the separate command for that task. The End """"""" Have nice verifications! rally-0.9.1/doc/source/verification/howto/add_support_for_new_tool.rst0000664000567000056710000001054513073417716027544 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _howto-add-support-for-new-tool: ============================== HowTo add support for new tool ============================== First of all, you should start from the reading of :ref:`plugins` page. After you learned basic things about Rally plugin mechanism, let's move to Verifier interface itself. .. contents:: :depth: 2 :local: Spec ---- All verifiers plugins should inherit ``rally.verification.manager.VerifierManager`` and implement all abstract methods. Here you can find its interface: .. autoclass:: rally.verification.manager.VerifierManager :members: :exclude-members: base_ref, check_system_wide, checkout, install_venv, parse_results, validate Example of Fake Verifier Manager -------------------------------- FakeTool is a tool which doesn't require configuration and installation. .. 
code-block:: python import random import re from rally.verification import manager # Verification component expects that method "run" of verifier returns # object. Class Result is a simple wrapper for two expected properties. class Result(object): def __init__(self, totals, tests): self.totals = totals self.tests = tests @manager.configure("fake-tool", default_repo="https://example.com") class FakeTool(manager.VerifierManager): """Fake Tool \o/""" TESTS = ["fake_tool.tests.bar.FatalityTestCase.test_one", "fake_tool.tests.bar.FatalityTestCase.test_two", "fake_tool.tests.bar.FatalityTestCase.test_three", "fake_tool.tests.bar.FatalityTestCase.test_four", "fake_tool.tests.foo.MegaTestCase.test_one", "fake_tool.tests.foo.MegaTestCase.test_two", "fake_tool.tests.foo.MegaTestCase.test_three", "fake_tool.tests.foo.MegaTestCase.test_four"] # This fake verifier doesn't launch anything, just returns random # results, so let's override parent methods to avoid redundant # clonning repo, checking packages and so on. def install(self): pass def uninstall(self, full=False): pass # Each tool, which supports configuration, has the own mechanism # for that task. Writing unified method is impossible. That is why # `VerificationManager` implements the case when the tool doesn't # need (doesn't support) configuration at all. Such behaviour is # ideal for FakeTool, since we do not need to change anything :) # Let's implement method `run` to return random data. def run(self, context): totals = {"tests_count": len(self.TESTS), "tests_duration": 0, "failures": 0, "skipped": 0, "success": 0, "unexpected_success": 0, "expected_failures": 0} tests = {} for name in self.TESTS: duration = random.randint(0, 10000)/100. 
totals["tests_duration"] += duration test = {"name": name, "status": random.choice(["success", "fail"]), "duration": "%s" % duration} if test["status"] == "fail": test["traceback"] = "Ooooppps" totals["failures"] += 1 else: totals["success"] += 1 tests[name] = test return Result(totals, tests=tests) def list_tests(self, pattern=""): return [name for name in self.TESTS if re.match(pattern, name)] rally-0.9.1/doc/source/verification/howto/add_new_reporter.rst0000664000567000056710000000727513073417716025775 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _howto-add-new-reporting-mechanism: ================================= HowTo add new reporting mechanism ================================= Reporting mechanism for verifications is pluggable. Custom plugins can be used for custom output formats or for exporting results to external systems. We hardly recommend to read :ref:`plugins` page to understand how do Rally Plugins work. .. contents:: :depth: 2 :local: Spec ---- All reporters should inherit ``rally.verification.reporter.VerificationReporter`` and implement all abstract methods. Here you can find its interface: .. autoclass:: rally.verification.reporter.VerificationReporter :members: Example of custom JSON Reporter ------------------------------- Basically, you need to implement only two methods "validate" and "generate". Method "validate" should check that destination of the report is right. 
Method "generate" should build a report or export results somewhere; actually, it is up to you what it should do but return format is strict, see `Spec <#spec>`_ section for what it can return. .. code-block:: python import json from rally.verification import reporter @reporter.configure("summary-in-json") class SummaryInJsonReporter(reporter.VerificationReporter): """Store summary of verification(s) in JSON format""" # ISO 8601 TIME_FORMAT = "%Y-%m-%dT%H:%M:%S%z" @classmethod def validate(cls, output_destination): # we do not have any restrictions for destination, so nothing to # check pass def generate(self): report = {} for v in self.verifications: report[v.uuid] = { "started_at": v.created_at.strftime(self.TIME_FORMAT), "finished_at": v.updated_at.strftime(self.TIME_FORMAT), "status": v.status, "run_args": v.run_args, "tests_count": v.tests_count, "tests_duration": v.tests_duration, "skipped": v.skipped, "success": v.success, "expected_failures": v.expected_failures, "unexpected_success": v.unexpected_success, "failures": v.failures, # v.tests includes all information about launched tests, # but for simplification of this fake reporters, let's # save just names "launched_tests": [test["name"] for test in v.tests.values()] } raw_report = json.dumps(report, indent=4) if self.output_destination: # In case of output_destination existence report will be saved # to hard drive and there is nothing to print to stdout, so # "print" key is not used return {"files": {self.output_destination: raw_report}, "open": self.output_destination} else: # it is something that will be print at CLI layer. return {"print": raw_report} rally-0.9.1/doc/source/verification/howto/index.rst0000664000567000056710000000014613073417716023547 0ustar jenkinsjenkins00000000000000 ===== HowTo ===== .. toctree:: :maxdepth: 1 :glob: ./add* migrate_from_old_design rally-0.9.1/doc/source/verification/reports.rst0000664000567000056710000001023613073417716022777 0ustar jenkinsjenkins00000000000000.. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _verification-reports: ==================== Verification reports ==================== Rally stores all verifications results in its DataBase so that you can access and process results at any time. No matter what verifier you use, results will be stored in a unified way and reports will be unified too. We support several types of reports out of the box: :include-var:`rally.cli.commands.verify.DEFAULT_REPORT_TYPES`; but our reporting system is pluggable so that you can write your own plugin to build some specific reports or to export results to the specific system (see :ref:`howto-add-new-reporting-mechanism` for more details`). .. contents:: :depth: 2 :local: HTML reports ------------ HTML report is the most convenient type of reports. It includes as much as possible useful information about Verifications. Here is an example of HTML report for 3 verifications. It was generated by next command: .. code-block:: console $ rally verify report --uuid --type html \ --to ./report.html .. image:: ../images/Report-Verify-for-4-Verifications.png :align: center The report consists of two tables. First one is a summary table. It includes base information about verifications: UUIDs; numbers of tests; when they were launched; statuses; etc. Also, you can find detailed information grouped by tests statuses at the right part of the table. 
If the size (height) of the summary table seems too large for you and hinders to see more tests results, you can push "Toggle Header" button. The second table contains actual verifications results. They are grouped by tests names. The result of the test for particular verification overpainted by one of the next colours: * *Red* - It means that test has "failed" status * *Orange* - It is "unexpected success". Most of the parsers calculates it just like failure * *Green* - Everything is ok. The test succeeded. * *Yellow* - It is "expected failure". * *Light Blue* - Test is skipped. It is not good and not bad Several verifications comparison is a default embedded behaviour of reports. The difference between verifications is displayed in brackets after actual test duration. Sign **+** means that current result is bigger that standard by the number going after the sign. Sign **-** is an opposite to **+**. Please, note that all diffs are comparisons with the first verification in a row. Filtering results """"""""""""""""" You can filter tests by setting or removing a mark from check box of the particular status column of the summary table. .. image:: ../images/Report-Verify-filter-by-status.png :align: center Tests Tags """""""""" Some of the tests tools support tests tagging. It can be used for setting unique IDs, groups, etc. Usually, such tags are included in test name. It is inconvenient and Rally stores tags separately. By default they are hidden, but if you push "Toggle tags" button, they will be displayed under tests names. .. image:: ../images/Report-Verify-toggle-tags.png :align: center Tracebacks & Reasons """""""""""""""""""" Tests with "failed" and "expected failure" statuses have tracebacks of failures. Tests with "skipped", "expected failure", "unexpected success" status has "reason" of events. By default, both tracebacks and reasons are hidden, but you can show them by clicking on the appropriate test. .. 
image:: ../images/Report-Verify-tracebacks.png :align: center .. image:: ../images/Report-Verify-xfail.png :align: center Plugins Reference for all out-of-the-box reporters -------------------------------------------------- .. generate_plugin_reference:: :base_cls: Verification Reporter rally-0.9.1/doc/source/verification/overview.rst0000664000567000056710000000575513073417716023161 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Historical background --------------------- Tempest, OpenStack’s official test suite, is a powerful tool for running a set of functional tests against an OpenStack cluster. Tempest automatically runs against every patch in every project of OpenStack, which lets us avoid merging changes that break functionality. Unfortunately, it has limited opportunities to be used, to process its results, etc. That is why we started Verification Component initiative a long time ago (see `a blog post `_ for more details, but be careful as all user interface is changed completely since that time). What is Verification Component and why do you need it? ------------------------------------------------------ The primary goal of Rally Product is to provide a simple way to do complex things. As for functional testing, Verification Component includes interfaces for: * **Managing things**. Create an isolated virtual environment and install verification tool there? Yes, we can do it! Clone tool from Git repositories? Sure! 
Store several versions of one tool (you know, sometimes they are incompatible, with different required packages and so on)? Of course! In general, Verification Component allows to install, upgrade, reinstall, configure your tool. You should not care about zillion options anymore Rally will discover them via cloud UX and make the configuration file for you automatically. * **Launching verifiers**. Launchers of specific tools don't always contain all required features, Rally team tries to fix this omission. Verification Component supports some of them like expected failures, a list of tests to skip, a list of tests to launch, re-running previous verification or just failed tests from it and so on. Btw, all verification runs arguments are stored in the database. * **Processing results**. Rally DataBase stores all `verifications `_ and you can obtain unified (across different verifiers) results at any time. You can find a verification run summary there, run arguments which were used, error messages and etc. Comparison mechanism for several verifications is available too. Verification reports can be generated in several formats: HTML, JSON, JUnit-XML (see :ref:`verification-reports` for more details). Also, reports mechanism is expendable and you can write your own plugin for whatever system you want. rally-0.9.1/doc/source/verification/verifiers.rst0000664000567000056710000000705213073417716023301 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. 
_verifiers: ========= Verifiers ========= .. contents:: :depth: 1 :local: What is it? ----------- Verifier Plugin is a compatibility layer between Rally and the specific tool (such as Tempest) which runs tests. It implements features like installation, configuration, upgrades, running, etc in terms of the tool. It is a driver in other words. It is a pluggable entity, which means that you can easily add support for whatever tool you want (see :ref:`howto-add-support-for-new-tool` page for more information). Even more, you can deliver such plugin separately from Rally itself, but we firmly recommend to push a change to Rally upstream (see :ref:`contribute` guide), so Rally core-team will able to review it and help to improve. Verifier is an instance of the Verifier Plugin. It is an installed tool. For example, "Tempest" is a set of functional tests, it is Verifier Plugin (we have a plugin for it). Installed Tempest 12.0 from https://github.com/openstack/tempest in a virtual environment is the verifier. Verifier is not aligned to any particular deployment like it was in the past, you can use one verifier for testing unlimited number of deployments (each deployment will have separate configuration files for the tool). Verifier & Verifier Plugin are the main entities which Verification component operates with. Another one is the verifications results. Verifier statuses ----------------- All verifiers can be in next statuses: * *init* - Initial state. It appears while you call ``rally verify create-verifier`` command and installation step is not yet started. * *installing* - Installation of the verifier is not a quick task. It is about cloning tool, checking packages or installing virtual environments with all required packages. This state indicates that this step is in the process. * *installed* - It should be one of your favourite states. It means that everything is ok and you can start verifying your cloud. 
* *updating* - This state identifies the process of updating verifier (version, source, packages, etc.). * *extending* - The process of extending a verifier by its plugins. * *failed* - Something went wrong while installation. .. _verification_statuses: Verification statuses --------------------- * *init* - Initial state. It appears instantly after calling ``rally verify start`` command before the actual run of verifier's tool. * *running* - Identifies the process of execution tool. * *finished*- Verification is finished without errors and failures. * *failed* - Verification is finished, but there are some failed tests. * *crashed* - Unexpected error had happened while running verification. .. _known-verifier-types: Known verifier types -------------------- Out of the box """""""""""""" You can execute command ``rally verify list-plugins`` locally to check available verifiers in your environment. Cut down from Global :ref:`plugin-reference` page: .. generate_plugin_reference:: :base_cls: Verifier Manager Third-party """"""""""" Nothing here yet. rally-0.9.1/doc/source/verification/index.rst0000664000567000056710000000221313073417716022404 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ====================== Verification Component ====================== Functional testing is a first step to ensuring that your product works as expected and API covers all use-cases. Rally Verification Component is all about this. 
It is not designed to generate a real big load (for this job we have :ref:`task-component`), but it should be enough to check that your environment works by different tools (we call them :ref:`glossary-verification`). .. toctree:: :maxdepth: 2 :glob: verifiers reports cli_reference howto/index .. include:: ./overview.rst rally-0.9.1/doc/source/verification/cli_reference.rst0000664000567000056710000000144613073417716024071 0ustar jenkinsjenkins00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _rally-verify-cli-reference: ====================== Command Line Interface ====================== Cut down from Global :ref:`cli-reference` .. contents:: :depth: 2 :local: .. make_cli_reference:: :group: verify rally-0.9.1/doc/source/overview/0000775000567000056710000000000013073420067017722 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/overview/user_stories.rst0000664000567000056710000000200613073417716023207 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. .. _user_stories: User stories ============ Many users of Rally were able to make interesting discoveries concerning their OpenStack clouds using our benchmarking tool. Numerous user stories presented below show how Rally has made it possible to find performance bugs and validate improvements for different OpenStack installations. .. toctree:: :glob: :maxdepth: 1 stories/** rally-0.9.1/doc/source/overview/overview.rst0000664000567000056710000001517013073417720022330 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _overview: .. contents:: :depth: 1 :local: Overview ======== **Rally** is a **benchmarking tool** that **automates** and **unifies** multi-node OpenStack deployment, cloud verification, benchmarking & profiling. It can be used as a basic tool for an *OpenStack CI/CD system* that would continuously improve its SLA, performance and stability. Who Is Using Rally ------------------ Here's a small selection of some of the many companies using Rally: .. image:: ../images/Rally_who_is_using.png :align: center Use Cases --------- Let's take a look at 3 major high level Use Cases of Rally: .. image:: ../images/Rally-UseCases.png :align: center Generally, there are a few typical cases where Rally proves to be of great use: 1. Automate measuring & profiling focused on how new code changes affect the OS performance; 2. 
Using Rally profiler to detect scaling & performance issues; 3. Investigate how different deployments affect the OS performance: * Find the set of suitable OpenStack deployment architectures; * Create deployment specifications for different loads (amount of controllers, swift nodes, etc.); 4. Automate the search for hardware best suited for particular OpenStack cloud; 5. Automate the production cloud specification generation: * Determine terminal loads for basic cloud operations: VM start & stop, Block Device create/destroy & various OpenStack API methods; * Check performance of basic cloud operations in case of different loads. Real-life examples ------------------ To be substantive, let's investigate a couple of real-life examples of Rally in action. How does amqp_rpc_single_reply_queue affect performance? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Rally allowed us to reveal a quite an interesting fact about **Nova**. We used *NovaServers.boot_and_delete* benchmark scenario to see how the *amqp_rpc_single_reply_queue* option affects VM bootup time (it turns on a kind of fast RPC). Some time ago it was `shown `_ that cloud performance can be boosted by setting it on, so we naturally decided to check this result with Rally. To make this test, we issued requests for booting and deleting VMs for a number of concurrent users ranging from 1 to 30 with and without the investigated option. For each group of users, a total number of 200 requests was issued. Averaged time per request is shown below: .. 
image:: ../images/Amqp_rpc_single_reply_queue.png :align: center **So Rally has unexpectedly indicated that setting the *amqp_rpc_single_reply_queue* option apparently affects the cloud performance, but in quite an opposite way rather than it was thought before.** Performance of Nova list command ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Another interesting result comes from the *NovaServers.boot_and_list_server* scenario, which enabled us to we launched the following benchmark with Rally: * **Benchmark environment** (which we also call **"Context"**): 1 temporary OpenStack user. * **Benchmark scenario**: boot a single VM from this user & list all VMs. * **Benchmark runner** setting: repeat this procedure 200 times in a continuous way. During the execution of this benchmark scenario, the user has more and more VMs on each iteration. Rally has shown that in this case, the performance of the **VM list** command in Nova is degrading much faster than one might expect: .. image:: ../images/Rally_VM_list.png :align: center Complex scenarios ^^^^^^^^^^^^^^^^^ In fact, the vast majority of Rally scenarios is expressed as a sequence of **"atomic" actions**. For example, *NovaServers.snapshot* is composed of 6 atomic actions: 1. boot VM 2. snapshot VM 3. delete VM 4. boot VM from snapshot 5. delete VM 6. delete snapshot Rally measures not only the performance of the benchmark scenario as a whole, but also that of single atomic actions. As a result, Rally also plots the atomic actions performance data for each benchmark iteration in a quite detailed way: .. image:: ../images/Rally_snapshot_vm.png :align: center Architecture ------------ Usually OpenStack projects are implemented *"as-a-Service"*, so Rally provides this approach. In addition, it implements a *CLI-driven* approach that does not require a daemon: 1. **Rally as-a-Service**: Run rally as a set of daemons that present Web UI *(work in progress)* so 1 RaaS could be used by a whole team. 2. 
**Rally as-an-App**: Rally as just a lightweight and portable CLI app (without any daemons) that makes it simple to use & develop. The diagram below shows how this is possible: .. image:: ../images/Rally_Architecture.png :align: center The actual **Rally core** consists of 4 main components, listed below in the order they go into action: 1. **Server Providers** - provide a **unified interface** for interaction with different **virtualization technologies** (*LXS*, *Virsh* etc.) and **cloud suppliers** (like *Amazon*): it does so via *ssh* access and in one *L3 network*; 2. **Deploy Engines** - deploy some OpenStack distribution (like *DevStack* or *FUEL*) before any benchmarking procedures take place, using servers retrieved from Server Providers; 3. **Verification** - runs *Tempest* (or another specific set of tests) against the deployed cloud to check that it works correctly, collects results & presents them in human-readable form; 4. **Benchmark Engine** - allows you to write parameterized benchmark scenarios & run them against the cloud. It should become fairly obvious why Rally core needs to be split into these parts if you take a look at the following diagram that visualizes a rough **algorithm for starting benchmarking OpenStack at scale**. Keep in mind that there might be lots of different ways to set up virtual servers, as well as to deploy OpenStack to them. .. image:: ../images/Rally_QA.png :align: center rally-0.9.1/doc/source/overview/glossary.rst0000664000567000056710000001151013073417716022324 0ustar jenkinsjenkins00000000000000:tocdepth: 1 .. _glossary: ======== Glossary ======== .. warning:: Unfortunately, our glossary is not complete, but the Rally team is working on improving it. If you cannot find a definition in which you are interested, feel free to ping us via IRC (#openstack-rally channel at Freenode) or via E-Mail (openstack-dev@lists.openstack.org with tag [Rally]). .. 
contents:: :depth: 1 :local: Common ====== Alembic ------- A lightweight database migration tool which powers Rally migrations. Read more at `Official Alembic documentation `_ DB Migrations ------------- Rally supports database schema and data transformations, which are also known as migrations. This allows you to get your data up-to-date with the latest Rally version. Rally ----- A testing tool that automates and unifies multi-node OpenStack deployment and cloud verification. It can be used as a basic tool for an OpenStack CI/CD system that would continuously improve its SLA, performance and stability. Rally Config ------------ Rally behavior can be customized by editing its configuration file, *rally.conf*, in `configparser `_ format. While being installed, Rally generates a config with default values from its `sample `_. When started, Rally searches for its config in "<sys.prefix>/etc/rally/rally.conf", "~/.rally/rally.conf", "/etc/rally/rally.conf" Rally DB -------- Rally uses a relational database as data storage. Several database backends are supported: SQLite (default), PostgreSQL, and MySQL. The database connection can be set via the configuration file option *[database]/connection*. Rally Plugin ------------ Most parts of Rally `are pluggable `_. Scenarios, runners, contexts and even charts for HTML report are plugins. It is easy to create your own plugin and use it. Read more at `plugin reference `_. Deployment ========== Deployment ---------- A set of information about the target environment (for example: URI and authentication credentials) which is saved in the database. It is used to define the target system for testing each time a task is started. It has a "type" value which changes task behavior for the selected target system; for example type "openstack" will enable OpenStack authentication and services. Task ==== Cleanup ------- This is a specific context which removes all resources on the target system that were created by the current task. 
If some Rally-related resources remain, please `file a bug `_ and attach the task file and a list of remaining resources. Context ------- A type of plugin that can run some actions on the target environment before the workloads start and after the last workload finishes. This allows, for example, preparing the environment for workloads (e.g., create resources and change parameters) and restoring the environment later. Each Context must implement ``setup()`` and ``cleanup()`` methods. Input task ---------- A file that describes how to run a Rally Task. It can be in JSON or YAML format. The *rally task start* command needs this file to run the task. The input task is pre-processed by the `Jinja2 `_ templating engine so it is very easy to create repeated parts or calculate specific values at runtime. It is also possible to pass values via CLI arguments, using the *--task-args* or *--task-args-file* options. Runner ------ This is a Rally plugin which decides how to run Workloads. For example, they can be run serially in a single process, or using concurrency. Scenario -------- Synonym for `Workload <#workload>`_ Service ------- Abstraction layer that represents the target environment API. For example, this can be some OpenStack service. A Service provides API versioning and action timings, simplifies API calls, and reduces code duplication. It can be used in any Rally plugin. SLA --- Service-Level Agreement (Success Criteria). Allows you to determine whether a subtask or workload is successful by setting success criteria rules. Subtask ------- A part of a Task. There can be many subtasks in a single Task. Task ---- An entity which includes all the necessary data for a test run, and results of this run. Workload -------- An important part of a Task: a plugin which is run by the runner. It is usually run in a separate thread. Workloads are grouped into Subtasks. 
Verify ====== Rally can run different subunit-based testing tools against a target environment, for example `tempest `_ for OpenStack. .. _glossary-verification: Verification ------------ A result of running some third-party subunit-based testing tool. rally-0.9.1/doc/source/overview/index.rst0000664000567000056710000000141113073417716021567 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ====================== Rally project overview ====================== .. 
toctree:: :glob: overview glossary user_stories rally-0.9.1/doc/source/_templates/0000775000567000056710000000000013073420067020211 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/_templates/openstackrally/0000775000567000056710000000000013073420067023244 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/_templates/openstackrally/theme.conf0000664000567000056710000000003313073417716025220 0ustar jenkinsjenkins00000000000000[theme] inherit = openstackrally-0.9.1/doc/source/_templates/openstackrally/_static/0000775000567000056710000000000013073420067024672 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/_templates/openstackrally/_static/img.css0000664000567000056710000000004213073417716026163 0ustar jenkinsjenkins00000000000000.body img { max-width: 100%; }rally-0.9.1/doc/source/_templates/openstackrally/layout.html0000664000567000056710000000461013073417716025457 0ustar jenkinsjenkins00000000000000{% extends "openstack/layout.html" %} {% set show_source = False %} {% set css_files = css_files + ["_static/img.css"] %} {# sidebarlogo is a top block in sidebar. Let's use it to display home link #} {%- block sidebarlogo %}

What is Rally?

{%- endblock %} {# Display global toc instead of local #} {%- block sidebartoc %}

Contents

{{ toctree() }} {%- endblock %} {# Turn off sections "Previous topic" and "Next topic" #} {%- block sidebarrel %}{% endblock %} {% block projectsource %}

Contacts

IRC
#openstack-rally channel at FreeNode
E-mail
openstack-dev@lists.openstack.org with "[Rally]" tag in subject

Useful links

{% endblock %} {# copy-pasted from original theme and extended with Rally links #} {%- block header_navigation %}
  • Home
  • Projects
  • User Stories
  • Community
  • Blog
  • Wiki
  • Documentation
  • {% endblock %} rally-0.9.1/doc/source/feature_requests.rst0000664000567000056710000000217513073417716022210 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _feature_requests: Request New Features ==================== To request a new feature, you should create a document similar to other feature requests and then contribute it to the **doc/feature_request** directory of the Rally repository (see the :ref:`How-to-contribute tutorial `). If you don't have time to contribute your feature request via Gerrit, please contact Boris Pavlovic (boris@pavlovic.me) Active feature requests: .. toctree:: :glob: :maxdepth: 1 feature_request/* rally-0.9.1/doc/source/project_info/0000775000567000056710000000000013073420067020535 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/project_info/index.rst0000664000567000056710000002225613073417716022414 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. .. _project_info: Project Info and Release Notes ============================== Maintainers ----------- Project Team Lead (PTL) ~~~~~~~~~~~~~~~~~~~~~~~ +------------------------------+------------------------------------------------+ | Contact | Area of interest | +------------------------------+------------------------------------------------+ | | Andrey Kurilin | * Chief Architect | | | andreykurilin (irc) | * Release management | | | andreykurilin (gitter) | * Community management | | | andr.kurilin@gmail.com | * Core team management | | | akurilin@mirantis.com | * Road Map | +------------------------------+------------------------------------------------+ | *If you would like to refactor whole Rally or have UX/community/other issues please contact me.* Project Core maintainers ~~~~~~~~~~~~~~~~~~~~~~~~ +------------------------------+------------------------------------------------+ | Contact | Area of interest | +------------------------------+------------------------------------------------+ | | Alexander Maretskiy | * Rally reports | | | amaretskiy (irc) | * Front-end | | | amaretskiy@mirantis.com | | +------------------------------+------------------------------------------------+ | | Anton Studenov | * Rally Deployment | | | tohin (irc) | * Task Hooks | | | astudenov@mirantis.com | | +------------------------------+------------------------------------------------+ | | Boris Pavlovic | * Founder and ideological leader | | | boris-42 (irc) | * Architect | | | boris@pavlovic.me | * Rally task & benchmark | +------------------------------+------------------------------------------------+ | | Chris St. 
Pierre | * Rally task & benchmark | | | stpierre (irc) | * Bash guru ;) | | | cstpierr@cisco.com | | +------------------------------+------------------------------------------------+ | | Illia Khudoshyn | * Rally task & benchmark | | | ikhudoshyn (irc) | | | | ikhudoshyn@mirantis.com | | +------------------------------+------------------------------------------------+ | | Kun Huang | * Rally task & benchmark | | | kun_huang (irc) | | | | gareth.huang@huawei.com | | +------------------------------+------------------------------------------------+ | | Li Yingjun | * Rally task & benchmark | | | liyingjun (irc) | | | | yingjun.li@kylin-cloud.com | | +------------------------------+------------------------------------------------+ | | Roman Vasilets | * Rally task & benchmark | | | rvasilets (irc) | | | | pomeo92@gmail.com | | +------------------------------+------------------------------------------------+ | | Sergey Skripnick | * Rally CI/CD | | | redixin (irc) | * Rally deploy | | | sskripnick@mirantis.com | * Automation of everything | +------------------------------+------------------------------------------------+ | | Yair Fried | * Rally-Tempest integration | | | yfried (irc) | * Rally task & benchmark | | | yfried@redhat.com | | +------------------------------+------------------------------------------------+ | | Yaroslav Lobankov | * Rally Verification | | | ylobankov (irc) | | | | ylobankov@mirantis.com | | +------------------------------+------------------------------------------------+ | *All cores from this list are reviewing all changes that are proposed to Rally. 
To avoid duplication of efforts, please contact them before starting work on your code.* Plugin Core reviewers ~~~~~~~~~~~~~~~~~~~~~ +------------------------------+------------------------------------------------+ | Contact | Area of interest | +------------------------------+------------------------------------------------+ | | Ivan Kolodyazhny | * Cinder plugins | | | e0ne (irc) | | | | e0ne@e0ne.info | | +------------------------------+------------------------------------------------+ | | Nikita Konovalov | * Sahara plugins | | | NikitaKonovalov (irc) | | | | nkonovalov@mirantis.com | | +------------------------------+------------------------------------------------+ | | Oleg Bondarev | * Neutron plugins | | | obondarev (irc) | | | | obondarev@mirantis.com | | +------------------------------+------------------------------------------------+ | | Sergey Kraynev | * Heat plugins | | | skraynev (irc) | | | | skraynev@mirantis.com | | +------------------------------+------------------------------------------------+ | | Spyros Trigazis | * Magnum plugins | | | strigazi (irc) | | | | strigazi@gmail.com | | +------------------------------+------------------------------------------------+ | *All cores from this list are responsible for their component plugins. To avoid duplication of efforts, please contact them before starting working on your own plugins.* Useful links ------------ - `Source code`_ - `Rally roadmap`_ - `Project space`_ - `Bugs`_ - `Patches on review`_ - `Meeting logs`_ (server: **irc.freenode.net**, channel: **#openstack-meeting**) - `IRC logs`_ (server: **irc.freenode.net**, channel: **#openstack-rally**) - `Gitter chat`_ - `Trello board`_ Where can I discuss and propose changes? 
---------------------------------------- - Our IRC channel: **#openstack-rally** on **irc.freenode.net**; - Weekly Rally team meeting (in IRC): **#openstack-meeting** on **irc.freenode.net**, held on Mondays at 14:00 UTC; - OpenStack mailing list: **openstack-dev@lists.openstack.org** (see `subscription and usage instructions`_); - `Rally team on Launchpad`_: Answers/Bugs/Blueprints. .. _release_notes: .. include:: release_notes.rst .. references: .. _Source code: https://github.com/openstack/rally .. _Rally roadmap: https://docs.google.com/a/mirantis.com/spreadsheets/d/16DXpfbqvlzMFaqaXAcJsBzzpowb_XpymaK2aFY2gA2g/edit#gid=0 .. _Project space: http://launchpad.net/rally .. _Bugs: https://bugs.launchpad.net/rally .. _Patches on review: https://review.openstack.org/#/q/status:open+rally,n,z .. _Meeting logs: http://eavesdrop.openstack.org/meetings/rally/2016/ .. _IRC logs: http://irclog.perlgeek.de/openstack-rally .. _Gitter chat: https://gitter.im/rally-dev/Lobby .. _Trello board: https://trello.com/b/DoD8aeZy/rally .. _subscription and usage instructions: http://lists.openstack.org/cgi-bin/mailman/listinfo/openstack-dev .. _Rally team on Launchpad: https://launchpad.net/rally rally-0.9.1/doc/source/project_info/release_notes.rst0000664000567000056710000000136413073417716024132 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Release Notes ------------- .. 
toctree:: :maxdepth: 1 release_notes/archive.rst release_notes/latest.rst rally-0.9.1/doc/source/index.rst0000664000567000056710000000310313073417716017721 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ============== What is Rally? ============== **OpenStack** is, undoubtedly, a really *huge* ecosystem of cooperative services. **Rally** is a **benchmarking tool** that answers the question: **"How does OpenStack work at scale?"**. To make this possible, Rally **automates** and **unifies** multi-node OpenStack deployment, cloud verification, benchmarking & profiling. Rally does it in a **generic** way, making it possible to check whether OpenStack is going to work well on, say, a 1k-servers installation under high load. Thus it can be used as a basic tool for an *OpenStack CI/CD system* that would continuously improve its SLA, performance and stability. .. image:: ./images/Rally-Actions.png :align: center Contents ======== .. 
toctree:: :maxdepth: 2 overview/index install_and_upgrade/index quick_start/index cli_reference task/index verification/index plugins/index contribute feature_requests project_info/index rally-0.9.1/doc/source/quick_start/0000775000567000056710000000000013073420067020405 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/quick_start/tutorial/0000775000567000056710000000000013073420067022250 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/quick_start/tutorial/step_6_aborting_load_generation_on_sla_failure.rst0000664000567000056710000001372213073417716034357 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_6_aborting_load_generation_on_sla_failure: Step 6. Aborting load generation on success criteria failure ============================================================ Benchmarking pre-production and production OpenStack clouds is not a trivial task. From the one side it is important to reach the OpenStack cloud's limits, from the other side the cloud shouldn't be damaged. Rally aims to make this task as simple as possible. Since the very beginning Rally was able to generate enough load for any OpenStack cloud. Generating too big a load was the major issue for production clouds, because Rally didn't know how to stop the load until it was too late. With the **"stop on SLA failure"** feature, however, things are much better. 
This feature can be easily tested in real life by running one of the most important and plain benchmark scenario called *"Authenticate.keystone"*. This scenario just tries to authenticate from users that were pre-created by Rally. Rally input task looks as follows (*auth.yaml*): .. code-block:: yaml --- Authenticate.keystone: - runner: type: "rps" times: 6000 rps: 50 context: users: tenants: 5 users_per_tenant: 10 sla: max_avg_duration: 5 In human-readable form this input task means: *Create 5 tenants with 10 users in each, after that try to authenticate to Keystone 6000 times performing 50 authentications per second (running new authentication request every 20ms). Each time we are performing authentication from one of the Rally pre-created user. This task passes only if max average duration of authentication takes less than 5 seconds.* **Note that this test is quite dangerous because it can DDoS Keystone**. We are running more and more simultaneously authentication requests and things may go wrong if something is not set properly (like on my DevStack deployment in Small VM on my laptop). Let’s run Rally task with **an argument that prescribes Rally to stop load on SLA failure**: .. code-block:: console $ rally task start --abort-on-sla-failure auth.yaml .... +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | total | 0.108 | 8.58 | 65.97 | 19.782 | 26.125 | 100.0% | 2495 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ On the resulting table there are 2 interesting things: 1. Average duration was 8.58 sec which is more than 5 seconds 2. Rally performed only 2495 (instead of 6000) authentication requests To understand better what has happened let’s generate HTML report: .. 
code-block:: bash rally task report --out auth_report.html .. image:: ../../images/Report-Abort-on-SLA-task-1.png :align: center On the chart with durations we can observe that the duration of authentication request reaches 65 seconds at the end of the load generation. **Rally stopped load at the very last moment just before bad things happened. The reason why it runs so many attempts to authenticate is because of not enough good success criteria.** We had to run a lot of iterations to make average duration bigger than 5 seconds. Let’s chose better success criteria for this task and run it one more time. .. code-block:: yaml --- Authenticate.keystone: - runner: type: "rps" times: 6000 rps: 50 context: users: tenants: 5 users_per_tenant: 10 sla: max_avg_duration: 5 max_seconds_per_iteration: 10 failure_rate: max: 0 Now our task is going to be successful if the following three conditions hold: 1. maximum average duration of authentication should be less than 5 seconds 2. maximum duration of any authentication should be less than 10 seconds 3. no failed authentication should appear Let’s run it! .. code-block:: console $ rally task start --abort-on-sla-failure auth.yaml ... +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | total | 0.082 | 5.411 | 22.081 | 10.848 | 14.595 | 100.0% | 1410 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+ .. image:: ../../images/Report-Abort-on-SLA-task-2.png :align: center This time load stopped after 1410 iterations versus 2495 which is much better. The interesting thing on this chart is that first occurrence of "> 10 second" authentication happened on 950 iteration. 
The reasonable question: "Why does Rally run 500 more authentication requests then?". This appears from the math: During the execution of **bad** authentication (10 seconds) Rally performed about 50 request/sec * 10 sec = 500 new requests as a result we run 1400 iterations instead of 950. (based on: http://boris-42.me/rally-tricks-stop-load-before-your-openstack-goes-wrong/) rally-0.9.1/doc/source/quick_start/tutorial/step_2_input_task_format.rst0000664000567000056710000002212713073417716030022 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_2_input_task_format: Step 2. Rally input task format =============================== .. contents:: :local: Basic input task syntax ----------------------- Rally comes with a really great collection of :ref:`plugins ` and in most real-world cases you will use multiple plugins to test your OpenStack cloud. Rally makes it very easy to run **different test cases defined in a single task**. To do so, use the following syntax: .. code-block:: json { "": [, , ...] "": [, ...] } where **, as before, is a dictionary: .. 
code-block:: json { "args": { }, "runner": { }, "context": { }, "sla": { } } Multiple benchmarks in a single task ------------------------------------ As an example, let's edit our configuration file from :ref:`step 1 ` so that it prescribes Rally to launch not only the **NovaServers.boot_and_delete_server** scenario, but also the **KeystoneBasic.create_delete_user** scenario. All we have to do is to append the configuration of the second scenario as yet another top-level key of our JSON file: *multiple-scenarios.json* .. code-block:: json { "NovaServers.boot_and_delete_server": [ { "args": { "flavor": { "name": "m1.tiny" }, "image": { "name": "^cirros.*-disk$" }, "force_delete": false }, "runner": { "type": "constant", "times": 10, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } } } ], "KeystoneBasic.create_delete_user": [ { "args": {}, "runner": { "type": "constant", "times": 10, "concurrency": 3 } } ] } Now you can start this benchmark task as usually: .. code-block:: console $ rally task start multiple-scenarios.json ... +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 8.06 | 11.354 | 18.594 | 18.54 | 18.567 | 100.0% | 10 | | nova.delete_server | 4.364 | 5.054 | 6.837 | 6.805 | 6.821 | 100.0% | 10 | | total | 12.572 | 16.408 | 25.396 | 25.374 | 25.385 | 100.0% | 10 | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ Load duration: 84.1959171295 Full duration: 102.033041 -------------------------------------------------------------------------------- ... 
+----------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +----------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | keystone.create_user | 0.676 | 0.875 | 1.03 | 1.02 | 1.025 | 100.0% | 10 | | keystone.delete_user | 0.407 | 0.647 | 0.84 | 0.739 | 0.79 | 100.0% | 10 | | total | 1.082 | 1.522 | 1.757 | 1.724 | 1.741 | 100.0% | 10 | +----------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ Load duration: 5.72119688988 Full duration: 10.0808410645 ... Note that the HTML reports you can generate by typing **rally task report --out=report_name.html** after your benchmark task has completed will get richer as your benchmark task configuration file includes more benchmark scenarios. Let's take a look at the report overview page for a task that covers all the scenarios available in Rally: .. code-block:: bash rally task report --out=report_multiple_scenarios.html --open .. image:: ../../images/Report-Multiple-Overview.png :align: center Multiple configurations of the same scenario -------------------------------------------- Yet another thing you can do in Rally is to launch **the same benchmark scenario multiple times with different configurations**. That's why our configuration file stores a list for the key *"NovaServers.boot_and_delete_server"*: you can just append a different configuration of this benchmark scenario to this list to get it. Let's say, you want to run the **boot_and_delete_server** scenario twice: first using the *"m1.tiny"* flavor and then using the *"m1.small"* flavor: *multiple-configurations.json* .. 
code-block:: json { "NovaServers.boot_and_delete_server": [ { "args": { "flavor": { "name": "m1.tiny" }, "image": { "name": "^cirros.*-disk$" }, "force_delete": false }, "runner": {...}, "context": {...} }, { "args": { "flavor": { "name": "m1.small" }, "image": { "name": "^cirros.*-disk$" }, "force_delete": false }, "runner": {...}, "context": {...} } ] } That's it! You will get again the results for each configuration separately: .. code-block:: console $ rally task start --task=multiple-configurations.json ... +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 7.896 | 9.433 | 13.14 | 11.329 | 12.234 | 100.0% | 10 | | nova.delete_server | 4.435 | 4.898 | 6.975 | 5.144 | 6.059 | 100.0% | 10 | | total | 12.404 | 14.331 | 17.979 | 16.72 | 17.349 | 100.0% | 10 | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ Load duration: 73.2339417934 Full duration: 91.1692159176 -------------------------------------------------------------------------------- ... 
+--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 8.207 | 8.91 | 9.823 | 9.692 | 9.758 | 100.0% | 10 | | nova.delete_server | 4.405 | 4.767 | 6.477 | 4.904 | 5.691 | 100.0% | 10 | | total | 12.735 | 13.677 | 16.301 | 14.596 | 15.449 | 100.0% | 10 | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ Load duration: 71.029528141 Full duration: 88.0259010792 ... The HTML report will also look similar to what we have seen before: .. code-block:: bash rally task report --out=report_multiple_configuraions.html --open .. image:: ../../images/Report-Multiple-Configurations-Overview.png :align: center ././@LongLink0000000000000000000000000000015100000000000011212 Lustar 00000000000000rally-0.9.1/doc/source/quick_start/tutorial/step_1_setting_up_env_and_running_benchmark_from_samples.rstrally-0.9.1/doc/source/quick_start/tutorial/step_1_setting_up_env_and_running_benchmark_from_samples0000664000567000056710000003173013073417720035650 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_1_setting_up_env_and_running_benchmark_from_samples: Step 1. 
Setting up the environment and running a benchmark from samples ======================================================================= .. contents:: :local: In this demo, we will show how to perform some basic operations in Rally, such as registering an OpenStack cloud, benchmarking it and generating benchmark reports. We assume that you have gone through :ref:`tutorial_step_0_installation` and have an already existing OpenStack deployment with Keystone available at **. Registering an OpenStack deployment in Rally -------------------------------------------- First, you have to provide Rally with an OpenStack deployment it is going to benchmark. This should be done either through `OpenRC files`_ or through deployment `configuration files`_. In case you already have an *OpenRC*, it is extremely simple to register a deployment with the *deployment create* command: .. code-block:: console $ . openrc admin admin $ rally deployment create --fromenv --name=existing +--------------------------------------+----------------------------+------------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+------------+------------------+--------+ | 28f90d74-d940-4874-a8ee-04fda59576da | 2015-01-18 00:11:38.059983 | existing | deploy->finished | | +--------------------------------------+----------------------------+------------+------------------+--------+ Using deployment : ... Alternatively, you can put the information about your cloud credentials into a JSON configuration file (let's call it `existing.json`_). The *deployment create* command has a slightly different syntax in this case: .. 
code-block:: console $ rally deployment create --file=existing.json --name=existing +--------------------------------------+----------------------------+------------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+------------+------------------+--------+ | 28f90d74-d940-4874-a8ee-04fda59576da | 2015-01-18 00:11:38.059983 | existing | deploy->finished | | +--------------------------------------+----------------------------+------------+------------------+--------+ Using deployment : ... Note the last line in the output. It says that the just created deployment is now used by Rally; that means that all the benchmarking operations from now on are going to be performed on this deployment. Later we will show how to switch between different deployments. Finally, the *deployment check* command enables you to verify that your current deployment is healthy and ready to be benchmarked: .. code-block:: console $ rally deployment check keystone endpoints are valid and following services are available: +----------+----------------+-----------+ | services | type | status | +----------+----------------+-----------+ | cinder | volume | Available | | cinderv2 | volumev2 | Available | | ec2 | ec2 | Available | | glance | image | Available | | heat | orchestration | Available | | heat-cfn | cloudformation | Available | | keystone | identity | Available | | nova | compute | Available | | novav21 | computev21 | Available | | s3 | s3 | Available | +----------+----------------+-----------+ Benchmarking ------------ Now that we have a working and registered deployment, we can start benchmarking it. The sequence of benchmarks to be launched by Rally should be specified in a *benchmark task configuration file* (either in *JSON* or in *YAML* format). 
Let's try one of the sample benchmark tasks available in `samples/tasks/scenarios`_, say, the one that boots and deletes multiple servers (*samples/tasks/scenarios/nova/boot-and-delete.json*): .. code-block:: json { "NovaServers.boot_and_delete_server": [ { "args": { "flavor": { "name": "m1.tiny" }, "image": { "name": "^cirros.*-disk$" }, "force_delete": false }, "runner": { "type": "constant", "times": 10, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } } } ] } To start a benchmark task, run the ``task start`` command (you can also add the *-v* option to print more logging information): .. code-block:: console $ rally task start samples/tasks/scenarios/nova/boot-and-delete.json -------------------------------------------------------------------------------- Preparing input task -------------------------------------------------------------------------------- Input task is: -------------------------------------------------------------------------------- Task 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996: started -------------------------------------------------------------------------------- Benchmarking... This can take a while... 
To track task status use: rally task status or rally task detailed -------------------------------------------------------------------------------- Task 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996: finished -------------------------------------------------------------------------------- test scenario NovaServers.boot_and_delete_server args position 0 args values: {u'args': {u'flavor': {u'name': u'm1.tiny'}, u'force_delete': False, u'image': {u'name': u'^cirros.*-disk$'}}, u'context': {u'users': {u'project_domain': u'default', u'resource_management_workers': 30, u'tenants': 3, u'user_domain': u'default', u'users_per_tenant': 2}}, u'runner': {u'concurrency': 2, u'times': 10, u'type': u'constant'}} +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 7.99 | 9.047 | 11.862 | 9.747 | 10.805 | 100.0% | 10 | | nova.delete_server | 4.427 | 4.574 | 4.772 | 4.677 | 4.725 | 100.0% | 10 | | total | 12.556 | 13.621 | 16.37 | 14.252 | 15.311 | 100.0% | 10 | +--------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ Load duration: 70.1310448647 Full duration: 87.545541048 HINTS: * To plot HTML graphics with this data, run: rally task report 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996 --out output.html * To get raw JSON output of task results, run: rally task results 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996 Using task: 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996 Note that the Rally input task above uses *regular expressions* to specify the image and flavor name to be used for server creation, since concrete names might differ from installation to installation. If this benchmark task fails, then the reason for that might a non-existing image/flavor specified in the task. 
To check what images/flavors are available in the deployment you are currently benchmarking, you might use the *rally show* command: .. code-block:: console $ rally show images +--------------------------------------+-----------------------+-----------+ | UUID | Name | Size (B) | +--------------------------------------+-----------------------+-----------+ | 8dfd6098-0c26-4cb5-8e77-1ecb2db0b8ae | CentOS 6.5 (x86_64) | 344457216 | | 2b8d119e-9461-48fc-885b-1477abe2edc5 | CirrOS 0.3.4 (x86_64) | 13287936 | +--------------------------------------+-----------------------+-----------+ $ rally show flavors Flavors for user `admin` in tenant `admin`: +----+-----------+-------+----------+-----------+-----------+ | ID | Name | vCPUs | RAM (MB) | Swap (MB) | Disk (GB) | +----+-----------+-------+----------+-----------+-----------+ | 1 | m1.tiny | 1 | 512 | | 1 | | 2 | m1.small | 1 | 2048 | | 20 | | 3 | m1.medium | 2 | 4096 | | 40 | | 4 | m1.large | 4 | 8192 | | 80 | | 5 | m1.xlarge | 8 | 16384 | | 160 | +----+-----------+-------+----------+-----------+-----------+ Report generation ----------------- One of the most beautiful things in Rally is its task report generation mechanism. It enables you to create illustrative and comprehensive HTML reports based on the benchmarking data. To create and open at once such a report for the last task you have launched, call: .. code-block:: bash rally task report --out=report1.html --open This will produce an HTML page with the overview of all the scenarios that you've included into the last benchmark task completed in Rally (in our case, this is just one scenario, and we will cover the topic of multiple scenarios in one task in :ref:`the next step of our tutorial `): .. 
image:: ../../images/Report-Overview.png :align: center This aggregating table shows the duration of the load produced by the corresponding scenario (*"Load duration"*), the overall benchmark scenario execution time, including the duration of environment preparation with contexts (*"Full duration"*), the number of iterations of each scenario (*"Iterations"*), the type of the load used while running the scenario (*"Runner"*), the number of failed iterations (*"Errors"*) and finally whether the scenario has passed certain Success Criteria (*"SLA"*) that were set up by the user in the input configuration file (we will cover these criteria in :ref:`one of the next steps `). By navigating in the left panel, you can switch to the detailed view of the benchmark results for the only scenario we included into our task, namely **NovaServers.boot_and_delete_server**: .. image:: ../../images/Report-Scenario-Overview.png :align: center This page, along with the description of the success criteria used to check the outcome of this scenario, shows more detailed information and statistics about the duration of its iterations. Now, the *"Total durations"* table splits the duration of our scenario into the so-called **"atomic actions"**: in our case, the **"boot_and_delete_server"** scenario consists of two actions - **"boot_server"** and **"delete_server"**. You can also see how the scenario duration changed throughout its iterations in the *"Charts for the total duration"* section. Similar charts, but with atomic actions detailed are on the *"Details"* tab of this page: .. image:: ../../images/Report-Scenario-Atomic.png :align: center Note that all the charts on the report pages are very dynamic: you can change their contents by clicking the switches above the graph and see more information about its single points by hovering the cursor over these points. Take some time to play around with these graphs and then move on to :ref:`the next step of our tutorial `. .. references: .. 
_OpenRC files: http://docs.openstack.org/user-guide/content/cli_openrc.html .. _configuration files: https://github.com/openstack/rally/tree/master/samples/deployments .. _existing.json: https://github.com/openstack/rally/blob/master/samples/deployments/existing.json .. _samples/tasks/scenarios: https://github.com/openstack/rally/tree/master/samples/tasks/scenarios rally-0.9.1/doc/source/quick_start/tutorial/step_9_deploying_openstack.rst0000664000567000056710000000445013073417716030340 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_9_deploying_openstack: Step 9. Deploying OpenStack from Rally ====================================== Along with supporting already existing OpenStack deployments, Rally itself can **deploy OpenStack automatically** by using one of its *deployment engines*. Take a look at other `deployment configuration file samples`_. For example, *devstack-in-existing-servers.json* is a deployment configuration file that tells Rally to deploy OpenStack with **Devstack** on the existing servers with given credentials: .. code-block:: json { "type": "DevstackEngine", "provider": { "type": "ExistingServers", "credentials": [{"user": "root", "host": "10.2.0.8"}] } } You can try to deploy OpenStack in your Virtual Machine using this script. Edit the configuration file with your IP address/user name and run, as usual: .. 
code-block:: console $ rally deployment create --file=samples/deployments/for_deploying_openstack_with_rally/devstack-in-existing-servers.json --name=new-devstack +---------------------------+----------------------------+--------------+------------------+ | uuid | created_at | name | status | +---------------------------+----------------------------+--------------+------------------+ | | 2015-01-10 22:00:28.270941 | new-devstack | deploy->finished | +---------------------------+----------------------------+--------------+------------------+ Using deployment : .. references: .. _deployment configuration file samples: https://github.com/openstack/rally/tree/master/samples/deployments rally-0.9.1/doc/source/quick_start/tutorial/step_3_benchmarking_with_existing_users.rst0000664000567000056710000001704313073417720033104 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_3_benchmarking_with_existing_users: Step 3. Benchmarking OpenStack with existing users ================================================== .. contents:: :local: Motivation ---------- There are two very important reasons from the production world of why it is preferable to use some already existing users to benchmark your OpenStack cloud: 1. *Read-only Keystone Backends:* creating temporary users for benchmark scenarios in Rally is just impossible in case of r/o Keystone backends like *LDAP* and *AD*. 2. 
*Safety:* Rally can be run from an isolated group of users, and if something goes wrong, this won’t affect the rest of the cloud users. Registering existing users in Rally ----------------------------------- The information about existing users in your OpenStack cloud should be passed to Rally at the :ref:`deployment initialization step `. You have to use the **ExistingCloud** deployment plugin that just provides Rally with credentials of an already existing cloud. The difference from the deployment configuration we've seen previously is that you should set up the *"users"* section with the credentials of already existing users. Let's call this deployment configuration file *existing_users.json*: .. code-block:: json { "type": "ExistingCloud", "auth_url": "http://example.net:5000/v2.0/", "region_name": "RegionOne", "endpoint_type": "public", "admin": { "username": "admin", "password": "pa55word", "tenant_name": "demo" }, "users": [ { "username": "b1", "password": "1234", "tenant_name": "testing" }, { "username": "b2", "password": "1234", "tenant_name": "testing" } ] } This deployment configuration requires some basic information about the OpenStack cloud like the region name, auth url. admin user credentials, and any amount of users already existing in the system. Rally will use their credentials to generate load in against this deployment as soon as we register it as usual: .. 
code-block:: console $ rally deployment create --file existings_users --name our_cloud +--------------------------------------+----------------------------+-----------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+-----------+------------------+--------+ | 1849a9bf-4b18-4fd5-89f0-ddcc56eae4c9 | 2015-03-28 02:43:27.759702 | our_cloud | deploy->finished | | +--------------------------------------+----------------------------+-----------+------------------+--------+ Using deployment: 1849a9bf-4b18-4fd5-89f0-ddcc56eae4c9 ~/.rally/openrc was updated After that, the **rally show** command lists the resources for each user separately: .. code-block:: console $ rally show images Images for user `admin` in tenant `admin`: +--------------------------------------+---------------------------------+-----------+ | UUID | Name | Size (B) | +--------------------------------------+---------------------------------+-----------+ | 041cfd70-0e90-4ed6-8c0c-ad9c12a94191 | cirros-0.3.4-x86_64-uec | 25165824 | | 87710f09-3625-4496-9d18-e20e34906b72 | Fedora-x86_64-20-20140618-sda | 209649664 | | b0f269be-4859-48e0-a0ca-03fb80d14602 | cirros-0.3.4-x86_64-uec-ramdisk | 3740163 | | d82eaf7a-ff63-4826-9aa7-5fa105610e01 | cirros-0.3.4-x86_64-uec-kernel | 4979632 | +--------------------------------------+---------------------------------+-----------+ Images for user `b1` in tenant `testing`: +--------------------------------------+---------------------------------+-----------+ | UUID | Name | Size (B) | +--------------------------------------+---------------------------------+-----------+ | 041cfd70-0e90-4ed6-8c0c-ad9c12a94191 | cirros-0.3.4-x86_64-uec | 25165824 | | 87710f09-3625-4496-9d18-e20e34906b72 | Fedora-x86_64-20-20140618-sda | 209649664 | | b0f269be-4859-48e0-a0ca-03fb80d14602 | cirros-0.3.4-x86_64-uec-ramdisk | 3740163 | | d82eaf7a-ff63-4826-9aa7-5fa105610e01 | 
cirros-0.3.4-x86_64-uec-kernel | 4979632 | +--------------------------------------+---------------------------------+-----------+ Images for user `b2` in tenant `testing`: +--------------------------------------+---------------------------------+-----------+ | UUID | Name | Size (B) | +--------------------------------------+---------------------------------+-----------+ | 041cfd70-0e90-4ed6-8c0c-ad9c12a94191 | cirros-0.3.4-x86_64-uec | 25165824 | | 87710f09-3625-4496-9d18-e20e34906b72 | Fedora-x86_64-20-20140618-sda | 209649664 | | b0f269be-4859-48e0-a0ca-03fb80d14602 | cirros-0.3.4-x86_64-uec-ramdisk | 3740163 | | d82eaf7a-ff63-4826-9aa7-5fa105610e01 | cirros-0.3.4-x86_64-uec-kernel | 4979632 | +--------------------------------------+---------------------------------+-----------+ With this new deployment being active, Rally will use the already existing users *"b1"* and *"b2"* instead of creating the temporary ones when launching benchmark task that do not specify the *"users"* context. Running benchmark scenarios with existing users ----------------------------------------------- After you have registered a deployment with existing users, don't forget to remove the *"users"* context from your benchmark task configuration if you want to use existing users, like in the following configuration file (*boot-and-delete.json*): .. code-block:: json { "NovaServers.boot_and_delete_server": [ { "args": { "flavor": { "name": "m1.tiny" }, "image": { "name": "^cirros.*-disk$" }, "force_delete": false }, "runner": { "type": "constant", "times": 10, "concurrency": 2 }, "context": {} } ] } When you start this task, it will use the existing users *"b1"* and *"b2"* instead of creating the temporary ones: .. code-block:: bash rally task start samples/tasks/scenarios/nova/boot-and-delete.json It goes without saying that support of benchmarking with predefined users simplifies the usage of Rally for generating loads against production clouds. 
(based on: http://boris-42.me/rally-can-generate-load-with-passed-users-now/) rally-0.9.1/doc/source/quick_start/tutorial/step_8_discovering_more_plugins.rst0000664000567000056710000001473513073417716031404 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_8_discovering_more_plugins: Step 8. Discovering more plugins in Rally ========================================= .. contents:: :local: Plugins in the Rally repository --------------------------------- Rally currently comes with a great collection of plugins that use the API of different OpenStack projects like **Keystone**, **Nova**, **Cinder**, **Glance** and so on. The good news is that you can combine multiple plugins in one task to test your cloud in a comprehensive way. First, let's see what plugins are available in Rally. One of the ways to discover these plugins is just to inspect their `source code`_. another is to use build-in rally plugin command. CLI: rally plugin show ---------------------- Rally plugin CLI command is much more convenient way to learn about different plugins in Rally. This command allows to list plugins and show detailed information about them: .. 
code-block:: console $ rally plugin show create_meter_and_get_stats NAME CeilometerStats.create_meter_and_get_stats NAMESPACE default MODULE rally.plugins.openstack.scenarios.ceilometer.stats DESCRIPTION Meter is first created and then statistics is fetched for the same using GET /v2/meters/(meter_name)/statistics. PARAMETERS +--------+------------------------------------------------+ | name | description | +--------+------------------------------------------------+ | kwargs | contains optional arguments to create a meter | | | | +--------+------------------------------------------------+ In case if multiple found benchmarks found command list all matches elements: .. code-block:: console $ rally plugin show NovaKeypair Multiple plugins found: +-------------------------------------------------+-----------+-------------------------------------------------------+ | name | namespace | title | +-------------------------------------------------+-----------+-------------------------------------------------------+ | NovaKeypair.boot_and_delete_server_with_keypair | default | Boot and delete server with keypair. | | NovaKeypair.create_and_delete_keypair | default | Create a keypair with random name and delete keypair. | | NovaKeypair.create_and_list_keypairs | default | Create a keypair with random name and list keypairs. | +-------------------------------------------------+-----------+-------------------------------------------------------+ CLI: rally plugin list ---------------------- This command can be used to list filtered by name list of plugins. .. code-block:: console $ rally plugin list --name Keystone +--------------------------------------------------+-----------+-----------------------------------------------------------------+ | name | namespace | title | +--------------------------------------------------+-----------+-----------------------------------------------------------------+ | Authenticate.keystone | default | Check Keystone Client. 
| | KeystoneBasic.add_and_remove_user_role | default | Create a user role add to a user and disassociate. | | KeystoneBasic.create_add_and_list_user_roles | default | Create user role, add it and list user roles for given user. | | KeystoneBasic.create_and_delete_ec2credential | default | Create and delete keystone ec2-credential. | | KeystoneBasic.create_and_delete_role | default | Create a user role and delete it. | | KeystoneBasic.create_and_delete_service | default | Create and delete service. | | KeystoneBasic.create_and_list_ec2credentials | default | Create and List all keystone ec2-credentials. | | KeystoneBasic.create_and_list_services | default | Create and list services. | | KeystoneBasic.create_and_list_tenants | default | Create a keystone tenant with random name and list all tenants. | | KeystoneBasic.create_and_list_users | default | Create a keystone user with random name and list all users. | | KeystoneBasic.create_delete_user | default | Create a keystone user with random name and then delete it. | | KeystoneBasic.create_tenant | default | Create a keystone tenant with random name. | | KeystoneBasic.create_tenant_with_users | default | Create a keystone tenant and several users belonging to it. | | KeystoneBasic.create_update_and_delete_tenant | default | Create, update and delete tenant. | | KeystoneBasic.create_user | default | Create a keystone user with random name. | | KeystoneBasic.create_user_set_enabled_and_delete | default | Create a keystone user, enable or disable it, and delete it. | | KeystoneBasic.create_user_update_password | default | Create user and update password for that user. | | KeystoneBasic.get_entities | default | Get instance of a tenant, user, role and service by id's. | +--------------------------------------------------+-----------+-----------------------------------------------------------------+ .. references: .. 
_source code: https://github.com/openstack/rally/tree/master/rally/plugins/ rally-0.9.1/doc/source/quick_start/tutorial/step_4_adding_success_criteria_for_benchmarks.rst0000664000567000056710000001321213073417716034171 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_4_adding_success_criteria_for_benchmarks: Step 4. Adding success criteria (SLA) for benchmarks ==================================================== .. contents:: :local: SLA - Service-Level Agreement (Success Criteria) ------------------------------------------------ Rally allows you to set success criteria (also called *SLA - Service-Level Agreement*) for every benchmark. Rally will automatically check them for you. To configure the SLA, add the *"sla"* section to the configuration of the corresponding benchmark (the check name is a key associated with its target value). You can combine different success criteria: .. code-block:: json { "NovaServers.boot_and_delete_server": [ { "args": { ... }, "runner": { ... }, "context": { ... }, "sla": { "max_seconds_per_iteration": 10, "failure_rate": { "max": 25 } } } ] } Such configuration will mark the **NovaServers.boot_and_delete_server** benchmark scenario as not successful if either some iteration took more than 10 seconds or more than 25% iterations failed. Checking SLA ------------ Let us show you how Rally SLA work using a simple example based on **Dummy benchmark scenarios**. 
These scenarios actually do not perform any OpenStack-related stuff but are very useful for testing the behaviors of Rally. Let us put in a new task, *test-sla.json*, 2 scenarios -- one that does nothing and another that just throws an exception: .. code-block:: json { "Dummy.dummy": [ { "args": {}, "runner": { "type": "constant", "times": 5, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } }, "sla": { "failure_rate": {"max": 0.0} } } ], "Dummy.dummy_exception": [ { "args": {}, "runner": { "type": "constant", "times": 5, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } }, "sla": { "failure_rate": {"max": 0.0} } } ] } Note that both scenarios in these tasks have the **maximum failure rate of 0%** as their **success criterion**. We expect that the first scenario will pass this criterion while the second will fail it. Let's start the task: .. code-block:: bash rally task start test-sla.json After the task completes, run *rally task sla_check* to check the results again the success criteria you defined in the task: .. code-block:: console $ rally task sla_check +-----------------------+-----+--------------+--------+-------------------------------------------------------------------------------------------------------+ | benchmark | pos | criterion | status | detail | +-----------------------+-----+--------------+--------+-------------------------------------------------------------------------------------------------------+ | Dummy.dummy | 0 | failure_rate | PASS | Maximum failure rate percent 0.0% failures, minimum failure rate percent 0% failures, actually 0.0% | | Dummy.dummy_exception | 0 | failure_rate | FAIL | Maximum failure rate percent 0.0% failures, minimum failure rate percent 0% failures, actually 100.0% | +-----------------------+-----+--------------+--------+-------------------------------------------------------------------------------------------------------+ Exactly as expected. 
SLA in task report ------------------ SLA checks are nicely visualized in task reports. Generate one: .. code-block:: bash rally task report --out=report_sla.html --open Benchmark scenarios that have passed SLA have a green check on the overview page: .. image:: ../../images/Report-SLA-Overview.png :align: center Somewhat more detailed information about SLA is displayed on the scenario pages: .. image:: ../../images/Report-SLA-Scenario.png :align: center Success criteria present a very useful concept that enables not only to analyze the outcome of your benchmark tasks, but also to control their execution. In :ref:`one of the next sections ` of our tutorial, we will show how to use SLA to abort the load generation before your OpenStack goes wrong. rally-0.9.1/doc/source/quick_start/tutorial/step_7_working_with_multple_openstack_clouds.rst0000664000567000056710000001776213073417716034204 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_7_working_with_multple_openstack_clouds: Step 7. Working with multiple OpenStack clouds ============================================== Rally is an awesome tool that allows you to work with multiple clouds and can itself deploy them. We already know how to work with :ref:`a single cloud `. Let us now register 2 clouds in Rally: the one that we have access to and the other that we know is registered with wrong credentials. .. code-block:: console $ . 
openrc admin admin # openrc with correct credentials $ rally deployment create --fromenv --name=cloud-1 +--------------------------------------+----------------------------+------------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+------------+------------------+--------+ | 4251b491-73b2-422a-aecb-695a94165b5e | 2015-01-18 00:11:14.757203 | cloud-1 | deploy->finished | | +--------------------------------------+----------------------------+------------+------------------+--------+ Using deployment: 4251b491-73b2-422a-aecb-695a94165b5e ~/.rally/openrc was updated ... $ . bad_openrc admin admin # openrc with wrong credentials $ rally deployment create --fromenv --name=cloud-2 +--------------------------------------+----------------------------+------------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+------------+------------------+--------+ | 658b9bae-1f9c-4036-9400-9e71e88864fc | 2015-01-18 00:38:26.127171 | cloud-2 | deploy->finished | | +--------------------------------------+----------------------------+------------+------------------+--------+ Using deployment: 658b9bae-1f9c-4036-9400-9e71e88864fc ~/.rally/openrc was updated ... Let us now list the deployments we have created: .. 
code-block:: console $ rally deployment list +--------------------------------------+----------------------------+------------+------------------+--------+ | uuid | created_at | name | status | active | +--------------------------------------+----------------------------+------------+------------------+--------+ | 4251b491-73b2-422a-aecb-695a94165b5e | 2015-01-05 00:11:14.757203 | cloud-1 | deploy->finished | | | 658b9bae-1f9c-4036-9400-9e71e88864fc | 2015-01-05 00:40:58.451435 | cloud-2 | deploy->finished | * | +--------------------------------------+----------------------------+------------+------------------+--------+ Note that the second is marked as **"active"** because this is the deployment we have created most recently. This means that it will be automatically (unless its UUID or name is passed explicitly via the *--deployment* parameter) used by the commands that need a deployment, like *rally task start ...* or *rally deployment check*: .. code-block:: console $ rally deployment check Authentication Issues: wrong keystone credentials specified in your endpoint properties. (HTTP 401). $ rally deployment check --deployment=cloud-1 keystone endpoints are valid and following services are available: +----------+----------------+-----------+ | services | type | status | +----------+----------------+-----------+ | cinder | volume | Available | | cinderv2 | volumev2 | Available | | ec2 | ec2 | Available | | glance | image | Available | | heat | orchestration | Available | | heat-cfn | cloudformation | Available | | keystone | identity | Available | | nova | compute | Available | | novav21 | computev21 | Available | | s3 | s3 | Available | +----------+----------------+-----------+ You can also switch the active deployment using the **rally deployment use** command: .. code-block:: console $ rally deployment use cloud-1 Using deployment: 658b9bae-1f9c-4036-9400-9e71e88864fc ~/.rally/openrc was updated ... 
$ rally deployment check keystone endpoints are valid and following services are available: +----------+----------------+-----------+ | services | type | status | +----------+----------------+-----------+ | cinder | volume | Available | | cinderv2 | volumev2 | Available | | ec2 | ec2 | Available | | glance | image | Available | | heat | orchestration | Available | | heat-cfn | cloudformation | Available | | keystone | identity | Available | | nova | compute | Available | | novav21 | computev21 | Available | | s3 | s3 | Available | +----------+----------------+-----------+ Note the first two lines of the CLI output for the *rally deployment use* command. They tell you the UUID of the new active deployment and also say that the *~/.rally/openrc* file was updated -- this is the place where the "active" UUID is actually stored by Rally. One last detail about managing different deployments in Rally is that the *rally task list* command outputs only those tasks that were run against the currently active deployment, and you have to provide the *--all-deployments* parameter to list all the tasks: .. 
code-block:: console $ rally task list +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ | uuid | deployment_name | created_at | duration | status | failed | tag | +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ | c21a6ecb-57b2-43d6-bbbb-d7a827f1b420 | cloud-1 | 2015-01-05 01:00:42.099596 | 0:00:13.419226 | finished | False | | | f6dad6ab-1a6d-450d-8981-f77062c6ef4f | cloud-1 | 2015-01-05 01:05:57.653253 | 0:00:14.160493 | finished | False | | +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ $ rally task list --all-deployment +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ | uuid | deployment_name | created_at | duration | status | failed | tag | +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ | c21a6ecb-57b2-43d6-bbbb-d7a827f1b420 | cloud-1 | 2015-01-05 01:00:42.099596 | 0:00:13.419226 | finished | False | | | f6dad6ab-1a6d-450d-8981-f77062c6ef4f | cloud-1 | 2015-01-05 01:05:57.653253 | 0:00:14.160493 | finished | False | | | 6fd9a19f-5cf8-4f76-ab72-2e34bb1d4996 | cloud-2 | 2015-01-05 01:14:51.428958 | 0:00:15.042265 | finished | False | | +--------------------------------------+-----------------+----------------------------+----------------+----------+--------+-----+ rally-0.9.1/doc/source/quick_start/tutorial/step_10_verifying_cloud_via_tempest_verifier.rst0000664000567000056710000012744213073417716034041 0ustar jenkinsjenkins00000000000000.. Copyright 2017 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_10_verifying_cloud_via_tempest_verifier: Step 10. Verifying cloud via Tempest verifier ============================================= .. contents:: :local: As you may know, Rally has a verification component (aka **'rally verify'**). Earlier the purpose of this component was to simplify work with `Tempest `_ framework (The OpenStack Integration Test Suite). Rally provided a quite simple interface to install and configure Tempest, run tests and build a report with results. But now the verification component allows us to simplify work not only with Tempest but also with any test frameworks or tools. All you need is to create a plugin for your framework or tool, and you will be able to use **'rally verify'** interface for it. At this point, Rally supports only one plugin in the verification component out of the box - as you might guess, Tempest plugin. In this guide, we will show how to use Tempest and Rally together via the updated **'rally verify'** interface. We assume that you already have a :ref:`Rally installation ` and have already :ref:`registered an OpenStack deployment ` in Rally. So, let's get started! Create/delete Tempest verifier ------------------------------ Execute the following command to create a Tempest verifier: .. code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier 2017-01-18 14:43:20.807 5125 INFO rally.api [-] Creating verifier 'tempest-verifier'. 2017-01-18 14:43:21.203 5125 INFO rally.verification.manager [-] Cloning verifier repo from https://git.openstack.org/openstack/tempest. 
2017-01-18 14:43:32.458 5125 INFO rally.verification.manager [-] Creating virtual environment. It may take a few minutes. 2017-01-18 14:43:49.786 5125 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=cde1b03d-d1eb-47f2-a997-3fd21b1d8810) has been successfully created! Using verifier 'tempest-verifier' (UUID=cde1b03d-d1eb-47f2-a997-3fd21b1d8810) as the default verifier for the future operations. The command clones Tempest from the **https://git.openstack.org/openstack/tempest** repository and installs it in a Python virtual environment for the current deployment by default. All information about the created verifier is stored in a database. It allows us to set up different Tempest versions and easily switch between them. How to do it will be described below. You can list all installed verifiers via the **rally verify list-verifiers** command. The arguments below allow us to override the default behavior. Use the **--source** argument to specify an alternate git repository location. The path to a local Tempest repository or a URL of a remote repository are both valid values. .. code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier --source /home/ubuntu/tempest/ 2017-01-18 14:53:19.958 5760 INFO rally.api [-] Creating verifier 'tempest-verifier'. 2017-01-18 14:53:20.166 5760 INFO rally.verification.manager [-] Cloning verifier repo from /home/ubuntu/tempest/. 2017-01-18 14:53:20.299 5760 INFO rally.verification.manager [-] Creating virtual environment. It may take a few minutes. 2017-01-18 14:53:32.517 5760 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=3f878030-1edf-455c-ae5e-07836e3d7e35) has been successfully created! Using verifier 'tempest-verifier' (UUID=3f878030-1edf-455c-ae5e-07836e3d7e35) as the default verifier for the future operations. .. 
code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier --source https://github.com/openstack/tempest.git 2017-01-18 14:54:57.786 5907 INFO rally.api [-] Creating verifier 'tempest-verifier'. 2017-01-18 14:54:57.990 5907 INFO rally.verification.manager [-] Cloning verifier repo from https://github.com/openstack/tempest.git. 2017-01-18 14:55:05.729 5907 INFO rally.verification.manager [-] Creating virtual environment. It may take a few minutes. 2017-01-18 14:55:22.943 5907 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=e84a947c-b9d3-434b-853b-176a597902e5) has been successfully created! Using verifier 'tempest-verifier' (UUID=e84a947c-b9d3-434b-853b-176a597902e5) as the default verifier for the future operations. Use the **--version** argument to specify a Tempest commit ID or tag. .. code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier --version 198e5b4b871c3d09c20afb56dca9637a8cf86ac8 2017-01-18 14:57:02.274 6068 INFO rally.api [-] Creating verifier 'tempest-verifier'. 2017-01-18 14:57:02.461 6068 INFO rally.verification.manager [-] Cloning verifier repo from https://git.openstack.org/openstack/tempest. 2017-01-18 14:57:15.356 6068 INFO rally.verification.manager [-] Switching verifier repo to the '198e5b4b871c3d09c20afb56dca9637a8cf86ac8' version. 2017-01-18 14:57:15.423 6068 INFO rally.verification.manager [-] Creating virtual environment. It may take a few minutes. 2017-01-18 14:57:28.004 6068 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=532d7ad2-902e-4764-aa53-335f67dadc7f) has been successfully created! Using verifier 'tempest-verifier' (UUID=532d7ad2-902e-4764-aa53-335f67dadc7f) as the default verifier for the future operations. .. code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier --source /home/ubuntu/tempest/ --version 13.0.0 2017-01-18 15:01:53.971 6518 INFO rally.api [-] Creating verifier 'tempest-verifier'. 
2017-01-18 15:01:54.180 6518 INFO rally.verification.manager [-] Cloning verifier repo from /home/ubuntu/tempest/. 2017-01-18 15:01:54.274 6518 INFO rally.verification.manager [-] Switching verifier repo to the '13.0.0' version. 2017-01-18 15:01:54.336 6518 INFO rally.verification.manager [-] Creating virtual environment. It may take a few minutes. 2017-01-18 15:02:06.623 6518 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=96ffc4bc-4ac2-4ae9-b3c2-d6b16b871027) has been successfully created! Using verifier 'tempest-verifier' (UUID=96ffc4bc-4ac2-4ae9-b3c2-d6b16b871027) as the default verifier for the future operations. Use the **--system-wide** argument to perform system-wide Tempest installation. In this case, the virtual environment will not be created and Tempest requirements will not be installed. Moreover, it is assumed that requirements are already present in the local environment. This argument is useful when users don't have an Internet connection to install requirements, but they have pre-installed ones in the local environment. .. code-block:: console $ rally verify create-verifier --type tempest --name tempest-verifier --source /home/ubuntu/tempest/ --version 13.0.0 --system-wide 2017-01-18 15:22:09.198 7224 INFO rally.api [-] Creating verifier 'tempest-verifier'. 2017-01-18 15:22:09.408 7224 INFO rally.verification.manager [-] Cloning verifier repo from /home/ubuntu/tempest/. 2017-01-18 15:22:09.494 7224 INFO rally.verification.manager [-] Switching verifier repo to the '13.0.0' version. 2017-01-18 15:22:10.965 7224 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=14c94c12-633a-4522-bd3d-2508f2b9d681) has been successfully created! Using verifier 'tempest-verifier' (UUID=14c94c12-633a-4522-bd3d-2508f2b9d681) as the default verifier for the future operations. To delete the Tempest verifier for all deployments execute the following command: .. 
code-block:: console $ rally verify delete-verifier --id 14c94c12-633a-4522-bd3d-2508f2b9d681 2017-01-18 15:27:03.485 7474 INFO rally.api [-] Deleting verifier 'tempest-verifier' (UUID=14c94c12-633a-4522-bd3d-2508f2b9d681). 2017-01-18 15:27:03.607 7474 INFO rally.api [-] Verifier has been successfully deleted! If you have any verifications, use the **--force** argument to delete the verifier and all stored verifications. .. code-block:: console $ rally verify delete-verifier --id ec58af86-5217-4bbd-b9e5-491df6873b82 Failed to delete verifier 'tempest-verifier' (UUID=ec58af86-5217-4bbd-b9e5-491df6873b82) because there are stored verifier verifications! Please, make sure that they are not important to you. Use 'force' flag if you would like to delete verifications as well. .. code-block:: console $ rally verify delete-verifier --id ec58af86-5217-4bbd-b9e5-491df6873b82 --force 2017-01-18 15:49:12.840 8685 INFO rally.api [-] Deleting all verifications created by verifier 'tempest-verifier' (UUID=ec58af86-5217-4bbd-b9e5-491df6873b82). 2017-01-18 15:49:12.843 8685 INFO rally.api [-] Deleting verification (UUID=c3d1408a-a224-4d31-b38f-4caf8ce06a95). 2017-01-18 15:49:12.951 8685 INFO rally.api [-] Verification has been successfully deleted! 2017-01-18 15:49:12.961 8685 INFO rally.api [-] Deleting verification (UUID=a437537e-538b-4637-b6ab-ecb8072f0c71). 2017-01-18 15:49:13.052 8685 INFO rally.api [-] Verification has been successfully deleted! 2017-01-18 15:49:13.061 8685 INFO rally.api [-] Deleting verification (UUID=5cec0579-4b4e-46f3-aeb4-a481a7bc5663). 2017-01-18 15:49:13.152 8685 INFO rally.api [-] Verification has been successfully deleted! 2017-01-18 15:49:13.152 8685 INFO rally.api [-] Deleting verifier 'tempest-verifier' (UUID=ec58af86-5217-4bbd-b9e5-491df6873b82). 2017-01-18 15:49:13.270 8685 INFO rally.api [-] Verifier has been successfully deleted! 
Use the **--deployment-id** argument to remove only the deployment-specific data, for example, the config file, etc. .. code-block:: console $ rally verify delete-verifier --deployment-id 351fdfa2-99ad-4447-ba31-22e76630df97 2017-01-18 15:30:27.793 7659 INFO rally.api [-] Deleting deployment-specific data for verifier 'tempest-verifier' (UUID=ec58af86-5217-4bbd-b9e5-491df6873b82). 2017-01-18 15:30:27.797 7659 INFO rally.api [-] Deployment-specific data has been successfully deleted! When the **--deployment-id** and **--force** arguments are used together, only the deployment-specific data and the verifications of the specified deployment will be deleted. .. code-block:: console $ rally verify delete-verifier --deployment-id 351fdfa2-99ad-4447-ba31-22e76630df97 --force 2017-01-18 15:55:02.657 9004 INFO rally.api [-] Deleting all verifications created by verifier 'tempest-verifier' (UUID=fbbd2bc0-dd92-4e1d-805c-672af7c5ec78) for deployment '351fdfa2-99ad-4447-ba31-22e76630df97'. 2017-01-18 15:55:02.661 9004 INFO rally.api [-] Deleting verification (UUID=a3d3d53c-79a6-4151-85ce-f4a7323d2f4c). 2017-01-18 15:55:02.767 9004 INFO rally.api [-] Verification has been successfully deleted! 2017-01-18 15:55:02.776 9004 INFO rally.api [-] Deleting verification (UUID=eddea799-bbc5-485c-a284-1747a30e3f1e). 2017-01-18 15:55:02.869 9004 INFO rally.api [-] Verification has been successfully deleted! 2017-01-18 15:55:02.870 9004 INFO rally.api [-] Deleting deployment-specific data for verifier 'tempest-verifier' (UUID=fbbd2bc0-dd92-4e1d-805c-672af7c5ec78). 2017-01-18 15:55:02.878 9004 INFO rally.api [-] Deployment-specific data has been successfully deleted! Configure Tempest verifier -------------------------- Execute the following command to configure the Tempest verifier for the current deployment: .. 
code-block:: console $ rally verify configure-verifier 2017-01-18 16:00:24.495 9377 INFO rally.api [-] Configuring verifier 'tempest-verifier' (UUID=59e8bd5b-55e1-4ab8-b506-a5853c7a92e9) for deployment 'tempest' (UUID=4a62f373-9ce7-47a3-8165-6dc7353f754a). 2017-01-18 16:00:27.497 9377 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=59e8bd5b-55e1-4ab8-b506-a5853c7a92e9) has been successfully configured for deployment 'tempest' (UUID=4a62f373-9ce7-47a3-8165-6dc7353f754a)! Use the **--deployment-id** argument to configure the verifier for any deployment registered in Rally. .. code-block:: console $ rally verify configure-verifier --deployment-id If you want to reconfigure the Tempest verifier, just add the **--reconfigure** argument to the command. .. code-block:: console $ rally verify configure-verifier --reconfigure 2017-01-18 16:08:50.932 9786 INFO rally.api [-] Configuring verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97). 2017-01-18 16:08:50.933 9786 INFO rally.api [-] Verifier is already configured! 2017-01-18 16:08:50.933 9786 INFO rally.api [-] Reconfiguring verifier. 2017-01-18 16:08:52.806 9786 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) has been successfully configured for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! Moreover, it is possible to extend the default verifier configuration by providing the **--extend** argument. .. code-block:: console $ cat extra_options.conf [some-section-1] some-option = some-value [some-section-2] some-option = some-value .. code-block:: console $ rally verify configure-verifier --extend extra_options.conf 2017-01-18 16:15:12.248 10029 INFO rally.api [-] Configuring verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97). 
2017-01-18 16:15:12.249 10029 INFO rally.api [-] Verifier is already configured! 2017-01-18 16:15:12.249 10029 INFO rally.api [-] Adding extra options to verifier configuration. 2017-01-18 16:15:12.439 10029 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) has been successfully configured for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! .. code-block:: console $ rally verify configure-verifier --extend '{section-1: {option: value}, section-2: {option: value}}' 2017-01-18 16:18:07.317 10180 INFO rally.api [-] Configuring verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97). 2017-01-18 16:18:07.317 10180 INFO rally.api [-] Verifier is already configured! 2017-01-18 16:18:07.317 10180 INFO rally.api [-] Adding extra options to verifier configuration. 2017-01-18 16:18:07.549 10180 INFO rally.api [-] Verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) has been successfully configured for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! In order to see the generated Tempest config file use the **--show** argument. .. code-block:: console $ rally verify configure-verifier --show 2017-01-18 16:19:25.412 10227 INFO rally.api [-] Configuring verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97). 2017-01-18 16:19:25.412 10227 INFO rally.api [-] Verifier is already configured! [DEFAULT] debug = True log_file = tempest.log use_stderr = False [auth] use_dynamic_credentials = True admin_username = admin admin_password = admin admin_project_name = admin admin_domain_name = Default ... Start a verification -------------------- In order to start a verification execute the following command: .. 
code-block:: console $ rally verify start 2017-01-18 16:49:35.367 12162 INFO rally.api [-] Starting verification (UUID=0673ca09-bdb6-4814-a33e-17731559ff33) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 16:49:44.404 12162 INFO tempest-verifier [-] {0} tempest.api.baremetal.admin.test_chassis.TestChassis ... skip: TestChassis skipped as Ironic is not available 2017-01-18 16:49:44.404 12162 INFO tempest-verifier [-] {0} tempest.api.baremetal.admin.test_drivers.TestDrivers ... skip: TestDrivers skipped as Ironic is not available 2017-01-18 16:49:44.429 12162 INFO tempest-verifier [-] {3} tempest.api.baremetal.admin.test_ports_negative.TestPortsNegative ... skip: TestPortsNegative skipped as Ironic is not available 2017-01-18 16:49:44.438 12162 INFO tempest-verifier [-] {2} tempest.api.baremetal.admin.test_nodestates.TestNodeStates ... skip: TestNodeStates skipped as Ironic is not available 2017-01-18 16:49:44.438 12162 INFO tempest-verifier [-] {2} tempest.api.baremetal.admin.test_ports.TestPorts ... skip: TestPorts skipped as Ironic is not available 2017-01-18 16:49:44.439 12162 INFO tempest-verifier [-] {1} tempest.api.baremetal.admin.test_api_discovery.TestApiDiscovery ... skip: TestApiDiscovery skipped as Ironic is not available 2017-01-18 16:49:44.439 12162 INFO tempest-verifier [-] {1} tempest.api.baremetal.admin.test_nodes.TestNodes ... skip: TestNodes skipped as Ironic is not available 2017-01-18 16:49:47.083 12162 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_availability_zone_negative.AZAdminNegativeTestJSON.test_get_availability_zone_list_detail_with_non_admin_user ... success [1.013s] 2017-01-18 16:49:47.098 12162 INFO tempest-verifier [-] {1} tempest.api.compute.admin.test_availability_zone.AZAdminV2TestJSON.test_get_availability_zone_list ... 
success [1.063s] 2017-01-18 16:49:47.321 12162 INFO tempest-verifier [-] {1} tempest.api.compute.admin.test_availability_zone.AZAdminV2TestJSON.test_get_availability_zone_list_detail ... success [0.224s] ... By default, the command runs the full suite of Tempest tests for the current deployment. Also, it is possible to run tests of any created verifier, and for any registered deployment in Rally, using the **--id** and **--deployment-id** arguments. .. code-block:: console $ rally verify start --id --deployment-id Also, there is a possibility to run a certain suite of Tempest tests, using the **--pattern** argument. .. code-block:: console $ rally verify start --pattern set=compute 2017-01-18 16:58:40.378 12631 INFO rally.api [-] Starting verification (UUID=a4bd3993-ba3d-425c-ab81-38b2f627e682) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 16:58:44.883 12631 INFO tempest-verifier [-] {1} tempest.api.compute.admin.test_auto_allocate_network.AutoAllocateNetworkTest ... skip: The microversion range[2.37 - latest] of this test is out of the configuration range[None - None]. 2017-01-18 16:58:47.330 12631 INFO tempest-verifier [-] {1} tempest.api.compute.admin.test_availability_zone.AZAdminV2TestJSON.test_get_availability_zone_list ... success [0.680s] 2017-01-18 16:58:47.416 12631 INFO tempest-verifier [-] {2} tempest.api.compute.admin.test_availability_zone_negative.AZAdminNegativeTestJSON.test_get_availability_zone_list_detail_with_non_admin_user ... success [0.761s] 2017-01-18 16:58:47.610 12631 INFO tempest-verifier [-] {1} tempest.api.compute.admin.test_availability_zone.AZAdminV2TestJSON.test_get_availability_zone_list_detail ... success [0.280s] 2017-01-18 16:58:47.694 12631 INFO tempest-verifier [-] {3} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram ... 
success [1.015s] 2017-01-18 16:58:48.514 12631 INFO tempest-verifier [-] {3} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_verify_entry_in_list_details ... success [0.820s] 2017-01-18 16:58:48.675 12631 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_create_agent ... success [0.777s] 2017-01-18 16:58:49.090 12631 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_delete_agent ... success [0.415s] 2017-01-18 16:58:49.160 12631 INFO tempest-verifier [-] {3} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_int_id ... success [0.646s] 2017-01-18 16:58:49.546 12631 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_agents.AgentsAdminTestJSON.test_list_agents ... success [0.455s] ... Available suites for Tempest 14.0.0 (the latest Tempest release when this documentation was written) are **full**, **smoke**, **compute**, **identity**, **image**, **network**, **object_storage**, **orchestration**, **volume**, **scenario**. The number of available suites depends on Tempest version because some test sets move from the Tempest tree to the corresponding Tempest plugins. Moreover, users can run a certain set of tests, using a regular expression. .. code-block:: console $ rally verify start --pattern tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON 2017-01-18 17:00:36.590 12745 INFO rally.api [-] Starting verification (UUID=1e12510e-7391-48ed-aba2-8fefe1075a87) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:00:44.241 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram ... 
success [1.044s] 2017-01-18 17:00:45.108 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_verify_entry_in_list_details ... success [0.868s] 2017-01-18 17:00:45.863 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_int_id ... success [0.754s] 2017-01-18 17:00:47.575 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_none_id ... success [1.712s] 2017-01-18 17:00:48.260 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_uuid_id ... success [0.684s] 2017-01-18 17:00:50.951 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_list_flavor_without_extra_data ... success [2.689s] 2017-01-18 17:00:51.631 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_server_with_non_public_flavor ... success [0.680s] 2017-01-18 17:00:54.192 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_is_public_string_variations ... success [2.558s] 2017-01-18 17:00:55.102 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_list_non_public_flavor ... success [0.911s] 2017-01-18 17:00:55.774 12745 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_list_public_flavor_with_other_user ... success [0.673s] 2017-01-18 17:00:59.602 12745 INFO rally.api [-] Verification (UUID=1e12510e-7391-48ed-aba2-8fefe1075a87) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 10 tests in 14.578 sec. 
- Success: 10 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Using verification (UUID=1e12510e-7391-48ed-aba2-8fefe1075a87) as the default verification for the future operations. In such a way it is possible to run tests from a certain directory or class, and even run a single test. .. code-block:: console $ rally verify start --pattern tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram 2017-01-18 17:01:43.993 12819 INFO rally.api [-] Starting verification (UUID=b9a386e1-d1a1-41b3-b369-9607173de63e) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:01:52.592 12819 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram ... success [1.214s] 2017-01-18 17:01:57.220 12819 INFO rally.api [-] Verification (UUID=b9a386e1-d1a1-41b3-b369-9607173de63e) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 1 tests in 4.139 sec. - Success: 1 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Using verification (UUID=b9a386e1-d1a1-41b3-b369-9607173de63e) as the default verification for the future operations. In order to see errors of failed tests after the verification finished use the **--detailed** argument. .. code-block:: console $ rally verify start --pattern tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON --detailed 2017-01-25 19:34:41.113 16123 INFO rally.api [-] Starting verification (UUID=ceb6f26b-5830-42c5-ab09-bfd985ed4cb7) for deployment 'tempest-2' (UUID=38a397d0-ee11-475d-ab08-e17be09d0bcd) by verifier 'tempest-verifier' (UUID=bbf51ada-9dd6-4b25-b1b6-b651e0541dde). 
2017-01-25 19:34:50.188 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_create_server_with_az ... fail [0.784s] 2017-01-25 19:34:51.587 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_get_details ... success [1.401s] 2017-01-25 19:34:52.947 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_list ... success [1.359s] 2017-01-25 19:34:53.863 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_remove_host ... success [0.915s] 2017-01-25 19:34:54.577 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_delete ... success [0.714s] 2017-01-25 19:34:55.221 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_delete_with_az ... success [0.643s] 2017-01-25 19:34:55.974 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_update_metadata_get_details ... success [0.752s] 2017-01-25 19:34:56.689 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_update_with_az ... success [0.714s] 2017-01-25 19:34:57.144 16123 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_verify_entry_in_list ... success [0.456s] 2017-01-25 19:35:01.132 16123 INFO rally.api [-] Verification (UUID=ceb6f26b-5830-42c5-ab09-bfd985ed4cb7) has been successfully finished for deployment 'tempest-2' (UUID=38a397d0-ee11-475d-ab08-e17be09d0bcd)! 
============================= Failed 1 test - output below: ============================= tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_create_server_with_az --------------------------------------------------------------------------------------------------------------- Traceback (most recent call last): File "tempest/api/compute/admin/test_aggregates.py", line 226, in test_aggregate_add_host_create_server_with_az self.client.add_host(aggregate['id'], host=self.host) File "tempest/lib/services/compute/aggregates_client.py", line 95, in add_host post_body) File "tempest/lib/common/rest_client.py", line 275, in post return self.request('POST', url, extra_headers, headers, body, chunked) File "tempest/lib/services/compute/base_compute_client.py", line 48, in request method, url, extra_headers, headers, body, chunked) File "tempest/lib/common/rest_client.py", line 663, in request self._error_checker(resp, resp_body) File "tempest/lib/common/rest_client.py", line 775, in _error_checker raise exceptions.Conflict(resp_body, resp=resp) tempest.lib.exceptions.Conflict: An object with that identifier already exists Details: {u'message': u"Cannot add host to aggregate 2658. Reason: One or more hosts already in availability zone(s) [u'tempest-test_az-34611847'].", u'code': 409} ====== Totals ====== Ran: 9 tests in 12.391 sec. - Success: 8 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 1 Using verification (UUID=ceb6f26b-5830-42c5-ab09-bfd985ed4cb7) as the default verification for the future operations. Also, there is a possibility to run Tempest tests from a file. Users can specify a list of tests in the file and run them, using the **--load-list** argument. .. 
code-block:: console $ cat load-list.txt tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_create_list_delete_endpoint[id-9974530a-aa28-4362-8403-f06db02b26c1] tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_list_endpoints[id-11f590eb-59d8-4067-8b2b-980c7f387f51] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_assign_user_role[id-0146f675-ffbd-4208-b3a4-60eb628dbc5e] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_get_role_by_id[id-db6870bd-a6ed-43be-a9b1-2f10a5c9994f] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_roles[id-75d9593f-50b7-4fcf-bd64-e3fb4a278e23] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_user_roles[id-262e1e3e-ed71-4edd-a0e5-d64e83d66d05] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_remove_user_role[id-f0b9292c-d3ba-4082-aa6c-440489beef69] tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_role_create_delete[id-c62d909d-6c21-48c0-ae40-0a0760e6db5e] .. code-block:: console $ rally verify start --load-list load-list.txt 2017-01-18 17:04:13.900 12964 INFO rally.api [-] Starting verification (UUID=af766b2f-cada-44db-a0c2-336ab0c17c27) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:04:21.813 12964 INFO tempest-verifier [-] {1} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_create_list_delete_endpoint ... success [1.237s] 2017-01-18 17:04:22.115 12964 INFO tempest-verifier [-] {1} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_list_endpoints ... success [0.301s] 2017-01-18 17:04:24.507 12964 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_assign_user_role ... success [3.663s] 2017-01-18 17:04:25.164 12964 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_get_role_by_id ... 
success [0.657s] 2017-01-18 17:04:25.435 12964 INFO tempest-verifier [-] {2} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_roles ... success [0.271s] 2017-01-18 17:04:27.905 12964 INFO tempest-verifier [-] {2} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_user_roles ... success [2.468s] 2017-01-18 17:04:30.645 12964 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_remove_user_role ... success [2.740s] 2017-01-18 17:04:31.886 12964 INFO tempest-verifier [-] {3} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_role_create_delete ... success [1.239s] 2017-01-18 17:04:38.122 12964 INFO rally.api [-] Verification (UUID=af766b2f-cada-44db-a0c2-336ab0c17c27) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 8 tests in 14.748 sec. - Success: 8 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Using verification (UUID=af766b2f-cada-44db-a0c2-336ab0c17c27) as the default verification for the future operations. Moreover, it is possible to skip a certain list of Tempest tests, using the **--skip-list** argument. .. code-block:: console $ cat skip-list.yaml tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram[id-3b541a2e-2ac2-4b42-8b8d-ba6e22fcd4da]: tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_verify_entry_in_list_details[id-8261d7b0-be58-43ec-a2e5-300573c3f6c5]: Reason 1 tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_int_id[id-8b4330e1-12c4-4554-9390-e6639971f086]: tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_none_id[id-f83fe669-6758-448a-a85e-32d351f36fe0]: Reason 2 tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_uuid_id[id-94c9bb4e-2c2a-4f3c-bb1f-5f0daf918e6d]: .. 
code-block:: console $ rally verify start --pattern tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON --skip-list skip-list.yaml 2017-01-18 17:13:44.475 13424 INFO rally.api [-] Starting verification (UUID=ec94b397-b546-4f12-82ba-bb17f052c3d0) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:13:49.298 13424 INFO tempest-verifier [-] {-} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_int_id ... skip 2017-01-18 17:13:49.298 13424 INFO tempest-verifier [-] {-} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_none_id ... skip: Reason 2 2017-01-18 17:13:49.298 13424 INFO tempest-verifier [-] {-} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_using_string_ram ... skip 2017-01-18 17:13:49.298 13424 INFO tempest-verifier [-] {-} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_with_uuid_id ... skip 2017-01-18 17:13:49.299 13424 INFO tempest-verifier [-] {-} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_flavor_verify_entry_in_list_details ... skip: Reason 1 2017-01-18 17:13:54.035 13424 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_list_flavor_without_extra_data ... success [1.889s] 2017-01-18 17:13:54.765 13424 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_create_server_with_non_public_flavor ... success [0.732s] 2017-01-18 17:13:57.478 13424 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_is_public_string_variations ... success [2.709s] 2017-01-18 17:13:58.438 13424 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_list_non_public_flavor ... 
success [0.962s] 2017-01-18 17:13:59.180 13424 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_flavors.FlavorsAdminTestJSON.test_list_public_flavor_with_other_user ... success [0.742s] 2017-01-18 17:14:03.969 13424 INFO rally.api [-] Verification (UUID=ec94b397-b546-4f12-82ba-bb17f052c3d0) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 10 tests in 9.882 sec. - Success: 5 - Skipped: 5 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Using verification (UUID=ec94b397-b546-4f12-82ba-bb17f052c3d0) as the default verification for the future operations. Also, it is possible to specify the path to a file with a list of Tempest tests that are expected to fail. In this case, the specified tests will have the **xfail** status instead of **fail**. .. code-block:: console $ cat xfail-list.yaml tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_create_server_with_az[id-96be03c7-570d-409c-90f8-e4db3c646996]: Some reason why the test fails .. code-block:: console $ rally verify start --pattern tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON --xfail-list xfail-list.yaml 2017-01-18 17:20:04.064 13720 INFO rally.api [-] Starting verification (UUID=c416b724-0276-4c24-ab60-3ba7078c0a80) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:20:17.359 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_create_server_with_az ... xfail [6.328s]: Some reason why the test fails 2017-01-18 17:20:18.337 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_get_details ... 
success [0.978s] 2017-01-18 17:20:19.379 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_host_list ... success [1.042s] 2017-01-18 17:20:20.213 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_add_remove_host ... success [0.833s] 2017-01-18 17:20:20.956 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_delete ... success [0.743s] 2017-01-18 17:20:21.772 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_delete_with_az ... success [0.815s] 2017-01-18 17:20:22.737 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_update_metadata_get_details ... success [0.964s] 2017-01-18 17:20:25.061 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_update_with_az ... success [2.323s] 2017-01-18 17:20:25.595 13720 INFO tempest-verifier [-] {0} tempest.api.compute.admin.test_aggregates.AggregatesAdminTestJSON.test_aggregate_create_verify_entry_in_list ... success [0.533s] 2017-01-18 17:20:30.142 13720 INFO rally.api [-] Verification (UUID=c416b724-0276-4c24-ab60-3ba7078c0a80) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 9 tests in 17.118 sec. - Success: 8 - Skipped: 0 - Expected failures: 1 - Unexpected success: 0 - Failures: 0 Using verification (UUID=c416b724-0276-4c24-ab60-3ba7078c0a80) as the default verification for the future operations. Sometimes users may want to use the specific concurrency for running tests based on their deployments and available resources. 
In this case, they can use the **--concurrency** argument to specify how many processes to use to run Tempest tests. The default value (0) auto-detects CPU count. .. code-block:: console $ rally verify start --load-list load-list.txt --concurrency 1 2017-01-18 17:05:38.658 13054 INFO rally.api [-] Starting verification (UUID=cbf5e604-6bc9-47cd-9c8c-5e4c9e9545a0) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:05:45.474 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_create_list_delete_endpoint ... success [0.917s] 2017-01-18 17:05:45.653 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_list_endpoints ... success [0.179s] 2017-01-18 17:05:55.497 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_assign_user_role ... success [2.673s] 2017-01-18 17:05:56.237 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_get_role_by_id ... success [0.740s] 2017-01-18 17:05:56.642 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_roles ... success [0.403s] 2017-01-18 17:06:00.011 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_user_roles ... success [3.371s] 2017-01-18 17:06:02.987 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_remove_user_role ... success [2.973s] 2017-01-18 17:06:04.927 13054 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_role_create_delete ... 
success [1.939s] 2017-01-18 17:06:11.166 13054 INFO rally.api [-] Verification (UUID=cbf5e604-6bc9-47cd-9c8c-5e4c9e9545a0) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 8 tests in 23.043 sec. - Success: 8 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Using verification (UUID=cbf5e604-6bc9-47cd-9c8c-5e4c9e9545a0) as the default verification for the future operations. Also, there is a possibility to rerun tests from any verification. In order to rerun tests from some verification execute the following command: .. code-block:: console $ rally verify rerun --uuid cbf5e604-6bc9-47cd-9c8c-5e4c9e9545a0 2017-01-18 17:29:35.692 14127 INFO rally.api [-] Re-running tests from verification (UUID=cbf5e604-6bc9-47cd-9c8c-5e4c9e9545a0) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97). 2017-01-18 17:29:35.792 14127 INFO rally.api [-] Starting verification (UUID=51aa3275-f028-4f2d-9d63-0db679fdf266) for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97) by verifier 'tempest-verifier' (UUID=16b73e48-09ad-4a54-92eb-2f2708b72c54). 2017-01-18 17:29:43.980 14127 INFO tempest-verifier [-] {1} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_create_list_delete_endpoint ... success [2.172s] 2017-01-18 17:29:44.156 14127 INFO tempest-verifier [-] {1} tempest.api.identity.admin.v2.test_endpoints.EndPointsTestJSON.test_list_endpoints ... success [0.177s] 2017-01-18 17:29:45.333 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_assign_user_role ... success [3.302s] 2017-01-18 17:29:45.952 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_get_role_by_id ... success [0.619s] 2017-01-18 17:29:46.219 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_roles ... 
success [0.266s] 2017-01-18 17:29:48.964 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_list_user_roles ... success [2.744s] 2017-01-18 17:29:52.543 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_remove_user_role ... success [3.578s] 2017-01-18 17:29:53.843 14127 INFO tempest-verifier [-] {0} tempest.api.identity.admin.v2.test_roles.RolesTestJSON.test_role_create_delete ... success [1.300s] 2017-01-18 17:30:01.258 14127 INFO rally.api [-] Verification (UUID=51aa3275-f028-4f2d-9d63-0db679fdf266) has been successfully finished for deployment 'tempest-2' (UUID=351fdfa2-99ad-4447-ba31-22e76630df97)! ====== Totals ====== Ran: 8 tests in 14.926 sec. - Success: 8 - Skipped: 0 - Expected failures: 0 - Unexpected success: 0 - Failures: 0 Verification UUID: 51aa3275-f028-4f2d-9d63-0db679fdf266. In order to rerun only failed tests add the **--failed** argument to the command. .. code-block:: console $ rally verify rerun --uuid --failed A separated page about building verification reports: :ref:`verification-reports`. rally-0.9.1/doc/source/quick_start/tutorial/step_5_task_templates.rst0000664000567000056710000002353013073417716027313 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_5_task_templates: Step 5. Rally task templates ============================ .. 
contents:: :local: Basic template syntax --------------------- A nice feature of the input task format used in Rally is that it supports the **template syntax** based on `Jinja2`_. This turns out to be extremely useful when, say, you have a fixed structure of your task but you want to parameterize this task in some way. For example, imagine your input task file (*task.yaml*) runs a set of Nova scenarios: .. code-block:: yaml --- NovaServers.boot_and_delete_server: - args: flavor: name: "m1.tiny" image: name: "^cirros.*-disk$" runner: type: "constant" times: 2 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 NovaServers.resize_server: - args: flavor: name: "m1.tiny" image: name: "^cirros.*-disk$" to_flavor: name: "m1.small" runner: type: "constant" times: 3 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 In both scenarios above, the *"^cirros.*-disk$"* image is passed to the scenario as an argument (so that these scenarios use an appropriate image while booting servers). Let’s say you want to run the same set of scenarios with the same runner/context/sla, but you want to try another image while booting server to compare the performance. The most elegant solution is then to turn the image name into a template variable: .. code-block:: yaml --- NovaServers.boot_and_delete_server: - args: flavor: name: "m1.tiny" image: name: {{image_name}} runner: type: "constant" times: 2 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 NovaServers.resize_server: - args: flavor: name: "m1.tiny" image: name: {{image_name}} to_flavor: name: "m1.small" runner: type: "constant" times: 3 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 and then pass the argument value for **{{image_name}}** when starting a task with this configuration file. Rally provides you with different ways to do that: 1. Pass the argument values directly in the command-line interface (with either a JSON or YAML dictionary): .. 
code-block:: bash rally task start task.yaml --task-args '{"image_name": "^cirros.*-disk$"}' rally task start task.yaml --task-args 'image_name: "^cirros.*-disk$"' 2. Refer to a file that specifies the argument values (JSON/YAML): .. code-block:: bash rally task start task.yaml --task-args-file args.json rally task start task.yaml --task-args-file args.yaml where the files containing argument values should look as follows: *args.json*: .. code-block:: json { "image_name": "^cirros.*-disk$" } *args.yaml*: .. code-block:: yaml --- image_name: "^cirros.*-disk$" Passed in either way, these parameter values will be substituted by Rally when starting a task: .. code-block:: console $ rally task start task.yaml --task-args "image_name: "^cirros.*-disk$"" -------------------------------------------------------------------------------- Preparing input task -------------------------------------------------------------------------------- Input task is: --- NovaServers.boot_and_delete_server: - args: flavor: name: "m1.tiny" image: name: ^cirros.*-disk$ runner: type: "constant" times: 2 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 NovaServers.resize_server: - args: flavor: name: "m1.tiny" image: name: ^cirros.*-disk$ to_flavor: name: "m1.small" runner: type: "constant" times: 3 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 -------------------------------------------------------------------------------- Task cbf7eb97-0f1d-42d3-a1f1-3cc6f45ce23f: started -------------------------------------------------------------------------------- Benchmarking... This can take a while... Using the default values ------------------------ Note that the ``Jinja2`` template syntax allows you to set the default values for your parameters. With default values set, your task file will work even if you don't parameterize it explicitly while starting a task. The default values should be set using the *{% set ... %}* clause (*task.yaml*): .. 
code-block:: yaml {% set image_name = image_name or "^cirros.*-disk$" %} --- NovaServers.boot_and_delete_server: - args: flavor: name: "m1.tiny" image: name: {{image_name}} runner: type: "constant" times: 2 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 ... If you don't pass the value for *{{image_name}}* while starting a task, the default one will be used: .. code-block:: console $ rally task start task.yaml -------------------------------------------------------------------------------- Preparing input task -------------------------------------------------------------------------------- Input task is: --- NovaServers.boot_and_delete_server: - args: flavor: name: "m1.tiny" image: name: ^cirros.*-disk$ runner: type: "constant" times: 2 concurrency: 1 context: users: tenants: 1 users_per_tenant: 1 ... Advanced templates ------------------ Rally makes it possible to use all the power of ``Jinja2`` template syntax, including the mechanism of **built-in functions**. This enables you to construct elegant task files capable of generating complex load on your cloud. As an example, let us make up a task file that will create new users with increasing concurrency. The input task file (*task.yaml*) below uses the ``Jinja2`` **for-endfor** construct to accomplish that: .. code-block:: yaml --- KeystoneBasic.create_user: {% for i in range(2, 11, 2) %} - args: {} runner: type: "constant" times: 10 concurrency: {{i}} sla: failure_rate: max: 0 {% endfor %} In this case, you don’t need to pass any arguments via *--task-args/--task-args-file*, but as soon as you start this task, Rally will automatically unfold the for-loop for you: .. 
code-block:: console $ rally task start task.yaml -------------------------------------------------------------------------------- Preparing input task -------------------------------------------------------------------------------- Input task is: --- KeystoneBasic.create_user: - args: {} runner: type: "constant" times: 10 concurrency: 2 sla: failure_rate: max: 0 - args: {} runner: type: "constant" times: 10 concurrency: 4 sla: failure_rate: max: 0 - args: {} runner: type: "constant" times: 10 concurrency: 6 sla: failure_rate: max: 0 - args: {} runner: type: "constant" times: 10 concurrency: 8 sla: failure_rate: max: 0 - args: {} runner: type: "constant" times: 10 concurrency: 10 sla: failure_rate: max: 0 -------------------------------------------------------------------------------- Task ea7e97e3-dd98-4a81-868a-5bb5b42b8610: started -------------------------------------------------------------------------------- Benchmarking... This can take a while... As you can see, the Rally task template syntax is a simple but powerful mechanism that not only enables you to write elegant task configurations, but also makes them more readable for other people. When used appropriately, it can really improve the understanding of your benchmarking procedures in Rally when shared with others. .. references: .. _Jinja2: https://pypi.python.org/pypi/Jinja2 rally-0.9.1/doc/source/quick_start/tutorial/step_0_installation.rst0000664000567000056710000000323113073417716026763 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. .. _tutorial_step_0_installation: Step 0. Installation ==================== The easiest way to install Rally is by running its `installation script`_: .. code-block:: bash wget -q -O- https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh | bash # or using curl: curl https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh | bash If you execute the script as regular user, Rally will create a new virtual environment in ``~/rally/`` and install in it Rally, and will use `sqlite` as database backend. If you execute the script as root, Rally will be installed system wide. For more installation options, please refer to the :ref:`installation ` page. **Note:** Rally requires Python version 2.7 or 3.4. Now that you have Rally installed, you are ready to start :ref:`benchmarking OpenStack with it `! .. references: .. _installation script: https://raw.githubusercontent.com/openstack/rally/master/install_rally.sh rally-0.9.1/doc/source/quick_start/gates.rst0000664000567000056710000001265613073417716022263 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _gates: Rally OpenStack Gates ===================== Gate jobs --------- The **OpenStack CI system** uses the so-called **"Gate jobs"** to control merges of patches submitted for review on Gerrit. 
These **Gate jobs** usually just launch a set of tests -- unit, functional, integration, style -- that check that the proposed patch does not break the software and can be merged into the target branch, thus providing additional guarantees for the stability of the software. Create a custom Rally Gate job ------------------------------ You can create a **Rally Gate job** for your project to run Rally benchmarks against the patchsets proposed to be merged into your project. To create a rally-gate job, you should create a **rally-jobs/** directory at the root of your project. As a rule, this directory contains only **{projectname}.yaml**, but more scenarios and jobs can be added as well. This yaml file is in fact an input Rally task file specifying benchmark scenarios that should be run in your gate job. To make *{projectname}.yaml* run in gates, you need to add *"rally-jobs"* to the "jobs" section of *projects.yaml* in *openstack-infra/project-config*. Example: Rally Gate job for Glance ---------------------------------- Let's take a look at an example for the `Glance`_ project: Edit *jenkins/jobs/projects.yaml:* .. parsed-literal:: - project: name: glance node: 'bare-precise || bare-trusty' tarball-site: tarballs.openstack.org doc-publisher-site: docs.openstack.org jobs: - python-jobs - python-icehouse-bitrot-jobs - python-juno-bitrot-jobs - openstack-publish-jobs - translation-jobs **- rally-jobs** Also add *gate-rally-dsvm-{projectname}* to *zuul/layout.yaml*: .. 
parsed-literal:: - name: openstack/glance template: - name: merge-check - name: python26-jobs - name: python-jobs - name: openstack-server-publish-jobs - name: openstack-server-release-jobs - name: periodic-icehouse - name: periodic-juno - name: check-requirements - name: integrated-gate - name: translation-jobs - name: large-ops - name: experimental-tripleo-jobs check: - check-devstack-dsvm-cells **- gate-rally-dsvm-glance** gate: - gate-devstack-dsvm-cells experimental: - gate-grenade-dsvm-forward To add one more scenario and job, you need to add *{scenarioname}.yaml* file here, and *gate-rally-dsvm-{scenarioname}* to *projects.yaml*. For example, you can add *myscenario.yaml* to *rally-jobs* directory in your project and then edit *jenkins/jobs/projects.yaml* in this way: .. parsed-literal:: - project: name: glance github-org: openstack node: bare-precise tarball-site: tarballs.openstack.org doc-publisher-site: docs.openstack.org jobs: - python-jobs - python-havana-bitrot-jobs - openstack-publish-jobs - translation-jobs - rally-jobs **- 'gate-rally-dsvm-{name}': name: myscenario** Finally, add *gate-rally-dsvm-myscenario* to *zuul/layout.yaml*: .. parsed-literal:: - name: openstack/glance template: - name: python-jobs - name: openstack-server-publish-jobs - name: periodic-havana - name: check-requirements - name: integrated-gate check: - check-devstack-dsvm-cells - check-tempest-dsvm-postgres-full - gate-tempest-dsvm-large-ops - gate-tempest-dsvm-neutron-large-ops **- gate-rally-dsvm-myscenario** It is also possible to arrange your input task files as templates based on ``Jinja2``. Say, you want to set the image names used throughout the *myscenario.yaml* task file as a variable parameter. Then, replace concrete image names in this file with a variable: .. code-block:: yaml ... NovaServers.boot_and_delete_server: - args: image: name: {{image_name}} ... NovaServers.boot_and_list_server: - args: image: name: {{image_name}} ... 
and create a file named *myscenario_args.yaml* that will define the parameter values: .. code-block:: yaml --- image_name: "^cirros.*-disk$" this file will be automatically used by Rally to substitute the variables in *myscenario.yaml*. Plugins & Extras in Rally Gate jobs ----------------------------------- Along with scenario configs in yaml, the **rally-jobs** directory can also contain two subdirectories: - **plugins**: :ref:`Plugins ` needed for your gate job; - **extra**: auxiliary files like bash scripts or images. Both subdirectories will be copied to *~/.rally/* before the job gets started. .. references: .. _Glance: https://wiki.openstack.org/wiki/Glance rally-0.9.1/doc/source/quick_start/tutorial.rst0000664000567000056710000000266113073417716023016 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _tutorial: Rally step-by-step ================== In the following tutorial, we will guide you step-by-step through different use cases that might occur in Rally, starting with the easy ones and moving towards more complicated cases. .. 
toctree:: :glob: :maxdepth: 1 tutorial/step_0_installation tutorial/step_1_setting_up_env_and_running_benchmark_from_samples tutorial/step_2_input_task_format tutorial/step_3_benchmarking_with_existing_users tutorial/step_4_adding_success_criteria_for_benchmarks tutorial/step_5_task_templates tutorial/step_6_aborting_load_generation_on_sla_failure tutorial/step_7_working_with_multple_openstack_clouds tutorial/step_8_discovering_more_plugins tutorial/step_9_deploying_openstack tutorial/step_10_verifying_cloud_via_tempest_verifier rally-0.9.1/doc/source/quick_start/index.rst0000664000567000056710000000175313073417716022263 0ustar jenkinsjenkins00000000000000.. Copyright 2015 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =========== Quick start =========== This section will guide you through all steps of using Rally - from installation to its advanced usage in different use cases (including running Rally in OpenStack CI system gates to control merges of patches submitted for review on Gerrit code review system). .. toctree:: :glob: :maxdepth: 2 tutorial gates rally-0.9.1/doc/source/task/0000775000567000056710000000000013073420067017016 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/task/index.rst0000664000567000056710000002343313073417716020673 0ustar jenkinsjenkins00000000000000.. 
_task-component: ============== Task Component ============== This section describes Rally Task Component (including feature presented since Rally v0.5.0, allowing to analyze statistics trends for the given tasks). .. contents:: :depth: 2 :local: HTML Reports ============ HTML reports provide comprehensive analysis. Data is structured and displayed interactively, with charts and tables. Task Report ----------- Get the whole information about task workloads results, in pretty and convenient format! .. image:: ../images/Report-Collage.png Generate report for single task, using task UUID ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Having a finished task, generate report with command: .. code-block:: shell $ rally task report --out Example: .. code-block:: shell $ rally task report 6f63d9ec-eecd-4696-8e9c-2ba065c68535 --out report.html Generate report for single task, using JSON file ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Report can be generated from a task results JSON file. This file can be generated with command *rally task results*: .. code-block:: shell $ rally task results 6f63d9ec-eecd-4696-8e9c-2ba065c68535 > results.json $ rally task report results.json --out report.html Generate report for many tasks ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Report can be generated from many tasks. All workloads from specified tasks results will be composed into an entire report. To generate report, use *--tasks* argument with specified list of tasks UUIDs and/or tasks results JSON files. Example: .. code-block:: shell $ rally task report --tasks 6f63d9ec-eecd-4696-8e9c-2ba065c68535 20ae7e95-7395-4be4-aec2-b89220adee60 a5737eba-a204-43d6-a262-d5ea4b0065da results.json another_results.json --out report.html Task Overview ~~~~~~~~~~~~~ This is a table with brief summary of all workloads results. All columns are sortable and clickable. .. image:: ../images/Report-Task-Overview.png Load duration +++++++++++++ Time from first iteration start to last iteration end. 
In other words, this is a time of all workload iterations execution. Full duration +++++++++++++ This time includes iterations time (`Load duration <#load-duration>`_) plus time taken by another actions related to the task, mostly Contexts execution time. Iterations ++++++++++ How many times the workload has run. This comes from the value of *runner.times* in task input file. Failures ++++++++ Number of failed iterations. Failure means that there was an Exception raised. Success (SLA) +++++++++++++ This is a boolean result of workload SLA. See `Service-level agreement explanation <#id2>`_ below. Input file ~~~~~~~~~~ This shows JSON which can be used to run a task with exactly the same workloads list and configuration. This is not an exact copy (neither concatenation) of actually used input files (in command *rally task start*), however this is exactly what is needed to run workloads given in the report. .. image:: ../images/Report-Task-Input-file.png Tab «Overview» ~~~~~~~~~~~~~~ Service-level agreement +++++++++++++++++++++++ `SLA`_ results appear in task report only if *"sla"* section is defined in task input file. For example, having this in task input file: .. code-block:: json "sla": { "performance_degradation": { "max_degradation": 50 }, "max_seconds_per_iteration": 1.0, "failure_rate": { "max": 0 }, "outliers": { "max": 1, "min_iterations": 10, "sigmas": 10 }, "max_avg_duration": 0.5 } will result SLA section similar to the following: .. image:: ../images/Report-Task-SLA.png What if workload has no "sla" configuration in input file? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ If *"sla"* section is missed in input file, then block *Service-level agreement* is not displayed and its result is assumed to be always passed (no matter how many failures occurred). Total durations +++++++++++++++ There is a durations analysis, which is represented by statistics table and duration StackedArea chart. .. 
image:: ../images/Report-Task-Total-durations.png Table with statistics data ^^^^^^^^^^^^^^^^^^^^^^^^^^ **Action** Name of the workload metric that has some duration saved. This is either an atomic action name or *Total* which points to workload `load duration <#load-duration>`_. **Min (sec)** `Minimal`_ duration value **Median (sec)** `Median`_ duration value **90%ile (sec)** `Percentile`_ for 90% durations **95%ile (sec)** `Percentile`_ for 95% durations **Max (sec)** `Maximal`_ duration value **Avg (sec)** `Average`_ duration value **Success** Percent of successful runs. This is how many percent of this action runs (number of runs is given in *Count* column) were successful. **Count** Number of actually run atomic actions. This can differ from `iterations count <#iterations>`_ because some atomic actions do not start if some exception is raised before in the workload runtime (for example in previous atomic action). StackedArea with durations per iteration ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ This chart shows `load_duration <#load-duration>`_ and `idle_duration <#id5>`_ values per iteration. If there is only one iteration, then chart is useless so it is hidden. Idle duration ^^^^^^^^^^^^^ Sometimes workload does nothing for some reason (waiting for something or just making a dummy load). This is achieved by calling *time.sleep()* and spent time is called *idle duration*. Load Profile ++++++++++++ `Load profile`_ chart shows number of iterations running in parallel for each workload moment: .. image:: ../images/Report-Task-Load-profile.png Distribution ++++++++++++ Pie chart shows percent of successful and failed `iterations <#iterations>`_. Histogram shows durations distribution with the following `methods`_ (selected in dropdown list): **Square Root Choice**, **Sturges Formula**, **Rise Rule** .. 
image:: ../images/Report-Task-Distribution.png Tab «Details» ~~~~~~~~~~~~~ Atomic Action Durations +++++++++++++++++++++++ There is a StackedArea chart that shows atomic actions durations per iteration. If there is only one iteration, then chart is useless so it is hidden. .. image:: ../images/Report-Task-Actions-durations.png Distribution ++++++++++++ `Distribution <#distribution>`_ for atomic actions durations Tab «Scenario Data» ~~~~~~~~~~~~~~~~~~~ This tab only appears if workload provides some custom output via method *Scenario.add_output()*. Aggregated ++++++++++ This shows charts with data aggregated from all iterations. This means that each X axis point represents an iteration, so each iteration provided some values that are aggregated into charts or tables. .. image:: ../images/Report-Task-Scenario-Data-Aggregated.png Per iteration +++++++++++++ Each iteration can create its own, complete charts and tables. .. image:: ../images/Report-Task-Scenario-Data-Per-iteration.png Tab «Failures» ++++++++++++++ Complete information about exceptions raised during the workload run **Iteration** Number of iteration where exception is occurred **Exception type** Type of raised Exception subclass **Exception message** Message delivered by the exception Click on a row expands it with exception traceback. .. image:: ../images/Report-Task-Failures.png Tab «Input Task» ~~~~~~~~~~~~~~~~ This shows JSON for input file which can be used to run current workload. .. image:: ../images/Report-Task-Subtask-configuration.png Trends Report ------------- If same workload is run several times, some results of these runs can be compared. Compared metrics are ssuccess rate (percent of successful iterations) and statistics for durations. How to generate trends report ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Use command *rally task trends* with given tasks UUIDs and/or tasks results JSON files and the name of desired output file. Example: .. 
code-block:: shell $ rally task trends --tasks 6f63d9ec-eecd-4696-8e9c-2ba065c68535 a5737eba-a204-43d6-a262-d5ea4b0065da --out trends.html What is an order of workload runs? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Workload run number in shown on charts X axis, the order of runs is exactly as it comes from tasks data in the moment of report generation. Trends overview ~~~~~~~~~~~~~~~ .. image:: ../images/Report-Trends-Overview.png If workload has been actually run only once ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ That is obvious that it is not possible to have trend for a single value. There should be at least two workload runs to make results comparison possible. So in this case there is only a help message displayed. .. image:: ../images/Report-Trends-single-run.png Tab «Total» ~~~~~~~~~~~ Total durations +++++++++++++++ Shows workload `load_duration <#load-duration>`_ statistics trends. Total success rate ++++++++++++++++++ Shows trends for percent of successful iterations .. image:: ../images/Report-Trends-Total.png Tab «Atomic actions» ++++++++++++++++++++ Statistics trends for atomic actions durations. Charts are same as for total durations. .. image:: ../images/Report-Trends-Atomic-actions.png Tab «Configuration» +++++++++++++++++++ Here is a configuration JSON for current workload. .. image:: ../images/Report-Trends-Configuration.png CLI References ============== For more information regarding Rally Task Component CLI please proceed to `CLI reference <../cli/cli_reference.html#category-task>`_ .. references: .. _SLA: https://en.wikipedia.org/wiki/Service-level_agreement .. _Minimal: https://en.wikipedia.org/wiki/Maxima_and_minima .. _Median: https://en.wikipedia.org/wiki/Median .. _Percentile: https://en.wikipedia.org/wiki/Percentile .. _Maximal: https://en.wikipedia.org/wiki/Maxima_and_minima .. _Average: https://en.wikipedia.org/wiki/Average .. _Load profile: https://en.wikipedia.org/wiki/Load_profile .. 
_methods: https://en.wikipedia.org/wiki/Histogram rally-0.9.1/doc/source/miscellaneous/0000775000567000056710000000000013073420067020717 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/source/miscellaneous/concepts.rst0000664000567000056710000003434413073417716023306 0ustar jenkinsjenkins00000000000000.. Copyright 2014 Mirantis Inc. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _main_concepts: Main concepts of Rally ====================== Benchmark Scenarios ------------------- Concept ^^^^^^^ The concept of **benchmark scenarios** is a central one in Rally. Benchmark scenarios are what Rally actually uses to **test the performance of an OpenStack deployment**. They also play the role of main building blocks in the configurations of benchmark tasks. Each benchmark scenario performs a small **set of atomic operations**, thus testing some **simple use case**, usually that of a specific OpenStack project. For example, the **"NovaServers"** scenario group contains scenarios that use several basic operations available in **nova**. The **"boot_and_delete_server"** benchmark scenario from that group allows to benchmark the performance of a sequence of only **two simple operations**: it first **boots** a server (with customizable parameters) and then **deletes** it. User's view ^^^^^^^^^^^ From the user's point of view, Rally launches different benchmark scenarios while performing some benchmark task. 
**Benchmark task** is essentially a set of benchmark scenarios run against some OpenStack deployment in a specific (and customizable) manner by the CLI command: .. code-block:: bash rally task start --task= Accordingly, the user may specify the names and parameters of benchmark scenarios to be run in **benchmark task configuration files**. A typical configuration file would have the following contents: .. code-block:: json { "NovaServers.boot_server": [ { "args": { "flavor_id": 42, "image_id": "73257560-c59b-4275-a1ec-ab140e5b9979" }, "runner": {"times": 3}, "context": {...} }, { "args": { "flavor_id": 1, "image_id": "3ba2b5f6-8d8d-4bbe-9ce5-4be01d912679" }, "runner": {"times": 3}, "context": {...} } ], "CinderVolumes.create_volume": [ { "args": { "size": 42 }, "runner": {"times": 3}, "context": {...} } ] } In this example, the task configuration file specifies two benchmarks to be run, namely **"NovaServers.boot_server"** and **"CinderVolumes.create_volume"** (benchmark name = *ScenarioClassName.method_name*). Each benchmark scenario may be started several times with different parameters. In our example, that's the case with **"NovaServers.boot_server"**, which is used to test booting servers from different images & flavors. Note that inside each scenario configuration, the benchmark scenario is actually launched **3 times** (that is specified in the **"runner"** field). It can be specified in **"runner"** in more detail how exactly the benchmark scenario should be launched; we elaborate on that in the *"Scenario Runners"* section below. .. _ScenariosDevelopment: Developer's view ^^^^^^^^^^^^^^^^ From the developer's perspective, a benchmark scenario is a method marked by a **@configure** decorator and placed in a class that inherits from the base `Scenario`_. There may be arbitrary many benchmark scenarios in a scenario class; each of them should be referenced to (in the task configuration file) as *ScenarioClassName.method_name*. 
In a toy example below, we define a scenario class *MyScenario* with one benchmark scenario *MyScenario.scenario*. This benchmark scenario tests the performance of a sequence of 2 actions, implemented via private methods in the same class. Both methods are marked with the **@atomic_action_timer** decorator. This allows Rally to handle those actions in a special way and, after benchmarks complete, show runtime statistics not only for the whole scenarios, but for separate actions as well. .. code-block:: python from rally.task import atomic from rally.task import scenario class MyScenario(scenario.Scenario): """My class that contains benchmark scenarios.""" @atomic.action_timer("action_1") def _action_1(self, **kwargs): """Do something with the cloud.""" @atomic.action_timer("action_2") def _action_2(self, **kwargs): """Do something with the cloud.""" @scenario.configure() def scenario(self, **kwargs): self._action_1() self._action_2() Scenario runners ---------------- Concept ^^^^^^^ **Scenario Runners** in Rally are entities that control the execution type and order of benchmark scenarios. They support different running **strategies for creating load on the cloud**, including simulating *concurrent requests* from different users, periodic load, gradually growing load and so on. User's view ^^^^^^^^^^^ The user can specify which type of load on the cloud he would like to have through the **"runner"** section in the **task configuration file**: .. code-block:: json { "NovaServers.boot_server": [ { "args": { "flavor_id": 42, "image_id": "73257560-c59b-4275-a1ec-ab140e5b9979" }, "runner": { "type": "constant", "times": 15, "concurrency": 2 }, "context": { "users": { "tenants": 1, "users_per_tenant": 3 }, "quotas": { "nova": { "instances": 20 } } } } ] } The scenario running strategy is specified by its **type** and also by some type-specific parameters. 
Available types include: * **constant**, for creating a constant load by running the scenario for a fixed number of **times**, possibly in parallel (that's controlled by the *"concurrency"* parameter). * **constant_for_duration** that works exactly as **constant**, but runs the benchmark scenario until a specified number of seconds elapses (**"duration"** parameter). * **rps**, which executes benchmark scenarios with intervals between two consecutive runs, specified in the **"rps"** field in times per second. * **serial**, which is very useful to test new scenarios since it just runs the benchmark scenario for a fixed number of **times** in a single thread. Also, all scenario runners can be provided (again, through the **"runner"** section in the config file) with an optional *"timeout"* parameter, which specifies the timeout for each single benchmark scenario run (in seconds). .. _RunnersDevelopment: Developer's view ^^^^^^^^^^^^^^^^ It is possible to extend Rally with new Scenario Runner types, if needed. Basically, each scenario runner should be implemented as a subclass of the base `ScenarioRunner`_ class and located in the `rally.plugins.common.runners package`_. The interface each scenario runner class should support is fairly easy: .. code-block:: python from rally.task import runner from rally import consts class MyScenarioRunner(runner.ScenarioRunner): """My scenario runner.""" # This string is what the user will have to specify in the task # configuration file (in "runner": {"type": ...}) __execution_type__ = "my_scenario_runner" # CONFIG_SCHEMA is used to automatically validate the input # config of the scenario runner, passed by the user in the task # configuration file. 
CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "properties": { "type": { "type": "string" }, "some_specific_property": {...} } } def _run_scenario(self, cls, method_name, ctx, args): """Run the scenario 'method_name' from scenario class 'cls' with arguments 'args', given a context 'ctx'. This method should return the results dictionary wrapped in a runner.ScenarioRunnerResult object (not plain JSON) """ results = ... return runner.ScenarioRunnerResult(results) Benchmark contexts ------------------ Concept ^^^^^^^ The notion of **contexts** in Rally is essentially used to define different types of **environments** in which benchmark scenarios can be launched. Those environments are usually specified by such parameters as the number of **tenants and users** that should be present in an OpenStack project, the **roles** granted to those users, extended or narrowed **quotas** and so on. User's view ^^^^^^^^^^^ From the user's prospective, contexts in Rally are manageable via the **task configuration files**. In a typical configuration file, each benchmark scenario to be run is not only supplied by the information about its arguments and how many times it should be launched, but also with a special **"context"** section. In this section, the user may configure a number of contexts he needs his scenarios to be run within. In the example below, the **"users" context** specifies that the *"NovaServers.boot_server"* scenario should be run from **1 tenant** having **3 users** in it. Bearing in mind that the default quota for the number of instances is 10 instances per tenant, it is also reasonable to extend it to, say, **20 instances** in the **"quotas" context**. Otherwise the scenario would eventually fail, since it tries to boot a server 15 times from a single tenant. .. 
code-block:: json { "NovaServers.boot_server": [ { "args": { "flavor_id": 42, "image_id": "73257560-c59b-4275-a1ec-ab140e5b9979" }, "runner": { "type": "constant", "times": 15, "concurrency": 2 }, "context": { "users": { "tenants": 1, "users_per_tenant": 3 }, "quotas": { "nova": { "instances": 20 } } } } ] } .. _ContextDevelopment: Developer's view ^^^^^^^^^^^^^^^^ From the developer's view, contexts management is implemented via **Context classes**. Each context type that can be specified in the task configuration file corresponds to a certain subclass of the base `Context`_ class. Every context class should implement a fairly simple **interface**: .. code-block:: python from rally.task import context from rally import consts @context.configure(name="your_context", # Corresponds to the context field name in task configuration files order=100500, # a number specifying the priority with which the context should be set up hidden=False) # True if the context cannot be configured through the input task file class YourContext(context.Context): """Yet another context class.""" # The schema of the context configuration format CONFIG_SCHEMA = { "type": "object", "$schema": consts.JSON_SCHEMA, "additionalProperties": False, "properties": { "property_1": , "property_2": } } def __init__(self, context): super(YourContext, self).__init__(context) # Initialize the necessary stuff def setup(self): # Prepare the environment in the desired way def cleanup(self): # Cleanup the environment properly Consequently, the algorithm of initiating the contexts can be roughly seen as follows: .. code-block:: python context1 = Context1(ctx) context2 = Context2(ctx) context3 = Context3(ctx) context1.setup() context2.setup() context3.setup() context3.cleanup() context2.cleanup() context1.cleanup() - where the order of contexts in which they are set up depends on the value of their *order* attribute. 
Contexts with lower *order* have higher priority: *1xx* contexts are reserved for users-related stuff (e.g. users/tenants creation, roles assignment etc.), *2xx* - for quotas etc. The *hidden* attribute defines whether the context should be a *hidden* one. **Hidden contexts** cannot be configured by end-users through the task configuration file as shown above, but should be specified by a benchmark scenario developer through a special *@scenario.configure(context={...})* decorator. Hidden contexts are typically needed to satisfy some specific benchmark scenario-specific needs, which don't require the end-user's attention. For example, the hidden **"cleanup" context** (:mod:`rally.plugins.openstack.context.cleanup`) is used to make generic cleanup after running benchmark. So user can't change it configuration via task and break his cloud. If you want to dive deeper, also see the context manager (:mod:`rally.task.context`) class that actually implements the algorithm described above. .. references: .. _Scenario: https://github.com/openstack/rally/blob/0.1/rally/task/scenario.py#L94 .. _ScenarioRunner: https://github.com/openstack/rally/blob/master/rally/task/runner.py .. _rally.plugins.common.runners package: https://github.com/openstack/rally/tree/master/rally/plugins/common/runners .. _Context: https://github.com/openstack/rally/blob/master/rally/task/context.py rally-0.9.1/doc/source/cli_reference.rst0000664000567000056710000000020113073417716021373 0ustar jenkinsjenkins00000000000000.. _cli-reference: Command Line Interface ====================== .. contents:: :depth: 1 :local: .. make_cli_reference:: rally-0.9.1/doc/README.rst0000664000567000056710000000240213073417716016250 0ustar jenkinsjenkins00000000000000======================== Content of doc directory ======================== This directory contains everything that is related to documentation and bureaucracy. 
You can find here 4 subdirectories: feature_request ~~~~~~~~~~~~~~~ If some use case is not covered by Rally, it is the right place to request it. To request new feature you should just explain use case on high level. Technical details and writing code are not required at all. source ~~~~~~ Source of documentation. Latest version of documentation_. .. _documentation: http://rally.readthedocs.org/ specs ~~~~~ Specs are detailed description of proposed changes in project. Usually they answer on what, why, how to change in project and who is going to work on change. user_stories ~~~~~~~~~~~~ Place where you can share any of Rally user experience. E.g. fixing some bugs, measuring performance of different architectures or comparing different hardware and so on.. release_notes ~~~~~~~~~~~~~ The latest.rst_ contains new features and API changes of Rally's latest release. And you could find all old releases in archive_. .. _latest.rst: https://github.com/openstack/rally/blob/master/doc/release_notes/latest.rst .. _archive: https://github.com/openstack/rally/tree/master/doc/release_notes/archive rally-0.9.1/doc/user_stories/0000775000567000056710000000000013073420067017302 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/user_stories/nova/0000775000567000056710000000000013073420067020245 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/user_stories/nova/boot_server.rst0000664000567000056710000002060513073417716023342 0ustar jenkinsjenkins00000000000000========================================================================================== Finding a Keystone bug while benchmarking 20 node HA cloud performance at creating 400 VMs ========================================================================================== *(Contributed by Alexander Maretskiy, Mirantis)* Below we describe how we found a `bug in Keystone`_ and achieved 2x average performance increase at booting Nova servers after fixing that bug. 
Our initial goal was to benchmark the booting of a significant amount of servers on a cluster (running on a custom build of `Mirantis OpenStack`_ v5.1) and to ensure that this operation has reasonable performance and completes with no errors. Goal ---- - Get data on how a cluster behaves when a huge amount of servers is started - Get data on how good the neutron component is good in this case Summary ------- - Creating 400 servers with configured networking - Servers are being created simultaneously - 5 servers at the same time Hardware -------- Having a real hardware lab with 20 nodes: +--------+-------------------------------------------------------+ | Vendor | SUPERMICRO SUPERSERVER | +--------+-------------------------------------------------------+ | CPU | 12 cores, Intel(R) Xeon(R) CPU E5-2620 v2 @ 2.10GHz | +--------+-------------------------------------------------------+ | RAM | 32GB (4 x Samsung DDRIII 8GB) | +--------+-------------------------------------------------------+ | HDD | 1TB | +--------+-------------------------------------------------------+ Cluster ------- This cluster was created via Fuel Dashboard interface. 
+----------------------+--------------------------------------------+ | Deployment | Custom build of `Mirantis OpenStack`_ v5.1 | +----------------------+--------------------------------------------+ | OpenStack release | Icehouse | +----------------------+--------------------------------------------+ | Operating System | Ubuntu 12.04.4 | +----------------------+--------------------------------------------+ | Mode | High availability | +----------------------+--------------------------------------------+ | Hypervisor | KVM | +----------------------+--------------------------------------------+ | Networking | Neutron with GRE segmentation | +----------------------+--------------------------------------------+ | Controller nodes | 3 | +----------------------+--------------------------------------------+ | Compute nodes | 17 | +----------------------+--------------------------------------------+ Rally ----- **Version** For this benchmark, we use custom Rally with the following patch: https://review.openstack.org/#/c/96300/ **Deployment** Rally was deployed for cluster using `ExistingCloud`_ type of deployment. **Server flavor** .. code-block:: console $ nova flavor-show ram64 +----------------------------+--------------------------------------+ | Property | Value | +----------------------------+--------------------------------------+ | OS-FLV-DISABLED:disabled | False | | OS-FLV-EXT-DATA:ephemeral | 0 | | disk | 0 | | extra_specs | {} | | id | 2e46aba0-9e7f-4572-8b0a-b12cfe7e06a1 | | name | ram64 | | os-flavor-access:is_public | True | | ram | 64 | | rxtx_factor | 1.0 | | swap | | | vcpus | 1 | +----------------------------+--------------------------------------+ **Server image** .. 
code-block:: console $ nova image-show TestVM +----------------------------+-------------------------------------------------+ | Property | Value | +----------------------------+-------------------------------------------------+ | OS-EXT-IMG-SIZE:size | 13167616 | | created | 2014-08-21T11:18:49Z | | id | 7a0d90cb-4372-40ef-b711-8f63b0ea9678 | | metadata murano_image_info | {"title": "Murano Demo", "type": "cirros.demo"} | | minDisk | 0 | | minRam | 64 | | name | TestVM | | progress | 100 | | status | ACTIVE | | updated | 2014-08-21T11:18:50Z | +----------------------------+-------------------------------------------------+ **Task configuration file (in JSON format):** .. code-block:: json { "NovaServers.boot_server": [ { "args": { "flavor": { "name": "ram64" }, "image": { "name": "TestVM" } }, "runner": { "type": "constant", "concurrency": 5, "times": 400 }, "context": { "neutron_network": { "network_ip_version": 4 }, "users": { "concurrent": 30, "users_per_tenant": 5, "tenants": 5 }, "quotas": { "neutron": { "subnet": -1, "port": -1, "network": -1, "router": -1 } } } } ] } The only difference between first and second run is that runner.times for first time was set to 500 Results ------- **First time - a bug was found:** Starting from 142 server, we have error from novaclient: **Error : Unauthorized (HTTP 401).** That is how a `bug in Keystone`_ was found. 
+------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 6.507 | 17.402 | 100.303 | 39.222 | 50.134 | 26.8% | 500 | | total | 6.507 | 17.402 | 100.303 | 39.222 | 50.134 | 26.8% | 500 | +------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ **Second run, with bugfix:** After a patch was applied (using RPC instead of neutron client in metadata agent), we got **100% success and 2x improved average performance**: +------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count | +------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ | nova.boot_server | 5.031 | 8.008 | 14.093 | 9.616 | 9.716 | 100.0% | 400 | | total | 5.031 | 8.008 | 14.093 | 9.616 | 9.716 | 100.0% | 400 | +------------------+-----------+-----------+-----------+---------------+---------------+---------+-------+ .. references: .. _bug in Keystone: https://bugs.launchpad.net/keystone/+bug/1360446 .. _Mirantis OpenStack: https://software.mirantis.com/ .. 
_ExistingCloud: https://github.com/openstack/rally/blob/master/samples/deployments/existing.json rally-0.9.1/doc/user_stories/keystone/0000775000567000056710000000000013073420067021143 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/user_stories/keystone/authenticate.rst0000664000567000056710000001436713073417716024375 0ustar jenkinsjenkins00000000000000==================================================================================== 4x performance increase in Keystone inside Apache using the token creation benchmark ==================================================================================== *(Contributed by Neependra Khare, Red Hat)* Below we describe how we were able to get and verify a 4x better performance of Keystone inside Apache. To do that, we ran a Keystone token creation benchmark with Rally under different load (this benchmark scenario essentially just authenticate users with keystone to get tokens). Goal ---- - Get the data about performance of token creation under different load. - Ensure that keystone with increased public_workers/admin_workers values and under Apache works better than the default setup. Summary ------- - As the concurrency increases, time to authenticate the user gets up. - Keystone is CPU bound process and by default only one thread of *keystone-all* process get started. We can increase the parallelism by: 1. increasing *public_workers/admin_workers* values in *keystone.conf* file 2. running Keystone inside Apache - We configured Keystone with 4 *public_workers* and ran Keystone inside Apache. In both cases we got up to 4x better performance as compared to default Keystone configuration. 
Setup ----- Server : Dell PowerEdge R610 CPU make and model : Intel(R) Xeon(R) CPU X5650 @ 2.67GHz CPU count: 24 RAM : 48 GB Devstack - Commit#d65f7a2858fb047b20470e8fa62ddaede2787a85 Keystone - Commit#455d50e8ae360c2a7598a61d87d9d341e5d9d3ed Keystone API - 2 To increase public_workers - Uncomment line with *public_workers* and set *public_workers* to 4. Then restart Keystone service. To run Keystone inside Apache - Added *APACHE_ENABLED_SERVICES=key* in *localrc* file while setting up OpenStack environment with Devstack. Results ------- 1. Concurrency = 4 .. code-block:: json {'context': {'users': {'concurrent': 30, 'tenants': 12, 'users_per_tenant': 512}}, 'runner': {'concurrency': 4, 'times': 10000, 'type': 'constant'}} +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count |apache enabled keystone|public_workers| +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.537 | 0.998 | 4.553 | 1.233 | 1.391 | 100.0% | 10000 | N | 1 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.189 | 0.296 | 5.099 | 0.417 | 0.474 | 100.0% | 10000 | N | 4 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.208 | 0.299 | 3.228 | 0.437 | 0.485 | 100.0% | 10000 | Y | NA | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ 2. Concurrency = 16 .. 
code-block:: json {'context': {'users': {'concurrent': 30, 'tenants': 12, 'users_per_tenant': 512}}, 'runner': {'concurrency': 16, 'times': 10000, 'type': 'constant'}} +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count |apache enabled keystone|public_workers| +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 1.036 | 3.905 | 11.254 | 5.258 | 5.700 | 100.0% | 10000 | N | 1 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.187 | 1.012 | 5.894 | 1.61 | 1.856 | 100.0% | 10000 | N | 4 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.515 | 0.970 | 2.076 | 1.113 | 1.192 | 100.0% | 10000 | Y | NA | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ 3. Concurrency = 32 .. 
code-block:: json {'context': {'users': {'concurrent': 30, 'tenants': 12, 'users_per_tenant': 512}}, 'runner': {'concurrency': 32, 'times': 10000, 'type': 'constant'}} +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | action | min (sec) | avg (sec) | max (sec) | 90 percentile | 95 percentile | success | count |apache enabled keystone|public_workers| +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 1.493 | 7.752 | 16.007 | 10.428 | 11.183 | 100.0% | 10000 | N | 1 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 0.198 | 1.967 | 8.54 | 3.223 | 3.701 | 100.0% | 10000 | N | 4 | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ | total | 1.115 | 1.986 | 6.224 | 2.133 | 2.244 | 100.0% | 10000 | Y | NA | +--------+-----------+-----------+-----------+---------------+---------------+---------+-------+-----------------------+--------------+ rally-0.9.1/doc/release_notes/0000775000567000056710000000000013073420067017404 5ustar jenkinsjenkins00000000000000rally-0.9.1/doc/release_notes/latest.rst0000664000567000056710000000365613073417720021446 0ustar jenkinsjenkins00000000000000============ Rally v0.9.1 ============ Overview -------- +------------------+-----------------------+ | Release date | **4/12/2017** | +------------------+-----------------------+ Details ------- Unfortunately, Rally 0.9.0 contains various bugs. We work hard to fix them, improve our CI to avoid such issues in future and ready to present a new Rally release which includes only bug-fixes. Fixed bugs ~~~~~~~~~~ * [deployment] Credentials is not updated as soon as deployment is recreated. Need to call recreate request twice. 
fails with TypeError due to wrong integration with Rally API.
``rally-manage db upgrade``.
Description is not shown if it is not specified explicitly * Scenario Dummy.add_output is improved to display labels and OutputLinesChart * Fix: If Y-values are too long and overlaps chart box, then JavaScript updates chart width in runtime to fit width of chart graphs + Y values to their DOM container Rally Certification ~~~~~~~~~~~~~~~~~~~ None. Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * [Spec][Introduced] Export task and verification results to external services https://github.com/openstack/rally/blob/master/doc/specs/in-progress/task_and_verification_export.rst * [Spec][Implemented] Consistent resource names https://github.com/openstack/rally/blob/master/doc/specs/implemented/consistent_resource_names.rst * [Feature request][Implemented] Tempest concurrency https://github.com/openstack/rally/blob/master/doc/feature_request/implemented/add_possibility_to_specify_concurrency_for_tempest.rst Plugins ~~~~~~~ * **Scenarios**: - [added] VMTasks.workload_heat - [added] NovaFlavors.list_flavors - [updated] Flavors for Master and Worker node groups are now configured separately for SaharaCluster.* scenarios * **Context**: - .. 
warning:: [deprecated] rally.plugins.openstack.context.cleanup in favor of rally.plugins.openstack.cleanup - [improved] sahara_cluster Flavors for Master and Worker node groups are now configured separately in ``sahara_cluster`` context Miscellaneous ~~~~~~~~~~~~~ * Cinder version 2 is used by default * Keystone API v3 compatibility improved - Auth URL in both formats http://foo.rally:5000/v3 and http://foo.rally:5000 is supported for Keystone API v3 - Tempest configuration file is created properly according to Keystone API version used * ``install_rally.sh --branch`` now accepts all git tree-ish, not just branches or tags * VM console logs are now printed when Rally fails to connect to VM * Add support for Rally database schema migration (see 'Documentation' section) Bug fixes ~~~~~~~~~ **7 bugs were fixed**: * #1540563: Rally is incompatible with liberty Neutron client The root cause is that in Neutron Liberty client, the _fx function doesn't take any explicit keyword parameter but Rally is passing one (tenant_id). * #1543414: The `rally verify start` command fails when running a verification against Kilo OpenStack * #1538341: Error in logic to retrieve image details in image_valid_on_flavor Documentation ~~~~~~~~~~~~~ * Add documentation for DB migration https://github.com/openstack/rally/blob/master/rally/common/db/sqlalchemy/migrations/README.rst Thanks ~~~~~~ 2 Everybody! rally-0.9.1/doc/release_notes/archive/v0.6.0.rst0000664000567000056710000001671013073417716022422 0ustar jenkinsjenkins00000000000000============ Rally v0.6.0 ============ Overview -------- +------------------+-----------------------+ | Release date | **9/05/2016** | +------------------+-----------------------+ Details ------- Common ~~~~~~ * Added Python 3.5 support * Sync requirements with OpenStack global-requirements * Start using latest way of authentication - keystoneauth library * Start porting all scenario plugins to class-based view. 
Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * `[Implemented] SLA Performance degradation plugin `_ * `[Proposed] New Tasks Configuration section - hook `_ Database ~~~~~~~~ * disable db downgrade api * [require migration] upgrade deployment config Docker image ~~~~~~~~~~~~ * Add sudo rights to rally user Rally is a pluggable framework. External plugins can require installation of additional python or system packages, so we decided to add sudo rights. * Move from ubuntu:14.04 base image to ubuntu:16.04 . Ubuntu 16.04 is current/latest LTS release. Let's use it. * pre-install vim Since there are a lot of users who like to experiment and modify samples inside container, rally team decided to pre-install vim * configure/pre-install bash-completion Rally provides bash-completion script, but it doesn't work without installed `bash-completion` package and now it is included in our image. Rally Deployment ~~~~~~~~~~~~~~~~ * Add strict jsonschema validation for ExistingCloud deployments. All incorrect and unexpected properties will not be ignored anymore. If you need to store some extra parameters, you can use new "extra" property. * Fix an issue with endpoint_type. Previously, endpoint type was not transmitted to keystone client. In this case, keystoneclient used default endpoint type (for different API calls it can differ). Behaviour after the fix: - None endpoint type -> Rally will initialize all clients without setting endpoint type. It means that clients will choose what default values for endpoint type use by itself. Most of clients have "public" as default values. Keystone use "admin" or "internal" by default. - Not none endpoint type -> Rally will initialize all clients with this endpoint. Be careful, by default most of keystone v2 api calls do not work with public endpoint type. Rally Task ~~~~~~~~~~ * [core] Iterations numbers in logging and reports must be synchronized. Now they start from 1 . 
* [config] users_context.keystone_default_role is a new config option (Defaults to "member") for setting default user role for new users in case of Keystone V3. * [Reports] Embed Rally version into HTML reports This adds Rally version via meta tag into HTML reports: * [Reports] Expand menu if there is only one menu group * [logging] Remove deprecated rally.common.log module * [Trends][Reports] Add success rate chart to trends report * [Reports] Hide menu list if there is no data at all Rally Verify ~~~~~~~~~~~~ * Updating Tempest config file - Some tests (for boto, horizon, etc.) were removed from Tempest and now there is no need to keep the corresponding options in Tempest config file. - Some options in Tempest were moved from one section to another and we should to do the corresponding changes in Rally to be up to date with the latest Tempest version. * Adding '--skip-list' arg to `rally verify start` cmd `CLI argument for --skip-list `_ * *NEW!!*: - `Command for plugin listing `_ - `Command to uninstall plugins `_ * Rename and deprecated several arguments for `rally verify start` cmd: - tests-file -> load-list - xfails-file -> xfail-list Plugins ~~~~~~~ **Scenarios**: * Extend Sahara scenarios with autoconfig param Affected plugins: - `SaharaClusters.create_and_delete_cluster `_ - `SaharaClusters.create_scale_delete_cluster `_ - `SaharaNodeGroupTemplates.create_and_list_node_group_templates `_ - `SaharaNodeGroupTemplates.create_delete_node_group_templates `_ * *NEW!!*: - `MonascaMetrics.list_metrics `_ - `SenlinClusters.create_and_delete_cluster `_ - `Watcher.create_audit_template_and_delete `_ - `Watcher.create_audit_and_delete `_ - `Watcher.list_audit_templates `_ * Rename **murano.create_service** to **murano.create_services** atomic action **SLA**: *NEW!!*: `performance degradation plugin `_ **Contexts**: * *NEW!!*: - `Monasca monasca_metrics `_ - `Senlin profiles `_ - `Watcher audit_templates `_ * Extend `manila_share_networks `_ context with share-network 
autocreation support. * Extend `volumes `_ context to allow volume_type to be None to allow using default value Bug fixes ~~~~~~~~~ * [existing users] Quota context does not restore original settings on exit `Launchpad bug-report #1595578 `_ * [keystone v3] Rally task's test user role setting failed `Launchpad bug-report #1595081 `_ * [existing users] context cannot fetch 'tenant' and 'user' details from cloud deployment `Launchpad bug-report #1602157 `_ * UnboundLocalError: local variable 'cmd' referenced before assignment `Launchpad bug-report #1587941 `_ * [Reports] Fix trends report generation if there are n/a results Documentation ~~~~~~~~~~~~~ * Add page about task reports `RTD page for reports `_ Thanks ~~~~~~ 2 Everybody! rally-0.9.1/doc/release_notes/archive/v0.1.2.rst0000664000567000056710000001373313073417716022421 0ustar jenkinsjenkins00000000000000============ Rally v0.1.2 ============ Information ----------- +------------------+-----------------------+ | Commits | **208** | +------------------+-----------------------+ | Bug fixes | **37** | +------------------+-----------------------+ | Dev cycle | **77 days** | +------------------+-----------------------+ | Release date | **23/December/2015** | +------------------+-----------------------+ Details ------- This release, as well as all previous ones, includes a lot of internal and external changes. Most important of them are listed below. .. warning:: Release 0.1.2 is the last release with Python 2.6 support. Deprecations ~~~~~~~~~~~~ * Class `rally.common.objects.Endpoint` was renamed to `Credentials`. Old class is kept for backward compatibility. Please, stop using the old class in your plugins. .. warning:: dict key was changed too in user context from "endpoint" to "credential" * rally.task.utils: wait_is_ready(), wait_for(), wait_for_delete() deprecated you should use wait_for_status() instead. 
Rally Verify ~~~~~~~~~~~~ * Added possibility to run Tempest tests listed in a file(--tests-file argument in ``verify start``) * Added possibility to upload Tempest subunit stream logs into data base * Improvements in generating Tempest config file * Reworked subunit stream parser * Don't install Tempest when `rally verify [gen/show]config` * Rally team tries to simplify usage of each our component. Now Rally verification has some kind of a context like in Tasks. Before launching each verification, Rally checks existence of required resources(networks, images, flavours, etc) in Tempest configuration file and pre-creates them. Do not worry, all these resources will not be forgotten and left, Rally will clean them after verification. Rally Task ~~~~~~~~~~ * Add --html-static argument to ``rally task report`` which allows to generate HTML reports that doesn't require Internet. * Rally supports different API versions now via api_versions context: .. code-block:: none CinderVolumes.create_and_delete_volume: - args: size: 1 runner: type: "constant" times: 2 concurrency: 2 context: users: tenants: 2 users_per_tenant: 2 api_versions: cinder: version: 2 service_name: cinderv2 * Move rally.osclients.Clients to plugin base Rally OSclients is pluggable now and it is very easy to extend OSClient for your cloud out of Rally tree. * Add 'merge' functionality to SLA All SLA plugins should implement merge() method now. In future this will be used for distributed load generation. Where SLA results from different runners will be merged together. * New optional_action_timer decorator Allows to make the methods that can be both atomic_action or regular method. Method changes behavior based on value in extra key "atomic_action" Rally Certification ~~~~~~~~~~~~~~~~~~~ * Fix Glance certification arguments * Add Neutron Quotas only if Neutron service is available Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * Spec consistent-resource-names: Resource name is based on Task id now. 
It is a huge step to persistence and disaster cleanups. * Add a spec for distributed load generation: https://github.com/openstack/rally/blob/master/doc/specs/in-progress/distributed_runner.rst * Improvements for scenario output format https://github.com/openstack/rally/blob/master/doc/specs/in-progress/improve_scenario_output_format.rst * Task and Verify results export command https://github.com/openstack/rally/blob/master/doc/specs/in-progress/task_and_verification_export.rst Plugins ~~~~~~~ * **Scenarios**: * [new] NovaServers.boot_and_get_console_output * [new] NovaServers.boot_and_show_server * [new] NovaServers.boot_server_attach_created_volume_and_resize * [new] NovaServers.boot_server_from_volume_and_resize * [new] NeutronSecurityGroup.create_and_delete_security_groups * [new] NeutronSecurityGroup.create_and_list_security_groups * [new] NeutronSecurityGroup.create_and_update_security_groups * [new] NeutronLoadbalancerV1.create_and_delete_healthmonitors * [new] NeutronLoadbalancerV1.create_and_list_healthmonitors * [new] NeutronLoadbalancerV1.create_and_update_healthmonitors * [new] SwiftObjects.list_and_download_objects_in_containers * [new] SwiftObjects.list_objects_in_containers * [new] FuelNodes.add_and_remove_node * [new] CeilometerMeters.list_matched_meters * [new] CeilometerResource.list_matched_resources * [new] CeilometerSamples.list_matched_samples * [new] CeilometerStats.get_stats * [new] Authenticate.validate_monasca * [new] DesignateBasic.create_and_delete_zone * [new] DesignateBasic.create_and_list_zones * [new] DesignateBasic.list_recordsets * [new] DesignateBasic.list_zones * [fix] CinderVolumes.create_nested_snapshots_and_attach_volume Remove random nested level which produce different amount of atomic actions and bad reports. * Support for Designate V2 api * A lot of improvements in Sahara scenarios * **Context**: * [new] api_versions Context allows us to setup client to communicate to specific service. 
* [new] swift_objects Context pre creates swift objects for future usage in scenarios * [update] sahara_cluster It supports proxy server which allows to use single floating IP for whole cluster. * [fix] cleanup Fix cleanup of networks remove vip before port. Bug fixes ~~~~~~~~~ **37 bugs were fixed, the most critical are**: * Follow symlinks in plugin discovery * Use sed without -i option for portability (install_rally.sh) * Fixed race in rally.common.broker * Fixed incorrect iteration number on "Failures" Tab * Fixing issue with create_isolated_networks = False * Fix docker build command Documentation ~~~~~~~~~~~~~ Fixed some minor typos and inaccuracies. Thanks ~~~~~~ We would like to thank Andreas Jaeger for ability to provide Python 2.6 support in this release. rally-0.9.1/doc/release_notes/archive/v0.1.1.rst0000664000567000056710000000574613073417716022425 0ustar jenkinsjenkins00000000000000============ Rally v0.1.1 ============ Information ----------- +------------------+-----------------------+ | Commits | **32** | +------------------+-----------------------+ | Bug fixes | **9** | +------------------+-----------------------+ | Dev cycle | **11 days** | +------------------+-----------------------+ | Release date | **6/October/2015** | +------------------+-----------------------+ Details ------- This release contains new features, new 6 plugins, 9 bug fixes, various code and API improvements. New Features ~~~~~~~~~~~~ * **Rally verify generates proper tempest.conf file now** Improved script that generates tempest.conf, now it works out of box for most of the clouds and most of Tempest tests will pass without hacking it. * **Import Tempest results to Rally DB** ``rally verify import`` command allows you to import already existing Tempest results and work with them as regular "rally verify start" results: generate HTML/CSV reports & compare different runs. 
API Changes ~~~~~~~~~~~~ **Rally CLI changes** * [add] ``rally verify import`` imports raw Tempest results to Rally Specs & Feature requests ~~~~~~~~~~~~~~~~~~~~~~~~ There is no new specs and feature requests. Plugins ~~~~~~~ * **Scenarios**: [new] NeutronNetworks.create_and_list_floating_ips [new] NeutronNetworks.create_and_delete_floating_ips [new] MuranoPackages.import_and_list_packages [new] MuranoPackages.import_and_delete_package [new] MuranoPackages.import_and_filter_applications [new] MuranoPackages.package_lifecycle [improved] NovaKeypair.boot_and_delete_server_with_keypair New argument ``server_kwargs``, these kwargs are used to boot server. [fix] NeutronLoadbalancerV1.create_and_delete_vips Now it works in case of concurrency > 1 * **Contexts**: [improved] network Network context accepts two new arguments: ``subnets_per_network`` and ``network_create_args``. [fix] network Fix cleanup if nova-network is used. Networks should be dissociate from project before deletion [fix] custom_image Nova server that is used to create custom image was not deleted if script that prepares server failed. Bug fixes ~~~~~~~~~ **9 bugs were fixed, the most critical are**: * Fix install_rally.sh script Set 777 access to /var/lib/rally/database file if system-wide method of installation is used. 
* Rally HTML reports Overview table had few mistakes * Success rate was always 100% * Percentiles were wrongly calculated * Missing Ironic, Murano and Workload(vm) options in default config file * ``rally verify start`` failed while getting network_id * ``rally verify genconfig`` hangs forever if Horizon is not available Documentation ~~~~~~~~~~~~~ * **Fix project maintainers page** Update the information about Rally maintainers * **Document rally --plugin-paths CLI argument** * **Code blocks in documentation looks prettier now** rally-0.9.1/doc/release_notes/archive/v0.0.2.rst0000664000567000056710000001337313073417716022420 0ustar jenkinsjenkins00000000000000============ Rally v0.0.2 ============ Information ----------- +------------------+-----------------+ | Commits | **100** | +------------------+-----------------+ | Bug fixes | **18** | +------------------+-----------------+ | Dev cycle | **45 days** | +------------------+-----------------+ | Release date | **12/Mar/2015** | +------------------+-----------------+ Details ------- This release contains new features, new benchmark plugins, bug fixes, various code and API improvements. New Features ~~~~~~~~~~~~ * rally task start **--abort-on-sla-failure** Stopping load before things go wrong. Load generation will be interrupted if SLA criteria stop passing. * Rally verify command supports multiple Tempest sources now. * python34 support * postgres DB backend support API changes ~~~~~~~~~~~ * [new] **rally [deployment | verify | task] use** subcommand It should be used instead of root command **rally use** * [new] Rally as a Lib API To avoid code duplication between Rally as CLI tool and Rally as a Service we decide to make Rally as a Lib as a common part between these 2 modes. Rally as a Service will be a daemon that just maps HTTP request to Rally as a Lib API. 
* [deprecated] **rally use** CLI command * [deprecated] Old Rally as a Lib API Old Rally API was quite mixed up so we decide to deprecate it Plugins ~~~~~~~ * **Benchmark Scenario Runners**: [improved] Improved algorithm of generation load in **constant runner** Before we used processes to generate load, now it creates pool of processes (amount of processes is equal to CPU count) after that in each process use threads to generate load. So now you can easily generate load of 1k concurrent scenarios. [improved] Unify code of **constant** and **rps** runners [interface] Added **abort()** to runner's plugin interface New method **abort()** is used to immediately interrupt execution. * **Benchmark Scenarios**: [new] DesignateBasic.create_and_delete_server [new] DesignateBasic.create_and_list_servers [new] DesignateBasic.list_servers [new] MistralWorkbooks.list_workbooks [new] MistralWorkbooks.create_workbook [new] Quotas.neutron_update [new] HeatStacks.create_update_delete_stack [new] HeatStacks.list_stacks_and_resources [new] HeatStacks.create_suspend_resume_delete_stac [new] HeatStacks.create_check_delete_stack [new] NeutronNetworks.create_and_delete_routers [new] NovaKeypair.create_and_delete_keypair [new] NovaKeypair.create_and_list_keypairs [new] NovaKeypair.boot_and_delete_server_with_keypair [new] NovaServers.boot_server_from_volume_and_live_migrate [new] NovaServers.boot_server_attach_created_volume_and_live_migrate [new] CinderVolumes.create_and_upload_volume_to_image [fix] CinderVolumes.create_and_attach_volume Pass optional \*\*kwargs only to create server command [fix] GlanceImages.create_image_and_boot_instances Pass optional \*\*kwargs only to create server command [fix] TempestScenario.\* removed stress cleanup. Major issue is that tempest stress cleanup cleans whole OpenStack. This is very dangerous, so it's better to remove it and leave some extra resources. 
[improved] NovaSecGroup.boot_and_delete_server_with_secgroups Add optional \*\*kwargs that are passed to boot server comment * **Benchmark Context**: [new] **stacks** Generates passed amount of heat stacks for all tenants. [new] **custom_image** Prepares images for benchmarks in VMs. To Support generating workloads in VMs by existing tools like: IPerf, Blogbench, HPCC and others we have to have prepared images, with already installed and configured tools. Rally team decide to generate such images on fly from passed to avoid requirements of having big repository with a lot of images. This context is abstract context that allows to automate next steps: 1) runs VM with passed image (with floating ip and other stuff) 2) execute abstract method that has access to VM 3) snapshot this image In future we are going to use this as a base for making context that prepares images. [improved] **allow_ssh** Automatically disable it if security group are disabled in neutron. [improved] **keypair** Key pairs are stored in "users" space it means that accessing keypair from scenario is simpler now: self.context["user"]["keypair"]["private"] [fix] **users** Pass proper EndpointType for newly created users [fix] **sahara_edp** The Job Binaries data should be treated as a binary content * **Benchmark SLA**: [interface] SLA calculations is done in additive way now Resolves scale issues, because now we don't need to have whole array of iterations in memory to process SLA. This is required to implement **--abort-on-sla-failure** feature [all] SLA plugins were rewritten to implement new interface Bug fixes ~~~~~~~~~ **18 bugs were fixed, the most critical are**: * Fix **rally task detailed --iterations-data** It didn't work in case of missing atomic actions. 
In case of trying to delete a task that is not in "finished" status, users used to get a traceback instead of a user-friendly message suggesting to run the command with the --force key.
Features ~~~~~~~~ * Pass api_versions info to glance images context * [Verify] Don't create new flavor when flavor already exists Bug fixes ~~~~~~~~~ **6 bugs were fixed, the most critical are**: * #1545889: Existing deployment with given endpoint doesn't work anymore * #1547092: Insecure doesn't work with Rally 0.3.0 * #1547083: Rally Cleanup failed with api_versions context in 0.3.0 release * #1544839: Job gate-rally-dsvm-zaqar-zaqar fails since the recent Rally patch * #1544522: Non-existing "called_once_with" method of Mock library is used rally-0.9.1/doc/release_notes/archive/v0.3.3.rst0000664000567000056710000000610313073417716022415 0ustar jenkinsjenkins00000000000000============ Rally v0.3.3 ============ Information ----------- +------------------+-----------------------+ | Commits | **20** | +------------------+-----------------------+ | Dev cycle | **10 days** | +------------------+-----------------------+ | Release date | **3/24/2016** | +------------------+-----------------------+ Details ------- A half of patches relate to Cleanup. We have once again proved that ideal stuff can be improved. :) Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * `[Spec][Introduced] Improve atomic actions format`__ __ https://github.com/openstack/rally/blob/master/doc/specs/in-progress/improve_atomic_actions_format.rst Plugins ~~~~~~~ * **Cleanups**: * Use proper attribute to get heat stack name * Always assign a name to created images. This is necessary for name-based cleanup. If a name is not specified, one will be generated automatically. * Improve filtering glance images in case of V2 API * Delete only images created by images context Since the images context allows creating images with arbitrary names, name-based cleanup won't work for it, so we have to delete the exact list of images that it created instead. * New config option to set cleanup threads Allow the user to change the number of cleanup threads via the rally config. 
When scaling out to thousands of instances, the cleanup can take forever with the static 20 threads. * Add inexact matching to name_matches_object This will support places where we create resources with names that start with a given name pattern, but include some additional identifier afterwards. For instance, when bulk creating instances, Nova appends a UUID to each instance name. * **Scenarios**: * Add sample of template for testing for testing heat caching. * Introduced new scenario `Dummy.dummy_random_action`__. It is suitable for demonstration of upcoming trends report. __ http://rally.readthedocs.org/en/latest/plugin/plugin_reference.html#dummy-dummy-random-action-scenario * **Contexts**: `api_versions`__ context was extended to support switch between Keystone V2 and V3 API versions. Now it is possible to use one Rally deployment to check both Keystone APIs. __ http://rally.readthedocs.org/en/latest/plugin/plugin_reference.html#api-versions-context * **Newcomer in the family**: All ResourceType classes are pluggable now and it is much easier to use and extend them. .. warning:: Decorator ``rally.task.types.set`` is deprecated now in favor of ``rally.task.types.convert``. Bug fixes ~~~~~~~~~ * #1536172: rally deployment destroy failed with traceback for failed deployments. At current moment it is impossible to delete deployment if for some reason deployment engine plugin cannot be found, because exception will be thrown. Documentation ~~~~~~~~~~~~~ * Remove extra link in `All release notes` Previously, two links for latest release were presented. * Update release notes for 0.3.2 * Fixed indents for warning messages * Fixed all references Thanks ~~~~~~ To Everybody! 
re-run all the tests that failed during the last test execution
But in some scenarios, there is more than one service being called,
There were some classes in the code that did not inherit from anything, which made them old-style classes. A new-style class is the recommended way to create a class in modern Python. A new-style class should always inherit from `object` or another new-style class.
* #1553024: Backward incompatible change in neutronclient(release 4.1.0) broke Tempest config generation to support latest neutronclient. Documentation ~~~~~~~~~~~~~ * Add documentation for DB migration * Make documentation for output plugins * Add descriptive docstrings for plugins based on OutputChart * Register these plugins in `Rally Plugins Reference`__ __ http://docs.openstack.org/developer/rally/plugin/plugin_reference.html * Documentation tox fix Added information about debugging unit test with tox. Replace 3 references to py26 with py34 to reflect current rally tox configuration. * Change structure of rally plugin and plugin references page * Update the scenario development, runner and context sections * The design of `Rally Plugins Reference`__ page was improved __ http://docs.openstack.org/developer/rally/plugin/plugin_reference.html * New page was added - `CLI references`__ __ http://docs.openstack.org/developer/rally/cli/cli_reference.html Thanks ~~~~~~ To Everybody! rally-0.9.1/doc/release_notes/archive/v0.8.1.rst0000664000567000056710000000154213073417716022422 0ustar jenkinsjenkins00000000000000============ Rally v0.8.1 ============ Overview -------- +------------------+-----------------------+ | Release date | **1/27/2017** | +------------------+-----------------------+ Details ------- Fix for python requirements list. Plugins ~~~~~~~ **Scenarios**: * *Updated*: - Use new network for each subnet at `NeutronNetworks.create_and_list_subnets `_ scenario. * *NEW!!*: - `CinderVolumeTypes.create_and_list_encryption_type `_ - `Quotas.cinder_get `_ Thanks ~~~~~~ 2 Everybody! 
All plugins have the same Plugin base: rally.common.plugin.plugin.Plugin
Now the Context.map_for_scenario method can be overridden to decide
def map_for_scenario(self): # Maps context object to the scenario context object # like context["users"] -> context["user"] and so on. .. * Scenario .. code-block:: python # Old Scenario from rally.benchmark.scenarios import base from rally.benchmark import validation class ScenarioPlugin(base.Scenario): @base.scenario() def some(self): self._do_some_action() @base.atomic_action_timer("some_timer") def _do_some_action(self): # ... # New Scenario from rally.task import atomic from rally.task import scenario from rally.task import validation # OpenStack scenario has different base now: # rally.plugins.openstack.scenario.OpenStackScenario class ScenarioPlugin(scenario.Scenario): @scenario.configure() def some(self): self._do_some_action() @atomic.action_timer("some_action") def _do_some_action(self): # ... .. * Runner .. code-block:: python ## Old from rally.benchmark.runners import base class SomeRunner(base.ScenarioRunner): __execution_type__ = "some_runner" def _run_scenario(self, cls, method_name, context, args) # Load generation def abort(self): # Method that aborts load generation ## New from rally.task import runner @runner.configure(name="some_runner") class SomeRunner(runner.ScenarioRunner): def _run_scenario(self, cls, method_name, context, args) # Load generation def abort(self): # Method that aborts load generation .. * SLA .. code-block:: python # Old from rally.benchmark import sla class FailureRate(sla.SLA): # ... # New from rally.task import sla @sla.configure(name="failure_rate") class FailureRate(sla.SLA): # ... .. * **Rally Task aborted command** Finally you can gracefully shutdown running task by calling: .. code:: bash rally task abort .. * **Rally CLI changes** * [add] ``rally --plugin-paths`` specify the list of directories with plugins * [add] ``rally task report --junit`` - generate a JUnit report This allows users to feed reports to tools such as Jenkins. 
* [add] ``rally task abort`` - aborts running Rally task when run with the ``--soft`` key, the ``rally task abort`` command is waiting until the currently running subtask is finished, otherwise the command interrupts subtask immediately after current scenario iterations are finished. * [add] ``rally plugin show`` prints detailed information about plugin * [add] ``rally plugin list`` prints table with rally plugin names and titles * [add] ``rally verify genconfig`` generates tempest.conf without running it. * [add] ``rally verify install`` install tempest for specified deployment * [add] ``rally verify reinstall`` removes tempest for specified deployment * [add] ``rally verify uninstall`` uninstall tempest of specified deployment * [fix] ``rally verify start --no-use`` --no-use was always turned on * [remove] ``rally use`` now each command has subcommand ``use`` * [remove] ``rally info`` * [remove] ``rally-manage tempest`` now it is covered by ``rally verify`` * **New Rally task reports** * New code is based on OOP style which is base step to make pluggable Reports * Reports are now generated for only one iteration over the resulting data which resolves scalability issues when we are working with large amount of iterations. * New Load profiler plot that shows amount of iterations that are working in parallel * Failed iterations are shown as a red areas on stacked are graphic. 
Non backward compatible changes ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * [remove] ``rally use`` cli command * [remove] ``rally info`` cli command * [remove] ``--uuid`` parameter from ``rally deployment `` * [remove ``--deploy-id`` parameter from: ``rally task ``, ``rally verify ``, ``rally show `` Specs & Feature requests ~~~~~~~~~~~~~~~~~~~~~~~~ [feature request] Explicitly specify existing users for scenarios [feature request] Improve install script and add --uninstall and --version [feature request] Allows specific repos & packages in install-rally.sh [feature request] Add ability to capture logs from tested services [feature request] Check RPC queue perfdata [spec] Refactoring Rally cleanup [spec] Consistent resource names Plugins ~~~~~~~ * **Scenarios**: [new] CinderVolumes.create_volume_backup [new] CinderVolumes.create_and_restore_volume_backup [new] KeystoneBasic.add_and_remove_user_role [new] KeystoneBasic.create_and_delete_role [new] KeystoneBasic.create_add_and_list_user_roles [new] FuelEnvironments.list_environments [new] CinderVolumes.modify_volume_metadata [new] NovaServers.boot_and_delete_multiple_servers [new] NeutronLoadbalancerV1.create_and_list_pool [new] ManilaShares.list_shares [new] CeilometerEvents.create_user_and_get_event [new] CeilometerEvents.create_user_and_list_event_types [new] CeilometerEvents.create_user_and_list_events [new] CeilometerTraits.create_user_and_list_trait_descriptions [new] CeilometerTraits.create_user_and_list_traits [new] NeutronLoadbalancerV1.create_and_delete_pools [new] NeutronLoadbalancerV1.create_and_update_pools [new] ManilaShares.create_and_delete_share [new] ManilaShares.create_share_network_and_delete [new] ManilaShares.create_share_network_and_list [new] HeatStacks.create_and_delete_stack [new] ManilaShares.list_share_servers [new] HeatStacks.create_snapshot_restore_delete_stack [new] KeystoneBasic.create_and_delete_ec2credential [new] KeystoneBasic.create_and_list_ec2credentials [new] 
VMTasks.boot_runcommand_delete_custom_image
Check whether dhcp_agent_scheduler is supported or not in network cleanup
warning:: Database schema is changed, you must run `rally-manage db upgrade `_ to be able to use old Rally installation with latest release. * change structure of database to be more flexible * save raw task results in chunks (see raw_result_chunk_size option of [DEFAULT] rally configuration section) * add db revision check in rally API, so it is impossible to use rally with wrong db now. Rally API ~~~~~~~~~ Single entry point for Rally API is added - rally.api.API . Old API classes (``rally.api.Task``, ``rally.api.Verification``, ``rally.api.Deployment``) are deprecated now. Rally CLI ~~~~~~~~~ * ``rally task sla_check`` is deprecated now in favor of ``rally task sla-check`` * Deprecated category ``rally show`` was removed. * `rally plugin list` is extended with plugin base column Task Component ~~~~~~~~~~~~~~ - [Random names] scenario for checking performance of generate_random_name method is added to our CI with proper SLA. Be sure, whatever number of random names you need, it will not affect performance of Rally at all, we checked. - [atomic actions] scenario for checking performance of calculating atomic actions is added to our CI with proper SLA. Be sure, whatever number atomics you have in scenarios, it will not affect performance of Rally at all, we checked. - [services] new entity is introduced for helping to provide compatibility layer between different API versions of one service. Verification component ~~~~~~~~~~~~~~~~~~~~~~ We completely redesign the whole Verification component. For more details see `our new docs for that component `_ Unfortunately, such big change could not be done in backward compatible way, so old code is not compatible with new one. See `HowTo migrate from Verification component 0.7.0 to 0.8.0 `_ Plugins ~~~~~~~ **Services**: * Glance: Switched from V1 to V2 API by default. 
* Keystone: - Transmit endpoint_type to keystoneclient - Full keystone V3 support **Scenarios**: * *Updated*: - The meaning of the volume_type argument is changes in `CinderVolumes.create_snapshot_and_attach_volume `_ scenario. It should contain actual volume type instead of boolean value to choose random volume type. - Extend `GlanceImages.create_image_and_boot_instances `_ with create_image_kwargs and boot_server_kwargs arguments. * *NEW!!*: - `CeilometerAlarms.create_and_get_alarm `_ - `CinderVolumeBackups.create_incremental_volume_backup `_ - `CinderVolumeTypes.create_and_delete_volume_type `_ - `CinderVolumeTypes.create_volume_type_and_encryption_type `_ - `CinderVolumes.create_and_accept_transfer `_ - `CinderVolumes.create_and_get_volume `_ - `CinderVolumes.create_volume_and_update_readonly_flag `_ - `CinderVolumes.list_transfers `_ - `CinderVolumes.list_types `_ - `KeystoneBasic.create_and_get_role `_ - `ManilaShares.create_and_list_share `_ - `ManilaShares.set_and_delete_metadata `_ - `MistralExecutions.create_execution_from_workbook `_ - `MistralExecutions.list_executions `_ - `NeutronLoadbalancerV2.create_and_list_loadbalancers `_ - `NeutronNetworks.create_and_show_network `_ - `NeutronNetworks.list_agents `_ - `NovaAggregates.create_aggregate_add_host_and_boot_server `_ - `NovaAggregates.create_and_get_aggregate_details `_ - `NovaFlavors.create_and_delete_flavor `_ - `NovaFlavors.create_flavor_and_add_tenant_access `_ - `NovaHosts.list_and_get_hosts `_ - `NovaHypervisors.list_and_get_uptime_hypervisors `_ - `NovaHypervisors.list_and_search_hypervisors `_ - `NovaHypervisors.statistics_hypervisors `_ - `NovaSecGroup.boot_server_and_add_secgroups `_ - `NovaServerGroups.create_and_list_server_groups `_ - `Quotas.nova_get `_ **Hooks**: * *NEW!!*: - `fault_injection `_ **Runners** * *Updated*: - `RPS runner `_ is extended with ability to increase 'rps' value by arithmetic progression across certain duration. 
"rps": {
rally-0.9.1/doc/release_notes/archive/v0.5.0.rst0000664000567000056710000003303713073417716022422 0ustar jenkinsjenkins00000000000000============ Rally v0.5.0 ============ Information ----------- +------------------+-----------------------+ | Commits | **175** | +------------------+-----------------------+ | Bug fixes | **19** | +------------------+-----------------------+ | Dev cycle | **93 days** | +------------------+-----------------------+ | Release date | **7/20/2016** | +------------------+-----------------------+ Details ------- This release took much more time than we expected, but we have a lot of reasons for such delay and if you look at our change-log, you will understand them.:) Here is a quick introduction: * To make our releases as much as possible stable, we added upper limits for each of our requirements; * A lot of deprecated lines of code were removed, so be careful; * Statistics trends for given tasks were introduced; * Support for tempest plugins was added; * Several new pages at docs. Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * `[Introduced && implemented] Introduce class-based scenario implementation `_ * `[Introduced] Rally Task Validation refactoring `_ * `[Introduced] Scaling & Refactoring Rally DB `_ * `[Introduced] SLA Performance degradation plugin `_ Logging ~~~~~~~ * disable urllib3 warnings only if the library provide them Database ~~~~~~~~ [doesn't require migration] Transform DB layer to return dicts, not SQLAlchemy models Rally Deployment ~~~~~~~~~~~~~~~~ * Support single-AZ deployment This supports the case where OpenStack is deployed with a single AZ for both controller(s) and compute(s), and not all hosts in the AZ that contains an instance are guaranteed to have the nova-compute service. 
You should use `rally verify install` cmd to install tempest now
- Names of two or more plugins in different plugin base can conflict Current list of plugin bases: - rally.task.context.Context - rally.task.scenario.Scenario - rally.task.types.ResourceType - rally.task.exporter.TaskExporter - rally.task.processing.charts.Chart - rally.task.runner.ScenarioRunner - rally.task.sla.SLA - rally.deployment.serverprovider.provider.ProviderFactory - rally.deployment.engine.Engine - rally.osclients.OSClient **OSClients** * *NEW!!* Support for Senlin client * *NEW!!* Support for Gnocchi client * *NEW!!* Support for Magnum client * *NEW!!* Support for Watcher client * Transmit endpoint_type to saharaclient **Scenarios**: * *NEW!!*: - `Authenticate.validate_ceilometer `_ - `CinderVolumes.create_volume_from_snapshot `_ - `CinderVolumes.create_volume_and_clone `_ - `NovaFlavors.create_and_list_flavor_access `_ - `NovaFlavors.create_flavor `_ - `NovaServers.boot_and_update_server `_ - `NovaServers.boot_server_from_volume_snapshot `_ * [Sahara] Add configs to MapR plugin * Extend CinderVolumes.create_and_upload_volume_to_image with "image" argument `Plugin Reference `_ * Deprecate Dummy.dummy_with_scenario_output scenario in favor of Dummy.dummy_output .. warning:: Dummy.dummy_with_scenario_output scenario will be removed after several releases `Deprecated Plugin Reference `_ `New Plugin Reference `_ * Extend CinderVolumes.create_volume_and_clone with nested_level Add nested_level argument for nested cloning volume to new volume * Extend `CinderVolumes.create_nested_snapshots_and_attach_volume `_ Two new arguments were added: create_volume_kwargs and create_snapshot_kwargs .. warning:: All arguments related to snapshot creation should be transmitted only via create_snapshot_kwargs. * Introduce new style of scenarios - class based. 
`Spec Reference `_ * Improve report for VMTasks.boot_runcommand_delete * [Sahara] Added 5.5.0 version for cdh-plugin and 1.6.0 version for spark * Extend boot_server_from_volume_and_delete, boot_server_from_volume, boot_server_from_volume_and_live_migrate, boot_server_from_volume_snapshot scenarios of NovaServers class with "volume_type" parameter. **Contexts**: * *NEW!!*: - `Cinder volume_types `_ - `Murano environments `_ - `Heat dataplane `_ * Use Broker Pattern in Keystone roles context * Use immutable types for locking context configuration Since context configuration passed to Context.__init__() was a mutable type (dict or list), sometimes we had unexpected changes done by unpredictable code (for example, in wrappers). * Add possibility to balance usage of users For the moment all users for tasks were taken randomly and there was no way to balance them between tasks. It may be very useful when we have difference between first usage of tenant/user and all consecutive. In this case we get different load results. Therefore, "users" context was extended with new config option 'user_choice_method' that defines approach for picking up users. Two values are available: - random - round_robin Default one is compatible with old approach - "random". * Make sahara_image and custom_image contexts glance v2 compatible * Extend servers context with "nics" parameter * Extend network context with "dns_nameservers" parameter * Extend volume context with "volume_type" parameter **Cleanup**: * Mark several cleanup resources as tenant_resource Nova servers and security groups are tenant related resources, but resource decorator missed that fact which makes cleanup tries to delete one resources several times. * Turn off redundant nova servers cleanup for NovaFlavors.list_flavors scenario * Add neutron cleanup for NeutronSecurityGroup.create_and_delete_security_groups **Exporter**: Rename task-exporter "file-exporter" to "file". .. 
but the resource decorator missed that fact, which makes cleanup try to delete one resource several times.
* Rally doesn't take cacert setting while creating keystone session `Launchpad bug-report #1577360 `_ * Heat scenarios fail when API uses TLS `Launchpad bug-report #1585456 `_ * Example in comment of context manila_share_networks wrong `Launchpad bug-report #1587164 `_ * There is no way to get UUID of a verification after it is created by "rally verify start" or "rally verify import_results" when --no-use is set `Launchpad bug-report #1587034 `_ * Exposed ssh timeout and interval in vm scenario `Launchpad bug-report #1587728 `_ * Ceilometer scenario doesn't require "ceilometer" ctx `Launchpad bug-report #1557642 `_ * "servers" context requires setting network id for multiple possible networks found. `Launchpad bug-report #1592292 `_ * nested_level data type incorrect in create_nested_snapshots_and_attach_volume `Launchpad bug-report #1594656 `_ * Rally cleanup servers raises exception `Launchpad bug-report #1584104 `_ * Stopping server is redundant before cold-migrating server `Launchpad bug-report #1594730 `_ * existing_users context doesn't work in case of Keystone v3 * Whether validates flavor's disk or not depends on booting type of the instance `Launchpad bug-report #1596756 `_ Documentation ~~~~~~~~~~~~~ * Re-use openstack theme for building docs outside rtd. `Rally Docs at docs.openstack.org `_ * Add page for Verification component `RTD page for Verification component `_ * Add glossary page `RTD page for Glossary `_ * Adjust docs reference to "KeystoneBasic.authenticate" scenario `Step 6. Aborting load generation on success criteria failure `_ Thanks ~~~~~~ 2 Everybody! 
rally-0.9.1/doc/release_notes/archive/v0.2.0.rst0000664000567000056710000001132213073417716022410 0ustar jenkinsjenkins00000000000000============ Rally v0.2.0 ============ Information ----------- +------------------+-----------------------+ | Commits | **48** | +------------------+-----------------------+ | Bug fixes | *6** | +------------------+-----------------------+ | Dev cycle | **19 days** | +------------------+-----------------------+ | Release date | **1/11/2015** | +------------------+-----------------------+ Details ------- This release, as well as all previous ones, includes a lot of internal and external changes. Most important of them are listed below. .. warning:: Release 0.2.0 doesn't support python 26 Deprecations ~~~~~~~~~~~~ * Option --system-wide-install for ``rally verify start`` was deprecated in favor of --system-wide * `rally show` commands were deprecated because of 3 reasons: - It blocks us to make Rally generic testing tool - It complicates work on Rally as a Service - You can always use standard OpenStack clients to do the same Rally Verify ~~~~~~~~~~~~ * Add "xfail" mechanism for Tempest tests. This mechanism allows us to list some tests, that are expected to fail, in a YAML file and these tests will have "xfail" status instead of "fail". Use new argument "--xfails-file" of rally verify start command. Rally Task ~~~~~~~~~~ * --out argument of `rally task report` is optional now If you don't specify --out it will just print the resulting report * Better scenario output support As far as you know each scenario plugin are able to return data as a dict. This dict contained set of key-values {: } where each name was line on graph and each number was one of point. Each scenario run adds a single point for each line on that graph. This allows to add extra data to the Rally and see how some values were changed over time. However, in case when Rally was used to execute some other tool and collect it's data this was useless. 
To address this **Scenario.add_output(additive, complete)** was introduced: Now it is possible to generate as many as you need graphs by calling this method multiple times. There are two types of graph additive and complete. **Additive** is the same as legacy concept of output data which is generated from results of all iterations, **complete** are used when you would like to return whole chart from each iteration. HTML report has proper sub-tabs *Aggregated* and *Per iteration* inside *Scenario Data* tab. Here is a simple example how output can be added in any scenario plugin: .. code-block:: python # This represents a single X point in result StackedArea. # Values from other X points are taken from other iterations. self.add_output(additive={"title": "How do A and B changes", "description": ("Trend for A and B " "during the scenario run"), "chart_plugin": "StackedArea", "data": [["foo", 42], ["bar", 24]]}) # This is a complete Pie chart that belongs to this concrete iteration self.add_output( complete={"title": "", "description": ("Complete results for Foo and Bar " "from this iteration"), "chart_plugin": "Pie", "data": [["foo", 42], ["bar", 24]]}) Rally Certification ~~~~~~~~~~~~~~~~~~~ None. Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ [Spec][Implemented] improve_scenario_output_format https://github.com/openstack/rally/blob/master/doc/specs/implemented/improve_scenario_output_format.rst Plugins ~~~~~~~ * **Scenarios**: * [new] DesignateBasic.create_and_update_domain * [improved] CinderVolumes.create_and_attach_volume .. warning:: Use "create_vm_params" dict argument instead of ``**kwargs`` for instance parameters. * **Context**: * [improved] images .. warning:: The min_ram and min_disk arguments in favor of image_args, which lets the user specify any image creation keyword arguments they want. 
Bug fixes ~~~~~~~~~ **6 bugs were fixed**: * #1522935: CinderVolumes.create_and_attach_volume does not accept additional args for create_volume * #1530770: "rally verify" fails with error 'TempestResourcesContext' object has no attribute 'generate_random_name' * #1530075: cirros_img_url in rally.conf doesn't take effective in verification tempest * #1517839: Make CONF.set_override with parameter enforce_type=True by default * #1489059: "db type could not be determined" running py34 * #1262123: Horizon is unreachable outside VM when we are using DevStack + OpenStack Documentation ~~~~~~~~~~~~~ None. Thanks ~~~~~~ 2 Everybody! rally-0.9.1/doc/release_notes/archive/v0.7.0.rst0000664000567000056710000001332213073417716022417 0ustar jenkinsjenkins00000000000000============ Rally v0.7.0 ============ Overview -------- +------------------+-----------------------+ | Release date | **10/11/2016** | +------------------+-----------------------+ Details ------- Specs & Feature Requests ~~~~~~~~~~~~~~~~~~~~~~~~ * [Used] Ported all rally scenarios to class base `Spec reference `_ * `[Implemented] New Plugins Type - Hook `_ Database ~~~~~~~~ .. warning:: Database schema is changed, you must run `rally-manage db upgrade `_ to be able to use old Rally installation with latest release. * [require migration] fix for wrong format of "verification_log" of tasks * [require migration] remove admin_domain_name from OpenStack deployments Rally Deployment ~~~~~~~~~~~~~~~~ * Remove admin_domain_name from openstack deployment Reason: admin_domain_name parameter is absent in Keystone Credentials. Rally Task ~~~~~~~~~~ * [Trends][Reports] Use timestamps on X axis in trends report * [Reports] Add new OutputTextArea chart plugin New chart plugin can show arbitrary textual data on "Scenario Stata -> Per iteration" tab. This finally allows to show non-numeric data like IP addresses, notes and even long comments. Plugin `Dummy.dummy_output `_ is also updated to provide demonstration. 
* [cli] Add version info to *rally task start* output * [api] Allow to delete stopped tasks without force=True It is reasonable to protect deletion of running tasks (statuses INIT, VERIFYING, RUNNING, ABORTING and so on...) but it is strange to protect deletion for stopped tasks (statuses FAILED and ABORTED). Also this is annoying in CLI usage. * Added hooks and triggers. Hook is a new entity which can be launched on specific events. Trigger is another new entity which processes events and launches hooks. For example, hook can launch specific destructive action - just execute cli command(we have sys_call hook for this task) and it can be launched by simple trigger on specific iteration(s) or time (there is event trigger). Rally Verify ~~~~~~~~~~~~ Scenario tests in Tempest require an image file. Logic of obtaining this image is changed: * If CONF.tempest.img_name_regex is set, Rally tries to find an image matching to the regex in Glance and download it for the tests. * If CONF.tempest.img_name_regex is not set (or Rally didn't find the image matching to CONF.tempest.img_name_regex), Rally downloads the image by the link specified in CONF.tempest.img_url. Plugins ~~~~~~~ **Scenarios**: * *Removed*: `Dummy.dummy_with_scenario_output `_ It was deprecated in 0.5.0 .. 
warning:: This plugin is not available anymore in 0.7.0 * *NEW!!*: - `MagnumClusterTemplates.list_cluster_templates `_ - `MagnumClusters.list_clusters `_ - `MagnumClusters.create_and_list_clusters `_ - `NovaAggregates.create_aggregate_add_and_remove_host `_ - `NovaAggregates.create_and_list_aggregates `_ - `NovaAggregates.create_and_delete_aggregate `_ - `NovaAggregates.create_and_update_aggregate `_ - `NovaFlavors.create_and_get_flavor `_ - `NovaFlavors.create_flavor_and_set_keys `_ - `NovaHypervisors.list_and_get_hypervisors `_ - `NovaServers.boot_server_associate_and_dissociate_floating_ip `_ - `KeystoneBasic.authenticate_user_and_validate_token `_ **Contexts**: * *NEW!!*: - `Manila manila_security_services `_ - `Magnum cluster_templates `_ - `Magnum clusters `_ **OSClients**: Port all openstack clients to use keystone session. Bug fixes ~~~~~~~~~ * [tasks] rally task detailed incorrect / inconsistent output `Launchpad bug-report #1562713 `_ Thanks ~~~~~~ 2 Everybody! rally-0.9.1/doc/release_notes/archive/v0.0.1.rst0000664000567000056710000000123313073417716022407 0ustar jenkinsjenkins00000000000000============ Rally v0.0.1 ============ Information ----------- +------------------+-----------------+ | Commits | **1039** | +------------------+-----------------+ | Bug fixes | **0** | +------------------+-----------------+ | Dev cycle | **547 days** | +------------------+-----------------+ | Release date | **26/Jan/2015** | +------------------+-----------------+ Details ------- Rally is awesome tool for testing verifying and benchmarking OpenStack clouds. A lot of people started using Rally in their CI/CD so Rally team should provide more stable product with clear strategy of deprecation and upgrades. 
rally-0.9.1/doc/release_notes/archive/v0.9.1.rst0000664000567000056710000000365613073417720022426 0ustar jenkinsjenkins00000000000000============ Rally v0.9.1 ============ Overview -------- +------------------+-----------------------+ | Release date | **4/12/2017** | +------------------+-----------------------+ Details ------- Unfortunately, Rally 0.9.0 contains various bugs. We work hard to fix them, improve our CI to avoid such issues in future and ready to present a new Rally release which includes only bug-fixes. Fixed bugs ~~~~~~~~~~ * [deployment] Credentials is not updated as soon as deployment is recreated. Need to call recreate request twice. `Launchpad bug-report #1675271 `_ * [task] Scenario `IronicNodes.create_and_list_node `_ had a wrong check that list of all nodes contains newly created one. * [task][cleanup] Do not remove quotas in case of existing users * [task][cleanup] Various traces of neutron resources * [core] Keystone v3, authentication error for Rally users if the value of project_domain_name of admin user isn't equal "default" `Launchpad bug-report #1680837 `_ * [task] Scenario `NovaHosts.list_and_get_hosts `_ obtains hostname for all hosts. But it fails in some environments if host is not compute. `Launchpad bug-report #1675254 `_ * [verification] Rally fails to run on systems on which python-virtualenv is not installed `Launchpad bug-report #1678047 `_ * [verification] CLI `rally verify rerun `_ fails with TypeError due to wring integration with Rally API. Thanks ~~~~~~ 2 Everybody! 
rally-0.9.1/doc/release_notes/archive/v0.0.4.rst0000664000567000056710000001617713073417716022427 0ustar jenkinsjenkins00000000000000============ Rally v0.0.4 ============ Information ----------- +------------------+-----------------+ | Commits | **87** | +------------------+-----------------+ | Bug fixes | **21** | +------------------+-----------------+ | Dev cycle | **30 days** | +------------------+-----------------+ | Release date | **14/May/2015** | +------------------+-----------------+ Details ------- This release contains new features, new benchmark plugins, bug fixes, various code and API improvements. New Features & API changes ~~~~~~~~~~~~~~~~~~~~~~~~~~ * Rally now can generate load with users that already exist Now one can use Rally for benchmarking OpenStack clouds that are using LDAP, AD or any other read-only keystone backend where it is not possible to create any users. To do this, one should set up the "users" section of the deployment configuration of the ExistingCloud type. This feature also makes it safer to run Rally against production clouds: when run from an isolated group of users, Rally won’t affect rest of the cloud users if something goes wrong. * New decorator *@osclients.Clients.register* can add new OpenStack clients at runtime It is now possible to add a new OpenStack client dynamically at runtime. The added client will be available from osclients.Clients at the module level and cached. Example: .. code-block:: none >>> from rally import osclients >>> @osclients.Clients.register("supernova") ... def another_nova_client(self): ... from novaclient import client as nova ... return nova.Client("2", auth_token=self.keystone().auth_token, ... **self._get_auth_info(password_key="key")) ... >>> clients = osclients.Clients.create_from_env() >>> clients.supernova().services.list()[:2] [, ] * Assert methods now available for scenarios and contexts There is now a new *FunctionalMixin* class that implements basic unittest assert methods. 
The *base.Context* and *base.Scenario* classes inherit from this mixin, so now it is possible to use *base.assertX()* methods in scenarios and contexts. * Improved installation script The installation script has been almost completely rewritten. After this change, it can be run from an unprivileged user, supports different database types, allows to specify a custom python binary, always asks confirmation before doing potentially dangerous actions, automatically install needed software if run as root, and also automatically cleans up the virtualenv and/or the downloaded repository if interrupted. Specs & Feature requests ~~~~~~~~~~~~~~~~~~~~~~~~ * [Spec] Reorder plugins The spec describes how to split Rally framework and plugins codebase to make it simpler for newbies to understand how Rally code is organized and how it works. * [Feature request] Specify what benchmarks to execute in task This feature request proposes to add the ability to specify benchmark(s) to be executed when the user runs the *rally task start* command. A possible solution would be to add a special flag to the *rally task start* command. Plugins ~~~~~~~ * **Benchmark Scenario Runners**: * Add limits for maximum Core usage to constant and rps runners The new 'max_cpu_usage' parameter can be used to avoid possible 100% usage of all available CPU cores by reducing the number of CPU cores available for processes started by the corresponding runner. 
* **Benchmark Scenarios**: * [new] KeystoneBasic.create_update_and_delete_tenant * [new] KeystoneBasic.create_user_update_password * [new] NovaServers.shelve_and_unshelve_server * [new] NovaServers.boot_and_associate_floating_ip * [new] NovaServers.boot_lock_unlock_and_delete * [new] NovaHypervisors.list_hypervisors * [new] CeilometerSamples.list_samples * [new] CeilometerResource.get_resources_on_tenant * [new] SwiftObjects.create_container_and_object_then_delete_all * [new] SwiftObjects.create_container_and_object_then_download_object * [new] SwiftObjects.create_container_and_object_then_list_objects * [new] MuranoEnvironments.create_and_deploy_environment * [new] HttpRequests.check_random_request * [new] HttpRequests.check_request * [improved] NovaServers live migrate benchmarks add 'min_sleep' and 'max_sleep' parameters to simulate a pause between VM booting and running live migration * [improved] NovaServers.boot_and_live_migrate_server add a usage sample to samples/tasks * [improved] CinderVolumes benchmarks support size range to be passed to the 'size' argument as a dictionary *{"min": , "max": }* * **Benchmark Contexts**: * [new] MuranoPackage This new context can upload a package to Murano from some specified path. * [new] CeilometerSampleGenerator Context that can be used for creating samples and collecting resources for benchmarks in a list. * **Benchmark SLA**: * [new] outliers This new SLA checks that the number of outliers (calculated from the mean and standard deviation of the iteration durations) does not exceed some maximum value. The SLA is highly configurable: the parameters used for outliers threshold calculation can be set by the user. Bug fixes ~~~~~~~~~ **21 bugs were fixed, the most critical are**: * Make it possible to use relative imports for plugins that are outside of rally package. * Fix heat stacks cleanup by deleting them only 1 time per tenant (get rid of "stack not found" errors in logs). 
* Fix the wrong behavior of 'rally task detailed --iterations-data' (it lacked the iteration info before). * Fix security groups cleanup: a security group called "default", created automatically by Neutron, did not get deleted for each tenant. Other changes ~~~~~~~~~~~~~~~~~~~~~~~~~~ * Streaming algorithms that scale This release introduces the common/streaming_algorithms.py module. This module is going to contain implementations of benchmark data processing algorithms that scale: these algorithms do not store exhaustive information about every single benchmark iteration duration processed. For now, the module contains implementations of algorithms for computation of mean & standard deviation. * Coverage job to check that new patches come with unit tests Rally now has a coverage job that checks that every patch submitted for review does not decrease the number of lines covered by unit tests (at least too much). This job allows to mark most patches with no unit tests with '-1'. * Splitting the plugins code (Runners & SLA) into common/openstack plugins According to the spec "Reorder plugins" (see above), the plugins code for runners and SLA has been moved to the *plugins/common/* directory. Only base classes now remain in the *benchmark/* directory. Documentation ~~~~~~~~~~~~~ * Various fixes * Remove obsolete *.rst* files (*deploy_engines.rst* / *server_providers.rst* / ...) 
* Restructure the docs files to make them easier to navigate through * Move the chapter on task templates to the 4th step in the tutorial * Update the information about meetings (new release meeting & time changes) rally-0.9.1/doc/release_notes/archive/v0.9.0.rst0000664000567000056710000001265713073417716022433 0ustar jenkinsjenkins00000000000000============ Rally v0.9.0 ============ Overview -------- +------------------+-----------------------+ | Release date | **3/20/2017** | +------------------+-----------------------+ Details ------- Command Line Interface ~~~~~~~~~~~~~~~~~~~~~~ * `rally plugin list` now does not contain hidden plugins. Task component ~~~~~~~~~~~~~~ * Added check for duplicated keys in task files. * The order of subtasks (scenarios/workloads) is not ignored any more. You can generate whatever you want load or use that feature for up the cloud (put small scenario to the start of task to wake up the cloud before the real load). * Information about workload creation is added to HTML-reports. * Task statuses is changed to be more clear and cover more cases: - ``verifying`` is renamed to ``validating``. - ``failed`` is divided for 2 statuses - ``validation_failed``, which means that task did not pass validation step, and ``crashed``, which means that something went wrong in rally engine. * Our awesome cleanup become more awesome! The filter mechanism is improved to discover resources in projects created only by Rally (it works for most of resources, except several network-related ). It makes possible to run Rally with existing users in real tenants without fear to remove something important. Verification component ~~~~~~~~~~~~~~~~~~~~~~ * Fixed an issue with missed tests while listing all supported tests of specified verifier. * Fixed an issue with displaying the wrong version of verifier in case of cloning from the local directory. * Extend `rally verify rerun `_ with ``--detailed``, ``--no-use``, ``--tag`` and ``--concurrency`` arguments. 
* Add output examples for `JSON `_ and `JUnit-XML `_ reporters. Plugins ~~~~~~~ **Contexts** * Extend cinder quotas to support ``backups`` and ``backup_gigabytes``. **Deployment Engines**: *Updated* Extend `DevstackEngine `_ with ``enable_plugin`` option. **OpenStack clients**: * Extend support for auth urls like ``https://example.com:35357/foo/bar/v3`` * Pass endpoint type to heatclient **Scenarios**: * *NEW!!* - `CinderVolumeTypes.create_and_delete_encryption_type `_ - `CinderVolumeTypes.create_and_set_volume_type_keys `_ - `KeystoneBasic.create_and_list_roles `_ - `KeystoneBasic.create_and_update_user `_ - `NovaKeypair.create_and_get_keypair `_ - `NovaServers.resize_shutoff_server `_ - `VMTasks.dd_load_test `_ * *UPDATED!!* - Extend `VMTasks.boot_runcommand_delete `_ to display just raw text output of executed command. * *DELETED* Scenario `VMTasks.boot_runcommand_delete_custom_image `_ is removed since `VMTasks.boot_runcommand_delete `_ covers the case of that particular scenario without adding any complexity. **Validators**: * Extend ``required_contexts`` validator to support ``at least one of the`` logic. * Fix a bunch of JSON schemas which are used for validation of all plugins. Documentation ~~~~~~~~~~~~~ We totally reworked `Plugins Reference `_ page. Now it looks more like `Command Line Interface `_, which means that you can get links for particular parameter of particular plugin. Also, you can find expected parameters and their types of all contexts, hooks, SLAs and so on! Most of them still miss descriptions, but we are working on adding them. Fixed bugs ~~~~~~~~~~ * [osclients] Custom auth mechanism was used for zaqarclient instead of unified keystonesession, which led to auth errors at some envs. * [plugins] During running `CinderVolumes.create_and_restore_volume_backup `_ scenario we had a race problem with backup deleting due to wrong check of backup status. 
* [plugins][verifications] Jenkins expexts "classname" JUnitXML attribute instead of "class_name". Thanks ~~~~~~ 2 Everybody! rally-0.9.1/doc/release_notes/archive/v0.4.0.rst0000664000567000056710000002061413073417716022416 0ustar jenkinsjenkins00000000000000============ Rally v0.4.0 ============ Information ----------- +------------------+-----------------------+ | Commits | **76** | +------------------+-----------------------+ | Bug fixes | **12** | +------------------+-----------------------+ | Dev cycle | **28 days** | +------------------+-----------------------+ | Release date | **4/18/2016** | +------------------+-----------------------+ Details ------- .. warning:: Rally DB schema was changed since previous release. See `HOWTO `_ about updating your database. CLI changes ~~~~~~~~~~~ * Add status messages of db migration process * Display task errors in human-friendly form * Support OS_PROJECT_NAME as well as OS_TENANT_NAME Messages ~~~~~~~~ * Removed deprecation warning in case of transmitted "name" attribute while creation neutron resources. .. warning:: Deprecated code was deleted. * Suppress warning insecure URL messages Do not spam end users by insecure URL messages because it is quite valid case in testing process Database ~~~~~~~~ While preparing for deployment refactoring: * db schema was changed; * migration with new column `credentials` to deployment model was added; * columns `users` and `admin` were dropped. Rally Task ~~~~~~~~~~ * Remove deprecated scenario output mechanism via returing value .. warning:: Deprecated code was deleted. * Friendlier error message with empty task file This is particularly useful when a Jinja2 template results in an empty task. The current error message isn't very helpful: Task config is invalid: `'NoneType' object has no attribute 'get'` * Add Heat template validator Plugins ~~~~~~~ **Scenarios**: * Extend VM bind actions with "pause_unpause", "suspend_resume", "lock_unlock", "shelve_unshelve". 
* Add exact error message into `VMTasks.runcommand_heat scenario`__ __ http://rally.readthedocs.org/en/0.4.0/plugin/plugin_reference.html#vmtasks-runcommand-heat-scenario * Add heat scenarios: output-show, output-list Current patch contains 4 scenarios from heat repo: - `output-show for old algorithm `_ - `output-show for new algorithm `_ - `output-list for old algorithm `_ - `output-list for new algorithm `_ **Contexts**: * Reduce default speed of users creation in users context from 30 to 20 by default. **SLAs**: * *NEW!!* MaxAverageDurationPerAtomic : Maximum average duration of one iterations atomic actions in seconds. `Plugin Reference `_ **Reports**: * Improve results calculation in charts.Table * Use int instead of float for Y axis. It's number of parallel iterations and it can't be float. * Remove accuracy that makes no sense, and creates a lot of noise on this graph * Include failed iterations as well, otherwise we will calculate load incorrectly * Graph should start from 0 (begging of experiment) * Add 2 points at the end of graph to get at the end of graph 0 iterations in parallel **Task Exporter**: In previous release we introduced new mechanism to export results in various external systems and various formats. In this release, we added first plugin for this stuff - `file_exporter` **Services**: Remove hardcoded timeout from heat service **Utils**: Make glance web uploads streamable Without this change entire file get's downloaded into memory and can cause issues. Rally Verify ~~~~~~~~~~~~ * Set time precision to 3 digits (instead of 5) after dot. * Don't use "--parallel" flag when concurrency == 1 If concurrency equals to 1, it means that we use only one thread to run Tempest tests and the "--parallel" flag is not needed. Plugin for DevStack ~~~~~~~~~~~~~~~~~~~ * Support to be enabled with different plugin name Allow rally to be installed by devstack through a different plugin name, e.g: .. 
code-block:: bash enable_plugin test-rally http://github.com/rally/rally.git master * Removed uncalled code Devstack won't "source plugin.sh source" any more. Bug fixes ~~~~~~~~~ **12 bugs were fixed**: * X-Fail mechanism did not work for TestCase which failed on setUp step If Tempest fails in a test's setUpClass(), there is only one subunit event for each TestCase. In this case, Rally did not check partial test with x-fail list and marked test as "fail" insted of "x-fail". `Launchpad bug-report #1568133 `_ * Weak isolation of scenario arguments between iterations Input arguments for sub-task were shared between all iterations. Rally team found one scenario which modified mutable input variable. Affected scenario: NeutronNetworks.create_and_update_ports * Incompatible filters between V1 and V2 for Glance images listing Glance V1 and V2 have different filters. For example, "owner" is a separate kwarg in V1, not a generic filter. Also, visibility has different labels in different APIs. We modified our Glance wrapper to support Glance V2 format of filters for both V1 and V2 * Wrong way to store validation errors Results of failed task validations saved in incorrect format. It broke and made un-userfriendly `rally task detailed` command. `Launchpad bug-report #1562713 `_ * Hardcoded task's status in `rally task results` If there are no results for task, `rally task results` printed message that task has failed status, but it can be not true(tasks in running state do not have results). `Launchpad bug-report #1539096 `_ * Tempest context failed to create network resources While we merged improvement for keystoneclient, we used wrong way to obtain tenant id in TempestContext. `Launchpad bug-report #1550848 `_ * Tasks based on Tempest failed to parse execution time. There is an ability in Rally to launch tasks based on Tempest. Since launch of Tempest is just subprocess, it is needed to parse subunit to set correct atomic actions. 
There was an issue while converting task execution time. `Launchpad bug-report #1566712 `_ * JSONSchema huge impact on task performance Before runner sent data to engine we were checking jsonschema. This operation is very expensive and in some cases it can take a lot of time. Here are test results, with Dummy.dummy_output scenario, sleep 0.5s (added manually), 8000 iterations, 400 in parallel: * on master branch before the fix: Load duration: 117.659588099 Full duration: 227.451056004 * on master before the fix but remove jsonschema validation in scenario: Load duration: 12.5437350273 Full duration: 128.942219973 * on this patch before the fix (pure python validation): Load duration: 11.5991640091 Full duration: 22.7199981213 * Wrong Calculation of running iterations in parallel Load profile chart was calculated wrongly. It showed more running iterations in parallel than actually are running. * Rally did not show "missing argument" error raised by argparse while parsing cli args `Launchpad bug-report #1562916 `_ * Issue while checking required arguments in CLI There was a possible issue in case of several required arguments `Launchpad bug-report #1555764 `_ * Prepare step of verification did not check visibility of obtained image When we request a list of images to choose one of them for tests, we should make sure all images are active and they are PUBLIC. If images are not public, we will have failures of Tempest tests as described in the bug. `Launchpad bug-report #1564431 `_ Thanks ~~~~~~ 2 Everybody! 
rally-0.9.1/doc/release_notes/archive/v0.0.3.rst0000664000567000056710000000736113073417716022421 0ustar jenkinsjenkins00000000000000============ Rally v0.0.3 ============ Information ----------- +------------------+-----------------+ | Commits | **53** | +------------------+-----------------+ | Bug fixes | **14** | +------------------+-----------------+ | Dev cycle | **33 days** | +------------------+-----------------+ | Release date | **14/Apr/2015** | +------------------+-----------------+ Details ------- This release contains new features, new benchmark plugins, bug fixes, various code and API improvements. New Features & API changes ~~~~~~~~~~~~~~~~~~~~~~~~~~ * Add the ability to specify versions for clients in benchmark scenarios You can call self.clients("glance", "2") and get any client for specific version. * Add API for tempest uninstall $ rally-manage tempest uninstall # removes fully tempest for active deployment * Add a --uuids-only option to rally task list $ rally task list --uuids-only # returns list with only task uuids * Adds endpoint to --fromenv deployment creation $ rally deployment create --fromenv # recognizes standard OS_ENDPOINT environment variable * Configure SSL per deployment Now SSL information is deployment specific not Rally specific and rally.conf option is deprecated Like in this sample https://github.com/openstack/rally/blob/14d0b5ba0c75ececfdb6a6c121d9cf2810571f77/samples/deployments/existing.json#L11-L12 Specs ~~~~~ * [spec] Proposal for new task input file format This spec describes new task input format that will allow us to generate multi scenario load which is crucial for HA and more real life testing: https://github.com/openstack/rally/blob/master/doc/specs/in-progress/new_rally_input_task_format.rst Plugins ~~~~~~~ * **Benchmark Scenario Runners**: * Add a maximum concurrency option to rps runner To avoid running to heavy load you can set 'concurrency' to configuration and in case if cloud is not able to process all 
requests it won't start more parallel requests then 'concurrency' value. * **Benchmark Scenarios**: [new] CeilometerAlarms.create_alarm_and_get_history [new] KeystoneBasic.get_entities [new] EC2Servers.boot_server [new] KeystoneBasic.create_and_delete_service [new] MuranoEnvironments.list_environments [new] MuranoEnvironments.create_and_delete_environment [new] NovaServers.suspend_and_resume_server [new] NovaServers.pause_and_unpause_server [new] NovaServers.boot_and_rebuild_server [new] KeystoneBasic.create_and_list_services [new] HeatStacks.list_stacks_and_events [improved] VMTask.boot_runcommand_delete restore ability to use fixed IP and floating IP to connect to VM via ssh [fix] NovaServers.boot_server_attach_created_volume_and_live_migrate Kwargs in nova scenario were wrongly passed * **Benchmark SLA**: * [new] aborted_on_sla This is internal SLA criteria, that is added if task was aborted * [new] something_went_wrong This is internal SLA criteria, that is added if something went wrong, context failed to create or runner raised some exceptions Bug fixes ~~~~~~~~~ **14 bugs were fixed, the most critical are**: * Set default task uuid to running task. Before it was set only after task was fully finished. * The "rally task results" command showed a disorienting "task not found" message for a task that is currently running. * Rally didn't know how to reconnect to OpenStack in case if token expired. Documentation ~~~~~~~~~~~~~ * New tutorial **task templates** https://rally.readthedocs.org/en/latest/tutorial/step_5_task_templates.html * Various fixes rally-0.9.1/doc/release_notes/archive.rst0000664000567000056710000000021013073417716021557 0ustar jenkinsjenkins00000000000000.. _release_notes/archive: ================= All release notes ================= .. 
toctree:: :glob: :maxdepth: 1 archive/* rally-0.9.1/tests/0000775000567000056710000000000013073420067015151 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/__init__.py0000664000567000056710000000000013073417716017257 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/ci/0000775000567000056710000000000013073420067015544 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/ci/test_install.sh0000775000567000056710000000253413073417716020623 0ustar jenkinsjenkins00000000000000#!/bin/sh -ex # # Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. env sudo yum remove -y python-crypto || true # NOTE(pabelanger): We run apt-get update to ensure we don't have a stale # package cache in the gate. 
sudo apt-get update || true sudo ./install_rally.sh --system --yes rally deployment list [ -d /etc/bash_completion.d ] && cat /etc/bash_completion.d/rally.bash_completion || true sudo ./install_rally.sh --system --yes rally deployment list sudo ./install_rally.sh --yes -d /tmp/rallytest_root/ /tmp/rallytest_root/bin/rally deployment list cat /tmp/rallytest_root/etc/bash_completion.d/rally.bash_completion sudo rm -fr ~/.rally ./install_rally.sh --yes -d /tmp/rallytest_user /tmp/rallytest_user/bin/rally deployment list ./install_rally.sh --overwrite --dbtype sqlite rally-0.9.1/tests/ci/hooks/0000775000567000056710000000000013073420067016667 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/ci/hooks/certification_post_test_hook.sh0000775000567000056710000000046713073417716025213 0ustar jenkinsjenkins00000000000000#!/usr/bin/env bash SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source $SCRIPT_DIR/../rally_gate_functions.sh setUp TASK=$RALLY_DIR/certification/openstack/task.yaml TASK_ARGS=$RALLY_DIR/rally-jobs/certifcation_task_args.yaml TASK_ARGS="--task-args-file $TASK_ARGS" run $TASK $TASK_ARGS rally-0.9.1/tests/ci/__init__.py0000664000567000056710000000000013073417716017652 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/ci/sync_requirements.py0000664000567000056710000003042113073417716021704 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Synchronizes, formats and prepares requirements to release(obtains and adds maximum allowed version). """ import argparse import logging import re import sys import textwrap import requests LOG = logging.getLogger(__name__) if not LOG.handlers: LOG.addHandler(logging.StreamHandler()) LOG.setLevel(logging.INFO) GLOBAL_REQUIREMENTS_LOCATIONS = ( "https://raw.githubusercontent.com/openstack/requirements/master/", "http://git.openstack.org/cgit/openstack/requirements/plain/" ) GLOBAL_REQUIREMENTS_FILENAME = "global-requirements.txt" RALLY_REQUIREMENTS_FILES = ( "requirements.txt", "test-requirements.txt" ) DO_NOT_TOUCH_TAG = "[do-not-touch]" class Comment(object): def __init__(self, s=None, finished=False): self._comments = [] self.is_finished = finished if s: self.append(s) def finish_him(self): self.is_finished = True def append(self, s): self._comments.append(s[1:].strip()) def __str__(self): return textwrap.fill("\n".join(self._comments), width=80, initial_indent="# ", subsequent_indent="# ") class Requirement(object): RE_NAME = re.compile(r"[a-zA-Z0-9-._]+") RE_CONST_VERSION = re.compile(r"==[a-zA-Z0-9.]+") RE_MIN_VERSION = re.compile(r">=?[a-zA-Z0-9.]+") RE_MAX_VERSION = re.compile(r"<=?[a-zA-Z0-9.]+") RE_NE_VERSIONS = re.compile(r"!=[a-zA-Z0-9.]+") # NOTE(andreykurilin): one license can have different labels. Let's use # unified variant. 
LICENSE_MAP = {"MIT license": "MIT", "MIT License": "MIT", "BSD License": "BSD", "Apache 2.0": "Apache License, Version 2.0"} def __init__(self, package_name, version): self.package_name = package_name self.version = version self._license = None self._pypy_info = None self.do_not_touch = False def sync_max_version_with_pypy(self): if isinstance(self.version, dict) and not self.do_not_touch: self.version["max"] = "<=%s" % self.pypy_info["info"]["version"] @property def pypy_info(self): if self._pypy_info is None: resp = requests.get("https://pypi.python.org/pypi/%s/json" % self.package_name) if resp.status_code != 200: raise Exception(resp.text) self._pypy_info = resp.json() return self._pypy_info @property def license(self): if self._license is None: if self.pypy_info["info"]["license"]: self._license = self.pypy_info["info"]["license"] else: # try to parse classifiers prefix = "License :: OSI Approved :: " classifiers = [c[len(prefix):] for c in self.pypy_info["info"]["classifiers"] if c.startswith(prefix)] self._license = "/".join(classifiers) self._license = self.LICENSE_MAP.get(self._license, self._license) if self._license == "UNKNOWN": self._license = None return self._license @classmethod def parse_line(cls, line): match = cls.RE_NAME.match(line) if match: name = match.group() # remove name versions = line.replace(name, "") # remove comments versions = versions.split("#")[0] # remove python classifiers versions = versions.split(";")[0].strip() if not cls.RE_CONST_VERSION.match(versions): versions = versions.strip().split(",") min_version = None max_version = None ne_versions = [] for version in versions: if cls.RE_MIN_VERSION.match(version): if min_version: raise Exception("Found several min versions for " "%s package." % name) min_version = version elif cls.RE_MAX_VERSION.match(version): if max_version: raise Exception("Found several max versions for " "%s package." 
% name) max_version = version elif cls.RE_NE_VERSIONS.match(version): ne_versions.append(version) versions = {"min": min_version, "max": max_version, "ne": ne_versions} return cls(name, versions) def __str__(self): if isinstance(self.version, dict): version = [] min_equal_to_max = False if self.version["min"] and self.version["max"]: if (self.version["min"].startswith(">=") and self.version["max"].startswith("<=") and self.version["min"][2:] == self.version["max"][2:]): # min and max versions are equal there is no need to write # both of them min_equal_to_max = True version.append("==%s" % self.version["min"][2:]) if not min_equal_to_max and self.version["min"]: version.append(self.version["min"]) if not min_equal_to_max and self.version["ne"]: version.extend(self.version["ne"]) if not min_equal_to_max and self.version["max"]: version.append(self.version["max"]) version = ",".join(version) else: if self.do_not_touch: version = self.version else: # remove const version version = ">=%s" % self.version[2:] string = "%s%s" % (self.package_name, version) if self.license: # NOTE(andreykurilin): When I start implementation of this script, # python-keystoneclient dependency string took around ~45-55 # chars, so let's use this length as indent. Feel free to modify # it to lower or greater value. 
magic_number = 55 if len(string) < magic_number: indent = magic_number - len(string) else: indent = 2 string += " " * indent + "# " + self.license return string def __eq__(self, other): return (isinstance(other, self.__class__) and self.package_name == other.package_name) def __ne__(self, other): return not self.__eq__(other) def parse_data(raw_data, include_comments=True): # first elem is None to simplify checks of last elem in requirements requirements = [None] for line in raw_data.split("\n"): if line.startswith("#"): if not include_comments: continue if getattr(requirements[-1], "is_finished", True): requirements.append(Comment()) requirements[-1].append(line) elif line == "": # just empty line if isinstance(requirements[-1], Comment): requirements[-1].finish_him() requirements.append(Comment(finished=True)) else: if (isinstance(requirements[-1], Comment) and not requirements[-1].is_finished): requirements[-1].finish_him() # parse_line req = Requirement.parse_line(line) if req: if (isinstance(requirements[-1], Comment) and DO_NOT_TOUCH_TAG in str(requirements[-1])): req.do_not_touch = True requirements.append(req) for i in range(len(requirements) - 1, 0, -1): # remove empty lines at the end of file if isinstance(requirements[i], Comment): if str(requirements[i]) == "": requirements.pop(i) else: break return requirements[1:] def _read_requirements(): """Read all rally requirements.""" LOG.info("Reading rally requirements...") for file_name in RALLY_REQUIREMENTS_FILES: LOG.debug("Try to read '%s'." % file_name) with open(file_name) as f: data = f.read() LOG.info("Parsing requirements from %s." % file_name) yield file_name, parse_data(data) def _write_requirements(filename, requirements): """Saves requirements to file.""" LOG.info("Saving requirements to %s." 
% filename) with open(filename, "w") as f: for entity in requirements: f.write(str(entity)) f.write("\n") def _sync(): LOG.info("Obtaining global-requirements...") for i in range(0, len(GLOBAL_REQUIREMENTS_LOCATIONS)): url = GLOBAL_REQUIREMENTS_LOCATIONS[i] + GLOBAL_REQUIREMENTS_FILENAME LOG.debug("Try to obtain global-requirements from %s" % url) try: raw_gr = requests.get(url).text except requests.ConnectionError as e: LOG.exception(e) if i == len(GLOBAL_REQUIREMENTS_LOCATIONS) - 1: # there are no more urls to try raise Exception("Unable to obtain %s" % GLOBAL_REQUIREMENTS_FILENAME) else: break LOG.info("Parsing global-requirements...") # NOTE(andreykurilin): global-requirements includes comments which can be # unrelated to Rally project. gr = parse_data(raw_gr, include_comments=False) for filename, requirements in _read_requirements(): for i in range(0, len(requirements)): if (isinstance(requirements[i], Requirement) and not requirements[i].do_not_touch): try: gr_item = gr[gr.index(requirements[i])] except ValueError: # it not g-r requirements if isinstance(requirements[i].version, dict): requirements[i].version["max"] = None else: requirements[i].version = gr_item.version yield filename, requirements def sync(): """Synchronizes Rally requirements with OpenStack global-requirements.""" for filename, requirements in _sync(): _write_requirements(filename, requirements) def format_requirements(): """Obtain package licenses from pypy and write requirements to file.""" for filename, requirements in _read_requirements(): _write_requirements(filename, requirements) def add_uppers(): """Obtains latest version of packages and put them to requirements.""" for filename, requirements in _sync(): LOG.info("Obtaining latest versions of packages for %s." 
% filename) for req in requirements: if isinstance(req, Requirement): if isinstance(req.version, dict) and not req.version["max"]: req.sync_max_version_with_pypy() _write_requirements(filename, requirements) def main(): parser = argparse.ArgumentParser( prog="Python Requirement Manager for Rally", description=__doc__.strip(), add_help=True ) action_groups = parser.add_mutually_exclusive_group() action_groups.add_argument("--format", action="store_const", const=format_requirements, dest="action") action_groups.add_argument("--add-upper", action="store_const", const=add_uppers, dest="action") action_groups.set_defaults(action=sync) parser.parse_args(sys.argv[1:]).action() if __name__ == "__main__": sys.exit(main()) rally-0.9.1/tests/ci/rally-gate.sh0000775000567000056710000000230713073417716020155 0ustar jenkinsjenkins00000000000000#!/bin/bash -ex # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # This script is executed by post_test_hook function in devstack gate. SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source $SCRIPT_DIR/rally_gate_functions.sh PROJECT=`echo $ZUUL_PROJECT | cut -d \/ -f 2` RALLY_JOB_DIR=$BASE/new/$PROJECT/rally-scenarios if [ ! 
-d $RALLY_JOB_DIR ]; then RALLY_JOB_DIR=$BASE/new/$PROJECT/rally-jobs fi echo $RALLY_JOB_DIR echo $RALLY_DIR ls $BASE/new/$PROJECT setUp $RALLY_JOB_DIR BASE_FOR_TASK=${RALLY_JOB_DIR}/${RALLY_SCENARIO} TASK=${BASE_FOR_TASK}.yaml TASK_ARGS="" if [ -f ${BASE_FOR_TASK}_args.yaml ]; then TASK_ARGS="--task-args-file ${BASE_FOR_TASK}_args.yaml" fi run $TASK $TASK_ARGS rally-0.9.1/tests/ci/pytest_launcher.py0000775000567000056710000001066313073417720021342 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import argparse import os import subprocess import sys PYTEST_REPORT = os.environ.get("PYTEST_REPORT", ".test_results/pytest_results.html") TESTR_REPORT = "testr_results.html" PYTEST_ARGUMENTS = ("py.test" # base command " --html=%(html_report)s" # html report " --durations=10" # get a list of the slowest 10 tests " %(path)s" ) def error(msg): print(msg) exit(1) def main(args): parser = argparse.ArgumentParser(args[0]) parser.add_argument("discovery_path", metavar="", type=str, help="Path to location of all tests.") parser.add_argument("--posargs", metavar="", type=str, default="", help="TOX posargs. Currently supported only string to " "partial test or tests group to launch.") parser.add_argument("--timeout", metavar="", type=int, default=60, help="Timeout for individual test execution. 
" "Defaults to 60") args = parser.parse_args(args[1:]) # We allow only one parameter - path to partial test or tests group path = args.posargs if len(path.split(" ")) > 1: error("Wrong value of posargs. It should include only path to single " "test or tests group to launch.") # NOTE(andreykurilin): Previously, next format was supported: # tests.unit.test_osclients.SomeTestCase.some_method # It is more simple and pythonic than native pytest-way: # tests/unit/test_osclients.py::SomeTestCase::some_method # Let's return this support if path: if "/" not in path: path = path.split(".") module = "" for i in range(0, len(path)): part = os.path.join(module, path[i]) if os.path.exists(part): module = part continue if os.path.exists("%s.py" % part): if i != (len(path) - 1): module = "%s.py::%s" % (part, "::".join(path[i + 1:])) else: module = "%s.py" % part break error("Non-existing path to single test or tests group to " "launch. %s %s" % (module, part)) path = module path = os.path.abspath(os.path.expanduser(path)) if not path.startswith(os.path.abspath(args.discovery_path)): # Prevent to launch functional tests from unit tests launcher. error("Wrong path to single test or tests group to launch. It " "should be in %s." % args.discovery_path) else: path = args.discovery_path print("Test(s) to launch (pytest format): %s" % path) # NOTE(andreykurilin): we cannot publish pytest reports at gates, but we # can mask them as testr reports. It looks like a dirty hack and I # prefer to avoid it, but I see no other solutions at this point. # apply dirty hack only in gates. if os.environ.get("ZUUL_PROJECT"): pytest_report = TESTR_REPORT else: pytest_report = PYTEST_REPORT args = PYTEST_ARGUMENTS % {"html_report": pytest_report, "path": path, "timeout": args.timeout} try: subprocess.check_call(args.split(" "), stderr=subprocess.STDOUT) except subprocess.CalledProcessError: # NOTE(andreykurilin): it is ok, since tests can fail. 
exit_code = 1 else: exit_code = 0 if os.path.exists(pytest_report) and os.environ.get("ZUUL_PROJECT"): subprocess.check_call(["gzip", "-9", "-f", pytest_report], stderr=subprocess.STDOUT) if exit_code == 1: error("") if __name__ == "__main__": sys.exit(main(sys.argv)) rally-0.9.1/tests/ci/rally_app.py0000664000567000056710000000156413073417716020116 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Simple app based on rally api for testing porpuses""" import sys from rally import api as rapi def main(): api = rapi.API(config_args=sys.argv[1:]) print(len(api.task.list())) return 0 if __name__ == "__main__": sys.exit(main()) rally-0.9.1/tests/ci/osresources.py0000775000567000056710000003263313073417720020506 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""List and compare most used OpenStack cloud resources.""" import argparse import json import subprocess import sys import six from rally.cli import cliutils from rally.common import objects from rally.common.plugin import discover from rally import consts from rally import osclients def skip_if_service(service): def wrapper(func): def inner(self): if service in self.clients.services().values(): return [] return func(self) return inner return wrapper class ResourceManager(object): REQUIRED_SERVICE = None STR_ATTRS = ("id", "name") def __init__(self, clients): self.clients = clients def is_available(self): if self.REQUIRED_SERVICE: return self.REQUIRED_SERVICE in self.clients.services().values() return True @property def client(self): return getattr(self.clients, self.__class__.__name__.lower())() def get_resources(self): all_resources = [] cls = self.__class__.__name__.lower() for prop in dir(self): if not prop.startswith("list_"): continue f = getattr(self, prop) resources = f() or [] resource_name = prop[5:][:-1] for raw_res in resources: res = {"cls": cls, "resource_name": resource_name, "id": {}, "props": {}} if not isinstance(raw_res, dict): raw_res = {k: getattr(raw_res, k) for k in dir(raw_res) if not k.startswith("_") if not callable(getattr(raw_res, k))} for key, value in raw_res.items(): if key.startswith("_"): continue if key in self.STR_ATTRS: res["id"][key] = value else: try: res["props"][key] = json.dumps(value, indent=2) except TypeError: res["props"][key] = str(value) if not res["id"] and not res["props"]: raise ValueError("Failed to represent resource %r" % raw_res) all_resources.append(res) return all_resources class Keystone(ResourceManager): REQUIRED_SERVICE = consts.Service.KEYSTONE def list_users(self): return self.client.users.list() def list_tenants(self): if hasattr(self.client, "projects"): return self.client.projects.list() # V3 return self.client.tenants.list() # V2 def list_roles(self): return self.client.roles.list() def 
list_ec2credentials(self): users = self.list_users() ec2_list = [] for user in users: ec2_list.extend( self.client.ec2.list(user.id)) return ec2_list class Magnum(ResourceManager): REQUIRED_SERVICE = consts.Service.MAGNUM def list_cluster_templates(self): result = [] marker = None while True: ct_list = self.client.cluster_templates.list(marker=marker) if not ct_list: break result.extend(ct_list) marker = ct_list[-1].uuid return result def list_clusters(self): result = [] marker = None while True: clusters = self.client.clusters.list(marker=marker) if not clusters: break result.extend(clusters) marker = clusters[-1].uuid return result class Mistral(ResourceManager): REQUIRED_SERVICE = consts.Service.MISTRAL def list_workbooks(self): return self.client.workbooks.list() def list_workflows(self): return self.client.workflows.list() def list_executions(self): return self.client.executions.list() class Nova(ResourceManager): REQUIRED_SERVICE = consts.Service.NOVA def list_flavors(self): return self.client.flavors.list() def list_floating_ip_pools(self): return self.client.floating_ip_pools.list() @skip_if_service(consts.Service.NEUTRON) def list_floating_ips(self): return self.client.floating_ips.list() def list_floating_ips_bulk(self): return self.client.floating_ips_bulk.list() def list_aggregates(self): return self.client.aggregates.list() def list_hosts(self): return self.client.hosts.list() def list_hypervisors(self): return self.client.hypervisors.list() def list_agents(self): return self.client.agents.list() def list_keypairs(self): return self.client.keypairs.list() @skip_if_service(consts.Service.NEUTRON) def list_networks(self): return self.client.networks.list() @skip_if_service(consts.Service.NEUTRON) def list_security_groups(self): return self.client.security_groups.list( search_opts={"all_tenants": True}) def list_servers(self): return self.client.servers.list( search_opts={"all_tenants": True}) def list_services(self): return self.client.services.list() 
def list_availability_zones(self): return self.client.availability_zones.list() class Neutron(ResourceManager): REQUIRED_SERVICE = consts.Service.NEUTRON def has_extension(self, name): extensions = self.client.list_extensions().get("extensions", []) return any(ext.get("alias") == name for ext in extensions) def list_networks(self): return self.client.list_networks()["networks"] def list_subnets(self): return self.client.list_subnets()["subnets"] def list_routers(self): return self.client.list_routers()["routers"] def list_ports(self): return self.client.list_ports()["ports"] def list_floatingips(self): return self.client.list_floatingips()["floatingips"] def list_security_groups(self): return self.client.list_security_groups()["security_groups"] def list_health_monitors(self): if self.has_extension("lbaas"): return self.client.list_health_monitors()["health_monitors"] def list_pools(self): if self.has_extension("lbaas"): return self.client.list_pools()["pools"] def list_vips(self): if self.has_extension("lbaas"): return self.client.list_vips()["vips"] class Glance(ResourceManager): REQUIRED_SERVICE = consts.Service.GLANCE def list_images(self): return self.client.images.list() class Heat(ResourceManager): REQUIRED_SERVICE = consts.Service.HEAT def list_resource_types(self): return self.client.resource_types.list() def list_stacks(self): return self.client.stacks.list() class Cinder(ResourceManager): REQUIRED_SERVICE = consts.Service.CINDER def list_availability_zones(self): return self.client.availability_zones.list() def list_backups(self): return self.client.backups.list() def list_volume_snapshots(self): return self.client.volume_snapshots.list() def list_volume_types(self): return self.client.volume_types.list() def list_encryption_types(self): return self.client.volume_encryption_types.list() def list_transfers(self): return self.client.transfers.list() def list_volumes(self): # ignore cache volumes for images volumes = 
self.client.volumes.list(search_opts={"all_tenants": True}) return [v for v in volumes if not v.name.startswith("image-")] class Senlin(ResourceManager): REQUIRED_SERVICE = consts.Service.SENLIN def list_clusters(self): return self.client.clusters() def list_profiles(self): return self.client.profiles() class Watcher(ResourceManager): REQUIRED_SERVICE = consts.Service.WATCHER REPR_KEYS = ("uuid", "name") def list_audits(self): return self.client.audit.list() def list_audit_templates(self): return self.client.audit_template.list() def list_goals(self): return self.client.goal.list() def list_strategies(self): return self.client.strategy.list() def list_action_plans(self): return self.client.action_plan.list() class CloudResources(object): """List and compare cloud resources. resources = CloudResources(auth_url=..., ...) saved_list = resources.list() # Do something with the cloud ... changes = resources.compare(saved_list) has_changed = any(changes) removed, added = changes """ def __init__(self, **kwargs): self.clients = osclients.Clients(objects.Credential(**kwargs)) def list(self): managers_classes = discover.itersubclasses(ResourceManager) resources = [] for cls in managers_classes: manager = cls(self.clients) if manager.is_available(): resources.extend(manager.get_resources()) return resources def compare(self, with_list): def make_uuid(res): return"%s.%s:%s" % ( res["cls"], res["resource_name"], ";".join(["%s=%s" % (k, v) for k, v in sorted(res["id"].items())])) current_resources = dict((make_uuid(r), r) for r in self.list()) saved_resources = dict((make_uuid(r), r) for r in with_list) removed = set(saved_resources.keys()) - set(current_resources.keys()) removed = [saved_resources[k] for k in sorted(removed)] added = set(current_resources.keys()) - set(saved_resources.keys()) added = [current_resources[k] for k in sorted(added)] return removed, added def _print_tabular_resources(resources, table_label): def dict_formatter(d): return "\n".join("%s:%s" % (k, v) 
for k, v in d.items()) cliutils.print_list( objs=[dict(r) for r in resources], fields=("cls", "resource_name", "id", "fields"), field_labels=("service", "resource type", "id", "fields"), table_label=table_label, formatters={"id": lambda d: dict_formatter(d["id"]), "fields": lambda d: dict_formatter(d["props"])} ) print("") def main(): parser = argparse.ArgumentParser( description=("Save list of OpenStack cloud resources or compare " "with previously saved list.")) parser.add_argument("--credentials", type=argparse.FileType("r"), metavar="", help="cloud credentials in JSON format") group = parser.add_mutually_exclusive_group(required=True) group.add_argument("--dump-list", type=argparse.FileType("w"), metavar="", help="dump resources to given file in JSON format") group.add_argument("--compare-with-list", type=argparse.FileType("r"), metavar="", help=("compare current resources with a list from " "given JSON file")) args = parser.parse_args() if args.credentials: config = json.load(args.credentials) else: out = subprocess.check_output(["rally", "deployment", "config"]) config = json.loads(out if six.PY2 else out.decode("utf-8")) config.update(config.pop("admin")) del config["type"] if "users" in config: del config["users"] resources = CloudResources(**config) if args.dump_list: resources_list = resources.list() json.dump(resources_list, args.dump_list) elif args.compare_with_list: given_list = json.load(args.compare_with_list) changes = resources.compare(with_list=given_list) removed, added = changes # filter out expected additions expected = [] for resource in added: if ((resource["cls"] == "keystone" and resource["resource_name"] == "role" and resource["id"].get("name") == "_member_") or (resource["cls"] == "nova" and resource["resource_name"] == "security_group" and resource["id"].get("name") == "default")): expected.append(resource) for resource in expected: added.remove(resource) if removed: _print_tabular_resources(removed, "Removed resources") if added: 
_print_tabular_resources(added, "Added resources (unexpected)") if expected: _print_tabular_resources(expected, "Added resources (expected)") if any(changes): return 0 # `1' will fail gate job return 0 if __name__ == "__main__": sys.exit(main()) rally-0.9.1/tests/ci/cover.sh0000775000567000056710000000436313073417716017236 0ustar jenkinsjenkins00000000000000#!/bin/bash # # Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ALLOWED_EXTRA_MISSING=4 show_diff () { head -1 $1 diff -U 0 $1 $2 | sed 1,2d } if ! git diff --exit-code || ! git diff --cached --exit-code then echo "There are uncommitted changes!" 
echo "Please clean git working directory and try again" exit 1 fi # Checkout master and save coverage report git checkout HEAD^ baseline_report=$(mktemp -t rally_coverageXXXXXXX) py.test --cov=rally tests/unit/ --cov-report=html coverage report > $baseline_report mv cover cover-master cat $baseline_report baseline_missing=$(awk 'END { print $3 }' $baseline_report) # Checkout back and save coverage report git checkout - current_report=$(mktemp -t rally_coverageXXXXXXX) py.test --cov=rally tests/unit/ --cov-report=html coverage report > $current_report current_missing=$(awk 'END { print $3 }' $current_report) # Show coverage details allowed_missing=$((baseline_missing+ALLOWED_EXTRA_MISSING)) echo "Allowed to introduce missing lines : ${ALLOWED_EXTRA_MISSING}" echo "Missing lines in master : ${baseline_missing}" echo "Missing lines in proposed change : ${current_missing}" if [ $allowed_missing -gt $current_missing ]; then if [ $baseline_missing -lt $current_missing ]; then show_diff $baseline_report $current_report echo "I believe you can cover all your code with 100% coverage!" else echo "Thank you! You are awesome! Keep writing unit tests! :)" fi exit_code=0 else show_diff $baseline_report $current_report echo "Please write more unit tests, we should keep our test coverage :( " exit_code=1 fi rm $baseline_report $current_report exit $exit_code rally-0.9.1/tests/ci/README.rst0000664000567000056710000000705013073417720017237 0ustar jenkinsjenkins00000000000000=============== Rally Gate Jobs =============== For each patch submitted for review on Gerrit, there is a set of tests called **gate jobs** to be run against it. These tests check whether the Rally code works correctly after applying the patch and provide additional guarantees that it won't break the software when it gets merged. 
Rally gate jobs contain tests checking the codestyle (via *pep8*), unit test suites, functional tests and a set of Rally benchmark tasks that are executed against a real *devstack* deployment. rally-gate.sh ------------- This script runs a set of real Rally benchmark tasks and fetches their results in textual / visualized form (available via a special html page by clicking the corresponding job title in Gerrit). It checks that scenarios don't fail while being executed against a devstack deployment and also tests SLA criteria to ensure that benchmark tasks have completed successfully. rally-integrated.sh ------------------- This script runs a functional test suite for the Rally CLI. The tests call a range of Rally CLI commands and check that their output contains the expected data. rally-verify.sh --------------- This script runs various "rally verify" commands. This set of commands allows us to perform Tempest tests of a live OpenStack cloud and display verification results. The verification results obtained by running various "rally verify" commands, including "start", "show" and "list", are compared using the "rally verify results" command, and the results are then saved in csv, html and json formats in the "rally-verify" directory. Jenkins uses this script by running the 'gate-rally-dsvm-verify' job. test_install.sh --------------- This script tests the correct working of install_rally.sh, the script used for the installation of Rally. Jenkins tests this script by running it against Centos6 and Ubuntu 12.04 in the corresponding jobs 'gate-rally-install-bare-centos6' and 'gate-rally-install-bare-precise'. Jenkins ------- Jenkins is a Continuous Integration system which works as the scheduler. It receives events related to proposed changes, triggers tests based on those events, and reports back.
For each patch that is uploaded for review on Gerrit, Jenkins runs it against the various rally gate jobs listed below along with their functions and local equivalents: * gate-rally-pep8 : code style check (equal to tox -epep8) * gate-rally-docs : documention generation (equal to tox -edocs) * gate-rally-python27 : unit tests against python27 (equal to tox -epy27) * gate-rally-python34(non-voting) : unit tests against python34 ( equal to tox -epy34) (non-voting since not all tests pass this) * rally-coverage : generates unit test coverage (equal to tox -cover) * gate-rally-install-bare-centos6 : testing of test_install.sh(described above) against Centos * gate-rally-install-bare-precise : testing of test_install.sh(described above) against Ubuntu 10.04 * gate-rally-dsvm-rally : runs rally-gate.sh(described above) against OpenStack deployed by devstack with nova-network (It is standard dsvm job) * gate-rally-dsvm-neutron-rally : runs rally-gate.sh against OpenStack deployed by devastack with neutron * gate-rally-dsvm-cli : runs rally-integrated.sh ( equal to tox -ecli) * gate-rally-dsvm-verify(non-voting) : runs rally-verify.sh and tests Rally and Tempest integration in all possible ways * gate-rally-tox-self(non-voting) : not yet used and a success in these tests(except non-voting) would mean that the patch is approved by Jenkins. rally-0.9.1/tests/ci/rally_verify.py0000775000567000056710000002654113073417720020642 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
def call_rally(cmd, print_output=False, output_type=None):
    """Execute a Rally command and write result in files.

    The captured stdout is always gzip-compressed into
    ``<BASE_DIR>/<N>_<cmd>.txt.gz`` where N is a per-process call counter.
    When ``output_type`` is given, the command is additionally run with
    ``--to <file> --type <output_type>`` and that report file is gzipped
    in place afterwards.

    :param cmd: str, Rally sub-command to run (without the leading "rally")
    :param print_output: bool, echo the captured stdout to the console
    :param output_type: str or None, report format ("json", "html", ...)
    :returns: dict with keys "cmd", "stdout_file", "status" and, when
        output_type is given, "output_file"
    """
    # Both globals are module-level: _call_count numbers the output files,
    # _return_status becomes the script's exit code if any command fails.
    global _return_status
    global _call_count
    _call_count += 1
    data = {"cmd": "rally --rally-debug %s" % cmd}
    stdout_file = "{base_dir}/{prefix}_{cmd}.txt.gz"

    # NOTE: only the *file name* is sanitized; data["cmd"] was built above
    # from the original, unmodified command string.
    cmd = cmd.replace("/", "_")
    data.update({"stdout_file": stdout_file.format(base_dir=BASE_DIR,
                                                   prefix=_call_count,
                                                   cmd=cmd.replace(" ", "_"))})

    if output_type:
        # Derive the report file name from the stdout file name by swapping
        # the extension, e.g. "..._cmd.txt.gz" -> "..._cmd.json.gz".
        data["output_file"] = data["stdout_file"].replace(
            ".txt.", ".%s." % output_type)
        data["cmd"] += " --to %s" % data["output_file"]
        data["cmd"] += " --type %s" % output_type

    try:
        LOG.info("Try to execute `%s`." % data["cmd"])
        stdout = subprocess.check_output(data["cmd"].split(),
                                         stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        LOG.error("Command `%s` failed." % data["cmd"])
        # Keep whatever output the failing command produced.
        stdout = e.output
        data["status"] = "fail"
        _return_status = 1
    else:
        data["status"] = "success"

    if output_type:
        # let's gzip results: read the plain report back and rewrite the
        # same path compressed ("wb" overwrites the uncompressed file).
        with open(data["output_file"]) as f:
            output = f.read()
        with gzip.open(data["output_file"], "wb") as f:
            f.write(output)

    stdout = "$ %s\n%s" % (data["cmd"], stdout)

    # NOTE(review): gzip files opened in "wb" mode expect bytes on
    # Python 3, while `stdout` here is a str -- this script appears to
    # target Python 2; confirm before porting.
    with gzip.open(data["stdout_file"], "wb") as f:
        f.write(stdout)

    if print_output:
        print(stdout)

    return data
def write_file(filename, data):
    """Persist *data* under BASE_DIR/<filename> and return the full path.

    :param filename: str, name of the file to create inside BASE_DIR
    :param data: contents to write (written via a binary-mode handle)
    :returns: str, the full path of the created file
    """
    destination = os.path.join(BASE_DIR, filename)
    with open(destination, "wb") as out:
        out.write(data)
    return destination
parser.add_argument("--ctx-create-resources", action="store_true", help="Make Tempest context create needed resources " "for the tests.") args = parser.parse_args() if not os.path.exists("%s/extra" % BASE_DIR): os.makedirs("%s/extra" % BASE_DIR) # Choose and check the deployment call_rally("deployment use --deployment %s" % DEPLOYMENT_NAME) call_rally("deployment check") config = json.loads( subprocess.check_output(["rally", "deployment", "config"])) config.update(config.pop("admin")) del config["type"] clients = osclients.Clients(objects.Credential(**config)) if args.ctx_create_resources: # If the 'ctx-create-resources' arg is provided, delete images and # flavors, and also create a shared network to make Tempest context # create needed resources. LOG.info("The 'ctx-create-resources' arg is provided. Deleting " "images and flavors, and also creating a shared network " "to make Tempest context create needed resources.") LOG.info("Deleting images.") for image in clients.glance().images.list(): clients.glance().images.delete(image.id) LOG.info("Deleting flavors.") for flavor in clients.nova().flavors.list(): clients.nova().flavors.delete(flavor.id) LOG.info("Creating a shared network.") net_body = { "network": { "name": "shared-net-%s" % str(uuid.uuid4()), "tenant_id": clients.keystone.auth_ref.project_id, "shared": True } } clients.neutron().create_network(net_body) else: # Otherwise, just in case create only flavors with the following # properties: RAM = 64MB and 128MB, VCPUs = 1, disk = 0GB to make # Tempest context discover them. LOG.info("The 'ctx-create-resources' arg is not provided. 
" "Creating flavors to make Tempest context discover them.") for flv_ram in [64, 128]: params = { "name": "flavor-%s" % str(uuid.uuid4()), "ram": flv_ram, "vcpus": 1, "disk": 0 } LOG.info( "Creating flavor '%s' with the following properties: RAM " "= %dMB, VCPUs = 1, disk = 0GB" % (params["name"], flv_ram)) clients.nova().flavors.create(**params) render_vars = dict(verifications=[]) # List plugins for verifiers management render_vars["list_plugins"] = call_rally("verify list-plugins") # Create a verifier render_vars["create_verifier"] = call_rally( "verify create-verifier --type %s --name my-verifier --source %s" % (VERIFIER_TYPE, VERIFIER_SOURCE)) # Show the verifier render_vars["show_verifier"] = call_rally("verify show-verifier") # List verifiers render_vars["list_verifiers"] = call_rally("verify list-verifiers") # Get verifier ID verifier_id = envutils.get_global(envutils.ENV_VERIFIER) # Get the penultimate verifier commit ID repo_dir = os.path.join( os.path.expanduser("~"), ".rally/verification/verifier-%s/repo" % verifier_id) p_commit_id = subprocess.check_output( ["git", "log", "-n", "1", "--pretty=format:%H"], cwd=repo_dir).strip() # Switch the verifier to the penultimate version render_vars["update_verifier"] = call_rally( "verify update-verifier --version %s --update-venv" % p_commit_id) # Generate and show the verifier config file render_vars["configure_verifier"] = call_rally( "verify configure-verifier --show") # Add a verifier extension render_vars["add_verifier_ext"] = call_rally( "verify add-verifier-ext --source %s" % VERIFIER_EXT_REPO) # List verifier extensions render_vars["list_verifier_exts"] = call_rally("verify list-verifier-exts") # List verifier tests render_vars["list_verifier_tests"] = call_rally( "verify list-verifier-tests %s" % MODES[args.mode]) # Start a verification, show results and generate reports skip_list_path = write_file("skip-list.yaml", SKIP_TESTS) xfail_list_path = write_file("xfail-list.yaml", XFAIL_TESTS) run_args = ("%s 
--skip-list %s --xfail-list %s --tag first-run %s-set " "--detailed" % (MODES[args.mode], skip_list_path, xfail_list_path, args.mode)) render_vars["verifications"].append(start_verification(run_args)) if args.compare: # Start another verification, show results and generate reports with gzip.open(render_vars["list_verifier_tests"]["stdout_file"]) as f: tests = [t for t in f.read().split("\n") if TEST_NAME_RE.match(t)] load_list_path = write_file("load-list.txt", "\n".join(tests)) run_args = "--load-list %s --tag second-run %s-set --detailed" % ( load_list_path, args.mode) render_vars["verifications"].append(start_verification(run_args)) # Generate trends reports for two verifications render_vars["compare"] = generate_trends_reports( render_vars["verifications"][-2]["uuid"], render_vars["verifications"][-1]["uuid"]) # List verifications render_vars["list"] = call_rally("verify list") # Delete the verifier extension render_vars["delete_verifier_ext"] = call_rally( "verify delete-verifier-ext --name %s" % VERIFIER_EXT_NAME) # Delete the verifier and all verifications render_vars["delete_verifier"] = call_rally( "verify delete-verifier --id %s --force" % verifier_id) render_page(**render_vars) return _return_status if __name__ == "__main__": sys.exit(main()) rally-0.9.1/tests/ci/wip-rally-gate.py0000775000567000056710000001445313073417716020775 0ustar jenkinsjenkins00000000000000#!/usr/bin/env python # # Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
def use_keystone_v3():
    """Alter the current Rally deployment to use keystone v3.

    Reads the active deployment config via the Rally CLI, rewrites the
    auth/endpoint URLs to the "/v3" path, adds the project/domain fields
    required by keystone v3 and registers the result as a new deployment
    named "V3".
    """
    print("Changing deployment to v3")
    config = json.loads(subprocess.check_output(["rally", "deployment",
                                                 "config"]))
    v3_url = parse.urlsplit(config["auth_url"])._replace(path="v3").geturl()
    config["auth_url"] = v3_url
    endpoint = config.get("endpoint")
    if endpoint:
        v3_endpoint = parse.urlsplit(endpoint)._replace(path="v3").geturl()
        config["endpoint"] = v3_endpoint
    config["project_name"] = config["tenant"]
    # NOTE(review): using the tenant name as the *domain* name looks
    # suspicious (domains are usually "Default") -- confirm intent.
    config["project_domain_name"] = config["tenant"]
    # mode="w" is required: json.dump writes str, and NamedTemporaryFile
    # defaults to binary mode, which breaks on Python 3. The file must stay
    # open (it is deleted on close) until "rally deployment create" has
    # read it, hence flush() instead of close().
    cfg_file = tempfile.NamedTemporaryFile(mode="w")
    json.dump(config, cfg_file)
    print("New config for keystone v3:")
    print(json.dumps(config, indent=2))
    cfg_file.flush()
    subprocess.call(["rally", "deployment", "create",
                     "--name", "V3", "--file", cfg_file.name])
    print(subprocess.check_output(["rally", "deployment", "check"]))
""" print("Starting %s" % " ".join(cmd)) status = subprocess.call(cmd, stdout=open(stdout, "w") if stdout else None) if stdout and gzip: subprocess.call(["gzip", "-9", stdout]) if check and status: raise Exception("Failed with status %d" % status) return status def run_task(task, tags=None): new_home_dir = tempfile.mkdtemp(prefix="rally_gate_") shutil.copytree(os.path.join(pwd.getpwuid(os.getuid()).pw_dir, ".rally"), os.path.join(new_home_dir, ".rally")) print("Setting $HOME to %s" % new_home_dir) os.environ["HOME"] = new_home_dir for tag in tags or []: if tag == "args": continue if tag not in TAG_HANDLERS: perror("Warning! Unknown tag '%s'" % tag) continue try: TAG_HANDLERS[tag]() except Exception as e: perror("Error processing tag '%s': %s" % (tag, e)) run(["rally", "task", "validate", "--task", task], check=True) cmd = ["rally", "task", "start", "--task", task] args_file, ext = task.rsplit(".", 1) args_file = args_file + "_args." + ext if os.path.isfile(args_file): cmd += ["--task-args-file", args_file] run(cmd, check=True) task_name = os.path.split(task)[-1] pub_dir = os.environ.get("RCI_PUB_DIR", "rally-plot") try: os.makedirs(os.path.join(pub_dir, "extra")) except Exception as e: if e.errno != errno.EEXIST: raise run(["rally", "task", "report", "--out", "%s/%s.html" % (pub_dir, task_name)]) run(["rally", "task", "results"], stdout="%s/results-%s.json" % (pub_dir, task_name)) status = run(["rally", "task", "sla-check"], stdout="%s/%s.sla.txt" % (pub_dir, task_name)) run(["rally", "task", "detailed"], stdout="rally-plot/detailed-%s.txt" % task_name) run(["rally", "task", "detailed", "--iterations-data"], stdout="rally-plot/detailed_with_iterations-%s.txt" % task_name) return status def get_name_from_git(): """Determine org/project name from git.""" r = re.compile(".*/(.*?)/(.*?).git$") for l in open(".git/config"): m = r.match(l.strip()) if m: return m.groups() raise Exception("Unable to get project name from git") def get_project_name(): for var in 
("ZUUL_PROJECT", "GERRIT_PROJECT"): if var in os.environ: return os.environ[var].split("/") return get_name_from_git() def main(): statuses = [] org, project = get_project_name() base = os.environ.get("BASE") if base: base_jobs_dir = os.path.join(base, "new", project) else: base_jobs_dir = os.path.realpath(".") rally_root = "/home/rally/rally/" if not os.path.exists(rally_root): rally_root = os.environ["BASE"] + "/new/rally/" jobs_dir = os.path.join(base_jobs_dir, "rally-jobs") if not os.path.exists(jobs_dir): # fallback to legacy path jobs_dir = os.path.join(base_jobs_dir, "rally-scenarios") if not os.path.exists(jobs_dir): raise Exception("Rally jobs directory does not exist.") for directory in ("plugins", "extra"): dst = os.path.expanduser("~/.rally/%s" % directory) try: shutil.copytree(os.path.join(jobs_dir, directory), dst) except OSError as e: if e.errno != errno.EEXIST: raise scenario = os.environ.get("RALLY_SCENARIO", project).rsplit(".", 1) scenario_name = scenario.pop(0) scenario_ext = scenario.pop() if scenario else "yaml" print("Processing scenario %s" % scenario_name) for fname in os.listdir(jobs_dir): print("Processing %s" % fname) if fname.startswith(scenario_name): tags = fname[len(scenario_name):-len(scenario_ext) - 1].split("_") statuses.append(run_task(os.path.join(jobs_dir, fname), tags)) else: print("Ignoring file %s" % fname) print("Exit statuses: %r" % statuses) template = utils.get_template("ci/index.html") with open("rally-plot/extra/index.html", "w") as output: output.write(template.render()) return any(statuses) if __name__ == "__main__": sys.exit(main()) rally-0.9.1/tests/ci/render.py0000664000567000056710000000237013073417716017406 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from __future__ import print_function import re import sys from rally.ui import utils HELP_MESSAGE = ( "Usage:\n\t" "render.py ci/template.html" "[= = ...]\n\n\t" "Where key-1,value-1 and key-2,value-2 are key pairs of template.") if __name__ == "__main__": args = sys.argv if (len(args) < 1 or not all(re.match("^[^=]+=[^=]+$", arg) for arg in args[2:])): print(HELP_MESSAGE, file=sys.stderr) sys.exit(1) render_kwargs = dict([arg.split("=") for arg in args[2:]]) print(utils.get_template(args[1]).render(**render_kwargs)) rally-0.9.1/tests/ci/rally_gate_functions.sh0000664000567000056710000001102013073417720022307 0ustar jenkinsjenkins00000000000000#!/usr/bin/env bash RALLY_DIR=$BASE/new/rally RALLY_PLUGINS_DIR=~/.rally/plugins RALLY_EXTRA_DIR=~/.rally/extra function setUp () { set -x JOB_DIR=$1 mkdir -p $RALLY_PLUGINS_DIR mkdir -p $RALLY_EXTRA_DIR if [ -n "$JOB_DIR" ]; then PLUGINS_DIR=${JOB_DIR}/plugins EXTRA_DIR=${JOB_DIR}/extra if [ -d $PLUGINS_DIR ]; then cp -r $PLUGINS_DIR/ $RALLY_PLUGINS_DIR fi if [ -d $EXTRA_DIR ]; then cp -r $EXTRA_DIR/* ~/.rally/extra/ fi fi touch ~/.rally/extra/fake-image.img env set -o pipefail rally deployment use --deployment devstack source ~/.rally/openrc admin admin # NOTE(ikhudoshyn): Create additional users and register a new env # so that we could run scenarios using 'existing_users' context if [ "$DEVSTACK_GATE_PREPOPULATE_USERS" = "1" ]; then # NOTE(andreykurilin): let's hardcode version, since we already # hardcoded arguments for users... 
export OS_IDENTITY_API_VERSION=3 openstack --version openstack project create rally-test-project-1 openstack user create --project rally-test-project-1 --password rally-test-password-1 rally-test-user-1 openstack role add --project rally-test-project-1 --user rally-test-user-1 Member openstack project create rally-test-project-2 openstack user create --project rally-test-project-2 --password rally-test-password-2 rally-test-user-2 openstack role add --project rally-test-project-2 --user rally-test-user-2 Member set +e NEUTRON_EXISTS=$(openstack --os-interface admin service list | grep neutron) set -e if [ "$NEUTRON_EXISTS" ]; then OS_QUOTA_STR="--networks -1 --subnets -1 --routers -1 --floating-ips -1 --subnetpools -1 --secgroups -1 --secgroup-rules -1 --ports -1" openstack --debug quota set $OS_QUOTA_STR rally-test-project-1 openstack --debug quota show rally-test-project-1 openstack --debug quota set $OS_QUOTA_STR rally-test-project-2 openstack --debug quota show rally-test-project-2 fi DEPLOYMENT_CONFIG_FILE=~/.rally/with-existing-users-config rally deployment config > $DEPLOYMENT_CONFIG_FILE sed -i '1a "users": [\ {\ "username": "rally-test-user-1",\ "password": "rally-test-password-1",\ "project_name": "rally-test-project-1",\ "user_domain_name": "Default",\ "project_domain_name": "Default"\ },\ {\ "username": "rally-test-user-2",\ "password": "rally-test-password-2",\ "project_name": "rally-test-project-2",\ "user_domain_name": "Default",\ "project_domain_name": "Default"\ }\ ],\ ' $DEPLOYMENT_CONFIG_FILE rally deployment create --name devstask-with-users --filename $DEPLOYMENT_CONFIG_FILE fi rally deployment config rally --debug deployment check if rally deployment check | grep 'nova' | grep 'Available' > /dev/null; then nova flavor-create m1.nano 42 64 0 1 fi } function run () { set -x TASK=$1 TASK_ARGS="$2 $3" if [ "$DEVSTACK_GATE_USE_PYTHON3" = "True" ]; then PYTHON=python3 else PYTHON=python fi $PYTHON $RALLY_DIR/tests/ci/osresources.py --dump-list 
resources_at_start.txt rally --rally-debug task start --task $TASK $TASK_ARGS mkdir -p rally-plot/extra $PYTHON $RALLY_DIR/tests/ci/render.py ci/index.html > rally-plot/extra/index.html cp $TASK rally-plot/task.txt tar -czf rally-plot/plugins.tar.gz -C $RALLY_PLUGINS_DIR . rally task results | python -m json.tool > rally-plot/results.json gzip -9 rally-plot/results.json rally task detailed > rally-plot/detailed.txt gzip -9 rally-plot/detailed.txt rally task detailed --iterations-data > rally-plot/detailed_with_iterations.txt gzip -9 rally-plot/detailed_with_iterations.txt rally task report --out rally-plot/results.html gzip -9 rally-plot/results.html # NOTE(stpierre): if the sla check fails, we still want osresources.py # to run, so we turn off -e and save the return value set +e rally task sla-check | tee rally-plot/sla.txt retval=$? set -e cp resources_at_start.txt rally-plot/ $PYTHON $RALLY_DIR/tests/ci/osresources.py\ --compare-with-list resources_at_start.txt\ | gzip > rally-plot/resources_diff.txt.gz exit $retval }rally-0.9.1/tests/unit/0000775000567000056710000000000013073420067016130 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/fakes.py0000664000567000056710000015501213073417720017601 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
def setup_dict(data, required=None, defaults=None):
    """Set up and validate a dict based on mandatory keys and default data.

    This helper reduces the code needed to build dict objects that must
    follow a specific schema (e.g. for API data).

    :param data: dict, input data
    :param required: list, mandatory keys to check
    :param defaults: dict, default data; updated in place and returned
    :returns: dict, with all keys set
    :raises IndexError, ValueError: if input data is incorrect
    """
    mandatory = set(required or [])
    provided = set(data)

    missing = mandatory - provided
    if missing:
        raise IndexError("Missed: %s" % next(iter(missing)))

    result = defaults or {}
    unexpected = provided - mandatory - set(result)
    if unexpected:
        raise ValueError("Unexpected: %s" % next(iter(unexpected)))

    result.update(data)
    return result
FakeNetwork(FakeResource): pass class FakeFlavor(FakeResource): def __init__(self, id="flavor-id-0", manager=None, ram=0, disk=0, vcpus=1, name="flavor-name-0"): super(FakeFlavor, self).__init__(manager, id=id) self.ram = ram self.disk = disk self.vcpus = vcpus self.name = name class FakeKeypair(FakeResource): pass class FakeStack(FakeResource): pass class FakeDomain(FakeResource): pass class FakeQuotas(FakeResource): pass class FakeSecurityGroup(FakeResource): def __init__(self, manager=None, rule_manager=None, id=None, name=None): super(FakeSecurityGroup, self).__init__(manager, id=id, name=name) self.rule_manager = rule_manager @property def rules(self): return [rule for rule in self.rule_manager.list() if rule.parent_group_id == self.id] class FakeSecurityGroupRule(FakeResource): def __init__(self, name, **kwargs): super(FakeSecurityGroupRule, self).__init__(name) if "cidr" in kwargs: kwargs["ip_range"] = {"cidr": kwargs["cidr"]} del kwargs["cidr"] for key, value in kwargs.items(): self.items[key] = value setattr(self, key, value) class FakeMetric(FakeResource): def __init_(self, manager=None, **kwargs): super(FakeMetric, self).__init__(manager) self.metric = kwargs.get("metric_name") self.optional_args = kwargs.get("optional_args", {}) class FakeAlarm(FakeResource): def __init__(self, manager=None, **kwargs): super(FakeAlarm, self).__init__(manager) self.meter_name = kwargs.get("meter_name") self.threshold = kwargs.get("threshold") self.state = kwargs.get("state", "fake-alarm-state") self.alarm_id = kwargs.get("alarm_id", "fake-alarm-id") self.state = kwargs.get("state", "ok") self.optional_args = kwargs.get("optional_args", {}) class FakeSample(FakeResource): def __init__(self, manager=None, **kwargs): super(FakeSample, self).__init__(manager) self.counter_name = kwargs.get("counter_name", "fake-counter-name") self.counter_type = kwargs.get("counter_type", "fake-counter-type") self.counter_unit = kwargs.get("counter_unit", "fake-counter-unit") 
class FakeManager(object):
    """In-memory store of fake resources, preserving creation order."""

    def __init__(self):
        super(FakeManager, self).__init__()
        # uuid -> resource mapping, plus a list that records creation order.
        self.cache = {}
        self.resources_order = []

    def get(self, resource_uuid):
        """Return the cached resource, or None when the uuid is unknown."""
        return self.cache.get(resource_uuid)

    def delete(self, resource_uuid):
        """Mark the resource DELETED and drop it from the store (no-op if absent)."""
        victim = self.get(resource_uuid)
        if victim is None:
            return
        victim.status = "DELETED"
        del self.cache[resource_uuid]
        self.resources_order.remove(resource_uuid)

    def _cache(self, resource):
        """Register *resource* (keyed by its uuid attribute) and return it."""
        self.resources_order.append(resource.uuid)
        self.cache[resource.uuid] = resource
        return resource

    def list(self, **kwargs):
        """Return all cached resources in creation order (kwargs ignored)."""
        return [self.cache[uid] for uid in self.resources_order]

    def find(self, **kwargs):
        """Return the first cached resource whose attributes match all kwargs.

        Missing attributes compare as None; returns None when nothing matches.
        """
        for candidate in self.cache.values():
            if all(getattr(candidate, attr, None) == wanted
                   for attr, wanted in kwargs.items()):
                return candidate
name=None, id=None): image = self._cache(image_class(self)) image.owner = "dummy" image.id = image.uuid if name is not None: image.name = name return image def create(self, name, copy_from, container_format, disk_format): return self._create(name=name) def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETED" del self.cache[resource] self.resources_order.remove(resource) class FakeStrategyManager(FakeManager): def get(self, resource_name): for key in self.resources_order: if self.cache[key].name == resource_name: return self.cache[key] class FakeGoalManager(FakeManager): def get(self, resource_name): for key in self.resources_order: if self.cache[key].name == resource_name: return self.cache[key] class FakePackageManager(FakeManager): def create(self, package_descr, package_arch, package_class=FakeMurano): package = self._cache(package_class(self)) package.name = list(package_arch.keys())[0] return package class FakeFloatingIPsManager(FakeManager): def create(self): return FakeFloatingIP(self) class FakeFloatingIPPoolsManager(FakeManager): def create(self): return FakeFloatingIPPool(self) class FakeTenantsManager(FakeManager): def create(self, name): return self._cache(FakeTenant(self, name)) def update(self, tenant_id, name=None, description=None): tenant = self.get(tenant_id) name = name or (tenant.name + "_updated") desc = description or (tenant.name + "_description_updated") tenant.name = name tenant.description = desc return self._cache(tenant) class FakeNetworkManager(FakeManager): def create(self, net_id): net = FakeNetwork(self) net.id = net_id return self._cache(net) class FakeFlavorManager(FakeManager): def create(self): flv = FakeFlavor(self) return self._cache(flv) class FakeKeypairManager(FakeManager): def create(self, name, public_key=None): kp = FakeKeypair(self) kp.name = name or kp.name return self._cache(kp) def delete(self, 
resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETED" del self.cache[resource] self.resources_order.remove(resource) class FakeClusterTemplateManager(FakeManager): def create(self, name): cluster_template = FakeClusterTemplate(self) cluster_template.name = name or cluster_template.name return self._cache(cluster_template) def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: del self.cache[resource] self.resources_order.remove(resource) class FakeStackManager(FakeManager): def create(self, name): stack = FakeStack(self) stack.name = name or stack.name return self._cache(stack) def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETE_COMPLETE" del self.cache[resource] self.resources_order.remove(resource) class FakeDomainManager(FakeManager): def create(self, name): domain = FakeDomain(self) domain.name = name or domain.name return self._cache(domain) def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETE_COMPLETE" del self.cache[resource] self.resources_order.remove(resource) class FakeNovaQuotasManager(FakeManager): def update(self, tenant_id, **kwargs): fq = FakeQuotas(self) return self._cache(fq) def delete(self, tenant_id): pass class FakeCinderQuotasManager(FakeManager): def update(self, tenant_id, **kwargs): fq = FakeQuotas(self) return self._cache(fq) def delete(self, tenant_id): pass class FakeSecurityGroupManager(FakeManager): def __init__(self, rule_manager=None): super(FakeSecurityGroupManager, self).__init__() self.rule_manager = rule_manager self.create("default") def create(self, name, description=""): sg = FakeSecurityGroup( 
manager=self, rule_manager=self.rule_manager) sg.name = name or sg.name sg.description = description return self._cache(sg) def to_dict(self, obj): return {"id": obj.id, "name": obj.name} def find(self, name, **kwargs): kwargs["name"] = name for resource in self.cache.values(): match = True for key, value in kwargs.items(): if getattr(resource, key, None) != value: match = False break if match: return resource raise nova_exceptions.NotFound("Security Group not found") def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETED" del self.cache[resource] self.resources_order.remove(resource) class FakeSecurityGroupRuleManager(FakeManager): def __init__(self): super(FakeSecurityGroupRuleManager, self).__init__() def create(self, parent_group_id, **kwargs): kwargs["parent_group_id"] = parent_group_id sgr = FakeSecurityGroupRule(self, **kwargs) return self._cache(sgr) class FakeUsersManager(FakeManager): def create(self, username, password, email, tenant_id): user = FakeUser(manager=self, name=username) user.name = username or user.name return self._cache(user) class FakeServicesManager(FakeManager): def list(self): return [] class FakeVolumeManager(FakeManager): def __init__(self): super(FakeVolumeManager, self).__init__() self.__volumes = {} self.__tenant_id = generate_uuid() def create(self, size=None, **kwargs): volume = FakeVolume(self) volume.size = size or 1 volume.name = kwargs.get("display_name", volume.name) volume.status = "available" volume.tenant_id = self.__tenant_id self.__volumes[volume.id] = volume return self._cache(volume) def list(self): return self.__volumes.values() def delete(self, resource): super(FakeVolumeManager, self).delete(resource.id) del self.__volumes[resource.id] class FakeVolumeTypeManager(FakeManager): def create(self, name): vol_type = FakeVolumeType(self) vol_type.name = name or vol_type.name return self._cache(vol_type) 
class FakeVolumeTransferManager(FakeManager): def __init__(self): super(FakeVolumeTransferManager, self).__init__() self.__volume_transfers = {} def list(self): return self.__volume_transfers.values() def create(self, name): transfer = FakeVolumeTransfer(self) transfer.name = name or transfer.name self.__volume_transfers[transfer.id] = transfer return self._cache(transfer) def delete(self, resource): super(FakeVolumeTransferManager, self).delete(resource.id) del self.__volume_transfers[resource.id] class FakeVolumeSnapshotManager(FakeManager): def __init__(self): super(FakeVolumeSnapshotManager, self).__init__() self.__snapshots = {} self.__tenant_id = generate_uuid() def create(self, name, force=False, display_name=None): snapshot = FakeVolumeSnapshot(self) snapshot.name = name or snapshot.name snapshot.status = "available" snapshot.tenant_id = self.__tenant_id self.__snapshots[snapshot.id] = snapshot return self._cache(snapshot) def list(self): return self.__snapshots.values() def delete(self, resource): super(FakeVolumeSnapshotManager, self).delete(resource.id) del self.__snapshots[resource.id] class FakeVolumeBackupManager(FakeManager): def __init__(self): super(FakeVolumeBackupManager, self).__init__() self.__backups = {} self.__tenant_id = generate_uuid() def create(self, name): backup = FakeVolumeBackup(self) backup.name = name or backup.name self.__backups[backup.id] = backup return self._cache(backup) def list(self): return self.__backups.values() def delete(self, resource): super(FakeVolumeBackupManager, self).delete(resource.id) del self.__backups[resource.id] class FakeRolesManager(FakeManager): def create(self, role_id, name): role = FakeRole(self) role.name = name role.id = role_id return self._cache(role) def roles_for_user(self, user, tenant): role = FakeRole(self) role.name = "admin" return [role, ] def add_user_role(self, user, role, tenant): pass class FakeMetricManager(FakeManager): def create(self, **kwargs): metric = FakeMetric(self, **kwargs) 
return self._cache(metric) def get(self, metric_id): metric = self.find(metric_id=metric_id) return [metric] class FakeAlarmManager(FakeManager): def get(self, alarm_id): alarm = self.find(alarm_id=alarm_id) if alarm: return [alarm] raise ceilometer_exc.HTTPNotFound( "Alarm with %s not found" % (alarm_id)) def update(self, alarm_id, **fake_alarm_dict_diff): alarm = self.get(alarm_id)[0] for attr, value in fake_alarm_dict_diff.items(): setattr(alarm, attr, value) return alarm def create(self, **kwargs): alarm = FakeAlarm(self, **kwargs) return self._cache(alarm) def delete(self, alarm_id): alarm = self.find(alarm_id=alarm_id) if alarm is not None: alarm.status = "DELETED" del self.cache[alarm.id] self.resources_order.remove(alarm.id) def get_state(self, alarm_id): alarm = self.find(alarm_id=alarm_id) if alarm is not None: return getattr(alarm, "state", "fake-alarm-state") def get_history(self, alarm_id): return ["fake-alarm-history"] def set_state(self, alarm_id, state): alarm = self.find(alarm_id=alarm_id) if alarm is not None: return setattr(alarm, "state", state) class FakeSampleManager(FakeManager): def create(self, **kwargs): sample = FakeSample(self, **kwargs) return [self._cache(sample)] def list(self): return ["fake-samples"] class FakeMeterManager(FakeManager): def list(self): return ["fake-meter"] class FakeMetricsManager(FakeManager): def list(self): return ["fake-metric"] class FakeCeilometerResourceManager(FakeManager): def get(self, resource_id): return ["fake-resource-info"] def list(self): return ["fake-resource"] class FakeStatisticsManager(FakeManager): def list(self, meter): return ["%s-statistics" % meter] class FakeQueryManager(FakeManager): def query(self, filter, orderby, limit): return ["fake-query-result"] class FakeQueuesManager(FakeManager): def __init__(self): super(FakeQueuesManager, self).__init__() self.__queues = {} def create(self, name): queue = FakeQueue(self, name) self.__queues[queue.name] = queue return self._cache(queue) def 
list(self): return self.__queues.values() def delete(self, queue): super(FakeQueuesManager, self).delete(queue.name) del self.__queues[queue.name] class FakeDbInstanceManager(FakeManager): def __init__(self): super(FakeDbInstanceManager, self).__init__() self.__db_instances = {} def create(self, name, flavor_id, size): instance = FakeDbInstance(self) instance.name = name or instance.name instance.flavor_id = flavor_id instance.size = size return self._cache(instance) def list(self): return self.__db_instances.values() def delete(self, resource): if not isinstance(resource, six.string_types): resource = resource.id cached = self.get(resource) if cached is not None: cached.status = "DELETE_COMPLETE" del self.cache[resource] self.resources_order.remove(resource) class FakeMessagesManager(FakeManager): def __init__(self, queue="myqueue"): super(FakeMessagesManager, self).__init__() self.__queue = queue self.__messages = {} def create(self, **kwargs): message = FakeMessage(self, **kwargs) self.__messages[message.id] = message return self._cache(message) def list(self): return self.__messages.values() def delete(self, message): super(FakeMessagesManager, self).delete(message.id) del self.__messages[message.id] class FakeAvailabilityZonesManager(FakeManager): def __init__(self): super(FakeAvailabilityZonesManager, self).__init__() self.zones = FakeAvailabilityZone() def list(self): return [self.zones] class FakeWorkbookManager(FakeManager): def __init__(self): super(FakeWorkbookManager, self).__init__() self.workbook = FakeWorkbook() def list(self): return [self.workbook] class FakeWorkflowManager(FakeManager): def __init__(self): super(FakeWorkflowManager, self).__init__() self.workflow = FakeWorkflow() def list(self): return [self.workflow] class FakeExecutionManager(FakeManager): def __init__(self): super(FakeExecutionManager, self).__init__() self.execution = FakeExecution() def list(self): return [self.execution] def create(self): return self.execution class 
FakeObjectManager(FakeManager): def get_account(self, **kwargs): containers = self.list() return (mock.MagicMock(), [{"name": con.name} for con in containers]) def get_container(self, name, **kwargs): container = self.find(name=name) if container is None: raise swift_exceptions.ClientException("Container GET failed") return (mock.MagicMock(), [{"name": obj} for obj in container.items]) def put_container(self, name, **kwargs): if self.find(name=name): raise swift_exceptions.ClientException("Container PUT failed") self._cache(FakeObject(name=name)) def delete_container(self, name, **kwargs): container = self.find(name=name) if container is None or len(container.items.keys()) > 0: raise swift_exceptions.ClientException("Container DELETE failed") self.delete(container.uuid) def get_object(self, container_name, object_name, **kwargs): container = self.find(name=container_name) if container is None or object_name not in container.items: raise swift_exceptions.ClientException("Object GET failed") return (mock.MagicMock(), container.items[object_name]) def put_object(self, container_name, object_name, content, **kwargs): container = self.find(name=container_name) if container is None: raise swift_exceptions.ClientException("Object PUT failed") container.items[object_name] = content return mock.MagicMock() def delete_object(self, container_name, object_name, **kwargs): container = self.find(name=container_name) if container is None or object_name not in container.items: raise swift_exceptions.ClientException("Object DELETE failed") del container.items[object_name] class FakeServiceCatalog(object): def get_credentials(self): return {"image": [{"publicURL": "http://fake.to"}], "metering": [{"publicURL": "http://fake.to"}], "monitoring": [{"publicURL": "http://fake.to"}]} def url_for(self, **kwargs): return "http://fake.to" class FakeGlanceClient(object): def __init__(self): self.images = FakeImageManager() class FakeMuranoClient(object): def __init__(self): self.packages = 
FakePackageManager() class FakeCinderClient(object): def __init__(self): self.volumes = FakeVolumeManager() self.volume_types = FakeVolumeTypeManager() self.transfers = FakeVolumeTransferManager() self.volume_snapshots = FakeVolumeSnapshotManager() self.backups = FakeVolumeBackupManager() self.quotas = FakeCinderQuotasManager() class FakeNovaClient(object): def __init__(self, failed_server_manager=False): self.images = FakeImageManager() self.servers = FakeServerManager(self.images) self.floating_ips = FakeFloatingIPsManager() self.floating_ip_pools = FakeFloatingIPPoolsManager() self.networks = FakeNetworkManager() self.flavors = FakeFlavorManager() self.keypairs = FakeKeypairManager() self.security_group_rules = FakeSecurityGroupRuleManager() self.security_groups = FakeSecurityGroupManager( rule_manager=self.security_group_rules) self.quotas = FakeNovaQuotasManager() self.set_management_url = mock.MagicMock() self.availability_zones = FakeAvailabilityZonesManager() class FakeHeatClient(object): def __init__(self): self.stacks = FakeStackManager() class FakeDesignateClient(object): def __init__(self): self.domains = FakeDomainManager() class FakeKeystoneClient(object): def __init__(self): self.tenants = FakeTenantsManager() self.users = FakeUsersManager() self.roles = FakeRolesManager() self.project_id = "abc123" self.auth_url = "http://example.com:5000/v2.0/" self.auth_token = "fake" self.auth_user_id = generate_uuid() self.auth_tenant_id = generate_uuid() self.service_catalog = FakeServiceCatalog() self.services = FakeServicesManager() self.region_name = "RegionOne" self.auth_ref = mock.Mock() self.auth_ref.role_names = ["admin"] self.version = "v2.0" self.session = mock.MagicMock() self.authenticate = mock.MagicMock() def authenticate(self): return True def list_users(self): return self.users.list() def list_projects(self): return self.tenants.list() def list_services(self): return self.services.list() def list_roles(self): return self.roles.list() def 
delete_user(self, uuid): return self.users.delete(uuid) class FakeCeilometerClient(object): def __init__(self): self.alarms = FakeAlarmManager() self.meters = FakeMeterManager() self.resources = FakeCeilometerResourceManager() self.statistics = FakeStatisticsManager() self.samples = FakeSampleManager() self.query_alarms = FakeQueryManager() self.query_samples = FakeQueryManager() self.query_alarm_history = FakeQueryManager() class FakeGnocchiClient(object): def __init__(self): self.metric = FakeMetricManager() class FakeMonascaClient(object): def __init__(self): self.metrics = FakeMetricsManager() class FakeNeutronClient(object): def __init__(self, **kwargs): self.__networks = {} self.__subnets = {} self.__routers = {} self.__ports = {} self.__pools = {} self.__vips = {} self.__fips = {} self.__healthmonitors = {} self.__tenant_id = kwargs.get("tenant_id", generate_uuid()) self.format = "json" self.version = "2.0" @staticmethod def _filter(resource_list, search_opts): return [res for res in resource_list if all(res[field] == value for field, value in search_opts.items())] def add_interface_router(self, router_id, data): subnet_id = data["subnet_id"] if (router_id not in self.__routers or subnet_id not in self.__subnets): raise neutron_exceptions.NeutronClientException subnet = self.__subnets[subnet_id] port = self.create_port( {"port": {"network_id": subnet["network_id"]}})["port"] port["device_id"] = router_id port["fixed_ips"].append({"subnet_id": subnet_id, "ip_address": subnet["gateway_ip"]}) return {"subnet_id": subnet_id, "tenant_id": port["tenant_id"], "port_id": port["id"], "id": router_id} def create_network(self, data): network = setup_dict(data["network"], defaults={"name": generate_name("net_"), "admin_state_up": True}) network_id = generate_uuid() network.update({"id": network_id, "status": "ACTIVE", "subnets": [], "provider:physical_network": None, "tenant_id": self.__tenant_id, "provider:network_type": "local", "router:external": True, "shared": 
False, "provider:segmentation_id": None}) self.__networks[network_id] = network return {"network": network} def create_pool(self, data): pool = setup_dict(data["pool"], required=["lb_method", "protocol", "subnet_id"], defaults={"name": generate_name("pool_"), "admin_state_up": True}) if pool["subnet_id"] not in self.__subnets: raise neutron_exceptions.NeutronClientException pool_id = generate_uuid() pool.update({"id": pool_id, "status": "PENDING_CREATE", "tenant_id": self.__tenant_id}) self.__pools[pool_id] = pool return {"pool": pool} def create_vip(self, data): vip = setup_dict(data["vip"], required=["protocol_port", "protocol", "subnet_id", "pool_id"], defaults={"name": generate_name("vip_"), "admin_state_up": True}) if (vip["subnet_id"] not in self.__subnets) or (vip["pool_id"] not in self.__pools): raise neutron_exceptions.NeutronClientException vip_id = generate_uuid() vip.update({"id": vip_id, "status": "PENDING_CREATE", "tenant_id": self.__tenant_id}) self.__vips[vip_id] = vip return {"vip": vip} def create_floatingip(self, data): fip = setup_dict(data["floatingip"], required=["floating_network"], defaults={"admin_state_up": True}) if (fip["floating_network"] not in self.__nets): raise neutron_exceptions.NeutronClientException fip_id = generate_uuid() fip.update({"id": fip_id, "tenant_id": self.__tenant_id}) self.__fips[fip_id] = fip return {"fip": fip} def create_health_monitor(self, data): healthmonitor = setup_dict(data["healthmonitor"], required=["type", "timeout", "delay", "max_retries"], defaults={"admin_state_up": True}) healthmonitor_id = generate_uuid() healthmonitor.update({"id": healthmonitor_id, "status": "PENDING_CREATE", "tenant_id": self.__tenant_id}) self.__healthmonitors[healthmonitor_id] = healthmonitor return {"healthmonitor": healthmonitor} def create_port(self, data): port = setup_dict(data["port"], required=["network_id"], defaults={"name": generate_name("port_"), "admin_state_up": True}) if port["network_id"] not in self.__networks: 
raise neutron_exceptions.NeutronClientException port_id = generate_uuid() port.update({"id": port_id, "status": "ACTIVE", "binding:host_id": "fakehost", "extra_dhcp_opts": [], "binding:vnic_type": "normal", "binding:vif_type": "ovs", "device_owner": "", "mac_address": generate_mac(), "binding:profile": {}, "binding:vif_details": {u"port_filter": True}, "security_groups": [], "fixed_ips": [], "device_id": "", "tenant_id": self.__tenant_id, "allowed_address_pairs": []}) self.__ports[port_id] = port return {"port": port} def create_router(self, data): router = setup_dict(data["router"], defaults={"name": generate_name("router_"), "external_gateway_info": None, "admin_state_up": True}) router_id = generate_uuid() router.update({"id": router_id, "status": "ACTIVE", "external_gateway_info": None, "tenant_id": self.__tenant_id}) self.__routers[router_id] = router return {"router": router} def create_subnet(self, data): subnet = setup_dict( data["subnet"], required=["network_id", "cidr", "ip_version"], defaults={"name": generate_name("subnet_"), "dns_nameservers": ["8.8.8.8", "8.8.4.4"]}) if subnet["network_id"] not in self.__networks: raise neutron_exceptions.NeutronClientException subnet_id = generate_uuid() subnet.update({"id": subnet_id, "enable_dhcp": True, "tenant_id": self.__tenant_id, "ipv6_ra_mode": None, "allocation_pools": [], "gateway_ip": re.sub("./.*$", "1", subnet["cidr"]), "ipv6_address_mode": None, "ip_version": 4, "host_routes": []}) self.__subnets[subnet_id] = subnet return {"subnet": subnet} def update_resource(self, resource_id, resource_dict, data): if resource_id not in resource_dict: raise neutron_exceptions.NeutronClientException self.resource_list[resource_id].update(data) def update_network(self, network_id, data): self.update_resource(network_id, self.__networks, data) def update_pool(self, pool_id, data): self.update_resource(pool_id, self.__pools, data) def update_vip(self, vip_id, data): self.update_resource(vip_id, self.__vips, data) def 
update_health_monitor(self, healthmonitor_id, data): self.update_resource(healthmonitor_id, self.__healthmonitors, data) def update_subnet(self, subnet_id, data): self.update_resource(subnet_id, self.__subnets, data) def update_port(self, port_id, data): self.update_resource(port_id, self.__ports, data) def update_router(self, router_id, data): self.update_resource(router_id, self.__routers, data) def delete_network(self, network_id): if network_id not in self.__networks: raise neutron_exceptions.NeutronClientException for port in self.__ports.values(): if port["network_id"] == network_id: # Network is in use by port raise neutron_exceptions.NeutronClientException del self.__networks[network_id] return "" def delete_pool(self, pool_id): if pool_id not in self.__pools: raise neutron_exceptions.NeutronClientException del self.__pools[pool_id] return "" def delete_vip(self, vip_id): if vip_id not in self.__vips: raise neutron_exceptions.NeutronClientException del self.__vips[vip_id] def delete_health_monitor(self, healthmonitor_id): if healthmonitor_id not in self.__healthmonitors: raise neutron_exceptions.NeutronClientException del self.__healthmonitors[healthmonitor_id] return "" def delete_floatingip(self, fip_id): if fip_id not in self.__fips: raise neutron_exceptions.NeutronClientException del self.__fips[fip_id] return "" def delete_port(self, port_id): if port_id not in self.__ports: raise neutron_exceptions.PortNotFoundClient if self.__ports[port_id]["device_owner"]: # Port is owned by some device raise neutron_exceptions.NeutronClientException del self.__ports[port_id] return "" def delete_router(self, router_id): if router_id not in self.__routers: raise neutron_exceptions.NeutronClientException for port in self.__ports.values(): if port["device_id"] == router_id: # Router has active port raise neutron_exceptions.NeutronClientException del self.__routers[router_id] return "" def delete_subnet(self, subnet_id): if subnet_id not in self.__subnets: raise 
neutron_exceptions.NeutronClientException for port in self.__ports.values(): for fip in port["fixed_ips"]: if fip["subnet_id"] == subnet_id: # Subnet has IP allocation from some port raise neutron_exceptions.NeutronClientException del self.__subnets[subnet_id] return "" def list_networks(self, **search_opts): nets = self._filter(self.__networks.values(), search_opts) return {"networks": nets} def list_pools(self, **search_opts): pools = self._filter(self.__pools.values(), search_opts) return {"pools": pools} def list_vips(self, **search_opts): vips = self._filter(self.__vips.values(), search_opts) return {"vips": vips} def list_health_monitors(self, **search_opts): healthmonitors = self._filter( self.__healthmonitors.values(), search_opts) return {"healthmonitors": healthmonitors} def list_ports(self, **search_opts): ports = self._filter(self.__ports.values(), search_opts) return {"ports": ports} def list_routers(self, **search_opts): routers = self._filter(self.__routers.values(), search_opts) return {"routers": routers} def list_subnets(self, **search_opts): subnets = self._filter(self.__subnets.values(), search_opts) return {"subnets": subnets} def list_floatingips(self, **search_opts): fips = self._filter(self.__fips.values(), search_opts) return {"floatingips": fips} def remove_interface_router(self, router_id, data): subnet_id = data["subnet_id"] if (router_id not in self.__routers or subnet_id not in self.__subnets): raise neutron_exceptions.NeutronClientException subnet = self.__subnets[subnet_id] for port_id, port in self.__ports.items(): if port["device_id"] == router_id: for fip in port["fixed_ips"]: if fip["subnet_id"] == subnet_id: del self.__ports[port_id] return {"subnet_id": subnet_id, "tenant_id": subnet["tenant_id"], "port_id": port_id, "id": router_id} raise neutron_exceptions.NeutronClientException def associate_health_monitor(self, pool_id, healthmonitor_id): if pool_id not in self.__pools: raise neutron_exceptions.NeutronClientException if 
healthmonitor_id not in self.__healthmonitors: raise neutron_exceptions.NeutronClientException self.__pools[pool_id]["pool"]["healthmonitors"] = healthmonitor_id return {"pool": self.__pools[pool_id]} def disassociate_health_monitor(self, pool_id, healthmonitor_id): if pool_id not in self.__pools: raise neutron_exceptions.NeutronClientException if healthmonitor_id not in self.__healthmonitors: raise neutron_exceptions.NeutronClientException del self.__pools[pool_id]["pool"]["healthmonitors"][healthmonitor_id] return "" class FakeIronicClient(object): def __init__(self): # TODO(romcheg):Fake Manager subclasses to manage BM nodes. pass class FakeSaharaClient(object): def __init__(self): self.job_executions = mock.MagicMock() self.jobs = mock.MagicMock() self.job_binary_internals = mock.MagicMock() self.job_binaries = mock.MagicMock() self.data_sources = mock.MagicMock() self.clusters = mock.MagicMock() self.cluster_templates = mock.MagicMock() self.node_group_templates = mock.MagicMock() self.setup_list_methods() def setup_list_methods(self): mock_with_id = mock.MagicMock() mock_with_id.id = 42 # First call of list returns a list with one object, the next should # empty after delete. 
self.job_executions.list.side_effect = [[mock_with_id], []] self.jobs.list.side_effect = [[mock_with_id], []] self.job_binary_internals.list.side_effect = [[mock_with_id], []] self.job_binaries.list.side_effect = [[mock_with_id], []] self.data_sources.list.side_effect = [[mock_with_id], []] self.clusters.list.side_effect = [[mock_with_id], []] self.cluster_templates.list.side_effect = [[mock_with_id], []] self.node_group_templates.list.side_effect = [[mock_with_id], []] class FakeZaqarClient(object): def __init__(self): self.queues = FakeQueuesManager() def queue(self, name, **kwargs): return self.queues.create(name, **kwargs) class FakeTroveClient(object): def __init__(self): self.instances = FakeDbInstanceManager() class FakeMistralClient(object): def __init__(self): self.workbook = FakeWorkbookManager() self.workflow = FakeWorkflowManager() self.execution = FakeExecutionManager() class FakeSwiftClient(FakeObjectManager): pass class FakeEC2Client(object): def __init__(self): pass class FakeSenlinClient(object): def __init__(self): # TODO(Yanyan Hu):Fake interfaces of senlinclient. 
pass class FakeMagnumClient(object): def __init__(self): self.cluster_templates = FakeClusterTemplateManager() class FakeWatcherClient(object): def __init__(self): self.strategy = FakeStrategyManager() self.goal = FakeGoalManager() class FakeClients(object): def __init__(self, credential_=None): self._nova = None self._glance = None self._keystone = None self._cinder = None self._neutron = None self._sahara = None self._heat = None self._designate = None self._ceilometer = None self._zaqar = None self._trove = None self._mistral = None self._swift = None self._murano = None self._monasca = None self._ec2 = None self._senlin = None self._watcher = None self._credential = credential_ or objects.Credential( "http://fake.example.org:5000/v2.0/", "fake_username", "fake_password", "fake_tenant_name") def keystone(self, version=None): if not self._keystone: self._keystone = FakeKeystoneClient() return self._keystone def verified_keystone(self): return self.keystone() def nova(self): if not self._nova: self._nova = FakeNovaClient() return self._nova def glance(self): if not self._glance: self._glance = FakeGlanceClient() return self._glance def cinder(self): if not self._cinder: self._cinder = FakeCinderClient() return self._cinder def neutron(self): if not self._neutron: self._neutron = FakeNeutronClient() return self._neutron def sahara(self): if not self._sahara: self._sahara = FakeSaharaClient() return self._sahara def heat(self): if not self._heat: self._heat = FakeHeatClient() return self._heat def designate(self): if not self._designate: self._designate = FakeDesignateClient() return self._designate def ceilometer(self): if not self._ceilometer: self._ceilometer = FakeCeilometerClient() return self._ceilometer def monasca(self): if not self._monasca: self._monasca = FakeMonascaClient() return self._monasca def zaqar(self): if not self._zaqar: self._zaqar = FakeZaqarClient() return self._zaqar def trove(self): if not self._trove: self._trove = FakeTroveClient() 
# NOTE(review): closes FakeClients.trove() started on the previous span.
        return self._trove

    def mistral(self):
        if not self._mistral:
            self._mistral = FakeMistralClient()
        return self._mistral

    def swift(self):
        if not self._swift:
            self._swift = FakeSwiftClient()
        return self._swift

    def murano(self):
        if not self._murano:
            self._murano = FakeMuranoClient()
        return self._murano

    def ec2(self):
        if not self._ec2:
            self._ec2 = FakeEC2Client()
        return self._ec2

    def senlin(self):
        if not self._senlin:
            self._senlin = FakeSenlinClient()
        return self._senlin

    def watcher(self):
        if not self._watcher:
            self._watcher = FakeWatcherClient()
        return self._watcher


class FakeRunner(object):
    # Fake runner plugin: only supplies a JSON schema for config
    # validation tests ("type" and "a" required, "b" optional number).

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "type": {
                "type": "string",
                "enum": ["fake"]
            },
            "a": {
                "type": "string"
            },
            "b": {
                "type": "number"
            }
        },
        "required": ["type", "a"]
    }


class FakeScenario(scenario.Scenario):
    # Collection of scenario methods exercising the runner machinery:
    # no-ops, output producers, and failure modes.

    def idle_time(self):
        return 0

    def do_it(self, **kwargs):
        pass

    def with_output(self, **kwargs):
        # Legacy-style scenario output: data dict plus error field.
        return {"data": {"a": 1}, "error": None}

    def with_add_output(self):
        # Exercises the additive/complete chart output API.
        self.add_output(additive={"title": "Additive",
                                  "description": "Additive description",
                                  "data": [["a", 1]],
                                  "chart_plugin": "FooPlugin"},
                        complete={"title": "Complete",
                                  "description": "Complete description",
                                  "data": [["a", [[1, 2], [2, 3]]]],
                                  "chart_plugin": "BarPlugin"})

    def too_long(self, **kwargs):
        pass

    def something_went_wrong(self, **kwargs):
        # Deliberate failure used by error-handling tests.
        raise Exception("Something went wrong")

    def raise_timeout(self, **kwargs):
        raise multiprocessing.TimeoutError()


@scenario.configure(name="classbased.fooscenario")
class FakeClassBasedScenario(FakeScenario):
    """Fake class-based scenario."""

    def run(self, *args, **kwargs):
        pass


class FakeTimer(rally_utils.Timer):
    # Deterministic timer: fixed 10s duration, timestamps 0 and 3.

    def duration(self):
        return 10

    def timestamp(self):
        return 0

    def finish_timestamp(self):
        return 3


@context.configure(name="fake", order=1)
class FakeContext(context.Context):
    # Minimal context plugin; schema allows only an integer "test" key.

    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "test": {
                "type": "integer"
            },
        },
        "additionalProperties": False
    }

    def
__init__(self, context_obj=None): context_obj = context_obj or {} context_obj.setdefault("config", {}) context_obj["config"].setdefault("fake", None) context_obj.setdefault("task", mock.MagicMock()) super(FakeContext, self).__init__(context_obj) def setup(self): pass def cleanup(self): pass @context.configure(name="fake_hidden_context", order=1, hidden=True) class FakeHiddenContext(FakeContext): pass @context.configure(name="fake_user_context", order=1) class FakeUserContext(FakeContext): admin = { "id": "adminuuid", "credential": objects.Credential("aurl", "aname", "apwd", "atenant") } user = { "id": "uuid", "credential": objects.Credential("url", "name", "pwd", "tenant"), "tenant_id": "uuid" } tenants = {"uuid": {"name": "tenant"}} def __init__(self, ctx): super(FakeUserContext, self).__init__(ctx) self.context.setdefault("admin", FakeUserContext.admin) self.context.setdefault("users", [FakeUserContext.user]) self.context.setdefault("tenants", FakeUserContext.tenants) self.context.setdefault( "scenario_name", "NovaServers.boot_server_from_volume_and_delete") class FakeDeployment(dict): update_status = mock.Mock() def __init__(self, **kwargs): namespace = kwargs.pop("namespace", "openstack") kwargs["credentials"] = { namespace: [{"admin": kwargs.pop("admin", None), "users": kwargs.pop("users", [])}]} dict.__init__(self, **kwargs) def get_credentials_for(self, namespace): return self["credentials"][namespace][0] class FakeTask(dict): def __init__(self, task=None, temporary=False, **kwargs): self.is_temporary = temporary self.task = task or kwargs self.set_failed = mock.Mock() self.set_validation_failed = mock.Mock() def __getitem__(self, key): if key in self: return self[key] return self.task[key] def to_dict(self): return self class FakeAPI(object): def __init__(self): self._deployment = mock.create_autospec(api._Deployment) self._task = mock.create_autospec(api._Task) self._verifier = mock.create_autospec(api._Verifier) self._verification = 
mock.create_autospec(api._Verification) @property def deployment(self): return self._deployment @property def task(self): return self._task @property def verifier(self): return self._verifier @property def verification(self): return self._verification rally-0.9.1/tests/unit/__init__.py0000664000567000056710000000000013073417716020236 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/test_resources.py0000664000567000056710000000325513073417717021570 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import difflib import os from oslo_utils import encodeutils import rally from rally.cli import cliutils from tests.unit import test RES_PATH = os.path.join(os.path.dirname(rally.__file__), os.pardir, "etc") class BashCompletionTestCase(test.TestCase): def test_bash_completion(self): old = open(os.path.join(RES_PATH, "rally.bash_completion"), "r").read().splitlines() new = cliutils._generate_bash_completion_script().splitlines() if old != new: for line in difflib.unified_diff(old, new): print(line) new_filename = "/tmp/rally.bash.new" with open(new_filename, "wb") as new_file: new_file.write(encodeutils.safe_encode("\n".join(new))) self.fail("bash completion script is outdated. 
" "New script is located at %s " "You may fix this by executing " "`mv %s etc/rally.bash_completion`" % (new_filename, new_filename)) rally-0.9.1/tests/unit/test_osclients.py0000664000567000056710000011655013073417720021556 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt from keystoneclient import exceptions as keystone_exceptions import mock from oslo_config import cfg from rally.common import objects from rally import consts from rally import exceptions from rally import osclients from tests.unit import fakes from tests.unit import test @osclients.configure("dummy") class DummyClient(osclients.OSClient): def create_client(self, *args, **kwargs): pass class OSClientTestCaseUtils(object): def set_up_keystone_mocks(self): self.ksc_module = mock.MagicMock(__version__="2.0.0") self.ksc_client = mock.MagicMock() self.ksa_identity_plugin = mock.MagicMock() self.ksa_password = mock.MagicMock( return_value=self.ksa_identity_plugin) self.ksa_identity = mock.MagicMock(Password=self.ksa_password) self.ksa_auth = mock.MagicMock() self.ksa_session = mock.MagicMock() self.patcher = mock.patch.dict("sys.modules", {"keystoneclient": self.ksc_module, "keystoneauth1": self.ksa_auth}) self.patcher.start() self.addCleanup(self.patcher.stop) self.ksc_module.client = self.ksc_client self.ksa_auth.identity = self.ksa_identity self.ksa_auth.session = self.ksa_session def make_auth_args(self): auth_kwargs 
= { "auth_url": "http://auth_url/", "username": "user", "password": "password", "tenant_name": "tenant", "domain_name": "domain", "project_name": "project_name", "project_domain_name": "project_domain_name", "user_domain_name": "user_domain_name", } kwargs = {"https_insecure": False, "https_cacert": None} kwargs.update(auth_kwargs) return auth_kwargs, kwargs @ddt.ddt class OSClientTestCase(test.TestCase, OSClientTestCaseUtils): def test_choose_service_type(self): default_service_type = "default_service_type" @osclients.configure("test_choose_service_type", default_service_type=default_service_type) class FakeClient(osclients.OSClient): create_client = mock.MagicMock() fake_client = FakeClient(mock.MagicMock(), {}, {}) self.assertEqual(default_service_type, fake_client.choose_service_type()) self.assertEqual("foo", fake_client.choose_service_type("foo")) @mock.patch("rally.osclients.Keystone.service_catalog") @ddt.data( {"endpoint_type": None, "service_type": None, "region_name": None}, {"endpoint_type": "et", "service_type": "st", "region_name": "rn"} ) @ddt.unpack def test__get_endpoint(self, mock_keystone_service_catalog, endpoint_type, service_type, region_name): credential = objects.Credential("http://auth_url/v2.0", "user", "pass", endpoint_type=endpoint_type, region_name=region_name) mock_choose_service_type = mock.MagicMock() osclient = osclients.OSClient(credential, {}, mock.MagicMock()) osclient.choose_service_type = mock_choose_service_type mock_url_for = mock_keystone_service_catalog.url_for self.assertEqual(mock_url_for.return_value, osclient._get_endpoint(service_type)) call_args = { "service_type": mock_choose_service_type.return_value, "region_name": region_name} if endpoint_type: call_args["interface"] = endpoint_type mock_url_for.assert_called_once_with(**call_args) mock_choose_service_type.assert_called_once_with(service_type) @mock.patch("rally.osclients.Keystone.get_session") def test__get_session(self, mock_keystone_get_session): osclient = 
osclients.OSClient(None, None, None) auth_url = "auth_url" version = "version" import warnings with mock.patch.object(warnings, "warn") as mock_warn: self.assertEqual(mock_keystone_get_session.return_value, osclient._get_session(auth_url, version)) self.assertFalse(mock_warn.called) mock_keystone_get_session.assert_called_once_with(version) class CachedTestCase(test.TestCase): def test_cached(self): clients = osclients.Clients(mock.MagicMock()) client_name = "CachedTestCase.test_cached" fake_client = osclients.configure(client_name)( osclients.OSClient(clients.credential, clients.api_info, clients.cache)) fake_client.create_client = mock.MagicMock() self.assertEqual({}, clients.cache) fake_client() self.assertEqual( {client_name: fake_client.create_client.return_value}, clients.cache) fake_client.create_client.assert_called_once_with() fake_client() fake_client.create_client.assert_called_once_with() fake_client("2") self.assertEqual( {client_name: fake_client.create_client.return_value, "%s('2',)" % client_name: fake_client.create_client.return_value}, clients.cache) clients.clear() self.assertEqual({}, clients.cache) @ddt.ddt class TestCreateKeystoneClient(test.TestCase, OSClientTestCaseUtils): def setUp(self): super(TestCreateKeystoneClient, self).setUp() self.credential = objects.Credential("http://auth_url/v2.0", "user", "pass", "tenant") def test_create_client(self): # NOTE(bigjools): This is a very poor testing strategy as it # tightly couples the test implementation to the tested # function's implementation. Ideally, we'd use a fake keystone # but all that's happening here is that it's checking the right # parameters were passed to the various parts that create a # client. Hopefully one day we'll get a real fake from the # keystone guys. 
self.set_up_keystone_mocks() keystone = osclients.Keystone(self.credential, {}, mock.MagicMock()) keystone.get_session = mock.Mock( return_value=(self.ksa_session, self.ksa_identity_plugin,)) client = keystone.create_client(version=3) kwargs_session = self.credential.to_dict() kwargs_session.update({ "auth_url": "http://auth_url/", "session": self.ksa_session, "timeout": 180.0}) keystone.get_session.assert_called_once_with(version="3") self.ksc_client.Client.assert_called_once_with( session=self.ksa_session, timeout=180.0, version="3") self.assertIs(client, self.ksc_client.Client()) def test_create_client_removes_url_path_if_version_specified(self): # If specifying a version on the client creation call, ensure # the auth_url is versionless and the version required is passed # into the Client() call. self.set_up_keystone_mocks() auth_kwargs, all_kwargs = self.make_auth_args() keystone = osclients.Keystone( self.credential, {}, mock.MagicMock()) keystone.get_session = mock.Mock( return_value=(self.ksa_session, self.ksa_identity_plugin,)) client = keystone.create_client(version="3") self.assertIs(client, self.ksc_client.Client()) called_with = self.ksc_client.Client.call_args_list[0][1] self.assertEqual( {"session": self.ksa_session, "timeout": 180.0, "version": "3"}, called_with) @ddt.data("http://auth_url/v2.0", "http://auth_url/v3", "http://auth_url/", "auth_url") def test_keystone_get_session(self, auth_url): credential = objects.Credential(auth_url, "user", "pass", "tenant") self.set_up_keystone_mocks() keystone = osclients.Keystone(credential, {}, {}) version_data = mock.Mock(return_value=[{"version": (1, 0)}]) self.ksa_auth.discover.Discover.return_value = ( mock.Mock(version_data=version_data)) self.assertEqual((self.ksa_session.Session.return_value, self.ksa_identity_plugin), keystone.get_session()) if auth_url.endswith("v2.0"): self.ksa_password.assert_called_once_with( auth_url=auth_url, password="pass", tenant_name="tenant", username="user") else: 
self.ksa_password.assert_called_once_with( auth_url=auth_url, password="pass", tenant_name="tenant", username="user", domain_name=None, project_domain_name=None, user_domain_name=None) self.ksa_session.Session.assert_has_calls( [mock.call(timeout=180.0, verify=True), mock.call(auth=self.ksa_identity_plugin, timeout=180.0, verify=True)]) def test_keystone_property(self): keystone = osclients.Keystone(None, None, None) self.assertRaises(exceptions.RallyException, lambda: keystone.keystone) @mock.patch("rally.osclients.Keystone.get_session") def test_auth_ref(self, mock_keystone_get_session): session = mock.MagicMock() auth_plugin = mock.MagicMock() mock_keystone_get_session.return_value = (session, auth_plugin) cache = {} keystone = osclients.Keystone(None, None, cache) self.assertEqual(auth_plugin.get_access.return_value, keystone.auth_ref) self.assertEqual(auth_plugin.get_access.return_value, cache["keystone_auth_ref"]) # check that auth_ref was cached. keystone.auth_ref mock_keystone_get_session.assert_called_once_with() @ddt.ddt class OSClientsTestCase(test.TestCase): def setUp(self): super(OSClientsTestCase, self).setUp() self.credential = objects.Credential("http://auth_url/v2.0", "user", "pass", "tenant") self.clients = osclients.Clients(self.credential, {}) self.fake_keystone = fakes.FakeKeystoneClient() keystone_patcher = mock.patch( "rally.osclients.Keystone.create_client", return_value=self.fake_keystone) self.mock_create_keystone_client = keystone_patcher.start() self.auth_ref_patcher = mock.patch("rally.osclients.Keystone.auth_ref") self.auth_ref = self.auth_ref_patcher.start() self.service_catalog = self.auth_ref.service_catalog self.service_catalog.url_for = mock.MagicMock() def test_create_from_env(self): with mock.patch.dict("os.environ", {"OS_AUTH_URL": "foo_auth_url", "OS_USERNAME": "foo_username", "OS_PASSWORD": "foo_password", "OS_TENANT_NAME": "foo_tenant_name", "OS_REGION_NAME": "foo_region_name"}): clients = osclients.Clients.create_from_env() 
self.assertEqual("foo_auth_url", clients.credential.auth_url) self.assertEqual("foo_username", clients.credential.username) self.assertEqual("foo_password", clients.credential.password) self.assertEqual("foo_tenant_name", clients.credential.tenant_name) self.assertEqual("foo_region_name", clients.credential.region_name) def test_keystone(self): self.assertNotIn("keystone", self.clients.cache) client = self.clients.keystone() self.assertEqual(client, self.fake_keystone) credential = {"timeout": cfg.CONF.openstack_client_http_timeout, "insecure": False, "cacert": None} kwargs = self.credential.to_dict() kwargs.update(credential) self.mock_create_keystone_client.assert_called_once_with() self.assertEqual(self.fake_keystone, self.clients.cache["keystone"]) def test_verified_keystone(self): self.auth_ref.role_names = ["admin"] self.assertEqual(self.mock_create_keystone_client.return_value, self.clients.verified_keystone()) def test_verified_keystone_user_not_admin(self): self.auth_ref.role_names = ["notadmin"] self.assertRaises(exceptions.InvalidAdminException, self.clients.verified_keystone) @mock.patch("rally.osclients.Keystone.get_session") def test_verified_keystone_unauthorized(self, mock_keystone_get_session): self.auth_ref_patcher.stop() mock_keystone_get_session.side_effect = ( keystone_exceptions.Unauthorized ) self.assertRaises(exceptions.InvalidEndpointsException, self.clients.verified_keystone) @mock.patch("rally.osclients.Keystone.get_session") def test_verified_keystone_unreachable(self, mock_keystone_get_session): self.auth_ref_patcher.stop() mock_keystone_get_session.side_effect = ( keystone_exceptions.AuthorizationFailure ) self.assertRaises(exceptions.HostUnreachableException, self.clients.verified_keystone) @mock.patch("rally.osclients.Nova._get_endpoint") def test_nova(self, mock_nova__get_endpoint): fake_nova = fakes.FakeNovaClient() mock_nova__get_endpoint.return_value = "http://fake.to:2/fake" mock_nova = mock.MagicMock() 
mock_nova.client.Client.return_value = fake_nova mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("nova", self.clients.cache) with mock.patch.dict("sys.modules", {"novaclient": mock_nova, "keystoneauth1": mock_keystoneauth1}): mock_keystoneauth1.discover.Discover.return_value = ( mock.Mock(version_data=mock.Mock(return_value=[ {"version": (2, 0)}])) ) client = self.clients.nova() self.assertEqual(fake_nova, client) kw = { "version": "2", "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_nova__get_endpoint.return_value} mock_nova.client.Client.assert_called_once_with(**kw) self.assertEqual(fake_nova, self.clients.cache["nova"]) @mock.patch("rally.osclients.Neutron._get_endpoint") def test_neutron(self, mock_neutron__get_endpoint): fake_neutron = fakes.FakeNeutronClient() mock_neutron__get_endpoint.return_value = "http://fake.to:2/fake" mock_neutron = mock.MagicMock() mock_keystoneauth1 = mock.MagicMock() mock_neutron.client.Client.return_value = fake_neutron self.assertNotIn("neutron", self.clients.cache) with mock.patch.dict("sys.modules", {"neutronclient.neutron": mock_neutron, "keystoneauth1": mock_keystoneauth1}): client = self.clients.neutron() self.assertEqual(fake_neutron, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_neutron__get_endpoint.return_value} mock_neutron.client.Client.assert_called_once_with("2.0", **kw) self.assertEqual(fake_neutron, self.clients.cache["neutron"]) @mock.patch("rally.osclients.Neutron._get_endpoint") def test_neutron_endpoint_type(self, mock_neutron__get_endpoint): fake_neutron = fakes.FakeNeutronClient() mock_neutron__get_endpoint.return_value = "http://fake.to:2/fake" mock_neutron = mock.MagicMock() mock_keystoneauth1 = mock.MagicMock() mock_neutron.client.Client.return_value = fake_neutron self.assertNotIn("neutron", self.clients.cache) self.credential.endpoint_type = "internal" with mock.patch.dict("sys.modules", {"neutronclient.neutron": mock_neutron, 
"keystoneauth1": mock_keystoneauth1}): client = self.clients.neutron() self.assertEqual(fake_neutron, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_neutron__get_endpoint.return_value, "endpoint_type": "internal"} mock_neutron.client.Client.assert_called_once_with("2.0", **kw) self.assertEqual(fake_neutron, self.clients.cache["neutron"]) @mock.patch("rally.osclients.Heat._get_endpoint") def test_heat(self, mock_heat__get_endpoint): fake_heat = fakes.FakeHeatClient() mock_heat__get_endpoint.return_value = "http://fake.to:2/fake" mock_heat = mock.MagicMock() mock_keystoneauth1 = mock.MagicMock() mock_heat.client.Client.return_value = fake_heat self.assertNotIn("heat", self.clients.cache) with mock.patch.dict("sys.modules", {"heatclient": mock_heat, "keystoneauth1": mock_keystoneauth1}): client = self.clients.heat() self.assertEqual(fake_heat, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint": mock_heat__get_endpoint.return_value, "endpoint_override": mock_heat__get_endpoint.return_value} mock_heat.client.Client.assert_called_once_with("1", **kw) self.assertEqual(fake_heat, self.clients.cache["heat"]) @mock.patch("rally.osclients.Heat._get_endpoint") def test_heat_endpoint_type_interface(self, mock_heat__get_endpoint): fake_heat = fakes.FakeHeatClient() mock_heat__get_endpoint.return_value = "http://fake.to:2/fake" mock_heat = mock.MagicMock() mock_keystoneauth1 = mock.MagicMock() mock_heat.client.Client.return_value = fake_heat self.assertNotIn("heat", self.clients.cache) self.credential.endpoint_type = "internal" self.credential.interface = "internal" with mock.patch.dict("sys.modules", {"heatclient": mock_heat, "keystoneauth1": mock_keystoneauth1}): client = self.clients.heat() self.assertEqual(fake_heat, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint": mock_heat__get_endpoint.return_value, "endpoint_override": mock_heat__get_endpoint.return_value, "endpoint_type": 
"internal", "interface": "internal"} mock_heat.client.Client.assert_called_once_with("1", **kw) self.assertEqual(fake_heat, self.clients.cache["heat"]) @mock.patch("rally.osclients.Glance._get_endpoint") def test_glance(self, mock_glance__get_endpoint): fake_glance = fakes.FakeGlanceClient() mock_glance = mock.MagicMock() mock_glance__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() mock_glance.Client = mock.MagicMock(return_value=fake_glance) with mock.patch.dict("sys.modules", {"glanceclient": mock_glance, "keystoneauth1": mock_keystoneauth1}): self.assertNotIn("glance", self.clients.cache) client = self.clients.glance() self.assertEqual(fake_glance, client) kw = { "version": "2", "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_glance__get_endpoint.return_value} mock_glance.Client.assert_called_once_with(**kw) self.assertEqual(fake_glance, self.clients.cache["glance"]) @mock.patch("rally.osclients.Cinder._get_endpoint") def test_cinder(self, mock_cinder__get_endpoint): fake_cinder = mock.MagicMock(client=fakes.FakeCinderClient()) mock_cinder = mock.MagicMock() mock_cinder.client.Client.return_value = fake_cinder mock_cinder__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("cinder", self.clients.cache) with mock.patch.dict("sys.modules", {"cinderclient": mock_cinder, "keystoneauth1": mock_keystoneauth1}): client = self.clients.cinder() self.assertEqual(fake_cinder, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_cinder__get_endpoint.return_value} mock_cinder.client.Client.assert_called_once_with( "2", **kw) self.assertEqual(fake_cinder, self.clients.cache["cinder"]) @mock.patch("rally.osclients.Manila._get_endpoint") def test_manila(self, mock_manila__get_endpoint): mock_manila = mock.MagicMock() mock_manila__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() 
self.assertNotIn("manila", self.clients.cache) with mock.patch.dict("sys.modules", {"manilaclient": mock_manila, "keystoneauth1": mock_keystoneauth1}): client = self.clients.manila() self.assertEqual(mock_manila.client.Client.return_value, client) kw = { "session": mock_keystoneauth1.session.Session(), "service_catalog_url": mock_manila__get_endpoint.return_value } mock_manila.client.Client.assert_called_once_with("1", **kw) self.assertEqual( mock_manila.client.Client.return_value, self.clients.cache["manila"]) @mock.patch("rally.osclients.Ceilometer._get_endpoint") def test_ceilometer(self, mock_ceilometer__get_endpoint): fake_ceilometer = fakes.FakeCeilometerClient() mock_ceilometer = mock.MagicMock() mock_ceilometer__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() mock_ceilometer.client.get_client = mock.MagicMock( return_value=fake_ceilometer) self.assertNotIn("ceilometer", self.clients.cache) with mock.patch.dict("sys.modules", {"ceilometerclient": mock_ceilometer, "keystoneauth1": mock_keystoneauth1}): client = self.clients.ceilometer() self.assertEqual(fake_ceilometer, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint_override": mock_ceilometer__get_endpoint.return_value } mock_ceilometer.client.get_client.assert_called_once_with("2", **kw) self.assertEqual(fake_ceilometer, self.clients.cache["ceilometer"]) def test_gnocchi(self): fake_gnocchi = fakes.FakeGnocchiClient() mock_gnocchi = mock.MagicMock() mock_gnocchi.client.Client.return_value = fake_gnocchi mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("gnocchi", self.clients.cache) with mock.patch.dict("sys.modules", {"gnocchiclient": mock_gnocchi, "keystoneauth1": mock_keystoneauth1}): mock_keystoneauth1.discover.Discover.return_value = ( mock.Mock(version_data=mock.Mock(return_value=[ {"version": (1, 0)}])) ) client = self.clients.gnocchi() self.assertEqual(fake_gnocchi, client) kw = {"version": "1", "session": 
mock_keystoneauth1.session.Session(), "service_type": "metric"} mock_gnocchi.client.Client.assert_called_once_with(**kw) self.assertEqual(fake_gnocchi, self.clients.cache["gnocchi"]) def test_monasca(self): fake_monasca = fakes.FakeMonascaClient() mock_monasca = mock.MagicMock() mock_monasca.client.Client.return_value = fake_monasca self.assertNotIn("monasca", self.clients.cache) with mock.patch.dict("sys.modules", {"monascaclient": mock_monasca}): client = self.clients.monasca() self.assertEqual(fake_monasca, client) self.service_catalog.url_for.assert_called_once_with( service_type="monitoring", region_name=self.credential.region_name) os_endpoint = self.service_catalog.url_for.return_value kw = {"token": self.auth_ref.auth_token, "timeout": cfg.CONF.openstack_client_http_timeout, "insecure": False, "cacert": None, "username": self.credential.username, "password": self.credential.password, "tenant_name": self.credential.tenant_name, "auth_url": self.credential.auth_url } mock_monasca.client.Client.assert_called_once_with("2_0", os_endpoint, **kw) self.assertEqual(mock_monasca.client.Client.return_value, self.clients.cache["monasca"]) @mock.patch("rally.osclients.Ironic._get_endpoint") def test_ironic(self, mock_ironic__get_endpoint): fake_ironic = fakes.FakeIronicClient() mock_ironic = mock.MagicMock() mock_ironic.client.get_client = mock.MagicMock( return_value=fake_ironic) mock_ironic__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("ironic", self.clients.cache) with mock.patch.dict("sys.modules", {"ironicclient": mock_ironic, "keystoneauth1": mock_keystoneauth1}): client = self.clients.ironic() self.assertEqual(fake_ironic, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint": mock_ironic__get_endpoint.return_value} mock_ironic.client.get_client.assert_called_once_with("1", **kw) self.assertEqual(fake_ironic, self.clients.cache["ironic"]) 
@mock.patch("rally.osclients.Sahara._get_endpoint") def test_sahara(self, mock_sahara__get_endpoint): fake_sahara = fakes.FakeSaharaClient() mock_sahara = mock.MagicMock() mock_sahara.client.Client = mock.MagicMock(return_value=fake_sahara) mock_sahara__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("sahara", self.clients.cache) with mock.patch.dict("sys.modules", {"saharaclient": mock_sahara, "keystoneauth1": mock_keystoneauth1}): client = self.clients.sahara() self.assertEqual(fake_sahara, client) kw = { "session": mock_keystoneauth1.session.Session(), "sahara_url": mock_sahara__get_endpoint.return_value} mock_sahara.client.Client.assert_called_once_with(1.1, **kw) self.assertEqual(fake_sahara, self.clients.cache["sahara"]) def test_zaqar(self): fake_zaqar = fakes.FakeZaqarClient() mock_zaqar = mock.MagicMock() mock_zaqar.client.Client = mock.MagicMock(return_value=fake_zaqar) self.assertNotIn("zaqar", self.clients.cache) mock_keystoneauth1 = mock.MagicMock() with mock.patch.dict("sys.modules", {"zaqarclient.queues": mock_zaqar, "keystoneauth1": mock_keystoneauth1}): client = self.clients.zaqar() self.assertEqual(fake_zaqar, client) self.service_catalog.url_for.assert_called_once_with( service_type="messaging", region_name=self.credential.region_name) fake_zaqar_url = self.service_catalog.url_for.return_value mock_zaqar.client.Client.assert_called_once_with( url=fake_zaqar_url, version=1.1, session=mock_keystoneauth1.session.Session()) self.assertEqual(fake_zaqar, self.clients.cache["zaqar"], mock_keystoneauth1.session.Session()) @mock.patch("rally.osclients.Trove._get_endpoint") def test_trove(self, mock_trove__get_endpoint): fake_trove = fakes.FakeTroveClient() mock_trove = mock.MagicMock() mock_trove.client.Client = mock.MagicMock(return_value=fake_trove) mock_trove__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("trove", self.clients.cache) 
with mock.patch.dict("sys.modules", {"troveclient": mock_trove, "keystoneauth1": mock_keystoneauth1}): client = self.clients.trove() self.assertEqual(fake_trove, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint": mock_trove__get_endpoint.return_value} mock_trove.client.Client.assert_called_once_with("1.0", **kw) self.assertEqual(fake_trove, self.clients.cache["trove"]) def test_mistral(self): fake_mistral = fakes.FakeMistralClient() mock_mistral = mock.Mock() mock_mistral.client.client.return_value = fake_mistral self.assertNotIn("mistral", self.clients.cache) with mock.patch.dict( "sys.modules", {"mistralclient": mock_mistral, "mistralclient.api": mock_mistral}): client = self.clients.mistral() self.assertEqual(fake_mistral, client) self.service_catalog.url_for.assert_called_once_with( service_type="workflowv2", region_name=self.credential.region_name ) fake_mistral_url = self.service_catalog.url_for.return_value mock_mistral.client.client.assert_called_once_with( mistral_url=fake_mistral_url, service_type="workflowv2", auth_token=self.auth_ref.auth_token ) self.assertEqual(fake_mistral, self.clients.cache["mistral"]) def test_swift(self): fake_swift = fakes.FakeSwiftClient() mock_swift = mock.MagicMock() mock_swift.client.Connection = mock.MagicMock(return_value=fake_swift) self.assertNotIn("swift", self.clients.cache) with mock.patch.dict("sys.modules", {"swiftclient": mock_swift}): client = self.clients.swift() self.assertEqual(client, fake_swift) self.service_catalog.url_for.assert_called_once_with( service_type="object-store", region_name=self.credential.region_name) kw = {"retries": 1, "preauthurl": self.service_catalog.url_for.return_value, "preauthtoken": self.auth_ref.auth_token, "insecure": False, "cacert": None, "user": self.credential.username, "tenant_name": self.credential.tenant_name, } mock_swift.client.Connection.assert_called_once_with(**kw) self.assertEqual(self.clients.cache["swift"], fake_swift) 
@mock.patch("rally.osclients.EC2._get_endpoint") def test_ec2(self, mock_ec2__get_endpoint): mock_boto = mock.Mock() self.fake_keystone.ec2 = mock.Mock() self.fake_keystone.ec2.create.return_value = mock.Mock( access="fake_access", secret="fake_secret") mock_ec2__get_endpoint.return_value = "http://fake.to:1/fake" fake_ec2 = fakes.FakeEC2Client() mock_boto.connect_ec2_endpoint.return_value = fake_ec2 self.assertNotIn("ec2", self.clients.cache) with mock.patch.dict("sys.modules", {"boto": mock_boto}): client = self.clients.ec2() self.assertEqual(fake_ec2, client) kw = { "url": "http://fake.to:1/fake", "aws_access_key_id": "fake_access", "aws_secret_access_key": "fake_secret", "is_secure": self.credential.insecure, } mock_boto.connect_ec2_endpoint.assert_called_once_with(**kw) self.assertEqual(fake_ec2, self.clients.cache["ec2"]) @mock.patch("rally.osclients.Keystone.service_catalog") def test_services(self, mock_keystone_service_catalog): available_services = {consts.ServiceType.IDENTITY: {}, consts.ServiceType.COMPUTE: {}, "some_service": {}} mock_get_endpoints = mock_keystone_service_catalog.get_endpoints mock_get_endpoints.return_value = available_services clients = osclients.Clients(self.credential) self.assertEqual( {consts.ServiceType.IDENTITY: consts.Service.KEYSTONE, consts.ServiceType.COMPUTE: consts.Service.NOVA, "some_service": "__unknown__"}, clients.services()) def test_murano(self): fake_murano = fakes.FakeMuranoClient() mock_murano = mock.Mock() mock_murano.client.Client.return_value = fake_murano self.assertNotIn("murano", self.clients.cache) with mock.patch.dict("sys.modules", {"muranoclient": mock_murano}): client = self.clients.murano() self.assertEqual(fake_murano, client) self.service_catalog.url_for.assert_called_once_with( service_type="application-catalog", region_name=self.credential.region_name ) kw = {"endpoint": self.service_catalog.url_for.return_value, "token": self.auth_ref.auth_token} 
mock_murano.client.Client.assert_called_once_with("1", **kw) self.assertEqual(fake_murano, self.clients.cache["murano"]) @mock.patch("rally.osclients.Keystone.get_session") @ddt.data( {}, {"version": "2"}, {"version": "1"}, {"version": None} ) @ddt.unpack def test_designate(self, mock_keystone_get_session, version=None): fake_designate = fakes.FakeDesignateClient() mock_designate = mock.Mock() mock_designate.client.Client.return_value = fake_designate mock_keystone_get_session.return_value = ("fake_session", "fake_auth_plugin") self.assertNotIn("designate", self.clients.cache) with mock.patch.dict("sys.modules", {"designateclient": mock_designate}): if version is not None: client = self.clients.designate(version=version) else: client = self.clients.designate() self.assertEqual(fake_designate, client) self.service_catalog.url_for.assert_called_once_with( service_type="dns", region_name=self.credential.region_name ) default = version or "1" # Check that we append /v url = self.service_catalog.url_for.return_value url.__iadd__.assert_called_once_with("/v%s" % default) mock_keystone_get_session.assert_called_once_with() if version == "2": mock_designate.client.Client.assert_called_once_with( version, endpoint_override=url.__iadd__.return_value, session="fake_session") elif version == "1": mock_designate.client.Client.assert_called_once_with( version, endpoint=url.__iadd__.return_value, session="fake_session") key = "designate" if version is not None: key += "%s" % {"version": version} self.assertEqual(fake_designate, self.clients.cache[key]) def test_senlin(self): mock_senlin = mock.MagicMock() self.assertNotIn("senlin", self.clients.cache) with mock.patch.dict("sys.modules", {"senlinclient": mock_senlin}): client = self.clients.senlin() self.assertEqual(mock_senlin.client.Client.return_value, client) mock_senlin.client.Client.assert_called_once_with( "1", username=self.credential.username, password=self.credential.password, project_name=self.credential.tenant_name, 
cert=self.credential.cacert, auth_url=self.credential.auth_url) self.assertEqual( mock_senlin.client.Client.return_value, self.clients.cache["senlin"]) @mock.patch("rally.osclients.Magnum._get_endpoint") def test_magnum(self, mock_magnum__get_endpoint): fake_magnum = fakes.FakeMagnumClient() mock_magnum = mock.MagicMock() mock_magnum.client.Client.return_value = fake_magnum mock_magnum__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() self.assertNotIn("magnum", self.clients.cache) with mock.patch.dict("sys.modules", {"magnumclient": mock_magnum, "keystoneauth1": mock_keystoneauth1}): client = self.clients.magnum() self.assertEqual(fake_magnum, client) kw = { "interface": self.credential.endpoint_type, "session": mock_keystoneauth1.session.Session(), "magnum_url": mock_magnum__get_endpoint.return_value} mock_magnum.client.Client.assert_called_once_with(**kw) self.assertEqual(fake_magnum, self.clients.cache["magnum"]) @mock.patch("rally.osclients.Watcher._get_endpoint") def test_watcher(self, mock_watcher__get_endpoint): fake_watcher = fakes.FakeWatcherClient() mock_watcher = mock.MagicMock() mock_watcher__get_endpoint.return_value = "http://fake.to:2/fake" mock_keystoneauth1 = mock.MagicMock() mock_watcher.client.Client.return_value = fake_watcher self.assertNotIn("watcher", self.clients.cache) with mock.patch.dict("sys.modules", {"watcherclient": mock_watcher, "keystoneauth1": mock_keystoneauth1}): client = self.clients.watcher() self.assertEqual(fake_watcher, client) kw = { "session": mock_keystoneauth1.session.Session(), "endpoint": mock_watcher__get_endpoint.return_value} mock_watcher.client.Client.assert_called_once_with("1", **kw) self.assertEqual(fake_watcher, self.clients.cache["watcher"]) rally-0.9.1/tests/unit/test_logging.py0000664000567000056710000001561313073417717021205 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import mock from rally.common import logging as log from tests.unit import test class LogTestCase(test.TestCase): @mock.patch("rally.common.logging.CONF") @mock.patch("rally.common.logging.handlers") @mock.patch("rally.common.logging.oslogging") def test_setup(self, mock_oslogging, mock_handlers, mock_conf): proj = "fakep" version = "fakev" mock_handlers.ColorHandler.LEVEL_COLORS = { logging.DEBUG: "debug_color"} mock_conf.rally_debug = True log.setup(proj, version) self.assertIn(logging.RDEBUG, mock_handlers.ColorHandler.LEVEL_COLORS) self.assertEqual( mock_handlers.ColorHandler.LEVEL_COLORS[logging.DEBUG], mock_handlers.ColorHandler.LEVEL_COLORS[logging.RDEBUG]) mock_oslogging.setup.assert_called_once_with(mock_conf, proj, version) mock_oslogging.getLogger(None).logger.setLevel.assert_called_once_with( logging.RDEBUG) @mock.patch("rally.common.logging.log") @mock.patch("rally.common.logging.RallyContextAdapter") @mock.patch("rally.common.logging.oslogging") def test_getLogger(self, mock_oslogging, mock_rally_context_adapter, mock_log): name = "fake" vers = "fake" mock_oslogging._loggers = {} returned_logger = log.getLogger(name, vers) self.assertIn(name, mock_oslogging._loggers) mock_rally_context_adapter.assert_called_once_with( mock_log.getLogger(name), {"project": "rally", "version": vers}) self.assertEqual(mock_oslogging._loggers[name], returned_logger) class LogRallyContaxtAdapter(test.TestCase): 
@mock.patch("rally.common.logging.log") @mock.patch("rally.common.logging.oslogging.KeywordArgumentAdapter") def test_debug(self, mock_keyword_argument_adapter, mock_log): mock_log.RDEBUG = 123 fake_msg = "fake message" radapter = log.RallyContextAdapter(mock.MagicMock(), "fakep") radapter.log = mock.MagicMock() radapter.debug(fake_msg) radapter.log.assert_called_once_with(mock_log.RDEBUG, fake_msg) class ExceptionLoggerTestCase(test.TestCase): @mock.patch("rally.common.logging.is_debug") def test_context(self, mock_is_debug): # Prepare mock_is_debug.return_value = True logger = mock.MagicMock() exception = Exception() # Run with log.ExceptionLogger(logger, "foo") as e: raise exception # Assertions logger.warning.assert_called_once_with("foo") logger.exception.assert_called_once_with(exception) logger.debug.assert_called_once_with(exception) self.assertEqual(e.exception, exception) class LogCatcherTestCase(test.TestCase): # FIXME(pboldin): These are really functional tests and should be moved # there when the infrastructure is ready def test_logcatcher(self): LOG = log.getLogger("testlogger") LOG.logger.setLevel(log.INFO) with log.LogCatcher(LOG) as catcher: LOG.warning("Warning") LOG.info("Info") LOG.debug("Debug") catcher.assertInLogs("Warning") self.assertRaises(AssertionError, catcher.assertInLogs, "Error") self.assertEqual(["Warning", "Info"], catcher.fetchLogs()) self.assertEqual(2, len(catcher.fetchLogRecords())) class CatcherHandlerTestCase(test.TestCase): @mock.patch("logging.handlers.BufferingHandler.__init__") def test_init(self, mock_buffering_handler___init__): catcher_handler = log.CatcherHandler() mock_buffering_handler___init__.assert_called_once_with( catcher_handler, 0) def test_shouldFlush(self): catcher_handler = log.CatcherHandler() self.assertFalse(catcher_handler.shouldFlush()) def test_emit(self): catcher_handler = log.CatcherHandler() catcher_handler.buffer = mock.Mock() catcher_handler.emit("foobar") 
catcher_handler.buffer.append.assert_called_once_with("foobar") class LogCatcherUnitTestCase(test.TestCase): def setUp(self): super(LogCatcherUnitTestCase, self).setUp() patcher = mock.patch("rally.common.logging.CatcherHandler") self.catcher_handler = patcher.start() self.catcher_handler.return_value.buffer = [ mock.Mock(msg="foo"), mock.Mock(msg="bar")] self.addCleanup(patcher.stop) self.logger = mock.Mock() def test_init(self): catcher = log.LogCatcher(self.logger) self.assertEqual(self.logger.logger, catcher.logger) self.assertEqual(self.catcher_handler.return_value, catcher.handler) self.catcher_handler.assert_called_once_with() def test_enter(self): catcher = log.LogCatcher(self.logger) self.assertEqual(catcher, catcher.__enter__()) self.logger.logger.addHandler.assert_called_once_with( self.catcher_handler.return_value) def test_exit(self): catcher = log.LogCatcher(self.logger) catcher.__exit__(None, None, None) self.logger.logger.removeHandler.assert_called_once_with( self.catcher_handler.return_value) def test_assertInLogs(self): catcher = log.LogCatcher(self.logger) self.assertEqual(["foo"], catcher.assertInLogs("foo")) self.assertEqual(["bar"], catcher.assertInLogs("bar")) self.assertRaises(AssertionError, catcher.assertInLogs, "foobar") def test_assertInLogs_contains(self): catcher = log.LogCatcher(self.logger) record_mock = mock.MagicMock() self.catcher_handler.return_value.buffer = [record_mock] record_mock.msg.__contains__.return_value = True self.assertEqual([record_mock.msg], catcher.assertInLogs("foo")) record_mock.msg.__contains__.assert_called_once_with("foo") def test_fetchLogRecords(self): catcher = log.LogCatcher(self.logger) self.assertEqual(self.catcher_handler.return_value.buffer, catcher.fetchLogRecords()) def test_fetchLogs(self): catcher = log.LogCatcher(self.logger) self.assertEqual( [r.msg for r in self.catcher_handler.return_value.buffer], catcher.fetchLogs()) 
rally-0.9.1/tests/unit/rally_jobs/0000775000567000056710000000000013073420067020270 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/rally_jobs/__init__.py0000664000567000056710000000000013073417717022377 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/rally_jobs/test_jobs.py0000664000567000056710000000523213073417717022650 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import traceback import mock import rally from rally import api from rally.common.plugin import discover from rally.common import yamlutils as yaml from rally.task import engine from tests.unit import test class RallyJobsTestCase(test.TestCase): rally_jobs_path = os.path.join( os.path.dirname(rally.__file__), "..", "rally-jobs") @mock.patch("rally.task.engine.TaskEngine" "._validate_config_semantic") def test_schema_is_valid( self, mock_task_engine__validate_config_semantic): discover.load_plugins(os.path.join(self.rally_jobs_path, "plugins")) files = {f for f in os.listdir(self.rally_jobs_path) if (os.path.isfile(os.path.join(self.rally_jobs_path, f)) and f.endswith(".yaml") and not f.endswith("_args.yaml"))} # TODO(andreykurilin): figure out why it fails files -= {"rally-mos.yaml", "sahara-clusters.yaml"} for filename in files: full_path = os.path.join(self.rally_jobs_path, filename) with open(full_path) as task_file: try: args_file = os.path.join( self.rally_jobs_path, filename.rsplit(".", 1)[0] + "_args.yaml") args = {} if 
os.path.exists(args_file): args = yaml.safe_load(open(args_file).read()) if not isinstance(args, dict): raise TypeError( "args file %s must be dict in yaml or json " "presenatation" % args_file) task = api._Task.render_template(task_file.read(), **args) task = yaml.safe_load(task) eng = engine.TaskEngine(task, mock.MagicMock(), mock.Mock()) eng.validate() except Exception: print(traceback.format_exc()) self.fail("Wrong task input file: %s" % full_path) rally-0.9.1/tests/unit/test_hacking.py0000664000567000056710000003646413073417717021172 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import tokenize import ddt import six from tests.hacking import checks from tests.unit import test @ddt.ddt class HackingTestCase(test.TestCase): def test__parse_assert_mock_str(self): pos, method, obj = checks._parse_assert_mock_str( "mock_clients.fake().quotas.delete.assert_called_once()") self.assertEqual("assert_called_once", method) self.assertEqual("mock_clients.fake().quotas.delete", obj) def test__parse_assert_mock_str_no_assert(self): pos, method, obj = checks._parse_assert_mock_str( "mock_clients.fake().quotas.delete.") self.assertIsNone(pos) self.assertIsNone(method) self.assertIsNone(obj) @ddt.data( {"line": "fdafadfdas # noqa", "result": []}, {"line": " # fdafadfdas", "result": []}, {"line": " ", "result": []}, {"line": "otherstuff", "result": [42]} ) @ddt.unpack def test_skip_ignored_lines(self, line, result): @checks.skip_ignored_lines def any_gen(physical_line, logical_line, file_name): yield 42 self.assertEqual(result, list(any_gen(line, line, "f"))) def test_correct_usage_of_assert_from_mock(self): correct_method_names = ["assert_any_call", "assert_called_once_with", "assert_called_with", "assert_has_calls"] for name in correct_method_names: line = "some_mock.%s(asd)" % name self.assertEqual(0, len( list(checks.check_assert_methods_from_mock( line, line, "./tests/fake/test")))) def test_wrong_usage_of_broad_assert_from_mock(self): fake_method = "rtfm.assert_something()" actual_number, actual_msg = next(checks.check_assert_methods_from_mock( fake_method, fake_method, "./tests/fake/test")) self.assertEqual(4, actual_number) self.assertTrue(actual_msg.startswith("N301")) def test_wrong_usage_of_assert_called_from_mock(self): fake_method = "rtfm.assert_called()" actual_number, actual_msg = next(checks.check_assert_methods_from_mock( fake_method, fake_method, "./tests/fake/test")) self.assertEqual(4, actual_number) self.assertTrue(actual_msg.startswith("N302")) def test_wrong_usage_of_assert_called_once_from_mock(self): fake_method = 
"rtfm.assert_called_once()" actual_number, actual_msg = next(checks.check_assert_methods_from_mock( fake_method, fake_method, "./tests/fake/test")) self.assertEqual(4, actual_number) self.assertTrue(actual_msg.startswith("N303")) def _assert_good_samples(self, checker, samples, module_file="f"): for s in samples: self.assertEqual([], list(checker(s, s, module_file)), s) def _assert_bad_samples(self, checker, samples, module_file="f"): for s in samples: self.assertEqual(1, len(list(checker(s, s, module_file))), s) def test_check_wrong_logging_import(self): bad_imports = ["from oslo_log import log", "import oslo_log", "import logging"] good_imports = ["from rally.common import logging", "from rally.common.logging", "import rally.common.logging"] for bad in bad_imports: checkres = checks.check_import_of_logging(bad, bad, "fakefile") self.assertIsNotNone(next(checkres)) for bad in bad_imports: checkres = checks.check_import_of_logging( bad, bad, "./rally/common/logging.py") self.assertEqual([], list(checkres)) for good in good_imports: checkres = checks.check_import_of_logging(good, good, "fakefile") self.assertEqual([], list(checkres)) def test_no_translate_debug_logs(self): bad_samples = ["LOG.debug(_('foo'))"] self._assert_bad_samples(checks.no_translate_debug_logs, bad_samples) good_samples = ["LOG.debug('foo')", "LOG.info(_('foo'))"] self._assert_good_samples(checks.no_translate_debug_logs, good_samples) def test_no_use_conf_debug_check(self): bad_samples = [ "if CONF.debug:", "if cfg.CONF.debug" ] self._assert_bad_samples(checks.no_use_conf_debug_check, bad_samples) good_samples = ["if logging.is_debug()"] self._assert_good_samples(checks.no_use_conf_debug_check, good_samples) @ddt.data( { "line": "self.assertTrue(isinstance(e, exception.BuildAbortExc))", "result": 1 }, { "line": "self.assertTrue()", "result": 0 } ) @ddt.unpack def test_assert_true_instance(self, line, result): self.assertEqual( result, len(list(checks.assert_true_instance(line, line, "f")))) 
@ddt.data( { "line": "self.assertEqual(type(als['QuicAssist']), list)", "result": 1 }, { "line": "self.assertTrue()", "result": 0 } ) @ddt.unpack def test_assert_equal_type(self, line, result): self.assertEqual( len(list(checks.assert_equal_type(line, line, "f"))), result) @ddt.data( {"line": "self.assertEqual(A, None)", "result": 1}, {"line": "self.assertEqual(None, A)", "result": 1}, {"line": "self.assertIsNone()", "result": 0} ) @ddt.unpack def test_assert_equal_none(self, line, result): self.assertEqual( len(list(checks.assert_equal_none(line, line, "f"))), result) @ddt.data( {"line": "self.assertNotEqual(A, None)", "result": 1}, {"line": "self.assertNotEqual(None, A)", "result": 1}, {"line": "self.assertIsNotNone()", "result": 0} ) @ddt.unpack def test_assert_not_equal_none(self, line, result): self.assertEqual( len(list(checks.assert_not_equal_none(line, line, "f"))), result) def test_assert_true_or_false_with_in_or_not_in(self): good_lines = [ "self.assertTrue(any(A > 5 for A in B))", "self.assertTrue(any(A > 5 for A in B), 'some message')", "self.assertFalse(some in list1 and some2 in list2)" ] self._assert_good_samples(checks.assert_true_or_false_with_in, good_lines) bad_lines = [ "self.assertTrue(A in B)", "self.assertFalse(A in B)", "self.assertTrue(A not in B)", "self.assertFalse(A not in B)", "self.assertTrue(A in B, 'some message')", "self.assertFalse(A in B, 'some message')", "self.assertTrue(A not in B, 'some message')", "self.assertFalse(A not in B, 'some message')", "self.assertTrue(A in 'some string with spaces')", "self.assertTrue(A in 'some string with spaces')", "self.assertTrue(A in ['1', '2', '3'])", "self.assertTrue(A in [1, 2, 3])" ] self._assert_bad_samples(checks.assert_true_or_false_with_in, bad_lines) def test_assert_equal_in(self): good_lines = [ "self.assertEqual(any(a==1 for a in b), True)", "self.assertEqual(True, any(a==1 for a in b))", "self.assertEqual(any(a==1 for a in b), False)", "self.assertEqual(False, any(a==1 for a in 
b))" ] self._assert_good_samples(checks.assert_equal_in, good_lines) bad_lines = [ "self.assertEqual(a in b, True)", "self.assertEqual(a not in b, True)", "self.assertEqual('str' in 'string', True)", "self.assertEqual('str' not in 'string', True)", "self.assertEqual(True, a in b)", "self.assertEqual(True, a not in b)", "self.assertEqual(True, 'str' in 'string')", "self.assertEqual(True, 'str' not in 'string')", "self.assertEqual(a in b, False)", "self.assertEqual(a not in b, False)", "self.assertEqual('str' in 'string', False)", "self.assertEqual('str' not in 'string', False)", "self.assertEqual(False, a in b)", "self.assertEqual(False, a not in b)", "self.assertEqual(False, 'str' in 'string')", "self.assertEqual(False, 'str' not in 'string')", ] self._assert_bad_samples(checks.assert_equal_in, bad_lines) def test_check_no_direct_rally_objects_import(self): bad_imports = ["from rally.common.objects import task", "import rally.common.objects.task"] self._assert_bad_samples(checks.check_no_direct_rally_objects_import, bad_imports) self._assert_good_samples( checks.check_no_direct_rally_objects_import, bad_imports, module_file="./rally/common/objects/__init__.py") good_imports = ["from rally.common import objects"] self._assert_good_samples(checks.check_no_direct_rally_objects_import, good_imports) def test_check_no_oslo_deprecated_import(self): bad_imports = ["from oslo.config", "import oslo.config", "from oslo.db", "import oslo.db", "from oslo.i18n", "import oslo.i18n", "from oslo.serialization", "import oslo.serialization", "from oslo.utils", "import oslo.utils"] self._assert_bad_samples(checks.check_no_oslo_deprecated_import, bad_imports) def test_check_quotas(self): bad_lines = [ "a = '1'", "a = \"a\" + 'a'", "'", "\"\"\"\"\"\" + ''''''" ] self._assert_bad_samples(checks.check_quotes, bad_lines) good_lines = [ "\"'a'\" + \"\"\"a'''fdfd'''\"\"\"", "\"fdfdfd\" + \"''''''\"", "a = '' # noqa " ] self._assert_good_samples(checks.check_quotes, good_lines) def 
test_check_no_constructor_data_struct(self): bad_struct = [ "= dict()", "= list()" ] self._assert_bad_samples(checks.check_no_constructor_data_struct, bad_struct) good_struct = [ "= []", "= {}", ] self._assert_good_samples(checks.check_no_constructor_data_struct, good_struct) def test_check_dict_formatting_in_string(self): bad = [ "\"%(a)s\" % d", "\"Split across \"\n\"multiple lines: %(a)f\" % d", "\"%(a)X split across \"\n\"multiple lines\" % d", "\"%(a)-5.2f: Split %(\"\n\"a)#Lu stupidly\" % d", "\"Comment between \" # wtf\n\"split lines: %(a) -6.2f\" % d", "\"Two strings\" + \" added: %(a)-6.2f\" % d", "\"half legit (%(a)s %(b)s)\" % d + \" half bogus: %(a)s\" % d", "(\"Parenthesized: %(a)s\") % d", "(\"Parenthesized \"\n\"concatenation: %(a)s\") % d", "(\"Parenthesized \" + \"addition: %(a)s\") % d", "\"Complete %s\" % (\"foolisness: %(a)s%(a)s\" % d)", "\"Modulus %(a)s\" % {\"a\": (5 % 3)}" ] for sample in bad: sample = "print(%s)" % sample tokens = tokenize.generate_tokens( six.moves.StringIO(sample).readline) self.assertEqual( 1, len(list(checks.check_dict_formatting_in_string(sample, tokens)))) sample = "print(\"%(a)05.2lF\" % d + \" added: %(a)s\" % d)" tokens = tokenize.generate_tokens(six.moves.StringIO(sample).readline) self.assertEqual( 2, len(list(checks.check_dict_formatting_in_string(sample, tokens)))) good = [ "\"This one is okay: %(a)s %(b)s\" % d", "\"So is %(a)s\"\n\"this one: %(b)s\" % d" ] for sample in good: sample = "print(%s)" % sample tokens = tokenize.generate_tokens( six.moves.StringIO(sample).readline) self.assertEqual( [], list(checks.check_dict_formatting_in_string(sample, tokens))) @ddt.data( "text = unicode('sometext')", "text = process(unicode('sometext'))" ) def test_check_using_unicode(self, line): checkres = checks.check_using_unicode(line, line, "fakefile") self.assertIsNotNone(next(checkres)) self.assertEqual([], list(checkres)) def test_check_raises(self): checkres = checks.check_raises( "text = :raises: Exception if 
conditions", "fakefile") self.assertIsNotNone(checkres) checkres = checks.check_raises( "text = :raises Exception: if conditions", "fakefile") self.assertIsNone(checkres) def test_check_db_imports_of_cli(self): line = "from rally.common import db" next(checks.check_db_imports_in_cli( line, line, "./rally/cli/filename")) checkres = checks.check_db_imports_in_cli( line, line, "./filename") self.assertRaises(StopIteration, next, checkres) def test_check_objects_imports_of_cli(self): line = "from rally.common import objects" next(checks.check_objects_imports_in_cli( line, line, "./rally/cli/filename")) checkres = checks.check_objects_imports_in_cli( line, line, "./filename") self.assertRaises(StopIteration, next, checkres) @ddt.data( "class Oldstype():", "class Oldstyle:" ) def test_check_old_type_class(self, line): checkres = checks.check_old_type_class(line, line, "fakefile") self.assertIsNotNone(next(checkres)) self.assertEqual([], list(checkres)) def test_check_datetime_alias(self): lines = ["import datetime as date", "import datetime", "import datetime as dto", "from datetime import datetime as dtime"] for line in lines: checkres = checks.check_datetime_alias(line, line, "fakefile") self.assertIsNotNone(next(checkres)) self.assertEqual([], list(checkres)) line = "import datetime as dt" checkres = checks.check_datetime_alias(line, line, "fakefile") def test_check_log_warn(self): bad_samples = ["LOG.warn('foo')", "LOG.warn(_('bar'))"] self._assert_bad_samples(checks.check_log_warn, bad_samples) good_samples = ["LOG.warning('foo')", "LOG.warning(_('bar'))"] self._assert_good_samples(checks.check_log_warn, good_samples) rally-0.9.1/tests/unit/common/0000775000567000056710000000000013073420067017420 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/test_yamlutils.py0000664000567000056710000000266613073417716023075 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in 
compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from yaml import constructor from rally.common import yamlutils from tests.unit import test class YamlTestcase(test.TestCase): """Test yaml loading method.""" def setUp(self): super(YamlTestcase, self).setUp() def test_safe_load(self): stream = "{'a': 1, 'b': {'a': 2}}" stream_obj = yamlutils.safe_load(stream) self.assertEqual({"a": 1, "b": {"a": 2}}, stream_obj) def test_safe_load_duplicate_key(self): stream = "{'a': 1, 'a': 2}" self.assertRaises(constructor.ConstructorError, yamlutils.safe_load, stream) def test_safe_load_order_key(self): stream = "{'b': 1, 'a': 1, 'c': 1}" stream_obj = yamlutils.safe_load(stream) self.assertEqual({"a": 1, "b": 1, "c": 1}, stream_obj) self.assertEqual(["b", "a", "c"], list(stream_obj)) rally-0.9.1/tests/unit/common/__init__.py0000664000567000056710000000000013073417716021526 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/objects/0000775000567000056710000000000013073420067021051 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/objects/__init__.py0000664000567000056710000000000013073417716023157 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/objects/test_task.py0000664000567000056710000003703113073417720023432 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for db.task layer.""" import datetime as dt import ddt import jsonschema import mock from rally.common import objects from rally import consts from rally import exceptions from tests.unit import test @ddt.ddt class TaskTestCase(test.TestCase): def setUp(self): super(TaskTestCase, self).setUp() self.task = { "uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca1", "status": consts.TaskStatus.INIT, "verification_log": "", } @mock.patch("rally.common.objects.task.db.task_create") def test_init_with_create(self, mock_task_create): mock_task_create.return_value = self.task task = objects.Task(status=consts.TaskStatus.CRASHED) mock_task_create.assert_called_once_with({ "status": consts.TaskStatus.CRASHED}) self.assertEqual(task["uuid"], self.task["uuid"]) @mock.patch("rally.common.objects.task.db.task_create") def test_init_without_create(self, mock_task_create): task = objects.Task(task=self.task) self.assertFalse(mock_task_create.called) self.assertEqual(task["uuid"], self.task["uuid"]) @mock.patch("rally.common.objects.task.uuid.uuid4", return_value="some_uuid") @mock.patch("rally.common.objects.task.db.task_create") def test_init_with_fake_true(self, mock_task_create, mock_uuid4): task = objects.Task(temporary=True) self.assertFalse(mock_task_create.called) self.assertTrue(mock_uuid4.called) self.assertEqual(task["uuid"], mock_uuid4.return_value) @mock.patch("rally.common.objects.task.db.task_get") def test_get(self, mock_task_get): mock_task_get.return_value = self.task task = objects.Task.get(self.task["uuid"]) 
mock_task_get.assert_called_once_with(self.task["uuid"]) self.assertEqual(task["uuid"], self.task["uuid"]) @mock.patch("rally.common.objects.task.db.task_get_status") def test_get_status(self, mock_task_get_status): task = objects.Task(task=self.task) status = task.get_status(task["uuid"]) self.assertEqual(status, mock_task_get_status.return_value) @mock.patch("rally.common.objects.task.db.task_delete") @mock.patch("rally.common.objects.task.db.task_create") def test_create_and_delete(self, mock_task_create, mock_task_delete): mock_task_create.return_value = self.task task = objects.Task() task.delete() mock_task_delete.assert_called_once_with( self.task["uuid"], status=None) @mock.patch("rally.common.objects.task.db.task_delete") @mock.patch("rally.common.objects.task.db.task_create") def test_create_and_delete_status(self, mock_task_create, mock_task_delete): mock_task_create.return_value = self.task task = objects.Task() task.delete(status=consts.TaskStatus.FINISHED) mock_task_delete.assert_called_once_with( self.task["uuid"], status=consts.TaskStatus.FINISHED) @mock.patch("rally.common.objects.task.db.task_delete") def test_delete_by_uuid(self, mock_task_delete): objects.Task.delete_by_uuid(self.task["uuid"]) mock_task_delete.assert_called_once_with( self.task["uuid"], status=None) @mock.patch("rally.common.objects.task.db.task_delete") def test_delete_by_uuid_status(self, mock_task_delete): objects.Task.delete_by_uuid(self.task["uuid"], consts.TaskStatus.FINISHED) mock_task_delete.assert_called_once_with( self.task["uuid"], status=consts.TaskStatus.FINISHED) @mock.patch("rally.common.objects.task.db.task_list", return_value=[{"uuid": "a", "created_at": "b", "status": consts.TaskStatus.CRASHED, "tag": "d", "deployment_name": "some_name"}]) def list(self, mock_db_task_list): tasks = objects.Task.list(status="somestatus") mock_db_task_list.assert_called_once_with("somestatus", None) self.assertIs(type(tasks), list) self.assertIsInstance(tasks[0], objects.Task) 
self.assertEqual(mock_db_task_list.return_value["uuis"], tasks[0]["uuid"]) @mock.patch("rally.common.objects.deploy.db.task_update") @mock.patch("rally.common.objects.task.db.task_create") def test_update(self, mock_task_create, mock_task_update): mock_task_create.return_value = self.task mock_task_update.return_value = {"opt": "val2"} deploy = objects.Task(opt="val1") deploy._update({"opt": "val2"}) mock_task_update.assert_called_once_with( self.task["uuid"], {"opt": "val2"}) self.assertEqual(deploy["opt"], "val2") @ddt.data( { "status": "some_status", "allowed_statuses": ("s_1", "s_2") }, { "status": "some_status", "allowed_statuses": None } ) @ddt.unpack @mock.patch("rally.common.objects.task.db.task_update_status") @mock.patch("rally.common.objects.task.db.task_update") def test_update_status(self, mock_task_update, mock_task_update_status, status, allowed_statuses): task = objects.Task(task=self.task) task.update_status(consts.TaskStatus.FINISHED, allowed_statuses) if allowed_statuses: self.assertFalse(mock_task_update.called) mock_task_update_status.assert_called_once_with( self.task["uuid"], consts.TaskStatus.FINISHED, allowed_statuses ) else: self.assertFalse(mock_task_update_status.called) mock_task_update.assert_called_once_with( self.task["uuid"], {"status": consts.TaskStatus.FINISHED}, ) @mock.patch("rally.common.objects.task.db.task_update") def test_update_verification_log(self, mock_task_update): mock_task_update.return_value = self.task task = objects.Task(task=self.task) task.set_validation_failed({"a": "fake"}) mock_task_update.assert_called_once_with( self.task["uuid"], {"status": consts.TaskStatus.VALIDATION_FAILED, "validation_result": {"a": "fake"}} ) @mock.patch("rally.common.objects.task.charts") def test_extend_results(self, mock_charts): self.assertRaises(TypeError, objects.Task.extend_results) mock_stat = mock.Mock() mock_stat.render.return_value = "durations_stat" mock_charts.MainStatsTable.return_value = mock_stat now = 
dt.datetime.now() iterations = [ {"timestamp": i + 2, "duration": i + 5, "scenario_output": {"errors": "", "data": {}}, "error": [], "idle_duration": i, "atomic_actions": { "keystone.create_user": i + 10}} for i in range(10)] obsolete = [ {"task_uuid": "foo_uuid", "created_at": now, "updated_at": None, "id": 11, "key": {"kw": {"foo": 42}, "name": "Foo.bar", "pos": 0}, "data": {"raw": iterations, "sla": [], "hooks": [], "full_duration": 40, "load_duration": 32}}] expected = [ {"iterations": "foo_iterations", "sla": [], "hooks": [], "key": {"kw": {"foo": 42}, "name": "Foo.bar", "pos": 0}, "info": { "atomic": {"keystone.create_user": {"max_duration": 19, "min_duration": 10}}, "iterations_count": 10, "iterations_failed": 0, "max_duration": 14, "min_duration": 5, "tstamp_start": 2, "full_duration": 40, "load_duration": 32, "stat": "durations_stat"}}] # serializable is default results = objects.Task.extend_results(obsolete) self.assertIsInstance(results[0]["iterations"], type(iter([]))) self.assertEqual(list(results[0]["iterations"]), iterations) results[0]["iterations"] = "foo_iterations" self.assertEqual(results, expected) # serializable is False results = objects.Task.extend_results(obsolete, serializable=False) self.assertIsInstance(results[0]["iterations"], type(iter([]))) self.assertEqual(list(results[0]["iterations"]), iterations) results[0]["iterations"] = "foo_iterations" self.assertEqual(results, expected) # serializable is True results = objects.Task.extend_results(obsolete, serializable=True) self.assertEqual(list(results[0]["iterations"]), iterations) expected[0]["created_at"] = now.strftime("%Y-%d-%mT%H:%M:%S") expected[0]["updated_at"] = None jsonschema.validate(results[0], objects.task.TASK_EXTENDED_RESULT_SCHEMA) results[0]["iterations"] = "foo_iterations" self.assertEqual(results, expected) @mock.patch("rally.common.objects.task.db.task_result_get_all_by_uuid", return_value="foo_results") def test_get_results(self, mock_task_result_get_all_by_uuid): 
task = objects.Task(task=self.task) results = task.get_results() mock_task_result_get_all_by_uuid.assert_called_once_with( self.task["uuid"]) self.assertEqual(results, "foo_results") @mock.patch("rally.common.objects.task.db.task_update") def test_set_failed(self, mock_task_update): mock_task_update.return_value = self.task task = objects.Task(task=self.task) task.set_failed("foo_type", "foo_error_message", "foo_trace") mock_task_update.assert_called_once_with( self.task["uuid"], {"status": consts.TaskStatus.CRASHED, "validation_result": {"etype": "foo_type", "msg": "foo_error_message", "trace": "foo_trace"}}, ) @mock.patch("rally.common.objects.task.Subtask") def test_add_subtask(self, mock_subtask): task = objects.Task(task=self.task) subtask = task.add_subtask(title="foo") mock_subtask.assert_called_once_with( self.task["uuid"], title="foo") self.assertIs(subtask, mock_subtask.return_value) @ddt.data( { "soft": True, "status": consts.TaskStatus.INIT }, { "soft": True, "status": consts.TaskStatus.VALIDATING, "soft": True, "status": consts.TaskStatus.ABORTED }, { "soft": True, "status": consts.TaskStatus.FINISHED }, { "soft": True, "status": consts.TaskStatus.CRASHED }, { "soft": False, "status": consts.TaskStatus.ABORTED }, { "soft": False, "status": consts.TaskStatus.FINISHED }, { "soft": False, "status": consts.TaskStatus.CRASHED } ) @ddt.unpack def test_abort_with_finished_states(self, soft, status): task = objects.Task(mock.MagicMock(), fake=True) task.get_status = mock.MagicMock(return_value=status) task.update_status = mock.MagicMock() self.assertRaises(exceptions.RallyException, task.abort, soft) self.assertEqual(1, task.get_status.call_count) self.assertFalse(task.update_status.called) @ddt.data(True, False) def test_abort_with_running_state(self, soft): task = objects.Task(mock.MagicMock(), fake=True) task.get_status = mock.MagicMock(return_value="running") task.update_status = mock.MagicMock() task.abort(soft) if soft: status = 
consts.TaskStatus.SOFT_ABORTING else: status = consts.TaskStatus.ABORTING task.update_status.assert_called_once_with( status, allowed_statuses=(consts.TaskStatus.RUNNING, consts.TaskStatus.SOFT_ABORTING) ) class SubtaskTestCase(test.TestCase): def setUp(self): super(SubtaskTestCase, self).setUp() self.subtask = { "task_uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca1", "uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca2", "title": "foo", } @mock.patch("rally.common.objects.task.db.subtask_create") def test_init(self, mock_subtask_create): mock_subtask_create.return_value = self.subtask subtask = objects.Subtask("bar", title="foo") mock_subtask_create.assert_called_once_with( "bar", title="foo") self.assertEqual(subtask["uuid"], self.subtask["uuid"]) @mock.patch("rally.common.objects.task.Workload") @mock.patch("rally.common.objects.task.db.subtask_create") def test_add_workload(self, mock_subtask_create, mock_workload): mock_subtask_create.return_value = self.subtask subtask = objects.Subtask("bar", title="foo") workload = subtask.add_workload({"bar": "baz"}) mock_workload.assert_called_once_with( self.subtask["task_uuid"], self.subtask["uuid"], {"bar": "baz"}) self.assertIs(workload, mock_workload.return_value) class WorkloadTestCase(test.TestCase): def setUp(self): super(WorkloadTestCase, self).setUp() self.workload = { "task_uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca1", "uuid": "00ef46a2-c5b8-4aea-a5ca-0f54a10cbca3", } @mock.patch("rally.common.objects.task.db.workload_create") def test_init(self, mock_workload_create): mock_workload_create.return_value = self.workload workload = objects.Workload("uuid1", "uuid2", {"bar": "baz"}) mock_workload_create.assert_called_once_with( "uuid1", "uuid2", {"bar": "baz"}) self.assertEqual(workload["uuid"], self.workload["uuid"]) @mock.patch("rally.common.objects.task.db.workload_data_create") @mock.patch("rally.common.objects.task.db.workload_create") def test_add_workload_data(self, mock_workload_create, mock_workload_data_create): 
mock_workload_create.return_value = self.workload workload = objects.Workload("uuid1", "uuid2", {"bar": "baz"}) workload = workload.add_workload_data(0, {"data": "foo"}) mock_workload_data_create.assert_called_once_with( self.workload["task_uuid"], self.workload["uuid"], 0, {"data": "foo"}) @mock.patch("rally.common.objects.task.db.workload_set_results") @mock.patch("rally.common.objects.task.db.workload_create") def test_set_results(self, mock_workload_create, mock_workload_set_results): mock_workload_create.return_value = self.workload workload = objects.Workload("uuid1", "uuid2", {"bar": "baz"}) workload = workload.set_results({"data": "foo"}) mock_workload_set_results.assert_called_once_with( self.workload["uuid"], {"data": "foo"}) rally-0.9.1/tests/unit/common/objects/test_credential.py0000664000567000056710000000704013073417720024577 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common import objects from rally import consts from tests.unit import test class CredentialTestCase(test.TestCase): def test_to_dict(self): credential = objects.Credential( "foo_url", "foo_user", "foo_password", tenant_name="foo_tenant", permission=consts.EndpointPermission.ADMIN) self.assertEqual(credential.to_dict(), {"auth_url": "foo_url", "username": "foo_user", "password": "foo_password", "tenant_name": "foo_tenant", "region_name": None, "domain_name": None, "endpoint": None, "endpoint_type": None, "https_insecure": False, "https_cacert": None, "project_domain_name": None, "user_domain_name": None}) def test_to_dict_with_include_permission(self): credential = objects.Credential( "foo_url", "foo_user", "foo_password", tenant_name="foo_tenant", permission=consts.EndpointPermission.ADMIN) self.assertEqual(credential.to_dict(include_permission=True), {"auth_url": "foo_url", "username": "foo_user", "password": "foo_password", "tenant_name": "foo_tenant", "region_name": None, "domain_name": None, "endpoint": None, "permission": consts.EndpointPermission.ADMIN, "endpoint_type": None, "https_insecure": False, "https_cacert": None, "project_domain_name": None, "user_domain_name": None}) def test_to_dict_with_kwarg_credential(self): credential = objects.Credential( "foo_url", "foo_user", "foo_password", tenant_name="foo_tenant", permission=consts.EndpointPermission.ADMIN, endpoint="foo_endpoint", endpoint_type=consts.EndpointType.PUBLIC) self.assertEqual(credential.to_dict(), {"auth_url": "foo_url", "username": "foo_user", "password": "foo_password", "tenant_name": "foo_tenant", "region_name": None, "domain_name": None, "endpoint": "foo_endpoint", "endpoint_type": consts.EndpointType.PUBLIC, "https_insecure": False, "https_cacert": None, "project_domain_name": None, "user_domain_name": None}) rally-0.9.1/tests/unit/common/objects/test_deploy.py0000664000567000056710000002267713073417720023776 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for db.deploy layer.""" import jsonschema import mock from rally.common import objects from rally import consts from rally import exceptions from tests.unit import test class DeploymentTestCase(test.TestCase): def setUp(self): super(DeploymentTestCase, self).setUp() self.deployment = { "uuid": "baa1bfb6-0c38-4f6c-9bd0-45968890e4f4", "name": "", "config": {}, "credentials": {}, "status": consts.DeployStatus.DEPLOY_INIT, } self.resource = { "id": 42, "deployment_uuid": self.deployment["uuid"], "provider_name": "provider", "type": "some", "info": {"key": "value"}, } @mock.patch("rally.common.objects.deploy.db.deployment_create") def test_init_with_create(self, mock_deployment_create): mock_deployment_create.return_value = self.deployment deploy = objects.Deployment() mock_deployment_create.assert_called_once_with({}) self.assertEqual(deploy["uuid"], self.deployment["uuid"]) @mock.patch("rally.common.objects.deploy.db.deployment_create") def test_init_without_create(self, mock_deployment_create): deploy = objects.Deployment(deployment=self.deployment) self.assertFalse(mock_deployment_create.called) self.assertEqual(deploy["uuid"], self.deployment["uuid"]) @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_get(self, mock_deployment_get): mock_deployment_get.return_value = self.deployment deploy = objects.Deployment.get(self.deployment["uuid"]) 
mock_deployment_get.assert_called_once_with(self.deployment["uuid"]) self.assertEqual(deploy["uuid"], self.deployment["uuid"]) @mock.patch("rally.common.objects.deploy.db.deployment_delete") @mock.patch("rally.common.objects.deploy.db.deployment_create") def test_create_and_delete(self, mock_deployment_create, mock_deployment_delete): mock_deployment_create.return_value = self.deployment deploy = objects.Deployment() deploy.delete() mock_deployment_delete.assert_called_once_with(self.deployment["uuid"]) @mock.patch("rally.common.objects.deploy.db.deployment_delete") def test_delete_by_uuid(self, mock_deployment_delete): objects.Deployment.delete_by_uuid(self.deployment["uuid"]) mock_deployment_delete.assert_called_once_with(self.deployment["uuid"]) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_create") def test_update(self, mock_deployment_create, mock_deployment_update): mock_deployment_create.return_value = self.deployment mock_deployment_update.return_value = {"opt": "val2"} deploy = objects.Deployment(opt="val1") deploy._update({"opt": "val2"}) mock_deployment_update.assert_called_once_with( self.deployment["uuid"], {"opt": "val2"}) self.assertEqual(deploy["opt"], "val2") @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_status(self, mock_deployment_update): mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) deploy.update_status(consts.DeployStatus.DEPLOY_FAILED) mock_deployment_update.assert_called_once_with( self.deployment["uuid"], {"status": consts.DeployStatus.DEPLOY_FAILED}, ) @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_name(self, mock_deployment_update): mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) deploy.update_name("new_name") mock_deployment_update.assert_called_once_with( 
self.deployment["uuid"], {"name": "new_name"}, ) @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_config(self, mock_deployment_update): mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) deploy.update_config({"opt": "val"}) mock_deployment_update.assert_called_once_with( self.deployment["uuid"], {"config": {"opt": "val"}}, ) @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_credentials(self, mock_deployment_update): mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) credentials = {"foo": [{"admin": {"fake_admin": True}, "users": [{"fake_user": True}]}]} deploy.update_credentials(credentials) mock_deployment_update.assert_called_once_with( self.deployment["uuid"], { "credentials": {"foo": [{"admin": {"fake_admin": True}, "users": [{"fake_user": True}]}]} }) def test_get_credentials_for(self): credentials = {"foo": [{"admin": {"fake_admin": True}, "users": [{"fake_user": True}]}]} self.deployment["credentials"] = credentials deploy = objects.Deployment(deployment=self.deployment) creds = deploy.get_credentials_for("foo") self.assertEqual(credentials["foo"][0], creds) def test_get_deprecated(self): credentials = {"openstack": [{"admin": {"fake_admin": True}, "users": [{"fake_user": True}]}]} self.deployment["credentials"] = credentials deploy = objects.Deployment(deployment=self.deployment) self.assertEqual(credentials["openstack"][0]["admin"], deploy["admin"]) self.assertEqual(credentials["openstack"][0]["users"], deploy["users"]) def test_update_empty_credentials(self): deploy = objects.Deployment(deployment=self.deployment) self.assertRaises(jsonschema.ValidationError, deploy.update_credentials, {}) def test_get_credentials_error(self): deploy = objects.Deployment(deployment=self.deployment) self.assertRaises(exceptions.RallyException, deploy.get_credentials_for, "bar") 
@mock.patch("rally.common.objects.deploy.db.resource_create") def test_add_resource(self, mock_resource_create): mock_resource_create.return_value = self.resource deploy = objects.Deployment(deployment=self.deployment) resource = deploy.add_resource("provider", type="some", info={"key": "value"}) self.assertEqual(resource["id"], self.resource["id"]) mock_resource_create.assert_called_once_with({ "deployment_uuid": self.deployment["uuid"], "provider_name": "provider", "type": "some", "info": {"key": "value"}, }) @mock.patch("rally.common.objects.task.db.resource_delete") def test_delete(self, mock_resource_delete): objects.Deployment.delete_resource(42) mock_resource_delete.assert_called_once_with(42) @mock.patch("rally.common.objects.task.db.resource_get_all") def test_get_resources(self, mock_resource_get_all): mock_resource_get_all.return_value = [self.resource] deploy = objects.Deployment(deployment=self.deployment) resources = deploy.get_resources(provider_name="provider", type="some") self.assertEqual(len(resources), 1) self.assertEqual(resources[0]["id"], self.resource["id"]) @mock.patch("rally.common.objects.deploy.dt.datetime") @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_set_started(self, mock_deployment_update, mock_datetime): mock_datetime.now = mock.Mock(return_value="fake_time") mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) deploy.set_started() mock_deployment_update.assert_called_once_with( self.deployment["uuid"], {"started_at": "fake_time", "status": consts.DeployStatus.DEPLOY_STARTED} ) @mock.patch("rally.common.objects.deploy.dt.datetime") @mock.patch("rally.common.objects.deploy.db.deployment_update") def test_update_set_completed(self, mock_deployment_update, mock_datetime): mock_datetime.now = mock.Mock(return_value="fake_time") mock_deployment_update.return_value = self.deployment deploy = objects.Deployment(deployment=self.deployment) 
deploy.set_completed() mock_deployment_update.assert_called_once_with( self.deployment["uuid"], {"completed_at": "fake_time", "status": consts.DeployStatus.DEPLOY_FINISHED} ) rally-0.9.1/tests/unit/common/objects/test_verification.py0000664000567000056710000001163013073417720025147 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.common import objects from rally import consts from tests.unit import test class VerificationTestCase(test.TestCase): def setUp(self): super(VerificationTestCase, self).setUp() self.db_obj = {"uuid": "uuid-1"} @mock.patch("rally.common.objects.verification.db.verification_create") def test_init(self, mock_verification_create): v = objects.Verification(self.db_obj) self.assertEqual(0, mock_verification_create.call_count) self.assertEqual(self.db_obj["uuid"], v.uuid) self.assertEqual(self.db_obj["uuid"], v["uuid"]) @mock.patch("rally.common.objects.verification.db.verification_create") def test_create(self, mock_verification_create): objects.Verification.create("some-verifier", "some-deployment", [], {}) mock_verification_create.assert_called_once_with( "some-verifier", "some-deployment", [], {}) @mock.patch("rally.common.objects.verification.db.verification_get") def test_get(self, mock_verification_get): mock_verification_get.return_value = self.db_obj v = objects.Verification.get(self.db_obj["uuid"]) 
mock_verification_get.assert_called_once_with(self.db_obj["uuid"]) self.assertEqual(self.db_obj["uuid"], v.uuid) @mock.patch("rally.common.objects.verification.db.verification_list") def test_list(self, mock_verification_list): mock_verification_list.return_value = [self.db_obj] vs = objects.Verification.list() mock_verification_list.assert_called_once_with(None, None, None, None) self.assertEqual(self.db_obj["uuid"], vs[0].uuid) @mock.patch("rally.common.objects.verification.db.verification_delete") def test_delete(self, mock_verification_delete): objects.Verification(self.db_obj).delete() mock_verification_delete.assert_called_once_with(self.db_obj["uuid"]) @mock.patch("rally.common.objects.verification.db.verification_update") def test_update_status(self, mock_verification_update): v = objects.Verification(self.db_obj) v.update_status(status="some-status") mock_verification_update.assert_called_once_with(self.db_obj["uuid"], status="some-status") @mock.patch("rally.common.objects.verification.db.verification_update") def test_finish(self, mock_verification_update): v = objects.Verification(self.db_obj) totals = { "tests_count": 2, "tests_duration": 0.54, "success": 2, "skip": 0, "expected_failures": 0, "unexpected_success": 0, "failures": 0 } tests = { "foo_test[gate,negative]": { "name": "foo_test", "duration": 0.25, "status": "success", "tags": ["gate", "negative"] }, "bar_test[gate,negative]": { "name": "bar_test", "duration": 0.29, "status": "success", "tags": ["gate", "negative"] } } v.finish(totals, tests) mock_verification_update.assert_called_once_with( self.db_obj["uuid"], status=consts.VerificationStatus.FINISHED, tests=tests, **totals) v = objects.Verification(self.db_obj) totals.update(failures=1) mock_verification_update.reset_mock() v.finish(totals, tests) mock_verification_update.assert_called_once_with( self.db_obj["uuid"], status=consts.VerificationStatus.FAILED, tests=tests, **totals) v = objects.Verification(self.db_obj) 
totals.update(failures=0, unexpected_success=1) mock_verification_update.reset_mock() v.finish(totals, tests) mock_verification_update.assert_called_once_with( self.db_obj["uuid"], status=consts.VerificationStatus.FAILED, tests=tests, **totals) @mock.patch("rally.common.objects.verification.db.verification_update") def test_set_error(self, mock_verification_update): v = objects.Verification(self.db_obj) v.set_error("Some error") mock_verification_update.assert_called_once_with( self.db_obj["uuid"], status=consts.VerificationStatus.CRASHED) rally-0.9.1/tests/unit/common/objects/test_verifier.py0000664000567000056710000000763513073417720024312 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.common import objects from rally import exceptions from tests.unit import test class VerifierTestCase(test.TestCase): def setUp(self): super(VerifierTestCase, self).setUp() self.db_obj = {"uuid": "uuid-1"} @mock.patch("rally.common.objects.verifier.db.verifier_create") def test_init(self, mock_verifier_create): v = objects.Verifier(self.db_obj) self.assertEqual(0, mock_verifier_create.call_count) self.assertEqual(self.db_obj["uuid"], v.uuid) self.assertEqual(self.db_obj["uuid"], v["uuid"]) @mock.patch("rally.common.objects.verifier.db.verifier_create") def test_create(self, mock_verifier_create): objects.Verifier.create("a", "b", "c", "d", "e", False) mock_verifier_create.assert_called_once_with( name="a", vtype="b", namespace="c", source="d", version="e", system_wide=False, extra_settings=None) @mock.patch("rally.common.objects.verifier.db.verifier_get") def test_get(self, mock_verifier_get): mock_verifier_get.return_value = self.db_obj v = objects.Verifier.get(self.db_obj["uuid"]) mock_verifier_get.assert_called_once_with(self.db_obj["uuid"]) self.assertEqual(self.db_obj["uuid"], v.uuid) @mock.patch("rally.common.objects.verifier.db.verifier_list") def test_list(self, mock_verifier_list): mock_verifier_list.return_value = [self.db_obj] vs = objects.Verifier.list() mock_verifier_list.assert_called_once_with(None) self.assertEqual(self.db_obj["uuid"], vs[0].uuid) @mock.patch("rally.common.objects.verifier.db.verifier_delete") def test_delete(self, mock_verifier_delete): objects.Verifier.delete(self.db_obj["uuid"]) mock_verifier_delete.assert_called_once_with(self.db_obj["uuid"]) @mock.patch("rally.common.objects.verifier.db.verifier_update") def test_update_status(self, mock_verifier_update): v = objects.Verifier(self.db_obj) v.update_status(status="some-status") mock_verifier_update.assert_called_once_with(self.db_obj["uuid"], status="some-status") @mock.patch("rally.common.objects.verifier.db.deployment_get") def 
test_deployment_property(self, mock_deployment_get): v = objects.Verifier(self.db_obj) mock_deployment_get.return_value = {"name": "foo", "uuid": "bar"} v.set_deployment("some-deployment") self.assertEqual("foo", v.deployment["name"]) self.assertEqual("bar", v.deployment["uuid"]) def test_deployment_property_raise_exc(self): v = objects.Verifier(self.db_obj) self.assertRaises(exceptions.RallyException, getattr, v, "deployment") @mock.patch("rally.common.objects.verifier.manager") def test_manager_property(self, mock_manager): self.db_obj["type"] = "some" self.db_obj["namespace"] = "namespace" v = objects.Verifier(self.db_obj) self.assertIsNone(v._manager) self.assertFalse(mock_manager.VerifierManager.get.called) self.assertEqual( mock_manager.VerifierManager.get.return_value.return_value, v.manager) mock_manager.VerifierManager.get.assert_called_once_with( self.db_obj["type"], self.db_obj["namespace"]) rally-0.9.1/tests/unit/common/test_streaming_algorithms.py0000664000567000056710000002635213073417716025272 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import math import ddt import six from rally.common import streaming_algorithms as algo from tests.unit import test class MeanComputationTestCase(test.TestCase): def test_empty_stream(self): mean_computation = algo.MeanComputation() self.assertIsNone(mean_computation.result()) def test_one_value(self): mean_computation = algo.MeanComputation() mean_computation.add(10.0) self.assertEqual(10.0, mean_computation.result()) def test_stream(self): stream = range(10) mean_computation = algo.MeanComputation() for value in stream: mean_computation.add(value) excepted_mean = float(sum(stream)) / len(stream) self.assertEqual(excepted_mean, mean_computation.result()) def test_merge(self): single_mean = algo.MeanComputation() for val in six.moves.range(100): single_mean.add(val) means = [algo.MeanComputation() for _ in six.moves.range(10)] for idx, mean in enumerate(means): for val in six.moves.range(idx * 10, (idx + 1) * 10): mean.add(val) merged_mean = means[0] for mean in means[1:]: merged_mean.merge(mean) self.assertEqual(single_mean.count, merged_mean.count) self.assertEqual(single_mean.total, merged_mean.total) self.assertEqual(single_mean.result(), merged_mean.result()) class StdDevComputationTestCase(test.TestCase): def test_empty_stream(self): std_computation = algo.StdDevComputation() self.assertIsNone(std_computation.result()) def test_one_value(self): std_computation = algo.StdDevComputation() std_computation.add(10.0) self.assertIsNone(std_computation.result()) def test_two_values(self): std_computation = algo.StdDevComputation() std_computation.add(10.0) std_computation.add(10.0) self.assertEqual(0.0, std_computation.result()) def test_stream(self): stream = range(10) std_computation = algo.StdDevComputation() for value in stream: std_computation.add(value) mean = float(sum(stream)) / len(stream) excepted_std = math.sqrt(sum((x - mean) ** 2 for x in stream) / (len(stream) - 1)) self.assertEqual(excepted_std, std_computation.result()) def test_merge(self): 
single_std = algo.StdDevComputation() for val in six.moves.range(100): single_std.add(val) stds = [algo.StdDevComputation() for _ in six.moves.range(10)] for idx, std in enumerate(stds): for val in six.moves.range(idx * 10, (idx + 1) * 10): std.add(val) merged_std = stds[0] for std in stds[1:]: merged_std.merge(std) self.assertEqual(single_std.count, merged_std.count) self.assertEqual(single_std.mean, merged_std.mean) self.assertEqual(single_std.dev_sum, merged_std.dev_sum) self.assertEqual(single_std.result(), merged_std.result()) class MinComputationTestCase(test.TestCase): def test_add_and_result(self): comp = algo.MinComputation() [comp.add(i) for i in [3, 5.2, 2, -1, 1, 8, 33.4, 0, -3, 42, -2]] self.assertEqual(-3, comp.result()) def test_add_raises(self): comp = algo.MinComputation() self.assertRaises(TypeError, comp.add) self.assertRaises(TypeError, comp.add, None) self.assertRaises(TypeError, comp.add, "str") def test_result_empty(self): comp = algo.MinComputation() self.assertRaises(TypeError, comp.result, 1) self.assertIsNone(comp.result()) def test_merge(self): single_min_algo = algo.MinComputation() for val in six.moves.range(100): single_min_algo.add(val) algos = [algo.MinComputation() for _ in six.moves.range(10)] for idx, min_algo in enumerate(algos): for val in six.moves.range(idx * 10, (idx + 1) * 10): min_algo.add(val) merged_min_algo = algos[0] for min_algo in algos[1:]: merged_min_algo.merge(min_algo) self.assertEqual(single_min_algo._value, merged_min_algo._value) self.assertEqual(single_min_algo.result(), merged_min_algo.result()) class MaxComputationTestCase(test.TestCase): def test_add_and_result(self): comp = algo.MaxComputation() [comp.add(i) for i in [3, 5.2, 2, -1, 1, 8, 33.4, 0, -3, 42, -2]] self.assertEqual(42, comp.result()) def test_add_raises(self): comp = algo.MaxComputation() self.assertRaises(TypeError, comp.add) self.assertRaises(TypeError, comp.add, None) self.assertRaises(TypeError, comp.add, "str") def 
test_result_empty(self): comp = algo.MaxComputation() self.assertRaises(TypeError, comp.result, 1) self.assertIsNone(comp.result()) def test_merge(self): single_max_algo = algo.MaxComputation() for val in six.moves.range(100): single_max_algo.add(val) algos = [algo.MaxComputation() for _ in six.moves.range(10)] for idx, max_algo in enumerate(algos): for val in six.moves.range(idx * 10, (idx + 1) * 10): max_algo.add(val) merged_max_algo = algos[0] for max_algo in algos[1:]: merged_max_algo.merge(max_algo) self.assertEqual(single_max_algo._value, merged_max_algo._value) self.assertEqual(single_max_algo.result(), merged_max_algo.result()) @ddt.ddt class PercentileComputationTestCase(test.TestCase): mixed1 = [0] mixed6 = [100, 100, 0, 100, 100, 100] mixed5 = [0, 0, 100, 0, 0] mixed16 = [55.71, 83.05, 24.12, 27, 48.36, 16.36, 96.23, 6, 16.0, 88.11, 29.52, 99.2, 79.96, 77.84, 85.45, 85.32, 7, 17.1, 3.02, 15.23] mixed50 = [51.63, 82.2, 52.52, .05, 66, 94.03, 78.6, 80.9, 51.89, 79, 1.4, 65.06, 12.46, 51.89, 41, 45.39, 124, 62.2, 32.72, 56.98, 31.19, 26.27, 97.3, 56.6, 19.75, 69, 25.03, 10.76, 17.71, 29.4, 15.75, 19.88, 90.16, 82.0, 63.4, 14.84, 49.07, 72.06, 41, 1.48, 82.19, 48.45, 53, 88.33, 52.31, 62, 15.96, 21.17, 25.33, 53.27] mixed5000 = mixed50 * 1000 range5000 = range(5000) @ddt.data( {"stream": "mixed1", "percent": 0.95, "expected": 0}, {"stream": "mixed6", "percent": 0.5, "expected": 100}, {"stream": "mixed5", "percent": 0.5, "expected": 0}, {"stream": "mixed5", "percent": 0.999, "expected": 99.6}, {"stream": "mixed5", "percent": 0.001, "expected": 0}, {"stream": "mixed16", "percent": 0.25, "expected": 16.27}, {"stream": "mixed16", "percent": 0.50, "expected": 38.94}, {"stream": "mixed16", "percent": 0.90, "expected": 88.92200000000001}, {"stream": "mixed50", "percent": 0.25, "expected": 25.105}, {"stream": "mixed50", "percent": 0.50, "expected": 51.89}, {"stream": "mixed50", "percent": 0.90, "expected": 82.81300000000002}, {"stream": "mixed5000", "percent": 0.25, 
"expected": 35.54600000000001}, {"stream": "mixed5000", "percent": 0.50, "expected": 48.351}, {"stream": "mixed5000", "percent": 0.90, "expected": 66.05880000000437}, {"stream": "range5000", "percent": 0.25, "expected": 1249.75}, {"stream": "range5000", "percent": 0.50, "expected": 2499.5}, {"stream": "range5000", "percent": 0.90, "expected": 4499.1}) @ddt.unpack def test_add_and_result(self, percent, stream, expected): comp = algo.PercentileComputation(percent=percent, length=len( getattr(self, stream))) [comp.add(i) for i in getattr(self, stream)] self.assertEqual(expected, comp.result()) def test_add_raises(self): comp = algo.PercentileComputation(0.50, 100) self.assertRaises(TypeError, comp.add) def test_result_empty(self): self.assertRaises(TypeError, algo.PercentileComputation) comp = algo.PercentileComputation(0.50, 100) self.assertIsNone(comp.result()) class IncrementComputationTestCase(test.TestCase): def test_add_and_result(self): comp = algo.IncrementComputation() for i in range(1, 100): self.assertEqual(i - 1, comp.result()) comp.add(42) self.assertEqual(i, comp.result()) def test_merge(self): single_inc = algo.IncrementComputation() for val in six.moves.range(100): single_inc.add(val) incs = [algo.IncrementComputation() for _ in six.moves.range(10)] for idx, inc in enumerate(incs): for val in six.moves.range(idx * 10, (idx + 1) * 10): inc.add(val) merged_inc = incs[0] for inc in incs[1:]: merged_inc.merge(inc) self.assertEqual(single_inc._count, merged_inc._count) self.assertEqual(single_inc.result(), merged_inc.result()) @ddt.ddt class DegradationComputationTestCase(test.TestCase): @ddt.data( ([], None, None, 0.0), ([30.0, 30.0, 30.0, 30.0], 30.0, 30.0, 0.0), ([45.0, 45.0, 45.0, 30.0], 30.0, 45.0, 50.0), ([15.0, 10.0, 20.0, 19.0], 10.0, 20.0, 100.0), ([30.0, 56.0, 90.0, 73.0], 30.0, 90.0, 200.0)) @ddt.unpack def test_add(self, stream, min_value, max_value, result): comp = algo.DegradationComputation() for value in stream: comp.add(value) 
self.assertEqual(min_value, comp.min_value.result()) self.assertEqual(max_value, comp.max_value.result()) self.assertEqual(result, comp.result()) @ddt.data(-10.0, -1.0, -1, 0.0, 0) def test_add_raise(self, value): comp = algo.DegradationComputation() self.assertRaises(ValueError, comp.add, value) @ddt.data(([39.0, 30.0, 32.0], [49.0, 40.0, 51.0], 30.0, 51.0, 70.0), ([31.0, 30.0, 32.0], [39.0, 45.0, 43.0], 30.0, 45.0, 50.0), ([], [31.0, 30.0, 45.0], 30.0, 45.0, 50.0), ([31.0, 30.0, 45.0], [], 30.0, 45.0, 50.0), ([], [], None, None, 0.0)) @ddt.unpack def test_merge(self, stream1, stream2, min_value, max_value, result): comp1 = algo.DegradationComputation() for value in stream1: comp1.add(value) comp2 = algo.DegradationComputation() for value in stream2: comp2.add(value) comp1.merge(comp2) self.assertEqual(min_value, comp1.min_value.result()) self.assertEqual(max_value, comp1.max_value.result()) self.assertEqual(result, comp1.result()) rally-0.9.1/tests/unit/common/test_logging.py0000664000567000056710000000576013073417716022476 0ustar jenkinsjenkins00000000000000# # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.common.i18n import _ from rally.common import logging from tests.unit import test class LogTestCase(test.TestCase): def test_log_task_wrapper(self): mock_log = mock.MagicMock() msg = "test %(a)s %(b)s" class TaskLog(object): def __init__(self): self.task = {"uuid": "some_uuid"} @logging.log_task_wrapper(mock_log, msg, a=10, b=20) def some_method(self, x, y): return x + y t = TaskLog() self.assertEqual(t.some_method.__name__, "some_method") self.assertEqual(t.some_method(2, 2), 4) params = {"msg": msg % {"a": 10, "b": 20}, "uuid": t.task["uuid"]} expected = [ mock.call(_("Task %(uuid)s | Starting: %(msg)s") % params), mock.call(_("Task %(uuid)s | Completed: %(msg)s") % params) ] self.assertEqual(mock_log.mock_calls, expected) def test_log_deprecated(self): mock_log = mock.MagicMock() @logging.log_deprecated("some alternative", "0.0.1", mock_log) def some_method(x, y): return x + y self.assertEqual(some_method(2, 2), 4) mock_log.assert_called_once_with("'some_method' is deprecated in " "Rally v0.0.1: some alternative") def test_log_deprecated_args(self): mock_log = mock.MagicMock() @logging.log_deprecated_args("Deprecated test", "0.0.1", ("z",), mock_log, once=True) def some_method(x, y, z): return x + y + z self.assertEqual(some_method(2, 2, z=3), 7) mock_log.assert_called_once_with( "Deprecated test (args `z' deprecated in Rally v0.0.1)") mock_log.reset_mock() self.assertEqual(some_method(2, 2, z=3), 7) self.assertFalse(mock_log.called) @logging.log_deprecated_args("Deprecated test", "0.0.1", ("z",), mock_log, once=False) def some_method(x, y, z): return x + y + z self.assertEqual(some_method(2, 2, z=3), 7) mock_log.assert_called_once_with( "Deprecated test (args `z' deprecated in Rally v0.0.1)") mock_log.reset_mock() self.assertEqual(some_method(2, 2, z=3), 7) mock_log.assert_called_once_with( "Deprecated test (args `z' deprecated in Rally v0.0.1)") rally-0.9.1/tests/unit/common/io/0000775000567000056710000000000013073420067020027 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/io/__init__.py0000664000567000056710000000000013073417716022135 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/io/subunit_v2.stream0000664000567000056710000000661613073417716023364 0ustar jenkinsjenkins00000000000000)4+test_foo.SimpleTestCase.test_skip_something|ɳ):1test_foo.SimpleTestCase.test_something_that_failsx#);2test_foo.SimpleTestCase.test_something_that_passesJ<)@A7test_foo.SimpleTestCase.test_something_that_takes_5_sec6[w)@C9test_foo.SimpleTestCase.test_something_with_expected_failt)@A7test_foo.SimpleTestCaseWithBrokenCleanup.test_somethinge^)>5test_foo.SimpleTestCaseWithBrokenSetup.test_something!+ """ self.assertEqual(expected.replace("\n", ""), j.to_xml()) def test_empty_testsuite(self): j = junit.JUnit("test") expected = """ """ self.assertEqual(expected.replace("\n", ""), j.to_xml()) def test_invalid_outcome(self): j = junit.JUnit("test") self.assertRaises(ValueError, j.add_test, "Foo.Bar", 1.23, outcome=1024) rally-0.9.1/tests/unit/common/test_sshutils.py0000664000567000056710000003323213073417716022721 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import socket import ddt import mock from rally.common import sshutils from rally import exceptions from tests.unit import test class FakeParamikoException(Exception): pass class SSHTestCase(test.TestCase): """Test all small SSH methods.""" def setUp(self): super(SSHTestCase, self).setUp() self.ssh = sshutils.SSH("root", "example.net") @mock.patch("rally.common.sshutils.SSH._get_pkey") def test_construct(self, mock_ssh__get_pkey): mock_ssh__get_pkey.return_value = "pkey" ssh = sshutils.SSH("root", "example.net", port=33, pkey="key", key_filename="kf", password="secret") mock_ssh__get_pkey.assert_called_once_with("key") self.assertEqual("root", ssh.user) self.assertEqual("example.net", ssh.host) self.assertEqual(33, ssh.port) self.assertEqual("pkey", ssh.pkey) self.assertEqual("kf", ssh.key_filename) self.assertEqual("secret", ssh.password) def test_construct_default(self): self.assertEqual("root", self.ssh.user) self.assertEqual("example.net", self.ssh.host) self.assertEqual(22, self.ssh.port) self.assertIsNone(self.ssh.pkey) self.assertIsNone(self.ssh.key_filename) self.assertIsNone(self.ssh.password) @mock.patch("rally.common.sshutils.paramiko") def test__get_pkey_invalid(self, mock_paramiko): mock_paramiko.SSHException = FakeParamikoException rsa = mock_paramiko.rsakey.RSAKey dss = mock_paramiko.dsskey.DSSKey rsa.from_private_key.side_effect = mock_paramiko.SSHException dss.from_private_key.side_effect = mock_paramiko.SSHException self.assertRaises(exceptions.SSHError, self.ssh._get_pkey, "key") @mock.patch("rally.common.sshutils.six.moves.StringIO") @mock.patch("rally.common.sshutils.paramiko") def test__get_pkey_dss(self, mock_paramiko, mock_string_io): mock_paramiko.SSHException = FakeParamikoException mock_string_io.return_value = "string_key" mock_paramiko.dsskey.DSSKey.from_private_key.return_value = "dss_key" rsa = mock_paramiko.rsakey.RSAKey rsa.from_private_key.side_effect = mock_paramiko.SSHException key = self.ssh._get_pkey("key") dss_calls 
= mock_paramiko.dsskey.DSSKey.from_private_key.mock_calls self.assertEqual([mock.call("string_key")], dss_calls) self.assertEqual(key, "dss_key") mock_string_io.assert_called_once_with("key") @mock.patch("rally.common.sshutils.six.moves.StringIO") @mock.patch("rally.common.sshutils.paramiko") def test__get_pkey_rsa(self, mock_paramiko, mock_string_io): mock_paramiko.SSHException = FakeParamikoException mock_string_io.return_value = "string_key" mock_paramiko.rsakey.RSAKey.from_private_key.return_value = "rsa_key" dss = mock_paramiko.dsskey.DSSKey dss.from_private_key.side_effect = mock_paramiko.SSHException key = self.ssh._get_pkey("key") rsa_calls = mock_paramiko.rsakey.RSAKey.from_private_key.mock_calls self.assertEqual([mock.call("string_key")], rsa_calls) self.assertEqual(key, "rsa_key") mock_string_io.assert_called_once_with("key") @mock.patch("rally.common.sshutils.SSH._get_pkey") @mock.patch("rally.common.sshutils.paramiko") def test__get_client(self, mock_paramiko, mock_ssh__get_pkey): mock_ssh__get_pkey.return_value = "key" fake_client = mock.Mock() mock_paramiko.SSHClient.return_value = fake_client mock_paramiko.AutoAddPolicy.return_value = "autoadd" ssh = sshutils.SSH("admin", "example.net", pkey="key") client = ssh._get_client() self.assertEqual(fake_client, client) client_calls = [ mock.call.set_missing_host_key_policy("autoadd"), mock.call.connect("example.net", username="admin", port=22, pkey="key", key_filename=None, password=None, timeout=1), ] self.assertEqual(client_calls, client.mock_calls) def test_close(self): with mock.patch.object(self.ssh, "_client") as m_client: self.ssh.close() m_client.close.assert_called_once_with() self.assertFalse(self.ssh._client) @mock.patch("rally.common.sshutils.six.moves.StringIO") def test_execute(self, mock_string_io): mock_string_io.side_effect = stdio = [mock.Mock(), mock.Mock()] stdio[0].read.return_value = "stdout fake data" stdio[1].read.return_value = "stderr fake data" with mock.patch.object(self.ssh, 
"run", return_value=0) as mock_run: status, stdout, stderr = self.ssh.execute("cmd", stdin="fake_stdin", timeout=43) mock_run.assert_called_once_with( "cmd", stdin="fake_stdin", stdout=stdio[0], stderr=stdio[1], timeout=43, raise_on_error=False) self.assertEqual(0, status) self.assertEqual("stdout fake data", stdout) self.assertEqual("stderr fake data", stderr) @mock.patch("rally.common.sshutils.time") def test_wait_timeout(self, mock_time): mock_time.time.side_effect = [1, 50, 150] self.ssh.execute = mock.Mock(side_effect=[exceptions.SSHError, exceptions.SSHError, 0]) self.assertRaises(exceptions.SSHTimeout, self.ssh.wait) self.assertEqual([mock.call("uname")] * 2, self.ssh.execute.mock_calls) @mock.patch("rally.common.sshutils.time") def test_wait(self, mock_time): mock_time.time.side_effect = [1, 50, 100] self.ssh.execute = mock.Mock(side_effect=[exceptions.SSHError, exceptions.SSHError, 0]) self.ssh.wait() self.assertEqual([mock.call("uname")] * 3, self.ssh.execute.mock_calls) @ddt.ddt class SSHRunTestCase(test.TestCase): """Test SSH.run method in different aspects. Also tested method "execute". 
""" def setUp(self): super(SSHRunTestCase, self).setUp() self.fake_client = mock.Mock() self.fake_session = mock.Mock() self.fake_transport = mock.Mock() self.fake_transport.open_session.return_value = self.fake_session self.fake_client.get_transport.return_value = self.fake_transport self.fake_session.recv_ready.return_value = False self.fake_session.recv_stderr_ready.return_value = False self.fake_session.send_ready.return_value = False self.fake_session.exit_status_ready.return_value = True self.fake_session.recv_exit_status.return_value = 0 self.ssh = sshutils.SSH("admin", "example.net") self.ssh._get_client = mock.Mock(return_value=self.fake_client) @mock.patch("rally.common.sshutils.select") def test_execute(self, mock_select): mock_select.select.return_value = ([], [], []) self.fake_session.recv_ready.side_effect = [1, 0, 0] self.fake_session.recv_stderr_ready.side_effect = [1, 0] self.fake_session.recv.return_value = b"ok" self.fake_session.recv_stderr.return_value = b"error" self.fake_session.exit_status_ready.return_value = 1 self.fake_session.recv_exit_status.return_value = 127 self.assertEqual((127, "ok", "error"), self.ssh.execute("cmd")) self.fake_session.exec_command.assert_called_once_with("cmd") @mock.patch("rally.common.sshutils.select") def test_execute_args(self, mock_select): mock_select.select.return_value = ([], [], []) self.fake_session.recv_ready.side_effect = [1, 0, 0] self.fake_session.recv_stderr_ready.side_effect = [1, 0] self.fake_session.recv.return_value = b"ok" self.fake_session.recv_stderr.return_value = b"error" self.fake_session.exit_status_ready.return_value = 1 self.fake_session.recv_exit_status.return_value = 127 result = self.ssh.execute(["cmd", "arg1", "arg2 with space"]) self.assertEqual((127, "ok", "error"), result) self.fake_session.exec_command.assert_called_once_with( "cmd arg1 'arg2 with space'") @mock.patch("rally.common.sshutils.select") def test_run(self, mock_select): mock_select.select.return_value = ([], [], []) 
self.assertEqual(0, self.ssh.run("cmd")) @mock.patch("rally.common.sshutils.select") def test_run_nonzero_status(self, mock_select): mock_select.select.return_value = ([], [], []) self.fake_session.recv_exit_status.return_value = 1 self.assertRaises(exceptions.SSHError, self.ssh.run, "cmd") self.assertEqual(1, self.ssh.run("cmd", raise_on_error=False)) @mock.patch("rally.common.sshutils.select") def test_run_stdout(self, mock_select): mock_select.select.return_value = ([], [], []) self.fake_session.recv_ready.side_effect = [True, True, False] self.fake_session.recv.side_effect = [b"ok1", b"ok2"] stdout = mock.Mock() self.ssh.run("cmd", stdout=stdout) self.assertEqual([mock.call("ok1"), mock.call("ok2")], stdout.write.mock_calls) @mock.patch("rally.common.sshutils.select") def test_run_stderr(self, mock_select): mock_select.select.return_value = ([], [], []) self.fake_session.recv_stderr_ready.side_effect = [True, False] self.fake_session.recv_stderr.return_value = b"error" stderr = mock.Mock() self.ssh.run("cmd", stderr=stderr) stderr.write.assert_called_once_with("error") @mock.patch("rally.common.sshutils.select") def test_run_stdin(self, mock_select): """Test run method with stdin. Third send call was called with "e2" because only 3 bytes was sent by second call. So remainig 2 bytes of "line2" was sent by third call. 
""" mock_select.select.return_value = ([], [], []) self.fake_session.exit_status_ready.side_effect = [0, 0, 0, True] self.fake_session.send_ready.return_value = True self.fake_session.send.side_effect = [5, 3, 2] fake_stdin = mock.Mock() fake_stdin.read.side_effect = ["line1", "line2", ""] fake_stdin.closed = False def close(): fake_stdin.closed = True fake_stdin.close = mock.Mock(side_effect=close) self.ssh.run("cmd", stdin=fake_stdin) call = mock.call send_calls = [call("line1"), call("line2"), call("e2")] self.assertEqual(send_calls, self.fake_session.send.mock_calls) @mock.patch("rally.common.sshutils.select") def test_run_select_error(self, mock_select): self.fake_session.exit_status_ready.return_value = False mock_select.select.return_value = ([], [], [True]) self.assertRaises(exceptions.SSHError, self.ssh.run, "cmd") @mock.patch("rally.common.sshutils.time") @mock.patch("rally.common.sshutils.select") def test_run_timemout(self, mock_select, mock_time): mock_time.time.side_effect = [1, 3700] mock_select.select.return_value = ([], [], []) self.fake_session.exit_status_ready.return_value = False self.assertRaises(exceptions.SSHTimeout, self.ssh.run, "cmd") @mock.patch("rally.common.sshutils.open", create=True) def test__put_file_shell(self, mock_open): self.ssh.run = mock.Mock() self.ssh._put_file_shell("localfile", "remotefile", 0o42) self.ssh.run.assert_called_once_with( "cat > remotefile; chmod 042 remotefile", stdin=mock_open.return_value.__enter__.return_value) @mock.patch("rally.common.sshutils.os.stat") def test__put_file_sftp(self, mock_stat): sftp = self.fake_client.open_sftp.return_value = mock.MagicMock() sftp.__enter__.return_value = sftp mock_stat.return_value = os.stat_result([0o753] + [0] * 9) self.ssh._put_file_sftp("localfile", "remotefile") sftp.put.assert_called_once_with("localfile", "remotefile") mock_stat.assert_called_once_with("localfile") sftp.chmod.assert_called_once_with("remotefile", 0o753) 
sftp.__exit__.assert_called_once_with(None, None, None) def test__put_file_sftp_mode(self): sftp = self.fake_client.open_sftp.return_value = mock.MagicMock() sftp.__enter__.return_value = sftp self.ssh._put_file_sftp("localfile", "remotefile", mode=0o753) sftp.put.assert_called_once_with("localfile", "remotefile") sftp.chmod.assert_called_once_with("remotefile", 0o753) sftp.__exit__.assert_called_once_with(None, None, None) @ddt.data(sshutils.paramiko.SSHException, socket.error) def test_put_file(self, exc): self.ssh._put_file_sftp = mock.Mock(side_effect=exc()) self.ssh._put_file_shell = mock.Mock() self.ssh.put_file("foo", "bar", 42) self.ssh._put_file_sftp.assert_called_once_with("foo", "bar", mode=42) self.ssh._put_file_shell.assert_called_once_with("foo", "bar", mode=42) rally-0.9.1/tests/unit/common/test_version.py0000664000567000056710000000277313073417716022536 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.common import version from tests.unit import test class ModuleTestCase(test.TestCase): VERSION_REGEX = "^\d+\.\d+\.\d+(~dev\d+)?$" def test_version_info(self): version_str = version.version_info.semantic_version().debian_string() self.assertRegexpMatches(version_str, self.VERSION_REGEX) @mock.patch("rally.common.version.version_info") def test_version_string(self, mock_version_info): mock_sv = mock.Mock() mock_sv.debian_string.return_value = "foo_version" mock_version_info.semantic_version.return_value = mock_sv self.assertEqual("foo_version", version.version_string()) @mock.patch("rally.common.db.api.schema_revision", return_value="foo") def test_database_revision(self, mock_schema_revision): self.assertEqual("foo", version.database_revision()) mock_schema_revision.assert_called_once_with(detailed=True) rally-0.9.1/tests/unit/common/db/0000775000567000056710000000000013073420067020005 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/db/test_types.py0000664000567000056710000001215413073417716022574 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for custom sqlalchemy types""" import mock import sqlalchemy as sa import testtools from rally.common.db.sqlalchemy import types class JsonEncodedTest(testtools.TestCase): def test_impl(self): self.assertEqual(sa.Text, types.JSONEncodedDict.impl) self.assertEqual(sa.Text, types.JSONEncodedList.impl) self.assertEqual(sa.Text, types.MutableJSONEncodedDict.impl) self.assertEqual(sa.Text, types.MutableJSONEncodedList.impl) def test_process_bind_param(self): t = types.JSONEncodedDict() self.assertEqual("{\"a\": 1}", t.process_bind_param({"a": 1}, None)) def test_process_bind_param_none(self): t = types.JSONEncodedDict() self.assertIsNone(t.process_bind_param(None, None)) def test_process_result_value(self): t = types.JSONEncodedDict() self.assertEqual({"a": 1}, t.process_result_value("{\"a\": 1}", None)) t = types.JSONEncodedList() self.assertEqual([[2, 1], [1, 2]], t.process_result_value( "[[2, 1], [1, 2]]", None)) with mock.patch("json.loads") as mock_json_loads: t.process_result_value("[[2, 1], [1, 2]]", None) mock_json_loads.asser_called_once_with([(2, 1), (1, 2)]) def test_process_result_value_none(self): t = types.JSONEncodedDict() self.assertIsNone(t.process_result_value(None, None)) t = types.JSONEncodedList() self.assertIsNone(t.process_result_value(None, None)) class MutableDictTest(testtools.TestCase): def test_creation(self): sample = {"a": 1, "b": 2} d = types.MutableDict(sample) self.assertEqual(sample, d) def test_coerce_dict(self): sample = {"a": 1, "b": 2} md = types.MutableDict.coerce("test", sample) self.assertEqual(sample, md) self.assertIsInstance(md, types.MutableDict) def test_coerce_mutable_dict(self): sample = {"a": 1, "b": 2} sample_md = types.MutableDict(sample) md = types.MutableDict.coerce("test", sample_md) self.assertEqual(sample, md) self.assertIs(sample_md, md) def test_coerce_unsupported(self): with testtools.ExpectedException(ValueError): types.MutableDict.coerce("test", []) @mock.patch.object(types.MutableDict, "changed") def 
test_changed_on_setitem(self, mock_mutable_dict_changed): sample = {"a": 1, "b": 2} d = types.MutableDict(sample) d["b"] = 3 self.assertEqual({"a": 1, "b": 3}, d) self.assertEqual(1, mock_mutable_dict_changed.call_count) @mock.patch.object(types.MutableDict, "changed") def test_changed_on_delitem(self, mock_mutable_dict_changed): sample = {"a": 1, "b": 2} d = types.MutableDict(sample) del d["b"] self.assertEqual({"a": 1}, d) self.assertEqual(1, mock_mutable_dict_changed.call_count) class MutableListTest(testtools.TestCase): def test_creation(self): sample = [1, 2, 3] d = types.MutableList(sample) self.assertEqual(sample, d) def test_coerce_list(self): sample = [1, 2, 3] md = types.MutableList.coerce("test", sample) self.assertEqual(sample, md) self.assertIsInstance(md, types.MutableList) def test_coerce_mutable_list(self): sample = [1, 2, 3] sample_md = types.MutableList(sample) md = types.MutableList.coerce("test", sample_md) self.assertEqual(sample, md) self.assertIs(sample_md, md) def test_coerce_unsupported(self): with testtools.ExpectedException(ValueError): types.MutableList.coerce("test", {}) @mock.patch.object(types.MutableList, "changed") def test_changed_on_append(self, mock_mutable_list_changed): sample = [1, 2, 3] lst = types.MutableList(sample) lst.append(4) self.assertEqual([1, 2, 3, 4], lst) self.assertEqual(1, mock_mutable_list_changed.call_count) @mock.patch.object(types.MutableList, "changed") def test_changed_on_setitem(self, mock_mutable_list_changed): sample = [1, 2, 3] lst = types.MutableList(sample) lst[2] = 4 self.assertEqual([1, 2, 4], lst) self.assertEqual(1, mock_mutable_list_changed.call_count) @mock.patch.object(types.MutableList, "changed") def test_changed_on_delitem(self, mock_mutable_list_changed): sample = [1, 2, 3] lst = types.MutableList(sample) del lst[2] self.assertEqual([1, 2], lst) self.assertEqual(1, mock_mutable_list_changed.call_count) 
rally-0.9.1/tests/unit/common/db/test_migrations_base.py0000664000567000056710000001333213073417716024575 0ustar jenkinsjenkins00000000000000# Copyright 2010-2011 OpenStack Foundation # Copyright 2012-2013 IBM Corp. # Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # # Ripped off from Murano's test_migrations.py # # There is an ongoing work to extact similar code to oslo incubator. Once it is # extracted we'll be able to remove this file and use oslo. import io import os from alembic import command from alembic import config as alembic_config from alembic import migration from alembic import script as alembic_script from oslo_config import cfg import rally.common.db.sqlalchemy.api as s_api from rally.common.i18n import _LE from rally.common import logging LOG = logging.getLogger(__name__) CONF = cfg.CONF class BaseWalkMigrationMixin(object): ALEMBIC_CONFIG = alembic_config.Config( os.path.join(os.path.dirname(s_api.__file__), "alembic.ini") ) ALEMBIC_CONFIG.rally_config = CONF def _configure(self, engine): """Configure database connection. For each type of repository we should do some of configure steps. For migrate_repo we should set under version control our database. For alembic we should configure database settings. For this goal we should use oslo.config and openstack.commom.db.sqlalchemy.session with database functionality (reset default settings and session cleanup). 
""" CONF.set_override("connection", str(engine.url), group="database") def _alembic_command(self, alembic_command, engine, *args, **kwargs): """Call alembic command. Most of alembic command return data into output. We should redefine this setting for getting info. """ self.ALEMBIC_CONFIG.stdout = buf = io.StringIO() CONF.set_override("connection", str(engine.url), group="database") getattr(command, alembic_command)(*args, **kwargs) res = buf.getvalue().strip() LOG.debug("Alembic command `{command}` returns: {result}".format( command=alembic_command, result=res)) return res def _up_and_down_versions(self): """Get revisions versions. Since alembic version has a random algorithm of generation (SA-migrate has an ordered autoincrement naming) we should store a tuple of versions (version for upgrade and version for downgrade) for successful testing of migrations. """ env = alembic_script.ScriptDirectory.from_config(self.ALEMBIC_CONFIG) versions = [] for rev in env.walk_revisions(): if rev.revision == s_api.INITIAL_REVISION_UUID: # NOTE(rpromyshlennikov): we skip initial migration here continue versions.append((rev.revision, rev.down_revision or "-1")) versions.reverse() return versions def walk_versions(self, engine=None): """Walk through versions. Determine latest version script from the repo, then upgrade from 1 through to the latest, with no data in the databases. This just checks that the schema itself upgrades successfully. """ self._configure(engine) # NOTE(ikhudoshyn): Now DB contains certain schema # so we can not execute all migrations starting from # init. So we cleanup the DB. 
s_api.get_backend().schema_cleanup() up_and_down_versions = self._up_and_down_versions() for ver_up, ver_down in up_and_down_versions: self._migrate_up(engine, ver_up, with_data=True) def _get_version_from_db(self, engine): """Return latest version for each type of migrate repo from db.""" conn = engine.connect() try: context = migration.MigrationContext.configure(conn) version = context.get_current_revision() or "-1" finally: conn.close() return version def _migrate(self, engine, version, cmd): """Base method for manipulation with migrate repo. It will upgrade the actual database. """ self._alembic_command(cmd, engine, self.ALEMBIC_CONFIG, version) def _migrate_up(self, engine, version, with_data=False): """Migrate up to a new version of the db. We allow for data insertion and post checks at every migration version with special _pre_upgrade_### and _check_### functions in the main test. """ # NOTE(sdague): try block is here because it's impossible to debug # where a failed data migration happens otherwise check_version = version try: if with_data: data = None pre_upgrade = getattr( self, "_pre_upgrade_%s" % check_version, None) if pre_upgrade: data = pre_upgrade(engine) self._migrate(engine, version, "upgrade") self.assertEqual(version, self._get_version_from_db(engine)) if with_data: check = getattr(self, "_check_%s" % check_version, None) if check: check(engine, data) except Exception: LOG.error(_LE("Failed to migrate to version {ver} on engine {eng}") .format(ver=version, eng=engine)) raise rally-0.9.1/tests/unit/common/db/__init__.py0000664000567000056710000000000013073417716022113 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/db/test_migrations.py0000664000567000056710000016440313073417716023611 0ustar jenkinsjenkins00000000000000# Copyright (c) 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for DB migration.""" import copy import json import pickle import pprint import uuid import alembic import mock from oslo_db.sqlalchemy import test_migrations from oslo_db.sqlalchemy import utils as db_utils from oslo_utils import timeutils import six import sqlalchemy as sa import rally from rally.common import db from rally.common.db.sqlalchemy import api from rally.common.db.sqlalchemy import models from rally import consts from rally.deployment.engines import existing from tests.unit.common.db import test_migrations_base from tests.unit import test as rtest class MigrationTestCase(rtest.DBTestCase, test_migrations.ModelsMigrationsSync): """Test for checking of equality models state and migrations. For the opportunistic testing you need to set up a db named 'openstack_citest' with user 'openstack_citest' and password 'openstack_citest' on localhost. The test will then use that db and user/password combo to run the tests. For PostgreSQL on Ubuntu this can be done with the following commands:: sudo -u postgres psql postgres=# create user openstack_citest with createdb login password 'openstack_citest'; postgres=# create database openstack_citest with owner openstack_citest; For MySQL on Ubuntu this can be done with the following commands:: mysql -u root >create database openstack_citest; >grant all privileges on openstack_citest.* to openstack_citest@localhost identified by 'openstack_citest'; Output is a list that contains information about differences between db and models. 
Output example:: [('add_table', Table('bat', MetaData(bind=None), Column('info', String(), table=), schema=None)), ('remove_table', Table(u'bar', MetaData(bind=None), Column(u'data', VARCHAR(), table=), schema=None)), ('add_column', None, 'foo', Column('data', Integer(), table=)), ('remove_column', None, 'foo', Column(u'old_data', VARCHAR(), table=None)), [('modify_nullable', None, 'foo', u'x', {'existing_server_default': None, 'existing_type': INTEGER()}, True, False)]] * ``remove_*`` means that there is extra table/column/constraint in db; * ``add_*`` means that it is missing in db; * ``modify_*`` means that on column in db is set wrong type/nullable/server_default. Element contains information: - what should be modified, - schema, - table, - column, - existing correct column parameters, - right value, - wrong value. """ def setUp(self): # we change DB metadata in tests so we reload # models to refresh the metadata to it's original state six.moves.reload_module(rally.common.db.sqlalchemy.models) super(MigrationTestCase, self).setUp() self.alembic_config = api._alembic_config() self.engine = api.get_engine() # remove everything from DB and stamp it as 'base' # so that migration (i.e. 
upgrade up to 'head') # will actually take place db.schema_cleanup() db.schema_stamp("base") def db_sync(self, engine): db.schema_upgrade() def get_engine(self): return self.engine def get_metadata(self): return models.BASE.metadata def include_object(self, object_, name, type_, reflected, compare_to): if type_ == "table" and name == "alembic_version": return False return super(MigrationTestCase, self).include_object( object_, name, type_, reflected, compare_to) def _create_fake_model(self, table_name): type( "FakeModel", (models.BASE, models.RallyBase), {"__tablename__": table_name, "id": sa.Column(sa.Integer, primary_key=True, autoincrement=True)} ) def _get_metadata_diff(self): with self.get_engine().connect() as conn: opts = { "include_object": self.include_object, "compare_type": self.compare_type, "compare_server_default": self.compare_server_default, } mc = alembic.migration.MigrationContext.configure(conn, opts=opts) # compare schemas and fail with diff, if it"s not empty diff = self.filter_metadata_diff( alembic.autogenerate.compare_metadata(mc, self.get_metadata())) return diff @mock.patch("rally.common.db.sqlalchemy.api.Connection.schema_stamp") def test_models_sync(self, mock_connection_schema_stamp): # drop all tables after a test run self.addCleanup(db.schema_cleanup) # run migration scripts self.db_sync(self.get_engine()) diff = self._get_metadata_diff() if diff: msg = pprint.pformat(diff, indent=2, width=20) self.fail( "Models and migration scripts aren't in sync:\n%s" % msg) @mock.patch("rally.common.db.sqlalchemy.api.Connection.schema_stamp") def test_models_sync_negative__missing_table_in_script( self, mock_connection_schema_stamp): # drop all tables after a test run self.addCleanup(db.schema_cleanup) self._create_fake_model("fake_model") # run migration scripts self.db_sync(self.get_engine()) diff = self._get_metadata_diff() self.assertEqual(1, len(diff)) action, object = diff[0] self.assertEqual("add_table", action) 
self.assertIsInstance(object, sa.Table) self.assertEqual("fake_model", object.name) @mock.patch("rally.common.db.sqlalchemy.api.Connection.schema_stamp") def test_models_sync_negative__missing_model_in_metadata( self, mock_connection_schema_stamp): # drop all tables after a test run self.addCleanup(db.schema_cleanup) table = self.get_metadata().tables["workers"] self.get_metadata().remove(table) # run migration scripts self.db_sync(self.get_engine()) diff = self._get_metadata_diff() self.assertEqual(1, len(diff)) action, object = diff[0] self.assertEqual("remove_table", action) self.assertIsInstance(object, sa.Table) self.assertEqual("workers", object.name) class MigrationWalkTestCase(rtest.DBTestCase, test_migrations_base.BaseWalkMigrationMixin): """Test case covers upgrade method in migrations.""" def setUp(self): super(MigrationWalkTestCase, self).setUp() self.engine = api.get_engine() def assertColumnExists(self, engine, table, column): t = db_utils.get_table(engine, table) self.assertIn(column, t.c) def assertColumnsExists(self, engine, table, columns): for column in columns: self.assertColumnExists(engine, table, column) def assertColumnCount(self, engine, table, columns): t = db_utils.get_table(engine, table) self.assertEqual(len(t.columns), len(columns)) def assertColumnNotExists(self, engine, table, column): t = db_utils.get_table(engine, table) self.assertNotIn(column, t.c) def assertIndexExists(self, engine, table, index): t = db_utils.get_table(engine, table) index_names = [idx.name for idx in t.indexes] self.assertIn(index, index_names) def assertColumnType(self, engine, table, column, sqltype): t = db_utils.get_table(engine, table) col = getattr(t.c, column) self.assertIsInstance(col.type, sqltype) def assertIndexMembers(self, engine, table, index, members): self.assertIndexExists(engine, table, index) t = db_utils.get_table(engine, table) index_columns = None for idx in t.indexes: if idx.name == index: index_columns = idx.columns.keys() break 
self.assertEqual(sorted(members), sorted(index_columns)) def test_walk_versions(self): self.walk_versions(self.engine) def _check_3177d36ea270(self, engine, data): self.assertEqual( "3177d36ea270", api.get_backend().schema_revision(engine=engine)) self.assertColumnExists(engine, "deployments", "credentials") self.assertColumnNotExists(engine, "deployments", "admin") self.assertColumnNotExists(engine, "deployments", "users") def _pre_upgrade_54e844ebfbc3(self, engine): self._54e844ebfbc3_deployments = { # right config which should not be changed after migration "should-not-be-changed-1": { "admin": {"username": "admin", "password": "passwd", "project_name": "admin"}, "auth_url": "http://example.com:5000/v3", "region_name": "RegionOne", "type": "ExistingCloud"}, # right config which should not be changed after migration "should-not-be-changed-2": { "admin": {"username": "admin", "password": "passwd", "tenant_name": "admin"}, "users": [{"username": "admin", "password": "passwd", "tenant_name": "admin"}], "auth_url": "http://example.com:5000/v2.0", "region_name": "RegionOne", "type": "ExistingCloud"}, # not ExistingCloud config which should not be changed "should-not-be-changed-3": { "url": "example.com", "type": "Something"}, # normal config created with "fromenv" feature "from-env": { "admin": {"username": "admin", "password": "passwd", "tenant_name": "admin", "project_domain_name": "", "user_domain_name": ""}, "auth_url": "http://example.com:5000/v2.0", "region_name": "RegionOne", "type": "ExistingCloud"}, # public endpoint + keystone v3 config with tenant_name "ksv3_public": { "admin": {"username": "admin", "password": "passwd", "tenant_name": "admin", "user_domain_name": "bla", "project_domain_name": "foo"}, "auth_url": "http://example.com:5000/v3", "region_name": "RegionOne", "type": "ExistingCloud", "endpoint_type": "public"}, # internal endpoint + existing_users "existing_internal": { "admin": {"username": "admin", "password": "passwd", "tenant_name": "admin"}, 
"users": [{"username": "admin", "password": "passwd", "tenant_name": "admin", "project_domain_name": "", "user_domain_name": ""}], "auth_url": "http://example.com:5000/v2.0", "region_name": "RegionOne", "type": "ExistingCloud", "endpoint_type": "internal"}, } deployment_table = db_utils.get_table(engine, "deployments") deployment_status = consts.DeployStatus.DEPLOY_FINISHED with engine.connect() as conn: for deployment in self._54e844ebfbc3_deployments: conf = json.dumps(self._54e844ebfbc3_deployments[deployment]) conn.execute( deployment_table.insert(), [{"uuid": deployment, "name": deployment, "config": conf, "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) def _check_54e844ebfbc3(self, engine, data): self.assertEqual("54e844ebfbc3", api.get_backend().schema_revision(engine=engine)) original_deployments = self._54e844ebfbc3_deployments deployment_table = db_utils.get_table(engine, "deployments") with engine.connect() as conn: deployments_found = conn.execute( deployment_table.select()).fetchall() for deployment in deployments_found: # check deployment self.assertIn(deployment.uuid, original_deployments) self.assertIn(deployment.name, original_deployments) config = json.loads(deployment.config) if config != original_deployments[deployment.uuid]: if deployment.uuid.startswith("should-not-be-changed"): self.fail("Config of deployment '%s' is changes, but " "should not." % deployment.uuid) endpoint_type = (original_deployments[ deployment.uuid].get("endpoint_type")) if endpoint_type in (None, "public"): self.assertNotIn("endpoint_type", config) else: self.assertIn("endpoint_type", config) self.assertEqual(endpoint_type, config["endpoint_type"]) existing.ExistingCloud({"config": config}).validate() else: if not deployment.uuid.startswith("should-not-be-changed"): self.fail("Config of deployment '%s' is not changes, " "but should." 
% deployment.uuid) # this deployment created at _pre_upgrade step is not needed # anymore and we can remove it conn.execute( deployment_table.delete().where( deployment_table.c.uuid == deployment.uuid) ) def _pre_upgrade_08e1515a576c(self, engine): self._08e1515a576c_logs = [ {"pre": "No such file name", "post": {"etype": IOError.__name__, "msg": "No such file name"}}, {"pre": "Task config is invalid: bla", "post": {"etype": "InvalidTaskException", "msg": "Task config is invalid: bla"}}, {"pre": "Failed to load task foo", "post": {"etype": "FailedToLoadTask", "msg": "Failed to load task foo"}}, {"pre": ["SomeCls", "msg", json.dumps( ["File some1.py, line ...\n", "File some2.py, line ...\n"])], "post": {"etype": "SomeCls", "msg": "msg", "trace": "Traceback (most recent call last):\n" "File some1.py, line ...\n" "File some2.py, line ...\nSomeCls: msg"}}, ] deployment_table = db_utils.get_table(engine, "deployments") task_table = db_utils.get_table(engine, "tasks") self._08e1515a576c_deployment_uuid = "08e1515a576c-uuuu-uuuu-iiii-dddd" with engine.connect() as conn: conn.execute( deployment_table.insert(), [{"uuid": self._08e1515a576c_deployment_uuid, "name": self._08e1515a576c_deployment_uuid, "config": six.b("{}"), "enum_deployments_status": consts.DeployStatus.DEPLOY_FINISHED, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) for i in range(0, len(self._08e1515a576c_logs)): log = json.dumps(self._08e1515a576c_logs[i]["pre"]) conn.execute( task_table.insert(), [{"uuid": i, "verification_log": log, "status": "failed", "enum_tasks_status": "failed", "deployment_uuid": self._08e1515a576c_deployment_uuid }]) def _check_08e1515a576c(self, engine, data): self.assertEqual("08e1515a576c", api.get_backend().schema_revision(engine=engine)) tasks = self._08e1515a576c_logs deployment_table = db_utils.get_table(engine, "deployments") task_table = db_utils.get_table(engine, "tasks") with engine.connect() as conn: tasks_found = 
conn.execute(task_table.select()).fetchall() for task in tasks_found: actual_log = json.loads(task.verification_log) self.assertIsInstance(actual_log, dict) expected = tasks[int(task.uuid)]["post"] for key in expected: self.assertEqual(expected[key], actual_log[key]) conn.execute( task_table.delete().where(task_table.c.uuid == task.uuid)) deployment_uuid = self._08e1515a576c_deployment_uuid conn.execute( deployment_table.delete().where( deployment_table.c.uuid == deployment_uuid)) def _pre_upgrade_e654a0648db0(self, engine): deployment_table = db_utils.get_table(engine, "deployments") task_table = db_utils.get_table(engine, "tasks") taskresult_table = db_utils.get_table(engine, "task_results") self._e654a0648db0_task_uuid = str(uuid.uuid4()) self._e654a0648db0_deployment_uuid = str(uuid.uuid4()) with engine.connect() as conn: conn.execute( deployment_table.insert(), [{ "uuid": self._e654a0648db0_deployment_uuid, "name": self._e654a0648db0_deployment_uuid, "config": "{}", "enum_deployments_status": consts.DeployStatus.DEPLOY_INIT, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }] ) conn.execute( task_table.insert(), [{ "uuid": self._e654a0648db0_task_uuid, "created_at": timeutils.utcnow(), "updated_at": timeutils.utcnow(), "status": consts.TaskStatus.FINISHED, "verification_log": json.dumps({}), "tag": "test_tag", "deployment_uuid": self._e654a0648db0_deployment_uuid }] ) conn.execute( taskresult_table.insert(), [ { "task_uuid": self._e654a0648db0_task_uuid, "created_at": timeutils.utcnow(), "updated_at": timeutils.utcnow(), "key": json.dumps({ "name": "test_scenario", "pos": 0, "kw": { "args": {"a": "A"}, "runner": {"type": "theRunner"}, "context": {"c": "C"}, "sla": {"s": "S"} } }), "data": json.dumps({ "raw": [ {"error": "e", "duration": 3}, {"duration": 1}, {"duration": 8}, ], "load_duration": 42, "full_duration": 142, "sla": [{"success": True}, {"success": False}] }) } ] ) def _check_e654a0648db0(self, engine, data): self.assertEqual( 
"e654a0648db0", api.get_backend().schema_revision(engine=engine)) task_table = db_utils.get_table(engine, "tasks") subtask_table = db_utils.get_table(engine, "subtasks") workload_table = db_utils.get_table(engine, "workloads") workloaddata_table = db_utils.get_table(engine, "workloaddata") tag_table = db_utils.get_table(engine, "tags") deployment_table = db_utils.get_table(engine, "deployments") with engine.connect() as conn: # Check task tasks_found = conn.execute( task_table.select(). where(task_table.c.uuid == self._e654a0648db0_task_uuid) ).fetchall() self.assertEqual(len(tasks_found), 1) task_found = tasks_found[0] self.assertEqual(task_found.uuid, self._e654a0648db0_task_uuid) self.assertEqual(task_found.deployment_uuid, self._e654a0648db0_deployment_uuid) self.assertEqual(task_found.status, consts.TaskStatus.FINISHED) # NOTE(ikhudoshyn): if for all workloads success == True self.assertEqual(task_found.pass_sla, False) # NOTE(ikhudoshyn): sum of all full_durations of all workloads self.assertEqual(task_found.task_duration, 142) # NOTE(ikhudoshyn): we have no info on validation duration in old # schema self.assertEqual(task_found.validation_duration, 0) self.assertEqual(json.loads(task_found.validation_result), {}) # Check subtask subtasks_found = conn.execute( subtask_table.select(). where(subtask_table.c.task_uuid == self._e654a0648db0_task_uuid) ).fetchall() self.assertEqual(len(subtasks_found), 1) subtask_found = subtasks_found[0] self.assertEqual(subtask_found.task_uuid, self._e654a0648db0_task_uuid) # NOTE(ikhudoshyn): if for all workloads success == True self.assertEqual(subtask_found.pass_sla, False) # NOTE(ikhudoshyn): sum of all full_durations of all workloads self.assertEqual(subtask_found.duration, 142) self._e654a0648db0_subtask_uuid = subtask_found.uuid # Check tag tags_found = conn.execute( tag_table.select(). 
where(tag_table.c.uuid == self._e654a0648db0_task_uuid) ).fetchall() self.assertEqual(len(tags_found), 1) self.assertEqual(tags_found[0].tag, "test_tag") self.assertEqual(tags_found[0].type, consts.TagType.TASK) # Check workload workloads_found = conn.execute( workload_table.select(). where(workload_table.c.task_uuid == self._e654a0648db0_task_uuid) ).fetchall() self.assertEqual(len(workloads_found), 1) workload_found = workloads_found[0] self.assertEqual(workload_found.task_uuid, self._e654a0648db0_task_uuid) self.assertEqual(workload_found.subtask_uuid, self._e654a0648db0_subtask_uuid) self.assertEqual(workload_found.name, "test_scenario") self.assertEqual(workload_found.position, 0) self.assertEqual(workload_found.runner_type, "theRunner") self.assertEqual(workload_found.runner, json.dumps({"type": "theRunner"})) self.assertEqual(workload_found.sla, json.dumps({"s": "S"})) self.assertEqual(workload_found.args, json.dumps({"a": "A"})) self.assertEqual(workload_found.context, json.dumps({"c": "C"})) self.assertEqual(workload_found.sla_results, json.dumps({ "sla": [ {"success": True}, {"success": False} ] })) self.assertEqual(workload_found.context_execution, json.dumps({})) self.assertEqual(workload_found.load_duration, 42) self.assertEqual(workload_found.full_duration, 142) self.assertEqual(workload_found.min_duration, 1) self.assertEqual(workload_found.max_duration, 8) self.assertEqual(workload_found.total_iteration_count, 3) self.assertEqual(workload_found.failed_iteration_count, 1) self.assertEqual(workload_found.pass_sla, False) self._e654a0648db0_workload_uuid = workload_found.uuid # Check workloadData workloaddata_found = conn.execute( workloaddata_table.select(). 
where(workloaddata_table.c.task_uuid == self._e654a0648db0_task_uuid) ).fetchall() self.assertEqual(len(workloaddata_found), 1) wloaddata_found = workloaddata_found[0] self.assertEqual(wloaddata_found.task_uuid, self._e654a0648db0_task_uuid) self.assertEqual(wloaddata_found.workload_uuid, self._e654a0648db0_workload_uuid) self.assertEqual(wloaddata_found.chunk_order, 0) self.assertEqual(wloaddata_found.chunk_size, 0) self.assertEqual(wloaddata_found.compressed_chunk_size, 0) self.assertEqual(wloaddata_found.iteration_count, 3) self.assertEqual(wloaddata_found.failed_iteration_count, 1) self.assertEqual( wloaddata_found.chunk_data, json.dumps( { "raw": [ {"error": "e", "duration": 3}, {"duration": 1}, {"duration": 8}, ] } ) ) # Delete all stuff created at _pre_upgrade step conn.execute( tag_table.delete(). where(tag_table.c.uuid == self._e654a0648db0_task_uuid) ) conn.execute( workloaddata_table.delete(). where(workloaddata_table.c.task_uuid == self._e654a0648db0_task_uuid) ) conn.execute( workload_table.delete(). where(workload_table.c.task_uuid == self._e654a0648db0_task_uuid) ) conn.execute( subtask_table.delete(). where(subtask_table.c.task_uuid == self._e654a0648db0_task_uuid) ) conn.execute( task_table.delete(). where(task_table.c.uuid == self._e654a0648db0_task_uuid) ) conn.execute( deployment_table.delete(). 
where(deployment_table.c.uuid == self._e654a0648db0_deployment_uuid) ) def _pre_upgrade_6ad4f426f005(self, engine): deployment_table = db_utils.get_table(engine, "deployments") task_table = db_utils.get_table(engine, "tasks") task_result_table = db_utils.get_table(engine, "task_results") with engine.connect() as conn: # create deployment conf = { "admin": {"username": "admin", "password": "passwd", "project_name": "admin"}, "auth_url": "http://example.com:5000/v3", "region_name": "RegionOne", "type": "ExistingCloud" } deployment_status = consts.DeployStatus.DEPLOY_FINISHED conn.execute( deployment_table.insert(), [{ "uuid": "my_deployment", "name": "my_deployment", "config": json.dumps(conf), "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) # create task conn.execute( task_table.insert(), [{ "uuid": "my_task", "deployment_uuid": "my_deployment", "status": consts.TaskStatus.INIT, }]) # create task result with empty data conn.execute( task_result_table.insert(), [{ "task_uuid": "my_task", "key": json.dumps({}), "data": json.dumps({}), }] ) def _check_6ad4f426f005(self, engine, data): self.assertEqual("6ad4f426f005", api.get_backend().schema_revision(engine=engine)) deployment_table = db_utils.get_table(engine, "deployments") task_table = db_utils.get_table(engine, "tasks") task_result_table = db_utils.get_table(engine, "task_results") with engine.connect() as conn: task_results = conn.execute(task_result_table.select()).fetchall() self.assertEqual(1, len(task_results)) task_result = task_results[0] # check that "hooks" field added self.assertEqual({"hooks": []}, json.loads(task_result.data)) # Remove task result conn.execute( task_result_table.delete().where( task_result_table.c.id == task_result.id) ) # Remove task conn.execute( task_table.delete().where(task_table.c.uuid == "my_task")) # Remove deployment conn.execute( deployment_table.delete().where( deployment_table.c.uuid == "my_deployment") ) 
def _pre_upgrade_32fada9b2fde(self, engine): self._32fada9b2fde_deployments = { # right config which should not be changed after migration "should-not-be-changed-1": { "admin": {"username": "admin", "password": "passwd", "project_name": "admin"}, "auth_url": "http://example.com:5000/v3", "region_name": "RegionOne", "type": "ExistingCloud"}, # right config which should not be changed after migration "should-not-be-changed-2": { "admin": {"username": "admin", "password": "passwd", "tenant_name": "admin"}, "users": [{"username": "admin", "password": "passwd", "tenant_name": "admin"}], "auth_url": "http://example.com:5000/v2.0", "region_name": "RegionOne", "type": "ExistingCloud"}, # not ExistingCloud config which should not be changed "should-not-be-changed-3": { "url": "example.com", "type": "Something"}, # with `admin_domain_name` field "with_admin_domain_name": { "admin": {"username": "admin", "password": "passwd", "project_name": "admin", "admin_domain_name": "admin"}, "auth_url": "http://example.com:5000/v3", "region_name": "RegionOne", "type": "ExistingCloud"}, } deployment_table = db_utils.get_table(engine, "deployments") deployment_status = consts.DeployStatus.DEPLOY_FINISHED with engine.connect() as conn: for deployment in self._32fada9b2fde_deployments: conf = json.dumps( self._32fada9b2fde_deployments[deployment]) conn.execute( deployment_table.insert(), [{"uuid": deployment, "name": deployment, "config": conf, "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) def _check_32fada9b2fde(self, engine, data): self.assertEqual("32fada9b2fde", api.get_backend().schema_revision(engine=engine)) original_deployments = self._32fada9b2fde_deployments deployment_table = db_utils.get_table(engine, "deployments") with engine.connect() as conn: deployments_found = conn.execute( deployment_table.select()).fetchall() for deployment in deployments_found: # check deployment self.assertIn(deployment.uuid, 
original_deployments) self.assertIn(deployment.name, original_deployments) config = json.loads(deployment.config) if config != original_deployments[deployment.uuid]: if deployment.uuid.startswith("should-not-be-changed"): self.fail("Config of deployment '%s' is changes, but " "should not." % deployment.uuid) if "admin_domain_name" in deployment.config: self.fail("Config of deployment '%s' should not " "contain `admin_domain_name` field." % deployment.uuid) endpoint_type = (original_deployments[ deployment.uuid].get("endpoint_type")) if endpoint_type in (None, "public"): self.assertNotIn("endpoint_type", config) else: self.assertIn("endpoint_type", config) self.assertEqual(endpoint_type, config["endpoint_type"]) existing.ExistingCloud({"config": config}).validate() else: if not deployment.uuid.startswith("should-not-be-changed"): self.fail("Config of deployment '%s' is not changes, " "but should." % deployment.uuid) # this deployment created at _pre_upgrade step is not needed # anymore and we can remove it conn.execute( deployment_table.delete().where( deployment_table.c.uuid == deployment.uuid) ) def _pre_upgrade_484cd9413e66(self, engine): self._484cd9413e66_deployment_uuid = "484cd9413e66-deploy" self._484cd9413e66_verifications = [ {"total": {"time": 1.0, "failures": 2, "skipped": 3, "success": 4, "errors": 0, "tests": 2 }, "test_cases": {"test1": {"status": "OK"}, "test2": {"status": "FAIL", "failure": {"log": "trace"}}}, "set_name": "full"}, {"total": {"time": 2.0, "failures": 3, "skipped": 4, "success": 5, "unexpected_success": 6, "expected_failures": 7, "tests": 2 }, "test_cases": {"test1": {"status": "success"}, "test2": {"status": "failed", "" "traceback": "trace"}}, "set_name": "smoke"} ] deployment_table = db_utils.get_table(engine, "deployments") verifications_table = db_utils.get_table(engine, "verifications") vresults_table = db_utils.get_table(engine, "verification_results") deployment_status = consts.DeployStatus.DEPLOY_FINISHED vstatus = 
consts.TaskStatus.FINISHED with engine.connect() as conn: conn.execute( deployment_table.insert(), [{"uuid": self._484cd9413e66_deployment_uuid, "name": self._484cd9413e66_deployment_uuid, "config": six.b(json.dumps([])), "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) for i in range(len(self._484cd9413e66_verifications)): verification = self._484cd9413e66_verifications[i] vuuid = "uuid-%s" % i conn.execute( verifications_table.insert(), [{"uuid": vuuid, "deployment_uuid": self._484cd9413e66_deployment_uuid, "status": vstatus, "set_name": verification["set_name"], "tests": verification["total"]["tests"], "failures": verification["total"]["failures"], "time": verification["total"]["time"], "errors": 0, }]) data = copy.deepcopy(verification) data["total"]["test_cases"] = data["test_cases"] data = data["total"] conn.execute( vresults_table.insert(), [{"uuid": vuuid, "verification_uuid": vuuid, "data": json.dumps(data) }]) def _check_484cd9413e66(self, engine, data): self.assertEqual("484cd9413e66", api.get_backend().schema_revision(engine=engine)) verifications_table = db_utils.get_table(engine, "verifications") with engine.connect() as conn: verifications = conn.execute( verifications_table.select()).fetchall() for i in range(len(verifications)): verification_orig = self._484cd9413e66_verifications[i] verification = verifications[i] total = {"time": verification.tests_duration, "failures": verification.failures, "skipped": verification.skipped, "success": verification.success, "tests": verification.tests_count} results = verification_orig["test_cases"] old_format = "errors" in verification_orig["total"] if old_format: total["errors"] = 0 for test_name in results: status = results[test_name]["status"] if status == "OK": status = "success" elif status == "FAIL": status = "fail" results[test_name]["traceback"] = results[ test_name]["failure"].pop("log") results[test_name].pop("failure") 
results[test_name]["status"] = status else: uxsucess = verification.unexpected_success total["unexpected_success"] = uxsucess total["expected_failures"] = verification.expected_failures self.assertEqual(verification_orig["total"], total) self.assertEqual(results, json.loads(verification.tests)) self.assertEqual( {"pattern": "set=%s" % verification_orig["set_name"]}, json.loads(verification.run_args)) self.assertEqual( verification_orig["total"].get("unexpected_success", 0), verification.unexpected_success) self.assertEqual( verification_orig["total"].get("expected_failures", 0), verification.expected_failures) conn.execute( verifications_table.delete().where( verifications_table.c.uuid == verification.uuid) ) deployment_table = db_utils.get_table(engine, "deployments") conn.execute( deployment_table.delete().where( deployment_table.c.uuid == self._484cd9413e66_deployment_uuid) ) def _pre_upgrade_37fdbb373e8d(self, engine): self._37fdbb373e8d_deployment_uuid = "37fdbb373e8d-deployment" self._37fdbb373e8d_verifier_uuid = "37fdbb373e8d-verifier" self._37fdbb373e8d_verifications_tests = [ { "test_1[smoke, negative]": { "name": "test_1", "time": 2.32, "status": "success", "tags": ["smoke", "negative"] }, "test_2[smoke, negative]": { "name": "test_2", "time": 4.32, "status": "success", "tags": ["smoke", "negative"] } }, { "test_3[smoke, negative]": { "name": "test_3", "time": 6.32, "status": "success", "tags": ["smoke", "negative"] }, "test_4[smoke, negative]": { "name": "test_4", "time": 8.32, "status": "success", "tags": ["smoke", "negative"] } } ] deployment_table = db_utils.get_table(engine, "deployments") verifiers_table = db_utils.get_table(engine, "verifiers") verifications_table = db_utils.get_table(engine, "verifications") deployment_status = consts.DeployStatus.DEPLOY_FINISHED with engine.connect() as conn: conn.execute( deployment_table.insert(), [{"uuid": self._37fdbb373e8d_deployment_uuid, "name": self._37fdbb373e8d_deployment_uuid, "config": 
six.b(json.dumps([])), "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) conn.execute( verifiers_table.insert(), [{"uuid": self._37fdbb373e8d_verifier_uuid, "name": self._37fdbb373e8d_verifier_uuid, "type": "some-type", "status": consts.VerifierStatus.INSTALLED }]) for i in range(len(self._37fdbb373e8d_verifications_tests)): tests = self._37fdbb373e8d_verifications_tests[i] conn.execute( verifications_table.insert(), [{"uuid": "verification-uuid-%s" % i, "deployment_uuid": self._37fdbb373e8d_deployment_uuid, "verifier_uuid": self._37fdbb373e8d_verifier_uuid, "status": consts.VerificationStatus.FINISHED, "tests": json.dumps(tests) }]) def _check_37fdbb373e8d(self, engine, data): self.assertEqual("37fdbb373e8d", api.get_backend().schema_revision(engine=engine)) verifications_table = db_utils.get_table(engine, "verifications") with engine.connect() as conn: verifications = conn.execute( verifications_table.select()).fetchall() self.assertEqual(len(verifications), len(self._37fdbb373e8d_verifications_tests)) for i in range(len(verifications)): v = verifications[i] updated_tests = json.loads(v.tests) expected_tests = self._37fdbb373e8d_verifications_tests[i] for test in expected_tests.values(): duration = test.pop("time") test["duration"] = duration self.assertEqual(expected_tests, updated_tests) conn.execute( verifications_table.delete().where( verifications_table.c.uuid == v.uuid) ) deployment_table = db_utils.get_table(engine, "deployments") conn.execute( deployment_table.delete().where( deployment_table.c.uuid == self._37fdbb373e8d_deployment_uuid) ) def _pre_upgrade_a6f364988fc2(self, engine): self._a6f364988fc2_tags = [ { "uuid": "uuid-1", "type": "task", "tag": "tag-1" }, { "uuid": "uuid-2", "type": "subtask", "tag": "tag-2" }, { "uuid": "uuid-3", "type": "task", "tag": "tag-3" } ] tags_table = db_utils.get_table(engine, "tags") with engine.connect() as conn: for t in self._a6f364988fc2_tags: 
conn.execute( tags_table.insert(), [{ "uuid": t["uuid"], "enum_tag_types": t["type"], "type": t["type"], "tag": t["tag"] }]) def _check_a6f364988fc2(self, engine, data): self.assertEqual("a6f364988fc2", api.get_backend().schema_revision(engine=engine)) tags_table = db_utils.get_table(engine, "tags") with engine.connect() as conn: tags = conn.execute(tags_table.select()).fetchall() self.assertEqual(len(tags), len(self._a6f364988fc2_tags)) for i in range(len(tags)): for k in ("uuid", "type", "tag"): self.assertEqual(self._a6f364988fc2_tags[i][k], tags[i][k]) conn.execute( tags_table.delete().where( tags_table.c.uuid == tags[i].uuid)) def _pre_upgrade_f33f4610dcda(self, engine): self._f33f4610dcda_deployment_uuid = "f33f4610dcda-deployment" self._f33f4610dcda_verifier_uuid = "f33f4610dcda-verifier" self._f33f4610dcda_verifications = [ {"status": "init", "failures": 0, "unexpected_success": 0}, {"status": "running", "failures": 0, "unexpected_success": 0}, {"status": "finished", "failures": 0, "unexpected_success": 0}, {"status": "finished", "failures": 1, "unexpected_success": 0, "new_status": "failed"}, {"status": "finished", "failures": 1, "unexpected_success": 1, "new_status": "failed"}, {"status": "finished", "failures": 0, "unexpected_success": 1, "new_status": "failed"}, {"status": "failed", "failures": 0, "unexpected_success": 0, "new_status": "crashed"}, ] deployment_table = db_utils.get_table(engine, "deployments") verifiers_table = db_utils.get_table(engine, "verifiers") verifications_table = db_utils.get_table(engine, "verifications") deployment_status = consts.DeployStatus.DEPLOY_FINISHED with engine.connect() as conn: conn.execute( deployment_table.insert(), [{"uuid": self._f33f4610dcda_deployment_uuid, "name": self._f33f4610dcda_deployment_uuid, "config": six.b(json.dumps([])), "enum_deployments_status": deployment_status, "credentials": six.b(json.dumps([])), "users": six.b(json.dumps([])) }]) conn.execute( verifiers_table.insert(), [{"uuid": 
self._f33f4610dcda_verifier_uuid, "name": self._f33f4610dcda_verifier_uuid, "type": "some-type", "status": consts.VerifierStatus.INSTALLED }]) for i in range(len(self._f33f4610dcda_verifications)): v = self._f33f4610dcda_verifications[i] conn.execute( verifications_table.insert(), [{"uuid": "verification-uuid-%s" % i, "deployment_uuid": self._f33f4610dcda_deployment_uuid, "verifier_uuid": self._f33f4610dcda_verifier_uuid, "status": v["status"], "failures": v["failures"], "unexpected_success": v["unexpected_success"] }]) def _check_f33f4610dcda(self, engine, data): self.assertEqual("f33f4610dcda", api.get_backend().schema_revision(engine=engine)) verifications_table = db_utils.get_table(engine, "verifications") with engine.connect() as conn: verifications = conn.execute( verifications_table.select()).fetchall() self.assertEqual(len(verifications), len(self._f33f4610dcda_verifications)) for i in range(len(verifications)): if "new_status" in self._f33f4610dcda_verifications[i]: self.assertEqual( self._f33f4610dcda_verifications[i]["new_status"], verifications[i].status) conn.execute( verifications_table.delete().where( verifications_table.c.uuid == verifications[i].uuid) ) deployment_table = db_utils.get_table(engine, "deployments") conn.execute( deployment_table.delete().where( deployment_table.c.uuid == self._f33f4610dcda_deployment_uuid) ) def _pre_upgrade_4ef544102ba7(self, engine): self._4ef544102ba7_deployment_uuid = "4ef544102ba7-deploy" self.tasks = { "should-not-be-changed-1": { "uuid": "should-not-be-changed-1", "deployment_uuid": self._4ef544102ba7_deployment_uuid, "validation_result": { "etype": "SomeCls", "msg": "msg", "trace": "Traceback (most recent call last):\n" "File some1.py, line ...\n" "File some2.py, line ...\nSomeCls: msg"}, "status": "finished"}, "should-be-changed-1": { "uuid": "should-be-changed-1", "deployment_uuid": self._4ef544102ba7_deployment_uuid, "validation_result": {}, "status": "failed"}, "should-be-changed-2": { "uuid": 
    def _check_4ef544102ba7(self, engine, data):
        """Verify the 4ef544102ba7 migration of task statuses.

        Tasks named "should-not-be-changed-*" must keep their status, while
        "should-be-changed-1" must move to "crashed" and
        "should-be-changed-2" to "validating". Subtask fixtures are removed
        first so the task rows can be deleted cleanly afterwards.
        """
        self.assertEqual("4ef544102ba7",
                         api.get_backend().schema_revision(engine=engine))
        org_tasks = self.tasks
        task_table = db_utils.get_table(engine, "tasks")
        subtask_table = db_utils.get_table(engine, "subtasks")
        with engine.connect() as conn:
            # Remove subtask fixtures before checking/deleting their tasks.
            subtasks_found = conn.execute(
                subtask_table.select()).fetchall()
            for subtask in subtasks_found:
                conn.execute(
                    subtask_table.delete().where(
                        subtask_table.c.id == subtask.id)
                )
        with engine.connect() as conn:
            tasks_found = conn.execute(
                task_table.select()).fetchall()
            self.assertEqual(3, len(tasks_found))
            for task in tasks_found:
                self.assertIn("uuid", task)
                self.assertIn("status", task)
                if task.status != org_tasks[task.uuid]["status"]:
                    # Status changed: only the "should-be-changed-*" rows may
                    # change, and only to the expected new status.
                    if task.uuid.startswith("should-not-be-changed"):
                        self.fail("Config of deployment '%s' is changes, but "
                                  "should not." % task.uuid)
                    if task.status != "crashed" and task.uuid == (
                            "should-be-changed-1"):
                        self.fail("Task '%s' status should be changed to "
                                  "crashed." % task.uuid)
                    if task.status != "validating" and task.uuid == (
                            "should-be-changed-2"):
                        self.fail("Task '%s' status should be changed to "
                                  "validating." % task.uuid)
                else:
                    # Status unchanged: that is only valid for the
                    # "should-not-be-changed-*" fixtures.
                    if not task.uuid.startswith("should-not-be-changed"):
                        self.fail("Config of deployment '%s' is not changes, "
                                  "but should." % task.uuid)
                conn.execute(
                    task_table.delete().where(
                        task_table.c.id == task.id)
                )
            # Drop the deployment created by the pre-upgrade step.
            deployment_table = db_utils.get_table(engine, "deployments")
            conn.execute(
                deployment_table.delete().where(
                    deployment_table.c.uuid ==
                    self._4ef544102ba7_deployment_uuid)
            )
json.loads(dep_obj.credentials)) conn.execute( deployment_table.delete().where( deployment_table.c.uuid == deployment)) rally-0.9.1/tests/unit/common/db/test_api.py0000664000567000056710000012370013073417720022174 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for db.api layer.""" import copy import datetime as dt import json import ddt import mock from six import moves from rally.common import db from rally.common.db import api as db_api from rally import consts from rally import exceptions from tests.unit import test NOW = dt.datetime.now() class FakeSerializable(object): def __init__(self, **kwargs): self.dict = {} self.dict.update(kwargs) def _as_dict(self): return self.dict @ddt.ddt class SerializeTestCase(test.DBTestCase): def setUp(self): super(SerializeTestCase, self).setUp() @ddt.data( {"data": 1, "serialized": 1}, {"data": 1.1, "serialized": 1.1}, {"data": "a string", "serialized": "a string"}, {"data": NOW, "serialized": NOW}, {"data": {"k1": 1, "k2": 2}, "serialized": {"k1": 1, "k2": 2}}, {"data": [1, "foo"], "serialized": [1, "foo"]}, {"data": ["foo", 1, {"a": "b"}], "serialized": ["foo", 1, {"a": "b"}]}, {"data": FakeSerializable(a=1), "serialized": {"a": 1}}, {"data": [FakeSerializable(a=1), FakeSerializable(b=FakeSerializable(c=1))], "serialized": [{"a": 1}, {"b": {"c": 1}}]}, ) @ddt.unpack def test_serialize(self, data, serialized): @db_api.serialize def 
fake_method(): return data results = fake_method() self.assertEqual(results, serialized) def test_serialize_value_error(self): @db_api.serialize def fake_method(): class Fake(object): pass return Fake() self.assertRaises(ValueError, fake_method) class ConnectionTestCase(test.DBTestCase): def test_schema_revision(self): rev = db.schema_revision() drev = db.schema_revision(detailed=True) self.assertEqual(drev["revision"], rev) self.assertEqual(drev["revision"], drev["current_head"]) class TasksTestCase(test.DBTestCase): def setUp(self): super(TasksTestCase, self).setUp() self.deploy = db.deployment_create({}) def _get_task(self, uuid): return db.task_get(uuid) def _get_task_status(self, uuid): return db.task_get_status(uuid) def _create_task(self, values=None): values = values or {} if "deployment_uuid" not in values: values["deployment_uuid"] = self.deploy["uuid"] return db.task_create(values) def test_task_get_not_found(self): self.assertRaises(exceptions.TaskNotFound, db.task_get, "f885f435-f6ca-4f3e-9b3e-aeb6837080f2") def test_task_get_status_not_found(self): self.assertRaises(exceptions.TaskNotFound, db.task_get_status, "f885f435-f6ca-4f3e-9b3e-aeb6837080f2") def test_task_create(self): task = self._create_task() db_task = self._get_task(task["uuid"]) self.assertIsNotNone(db_task["uuid"]) self.assertIsNotNone(db_task["id"]) self.assertEqual(db_task["status"], consts.TaskStatus.INIT) def test_task_create_with_tag(self): task = self._create_task(values={"tag": "test_tag"}) db_task = self._get_task(task["uuid"]) self.assertIsNotNone(db_task["uuid"]) self.assertIsNotNone(db_task["id"]) self.assertEqual(db_task["status"], consts.TaskStatus.INIT) self.assertEqual(db_task["tag"], "test_tag") def test_task_create_without_uuid(self): _uuid = "19be8589-48b0-4af1-a369-9bebaaa563ab" task = self._create_task({"uuid": _uuid}) db_task = self._get_task(task["uuid"]) self.assertEqual(db_task["uuid"], _uuid) def test_task_update(self): task = self._create_task({}) 
db.task_update(task["uuid"], {"status": consts.TaskStatus.CRASHED}) db_task = self._get_task(task["uuid"]) self.assertEqual(db_task["status"], consts.TaskStatus.CRASHED) def test_task_update_with_tag(self): task = self._create_task({}) db.task_update(task["uuid"], { "status": consts.TaskStatus.CRASHED, "tag": "test_tag" }) db_task = self._get_task(task["uuid"]) self.assertEqual(db_task["status"], consts.TaskStatus.CRASHED) self.assertEqual(db_task["tag"], "test_tag") def test_task_update_not_found(self): self.assertRaises(exceptions.TaskNotFound, db.task_update, "fake_uuid", {}) def test_task_update_status(self): self.assertRaises(exceptions.RallyException, db.task_update_status, "fake_uuid", consts.TaskStatus.RUNNING, [consts.TaskStatus.RUNNING]) def test_task_update_all_stats(self): _uuid = self._create_task({})["uuid"] for status in consts.TaskStatus: db.task_update(_uuid, {"status": status}) db_task = self._get_task(_uuid) self.assertEqual(db_task["status"], status) def test_task_list_empty(self): self.assertEqual([], db.task_list()) def test_task_list(self): INIT = consts.TaskStatus.INIT task_init = sorted(self._create_task()["uuid"] for i in moves.range(3)) FINISHED = consts.TaskStatus.FINISHED task_finished = sorted(self._create_task( {"status": FINISHED, "deployment_uuid": self.deploy["uuid"]} )["uuid"] for i in moves.range(3)) task_all = sorted(task_init + task_finished) def get_uuids(status=None, deployment=None): tasks = db.task_list(status=status, deployment=deployment) return sorted(task["uuid"] for task in tasks) self.assertEqual(task_all, get_uuids(None)) self.assertEqual(task_init, get_uuids(status=INIT)) self.assertEqual(task_finished, get_uuids(status=FINISHED)) self.assertRaises(exceptions.DeploymentNotFound, get_uuids, deployment="non-existing-deployment") deleted_task_uuid = task_finished.pop() db.task_delete(deleted_task_uuid) self.assertEqual(task_init, get_uuids(INIT)) self.assertEqual(sorted(task_finished), get_uuids(FINISHED)) def 
test_task_delete(self): task1, task2 = self._create_task()["uuid"], self._create_task()["uuid"] db.task_delete(task1) self.assertRaises(exceptions.TaskNotFound, self._get_task, task1) self.assertEqual(task2, self._get_task(task2)["uuid"]) def test_task_delete_not_found(self): self.assertRaises(exceptions.TaskNotFound, db.task_delete, "da6f820c-b133-4b9f-8534-4c3bcc40724b") def test_task_delete_with_results(self): task_id = self._create_task()["uuid"] key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"} } } data = { "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42 } subtask = db.subtask_create(task_id, title="foo") workload = db.workload_create(task_id, subtask["uuid"], key) db.workload_data_create(task_id, workload["uuid"], 0, {"raw": []}) db.workload_set_results(workload["uuid"], data) res = db.task_result_get_all_by_uuid(task_id) self.assertEqual(len(res), 1) db.task_delete(task_id) res = db.task_result_get_all_by_uuid(task_id) self.assertEqual(len(res), 0) def test_task_delete_by_uuid_and_status(self): values = { "status": consts.TaskStatus.FINISHED, } task1 = self._create_task(values=values)["uuid"] task2 = self._create_task(values=values)["uuid"] db.task_delete(task1, status=consts.TaskStatus.FINISHED) self.assertRaises(exceptions.TaskNotFound, self._get_task, task1) self.assertEqual(task2, self._get_task(task2)["uuid"]) def test_task_delete_by_uuid_and_status_invalid(self): task = self._create_task( values={"status": consts.TaskStatus.INIT})["uuid"] self.assertRaises(exceptions.TaskInvalidStatus, db.task_delete, task, status=consts.TaskStatus.FINISHED) def test_task_delete_by_uuid_and_status_not_found(self): self.assertRaises(exceptions.TaskNotFound, db.task_delete, "fcd0483f-a405-44c4-b712-99c9e52254eb", status=consts.TaskStatus.FINISHED) def test_task_result_get_all_by_uuid(self): 
task1 = self._create_task()["uuid"] task2 = self._create_task()["uuid"] key = { "name": "atata", "pos": 0, "kw": { "args": {"task_id": "task_id"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"}, "hooks": [], } } data = { "sla": [{"success": True}], "load_duration": 13, "full_duration": 42, "hooks": [], } for task_id in (task1, task2): key["kw"]["args"]["task_id"] = task_id data["sla"][0] = {"success": True} subtask = db.subtask_create(task_id, title="foo") workload = db.workload_create(task_id, subtask["uuid"], key) db.workload_data_create(task_id, workload["uuid"], 0, {"raw": []}) db.workload_set_results(workload["uuid"], data) for task_id in (task1, task2): res = db.task_result_get_all_by_uuid(task_id) key["kw"]["args"]["task_id"] = task_id data["sla"][0] = {"success": True} data["raw"] = [] self.assertEqual(len(res), 1) self.assertEqual(res[0]["key"], key) self.assertEqual(res[0]["data"], data) def test_task_get_detailed(self): validation_result = { "etype": "FooError", "msg": "foo message", "trace": "foo t/b", } task1 = self._create_task({"validation_result": validation_result, "tag": "bar"}) key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"}, "hooks": [], } } data = { "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42, "hooks": [], } subtask = db.subtask_create(task1["uuid"], title="foo") workload = db.workload_create(task1["uuid"], subtask["uuid"], key) db.workload_data_create( task1["uuid"], workload["uuid"], 0, {"raw": []}) db.workload_set_results(workload["uuid"], data) task1_full = db.task_get_detailed(task1["uuid"]) self.assertEqual(validation_result, json.loads(task1_full["verification_log"])) self.assertEqual("bar", task1_full["tag"]) results = task1_full["results"] self.assertEqual(1, len(results)) self.assertEqual(key, results[0]["key"]) 
self.assertEqual({ "raw": [], "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42, "hooks": [], }, results[0]["data"]) def test_task_get_detailed_last(self): task1 = self._create_task() key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"}, "hooks": [], } } data = { "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42, "hooks": [], } subtask = db.subtask_create(task1["uuid"], title="foo") workload = db.workload_create(task1["uuid"], subtask["uuid"], key) db.workload_data_create( task1["uuid"], workload["uuid"], 0, {"raw": []}) db.workload_set_results(workload["uuid"], data) task1_full = db.task_get_detailed_last() results = task1_full["results"] self.assertEqual(1, len(results)) self.assertEqual(key, results[0]["key"]) self.assertEqual({ "raw": [], "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42, "hooks": [], }, results[0]["data"]) def test_task_result_create(self): task_id = self._create_task()["uuid"] key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "hooks": [{"name": "foo_hook", "args": "bar", "trigger": {"name": "foo_trigger", "args": "baz"}}], "runner": {"r": "R", "type": "T"} } } raw_data = { "raw": [ {"error": "anError", "duration": 0, "timestamp": 1}, {"duration": 1, "timestamp": 1}, {"duration": 2, "timestamp": 2} ], } data = { "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42, "hooks": [ {"config": {"name": "foo_hook", "args": "bar", "trigger": {"name": "foo_trigger", "args": "baz"}}, "results": [ {"status": "success", "started_at": 10.0, "finished_at": 11.0, 
"triggered_by": {"time": 5}}], "summary": {}} ], } subtask = db.subtask_create(task_id, title="foo") workload = db.workload_create(task_id, subtask["uuid"], key) db.workload_data_create(task_id, workload["uuid"], 0, raw_data) db.workload_set_results(workload["uuid"], data) res = db.task_result_get_all_by_uuid(task_id) self.assertEqual(1, len(res)) self.assertEqual(raw_data["raw"], res[0]["data"]["raw"]) self.assertEqual(key, res[0]["key"]) def test_task_multiple_raw_result_create(self): task_id = self._create_task()["uuid"] key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"}, "hooks": [], } } subtask = db.subtask_create(task_id, title="foo") workload = db.workload_create(task_id, subtask["uuid"], key) db.workload_data_create(task_id, workload["uuid"], 0, { "raw": [ {"error": "anError", "timestamp": 10, "duration": 1}, {"duration": 1, "timestamp": 10, "duration": 1}, {"duration": 2, "timestamp": 10, "duration": 1}, {"duration": 3, "timestamp": 10, "duration": 1}, ], }) db.workload_data_create(task_id, workload["uuid"], 1, { "raw": [ {"error": "anError2", "timestamp": 10, "duration": 1}, {"duration": 6, "timestamp": 10, "duration": 1}, {"duration": 5, "timestamp": 10, "duration": 1}, {"duration": 4, "timestamp": 10, "duration": 1}, ], }) db.workload_data_create(task_id, workload["uuid"], 2, { "raw": [ {"duration": 7, "timestamp": 10, "duration": 1}, {"duration": 8, "timestamp": 10, "duration": 1}, ], }) db.workload_set_results(workload["uuid"], { "sla": [{"success": True}], "load_duration": 13, "full_duration": 42 }) res = db.task_result_get_all_by_uuid(task_id) self.assertEqual(len(res), 1) self.assertEqual(res[0]["key"], key) self.assertEqual(res[0]["data"], { "raw": [ {"error": "anError", "timestamp": 10, "duration": 1}, {"duration": 1, "timestamp": 10, "duration": 1}, {"duration": 2, "timestamp": 10, "duration": 1}, {"duration": 3, "timestamp": 10, "duration": 1}, {"error": 
"anError2", "timestamp": 10, "duration": 1}, {"duration": 6, "timestamp": 10, "duration": 1}, {"duration": 5, "timestamp": 10, "duration": 1}, {"duration": 4, "timestamp": 10, "duration": 1}, {"duration": 7, "timestamp": 10, "duration": 1}, {"duration": 8, "timestamp": 10, "duration": 1}, ], "sla": [{"success": True}], "hooks": [], "load_duration": 13, "full_duration": 42 }) db.task_delete(task_id) res = db.task_result_get_all_by_uuid(task_id) self.assertEqual(len(res), 0) class SubtaskTestCase(test.DBTestCase): def setUp(self): super(SubtaskTestCase, self).setUp() self.deploy = db.deployment_create({}) self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]}) def test_subtask_create(self): subtask = db.subtask_create(self.task["uuid"], title="foo") self.assertEqual("foo", subtask["title"]) self.assertEqual(self.task["uuid"], subtask["task_uuid"]) class WorkloadTestCase(test.DBTestCase): def setUp(self): super(WorkloadTestCase, self).setUp() self.deploy = db.deployment_create({}) self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]}) self.task_uuid = self.task["uuid"] self.subtask = db.subtask_create(self.task_uuid, title="foo") self.subtask_uuid = self.subtask["uuid"] def test_workload_create(self): key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"} } } workload = db.workload_create(self.task_uuid, self.subtask_uuid, key) self.assertEqual("atata", workload["name"]) self.assertEqual(0, workload["position"]) self.assertEqual({"a": "A"}, workload["args"]) self.assertEqual({"c": "C"}, workload["context"]) self.assertEqual({"s": "S"}, workload["sla"]) self.assertEqual({"r": "R", "type": "T"}, workload["runner"]) self.assertEqual("T", workload["runner_type"]) self.assertEqual(self.task_uuid, workload["task_uuid"]) self.assertEqual(self.subtask_uuid, workload["subtask_uuid"]) def test_workload_set_results_with_raw_data(self): key = { "name": "atata", "pos": 0, 
"kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"} } } raw_data = { "raw": [ {"error": "anError", "duration": 0, "timestamp": 1}, {"duration": 1, "timestamp": 1}, {"duration": 2, "timestamp": 2} ], } data = { "sla": [ {"s": "S", "success": True}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42 } workload = db.workload_create(self.task_uuid, self.subtask_uuid, key) db.workload_data_create(self.task_uuid, workload["uuid"], 0, raw_data) workload = db.workload_set_results(workload["uuid"], data) self.assertEqual("atata", workload["name"]) self.assertEqual(0, workload["position"]) self.assertEqual({"a": "A"}, workload["args"]) self.assertEqual({"c": "C"}, workload["context"]) self.assertEqual({"s": "S"}, workload["sla"]) self.assertEqual({"r": "R", "type": "T"}, workload["runner"]) self.assertEqual("T", workload["runner_type"]) self.assertEqual(13, workload["load_duration"]) self.assertEqual(42, workload["full_duration"]) self.assertEqual(0, workload["min_duration"]) self.assertEqual(2, workload["max_duration"]) self.assertEqual(3, workload["total_iteration_count"]) self.assertEqual(1, workload["failed_iteration_count"]) self.assertTrue(workload["pass_sla"]) self.assertEqual([], workload["hooks"]) self.assertEqual(data["sla"], workload["sla_results"]["sla"]) self.assertEqual(self.task_uuid, workload["task_uuid"]) self.assertEqual(self.subtask_uuid, workload["subtask_uuid"]) def test_workload_set_results_empty_raw_data(self): key = { "name": "atata", "pos": 0, "kw": { "args": {"a": "A"}, "context": {"c": "C"}, "sla": {"s": "S"}, "runner": {"r": "R", "type": "T"} } } data = { "sla": [ {"s": "S", "success": False}, {"1": "2", "success": True}, {"a": "A", "success": True} ], "load_duration": 13, "full_duration": 42 } workload = db.workload_create(self.task_uuid, self.subtask_uuid, key) workload = db.workload_set_results(workload["uuid"], data) self.assertEqual("atata", 
class WorkloadDataTestCase(test.DBTestCase):
    """Tests for db.workload_data_create."""

    def setUp(self):
        super(WorkloadDataTestCase, self).setUp()
        # Build the full fixture chain a workload needs:
        # deployment -> task -> subtask -> workload.
        self.deploy = db.deployment_create({})
        self.task = db.task_create({"deployment_uuid": self.deploy["uuid"]})
        self.task_uuid = self.task["uuid"]
        self.subtask = db.subtask_create(self.task_uuid, title="foo")
        self.subtask_uuid = self.subtask["uuid"]
        self.key = {"name": "atata", "pos": 0,
                    "kw": {"runner": {"r": "R", "type": "T"}}}
        self.workload = db.workload_create(self.task_uuid, self.subtask_uuid,
                                           self.key)
        self.workload_uuid = self.workload["uuid"]

    def test_workload_data_create(self):
        # Three iterations, one carrying an "error"; started_at/finished_at
        # are derived from the min timestamp and max timestamp + duration.
        data = {
            "raw": [
                {"error": "anError", "duration": 0, "timestamp": 1},
                {"duration": 1, "timestamp": 1},
                {"duration": 2, "timestamp": 2}
            ]
        }
        workload_data = db.workload_data_create(self.task_uuid,
                                                self.workload_uuid, 0, data)
        self.assertEqual(3, workload_data["iteration_count"])
        self.assertEqual(1, workload_data["failed_iteration_count"])
        self.assertEqual(dt.datetime.fromtimestamp(1),
                         workload_data["started_at"])
        # finished_at = last timestamp (2) + its duration (2).
        self.assertEqual(dt.datetime.fromtimestamp(4),
                         workload_data["finished_at"])
        self.assertEqual(data, workload_data["chunk_data"])
        self.assertEqual(self.task_uuid, workload_data["task_uuid"])
        self.assertEqual(self.workload_uuid, workload_data["workload_uuid"])

    @mock.patch("time.time")
    def test_workload_data_create_empty(self, mock_time):
        # With no iterations both timestamps fall back to "now", which is
        # pinned to 10 via the time.time mock for determinism.
        mock_time.return_value = 10
        data = {"raw": []}
        workload_data = db.workload_data_create(self.task_uuid,
                                                self.workload_uuid, 0, data)
        self.assertEqual(0, workload_data["iteration_count"])
        self.assertEqual(0, workload_data["failed_iteration_count"])
        self.assertEqual(dt.datetime.fromtimestamp(10),
                         workload_data["started_at"])
        self.assertEqual(dt.datetime.fromtimestamp(10),
                         workload_data["finished_at"])
        self.assertEqual(data, workload_data["chunk_data"])
        self.assertEqual(self.task_uuid, workload_data["task_uuid"])
        self.assertEqual(self.workload_uuid, workload_data["workload_uuid"])
self.assertNotEqual(deploy_one["uuid"], deploy_two["uuid"]) self.assertEqual(deploy_two["status"], consts.DeployStatus.DEPLOY_INIT) self.assertEqual(deploy_two["config"], {"opt2": "val2"}) def test_deployment_update(self): credentials = { "openstack": [{"admin": {"foo": "bar"}, "users": ["foo_user"]}]} deploy = db.deployment_create({}) self.assertEqual(deploy["config"], {}) self.assertEqual(deploy["credentials"], {}) update_deploy = db.deployment_update( deploy["uuid"], {"config": {"opt": "val"}, "credentials": copy.deepcopy(credentials)}) self.assertEqual(update_deploy["uuid"], deploy["uuid"]) self.assertEqual(update_deploy["config"], {"opt": "val"}) self.assertEqual(update_deploy["credentials"], credentials) get_deploy = db.deployment_get(deploy["uuid"]) self.assertEqual(get_deploy["uuid"], deploy["uuid"]) self.assertEqual(get_deploy["config"], {"opt": "val"}) self.assertEqual(update_deploy["credentials"], credentials) def test_deployment_update_several(self): # Create a deployment and update it deploy_one = db.deployment_create({}) self.assertEqual(deploy_one["config"], {}) update_deploy_one = db.deployment_update( deploy_one["uuid"], {"config": {"opt1": "val1"}}) self.assertEqual(update_deploy_one["uuid"], deploy_one["uuid"]) self.assertEqual(update_deploy_one["config"], {"opt1": "val1"}) get_deploy_one = db.deployment_get(deploy_one["uuid"]) self.assertEqual(get_deploy_one["uuid"], deploy_one["uuid"]) self.assertEqual(get_deploy_one["config"], {"opt1": "val1"}) # Create another deployment deploy_two = db.deployment_create({}) update_deploy_two = db.deployment_update( deploy_two["uuid"], {"config": {"opt2": "val2"}}) self.assertEqual(update_deploy_two["uuid"], deploy_two["uuid"]) self.assertEqual(update_deploy_two["config"], {"opt2": "val2"}) get_deploy_one_again = db.deployment_get(deploy_one["uuid"]) self.assertEqual(get_deploy_one_again["uuid"], deploy_one["uuid"]) self.assertEqual(get_deploy_one_again["config"], {"opt1": "val1"}) def 
test_deployment_get(self): deploy_one = db.deployment_create({"config": {"opt1": "val1"}}) deploy_two = db.deployment_create({"config": {"opt2": "val2"}}) get_deploy_one = db.deployment_get(deploy_one["uuid"]) get_deploy_two = db.deployment_get(deploy_two["uuid"]) self.assertNotEqual(get_deploy_one["uuid"], get_deploy_two["uuid"]) self.assertEqual(get_deploy_one["config"], {"opt1": "val1"}) self.assertEqual(get_deploy_two["config"], {"opt2": "val2"}) def test_deployment_get_not_found(self): self.assertRaises(exceptions.DeploymentNotFound, db.deployment_get, "852e932b-9552-4b2d-89e3-a5915780a5e3") def test_deployment_list(self): deploy_one = db.deployment_create({}) deploy_two = db.deployment_create({}) deploys = db.deployment_list() self.assertEqual(sorted([deploy_one["uuid"], deploy_two["uuid"]]), sorted([deploy["uuid"] for deploy in deploys])) def test_deployment_list_with_status_and_name(self): deploy_one = db.deployment_create({}) deploy_two = db.deployment_create({ "config": {}, "status": consts.DeployStatus.DEPLOY_FAILED, }) deploy_three = db.deployment_create({"name": "deployment_name"}) deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_INIT) deploys.sort(key=lambda x: x["id"]) self.assertEqual(len(deploys), 2) self.assertEqual(deploys[0]["uuid"], deploy_one["uuid"]) deploys = db.deployment_list(status=consts.DeployStatus.DEPLOY_FAILED) self.assertEqual(len(deploys), 1) self.assertEqual(deploys[0]["uuid"], deploy_two["uuid"]) deploys = db.deployment_list( status=consts.DeployStatus.DEPLOY_FINISHED) self.assertEqual(len(deploys), 0) deploys = db.deployment_list(name="deployment_name") self.assertEqual(deploys[0]["uuid"], deploy_three["uuid"]) self.assertEqual(len(deploys), 1) def test_deployment_list_parent(self): deploy = db.deployment_create({}) subdeploy1 = db.deployment_create({"parent_uuid": deploy["uuid"]}) subdeploy2 = db.deployment_create({"parent_uuid": deploy["uuid"]}) self.assertEqual( [deploy["uuid"]], [d["uuid"] for d in 
db.deployment_list()]) subdeploys = db.deployment_list(parent_uuid=deploy["uuid"]) self.assertEqual(set([subdeploy1["uuid"], subdeploy2["uuid"]]), set([d["uuid"] for d in subdeploys])) def test_deployment_delete(self): deploy_one = db.deployment_create({}) deploy_two = db.deployment_create({}) db.deployment_delete(deploy_two["uuid"]) deploys = db.deployment_list() self.assertEqual(len(deploys), 1) self.assertEqual(deploys[0]["uuid"], deploy_one["uuid"]) def test_deployment_delete_not_found(self): self.assertRaises(exceptions.DeploymentNotFound, db.deployment_delete, "5f2883be-46c8-4c4b-a4fe-988ad0c6b20a") def test_deployment_delete_is_busy(self): deployment = db.deployment_create({}) db.resource_create({"deployment_uuid": deployment["uuid"]}) db.resource_create({"deployment_uuid": deployment["uuid"]}) self.assertRaises(exceptions.DeploymentIsBusy, db.deployment_delete, deployment["uuid"]) class ResourceTestCase(test.DBTestCase): def test_create(self): deployment = db.deployment_create({}) resource = db.resource_create({ "deployment_uuid": deployment["uuid"], "provider_name": "fakeprovider", "type": "faketype", }) resources = db.resource_get_all(deployment["uuid"]) self.assertTrue(resource["id"]) self.assertEqual(len(resources), 1) self.assertTrue(resource["id"], resources[0]["id"]) self.assertEqual(resource["deployment_uuid"], deployment["uuid"]) self.assertEqual(resource["provider_name"], "fakeprovider") self.assertEqual(resource["type"], "faketype") def test_delete(self): deployment = db.deployment_create({}) res = db.resource_create({"deployment_uuid": deployment["uuid"]}) db.resource_delete(res["id"]) resources = db.resource_get_all(deployment["uuid"]) self.assertEqual(len(resources), 0) def test_delete_not_found(self): self.assertRaises(exceptions.ResourceNotFound, db.resource_delete, 123456789) def test_get_all(self): deployment0 = db.deployment_create({}) deployment1 = db.deployment_create({}) res0 = db.resource_create({"deployment_uuid": 
deployment0["uuid"]}) res1 = db.resource_create({"deployment_uuid": deployment1["uuid"]}) res2 = db.resource_create({"deployment_uuid": deployment1["uuid"]}) resources = db.resource_get_all(deployment1["uuid"]) self.assertEqual(sorted([res1["id"], res2["id"]]), sorted([r["id"] for r in resources])) resources = db.resource_get_all(deployment0["uuid"]) self.assertEqual(len(resources), 1) self.assertEqual(res0["id"], resources[0]["id"]) def test_get_all_by_provider_name(self): deployment = db.deployment_create({}) res_one = db.resource_create({ "deployment_uuid": deployment["uuid"], "provider_name": "one", }) res_two = db.resource_create({ "deployment_uuid": deployment["uuid"], "provider_name": "two", }) resources = db.resource_get_all(deployment["uuid"], provider_name="one") self.assertEqual(len(resources), 1) self.assertEqual(res_one["id"], resources[0]["id"]) resources = db.resource_get_all(deployment["uuid"], provider_name="two") self.assertEqual(len(resources), 1) self.assertEqual(res_two["id"], resources[0]["id"]) def test_get_all_by_provider_type(self): deployment = db.deployment_create({}) res_one = db.resource_create({ "deployment_uuid": deployment["uuid"], "type": "one", }) res_two = db.resource_create({ "deployment_uuid": deployment["uuid"], "type": "two", }) resources = db.resource_get_all(deployment["uuid"], type="one") self.assertEqual(len(resources), 1) self.assertEqual(res_one["id"], resources[0]["id"]) resources = db.resource_get_all(deployment["uuid"], type="two") self.assertEqual(len(resources), 1) self.assertEqual(res_two["id"], resources[0]["id"]) class VerifierTestCase(test.DBTestCase): def test_verifier_create(self): v = db.verifier_create("a", "b", "c", "d", "e", False) self.assertEqual("a", v["name"]) def test_verifier_get(self): v = db.verifier_create("a", "b", "c", "d", "e", False) self.assertEqual("a", db.verifier_get(v["uuid"])["name"]) def test_verifier_get_raise_exc(self): self.assertRaises(exceptions.ResourceNotFound, db.verifier_get, 
"1234") def test_verifier_list(self): v1 = db.verifier_create("a1", "b1", "c1", "d1", "e1", False) v2 = db.verifier_create("a2", "b2", "c2", "d2", "e2", False) vs = db.verifier_list() self.assertEqual(sorted([v1["uuid"], v2["uuid"]]), sorted([v["uuid"] for v in vs])) v1 = db.verifier_update(v1["uuid"], status="foo") vs = db.verifier_list(status="foo") self.assertEqual(len(vs), 1) self.assertEqual(v1["uuid"], vs[0]["uuid"]) def test_verifier_delete(self): v = db.verifier_create("a", "b", "c", "d", "e", False) db.verifier_delete(v["uuid"]) self.assertRaises(exceptions.ResourceNotFound, db.verifier_delete, v["uuid"]) def test_verification_update(self): v = db.verifier_create("a", "b", "c", "d", "e", False) v = db.verifier_update(v["uuid"], source="foo", version="bar") self.assertEqual("foo", v["source"]) self.assertEqual("bar", v["version"]) class VerificationTestCase(test.DBTestCase): def setUp(self): super(VerificationTestCase, self).setUp() self.verifier = db.verifier_create("a", "b", "c", "d", "e", False) self.deploy = db.deployment_create({}) def _create_verification(self): verifier_uuid = self.verifier["uuid"] deployment_uuid = self.deploy["uuid"] return db.verification_create(verifier_uuid, deployment_uuid, [], {}) def test_verification_create(self): v = self._create_verification() self.assertEqual(self.verifier["uuid"], v["verifier_uuid"]) self.assertEqual(self.deploy["uuid"], v["deployment_uuid"]) def test_verification_get(self): v = db.verification_get(self._create_verification()["uuid"]) self.assertEqual(self.verifier["uuid"], v["verifier_uuid"]) self.assertEqual(self.deploy["uuid"], v["deployment_uuid"]) def test_verification_get_raise_exc(self): self.assertRaises(exceptions.ResourceNotFound, db.verification_get, "1234") def test_verification_list(self): deploy = db.deployment_create({}) v1 = db.verification_create( self.verifier["uuid"], deploy["uuid"], ["foo", "bar"], {}) v2 = self._create_verification() vs = db.verification_list(self.verifier["uuid"]) 
self.assertEqual(sorted([v1["uuid"], v2["uuid"]]), sorted([v["uuid"] for v in vs])) vs = db.verification_list(self.verifier["uuid"], deploy["uuid"]) self.assertEqual(len(vs), 1) self.assertEqual(v1["uuid"], vs[0]["uuid"]) vs = db.verification_list(tags=["bar"]) self.assertEqual(len(vs), 1) self.assertEqual(v1["uuid"], vs[0]["uuid"]) v2 = db.verification_update(v2["uuid"], status="foo") vs = db.verification_list(status="foo") self.assertEqual(len(vs), 1) self.assertEqual(v2["uuid"], vs[0]["uuid"]) def test_verification_delete(self): v = self._create_verification() db.verification_delete(v["uuid"]) self.assertRaises(exceptions.ResourceNotFound, db.verification_delete, v["uuid"]) def test_verification_update(self): v = self._create_verification() v = db.verification_update(v["uuid"], status="foo", tests_count=10) self.assertEqual("foo", v["status"]) self.assertEqual(10, v["tests_count"]) class WorkerTestCase(test.DBTestCase): def setUp(self): super(WorkerTestCase, self).setUp() self.worker = db.register_worker({"hostname": "test"}) def test_register_worker_duplicate(self): self.assertRaises(exceptions.WorkerAlreadyRegistered, db.register_worker, {"hostname": "test"}) def test_get_worker(self): worker = db.get_worker("test") self.assertEqual(self.worker["id"], worker["id"]) self.assertEqual(self.worker["hostname"], worker["hostname"]) def test_get_worker_not_found(self): self.assertRaises(exceptions.WorkerNotFound, db.get_worker, "notfound") def test_unregister_worker(self): db.unregister_worker("test") self.assertRaises(exceptions.WorkerNotFound, db.get_worker, "test") def test_unregister_worker_not_found(self): self.assertRaises(exceptions.WorkerNotFound, db.unregister_worker, "fake") def test_update_worker(self): db.update_worker("test") worker = db.get_worker("test") self.assertNotEqual(self.worker["updated_at"], worker["updated_at"]) def test_update_worker_not_found(self): self.assertRaises(exceptions.WorkerNotFound, db.update_worker, "fake") 
rally-0.9.1/tests/unit/common/test_fileutils.py0000664000567000056710000000560013073417716023041 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import mock from rally.common import fileutils from tests.unit import test class FileUtilsTestCase(test.TestCase): @mock.patch("os.path.exists", return_value=True) @mock.patch.dict("os.environ", values={}, clear=True) def test_load_env_vile(self, mock_exists): file_data = ["FAKE_ENV=fake_env\n"] with mock.patch("rally.common.fileutils.open", mock.mock_open( read_data=file_data), create=True) as mock_file: mock_file.return_value.readlines.return_value = file_data fileutils.load_env_file("path_to_file") self.assertIn("FAKE_ENV", os.environ) mock_file.return_value.readlines.assert_called_once_with() @mock.patch("os.path.exists", return_value=True) def test_update_env_file(self, mock_exists): file_data = ["FAKE_ENV=old_value\n", "FAKE_ENV2=any\n"] with mock.patch("rally.common.fileutils.open", mock.mock_open( read_data=file_data), create=True) as mock_file: mock_file.return_value.readlines.return_value = file_data fileutils.update_env_file("path_to_file", "FAKE_ENV", "new_value") calls = [mock.call("FAKE_ENV2=any\n"), mock.call( "FAKE_ENV=new_value")] mock_file.return_value.readlines.assert_called_once_with() mock_file.return_value.write.assert_has_calls(calls) class PackDirTestCase(test.TestCase): @mock.patch("os.walk") 
@mock.patch("zipfile.ZipFile") def test_pack_dir(self, mock_zip_file, mock_walk): mock_walk.side_effect = [ [("foo_root", [], ["file1", "file2", "file3"])]] fileutils.pack_dir("rally-jobs/extra/murano/HelloReporter", "fake_dir/package.zip") mock_zip_file.assert_called_once_with("fake_dir/package.zip", mode="w") mock_walk.assert_called_once_with( "rally-jobs/extra/murano/HelloReporter") mock_zip_file.return_value.assert_has_calls( [mock.call.write("foo_root/file1", "../../../../foo_root/file1"), mock.call.write("foo_root/file2", "../../../../foo_root/file2"), mock.call.write("foo_root/file3", "../../../../foo_root/file3"), mock.call.close()]) rally-0.9.1/tests/unit/common/plugin/0000775000567000056710000000000013073420067020716 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/plugin/__init__.py0000664000567000056710000000000013073417716023024 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/common/plugin/test_plugin.py0000664000567000056710000001460013073417720023630 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common.plugin import plugin from rally import exceptions from tests.unit import test class PluginModuleTestCase(test.TestCase): def test_deprecated_func(self): @plugin.deprecated("some", "0.0.1") @plugin.configure(name="deprecated_func_plugin_test") @plugin.from_func() def func(): return 42 self.assertEqual("deprecated_func_plugin_test", func.get_name()) self.assertEqual({"reason": "some", "rally_version": "0.0.1"}, func.is_deprecated()) self.assertEqual(42, func()) def test_configure(self): @plugin.configure(name="configure_func_plugin_test") @plugin.from_func() def func(a): return a self.assertEqual("configure_func_plugin_test", func.get_name()) self.assertFalse(func.is_hidden()) self.assertEqual(42, func(42)) def test_deprecated_cls(self): @plugin.deprecated("God why?", "0.0.2") @plugin.configure(name="deprecated_class_plugin_check") class MyPlugin(plugin.Plugin): pass self.assertEqual({"reason": "God why?", "rally_version": "0.0.2"}, MyPlugin.is_deprecated()) def test_configure_cls(self): @plugin.configure(name="get_name_class_plugin") class MyPlugin(plugin.Plugin): pass self.assertEqual("get_name_class_plugin", MyPlugin.get_name()) def test_configure_different_bases(self): name = "test_configure_different_bases" @plugin.base() class OneBase(plugin.Plugin): pass @plugin.base() class SecondBase(plugin.Plugin): pass @plugin.configure(name, namespace=name) class A(OneBase): pass @plugin.configure(name, namespace=name) class B(SecondBase): pass self.assertEqual(A, OneBase.get(name)) self.assertEqual(B, SecondBase.get(name)) def test_get_multiple_chooses(self): name = "test_get_multiple_chooses" @plugin.base() class OneBase(plugin.Plugin): pass @plugin.base() class SecondBase(plugin.Plugin): pass @plugin.configure(name, namespace=name) class A(OneBase): pass @plugin.configure(name, namespace=name) class B(SecondBase): pass self.assertRaises(exceptions.MultipleMatchesFound, plugin.Plugin.get, name, name) def test_from_func(self): @plugin.from_func() def 
func(): return 42 missing = [field for field in set(dir(plugin.Plugin)) - set(dir(func)) if not field.startswith("__")] self.assertEqual([], missing) self.assertTrue(issubclass(func._plugin, plugin.Plugin)) self.assertEqual(42, func()) def test_from_func_with_basecls(self): class FakeFuncBasePlugin(plugin.Plugin): pass @plugin.from_func(FakeFuncBasePlugin) def func(): return 43 self.assertTrue(issubclass(func._plugin, FakeFuncBasePlugin)) self.assertEqual(43, func()) def test_from_func_with_bad_basecls(self): class FakeFuncBasePlugin(object): pass self.assertRaises(TypeError, plugin.from_func, FakeFuncBasePlugin) @plugin.configure(name="test_base_plugin") class BasePlugin(plugin.Plugin): pass @plugin.configure(name="test_some_plugin") class SomePlugin(BasePlugin): pass @plugin.configure(name="test_hidden_plugin", hidden=True) class HiddenPlugin(BasePlugin): pass @plugin.deprecated("some_reason", "0.1.1") @plugin.configure(name="test_deprecated_plugin") class DeprecatedPlugin(BasePlugin): pass class NotInitedPlugin(BasePlugin): pass class PluginTestCase(test.TestCase): def test_unregister(self): @plugin.configure(name="test_some_temp_plugin") class SomeTempPlugin(BasePlugin): pass SomeTempPlugin.unregister() self.assertRaises(exceptions.PluginNotFound, BasePlugin.get, "test_some_temp_plugin") def test_get(self): self.assertEqual(SomePlugin, BasePlugin.get("test_some_plugin")) def test_get_hidden(self): self.assertEqual(HiddenPlugin, BasePlugin.get("test_hidden_plugin", allow_hidden=True)) def test_get_hidden_not_found(self): self.assertRaises(exceptions.PluginNotFound, BasePlugin.get, "test_hidden_plugin") def test_get_not_found(self): self.assertRaises(exceptions.PluginNotFound, BasePlugin.get, "non_existing") def test_get_multiple_found(self): @plugin.configure("test_2_plugins_with_same_name") class A(plugin.Plugin): pass class B(plugin.Plugin): pass self.assertRaises(exceptions.PluginWithSuchNameExists, plugin.configure("test_2_plugins_with_same_name"), B) def 
test_get_name(self): self.assertEqual("test_some_plugin", SomePlugin.get_name()) def test_get_all(self): self.assertEqual(set([SomePlugin, DeprecatedPlugin]), set(BasePlugin.get_all())) self.assertEqual([], SomePlugin.get_all()) def test_get_all_hidden(self): self.assertEqual(set([SomePlugin, DeprecatedPlugin, HiddenPlugin]), set(BasePlugin.get_all(allow_hidden=True))) def test_is_deprecated(self): self.assertFalse(SomePlugin.is_deprecated()) self.assertEqual(DeprecatedPlugin.is_deprecated(), {"reason": "some_reason", "rally_version": "0.1.1"}) rally-0.9.1/tests/unit/common/plugin/test_meta.py0000664000567000056710000001374413073417716023275 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from rally.common.plugin import meta from tests.unit import test class TestMetaMixinTestCase(test.TestCase): def test_meta_is_inited(self): class Meta(meta.MetaMixin): pass class SubMeta(Meta): pass self.assertRaises(ReferenceError, Meta._meta_is_inited) self.assertFalse(Meta._meta_is_inited(raise_exc=False)) self.assertRaises(ReferenceError, SubMeta._meta_is_inited) self.assertFalse(SubMeta._meta_is_inited(raise_exc=False)) Meta._meta_init() self.assertTrue(Meta._meta_is_inited()) self.assertTrue(Meta._meta_is_inited(raise_exc=False)) self.assertRaises(ReferenceError, SubMeta._meta_is_inited) self.assertFalse(SubMeta._meta_is_inited(raise_exc=False)) SubMeta._meta_init() self.assertTrue(SubMeta._meta_is_inited()) self.assertTrue(SubMeta._meta_is_inited(raise_exc=False)) def test_meta_clear(self): class Meta(meta.MetaMixin): pass Meta._meta_init() Meta._meta_set("aaa", 42) meta_ref = Meta._meta Meta._meta_clear() self.assertRaises(AttributeError, getattr, Meta, "_meta") self.assertEqual({}, meta_ref) def test_meta_set_and_get(self): class Meta(meta.MetaMixin): pass Meta._meta_init() Meta._meta_set("aaa", 42) self.assertEqual(Meta._meta_get("aaa"), 42) def test_meta_get_default(self): class Meta(meta.MetaMixin): pass Meta._meta_init() self.assertEqual(Meta._meta_get("b", 42), 42) def test_meta_get_if_is_not_inited(self): class Meta(meta.MetaMixin): pass self.assertRaises(ReferenceError, Meta._meta_get, "any") def test_meta_set_if_is_not_inited(self): class Meta(meta.MetaMixin): pass self.assertRaises(ReferenceError, Meta._meta_set, "a", 1) def test_meta_setdefault(self): class Meta(meta.MetaMixin): pass self.assertRaises(ReferenceError, Meta._meta_setdefault, "any", 42) Meta._meta_init() Meta._meta_setdefault("any", 42) self.assertEqual(42, Meta._meta_get("any")) Meta._meta_setdefault("any", 2) self.assertEqual(42, Meta._meta_get("any")) def test_default_meta(self): class Meta(meta.MetaMixin): DEFAULT_META = {"foo": "bar"} class SubMeta(Meta): pass class 
SubMetaWithDefault(Meta): DEFAULT_META = {"foo": "spam"} class SubSubMeta(SubMeta): DEFAULT_META = {"baz": "eggs"} Meta._meta_init() SubMeta._meta_init() SubMetaWithDefault._meta_init() SubSubMeta._meta_init() self.assertEqual("bar", Meta._meta_get("foo")) self.assertEqual("bar", SubMeta._meta_get("foo")) self.assertEqual("spam", SubMetaWithDefault._meta_get("foo")) self.assertEqual("bar", SubSubMeta._meta_get("foo")) self.assertEqual("eggs", SubSubMeta._meta_get("baz")) self.assertIsNone(Meta._meta_get("baz")) self.assertIsNone(SubMeta._meta_get("baz")) self.assertIsNone(SubMetaWithDefault._meta_get("baz")) def test_default_meta_change(self): class Meta(meta.MetaMixin): DEFAULT_META = {"foo": []} class SubMeta(Meta): pass Meta._meta_init() SubMeta._meta_init() self.assertEqual([], Meta._meta_get("foo")) self.assertEqual([], SubMeta._meta_get("foo")) SubMeta._meta_get("foo").append("bar") self.assertEqual([], Meta._meta_get("foo")) self.assertEqual(["bar"], SubMeta._meta_get("foo")) Meta._meta_get("foo").append("baz") self.assertEqual(["baz"], Meta._meta_get("foo")) self.assertEqual(["bar"], SubMeta._meta_get("foo")) def test_default_meta_validators(self): class A(meta.MetaMixin): DEFAULT_META = {"validators": ["a"]} class B(A): DEFAULT_META = {"validators": ["b", "foo"]} class C(A): DEFAULT_META = {"validators": ["c", "foo"]} class D(B, C): DEFAULT_META = {"validators": ["d"]} A._meta_init() B._meta_init() C._meta_init() D._meta_init() self.assertEqual(["a"], A._meta_get("validators")) self.assertEqual(["a", "b", "foo"], B._meta_get("validators")) self.assertEqual(["a", "c", "foo"], C._meta_get("validators")) self.assertEqual(["a", "c", "foo", "b", "foo", "d"], D._meta_get("validators")) def test_default_meta_context(self): class A(meta.MetaMixin): DEFAULT_META = {"context": {"foo": "a"}} class B(A): DEFAULT_META = {"context": {"foo": "b", "baz": "b"}} class C(A): DEFAULT_META = {"context": {"foo": "c", "spam": "c"}} class D(B, C): DEFAULT_META = {"context": 
{"bar": "d"}} A._meta_init() B._meta_init() C._meta_init() D._meta_init() self.assertEqual({"foo": "a"}, A._meta_get("context")) self.assertEqual({"foo": "b", "baz": "b"}, B._meta_get("context")) self.assertEqual({"foo": "c", "spam": "c"}, C._meta_get("context")) self.assertEqual({"foo": "b", "baz": "b", "spam": "c", "bar": "d"}, D._meta_get("context")) rally-0.9.1/tests/unit/common/plugin/test_info.py0000664000567000056710000000544313073417716023277 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.common.plugin import info from tests.unit import test class DocstringTestCase(test.TestCase): def test_parse_complete_docstring(self): docstring = """One-line description. Multi- line- description. :param p1: Param 1 description. :param p2: Param 2 description. :returns: Return value description. """ expected = { "short_description": "One-line description.", "long_description": "Multi-\nline-\ndescription.", "params": [{"name": "p1", "doc": "Param 1 description.\n"}, {"name": "p2", "doc": "Param 2\n" "description.\n"}], "returns": "Return value\ndescription." } self.assertEqual(expected, info.parse_docstring(docstring)) def test_parse_incomplete_docstring(self): docstring = """One-line description. :param p1: Param 1 description. :param p2: Param 2 description. 
""" expected = { "short_description": "One-line description.", "long_description": "", "params": [{"name": "p1", "doc": "Param 1 description.\n"}, {"name": "p2", "doc": "Param 2\n" "description.\n"}], "returns": "" } self.assertEqual(expected, info.parse_docstring(docstring)) def test_parse_docstring_with_no_params(self): docstring = """One-line description. Multi- line- description. :returns: Return value description. """ expected = { "short_description": "One-line description.", "long_description": "Multi-\nline-\ndescription.", "params": [], "returns": "Return value\ndescription." } self.assertEqual(expected, info.parse_docstring(docstring)) def test_parse_docstring_short_only(self): docstring = """One-line description.""" expected = { "short_description": "One-line description.", "long_description": "", "params": [], "returns": "" } self.assertEqual(expected, info.parse_docstring(docstring)) rally-0.9.1/tests/unit/common/plugin/test_discover.py0000664000567000056710000001004013073417716024147 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.common.plugin import discover from tests.unit import test DISCOVER = "rally.common.plugin.discover" class IterSubclassesTestCase(test.TestCase): def test_itersubclasses(self): class A(object): pass class B(A): pass class C(A): pass class D(C): pass self.assertEqual([B, C, D], list(discover.itersubclasses(A))) class LoadExtraModulesTestCase(test.TestCase): @mock.patch("%s.os.path.isdir" % DISCOVER, return_value=True) @mock.patch("%s.imp.load_module" % DISCOVER) @mock.patch("%s.imp.find_module" % DISCOVER, return_value=(mock.MagicMock(), None, None)) @mock.patch("%s.os.walk" % DISCOVER, return_value=[ ("/somewhere", ("/subdir", ), ("plugin1.py", )), ("/somewhere/subdir", ("/subsubdir", ), ("plugin2.py", "withoutextension")), ("/somewhere/subdir/subsubdir", [], ("plugin3.py", ))]) def test_load_plugins_from_dir_successful(self, mock_os_walk, mock_find_module, mock_load_module, mock_isdir): test_path = "/somewhere" discover.load_plugins(test_path) expected = [ mock.call("plugin1", ["/somewhere"]), mock.call("plugin2", ["/somewhere/subdir"]), mock.call("plugin3", ["/somewhere/subdir/subsubdir"]) ] self.assertEqual(expected, mock_find_module.mock_calls) self.assertEqual(3, len(mock_load_module.mock_calls)) @mock.patch("%s.os.path.isfile" % DISCOVER, return_value=True) @mock.patch("%s.imp.load_source" % DISCOVER) def test_load_plugins_from_file_successful(self, mock_load_source, mock_isfile): discover.load_plugins("/somewhere/plugin.py") expected = [mock.call("plugin", "/somewhere/plugin.py")] self.assertEqual(expected, mock_load_source.mock_calls) @mock.patch("%s.os" % DISCOVER) def test_load_plugins_from_nonexisting_and_empty_dir(self, mock_os): # test no fails for nonexisting directory mock_os.path.isdir.return_value = False discover.load_plugins("/somewhere") # test no fails for empty directory mock_os.path.isdir.return_value = True mock_os.walk.return_value = [] discover.load_plugins("/somewhere") @mock.patch("%s.os.path.isfile" % DISCOVER, 
return_value=True) def test_load_plugins_from_file_fails(self, mock_isfile): discover.load_plugins("/somewhere/plugin.py") @mock.patch("%s.os.path.isfile" % DISCOVER, return_value=False) def test_load_plugins_from_nonexisting_file(self, mock_isfile): # test no fails for nonexisting file discover.load_plugins("/somewhere/plugin.py") @mock.patch("%s.imp.load_module" % DISCOVER, side_effect=Exception()) @mock.patch("%s.imp.find_module" % DISCOVER) @mock.patch("%s.os.path" % DISCOVER, return_value=True) @mock.patch("%s.os.walk" % DISCOVER, return_value=[("/etc/.rally/plugins", [], ("load_it.py", ))]) def test_load_plugins_fails(self, mock_os_walk, mock_os_path, mock_find_module, mock_load_module): # test no fails if module is broken # TODO(olkonami): check exception is handled correct discover.load_plugins("/somewhere") rally-0.9.1/tests/unit/common/test_broker.py0000664000567000056710000000537113073417716022332 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections import mock from rally.common import broker from tests.unit import test class BrokerTestCase(test.TestCase): def test__publisher(self): mock_publish = mock.MagicMock() queue = collections.deque() broker._publisher(mock_publish, queue) mock_publish.assert_called_once_with(queue) def test__publisher_fails(self): mock_publish = mock.MagicMock(side_effect=Exception()) queue = collections.deque() broker._publisher(mock_publish, queue) def test__consumer(self): queue = collections.deque([1, 2, 3]) mock_consume = mock.MagicMock() broker._consumer(mock_consume, queue) self.assertEqual(3, mock_consume.call_count) self.assertEqual(0, len(queue)) def test__consumer_cache(self): cache_keys_history = [] def consume(cache, item): cache[item] = True cache_keys_history.append(list(cache)) queue = collections.deque([1, 2, 3]) broker._consumer(consume, queue) self.assertEqual([[1], [1, 2], [1, 2, 3]], cache_keys_history) def test__consumer_fails(self): queue = collections.deque([1, 2, 3]) mock_consume = mock.MagicMock(side_effect=Exception()) broker._consumer(mock_consume, queue) self.assertEqual(0, len(queue)) @mock.patch("rally.common.broker.LOG") def test__consumer_indexerror(self, mock_log): consume = mock.Mock() consume.side_effect = IndexError() queue = collections.deque([1, 2, 3]) broker._consumer(consume, queue) self.assertTrue(mock_log.warning.called) self.assertFalse(queue) expected = [mock.call({}, 1), mock.call({}, 2), mock.call({}, 3)] self.assertEqual(expected, consume.mock_calls) def test_run(self): def publish(queue): queue.append(1) queue.append(2) queue.append(3) consumed = set() def consume(cache, item): consumed.add(item) consumer_count = 2 broker.run(publish, consume, consumer_count) self.assertEqual(set([1, 2, 3]), consumed) rally-0.9.1/tests/unit/common/test_utils.py0000664000567000056710000006741413073417716022214 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from __future__ import print_function

import collections
import string
import sys
import threading
import time

import ddt
import mock
from six.moves import queue as Queue
import testtools

from rally.common import utils
from rally import exceptions
from tests.unit import test


class ImmutableMixinTestCase(test.TestCase):
    """Tests for utils.ImmutableMixin attribute-freezing behaviour."""

    def test_without_base_values(self):
        im = utils.ImmutableMixin()
        self.assertRaises(exceptions.ImmutableException,
                          im.__setattr__, "test", "test")

    def test_with_base_values(self):

        # Attributes set before ImmutableMixin.__init__ runs are allowed;
        # any assignment afterwards must raise.
        class A(utils.ImmutableMixin):
            def __init__(self, test):
                self.test = test
                super(A, self).__init__()

        a = A("test")
        self.assertRaises(exceptions.ImmutableException,
                          a.__setattr__, "abc", "test")
        self.assertEqual(a.test, "test")


class EnumMixinTestCase(test.TestCase):
    """Tests for utils.EnumMixin iteration over class-level constants."""

    def test_enum_mix_in(self):

        class Foo(utils.EnumMixin):
            a = 10
            b = 20
            CC = "2000"

        self.assertEqual(set(list(Foo())), set([10, 20, "2000"]))

    def test_with_underscore(self):

        # Underscore-prefixed attributes are excluded from iteration.
        class Foo(utils.EnumMixin):
            a = 10
            b = 20
            _CC = "2000"

        self.assertEqual(set(list(Foo())), set([10, 20]))


class StdIOCaptureTestCase(test.TestCase):
    """Tests for the StdOutCapture/StdErrCapture context managers."""

    def test_stdout_capture(self):
        stdout = sys.stdout
        messages = ["abcdef", "defgaga"]
        with utils.StdOutCapture() as out:
            for msg in messages:
                print(msg)

        self.assertEqual(out.getvalue().rstrip("\n").split("\n"), messages)
        # The original stream must be restored after the with-block.
        self.assertEqual(stdout, sys.stdout)

    def test_stderr_capture(self):
        stderr = sys.stderr
        messages = ["abcdef", "defgaga"]
        with utils.StdErrCapture() as err:
            for msg in messages:
                print(msg, file=sys.stderr)

        self.assertEqual(err.getvalue().rstrip("\n").split("\n"), messages)
        self.assertEqual(stderr, sys.stderr)


class TimerTestCase(test.TestCase):
    """Tests for the utils.Timer context manager."""

    def test_timer_duration(self):
        start_time = time.time()
        end_time = time.time()

        # time.time is re-mocked inside the with-block so that the timer
        # sees start_time on entry and end_time on exit.
        with mock.patch("rally.common.utils.time") as mock_time:
            mock_time.time = mock.MagicMock(return_value=start_time)
            with utils.Timer() as timer:
                mock_time.time = mock.MagicMock(return_value=end_time)

        self.assertIsNone(timer.error)
        self.assertEqual(start_time, timer.timestamp())
        self.assertEqual(end_time, timer.finish_timestamp())
        self.assertEqual(end_time - start_time, timer.duration())

    def test_timer_exception(self):
        try:
            with utils.Timer() as timer:
                raise Exception()
        except Exception:
            pass
        # timer.error holds the (type, value, traceback) triple.
        self.assertEqual(3, len(timer.error))
        self.assertEqual(timer.error[0], type(Exception()))


def module_level_method():
    pass


class MethodClassTestCase(test.TestCase):
    """Tests for utils.get_method_class."""

    @testtools.skipIf(sys.version_info > (2, 9), "Problems with access to "
                      "class from ")
    def test_method_class_for_class_level_method(self):
        class A(object):
            def m(self):
                pass
        self.assertEqual(A, utils.get_method_class(A.m))

    def test_method_class_for_module_level_method(self):
        self.assertIsNone(utils.get_method_class(module_level_method))


class FirstIndexTestCase(test.TestCase):
    """Tests for utils.first_index."""

    def test_list_with_existing_matching_element(self):
        lst = [1, 3, 5, 7]
        self.assertEqual(utils.first_index(lst, lambda e: e == 1), 0)
        self.assertEqual(utils.first_index(lst, lambda e: e == 5), 2)
        self.assertEqual(utils.first_index(lst, lambda e: e == 7), 3)

    def test_list_with_non_existing_matching_element(self):
        lst = [1, 3, 5, 7]
        self.assertIsNone(utils.first_index(lst, lambda e: e == 2))


class EditDistanceTestCase(test.TestCase):
    """Tests for utils.distance (edit distance between two strings)."""

    def test_distance_empty_strings(self):
        dist = utils.distance("", "")
        self.assertEqual(0, dist)

    def test_distance_equal_strings(self):
        dist = utils.distance("abcde", "abcde")
        self.assertEqual(0, dist)

    def test_distance_replacement(self):
        dist = utils.distance("abcde", "__cde")
        self.assertEqual(2, dist)

    def test_distance_insertion(self):
        dist = utils.distance("abcde", "ab__cde")
        self.assertEqual(2, dist)

    def test_distance_deletion(self):
        dist = utils.distance("abcde", "abc")
        self.assertEqual(2, dist)


class TenantIteratorTestCase(test.TestCase):
    """Tests for utils.iterate_per_tenants."""

    def test_iterate_per_tenant(self):
        users = []
        tenants_count = 2
        users_per_tenant = 5
        for tenant_id in range(tenants_count):
            for user_id in range(users_per_tenant):
                users.append(
                    {"id": str(user_id), "tenant_id": str(tenant_id)})

        # One (user, tenant_id) pair per tenant is expected — always the
        # first user of each tenant.
        expected_result = [
            ({"id": "0", "tenant_id": str(i)}, str(i)) for i in range(
                tenants_count)]
        real_result = [i for i in utils.iterate_per_tenants(users)]

        self.assertEqual(expected_result, real_result)


class RAMIntTestCase(test.TestCase):
    """Tests for utils.RAMInt, a shared auto-incrementing counter."""

    def test__int__(self):
        self.assertEqual(0, int(utils.RAMInt()))
        self.assertEqual(10, int(utils.RAMInt(10)))

    def test__str__(self):
        self.assertEqual("0", str(utils.RAMInt()))
        self.assertEqual("20", str(utils.RAMInt(20)))

    def test__next__(self):
        ri = utils.RAMInt()
        for i in range(0, 3):
            self.assertEqual(i, next(ri))

    def test_next(self):
        ri = utils.RAMInt()
        for i in range(0, 3):
            self.assertEqual(i, ri.next())

    def test_reset(self):
        ri = utils.RAMInt()
        ri.next()
        ri.reset()
        self.assertEqual(0, int(ri))


@ddt.ddt
class RandomNameTestCase(test.TestCase):
    """Tests for RandomNameGeneratorMixin and name_matches_object."""

    @ddt.data(
        {},
        {"task_id": "fake-task"},
        {"task_id": "2short", "expected": "s_rally_blargles_dweebled"},
        {"task_id": "fake!task", "expected": "s_rally_blargles_dweebled"},
        {"fmt": "XXXX-test-XXX-test", "expected": "fake-test-bla-test"})
    @ddt.unpack
    @mock.patch("random.choice")
    def test_generate_random_name(self, mock_choice, task_id="faketask",
                                  expected="s_rally_faketask_blargles",
                                  fmt="s_rally_XXXXXXXX_XXXXXXXX"):
        class FakeNameGenerator(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = fmt
            task = {"uuid": task_id}

        generator = FakeNameGenerator()

        # random.choice is fed a fixed stream so the "random" part of the
        # generated name is deterministic.
        mock_choice.side_effect = iter("blarglesdweebled")
        self.assertEqual(generator.generate_random_name(), expected)

        # The same contract must hold when the id comes from a
        # verification instead of a task.
        class FakeNameGenerator(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = fmt
            verification = {"uuid": task_id}

        generator = FakeNameGenerator()
        mock_choice.side_effect = iter("blarglesdweebled")
        self.assertEqual(generator.generate_random_name(), expected)

    def test_generate_random_name_bogus_name_format(self):
        class FakeNameGenerator(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = "invalid_XXX_format"
            task = {"uuid": "fake-task-id"}

        generator = FakeNameGenerator()
        self.assertRaises(ValueError,
                          generator.generate_random_name)

    @ddt.data(
        {"good": ("rally_abcdefgh_abcdefgh",
                  "rally_12345678_abcdefgh",
                  "rally_ABCdef12_ABCdef12"),
         "bad": ("rally_abcd_efgh", "rally_abcd!efg_12345678",
                 "rally_", "rally__", "rally_abcdefgh_",
                 "rally_abcdefghi_12345678", "foo", "foo_abcdefgh_abcdefgh")},
        {"task_id": "abcd1234",
         "good": ("rally_abcd1234_abcdefgh", "rally_abcd1234_abcd1234",
                  "rally_abcd1234_AbCdEf12"),
         "bad": ("rally_12345678_abcdefgh", "rally_12345678_abcd1234",
                 "rally_abcd1234_", "rally_abcd1234_!!!!!!!!",
                 "rally_ABCD1234_abcdefgh")},
        {"task_id": "abcd1234", "exact": False,
         "good": ("rally_abcd1234_abcdefghfoo", "rally_abcd1234_abcdefgh",
                  "rally_abcd1234_abcdefgh-bar",
                  "rally_abcd1234_abcdefgh+!@$"),
         "bad": ("rally_abcd1234_", "rally_abcd1234_!!!!!!!!",
                 "rally_abcd1234_abcdefg")},
        {"fmt": "][*_XXX_XXX", "chars": "abc(.*)",
         "good": ("][*_abc_abc", "][*_abc_((("),
         "bad": ("rally_ab_cd", "rally_ab!_abc", "rally_", "rally__",
                 "rally_abc_", "rally_abcd_abc", "foo", "foo_abc_abc")},
        {"fmt": "XXXX-test-XXX-test",
         "good": ("abcd-test-abc-test",),
         "bad": ("rally-abcdefgh-abcdefgh", "abc-test-abc-test",
                 "abcd_test_abc_test", "abc-test-abcd-test")})
    @ddt.unpack
    def test_cls_name_matches_object(
            self, good=(), bad=(), fmt="rally_XXXXXXXX_XXXXXXXX",
            chars=string.ascii_letters + string.digits, task_id=None,
            exact=True):
        class FakeNameGenerator(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = fmt
            RESOURCE_NAME_ALLOWED_CHARACTERS = chars
            task = {"uuid": task_id or "fakeuuid"}

        for name in good:
            self.assertTrue(
                FakeNameGenerator.name_matches_object(name, task_id, exact),
                "%(name)s unexpectedly didn't match RESOURCE_NAME_FORMAT "
                "%(fmt)s with exact=%(exact)s" %
                {"name": name, "fmt": fmt, "exact": exact})
        for name in bad:
            self.assertFalse(
                FakeNameGenerator.name_matches_object(name, task_id, exact),
                "%(name)s unexpectedly matched RESOURCE_NAME_FORMAT %(fmt)s "
                "with exact=%(exact)s" %
                {"name": name, "fmt": fmt, "exact": exact})

    def test_name_matches_object(self):
        name = "foo"
        obj = mock.Mock()
        self.assertTrue(utils.name_matches_object(name, obj))
        obj.name_matches_object.assert_called_once_with(name)

    def test_name_matches_object_kwargs(self):
        name = "foo"
        obj = mock.Mock()
        self.assertTrue(utils.name_matches_object(name, obj, task_id="taskid",
                                                  exact=False))
        obj.name_matches_object.assert_called_once_with(name, task_id="taskid",
                                                        exact=False)

    def test_name_matches_object_identical_list(self):
        class One(utils.RandomNameGeneratorMixin):
            name_matches_object = mock.Mock(return_value=False)

        class Two(utils.RandomNameGeneratorMixin):
            name_matches_object = mock.Mock(return_value=False)

        name = "foo"
        self.assertFalse(utils.name_matches_object(name, One, Two))
        # ensure that exactly one of the two objects is checked
        self.assertItemsEqual(
            One.name_matches_object.call_args_list +
            Two.name_matches_object.call_args_list,
            [mock.call(name)])

    def test_name_matches_object_differing_list(self):
        # Four classes with pairwise-different format/charset settings:
        # none may be deduplicated, so each must be checked once.
        class One(utils.RandomNameGeneratorMixin):
            name_matches_object = mock.Mock(return_value=False)

        class Two(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = "foo_XXX_XXX"
            name_matches_object = mock.Mock(return_value=False)

        class Three(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_ALLOWED_CHARACTERS = "12345"
            name_matches_object = mock.Mock(return_value=False)

        class Four(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = "bar_XXX_XXX"
            RESOURCE_NAME_ALLOWED_CHARACTERS = "abcdef"
            name_matches_object = mock.Mock(return_value=False)

        classes = (One, Two, Three, Four)
        name = "foo"
        self.assertFalse(utils.name_matches_object(name, *classes))
        for cls in classes:
            cls.name_matches_object.assert_called_once_with(name)

    def test_cls_name_matches_object_identity(self):
        generator = utils.RandomNameGeneratorMixin()
        generator.task = {"uuid": "faketask"}

        self.assertTrue(generator.name_matches_object(
            generator.generate_random_name()))
        self.assertTrue(utils.RandomNameGeneratorMixin.name_matches_object(
            generator.generate_random_name()))

    def test_name_matches_object_identity(self):
        generator = utils.RandomNameGeneratorMixin()
        generator.task = {"uuid": "faketask"}

        self.assertTrue(utils.name_matches_object(
            generator.generate_random_name(), generator))
        self.assertTrue(utils.name_matches_object(
            generator.generate_random_name(),
            utils.RandomNameGeneratorMixin))

    def test_consistent_task_id_part(self):
        class FakeNameGenerator(utils.RandomNameGeneratorMixin):
            RESOURCE_NAME_FORMAT = "XXXXXXXX_XXXXXXXX"

        generator = FakeNameGenerator()
        generator.task = {"uuid": "good-task-id"}

        # The task-id prefix must be stable across many generated names.
        names = [generator.generate_random_name() for i in range(100)]
        task_id_parts = set([n.split("_")[0] for n in names])
        self.assertEqual(len(task_id_parts), 1)

        # Even a uuid full of disallowed characters must map to a single
        # consistent prefix.
        generator.task = {"uuid": "bogus! task! id!"}
        names = [generator.generate_random_name() for i in range(100)]
        task_id_parts = set([n.split("_")[0] for n in names])
        self.assertEqual(len(task_id_parts), 1)


@ddt.ddt
class MergeTestCase(test.TestCase):
    """Tests for utils.merge, a chunked k-way merge of sorted streams."""

    @ddt.data(
        # regular data
        {"sources": [[[1, 3, 5], [5, 7, 9, 14], [17, 21, 36, 41]],
                     [[2, 2, 4], [9, 10], [16, 19, 23, 26, 91]],
                     [[5], [5, 7, 11, 14, 14, 19, 23]]],
         "expected_output": [[1, 2, 2, 3, 4, 5, 5, 5, 5, 7],
                             [7, 9, 9, 10, 11, 14, 14, 14, 16, 17],
                             [19, 19, 21, 23, 23, 26, 36, 41, 91]]},
        # with one empty source
        {"sources": [[[1, 3, 5], [5, 7, 9, 14], [17, 21, 36, 41]],
                     [[2, 2, 4], [9, 10], [16, 19, 23, 26, 91]],
                     [[5], [5, 7, 11, 14, 14, 19, 23]],
                     []],
         "expected_output": [[1, 2, 2, 3, 4, 5, 5, 5, 5, 7],
                             [7, 9, 9, 10, 11, 14, 14, 14, 16, 17],
                             [19, 19, 21, 23, 23, 26, 36, 41, 91]]},
        # with one source that produces an empty list
        {"sources": [[[1, 3, 5], [5, 7, 9, 14], [17, 21, 36, 41]],
                     [[2, 2, 4], [9, 10], [16, 19, 23, 26, 91]],
                     [[5], [5, 7, 11, 14, 14, 19, 23]],
                     [[]]],
         "expected_output": [[1, 2, 2, 3, 4, 5, 5, 5, 5, 7],
                             [7, 9, 9, 10, 11, 14, 14, 14, 16, 17],
                             [19, 19, 21, 23, 23, 26, 36, 41, 91]]},
        # with empty lists appearing inside sources
        {"sources": [[[1, 3, 5], [], [], [5, 7, 9, 14], [17, 21, 36, 41]],
                     [[], [2, 2, 4], [9, 10], [16, 19, 23, 26, 91]],
                     [[5], [5, 7, 11, 14, 14, 19, 23], []]],
         "expected_output": [[1, 2, 2, 3, 4, 5, 5, 5, 5, 7],
                             [7, 9, 9, 10, 11, 14, 14, 14, 16, 17],
                             [19, 19, 21, 23, 23, 26, 36, 41, 91]]},
        # only one source
        {"sources": [[[1, 3, 5], [5, 7, 9, 14], [17, 21, 36, 41]]],
         "expected_output": [[1, 3, 5, 5, 7, 9, 14, 17, 21, 36],
                             [41]]},
        # no sources passed in
        {"sources": [],
         "expected_output": []},
        # several sources, all empty
        {"sources": [[], [], [], []],
         "expected_output": []}
    )
    @ddt.unpack
    def test_merge(self, sources, expected_output):
        in_iters = [iter(src) for src in sources]
        # 10 is the output chunk size.
        out = list(utils.merge(10, *in_iters))
        self.assertEqual(out, expected_output)


class TimeoutThreadTestCase(test.TestCase):

    def test_timeout_thread(self):
        """Create and kill a thread by timeout.

        This single test covers 3 methods: terminate_thread,
        timeout_thread, and interruptable_sleep.

        This test is more of an integration test than a unit test, but
        it is much better than 500 unreadable lines of mocking and
        checking.
        """
        queue = Queue.Queue()
        killer_thread = threading.Thread(
            target=utils.timeout_thread,
            args=(queue,),
        )
        # The victim sleeps for 30s; the killer is told to terminate it
        # after just 1s, so the join below must return early.
        test_thread = threading.Thread(
            target=utils.interruptable_sleep,
            args=(30, 0.01),
        )
        test_thread.start()
        start_time = time.time()
        queue.put((test_thread, start_time + 1))
        killer_thread.start()
        test_thread.join()
        end_time = time.time()
        queue.put((None, None))
        killer_thread.join()
        time_elapsed = end_time - start_time
        # NOTE(sskripnick): Killing thread with PyThreadState_SetAsyncExc
        # works with significant delay. Make sure this delay is less
        # than 10 seconds.
        self.assertLess(time_elapsed, 11,
                        "Thread killed too late (%s seconds)" % time_elapsed)


class LockedDictTestCase(test.TestCase):
    """Tests for utils.LockedDict, a dict frozen outside unlocked()."""

    def test_init_unlock_and_update(self):
        def setitem(obj, key, value):
            obj[key] = value

        def delitem(obj, key):
            del obj[key]

        d = utils.LockedDict()
        self.assertIsInstance(d, dict)
        self.assertEqual(d, {})

        d = utils.LockedDict(foo="bar", spam={"a": ["b", {"c": "d"}]})
        # Nested dicts are recursively converted to LockedDict and nested
        # lists to tuples.
        self.assertEqual(d, {"foo": "bar", "spam": {"a": ("b", {"c": "d"})}})
        self.assertIsInstance(d["spam"], utils.LockedDict)
        self.assertIsInstance(d["spam"]["a"][1], utils.LockedDict)
        self.assertRaises(RuntimeError, setitem, d, 123, 456)
        self.assertRaises(RuntimeError, delitem, d, "foo")
        self.assertRaises(RuntimeError, setitem, d["spam"]["a"][1], 123, 456)
        self.assertRaises(RuntimeError, delitem, d["spam"]["a"][1], "c")
        self.assertRaises(RuntimeError, d.update, {123: 456})
        self.assertRaises(RuntimeError, d.setdefault, 123, 456)
        self.assertRaises(RuntimeError, d.pop, "foo")
        self.assertRaises(RuntimeError, d.popitem)
        self.assertRaises(RuntimeError, d.clear)
        self.assertEqual(d, {"foo": "bar", "spam": {"a": ("b", {"c": "d"})}})

        # All mutating operations become available inside unlocked().
        with d.unlocked():
            d["spam"] = 42
            self.assertEqual(d, {"foo": "bar", "spam": 42})
            d.clear()
            self.assertEqual(d, {})
            d.setdefault("foo", 42)
            d.update({"bar": 24})
            self.assertEqual(d, {"foo": 42, "bar": 24})
            self.assertEqual(24, d.pop("bar"))
            self.assertEqual(("foo", 42), d.popitem())
            d[123] = 456
            self.assertEqual(d, {123: 456})

        # ...and are locked again as soon as the context exits.
        self.assertRaises(RuntimeError, setitem, d, 123, 456)
        self.assertRaises(RuntimeError, delitem, d, "foo")

    @mock.patch("rally.common.utils.copy.deepcopy")
    def test___deepcopy__(self, mock_deepcopy):
        # deepcopy is mocked to just echo its arguments, so we can check
        # that LockedDict hands over a plain-dict snapshot of itself.
        mock_deepcopy.side_effect = lambda *args, **kw: (args, kw)
        d = utils.LockedDict(foo="bar", spam={"a": ["b", {"c": "d"}]})

        args, kw = d.__deepcopy__()
        self.assertEqual({"memo": None}, kw)
        self.assertEqual(({"foo": "bar", "spam": {"a": ("b", {"c": "d"})}},),
                         args)
        # The snapshot and all nested mappings are plain dicts, not
        # LockedDict instances.
        self.assertEqual(dict, type(args[0]))
        self.assertEqual(dict, type(args[0]["spam"]))
        self.assertEqual(dict, type(args[0]["spam"]["a"][1]))

        mock_deepcopy.reset_mock()
        args, kw = d.__deepcopy__("foo_memo")
        self.assertEqual(({"foo": "bar", "spam": {"a": ("b", {"c": "d"})}},),
                         args)
        self.assertEqual({"memo": "foo_memo"}, kw)


@ddt.ddt
class FloatFormatterTestCase(test.TestCase):
    """Tests for utils.format_float_to_str rounding behaviour."""

    @ddt.data(
        {"num_float": 0, "num_str": "0.0"},
        {"num_float": 37, "num_str": "37.0"},
        {"num_float": 0.0000001, "num_str": "0.0"},
        {"num_float": 0.000000, "num_str": "0.0"},
        {"num_float": 1.0000001, "num_str": "1.0"},
        {"num_float": 1.0000011, "num_str": "1.000001"},
        {"num_float": 1.0000019, "num_str": "1.000002"}
    )
    @ddt.unpack
    def test_format_float_to_str(self, num_float, num_str):
        self.assertEqual(num_str, utils.format_float_to_str(num_float))


class DequeAsQueueTestCase(test.TestCase):
    """Tests for utils.DequeAsQueue, a Queue-like facade over a deque."""

    def setUp(self):
        super(DequeAsQueueTestCase, self).setUp()
        self.deque = collections.deque()
        self.deque_as_queue = utils.DequeAsQueue(self.deque)

    def test_qsize(self):
        self.assertEqual(0, self.deque_as_queue.qsize())
        self.deque.append(10)
        self.assertEqual(1, self.deque_as_queue.qsize())

    def test_put(self):
        self.deque_as_queue.put(10)
        self.assertEqual(10, self.deque.popleft())

    def test_get(self):
        self.deque.append(33)
        self.assertEqual(33, self.deque_as_queue.get())

    def test_empty(self):
        # NOTE(review): the assertion polarity looks inverted — an empty
        # deque yields assertFalse(empty()) and a non-empty one
        # assertTrue(empty()). Presumably this mirrors DequeAsQueue.empty()
        # returning bool(deque); confirm against the implementation before
        # changing either side.
        self.assertFalse(self.deque_as_queue.empty())
        self.deque.append(10)
        self.assertTrue(self.deque_as_queue.empty())


class StopwatchTestCase(test.TestCase):
    """Tests for utils.Stopwatch paced sleeping."""

    @mock.patch("rally.common.utils.interruptable_sleep")
    @mock.patch("rally.common.utils.time")
    def test_stopwatch(self, mock_time, mock_interruptable_sleep):
        # The clock advances by 1 between calls, so every sleep(n) only
        # has to wait the remaining 1 second.
        mock_time.time.side_effect = [0, 0, 1, 2, 3]

        sw = utils.Stopwatch()
        sw.start()
        sw.sleep(1)
        sw.sleep(2)
        sw.sleep(3)

        mock_interruptable_sleep.assert_has_calls([
            mock.call(1),
            mock.call(1),
            mock.call(1),
        ])

    @mock.patch("rally.common.utils.interruptable_sleep")
    @mock.patch("rally.common.utils.time")
    def test_no_sleep(self, mock_time, mock_interruptable_sleep):
        # The deadline is already reached, so no sleeping is needed.
        mock_time.time.side_effect = [0, 1]
        sw = utils.Stopwatch()
        sw.start()
        sw.sleep(1)
        self.assertFalse(mock_interruptable_sleep.called)

    @mock.patch("rally.common.utils.time")
    def test_stopwatch_with_event(self, mock_time):
        mock_time.time.side_effect = [0, 0, 1, 2, 3]
        event = mock.Mock(spec=threading.Event)()

        # With a stop_event the stopwatch waits on the event instead of
        # calling interruptable_sleep.
        sw = utils.Stopwatch(stop_event=event)
        sw.start()
        sw.sleep(1)
        sw.sleep(2)
        sw.sleep(3)

        event.wait.assert_has_calls([
            mock.call(1),
            mock.call(1),
            mock.call(1),
        ])


class BackupTestCase(test.TestCase):
    """Tests for utils.BackupHelper and utils.generate_random_path."""

    def setUp(self):
        super(BackupTestCase, self).setUp()
        # os.mkdir is patched for every test: BackupHelper creates its
        # temporary directory in __init__.
        p = mock.patch("rally.common.utils.os.mkdir")
        self.mock_mkdir = p.start()
        self.addCleanup(p.stop)

    @mock.patch("rally.common.utils.os.path.exists")
    @mock.patch("rally.common.utils.uuid")
    def test_generate_random_path(self, mock_uuid, mock_exists):
        # The first candidate ("exist") collides, so a second uuid must
        # be drawn.
        mock_exists.side_effect = lambda a: "exist" in a
        mock_uuid.uuid4.side_effect = ("exist", "foo")
        self.assertEqual("/some/foo", utils.generate_random_path("/some"))
        mock_exists.assert_has_calls((
            mock.call("/some/exist"),
            mock.call("/some/foo"),
        ))

    @mock.patch("rally.common.utils.generate_random_path")
    def test___init__(self, mock_generate_random_path):
        utils.BackupHelper()
        mock_generate_random_path.assert_called_once_with()
        self.mock_mkdir.assert_called_once_with(
            mock_generate_random_path.return_value)

    @mock.patch("rally.common.utils.generate_random_path")
    @mock.patch("rally.common.utils.shutil.copytree")
    def test_backup(self, mock_copytree, mock_generate_random_path):
        backup_dir = "another_dir"
        mock_generate_random_path.side_effect = ("base_tmp_dir", backup_dir)
        bh = utils.BackupHelper()

        path = "some_path"
        bh.backup(path)

        mock_copytree.assert_called_once_with(path, backup_dir, symlinks=True)
        self.assertEqual(backup_dir, bh._stored_data[path])

        mock_copytree.reset_mock()

        # Backing up the same path twice is an error.
        self.assertRaises(exceptions.RallyException, bh.backup, path)
        self.assertFalse(mock_copytree.called)

    @mock.patch("rally.common.utils.BackupHelper.rollback")
    @mock.patch("rally.common.utils.generate_random_path")
    @mock.patch("rally.common.utils.shutil.copytree")
    def test_backup_failed_while_copy(self, mock_copytree,
                                      mock_generate_random_path,
                                      mock_backup_helper_rollback):
        backup_dir = "another_dir"
        mock_generate_random_path.side_effect = ("base_tmp_dir", backup_dir)
        mock_copytree.side_effect = RuntimeError
        bh = utils.BackupHelper()

        path = "some_path"
        # A failed copy must not be recorded and must trigger rollback.
        self.assertRaises(RuntimeError, bh.backup, path)
        mock_copytree.assert_called_once_with(path, backup_dir, symlinks=True)
        self.assertTrue(not bh._stored_data)
        mock_backup_helper_rollback.assert_called_once_with()

    @mock.patch("rally.common.utils.BackupHelper.backup")
    def test_call(self, mock_backup_helper_backup):
        # Calling the helper is a fluent alias for backup().
        path = "/some/path"
        bh = utils.BackupHelper()
        self.assertEqual(bh, bh(path))
        mock_backup_helper_backup.assert_called_once_with(path)

    @mock.patch("rally.common.utils."
                "os.path.exists", side_effect=(False, True, True))
    @mock.patch("rally.common.utils.shutil.rmtree")
    def test___del__(self, mock_rmtree, mock_exists):
        paths = {"original_path": "/tmp/backup_of_something",
                 "another_path": "/tmp/backup_of_another_thing"}
        bh = utils.BackupHelper()
        bh._stored_data = paths
        # Destruction removes every stored backup directory.
        del bh
        self.assertEqual([mock.call(p) for p in paths.values()],
                         mock_rmtree.call_args_list)

    @mock.patch("rally.common.utils.os.path.exists",
                side_effect=(False, True))
    @mock.patch("rally.common.utils.shutil.rmtree")
    @mock.patch("rally.common.utils.shutil.copytree")
    def test_rollback(self, mock_copytree, mock_rmtree, mock_exists):
        bh = utils.BackupHelper()
        original_path = "/some/original/path"
        tmp_path = "/temporary/location/for/backup"
        path = {original_path: tmp_path}
        bh._stored_data = path
        rollback_method = mock.MagicMock()
        args = (1, 2, 3)
        kwargs = {"arg1": "value"}
        bh.add_rollback_action(rollback_method, *args, **kwargs)

        bh.rollback()

        # Rollback restores the original tree from the backup and then
        # runs the registered rollback action.
        mock_rmtree.assert_called_once_with(original_path)
        mock_copytree.assert_called_once_with(tmp_path, original_path,
                                              symlinks=True)
        self.assertTrue(not bh._stored_data)
        rollback_method.assert_called_once_with(*args, **kwargs)

    @mock.patch("rally.common.utils.BackupHelper.rollback")
    def test_context_manager(self, mock_backup_helper_rollback):
        # A clean exit must not roll back...
        bh = utils.BackupHelper()
        with bh:
            pass
        self.assertFalse(mock_backup_helper_rollback.called)

        # ...while an exception inside the with-block must roll back and
        # still propagate.
        bh = utils.BackupHelper()
        try:
            with bh:
                raise RuntimeError()
        except RuntimeError:
            # it is expected behaviour
            pass
        else:
            self.fail("BackupHelper context manager should not hide an "
                      "exception")
        self.assertTrue(mock_backup_helper_rollback.called)
rally-0.9.1/tests/unit/cli/0000775000567000056710000000000013073420067016677 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/cli/__init__.py0000664000567000056710000000000013073417716021005 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/cli/commands/0000775000567000056710000000000013073420067020500 0ustar
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/cli/commands/__init__.py0000664000567000056710000000000013073417716022606 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/cli/commands/test_task.py0000664000567000056710000013674713073417720023077 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import datetime as dt import json import os.path import ddt import mock import yaml from rally import api from rally.cli.commands import task from rally import consts from rally import exceptions from tests.unit import fakes from tests.unit import test @ddt.ddt class TaskCommandsTestCase(test.TestCase): def setUp(self): super(TaskCommandsTestCase, self).setUp() self.task = task.TaskCommands() self.fake_api = fakes.FakeAPI() with mock.patch("rally.api.API.check_db_revision"): self.real_api = api.API() @mock.patch("rally.cli.commands.task.open", create=True) def test__load_task(self, mock_open): input_task = "{'ab': {{test}}}" input_args = "{'test': 2}" # NOTE(boris-42): Such order of files is because we are reading # file with args before file with template. 
mock_open.side_effect = [ mock.mock_open(read_data="{'test': 1}").return_value, mock.mock_open(read_data=input_task).return_value ] task_conf = self.task._load_task( self.real_api, "in_task", task_args_file="in_args_path") self.assertEqual({"ab": 1}, task_conf) mock_open.side_effect = [ mock.mock_open(read_data=input_task).return_value ] task_conf = self.task._load_task( self.real_api, "in_task", task_args=input_args) self.assertEqual(task_conf, {"ab": 2}) mock_open.side_effect = [ mock.mock_open(read_data="{'test': 1}").return_value, mock.mock_open(read_data=input_task).return_value ] task_conf = self.task._load_task( self.real_api, "in_task", task_args=input_args, task_args_file="any_file") self.assertEqual(task_conf, {"ab": 2}) @mock.patch("rally.cli.commands.task.open", create=True) def test__load_task_wrong_task_args_file(self, mock_open): mock_open.side_effect = [ mock.mock_open(read_data="{'test': {}").return_value ] self.assertRaises(task.FailedToLoadTask, self.task._load_task, self.fake_api, "in_task", task_args_file="in_args_path") @mock.patch("rally.cli.commands.task.open", create=True) def test__load_task_wrong_task_args_file_exception(self, mock_open): mock_open.side_effect = IOError self.assertRaises(IOError, self.task._load_task, self.fake_api, "in_task", task_args_file="in_args_path") def test__load_task_wrong_input_task_args(self): self.assertRaises(task.FailedToLoadTask, self.task._load_task, self.real_api, "in_task", "{'test': {}") self.assertRaises(task.FailedToLoadTask, self.task._load_task, self.real_api, "in_task", "[]") @mock.patch("rally.cli.commands.task.open", create=True) def test__load_task_task_render_raise_exc(self, mock_open): mock_open.side_effect = [ mock.mock_open(read_data="{'test': {{t}}}").return_value ] self.assertRaises(task.FailedToLoadTask, self.task._load_task, self.real_api, "in_task") @mock.patch("rally.cli.commands.task.open", create=True) def test__load_task_task_not_in_yaml(self, mock_open): mock_open.side_effect = [ 
mock.mock_open(read_data="{'test': {}").return_value ] self.fake_api.task.render_template.return_value = "||" self.assertRaises(task.FailedToLoadTask, self.task._load_task, self.fake_api, "in_task") def test_load_task_including_other_template(self): other_template_path = os.path.join( os.path.dirname(__file__), "..", "..", "..", "..", "samples/tasks/scenarios/nova/boot.json") input_task = "{%% include \"%s\" %%}" % os.path.basename( other_template_path) expect = self.task._load_task(self.real_api, other_template_path) with mock.patch("rally.cli.commands.task.open", create=True) as mock_open: mock_open.side_effect = [ mock.mock_open(read_data=input_task).return_value ] input_task_file = os.path.join( os.path.dirname(other_template_path), "input_task.json") actual = self.task._load_task(self.real_api, input_task_file) self.assertEqual(expect, actual) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands._load_task", return_value={"uuid": "some_uuid"}) def test__load_and_validate_task(self, mock__load_task, mock_os_path_isfile): deployment = "some_deployment_uuid" self.fake_api.task.validate.return_value = fakes.FakeTask() self.task._load_and_validate_task(self.fake_api, "some_task", "task_args", "task_args_file", deployment) mock__load_task.assert_called_once_with( self.fake_api, "some_task", "task_args", "task_args_file") self.fake_api.task.validate.assert_called_once_with( deployment, mock__load_task.return_value, None) def test__load_and_validate_file(self): deployment = "some_deployment_uuid" self.assertRaises(IOError, self.task._load_and_validate_task, self.fake_api, "some_task", "task_args", "task_args_file", deployment) @mock.patch("rally.cli.commands.task.version") @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands.use") @mock.patch("rally.cli.commands.task.TaskCommands.detailed") 
@mock.patch("rally.cli.commands.task.TaskCommands._load_task", return_value={"some": "json"}) def test_start(self, mock__load_task, mock_detailed, mock_use, mock_os_path_isfile, mock_version): deployment_id = "e0617de9-77d1-4875-9b49-9d5789e29f20" task_path = "path_to_config.json" self.fake_api.task.create.return_value = fakes.FakeTask( uuid="some_new_uuid", tag="tag") self.fake_api.task.validate.return_value = fakes.FakeTask( some="json", uuid="some_uuid", temporary=True) self.task.start(self.fake_api, task_path, deployment_id, do_use=True) mock_version.version_string.assert_called_once_with() self.fake_api.task.create.assert_called_once_with( deployment_id, None) self.fake_api.task.start.assert_called_once_with( deployment_id, mock__load_task.return_value, task=self.fake_api.task.validate.return_value, abort_on_sla_failure=False) mock__load_task.assert_called_once_with( self.fake_api, task_path, None, None) mock_use.assert_called_once_with(self.fake_api, "some_new_uuid") mock_detailed.assert_called_once_with(self.fake_api, task_id="some_new_uuid") @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands.detailed") @mock.patch("rally.cli.commands.task.TaskCommands._load_task", return_value="some_config") def test_start_on_unfinished_deployment( self, mock__load_task, mock_detailed, mock_os_path_isfile): deployment_id = "e0617de9-77d1-4875-9b49-9d5789e29f20" deployment_name = "xxx_name" task_path = "path_to_config.json" self.fake_api.task.create.return_value = fakes.FakeTask( uuid="some_new_uuid", tag="tag") exc = exceptions.DeploymentNotFinishedStatus( name=deployment_name, uuid=deployment_id, status=consts.DeployStatus.DEPLOY_INIT) self.fake_api.task.create.side_effect = exc self.assertEqual(1, self.task.start(self.fake_api, task_path, deployment="any", tag="some_tag")) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) 
@mock.patch("rally.cli.commands.task.TaskCommands.detailed") @mock.patch("rally.cli.commands.task.TaskCommands._load_task", return_value="some_config") def test_start_with_task_args(self, mock__load_task, mock_detailed, mock_os_path_isfile): self.fake_api.task.create.return_value = fakes.FakeTask( uuid="new_uuid", tag="some_tag") self.fake_api.task.validate.return_value = fakes.FakeTask( uuid="some_id") task_path = "path_to_config.json" task_args = "task_args" task_args_file = "task_args_file" self.task.start(self.fake_api, task_path, deployment="any", task_args=task_args, task_args_file=task_args_file, tag="some_tag") mock__load_task.assert_called_once_with( self.fake_api, task_path, task_args, task_args_file) self.fake_api.task.validate.assert_called_once_with( "any", mock__load_task.return_value, {}) self.fake_api.task.start.assert_called_once_with( "any", mock__load_task.return_value, task=self.fake_api.task.create.return_value, abort_on_sla_failure=False) mock_detailed.assert_called_once_with( self.fake_api, task_id=self.fake_api.task.create.return_value["uuid"]) self.fake_api.task.create.assert_called_once_with("any", "some_tag") @mock.patch("rally.cli.commands.task.envutils.get_global") def test_start_no_deployment_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.task.start, "path_to_config.json", None) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands._load_task", return_value={"some": "json"}) def test_start_invalid_task(self, mock__load_task, mock_os_path_isfile): self.fake_api.task.create.return_value = fakes.FakeTask( temporary=False, tag="tag", uuid="uuid") exc = exceptions.InvalidTaskException self.fake_api.task.start.side_effect = exc result = self.task.start(self.fake_api, "task_path", "deployment", tag="tag") self.assertEqual(1, result) 
self.fake_api.task.create.assert_called_once_with("deployment", "tag") self.fake_api.task.start.assert_called_once_with( "deployment", mock__load_task.return_value, task=self.fake_api.task.create.return_value, abort_on_sla_failure=False) def test_abort(self): test_uuid = "17860c43-2274-498d-8669-448eff7b073f" self.task.abort(self.fake_api, test_uuid) self.fake_api.task.abort.assert_called_once_with( test_uuid, False, async=False) @mock.patch("rally.cli.commands.task.envutils.get_global") def test_abort_no_task_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.task.abort, self.fake_api, None) def test_status(self): test_uuid = "a3e7cefb-bec2-4802-89f6-410cc31f71af" value = {"task_id": "task", "status": "status"} self.fake_api.task.get.return_value = value self.task.status(self.fake_api, test_uuid) self.fake_api.task.get.assert_called_once_with(test_uuid) @mock.patch("rally.cli.commands.task.envutils.get_global") def test_status_no_task_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.task.status, None) def test_detailed(self): test_uuid = "c0d874d4-7195-4fd5-8688-abe82bfad36f" self.fake_api.task.get_detailed.return_value = { "id": "task", "uuid": test_uuid, "status": "status", "results": [ { "key": { "name": "fake_name", "pos": "fake_pos", "kw": "fake_kw" }, "info": { "load_duration": 3.2, "full_duration": 3.5, "iterations_count": 4, "atomic": {"foo": {}, "bar": {}}}, "iterations": [ {"duration": 0.9, "idle_duration": 0.1, "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 0.6, "bar": 0.7}, "error": ["type", "message", "traceback"] }, {"duration": 1.2, "idle_duration": 0.3, "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 0.6, "bar": 0.7}, "error": ["type", "message", "traceback"] }, {"duration": 0.7, "idle_duration": 
0.5, "scenario_output": { "data": {"foo": 0.6, "bar": 0.7}, "errors": "some" }, "atomic_actions": {"foo": 0.6, "bar": 0.7}, "error": ["type", "message", "traceback"] }, {"duration": 0.5, "idle_duration": 0.5, "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 0.6, "bar": 0.7}, "error": ["type", "message", "traceback"] } ] } ] } self.task.detailed(self.fake_api, test_uuid) self.fake_api.task.get_detailed.assert_called_once_with( test_uuid, extended_results=True) self.task.detailed(self.fake_api, test_uuid, iterations_data=True) @mock.patch("rally.cli.commands.task.sys.stdout") @mock.patch("rally.cli.commands.task.logging") @ddt.data({"debug": True}, {"debug": False}) @ddt.unpack def test_detailed_task_failed(self, mock_logging, mock_stdout, debug): test_uuid = "test_task_id" value = { "id": "task", "uuid": test_uuid, "status": consts.TaskStatus.CRASHED, "results": [], "verification_log": json.dumps({"etype": "error_type", "msg": "error_message", "trace": "error_traceback"}) } self.fake_api.task.get_detailed.return_value = value mock_logging.is_debug.return_value = debug self.task.detailed(self.fake_api, test_uuid) verification = yaml.safe_load(value["verification_log"]) if debug: expected_calls = [mock.call("Task test_task_id: crashed"), mock.call("%s" % verification["trace"])] mock_stdout.write.assert_has_calls(expected_calls, any_order=True) else: expected_calls = [mock.call("Task test_task_id: crashed"), mock.call("%s" % verification["etype"]), mock.call("%s" % verification["msg"]), mock.call("\nFor more details run:\nrally " "-d task detailed %s" % test_uuid)] mock_stdout.write.assert_has_calls(expected_calls, any_order=True) @mock.patch("rally.cli.commands.task.sys.stdout") def test_detailed_task_status_not_in_finished_abort(self, mock_stdout): test_uuid = "test_task_id" value = { "id": "task", "uuid": test_uuid, "status": consts.TaskStatus.INIT, "results": [] } self.fake_api.task.get_detailed.return_value = value 
self.task.detailed(self.fake_api, test_uuid) expected_calls = [mock.call("Task test_task_id: init"), mock.call("\nThe task test_task_id marked as " "'init'. Results available when it " "is 'finished'.")] mock_stdout.write.assert_has_calls(expected_calls, any_order=True) @mock.patch("rally.cli.commands.task.envutils.get_global") def test_detailed_no_task_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.task.detailed, None) def test_detailed_wrong_id(self): test_uuid = "eb290c30-38d8-4c8f-bbcc-fc8f74b004ae" self.fake_api.task.get_detailed.return_value = None self.task.detailed(self.fake_api, test_uuid) self.fake_api.task.get_detailed.assert_called_once_with( test_uuid, extended_results=True) @mock.patch("json.dumps") def test_results(self, mock_json_dumps): task_id = "foo_task_id" created_at = dt.datetime(2017, 2, 6, 1, 1, 1) data = [ {"key": "foo_key", "data": {"raw": "foo_raw", "sla": [], "hooks": [], "load_duration": 1.0, "full_duration": 2.0}, "created_at": created_at} ] result = map(lambda x: {"key": x["key"], "result": x["data"]["raw"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x.get("created_at").strftime( "%Y-%d-%mT%H:%M:%S"), "hooks": x["data"]["hooks"], "sla": x["data"]["sla"]}, data) fake_task = fakes.FakeTask({"status": consts.TaskStatus.FINISHED}) fake_task.get_results = mock.Mock(return_value=data) self.fake_api.task.get.return_value = fake_task self.task.results(self.fake_api, task_id) self.assertEqual(1, mock_json_dumps.call_count) self.assertEqual(1, len(mock_json_dumps.call_args[0])) self.assertSequenceEqual(result, mock_json_dumps.call_args[0][0]) self.assertEqual({"sort_keys": False, "indent": 4}, mock_json_dumps.call_args[1]) self.fake_api.task.get.assert_called_once_with(task_id) @mock.patch("rally.cli.commands.task.sys.stdout") def test_results_no_data(self, mock_stdout): task_id 
= "foo_task_id" fake_task = fakes.FakeTask({"status": consts.TaskStatus.CRASHED}) self.fake_api.task.get.return_value = fake_task self.assertEqual(1, self.task.results(self.fake_api, task_id)) self.fake_api.task.get.assert_called_once_with(task_id) expected_out = ("Task status is %s. Results " "available when it is one of %s.") % ( consts.TaskStatus.CRASHED, ", ".join((consts.TaskStatus.FINISHED, consts.TaskStatus.ABORTED))) mock_stdout.write.assert_has_calls([mock.call(expected_out)]) def _make_result(self, keys): return [{"key": {"name": key, "pos": 0}, "data": {"raw": key + "_raw", "sla": key + "_sla", "hooks": key + "_hooks", "load_duration": 1.2, "full_duration": 2.3}} for key in keys] @mock.patch("rally.cli.commands.task.jsonschema.validate", return_value=None) @mock.patch("rally.cli.commands.task.os.path") @mock.patch("rally.cli.commands.task.open", create=True) @mock.patch("rally.cli.commands.task.plot") @mock.patch("rally.cli.commands.task.webbrowser") def test_trends(self, mock_webbrowser, mock_plot, mock_open, mock_os_path, mock_validate): mock_os_path.exists = lambda p: p.startswith("path_to_") mock_os_path.expanduser = lambda p: p + "_expanded" mock_os_path.realpath.side_effect = lambda p: "realpath_" + p results_iter = iter([self._make_result(["bar"]), self._make_result(["spam"])]) fake_task = self.fake_api.task.get.return_value fake_task.get_results.side_effect = results_iter mock_plot.trends.return_value = "rendered_trends_report" mock_fd = mock.mock_open( read_data="[\"result_1_from_file\", \"result_2_from_file\"]") mock_open.side_effect = mock_fd ret = self.task.trends(self.fake_api, tasks=["ab123456-38d8-4c8f-bbcc-fc8f74b004ae", "cd654321-38d8-4c8f-bbcc-fc8f74b004ae", "path_to_file"], out="output.html", out_format="html") expected = [ {"load_duration": 1.2, "full_duration": 2.3, "sla": "bar_sla", "hooks": "bar_hooks", "key": {"name": "bar", "pos": 0}, "result": "bar_raw"}, {"load_duration": 1.2, "full_duration": 2.3, "sla": "spam_sla", "hooks": 
"spam_hooks", "key": {"name": "spam", "pos": 0}, "result": "spam_raw"}, "result_1_from_file", "result_2_from_file"] mock_plot.trends.assert_called_once_with(expected) self.assertEqual([mock.call("path_to_file_expanded", "r"), mock.call("output.html_expanded", "w+")], mock_open.mock_calls) self.assertIsNone(ret) self.assertEqual([mock.call("result_1_from_file", self.fake_api.task.TASK_RESULT_SCHEMA), mock.call("result_2_from_file", self.fake_api.task.TASK_RESULT_SCHEMA)], mock_validate.mock_calls) self.assertEqual([mock.call("ab123456-38d8-4c8f-bbcc-fc8f74b004ae"), mock.call().get_results(), mock.call("cd654321-38d8-4c8f-bbcc-fc8f74b004ae"), mock.call().get_results()], self.fake_api.task.get.mock_calls) self.assertFalse(mock_webbrowser.open_new_tab.called) mock_fd.return_value.write.assert_called_once_with( "rendered_trends_report") @mock.patch("rally.cli.commands.task.jsonschema.validate", return_value=None) @mock.patch("rally.cli.commands.task.os.path") @mock.patch("rally.cli.commands.task.open", create=True) @mock.patch("rally.cli.commands.task.plot") @mock.patch("rally.cli.commands.task.webbrowser") def test_trends_single_file_and_open_webbrowser( self, mock_webbrowser, mock_plot, mock_open, mock_os_path, mock_validate): mock_os_path.exists.return_value = True mock_os_path.expanduser = lambda path: path mock_os_path.realpath.side_effect = lambda p: "realpath_" + p mock_open.side_effect = mock.mock_open(read_data="[\"result\"]") ret = self.task.trends(self.real_api, tasks=["path_to_file"], open_it=True, out="output.html", out_format="html") self.assertIsNone(ret) mock_webbrowser.open_new_tab.assert_called_once_with( "file://realpath_output.html") @mock.patch("rally.cli.commands.task.os.path") @mock.patch("rally.cli.commands.task.open", create=True) @mock.patch("rally.cli.commands.task.plot") def test_trends_task_id_is_not_uuid_like(self, mock_plot, mock_open, mock_os_path): mock_os_path.exists.return_value = False 
self.fake_api.task.get.return_value.get_results.return_value = ( self._make_result(["foo"])) ret = self.task.trends(self.fake_api, tasks=["ab123456-38d8-4c8f-bbcc-fc8f74b004ae"], out="output.html", out_format="html") self.assertIsNone(ret) ret = self.task.trends(self.fake_api, tasks=["this-is-not-uuid"], out="output.html", out_format="html") self.assertEqual(1, ret) @mock.patch("rally.cli.commands.task.os.path") @mock.patch("rally.cli.commands.task.open", create=True) @mock.patch("rally.cli.commands.task.plot") def test_trends_wrong_results_format(self, mock_plot, mock_open, mock_os_path): mock_os_path.exists.return_value = True mock_open.side_effect = mock.mock_open(read_data="[42]") ret = self.task.trends(self.real_api, tasks=["path_to_file"], out="output.html", out_format="html") self.assertEqual(1, ret) with mock.patch("rally.api._Task.TASK_RESULT_SCHEMA", {"type": "number"}): ret = self.task.trends(self.real_api, tasks=["path_to_file"], out="output.html", out_format="html") self.assertIsNone(ret) def test_trends_no_tasks_given(self): ret = self.task.trends(self.fake_api, tasks=[], out="output.html", out_format="html") self.assertEqual(1, ret) @mock.patch("rally.cli.commands.task.jsonschema.validate", return_value=None) @mock.patch("rally.cli.commands.task.os.path.realpath", side_effect=lambda p: "realpath_%s" % p) @mock.patch("rally.cli.commands.task.open", side_effect=mock.mock_open(), create=True) @mock.patch("rally.cli.commands.task.plot") @mock.patch("rally.cli.commands.task.webbrowser") def test_report_one_uuid(self, mock_webbrowser, mock_plot, mock_open, mock_realpath, mock_validate): task_id = "eb290c30-38d8-4c8f-bbcc-fc8f74b004ae" data = [ {"key": {"name": "class.test", "pos": 0}, "data": {"raw": "foo_raw", "sla": "foo_sla", "hooks": "foo_hooks", "load_duration": 0.1, "full_duration": 1.2}, "created_at": "2017-06-02T07:33:04"}, {"key": {"name": "class.test", "pos": 0}, "data": {"raw": "bar_raw", "sla": "bar_sla", "hooks": "bar_hooks", "load_duration": 
2.1, "full_duration": 2.2}, "created_at": "2017-06-02T07:33:04"}] results = [{"key": x["key"], "result": x["data"]["raw"], "sla": x["data"]["sla"], "hooks": x["data"]["hooks"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x["created_at"]} for x in data] mock_results = mock.Mock(return_value=data) self.fake_api.task.get.return_value.get_results = mock_results mock_plot.plot.return_value = "html_report" def reset_mocks(): for m in (self.fake_api.task.get, mock_webbrowser, mock_plot, mock_open): m.reset_mock() self.task.report(self.fake_api, tasks=task_id, out="/tmp/%s.html" % task_id) mock_open.assert_called_once_with("/tmp/%s.html" % task_id, "w+") mock_plot.plot.assert_called_once_with(results, include_libs=False) mock_open.side_effect().write.assert_called_once_with("html_report") self.fake_api.task.get.assert_called_once_with(task_id) # JUnit reset_mocks() self.task.report(self.fake_api, tasks=task_id, out="/tmp/%s.html" % task_id, out_format="junit") mock_open.assert_called_once_with("/tmp/%s.html" % task_id, "w+") self.assertFalse(mock_plot.plot.called) # HTML reset_mocks() self.task.report(self.fake_api, task_id, out="output.html", open_it=True, out_format="html") mock_webbrowser.open_new_tab.assert_called_once_with( "file://realpath_output.html") mock_plot.plot.assert_called_once_with(results, include_libs=False) # HTML with embedded JS/CSS reset_mocks() self.task.report(self.fake_api, task_id, open_it=False, out="output.html", out_format="html_static") self.assertFalse(mock_webbrowser.open_new_tab.called) mock_plot.plot.assert_called_once_with(results, include_libs=True) @mock.patch("rally.cli.commands.task.jsonschema.validate", return_value=None) @mock.patch("rally.cli.commands.task.os.path.realpath", side_effect=lambda p: "realpath_%s" % p) @mock.patch("rally.cli.commands.task.open", side_effect=mock.mock_open(), create=True) @mock.patch("rally.cli.commands.task.plot") 
@mock.patch("rally.cli.commands.task.webbrowser") def test_report_bunch_uuids(self, mock_webbrowser, mock_plot, mock_open, mock_realpath, mock_validate): tasks = ["eb290c30-38d8-4c8f-bbcc-fc8f74b004ae", "eb290c30-38d8-4c8f-bbcc-fc8f74b004af"] data = [ {"key": {"name": "test", "pos": 0}, "data": {"raw": "foo_raw", "sla": "foo_sla", "hooks": "foo_hooks", "load_duration": 0.1, "full_duration": 1.2}, "created_at": "2017-06-02T07:33:04"}, {"key": {"name": "test", "pos": 0}, "data": {"raw": "bar_raw", "sla": "bar_sla", "hooks": "bar_hooks", "load_duration": 2.1, "full_duration": 2.2}, "created_at": "2017-06-02T07:33:04"}] results = [] for task_uuid in tasks: results.extend( map(lambda x: {"key": x["key"], "result": x["data"]["raw"], "sla": x["data"]["sla"], "hooks": x["data"]["hooks"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x["created_at"]}, data)) mock_results = mock.Mock(return_value=data) self.fake_api.task.get.return_value.get_results = mock_results mock_plot.plot.return_value = "html_report" def reset_mocks(): for m in (self.fake_api.task.get, mock_webbrowser, mock_plot, mock_open): m.reset_mock() self.task.report(self.fake_api, tasks=tasks, out="/tmp/1_test.html") mock_open.assert_called_once_with("/tmp/1_test.html", "w+") mock_plot.plot.assert_called_once_with(results, include_libs=False) mock_open.side_effect().write.assert_called_once_with("html_report") expected_get_calls = [mock.call(task) for task in tasks] self.fake_api.task.get.assert_has_calls( expected_get_calls, any_order=True) @mock.patch("rally.cli.commands.task.json.load") @mock.patch("rally.cli.commands.task.os.path.exists", return_value=True) @mock.patch("rally.cli.commands.task.jsonschema.validate", return_value=None) @mock.patch("rally.cli.commands.task.os.path.realpath", side_effect=lambda p: "realpath_%s" % p) @mock.patch("rally.cli.commands.task.open", create=True) @mock.patch("rally.cli.commands.task.plot") def 
test_report_one_file(self, mock_plot, mock_open, mock_realpath, mock_validate, mock_path_exists, mock_json_load): task_file = "/tmp/some_file.json" data = [ {"key": {"name": "test", "pos": 0}, "data": {"raw": "foo_raw", "sla": "foo_sla", "load_duration": 0.1, "full_duration": 1.2}, "created_at": "2017-06-02T07:33:04"}, {"key": {"name": "test", "pos": 1}, "data": {"raw": "bar_raw", "sla": "bar_sla", "load_duration": 2.1, "full_duration": 2.2}, "created_at": "2017-06-02T07:33:04"}] results = [{"key": x["key"], "result": x["data"]["raw"], "sla": x["data"]["sla"], "load_duration": x["data"]["load_duration"], "full_duration": x["data"]["full_duration"], "created_at": x["created_at"]} for x in data] mock_plot.plot.return_value = "html_report" mock_open.side_effect = mock.mock_open(read_data=results) mock_json_load.return_value = results def reset_mocks(): for m in mock_plot, mock_open, mock_json_load, mock_validate: m.reset_mock() self.task.report(self.real_api, tasks=task_file, out="/tmp/1_test.html") expected_open_calls = [mock.call(task_file, "r"), mock.call("/tmp/1_test.html", "w+")] mock_open.assert_has_calls(expected_open_calls, any_order=True) mock_plot.plot.assert_called_once_with(results, include_libs=False) mock_open.side_effect().write.assert_called_once_with("html_report") @mock.patch("rally.cli.commands.task.os.path.exists", return_value=True) @mock.patch("rally.cli.commands.task.json.load") @mock.patch("rally.cli.commands.task.open", create=True) def test_report_exceptions(self, mock_open, mock_json_load, mock_path_exists): results = [ {"key": {"name": "test", "pos": 0}, "data": {"raw": "foo_raw", "sla": "foo_sla", "load_duration": 0.1, "full_duration": 1.2}}] mock_open.side_effect = mock.mock_open(read_data=results) mock_json_load.return_value = results ret = self.task.report(self.real_api, tasks="/tmp/task.json", out="/tmp/tmp.hsml") self.assertEqual(ret, 1) for m in mock_open, mock_json_load: m.reset_mock() mock_path_exists.return_value = False ret = 
self.task.report(self.real_api, tasks="/tmp/task.json", out="/tmp/tmp.hsml") self.assertEqual(ret, 1) @mock.patch("rally.cli.commands.task.sys.stderr") @mock.patch("rally.cli.commands.task.os.path.exists", return_value=True) @mock.patch("rally.cli.commands.task.json.load") @mock.patch("rally.cli.commands.task.open", create=True) def test_report_invalid_format(self, mock_open, mock_json_load, mock_path_exists, mock_stderr): result = self.task.report(self.real_api, tasks="/tmp/task.json", out="/tmp/tmp.html", out_format="invalid") self.assertEqual(1, result) expected_out = "Invalid output format: invalid" mock_stderr.write.assert_has_calls([mock.call(expected_out)]) @mock.patch("rally.cli.commands.task.cliutils.print_list") @mock.patch("rally.cli.commands.task.envutils.get_global", return_value="123456789") def test_list(self, mock_get_global, mock_print_list): self.fake_api.task.list.return_value = [ fakes.FakeTask(uuid="a", created_at=dt.datetime.now(), updated_at=dt.datetime.now(), status="c", tag="d", deployment_name="some_name")] self.task.list(self.fake_api, status="running") self.fake_api.task.list.assert_called_once_with( deployment=mock_get_global.return_value, status=consts.TaskStatus.RUNNING) headers = ["uuid", "deployment_name", "created_at", "duration", "status", "tag"] mock_print_list.assert_called_once_with( self.fake_api.task.list.return_value, headers, sortby_index=headers.index("created_at")) @mock.patch("rally.cli.commands.task.cliutils.print_list") @mock.patch("rally.cli.commands.task.envutils.get_global", return_value="123456789") def test_list_uuids_only(self, mock_get_global, mock_print_list): self.fake_api.task.list.return_value = [ fakes.FakeTask(uuid="a", created_at=dt.datetime.now(), updated_at=dt.datetime.now(), status="c", tag="d", deployment_name="some_name")] self.task.list(self.fake_api, status="running", uuids_only=True) self.fake_api.task.list.assert_called_once_with( deployment=mock_get_global.return_value, 
status=consts.TaskStatus.RUNNING) mock_print_list.assert_called_once_with( self.fake_api.task.list.return_value, ["uuid"], print_header=False, print_border=False) def test_list_wrong_status(self): self.assertEqual(1, self.task.list(self.fake_api, deployment="fake", status="wrong non existing status")) def test_list_no_results(self): self.fake_api.task.list.return_value = [] self.assertIsNone(self.task.list(self.fake_api, deployment="fake", all_deployments=True)) self.fake_api.task.list.assert_called_once_with() self.fake_api.task.list.reset_mock() self.assertIsNone(self.task.list(self.fake_api, deployment="d", status=consts.TaskStatus.RUNNING)) self.fake_api.task.list.assert_called_once_with( deployment="d", status=consts.TaskStatus.RUNNING) def test_delete(self): task_uuid = "8dcb9c5e-d60b-4022-8975-b5987c7833f7" force = False self.task.delete(self.fake_api, task_uuid, force=force) self.fake_api.task.delete.assert_called_once_with( task_uuid, force=force) def test_delete_multiple_uuid(self): task_uuids = ["4bf35b06-5916-484f-9547-12dce94902b7", "52cad69d-d3e4-47e1-b445-dec9c5858fe8", "6a3cb11c-ac75-41e7-8ae7-935732bfb48f", "018af931-0e5a-40d5-9d6f-b13f4a3a09fc"] force = False self.task.delete(self.fake_api, task_uuids, force=force) self.assertTrue( self.fake_api.task.delete.call_count == len(task_uuids)) expected_calls = [mock.call(task_uuid, force=force) for task_uuid in task_uuids] self.assertTrue(self.fake_api.task.delete.mock_calls == expected_calls) @mock.patch("rally.cli.commands.task.cliutils.print_list") def test_sla_check(self, mock_print_list): data = [{"key": {"name": "fake_name", "pos": "fake_pos", "kw": "fake_kw"}, "data": {"scenario_duration": 42.0, "raw": [], "sla": [{"benchmark": "KeystoneBasic.create_user", "criterion": "max_seconds_per_iteration", "pos": 0, "success": False, "detail": "Max foo, actually bar"}]}}] fake_task = self.fake_api.task.get.return_value fake_task.get_results.return_value = copy.deepcopy(data) result = 
self.task.sla_check(self.fake_api, task_id="fake_task_id") self.assertEqual(1, result) self.fake_api.task.get.assert_called_with("fake_task_id") data[0]["data"]["sla"][0]["success"] = True fake_task.get_results.return_value = data result = self.task.sla_check(self.fake_api, task_id="fake_task_id", tojson=True) self.assertEqual(0, result) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.open", side_effect=mock.mock_open(read_data="{\"some\": \"json\"}"), create=True) def test_validate(self, mock_open, mock_os_path_isfile): self.fake_api.task.render_template = self.real_api.task.render_template self.task.validate(self.fake_api, "path_to_config.json", "fake_id") self.fake_api.task.validate.assert_called_once_with( "fake_id", {"some": "json"}, None) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands._load_task", side_effect=task.FailedToLoadTask) def test_validate_failed_to_load_task(self, mock__load_task, mock_os_path_isfile): args = "args" args_file = "args_file" result = self.task.validate(self.real_api, "path_to_task", "fake_deployment_id", task_args=args, task_args_file=args_file) self.assertEqual(1, result) mock__load_task.assert_called_once_with( self.real_api, "path_to_task", args, args_file) @mock.patch("rally.cli.commands.task.os.path.isfile", return_value=True) @mock.patch("rally.cli.commands.task.TaskCommands._load_task") def test_validate_invalid(self, mock__load_task, mock_os_path_isfile): exc = exceptions.InvalidTaskException self.fake_api.task.validate.side_effect = exc result = self.task.validate(self.fake_api, "path_to_task", "deployment") self.assertEqual(1, result) self.fake_api.task.validate.assert_called_once_with( "deployment", mock__load_task.return_value, None) @mock.patch("rally.common.fileutils._rewrite_env_file") def test_use(self, mock__rewrite_env_file): task_id = "80422553-5774-44bd-98ac-38bd8c7a0feb" 
self.task.use(self.fake_api, task_id) mock__rewrite_env_file.assert_called_once_with( os.path.expanduser("~/.rally/globals"), ["RALLY_TASK=%s\n" % task_id]) def test_use_not_found(self): task_id = "ddc3f8ba-082a-496d-b18f-72cdf5c10a14" exc = exceptions.TaskNotFound(uuid=task_id) self.fake_api.task.get.side_effect = exc self.assertRaises(exceptions.TaskNotFound, self.task.use, self.fake_api, task_id) @mock.patch("rally.task.exporter.Exporter.get") def test_export(self, mock_exporter_get): mock_client = mock.Mock() mock_exporter_class = mock.Mock(return_value=mock_client) mock_exporter_get.return_value = mock_exporter_class self.task.export(self.fake_api, "fake_uuid", "file:///fake_path.json") mock_exporter_get.assert_called_once_with("file") mock_client.export.assert_called_once_with("fake_uuid") @mock.patch("rally.task.exporter.Exporter.get") def test_export_exception(self, mock_exporter_get): mock_client = mock.Mock() mock_exporter_class = mock.Mock(return_value=mock_client) mock_exporter_get.return_value = mock_exporter_class mock_client.export.side_effect = IOError self.task.export(self.fake_api, "fake_uuid", "file:///fake_path.json") mock_exporter_get.assert_called_once_with("file") mock_client.export.assert_called_once_with("fake_uuid") @mock.patch("rally.cli.commands.task.sys.stdout") @mock.patch("rally.task.exporter.Exporter.get") def test_export_InvalidConnectionString(self, mock_exporter_get, mock_stdout): mock_exporter_class = mock.Mock( side_effect=exceptions.InvalidConnectionString) mock_exporter_get.return_value = mock_exporter_class self.task.export(self.fake_api, "fake_uuid", "file:///fake_path.json") mock_stdout.write.assert_has_calls([ mock.call("The connection string is not valid: None. 
" "Please check your connection string."), mock.call("\n")]) mock_exporter_get.assert_called_once_with("file") @mock.patch("rally.cli.commands.task.plot.charts") @mock.patch("rally.cli.commands.task.sys.stdout") @ddt.data({"error_type": "test_no_trace_type", "error_message": "no_trace_error_message", "error_traceback": None, }, {"error_type": "test_error_type", "error_message": "test_error_message", "error_traceback": "test\nerror\ntraceback", }) @ddt.unpack def test_show_task_errors_no_trace(self, mock_stdout, mock_charts, error_type, error_message, error_traceback=None): mock_charts.MainStatsTable.columns = ["Column 1", "Column 2"] test_uuid = "test_task_id" error_data = [error_type, error_message] if error_traceback: error_data.append(error_traceback) self.fake_api.task.get_detailed.return_value = { "id": "task", "uuid": test_uuid, "status": "finished", "results": [{ "key": { "name": "fake_name", "pos": "fake_pos", "kw": "fake_kw" }, "info": { "stat": {"cols": ["Column 1", "Column 2"], "rows": [[11, 22], [33, 44]]}, "load_duration": 3.2, "full_duration": 3.5, "iterations_count": 1, "iterations_failed": 1, "atomic": {"foo": {}, "bar": {}}}, "iterations": [ {"duration": 0.9, "idle_duration": 0.1, "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 0.6, "bar": 0.7}, "error": error_data }, ]}, ], "verification_log": json.dumps([error_type, error_message, error_traceback]) } self.task.detailed(self.fake_api, test_uuid) self.fake_api.task.get_detailed.assert_called_once_with( test_uuid, extended_results=True) mock_stdout.write.assert_has_calls([ mock.call(error_traceback or "No traceback available.") ], any_order=False) rally-0.9.1/tests/unit/cli/commands/test_plugin.py0000664000567000056710000001250413073417716023420 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock import six from rally.cli import cliutils from rally.cli.commands import plugin as plugin_cmd from rally.common.plugin import plugin from rally.common import utils from tests.unit import test @ddt.ddt class PluginCommandsTestCase(test.TestCase): def setUp(self): super(PluginCommandsTestCase, self).setUp() self.plugin_cmd = plugin_cmd.PluginCommands() @plugin.configure("p1", "p1_ns") class Plugin1(plugin.Plugin): """T1. Description of T1 :param x: x arg :param y: y arg """ pass self.Plugin1 = Plugin1 @plugin.configure("p2", "p2_ns") class Plugin2(plugin.Plugin): """T2.""" pass self.Plugin2 = Plugin2 @plugin.configure("p3", "p2_ns") class Plugin3(plugin.Plugin): """T3.""" pass self.Plugin3 = Plugin3 def tearDown(self): super(PluginCommandsTestCase, self).tearDown() self.Plugin1.unregister() self.Plugin2.unregister() self.Plugin3.unregister() def test__print_plugins_list(self): out = six.StringIO() original_print_list = cliutils.print_list def print_list(*args, **kwargs): kwargs["out"] = out original_print_list(*args, **kwargs) with mock.patch.object(plugin_cmd.cliutils, "print_list", new=print_list): plugin_cmd.PluginCommands._print_plugins_list( [self.Plugin1, self.Plugin2]) self.assertEqual( "+-------------+------+-----------+-------+\n" "| Plugin base | Name | Namespace | Title |\n" "+-------------+------+-----------+-------+\n" "| Plugin | p1 | p1_ns | T1. |\n" "| Plugin | p2 | p2_ns | T2. 
|\n" "+-------------+------+-----------+-------+\n", out.getvalue()) def test_show(self): with utils.StdOutCapture() as out: plugin_cmd.PluginCommands().show(None, "p1", "p1_ns") output = out.getvalue() self.assertIn("NAME\n\tp1", output) self.assertIn("NAMESPACE\n\tp1_ns", output) self.assertIn("cli.commands.test_plugin", output) self.assertIn("DESCRIPTION\n\tDescription of T1", output) self.assertIn("PARAMETERS", output) @ddt.data( { "name": "nonex", "namespace": None, "text": "There is no plugin: nonex\n" }, { "name": "nonexplugin", "namespace": "nonex", "text": "There is no plugin: nonexplugin in nonex namespace\n" } ) @ddt.unpack def test_show_not_found(self, name, namespace, text): with utils.StdOutCapture() as out: plugin_cmd.PluginCommands().show(None, name, namespace) self.assertEqual(out.getvalue(), text) @mock.patch("rally.cli.commands.plugin.PluginCommands._print_plugins_list") def test_show_many(self, mock_plugin_commands__print_plugins_list): with utils.StdOutCapture() as out: with mock.patch("rally.cli.commands.plugin.plugin.Plugin." 
"get_all") as mock_plugin_get_all: mock_plugin_get_all.return_value = [self.Plugin2, self.Plugin3] plugin_cmd.PluginCommands().show(None, "p", "p2_ns") self.assertEqual(out.getvalue(), "Multiple plugins found:\n") mock_plugin_get_all.assert_called_once_with(namespace="p2_ns") mock_plugin_commands__print_plugins_list.assert_called_once_with([ self.Plugin2, self.Plugin3]) @ddt.data( { "name": None, "namespace": "nonex", "text": "There is no plugin namespace: nonex\n" }, { "name": "p2", "namespace": "p1_ns", "text": "There is no plugin: p2\n" } ) @ddt.unpack def test_list_not_found(self, name, namespace, text): with utils.StdOutCapture() as out: plugin_cmd.PluginCommands().list(None, name, namespace) self.assertEqual(out.getvalue(), text) @mock.patch("rally.cli.commands.plugin.PluginCommands._print_plugins_list") def test_list(self, mock_plugin_commands__print_plugins_list): plugin_cmd.PluginCommands().list(None, None, "p1_ns") plugin_cmd.PluginCommands().list(None, "p1", "p1_ns") plugin_cmd.PluginCommands().list(None, "p2", None) mock_plugin_commands__print_plugins_list.assert_has_calls([ mock.call([self.Plugin1]), mock.call([self.Plugin1]), mock.call([self.Plugin2]) ]) rally-0.9.1/tests/unit/cli/commands/test_verify.py0000664000567000056710000006442613073417720023433 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import datetime as dt import tempfile import mock import six from rally.cli import cliutils from rally.cli.commands import verify from rally.cli import envutils from rally import consts from rally import exceptions from rally import plugins from rally.verification import reporter from tests.unit import fakes from tests.unit import test class VerifyCommandsTestCase(test.TestCase): def setUp(self): super(VerifyCommandsTestCase, self).setUp() self.verify = verify.VerifyCommands() self.fake_api = fakes.FakeAPI() @mock.patch("rally.cli.commands.verify.cliutils.print_list") @mock.patch("rally.cli.commands.verify.logging.is_debug", return_value=True) def test_list_plugins(self, mock_is_debug, mock_print_list): self.verify.list_plugins(self.fake_api, namespace="some") self.fake_api.verifier.list_plugins.assert_called_once_with("some") @mock.patch("rally.cli.commands.verify.fileutils.update_globals_file") def test_create_verifier(self, mock_update_globals_file): self.fake_api.verifier.create.return_value = "v_uuid" self.fake_api.verifier.get.return_value = mock.Mock(uuid="v_uuid") self.verify.create_verifier(self.fake_api, "a", vtype="b", namespace="c", source="d", version="e", system_wide=True, extra={}) self.fake_api.verifier.create.assert_called_once_with( "a", vtype="b", namespace="c", source="d", version="e", system_wide=True, extra_settings={}) self.fake_api.verifier.get.assert_called_once_with("v_uuid") mock_update_globals_file.assert_called_once_with( envutils.ENV_VERIFIER, "v_uuid") @mock.patch("rally.cli.commands.verify.fileutils.update_globals_file") def test_use_verifier(self, mock_update_globals_file): self.fake_api.verifier.get.return_value = mock.Mock(uuid="v_uuid") self.verify.use_verifier(self.fake_api, "v_uuid") self.fake_api.verifier.get.assert_called_once_with("v_uuid") mock_update_globals_file.assert_called_once_with( envutils.ENV_VERIFIER, "v_uuid") @mock.patch("rally.cli.commands.verify.cliutils.print_list") 
@mock.patch("rally.cli.commands.verify.logging.is_debug", return_value=True) def test_list_verifiers(self, mock_is_debug, mock_print_list): self.verify.list_verifiers(self.fake_api) self.fake_api.verifier.list.return_value = [] self.verify.list_verifiers(self.fake_api, "foo") self.verify.list_verifiers(self.fake_api) self.assertEqual([mock.call(None), mock.call("foo"), mock.call(None)], self.fake_api.verifier.list.call_args_list) @mock.patch("rally.cli.commands.verify.envutils.get_global") def test_show_verifier(self, mock_get_global): fake_verifier = self.fake_api.verifier.get.return_value fake_verifier.uuid = "v_uuid" fake_verifier.name = "Verifier!" fake_verifier.type = "CoolTool" fake_verifier.namespace = "ExampleNamespace" fake_verifier.description = "The best tool in the world" fake_verifier.created_at = dt.datetime(2016, 1, 1, 17, 0, 3, 66) fake_verifier.updated_at = dt.datetime(2016, 1, 1, 17, 1, 5, 77) fake_verifier.status = "installed" fake_verifier.source = "https://example.com" fake_verifier.version = "master" fake_verifier.system_wide = False fake_verifier.extra_settings = {} fake_verifier.manager.repo_dir = "./verifiers/repo" fake_verifier.manager.venv_dir = "./verifiers/.venv" # It is a hard task to mock default value of function argument, so we # need to apply this workaround original_print_dict = cliutils.print_dict print_dict_calls = [] def print_dict(*args, **kwargs): print_dict_calls.append(six.StringIO()) kwargs["out"] = print_dict_calls[-1] original_print_dict(*args, **kwargs) with mock.patch.object(verify.cliutils, "print_dict", new=print_dict): self.verify.show_verifier(self.fake_api, "v_uuid") self.assertEqual(1, len(print_dict_calls)) self.assertEqual( "+---------------------------------------------+\n" "| Verifier |\n" "+----------------+----------------------------+\n" "| UUID | v_uuid |\n" "| Status | installed |\n" "| Created at | 2016-01-01 17:00:03 |\n" "| Updated at | 2016-01-01 17:01:05 |\n" "| Active | - |\n" "| Name | Verifier! 
|\n" "| Description | The best tool in the world |\n" "| Type | CoolTool |\n" "| Namespace | ExampleNamespace |\n" "| Source | https://example.com |\n" "| Version | master |\n" "| System-wide | False |\n" "| Extra settings | - |\n" "| Location | ./verifiers/repo |\n" "| Venv location | ./verifiers/.venv |\n" "+----------------+----------------------------+\n", print_dict_calls[0].getvalue()) self.fake_api.verifier.get.assert_called_once_with("v_uuid") def test_delete_verifier(self): self.verify.delete_verifier(self.fake_api, "v_id", "d_id", force=True) self.fake_api.verifier.delete.assert_called_once_with( "v_id", "d_id", True) def test_update_verifier(self): self.verify.update_verifier(self.fake_api, "v_id") self.assertFalse(self.fake_api.verifier.update.called) self.verify.update_verifier(self.fake_api, "v_id", update_venv=True, system_wide=True) self.assertFalse(self.fake_api.verifier.update.called) self.verify.update_verifier(self.fake_api, "v_id", system_wide=True, no_system_wide=True) self.assertFalse(self.fake_api.verifier.update.called) self.verify.update_verifier(self.fake_api, "v_id", version="a", system_wide=True) self.fake_api.verifier.update.assert_called_once_with( "v_id", system_wide=True, version="a", update_venv=None) @mock.patch("rally.cli.commands.verify.open", create=True) @mock.patch("rally.cli.commands.verify.os.path.exists") def test_configure_verifier(self, mock_exists, mock_open): self.verify.configure_verifier(self.fake_api, "v_id", "d_id", new_configuration="/p/a/t/h", reconfigure=True, show=True) self.assertFalse(self.fake_api.verifier.configure.called) mock_exists.return_value = False self.verify.configure_verifier(self.fake_api, "v_id", "d_id", new_configuration="/p/a/t/h", show=True) self.assertFalse(self.fake_api.verifier.override_configuration.called) mock_exists.return_value = True mock_open.return_value = mock.mock_open(read_data="data").return_value self.verify.configure_verifier(self.fake_api, "v_id", "d_id", 
new_configuration="/p/a/t/h", show=True) mock_open.assert_called_once_with("/p/a/t/h") self.fake_api.verifier.override_configuration("v_id", "d_id", "data") tf = tempfile.NamedTemporaryFile() with open(tf.name, "w") as f: f.write("[DEFAULT]\nopt = val\n[foo]\nopt = val") self.verify.configure_verifier(self.fake_api, "v_id", "d_id", extra_options=tf.name) expected_options = {"foo": {"opt": "val"}, "DEFAULT": {"opt": "val"}} self.fake_api.verifier.configure.assert_called_once_with( "v_id", "d_id", extra_options=expected_options, reconfigure=False) self.verify.configure_verifier(self.fake_api, "v_id", "d_id", extra_options="{foo: {opt: val}, " "DEFAULT: {opt: val}}") self.fake_api.verifier.configure.assert_called_with( "v_id", "d_id", extra_options=expected_options, reconfigure=False) def test_list_verifier_tests(self): self.fake_api.verifier.list_tests.return_value = ["test_1", "test_2"] self.verify.list_verifier_tests(self.fake_api, "v_id", "p") self.fake_api.verifier.list_tests.return_value = [] self.verify.list_verifier_tests(self.fake_api, "v_id", "p") self.fake_api.verifier.list_tests.assert_has_calls( [mock.call("v_id", "p"), mock.call("v_id", "p")]) def test_add_verifier_ext(self): self.verify.add_verifier_ext(self.fake_api, "v_id", "a", "b", "c") self.fake_api.verifier.add_extension.assert_called_once_with( "v_id", source="a", version="b", extra_settings="c") @mock.patch("rally.cli.commands.verify.cliutils.print_list") @mock.patch("rally.cli.commands.verify.logging.is_debug", return_value=True) def test_list_verifier_exts(self, mock_is_debug, mock_print_list): self.verify.list_verifier_exts(self.fake_api, "v_id") self.fake_api.verifier.list_extensions.return_value = [] self.verify.list_verifier_exts(self.fake_api, "v_id") self.assertEqual( [mock.call("v_id"), mock.call("v_id")], self.fake_api.verifier.list_extensions.call_args_list) def test_delete_verifier_ext(self): self.verify.delete_verifier_ext(self.fake_api, "v_id", "ext_name") 
        self.fake_api.verifier.delete_extension.assert_called_once_with(
            "v_id", "ext_name")

    @mock.patch("rally.cli.commands.verify.fileutils.update_globals_file")
    @mock.patch("rally.cli.commands.verify.os.path.exists")
    def test_start(self, mock_exists, mock_update_globals_file):
        """Exercise `rally verify start` argument handling.

        Covers: pattern together with load_list (no API call is made),
        missing list files, parsing of load/skip/xfail list files, and
        the do_use flag.
        """
        # Passing both "pattern" and "load_list" results in no API call.
        self.verify.start(self.fake_api, "v_id", "d_id", pattern="pattern",
                          load_list="load-list")
        self.assertFalse(self.fake_api.verification.start.called)

        verification = mock.Mock(uuid="v_uuid")
        # One failed test; results.filter_tests() is stubbed to return it.
        failed_test = {
            "test_2": {
                "name": "test_2",
                "status": "fail",
                "duration": 2,
                "traceback": "Some traceback"
            }
        }
        test_results = {
            "tests": {
                "test_1": {
                    "name": "test_1",
                    "status": "success",
                    "duration": 2,
                    "tags": []
                }
            },
            "totals": {
                "tests_count": 2,
                "tests_duration": 4,
                "success": 2,
                "skipped": 0,
                "expected_failures": 0,
                "unexpected_success": 0,
                "failures": 0
            }
        }
        test_results["tests"].update(failed_test)
        results = mock.Mock(**test_results)
        results.filter_tests.return_value = failed_test
        self.fake_api.verification.start.return_value = (verification,
                                                         results)
        self.fake_api.verification.get.return_value = verification

        # A non-existent load-list file aborts before calling the API.
        mock_exists.return_value = False
        self.verify.start(self.fake_api, "v_id", "d_id",
                          load_list="/p/a/t/h")
        self.assertFalse(self.fake_api.verification.start.called)

        # An existing load-list file is parsed into a list of test names.
        mock_exists.return_value = True
        tf = tempfile.NamedTemporaryFile()
        with open(tf.name, "w") as f:
            f.write("test_1\ntest_2")
        self.verify.start(self.fake_api, "v_id", "d_id", tags=["foo"],
                          load_list=tf.name)
        self.fake_api.verification.start.assert_called_once_with(
            "v_id", "d_id", tags=["foo"], load_list=["test_1", "test_2"])

        # A non-existent skip-list file aborts before calling the API.
        mock_exists.return_value = False
        self.fake_api.verification.start.reset_mock()
        self.verify.start(self.fake_api, "v_id", "d_id",
                          skip_list="/p/a/t/h")
        self.assertFalse(self.fake_api.verification.start.called)

        # Skip-list files map "test: reason" (reason may be empty).
        tf = tempfile.NamedTemporaryFile()
        with open(tf.name, "w") as f:
            f.write("test_1:\ntest_2: Reason\n")
        mock_exists.return_value = True
        self.verify.start(self.fake_api, "v_id", "d_id", skip_list=tf.name)
        self.fake_api.verification.start.assert_called_once_with(
            "v_id", "d_id", tags=None,
            skip_list={"test_1": None, "test_2": "Reason"})

        # Same file handling for the xfail list.
        mock_exists.return_value = False
        self.fake_api.verification.start.reset_mock()
        self.verify.start(self.fake_api, "v_id", "d_id",
                          xfail_list="/p/a/t/h")
        self.assertFalse(self.fake_api.verification.start.called)
        tf = tempfile.NamedTemporaryFile()
        with open(tf.name, "w") as f:
            f.write("test_1:\ntest_2: Reason\n")
        mock_exists.return_value = True
        self.verify.start(self.fake_api, "v_id", "d_id",
                          xfail_list=tf.name)
        self.fake_api.verification.start.assert_called_once_with(
            "v_id", "d_id", tags=None,
            xfail_list={"test_1": None, "test_2": "Reason"})

        # By default the new verification becomes the current one.
        self.fake_api.verification.get.assert_called_with("v_uuid")
        mock_update_globals_file.assert_called_with(
            envutils.ENV_VERIFICATION, "v_uuid")

        # With do_use=False the globals file must stay untouched.
        self.fake_api.verification.get.reset_mock()
        mock_update_globals_file.reset_mock()
        self.verify.start(self.fake_api, "v_id", "d_id", detailed=True,
                          do_use=False)
        self.assertFalse(self.fake_api.verification.get.called)
        self.assertFalse(mock_update_globals_file.called)
test_rerun(self): verification = mock.Mock(uuid="v_uuid") results = mock.Mock(totals={"tests_count": 2, "tests_duration": 4, "success": 2, "skipped": 0, "expected_failures": 0, "unexpected_success": 0, "failures": 0}) self.fake_api.verification.rerun.return_value = (verification, results) self.verify.rerun(self.fake_api, "v_uuid", "d_id", failed=True,) self.fake_api.verification.rerun.assert_called_once_with( "v_uuid", concurrency=None, deployment_id="d_id", failed=True, tags=None) def test_show(self): deployment_name = "Some Deploy" deployment_uuid = "some-deploy-uuid" verifier_name = "My Verifier" verifier_uuid = "my-verifier-uuid" verifier_type = "OldSchoolTestTool" verifier_namespace = "OpenStack" verifier = mock.Mock(type=verifier_type, namespace=verifier_namespace) verifier.name = verifier_name verifier.uuid = verifier_uuid verification = { "uuid": "uuuiiiiddd", "tags": ["bar", "foo"], "status": "success", "created_at": dt.datetime(2016, 1, 1, 17, 0, 3, 66), "updated_at": dt.datetime(2016, 1, 1, 17, 1, 5, 77), "tests_count": 2, "tests_duration": 4, "success": 1, "skipped": 0, "expected_failures": 0, "unexpected_success": 0, "failures": 1, "run_args": { "load_list": ["test_1", "test_2"], "skip_list": ["test_3"], "concurrency": "3" }, "tests": { "test_1": { "name": "test_1", "status": "success", "duration": 2, "tags": [] }, "test_2": { "name": "test_2", "status": "fail", "duration": 2, "traceback": "Some traceback" } } } self.fake_api.verifier.get.return_value = verifier self.fake_api.verification.get.return_value = mock.Mock(**verification) self.fake_api.deployment.get.return_value = {"name": deployment_name, "uuid": deployment_uuid} # It is a hard task to mock default value of function argument, so we # need to apply this workaround original_print_dict = cliutils.print_dict print_dict_calls = [] def print_dict(*args, **kwargs): print_dict_calls.append(six.StringIO()) kwargs["out"] = print_dict_calls[-1] original_print_dict(*args, **kwargs) with 
mock.patch.object(verify.cliutils, "print_dict", new=print_dict): self.verify.show(self.fake_api, "v_uuid", detailed=True) self.assertEqual(1, len(print_dict_calls)) self.assertEqual( "+----------------------------------------------------------------" "--------------------+\n" "| Verification " " |\n" "+---------------------+------------------------------------------" "--------------------+\n" "| UUID | uuuiiiiddd " " |\n" "| Status | success " " |\n" "| Started at | 2016-01-01 17:00:03 " " |\n" "| Finished at | 2016-01-01 17:01:05 " " |\n" "| Duration | 0:01:02 " " |\n" "| Run arguments | concurrency: 3 " " |\n" "| | load_list: (value is too long, will be di" "splayed separately) |\n" "| | skip_list: (value is too long, will be di" "splayed separately) |\n" "| Tags | bar, foo " " |\n" "| Verifier name | My Verifier (UUID: my-verifier-uuid) " " |\n" "| Verifier type | OldSchoolTestTool (namespace: OpenStack) " " |\n" "| Deployment name | Some Deploy (UUID: some-deploy-uuid) " " |\n" "| Tests count | 2 " " |\n" "| Tests duration, sec | 4 " " |\n" "| Success | 1 " " |\n" "| Skipped | 0 " " |\n" "| Expected failures | 0 " " |\n" "| Unexpected success | 0 " " |\n" "| Failures | 1 " " |\n" "+---------------------+------------------------------------------" "--------------------+\n", print_dict_calls[0].getvalue()) self.fake_api.verification.get.assert_called_once_with("v_uuid") @mock.patch("rally.cli.commands.verify.cliutils.print_list") def test_list(self, mock_print_list): self.verify.list(self.fake_api, "v_id", "d_id") self.fake_api.verification.list.return_value = [] self.verify.list(self.fake_api, "v_id", "d_id", "foo", "bar") self.verify.list(self.fake_api) self.assertEqual([mock.call("v_id", "d_id", None, None), mock.call("v_id", "d_id", "foo", "bar"), mock.call(None, None, None, None)], self.fake_api.verification.list.call_args_list) def test_delete(self): self.verify.delete(self.fake_api, "v_uuid") 
self.fake_api.verification.delete.assert_called_once_with("v_uuid") self.verify.delete(self.fake_api, ["v1_uuid", "v2_uuid"]) self.fake_api.verification.delete.assert_has_calls( [mock.call("v1_uuid"), mock.call("v2_uuid")]) @mock.patch("rally.cli.commands.verify.os") @mock.patch("rally.cli.commands.verify.webbrowser.open_new_tab") @mock.patch("rally.cli.commands.verify.open", create=True) def test_report(self, mock_open, mock_open_new_tab, mock_os): output_dest = "/p/a/t/h" output_type = "type" content = "content" self.fake_api.verification.report.return_value = { "files": {output_dest: content}, "open": output_dest} mock_os.path.exists.return_value = False self.verify.report(self.fake_api, "v_uuid", output_type=output_type, output_dest=output_dest, open_it=True) self.fake_api.verification.report.assert_called_once_with( ["v_uuid"], output_type, output_dest) mock_open.assert_called_once_with(mock_os.path.abspath.return_value, "w") mock_os.makedirs.assert_called_once_with( mock_os.path.dirname.return_value) mock_open.reset_mock() mock_open_new_tab.reset_mock() mock_os.makedirs.reset_mock() mock_os.path.exists.return_value = True self.fake_api.verification.report.return_value = { "files": {output_dest: content}, "print": "foo"} self.verify.report(self.fake_api, "v_uuid", output_type=output_type, output_dest=output_dest) self.assertFalse(mock_open_new_tab.called) self.assertFalse(mock_os.makedirs.called) @mock.patch("rally.cli.commands.verify.VerifyCommands.use") @mock.patch("rally.cli.commands.verify.open", create=True) @mock.patch("rally.cli.commands.verify.os.path.exists") def test_import_results(self, mock_exists, mock_open, mock_use): mock_exists.return_value = False self.verify.import_results(self.fake_api, "v_id", "d_id") self.assertFalse(self.fake_api.verification.import_results.called) verification = mock.Mock(uuid="verification_uuid") results = mock.Mock(totals={"tests_count": 2, "tests_duration": 4, "success": 2, "skipped": 0, "expected_failures": 0, 
"unexpected_success": 0, "failures": 0}) self.fake_api.verification.import_results.return_value = ( verification, results) mock_exists.return_value = True mock_open.return_value = mock.mock_open(read_data="data").return_value self.verify.import_results(self.fake_api, "v_id", "d_id", file_to_parse="/p/a/t/h") mock_open.assert_called_once_with("/p/a/t/h", "r") self.fake_api.verification.import_results.assert_called_once_with( "v_id", "d_id", "data") mock_use.assert_called_with("verification_uuid") mock_use.reset_mock() self.verify.import_results(self.fake_api, "v_id", "d_id", do_use=False) self.assertFalse(mock_use.called) @plugins.ensure_plugins_are_loaded def test_default_reporters(self): available_reporters = { cls.get_name().lower() for cls in reporter.VerificationReporter.get_all() # ignore possible external plugins if cls.__module__.startswith("rally")} listed_in_cli = {name.lower() for name in verify.DEFAULT_REPORT_TYPES} not_listed = available_reporters - listed_in_cli if not_listed: self.fail("All default reporters should be listed in " "%s.DEFAULTS_REPORTERS (case of letters doesn't matter)." " Missed reporters: %s" % (verify.__name__, ", ".join(not_listed))) rally-0.9.1/tests/unit/cli/commands/test_deployment.py0000664000567000056710000004702713073417720024305 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os from keystoneclient import exceptions as keystone_exceptions import mock from rally.cli.commands import deployment from rally.cli import envutils from rally.common import objects from rally import consts from rally import exceptions from tests.unit import fakes from tests.unit import test class DeploymentCommandsTestCase(test.TestCase): def setUp(self): super(DeploymentCommandsTestCase, self).setUp() self.deployment = deployment.DeploymentCommands() self.fake_api = fakes.FakeAPI() @mock.patch.dict(os.environ, {"RALLY_DEPLOYMENT": "my_deployment_id"}) @mock.patch("rally.cli.commands.deployment.DeploymentCommands.list") @mock.patch("rally.cli.commands.deployment.open", side_effect=mock.mock_open(read_data="{\"some\": \"json\"}"), create=True) def test_create(self, mock_open, mock_deployment_commands_list): self.deployment.create(self.fake_api, "fake_deploy", False, "path_to_config.json") self.fake_api.deployment.create.assert_called_once_with( {"some": "json"}, "fake_deploy") @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url", "OS_USERNAME": "fake_username", "OS_PASSWORD": "fake_password", "OS_TENANT_NAME": "fake_tenant_name", "OS_REGION_NAME": "fake_region_name", "OS_ENDPOINT_TYPE": "fake_endpoint_typeURL", "OS_ENDPOINT": "fake_endpoint", "OS_INSECURE": "True", "OS_CACERT": "fake_cacert", "RALLY_DEPLOYMENT": "fake_deployment_id"}) @mock.patch("rally.cli.commands.deployment.DeploymentCommands.list") def test_createfromenv_keystonev2(self, mock_list): self.deployment.create(self.fake_api, "from_env", True) self.fake_api.deployment.create.assert_called_once_with( { "type": "ExistingCloud", "auth_url": "fake_auth_url", "region_name": "fake_region_name", "endpoint_type": "fake_endpoint_type", "endpoint": "fake_endpoint", "admin": { "username": "fake_username", "password": "fake_password", "tenant_name": "fake_tenant_name" }, "https_insecure": True, "https_cacert": "fake_cacert" }, "from_env" ) @mock.patch.dict(os.environ, {"OS_AUTH_URL": 
"fake_auth_url", "OS_USERNAME": "fake_username", "OS_PASSWORD": "fake_password", "OS_TENANT_NAME": "fake_tenant_name", "OS_REGION_NAME": "fake_region_name", "OS_ENDPOINT_TYPE": "fake_endpoint_typeURL", "OS_PROJECT_DOMAIN_NAME": "fake_pdn", "OS_USER_DOMAIN_NAME": "fake_udn", "OS_ENDPOINT": "fake_endpoint", "OS_INSECURE": "True", "OS_CACERT": "fake_cacert", "RALLY_DEPLOYMENT": "fake_deployment_id"}) @mock.patch("rally.cli.commands.deployment.DeploymentCommands.list") def test_createfromenv_keystonev3(self, mock_list): self.deployment.create(self.fake_api, "from_env", True) self.fake_api.deployment.create.assert_called_once_with( { "type": "ExistingCloud", "auth_url": "fake_auth_url", "region_name": "fake_region_name", "endpoint_type": "fake_endpoint_type", "endpoint": "fake_endpoint", "admin": { "username": "fake_username", "password": "fake_password", "user_domain_name": "fake_udn", "project_domain_name": "fake_pdn", "project_name": "fake_tenant_name" }, "https_insecure": True, "https_cacert": "fake_cacert" }, "from_env" ) @mock.patch("rally.cli.commands.deployment.DeploymentCommands.list") @mock.patch("rally.cli.commands.deployment.DeploymentCommands.use") @mock.patch("rally.cli.commands.deployment.open", side_effect=mock.mock_open(read_data="{\"uuid\": \"uuid\"}"), create=True) def test_create_and_use(self, mock_open, mock_deployment_commands_use, mock_deployment_commands_list): self.fake_api.deployment.create.return_value = dict(uuid="uuid") self.deployment.create(self.fake_api, "fake_deploy", False, "path_to_config.json", True) self.fake_api.deployment.create.assert_called_once_with( {"uuid": "uuid"}, "fake_deploy") mock_deployment_commands_list.assert_called_once_with( self.fake_api, deployment_list=[{"uuid": "uuid"}]) mock_deployment_commands_use.assert_called_once_with( self.fake_api, "uuid") def test_recreate(self): deployment_id = "43924f8b-9371-4152-af9f-4cf02b4eced4" self.deployment.recreate(self.fake_api, deployment_id) 
self.fake_api.deployment.recreate.assert_called_once_with( deployment_id, None) @mock.patch("rally.cli.commands.deployment.open", side_effect=mock.mock_open(read_data="{\"some\": \"json\"}"), create=True) def test_recreate_config(self, mock_open): deployment_id = "43924f8b-9371-4152-af9f-4cf02b4eced4" self.deployment.recreate(self.fake_api, deployment_id, filename="my.json") self.fake_api.deployment.recreate.assert_called_once_with( deployment_id, {"some": "json"}) @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_recreate_no_deployment_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.deployment.recreate, None) def test_destroy(self): deployment_id = "53fd0273-60ce-42e5-a759-36f1a683103e" self.deployment.destroy(self.fake_api, deployment_id) self.fake_api.deployment.destroy.assert_called_once_with(deployment_id) @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_destroy_no_deployment_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.deployment.destroy, self.fake_api, None) @mock.patch("rally.cli.commands.deployment.cliutils.print_list") @mock.patch("rally.cli.commands.deployment.utils.Struct") @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_list_different_deployment_id(self, mock_get_global, mock_struct, mock_print_list): current_deployment_id = "26a3ce76-0efa-40e4-86e5-514574bd1ff6" mock_get_global.return_value = current_deployment_id fake_deployment_list = [ {"uuid": "fa34aea2-ae2e-4cf7-a072-b08d67466e3e", "created_at": "03-12-2014", "name": "dep1", "status": "deploy->started", "active": "False"}] self.fake_api.deployment.list.return_value = fake_deployment_list self.deployment.list(self.fake_api) fake_deployment = fake_deployment_list[0] fake_deployment["active"] = "" 
mock_struct.assert_called_once_with(**fake_deployment) headers = ["uuid", "created_at", "name", "status", "active"] mock_print_list.assert_called_once_with([mock_struct()], headers, sortby_index=headers.index( "created_at")) @mock.patch("rally.cli.commands.deployment.cliutils.print_list") @mock.patch("rally.cli.commands.deployment.utils.Struct") @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_list_current_deployment_id(self, mock_get_global, mock_struct, mock_print_list): current_deployment_id = "64258e84-ffa1-4011-9e4c-aba07bdbcc6b" mock_get_global.return_value = current_deployment_id fake_deployment_list = [{"uuid": current_deployment_id, "created_at": "13-12-2014", "name": "dep2", "status": "deploy->finished", "active": "True"}] self.fake_api.deployment.list.return_value = fake_deployment_list self.deployment.list(self.fake_api) fake_deployment = fake_deployment_list[0] fake_deployment["active"] = "*" mock_struct.assert_called_once_with(**fake_deployment) headers = ["uuid", "created_at", "name", "status", "active"] mock_print_list.assert_called_once_with([mock_struct()], headers, sortby_index=headers.index( "created_at")) @mock.patch("json.dumps") def test_config(self, mock_json_dumps): deployment_id = "fa4a423e-f15d-4d83-971a-89574f892999" value = {"config": "config"} self.fake_api.deployment.get.return_value = value self.deployment.config(self.fake_api, deployment_id) mock_json_dumps.assert_called_once_with(value["config"], sort_keys=True, indent=4) self.fake_api.deployment.get.assert_called_once_with(deployment_id) @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_config_no_deployment_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.deployment.config, self.fake_api, None) @mock.patch("rally.cli.commands.deployment.cliutils.print_list") @mock.patch("rally.cli.commands.deployment.utils.Struct") def 
test_show(self, mock_struct, mock_print_list): deployment_id = "b1a6153e-a314-4cb3-b63b-cf08c1a416c3" value = { "admin": { "auth_url": "url", "username": "u", "password": "p", "tenant_name": "t", "region_name": "r", "endpoint_type": consts.EndpointType.INTERNAL }, "users": [] } deployment = self.fake_api.deployment.get.return_value deployment.get_credentials_for.return_value = value self.deployment.show(self.fake_api, deployment_id) self.fake_api.deployment.get.assert_called_once_with(deployment_id) headers = ["auth_url", "username", "password", "tenant_name", "region_name", "endpoint_type"] fake_data = ["url", "u", "***", "t", "r", consts.EndpointType.INTERNAL] mock_struct.assert_called_once_with(**dict(zip(headers, fake_data))) mock_print_list.assert_called_once_with([mock_struct()], headers) @mock.patch("rally.cli.commands.deployment.envutils.get_global") def test_deploy_no_deployment_id(self, mock_get_global): mock_get_global.side_effect = exceptions.InvalidArgumentsException self.assertRaises(exceptions.InvalidArgumentsException, self.deployment.show, None) @mock.patch("os.remove") @mock.patch("os.symlink") @mock.patch("os.path.exists", return_value=True) @mock.patch("rally.common.fileutils.update_env_file") def test_use(self, mock_update_env_file, mock_path_exists, mock_symlink, mock_remove): deployment_id = "593b683c-4b16-4b2b-a56b-e162bd60f10b" self.fake_api.deployment.get.return_value = fakes.FakeDeployment( uuid=deployment_id, admin={"auth_url": "fake_auth_url", "username": "fake_username", "password": "fake_password", "tenant_name": "fake_tenant_name", "endpoint": "fake_endpoint", "region_name": None}) with mock.patch("rally.cli.commands.deployment.open", mock.mock_open(), create=True) as mock_file: self.deployment.use(self.fake_api, deployment_id) self.assertEqual(2, mock_path_exists.call_count) mock_update_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals"), "RALLY_DEPLOYMENT", "%s\n" % deployment_id) 
        mock_file.return_value.write.assert_any_call(
            "export OS_ENDPOINT='fake_endpoint'\n")
        mock_file.return_value.write.assert_any_call(
            "export OS_AUTH_URL='fake_auth_url'\n"
            "export OS_USERNAME='fake_username'\n"
            "export OS_PASSWORD='fake_password'\n"
            "export OS_TENANT_NAME='fake_tenant_name'\n")
        mock_symlink.assert_called_once_with(
            os.path.expanduser("~/.rally/openrc-%s" % deployment_id),
            os.path.expanduser("~/.rally/openrc"))
        mock_remove.assert_called_once_with(os.path.expanduser(
            "~/.rally/openrc"))

    @mock.patch("os.remove")
    @mock.patch("os.symlink")
    @mock.patch("os.path.exists", return_value=True)
    @mock.patch("rally.common.fileutils.update_env_file")
    def test_use_with_v3_auth(self, mock_update_env_file, mock_path_exists,
                              mock_symlink, mock_remove):
        """use() writes a keystone-v3 openrc including the domain names.

        Same flow as test_use, but the admin credential carries
        user/project domain names, which must end up in the generated
        openrc file as OS_USER_DOMAIN_NAME / OS_PROJECT_DOMAIN_NAME.
        """
        deployment_id = "593b683c-4b16-4b2b-a56b-e162bd60f10b"
        self.fake_api.deployment.get.return_value = fakes.FakeDeployment(
            uuid=deployment_id,
            admin={
                "auth_url": "http://localhost:5000/v3",
                "username": "fake_username",
                "password": "fake_password",
                "tenant_name": "fake_tenant_name",
                "endpoint": "fake_endpoint",
                "region_name": None,
                "user_domain_name": "fake_user_domain",
                "project_domain_name": "fake_project_domain"})
        with mock.patch("rally.cli.commands.deployment.open",
                        mock.mock_open(), create=True) as mock_file:
            self.deployment.use(self.fake_api, deployment_id)
            # Existence checks: presumably one for the globals dir and
            # one for the old openrc symlink — confirm in use().
            self.assertEqual(2, mock_path_exists.call_count)
            # The deployment becomes the current one in ~/.rally/globals.
            mock_update_env_file.assert_called_once_with(
                os.path.expanduser("~/.rally/globals"),
                "RALLY_DEPLOYMENT", "%s\n" % deployment_id)
            mock_file.return_value.write.assert_any_call(
                "export OS_ENDPOINT='fake_endpoint'\n")
            mock_file.return_value.write.assert_any_call(
                "export OS_AUTH_URL='http://localhost:5000/v3'\n"
                "export OS_USERNAME='fake_username'\n"
                "export OS_PASSWORD='fake_password'\n"
                "export OS_TENANT_NAME='fake_tenant_name'\n")
            # v3-specific: the domain names are exported too.
            mock_file.return_value.write.assert_any_call(
                "export OS_USER_DOMAIN_NAME='fake_user_domain'\n"
                "export OS_PROJECT_DOMAIN_NAME='fake_project_domain'\n")
            # ~/.rally/openrc is re-pointed at the per-deployment file.
            mock_symlink.assert_called_once_with(
                os.path.expanduser("~/.rally/openrc-%s" % deployment_id),
                os.path.expanduser("~/.rally/openrc"))
            mock_remove.assert_called_once_with(os.path.expanduser(
                "~/.rally/openrc"))
mock_print_list.assert_called_once_with([], headers) def test_deployment_check_not_exist(self): deployment_id = "e87e4dca-b515-4477-888d-5f6103f13b42" exc = exceptions.DeploymentNotFound(deployment=deployment_id) self.fake_api.deployment.get.side_effect = exc self.assertEqual(self.deployment.check( self.fake_api, deployment_id), 1) def test_deployment_check_raise(self): deployment_id = "e87e4dca-b515-4477-888d-5f6103f13b42" sample_credential = objects.Credential("http://192.168.1.1:5000/v2.0/", "admin", "adminpass").to_dict() sample_credential["not-exist-key"] = "error" deployment = self.fake_api.deployment.get.return_value deployment.get_credentials_for.return_value = { "admin": sample_credential, "users": []} refused = keystone_exceptions.ConnectionRefused() self.fake_api.deployment.check.side_effect = refused self.assertEqual(self.deployment.check( self.fake_api, deployment_id), 1) rally-0.9.1/tests/unit/cli/test_manage.py0000664000567000056710000000440113073417716021546 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys import mock from rally.cli import manage from tests.unit import fakes from tests.unit import test class CmdManageTestCase(test.TestCase): @mock.patch("rally.cli.manage.cliutils") def test_main(self, mock_cliutils): manage.main() categories = {"db": manage.DBCommands} mock_cliutils.run.assert_called_once_with(sys.argv, categories) class DBCommandsTestCase(test.TestCase): def setUp(self): super(DBCommandsTestCase, self).setUp() self.db_commands = manage.DBCommands() self.fake_api = fakes.FakeAPI() @mock.patch("rally.cli.manage.envutils") @mock.patch("rally.cli.manage.db") def test_recreate(self, mock_db, mock_envutils): self.db_commands.recreate(self.fake_api) db_calls = [mock.call.schema_cleanup(), mock.call.schema_create()] self.assertEqual(db_calls, mock_db.mock_calls) envutils_calls = [mock.call.clear_env()] self.assertEqual(envutils_calls, mock_envutils.mock_calls) @mock.patch("rally.cli.manage.db") def test_create(self, mock_db): self.db_commands.create(self.fake_api) calls = [mock.call.schema_create()] self.assertEqual(calls, mock_db.mock_calls) @mock.patch("rally.cli.manage.db") def test_upgrade(self, mock_db): self.db_commands.upgrade(self.fake_api) calls = [mock.call.schema_upgrade()] mock_db.assert_has_calls(calls) @mock.patch("rally.cli.manage.db") def test_revision(self, mock_db): self.db_commands.revision(self.fake_api) calls = [mock.call.schema_revision()] mock_db.assert_has_calls(calls) rally-0.9.1/tests/unit/cli/test_envutils.py0000664000567000056710000002366713073417716022206 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import mock from six import moves from rally.cli import envutils from rally import exceptions from tests.unit import test class EnvUtilsTestCase(test.TestCase): def test_default_from_global(self): @envutils.default_from_global("test_arg_name", "test_env_name", "test_missing_arg") def test_function(test_arg_name=None): pass with mock.patch("sys.stdout", new_callable=moves.StringIO) as mock_stdout: test_function() self.assertEqual(mock_stdout.getvalue(), "Missing argument: --test_missing_arg\n") @mock.patch.dict(os.environ, values={envutils.ENV_DEPLOYMENT: "my_deployment_id"}, clear=True) def test_get_deployment_id_in_env(self): deployment_id = envutils.get_global(envutils.ENV_DEPLOYMENT) self.assertEqual("my_deployment_id", deployment_id) @mock.patch.dict(os.environ, values={}, clear=True) @mock.patch("rally.cli.envutils.fileutils.load_env_file") def test_get_deployment_id_with_exception(self, mock_load_env_file): self.assertRaises(exceptions.InvalidArgumentsException, envutils.get_global, envutils.ENV_DEPLOYMENT, True) mock_load_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals")) @mock.patch.dict(os.environ, values={}, clear=True) @mock.patch("rally.cli.envutils.fileutils.load_env_file") def test_get_deployment_id_with_none(self, mock_load_env_file): self.assertIsNone(envutils.get_global(envutils.ENV_DEPLOYMENT)) mock_load_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals")) @mock.patch.dict(os.environ, values={envutils.ENV_TASK: "my_task_id"}, clear=True) def test_get_task_id_in_env(self): 
self.assertEqual("my_task_id", envutils.get_global(envutils.ENV_TASK)) @mock.patch.dict(os.environ, values={}, clear=True) @mock.patch("rally.cli.envutils.fileutils.load_env_file") def test_get_task_id_with_exception(self, mock_load_env_file): self.assertRaises(exceptions.InvalidArgumentsException, envutils.get_global, envutils.ENV_TASK, True) mock_load_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals")) @mock.patch.dict(os.environ, values={}, clear=True) @mock.patch("rally.cli.envutils.fileutils.load_env_file") def test_get_task_id_with_none(self, mock_load_env_file): self.assertIsNone(envutils.get_global("RALLY_TASK")) mock_load_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals")) @mock.patch.dict(os.environ, values={envutils.ENV_DEPLOYMENT: "test_deployment_id"}, clear=True) @mock.patch("os.path.exists") @mock.patch("rally.cli.envutils.fileutils.update_env_file", return_value=True) def test_clear_global(self, mock_update_env_file, mock_path_exists): envutils.clear_global(envutils.ENV_DEPLOYMENT) mock_update_env_file.assert_called_once_with(os.path.expanduser( "~/.rally/globals"), envutils.ENV_DEPLOYMENT, "\n") self.assertEqual(os.environ, {}) @mock.patch.dict(os.environ, values={envutils.ENV_DEPLOYMENT: "test_deployment_id", envutils.ENV_TASK: "test_task_id"}, clear=True) @mock.patch("os.path.exists") @mock.patch("rally.cli.envutils.fileutils.update_env_file", return_value=True) def test_clear_env(self, mock_update_env_file, mock_path_exists): envutils.clear_env() self.assertEqual(os.environ, {}) @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url", "OS_USERNAME": "fake_username", "OS_PASSWORD": "fake_password", "OS_TENANT_NAME": "fake_tenant_name", "OS_REGION_NAME": "fake_region_name", "OS_ENDPOINT_TYPE": "fake_endpoint_typeURL", "OS_ENDPOINT": "fake_endpoint", "OS_INSECURE": "True", "OS_CACERT": "fake_cacert"}) def test_get_creds_from_env_vars_keystone_v2(self): expected_creds = { "auth_url": "fake_auth_url", 
"admin": { "username": "fake_username", "password": "fake_password", "tenant_name": "fake_tenant_name" }, "endpoint_type": "fake_endpoint_type", "endpoint": "fake_endpoint", "region_name": "fake_region_name", "https_cacert": "fake_cacert", "https_insecure": True } creds = envutils.get_creds_from_env_vars() self.assertEqual(expected_creds, creds) @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url", "OS_USERNAME": "fake_username", "OS_PASSWORD": "fake_password", "OS_TENANT_NAME": "fake_tenant_name", "OS_REGION_NAME": "fake_region_name", "OS_ENDPOINT_TYPE": "fake_endpoint_typeURL", "OS_ENDPOINT": "fake_endpoint", "OS_INSECURE": "True", "OS_PROJECT_DOMAIN_NAME": "fake_pdn", "OS_USER_DOMAIN_NAME": "fake_udn", "OS_CACERT": "fake_cacert"}) def test_get_creds_from_env_vars_keystone_v3(self): expected_creds = { "auth_url": "fake_auth_url", "admin": { "username": "fake_username", "password": "fake_password", "user_domain_name": "fake_udn", "project_domain_name": "fake_pdn", "project_name": "fake_tenant_name" }, "endpoint_type": "fake_endpoint_type", "endpoint": "fake_endpoint", "region_name": "fake_region_name", "https_cacert": "fake_cacert", "https_insecure": True } creds = envutils.get_creds_from_env_vars() self.assertEqual(expected_creds, creds) @mock.patch.dict(os.environ, {"OS_AUTH_URL": "fake_auth_url", "OS_PASSWORD": "fake_password", "OS_REGION_NAME": "fake_region_name", "OS_ENDPOINT": "fake_endpoint", "OS_INSECURE": "True", "OS_CACERT": "fake_cacert"}) def test_get_creds_from_env_vars_when_required_vars_missing(self): if "OS_USERNAME" in os.environ: del os.environ["OS_USERNAME"] self.assertRaises(exceptions.ValidationError, envutils.get_creds_from_env_vars) @mock.patch.dict(os.environ, {"OS_TENANT_NAME": "fake_tenant_name"}, clear=True) def test_get_project_name_from_env_when_tenant_name(self): project_name = envutils.get_project_name_from_env() self.assertEqual("fake_tenant_name", project_name) @mock.patch.dict(os.environ, {"OS_PROJECT_NAME": 
"fake_project_name"}, clear=True) def test_get_project_name_from_env_when_project_name(self): project_name = envutils.get_project_name_from_env() self.assertEqual("fake_project_name", project_name) @mock.patch.dict(os.environ, {"OS_TENANT_NAME": "fake_tenant_name", "OS_PROJECT_NAME": "fake_project_name"}) def test_get_project_name_from_env_when_both(self): project_name = envutils.get_project_name_from_env() self.assertEqual("fake_project_name", project_name) @mock.patch.dict(os.environ, values={}, clear=True) def test_get_project_name_from_env_when_neither(self): self.assertRaises(exceptions.ValidationError, envutils.get_project_name_from_env) @mock.patch.dict(os.environ, {"OS_ENDPOINT_TYPE": "fake_endpoint_typeURL"}, clear=True) def test_get_endpoint_type_from_env_when_endpoint_type(self): endpoint_type = envutils.get_endpoint_type_from_env() self.assertEqual("fake_endpoint_type", endpoint_type) @mock.patch.dict(os.environ, {"OS_INTERFACE": "fake_interface"}, clear=True) def test_get_endpoint_type_from_env_when_interface(self): endpoint_type = envutils.get_endpoint_type_from_env() self.assertEqual("fake_interface", endpoint_type) @mock.patch.dict(os.environ, {"OS_ENDPOINT_TYPE": "fake_endpoint_typeURL", "OS_INTERFACE": "fake_interface"}) def test_get_endpoint_type_from_env_when_both(self): endpoint_type = envutils.get_endpoint_type_from_env() self.assertEqual("fake_endpoint_type", endpoint_type) @mock.patch.dict(os.environ, values={}, clear=True) def test_get_endpoint_type_from_env_when_neither(self): endpoint_type = envutils.get_endpoint_type_from_env() self.assertIsNone(endpoint_type) rally-0.9.1/tests/unit/cli/test_cliutils.py0000664000567000056710000010421413073417716022151 0ustar jenkinsjenkins00000000000000# Copyright 2013: Intel Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt from keystoneclient import exceptions as keystone_exc import mock from oslo_config import cfg import six import sqlalchemy.exc from rally.cli import cliutils from rally.cli.commands import deployment from rally.cli.commands import task from rally.cli.commands import verify from rally import exceptions from tests.unit import test CONF = cfg.CONF FAKE_TASK_UUID = "bb0f621c-29bd-495c-9d7a-d844335ed0fa" @ddt.ddt class CliUtilsTestCase(test.TestCase): def setUp(self): super(CliUtilsTestCase, self).setUp() self.categories = { "deployment": deployment.DeploymentCommands, "task": task.TaskCommands, "verify": verify.VerifyCommands } def tearDown(self): self._unregister_opts() super(CliUtilsTestCase, self).tearDown() def test_print_dict(self): out = six.StringIO() dict = {"key": "value"} cliutils.print_dict(dict, out=out) self.assertEqual("+----------+-------+\n" "| Property | Value |\n" "+----------+-------+\n" "| key | value |\n" "+----------+-------+\n", out.getvalue()) def test_print_dict_wrap(self): out = six.StringIO() dict = {"key1": "not wrapped", "key2": "this will be wrapped"} cliutils.print_dict(dict, wrap=16, out=out) self.assertEqual("+----------+--------------+\n" "| Property | Value |\n" "+----------+--------------+\n" "| key1 | not wrapped |\n" "| key2 | this will be |\n" "| | wrapped |\n" "+----------+--------------+\n", out.getvalue()) def test_print_dict_formatters_and_fields(self): out = six.StringIO() dict = {"key1": "value", "key2": "Value", "key3": "vvv"} formatters = {"foo": lambda x: x["key1"], "bar": lambda x: x["key2"]} fields = ["foo", 
"bar"] cliutils.print_dict(dict, formatters=formatters, fields=fields, out=out) self.assertEqual("+----------+-------+\n" "| Property | Value |\n" "+----------+-------+\n" "| foo | value |\n" "| bar | Value |\n" "+----------+-------+\n", out.getvalue()) def test_print_dict_header(self): out = six.StringIO() dict = {"key": "value"} cliutils.print_dict(dict, table_label="Some Table", print_header=False, out=out) self.assertEqual("+-------------+\n" "| Some Table |\n" "+-----+-------+\n" "| key | value |\n" "+-----+-------+\n", out.getvalue()) def test_print_dict_objects(self): class SomeStruct(object): def __init__(self, a, b): self.a = a self.b = b @property def c(self): return self.a + self.b def foo(self): pass @classmethod def bar(cls): pass @staticmethod def foobar(): pass out = six.StringIO() formatters = {"c": lambda x: "a + b = %s" % x.c} cliutils.print_dict(SomeStruct(1, 2), formatters=formatters, out=out) self.assertEqual("+----------+-----------+\n" "| Property | Value |\n" "+----------+-----------+\n" "| a | 1 |\n" "| b | 2 |\n" "| c | a + b = 3 |\n" "+----------+-----------+\n", out.getvalue()) def test_print_dict_with_spec_chars(self): out = six.StringIO() dict = {"key": "line1\r\nline2"} cliutils.print_dict(dict, out=out) self.assertEqual("+----------+-------+\n" "| Property | Value |\n" "+----------+-------+\n" "| key | line1 |\n" "| | line2 |\n" "+----------+-------+\n", out.getvalue()) def test_make_header(self): h1 = cliutils.make_header("msg", size=4, symbol="=") self.assertEqual(h1, "====\nmsg\n====\n") def test_make_table_header(self): actual = cliutils.make_table_header("Response Times (sec)", 40) expected = "\n".join( ("+--------------------------------------+", "| Response Times (sec) |",) ) self.assertEqual(expected, actual) actual = cliutils.make_table_header("Response Times (sec)", 39) expected = "\n".join( ("+-------------------------------------+", "| Response Times (sec) |",) ) self.assertEqual(expected, actual) 
self.assertRaises(ValueError, cliutils.make_table_header, "Response Times (sec)", len("Response Times (sec)")) @ddt.data({"obj": mock.Mock(foo=6.56565), "args": ["foo", 3], "expected": 6.566}, {"obj": mock.Mock(foo=6.56565), "args": ["foo"], "expected": 6.56565}, {"obj": mock.Mock(foo=None), "args": ["foo"], "expected": "n/a"}, {"obj": mock.Mock(foo="n/a"), "args": ["foo"], "expected": "n/a"}, {"obj": mock.Mock(foo="n/a"), "args": ["foo", 3], "expected": "n/a"}, {"obj": {"foo": 6.56565}, "args": ["foo", 3], "expected": 6.566}, {"obj": {"foo": 6.56565}, "args": ["foo"], "expected": 6.56565}, {"obj": {"foo": None}, "args": ["foo"], "expected": "n/a"}, {"obj": {"foo": "n/a"}, "args": ["foo"], "expected": "n/a"}, {"obj": {"foo": "n/a"}, "args": ["foo", 3], "expected": "n/a"}, {"obj": object, "args": ["unexpected_field", 3], "expected": AttributeError}, {"obj": {"foo": 42}, "args": ["unexpected_field", 3], "expected": KeyError}) @ddt.unpack def test_pretty_float_formatter(self, obj, args, expected=None): formatter = cliutils.pretty_float_formatter(*args) if type(expected) == type and issubclass(expected, Exception): self.assertRaises(expected, formatter, obj) else: self.assertEqual(expected, formatter(obj)) def test_process_keyestone_exc(self): @cliutils.process_keystone_exc def a(a): if a == 1: raise keystone_exc.Unauthorized() if a == 2: raise keystone_exc.AuthorizationFailure() if a == 3: raise keystone_exc.ConnectionRefused() return a self.assertEqual(1, a(1)) self.assertEqual(1, a(2)) self.assertEqual(1, a(3)) self.assertEqual(4, a(4)) def test__methods_of_with_class(self): class fake_class(object): def public(self): pass def _private(self): pass result = cliutils._methods_of(fake_class) self.assertEqual(1, len(result)) self.assertEqual("public", result[0][0]) def test__methods_of_with_object(self): class fake_class(object): def public(self): pass def _private(self): pass mock_obj = fake_class() result = cliutils._methods_of(mock_obj) self.assertEqual(1, 
len(result)) self.assertEqual("public", result[0][0]) def test__methods_of_empty_result(self): class fake_class(object): def _private(self): pass def _private2(self): pass mock_obj = fake_class() result = cliutils._methods_of(mock_obj) self.assertEqual(result, []) def _unregister_opts(self): CONF.reset() category_opt = cfg.SubCommandOpt("category", title="Command categories", help="Available categories" ) CONF.unregister_opt(category_opt) @mock.patch("rally.api.API", side_effect=exceptions.RallyException("config_file")) def test_run_fails(self, mock_rally_api_api): ret = cliutils.run(["rally", "version"], self.categories) self.assertEqual(ret, 2) mock_rally_api_api.assert_called_once_with( config_args=["version"], skip_db_check=True) @mock.patch("rally.api.API.check_db_revision") def test_run_version(self, mock_api_check_db_revision): ret = cliutils.run(["rally", "version"], self.categories) self.assertEqual(ret, 0) @mock.patch("rally.api.API.check_db_revision") def test_run_bash_completion(self, mock_api_check_db_revision): ret = cliutils.run(["rally", "bash-completion"], self.categories) self.assertEqual(ret, 0) @mock.patch("rally.api.API.check_db_revision") @mock.patch("rally.common.db.task_get", side_effect=exceptions.TaskNotFound(uuid=FAKE_TASK_UUID)) def test_run_task_not_found(self, mock_task_get, mock_api_check_db_revision): ret = cliutils.run(["rally", "task", "status", "%s" % FAKE_TASK_UUID], self.categories) self.assertTrue(mock_task_get.called) self.assertEqual(ret, 1) @mock.patch("rally.api.API.check_db_revision") @mock.patch("rally.cli.cliutils.validate_args", side_effect=cliutils.MissingArgs("missing")) def test_run_task_failed(self, mock_validate_args, mock_api_check_db_revision): ret = cliutils.run(["rally", "task", "status", "%s" % FAKE_TASK_UUID], self.categories) self.assertTrue(mock_validate_args.called) self.assertEqual(ret, 1) @mock.patch("rally.api.API.check_db_revision") def test_run_failed_to_open_file(self, mock_api_check_db_revision): 
class FailuresCommands(object): def failed_to_open_file(self): raise IOError("No such file") ret = cliutils.run(["rally", "failure", "failed-to-open-file"], {"failure": FailuresCommands}) self.assertEqual(1, ret) @mock.patch("rally.api.API.check_db_revision") def test_run_sqlalchmey_operational_failure(self, mock_api_check_db_revision): class SQLAlchemyCommands(object): def operational_failure(self): raise sqlalchemy.exc.OperationalError("Can't open DB file") ret = cliutils.run(["rally", "failure", "operational-failure"], {"failure": SQLAlchemyCommands}) self.assertEqual(1, ret) class TestObj(object): x = 1 y = 2 z = 3.142857142857143 aOrB = 3 # mixed case field @ddt.data( {"args": [[TestObj()], ["x", "y"]], "kwargs": {"print_header": True, "print_border": True, "sortby_index": None}, "expected": ("+---+---+\n" "| x | y |\n" "+---+---+\n" "| 1 | 2 |\n" "+---+---+")}, {"args": [[TestObj()], ["z"]], "kwargs": {"print_header": True, "print_border": True, "sortby_index": None, "formatters": {"z": cliutils.pretty_float_formatter("z", 5)}}, "expected": ("+---------+\n" "| z |\n" "+---------+\n" "| 3.14286 |\n" "+---------+")}, {"args": [[TestObj()], ["x"]], "kwargs": {"print_header": True, "print_border": True}, "expected": ("+---+\n" "| x |\n" "+---+\n" "| 1 |\n" "+---+")}, {"args": [[TestObj()], ["x", "y"]], "kwargs": {"print_header": True, "print_border": True}, "expected": ("+---+---+\n" "| x | y |\n" "+---+---+\n" "| 1 | 2 |\n" "+---+---+")}, {"args": [[TestObj()], ["x"]], "kwargs": {"print_header": False, "print_border": False}, "expected": "1"}, {"args": [[TestObj()], ["x", "y"]], "kwargs": {"print_header": False, "print_border": False}, "expected": "1 2"}, {"args": [[TestObj()], ["x"]], "kwargs": {"print_header": True, "print_border": False}, "expected": "x \n1"}, {"args": [[TestObj()], ["x", "y"]], "kwargs": {"print_header": True, "print_border": False}, "expected": "x y \n1 2"}, {"args": [[TestObj()], ["x"]], "kwargs": {"print_header": False, "print_border": 
True}, "expected": ("+--+\n" "|1 |\n" "+--+")}, {"args": [[TestObj()], ["x", "y"]], "kwargs": {"print_header": False, "print_border": True}, "expected": ("+--+--+\n" "|1 |2 |\n" "+--+--+")}, {"args": [[TestObj()], ["aOrB"]], "kwargs": {"print_header": True, "print_border": True, "mixed_case_fields": ["aOrB"]}, "expected": ("+------+\n" "| aOrB |\n" "+------+\n" "| 3 |\n" "+------+")}, {"args": [[TestObj()], ["aOrB"]], "kwargs": {"print_header": False, "print_border": True, "mixed_case_fields": ["aOrB"]}, "expected": ("+--+\n" "|3 |\n" "+--+")}, {"args": [[TestObj()], ["aOrB"]], "kwargs": {"print_header": True, "print_border": False, "mixed_case_fields": ["aOrB"]}, "expected": "aOrB \n3"}, {"args": [[TestObj()], ["aOrB"]], "kwargs": {"print_header": False, "print_border": False, "mixed_case_fields": ["aOrB"]}, "expected": "3"}, {"args": [[{"x": 1, "y": 2}], ["x", "y"]], "kwargs": {"print_header": True, "print_border": True, "sortby_index": None}, "expected": ("+---+---+\n" "| x | y |\n" "+---+---+\n" "| 1 | 2 |\n" "+---+---+")}, {"args": [[{"z": 3.142857142857143}], ["z"]], "kwargs": {"print_header": True, "print_border": True, "sortby_index": None, "formatters": {"z": cliutils.pretty_float_formatter("z", 5)}}, "expected": ("+---------+\n" "| z |\n" "+---------+\n" "| 3.14286 |\n" "+---------+")}, {"args": [[{"x": 1}], ["x"]], "kwargs": {"print_header": True, "print_border": True}, "expected": ("+---+\n" "| x |\n" "+---+\n" "| 1 |\n" "+---+")}, {"args": [[{"x": 1, "y": 2}], ["x", "y"]], "kwargs": {"print_header": True, "print_border": True}, "expected": ("+---+---+\n" "| x | y |\n" "+---+---+\n" "| 1 | 2 |\n" "+---+---+")}) @ddt.unpack def test_print_list(self, args, kwargs, expected): out = six.moves.StringIO() kwargs["out"] = out cliutils.print_list(*args, **kwargs) self.assertEqual(expected, out.getvalue().strip()) def test_print_list_raises(self): out = six.moves.StringIO() self.assertRaisesRegexp( ValueError, "Field labels list.*has different number " "of 
elements than fields list", cliutils.print_list, [self.TestObj()], ["x"], field_labels=["x", "y"], sortby_index=None, out=out) def test_help_for_grouped_methods(self): class SomeCommand(object): @cliutils.help_group("1_manage") def install(self): pass @cliutils.help_group("1_manage") def uninstall(self): pass @cliutils.help_group("1_manage") def reinstall(self): pass @cliutils.help_group("2_launch") def run(self): pass @cliutils.help_group("2_launch") def rerun(self): pass @cliutils.help_group("3_results") def show(self): pass @cliutils.help_group("3_results") def list(self): pass def do_do_has_do_has_mesh(self): pass self.assertEqual( "\n\nCommands:\n" " do-do-has-do-has-mesh \n" "\n" " install \n" " reinstall \n" " uninstall \n" "\n" " rerun \n" " run \n" "\n" " list \n" " show \n", cliutils._compose_category_description(SomeCommand)) class ValidateArgsTest(test.TestCase): def test_lambda_no_args(self): cliutils.validate_args(lambda: None) def _test_lambda_with_args(self, *args, **kwargs): cliutils.validate_args(lambda x, y: None, *args, **kwargs) def test_lambda_positional_args(self): self._test_lambda_with_args(1, 2) def test_lambda_kwargs(self): self._test_lambda_with_args(x=1, y=2) def test_lambda_mixed_kwargs(self): self._test_lambda_with_args(1, y=2) def test_lambda_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_args) def test_lambda_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_args, 1) def test_lambda_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_args, y=2) def test_lambda_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_args, 1, x=2) def _test_lambda_with_default(self, *args, **kwargs): cliutils.validate_args(lambda x, y, z=3: None, *args, **kwargs) def test_lambda_positional_args_with_default(self): self._test_lambda_with_default(1, 2) def test_lambda_kwargs_with_default(self): 
self._test_lambda_with_default(x=1, y=2) def test_lambda_mixed_kwargs_with_default(self): self._test_lambda_with_default(1, y=2) def test_lambda_positional_args_all_with_default(self): self._test_lambda_with_default(1, 2, 3) def test_lambda_kwargs_all_with_default(self): self._test_lambda_with_default(x=1, y=2, z=3) def test_lambda_mixed_kwargs_all_with_default(self): self._test_lambda_with_default(1, y=2, z=3) def test_lambda_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_default) def test_lambda_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_default, 1) def test_lambda_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_default, y=2) def test_lambda_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_lambda_with_default, y=2, z=3) def test_function_no_args(self): def func(): pass cliutils.validate_args(func) def _test_function_with_args(self, *args, **kwargs): def func(x, y): pass cliutils.validate_args(func, *args, **kwargs) def test_function_positional_args(self): self._test_function_with_args(1, 2) def test_function_kwargs(self): self._test_function_with_args(x=1, y=2) def test_function_mixed_kwargs(self): self._test_function_with_args(1, y=2) def test_function_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_args) def test_function_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_args, 1) def test_function_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_args, y=2) def _test_function_with_default(self, *args, **kwargs): def func(x, y, z=3): pass cliutils.validate_args(func, *args, **kwargs) def test_function_positional_args_with_default(self): self._test_function_with_default(1, 2) def test_function_kwargs_with_default(self): self._test_function_with_default(x=1, y=2) def 
test_function_mixed_kwargs_with_default(self): self._test_function_with_default(1, y=2) def test_function_positional_args_all_with_default(self): self._test_function_with_default(1, 2, 3) def test_function_kwargs_all_with_default(self): self._test_function_with_default(x=1, y=2, z=3) def test_function_mixed_kwargs_all_with_default(self): self._test_function_with_default(1, y=2, z=3) def test_function_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_default) def test_function_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_default, 1) def test_function_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_default, y=2) def test_function_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_function_with_default, y=2, z=3) def test_bound_method_no_args(self): class Foo(object): def bar(self): pass cliutils.validate_args(Foo().bar) def _test_bound_method_with_args(self, *args, **kwargs): class Foo(object): def bar(self, x, y): pass cliutils.validate_args(Foo().bar, *args, **kwargs) def test_bound_method_positional_args(self): self._test_bound_method_with_args(1, 2) def test_bound_method_kwargs(self): self._test_bound_method_with_args(x=1, y=2) def test_bound_method_mixed_kwargs(self): self._test_bound_method_with_args(1, y=2) def test_bound_method_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_args) def test_bound_method_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_args, 1) def test_bound_method_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_args, y=2) def _test_bound_method_with_default(self, *args, **kwargs): class Foo(object): def bar(self, x, y, z=3): pass cliutils.validate_args(Foo().bar, *args, **kwargs) def 
test_bound_method_positional_args_with_default(self): self._test_bound_method_with_default(1, 2) def test_bound_method_kwargs_with_default(self): self._test_bound_method_with_default(x=1, y=2) def test_bound_method_mixed_kwargs_with_default(self): self._test_bound_method_with_default(1, y=2) def test_bound_method_positional_args_all_with_default(self): self._test_bound_method_with_default(1, 2, 3) def test_bound_method_kwargs_all_with_default(self): self._test_bound_method_with_default(x=1, y=2, z=3) def test_bound_method_mixed_kwargs_all_with_default(self): self._test_bound_method_with_default(1, y=2, z=3) def test_bound_method_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_default) def test_bound_method_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_default, 1) def test_bound_method_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_default, y=2) def test_bound_method_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_bound_method_with_default, y=2, z=3) def test_unbound_method_no_args(self): class Foo(object): def bar(self): pass cliutils.validate_args(Foo.bar, Foo()) def _test_unbound_method_with_args(self, *args, **kwargs): class Foo(object): def bar(self, x, y): pass cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs) def test_unbound_method_positional_args(self): self._test_unbound_method_with_args(1, 2) def test_unbound_method_kwargs(self): self._test_unbound_method_with_args(x=1, y=2) def test_unbound_method_mixed_kwargs(self): self._test_unbound_method_with_args(1, y=2) def test_unbound_method_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_args) def test_unbound_method_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_args, 1) def test_unbound_method_missing_args3(self): 
self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_args, y=2) def _test_unbound_method_with_default(self, *args, **kwargs): class Foo(object): def bar(self, x, y, z=3): pass cliutils.validate_args(Foo.bar, Foo(), *args, **kwargs) def test_unbound_method_positional_args_with_default(self): self._test_unbound_method_with_default(1, 2) def test_unbound_method_kwargs_with_default(self): self._test_unbound_method_with_default(x=1, y=2) def test_unbound_method_mixed_kwargs_with_default(self): self._test_unbound_method_with_default(1, y=2) def test_unbound_method_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_default) def test_unbound_method_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_default, 1) def test_unbound_method_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_default, y=2) def test_unbound_method_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_unbound_method_with_default, y=2, z=3) def test_class_method_no_args(self): class Foo(object): @classmethod def bar(cls): pass cliutils.validate_args(Foo.bar) def _test_class_method_with_args(self, *args, **kwargs): class Foo(object): @classmethod def bar(cls, x, y): pass cliutils.validate_args(Foo.bar, *args, **kwargs) def test_class_method_positional_args(self): self._test_class_method_with_args(1, 2) def test_class_method_kwargs(self): self._test_class_method_with_args(x=1, y=2) def test_class_method_mixed_kwargs(self): self._test_class_method_with_args(1, y=2) def test_class_method_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_args) def test_class_method_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_args, 1) def test_class_method_missing_args3(self): self.assertRaises(cliutils.MissingArgs, 
self._test_class_method_with_args, y=2) def _test_class_method_with_default(self, *args, **kwargs): class Foo(object): @classmethod def bar(cls, x, y, z=3): pass cliutils.validate_args(Foo.bar, *args, **kwargs) def test_class_method_positional_args_with_default(self): self._test_class_method_with_default(1, 2) def test_class_method_kwargs_with_default(self): self._test_class_method_with_default(x=1, y=2) def test_class_method_mixed_kwargs_with_default(self): self._test_class_method_with_default(1, y=2) def test_class_method_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_default) def test_class_method_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_default, 1) def test_class_method_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_default, y=2) def test_class_method_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_class_method_with_default, y=2, z=3) def test_static_method_no_args(self): class Foo(object): @staticmethod def bar(): pass cliutils.validate_args(Foo.bar) def _test_static_method_with_args(self, *args, **kwargs): class Foo(object): @staticmethod def bar(x, y): pass cliutils.validate_args(Foo.bar, *args, **kwargs) def test_static_method_positional_args(self): self._test_static_method_with_args(1, 2) def test_static_method_kwargs(self): self._test_static_method_with_args(x=1, y=2) def test_static_method_mixed_kwargs(self): self._test_static_method_with_args(1, y=2) def test_static_method_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_args) def test_static_method_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_args, 1) def test_static_method_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_args, y=2) def 
_test_static_method_with_default(self, *args, **kwargs): class Foo(object): @staticmethod def bar(x, y, z=3): pass cliutils.validate_args(Foo.bar, *args, **kwargs) def test_static_method_positional_args_with_default(self): self._test_static_method_with_default(1, 2) def test_static_method_kwargs_with_default(self): self._test_static_method_with_default(x=1, y=2) def test_static_method_mixed_kwargs_with_default(self): self._test_static_method_with_default(1, y=2) def test_static_method_with_default_missing_args1(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_default) def test_static_method_with_default_missing_args2(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_default, 1) def test_static_method_with_default_missing_args3(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_default, y=2) def test_static_method_with_default_missing_args4(self): self.assertRaises(cliutils.MissingArgs, self._test_static_method_with_default, y=2, z=3) def test_alias_decorator(self): alias_fn = mock.Mock(name="alias_fn") cmd_name = "test-command" wrapped = cliutils.alias(cmd_name) self.assertEqual(wrapped(alias_fn).alias, cmd_name) class CategoryParserTestCase(test.TestCase): def setUp(self): super(CategoryParserTestCase, self).setUp() self.categoryParser = cliutils.CategoryParser() def test_format_help(self): self.assertIsNotNone(self.categoryParser.format_help()) rally-0.9.1/tests/unit/plugins/0000775000567000056710000000000013073420067017611 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/__init__.py0000664000567000056710000000000013073417717021720 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/0000775000567000056710000000000013073420067021600 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/0000775000567000056710000000000013073420067023423 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/__init__.py0000664000567000056710000000000013073417717025532 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/identity/0000775000567000056710000000000013073420067025254 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/identity/__init__.py0000664000567000056710000000000013073417717027363 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/identity/test_keystone_common.py0000664000567000056710000002373213073417717032115 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack import service from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_common from tests.unit import test class FullUnifiedKeystone(keystone_common.UnifiedKeystoneMixin, service.Service): """Implementation of UnifiedKeystoneMixin with Service base class.""" pass class UnifiedKeystoneMixinTestCase(test.TestCase): def setUp(self): super(UnifiedKeystoneMixinTestCase, self).setUp() self.clients = mock.MagicMock() self.name_generator = mock.MagicMock() self.impl = mock.MagicMock() self.version = "some" self.service = FullUnifiedKeystone( clients=self.clients, name_generator=self.name_generator) self.service._impl = self.impl self.service.version = self.version def test__unify_service(self): class SomeFakeService(object): id = 123123123123123 name = "asdfasdfasdfasdfadf" other_var = "asdfasdfasdfasdfasdfasdfasdf" service = self.service._unify_service(SomeFakeService()) self.assertIsInstance(service, identity.Service) self.assertEqual(SomeFakeService.id, service.id) self.assertEqual(SomeFakeService.name, service.name) def test__unify_role(self): class SomeFakeRole(object): id = 123123123123123 name = "asdfasdfasdfasdfadf" other_var = "asdfasdfasdfasdfasdfasdfasdf" role = self.service._unify_role(SomeFakeRole()) self.assertIsInstance(role, identity.Role) self.assertEqual(SomeFakeRole.id, role.id) self.assertEqual(SomeFakeRole.name, role.name) def test_delete_user(self): user_id = "id" self.service.delete_user(user_id) self.impl.delete_user.assert_called_once_with(user_id) def test_get_user(self): user_id = "id" self.service._unify_user = mock.MagicMock() self.assertEqual(self.service._unify_user.return_value, self.service.get_user(user_id)) self.impl.get_user.assert_called_once_with(user_id) self.service._unify_user.assert_called_once_with( self.impl.get_user.return_value) def test_create_service(self): self.service._unify_service = mock.MagicMock() name = 
"some_Service" service_type = "computeNextGen" description = "we will Rock you!" self.assertEqual(self.service._unify_service.return_value, self.service.create_service( name=name, service_type=service_type, description=description)) self.service._unify_service.assert_called_once_with( self.service._impl.create_service.return_value) self.service._impl.create_service.assert_called_once_with( name=name, service_type=service_type, description=description) def test_delete_service(self): service_id = "id" self.service.delete_service(service_id) self.impl.delete_service.assert_called_once_with(service_id) def test_get_service(self): service_id = "id" self.service._unify_service = mock.MagicMock() self.assertEqual(self.service._unify_service.return_value, self.service.get_service(service_id)) self.impl.get_service.assert_called_once_with(service_id) self.service._unify_service.assert_called_once_with( self.impl.get_service.return_value) def test_get_service_by_name(self): service_id = "id" self.service._unify_service = mock.MagicMock() self.assertEqual(self.service._unify_service.return_value, self.service.get_service_by_name(service_id)) self.impl.get_service_by_name.assert_called_once_with(service_id) self.service._unify_service.assert_called_once_with( self.impl.get_service_by_name.return_value) def test_delete_role(self): role_id = "id" self.service.delete_role(role_id) self.impl.delete_role.assert_called_once_with(role_id) def test_get_role(self): role_id = "id" self.service._unify_role = mock.MagicMock() self.assertEqual(self.service._unify_role.return_value, self.service.get_role(role_id)) self.impl.get_role.assert_called_once_with(role_id) self.service._unify_role.assert_called_once_with( self.impl.get_role.return_value) def test_list_ec2credentials(self): user_id = "id" self.assertEqual(self.impl.list_ec2credentials.return_value, self.service.list_ec2credentials(user_id)) self.impl.list_ec2credentials.assert_called_once_with(user_id) def 
test_delete_ec2credential(self): user_id = "id" access = mock.MagicMock() self.assertEqual(self.impl.delete_ec2credential.return_value, self.service.delete_ec2credential(user_id, access=access)) self.impl.delete_ec2credential.assert_called_once_with(user_id=user_id, access=access) def test_fetch_token(self): self.assertEqual(self.impl.fetch_token.return_value, self.service.fetch_token()) self.impl.fetch_token.assert_called_once_with() def test_validate_token(self): token = "id" self.assertEqual(self.impl.validate_token.return_value, self.service.validate_token(token)) self.impl.validate_token.assert_called_once_with(token) class FullKeystone(service.Service, keystone_common.KeystoneMixin): """Implementation of KeystoneMixin with Service base class.""" pass class KeystoneMixinTestCase(test.TestCase): def setUp(self): super(KeystoneMixinTestCase, self).setUp() self.clients = mock.MagicMock() self.kc = self.clients.keystone.return_value self.name_generator = mock.MagicMock() self.version = "some" self.service = FullKeystone( clients=self.clients, name_generator=self.name_generator) self.service.version = self.version def test_list_users(self): self.assertEqual(self.kc.users.list.return_value, self.service.list_users()) self.kc.users.list.assert_called_once_with() def test_delete_user(self): user_id = "fake_id" self.service.delete_user(user_id) self.kc.users.delete.assert_called_once_with(user_id) def test_get_user(self): user_id = "fake_id" self.service.get_user(user_id) self.kc.users.get.assert_called_once_with(user_id) def test_delete_service(self): service_id = "fake_id" self.service.delete_service(service_id) self.kc.services.delete.assert_called_once_with(service_id) def test_list_services(self): self.assertEqual(self.kc.services.list.return_value, self.service.list_services()) self.kc.services.list.assert_called_once_with() def test_get_service(self): service_id = "fake_id" self.service.get_service(service_id) 
self.kc.services.get.assert_called_once_with(service_id) def test_get_service_by_name(self): class FakeService(object): def __init__(self, name): self.name = name service_name = "fake_name" services = [FakeService(name="foo"), FakeService(name=service_name), FakeService(name="bar")] self.service.list_services = mock.MagicMock(return_value=services) self.assertEqual(services[1], self.service.get_service_by_name(service_name)) def test_delete_role(self): role_id = "fake_id" self.service.delete_role(role_id) self.kc.roles.delete.assert_called_once_with(role_id) def test_list_roles(self): self.assertEqual(self.kc.roles.list.return_value, self.service.list_roles()) self.kc.roles.list.assert_called_once_with() def test_get_role(self): role_id = "fake_id" self.service.get_role(role_id) self.kc.roles.get.assert_called_once_with(role_id) def test_list_ec2credentials(self): user_id = "fake_id" self.assertEqual(self.kc.ec2.list.return_value, self.service.list_ec2credentials(user_id)) self.kc.ec2.list.assert_called_once_with(user_id) def test_delete_ec2credentials(self): user_id = "fake_id" access = mock.MagicMock() self.service.delete_ec2credential(user_id, access=access) self.kc.ec2.delete.assert_called_once_with(user_id=user_id, access=access) @mock.patch("rally.osclients.Clients") def test_fetch_token(self, mock_clients): expected_token = mock_clients.return_value.keystone.auth_ref.auth_token self.assertEqual(expected_token, self.service.fetch_token()) mock_clients.assert_called_once_with( credential=self.clients.credential, api_info=self.clients.api_info) def test_validate_token(self): token = "some_token" self.service.validate_token(token) self.kc.tokens.validate.assert_called_once_with(token) rally-0.9.1/tests/unit/plugins/openstack/services/identity/test_keystone_v2.py0000664000567000056710000004641713073417717031161 0ustar jenkinsjenkins00000000000000# All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid import ddt import mock from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_v2 from tests.unit import test PATH = "rally.plugins.openstack.services.identity.keystone_v2" @ddt.ddt class KeystoneV2ServiceTestCase(test.TestCase): def setUp(self): super(KeystoneV2ServiceTestCase, self).setUp() self.clients = mock.MagicMock() self.kc = self.clients.keystone.return_value self.name_generator = mock.MagicMock() self.service = keystone_v2.KeystoneV2Service( self.clients, name_generator=self.name_generator) def test_create_tenant(self): name = "name" tenant = self.service.create_tenant(name) self.assertEqual(tenant, self.kc.tenants.create.return_value) self.kc.tenants.create.assert_called_once_with(name) @ddt.data({"tenant_id": "fake_id", "name": True, "enabled": True, "description": True}, {"tenant_id": "fake_id", "name": "some", "enabled": False, "description": "descr"}) @ddt.unpack def test_update_tenant(self, tenant_id, name, enabled, description): self.name_generator.side_effect = ("foo", "bar") self.service.update_tenant(tenant_id, name=name, description=description, enabled=enabled) name = "foo" if name is True else name description = "bar" if description is True else description self.kc.tenants.update.assert_called_once_with( tenant_id, name=name, description=description, enabled=enabled) def test_delete_tenant(self): tenant_id = "fake_id" 
self.service.delete_tenant(tenant_id) self.kc.tenants.delete.assert_called_once_with(tenant_id) def test_list_tenants(self): self.assertEqual(self.kc.tenants.list.return_value, self.service.list_tenants()) self.kc.tenants.list.assert_called_once_with() def test_get_tenant(self): tenant_id = "fake_id" self.service.get_tenant(tenant_id) self.kc.tenants.get.assert_called_once_with(tenant_id) def test_create_user(self): name = "name" password = "passwd" email = "rally@example.com" tenant_id = "project" user = self.service.create_user(name, password=password, email=email, tenant_id=tenant_id) self.assertEqual(user, self.kc.users.create.return_value) self.kc.users.create.assert_called_once_with( name=name, password=password, email=email, tenant_id=tenant_id, enabled=True) def test_create_users(self): self.service.create_user = mock.MagicMock() n = 2 tenant_id = "some" self.assertEqual([self.service.create_user.return_value] * n, self.service.create_users(number_of_users=n, tenant_id=tenant_id)) self.assertEqual([mock.call(tenant_id=tenant_id)] * n, self.service.create_user.call_args_list) def test_update_user_with_wrong_params(self): user_id = "fake_id" card_with_cvv2 = "1234 5678 9000 0000 : 666" self.assertRaises(NotImplementedError, self.service.update_user, user_id, card_with_cvv2=card_with_cvv2) def test_update_user(self): user_id = "fake_id" name = "new name" email = "new.name2016@example.com" enabled = True self.service.update_user(user_id, name=name, email=email, enabled=enabled) self.kc.users.update.assert_called_once_with( user_id, name=name, email=email, enabled=enabled) def test_update_user_password(self): user_id = "fake_id" password = "qwerty123" self.service.update_user_password(user_id, password=password) self.kc.users.update_password.assert_called_once_with( user_id, password=password) @ddt.data({"name": None, "service_type": None, "description": None}, {"name": "some", "service_type": "st", "description": "d"}) @ddt.unpack def test_create_service(self, 
name, service_type, description): self.assertEqual(self.kc.services.create.return_value, self.service.create_service(name=name, service_type=service_type, description=description)) name = name or self.name_generator.return_value service_type = service_type or "rally_test_type" description = description or self.name_generator.return_value self.kc.services.create.assert_called_once_with( name, service_type=service_type, description=description) def test_create_role(self): name = "some" self.service.create_role(name) self.kc.roles.create.assert_called_once_with(name) def test_add_role(self): role_id = "fake_id" user_id = "user_id" tenant_id = "tenant_id" self.service.add_role(role_id, user_id=user_id, tenant_id=tenant_id) self.kc.roles.add_user_role.assert_called_once_with( user=user_id, role=role_id, tenant=tenant_id) def test_list_roles(self): self.assertEqual(self.kc.roles.list.return_value, self.service.list_roles()) self.kc.roles.list.assert_called_once_with() def test_list_roles_for_user(self): user_id = "user_id" tenant_id = "tenant_id" self.assertEqual(self.kc.roles.roles_for_user.return_value, self.service.list_roles_for_user(user_id, tenant_id=tenant_id)) self.kc.roles.roles_for_user.assert_called_once_with(user_id, tenant_id) def test_revoke_role(self): role_id = "fake_id" user_id = "user_id" tenant_id = "tenant_id" self.service.revoke_role(role_id, user_id=user_id, tenant_id=tenant_id) self.kc.roles.remove_user_role.assert_called_once_with( user=user_id, role=role_id, tenant=tenant_id) def test_create_ec2credentials(self): user_id = "fake_id" tenant_id = "fake_id" self.assertEqual(self.kc.ec2.create.return_value, self.service.create_ec2credentials( user_id, tenant_id=tenant_id)) self.kc.ec2.create.assert_called_once_with(user_id, tenant_id=tenant_id) @ddt.ddt class UnifiedKeystoneV2ServiceTestCase(test.TestCase): def setUp(self): super(UnifiedKeystoneV2ServiceTestCase, self).setUp() self.clients = mock.MagicMock() self.service = 
keystone_v2.UnifiedKeystoneV2Service(self.clients) self.service._impl = mock.MagicMock() def test_init_identity_service(self): self.clients.keystone.return_value.version = "v2.0" self.assertIsInstance(identity.Identity(self.clients)._impl, keystone_v2.UnifiedKeystoneV2Service) def test__check_domain(self): self.service._check_domain("Default") self.service._check_domain("default") self.assertRaises(NotImplementedError, self.service._check_domain, "non-default") def test__unify_tenant(self): class KeystoneV2Tenant(object): def __init__(self, domain_id="domain_id"): self.id = str(uuid.uuid4()) self.name = str(uuid.uuid4()) self.domain_id = domain_id tenant = KeystoneV2Tenant() project = self.service._unify_tenant(tenant) self.assertIsInstance(project, identity.Project) self.assertEqual(tenant.id, project.id) self.assertEqual(tenant.name, project.name) self.assertEqual("default", project.domain_id) self.assertNotEqual(tenant.domain_id, project.domain_id) def test__unify_user(self): class KeystoneV2User(object): def __init__(self, tenantId=None): self.id = str(uuid.uuid4()) self.name = str(uuid.uuid4()) if tenantId is not None: self.tenantId = tenantId user = KeystoneV2User() unified_user = self.service._unify_user(user) self.assertIsInstance(unified_user, identity.User) self.assertEqual(user.id, unified_user.id) self.assertEqual(user.name, unified_user.name) self.assertEqual("default", unified_user.domain_id) self.assertIsNone(unified_user.project_id) tenant_id = "tenant_id" user = KeystoneV2User(tenantId=tenant_id) unified_user = self.service._unify_user(user) self.assertIsInstance(unified_user, identity.User) self.assertEqual(user.id, unified_user.id) self.assertEqual(user.name, unified_user.name) self.assertEqual("default", unified_user.domain_id) self.assertEqual(tenant_id, unified_user.project_id) @mock.patch("%s.UnifiedKeystoneV2Service._check_domain" % PATH) @mock.patch("%s.UnifiedKeystoneV2Service._unify_tenant" % PATH) def test_create_project( self, 
mock_unified_keystone_v2_service__unify_tenant, mock_unified_keystone_v2_service__check_domain): mock_unify_tenant = mock_unified_keystone_v2_service__unify_tenant mock_check_domain = mock_unified_keystone_v2_service__check_domain name = "name" self.assertEqual(mock_unify_tenant.return_value, self.service.create_project(name)) mock_check_domain.assert_called_once_with("Default") mock_unify_tenant.assert_called_once_with( self.service._impl.create_tenant.return_value) self.service._impl.create_tenant.assert_called_once_with(name) def test_update_project(self): tenant_id = "fake_id" name = "name" description = "descr" enabled = False self.service.update_project(project_id=tenant_id, name=name, description=description, enabled=enabled) self.service._impl.update_tenant.assert_called_once_with( tenant_id=tenant_id, name=name, description=description, enabled=enabled) def test_delete_project(self): tenant_id = "fake_id" self.service.delete_project(tenant_id) self.service._impl.delete_tenant.assert_called_once_with(tenant_id) @mock.patch("%s.UnifiedKeystoneV2Service._unify_tenant" % PATH) def test_get_project(self, mock_unified_keystone_v2_service__unify_tenant): mock_unify_tenant = mock_unified_keystone_v2_service__unify_tenant tenant_id = "id" self.assertEqual(mock_unify_tenant.return_value, self.service.get_project(tenant_id)) mock_unify_tenant.assert_called_once_with( self.service._impl.get_tenant.return_value) self.service._impl.get_tenant.assert_called_once_with(tenant_id) @mock.patch("%s.UnifiedKeystoneV2Service._unify_tenant" % PATH) def test_list_projects(self, mock_unified_keystone_v2_service__unify_tenant): mock_unify_tenant = mock_unified_keystone_v2_service__unify_tenant tenants = [mock.MagicMock()] self.service._impl.list_tenants.return_value = tenants self.assertEqual([mock_unify_tenant.return_value], self.service.list_projects()) mock_unify_tenant.assert_called_once_with(tenants[0]) @mock.patch("%s.UnifiedKeystoneV2Service._check_domain" % PATH) 
@mock.patch("%s.UnifiedKeystoneV2Service._unify_user" % PATH) def test_create_user(self, mock_unified_keystone_v2_service__unify_user, mock_unified_keystone_v2_service__check_domain): mock_check_domain = mock_unified_keystone_v2_service__check_domain mock_unify_user = mock_unified_keystone_v2_service__unify_user name = "name" password = "passwd" tenant_id = "project" self.assertEqual(mock_unify_user.return_value, self.service.create_user(name, password=password, project_id=tenant_id)) mock_check_domain.assert_called_once_with("Default") mock_unify_user.assert_called_once_with( self.service._impl.create_user.return_value) self.service._impl.create_user.assert_called_once_with( username=name, password=password, tenant_id=tenant_id, enabled=True) @mock.patch("%s.UnifiedKeystoneV2Service._check_domain" % PATH) @mock.patch("%s.UnifiedKeystoneV2Service._unify_user" % PATH) def test_create_users(self, mock_unified_keystone_v2_service__unify_user, mock_unified_keystone_v2_service__check_domain): mock_check_domain = mock_unified_keystone_v2_service__check_domain tenant_id = "project" n = 3 domain_name = "Default" self.service.create_users( tenant_id, number_of_users=3, user_create_args={"domain_name": domain_name}) mock_check_domain.assert_called_once_with(domain_name) self.service._impl.create_users.assert_called_once_with( tenant_id=tenant_id, number_of_users=n, user_create_args={"domain_name": domain_name}) @mock.patch("%s.UnifiedKeystoneV2Service._unify_user" % PATH) def test_list_users(self, mock_unified_keystone_v2_service__unify_user): mock_unify_user = mock_unified_keystone_v2_service__unify_user users = [mock.MagicMock()] self.service._impl.list_users.return_value = users self.assertEqual([mock_unify_user.return_value], self.service.list_users()) mock_unify_user.assert_called_once_with(users[0]) @ddt.data({"user_id": "id", "enabled": False, "name": "Fake", "email": "badboy@example.com", "password": "pass"}, {"user_id": "id", "enabled": None, "name": None, "email": 
None, "password": None}) @ddt.unpack def test_update_user(self, user_id, enabled, name, email, password): self.service.update_user(user_id, enabled=enabled, name=name, email=email, password=password) if password: self.service._impl.update_user_password.assert_called_once_with( user_id=user_id, password=password) args = {} if enabled is not None: args["enabled"] = enabled if name is not None: args["name"] = name if email is not None: args["email"] = email if args: self.service._impl.update_user.assert_called_once_with( user_id, **args) @mock.patch("%s.UnifiedKeystoneV2Service._unify_service" % PATH) def test_list_services(self, mock_unified_keystone_v2_service__unify_service): mock_unify_service = mock_unified_keystone_v2_service__unify_service services = [mock.MagicMock()] self.service._impl.list_services.return_value = services self.assertEqual([mock_unify_service.return_value], self.service.list_services()) mock_unify_service.assert_called_once_with(services[0]) @mock.patch("%s.UnifiedKeystoneV2Service._unify_role" % PATH) def test_create_role(self, mock_unified_keystone_v2_service__unify_role): mock_unify_role = mock_unified_keystone_v2_service__unify_role name = "some" self.assertEqual(mock_unify_role.return_value, self.service.create_role(name)) self.service._impl.create_role.assert_called_once_with(name) mock_unify_role.assert_called_once_with( self.service._impl.create_role.return_value) def test_add_role(self): role_id = "fake_id" user_id = "user_id" project_id = "user_id" self.service.add_role(role_id, user_id=user_id, project_id=project_id) self.service._impl.add_role.assert_called_once_with( user_id=user_id, role_id=role_id, tenant_id=project_id) def test_delete_role(self): role_id = "fake_id" self.service.delete_role(role_id) self.service._impl.delete_role.assert_called_once_with(role_id) def test_revoke_role(self): role_id = "fake_id" user_id = "user_id" project_id = "user_id" self.service.revoke_role(role_id, user_id=user_id, project_id=project_id) 
self.service._impl.revoke_role.assert_called_once_with( user_id=user_id, role_id=role_id, tenant_id=project_id) @mock.patch("%s.UnifiedKeystoneV2Service._unify_role" % PATH) def test_list_roles(self, mock_unified_keystone_v2_service__unify_role): mock_unify_role = mock_unified_keystone_v2_service__unify_role roles = [mock.MagicMock()] another_roles = [mock.MagicMock()] self.service._impl.list_roles.return_value = roles self.service._impl.list_roles_for_user.return_value = another_roles # case 1 self.assertEqual([mock_unify_role.return_value], self.service.list_roles()) self.service._impl.list_roles.assert_called_once_with() mock_unify_role.assert_called_once_with(roles[0]) self.assertFalse(self.service._impl.list_roles_for_user.called) self.service._impl.list_roles.reset_mock() mock_unify_role.reset_mock() # case 2 user = "user" project = "project" self.assertEqual([mock_unify_role.return_value], self.service.list_roles(user_id=user, project_id=project)) self.service._impl.list_roles_for_user.assert_called_once_with( user, tenant_id=project) self.assertFalse(self.service._impl.list_roles.called) mock_unify_role.assert_called_once_with(another_roles[0]) # case 3 self.assertRaises(NotImplementedError, self.service.list_roles, domain_name="some") def test_create_ec2credentials(self): user_id = "id" tenant_id = "tenant-id" self.assertEqual(self.service._impl.create_ec2credentials.return_value, self.service.create_ec2credentials( user_id=user_id, project_id=tenant_id)) self.service._impl.create_ec2credentials.assert_called_once_with( user_id=user_id, tenant_id=tenant_id) rally-0.9.1/tests/unit/plugins/openstack/services/identity/test_keystone_v3.py0000664000567000056710000005642313073417717031160 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid import ddt import mock from rally import exceptions from rally.plugins.openstack.services.identity import identity from rally.plugins.openstack.services.identity import keystone_v3 from tests.unit import test PATH = "rally.plugins.openstack.services.identity.keystone_v3" @ddt.ddt class KeystoneV3ServiceTestCase(test.TestCase): def setUp(self): super(KeystoneV3ServiceTestCase, self).setUp() self.clients = mock.MagicMock() self.kc = self.clients.keystone.return_value self.name_generator = mock.MagicMock() self.service = keystone_v3.KeystoneV3Service( self.clients, name_generator=self.name_generator) def test__get_domain_id_not_found(self): from keystoneclient import exceptions as kc_exceptions self.kc.domains.get.side_effect = kc_exceptions.NotFound self.kc.domains.list.return_value = [] domain_name_or_id = "some" self.assertRaises(exceptions.GetResourceNotFound, self.service._get_domain_id, domain_name_or_id) self.kc.domains.get.assert_called_once_with(domain_name_or_id) self.kc.domains.list.assert_called_once_with(name=domain_name_or_id) def test__get_domain_id_find_by_name(self): from keystoneclient import exceptions as kc_exceptions self.kc.domains.get.side_effect = kc_exceptions.NotFound domain = mock.MagicMock() self.kc.domains.list.return_value = [domain] domain_name_or_id = "some" self.assertEqual(domain.id, self.service._get_domain_id(domain_name_or_id)) self.kc.domains.get.assert_called_once_with(domain_name_or_id) self.kc.domains.list.assert_called_once_with(name=domain_name_or_id) def test__get_domain_id_find_by_id(self): domain = 
mock.MagicMock() self.kc.domains.get.return_value = domain domain_name_or_id = "some" self.assertEqual(domain.id, self.service._get_domain_id(domain_name_or_id)) self.kc.domains.get.assert_called_once_with(domain_name_or_id) self.assertFalse(self.kc.domains.list.called) @mock.patch("%s.KeystoneV3Service._get_domain_id" % PATH) def test_create_project(self, mock__get_domain_id): name = "name" domain_name = "domain" domain_id = "id" mock__get_domain_id.return_value = domain_id project = self.service.create_project(name, domain_name=domain_name) mock__get_domain_id.assert_called_once_with(domain_name) self.assertEqual(project, self.kc.projects.create.return_value) self.kc.projects.create.assert_called_once_with(name=name, domain=domain_id) @ddt.data({"project_id": "fake_id", "name": True, "enabled": True, "description": True}, {"project_id": "fake_id", "name": "some", "enabled": False, "description": "descr"}) @ddt.unpack def test_update_project(self, project_id, name, enabled, description): self.service.update_project(project_id, name=name, description=description, enabled=enabled) if name is True: name = self.name_generator.return_value if description is True: description = self.name_generator.return_value self.kc.projects.update.assert_called_once_with( project_id, name=name, description=description, enabled=enabled) def test_delete_project(self): project_id = "fake_id" self.service.delete_project(project_id) self.kc.projects.delete.assert_called_once_with(project_id) def test_list_projects(self): self.assertEqual(self.kc.projects.list.return_value, self.service.list_projects()) self.kc.projects.list.assert_called_once_with() def test_get_project(self): project_id = "fake_id" self.service.get_project(project_id) self.kc.projects.get.assert_called_once_with(project_id) @mock.patch("%s.LOG" % PATH) @mock.patch("%s.KeystoneV3Service._get_domain_id" % PATH) def test_create_user(self, mock__get_domain_id, mock_log): name = "name" password = "passwd" project_id = 
"project" domain_name = "domain" self.service.list_roles = mock.MagicMock(return_value=[]) user = self.service.create_user(name, password=password, project_id=project_id, domain_name=domain_name) self.assertEqual(user, self.kc.users.create.return_value) self.kc.users.create.assert_called_once_with( name=name, password=password, default_project=project_id, domain=mock__get_domain_id.return_value, enabled=True) self.assertTrue(mock_log.warning.called) @mock.patch("%s.LOG" % PATH) @mock.patch("%s.KeystoneV3Service._get_domain_id" % PATH) def test_create_user_without_project_id(self, mock__get_domain_id, mock_log): name = "name" password = "passwd" domain_name = "domain" self.service.list_roles = mock.MagicMock(return_value=[]) user = self.service.create_user(name, password=password, domain_name=domain_name) self.assertEqual(user, self.kc.users.create.return_value) self.kc.users.create.assert_called_once_with( name=name, password=password, default_project=None, domain=mock__get_domain_id.return_value, enabled=True) self.assertFalse(self.service.list_roles.called) self.assertFalse(mock_log.warning.called) @mock.patch("%s.LOG" % PATH) @mock.patch("%s.KeystoneV3Service._get_domain_id" % PATH) def test_create_user_and_add_role( self, mock_keystone_v3_service__get_domain_id, mock_log): mock__get_domain_id = mock_keystone_v3_service__get_domain_id name = "name" password = "passwd" project_id = "project" domain_name = "domain" class Role(object): def __init__(self, name): self.name = name self.id = str(uuid.uuid4()) self.service.list_roles = mock.MagicMock( return_value=[Role("admin"), Role("member")]) self.service.add_role = mock.MagicMock() user = self.service.create_user(name, password=password, project_id=project_id, domain_name=domain_name) self.assertEqual(user, self.kc.users.create.return_value) self.kc.users.create.assert_called_once_with( name=name, password=password, default_project=project_id, domain=mock__get_domain_id.return_value, enabled=True) 
self.assertFalse(mock_log.warning.called) self.service.add_role.assert_called_once_with( role_id=self.service.list_roles.return_value[1].id, user_id=user.id, project_id=project_id) def test_create_users(self): self.service.create_user = mock.MagicMock() n = 2 project_id = "some" self.assertEqual([self.service.create_user.return_value] * n, self.service.create_users(number_of_users=n, project_id=project_id)) self.assertEqual([mock.call(project_id=project_id)] * n, self.service.create_user.call_args_list) @ddt.data(None, "some") def test_update_user(self, domain_name): user_id = "fake_id" name = "new name" project_id = "new project" password = "pass" email = "mail" description = "n/a" enabled = False default_project = "some" self.service._get_domain_id = mock.MagicMock() self.service.update_user(user_id, name=name, domain_name=domain_name, project_id=project_id, password=password, email=email, description=description, enabled=enabled, default_project=default_project) domain = None if domain_name: self.service._get_domain_id.assert_called_once_with(domain_name) domain = self.service._get_domain_id.return_value else: self.assertFalse(self.service._get_domain_id.called) self.kc.users.update.assert_called_once_with( user_id, name=name, domain=domain, project=project_id, password=password, email=email, description=description, enabled=enabled, default_project=default_project) @ddt.data({"name": None, "service_type": None, "description": None, "enabled": True}, {"name": "some", "service_type": "st", "description": "d", "enabled": False}) @ddt.unpack def test_create_service(self, name, service_type, description, enabled): self.assertEqual(self.kc.services.create.return_value, self.service.create_service(name=name, service_type=service_type, description=description, enabled=enabled)) name = name or self.name_generator.return_value service_type = service_type or "rally_test_type" description = description or self.name_generator.return_value 
self.kc.services.create.assert_called_once_with( name, type=service_type, description=description, enabled=enabled) @mock.patch("%s.KeystoneV3Service._get_domain_id" % PATH) def test_create_role(self, mock__get_domain_id): domain_name = "domain" name = "some" user = self.service.create_role(name, domain_name=domain_name) self.assertEqual(user, self.kc.roles.create.return_value) self.kc.roles.create.assert_called_once_with( name, domain=mock__get_domain_id.return_value) @ddt.data({"domain_name": "domain", "user_id": "user", "project_id": "pr"}, {"domain_name": None, "user_id": None, "project_id": None}) @ddt.unpack def test_list_roles(self, domain_name, user_id, project_id): self.service._get_domain_id = mock.MagicMock() self.assertEqual(self.kc.roles.list.return_value, self.service.list_roles(user_id=user_id, domain_name=domain_name, project_id=project_id)) domain = None if domain_name: self.service._get_domain_id.assert_called_once_with(domain_name) domain = self.service._get_domain_id.return_value else: self.assertFalse(self.service._get_domain_id.called) self.kc.roles.list.assert_called_once_with(user=user_id, domain=domain, project=project_id) def test_add_role(self): role_id = "fake_id" user_id = "user_id" project_id = "project_id" self.service.add_role(role_id, user_id=user_id, project_id=project_id) self.kc.roles.grant.assert_called_once_with( user=user_id, role=role_id, project=project_id) def test_revoke_role(self): role_id = "fake_id" user_id = "user_id" project_id = "tenant_id" self.service.revoke_role(role_id, user_id=user_id, project_id=project_id) self.kc.roles.revoke.assert_called_once_with( user=user_id, role=role_id, project=project_id) def test_get_role(self): role_id = "fake_id" self.service.get_role(role_id) self.kc.roles.get.assert_called_once_with(role_id) def test_create_domain(self): name = "some_domain" descr = "descr" enabled = False self.service.create_domain(name, description=descr, enabled=enabled) 
self.kc.domains.create.assert_called_once_with( name, description=descr, enabled=enabled) def test_create_ec2credentials(self): user_id = "fake_id" project_id = "fake_id" self.assertEqual(self.kc.ec2.create.return_value, self.service.create_ec2credentials( user_id, project_id=project_id)) self.kc.ec2.create.assert_called_once_with(user_id, project_id=project_id) @ddt.ddt class UnifiedKeystoneV3ServiceTestCase(test.TestCase): def setUp(self): super(UnifiedKeystoneV3ServiceTestCase, self).setUp() self.clients = mock.MagicMock() self.service = keystone_v3.UnifiedKeystoneV3Service(self.clients) self.service._impl = mock.MagicMock() def test_init_identity_service(self): self.clients.keystone.return_value.version = "v3" self.assertIsInstance(identity.Identity(self.clients)._impl, keystone_v3.UnifiedKeystoneV3Service) def test__unify_project(self): class KeystoneV3Project(object): def __init__(self): self.id = str(uuid.uuid4()) self.name = str(uuid.uuid4()) self.domain_id = str(uuid.uuid4()) project = KeystoneV3Project() unified_project = self.service._unify_project(project) self.assertIsInstance(unified_project, identity.Project) self.assertEqual(project.id, unified_project.id) self.assertEqual(project.name, unified_project.name) self.assertEqual(project.domain_id, unified_project.domain_id) self.assertEqual(project.domain_id, unified_project.domain_id) def test__unify_user(self): class KeystoneV3User(object): def __init__(self, project_id=None): self.id = str(uuid.uuid4()) self.name = str(uuid.uuid4()) self.domain_id = str(uuid.uuid4()) if project_id is not None: self.default_project_id = project_id user = KeystoneV3User() unified_user = self.service._unify_user(user) self.assertIsInstance(unified_user, identity.User) self.assertEqual(user.id, unified_user.id) self.assertEqual(user.name, unified_user.name) self.assertEqual(user.domain_id, unified_user.domain_id) self.assertIsNone(unified_user.project_id) project_id = "tenant_id" user = 
KeystoneV3User(project_id=project_id) unified_user = self.service._unify_user(user) self.assertIsInstance(unified_user, identity.User) self.assertEqual(user.id, unified_user.id) self.assertEqual(user.name, unified_user.name) self.assertEqual(user.domain_id, unified_user.domain_id) self.assertEqual(project_id, unified_user.project_id) @mock.patch("%s.UnifiedKeystoneV3Service._unify_project" % PATH) def test_create_project(self, mock_unified_keystone_v3_service__unify_project): mock_unify_project = mock_unified_keystone_v3_service__unify_project name = "name" domain = "domain" self.assertEqual(mock_unify_project.return_value, self.service.create_project(name, domain_name=domain)) mock_unify_project.assert_called_once_with( self.service._impl.create_project.return_value) self.service._impl.create_project.assert_called_once_with( name, domain_name=domain) def test_update_project(self): project_id = "fake_id" name = "name" description = "descr" enabled = False self.service.update_project(project_id=project_id, name=name, description=description, enabled=enabled) self.service._impl.update_project.assert_called_once_with( project_id=project_id, name=name, description=description, enabled=enabled) def test_delete_project(self): project_id = "fake_id" self.service.delete_project(project_id) self.service._impl.delete_project.assert_called_once_with(project_id) @mock.patch("%s.UnifiedKeystoneV3Service._unify_project" % PATH) def test_get_project(self, mock_unified_keystone_v3_service__unify_project): mock_unify_project = mock_unified_keystone_v3_service__unify_project project_id = "id" self.assertEqual(mock_unify_project.return_value, self.service.get_project(project_id)) mock_unify_project.assert_called_once_with( self.service._impl.get_project.return_value) self.service._impl.get_project.assert_called_once_with(project_id) @mock.patch("%s.UnifiedKeystoneV3Service._unify_project" % PATH) def test_list_projects(self, mock_unified_keystone_v3_service__unify_project): 
mock_unify_project = mock_unified_keystone_v3_service__unify_project projects = [mock.MagicMock()] self.service._impl.list_projects.return_value = projects self.assertEqual([mock_unify_project.return_value], self.service.list_projects()) mock_unify_project.assert_called_once_with(projects[0]) @mock.patch("%s.UnifiedKeystoneV3Service._unify_user" % PATH) def test_create_user(self, mock_unified_keystone_v3_service__unify_user): mock_unify_user = mock_unified_keystone_v3_service__unify_user name = "name" password = "passwd" project_id = "project" domain_name = "domain" default_role = "role" self.assertEqual(mock_unify_user.return_value, self.service.create_user(name, password=password, project_id=project_id, domain_name=domain_name, default_role=default_role)) mock_unify_user.assert_called_once_with( self.service._impl.create_user.return_value) self.service._impl.create_user.assert_called_once_with( username=name, password=password, project_id=project_id, domain_name=domain_name, default_role=default_role, enabled=True) @mock.patch("%s.UnifiedKeystoneV3Service._unify_user" % PATH) def test_create_users(self, mock_unified_keystone_v3_service__unify_user): project_id = "project" n = 3 domain_name = "Default" self.service.create_users( project_id, number_of_users=3, user_create_args={"domain_name": domain_name}) self.service._impl.create_users.assert_called_once_with( project_id=project_id, number_of_users=n, user_create_args={"domain_name": domain_name}) @mock.patch("%s.UnifiedKeystoneV3Service._unify_user" % PATH) def test_list_users(self, mock_unified_keystone_v3_service__unify_user): mock_unify_user = mock_unified_keystone_v3_service__unify_user users = [mock.MagicMock()] self.service._impl.list_users.return_value = users self.assertEqual([mock_unify_user.return_value], self.service.list_users()) mock_unify_user.assert_called_once_with(users[0]) @ddt.data({"user_id": "id", "enabled": False, "name": "Fake", "email": "badboy@example.com", "password": "pass"}, 
{"user_id": "id", "enabled": None, "name": None, "email": None, "password": None}) @ddt.unpack def test_update_user(self, user_id, enabled, name, email, password): self.service.update_user(user_id, enabled=enabled, name=name, email=email, password=password) self.service._impl.update_user.assert_called_once_with( user_id, enabled=enabled, name=name, email=email, password=password) @mock.patch("%s.UnifiedKeystoneV3Service._unify_service" % PATH) def test_list_services(self, mock_unified_keystone_v3_service__unify_service): mock_unify_service = mock_unified_keystone_v3_service__unify_service services = [mock.MagicMock()] self.service._impl.list_services.return_value = services self.assertEqual([mock_unify_service.return_value], self.service.list_services()) mock_unify_service.assert_called_once_with(services[0]) @mock.patch("%s.UnifiedKeystoneV3Service._unify_role" % PATH) def test_create_role(self, mock_unified_keystone_v3_service__unify_role): mock_unify_role = mock_unified_keystone_v3_service__unify_role name = "some" domain = "some" self.assertEqual(mock_unify_role.return_value, self.service.create_role(name, domain_name=domain)) self.service._impl.create_role.assert_called_once_with( name, domain_name=domain) mock_unify_role.assert_called_once_with( self.service._impl.create_role.return_value) def test_add_role(self): role_id = "fake_id" user_id = "user_id" project_id = "user_id" self.service.add_role(role_id, user_id=user_id, project_id=project_id) self.service._impl.add_role.assert_called_once_with( user_id=user_id, role_id=role_id, project_id=project_id) def test_revoke_role(self): role_id = "fake_id" user_id = "user_id" project_id = "user_id" self.service.revoke_role(role_id, user_id=user_id, project_id=project_id) self.service._impl.revoke_role.assert_called_once_with( user_id=user_id, role_id=role_id, project_id=project_id) @mock.patch("%s.UnifiedKeystoneV3Service._unify_role" % PATH) def test_list_roles(self, mock_unified_keystone_v3_service__unify_role): 
mock_unify_role = mock_unified_keystone_v3_service__unify_role roles = [mock.MagicMock()] self.service._impl.list_roles.return_value = roles self.assertEqual([mock_unify_role.return_value], self.service.list_roles()) mock_unify_role.assert_called_once_with(roles[0]) def test_create_ec2credentials(self): user_id = "id" project_id = "project-id" self.assertEqual(self.service._impl.create_ec2credentials.return_value, self.service.create_ec2credentials( user_id=user_id, project_id=project_id)) self.service._impl.create_ec2credentials.assert_called_once_with( user_id=user_id, project_id=project_id) rally-0.9.1/tests/unit/plugins/openstack/services/identity/test_identity.py0000664000567000056710000002226613073417717030536 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally.plugins.openstack.services.identity import identity from tests.unit import test @ddt.ddt class IdentityTestCase(test.TestCase): def setUp(self): super(IdentityTestCase, self).setUp() self.clients = mock.MagicMock() def get_service_with_fake_impl(self): path = "rally.plugins.openstack.services.identity.identity" with mock.patch("%s.Identity.discover_impl" % path) as mock_discover: mock_discover.return_value = mock.MagicMock(), None service = identity.Identity(self.clients) return service def test_create_project(self): service = self.get_service_with_fake_impl() project_name = "name" domain_name = "domain" service.create_project(project_name, domain_name=domain_name) service._impl.create_project.assert_called_once_with( project_name, domain_name=domain_name) def test_update_project(self): service = self.get_service_with_fake_impl() project_id = "id" project_name = "name" description = "descr" enabled = False service.update_project(project_id=project_id, name=project_name, description=description, enabled=enabled) service._impl.update_project.assert_called_once_with( project_id, name=project_name, description=description, enabled=enabled) def test_delete_project(self): service = self.get_service_with_fake_impl() project = "id" service.delete_project(project) service._impl.delete_project.assert_called_once_with(project) def test_list_projects(self): service = self.get_service_with_fake_impl() service.list_projects() service._impl.list_projects.assert_called_once_with() def test_get_project(self): service = self.get_service_with_fake_impl() project = "id" service.get_project(project) service._impl.get_project.assert_called_once_with(project) def test_create_user(self): service = self.get_service_with_fake_impl() username = "username" password = "password" project_id = "project_id" domain_name = "domain_name" service.create_user(username=username, password=password, project_id=project_id, domain_name=domain_name) 
service._impl.create_user.assert_called_once_with( username=username, password=password, project_id=project_id, domain_name=domain_name, default_role="member") def test_create_users(self): service = self.get_service_with_fake_impl() project_id = "project_id" n = 3 user_create_args = {} service.create_users(project_id, number_of_users=n, user_create_args=user_create_args) service._impl.create_users.assert_called_once_with( project_id, number_of_users=n, user_create_args=user_create_args) def test_delete_user(self): service = self.get_service_with_fake_impl() user_id = "fake_id" service.delete_user(user_id) service._impl.delete_user.assert_called_once_with(user_id) def test_list_users(self): service = self.get_service_with_fake_impl() service.list_users() service._impl.list_users.assert_called_once_with() def test_update_user(self): service = self.get_service_with_fake_impl() user_id = "id" user_name = "name" email = "mail" password = "pass" enabled = False service.update_user(user_id, name=user_name, password=password, email=email, enabled=enabled) service._impl.update_user.assert_called_once_with( user_id, name=user_name, password=password, email=email, enabled=enabled) def test_get_user(self): service = self.get_service_with_fake_impl() user = "id" service.get_user(user) service._impl.get_user.assert_called_once_with(user) def test_create_service(self): service = self.get_service_with_fake_impl() service_name = "name" service_type = "service_type" description = "descr" service.create_service(service_name, service_type=service_type, description=description) service._impl.create_service.assert_called_once_with( name=service_name, service_type=service_type, description=description) def test_delete_service(self): service = self.get_service_with_fake_impl() service_id = "id" service.delete_service(service_id) service._impl.delete_service.assert_called_once_with(service_id) def test_list_services(self): service = self.get_service_with_fake_impl() service.list_services() 
service._impl.list_services.assert_called_once_with() def test_get_service(self): service = self.get_service_with_fake_impl() service_id = "id" service.get_service(service_id) service._impl.get_service.assert_called_once_with(service_id) def test_get_service_by_name(self): service = self.get_service_with_fake_impl() service_name = "name" service.get_service_by_name(service_name) service._impl.get_service_by_name.assert_called_once_with(service_name) def test_create_role(self): service = self.get_service_with_fake_impl() name = "name" service.create_role(name) service._impl.create_role.assert_called_once_with( name=name, domain_name=None) def test_add_role(self): service = self.get_service_with_fake_impl() role_id = "id" user_id = "user_id" project_id = "project_id" service.add_role(role_id, user_id=user_id, project_id=project_id) service._impl.add_role.assert_called_once_with(role_id=role_id, user_id=user_id, project_id=project_id) def test_delete_role(self): service = self.get_service_with_fake_impl() role = "id" service.delete_role(role) service._impl.delete_role.assert_called_once_with(role) def test_revoke_role(self): service = self.get_service_with_fake_impl() role_id = "id" user_id = "user_id" project_id = "project_id" service.revoke_role(role_id, user_id=user_id, project_id=project_id) service._impl.revoke_role.assert_called_once_with( role_id=role_id, user_id=user_id, project_id=project_id) @ddt.data((None, None, None), ("user_id", "project_id", "domain")) def test_list_roles(self, params): user, project, domain = params service = self.get_service_with_fake_impl() service.list_roles(user_id=user, project_id=project, domain_name=domain) service._impl.list_roles.assert_called_once_with(user_id=user, project_id=project, domain_name=domain) def test_get_role(self): service = self.get_service_with_fake_impl() role = "id" service.get_role(role) service._impl.get_role.assert_called_once_with(role) def test_create_ec2credentials(self): service = 
self.get_service_with_fake_impl() user_id = "id" project_id = "project-id" service.create_ec2credentials(user_id=user_id, project_id=project_id) service._impl.create_ec2credentials.assert_called_once_with( user_id=user_id, project_id=project_id) def test_list_ec2credentials(self): service = self.get_service_with_fake_impl() user_id = "id" service.list_ec2credentials(user_id=user_id) service._impl.list_ec2credentials.assert_called_once_with(user_id) def test_delete_ec2credential(self): service = self.get_service_with_fake_impl() user_id = "id" access = "access" service.delete_ec2credential(user_id=user_id, access=access) service._impl.delete_ec2credential.assert_called_once_with( user_id=user_id, access=access) def test_fetch_token(self): service = self.get_service_with_fake_impl() service.fetch_token() service._impl.fetch_token.assert_called_once_with() def test_validate_token(self): service = self.get_service_with_fake_impl() token = "id" service.validate_token(token) service._impl.validate_token.assert_called_once_with(token) rally-0.9.1/tests/unit/plugins/openstack/services/heat/0000775000567000056710000000000013073420067024344 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/heat/__init__.py0000664000567000056710000000000013073417717026453 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/services/heat/test_main.py0000664000567000056710000001047113073417720026706 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.services.heat import main from tests.unit import test class Stack(main.Stack): def __init__(self): self.scenario = mock.Mock() class StackTestCase(test.ScenarioTestCase): @mock.patch("rally.plugins.openstack.services.heat.main.open", create=True) def test___init__(self, mock_open): reads = [mock.Mock(), mock.Mock()] reads[0].read.return_value = "template_contents" reads[1].read.return_value = "file1_contents" mock_open.side_effect = reads stack = main.Stack("scenario", "task", "template", parameters="parameters", files={"f1_name": "f1_path"}) self.assertEqual("template_contents", stack.template) self.assertEqual({"f1_name": "file1_contents"}, stack.files) self.assertEqual([mock.call("template"), mock.call("f1_path")], mock_open.mock_calls) reads[0].read.assert_called_once_with() reads[1].read.assert_called_once_with() @mock.patch("rally.plugins.openstack.services.heat.main.utils") def test__wait(self, mock_utils): fake_stack = mock.Mock() stack = Stack() stack.stack = fake_stack = mock.Mock() stack._wait(["ready_statuses"], ["failure_statuses"]) mock_utils.wait_for_status.assert_called_once_with( fake_stack, check_interval=1.0, ready_statuses=["ready_statuses"], failure_statuses=["failure_statuses"], timeout=3600.0, update_resource=mock_utils.get_from_manager()) @mock.patch("rally.task.atomic") @mock.patch("rally.plugins.openstack.services.heat.main.open") @mock.patch("rally.plugins.openstack.services.heat.main.Stack._wait") def test_create(self, mock_stack__wait, mock_open, mock_task_atomic): mock_scenario = mock.MagicMock() mock_scenario.generate_random_name.return_value = "fake_name" mock_open().read.return_value = "fake_content" mock_new_stack = { "stack": { "id": "fake_id" } } mock_scenario.clients("heat").stacks.create.return_value = ( mock_new_stack) stack = main.Stack( scenario=mock_scenario, task=mock.Mock(), 
template=mock.Mock(), files={} ) stack.create() mock_scenario.clients("heat").stacks.create.assert_called_once_with( files={}, parameters=None, stack_name="fake_name", template="fake_content" ) mock_scenario.clients("heat").stacks.get.assert_called_once_with( "fake_id") mock_stack__wait.assert_called_once_with(["CREATE_COMPLETE"], ["CREATE_FAILED"]) @mock.patch("rally.task.atomic") @mock.patch("rally.plugins.openstack.services.heat.main.open") @mock.patch("rally.plugins.openstack.services.heat.main.Stack._wait") def test_update(self, mock_stack__wait, mock_open, mock_task_atomic): mock_scenario = mock.MagicMock(stack_id="fake_id") mock_parameters = mock.Mock() mock_open().read.return_value = "fake_content" stack = main.Stack( scenario=mock_scenario, task=mock.Mock(), template=None, files={}, parameters=mock_parameters ) stack.stack_id = "fake_id" stack.parameters = mock_parameters stack.update({"foo": "bar"}) mock_scenario.clients("heat").stacks.update.assert_called_once_with( "fake_id", files={}, template="fake_content", parameters=mock_parameters ) mock_stack__wait.assert_called_once_with(["UPDATE_COMPLETE"], ["UPDATE_FAILED"]) rally-0.9.1/tests/unit/plugins/openstack/test_types.py0000664000567000056710000004046313073417717024374 0ustar jenkinsjenkins00000000000000# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally import exceptions from rally.plugins.openstack import types from tests.unit import fakes from tests.unit import test class FlavorTestCase(test.TestCase): def setUp(self): super(FlavorTestCase, self).setUp() self.clients = fakes.FakeClients() self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.tiny", id="1")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.nano", id="42")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.large", id="44")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.large", id="45")) def test_transform_by_id(self): resource_config = {"id": "42"} flavor_id = types.Flavor.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(flavor_id, "42") def test_transform_by_name(self): resource_config = {"name": "m1.nano"} flavor_id = types.Flavor.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(flavor_id, "42") def test_transform_by_name_no_match(self): resource_config = {"name": "m1.medium"} self.assertRaises(exceptions.InvalidScenarioArgument, types.Flavor.transform, self.clients, resource_config) def test_transform_by_name_multiple_match(self): resource_config = {"name": "m1.large"} self.assertRaises(exceptions.InvalidScenarioArgument, types.Flavor.transform, self.clients, resource_config) def test_transform_by_regex(self): resource_config = {"regex": "m(1|2)\.nano"} flavor_id = types.Flavor.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(flavor_id, "42") def test_transform_by_regex_multiple_match(self): resource_config = {"regex": "^m1"} self.assertRaises(exceptions.InvalidScenarioArgument, types.Flavor.transform, self.clients, resource_config) def test_transform_by_regex_no_match(self): resource_config = {} self.assertRaises(exceptions.InvalidScenarioArgument, types.Flavor.transform, self.clients, resource_config) class EC2FlavorTestCase(test.TestCase): def setUp(self): 
super(EC2FlavorTestCase, self).setUp() self.clients = fakes.FakeClients() self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.tiny", id="1")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.nano", id="2")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.large", id="3")) self.clients.nova().flavors._cache(fakes.FakeResource(name="m1.xlarge", id="3")) def test_transform_by_name(self): resource_config = {"name": "m1.nano"} flavor_name = types.EC2Flavor.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(flavor_name, "m1.nano") def test_transform_by_id(self): resource_config = {"id": "2"} flavor_name = types.EC2Flavor.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(flavor_name, "m1.nano") def test_transform_by_id_no_match(self): resource_config = {"id": "4"} self.assertRaises(exceptions.InvalidScenarioArgument, types.EC2Flavor.transform, self.clients, resource_config) def test_transform_by_id_multiple_match(self): resource_config = {"id": "3"} self.assertRaises(exceptions.MultipleMatchesFound, types.EC2Flavor.transform, self.clients, resource_config) class GlanceImageTestCase(test.TestCase): def setUp(self): super(GlanceImageTestCase, self).setUp() self.clients = fakes.FakeClients() image1 = fakes.FakeResource(name="cirros-0.3.4-uec", id="100") self.clients.glance().images._cache(image1) image2 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk", id="101") self.clients.glance().images._cache(image2) image3 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="102") self.clients.glance().images._cache(image3) image4 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="103") self.clients.glance().images._cache(image4) def test_transform_by_id(self): resource_config = {"id": "100"} image_id = types.GlanceImage.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(image_id, "100") def test_transform_by_name(self): 
resource_config = {"name": "^cirros-0.3.4-uec$"} image_id = types.GlanceImage.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(image_id, "100") def test_transform_by_name_no_match(self): resource_config = {"name": "cirros-0.3.4-uec-boot"} self.assertRaises(exceptions.InvalidScenarioArgument, types.GlanceImage.transform, self.clients, resource_config) def test_transform_by_name_match_multiple(self): resource_config = {"name": "cirros-0.3.4-uec-ramdisk-copy"} self.assertRaises(exceptions.InvalidScenarioArgument, types.GlanceImage.transform, self.clients, resource_config) def test_transform_by_regex(self): resource_config = {"regex": "-uec$"} image_id = types.GlanceImage.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(image_id, "100") def test_transform_by_regex_match_multiple(self): resource_config = {"regex": "^cirros"} self.assertRaises(exceptions.InvalidScenarioArgument, types.GlanceImage.transform, self.clients, resource_config) def test_transform_by_regex_no_match(self): resource_config = {"regex": "-boot$"} self.assertRaises(exceptions.InvalidScenarioArgument, types.GlanceImage.transform, self.clients, resource_config) class GlanceImageArgsTestCase(test.TestCase): def test_transform(self): self.assertEqual({}, types.GlanceImageArguments.transform( clients=None, resource_config={})) self.assertEqual( {"visibility": "public"}, types.GlanceImageArguments.transform( clients=None, resource_config={"visibility": "public"})) self.assertEqual( {"visibility": "public"}, types.GlanceImageArguments.transform( clients=None, resource_config={"visibility": "public", "is_public": False})) self.assertEqual( {"visibility": "private"}, types.GlanceImageArguments.transform( clients=None, resource_config={"is_public": False})) class EC2ImageTestCase(test.TestCase): def setUp(self): super(EC2ImageTestCase, self).setUp() self.clients = fakes.FakeClients() image1 = fakes.FakeResource(name="cirros-0.3.4-uec", 
id="100") self.clients.glance().images._cache(image1) image2 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk", id="102") self.clients.glance().images._cache(image2) image3 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="102") self.clients.glance().images._cache(image3) image4 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="103") self.clients.glance().images._cache(image4) ec2_image1 = fakes.FakeResource(name="cirros-0.3.4-uec", id="200") ec2_image2 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk", id="201") ec2_image3 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="202") ec2_image4 = fakes.FakeResource(name="cirros-0.3.4-uec-ramdisk-copy", id="203") self.clients.ec2().get_all_images = mock.Mock( return_value=[ec2_image1, ec2_image2, ec2_image3, ec2_image4]) def test_transform_by_name(self): resource_config = {"name": "^cirros-0.3.4-uec$"} ec2_image_id = types.EC2Image.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(ec2_image_id, "200") def test_transform_by_id(self): resource_config = {"id": "100"} ec2_image_id = types.EC2Image.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(ec2_image_id, "200") def test_transform_by_id_no_match(self): resource_config = {"id": "101"} self.assertRaises(exceptions.InvalidScenarioArgument, types.EC2Image.transform, self.clients, resource_config) def test_transform_by_id_match_multiple(self): resource_config = {"id": "102"} self.assertRaises(exceptions.MultipleMatchesFound, types.EC2Image.transform, self.clients, resource_config) def test_transform_by_name_no_match(self): resource_config = {"name": "cirros-0.3.4-uec-boot"} self.assertRaises(exceptions.InvalidScenarioArgument, types.EC2Image.transform, self.clients, resource_config) def test_transform_by_name_match_multiple(self): resource_config = {"name": "cirros-0.3.4-uec-ramdisk-copy"} self.assertRaises(exceptions.InvalidScenarioArgument, 
types.EC2Image.transform, self.clients, resource_config) def test_transform_by_regex(self): resource_config = {"regex": "-uec$"} ec2_image_id = types.EC2Image.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(ec2_image_id, "200") def test_transform_by_regex_match_multiple(self): resource_config = {"regex": "^cirros"} self.assertRaises(exceptions.InvalidScenarioArgument, types.EC2Image.transform, self.clients, resource_config) def test_transform_by_regex_no_match(self): resource_config = {"regex": "-boot$"} self.assertRaises(exceptions.InvalidScenarioArgument, types.EC2Image.transform, self.clients, resource_config) class VolumeTypeTestCase(test.TestCase): def setUp(self): super(VolumeTypeTestCase, self).setUp() self.clients = fakes.FakeClients() volume_type1 = fakes.FakeResource(name="lvmdriver-1", id=100) self.clients.cinder().volume_types._cache(volume_type1) def test_transform_by_id(self): resource_config = {"id": 100} volumetype_id = types.VolumeType.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(volumetype_id, 100) def test_transform_by_name(self): resource_config = {"name": "lvmdriver-1"} volumetype_id = types.VolumeType.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(volumetype_id, 100) def test_transform_by_name_no_match(self): resource_config = {"name": "nomatch-1"} self.assertRaises(exceptions.InvalidScenarioArgument, types.VolumeType.transform, self.clients, resource_config) def test_transform_by_regex(self): resource_config = {"regex": "^lvm.*-1"} volumetype_id = types.VolumeType.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(volumetype_id, 100) def test_transform_by_regex_no_match(self): resource_config = {"regex": "dd"} self.assertRaises(exceptions.InvalidScenarioArgument, types.VolumeType.transform, self.clients, resource_config) class NeutronNetworkTestCase(test.TestCase): def setUp(self): 
super(NeutronNetworkTestCase, self).setUp() self.clients = fakes.FakeClients() net1_data = {"network": { "name": "net1" }} network1 = self.clients.neutron().create_network(net1_data) self.net1_id = network1["network"]["id"] def test_transform_by_id(self): resource_config = {"id": self.net1_id} network_id = types.NeutronNetwork.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(network_id, self.net1_id) def test_transform_by_name(self): resource_config = {"name": "net1"} network_id = types.NeutronNetwork.transform( clients=self.clients, resource_config=resource_config) self.assertEqual(network_id, self.net1_id) def test_transform_by_name_no_match(self): resource_config = {"name": "nomatch-1"} self.assertRaises(exceptions.InvalidScenarioArgument, types.NeutronNetwork.transform, self.clients, resource_config) @ddt.ddt class WatcherStrategyTestCase(test.TestCase): def setUp(self): super(WatcherStrategyTestCase, self).setUp() self.clients = fakes.FakeClients() self.strategy = self.clients.watcher().strategy._cache( fakes.FakeResource(name="dummy", id="1")) @ddt.data({"resource_config": {"name": "dummy"}}) @ddt.unpack def test_transform_by_name(self, resource_config=None): strategy_id = types.WatcherStrategy.transform(self.clients, resource_config) self.assertEqual(self.strategy.uuid, strategy_id) @ddt.data({"resource_config": {"name": "dummy-1"}}) @ddt.unpack def test_transform_by_name_no_match(self, resource_config=None): self.assertRaises(exceptions.RallyException, types.WatcherStrategy.transform, self.clients, resource_config) @ddt.ddt class WatcherGoalTestCase(test.TestCase): def setUp(self): super(WatcherGoalTestCase, self).setUp() self.clients = fakes.FakeClients() self.goal = self.clients.watcher().goal._cache( fakes.FakeResource(name="dummy", id="1")) @ddt.data({"resource_config": {"name": "dummy"}}) @ddt.unpack def test_transform_by_name(self, resource_config=None): goal_id = types.WatcherGoal.transform(self.clients, 
resource_config) self.assertEqual(self.goal.uuid, goal_id) @ddt.data({"resource_config": {"name": "dummy-1"}}) @ddt.unpack def test_transform_by_name_no_match(self, resource_config=None): self.assertRaises(exceptions.RallyException, types.WatcherGoal.transform, self.clients, resource_config) rally-0.9.1/tests/unit/plugins/openstack/__init__.py0000664000567000056710000000000013073417717023707 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/wrappers/0000775000567000056710000000000013073420067023443 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/wrappers/__init__.py0000664000567000056710000000000013073417717025552 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/wrappers/test_cinder.py0000664000567000056710000001015413073417717026331 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally import exceptions from rally.plugins.openstack.wrappers import cinder as cinder_wrapper from tests.unit import test @ddt.ddt class CinderWrapperTestCase(test.ScenarioTestCase): @ddt.data( {"version": "1", "expected_class": cinder_wrapper.CinderV1Wrapper}, {"version": "2", "expected_class": cinder_wrapper.CinderV2Wrapper} ) @ddt.unpack def test_wrap(self, version, expected_class): client = mock.MagicMock() client.choose_version.return_value = version self.assertIsInstance(cinder_wrapper.wrap(client, mock.Mock()), expected_class) @mock.patch("rally.plugins.openstack.wrappers.cinder.LOG") def test_wrap_wrong_version(self, mock_log): client = mock.MagicMock() client.choose_version.return_value = "dummy" self.assertRaises(exceptions.InvalidArgumentsException, cinder_wrapper.wrap, client, mock.Mock()) self.assertTrue(mock_log.warning.mock_called) class CinderV1WrapperTestCase(test.TestCase): def setUp(self): super(CinderV1WrapperTestCase, self).setUp() self.client = mock.MagicMock() self.client.choose_version.return_value = "1" self.owner = mock.Mock() self.wrapped_client = cinder_wrapper.wrap(self.client, self.owner) def test_create_volume(self): self.wrapped_client.create_volume(1, display_name="fake_vol") self.client.return_value.volumes.create.assert_called_once_with( 1, display_name=self.owner.generate_random_name.return_value) def test_update_volume(self): self.wrapped_client.update_volume("fake_id", display_name="fake_vol", display_description="_updated") self.client.return_value.volumes.update.assert_called_once_with( "fake_id", display_name=self.owner.generate_random_name.return_value, display_description="_updated") def test_create_snapshot(self): self.wrapped_client.create_snapshot("fake_id", display_name="fake_snap") (self.client.return_value.volume_snapshots.create. 
assert_called_once_with( "fake_id", display_name=self.owner.generate_random_name.return_value)) class CinderV2WrapperTestCase(test.TestCase): def setUp(self): super(CinderV2WrapperTestCase, self).setUp() self.client = mock.MagicMock() self.client.choose_version.return_value = "2" self.owner = mock.Mock() self.wrapped_client = cinder_wrapper.wrap(self.client, self.owner) def test_create_volume(self): self.wrapped_client.create_volume(1, name="fake_vol") self.client.return_value.volumes.create.assert_called_once_with( 1, name=self.owner.generate_random_name.return_value) def test_create_snapshot(self): self.wrapped_client.create_snapshot("fake_id", name="fake_snap") (self.client.return_value.volume_snapshots.create. assert_called_once_with( "fake_id", name=self.owner.generate_random_name.return_value)) def test_update_volume(self): self.wrapped_client.update_volume("fake_id", name="fake_vol", description="_updated") self.client.return_value.volumes.update.assert_called_once_with( "fake_id", name=self.owner.generate_random_name.return_value, description="_updated") rally-0.9.1/tests/unit/plugins/openstack/wrappers/test_keystone.py0000664000567000056710000002406213073417717026731 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from keystoneclient import exceptions import mock from rally.plugins.openstack.wrappers import keystone from tests.unit import test class KeystoneWrapperTestBase(object): def test_list_services(self): service = mock.MagicMock() service.id = "fake_id" service.name = "Foobar" service.extra_field = "extra_field" self.client.services.list.return_value = [service] result = list(self.wrapped_client.list_services()) self.assertEqual([("fake_id", "Foobar")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("Foobar", result[0].name) self.assertFalse(hasattr(result[0], "extra_field")) def test_wrap(self): client = mock.MagicMock() client.version = "dummy" self.assertRaises(NotImplementedError, keystone.wrap, client) def test_delete_service(self): self.wrapped_client.delete_service("fake_id") self.client.services.delete.assert_called_once_with("fake_id") def test_list_roles(self): role = mock.MagicMock() role.id = "fake_id" role.name = "Foobar" role.extra_field = "extra_field" self.client.roles.list.return_value = [role] result = list(self.wrapped_client.list_roles()) self.assertEqual([("fake_id", "Foobar")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("Foobar", result[0].name) self.assertFalse(hasattr(result[0], "extra_field")) def test_delete_role(self): self.wrapped_client.delete_role("fake_id") self.client.roles.delete.assert_called_once_with("fake_id") class KeystoneV2WrapperTestCase(test.TestCase, KeystoneWrapperTestBase): def setUp(self): super(KeystoneV2WrapperTestCase, self).setUp() self.client = mock.MagicMock() self.client.version = "v2.0" self.wrapped_client = keystone.wrap(self.client) def test_create_project(self): self.wrapped_client.create_project("Foobar") self.client.tenants.create.assert_called_once_with("Foobar") def test_create_project_in_non_default_domain_fail(self): self.assertRaises( NotImplementedError, self.wrapped_client.create_project, "Foobar", "non-default-domain") def test_delete_project(self): 
self.wrapped_client.delete_project("fake_id") self.client.tenants.delete.assert_called_once_with("fake_id") def test_list_projects(self): tenant = mock.MagicMock() tenant.id = "fake_id" tenant.name = "Foobar" tenant.extra_field = "extra_field" self.client.tenants.list.return_value = [tenant] result = list(self.wrapped_client.list_projects()) self.assertEqual([("fake_id", "Foobar", "default")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("Foobar", result[0].name) self.assertEqual("default", result[0].domain_id) self.assertFalse(hasattr(result[0], "extra_field")) def test_create_user(self): self.wrapped_client.create_user("foo", "bar", email="foo@bar.com", project_id="tenant_id", domain_name="default") self.client.users.create.assert_called_once_with( "foo", "bar", "foo@bar.com", "tenant_id") def test_create_user_in_non_default_domain_fail(self): self.assertRaises( NotImplementedError, self.wrapped_client.create_user, "foo", "bar", email="foo@bar.com", project_id="tenant_id", domain_name="non-default-domain") def test_delete_user(self): self.wrapped_client.delete_user("fake_id") self.client.users.delete.assert_called_once_with("fake_id") def test_list_users(self): user = mock.MagicMock() user.id = "fake_id" user.name = "foo" user.tenantId = "tenant_id" user.extra_field = "extra_field" self.client.users.list.return_value = [user] result = list(self.wrapped_client.list_users()) self.assertEqual([("fake_id", "foo", "tenant_id", "default")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("foo", result[0].name) self.assertEqual("tenant_id", result[0].project_id) self.assertEqual("default", result[0].domain_id) self.assertFalse(hasattr(result[0], "extra_field")) def test_create_role(self): self.wrapped_client.create_role("foo_name") self.client.roles.create.assert_called_once_with("foo_name") def test_add_role(self): self.wrapped_client.add_role("fake_role_id", "fake_user_id", "fake_project_id") 
self.client.roles.add_user_role.assert_called_once_with( "fake_user_id", "fake_role_id", tenant="fake_project_id") def test_remove_role(self): self.wrapped_client.remove_role("fake_role_id", "fake_user_id", "fake_project_id") self.client.roles.remove_user_role.assert_called_once_with( "fake_user_id", "fake_role_id", tenant="fake_project_id") class KeystoneV3WrapperTestCase(test.TestCase, KeystoneWrapperTestBase): def setUp(self): super(KeystoneV3WrapperTestCase, self).setUp() self.client = mock.MagicMock() self.client.version = "v3" self.wrapped_client = keystone.wrap(self.client) self.client.domains.get.side_effect = exceptions.NotFound self.client.domains.list.return_value = [ mock.MagicMock(id="domain_id")] def test_create_project(self): self.wrapped_client.create_project("Foobar", "domain") self.client.projects.create.assert_called_once_with( name="Foobar", domain="domain_id") def test_create_project_with_non_existing_domain_fail(self): self.client.domains.list.return_value = [] self.assertRaises(exceptions.NotFound, self.wrapped_client.create_project, "Foobar", "non-existing-domain") def test_delete_project(self): self.wrapped_client.delete_project("fake_id") self.client.projects.delete.assert_called_once_with("fake_id") def test_list_projects(self): project = mock.MagicMock() project.id = "fake_id" project.name = "Foobar" project.domain_id = "domain_id" project.extra_field = "extra_field" self.client.projects.list.return_value = [project] result = list(self.wrapped_client.list_projects()) self.assertEqual([("fake_id", "Foobar", "domain_id")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("Foobar", result[0].name) self.assertEqual("domain_id", result[0].domain_id) self.assertFalse(hasattr(result[0], "extra_field")) def test_create_user(self): fake_role = mock.MagicMock(id="fake_role_id") fake_role.name = "__member__" self.client.roles.list.return_value = [fake_role] self.client.users.create.return_value = mock.MagicMock( id="fake_user_id") 
self.wrapped_client.create_user( "foo", "bar", email="foo@bar.com", project_id="project_id", domain_name="domain") self.client.users.create.assert_called_once_with( name="foo", password="bar", email="foo@bar.com", default_project="project_id", domain="domain_id") def test_create_user_with_non_existing_domain_fail(self): self.client.domains.list.return_value = [] self.assertRaises(exceptions.NotFound, self.wrapped_client.create_user, "foo", "bar", email="foo@bar.com", project_id="project_id", domain_name="non-existing-domain") def test_delete_user(self): self.wrapped_client.delete_user("fake_id") self.client.users.delete.assert_called_once_with("fake_id") def test_list_users(self): user = mock.MagicMock() user.id = "fake_id" user.name = "foo" user.default_project_id = "project_id" user.domain_id = "domain_id" user.extra_field = "extra_field" self.client.users.list.return_value = [user] result = list(self.wrapped_client.list_users()) self.assertEqual([("fake_id", "foo", "project_id", "domain_id")], result) self.assertEqual("fake_id", result[0].id) self.assertEqual("foo", result[0].name) self.assertEqual("project_id", result[0].project_id) self.assertEqual("domain_id", result[0].domain_id) self.assertFalse(hasattr(result[0], "extra_field")) def test_create_role(self, **kwargs): self.wrapped_client.create_role("foo_name", domain="domain", **kwargs) self.client.roles.create.assert_called_once_with( "foo_name", domain="domain", **kwargs) def test_add_role(self): self.wrapped_client.add_role("fake_role_id", "fake_user_id", "fake_project_id") self.client.roles.grant.assert_called_once_with( "fake_role_id", user="fake_user_id", project="fake_project_id") def test_remove_role(self): self.wrapped_client.remove_role("fake_role_id", "fake_user_id", "fake_project_id") self.client.roles.revoke.assert_called_once_with( "fake_role_id", user="fake_user_id", project="fake_project_id") 
rally-0.9.1/tests/unit/plugins/openstack/wrappers/test_network.py0000664000567000056710000006350113073417717026562 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.common import utils from rally import consts from rally import exceptions from rally.plugins.openstack.wrappers import network from tests.unit import test from neutronclient.common import exceptions as neutron_exceptions from novaclient import exceptions as nova_exceptions SVC = "rally.plugins.openstack.wrappers.network." 
class Owner(utils.RandomNameGeneratorMixin): task = {"uuid": "task-uuid"} class NovaNetworkWrapperTestCase(test.TestCase): class Net(object): def __init__(self, **kwargs): if "tenant_id" in kwargs: kwargs["project_id"] = kwargs.pop("tenant_id") self.__dict__.update(kwargs) def setUp(self): self.owner = Owner() self.owner.generate_random_name = mock.Mock() super(NovaNetworkWrapperTestCase, self).setUp() def get_wrapper(self, *skip_cidrs, **kwargs): mock_clients = mock.Mock() mock_clients.nova.return_value.networks.list.return_value = [ self.Net(cidr=cidr) for cidr in skip_cidrs] return network.NovaNetworkWrapper(mock_clients, self.owner, config=kwargs) def test__init__(self): skip_cidrs = ["foo_cidr", "bar_cidr"] service = self.get_wrapper(*skip_cidrs) self.assertEqual(service.skip_cidrs, skip_cidrs) service.client.networks.list.assert_called_once_with() @mock.patch("rally.plugins.openstack.wrappers.network.generate_cidr") def test__generate_cidr(self, mock_generate_cidr): skip_cidrs = [5, 7] cidrs = iter(range(7)) mock_generate_cidr.side_effect = ( lambda start_cidr: start_cidr + next(cidrs) ) service = self.get_wrapper(*skip_cidrs, start_cidr=3) self.assertEqual(service._generate_cidr(), 3) self.assertEqual(service._generate_cidr(), 4) self.assertEqual(service._generate_cidr(), 6) # 5 is skipped self.assertEqual(service._generate_cidr(), 8) # 7 is skipped self.assertEqual(service._generate_cidr(), 9) self.assertEqual(mock_generate_cidr.mock_calls, [mock.call(start_cidr=3)] * 7) def test_create_network(self): service = self.get_wrapper() service.client.networks.create.side_effect = ( lambda **kwargs: self.Net(id="foo_id", **kwargs)) service._generate_cidr = mock.Mock(return_value="foo_cidr") net = service.create_network("foo_tenant", network_create_args={"fakearg": "fake"}, bar="spam") self.assertEqual(net, {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "cidr": "foo_cidr", "status": "ACTIVE", "external": False, "tenant_id": "foo_tenant"}) 
service._generate_cidr.assert_called_once_with() service.client.networks.create.assert_called_once_with( project_id="foo_tenant", cidr="foo_cidr", label=self.owner.generate_random_name.return_value, fakearg="fake") def test_delete_network(self): service = self.get_wrapper() service.client.networks.delete.return_value = "foo_deleted" self.assertEqual(service.delete_network({"id": "foo_id"}), "foo_deleted") service.client.networks.disassociate.assert_called_once_with( "foo_id", disassociate_host=False, disassociate_project=True) service.client.networks.delete.assert_called_once_with("foo_id") def test_list_networks(self): service = self.get_wrapper() service.client.networks.list.reset_mock() service.client.networks.list.return_value = [ self.Net(id="foo_id", project_id="foo_tenant", cidr="foo_cidr", label="foo_label"), self.Net(id="bar_id", project_id="bar_tenant", cidr="bar_cidr", label="bar_label")] expected = [ {"id": "foo_id", "cidr": "foo_cidr", "name": "foo_label", "status": "ACTIVE", "external": False, "tenant_id": "foo_tenant"}, {"id": "bar_id", "cidr": "bar_cidr", "name": "bar_label", "status": "ACTIVE", "external": False, "tenant_id": "bar_tenant"}] self.assertEqual(expected, service.list_networks()) service.client.networks.list.assert_called_once_with() def test__get_floating_ip(self): wrap = self.get_wrapper() wrap.client.floating_ips.get.return_value = mock.Mock(id="foo_id", ip="foo_ip") fip = wrap._get_floating_ip("fip_id") wrap.client.floating_ips.get.assert_called_once_with("fip_id") self.assertEqual(fip, "foo_id") wrap.client.floating_ips.get.side_effect = ( nova_exceptions.NotFound("")) self.assertIsNone(wrap._get_floating_ip("fip_id")) self.assertRaises(exceptions.GetResourceNotFound, wrap._get_floating_ip, "fip_id", do_raise=True) def test_create_floating_ip(self): wrap = self.get_wrapper() wrap.client.floating_ips.create.return_value = mock.Mock(id="foo_id", ip="foo_ip") fip = wrap.create_floating_ip(ext_network="bar_net", bar="spam") 
self.assertEqual(fip, {"ip": "foo_ip", "id": "foo_id"}) wrap.client.floating_ips.create.assert_called_once_with("bar_net") net = mock.Mock() net.name = "foo_net" wrap.client.floating_ip_pools.list.return_value = [net] fip = wrap.create_floating_ip() self.assertEqual(fip, {"ip": "foo_ip", "id": "foo_id"}) wrap.client.floating_ips.create.assert_called_with("foo_net") def test_delete_floating_ip(self): wrap = self.get_wrapper() fip_found = iter(range(3)) def get_fip(*args, **kwargs): for i in fip_found: return "fip_id" raise exceptions.GetResourceNotFound(resource="") wrap._get_floating_ip = mock.Mock(side_effect=get_fip) wrap.delete_floating_ip("fip_id") wrap.client.floating_ips.delete.assert_called_once_with("fip_id") self.assertFalse(wrap._get_floating_ip.called) wrap.delete_floating_ip("fip_id", wait=True) self.assertEqual( [mock.call("fip_id", do_raise=True)] * 4, wrap._get_floating_ip.mock_calls) def test_supports_extension(self): wrap = self.get_wrapper() self.assertFalse(wrap.supports_extension("extension")[0]) self.assertTrue(wrap.supports_extension("security-group")[0]) class NeutronWrapperTestCase(test.TestCase): def setUp(self): self.owner = Owner() self.owner.generate_random_name = mock.Mock() super(NeutronWrapperTestCase, self).setUp() def get_wrapper(self, *skip_cidrs, **kwargs): return network.NeutronWrapper(mock.Mock(), self.owner, config=kwargs) def test_SUBNET_IP_VERSION(self): self.assertEqual(network.NeutronWrapper.SUBNET_IP_VERSION, 4) @mock.patch("rally.plugins.openstack.wrappers.network.generate_cidr") def test__generate_cidr(self, mock_generate_cidr): cidrs = iter(range(5)) mock_generate_cidr.side_effect = ( lambda start_cidr: start_cidr + next(cidrs) ) service = self.get_wrapper(start_cidr=3) self.assertEqual(service._generate_cidr(), 3) self.assertEqual(service._generate_cidr(), 4) self.assertEqual(service._generate_cidr(), 5) self.assertEqual(service._generate_cidr(), 6) self.assertEqual(service._generate_cidr(), 7) 
self.assertEqual(mock_generate_cidr.mock_calls, [mock.call(start_cidr=3)] * 5) def test_external_networks(self): wrap = self.get_wrapper() wrap.client.list_networks.return_value = {"networks": "foo_networks"} self.assertEqual(wrap.external_networks, "foo_networks") wrap.client.list_networks.assert_called_once_with( **{"router:external": True}) def test_get_network(self): wrap = self.get_wrapper() neutron_net = {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "tenant_id": "foo_tenant", "status": "foo_status", "router:external": "foo_external", "subnets": "foo_subnets"} expected_net = {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "tenant_id": "foo_tenant", "status": "foo_status", "external": "foo_external", "router_id": None, "subnets": "foo_subnets"} wrap.client.show_network.return_value = {"network": neutron_net} net = wrap.get_network(net_id="foo_id") self.assertEqual(net, expected_net) wrap.client.show_network.assert_called_once_with("foo_id") wrap.client.show_network.side_effect = ( neutron_exceptions.NeutronClientException) self.assertRaises(network.NetworkWrapperException, wrap.get_network, net_id="foo_id") wrap.client.list_networks.return_value = {"networks": [neutron_net]} net = wrap.get_network(name="foo_name") self.assertEqual(net, expected_net) wrap.client.list_networks.assert_called_once_with(name="foo_name") wrap.client.list_networks.return_value = {"networks": []} self.assertRaises(network.NetworkWrapperException, wrap.get_network, name="foo_name") def test_create_v1_pool(self): subnet = "subnet_id" tenant = "foo_tenant" service = self.get_wrapper() expected_pool = {"pool": { "id": "pool_id", "name": self.owner.generate_random_name.return_value, "subnet_id": subnet, "tenant_id": tenant}} service.client.create_pool.return_value = expected_pool resultant_pool = service.create_v1_pool(tenant, subnet) service.client.create_pool.assert_called_once_with({ "pool": {"lb_method": "ROUND_ROBIN", "subnet_id": subnet, 
"tenant_id": tenant, "protocol": "HTTP", "name": self.owner.generate_random_name.return_value}}) self.assertEqual(resultant_pool, expected_pool) def test_create_network(self): service = self.get_wrapper() service.client.create_network.return_value = { "network": {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status"}} net = service.create_network("foo_tenant") service.client.create_network.assert_called_once_with({ "network": {"tenant_id": "foo_tenant", "name": self.owner.generate_random_name.return_value}}) self.assertEqual(net, {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status", "external": False, "tenant_id": "foo_tenant", "router_id": None, "subnets": []}) def test_create_network_with_subnets(self): subnets_num = 4 service = self.get_wrapper() subnets_cidrs = iter(range(subnets_num)) subnets_ids = iter(range(subnets_num)) service._generate_cidr = mock.Mock( side_effect=lambda: "cidr-%d" % next(subnets_cidrs)) service.client.create_subnet = mock.Mock( side_effect=lambda i: { "subnet": {"id": "subnet-%d" % next(subnets_ids)}}) service.client.create_network.return_value = { "network": {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status"}} net = service.create_network("foo_tenant", subnets_num=subnets_num) service.client.create_network.assert_called_once_with({ "network": {"tenant_id": "foo_tenant", "name": self.owner.generate_random_name.return_value}}) self.assertEqual(net, {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status", "external": False, "router_id": None, "tenant_id": "foo_tenant", "subnets": ["subnet-%d" % i for i in range(subnets_num)]}) self.assertEqual( service.client.create_subnet.mock_calls, [mock.call({"subnet": {"name": self.owner.generate_random_name.return_value, "enable_dhcp": True, "network_id": "foo_id", "tenant_id": "foo_tenant", "ip_version": service.SUBNET_IP_VERSION, 
"dns_nameservers": ["8.8.8.8", "8.8.4.4"], "cidr": "cidr-%d" % i}}) for i in range(subnets_num)]) def test_create_network_with_router(self): service = self.get_wrapper() service.create_router = mock.Mock(return_value={"id": "foo_router"}) service.client.create_network.return_value = { "network": {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status"}} net = service.create_network("foo_tenant", add_router=True) self.assertEqual(net, {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status", "external": False, "tenant_id": "foo_tenant", "router_id": "foo_router", "subnets": []}) service.create_router.assert_called_once_with(external=True, tenant_id="foo_tenant") def test_create_network_with_router_and_subnets(self): subnets_num = 4 service = self.get_wrapper() service._generate_cidr = mock.Mock(return_value="foo_cidr") service.create_router = mock.Mock(return_value={"id": "foo_router"}) service.client.create_subnet = mock.Mock( return_value={"subnet": {"id": "foo_subnet"}}) service.client.create_network.return_value = { "network": {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status"}} net = service.create_network("foo_tenant", add_router=True, subnets_num=subnets_num, dns_nameservers=["foo_nameservers"]) self.assertEqual(net, {"id": "foo_id", "name": self.owner.generate_random_name.return_value, "status": "foo_status", "external": False, "tenant_id": "foo_tenant", "router_id": "foo_router", "subnets": ["foo_subnet"] * subnets_num}) service.create_router.assert_called_once_with(external=True, tenant_id="foo_tenant") self.assertEqual( service.client.create_subnet.mock_calls, [mock.call({"subnet": {"name": self.owner.generate_random_name.return_value, "enable_dhcp": True, "network_id": "foo_id", "tenant_id": "foo_tenant", "ip_version": service.SUBNET_IP_VERSION, "dns_nameservers": ["foo_nameservers"], "cidr": "foo_cidr"}})] * subnets_num) 
self.assertEqual(service.client.add_interface_router.mock_calls, [mock.call("foo_router", {"subnet_id": "foo_subnet"}) for i in range(subnets_num)]) @mock.patch("rally.plugins.openstack.wrappers.network.NeutronWrapper" ".supports_extension", return_value=(False, "")) def test_delete_network(self, mock_neutron_wrapper_supports_extension): service = self.get_wrapper() service.client.list_ports.return_value = {"ports": []} service.client.delete_network.return_value = "foo_deleted" result = service.delete_network({"id": "foo_id", "router_id": None, "subnets": []}) self.assertEqual(result, "foo_deleted") self.assertEqual(service.client.remove_gateway_router.mock_calls, []) self.assertEqual( service.client.remove_interface_router.mock_calls, []) self.assertEqual(service.client.delete_router.mock_calls, []) self.assertEqual(service.client.delete_subnet.mock_calls, []) service.client.delete_network.assert_called_once_with("foo_id") def test_delete_v1_pool(self): service = self.get_wrapper() pool = {"pool": {"id": "pool-id"}} service.delete_v1_pool(pool["pool"]["id"]) service.client.delete_pool.assert_called_once_with("pool-id") @mock.patch("rally.plugins.openstack.wrappers.network.NeutronWrapper" ".supports_extension", return_value=(True, "")) def test_delete_network_with_dhcp_and_router_and_ports_and_subnets( self, mock_neutron_wrapper_supports_extension): service = self.get_wrapper() agents = ["foo_agent", "bar_agent"] subnets = ["foo_subnet", "bar_subnet"] ports = [{"id": "foo_port", "device_owner": "network:router_interface", "device_id": "rounttter"}, {"id": "bar_port", "device_owner": "network:dhcp"}] service.client.list_dhcp_agent_hosting_networks.return_value = ( {"agents": [{"id": agent_id} for agent_id in agents]}) service.client.list_ports.return_value = ({"ports": ports}) service.client.delete_network.return_value = "foo_deleted" result = service.delete_network( {"id": "foo_id", "router_id": "foo_router", "subnets": subnets, "lb_pools": []}) 
self.assertEqual(result, "foo_deleted") self.assertEqual( service.client.remove_network_from_dhcp_agent.mock_calls, [mock.call(agent_id, "foo_id") for agent_id in agents]) self.assertEqual(service.client.remove_gateway_router.mock_calls, [mock.call("foo_router")]) service.client.delete_port.assert_called_once_with(ports[1]["id"]) service.client.remove_interface_router.assert_called_once_with( ports[0]["device_id"], {"port_id": ports[0]["id"]}) self.assertEqual(service.client.delete_subnet.mock_calls, [mock.call(subnet_id) for subnet_id in subnets]) service.client.delete_network.assert_called_once_with("foo_id") mock_neutron_wrapper_supports_extension.assert_called_once_with( "dhcp_agent_scheduler") def test_list_networks(self): service = self.get_wrapper() service.client.list_networks.return_value = {"networks": "foo_nets"} self.assertEqual(service.list_networks(), "foo_nets") service.client.list_networks.assert_called_once_with() @mock.patch(SVC + "NeutronWrapper.external_networks") def test_create_floating_ip(self, mock_neutron_wrapper_external_networks): wrap = self.get_wrapper() wrap.create_port = mock.Mock(return_value={"id": "port_id"}) wrap.client.create_floatingip = mock.Mock( return_value={"floatingip": {"id": "fip_id", "floating_ip_address": "fip_ip"}}) self.assertRaises(ValueError, wrap.create_floating_ip) mock_neutron_wrapper_external_networks.__get__ = lambda *args: [] self.assertRaises(network.NetworkWrapperException, wrap.create_floating_ip, tenant_id="foo_tenant") mock_neutron_wrapper_external_networks.__get__ = ( lambda *args: [{"id": "ext_id"}] ) fip = wrap.create_floating_ip(tenant_id="foo_tenant", port_id="port_id") self.assertEqual(fip, {"id": "fip_id", "ip": "fip_ip"}) wrap.get_network = mock.Mock( return_value={"id": "foo_net", "external": True}) wrap.create_floating_ip(tenant_id="foo_tenant", ext_network="ext_net", port_id="port_id") wrap.get_network = mock.Mock( return_value={"id": "foo_net", "external": False}) 
wrap.create_floating_ip(tenant_id="foo_tenant", port_id="port_id") self.assertRaises(network.NetworkWrapperException, wrap.create_floating_ip, tenant_id="foo_tenant", ext_network="ext_net") def test_delete_floating_ip(self): wrap = self.get_wrapper() wrap.delete_floating_ip("fip_id") wrap.delete_floating_ip("fip_id", ignored_kwarg="bar") self.assertEqual(wrap.client.delete_floatingip.mock_calls, [mock.call("fip_id")] * 2) @mock.patch(SVC + "NeutronWrapper.external_networks") def test_create_router(self, mock_neutron_wrapper_external_networks): wrap = self.get_wrapper() wrap.client.create_router.return_value = {"router": "foo_router"} mock_neutron_wrapper_external_networks.__get__ = ( lambda *args: [{"id": "ext_id"}] ) router = wrap.create_router() wrap.client.create_router.assert_called_once_with( {"router": {"name": self.owner.generate_random_name.return_value}}) self.assertEqual(router, "foo_router") router = wrap.create_router(external=True, foo="bar") wrap.client.create_router.assert_called_with( {"router": {"name": self.owner.generate_random_name.return_value, "external_gateway_info": { "network_id": "ext_id", "enable_snat": True}, "foo": "bar"}}) def test_create_port(self): wrap = self.get_wrapper() wrap.client.create_port.return_value = {"port": "foo_port"} port = wrap.create_port("foo_net") wrap.client.create_port.assert_called_once_with( {"port": {"network_id": "foo_net", "name": self.owner.generate_random_name.return_value}}) self.assertEqual(port, "foo_port") port = wrap.create_port("foo_net", foo="bar") wrap.client.create_port.assert_called_with( {"port": {"network_id": "foo_net", "name": self.owner.generate_random_name.return_value, "foo": "bar"}}) def test_supports_extension(self): wrap = self.get_wrapper() wrap.client.list_extensions.return_value = ( {"extensions": [{"alias": "extension"}]}) self.assertTrue(wrap.supports_extension("extension")[0]) wrap.client.list_extensions.return_value = ( {"extensions": [{"alias": "extension"}]}) 
self.assertFalse(wrap.supports_extension("dummy-group")[0]) wrap.client.list_extensions.return_value = {} self.assertFalse(wrap.supports_extension("extension")[0]) class FunctionsTestCase(test.TestCase): def test_generate_cidr(self): with mock.patch("rally.plugins.openstack.wrappers.network.cidr_incr", iter(range(1, 4))): self.assertEqual(network.generate_cidr(), "10.2.1.0/24") self.assertEqual(network.generate_cidr(), "10.2.2.0/24") self.assertEqual(network.generate_cidr(), "10.2.3.0/24") with mock.patch("rally.plugins.openstack.wrappers.network.cidr_incr", iter(range(1, 4))): start_cidr = "1.1.0.0/26" self.assertEqual(network.generate_cidr(start_cidr), "1.1.0.64/26") self.assertEqual(network.generate_cidr(start_cidr), "1.1.0.128/26") self.assertEqual(network.generate_cidr(start_cidr), "1.1.0.192/26") def test_wrap(self): mock_clients = mock.Mock() mock_clients.nova().networks.list.return_value = [] config = {"fakearg": "fake"} owner = Owner() mock_clients.services.return_value = {"foo": consts.Service.NEUTRON} wrapper = network.wrap(mock_clients, owner, config) self.assertIsInstance(wrapper, network.NeutronWrapper) self.assertEqual(wrapper.owner, owner) self.assertEqual(wrapper.config, config) mock_clients.services.return_value = {"foo": "bar"} wrapper = network.wrap(mock_clients, owner, config) self.assertIsInstance(wrapper, network.NovaNetworkWrapper) self.assertEqual(wrapper.owner, owner) self.assertEqual(wrapper.config, config) rally-0.9.1/tests/unit/plugins/openstack/wrappers/test_glance.py0000664000567000056710000002656013073417717026326 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import tempfile

import ddt
from glanceclient import exc as glance_exc
import mock
from oslo_config import cfg

from rally import exceptions
from rally.plugins.openstack.wrappers import glance as glance_wrapper
from tests.unit import test

CONF = cfg.CONF


@ddt.ddt
class GlanceWrapperTestCase(test.ScenarioTestCase):

    @ddt.data(
        {"version": "1", "expected_class": glance_wrapper.GlanceV1Wrapper},
        {"version": "2", "expected_class": glance_wrapper.GlanceV2Wrapper})
    @ddt.unpack
    def test_wrap(self, version, expected_class):
        """wrap() dispatches on the client's chosen API version."""
        client = mock.MagicMock()
        client.choose_version.return_value = version
        self.assertIsInstance(glance_wrapper.wrap(client, mock.Mock()),
                              expected_class)

    @mock.patch("rally.plugins.openstack.wrappers.glance.LOG")
    def test_wrap_wrong_version(self, mock_log):
        """An unknown API version is rejected and logged."""
        client = mock.MagicMock()
        client.choose_version.return_value = "dummy"
        self.assertRaises(exceptions.InvalidArgumentsException,
                          glance_wrapper.wrap, client, mock.Mock())
        self.assertTrue(mock_log.warning.mock_called)


@ddt.ddt
class GlanceV1WrapperTestCase(test.ScenarioTestCase):
    # Shared temp file so one ddt case can exercise the local-file path.
    _tempfile = tempfile.NamedTemporaryFile()

    def setUp(self):
        super(GlanceV1WrapperTestCase, self).setUp()
        self.client = mock.MagicMock()
        self.client.choose_version.return_value = "1"
        self.owner = mock.Mock()
        self.wrapped_client = glance_wrapper.wrap(self.client, self.owner)

    def test_get_image(self):
        """get_image is a straight pass-through to images.get."""
        image = mock.Mock()
        return_image = self.wrapped_client.get_image(image)
        self.client.return_value.images.get.assert_called_once_with(image.id)
        self.assertEqual(self.client.return_value.images.get.return_value,
                         return_image)

    def test_get_image_not_found(self):
        """HTTPNotFound is translated into GetResourceNotFound."""
        image = mock.Mock()
        self.client.return_value.images.get.side_effect = (
            glance_exc.HTTPNotFound)
        self.assertRaises(exceptions.GetResourceNotFound,
                          self.wrapped_client.get_image, image)
        self.client.return_value.images.get.assert_called_once_with(image.id)

    @ddt.data(
        {"location": "image_location", "visibility": "private"},
        {"location": "image_location", "fakearg": "fake"},
        {"location": "image_location", "name": "image_name"},
        {"location": _tempfile.name, "visibility": "public"})
    @ddt.unpack
    @mock.patch("six.moves.builtins.open")
    def test_create_image(self, mock_open, location, **kwargs):
        """v1 create: local files are streamed, URLs use copy_from."""
        return_image = self.wrapped_client.create_image("container_format",
                                                        location,
                                                        "disk_format",
                                                        **kwargs)
        expected = kwargs
        expected["container_format"] = "container_format"
        expected["disk_format"] = "disk_format"
        if location.startswith("/"):
            # A local path is opened and passed as the data stream.
            expected["data"] = mock_open.return_value
            mock_open.assert_called_once_with(location)
            mock_open.return_value.close.assert_called_once_with()
        else:
            expected["copy_from"] = location
        if "name" not in kwargs:
            expected["name"] = self.owner.generate_random_name.return_value
        if "visibility" in kwargs:
            # v1 knows only the boolean is_public flag.
            expected["is_public"] = expected.pop("visibility") == "public"

        self.client().images.create.assert_called_once_with(**expected)
        self.mock_wait_for_status.mock.assert_called_once_with(
            self.client().images.create.return_value, ["active"],
            update_resource=self.wrapped_client.get_image,
            check_interval=CONF.benchmark.glance_image_create_poll_interval,
            timeout=CONF.benchmark.glance_image_create_timeout)
        self.assertEqual(self.mock_wait_for_status.mock.return_value,
                         return_image)

    @ddt.data({"expected": True},
              {"visibility": "public", "expected": True},
              {"visibility": "private", "expected": False})
    @ddt.unpack
    def test_set_visibility(self, visibility=None, expected=None):
        """set_visibility maps to the v1 boolean is_public flag."""
        image = mock.Mock()
        if visibility is None:
            self.wrapped_client.set_visibility(image)
        else:
            self.wrapped_client.set_visibility(image, visibility=visibility)
        self.client().images.update.assert_called_once_with(
            image.id, is_public=expected)

    @ddt.data({}, {"fakearg": "fake"})
    def test_list_images_basic(self, filters):
        """Plain filters are forwarded verbatim."""
        self.assertEqual(self.client().images.list.return_value,
                         self.wrapped_client.list_images(**filters))
        self.client().images.list.assert_called_once_with(filters=filters)

    def test_list_images_with_owner(self):
        """The owner filter is promoted to a top-level argument."""
        self.assertEqual(self.client().images.list.return_value,
                         self.wrapped_client.list_images(fakearg="fake",
                                                         owner="fakeowner"))
        self.client().images.list.assert_called_once_with(
            owner="fakeowner", filters={"fakearg": "fake"})

    def test_list_images_visibility_public(self):
        """visibility='public' is filtered client-side via is_public."""
        public_images = [mock.Mock(is_public=True), mock.Mock(is_public=True)]
        private_images = [mock.Mock(is_public=False),
                          mock.Mock(is_public=False)]
        self.client().images.list.return_value = public_images + private_images
        self.assertEqual(public_images,
                         self.wrapped_client.list_images(fakearg="fake",
                                                         visibility="public"))
        self.client().images.list.assert_called_once_with(
            filters={"fakearg": "fake"})

    def test_list_images_visibility_private(self):
        """visibility='private' keeps only non-public images."""
        public_images = [mock.Mock(is_public=True), mock.Mock(is_public=True)]
        private_images = [mock.Mock(is_public=False),
                          mock.Mock(is_public=False)]
        self.client().images.list.return_value = public_images + private_images
        self.assertEqual(private_images,
                         self.wrapped_client.list_images(fakearg="fake",
                                                         visibility="private"))
        self.client().images.list.assert_called_once_with(
            filters={"fakearg": "fake"})


@ddt.ddt
class GlanceV2WrapperTestCase(test.ScenarioTestCase):
    # Shared temp file so one ddt case can exercise the local-file path.
    _tempfile = tempfile.NamedTemporaryFile()

    def setUp(self):
        super(GlanceV2WrapperTestCase, self).setUp()
        self.client = mock.MagicMock()
        self.client.choose_version.return_value = "2"
        self.owner = mock.Mock()
        self.wrapped_client = glance_wrapper.wrap(self.client, self.owner)

    def test_get_image(self):
        """get_image is a straight pass-through to images.get."""
        image = mock.Mock()
        return_image = self.wrapped_client.get_image(image)
        self.client.return_value.images.get.assert_called_once_with(image.id)
self.assertEqual(return_image, self.client.return_value.images.get.return_value) def test_get_image_not_found(self): image = mock.Mock() self.client.return_value.images.get.side_effect = ( glance_exc.HTTPNotFound) self.assertRaises(exceptions.GetResourceNotFound, self.wrapped_client.get_image, image) self.client.return_value.images.get.assert_called_once_with(image.id) @ddt.data( {"location": "image_location", "visibility": "private"}, {"location": "image_location", "fakearg": "fake"}, {"location": "image_location", "name": "image_name"}, {"location": _tempfile.name, "visibility": "public"}, {"location": "image_location", "expected_kwargs": {"visibility": "public"}, "is_public": True}) @ddt.unpack @mock.patch("six.moves.builtins.open") @mock.patch("requests.get") def test_create_image(self, mock_requests_get, mock_open, location, expected_kwargs=None, **kwargs): self.wrapped_client.get_image = mock.Mock() created_image = mock.Mock() uploaded_image = mock.Mock() self.mock_wait_for_status.mock.side_effect = [created_image, uploaded_image] return_image = self.wrapped_client.create_image("container_format", location, "disk_format", **kwargs) create_args = expected_kwargs or kwargs create_args["container_format"] = "container_format" create_args["disk_format"] = "disk_format" create_args.setdefault("name", self.owner.generate_random_name.return_value) self.client().images.create.assert_called_once_with(**create_args) if location.startswith("/"): data = mock_open.return_value mock_open.assert_called_once_with(location) else: data = mock_requests_get.return_value.raw mock_requests_get.assert_called_once_with(location, stream=True) data.close.assert_called_once_with() self.client().images.upload.assert_called_once_with(created_image.id, data) self.mock_wait_for_status.mock.assert_has_calls([ mock.call( self.client().images.create.return_value, ["queued"], update_resource=self.wrapped_client.get_image, check_interval=CONF.benchmark. 
glance_image_create_poll_interval, timeout=CONF.benchmark.glance_image_create_timeout), mock.call( created_image, ["active"], update_resource=self.wrapped_client.get_image, check_interval=CONF.benchmark. glance_image_create_poll_interval, timeout=mock.ANY)]) self.assertEqual(uploaded_image, return_image) @ddt.data({}, {"visibility": "public"}, {"visibility": "private"}) @ddt.unpack def test_set_visibility(self, visibility=None): image = mock.Mock() if visibility is None: self.wrapped_client.set_visibility(image) visibility = "public" else: self.wrapped_client.set_visibility(image, visibility=visibility) self.client().images.update.assert_called_once_with( image.id, visibility=visibility) @ddt.data({}, {"fakearg": "fake"}) def test_list_images(self, filters): self.assertEqual(self.wrapped_client.list_images(**filters), self.client().images.list.return_value) self.client().images.list.assert_called_once_with(filters=filters) rally-0.9.1/tests/unit/plugins/openstack/test_service.py0000664000567000056710000000371413073417717024666 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack import service from tests.unit import test class DiscoverTestCase(test.TestCase): def test_discover_network_impl_based_on_service(self): class SomeService(service.UnifiedOpenStackService): pass @service.service("nova-network", "network", version="1", client_name="nova") class NovaNetService(service.Service): pass @service.compat_layer(NovaNetService) class UnifiedNovaNetService(SomeService): @classmethod def is_applicable(cls, clients): return True @service.service("neutron", "network", version="2") class NeutronV2Service(service.Service): pass @service.compat_layer(NeutronV2Service) class UnifiedNeutronV2Service(SomeService): pass clients = mock.MagicMock() clients.nova.choose_version.return_value = "1" clients.neutron.choose_version.return_value = "2" clients.services.return_value = {} self.assertIsInstance(SomeService(clients)._impl, UnifiedNovaNetService) clients.nova.return_value.services.list.reset_mock() clients.services.return_value = {"network": "neutron"} self.assertIsInstance(SomeService(clients)._impl, UnifiedNeutronV2Service) rally-0.9.1/tests/unit/plugins/openstack/hook/0000775000567000056710000000000013073420067022540 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/hook/__init__.py0000664000567000056710000000000013073417717024647 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/hook/test_fault_injection.py0000664000567000056710000001207313073417720027333 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import jsonschema
import mock
from os_faults.api import error

from rally import consts
from rally.plugins.openstack.hook import fault_injection
from tests.unit import fakes
from tests.unit import test


def create_config(**kwargs):
    """Build a minimal fault_injection hook config with *kwargs* as args."""
    return {
        "name": "fault_injection",
        "args": kwargs,
        "trigger": {
            "name": "event",
            "args": {
                "unit": "iteration",
                "at": [10]
            }
        }
    }


@ddt.ddt
class FaultInjectionHookTestCase(test.TestCase):

    def setUp(self):
        super(FaultInjectionHookTestCase, self).setUp()
        self.task = {"deployment_uuid": "foo_uuid"}

    @ddt.data((create_config(action="foo"), True),
              (create_config(action="foo", verify=True), True),
              (create_config(action=10), False),
              (create_config(action="foo", verify=10), False),
              (create_config(), False))
    @ddt.unpack
    def test_config_schema(self, config, valid):
        """Schema accepts a string action and boolean verify only."""
        if valid:
            fault_injection.FaultInjectionHook.validate(config)
        else:
            self.assertRaises(jsonschema.ValidationError,
                              fault_injection.FaultInjectionHook.validate,
                              config)

    @mock.patch("rally.common.objects.Deployment.get")
    @mock.patch("os_faults.human_api")
    @mock.patch("os_faults.connect")
    @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer)
    def test_run(self, mock_timer, mock_connect, mock_human_api,
                 mock_deployment_get):
        """Successful run: verify the injector, then apply the action."""
        injector_inst = mock_connect.return_value
        hook = fault_injection.FaultInjectionHook(
            self.task, {"action": "foo", "verify": True},
            {"iteration": 1})

        hook.run_sync()

        self.assertEqual(
            {"finished_at": fakes.FakeTimer().finish_timestamp(),
             "started_at": fakes.FakeTimer().timestamp(),
             "status": consts.HookStatus.SUCCESS,
             "triggered_by": {"iteration": 1}},
            hook.result())
        mock_connect.assert_called_once_with(None)
        injector_inst.verify.assert_called_once_with()
        mock_human_api.assert_called_once_with(injector_inst, "foo")

    @mock.patch("rally.common.objects.Deployment.get")
    @mock.patch("os_faults.human_api")
    @mock.patch("os_faults.connect")
    @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer)
    def test_run_extra_config(self, mock_timer, mock_connect, mock_human_api,
                              mock_deployment_get):
        """The deployment's cloud_config is handed to os_faults.connect."""
        mock_deployment_get.return_value = {
            "config": {"type": "ExistingCloud",
                       "extra": {"cloud_config": {"conf": "foo_config"}}}}
        injector_inst = mock_connect.return_value
        hook = fault_injection.FaultInjectionHook(
            self.task, {"action": "foo"}, {"iteration": 1})

        hook.run_sync()

        self.assertEqual(
            {"finished_at": fakes.FakeTimer().finish_timestamp(),
             "started_at": fakes.FakeTimer().timestamp(),
             "status": consts.HookStatus.SUCCESS,
             "triggered_by": {"iteration": 1}},
            hook.result())
        mock_connect.assert_called_once_with({"conf": "foo_config"})
        mock_human_api.assert_called_once_with(injector_inst, "foo")

    @mock.patch("rally.common.objects.Deployment.get")
    @mock.patch("os_faults.human_api")
    @mock.patch("os_faults.connect")
    @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer)
    def test_run_error(self, mock_timer, mock_connect, mock_human_api,
                       mock_deployment_get):
        """An OSFException is reported as a FAILED hook result."""
        injector_inst = mock_connect.return_value
        mock_human_api.side_effect = error.OSFException("foo error")
        hook = fault_injection.FaultInjectionHook(
            self.task, {"action": "foo", "verify": True},
            {"iteration": 1})

        hook.run_sync()

        self.assertEqual(
            {"finished_at": fakes.FakeTimer().finish_timestamp(),
             "started_at": fakes.FakeTimer().timestamp(),
             "status": consts.HookStatus.FAILED,
             "error": {
                 "details": mock.ANY,
                 "etype": "OSFException",
                 "msg": "foo error"},
             "triggered_by": {"iteration": 1}},
            hook.result())
        mock_connect.assert_called_once_with(None)
        injector_inst.verify.assert_called_once_with()
        mock_human_api.assert_called_once_with(injector_inst, "foo")
rally-0.9.1/tests/unit/plugins/openstack/verification/0000775000567000056710000000000013073420067024262 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/verification/__init__.py0000664000567000056710000000000013073417717026371 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/verification/tempest/0000775000567000056710000000000013073420067025743 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/verification/tempest/__init__.py0000664000567000056710000000000013073417717030052 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/verification/tempest/test_context.py0000664000567000056710000004471213073417720031052 0ustar jenkinsjenkins00000000000000# Copyright 2017: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import ddt import mock from oslo_config import cfg import requests from rally import exceptions from rally.plugins.openstack.verification.tempest import config from rally.plugins.openstack.verification.tempest import context from tests.unit import fakes from tests.unit import test CONF = cfg.CONF CREDS = { "admin": { "username": "admin", "tenant_name": "admin", "password": "admin-12345", "auth_url": "http://test:5000/v2.0/", "permission": "admin", "region_name": "test", "https_insecure": False, "https_cacert": "/path/to/cacert/file", "user_domain_name": "admin", "project_domain_name": "admin" }, "uuid": "fake_deployment" } PATH = "rally.plugins.openstack.verification.tempest.context" @ddt.ddt class TempestContextTestCase(test.TestCase): def setUp(self): super(TempestContextTestCase, self).setUp() mock.patch("rally.osclients.Clients").start() self.mock_isfile = mock.patch("os.path.isfile", return_value=True).start() self.deployment = fakes.FakeDeployment(**CREDS) cfg = {"verifier": mock.Mock(deployment=self.deployment), "verification": {"uuid": "uuid"}} cfg["verifier"].manager.home_dir = "/p/a/t/h" cfg["verifier"].manager.configfile = "/fake/path/to/config" self.context = context.TempestContext(cfg) self.context.conf.add_section("compute") self.context.conf.add_section("orchestration") self.context.conf.add_section("scenario") @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open(), create=True) def test__download_image_from_glance(self, mock_open): self.mock_isfile.return_value = False img_path = os.path.join(self.context.data_dir, "foo") img = mock.MagicMock() glanceclient = self.context.clients.glance() glanceclient.images.data.return_value = "data" self.context._download_image_from_source(img_path, img) mock_open.assert_called_once_with(img_path, "wb") glanceclient.images.data.assert_called_once_with(img.id) mock_open().write.assert_has_calls([mock.call("d"), mock.call("a"), mock.call("t"), mock.call("a")]) 
@mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) @mock.patch("requests.get", return_value=mock.MagicMock(status_code=200)) def test__download_image_from_url_success(self, mock_get, mock_open): self.mock_isfile.return_value = False img_path = os.path.join(self.context.data_dir, "foo") mock_get.return_value.iter_content.return_value = "data" self.context._download_image_from_source(img_path) mock_get.assert_called_once_with(CONF.tempest.img_url, stream=True) mock_open.assert_called_once_with(img_path, "wb") mock_open().write.assert_has_calls([mock.call("d"), mock.call("a"), mock.call("t"), mock.call("a")]) @mock.patch("requests.get") @ddt.data(404, 500) def test__download_image_from_url_failure(self, status_code, mock_get): self.mock_isfile.return_value = False mock_get.return_value = mock.MagicMock(status_code=status_code) self.assertRaises(exceptions.RallyException, self.context._download_image_from_source, os.path.join(self.context.data_dir, "foo")) @mock.patch("requests.get", side_effect=requests.ConnectionError()) def test__download_image_from_url_connection_error( self, mock_requests_get): self.mock_isfile.return_value = False self.assertRaises(exceptions.RallyException, self.context._download_image_from_source, os.path.join(self.context.data_dir, "foo")) @mock.patch("rally.plugins.openstack.wrappers." 
"network.NeutronWrapper.create_network") @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) def test_options_configured_manually( self, mock_open, mock_neutron_wrapper_create_network): self.context.available_services = ["glance", "heat", "nova", "neutron"] self.context.conf.set("compute", "image_ref", "id1") self.context.conf.set("compute", "image_ref_alt", "id2") self.context.conf.set("compute", "flavor_ref", "id3") self.context.conf.set("compute", "flavor_ref_alt", "id4") self.context.conf.set("compute", "fixed_network_name", "name1") self.context.conf.set("orchestration", "instance_type", "id5") self.context.conf.set("scenario", "img_file", "id6") self.context.__enter__() glanceclient = self.context.clients.glance() novaclient = self.context.clients.nova() self.assertEqual(glanceclient.images.create.call_count, 0) self.assertEqual(novaclient.flavors.create.call_count, 0) self.assertEqual(mock_neutron_wrapper_create_network.call_count, 0) def test__create_tempest_roles(self): role1 = CONF.tempest.swift_operator_role role2 = CONF.tempest.swift_reseller_admin_role role3 = CONF.tempest.heat_stack_owner_role role4 = CONF.tempest.heat_stack_user_role client = self.context.clients.verified_keystone() client.roles.list.return_value = [fakes.FakeRole(name=role1), fakes.FakeRole(name=role2)] client.roles.create.side_effect = [fakes.FakeFlavor(name=role3), fakes.FakeFlavor(name=role4)] self.context._create_tempest_roles() self.assertEqual(client.roles.create.call_count, 2) created_roles = [role.name for role in self.context._created_roles] self.assertIn(role3, created_roles) self.assertIn(role4, created_roles) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__discover_image(self, mock_wrap): client = mock_wrap.return_value client.list_images.return_value = [fakes.FakeImage(name="Foo"), fakes.FakeImage(name="CirrOS")] image = self.context._discover_image() self.assertEqual("CirrOS", image.name) @mock.patch("six.moves.builtins.open", 
side_effect=mock.mock_open(), create=True) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") @mock.patch("os.path.isfile", return_value=False) def test__download_image(self, mock_isfile, mock_wrap, mock_open): img_1 = mock.MagicMock() img_1.name = "Foo" img_2 = mock.MagicMock() img_2.name = "CirrOS" glanceclient = self.context.clients.glance() glanceclient.images.data.return_value = "data" mock_wrap.return_value.list_images.return_value = [img_1, img_2] self.context._download_image() img_path = os.path.join(self.context.data_dir, self.context.image_name) mock_wrap.return_value.list_images.assert_called_once_with( status="active", visibility="public") glanceclient.images.data.assert_called_once_with(img_2.id) mock_open.assert_called_once_with(img_path, "wb") mock_open().write.assert_has_calls([mock.call("d"), mock.call("a"), mock.call("t"), mock.call("a")]) # We can choose any option to test the '_configure_option' method. So let's # configure the 'flavor_ref' option. def test__configure_option(self): helper_method = mock.MagicMock() helper_method.side_effect = [fakes.FakeFlavor(id="id1")] self.context.conf.set("compute", "flavor_ref", "") self.context._configure_option("compute", "flavor_ref", helper_method=helper_method, flv_ram=64) self.assertEqual(helper_method.call_count, 1) result = self.context.conf.get("compute", "flavor_ref") self.assertEqual("id1", result) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__discover_or_create_image_when_image_exists(self, mock_wrap): client = mock_wrap.return_value client.list_images.return_value = [fakes.FakeImage(name="CirrOS")] image = self.context._discover_or_create_image() self.assertEqual("CirrOS", image.name) self.assertEqual(0, client.create_image.call_count) self.assertEqual(0, len(self.context._created_images)) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__discover_or_create_image(self, mock_wrap): client = mock_wrap.return_value image = 
self.context._discover_or_create_image() self.assertEqual(image, client.create_image.return_value) self.assertEqual(self.context._created_images[0], client.create_image.return_value) client.create_image.assert_called_once_with( container_format=CONF.tempest.img_container_format, image_location=mock.ANY, disk_format=CONF.tempest.img_disk_format, name=mock.ANY, visibility="public") def test__discover_or_create_flavor_when_flavor_exists(self): client = self.context.clients.nova() client.flavors.list.return_value = [fakes.FakeFlavor(id="id1", ram=64, vcpus=1, disk=0)] flavor = self.context._discover_or_create_flavor(64) self.assertEqual("id1", flavor.id) self.assertEqual(0, len(self.context._created_flavors)) def test__discover_or_create_flavor(self): client = self.context.clients.nova() client.flavors.create.side_effect = [fakes.FakeFlavor(id="id1")] flavor = self.context._discover_or_create_flavor(64) self.assertEqual("id1", flavor.id) self.assertEqual("id1", self.context._created_flavors[0].id) def test__create_network_resources(self): client = self.context.clients.neutron() fake_network = { "id": "nid1", "name": "network", "status": "status"} client.create_network.side_effect = [{"network": fake_network}] client.create_router.side_effect = [{"router": {"id": "rid1"}}] client.create_subnet.side_effect = [{"subnet": {"id": "subid1"}}] network = self.context._create_network_resources() self.assertEqual("nid1", network["id"]) self.assertEqual("nid1", self.context._created_networks[0]["id"]) self.assertEqual("rid1", self.context._created_networks[0]["router_id"]) self.assertEqual("subid1", self.context._created_networks[0]["subnets"][0]) def test__cleanup_tempest_roles(self): self.context._created_roles = [fakes.FakeRole(), fakes.FakeRole()] self.context._cleanup_tempest_roles() client = self.context.clients.keystone() self.assertEqual(client.roles.delete.call_count, 2) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__cleanup_images(self, 
mock_wrap): self.context._created_images = [fakes.FakeImage(id="id1"), fakes.FakeImage(id="id2")] self.context.conf.set("compute", "image_ref", "id1") self.context.conf.set("compute", "image_ref_alt", "id2") wrapper = mock_wrap.return_value wrapper.get_image.side_effect = [ fakes.FakeImage(id="id1", status="DELETED"), fakes.FakeImage(id="id2"), fakes.FakeImage(id="id2", status="DELETED")] self.context._cleanup_images() client = self.context.clients.glance() client.images.delete.assert_has_calls([mock.call("id1"), mock.call("id2")]) self.assertEqual("", self.context.conf.get("compute", "image_ref")) self.assertEqual("", self.context.conf.get("compute", "image_ref_alt")) def test__cleanup_flavors(self): self.context._created_flavors = [fakes.FakeFlavor(id="id1"), fakes.FakeFlavor(id="id2"), fakes.FakeFlavor(id="id3")] self.context.conf.set("compute", "flavor_ref", "id1") self.context.conf.set("compute", "flavor_ref_alt", "id2") self.context.conf.set("orchestration", "instance_type", "id3") self.context._cleanup_flavors() client = self.context.clients.nova() self.assertEqual(client.flavors.delete.call_count, 3) self.assertEqual("", self.context.conf.get("compute", "flavor_ref")) self.assertEqual("", self.context.conf.get("compute", "flavor_ref_alt")) self.assertEqual("", self.context.conf.get("orchestration", "instance_type")) @mock.patch("rally.plugins.openstack.wrappers." 
"network.NeutronWrapper.delete_network") def test__cleanup_network_resources( self, mock_neutron_wrapper_delete_network): self.context._created_networks = [{"name": "net-12345"}] self.context.conf.set("compute", "fixed_network_name", "net-12345") self.context._cleanup_network_resources() self.assertEqual(mock_neutron_wrapper_delete_network.call_count, 1) self.assertEqual("", self.context.conf.get("compute", "fixed_network_name")) @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) @mock.patch("%s.TempestContext._configure_option" % PATH) @mock.patch("%s.TempestContext._create_tempest_roles" % PATH) @mock.patch("rally.verification.utils.create_dir") @mock.patch("%s.osclients.Clients" % PATH) def test_setup(self, mock_clients, mock_create_dir, mock__create_tempest_roles, mock__configure_option, mock_open): self.deployment = fakes.FakeDeployment(**CREDS) verifier = mock.Mock(deployment=self.deployment) verifier.manager.home_dir = "/p/a/t/h" # case #1: no neutron and heat mock_clients.return_value.services.return_value = {} ctx = context.TempestContext({"verifier": verifier}) ctx.conf = mock.Mock() ctx.setup() ctx.conf.read.assert_called_once_with(verifier.manager.configfile) mock_create_dir.assert_called_once_with(ctx.data_dir) mock__create_tempest_roles.assert_called_once_with() mock_open.assert_called_once_with(verifier.manager.configfile, "w") ctx.conf.write(mock_open.side_effect()) self.assertEqual( [mock.call("DEFAULT", "log_file", "/p/a/t/h/tempest.log"), mock.call("oslo_concurrency", "lock_path", "/p/a/t/h/lock_files"), mock.call("scenario", "img_dir", "/p/a/t/h"), mock.call("scenario", "img_file", ctx.image_name, helper_method=ctx._download_image), mock.call("compute", "image_ref", helper_method=ctx._discover_or_create_image), mock.call("compute", "image_ref_alt", helper_method=ctx._discover_or_create_image), mock.call("compute", "flavor_ref", helper_method=ctx._discover_or_create_flavor, flv_ram=config.CONF.tempest.flavor_ref_ram), 
mock.call("compute", "flavor_ref_alt", helper_method=ctx._discover_or_create_flavor, flv_ram=config.CONF.tempest.flavor_ref_alt_ram)], mock__configure_option.call_args_list) mock_create_dir.reset_mock() mock__create_tempest_roles.reset_mock() mock_open.reset_mock() mock__configure_option.reset_mock() # case #2: neutron and heat are presented mock_clients.return_value.services.return_value = { "network": "neutron", "orchestration": "heat"} ctx = context.TempestContext({"verifier": verifier}) ctx.conf = mock.Mock() ctx.setup() ctx.conf.read.assert_called_once_with(verifier.manager.configfile) mock_create_dir.assert_called_once_with(ctx.data_dir) mock__create_tempest_roles.assert_called_once_with() mock_open.assert_called_once_with(verifier.manager.configfile, "w") ctx.conf.write(mock_open.side_effect()) self.assertEqual( [mock.call("DEFAULT", "log_file", "/p/a/t/h/tempest.log"), mock.call("oslo_concurrency", "lock_path", "/p/a/t/h/lock_files"), mock.call("scenario", "img_dir", "/p/a/t/h"), mock.call("scenario", "img_file", ctx.image_name, helper_method=ctx._download_image), mock.call("compute", "image_ref", helper_method=ctx._discover_or_create_image), mock.call("compute", "image_ref_alt", helper_method=ctx._discover_or_create_image), mock.call("compute", "flavor_ref", helper_method=ctx._discover_or_create_flavor, flv_ram=config.CONF.tempest.flavor_ref_ram), mock.call("compute", "flavor_ref_alt", helper_method=ctx._discover_or_create_flavor, flv_ram=config.CONF.tempest.flavor_ref_alt_ram), mock.call("compute", "fixed_network_name", helper_method=ctx._create_network_resources), mock.call("orchestration", "instance_type", helper_method=ctx._discover_or_create_flavor, flv_ram=config.CONF.tempest.heat_instance_type_ram)], mock__configure_option.call_args_list) rally-0.9.1/tests/unit/plugins/openstack/verification/tempest/test_manager.py0000664000567000056710000002531613073417717031005 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 
(the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import os import subprocess import mock from rally import exceptions from rally.plugins.openstack.verification.tempest import manager from tests.unit import test PATH = "rally.plugins.openstack.verification.tempest.manager" class TempestManagerTestCase(test.TestCase): def test_run_environ_property(self): mock.patch("%s.testr.TestrLauncher.run_environ" % PATH, new={"some": "key"}).start() tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) env = {"some": "key", "OS_TEST_PATH": os.path.join(tempest.repo_dir, "tempest/test_discover"), "TEMPEST_CONFIG": "tempest.conf", "TEMPEST_CONFIG_DIR": os.path.dirname(tempest.configfile)} self.assertEqual(env, tempest.run_environ) def test_configfile_property(self): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertEqual(os.path.join(tempest.home_dir, "tempest.conf"), tempest.configfile) @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) def test_get_configuration(self, mock_open): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) tempest.get_configuration() mock_open.assert_called_once_with(tempest.configfile) mock_open.side_effect().read.assert_called_once_with() @mock.patch("%s.config.TempestConfigfileManager" % PATH) def test_configure(self, mock_tempest_configfile_manager): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) cm = mock_tempest_configfile_manager.return_value extra_options = mock.Mock() self.assertEqual(cm.create.return_value, 
tempest.configure(extra_options)) mock_tempest_configfile_manager.assert_called_once_with( tempest.verifier.deployment) cm.create.assert_called_once_with(tempest.configfile, extra_options) @mock.patch("%s.config.os.path.exists" % PATH) def test_is_configured(self, mock_exists): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertTrue(tempest.is_configured()) @mock.patch("rally.verification.utils.extend_configfile") def test_extend_configuration(self, mock_extend_configfile): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) extra_options = mock.Mock() self.assertEqual(mock_extend_configfile.return_value, tempest.extend_configuration(extra_options)) mock_extend_configfile.assert_called_once_with(extra_options, tempest.configfile) @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) def test_override_configuration(self, mock_open): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) new_content = mock.Mock() tempest.override_configuration(new_content) mock_open.assert_called_once_with(tempest.configfile, "w") mock_open.side_effect().write.assert_called_once_with(new_content) @mock.patch("%s.os.path.exists" % PATH) @mock.patch("%s.utils.check_output" % PATH) @mock.patch("%s.TempestManager.check_system_wide" % PATH) def test_install_extension(self, mock_check_system_wide, mock_check_output, mock_exists): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd", system_wide=True)) e = self.assertRaises(NotImplementedError, tempest.install_extension, None, None, {"key": "value"}) self.assertIn("verifiers don't support extra installation settings", "%s" % e) test_reqs_path = os.path.join(tempest.base_dir, "extensions", "example", "test-requirements.txt") # case #1 system-wide installation source = "https://github.com/example/example" tempest.install_extension(source) path = os.path.join(tempest.base_dir, "extensions") mock_check_output.assert_called_once_with( ["pip", "install", "--no-deps", 
"--src", path, "-e", "git+https://github.com/example/example@master#egg=example"], cwd=tempest.base_dir, env=tempest.environ) mock_check_system_wide.assert_called_once_with( reqs_file_path=test_reqs_path) mock_check_output.reset_mock() # case #2 virtual env with specified version tempest.verifier.system_wide = False version = "some" tempest.install_extension(source, version=version) self.assertEqual([ mock.call([ "pip", "install", "--src", path, "-e", "git+https://github.com/example/example@some#egg=example"], cwd=tempest.base_dir, env=tempest.environ), mock.call(["pip", "install", "-r", test_reqs_path], cwd=tempest.base_dir, env=tempest.environ)], mock_check_output.call_args_list) @mock.patch("%s.utils.check_output" % PATH) def test_list_extensions(self, mock_check_output): plugins_list = [ {"name": "some", "entry_point": "foo.bar", "location": "/tmp"}, {"name": "another", "entry_point": "bar.foo", "location": "/tmp"} ] mock_check_output.return_value = json.dumps(plugins_list) tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertEqual(plugins_list, tempest.list_extensions()) self.assertEqual(1, mock_check_output.call_count) mock_check_output.reset_mock() mock_check_output.side_effect = subprocess.CalledProcessError("", "") self.assertRaises(exceptions.RallyException, tempest.list_extensions) self.assertEqual(1, mock_check_output.call_count) @mock.patch("%s.TempestManager.list_extensions" % PATH) @mock.patch("%s.os.path.exists" % PATH) @mock.patch("%s.shutil.rmtree" % PATH) def test_uninstall_extension(self, mock_rmtree, mock_exists, mock_list_extensions): plugins_list = [ {"name": "some", "entry_point": "foo.bar", "location": "/tmp"}, {"name": "another", "entry_point": "bar.foo", "location": "/tmp"} ] mock_list_extensions.return_value = plugins_list tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) tempest.uninstall_extension("some") mock_rmtree.assert_called_once_with(plugins_list[0]["location"]) 
mock_list_extensions.assert_called_once_with() mock_rmtree.reset_mock() mock_list_extensions.reset_mock() self.assertRaises(exceptions.RallyException, tempest.uninstall_extension, "unexist") mock_list_extensions.assert_called_once_with() self.assertFalse(mock_rmtree.called) @mock.patch("%s.TempestManager._transform_pattern" % PATH) @mock.patch("%s.testr.TestrLauncher.list_tests" % PATH) def test_list_tests(self, mock_testr_launcher_list_tests, mock__transform_pattern): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertEqual(mock_testr_launcher_list_tests.return_value, tempest.list_tests()) mock_testr_launcher_list_tests.assert_called_once_with("") self.assertFalse(mock__transform_pattern.called) mock_testr_launcher_list_tests.reset_mock() pattern = mock.Mock() self.assertEqual(mock_testr_launcher_list_tests.return_value, tempest.list_tests(pattern)) mock_testr_launcher_list_tests.assert_called_once_with( mock__transform_pattern.return_value) mock__transform_pattern.assert_called_once_with(pattern) @mock.patch("%s.testr.TestrLauncher.validate_args" % PATH) def test_validate_args(self, mock_testr_launcher_validate_args): tm = manager.TempestManager(mock.Mock()) tm.validate_args({}) tm.validate_args({"pattern": "some.test"}) tm.validate_args({"pattern": "set=smoke"}) tm.validate_args({"pattern": "set=compute"}) tm.validate_args({"pattern": "set=full"}) e = self.assertRaises(exceptions.ValidationError, tm.validate_args, {"pattern": "foo=bar"}) self.assertEqual("Validation error: 'pattern' argument should be a " "regexp or set name (format: 'tempest.api.identity." "v3', 'set=smoke').", "%s" % e) e = self.assertRaises(exceptions.ValidationError, tm.validate_args, {"pattern": "set=foo"}) self.assertIn("Test set 'foo' not found in available Tempest test " "sets. 
Available sets are ", "%s" % e) def test__transform_pattern(self): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertEqual("foo", tempest._transform_pattern("foo")) self.assertEqual("foo=bar", tempest._transform_pattern("foo=bar")) self.assertEqual("", tempest._transform_pattern("set=full")) self.assertEqual("smoke", tempest._transform_pattern("set=smoke")) self.assertEqual("tempest.bar", tempest._transform_pattern("set=bar")) self.assertEqual("tempest.api.compute", tempest._transform_pattern("set=compute")) @mock.patch("%s.TempestManager._transform_pattern" % PATH) def test_prepare_run_args(self, mock__transform_pattern): tempest = manager.TempestManager(mock.MagicMock(uuid="uuuiiiddd")) self.assertEqual({}, tempest.prepare_run_args({})) self.assertFalse(mock__transform_pattern.called) self.assertEqual({"foo": "bar"}, tempest.prepare_run_args({"foo": "bar"})) self.assertFalse(mock__transform_pattern.called) pattern = mock.Mock() self.assertEqual({"pattern": mock__transform_pattern.return_value}, tempest.prepare_run_args({"pattern": pattern})) mock__transform_pattern.assert_called_once_with(pattern) rally-0.9.1/tests/unit/plugins/openstack/verification/tempest/test_config.py0000664000567000056710000002122113073417720030621 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from oslo_config import cfg from rally.plugins.openstack.verification.tempest import config from tests.unit import fakes from tests.unit import test CONF = cfg.CONF CREDS = { "admin": { "username": "admin", "tenant_name": "admin", "password": "admin-12345", "auth_url": "http://test:5000/v2.0/", "permission": "admin", "region_name": "test", "https_insecure": False, "https_cacert": "/path/to/cacert/file", "user_domain_name": "admin", "project_domain_name": "admin" }, "uuid": "fake_deployment" } PATH = "rally.plugins.openstack.verification.tempest.config" @ddt.ddt class TempestConfigfileManagerTestCase(test.TestCase): def setUp(self): super(TempestConfigfileManagerTestCase, self).setUp() mock.patch("rally.osclients.Clients").start() deployment = fakes.FakeDeployment(**CREDS) self.tempest = config.TempestConfigfileManager(deployment) def test__configure_auth(self): self.tempest.conf.add_section("auth") self.tempest._configure_auth() expected = ( ("admin_username", CREDS["admin"]["username"]), ("admin_password", CREDS["admin"]["password"]), ("admin_project_name", CREDS["admin"]["tenant_name"]), ("admin_domain_name", CREDS["admin"]["user_domain_name"])) result = self.tempest.conf.items("auth") for item in expected: self.assertIn(item, result) @ddt.data("data_processing", "data-processing") def test__configure_data_processing(self, service_type): self.tempest.available_services = ["sahara"] self.tempest.clients.services.return_value = { service_type: "sahara"} self.tempest.conf.add_section("data-processing") self.tempest._configure_data_processing() self.assertEqual( self.tempest.conf.get( "data-processing", "catalog_type"), service_type) def test__configure_identity(self): self.tempest.conf.add_section("identity") self.tempest._configure_identity() expected = ( ("region", CREDS["admin"]["region_name"]), ("auth_version", "v2"), ("uri", CREDS["admin"]["auth_url"][:-1]), ("uri_v3", CREDS["admin"]["auth_url"].replace("/v2.0/", "/v3")), 
("disable_ssl_certificate_validation", str(CREDS["admin"]["https_insecure"])), ("ca_certificates_file", CREDS["admin"]["https_cacert"])) result = self.tempest.conf.items("identity") for item in expected: self.assertIn(item, result) def test__configure_network_if_neutron(self): self.tempest.available_services = ["neutron"] client = self.tempest.clients.neutron() client.list_networks.return_value = { "networks": [ { "status": "ACTIVE", "id": "test_id", "name": "test_name", "router:external": True } ] } self.tempest.conf.add_section("network") self.tempest._configure_network() self.assertEqual(self.tempest.conf.get("network", "public_network_id"), "test_id") self.assertEqual(self.tempest.conf.get("network", "floating_network_name"), "test_name") def test__configure_network_if_nova(self): self.tempest.available_services = ["nova"] client = self.tempest.clients.nova() client.networks.list.return_value = [ mock.MagicMock(human_id="fake-network")] self.tempest.conf.add_section("compute") self.tempest.conf.add_section("validation") self.tempest._configure_network() expected = {"compute": ("fixed_network_name", "fake-network"), "validation": ("network_for_ssh", "fake-network")} for section, option in expected.items(): result = self.tempest.conf.items(section) self.assertIn(option, result) def test__configure_network_feature_enabled(self): self.tempest.available_services = ["neutron"] client = self.tempest.clients.neutron() client.list_ext.return_value = { "extensions": [ {"alias": "dvr"}, {"alias": "extra_dhcp_opt"}, {"alias": "extraroute"} ] } self.tempest.conf.add_section("network-feature-enabled") self.tempest._configure_network_feature_enabled() client.list_ext.assert_called_once_with("extensions", "/extensions", retrieve_all=True) self.assertEqual(self.tempest.conf.get( "network-feature-enabled", "api_extensions"), "dvr,extra_dhcp_opt,extraroute") def test__configure_object_storage(self): self.tempest.conf.add_section("object-storage") 
self.tempest._configure_object_storage() expected = ( ("operator_role", CONF.tempest.swift_operator_role), ("reseller_admin_role", CONF.tempest.swift_reseller_admin_role)) result = self.tempest.conf.items("object-storage") for item in expected: self.assertIn(item, result) def test__configure_orchestration(self): self.tempest.conf.add_section("orchestration") self.tempest._configure_orchestration() expected = ( ("stack_owner_role", CONF.tempest.heat_stack_owner_role), ("stack_user_role", CONF.tempest.heat_stack_user_role)) result = self.tempest.conf.items("orchestration") for item in expected: self.assertIn(item, result) def test__configure_service_available(self): available_services = ("nova", "cinder", "glance", "sahara") self.tempest.available_services = available_services self.tempest.conf.add_section("service_available") self.tempest._configure_service_available() expected = ( ("neutron", "False"), ("heat", "False"), ("nova", "True"), ("swift", "False"), ("cinder", "True"), ("sahara", "True"), ("glance", "True")) result = self.tempest.conf.items("service_available") for item in expected: self.assertIn(item, result) @ddt.data({}, {"service": "neutron", "connect_method": "floating"}) @ddt.unpack def test__configure_validation(self, service="nova", connect_method="fixed"): self.tempest.available_services = [service] self.tempest.conf.add_section("validation") self.tempest._configure_validation() expected = (("connect_method", connect_method), ) result = self.tempest.conf.items("validation") for item in expected: self.assertIn(item, result) @mock.patch("%s.six.StringIO" % PATH) @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open()) @mock.patch("inspect.getmembers") def test_create(self, mock_inspect_getmembers, mock_open, mock_string_io): configure_something_method = mock.MagicMock() mock_inspect_getmembers.return_value = [("_configure_something", configure_something_method)] self.tempest.conf.read = mock.Mock() self.tempest.conf.write = mock.Mock() 
self.tempest.conf.read.return_value = "[section]\noption = value" fake_extra_conf = {"section2": {"option2": "value2"}} self.tempest.create("/path/to/fake/conf", fake_extra_conf) self.assertEqual(configure_something_method.call_count, 1) self.assertIn(("option2", "value2"), self.tempest.conf.items("section2")) mock_open.assert_called_once_with("/path/to/fake/conf", "w") self.tempest.conf.write.assert_has_calls( [mock.call(mock_open.side_effect()), mock.call(mock_string_io.return_value)]) mock_string_io.return_value.getvalue.assert_called_once_with() rally-0.9.1/tests/unit/plugins/openstack/test_scenario.py0000664000567000056710000001373313073417720025025 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from oslotest import mockpatch from rally.plugins.openstack import scenario as base_scenario from tests.unit import test @ddt.ddt class OpenStackScenarioTestCase(test.TestCase): def setUp(self): super(OpenStackScenarioTestCase, self).setUp() self.osclients = mockpatch.Patch( "rally.osclients.Clients") self.useFixture(self.osclients) self.context = test.get_test_context() self.context.update({"foo": "bar"}) def test_init(self): scenario = base_scenario.OpenStackScenario(self.context) self.assertEqual(self.context, scenario.context) def test_init_admin_context(self): self.context["admin"] = {"credential": mock.Mock()} scenario = base_scenario.OpenStackScenario(self.context) self.assertEqual(self.context, scenario.context) self.osclients.mock.assert_called_once_with( self.context["admin"]["credential"], {}) scenario = base_scenario.OpenStackScenario( self.context, admin_clients="foobar") def test_init_admin_clients(self): scenario = base_scenario.OpenStackScenario( self.context, admin_clients="foobar") self.assertEqual(self.context, scenario.context) self.assertEqual("foobar", scenario._admin_clients) def test_init_user_context(self): user = {"credential": mock.Mock(), "tenant_id": "foo"} self.context["users"] = [user] self.context["tenants"] = {"foo": {"name": "bar"}} self.context["user_choice_method"] = "random" scenario = base_scenario.OpenStackScenario(self.context) self.assertEqual(user, scenario.context["user"]) self.assertEqual(self.context["tenants"]["foo"], scenario.context["tenant"]) self.osclients.mock.assert_called_once_with(user["credential"], {}) def test_init_clients(self): scenario = base_scenario.OpenStackScenario(self.context, admin_clients="spam", clients="ham") self.assertEqual("spam", scenario._admin_clients) self.assertEqual("ham", scenario._clients) def test_init_user_clients(self): scenario = base_scenario.OpenStackScenario( self.context, clients="foobar") self.assertEqual(self.context, scenario.context) 
self.assertEqual("foobar", scenario._clients) def test__choose_user_random(self): users = [{"credential": mock.Mock(), "tenant_id": "foo"} for _ in range(5)] self.context["users"] = users self.context["tenants"] = {"foo": {"name": "bar"}, "baz": {"name": "spam"}} self.context["user_choice_method"] = "random" scenario = base_scenario.OpenStackScenario() scenario._choose_user(self.context) self.assertIn("user", self.context) self.assertIn(self.context["user"], self.context["users"]) self.assertIn("tenant", self.context) tenant_id = self.context["user"]["tenant_id"] self.assertEqual(self.context["tenants"][tenant_id], self.context["tenant"]) @ddt.data((1, "0", "bar"), (2, "0", "foo"), (3, "1", "bar"), (4, "1", "foo"), (5, "0", "bar"), (6, "0", "foo"), (7, "1", "bar"), (8, "1", "foo")) @ddt.unpack def test__choose_user_round_robin(self, iteration, expected_user_id, expected_tenant_id): self.context["iteration"] = iteration self.context["user_choice_method"] = "round_robin" self.context["users"] = [] self.context["tenants"] = {} for tid in ("foo", "bar"): users = [{"id": str(i), "tenant_id": tid} for i in range(2)] self.context["users"] += users self.context["tenants"][tid] = {"name": tid, "users": users} scenario = base_scenario.OpenStackScenario() scenario._choose_user(self.context) self.assertIn("user", self.context) self.assertIn(self.context["user"], self.context["users"]) self.assertEqual(expected_user_id, self.context["user"]["id"]) self.assertIn("tenant", self.context) tenant_id = self.context["user"]["tenant_id"] self.assertEqual(self.context["tenants"][tenant_id], self.context["tenant"]) self.assertEqual(expected_tenant_id, tenant_id) @mock.patch("rally.task.scenario.Scenario.validate") def test_validate(self, mock_scenario_validate): cred1 = mock.Mock() cred2 = mock.Mock() cred3 = mock.Mock() self.osclients.mock.side_effect = [cred1, cred2, cred3] base_scenario.OpenStackScenario.validate( name="foo_name", config="foo_config", admin="foo_admin", 
users=[{"credential": "foo_user1"}, {"credential": "foo_user2"}], deployment=None) mock_scenario_validate.assert_called_once_with( name="foo_name", config="foo_config", admin=cred1, users=[cred2, cred3], deployment=None) self.osclients.mock.assert_has_calls([ mock.call("foo_admin"), mock.call("foo_user1"), mock.call("foo_user2"), ]) rally-0.9.1/tests/unit/plugins/openstack/context/0000775000567000056710000000000013073420067023264 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/watcher/0000775000567000056710000000000013073420067024721 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/watcher/__init__.py0000664000567000056710000000000013073417717027030 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/watcher/test_audit_templates.py0000664000567000056710000000710613073417720031524 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.context.watcher import audit_templates from tests.unit import fakes from tests.unit import test CTX = "rally.plugins.openstack.context.watcher" SCN = "rally.plugins.openstack.scenarios.watcher" TYP = "rally.plugins.openstack.types" class AuditTemplateTestCase(test.ScenarioTestCase): @mock.patch("%s.utils.WatcherScenario._create_audit_template" % SCN, return_value=mock.MagicMock()) @mock.patch("%s.WatcherStrategy.transform" % TYP, return_value=mock.MagicMock()) @mock.patch("%s.WatcherGoal.transform" % TYP, return_value=mock.MagicMock()) @mock.patch("%s.audit_templates.osclients" % CTX, return_value=fakes.FakeClients()) def test_setup(self, mock_osclients, mock_watcher_goal_transform, mock_watcher_strategy_transform, mock_watcher_scenario__create_audit_template): users = [{"id": 1, "tenant_id": 1, "credential": mock.MagicMock()}] self.context.update({ "config": { "audit_templates": { "audit_templates_per_admin": 1, "fill_strategy": "random", "params": [ { "goal": { "name": "workload_balancing" }, "strategy": { "name": "workload_stabilization" } }, { "goal": { "name": "workload_balancing" }, "strategy": { "name": "workload_stabilization" } } ] }, }, "admin": { "credential": mock.MagicMock() }, "users": users }) audit_template = audit_templates.AuditTemplateGenerator(self.context) audit_template.setup() goal_id = mock_watcher_goal_transform.return_value strategy_id = mock_watcher_strategy_transform.return_value mock_calls = [mock.call(goal_id, strategy_id)] mock_watcher_scenario__create_audit_template.assert_has_calls( mock_calls) @mock.patch("%s.audit_templates.resource_manager.cleanup" % CTX) def test_cleanup(self, mock_cleanup): audit_templates_mocks = [mock.Mock() for i in range(2)] self.context.update({ "admin": { "credential": mock.MagicMock() }, "audit_templates": audit_templates_mocks }) audit_templates_ctx = audit_templates.AuditTemplateGenerator( self.context) audit_templates_ctx.cleanup() 
mock_cleanup.assert_called_once_with( names=["watcher.action_plan", "watcher.audit_template"], admin=self.context["admin"]) rally-0.9.1/tests/unit/plugins/openstack/context/__init__.py0000664000567000056710000000000013073417717025373 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/magnum/0000775000567000056710000000000013073420067024550 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/magnum/__init__.py0000664000567000056710000000000013073417717026657 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/magnum/test_clusters.py0000664000567000056710000001256713073417720030042 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy

import mock

from rally.plugins.openstack.context.magnum import clusters
from tests.unit import test

CTX = "rally.plugins.openstack.context.magnum"
SCN = "rally.plugins.openstack.scenarios"


class ClustersGeneratorTestCase(test.ScenarioTestCase):
    """Unit tests for the Magnum clusters context generator."""

    def _gen_tenants(self, count):
        """Return ``count`` minimal tenant records keyed by str(index)."""
        tenants = {}
        for id_ in range(count):
            tenants[str(id_)] = {"name": str(id_)}
        return tenants

    def _gen_tenants_with_cluster_template(self, count):
        """Like :meth:`_gen_tenants`, but each tenant already owns a
        cluster template (as the cluster_templates context would set it).
        """
        # Reuse _gen_tenants instead of duplicating its loop.
        tenants = self._gen_tenants(count)
        for tenant in tenants.values():
            tenant["cluster_template"] = "rally_ct_uuid"
        return tenants

    @mock.patch("%s.magnum.utils.MagnumScenario._create_cluster" % SCN,
                return_value=mock.Mock())
    def test_setup_using_existing_cluster_template(self,
                                                   mock__create_cluster):
        """A user-supplied template UUID is used for every tenant."""
        tenants_count = 2
        users_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = []
        for ten_id in tenants:
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": ten_id,
                              "credential": mock.MagicMock()})

        self.context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "clusters": {
                    "cluster_template_uuid": "123456789",
                    "node_count": 2
                }
            },
            "users": users,
            "tenants": tenants
        })

        mock_cluster = mock__create_cluster.return_value
        new_context = copy.deepcopy(self.context)
        for id_ in new_context["tenants"]:
            new_context["tenants"][id_]["cluster"] = mock_cluster.uuid

        cluster_ctx = clusters.ClusterGenerator(self.context)
        cluster_ctx.setup()
        self.assertEqual(new_context, self.context)

        cluster_ctx_config = self.context["config"]["clusters"]
        node_count = cluster_ctx_config.get("node_count")
        cluster_template_uuid = cluster_ctx_config.get(
            "cluster_template_uuid")
        mock_calls = [mock.call(cluster_template=cluster_template_uuid,
                                node_count=node_count)
                      for i in range(tenants_count)]
        mock__create_cluster.assert_has_calls(mock_calls)

    @mock.patch("%s.magnum.utils.MagnumScenario._create_cluster" % SCN,
                return_value=mock.Mock())
    def test_setup(self, mock__create_cluster):
        """Templates created by the cluster_templates context are used."""
        tenants_count = 2
        users_per_tenant = 5

        tenants = self._gen_tenants_with_cluster_template(tenants_count)
        users = []
        for ten_id in tenants:
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": ten_id,
                              "credential": mock.MagicMock()})

        self.context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "cluster_templates": {
                    "dns_nameserver": "8.8.8.8",
                    "external_network_id": "public",
                    "flavor_id": "m1.small",
                    "docker_volume_size": 5,
                    "coe": "kubernetes",
                    "image_id": "fedora-atomic-latest",
                    "network_driver": "flannel"
                },
                "clusters": {
                    "node_count": 2
                }
            },
            "users": users,
            "tenants": tenants
        })

        mock_cluster = mock__create_cluster.return_value
        new_context = copy.deepcopy(self.context)
        for id_ in new_context["tenants"]:
            new_context["tenants"][id_]["cluster"] = mock_cluster.uuid

        cluster_ctx = clusters.ClusterGenerator(self.context)
        cluster_ctx.setup()
        self.assertEqual(new_context, self.context)

        node_count = self.context["config"]["clusters"].get("node_count")
        mock_calls = [mock.call(cluster_template="rally_ct_uuid",
                                node_count=node_count)
                      for i in range(tenants_count)]
        mock__create_cluster.assert_has_calls(mock_calls)

    @mock.patch("%s.clusters.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup):
        # Fixed: patch cleanup through the module under test ("clusters"),
        # not through the unrelated "cluster_templates" module.  The old
        # target happened to work only because both modules reference the
        # same resource_manager module object, but it obscured intent and
        # would silently break if the import styles ever diverged.
        self.context.update({
            "users": mock.MagicMock()
        })
        clusters_ctx = clusters.ClusterGenerator(self.context)
        clusters_ctx.cleanup()
        mock_cleanup.assert_called_once_with(
            names=["magnum.clusters"],
            users=self.context["users"])
import mock

from rally.plugins.openstack.context.magnum import cluster_templates
from tests.unit import fakes
from tests.unit import test

BASE_CTX = "rally.task.context"
CTX = "rally.plugins.openstack.context"
BASE_SCN = "rally.task.scenarios"
SCN = "rally.plugins.openstack.scenarios"


class ClusterTemplatesGeneratorTestCase(test.ScenarioTestCase):
    """Tests for the Magnum cluster-template context generator."""

    def _gen_tenants(self, count):
        # Minimal tenant records keyed by stringified index.
        return {str(idx): {"name": str(idx)} for idx in range(count)}

    @mock.patch("%s.magnum.utils.MagnumScenario."
                "_create_cluster_template" % SCN,
                return_value=fakes.FakeClusterTemplate(id="uuid"))
    @mock.patch("%s.nova.utils.NovaScenario._create_keypair" % SCN,
                return_value="key1")
    def test_setup(self, mock_nova_scenario__create_keypair,
                   mock__create_cluster_template):
        tenants_count = 2
        users_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = [{"id": i, "tenant_id": ten_id,
                  "credential": mock.MagicMock()}
                 for ten_id in tenants
                 for i in range(users_per_tenant)]

        self.context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "cluster_templates": {
                    "dns_nameserver": "8.8.8.8",
                    "external_network_id": "public",
                    "flavor_id": "m1.small",
                    "docker_volume_size": 5,
                    "coe": "kubernetes",
                    "image_id": "fedora-atomic-latest",
                    "network_driver": "flannel"
                }
            },
            "users": users,
            "tenants": tenants
        })

        cluster_templates.ClusterTemplateGenerator(self.context).setup()

        # One template per tenant, created with the configured arguments
        # plus the keypair returned by the (mocked) nova scenario.
        ct_cfg = self.context["config"]["cluster_templates"]
        expected_call = mock.call(
            image_id=ct_cfg.get("image_id"),
            keypair_id="key1",
            external_network_id=ct_cfg.get("external_network_id"),
            dns_nameserver=ct_cfg.get("dns_nameserver"),
            flavor_id=ct_cfg.get("flavor_id"),
            docker_volume_size=ct_cfg.get("docker_volume_size"),
            network_driver=ct_cfg.get("network_driver"),
            coe=ct_cfg.get("coe"))
        mock__create_cluster_template.assert_has_calls(
            [expected_call] * tenants_count)

        # Every tenant should now carry its template id in the context.
        for ten_id in self.context["tenants"]:
            self.assertIsNotNone(
                self.context["tenants"][ten_id]["cluster_template"])

    @mock.patch("%s.magnum.cluster_templates.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup):
        self.context.update({
            "users": mock.MagicMock()
        })

        cluster_templates.ClusterTemplateGenerator(self.context).cleanup()

        mock_cleanup.assert_called_once_with(
            names=["magnum.cluster_templates", "nova.keypairs"],
            users=self.context["users"])
import copy

import ddt
import jsonschema
import mock

from rally.plugins.openstack.context.cinder import volumes
from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils
from tests.unit import fakes
from tests.unit import test

CTX = "rally.plugins.openstack.context"
SCN = "rally.plugins.openstack.scenarios"


@ddt.ddt
class VolumeGeneratorTestCase(test.ScenarioTestCase):
    """Tests for the Cinder volumes context generator."""

    def _gen_tenants(self, count):
        return {str(idx): {"name": str(idx)} for idx in range(count)}

    def test_init(self):
        self.context.update({
            "config": {
                "volumes": {
                    "size": 1,
                    "volumes_per_tenant": 5,
                }
            }
        })

        inst = volumes.VolumeGenerator(self.context)
        self.assertEqual(inst.config, self.context["config"]["volumes"])

    @ddt.data({"config": {"size": 1, "volumes_per_tenant": 5}},
              {"config": {"size": 1, "type": None, "volumes_per_tenant": 5}},
              {"config": {"size": 1, "type": -1, "volumes_per_tenant": 5},
               "validation_raises": jsonschema.exceptions.ValidationError})
    @ddt.unpack
    @mock.patch("%s.cinder.utils.CinderScenario._create_volume" % SCN,
                return_value=fakes.FakeVolume(id="uuid"))
    def test_setup(self, mock_cinder_scenario__create_volume, config,
                   validation_raises=None):
        # An invalid "type" must raise exactly the advertised validation
        # error; any other exception propagates and fails the test.
        try:
            volumes.VolumeGenerator.validate(config)
        except Exception as exc:
            if not isinstance(exc, validation_raises):
                raise

        users_per_tenant = 5
        volumes_per_tenant = config.get("volumes_per_tenant", 5)
        tenants = self._gen_tenants(2)
        users = [{"id": i, "tenant_id": tenant_id,
                  "credential": mock.MagicMock()}
                 for tenant_id in tenants
                 for i in range(users_per_tenant)]

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "volumes": config
            },
            "admin": {"credential": mock.MagicMock()},
            "users": users,
            "tenants": tenants
        })

        expected = copy.deepcopy(self.context)
        for tenant_id in tenants:
            expected["tenants"][tenant_id]["volumes"] = (
                [{"id": "uuid"}] * volumes_per_tenant)

        volumes.VolumeGenerator(self.context).setup()
        self.assertEqual(expected, self.context)

    @mock.patch("%s.cinder.volumes.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup):
        users_per_tenant = 5
        volumes_per_tenant = 5

        tenants = self._gen_tenants(2)
        users = []
        for tenant_id in tenants:
            users.extend({"id": i, "tenant_id": tenant_id,
                          "credential": "credential"}
                         for i in range(users_per_tenant))
            tenants[tenant_id]["volumes"] = (
                [{"id": "uuid"}] * volumes_per_tenant)

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "volumes": {
                    "size": 1,
                    "volumes_per_tenant": 5,
                }
            },
            "admin": {"credential": mock.MagicMock()},
            "users": users,
            "tenants": tenants
        })

        volumes.VolumeGenerator(self.context).cleanup()

        mock_cleanup.assert_called_once_with(
            names=["cinder.volumes"],
            users=self.context["users"],
            api_versions=None,
            superclass=cinder_utils.CinderScenario,
            task_id=self.context["task"]["uuid"])

    @mock.patch("%s.cinder.volumes.resource_manager.cleanup" % CTX)
    def test_cleanup_api_versions(self, mock_cleanup):
        users_per_tenant = 5
        volumes_per_tenant = 5

        tenants = self._gen_tenants(2)
        users = []
        for tenant_id in tenants:
            users.extend({"id": i, "tenant_id": tenant_id,
                          "credential": "credential"}
                         for i in range(users_per_tenant))
            tenants[tenant_id]["volumes"] = (
                [{"id": "uuid"}] * volumes_per_tenant)

        api_version = {
            "cinder": {
                "version": 1,
                "service_type": "volume"
            }
        }
        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "volumes": {
                    "size": 1,
                    "type": "volume_type",
                    "volumes_per_tenant": 5,
                },
                "api_versions": api_version
            },
            "admin": {"credential": mock.MagicMock()},
            "users": users,
            "tenants": tenants
        })

        volumes.VolumeGenerator(self.context).cleanup()

        # The configured api_versions must be forwarded verbatim.
        mock_cleanup.assert_called_once_with(
            names=["cinder.volumes"],
            users=self.context["users"],
            api_versions=api_version,
            superclass=cinder_utils.CinderScenario,
            task_id=self.context["task"]["uuid"])
import mock

from rally.plugins.openstack.context.cinder import volume_types
from tests.unit import test

CTX = "rally.plugins.openstack.context"


class VolumeTypeGeneratorTestCase(test.ContextTestCase):
    """Tests for the Cinder volume-type context generator."""

    def setUp(self):
        super(VolumeTypeGeneratorTestCase, self).setUp()
        # Every test expects an admin credential in the context.
        self.context.update({"admin": {"credential": "admin_creds"}})

    def test_setup(self):
        self.context.update({"config": {"volume_types": ["foo", "bar"]}})
        create = self.clients("cinder", admin=True).volume_types.create
        create.side_effect = (mock.Mock(id="foo-id"), mock.Mock(id="bar-id"))

        volume_types.VolumeTypeGenerator(self.context).setup()

        create.assert_has_calls([mock.call("foo"), mock.call("bar")])
        self.assertEqual(self.context["volume_types"],
                         [{"id": "foo-id", "name": "foo"},
                          {"id": "bar-id", "name": "bar"}])

    def test_cleanup(self):
        self.context.update({
            "config": {"volume_types": ["foo", "bar"]},
            "volume_types": [
                {"id": "foo_id", "name": "foo"},
                {"id": "bar_id", "name": "bar"}],
            "api_versions": {
                "cinder": {"version": 2, "service_type": "volumev2"}}})

        volume_types.VolumeTypeGenerator(self.context).cleanup()

        delete = self.clients("cinder", admin=True).volume_types.delete
        delete.assert_has_calls([mock.call("foo_id"), mock.call("bar_id")])
import copy

import mock

from rally.plugins.openstack.context.ec2 import servers
from rally.plugins.openstack.scenarios.ec2 import utils as ec2_utils
from tests.unit import fakes
from tests.unit import test

CTX = "rally.plugins.openstack.context.ec2"
SCN = "rally.plugins.openstack.scenarios"
TYP = "rally.plugins.openstack.types"


class EC2ServerGeneratorTestCase(test.TestCase):
    """Tests for the EC2 servers context generator."""

    def _gen_tenants_and_users(self, tenants_count, users_per_tenant):
        # ``idx`` instead of the builtin-shadowing ``id``.
        tenants = {str(idx): {"name": str(idx)}
                   for idx in range(tenants_count)}
        users = [{"id": i, "tenant_id": tenant_id, "credential": "credential"}
                 for tenant_id in tenants
                 for i in range(users_per_tenant)]
        return tenants, users

    def _get_context(self, users, tenants):
        return {
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10},
                "ec2_servers": {
                    "servers_per_tenant": 5,
                    "image": {"name": "foo_image"},
                    "flavor": {"name": "foo_flavor"}
                }
            },
            "admin": {"credential": mock.MagicMock()},
            "task": mock.MagicMock(),
            "users": users,
            "tenants": tenants
        }

    @mock.patch("%s.ec2.utils.EC2Scenario._boot_servers" % SCN,
                return_value=[fakes.FakeServer(id=str(i)) for i in range(5)])
    @mock.patch("%s.EC2Image.transform" % TYP,
                return_value=mock.MagicMock())
    @mock.patch("%s.servers.osclients" % CTX,
                return_value=fakes.FakeClients())
    def test_setup(self, mock_osclients, mock_ec2_image_transform,
                   mock_ec2_scenario__boot_servers):
        servers_per_tenant = 5
        tenants, users = self._gen_tenants_and_users(2, 5)

        real_context = self._get_context(users, tenants)
        expected = copy.deepcopy(real_context)
        server_ids = [str(i) for i in range(servers_per_tenant)]
        for tenant_id in expected["tenants"]:
            expected["tenants"][tenant_id]["ec2_servers"] = list(server_ids)

        servers_ctx = servers.EC2ServerGenerator(real_context)
        servers_ctx.setup()
        self.assertEqual(expected, servers_ctx.context)

    @mock.patch("%s.servers.osclients" % CTX)
    @mock.patch("%s.servers.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup, mock_osclients):
        servers_per_tenant = 5
        tenants, users = self._gen_tenants_and_users(2, 5)
        for tenant_id in tenants:
            tenants[tenant_id]["ec2_servers"] = [
                str(i) for i in range(servers_per_tenant)]

        context = self._get_context(users, tenants)
        servers.EC2ServerGenerator(context).cleanup()

        mock_cleanup.assert_called_once_with(
            names=["ec2.servers"],
            users=context["users"],
            superclass=ec2_utils.EC2Scenario,
            task_id=context["task"]["uuid"])
import copy

import mock
from novaclient import exceptions as nova_exceptions

from rally.plugins.openstack.context.nova import flavors
from tests.unit import test

CTX = "rally.plugins.openstack.context.nova"


class FlavorsGeneratorTestCase(test.TestCase):
    """Tests for the Nova flavors context generator."""

    def setUp(self):
        super(FlavorsGeneratorTestCase, self).setUp()
        self.context = {
            "config": {
                "flavors": [{
                    "name": "flavor_name",
                    "ram": 2048,
                    "disk": 10,
                    "vcpus": 3,
                    "ephemeral": 3,
                    "swap": 5,
                    "extra_specs": {
                        "key": "value"
                    }
                }]
            },
            "admin": {"credential": mock.MagicMock()},
            "task": mock.MagicMock(),
        }

    @mock.patch("%s.flavors.osclients.Clients" % CTX)
    def test_setup(self, mock_clients):
        # The created flavor's dict form ends up in context["flavors"],
        # keyed by flavor name.
        mock_create = mock_clients().nova().flavors.create
        mock_create().to_dict.return_value = {"flavor_key": "flavor_value"}

        flavors_ctx = flavors.FlavorsGenerator(self.context)
        flavors_ctx.setup()

        self.assertEqual(flavors_ctx.context["flavors"],
                         {"flavor_name": {"flavor_key": "flavor_value"}})
        mock_clients.assert_called_with(self.context["admin"]["credential"])
        mock_create.assert_called_with(
            name="flavor_name", ram=2048, vcpus=3,
            disk=10, ephemeral=3, swap=5)
        mock_create().set_keys.assert_called_with({"key": "value"})
        mock_create().to_dict.assert_called_with()

    @mock.patch("%s.flavors.osclients.Clients" % CTX)
    def test_setup_failexists(self, mock_clients):
        # A Conflict from nova (flavor already exists) is tolerated and
        # simply leaves context["flavors"] empty.
        expected = copy.deepcopy(self.context)
        expected["flavors"] = {}

        mock_flavor_create = mock_clients().nova().flavors.create
        mock_flavor_create.side_effect = nova_exceptions.Conflict("conflict")

        flavors_ctx = flavors.FlavorsGenerator(self.context)
        flavors_ctx.setup()

        self.assertEqual(expected, flavors_ctx.context)
        mock_clients.assert_called_with(self.context["admin"]["credential"])
        mock_flavor_create.assert_called_once_with(
            name="flavor_name", ram=2048, vcpus=3,
            disk=10, ephemeral=3, swap=5)

    @mock.patch("%s.flavors.osclients.Clients" % CTX)
    def test_cleanup(self, mock_clients):
        real_context = {
            "flavors": {
                "flavor_name": {
                    "flavor_name": "flavor_name",
                    "id": "flavor_name"
                }
            },
            "admin": {"credential": mock.MagicMock()},
            "task": mock.MagicMock(),
        }

        flavors.FlavorsGenerator(real_context).cleanup()

        mock_clients.assert_called_with(real_context["admin"]["credential"])
        mock_flavors_delete = mock_clients().nova().flavors.delete
        mock_flavors_delete.assert_called_with("flavor_name")
import mock

from rally.plugins.openstack.context.nova import keypairs
from tests.unit import test

CTX = "rally.plugins.openstack.context.nova"


class KeyPairContextTestCase(test.TestCase):
    """Tests for the Nova keypair context plugin."""

    def setUp(self):
        super(KeyPairContextTestCase, self).setUp()
        self.users = 2
        task = {"uuid": "foo_task_id"}
        # Two context variants: users with generated keypairs attached,
        # and the same users before generation.
        self.ctx_with_keys = {
            "users": [
                {
                    "keypair": {"id": "key_id_1", "key": "key_1",
                                "name": "key_name_1"},
                    "credential": "credential_1"
                },
                {
                    "keypair": {"id": "key_id_2", "key": "key_2",
                                "name": "key_name_2"},
                    "credential": "credential_2"
                },
            ],
            "task": task
        }
        self.ctx_without_keys = {
            "users": [{"credential": "credential_1"},
                      {"credential": "credential_2"}],
            "task": task
        }

    def test_keypair_setup(self):
        keypair_ctx = keypairs.Keypair(self.ctx_without_keys)
        keypair_ctx._generate_keypair = mock.Mock(side_effect=[
            {"id": "key_id_1", "key": "key_1", "name": "key_name_1"},
            {"id": "key_id_2", "key": "key_2", "name": "key_name_2"},
        ])

        keypair_ctx.setup()

        self.assertEqual(keypair_ctx.context, self.ctx_with_keys)
        keypair_ctx._generate_keypair.assert_has_calls(
            [mock.call("credential_1"), mock.call("credential_2")])

    @mock.patch("%s.keypairs.resource_manager.cleanup" % CTX)
    def test_keypair_cleanup(self, mock_cleanup):
        keypairs.Keypair(self.ctx_with_keys).cleanup()

        mock_cleanup.assert_called_once_with(
            names=["nova.keypairs"],
            users=self.ctx_with_keys["users"],
            superclass=keypairs.Keypair,
            task_id=self.ctx_with_keys["task"]["uuid"])

    @mock.patch("rally.osclients.Clients")
    def test_keypair_generate(self, mock_clients):
        mock_keypairs = mock_clients.return_value.nova.return_value.keypairs
        mock_keypair = mock_keypairs.create.return_value
        mock_keypair.public_key = "public_key"
        mock_keypair.private_key = "private_key"
        mock_keypair.id = "key_id"

        keypair_ctx = keypairs.Keypair(self.ctx_without_keys)
        keypair_ctx.generate_random_name = mock.Mock()
        key = keypair_ctx._generate_keypair("credential")

        self.assertEqual({
            "id": "key_id",
            "name": keypair_ctx.generate_random_name.return_value,
            "private": "private_key",
            "public": "public_key"
        }, key)
        mock_clients.assert_has_calls([
            mock.call().nova().keypairs.create(
                keypair_ctx.generate_random_name.return_value),
        ])
import copy

import mock

from rally.plugins.openstack.context.nova import servers
from rally.plugins.openstack.scenarios.nova import utils as nova_utils
from tests.unit import fakes
from tests.unit import test

CTX = "rally.plugins.openstack.context.nova"
SCN = "rally.plugins.openstack.scenarios"
TYP = "rally.plugins.openstack.types"


class ServerGeneratorTestCase(test.ScenarioTestCase):
    """Tests for the Nova servers context generator."""

    def _gen_tenants(self, count):
        return {str(idx): {"name": str(idx)} for idx in range(count)}

    def test_init(self):
        self.context.update({
            "config": {
                "servers": {
                    "servers_per_tenant": 5,
                }
            },
            "tenants": self._gen_tenants(2)})

        inst = servers.ServerGenerator(self.context)
        # auto_assign_nic defaults to False when not configured.
        self.assertEqual({"auto_assign_nic": False, "servers_per_tenant": 5},
                         inst.config)

    @mock.patch("%s.nova.utils.NovaScenario._boot_servers" % SCN,
                return_value=[fakes.FakeServer(id="uuid")
                              for _ in range(5)])
    @mock.patch("%s.GlanceImage.transform" % TYP,
                return_value=mock.MagicMock())
    @mock.patch("%s.Flavor.transform" % TYP,
                return_value=mock.MagicMock())
    @mock.patch("%s.servers.osclients" % CTX,
                return_value=fakes.FakeClients())
    def test_setup(self, mock_osclients, mock_flavor_transform,
                   mock_glance_image_transform,
                   mock_nova_scenario__boot_servers):
        tenants_count = 2
        users_per_tenant = 5
        servers_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = [{"id": i, "tenant_id": tenant_id,
                  "credential": mock.MagicMock()}
                 for tenant_id in tenants
                 for i in range(users_per_tenant)]

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "servers": {
                    "auto_assign_nic": True,
                    "servers_per_tenant": 5,
                    "image": {
                        "name": "cirros-0.3.4-x86_64-uec",
                    },
                    "flavor": {
                        "name": "m1.tiny",
                    },
                },
            },
            "admin": {"credential": mock.MagicMock()},
            "users": users,
            "tenants": tenants
        })

        expected = copy.deepcopy(self.context)
        for tenant_id in expected["tenants"]:
            expected["tenants"][tenant_id]["servers"] = (
                ["uuid"] * servers_per_tenant)

        servers.ServerGenerator(self.context).setup()
        self.assertEqual(expected, self.context)

        image_id = mock_glance_image_transform.return_value
        flavor_id = mock_flavor_transform.return_value
        srv_cfg = self.context["config"]["servers"]
        boot_call = mock.call(
            image_id, flavor_id,
            auto_assign_nic=srv_cfg.get("auto_assign_nic", False),
            nics=srv_cfg.get("nics", []),
            requests=srv_cfg.get("servers_per_tenant", False))
        mock_nova_scenario__boot_servers.assert_has_calls(
            [boot_call] * len(tenants))

    @mock.patch("%s.servers.osclients" % CTX)
    @mock.patch("%s.servers.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup, mock_osclients):
        users_per_tenant = 5
        servers_per_tenant = 5

        tenants = self._gen_tenants(2)
        users = []
        for tenant_id in tenants:
            users.extend({"id": i, "tenant_id": tenant_id,
                          "credential": "credential"}
                         for i in range(users_per_tenant))
            tenants[tenant_id]["servers"] = ["uuid"] * servers_per_tenant

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "servers": {
                    "servers_per_tenant": 5,
                    "image": {
                        "name": "cirros-0.3.4-x86_64-uec",
                    },
                    "flavor": {
                        "name": "m1.tiny",
                    },
                },
            },
            "admin": {"credential": mock.MagicMock()},
            "users": users,
            "tenants": tenants
        })

        servers.ServerGenerator(self.context).cleanup()

        mock_cleanup.assert_called_once_with(
            names=["nova.servers"],
            users=self.context["users"],
            superclass=nova_utils.NovaScenario,
            task_id=self.context["task"]["uuid"])
import copy

import ddt
import jsonschema
import mock

from rally.plugins.openstack.context.glance import images
from tests.unit import test

CTX = "rally.plugins.openstack.context.glance"
SCN = "rally.plugins.openstack.scenarios.glance"


@ddt.ddt
class ImageGeneratorTestCase(test.ScenarioTestCase):
    """Tests for the Glance image context generator."""

    def _gen_tenants(self, count):
        """Return a fake tenants dict keyed by stringified ids."""
        tenants = {}
        for id_ in range(count):
            tenants[str(id_)] = {"name": str(id_)}
        return tenants

    def test_init_validation(self):
        """A config missing required image fields must fail validation."""
        self.context["config"] = {
            "images": {
                "image_url": "mock_url"
            }
        }
        self.assertRaises(jsonschema.ValidationError,
                          images.ImageGenerator.validate, self.context)

    @ddt.data(
        {},
        {"min_disk": 1, "min_ram": 2},
        {"image_name": "foo"},
        {"tenants": 3, "users_per_tenant": 2, "images_per_tenant": 5},
        {"image_args": {"min_disk": 1, "min_ram": 2, "visibility": "public"}},
        {"api_versions": {"glance": {"version": 2, "service_type": "image"}}})
    @ddt.unpack
    @mock.patch("rally.plugins.openstack.wrappers.glance.wrap")
    @mock.patch("rally.osclients.Clients")
    def test_setup(self, mock_clients, mock_wrap,
                   image_container="bare", image_type="qcow2",
                   image_url="http://example.com/fake/url",
                   tenants=1, users_per_tenant=1, images_per_tenant=1,
                   image_name=None, min_ram=None, min_disk=None,
                   image_args=None, api_versions=None):
        """setup() must create the configured images for every tenant."""
        tenant_data = self._gen_tenants(tenants)
        users = []
        for tenant_id in tenant_data:
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": tenant_id,
                              "credential": mock.MagicMock()})

        self.context.update({
            "config": {
                "users": {
                    "tenants": tenants,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "images": {
                    "image_url": image_url,
                    "image_type": image_type,
                    "image_container": image_container,
                    "images_per_tenant": images_per_tenant,
                }
            },
            "admin": {
                "credential": mock.MagicMock()
            },
            "users": users,
            "tenants": tenant_data
        })
        if api_versions:
            self.context["config"]["api_versions"] = api_versions

        # Accumulate the kwargs we expect to be forwarded to create_image.
        expected_image_args = {}
        if image_args is not None:
            self.context["config"]["images"]["image_args"] = image_args
            expected_image_args.update(image_args)
        if image_name is not None:
            self.context["config"]["images"]["image_name"] = image_name
        if min_ram is not None:
            self.context["config"]["images"]["min_ram"] = min_ram
            expected_image_args["min_ram"] = min_ram
        if min_disk is not None:
            self.context["config"]["images"]["min_disk"] = min_disk
            expected_image_args["min_disk"] = min_disk

        wrapper = mock_wrap.return_value

        # The expected post-setup context: each tenant gains a list of the
        # (mocked) created image ids.
        new_context = copy.deepcopy(self.context)
        for tenant_id in new_context["tenants"].keys():
            new_context["tenants"][tenant_id]["images"] = [
                wrapper.create_image.return_value.id
            ] * images_per_tenant

        images_ctx = images.ImageGenerator(self.context)
        images_ctx.setup()
        self.assertEqual(new_context, self.context)

        wrapper_calls = []
        wrapper_calls.extend([mock.call(mock_clients.return_value.glance,
                                        images_ctx)] * tenants)
        wrapper_calls.extend(
            [mock.call().create_image(
                image_container, image_url, image_type,
                name=mock.ANY, **expected_image_args)]
            * tenants * images_per_tenant)
        mock_wrap.assert_has_calls(wrapper_calls, any_order=True)

        if image_name:
            # Generated names must be prefixed by the configured image_name.
            for args in wrapper.create_image.call_args_list:
                self.assertTrue(args[1]["name"].startswith(image_name))
        mock_clients.assert_has_calls(
            [mock.call(mock.ANY, api_info=api_versions)] * tenants)

    @ddt.data(
        {},
        {"api_versions": {"glance": {"version": 2, "service_type": "image"}}})
    @ddt.unpack
    @mock.patch("rally.plugins.openstack.wrappers.glance.wrap")
    @mock.patch("rally.osclients.Clients")
    def test_cleanup(self, mock_clients, mock_wrap, api_versions=None):
        """cleanup() must delete every image recorded in each tenant."""
        tenants_count = 2
        users_per_tenant = 5
        images_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = []
        created_images = []
        for tenant_id in tenants:
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": tenant_id,
                              "credential": mock.MagicMock()})
            tenants[tenant_id].setdefault("images", [])
            for j in range(images_per_tenant):
                image = mock.Mock()
                created_images.append(image)
                tenants[tenant_id]["images"].append(image)

        self.context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "images": {
                    "image_url": "mock_url",
                    "image_type": "qcow2",
                    "image_container": "bare",
                    "images_per_tenant": images_per_tenant,
                    "image_name": "some_name",
                    "min_ram": 128,
                    "min_disk": 1,
                }
            },
            "admin": {
                "credential": mock.MagicMock()
            },
            "users": users,
            "tenants": tenants
        })
        if api_versions:
            self.context["config"]["api_versions"] = api_versions

        images_ctx = images.ImageGenerator(self.context)
        images_ctx.cleanup()

        wrapper_calls = []
        wrapper_calls.extend([mock.call(mock_clients.return_value.glance,
                                        images_ctx)] * tenants_count)
        mock_wrap.assert_has_calls(wrapper_calls, any_order=True)

        glance_client = mock_clients.return_value.glance.return_value
        glance_client.images.delete.assert_has_calls(
            [mock.call(i) for i in created_images])
        glance_client.images.get.assert_has_calls(
            [mock.call(i) for i in created_images])
        mock_clients.assert_has_calls(
            [mock.call(mock.ANY, api_info=api_versions)] * tenants_count,
            any_order=True)
import copy

import mock
import six

from rally import exceptions
from rally.plugins.openstack.context.ceilometer import samples
from rally.plugins.openstack.scenarios.ceilometer import utils as ceilo_utils
from tests.unit import test

CTX = "rally.plugins.openstack.context.ceilometer"


class CeilometerSampleGeneratorTestCase(test.TestCase):
    """Tests for the Ceilometer sample context generator."""

    def _gen_tenants(self, count):
        """Return a fake tenants dict keyed by stringified ids."""
        tenants = {}
        for id_ in range(count):
            tenants[str(id_)] = {"name": str(id_)}
        return tenants

    def _gen_context(self, tenants_count, users_per_tenant,
                     resources_per_tenant, samples_per_resource):
        """Build a full test context for the given sizing parameters."""
        tenants = self._gen_tenants(tenants_count)
        users = []
        for id_ in tenants.keys():
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": id_,
                              "credential": mock.MagicMock()})
        context = test.get_test_context()
        context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "ceilometer": {
                    "counter_name": "fake-counter-name",
                    "counter_type": "fake-counter-type",
                    "counter_unit": "fake-counter-unit",
                    "counter_volume": 100,
                    "resources_per_tenant": resources_per_tenant,
                    "samples_per_resource": samples_per_resource,
                    "timestamp_interval": 60,
                    "metadata_list": (
                        {"status": "active", "name": "fake_resource",
                         "deleted": "False",
                         "created_at": "2015-09-04T12:34:19.000000"},
                        {"status": "not_active", "name": "fake_resource_1",
                         "deleted": "False",
                         "created_at": "2015-09-10T06:55:12.000000"},
                    )
                }
            },
            "admin": {
                "credential": mock.MagicMock()
            },
            "users": users,
            "tenants": tenants,
            "user_choice_method": "random",
        })
        return tenants, context

    def test_init(self):
        """The generator must adopt the "ceilometer" section as its config."""
        context = {}
        context["task"] = mock.MagicMock()
        context["config"] = {
            "ceilometer": {
                "counter_name": "cpu_util",
                "counter_type": "gauge",
                "counter_unit": "instance",
                "counter_volume": 1.0,
                "resources_per_tenant": 5,
                "samples_per_resource": 5,
                "timestamp_interval": 60,
                "metadata_list": (
                    {"status": "active", "name": "fake_resource",
                     "deleted": "False",
                     "created_at": "2015-09-04T12:34:19.000000"},
                    {"status": "not_active", "name": "fake_resource_1",
                     "deleted": "False",
                     "created_at": "2015-09-10T06:55:12.000000"},
                )
            }
        }

        inst = samples.CeilometerSampleGenerator(context)
        self.assertEqual(inst.config, context["config"]["ceilometer"])

    def test__store_batch_samples(self):
        """Storing a malformed batch must raise ContextSetupFailure."""
        tenants_count = 2
        users_per_tenant = 2
        resources_per_tenant = 2
        samples_per_resource = 2

        tenants, real_context = self._gen_context(
            tenants_count, users_per_tenant,
            resources_per_tenant, samples_per_resource)
        ceilometer_ctx = samples.CeilometerSampleGenerator(real_context)
        scenario = ceilo_utils.CeilometerScenario(real_context)
        self.assertRaises(
            exceptions.ContextSetupFailure,
            ceilometer_ctx._store_batch_samples,
            scenario, ["foo", "bar"], 1)

    def test_setup(self):
        """setup() must record created samples/resources per tenant."""
        tenants_count = 2
        users_per_tenant = 2
        resources_per_tenant = 2
        samples_per_resource = 2

        tenants, real_context = self._gen_context(
            tenants_count, users_per_tenant,
            resources_per_tenant, samples_per_resource)

        scenario = ceilo_utils.CeilometerScenario(real_context)
        sample = {
            "counter_name": "fake-counter-name",
            "counter_type": "fake-counter-type",
            "counter_unit": "fake-counter-unit",
            "counter_volume": 100,
            "metadata_list": [
                {"status": "active", "name": "fake_resource",
                 "deleted": "False",
                 "created_at": "2015-09-04T12:34:19.000000"},
                {"status": "not_active", "name": "fake_resource_1",
                 "deleted": "False",
                 "created_at": "2015-09-10T06:55:12.000000"}
            ]
        }
        scenario.generate_random_name = mock.Mock(
            return_value="fake_resource-id")
        kwargs = copy.deepcopy(sample)
        samples_to_create = list(
            scenario._make_samples(count=samples_per_resource, interval=60,
                                   **kwargs)
        )[0]
        # Expected context: every tenant accumulates all generated samples
        # and one resource id per fake resource.
        new_context = copy.deepcopy(real_context)
        for id_ in tenants.keys():
            new_context["tenants"][id_].setdefault("samples", [])
            new_context["tenants"][id_].setdefault("resources", [])
            for i in six.moves.xrange(resources_per_tenant):
                for sample in samples_to_create:
                    new_context["tenants"][id_]["samples"].append(sample)
                new_context["tenants"][id_]["resources"].append(
                    sample["resource_id"])

        with mock.patch("%s.samples.ceilo_utils.CeilometerScenario"
                        "._create_samples" % CTX) as mock_create_samples:
            mock_create_samples.return_value = []
            for i, sample in enumerate(samples_to_create):
                sample_object = mock.MagicMock(resource_id="fake_resource-id")
                sample_object.to_dict.return_value = sample
                mock_create_samples.return_value.append(sample_object)
            ceilometer_ctx = samples.CeilometerSampleGenerator(real_context)
            ceilometer_ctx.setup()
            self.assertEqual(new_context, ceilometer_ctx.context)

    def test_cleanup(self):
        """cleanup() is a no-op for samples and must not raise."""
        tenants, context = self._gen_context(2, 5, 3, 3)
        ceilometer_ctx = samples.CeilometerSampleGenerator(context)
        ceilometer_ctx.cleanup()
import mock

from rally.plugins.openstack.context.senlin import profiles
from tests.unit import test

BASE_CTX = "rally.task.context"
CTX = "rally.plugins.openstack.context"
BASE_SCN = "rally.task.scenarios"
SCN = "rally.plugins.openstack.scenarios"


class ProfilesGeneratorTestCase(test.ScenarioTestCase):
    """Generate tenants."""

    def _gen_tenants(self, count):
        """Return a fake tenants dict keyed by stringified ids."""
        tenants = {}
        for _id in range(count):
            tenants[str(_id)] = {"id": str(_id)}
        return tenants

    def setUp(self):
        """Build a shared context with 2 tenants x 3 users and a profile
        spec."""
        super(ProfilesGeneratorTestCase, self).setUp()
        self.tenants_count = 2
        self.users_per_tenant = 3
        tenants = self._gen_tenants(self.tenants_count)
        users = []
        for tenant in tenants:
            for i in range(self.users_per_tenant):
                users.append({"id": i, "tenant_id": tenant,
                              "credential": mock.MagicMock()})

        self.context = {
            "config": {
                "users": {
                    "tenants": self.tenants_count,
                    "users_per_tenant": self.users_per_tenant
                },
                "profiles": {
                    "type": "profile_type_name",
                    "version": "1.0",
                    "properties": {"k1": "v1", "k2": "v2"}
                },
            },
            "users": users,
            "tenants": tenants,
            "task": mock.MagicMock()
        }

    @mock.patch("%s.senlin.utils.SenlinScenario._create_profile" % SCN,
                return_value=mock.MagicMock(id="TEST_PROFILE_ID"))
    def test_setup(self, mock_senlin_scenario__create_profile):
        """setup() must create one profile per tenant and store its id."""
        profile_ctx = profiles.ProfilesGenerator(self.context)
        profile_ctx.setup()
        spec = self.context["config"]["profiles"]

        mock_calls = [mock.call(spec) for i in range(self.tenants_count)]
        mock_senlin_scenario__create_profile.assert_has_calls(mock_calls)

        for tenant in self.context["tenants"]:
            self.assertEqual("TEST_PROFILE_ID",
                             self.context["tenants"][tenant]["profile"])

    @mock.patch("%s.senlin.utils.SenlinScenario._delete_profile" % SCN)
    def test_cleanup(self, mock_senlin_scenario__delete_profile):
        """cleanup() must delete each tenant's stored profile."""
        for tenant in self.context["tenants"]:
            self.context["tenants"][tenant].update(
                {"profile": "TEST_PROFILE_ID"})
        profile_ctx = profiles.ProfilesGenerator(self.context)
        profile_ctx.cleanup()
        mock_calls = [mock.call("TEST_PROFILE_ID") for i in range(
            self.tenants_count)]
        mock_senlin_scenario__delete_profile.assert_has_calls(mock_calls)
import mock
import six

from rally.plugins.openstack.context.monasca import metrics
from rally.plugins.openstack.scenarios.monasca import utils as monasca_utils
from tests.unit import test

CTX = "rally.plugins.openstack.context.monasca"


class MonascaMetricGeneratorTestCase(test.TestCase):
    """Tests for the Monasca metric context generator."""

    def _gen_tenants(self, count):
        """Return a fake tenants dict keyed by stringified ids."""
        tenants = {}
        for id in six.moves.range(count):
            tenants[str(id)] = {"name": str(id)}
        return tenants

    def _gen_context(self, tenants_count, users_per_tenant,
                     metrics_per_tenant):
        """Build a full test context for the given sizing parameters."""
        tenants = self._gen_tenants(tenants_count)
        users = []
        for id in tenants.keys():
            for i in six.moves.range(users_per_tenant):
                users.append({"id": i, "tenant_id": id,
                              "endpoint": mock.MagicMock()})
        context = test.get_test_context()
        context.update({
            "config": {
                "users": {
                    "tenants": tenants_count,
                    "users_per_tenant": users_per_tenant,
                    "concurrent": 10,
                },
                "monasca_metrics": {
                    "name": "fake-metric-name",
                    "dimensions": {
                        "region": "fake-region",
                        "service": "fake-identity",
                        "hostname": "fake-hostname",
                        "url": "fake-url"
                    },
                    "metrics_per_tenant": metrics_per_tenant,
                },
                "roles": [
                    "monasca-user"
                ]
            },
            "admin": {
                "endpoint": mock.MagicMock()
            },
            "users": users,
            "tenants": tenants
        })
        return tenants, context

    @mock.patch("%s.metrics.rutils.interruptable_sleep" % CTX)
    @mock.patch("%s.metrics.monasca_utils.MonascaScenario" % CTX)
    def test_setup(self, mock_monasca_scenario, mock_interruptable_sleep):
        """setup() must create metrics per tenant with paced sleeps."""
        tenants_count = 2
        users_per_tenant = 4
        metrics_per_tenant = 5

        tenants, real_context = self._gen_context(
            tenants_count, users_per_tenant, metrics_per_tenant)

        monasca_ctx = metrics.MonascaMetricGenerator(real_context)
        monasca_ctx.setup()

        self.assertEqual(tenants_count, mock_monasca_scenario.call_count,
                         "Scenario should be constructed same times as "
                         "number of tenants")
        self.assertEqual(metrics_per_tenant * tenants_count,
                         mock_monasca_scenario.return_value._create_metrics.
                         call_count,
                         "Total number of metrics created should be tenant"
                         "counts times metrics per tenant")
        # One short sleep per created metric, then a final pre-poll delay.
        first_call = mock.call(0.001)
        second_call = mock.call(monasca_utils.CONF.benchmark.
                                monasca_metric_create_prepoll_delay,
                                atomic_delay=1)
        self.assertEqual([first_call] * metrics_per_tenant * tenants_count +
                         [second_call],
                         mock_interruptable_sleep.call_args_list,
                         "Method interruptable_sleep should be called "
                         "tenant counts times metrics plus one")
import functools

import mock

from rally.plugins.openstack.context import dataplane
from tests.unit import test

MOD = "rally.plugins.openstack.context.dataplane.heat."


class TestHeatWorkload(test.ScenarioTestCase):
    """Tests for the Heat dataplane context."""

    @mock.patch(MOD + "pkgutil")
    def test_get_data_resource(self, mock_pkgutil):
        """A list argument is treated as a (package, resource) pair."""
        mock_pkgutil.get_data.return_value = "fake_data"
        data = dataplane.heat.get_data([1, 2])
        self.assertEqual("fake_data", data)
        mock_pkgutil.get_data.assert_called_once_with(1, 2)

    @mock.patch(MOD + "open")
    def test_get_data_file(self, mock_open):
        """A scalar argument is treated as a file path and read."""
        data = dataplane.heat.get_data(1)
        self.assertEqual(mock_open.return_value.read.return_value, data)
        mock_open.assert_called_once_with(1)

    def test__get_context_parameter(self):
        """Dotted paths must resolve against user/tenant structures."""
        user = [1, 2]
        tenant = [3, 4, {"one": 1}]
        self.context["tenants"] = {1: tenant}
        ctx = dataplane.heat.HeatDataplane(self.context)
        gcp = functools.partial(ctx._get_context_parameter, user, 1)
        self.assertEqual(1, gcp("user.0"))
        self.assertEqual(2, gcp("user.1"))
        self.assertEqual(3, gcp("tenant.0"))
        self.assertEqual(1, gcp("tenant.2.one"))

    @mock.patch(MOD + "osclients.Clients")
    def test__get_public_network_id(self, mock_clients):
        """The first network id reported by neutron must be returned."""
        fake_net = {"id": "fake_id"}
        fake_nc = mock.Mock(name="fake_neutronclient")
        fake_nc.list_networks.return_value = {"networks": [fake_net]}
        mock_clients.neutron.return_value = fake_nc
        mock_clients.return_value = mock.Mock(
            neutron=mock.Mock(return_value=fake_nc))
        self.context["admin"] = {"credential": "fake_credential"}
        ctx = dataplane.heat.HeatDataplane(self.context)
        network_id = ctx._get_public_network_id()
        self.assertEqual("fake_id", network_id)
        mock_clients.assert_called_once_with("fake_credential")

    @mock.patch(MOD + "get_data")
    @mock.patch(MOD + "HeatDataplane._get_context_parameter")
    @mock.patch(MOD + "heat_utils")
    def test_setup(self, mock_heat_utils,
                   mock_heat_dataplane__get_context_parameter, mock_get_data):
        """setup() must record one stack workload tuple per tenant."""
        self.context.update({
            "config": {
                "heat_dataplane": {
                    "stacks_per_tenant": 1,
                    "template": "tpl.yaml",
                    "files": {"file1": "f1.yaml", "file2": "f2.yaml"},
                    "parameters": {"key": "value"},
                    "context_parameters": {"ctx.key": "ctx.value"},
                }
            },
            "users": [{"tenant_id": "t1", "keypair": {"name": "kp1"}}, ],
            "tenants": {"t1": {"networks": [{"router_id": "rid"}]}},
        })
        mock_heat_dataplane__get_context_parameter.return_value = "gcp"
        mock_get_data.side_effect = ["tpl", "sf1", "sf2"]
        ctx = dataplane.heat.HeatDataplane(self.context)
        ctx._get_public_network_id = mock.Mock(return_value="fake_net")
        ctx.setup()
        workloads = self.context["tenants"]["t1"]["stack_dataplane"]
        self.assertEqual(1, len(workloads))
        wl = workloads[0]
        fake_scenario = mock_heat_utils.HeatScenario.return_value
        # Workload tuple layout: (stack id, template, files, parameters).
        self.assertEqual(fake_scenario._create_stack.return_value.id, wl[0])
        self.assertEqual("tpl", wl[1])
        self.assertIn("sf1", wl[2].values())
        self.assertIn("sf2", wl[2].values())
        expected = {
            "ctx.key": "gcp",
            "key": "value",
            "key_name": "kp1",
            "network_id": "fake_net",
            "router_id": "rid"}
        self.assertEqual(expected, wl[3])
import copy

import mock

from rally.plugins.openstack.context.designate import zones
from rally.plugins.openstack.scenarios.designate import utils
from tests.unit import test

CTX = "rally.plugins.openstack.context"
SCN = "rally.plugins.openstack.scenarios"


class ZoneGeneratorTestCase(test.ScenarioTestCase):
    """Tests for the Designate zone context generator."""

    def _gen_tenants(self, count):
        """Return a fake tenants dict keyed by stringified ids."""
        tenants = {}
        for id_ in range(count):
            tenants[str(id_)] = {"name": str(id_)}
        return tenants

    def test_init(self):
        """The generator must adopt the "zones" section as its config."""
        self.context.update({
            "config": {
                "zones": {
                    "zones_per_tenant": 5,
                }
            }
        })

        inst = zones.ZoneGenerator(self.context)
        self.assertEqual(inst.config, self.context["config"]["zones"])

    @mock.patch("%s.designate.utils.DesignateScenario._create_zone" % SCN,
                return_value={"id": "uuid"})
    def test_setup(self, mock_designate_scenario__create_zone):
        """setup() must create the configured number of zones per tenant."""
        tenants_count = 2
        users_per_tenant = 5
        zones_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = []
        for id_ in tenants.keys():
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": id_,
                              "credential": mock.MagicMock()})

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "zones": {
                    "zones_per_tenant": zones_per_tenant,
                }
            },
            "admin": {
                "credential": mock.MagicMock()
            },
            "users": users,
            "tenants": tenants
        })

        # Expected context: each tenant gains a list of created zone dicts.
        new_context = copy.deepcopy(self.context)
        for id_ in tenants.keys():
            new_context["tenants"][id_].setdefault("zones", [])
            for i in range(zones_per_tenant):
                new_context["tenants"][id_]["zones"].append({"id": "uuid"})

        zones_ctx = zones.ZoneGenerator(self.context)
        zones_ctx.setup()

        self.assertEqual(new_context, self.context)

    @mock.patch("%s.designate.zones.resource_manager.cleanup" % CTX)
    def test_cleanup(self, mock_cleanup):
        """cleanup() must delegate zone removal to resource_manager."""
        tenants_count = 2
        users_per_tenant = 5
        zones_per_tenant = 5

        tenants = self._gen_tenants(tenants_count)
        users = []

        for id_ in tenants.keys():
            for i in range(users_per_tenant):
                users.append({"id": i, "tenant_id": id_,
                              "endpoint": "endpoint"})
            tenants[id_].setdefault("zones", [])
            for j in range(zones_per_tenant):
                tenants[id_]["zones"].append({"id": "uuid"})

        self.context.update({
            "config": {
                "users": {
                    "tenants": 2,
                    "users_per_tenant": 5,
                    "concurrent": 10,
                },
                "zones": {
                    "zones_per_tenant": 5,
                }
            },
            "admin": {
                "endpoint": mock.MagicMock()
            },
            "users": users,
            "tenants": tenants
        })

        zones_ctx = zones.ZoneGenerator(self.context)
        zones_ctx.cleanup()

        mock_cleanup.assert_called_once_with(
            names=["designate.zones"],
            users=self.context["users"],
            superclass=utils.DesignateScenario,
            task_id=self.context["task"]["uuid"])
import mock

from rally import exceptions
from rally.plugins.openstack.context.swift import objects
from tests.unit import test


class SwiftObjectGeneratorTestCase(test.TestCase):
    """Tests for the Swift object context generator."""

    @mock.patch("rally.osclients.Clients")
    def test_setup(self, mock_clients):
        """setup() must create the configured containers and objects."""
        containers_per_tenant = 2
        objects_per_container = 7
        context = test.get_test_context()
        context.update({
            "config": {
                "swift_objects": {
                    "containers_per_tenant": containers_per_tenant,
                    "objects_per_container": objects_per_container,
                    "object_size": 1024,
                    "resource_management_workers": 10
                }
            },
            "tenants": {
                "t1": {"name": "t1_name"},
                "t2": {"name": "t2_name"}
            },
            "users": [
                {"id": "u1", "tenant_id": "t1", "credential": "c1"},
                {"id": "u2", "tenant_id": "t2", "credential": "c2"}
            ]
        })
        objects_ctx = objects.SwiftObjectGenerator(context)
        objects_ctx.setup()
        for tenant_id in context["tenants"]:
            containers = context["tenants"][tenant_id]["containers"]
            self.assertEqual(containers_per_tenant, len(containers))
            for container in containers:
                self.assertEqual(objects_per_container,
                                 len(container["objects"]))

    @mock.patch("rally.osclients.Clients")
    @mock.patch("rally.plugins.openstack.context.swift.utils."
                "swift_utils.SwiftScenario")
    def test_cleanup(self, mock_swift_scenario, mock_clients):
        """cleanup() must delete every object and container, then empty
        the per-tenant container lists."""
        context = test.get_test_context()
        context.update({
            "config": {
                "swift_objects": {
                    "resource_management_workers": 1
                }
            },
            "tenants": {
                "t1": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {"id": "u1", "tenant_id": "t1",
                                  "credential": "c1"},
                         "container": "c1",
                         "objects": ["o1", "o2", "o3"]}
                    ]
                },
                "t2": {
                    "name": "t2_name",
                    "containers": [
                        {"user": {"id": "u2", "tenant_id": "t2",
                                  "credential": "c2"},
                         "container": "c2",
                         "objects": ["o4", "o5", "o6"]}
                    ]
                }
            }
        })
        objects_ctx = objects.SwiftObjectGenerator(context)
        objects_ctx.cleanup()
        expected_containers = ["c1", "c2"]
        mock_swift_scenario.return_value._delete_container.assert_has_calls(
            [mock.call(con) for con in expected_containers], any_order=True)
        expected_objects = [("c1", "o1"), ("c1", "o2"), ("c1", "o3"),
                            ("c2", "o4"), ("c2", "o5"), ("c2", "o6")]
        mock_swift_scenario.return_value._delete_object.assert_has_calls(
            [mock.call(con, obj) for con, obj in expected_objects],
            any_order=True)
        for tenant_id in context["tenants"]:
            self.assertEqual(
                0, len(context["tenants"][tenant_id]["containers"]))

    @mock.patch("rally.osclients.Clients")
    def test_setup_failure_clients_put_container(self, mock_clients):
        """Partial container creation must raise ContextSetupFailure."""
        context = test.get_test_context()
        context.update({
            "config": {
                "swift_objects": {
                    "containers_per_tenant": 2,
                    "object_size": 10,
                    "resource_management_workers": 5
                }
            },
            "tenants": {
                "t1": {"name": "t1_name"},
                "t2": {"name": "t2_name"}
            },
            "users": [
                {"id": "u1", "tenant_id": "t1", "credential": "c1"},
                {"id": "u2", "tenant_id": "t2", "credential": "c2"}
            ]
        })
        mock_swift = mock_clients.return_value.swift.return_value
        # 3 of 4 put_container calls fail -> only 1 container created.
        mock_swift.put_container.side_effect = [Exception, True,
                                                Exception, Exception]
        objects_ctx = objects.SwiftObjectGenerator(context)
        self.assertRaisesRegexp(exceptions.ContextSetupFailure,
                                "containers, expected 4 but got 1",
                                objects_ctx.setup)

    @mock.patch("rally.osclients.Clients")
    def test_setup_failure_clients_put_object(self, mock_clients):
        """Partial object creation must raise ContextSetupFailure."""
        context = test.get_test_context()
        context.update({
            "tenants": {
                "t1": {"name": "t1_name"},
                "t2": {"name": "t2_name"}
            },
            "users": [
                {"id": "u1", "tenant_id": "t1", "credential": "c1"},
                {"id": "u2", "tenant_id": "t2", "credential": "c2"}
            ]
        })
        mock_swift = mock_clients.return_value.swift.return_value
        mock_swift.put_object.side_effect = [Exception, True]
        objects_ctx = objects.SwiftObjectGenerator(context)
        self.assertRaisesRegexp(exceptions.ContextSetupFailure,
                                "objects, expected 2 but got 1",
                                objects_ctx.setup)

    @mock.patch("rally.osclients.Clients")
    def test_cleanup_failure_clients_delete_container(self, mock_clients):
        """Containers whose deletion failed must stay in the context."""
        context = test.get_test_context()
        context.update({
            "tenants": {
                "t1": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {"id": "u1", "tenant_id": "t1",
                                  "credential": "c1"},
                         "container": "coooon",
                         "objects": []}] * 3
                }
            }
        })
        mock_swift = mock_clients.return_value.swift.return_value
        mock_swift.delete_container.side_effect = [True, True, Exception]
        objects_ctx = objects.SwiftObjectGenerator(context)
        objects_ctx.cleanup()
        self.assertEqual(1, len(context["tenants"]["t1"]["containers"]))

    @mock.patch("rally.osclients.Clients")
    def test_cleanup_failure_clients_delete_object(self, mock_clients):
        """Objects whose deletion failed must stay in their container."""
        context = test.get_test_context()
        context.update({
            "tenants": {
                "t1": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {"id": "u1", "tenant_id": "t1",
                                  "credential": "c1"},
                         "container": "c1",
                         "objects": ["oooo"] * 3}
                    ]
                }
            }
        })
        mock_swift = mock_clients.return_value.swift.return_value
        mock_swift.delete_object.side_effect = [True, Exception, True]
        objects_ctx = objects.SwiftObjectGenerator(context)
        objects_ctx._delete_containers = mock.MagicMock()
        objects_ctx.cleanup()
        self.assertEqual(
            1, sum([len(container["objects"])
                    for container in context["tenants"]["t1"]["containers"]]))
import mock

from rally.plugins.openstack.context.swift import utils
from tests.unit import test


class SwiftObjectMixinTestCase(test.TestCase):
    """Tests for the Swift object helper mixin."""

    @mock.patch("rally.osclients.Clients")
    def test__create_containers(self, mock_clients):
        """_create_containers must create per-tenant containers and
        return (tenant_id, container) pairs."""
        tenants = 2
        containers_per_tenant = 2
        context = test.get_test_context()
        context.update({
            "tenants": {
                "1001": {"name": "t1_name"},
                "1002": {"name": "t2_name"}
            },
            "users": [
                {"id": "u1", "tenant_id": "1001", "credential": "c1"},
                {"id": "u2", "tenant_id": "1002", "credential": "c2"}
            ]
        })
        mixin = utils.SwiftObjectMixin()
        containers = mixin._create_containers(context,
                                              containers_per_tenant, 15)

        self.assertEqual(tenants * containers_per_tenant, len(containers))
        for index, container in enumerate(sorted(containers)):
            offset = int(index / containers_per_tenant) + 1
            self.assertEqual(str(1000 + offset), container[0])

        for index, tenant_id in enumerate(sorted(context["tenants"]),
                                          start=1):
            containers = context["tenants"][tenant_id]["containers"]
            self.assertEqual(containers_per_tenant, len(containers))
            for container in containers:
                self.assertEqual("u%d" % index, container["user"]["id"])
                self.assertEqual("c%d" % index,
                                 container["user"]["credential"])
                self.assertEqual(0, len(container["objects"]))

    @mock.patch("rally.osclients.Clients")
    def test__create_objects(self, mock_clients):
        """_create_objects must fill every container and return
        (tenant_id, container, object) tuples."""
        tenants = 2
        containers_per_tenant = 1
        objects_per_container = 5
        context = test.get_test_context()
        context.update({
            "tenants": {
                "1001": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {
                            "id": "u1", "tenant_id": "1001",
                            "credential": "c0"},
                         "container": "c1",
                         "objects": []}
                    ]
                },
                "1002": {
                    "name": "t2_name",
                    "containers": [
                        {"user": {
                            "id": "u2", "tenant_id": "1002",
                            "credential": "c2"},
                         "container": "c2",
                         "objects": []}
                    ]
                }
            }
        })
        mixin = utils.SwiftObjectMixin()
        objects_list = mixin._create_objects(context, objects_per_container,
                                             1024, 25)

        self.assertEqual(
            tenants * containers_per_tenant * objects_per_container,
            len(objects_list))
        chunk = containers_per_tenant * objects_per_container
        for index, obj in enumerate(sorted(objects_list)):
            offset = int(index / chunk) + 1
            self.assertEqual(str(1000 + offset), obj[0])
            self.assertEqual("c%d" % offset, obj[1])

        for tenant_id in context["tenants"]:
            for container in context["tenants"][tenant_id]["containers"]:
                self.assertEqual(objects_per_container,
                                 len(container["objects"]))

    @mock.patch("rally.osclients.Clients")
    def test__delete_containers(self, mock_clients):
        """_delete_containers must delete each container and empty the
        per-tenant container lists."""
        context = test.get_test_context()
        context.update({
            "tenants": {
                "1001": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {
                            "id": "u1", "tenant_id": "1001",
                            "credential": "c1"},
                         "container": "c1",
                         "objects": []}
                    ]
                },
                "1002": {
                    "name": "t2_name",
                    "containers": [
                        {"user": {
                            "id": "u2", "tenant_id": "1002",
                            "credential": "c2"},
                         "container": "c2",
                         "objects": []}
                    ]
                }
            }
        })
        mixin = utils.SwiftObjectMixin()
        mixin._delete_containers(context, 1)

        mock_swift = mock_clients.return_value.swift.return_value
        expected_containers = ["c1", "c2"]
        mock_swift.delete_container.assert_has_calls(
            [mock.call(con) for con in expected_containers], any_order=True)

        for tenant_id in context["tenants"]:
            self.assertEqual(
                0, len(context["tenants"][tenant_id]["containers"]))

    @mock.patch("rally.osclients.Clients")
    def test__delete_objects(self, mock_clients):
        """_delete_objects must delete each object and empty the
        per-container object lists."""
        context = test.get_test_context()
        context.update({
            "tenants": {
                "1001": {
                    "name": "t1_name",
                    "containers": [
                        {"user": {
                            "id": "u1", "tenant_id": "1001",
                            "credential": "c1"},
                         "container": "c1",
                         "objects": ["o1", "o2", "o3"]}
                    ]
                },
                "1002": {
                    "name": "t2_name",
                    "containers": [
                        {"user": {
                            "id": "u2", "tenant_id": "1002",
                            "credential": "c2"},
                         "container": "c2",
                         "objects": ["o4", "o5", "o6"]}
                    ]
                }
            }
        })
        mixin = utils.SwiftObjectMixin()
        mixin._delete_objects(context, 1)

        mock_swift = mock_clients.return_value.swift.return_value
        expected_objects = [("c1", "o1"), ("c1", "o2"), ("c1", "o3"),
                            ("c2", "o4"), ("c2", "o5"), ("c2", "o6")]
        mock_swift.delete_object.assert_has_calls(
            [mock.call(con, obj) for con, obj in expected_objects],
            any_order=True)

        for tenant_id in context["tenants"]:
            for container in context["tenants"][tenant_id]["containers"]:
                self.assertEqual(0, len(container["objects"]))
import mock from rally.plugins.openstack.context.network import allow_ssh from tests.unit import fakes from tests.unit import test CTX = "rally.plugins.openstack.context.network.allow_ssh" class AllowSSHContextTestCase(test.TestCase): def setUp(self): super(AllowSSHContextTestCase, self).setUp() self.users = 2 self.secgroup_name = "test-secgroup" self.ctx_with_secgroup = test.get_test_context() self.ctx_with_secgroup.update({ "users": [ { "tenant_id": "uuid1", "credential": "credential", "secgroup": {"id": "secgroup_id", "name": "secgroup"} } ] * self.users, "admin": {"tenant_id": "uuid2", "credential": "admin_credential"}, "tenants": {"uuid1": {"id": "uuid1", "name": "uuid1"}}, }) self.ctx_without_secgroup = test.get_test_context() self.ctx_without_secgroup.update({ "users": [{"tenant_id": "uuid1", "credential": "credential"}, {"tenant_id": "uuid1", "credential": "credential"}], "admin": {"tenant_id": "uuid2", "credential": "admin_credential"}, "tenants": {"uuid1": {"id": "uuid1", "name": "uuid1"}}, }) @mock.patch("%s.osclients.Clients" % CTX) def test__prepare_open_secgroup(self, mock_clients): fake_nova = fakes.FakeNovaClient() self.assertEqual(len(fake_nova.security_groups.list()), 1) mock_cl = mock.MagicMock() mock_cl.nova.return_value = fake_nova mock_clients.return_value = mock_cl ret = allow_ssh._prepare_open_secgroup("credential", self.secgroup_name) self.assertEqual(self.secgroup_name, ret["name"]) self.assertEqual(2, len(fake_nova.security_groups.list())) self.assertIn( self.secgroup_name, [sg.name for sg in fake_nova.security_groups.list()]) # run prep again, check that another security group is not created allow_ssh._prepare_open_secgroup("credential", self.secgroup_name) self.assertEqual(2, len(fake_nova.security_groups.list())) @mock.patch("%s.osclients.Clients" % CTX) def test__prepare_open_secgroup_rules(self, mock_clients): fake_nova = fakes.FakeNovaClient() # NOTE(hughsaunders) Default security group is precreated self.assertEqual(1, 
len(fake_nova.security_groups.list())) mock_cl = mock.MagicMock() mock_cl.nova.return_value = fake_nova mock_clients.return_value = mock_cl allow_ssh._prepare_open_secgroup("credential", self.secgroup_name) self.assertEqual(2, len(fake_nova.security_groups.list())) rally_open = fake_nova.security_groups.find(self.secgroup_name) self.assertEqual(3, len(rally_open.rules)) # run prep again, check that extra rules are not created allow_ssh._prepare_open_secgroup("credential", self.secgroup_name) rally_open = fake_nova.security_groups.find(self.secgroup_name) self.assertEqual(3, len(rally_open.rules)) @mock.patch("%s.osclients.Clients" % CTX) @mock.patch("%s._prepare_open_secgroup" % CTX) @mock.patch("rally.plugins.openstack.wrappers.network.wrap") def test_secgroup_setup_cleanup_with_secgroup_supported( self, mock_network_wrap, mock__prepare_open_secgroup, mock_clients): mock_network_wrapper = mock.MagicMock() mock_network_wrapper.supports_extension.return_value = ( True, "") mock_network_wrap.return_value = mock_network_wrapper mock__prepare_open_secgroup.return_value = { "name": "secgroup", "id": "secgroup_id"} mock_clients.return_value = mock.MagicMock() secgrp_ctx = allow_ssh.AllowSSH(self.ctx_with_secgroup) secgrp_ctx.setup() self.assertEqual(self.ctx_with_secgroup, secgrp_ctx.context) secgrp_ctx.cleanup() self.assertEqual( [ mock.call("admin_credential"), mock.call("credential"), mock.call().nova(), mock.call().nova().security_groups.get("secgroup_id"), mock.call().nova().security_groups.get().delete() ], mock_clients.mock_calls) mock_network_wrap.assert_called_once_with( mock_clients.return_value, secgrp_ctx, config={}) @mock.patch("%s.osclients.Clients" % CTX) @mock.patch("rally.plugins.openstack.wrappers.network.wrap") def test_secgroup_setup_with_secgroup_unsupported( self, mock_network_wrap, mock_clients): mock_network_wrapper = mock.MagicMock() mock_network_wrapper.supports_extension.return_value = ( False, "Not supported") mock_network_wrap.return_value = 
mock_network_wrapper mock_clients.return_value = mock.MagicMock() secgrp_ctx = allow_ssh.AllowSSH(dict(self.ctx_without_secgroup)) secgrp_ctx.setup() self.assertEqual(self.ctx_without_secgroup, secgrp_ctx.context) mock_clients.assert_called_once_with("admin_credential") mock_network_wrap.assert_called_once_with( mock_clients.return_value, secgrp_ctx, config={}) rally-0.9.1/tests/unit/plugins/openstack/context/network/test_network.py0000664000567000056710000001167513073417717030101 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock import netaddr from rally.plugins.openstack.context.network import networks as network_context from tests.unit import test NET = "rally.plugins.openstack.wrappers.network." 
@ddt.ddt class NetworkTestCase(test.TestCase): def get_context(self, **kwargs): return {"task": {"uuid": "foo_task"}, "admin": {"credential": "foo_admin"}, "config": {"network": kwargs}, "users": [{"id": "foo_user", "tenant_id": "foo_tenant"}, {"id": "bar_user", "tenant_id": "bar_tenant"}], "tenants": {"foo_tenant": {"networks": [{"id": "foo_net"}]}, "bar_tenant": {"networks": [{"id": "bar_net"}]}}} def test_START_CIDR_DFLT(self): netaddr.IPNetwork(network_context.Network.DEFAULT_CONFIG["start_cidr"]) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap", return_value="foo_service") def test__init__default(self, mock_wrap, mock_clients): context = network_context.Network(self.get_context()) self.assertEqual(context.config["networks_per_tenant"], 1) self.assertEqual(context.config["start_cidr"], network_context.Network.DEFAULT_CONFIG["start_cidr"]) self.assertIsNone(context.config["dns_nameservers"]) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap", return_value="foo_service") def test__init__explicit(self, mock_wrap, mock_clients): context = network_context.Network( self.get_context(start_cidr="foo_cidr", networks_per_tenant=42, network_create_args={"fakearg": "fake"}, dns_nameservers=["1.2.3.4", "5.6.7.8"])) self.assertEqual(context.config["networks_per_tenant"], 42) self.assertEqual(context.config["start_cidr"], "foo_cidr") self.assertEqual(context.config["network_create_args"], {"fakearg": "fake"}) self.assertEqual(context.config["dns_nameservers"], ("1.2.3.4", "5.6.7.8")) @ddt.data({}, {"dns_nameservers": []}, {"dns_nameservers": ["1.2.3.4", "5.6.7.8"]}) @ddt.unpack @mock.patch(NET + "wrap") @mock.patch("rally.plugins.openstack.context.network.networks.utils") @mock.patch("rally.osclients.Clients") def test_setup(self, mock_clients, mock_utils, mock_wrap, **dns_kwargs): mock_utils.iterate_per_tenants.return_value = [ ("foo_user", "foo_tenant"), ("bar_user", "bar_tenant")] mock_create = mock.Mock(side_effect=lambda t, **kw: t + "-net") 
mock_utils.generate_random_name = mock.Mock() mock_wrap.return_value = mock.Mock(create_network=mock_create) nets_per_tenant = 2 net_context = network_context.Network( self.get_context(networks_per_tenant=nets_per_tenant, network_create_args={"fakearg": "fake"}, **dns_kwargs)) net_context.setup() if "dns_nameservers" in dns_kwargs: dns_kwargs["dns_nameservers"] = tuple( dns_kwargs["dns_nameservers"]) create_calls = [ mock.call(tenant, add_router=True, subnets_num=1, network_create_args={"fakearg": "fake"}, **dns_kwargs) for user, tenant in mock_utils.iterate_per_tenants.return_value] mock_create.assert_has_calls(create_calls) mock_utils.iterate_per_tenants.assert_called_once_with( net_context.context["users"]) expected_networks = ["bar_tenant-net", "foo_tenant-net"] * nets_per_tenant actual_networks = [] for tenant_id, tenant_ctx in net_context.context["tenants"].items(): actual_networks.extend(tenant_ctx["networks"]) self.assertSequenceEqual(sorted(expected_networks), sorted(actual_networks)) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap") def test_cleanup(self, mock_wrap, mock_clients): net_context = network_context.Network(self.get_context()) net_context.cleanup() mock_wrap().delete_network.assert_has_calls( [mock.call({"id": "foo_net"}), mock.call({"id": "bar_net"})], any_order=True) rally-0.9.1/tests/unit/plugins/openstack/context/murano/0000775000567000056710000000000013073420067024565 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/murano/__init__.py0000664000567000056710000000000013073417717026674 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/murano/test_murano_environments.py0000664000567000056710000000571413073417720032317 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.murano import murano_environments from tests.unit import test CTX = "rally.plugins.openstack.context.murano.murano_environments" SCN = "rally.plugins.openstack.scenarios" class MuranoEnvironmentGeneratorTestCase(test.TestCase): def setUp(self): super(MuranoEnvironmentGeneratorTestCase, self).setUp() @staticmethod def _get_context(): return { "config": { "users": { "tenants": 2, "users_per_tenant": 1, "concurrent": 1, }, "murano_environments": { "environments_per_tenant": 1 } }, "admin": { "credential": mock.MagicMock() }, "task": mock.MagicMock(), "users": [ { "id": "user_0", "tenant_id": "tenant_0", "credential": "credential" }, { "id": "user_1", "tenant_id": "tenant_1", "credential": "credential" } ], "tenants": { "tenant_0": {"name": "tenant_0_name"}, "tenant_1": {"name": "tenant_1_name"} } } @mock.patch("%s.murano.utils.MuranoScenario._create_environment" % SCN) def test_setup(self, mock_create_env): murano_ctx = murano_environments.EnvironmentGenerator( self._get_context()) murano_ctx.setup() self.assertEqual(2, len(murano_ctx.context["tenants"])) tenant_id = murano_ctx.context["users"][0]["tenant_id"] self.assertEqual([mock_create_env.return_value], murano_ctx.context["tenants"][tenant_id][ "environments"]) @mock.patch("%s.murano.utils.MuranoScenario._create_environment" % SCN) @mock.patch("%s.resource_manager.cleanup" % CTX) def test_cleanup(self, mock_cleanup, mock_create_env): murano_ctx = murano_environments.EnvironmentGenerator( self._get_context()) murano_ctx.setup() murano_ctx.cleanup() 
mock_cleanup.assert_called_once_with(names=["murano.environments"], users=murano_ctx.context["users"]) rally-0.9.1/tests/unit/plugins/openstack/context/murano/test_murano_packages.py0000664000567000056710000001030213073417720031333 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.murano import murano_packages from rally.plugins.openstack.scenarios.murano import utils as murano_utils from tests.unit import test CTX = "rally.plugins.openstack.context.murano.murano_packages" class MuranoPackageGeneratorTestCase(test.TestCase): def setUp(self): super(MuranoPackageGeneratorTestCase, self).setUp() @staticmethod def _get_context(): return { "config": { "users": { "tenants": 2, "users_per_tenant": 1, "concurrent": 1, }, "murano_packages": { "app_package": ( "rally-jobs/extra/murano/" "applications/HelloReporter/" "io.murano.apps.HelloReporter.zip") } }, "admin": { "credential": mock.MagicMock() }, "task": mock.MagicMock(), "users": [ { "id": "user_0", "tenant_id": "tenant_0", "credential": "credential" }, { "id": "user_1", "tenant_id": "tenant_1", "credential": "credential" } ], "tenants": { "tenant_0": {"name": "tenant_0_name"}, "tenant_1": {"name": "tenant_1_name"} } } @mock.patch("%s.osclients" % CTX) def test_setup(self, mock_osclients): mock_app = mock.MagicMock(id="fake_app_id") (mock_osclients.Clients().murano(). 
packages.create.return_value) = mock_app murano_ctx = murano_packages.PackageGenerator(self._get_context()) murano_ctx.setup() self.assertEqual(2, len(murano_ctx.context["tenants"])) tenant_id = murano_ctx.context["users"][0]["tenant_id"] self.assertEqual([mock_app], murano_ctx.context["tenants"][tenant_id]["packages"]) @mock.patch("%s.osclients" % CTX) @mock.patch("%s.resource_manager.cleanup" % CTX) def test_cleanup_with_zip(self, mock_cleanup, mock_osclients): mock_app = mock.Mock(id="fake_app_id") (mock_osclients.Clients().murano(). packages.create.return_value) = mock_app murano_ctx = murano_packages.PackageGenerator(self._get_context()) murano_ctx.setup() murano_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["murano.packages"], users=murano_ctx.context["users"], superclass=murano_utils.MuranoScenario, task_id=murano_ctx.context["task"]["uuid"]) @mock.patch("%s.osclients" % CTX) @mock.patch("%s.resource_manager.cleanup" % CTX) def test_cleanup_with_dir(self, mock_cleanup, mock_osclients): mock_app = mock.Mock(id="fake_app_id") (mock_osclients.Clients().murano(). packages.create.return_value) = mock_app ctx_dict = self._get_context() app_dir = ("rally-jobs/extra/murano/applications/" "HelloReporter/io.murano.apps.HelloReporter/") ctx_dict["config"]["murano_packages"]["app_package"] = app_dir murano_ctx = murano_packages.PackageGenerator(ctx_dict) murano_ctx.setup() murano_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["murano.packages"], users=murano_ctx.context["users"], superclass=murano_utils.MuranoScenario, task_id=ctx_dict["task"]["uuid"]) rally-0.9.1/tests/unit/plugins/openstack/context/test_fuel.py0000664000567000056710000001014413073417717025640 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.context import fuel from tests.unit import test BASE = "rally.plugins.openstack.context.fuel" class FuelEnvGeneratorTestCase(test.TestCase): @mock.patch(BASE + ".FuelEnvGenerator._create_envs", return_value=["env1"]) @mock.patch(BASE + ".fuel_utils.FuelScenario") def test_setup(self, mock_fuel_scenario, mock__create_envs): context = {} context["config"] = {"fuel_environments": {"environments": 1}} context["task"] = {"uuid": "some_uuid"} context["admin"] = {"credential": "some_credential"} env_ctx = fuel.FuelEnvGenerator(context) env_ctx.setup() self.assertIn("fuel", env_ctx.context) self.assertIn("environments", env_ctx.context["fuel"]) mock__create_envs.assert_called_once_with() mock_fuel_scenario.assert_called_once_with(context) @mock.patch(BASE + ".FuelEnvGenerator._create_envs", return_value=["env1"]) @mock.patch(BASE + ".fuel_utils.FuelScenario") def test_setup_error(self, mock_fuel_scenario, mock__create_envs): context = {} context["config"] = {"fuel_environments": {"environments": 5}} context["task"] = {"uuid": "some_uuid"} context["admin"] = {"credential": "some_credential"} env_ctx = fuel.FuelEnvGenerator(context) self.assertRaises(exceptions.ContextSetupFailure, env_ctx.setup) def test__create_envs(self): config = {"environments": 4, "release_id": 42, "network_provider": "provider", "deployment_mode": "mode", "net_segment_type": "type", "resource_management_workers": 3} context = {"task": {}, "config": {"fuel_environments": config}} env_ctx = fuel.FuelEnvGenerator(context) 
env_ctx.fscenario = mock.Mock() env_ctx.fscenario.return_value._create_environment.return_value = "id" self.assertEqual(config["environments"], len(env_ctx._create_envs())) enves = config.pop("environments") config.pop("resource_management_workers") exp_calls = [mock.call(**config) for i in range(enves)] env_ctx.fscenario._create_environment.has_calls(exp_calls, any_order=True) def test__delete_envs(self): config = {"release_id": 42, "network_provider": "provider", "deployment_mode": "mode", "net_segment_type": "type", "resource_management_workers": 3} context = {"task": {}, "config": {"fuel_environments": config}, "fuel": {"environments": ["id", "id", "id"]}} env_ctx = fuel.FuelEnvGenerator(context) env_ctx.fscenario = mock.Mock() env_ctx._delete_envs() self.assertEqual({}, context["fuel"]) def test_cleanup(self): config = {"release_id": 42, "network_provider": "provider", "deployment_mode": "mode", "net_segment_type": "type", "resource_management_workers": 3} context = {"task": {"uuid": "some_id"}, "config": {"fuel_environments": config}, "fuel": {"environments": ["id", "id", "id"]}} env_ctx = fuel.FuelEnvGenerator(context) env_ctx._delete_envs = mock.Mock() env_ctx.cleanup() env_ctx._delete_envs.assert_called_once_with() rally-0.9.1/tests/unit/plugins/openstack/context/heat/0000775000567000056710000000000013073420067024205 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/heat/__init__.py0000664000567000056710000000000013073417717026314 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/heat/test_stacks.py0000664000567000056710000000657213073417720027122 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.heat import stacks from rally.plugins.openstack.scenarios.heat import utils as heat_utils from tests.unit import fakes from tests.unit import test CTX = "rally.plugins.openstack.context" SCN = "rally.plugins.openstack.scenarios" class TestStackGenerator(test.ScenarioTestCase): def _gen_tenants(self, count): tenants = {} for id_ in range(count): tenants[str(id_)] = dict(name=str(id_)) return tenants def test_init(self): self.context.update({ "config": { "stacks": { "stacks_per_tenant": 1, "resources_per_stack": 1 } } }) inst = stacks.StackGenerator(self.context) self.assertEqual(inst.config, self.context["config"]["stacks"]) @mock.patch("%s.heat.utils.HeatScenario._create_stack" % SCN, return_value=fakes.FakeStack(id="uuid")) def test_setup(self, mock_heat_scenario__create_stack): tenants_count = 2 users_per_tenant = 5 stacks_per_tenant = 1 tenants = self._gen_tenants(tenants_count) users = [] for ten_id in tenants: for i in range(users_per_tenant): users.append({"id": i, "tenant_id": ten_id, "credential": mock.MagicMock()}) self.context.update({ "config": { "users": { "tenants": tenants_count, "users_per_tenant": users_per_tenant, "concurrent": 10, }, "stacks": { "stacks_per_tenant": stacks_per_tenant, "resources_per_stack": 1 } }, "users": users, "tenants": tenants }) stack_ctx = stacks.StackGenerator(self.context) stack_ctx.setup() self.assertEqual(tenants_count * stacks_per_tenant, mock_heat_scenario__create_stack.call_count) # check that stack ids have been saved in context for ten_id in 
self.context["tenants"].keys(): self.assertEqual(stacks_per_tenant, len(self.context["tenants"][ten_id]["stacks"])) @mock.patch("%s.heat.stacks.resource_manager.cleanup" % CTX) def test_cleanup(self, mock_cleanup): self.context.update({ "users": mock.MagicMock() }) stack_ctx = stacks.StackGenerator(self.context) stack_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["heat.stacks"], users=self.context["users"], superclass=heat_utils.HeatScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/tests/unit/plugins/openstack/context/sahara/0000775000567000056710000000000013073420067024523 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/sahara/__init__.py0000664000567000056710000000000013073417717026632 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/sahara/test_sahara_output_data_sources.py0000664000567000056710000001266613073417720033564 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.common import objects from rally.plugins.openstack.context.sahara import sahara_output_data_sources from rally.plugins.openstack.scenarios.sahara import utils as sahara_utils from tests.unit import test CTX = "rally.plugins.openstack.context.sahara" class SaharaOutputDataSourcesTestCase(test.ScenarioTestCase): def setUp(self): super(SaharaOutputDataSourcesTestCase, self).setUp() fake_dict = objects.Credential("http://fake.example.org:5000/v2.0/", "user", "passwd") self.tenants_num = 2 self.users_per_tenant = 2 self.users = self.tenants_num * self.users_per_tenant self.task = mock.MagicMock() self.tenants = {} self.users_key = [] for i in range(self.tenants_num): self.tenants[str(i)] = {"id": str(i), "name": str(i), "sahara": {"image": "42"}} for j in range(self.users_per_tenant): self.users_key.append({"id": "%s_%s" % (str(i), str(j)), "tenant_id": str(i), "credential": fake_dict}) self.user_key = [{"id": i, "tenant_id": j, "credential": "credential"} for j in range(self.tenants_num) for i in range(self.users_per_tenant)] self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_output_data_sources": { "output_type": "hdfs", "output_url_prefix": "hdfs://test_host/", }, }, "admin": {"credential": mock.MagicMock()}, "task": mock.MagicMock(), "users": self.users_key, "tenants": self.tenants }) def check_setup(self): context = sahara_output_data_sources.SaharaOutputDataSources.context[ "sahara"]["output_conf"] self.assertIsNotNone(context.get("output_type")) self.assertIsNotNone(context.get("output_url_prefix")) @mock.patch("%s.sahara_output_data_sources.resource_manager.cleanup" % CTX) @mock.patch("%s.sahara_output_data_sources.osclients" % CTX) def test_setup_and_cleanup_hdfs(self, mock_osclients, mock_cleanup): mock_sahara = mock_osclients.Clients(mock.MagicMock()).sahara() mock_sahara.data_sources.create.return_value = mock.MagicMock( id=42) sahara_ctx = 
sahara_output_data_sources.SaharaOutputDataSources( self.context) sahara_ctx.generate_random_name = mock.Mock() output_ds_crete_calls = [] for i in range(self.tenants_num): output_ds_crete_calls.append(mock.call( name=sahara_ctx.generate_random_name.return_value, description="", data_source_type="hdfs", url="hdfs://test_host/")) sahara_ctx.setup() mock_sahara.data_sources.create.assert_has_calls( output_ds_crete_calls) sahara_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["sahara.data_sources"], users=self.context["users"], superclass=sahara_utils.SaharaScenario, task_id=self.context["task"]["uuid"]) @mock.patch("%s.sahara_output_data_sources.osclients" % CTX) def test_setup_inputs_swift(self, mock_osclients): mock_sahara = mock_osclients.Clients(mock.MagicMock()).sahara() self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_output_data_sources": { "output_type": "swift", "output_url_prefix": "rally", }, }, "admin": {"credential": mock.MagicMock()}, "task": mock.MagicMock(), "users": self.users_key, "tenants": self.tenants, "user_choice_method": "random", }) sahara_ctx = sahara_output_data_sources.SaharaOutputDataSources( self.context) sahara_ctx.generate_random_name = mock.Mock(return_value="random_name") output_ds_crete_calls = [] for i in range(self.tenants_num): output_ds_crete_calls.append(mock.call( name="random_name", description="", data_source_type="swift", url="swift://random_name.sahara/", credential_user="user", credential_pass="passwd" )) sahara_ctx.setup() mock_sahara.data_sources.create.assert_has_calls( output_ds_crete_calls) sahara_ctx.cleanup() rally-0.9.1/tests/unit/plugins/openstack/context/sahara/test_sahara_image.py0000664000567000056710000001475113073417720030547 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.context.sahara import sahara_image from rally.plugins.openstack.scenarios.glance import utils as glance_utils from tests.unit import test BASE_CTX = "rally.task.context" CTX = "rally.plugins.openstack.context.sahara.sahara_image" BASE_SCN = "rally.task.scenarios" SCN = "rally.plugins.openstack.scenarios" class SaharaImageTestCase(test.ScenarioTestCase): def setUp(self): super(SaharaImageTestCase, self).setUp() self.tenants_num = 2 self.users_per_tenant = 2 self.users = self.tenants_num * self.users_per_tenant self.task = mock.MagicMock() self.tenants = {} self.users_key = [] for i in range(self.tenants_num): self.tenants[str(i)] = {"id": str(i), "name": str(i), "sahara": {"image": "42"}} for j in range(self.users_per_tenant): self.users_key.append({"id": "%s_%s" % (str(i), str(j)), "tenant_id": str(i), "credential": mock.MagicMock()}) @property def url_image_context(self): self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_image": { "image_url": "http://somewhere", "plugin_name": "test_plugin", "hadoop_version": "test_version", "username": "test_user" } }, "admin": {"credential": mock.MagicMock()}, "users": self.users_key, "tenants": self.tenants }) return self.context @property def existing_image_context(self): self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_image": { "image_uuid": "some_id" } }, "admin": {"credential": mock.MagicMock()}, 
"users": self.users_key, "tenants": self.tenants, }) return self.context @mock.patch("%s.glance.utils.GlanceScenario._create_image" % SCN, return_value=mock.MagicMock(id=42)) @mock.patch("%s.resource_manager.cleanup" % CTX) def test_setup_and_cleanup_url_image(self, mock_cleanup, mock_glance_scenario__create_image): ctx = self.url_image_context sahara_ctx = sahara_image.SaharaImage(ctx) sahara_ctx.generate_random_name = mock.Mock() glance_calls = [] for i in range(self.tenants_num): glance_calls.append( mock.call(container_format="bare", image_location="http://somewhere", disk_format="qcow2", name=sahara_ctx.generate_random_name.return_value)) sahara_update_image_calls = [] sahara_update_tags_calls = [] for i in range(self.tenants_num): sahara_update_image_calls.append(mock.call(image_id=42, user_name="test_user", desc="")) sahara_update_tags_calls.append(mock.call( image_id=42, new_tags=["test_plugin", "test_version"])) sahara_ctx.setup() mock_glance_scenario__create_image.assert_has_calls(glance_calls) self.clients("sahara").images.update_image.assert_has_calls( sahara_update_image_calls) self.clients("sahara").images.update_tags.assert_has_calls( sahara_update_tags_calls) sahara_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["glance.images"], users=ctx["users"], superclass=glance_utils.GlanceScenario, task_id=ctx["task"]["uuid"]) @mock.patch("%s.glance.utils.GlanceScenario._create_image" % SCN, return_value=mock.MagicMock(id=42)) @mock.patch("%s.resource_manager.cleanup" % CTX) @mock.patch("%s.osclients.Clients" % CTX) def test_setup_and_cleanup_existing_image( self, mock_clients, mock_cleanup, mock_glance_scenario__create_image): mock_clients.glance.images.get.return_value = mock.MagicMock( is_public=True) ctx = self.existing_image_context sahara_ctx = sahara_image.SaharaImage(ctx) sahara_ctx.setup() for tenant_id in sahara_ctx.context["tenants"]: image_id = ( sahara_ctx.context["tenants"][tenant_id]["sahara"]["image"]) self.assertEqual("some_id", 
image_id) self.assertFalse(mock_glance_scenario__create_image.called) sahara_ctx.cleanup() self.assertFalse(mock_cleanup.called) @mock.patch("%s.osclients.Glance.create_client" % CTX) def test_check_existing_image(self, mock_glance_create_client): ctx = self.existing_image_context sahara_ctx = sahara_image.SaharaImage(ctx) sahara_ctx.setup() mock_glance_create_client.images.get.asser_called_once_with("some_id") @mock.patch("%s.osclients.Glance.create_client" % CTX) def test_check_existing_private_image_fail(self, mock_glance_create_client): mock_glance_create_client.return_value.images.get.return_value = ( mock.MagicMock(is_public=False)) ctx = self.existing_image_context sahara_ctx = sahara_image.SaharaImage(ctx) self.assertRaises(exceptions.BenchmarkSetupFailure, sahara_ctx.setup) mock_glance_create_client.images.get.asser_called_once_with("some_id") rally-0.9.1/tests/unit/plugins/openstack/context/sahara/test_sahara_cluster.py0000664000567000056710000001272313073417720031143 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from oslo_config import cfg from rally import exceptions from rally.plugins.openstack.context.sahara import sahara_cluster from rally.plugins.openstack.scenarios.sahara import utils as sahara_utils from tests.unit import test CONF = cfg.CONF CTX = "rally.plugins.openstack.context.sahara" class SaharaClusterTestCase(test.ScenarioTestCase): patch_benchmark_utils = False def setUp(self): super(SaharaClusterTestCase, self).setUp() self.tenants_num = 2 self.users_per_tenant = 2 self.users = self.tenants_num * self.users_per_tenant self.tenants = {} self.users_key = [] for i in range(self.tenants_num): self.tenants[str(i)] = {"id": str(i), "name": str(i), "sahara": {"image": "42"}} for j in range(self.users_per_tenant): self.users_key.append({"id": "%s_%s" % (str(i), str(j)), "tenant_id": str(i), "credential": mock.MagicMock()}) CONF.set_override("sahara_cluster_check_interval", 0, "benchmark", enforce_type=True) self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant }, "sahara_cluster": { "master_flavor_id": "test_flavor_m", "worker_flavor_id": "test_flavor_w", "workers_count": 2, "plugin_name": "test_plugin", "hadoop_version": "test_version" } }, "admin": {"credential": mock.MagicMock()}, "users": self.users_key, "tenants": self.tenants }) @mock.patch("%s.sahara_cluster.resource_manager.cleanup" % CTX) @mock.patch("%s.sahara_cluster.utils.SaharaScenario._launch_cluster" % CTX, return_value=mock.MagicMock(id=42)) def test_setup_and_cleanup(self, mock_sahara_scenario__launch_cluster, mock_cleanup): sahara_ctx = sahara_cluster.SaharaCluster(self.context) launch_cluster_calls = [] for i in self.tenants: launch_cluster_calls.append(mock.call( flavor_id=None, plugin_name="test_plugin", hadoop_version="test_version", master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", workers_count=2, image_id=self.context["tenants"][i]["sahara"]["image"], floating_ip_pool=None, volumes_per_node=None, 
volumes_size=1, auto_security_group=True, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, wait_active=False, use_autoconfig=True )) self.clients("sahara").clusters.get.side_effect = [ mock.MagicMock(status="not-active"), mock.MagicMock(status="active")] sahara_ctx.setup() mock_sahara_scenario__launch_cluster.assert_has_calls( launch_cluster_calls) sahara_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["sahara.clusters"], users=self.context["users"], superclass=sahara_utils.SaharaScenario, task_id=self.context["task"]["uuid"]) @mock.patch("%s.sahara_cluster.utils.SaharaScenario._launch_cluster" % CTX, return_value=mock.MagicMock(id=42)) def test_setup_and_cleanup_error(self, mock_sahara_scenario__launch_cluster): sahara_ctx = sahara_cluster.SaharaCluster(self.context) launch_cluster_calls = [] for i in self.tenants: launch_cluster_calls.append(mock.call( flavor_id=None, plugin_name="test_plugin", hadoop_version="test_version", master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", workers_count=2, image_id=self.context["tenants"][i]["sahara"]["image"], floating_ip_pool=None, volumes_per_node=None, volumes_size=1, auto_security_groups=True, security_groups=None, node_configs=None, cluster_configs=None, wait_active=False, use_autoconfig=True )) self.clients("sahara").clusters.get.side_effect = [ mock.MagicMock(status="not-active"), mock.MagicMock(status="error") ] self.assertRaises(exceptions.SaharaClusterFailure, sahara_ctx.setup) rally-0.9.1/tests/unit/plugins/openstack/context/sahara/test_sahara_input_data_sources.py0000664000567000056710000001426713073417720033362 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.sahara import sahara_input_data_sources from rally.plugins.openstack.scenarios.sahara import utils as sahara_utils from tests.unit import test CTX = "rally.plugins.openstack.context.sahara" class SaharaInputDataSourcesTestCase(test.ScenarioTestCase): def setUp(self): super(SaharaInputDataSourcesTestCase, self).setUp() self.tenants_num = 2 self.users_per_tenant = 2 self.task = mock.MagicMock() self.tenants = {} self.users = [] for i in range(self.tenants_num): tenant_id = "tenant_%d" % i self.tenants[tenant_id] = {"id": tenant_id, "name": tenant_id + "_name", "sahara": {"image": "foo_image"}} for u in range(self.users_per_tenant): user_id = "%s_user_%d" % (tenant_id, u) self.users.append( {"id": user_id, "tenant_id": tenant_id, "credential": mock.Mock(auth_url="foo_url", username=user_id + "_name", password="foo_password")}) self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_input_data_sources": { "input_type": "hdfs", "input_url": "hdfs://test_host/", }, }, "admin": {"credential": mock.MagicMock()}, "users": self.users, "tenants": self.tenants }) @mock.patch("%s.sahara_input_data_sources.resource_manager.cleanup" % CTX) @mock.patch("%s.sahara_input_data_sources.osclients" % CTX) def test_setup_and_cleanup(self, mock_osclients, mock_cleanup): mock_sahara = mock_osclients.Clients.return_value.sahara.return_value mock_sahara.data_sources.create.return_value = mock.MagicMock(id=42) sahara_ctx = sahara_input_data_sources.SaharaInputDataSources( 
self.context) sahara_ctx.generate_random_name = mock.Mock() input_ds_crete_calls = [] for i in range(self.tenants_num): input_ds_crete_calls.append(mock.call( name=sahara_ctx.generate_random_name.return_value, description="", data_source_type="hdfs", url="hdfs://test_host/")) sahara_ctx.setup() mock_sahara.data_sources.create.assert_has_calls( input_ds_crete_calls) sahara_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["sahara.data_sources"], users=self.context["users"], superclass=sahara_utils.SaharaScenario, task_id=self.context["task"]["uuid"]) @mock.patch("requests.get") @mock.patch("%s.sahara_input_data_sources.osclients" % CTX) @mock.patch("%s.sahara_input_data_sources.resource_manager" % CTX) @mock.patch("%s.sahara_input_data_sources.swift_utils" % CTX) def test_setup_inputs_swift(self, mock_swift_utils, mock_resource_manager, mock_osclients, mock_get): mock_swift_scenario = mock.Mock() mock_swift_scenario._create_container.side_effect = ( lambda container_name: "container_%s" % container_name) mock_swift_scenario._upload_object.side_effect = iter( ["uploaded_%d" % i for i in range(10)]) mock_swift_utils.SwiftScenario.return_value = mock_swift_scenario self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_input_data_sources": { "input_type": "swift", "input_url": "swift://rally.sahara/input_url", "swift_files": [{ "name": "first", "download_url": "http://host"}] }, }, "admin": {"credential": mock.MagicMock()}, "task": mock.MagicMock(), "users": self.users, "tenants": self.tenants }) sahara_ctx = sahara_input_data_sources.SaharaInputDataSources( self.context) sahara_ctx.generate_random_name = mock.Mock( side_effect=iter(["random_name_%d" % i for i in range(10)])) input_ds_create_calls = [] for i in range(self.tenants_num): input_ds_create_calls.append(mock.call( name="random_name_%d" % i, description="", data_source_type="swift", url="swift://rally.sahara/input_url", 
credential_user="tenant_%d_user_0_name" % i, credential_pass="foo_password" )) sahara_ctx.setup() self.assertEqual( input_ds_create_calls, (mock_osclients.Clients.return_value.sahara.return_value .data_sources.create.mock_calls)) self.assertEqual({"container_name": "container_rally_rally", "swift_objects": ["uploaded_0", "uploaded_1"]}, self.context["sahara"]) sahara_ctx.cleanup() mock_resource_manager.cleanup.assert_called_once_with( names=["sahara.data_sources"], users=self.context["users"], superclass=sahara_utils.SaharaScenario, task_id=self.context["task"]["uuid"]) rally-0.9.1/tests/unit/plugins/openstack/context/sahara/test_sahara_job_binaries.py0000664000567000056710000001270313073417717032114 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.context.sahara import sahara_job_binaries from rally.plugins.openstack.scenarios.sahara import utils as sahara_utils from tests.unit import test CTX = "rally.plugins.openstack.context.sahara" class SaharaJobBinariesTestCase(test.ScenarioTestCase): def setUp(self): super(SaharaJobBinariesTestCase, self).setUp() self.tenants_num = 2 self.users_per_tenant = 2 self.users = self.tenants_num * self.users_per_tenant self.task = mock.MagicMock() self.tenants = {} self.users_key = [] for i in range(self.tenants_num): self.tenants[str(i)] = {"id": str(i), "name": str(i), "sahara": {"image": "42"}} for j in range(self.users_per_tenant): self.users_key.append({"id": "%s_%s" % (str(i), str(j)), "tenant_id": str(i), "credential": "credential"}) self.user_key = [{"id": i, "tenant_id": j, "credential": "credential"} for j in range(self.tenants_num) for i in range(self.users_per_tenant)] self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, }, "sahara_job_binaries": { "libs": [ { "name": "test.jar", "download_url": "http://example.com/test.jar" } ], "mains": [ { "name": "test.jar", "download_url": "http://example.com/test.jar" } ] }, }, "admin": {"credential": mock.MagicMock()}, "task": mock.MagicMock(), "users": self.users_key, "tenants": self.tenants }) @mock.patch("%s.sahara_job_binaries.resource_manager.cleanup" % CTX) @mock.patch(("%s.sahara_job_binaries.SaharaJobBinaries." 
"download_and_save_lib") % CTX) @mock.patch("%s.sahara_job_binaries.osclients" % CTX) def test_setup_and_cleanup( self, mock_osclients, mock_sahara_job_binaries_download_and_save_lib, mock_cleanup): mock_sahara = mock_osclients.Clients(mock.MagicMock()).sahara() sahara_ctx = sahara_job_binaries.SaharaJobBinaries(self.context) download_calls = [] for i in range(self.tenants_num): download_calls.append(mock.call( sahara=mock_sahara, lib_type="mains", name="test.jar", download_url="http://example.com/test.jar", tenant_id=str(i))) download_calls.append(mock.call( sahara=mock_sahara, lib_type="libs", name="test.jar", download_url="http://example.com/test.jar", tenant_id=str(i))) sahara_ctx.setup() (mock_sahara_job_binaries_download_and_save_lib. assert_has_calls(download_calls)) sahara_ctx.cleanup() mock_cleanup.assert_called_once_with( names=["sahara.job_binary_internals", "sahara.job_binaries"], users=self.context["users"], superclass=sahara_utils.SaharaScenario, task_id=self.context["task"]["uuid"]) @mock.patch("%s.sahara_job_binaries.requests" % CTX) @mock.patch("%s.sahara_job_binaries.osclients" % CTX) def test_download_and_save_lib(self, mock_osclients, mock_requests): mock_requests.get.content.return_value = "some_binary_content" mock_sahara = mock_osclients.Clients(mock.MagicMock()).sahara() mock_sahara.job_binary_internals.create.return_value = ( mock.MagicMock(id=42)) sahara_ctx = sahara_job_binaries.SaharaJobBinaries(self.context) sahara_ctx.context["tenants"]["0"]["sahara"] = {"mains": []} sahara_ctx.context["tenants"]["0"]["sahara"]["libs"] = [] sahara_ctx.download_and_save_lib(sahara=mock_sahara, lib_type="mains", name="test_binary", download_url="http://somewhere", tenant_id="0") sahara_ctx.download_and_save_lib(sahara=mock_sahara, lib_type="libs", name="test_binary_2", download_url="http://somewhere", tenant_id="0") mock_requests.get.assert_called_once_with("http://somewhere") 
rally-0.9.1/tests/unit/plugins/openstack/context/vm/0000775000567000056710000000000013073420067023706 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/vm/__init__.py0000664000567000056710000000000013073417717026015 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/vm/test_image_command_customizer.py0000664000567000056710000000673613073417717032407 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for the image customizer using a command execution.""" import mock from rally import exceptions from rally.plugins.openstack.context.vm import image_command_customizer from tests.unit import test BASE = "rally.plugins.openstack.context.vm.image_command_customizer" class ImageCommandCustomizerContextVMTestCase(test.TestCase): def setUp(self): super(ImageCommandCustomizerContextVMTestCase, self).setUp() self.context = { "task": mock.MagicMock(), "config": { "image_command_customizer": { "image": {"name": "image"}, "flavor": {"name": "flavor"}, "username": "fedora", "password": "foo_password", "floating_network": "floating", "port": 1022, "command": { "interpreter": "foo_interpreter", "script_file": "foo_script" } } }, "admin": { "credential": "credential", } } self.user = {"keypair": {"private": "foo_private"}} self.fip = {"ip": "foo_ip"} @mock.patch("%s.vm_utils.VMScenario" % BASE) def test_customize_image(self, mock_vm_scenario): mock_vm_scenario.return_value._run_command.return_value = ( 0, "foo_stdout", "foo_stderr") customizer = image_command_customizer.ImageCommandCustomizerContext( self.context) retval = customizer.customize_image(server=None, ip=self.fip, user=self.user) mock_vm_scenario.assert_called_once_with(customizer.context) mock_vm_scenario.return_value._run_command.assert_called_once_with( "foo_ip", 1022, "fedora", "foo_password", pkey="foo_private", command={"interpreter": "foo_interpreter", "script_file": "foo_script"}) self.assertEqual((0, "foo_stdout", "foo_stderr"), retval) @mock.patch("%s.vm_utils.VMScenario" % BASE) def test_customize_image_fail(self, mock_vm_scenario): mock_vm_scenario.return_value._run_command.return_value = ( 1, "foo_stdout", "foo_stderr") customizer = image_command_customizer.ImageCommandCustomizerContext( self.context) exc = self.assertRaises( exceptions.ScriptError, customizer.customize_image, server=None, ip=self.fip, user=self.user) str_exc = str(exc) self.assertIn("foo_stdout", str_exc) 
self.assertIn("foo_stderr", str_exc) mock_vm_scenario.return_value._run_command.assert_called_once_with( "foo_ip", 1022, "fedora", "foo_password", pkey="foo_private", command={"interpreter": "foo_interpreter", "script_file": "foo_script"}) rally-0.9.1/tests/unit/plugins/openstack/context/vm/test_custom_image.py0000664000567000056710000002231313073417717030004 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for the Benchmark VM image context.""" import mock from rally.plugins.openstack.context.vm import custom_image from rally.task import context from tests.unit import test BASE = "rally.plugins.openstack.context.vm.custom_image" @context.configure(name="test_custom_image", order=500) class TestImageGenerator(custom_image.BaseCustomImageGenerator): def _customize_image(self, *args): pass class BaseCustomImageContextVMTestCase(test.TestCase): def setUp(self): super(BaseCustomImageContextVMTestCase, self).setUp() self.context = test.get_test_context() self.context.update({ "config": { "test_custom_image": { "image": {"name": "image"}, "flavor": {"name": "flavor"}, "username": "fedora", "floating_network": "floating", "port": 1022, } }, "admin": { "credential": "credential", }, "users": [ {"tenant_id": "tenant_id0"}, {"tenant_id": "tenant_id1"}, {"tenant_id": "tenant_id2"} ], "tenants": { "tenant_id0": {}, "tenant_id1": {}, "tenant_id2": {} } }) @mock.patch("%s.osclients.Clients" % BASE) 
@mock.patch("%s.types.GlanceImage.transform" % BASE, return_value="image") @mock.patch("%s.types.Flavor.transform" % BASE, return_value="flavor") @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") @mock.patch("%s.vmtasks.BootRuncommandDelete" % BASE) def test_create_one_image( self, mock_boot_runcommand_delete, mock_glance_wrap, mock_flavor_transform, mock_glance_image_transform, mock_clients ): ip = {"ip": "foo_ip", "id": "foo_id", "is_floating": True} fake_server = mock.Mock() fake_image = mock.MagicMock( to_dict=mock.MagicMock(return_value={"id": "image"})) scenario = mock_boot_runcommand_delete.return_value = mock.MagicMock( _create_image=mock.MagicMock(return_value=fake_image), _boot_server_with_fip=mock.MagicMock( return_value=(fake_server, ip)) ) generator_ctx = TestImageGenerator(self.context) generator_ctx._customize_image = mock.MagicMock() user = { "credential": "credential", "keypair": {"name": "keypair_name"}, "secgroup": {"name": "secgroup_name"} } custom_image = generator_ctx.create_one_image(user, foo_arg="foo_value") mock_glance_wrap.assert_called_once_with( mock_clients.return_value.glance, generator_ctx) mock_flavor_transform.assert_called_once_with( clients=mock_clients.return_value, resource_config={"name": "flavor"}) mock_glance_image_transform.assert_called_once_with( clients=mock_clients.return_value, resource_config={"name": "image"}) mock_boot_runcommand_delete.assert_called_once_with( self.context, clients=mock_clients.return_value) scenario._boot_server_with_fip.assert_called_once_with( image="image", flavor="flavor", floating_network="floating", key_name="keypair_name", security_groups=["secgroup_name"], userdata=None, foo_arg="foo_value") scenario._stop_server.assert_called_once_with(fake_server) generator_ctx._customize_image.assert_called_once_with( fake_server, ip, user) scenario._create_image.assert_called_once_with(fake_server) mock_glance_wrap.return_value.set_visibility.assert_called_once_with( fake_image) 
scenario._delete_server_with_fip.assert_called_once_with( fake_server, ip) self.assertEqual({"id": "image"}, custom_image) @mock.patch("%s.osclients.Clients" % BASE) @mock.patch("%s.types.GlanceImage.transform" % BASE, return_value="image") @mock.patch("%s.types.Flavor.transform" % BASE, return_value="flavor") @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") @mock.patch("%s.vmtasks.BootRuncommandDelete" % BASE) def test_create_one_image_cleanup( self, mock_boot_runcommand_delete, mock_glance_wrap, mock_flavor_transform, mock_glance_image_transform, mock_clients ): ip = {"ip": "foo_ip", "id": "foo_id", "is_floating": True} fake_server = mock.Mock() fake_image = mock.MagicMock( to_dict=mock.MagicMock(return_value={"id": "image"})) scenario = mock_boot_runcommand_delete.return_value = mock.MagicMock( _create_image=mock.MagicMock(return_value=fake_image), _boot_server_with_fip=mock.MagicMock( return_value=(fake_server, ip)), _generate_random_name=mock.MagicMock(return_value="foo_name"), ) generator_ctx = TestImageGenerator(self.context) generator_ctx._customize_image = mock.MagicMock( side_effect=ValueError()) user = { "credential": "credential", "keypair": {"name": "keypair_name"}, "secgroup": {"name": "secgroup_name"} } self.assertRaises( ValueError, generator_ctx.create_one_image, user, foo_arg="foo_value") generator_ctx._customize_image.assert_called_once_with( fake_server, ip, user) scenario._delete_server_with_fip.assert_called_once_with( fake_server, ip) @mock.patch("%s.nova_utils.NovaScenario" % BASE) @mock.patch("%s.osclients.Clients" % BASE) def test_delete_one_image(self, mock_clients, mock_nova_scenario): nova_scenario = mock_nova_scenario.return_value = mock.MagicMock() nova_client = nova_scenario.clients.return_value nova_client.images.get.return_value = "image_obj" generator_ctx = TestImageGenerator(self.context) user = {"credential": "credential", "keypair": {"name": "keypair_name"}} custom_image = {"id": "image"} 
generator_ctx.delete_one_image(user, custom_image) mock_nova_scenario.assert_called_once_with( context=self.context, clients=mock_clients.return_value) nova_scenario.clients.assert_called_once_with("nova") nova_client.images.get.assert_called_once_with("image") nova_scenario._delete_image.assert_called_once_with("image_obj") def test_setup_admin(self): self.context["tenants"]["tenant_id0"]["networks"] = [ {"id": "network_id"}] generator_ctx = TestImageGenerator(self.context) generator_ctx.create_one_image = mock.Mock( return_value="custom_image") generator_ctx.make_image_public = mock.Mock() generator_ctx.setup() generator_ctx.create_one_image.assert_called_once_with( self.context["users"][0], nics=[{"net-id": "network_id"}]) def test_cleanup_admin(self): tenant = self.context["tenants"]["tenant_id0"] custom_image = tenant["custom_image"] = {"id": "image"} generator_ctx = TestImageGenerator(self.context) generator_ctx.delete_one_image = mock.Mock() generator_ctx.cleanup() generator_ctx.delete_one_image.assert_called_once_with( self.context["users"][0], custom_image) def test_setup(self): self.context.pop("admin") generator_ctx = TestImageGenerator(self.context) generator_ctx.create_one_image = mock.Mock( side_effect=["custom_image0", "custom_image1", "custom_image2"]) generator_ctx.setup() self.assertEqual( [mock.call(user) for user in self.context["users"]], generator_ctx.create_one_image.mock_calls) for i in range(3): self.assertEqual( "custom_image%d" % i, self.context["tenants"]["tenant_id%d" % i]["custom_image"] ) def test_cleanup(self): self.context.pop("admin") for i in range(3): self.context["tenants"]["tenant_id%d" % i]["custom_image"] = { "id": "custom_image%d" % i} generator_ctx = TestImageGenerator(self.context) generator_ctx.delete_one_image = mock.Mock() generator_ctx.cleanup() self.assertEqual( [mock.call(self.context["users"][i], {"id": "custom_image%d" % i}) for i in range(3)], generator_ctx.delete_one_image.mock_calls) 
rally-0.9.1/tests/unit/plugins/openstack/context/manila/0000775000567000056710000000000013073420067024525 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/manila/__init__.py0000664000567000056710000000000013073417717026634 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/manila/test_manila_shares.py0000664000567000056710000001704113073417720030751 0ustar jenkinsjenkins00000000000000# Copyright 2016 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy import ddt import mock import six from rally import consts as rally_consts from rally.plugins.openstack.context.manila import consts from rally.plugins.openstack.context.manila import manila_shares from tests.unit import test MANILA_UTILS_PATH = ( "rally.plugins.openstack.scenarios.manila.utils.ManilaScenario.") class Fake(object): def __init__(self, **kwargs): for k, v in kwargs.items(): setattr(self, k, v) def __getitem__(self, item): return getattr(self, item) def to_dict(self): return self.__dict__ @ddt.ddt class SharesTestCase(test.TestCase): TENANTS_AMOUNT = 3 USERS_PER_TENANT = 4 SHARES_PER_TENANT = 7 SHARE_NETWORKS = [{"id": "sn_%s_id" % d} for d in range(3)] def _get_context(self, use_share_networks=False, shares_per_tenant=None, share_size=1, share_proto="fake_proto", share_type=None): tenants = {} for t_id in range(self.TENANTS_AMOUNT): tenants[six.text_type(t_id)] = {"name": six.text_type(t_id)} users = [] for t_id in sorted(list(tenants.keys())): for i in range(self.USERS_PER_TENANT): users.append( {"id": i, "tenant_id": t_id, "credential": "fake"}) context = { "config": { "users": { "tenants": self.TENANTS_AMOUNT, "users_per_tenant": self.USERS_PER_TENANT, "user_choice_method": "round_robin", }, consts.SHARE_NETWORKS_CONTEXT_NAME: { "use_share_networks": use_share_networks, "share_networks": self.SHARE_NETWORKS, }, consts.SHARES_CONTEXT_NAME: { "shares_per_tenant": ( shares_per_tenant or self.SHARES_PER_TENANT), "size": share_size, "share_proto": share_proto, "share_type": share_type, }, }, "admin": { "credential": mock.MagicMock(), }, "task": mock.MagicMock(), "users": users, "tenants": tenants, } if use_share_networks: for t in context["tenants"].keys(): context["tenants"][t][consts.SHARE_NETWORKS_CONTEXT_NAME] = { "share_networks": self.SHARE_NETWORKS, } return context def test_init(self): ctxt = { "task": mock.MagicMock(), "config": { consts.SHARES_CONTEXT_NAME: {"foo": "bar"}, "fake": {"fake_key": "fake_value"}, }, } inst = 
manila_shares.Shares(ctxt) self.assertEqual( {"foo": "bar", "shares_per_tenant": 1, "size": 1, "share_proto": "NFS", "share_type": None}, inst.config) self.assertIn( rally_consts.JSON_SCHEMA, inst.CONFIG_SCHEMA.get("$schema")) self.assertFalse(inst.CONFIG_SCHEMA.get("additionalProperties")) self.assertEqual("object", inst.CONFIG_SCHEMA.get("type")) props = inst.CONFIG_SCHEMA.get("properties", {}) self.assertEqual( {"minimum": 1, "type": "integer"}, props.get("shares_per_tenant")) self.assertEqual({"minimum": 1, "type": "integer"}, props.get("size")) self.assertEqual({"type": "string"}, props.get("share_proto")) self.assertEqual({"type": "string"}, props.get("share_type")) self.assertEqual(455, inst.get_order()) self.assertEqual(consts.SHARES_CONTEXT_NAME, inst.get_name()) @mock.patch(MANILA_UTILS_PATH + "_create_share") @ddt.data(True, False) def test_setup( self, use_share_networks, mock_manila_scenario__create_share): share_type = "fake_share_type" ctxt = self._get_context( use_share_networks=use_share_networks, share_type=share_type) inst = manila_shares.Shares(ctxt) shares = [ Fake(id="fake_share_id_%d" % s_id) for s_id in range(self.TENANTS_AMOUNT * self.SHARES_PER_TENANT) ] mock_manila_scenario__create_share.side_effect = shares expected_ctxt = copy.deepcopy(ctxt) inst.setup() self.assertEqual( self.TENANTS_AMOUNT * self.SHARES_PER_TENANT, mock_manila_scenario__create_share.call_count) for d in range(self.TENANTS_AMOUNT): self.assertEqual( [ s.to_dict() for s in shares[ (d * self.SHARES_PER_TENANT):( d * self.SHARES_PER_TENANT + self.SHARES_PER_TENANT ) ] ], inst.context.get("tenants", {}).get("%s" % d, {}).get("shares") ) self.assertEqual(expected_ctxt["task"], inst.context.get("task")) self.assertEqual(expected_ctxt["config"], inst.context.get("config")) self.assertEqual(expected_ctxt["users"], inst.context.get("users")) if use_share_networks: mock_calls = [ mock.call( share_proto=ctxt["config"][consts.SHARES_CONTEXT_NAME][ "share_proto"], 
size=ctxt["config"][consts.SHARES_CONTEXT_NAME]["size"], share_type=ctxt["config"][consts.SHARES_CONTEXT_NAME][ "share_type"], share_network=self.SHARE_NETWORKS[ int(t_id) % len(self.SHARE_NETWORKS)]["id"] ) for t_id in expected_ctxt["tenants"].keys() ] else: mock_calls = [ mock.call( share_proto=ctxt["config"][consts.SHARES_CONTEXT_NAME][ "share_proto"], size=ctxt["config"][consts.SHARES_CONTEXT_NAME]["size"], share_type=ctxt["config"][consts.SHARES_CONTEXT_NAME][ "share_type"], ) for t_id in expected_ctxt["tenants"].keys() ] mock_manila_scenario__create_share.assert_has_calls( mock_calls, any_order=True) @mock.patch(MANILA_UTILS_PATH + "_create_share") @mock.patch("rally.plugins.openstack.cleanup.manager.cleanup") def test_cleanup( self, mock_cleanup_manager_cleanup, mock_manila_scenario__create_share): ctxt = self._get_context() inst = manila_shares.Shares(ctxt) shares = [ Fake(id="fake_share_id_%d" % s_id) for s_id in range(self.TENANTS_AMOUNT * self.SHARES_PER_TENANT) ] mock_manila_scenario__create_share.side_effect = shares inst.setup() inst.cleanup() mock_cleanup_manager_cleanup.assert_called_once_with( names=["manila.shares"], users=inst.context.get("users", []), ) rally-0.9.1/tests/unit/plugins/openstack/context/manila/test_manila_security_services.py0000664000567000056710000001443713073417720033244 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt
import mock
import six

from rally.plugins.openstack.context.manila import consts
from rally.plugins.openstack.context.manila import manila_security_services
from tests.unit import test

CONTEXT_NAME = consts.SECURITY_SERVICES_CONTEXT_NAME


@ddt.ddt
class SecurityServicesTestCase(test.ScenarioTestCase):
    """Unit tests for the manila "security services" context plugin."""

    TENANTS_AMOUNT = 3
    USERS_PER_TENANT = 4
    # One fake security-service definition per supported service type.
    SECURITY_SERVICES = [
        {"security_service_type": ss_type,
         "dns_ip": "fake_dns_ip_%s" % ss_type,
         "server": "fake_server_%s" % ss_type,
         "domain": "fake_domain_%s" % ss_type,
         "user": "fake_user_%s" % ss_type,
         "password": "fake_password_%s" % ss_type}
        for ss_type in ("ldap", "kerberos", "active_directory")
    ]

    def _get_context(self, security_services=None, networks_per_tenant=2,
                     neutron_network_provider=True):
        """Build a fake task context dict for the tests.

        :param security_services: list of security-service configs;
            defaults to ``self.SECURITY_SERVICES``
        :param networks_per_tenant: how many fake networks each tenant gets
        :param neutron_network_provider: when True networks carry "subnets",
            otherwise a nova-net style "cidr"
        """
        if security_services is None:
            security_services = self.SECURITY_SERVICES

        tenants = {}
        for tenant_idx in range(self.TENANTS_AMOUNT):
            tenant_id = six.text_type(tenant_idx)
            networks = []
            for net_idx in range(networks_per_tenant):
                network = {"id": "fake_net_id_%s" % net_idx}
                if neutron_network_provider:
                    network["subnets"] = [
                        "fake_subnet_id_of_net_%s" % net_idx]
                else:
                    network["cidr"] = "101.0.5.0/24"
                networks.append(network)
            tenants[tenant_id] = {"name": tenant_id, "networks": networks}

        users = [
            {"id": user_idx, "tenant_id": tenant_id, "endpoint": "fake"}
            for tenant_id in tenants.keys()
            for user_idx in range(self.USERS_PER_TENANT)
        ]

        return {
            "config": {
                "users": {
                    "tenants": self.TENANTS_AMOUNT,
                    "users_per_tenant": self.USERS_PER_TENANT,
                },
                CONTEXT_NAME: {
                    "security_services": security_services,
                },
            },
            "admin": {
                "endpoint": mock.MagicMock(),
            },
            "task": mock.MagicMock(),
            "users": users,
            "tenants": tenants,
        }

    def test_init(self):
        # Only the manila-specific part of the config must be picked up.
        context = {
            "task": mock.MagicMock(),
            "config": {
                CONTEXT_NAME: {"foo": "bar"},
                "not_manila": {"not_manila_key": "not_manila_value"},
            }
        }

        inst = manila_security_services.SecurityServices(context)

        self.assertEqual("bar", inst.config.get("foo"))
        self.assertFalse(inst.config.get("security_services"))
        self.assertEqual(445, inst.get_order())
        self.assertEqual(CONTEXT_NAME, inst.get_name())

    @mock.patch.object(manila_security_services.manila_utils,
                       "ManilaScenario")
    @ddt.data(True, False)
    def test_setup_security_services_set(self, neutron_network_provider,
                                         mock_manila_scenario):
        ctxt = self._get_context(
            neutron_network_provider=neutron_network_provider)
        inst = manila_security_services.SecurityServices(ctxt)

        inst.setup()

        # One scenario instance per tenant, each built from that tenant's
        # first user (the one with id == 0).
        self.assertEqual(
            self.TENANTS_AMOUNT, mock_manila_scenario.call_count)
        self.assertEqual(
            mock_manila_scenario.call_args_list,
            [mock.call({
                "task": inst.task,
                "config": {"api_versions": []},
                "user": user})
             for user in inst.context["users"] if user["id"] == 0]
        )
        mock_create_security_service = (
            mock_manila_scenario.return_value._create_security_service)
        # Every configured security service is created and serialized.
        expected_calls = [
            expected for ss in self.SECURITY_SERVICES
            for expected in (mock.call(**ss), mock.call().to_dict())
        ]
        mock_create_security_service.assert_has_calls(expected_calls)
        self.assertEqual(
            self.TENANTS_AMOUNT * len(self.SECURITY_SERVICES),
            mock_create_security_service.call_count)
        self.assertEqual(
            self.TENANTS_AMOUNT,
            len(inst.context["config"][CONTEXT_NAME]["security_services"]))
        for tenant in inst.context["tenants"]:
            self.assertEqual(
                self.TENANTS_AMOUNT,
                len(inst.context["tenants"][tenant][CONTEXT_NAME][
                    "security_services"])
            )

    @mock.patch.object(manila_security_services.manila_utils,
                       "ManilaScenario")
    def test_setup_security_services_not_set(self, mock_manila_scenario):
        # With an empty service list nothing may be created, but the
        # context keys must still be present (and empty).
        ctxt = self._get_context(security_services=[])
        inst = manila_security_services.SecurityServices(ctxt)

        inst.setup()

        self.assertFalse(mock_manila_scenario.called)
        self.assertFalse(
            mock_manila_scenario.return_value._create_security_service.called)
        self.assertIn(CONTEXT_NAME, inst.context["config"])
        self.assertIn(
            "security_services", inst.context["config"][CONTEXT_NAME])
        self.assertEqual(
            0,
            len(inst.context["config"][CONTEXT_NAME]["security_services"]))
        for tenant in inst.context["tenants"]:
            self.assertEqual(
                0,
                len(inst.context["tenants"][tenant][CONTEXT_NAME][
                    "security_services"])
            )

    @mock.patch.object(manila_security_services, "resource_manager")
    def test_cleanup_security_services_enabled(self, mock_resource_manager):
        ctxt = self._get_context()
        inst = manila_security_services.SecurityServices(ctxt)

        inst.cleanup()

        mock_resource_manager.cleanup.assert_called_once_with(
            names=["manila.security_services"],
            users=ctxt["users"])
import copy

import ddt
import mock
import six

from rally import consts as rally_consts
from rally import exceptions
from rally.plugins.openstack.context.manila import consts
from rally.plugins.openstack.context.manila import manila_share_networks
from tests.unit import test

# Prefix for patching methods of the manila scenario helper class.
MANILA_UTILS_PATH = ("rally.plugins.openstack.scenarios.manila.utils."
                     "ManilaScenario.")


class Fake(object):
    """Minimal stand-in for API resource objects used by the context.

    Supports attribute access, item access and ``to_dict`` the same way
    the real manila client resources do.
    """

    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)

    def __getitem__(self, item):
        return getattr(self, item)

    def to_dict(self):
        return self.__dict__


@ddt.ddt
class ShareNetworksTestCase(test.TestCase):
    """Unit tests for the manila "share networks" context plugin."""

    TENANTS_AMOUNT = 3
    USERS_PER_TENANT = 4
    # One fake security-service definition per supported service type.
    SECURITY_SERVICES = [
        {"type": ss_type,
         "dns_ip": "fake_dns_ip_%s" % ss_type,
         "server": "fake_server_%s" % ss_type,
         "domain": "fake_domain_%s" % ss_type,
         "user": "fake_user_%s" % ss_type,
         "password": "fake_password_%s" % ss_type,
         "name": "fake_optional_name_%s" % ss_type}
        for ss_type in ("ldap", "kerberos", "active_directory")
    ]

    def _get_context(self, use_security_services=False, networks_per_tenant=2,
                     neutron_network_provider=True):
        """Build a fake task context dict for the autocreation tests.

        :param use_security_services: include ``SECURITY_SERVICES`` in the
            security-services context config when True
        :param networks_per_tenant: how many fake networks each tenant gets
        :param neutron_network_provider: when True networks carry "subnets",
            otherwise a nova-net style "cidr"
        """
        tenants = {}
        for t_id in range(self.TENANTS_AMOUNT):
            tenants[six.text_type(t_id)] = {"name": six.text_type(t_id)}
            tenants[six.text_type(t_id)]["networks"] = []
            for i in range(networks_per_tenant):
                network = {"id": "fake_net_id_%s" % i}
                if neutron_network_provider:
                    network["subnets"] = ["fake_subnet_id_of_net_%s" % i]
                else:
                    network["cidr"] = "101.0.5.0/24"
                tenants[six.text_type(t_id)]["networks"].append(network)
        users = []
        for t_id in tenants.keys():
            for i in range(self.USERS_PER_TENANT):
                users.append(
                    {"id": i, "tenant_id": t_id, "credential": "fake"})
        context = {
            "config": {
                "users": {
                    "tenants": self.TENANTS_AMOUNT,
                    "users_per_tenant": self.USERS_PER_TENANT,
                    "random_user_choice": False,
                },
                consts.SHARE_NETWORKS_CONTEXT_NAME: {
                    "use_share_networks": True,
                    "share_networks": [],
                },
                consts.SECURITY_SERVICES_CONTEXT_NAME: {
                    "security_services": (
                        self.SECURITY_SERVICES
                        if use_security_services else [])
                },
                "network": {
                    "networks_per_tenant": networks_per_tenant,
                    "start_cidr": "101.0.5.0/24",
                },
            },
            "admin": {
                "credential": mock.MagicMock(),
            },
            "task": mock.MagicMock(),
            "users": users,
            "tenants": tenants,
            "user_choice_method": "random",
        }
        return context

    def setUp(self):
        # NOTE(review): this was ``super(self.__class__, self).setUp()``,
        # which recurses forever as soon as this class is subclassed;
        # always name the class explicitly in the two-argument super form.
        super(ShareNetworksTestCase, self).setUp()
        # Context describing pre-existing (user-provided) share networks.
        self.ctxt_use_existing = {
            "task": mock.MagicMock(),
            "config": {
                "existing_users": {"foo": "bar"},
                consts.SHARE_NETWORKS_CONTEXT_NAME: {
                    "use_share_networks": True,
                    "share_networks": {
                        "tenant_1_id": ["sn_1_id", "sn_2_name"],
                        "tenant_2_name": ["sn_3_id", "sn_4_name", "sn_5_id"],
                    },
                },
            },
            "tenants": {
                "tenant_1_id": {"id": "tenant_1_id",
                                "name": "tenant_1_name"},
                "tenant_2_id": {"id": "tenant_2_id",
                                "name": "tenant_2_name"},
            },
            "users": [
                {"tenant_id": "tenant_1_id", "credential": {"c1": "foo"}},
                {"tenant_id": "tenant_2_id", "credential": {"c2": "bar"}},
            ],
        }
        # Share networks matching the ids/names referenced above.
        self.existing_sns = [
            Fake(id="sn_%s_id" % i, name="sn_%s_name" % i)
            for i in range(1, 6)
        ]

    def test_init(self):
        context = {
            "task": mock.MagicMock(),
            "config": {
                consts.SHARE_NETWORKS_CONTEXT_NAME: {"foo": "bar"},
                "not_manila": {"not_manila_key": "not_manila_value"},
            },
        }

        inst = manila_share_networks.ShareNetworks(context)

        self.assertEqual(
            {"foo": "bar", "share_networks": {}, "use_share_networks": False},
            inst.config)
        self.assertIn(
            rally_consts.JSON_SCHEMA, inst.CONFIG_SCHEMA.get("$schema"))
        self.assertFalse(inst.CONFIG_SCHEMA.get("additionalProperties"))
        self.assertEqual("object", inst.CONFIG_SCHEMA.get("type"))
        props = inst.CONFIG_SCHEMA.get("properties", {})
        self.assertEqual({"type": "object"}, props.get("share_networks"))
        self.assertEqual({"type": "boolean"}, props.get("use_share_networks"))
        self.assertEqual(450, inst.get_order())
        self.assertEqual(
            consts.SHARE_NETWORKS_CONTEXT_NAME, inst.get_name())

    def test_setup_share_networks_disabled(self):
        # With the feature switched off setup() must be a no-op.
        ctxt = {
            "task": mock.MagicMock(),
            "config": {
                consts.SHARE_NETWORKS_CONTEXT_NAME: {
                    "use_share_networks": False,
                },
            },
            consts.SHARE_NETWORKS_CONTEXT_NAME: {
                "delete_share_networks": False,
            },
        }
        inst = manila_share_networks.ShareNetworks(ctxt)
        expected_ctxt = copy.deepcopy(inst.context)

        inst.setup()

        self.assertEqual(expected_ctxt, inst.context)

    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_list_share_networks")
    def test_setup_use_existing_share_networks(
            self, mock_manila_scenario__list_share_networks, mock_clients):
        existing_sns = self.existing_sns
        expected_ctxt = copy.deepcopy(self.ctxt_use_existing)
        inst = manila_share_networks.ShareNetworks(self.ctxt_use_existing)
        mock_manila_scenario__list_share_networks.return_value = (
            self.existing_sns)
        # Share networks may be referenced by id or by name; both must be
        # resolved against the listing and distributed over the tenants.
        expected_ctxt.update({
            "delete_share_networks": False,
            "tenants": {
                "tenant_1_id": {
                    "id": "tenant_1_id",
                    "name": "tenant_1_name",
                    consts.SHARE_NETWORKS_CONTEXT_NAME: {
                        "share_networks": [
                            sn.to_dict() for sn in existing_sns[0:2]],
                    },
                },
                "tenant_2_id": {
                    "id": "tenant_2_id",
                    "name": "tenant_2_name",
                    consts.SHARE_NETWORKS_CONTEXT_NAME: {
                        "share_networks": [
                            sn.to_dict() for sn in existing_sns[2:5]],
                    },
                },
            }
        })

        inst.setup()

        self.assertEqual(expected_ctxt["task"], inst.context.get("task"))
        self.assertEqual(expected_ctxt["config"], inst.context.get("config"))
        self.assertEqual(expected_ctxt["users"], inst.context.get("users"))
        self.assertEqual(
            False,
            inst.context.get(consts.SHARE_NETWORKS_CONTEXT_NAME, {}).get(
                "delete_share_networks"))
        self.assertEqual(expected_ctxt["tenants"], inst.context.get("tenants"))

    def test_setup_use_existing_share_networks_tenant_not_found(self):
        # Config references tenants that do not exist in the context.
        ctxt = copy.deepcopy(self.ctxt_use_existing)
        ctxt.update({"tenants": {}})
        inst = manila_share_networks.ShareNetworks(ctxt)

        self.assertRaises(exceptions.ContextSetupFailure, inst.setup)

    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_list_share_networks")
    def test_setup_use_existing_share_networks_sn_not_found(
            self, mock_manila_scenario__list_share_networks, mock_clients):
        # Config references a share network absent from the listing.
        ctxt = copy.deepcopy(self.ctxt_use_existing)
        ctxt["config"][consts.SHARE_NETWORKS_CONTEXT_NAME][
            "share_networks"] = {"tenant_1_id": ["foo"]}
        inst = manila_share_networks.ShareNetworks(ctxt)
        mock_manila_scenario__list_share_networks.return_value = (
            self.existing_sns)

        self.assertRaises(exceptions.ContextSetupFailure, inst.setup)

    def test_setup_use_existing_share_networks_with_empty_list(self):
        # "use_share_networks" without any networks provided is an error.
        ctxt = copy.deepcopy(self.ctxt_use_existing)
        ctxt["config"][consts.SHARE_NETWORKS_CONTEXT_NAME][
            "share_networks"] = {}
        inst = manila_share_networks.ShareNetworks(ctxt)

        self.assertRaises(exceptions.ContextSetupFailure, inst.setup)

    @ddt.data(True, False)
    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_create_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_add_security_service_to_share_network")
    def test_setup_autocreate_share_networks_with_security_services(
            self,
            neutron,
            mock_manila_scenario__add_security_service_to_share_network,
            mock_manila_scenario__create_share_network,
            mock_clients):
        networks_per_tenant = 2
        ctxt = self._get_context(
            networks_per_tenant=networks_per_tenant,
            neutron_network_provider=neutron,
            use_security_services=True,
        )
        inst = manila_share_networks.ShareNetworks(ctxt)
        # Simulate the security-services context having run before us.
        for tenant_id in list(ctxt["tenants"].keys()):
            inst.context["tenants"][tenant_id][
                consts.SECURITY_SERVICES_CONTEXT_NAME] = {
                    "security_services": [
                        Fake(id="fake_id").to_dict() for i in (1, 2, 3)
                    ]
            }

        inst.setup()

        self.assertEqual(ctxt["task"], inst.context.get("task"))
        self.assertEqual(ctxt["config"], inst.context.get("config"))
        self.assertEqual(ctxt["users"], inst.context.get("users"))
        self.assertEqual(ctxt["tenants"], inst.context.get("tenants"))
        mock_add_security_service_to_share_network = (
            mock_manila_scenario__add_security_service_to_share_network)
        # Every security service is attached to every created share network.
        mock_add_security_service_to_share_network.assert_has_calls([
            mock.call(mock.ANY, mock.ANY)
            for i in range(
                self.TENANTS_AMOUNT *
                networks_per_tenant *
                len(self.SECURITY_SERVICES))])
        if neutron:
            sn_args = {
                "neutron_net_id": mock.ANY,
                "neutron_subnet_id": mock.ANY,
            }
        else:
            sn_args = {"nova_net_id": mock.ANY}
        expected_calls = [
            mock.call(**sn_args),
            mock.call().to_dict(),
            mock.ANY,
            mock.ANY,
            mock.ANY,
        ]
        mock_manila_scenario__create_share_network.assert_has_calls(
            expected_calls * (self.TENANTS_AMOUNT * networks_per_tenant))
        mock_clients.assert_has_calls([
            mock.call("fake", {}) for i in range(self.TENANTS_AMOUNT)])

    @ddt.data(True, False)
    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_create_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_add_security_service_to_share_network")
    def test_setup_autocreate_share_networks_wo_security_services(
            self,
            neutron,
            mock_manila_scenario__add_security_service_to_share_network,
            mock_manila_scenario__create_share_network,
            mock_clients):
        networks_per_tenant = 2
        ctxt = self._get_context(
            networks_per_tenant=networks_per_tenant,
            neutron_network_provider=neutron,
        )
        inst = manila_share_networks.ShareNetworks(ctxt)

        inst.setup()

        self.assertEqual(ctxt["task"], inst.context.get("task"))
        self.assertEqual(ctxt["config"], inst.context.get("config"))
        self.assertEqual(ctxt["users"], inst.context.get("users"))
        self.assertEqual(ctxt["tenants"], inst.context.get("tenants"))
        self.assertFalse(
            mock_manila_scenario__add_security_service_to_share_network.called)
        if neutron:
            sn_args = {
                "neutron_net_id": mock.ANY,
                "neutron_subnet_id": mock.ANY,
            }
        else:
            sn_args = {"nova_net_id": mock.ANY}
        expected_calls = [mock.call(**sn_args), mock.call().to_dict()]
        mock_manila_scenario__create_share_network.assert_has_calls(
            expected_calls * (self.TENANTS_AMOUNT * networks_per_tenant))
        mock_clients.assert_has_calls([
            mock.call("fake", {}) for i in range(self.TENANTS_AMOUNT)])

    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_create_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_add_security_service_to_share_network")
    def test_setup_autocreate_share_networks_wo_networks(
            self,
            mock_manila_scenario__add_security_service_to_share_network,
            mock_manila_scenario__create_share_network,
            mock_clients):
        # No tenant networks: one bare share network per tenant.
        ctxt = self._get_context(networks_per_tenant=0)
        inst = manila_share_networks.ShareNetworks(ctxt)

        inst.setup()

        self.assertEqual(ctxt["task"], inst.context.get("task"))
        self.assertEqual(ctxt["config"], inst.context.get("config"))
        self.assertEqual(ctxt["users"], inst.context.get("users"))
        self.assertEqual(ctxt["tenants"], inst.context.get("tenants"))
        self.assertFalse(
            mock_manila_scenario__add_security_service_to_share_network.called)
        expected_calls = [mock.call(), mock.call().to_dict()]
        mock_manila_scenario__create_share_network.assert_has_calls(
            expected_calls * self.TENANTS_AMOUNT)
        mock_clients.assert_has_calls([
            mock.call("fake", {}) for i in range(self.TENANTS_AMOUNT)])

    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_delete_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_list_share_servers")
    @mock.patch(MANILA_UTILS_PATH + "_list_share_networks")
    def test_cleanup_used_existing_share_networks(
            self,
            mock_manila_scenario__list_share_networks,
            mock_manila_scenario__list_share_servers,
            mock_manila_scenario__delete_share_network,
            mock_clients):
        # Pre-existing share networks must never be deleted by cleanup.
        inst = manila_share_networks.ShareNetworks(self.ctxt_use_existing)
        mock_manila_scenario__list_share_networks.return_value = (
            self.existing_sns)
        inst.setup()

        inst.cleanup()

        self.assertFalse(mock_manila_scenario__list_share_servers.called)
        self.assertFalse(mock_manila_scenario__delete_share_network.called)
        self.assertEqual(2, mock_clients.call_count)
        for user in self.ctxt_use_existing["users"]:
            self.assertIn(mock.call(user["credential"], {}),
                          mock_clients.mock_calls)

    @ddt.data(True, False)
    @mock.patch("rally.task.utils.wait_for_status")
    @mock.patch("rally.osclients.Clients")
    @mock.patch(MANILA_UTILS_PATH + "_delete_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_create_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_add_security_service_to_share_network")
    @mock.patch(MANILA_UTILS_PATH + "_list_share_servers")
    def test_cleanup_autocreated_share_networks(
            self,
            use_security_services,
            mock_manila_scenario__list_share_servers,
            mock_manila_scenario__add_security_service_to_share_network,
            mock_manila_scenario__create_share_network,
            mock_manila_scenario__delete_share_network,
            mock_clients,
            mock_wait_for_status):
        fake_share_servers = ["fake_share_server"]
        mock_manila_scenario__list_share_servers.return_value = (
            fake_share_servers)
        networks_per_tenant = 2
        ctxt = self._get_context(
            networks_per_tenant=networks_per_tenant,
            use_security_services=use_security_services,
        )
        inst = manila_share_networks.ShareNetworks(ctxt)
        for tenant_id in list(ctxt["tenants"].keys()):
            inst.context["tenants"][tenant_id][
                consts.SECURITY_SERVICES_CONTEXT_NAME] = {
                    "security_services": [
                        Fake(id="fake_id").to_dict() for i in (1, 2, 3)
                    ]
            }

        inst.setup()

        mock_clients.assert_has_calls([
            mock.call("fake", {}) for i in range(self.TENANTS_AMOUNT)])

        inst.cleanup()

        self.assertEqual(self.TENANTS_AMOUNT * 4, mock_clients.call_count)
        # Autocreated networks: each one is deleted and its share server
        # is waited on until it reaches the "deleted" status.
        self.assertEqual(
            self.TENANTS_AMOUNT * networks_per_tenant,
            mock_manila_scenario__list_share_servers.call_count)
        mock_manila_scenario__list_share_servers.assert_has_calls(
            [mock.call(search_opts=mock.ANY)])
        self.assertEqual(
            self.TENANTS_AMOUNT * networks_per_tenant,
            mock_manila_scenario__delete_share_network.call_count)
        self.assertEqual(
            self.TENANTS_AMOUNT * networks_per_tenant,
            mock_wait_for_status.call_count)
        mock_wait_for_status.assert_has_calls([
            mock.call(
                fake_share_servers[0],
                ready_statuses=["deleted"],
                check_deletion=True,
                update_resource=mock.ANY,
                timeout=180,
                check_interval=2),
        ])
import copy
import random

import ddt
import jsonschema
import mock

from rally.common import logging
from rally.plugins.openstack.context.quotas import quotas
from tests.unit import test

QUOTAS_PATH = "rally.plugins.openstack.context.quotas"


@ddt.ddt
class QuotasTestCase(test.TestCase):
    """Unit tests for the generic quotas context plugin."""

    def setUp(self):
        super(QuotasTestCase, self).setUp()
        self.unlimited = -1
        self.context = {
            "config": {
            },
            "tenants": {
                "t1": {"credential": mock.MagicMock()},
                "t2": {"credential": mock.MagicMock()}},
            "admin": {"credential": mock.MagicMock()},
            "task": mock.MagicMock()
        }

    def _assert_invalid_config(self, config, message):
        """Expect schema validation to reject *config*."""
        try:
            quotas.Quotas.validate(config)
        except jsonschema.ValidationError:
            pass
        else:
            self.fail(message)

    def _assert_valid_config(self, config, message):
        """Expect schema validation to accept *config*."""
        try:
            quotas.Quotas.validate(config)
        except jsonschema.ValidationError:
            self.fail(message)

    def test_quotas_schemas(self):
        ctx = copy.deepcopy(self.context)
        ctx["config"]["quotas"] = {
            "cinder": {
                "volumes": self.unlimited,
                "snapshots": self.unlimited,
                "gigabytes": self.unlimited
            },
            "nova": {
                "instances": self.unlimited,
                "cores": self.unlimited,
                "ram": self.unlimited,
                "floating_ips": self.unlimited,
                "fixed_ips": self.unlimited,
                "metadata_items": self.unlimited,
                "injected_files": self.unlimited,
                "injected_file_content_bytes": self.unlimited,
                "injected_file_path_bytes": self.unlimited,
                "key_pairs": self.unlimited,
                "security_groups": self.unlimited,
                "security_group_rules": self.unlimited
            },
            "neutron": {
                "network": self.unlimited,
                "subnet": self.unlimited,
                "port": self.unlimited,
                "router": self.unlimited,
                "floatingip": self.unlimited,
                "security_group": self.unlimited,
                "security_group_rule": self.unlimited
            }
        }
        cfg = ctx["config"]["quotas"]
        for service in cfg:
            for key in cfg[service]:
                # Values below -1, non-integers and strings are invalid.
                for bad_value in (self.unlimited - 1, 2.5, "-1"):
                    cfg[service][key] = bad_value
                    self._assert_invalid_config(
                        cfg,
                        "Invalid value %s must raise a validation error"
                        % cfg[service][key])

                # Any non-negative integer is valid.
                cfg[service][key] = random.randint(0, 1000000)
                self._assert_valid_config(
                    cfg, "Positive integers are valid quota values")

                # -1 means "unlimited" and is valid too.
                cfg[service][key] = self.unlimited
                self._assert_valid_config(
                    cfg, "%d is a valid quota value" % self.unlimited)

            # Unknown keys are refused.
            cfg[service]["additional"] = self.unlimited
            self._assert_invalid_config(
                cfg, "Additional keys must raise a validation error")
            del cfg[service]["additional"]

            # All known keys are optional.
            cfg[service] = {}
            self._assert_valid_config(
                cfg, "Valid quota keys are optional")

    @mock.patch("%s.quotas.osclients.Clients" % QUOTAS_PATH)
    @mock.patch("%s.cinder_quotas.CinderQuotas" % QUOTAS_PATH)
    @ddt.data(True, False)
    def test_cinder_quotas(self, ex_users, mock_cinder_quotas, mock_clients):
        cinder_quo = mock_cinder_quotas.return_value
        ctx = copy.deepcopy(self.context)
        if ex_users:
            # With pre-existing users the original quotas are saved on
            # setup and restored (not deleted) on cleanup.
            ctx["existing_users"] = None
        ctx["config"]["quotas"] = {
            "cinder": {
                "volumes": self.unlimited,
                "snapshots": self.unlimited,
                "gigabytes": self.unlimited
            }
        }
        tenants = ctx["tenants"]
        cinder_quotas = ctx["config"]["quotas"]["cinder"]
        cinder_quo.get.return_value = cinder_quotas
        with quotas.Quotas(ctx) as quotas_ctx:
            quotas_ctx.setup()
            if ex_users:
                self.assertEqual([mock.call(tenant) for tenant in tenants],
                                 cinder_quo.get.call_args_list)
            self.assertEqual([mock.call(tenant, **cinder_quotas)
                              for tenant in tenants],
                             cinder_quo.update.call_args_list)
            mock_cinder_quotas.reset_mock()

        if ex_users:
            self.assertEqual([mock.call(tenant, **cinder_quotas)
                              for tenant in tenants],
                             cinder_quo.update.call_args_list)
        else:
            self.assertEqual([mock.call(tenant) for tenant in tenants],
                             cinder_quo.delete.call_args_list)

    @mock.patch("%s.quotas.osclients.Clients" % QUOTAS_PATH)
    @mock.patch("%s.nova_quotas.NovaQuotas" % QUOTAS_PATH)
    @ddt.data(True, False)
    def test_nova_quotas(self, ex_users, mock_nova_quotas, mock_clients):
        nova_quo = mock_nova_quotas.return_value
        ctx = copy.deepcopy(self.context)
        if ex_users:
            ctx["existing_users"] = None
        ctx["config"]["quotas"] = {
            "nova": {
                "instances": self.unlimited,
                "cores": self.unlimited,
                "ram": self.unlimited,
                "floating-ips": self.unlimited,
                "fixed-ips": self.unlimited,
                "metadata_items": self.unlimited,
                "injected_files": self.unlimited,
                "injected_file_content_bytes": self.unlimited,
                "injected_file_path_bytes": self.unlimited,
                "key_pairs": self.unlimited,
                "security_groups": self.unlimited,
                "security_group_rules": self.unlimited,
            }
        }
        tenants = ctx["tenants"]
        nova_quotas = ctx["config"]["quotas"]["nova"]
        nova_quo.get.return_value = nova_quotas
        with quotas.Quotas(ctx) as quotas_ctx:
            quotas_ctx.setup()
            if ex_users:
                self.assertEqual([mock.call(tenant) for tenant in tenants],
                                 nova_quo.get.call_args_list)
            self.assertEqual([mock.call(tenant, **nova_quotas)
                              for tenant in tenants],
                             nova_quo.update.call_args_list)
            mock_nova_quotas.reset_mock()

        if ex_users:
            self.assertEqual([mock.call(tenant, **nova_quotas)
                              for tenant in tenants],
                             nova_quo.update.call_args_list)
        else:
            self.assertEqual([mock.call(tenant) for tenant in tenants],
                             nova_quo.delete.call_args_list)

    @mock.patch("%s.quotas.osclients.Clients" % QUOTAS_PATH)
    @mock.patch("%s.neutron_quotas.NeutronQuotas" % QUOTAS_PATH)
    @ddt.data(True, False)
    def test_neutron_quotas(self, ex_users, mock_neutron_quotas,
                            mock_clients):
        neutron_quo = mock_neutron_quotas.return_value
        ctx = copy.deepcopy(self.context)
        if ex_users:
            ctx["existing_users"] = None
        ctx["config"]["quotas"] = {
            "neutron": {
                "network": self.unlimited,
                "subnet": self.unlimited,
                "port": self.unlimited,
                "router": self.unlimited,
                "floatingip": self.unlimited,
                "security_group": self.unlimited,
                "security_group_rule": self.unlimited
            }
        }
        tenants = ctx["tenants"]
        neutron_quotas = ctx["config"]["quotas"]["neutron"]
        neutron_quo.get.return_value = neutron_quotas
        with quotas.Quotas(ctx) as quotas_ctx:
            quotas_ctx.setup()
            if ex_users:
                self.assertEqual([mock.call(tenant) for tenant in tenants],
                                 neutron_quo.get.call_args_list)
            self.assertEqual([mock.call(tenant, **neutron_quotas)
                              for tenant in tenants],
                             neutron_quo.update.call_args_list)
            neutron_quo.reset_mock()

        if ex_users:
            self.assertEqual([mock.call(tenant, **neutron_quotas)
                              for tenant in tenants],
                             neutron_quo.update.call_args_list)
        else:
            self.assertEqual([mock.call(tenant) for tenant in tenants],
                             neutron_quo.delete.call_args_list)

    @mock.patch("rally.plugins.openstack.context."
                "quotas.quotas.osclients.Clients")
    @mock.patch("rally.plugins.openstack.context."
                "quotas.nova_quotas.NovaQuotas")
    @mock.patch("rally.plugins.openstack.context."
                "quotas.cinder_quotas.CinderQuotas")
    @mock.patch("rally.plugins.openstack.context."
                "quotas.neutron_quotas.NeutronQuotas")
    def test_no_quotas(self, mock_neutron_quotas, mock_cinder_quotas,
                       mock_nova_quotas, mock_clients):
        # Without a "quotas" config nothing may be updated or deleted.
        ctx = copy.deepcopy(self.context)
        if "quotas" in ctx["config"]:
            del ctx["config"]["quotas"]
        with quotas.Quotas(ctx) as quotas_ctx:
            quotas_ctx.setup()
            self.assertFalse(mock_cinder_quotas.update.called)
            self.assertFalse(mock_nova_quotas.update.called)
            self.assertFalse(mock_neutron_quotas.update.called)
            self.assertFalse(mock_cinder_quotas.delete.called)
            self.assertFalse(mock_nova_quotas.delete.called)
            self.assertFalse(mock_neutron_quotas.delete.called)

    @ddt.data(
        {"quotas_ctxt": {"nova": {"cpu": 1}},
         "quotas_class_path": "nova_quotas.NovaQuotas"},
        {"quotas_ctxt": {"neutron": {"network": 2}},
         "quotas_class_path": "neutron_quotas.NeutronQuotas"},
        {"quotas_ctxt": {"cinder": {"volumes": 3}},
         "quotas_class_path": "cinder_quotas.CinderQuotas"},
        {"quotas_ctxt": {"manila": {"shares": 4}},
         "quotas_class_path": "manila_quotas.ManilaQuotas"},
        {"quotas_ctxt": {"designate": {"domains": 5}},
         "quotas_class_path": "designate_quotas.DesignateQuotas"},
    )
    @ddt.unpack
    def test_exception_during_cleanup(self, quotas_ctxt, quotas_class_path):
        quotas_path = "%s.%s" % (QUOTAS_PATH, quotas_class_path)
        with mock.patch(quotas_path) as mock_quotas:
            mock_quotas.return_value.update.side_effect = Exception
            ctx = copy.deepcopy(self.context)
            ctx["config"]["quotas"] = quotas_ctxt

            quotas_instance = quotas.Quotas(ctx)
            quotas_instance.original_quotas = []
            for service in quotas_ctxt:
                for tenant in self.context["tenants"]:
                    quotas_instance.original_quotas.append(
                        (service, tenant, quotas_ctxt[service]))
            # NOTE(boris-42): ensure that cleanup didn't raise exceptions.
            with logging.LogCatcher(quotas.LOG) as log:
                quotas_instance.cleanup()

            log.assertInLogs("Failed to restore quotas for tenant")

            # A failing tenant must not prevent restoring the others.
            self.assertEqual(mock_quotas.return_value.update.call_count,
                             len(self.context["tenants"]))
import mock

from rally.plugins.openstack.context.quotas import neutron_quotas
from tests.unit import test


class NeutronQuotasTestCase(test.TestCase):
    """Unit tests for the Neutron quotas helper."""

    def setUp(self):
        super(NeutronQuotasTestCase, self).setUp()
        # Representative quota values reused by every test below.
        self.quotas = {
            "network": 20,
            "subnet": 20,
            "port": 100,
            "router": 20,
            "floatingip": 100,
            "security_group": 100,
            "security_group_rule": 100
        }

    def test_update(self):
        fake_clients = mock.MagicMock()
        neutron_quo = neutron_quotas.NeutronQuotas(fake_clients)
        tenant_id = mock.MagicMock()

        neutron_quo.update(tenant_id, **self.quotas)

        # Neutron expects the values wrapped into a {"quota": ...} body.
        expected_body = {"quota": self.quotas}
        fake_clients.neutron().update_quota.assert_called_once_with(
            tenant_id, body=expected_body)

    def test_delete(self):
        fake_clients = mock.MagicMock()
        neutron_quo = neutron_quotas.NeutronQuotas(fake_clients)
        tenant_id = mock.MagicMock()

        neutron_quo.delete(tenant_id)

        fake_clients.neutron().delete_quota.assert_called_once_with(tenant_id)

    def test_get(self):
        tenant_id = "tenant_id"
        fake_clients = mock.MagicMock()
        # show_quota returns the values wrapped; get() must unwrap them.
        fake_clients.neutron.return_value.show_quota.return_value = {
            "quota": self.quotas}
        neutron_quo = neutron_quotas.NeutronQuotas(fake_clients)

        self.assertEqual(self.quotas, neutron_quo.get(tenant_id))
        fake_clients.neutron().show_quota.assert_called_once_with(tenant_id)
import mock

from rally.plugins.openstack.context.quotas import cinder_quotas
from tests.unit import test


class CinderQuotasTestCase(test.TestCase):
    """Unit tests for the Cinder quotas helper."""

    def test_update(self):
        fake_clients = mock.MagicMock()
        cinder_quo = cinder_quotas.CinderQuotas(fake_clients)
        tenant_id = mock.MagicMock()
        quotas_values = {
            "volumes": 10,
            "snapshots": 50,
            "backups": 20,
            "backup_gigabytes": 1000,
            "gigabytes": 1000
        }

        cinder_quo.update(tenant_id, **quotas_values)

        # Cinder takes the quota values directly as keyword arguments.
        fake_clients.cinder().quotas.update.assert_called_once_with(
            tenant_id, **quotas_values)

    def test_delete(self):
        fake_clients = mock.MagicMock()
        cinder_quo = cinder_quotas.CinderQuotas(fake_clients)
        tenant_id = mock.MagicMock()

        cinder_quo.delete(tenant_id)

        fake_clients.cinder().quotas.delete.assert_called_once_with(tenant_id)

    def test_get(self):
        tenant_id = "tenant_id"
        quotas = {"gigabytes": "gb", "snapshots": "ss", "volumes": "v",
                  "backups": "b", "backup_gigabytes": "b_g"}
        # The real quota-set object exposes the values as attributes.
        quota_set = mock.MagicMock(**quotas)
        fake_clients = mock.MagicMock()
        fake_clients.cinder.return_value.quotas.get.return_value = quota_set
        cinder_quo = cinder_quotas.CinderQuotas(fake_clients)

        self.assertEqual(quotas, cinder_quo.get(tenant_id))
        fake_clients.cinder().quotas.get.assert_called_once_with(tenant_id)
import mock from rally.plugins.openstack.context.quotas import designate_quotas from tests.unit import test class DesignateQuotasTestCase(test.TestCase): def test_update(self): clients = mock.MagicMock() quotas = designate_quotas.DesignateQuotas(clients) tenant_id = mock.MagicMock() quotas_values = { "domains": 5, "domain_recordsets": 20, "domain_records": 20, "recordset_records": 20, } quotas.update(tenant_id, **quotas_values) clients.designate().quotas.update.assert_called_once_with( tenant_id, quotas_values) def test_delete(self): clients = mock.MagicMock() quotas = designate_quotas.DesignateQuotas(clients) tenant_id = mock.MagicMock() quotas.delete(tenant_id) clients.designate().quotas.reset.assert_called_once_with(tenant_id) def test_get(self): tenant_id = "tenant_id" quotas = {"domains": -1, "domain_recordsets": 2, "domain_records": 3, "recordset_records": 3} clients = mock.MagicMock() clients.designate.return_value.quotas.get.return_value = quotas designate_quo = designate_quotas.DesignateQuotas(clients) self.assertEqual(quotas, designate_quo.get(tenant_id)) clients.designate().quotas.get.assert_called_once_with(tenant_id) rally-0.9.1/tests/unit/plugins/openstack/context/quotas/test_manila_quotas.py0000664000567000056710000000411613073417717031060 0ustar jenkinsjenkins00000000000000# Copyright 2015 Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.context.quotas import manila_quotas from tests.unit import test class ManilaQuotasTestCase(test.TestCase): def test_update(self): clients = mock.MagicMock() instance = manila_quotas.ManilaQuotas(clients) tenant_id = mock.MagicMock() quotas_values = { "shares": 10, "gigabytes": 13, "snapshots": 7, "snapshot_gigabytes": 51, "share_networks": 1014, } instance.update(tenant_id, **quotas_values) clients.manila.return_value.quotas.update.assert_called_once_with( tenant_id, **quotas_values) def test_delete(self): clients = mock.MagicMock() instance = manila_quotas.ManilaQuotas(clients) tenant_id = mock.MagicMock() instance.delete(tenant_id) clients.manila.return_value.quotas.delete.assert_called_once_with( tenant_id) def test_get(self): tenant_id = "tenant_id" quotas = {"gigabytes": "gb", "snapshots": "ss", "shares": "v", "snapshot_gigabytes": "sg", "share_networks": "sn"} quota_set = mock.MagicMock(**quotas) clients = mock.MagicMock() clients.manila.return_value.quotas.get.return_value = quota_set manila_quo = manila_quotas.ManilaQuotas(clients) self.assertEqual(quotas, manila_quo.get(tenant_id)) clients.manila().quotas.get.assert_called_once_with(tenant_id) rally-0.9.1/tests/unit/plugins/openstack/context/quotas/test_nova_quotas.py0000664000567000056710000000444113073417717030563 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.context.quotas import nova_quotas from tests.unit import test class NovaQuotasTestCase(test.TestCase): def setUp(self): super(NovaQuotasTestCase, self).setUp() self.quotas = { "instances": 10, "cores": 100, "ram": 100000, "floating_ips": 100, "fixed_ips": 10000, "metadata_items": 5, "injected_files": 5, "injected_file_content_bytes": 2048, "injected_file_path_bytes": 1024, "key_pairs": 50, "security_groups": 50, "security_group_rules": 50, "server_group_members": 777, "server_groups": 33 } def test_update(self): clients = mock.MagicMock() nova_quo = nova_quotas.NovaQuotas(clients) tenant_id = mock.MagicMock() nova_quo.update(tenant_id, **self.quotas) clients.nova().quotas.update.assert_called_once_with(tenant_id, **self.quotas) def test_delete(self): clients = mock.MagicMock() nova_quo = nova_quotas.NovaQuotas(clients) tenant_id = mock.MagicMock() nova_quo.delete(tenant_id) clients.nova().quotas.delete.assert_called_once_with(tenant_id) def test_get(self): tenant_id = "tenant_id" quota_set = mock.MagicMock(**self.quotas) clients = mock.MagicMock() clients.nova.return_value.quotas.get.return_value = quota_set nova_quo = nova_quotas.NovaQuotas(clients) self.assertEqual(self.quotas, nova_quo.get(tenant_id)) clients.nova().quotas.get.assert_called_once_with(tenant_id) rally-0.9.1/tests/unit/plugins/openstack/context/neutron/0000775000567000056710000000000013073420067024756 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/neutron/__init__.py0000664000567000056710000000000013073417717027065 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/neutron/test_existing_network.py0000664000567000056710000000554713073417717032015 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.network import existing_network from tests.unit import test CTX = "rally.plugins.openstack.context.network" class ExistingNetworkTestCase(test.TestCase): def setUp(self): super(ExistingNetworkTestCase, self).setUp() self.config = {"foo": "bar"} self.context = test.get_test_context() self.context.update({ "users": [ {"id": 1, "tenant_id": "tenant1", "credential": mock.Mock()}, {"id": 2, "tenant_id": "tenant2", "credential": mock.Mock()}, ], "tenants": { "tenant1": {}, "tenant2": {}, }, "config": { "existing_network": self.config }, }) @mock.patch("rally.osclients.Clients") @mock.patch("rally.plugins.openstack.wrappers.network.wrap") def test_setup(self, mock_network_wrap, mock_clients): networks = [mock.Mock(), mock.Mock(), mock.Mock()] net_wrappers = { "tenant1": mock.Mock( **{"list_networks.return_value": networks[0:2]}), "tenant2": mock.Mock( **{"list_networks.return_value": networks[2:]}) } mock_network_wrap.side_effect = [net_wrappers["tenant1"], net_wrappers["tenant2"]] context = existing_network.ExistingNetwork(self.context) context.setup() mock_clients.assert_has_calls([ mock.call(u["credential"]) for u in self.context["users"]]) mock_network_wrap.assert_has_calls([ mock.call(mock_clients.return_value, context, config=self.config), mock.call(mock_clients.return_value, context, config=self.config)]) for net_wrapper in net_wrappers.values(): net_wrapper.list_networks.assert_called_once_with() self.assertEqual( self.context["tenants"], { "tenant1": {"networks": networks[0:2]}, "tenant2": {"networks": 
networks[2:]}, } ) def test_cleanup(self): # NOTE(stpierre): Test that cleanup is not abstract existing_network.ExistingNetwork({"task": mock.MagicMock()}).cleanup() rally-0.9.1/tests/unit/plugins/openstack/context/neutron/test_lbaas.py0000664000567000056710000001721013073417717027462 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.neutron import lbaas as lbaas_context from tests.unit import test NET = "rally.plugins.openstack.wrappers.network." 
class LbaasTestCase(test.TestCase): def get_context(self, **kwargs): foo_tenant = {"networks": [{"id": "foo_net", "tenant_id": "foo_tenant", "subnets": ["foo_subnet"]}]} bar_tenant = {"networks": [{"id": "bar_net", "tenant_id": "bar_tenant", "subnets": ["bar_subnet"]}]} return {"task": {"uuid": "foo_task"}, "admin": {"credential": "foo_admin"}, "users": [{"id": "foo_user", "tenant_id": "foo_tenant"}, {"id": "bar_user", "tenant_id": "bar_tenant"}], "config": {"lbaas": kwargs}, "tenants": {"foo_tenant": foo_tenant, "bar_tenant": bar_tenant}} @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap", return_value="foo_service") def test__init__default(self, mock_wrap, mock_clients): context = lbaas_context.Lbaas(self.get_context()) self.assertEqual( context.config["pool"]["lb_method"], lbaas_context.Lbaas.DEFAULT_CONFIG["pool"]["lb_method"]) self.assertEqual( context.config["pool"]["protocol"], lbaas_context.Lbaas.DEFAULT_CONFIG["pool"]["protocol"]) self.assertEqual( context.config["lbaas_version"], lbaas_context.Lbaas.DEFAULT_CONFIG["lbaas_version"]) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap", return_value="foo_service") def test__init__explicit(self, mock_wrap, mock_clients): context = lbaas_context.Lbaas( self.get_context(pool={"lb_method": "LEAST_CONNECTIONS"})) self.assertEqual(context.config["pool"]["lb_method"], "LEAST_CONNECTIONS") @mock.patch(NET + "wrap") @mock.patch("rally.plugins.openstack.context.neutron.lbaas.utils") @mock.patch("rally.osclients.Clients") def test_setup_with_lbaas(self, mock_clients, mock_utils, mock_wrap): mock_utils.iterate_per_tenants.return_value = [ ("foo_user", "foo_tenant"), ("bar_user", "bar_tenant")] foo_net = {"id": "foo_net", "tenant_id": "foo_tenant", "subnets": ["foo_subnet"], "lb_pools": [{"pool": {"id": "foo_pool", "tenant_id": "foo_tenant"}}]} bar_net = {"id": "bar_net", "tenant_id": "bar_tenant", "subnets": ["bar_subnet"], "lb_pools": [{"pool": {"id": "bar_pool", "tenant_id": "bar_tenant"}}]} 
expected_net = [bar_net, foo_net] mock_create = mock.Mock( side_effect=lambda t, s, **kw: {"pool": {"id": str(t.split("_")[0]) + "_pool", "tenant_id": t}}) actual_net = [] mock_wrap.return_value = mock.Mock(create_v1_pool=mock_create) net_wrapper = mock_wrap(mock_clients.return_value) net_wrapper.supports_extension.return_value = (True, None) fake_args = {"lbaas_version": 1} lb_context = lbaas_context.Lbaas(self.get_context(**fake_args)) lb_context.setup() mock_utils.iterate_per_tenants.assert_called_once_with( lb_context.context["users"]) net_wrapper.supports_extension.assert_called_once_with("lbaas") for tenant_id, tenant_ctx in ( sorted(lb_context.context["tenants"].items())): for network in tenant_ctx["networks"]: actual_net.append(network) self.assertEqual(expected_net, actual_net) @mock.patch(NET + "wrap") @mock.patch("rally.plugins.openstack.context.neutron.lbaas.utils") @mock.patch("rally.osclients.Clients") def test_setup_with_no_lbaas(self, mock_clients, mock_utils, mock_wrap): mock_utils.iterate_per_tenants.return_value = [ ("bar_user", "bar_tenant")] mock_create = mock.Mock(side_effect=lambda t, **kw: t + "-net") mock_wrap.return_value = mock.Mock(create_v1_pool=mock_create) fake_args = {"lbaas_version": 1} lb_context = lbaas_context.Lbaas(self.get_context(**fake_args)) net_wrapper = mock_wrap(mock_clients.return_value) net_wrapper.supports_extension.return_value = (False, None) lb_context.setup() mock_utils.iterate_per_tenants.assert_not_called() net_wrapper.supports_extension.assert_called_once_with("lbaas") assert not net_wrapper.create_v1_pool.called @mock.patch(NET + "wrap") @mock.patch("rally.plugins.openstack.context.neutron.lbaas.utils") @mock.patch("rally.osclients.Clients") def test_setup_with_lbaas_version_not_one(self, mock_clients, mock_utils, mock_wrap): mock_utils.iterate_per_tenants.return_value = [ ("bar_user", "bar_tenant")] mock_create = mock.Mock(side_effect=lambda t, **kw: t + "-net") mock_wrap.return_value = 
mock.Mock(create_v1_pool=mock_create) fake_args = {"lbaas_version": 2} lb_context = lbaas_context.Lbaas(self.get_context(**fake_args)) net_wrapper = mock_wrap(mock_clients.return_value) net_wrapper.supports_extension.return_value = (True, None) self.assertRaises(NotImplementedError, lb_context.setup) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap") def test_cleanup(self, mock_wrap, mock_clients): net_wrapper = mock_wrap(mock_clients.return_value) lb_context = lbaas_context.Lbaas(self.get_context()) expected_pools = [] for tenant_id, tenant_ctx in lb_context.context["tenants"].items(): resultant_pool = {"pool": { "id": str(tenant_id.split("_")[0]) + "_pool"}} expected_pools.append(resultant_pool) for network in ( lb_context.context["tenants"][tenant_id]["networks"]): network.setdefault("lb_pools", []).append(resultant_pool) lb_context.cleanup() net_wrapper.delete_v1_pool.assert_has_calls( [mock.call(pool["pool"]["id"]) for pool in expected_pools]) @mock.patch("rally.osclients.Clients") @mock.patch(NET + "wrap") def test_cleanup_lbaas_version_not_one(self, mock_wrap, mock_clients): fakeargs = {"lbaas_version": 2} net_wrapper = mock_wrap(mock_clients.return_value) lb_context = lbaas_context.Lbaas(self.get_context(**fakeargs)) for tenant_id, tenant_ctx in lb_context.context["tenants"].items(): resultant_pool = {"pool": { "id": str(tenant_id.split("_")[0]) + "_pool"}} for network in ( lb_context.context["tenants"][tenant_id]["networks"]): network.setdefault("lb_pools", []).append(resultant_pool) lb_context.cleanup() assert not net_wrapper.delete_v1_pool.called rally-0.9.1/tests/unit/plugins/openstack/context/keystone/0000775000567000056710000000000013073420067025125 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/keystone/__init__.py0000664000567000056710000000000013073417717027234 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/keystone/test_users.py0000664000567000056710000003663413073417720027715 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.common import objects from rally import consts from rally import exceptions from rally.plugins.openstack.context.keystone import users from tests.unit import test CTX = "rally.plugins.openstack.context.keystone.users" class UserGeneratorTestCase(test.ScenarioTestCase): tenants_num = 1 users_per_tenant = 5 users_num = tenants_num * users_per_tenant threads = 10 def setUp(self): super(UserGeneratorTestCase, self).setUp() self.osclients_patcher = mock.patch("%s.osclients" % CTX) self.osclients = self.osclients_patcher.start() self.context.update({ "config": { "users": { "tenants": self.tenants_num, "users_per_tenant": self.users_per_tenant, "resource_management_workers": self.threads, } }, "admin": {"credential": mock.MagicMock()}, "users": [], "task": {"uuid": "task_id"} }) def tearDown(self): self.osclients_patcher.stop() super(UserGeneratorTestCase, self).tearDown() @mock.patch("%s.network.wrap" % CTX) def test__remove_default_security_group_not_needed(self, mock_wrap): services = {"compute": consts.Service.NOVA} self.osclients.Clients().services.return_value = services user_generator = users.UserGenerator(self.context) user_generator._remove_default_security_group() 
self.assertFalse(mock_wrap.called) @mock.patch("%s.network.wrap" % CTX) def test__remove_default_security_group_neutron_no_sg(self, mock_wrap): net_wrapper = mock.Mock(SERVICE_IMPL=consts.Service.NEUTRON) net_wrapper.supports_extension.return_value = (False, None) mock_wrap.return_value = net_wrapper user_generator = users.UserGenerator(self.context) admin_clients = mock.Mock() admin_clients.services.return_value = { "compute": consts.Service.NOVA, "neutron": consts.Service.NEUTRON} user_clients = [mock.Mock(), mock.Mock()] self.osclients.Clients.side_effect = [admin_clients] + user_clients user_generator._remove_default_security_group() mock_wrap.assert_called_once_with(admin_clients, user_generator) net_wrapper.supports_extension.assert_called_once_with( "security-group") @mock.patch("rally.common.utils.iterate_per_tenants") @mock.patch("%s.network" % CTX) @mock.patch("rally.task.utils.check_service_status", return_value=False) def test__remove_default_security_group( self, mock_check_service_status, mock_network, mock_iterate_per_tenants): net_wrapper = mock.Mock(SERVICE_IMPL=consts.Service.NEUTRON) net_wrapper.supports_extension.return_value = (True, None) mock_network.wrap.return_value = net_wrapper user_generator = users.UserGenerator(self.context) admin_clients = mock.Mock() admin_clients.services.return_value = { "compute": consts.Service.NOVA, "neutron": consts.Service.NEUTRON} user_clients = [mock.Mock(), mock.Mock()] self.osclients.Clients.side_effect = [admin_clients] + user_clients mock_iterate_per_tenants.return_value = [ (mock.MagicMock(), "t1"), (mock.MagicMock(), "t2")] user_generator._remove_default_security_group() mock_network.wrap.assert_called_once_with(admin_clients, user_generator) mock_iterate_per_tenants.assert_called_once_with( user_generator.context["users"]) expected = [mock.call(user_generator.credential)] + [ mock.call(u["credential"]) for u, t in mock_iterate_per_tenants.return_value] self.osclients.Clients.assert_has_calls(expected, 
any_order=True) expected_deletes = [] for clients in user_clients: user_nova = clients.nova.return_value user_nova.security_groups.find.assert_called_once_with( name="default") expected_deletes.append( mock.call(user_nova.security_groups.find.return_value.id)) nova_admin = admin_clients.neutron.return_value nova_admin.delete_security_group.assert_has_calls(expected_deletes, any_order=True) @mock.patch("rally.task.utils.check_service_status", return_value=True) def test__remove_associated_networks(self, mock_check_service_status): def fake_get_network(req_network): for network in networks: if network.project_id == req_network.project_id: return network networks = [mock.MagicMock(project_id="t1"), mock.MagicMock(project_id="t4")] nova_admin = mock.MagicMock() clients = mock.MagicMock() self.osclients.Clients.return_value = clients clients.services.return_value = {"compute": "nova"} clients.nova.return_value = nova_admin nova_admin.networks.list.return_value = networks nova_admin.networks.get = fake_get_network user_generator = users.UserGenerator(self.context) user_generator.context["tenants"] = {"t1": {"id": "t1", "name": "t1"}, "t2": {"id": "t2", "name": "t2"}} user_generator._remove_associated_networks() mock_check_service_status.assert_called_once_with(mock.ANY, "nova-network") nova_admin.networks.disassociate.assert_called_once_with(networks[0]) @mock.patch("rally.task.utils.check_service_status", return_value=True) def test__remove_associated_networks_failure(self, mock_check_service_status): def fake_get_network(req_network): for network in networks: if network.project_id == req_network.project_id: return network networks = [mock.MagicMock(project_id="t1"), mock.MagicMock(project_id="t4")] nova_admin = mock.MagicMock() clients = mock.MagicMock() self.osclients.Clients.return_value = clients clients.services.return_value = {"compute": "nova"} clients.nova.return_value = nova_admin nova_admin.networks.list.return_value = networks nova_admin.networks.get = 
fake_get_network nova_admin.networks.disassociate.side_effect = Exception() user_generator = users.UserGenerator(self.context) user_generator.context["tenants"] = {"t1": {"id": "t1", "name": "t1"}, "t2": {"id": "t2", "name": "t2"}} user_generator._remove_associated_networks() mock_check_service_status.assert_called_once_with(mock.ANY, "nova-network") nova_admin.networks.disassociate.assert_called_once_with(networks[0]) @mock.patch("%s.identity" % CTX) def test__create_tenants(self, mock_identity): self.context["config"]["users"]["tenants"] = 1 user_generator = users.UserGenerator(self.context) tenants = user_generator._create_tenants() self.assertEqual(1, len(tenants)) id, tenant = tenants.popitem() self.assertIn("name", tenant) @mock.patch("%s.identity" % CTX) def test__create_users(self, mock_identity): self.context["config"]["users"]["users_per_tenant"] = 2 user_generator = users.UserGenerator(self.context) user_generator.context["tenants"] = {"t1": {"id": "t1", "name": "t1"}, "t2": {"id": "t2", "name": "t2"}} users_ = user_generator._create_users() self.assertEqual(4, len(users_)) for user in users_: self.assertIn("id", user) self.assertIn("credential", user) @mock.patch("%s.identity" % CTX) def test__delete_tenants(self, mock_identity): user_generator = users.UserGenerator(self.context) user_generator.context["tenants"] = {"t1": {"id": "t1", "name": "t1"}, "t2": {"id": "t2", "name": "t2"}} user_generator._delete_tenants() self.assertEqual(len(user_generator.context["tenants"]), 0) @mock.patch("%s.identity" % CTX) def test__delete_tenants_failure(self, mock_identity): identity_service = mock_identity.Identity.return_value identity_service.delete_project.side_effect = Exception() user_generator = users.UserGenerator(self.context) user_generator.context["tenants"] = {"t1": {"id": "t1", "name": "t1"}, "t2": {"id": "t2", "name": "t2"}} user_generator._delete_tenants() self.assertEqual(len(user_generator.context["tenants"]), 0) @mock.patch("%s.identity" % CTX) def 
test__delete_users(self, mock_identity): user_generator = users.UserGenerator(self.context) user1 = mock.MagicMock() user2 = mock.MagicMock() user_generator.context["users"] = [user1, user2] user_generator._delete_users() self.assertEqual(len(user_generator.context["users"]), 0) @mock.patch("%s.identity" % CTX) def test__delete_users_failure(self, mock_identity): identity_service = mock_identity.Identity.return_value identity_service.delete_user.side_effect = Exception() user_generator = users.UserGenerator(self.context) user1 = mock.MagicMock() user2 = mock.MagicMock() user_generator.context["users"] = [user1, user2] user_generator._delete_users() self.assertEqual(len(user_generator.context["users"]), 0) @mock.patch("%s.identity" % CTX) def test_setup_and_cleanup(self, mock_identity): with users.UserGenerator(self.context) as ctx: ctx.setup() self.assertEqual(len(ctx.context["users"]), self.users_num) self.assertEqual(len(ctx.context["tenants"]), self.tenants_num) self.assertEqual("random", ctx.context["user_choice_method"]) # Cleanup (called by content manager) self.assertEqual(len(ctx.context["users"]), 0) self.assertEqual(len(ctx.context["tenants"]), 0) @mock.patch("rally.common.broker.LOG.warning") @mock.patch("%s.identity" % CTX) def test_setup_and_cleanup_with_error_during_create_user( self, mock_identity, mock_log_warning): identity_service = mock_identity.Identity.return_value identity_service.create_user.side_effect = Exception() with users.UserGenerator(self.context) as ctx: self.assertRaises(exceptions.ContextSetupFailure, ctx.setup) mock_log_warning.assert_called_with( "Failed to consume a task from the queue: ") # Ensure that tenants get deleted anyway self.assertEqual(0, len(ctx.context["tenants"])) @mock.patch("%s.identity" % CTX) def test_users_and_tenants_in_context(self, mock_identity): identity_service = mock_identity.Identity.return_value credential = objects.Credential("foo_url", "foo", "foo_pass", https_insecure=True, https_cacert="cacert") 
tmp_context = dict(self.context) tmp_context["config"]["users"] = {"tenants": 1, "users_per_tenant": 2, "resource_management_workers": 1} tmp_context["admin"]["credential"] = credential credential_dict = credential.to_dict(False) user_list = [mock.MagicMock(id="id_%d" % i) for i in range(self.users_num)] identity_service.create_user.side_effect = user_list with users.UserGenerator(tmp_context) as ctx: ctx.generate_random_name = mock.Mock() ctx.setup() create_tenant_calls = [] for i, t in enumerate(ctx.context["tenants"]): create_tenant_calls.append( mock.call(ctx.generate_random_name.return_value, ctx.config["project_domain"])) for user in ctx.context["users"]: self.assertEqual(set(["id", "credential", "tenant_id"]), set(user.keys())) user_credential_dict = user["credential"].to_dict(False) excluded_keys = ["auth_url", "username", "password", "tenant_name", "region_name", "project_domain_name", "user_domain_name"] for key in (set(credential_dict.keys()) - set(excluded_keys)): self.assertEqual(credential_dict[key], user_credential_dict[key]) tenants_ids = [] for t in ctx.context["tenants"].keys(): tenants_ids.append(t) for (user, tenant_id, orig_user) in zip(ctx.context["users"], tenants_ids, user_list): self.assertEqual(user["id"], orig_user.id) self.assertEqual(user["tenant_id"], tenant_id) @mock.patch("%s.identity" % CTX) def test_users_contains_correct_endpoint_type(self, mock_identity): credential = objects.Credential( "foo_url", "foo", "foo_pass", endpoint_type=consts.EndpointType.INTERNAL) config = { "config": { "users": { "tenants": 1, "users_per_tenant": 2, "resource_management_workers": 1 } }, "admin": {"credential": credential}, "task": {"uuid": "task_id"} } user_generator = users.UserGenerator(config) users_ = user_generator._create_users() for user in users_: self.assertEqual("internal", user["credential"].endpoint_type) @mock.patch("%s.identity" % CTX) def test_users_contains_default_endpoint_type(self, mock_identity): credential = 
objects.Credential("foo_url", "foo", "foo_pass") config = { "config": { "users": { "tenants": 1, "users_per_tenant": 2, "resource_management_workers": 1 } }, "admin": {"credential": credential}, "task": {"uuid": "task_id"} } user_generator = users.UserGenerator(config) users_ = user_generator._create_users() for user in users_: self.assertEqual("public", user["credential"].endpoint_type) rally-0.9.1/tests/unit/plugins/openstack/context/keystone/test_roles.py0000664000567000056710000001327613073417717027703 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.plugins.openstack.context.keystone import roles from tests.unit import fakes from tests.unit import test CTX = "rally.plugins.openstack.context.keystone.roles" class RoleGeneratorTestCase(test.TestCase): def create_default_roles_and_patch_add_remove_functions(self, fc): fc.keystone().roles.add_user_role = mock.MagicMock() fc.keystone().roles.remove_user_role = mock.MagicMock() fc.keystone().roles.create("r1", "test_role1") fc.keystone().roles.create("r2", "test_role2") self.assertEqual(2, len(fc.keystone().roles.list())) @property def context(self): return { "config": { "roles": [ "test_role1", "test_role2" ] }, "admin": {"credential": mock.MagicMock()}, "task": mock.MagicMock() } @mock.patch("%s.osclients" % CTX) def test_add_role(self, mock_osclients): fc = fakes.FakeClients() mock_osclients.Clients.return_value = fc self.create_default_roles_and_patch_add_remove_functions(fc) ctx = roles.RoleGenerator(self.context) ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"}, {"id": "u2", "tenant_id": "t2"}] ctx.credential = mock.MagicMock() ctx.setup() expected = {"r1": "test_role1", "r2": "test_role2"} self.assertEqual(expected, ctx.context["roles"]) @mock.patch("%s.osclients" % CTX) def test_add_role_which_does_not_exist(self, mock_osclients): fc = fakes.FakeClients() mock_osclients.Clients.return_value = fc self.create_default_roles_and_patch_add_remove_functions(fc) ctx = roles.RoleGenerator(self.context) ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"}, {"id": "u2", "tenant_id": "t2"}] ctx.config = ["unknown_role"] ctx.credential = mock.MagicMock() ex = self.assertRaises(exceptions.NoSuchRole, ctx._get_role_object, "unknown_role") expected = "There is no role with name `unknown_role`." 
self.assertEqual(expected, str(ex)) @mock.patch("%s.osclients" % CTX) def test_remove_role(self, mock_osclients): fc = fakes.FakeClients() mock_osclients.Clients.return_value = fc self.create_default_roles_and_patch_add_remove_functions(fc) ctx = roles.RoleGenerator(self.context) ctx.context["roles"] = {"r1": "test_role1", "r2": "test_role2"} ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"}, {"id": "u2", "tenant_id": "t2"}] ctx.credential = mock.MagicMock() ctx.cleanup() calls = [ mock.call(user="u1", role="r1", tenant="t1"), mock.call(user="u2", role="r1", tenant="t2"), mock.call(user="u1", role="r2", tenant="t1"), mock.call(user="u2", role="r2", tenant="t2") ] fc.keystone().roles.remove_user_role.assert_has_calls(calls, any_order=True) @mock.patch("%s.osclients" % CTX) def test_setup_and_cleanup(self, mock_osclients): fc = fakes.FakeClients() mock_osclients.Clients.return_value = fc self.create_default_roles_and_patch_add_remove_functions(fc) with roles.RoleGenerator(self.context) as ctx: ctx.context["users"] = [{"id": "u1", "tenant_id": "t1"}, {"id": "u2", "tenant_id": "t2"}] ctx.setup() ctx.credential = mock.MagicMock() calls = [ mock.call(user="u1", role="r1", tenant="t1"), mock.call(user="u2", role="r1", tenant="t2"), mock.call(user="u1", role="r2", tenant="t1"), mock.call(user="u2", role="r2", tenant="t2") ] fc.keystone().roles.add_user_role.assert_has_calls(calls, any_order=True) self.assertEqual( 4, fc.keystone().roles.add_user_role.call_count) self.assertEqual( 0, fc.keystone().roles.remove_user_role.call_count) self.assertEqual(2, len(ctx.context["roles"])) self.assertEqual(2, len(fc.keystone().roles.list())) # Cleanup (called by content manager) self.assertEqual(2, len(fc.keystone().roles.list())) self.assertEqual(4, fc.keystone().roles.add_user_role.call_count) self.assertEqual(4, fc.keystone().roles.remove_user_role.call_count) calls = [ mock.call(user="u1", role="r1", tenant="t1"), mock.call(user="u2", role="r1", tenant="t2"), 
mock.call(user="u1", role="r2", tenant="t1"), mock.call(user="u2", role="r2", tenant="t2") ] fc.keystone().roles.remove_user_role.assert_has_calls(calls, any_order=True) rally-0.9.1/tests/unit/plugins/openstack/context/keystone/test_existing_users.py0000664000567000056710000000576313073417720031626 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.context.keystone import existing_users from tests.unit import test CTX = "rally.plugins.openstack.context" class ExistingUserTestCase(test.TestCase): @mock.patch("%s.keystone.existing_users.osclients.Clients" % CTX) @mock.patch("%s.keystone.existing_users.objects.Credential" % CTX) def test_setup(self, mock_credential, mock_clients): user1 = mock.MagicMock(tenant_id="1", user_id="1", tenant_name="proj", username="usr") user2 = mock.MagicMock(tenant_id="1", user_id="2", tenant_name="proj", username="usr") user3 = mock.MagicMock(tenant_id="2", user_id="3", tenant_name="proj", username="usr") user_list = [user1, user2, user3] class AuthRef(object): USER_ID_COUNT = 0 PROJECT_ID_COUNT = 0 @property def user_id(self): self.USER_ID_COUNT += 1 return user_list[self.USER_ID_COUNT - 1].user_id @property def project_id(self): self.PROJECT_ID_COUNT += 1 return user_list[self.PROJECT_ID_COUNT - 1].tenant_id mock_clients.return_value.keystone.auth_ref = AuthRef() mock_credential.side_effect = user_list context = { "task": mock.MagicMock(), "config": { 
"existing_users": user_list }, } existing_users.ExistingUsers(context).setup() self.assertIn("users", context) self.assertIn("tenants", context) self.assertIn("user_choice_method", context) self.assertEqual("random", context["user_choice_method"]) self.assertEqual(3, len(context["users"])) self.assertEqual( { "id": user1.user_id, "credential": user1, "tenant_id": user1.tenant_id }, context["users"][0] ) self.assertEqual(["1", "2"], sorted(context["tenants"].keys())) self.assertEqual({"id": "1", "name": user1.tenant_name}, context["tenants"]["1"]) self.assertEqual({"id": "2", "name": user3.tenant_name}, context["tenants"]["2"]) def test_cleanup(self): # NOTE(boris-42): Test that cleanup is not abstract existing_users.ExistingUsers({"task": mock.MagicMock()}).cleanup() rally-0.9.1/tests/unit/plugins/openstack/context/test_api_versions.py0000664000567000056710000001245513073417720027407 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import jsonschema
import mock

from rally.common import utils
from rally import exceptions
from rally.plugins.openstack.context import api_versions
from tests.unit import test


class OpenStackServicesTestCase(test.TestCase):
    """Tests for the ``api_versions`` context plugin."""

    def setUp(self):
        super(OpenStackServicesTestCase, self).setUp()
        # BUG FIX: keep the patcher and register its stop() as a cleanup.
        # The original called mock.patch(...).start() without ever
        # stopping the patch, leaking the patched rally.osclients.Clients
        # into every test that runs after this class (the cinder scenario
        # tests in this tree register the same kind of cleanup).
        patcher = mock.patch("rally.osclients.Clients")
        self.mock_clients = patcher.start()
        self.addCleanup(patcher.stop)
        osclient_kc = self.mock_clients.return_value.keystone
        self.mock_kc = osclient_kc.return_value
        self.service_catalog = osclient_kc.service_catalog
        self.service_catalog.get_endpoints.return_value = []
        self.mock_kc.services.list.return_value = []

    def test_validate_correct_config(self):
        """A well-formed per-client version config passes validation."""
        api_versions.OpenStackAPIVersions.validate({
            "nova": {"service_type": "compute", "version": 2},
            "cinder": {"service_name": "cinderv2", "version": 2},
            "neutron": {"service_type": "network"},
            "glance": {"service_name": "glance"},
            "heat": {"version": 1}
        })

    def test_validate_wrong_configs(self):
        """Each malformed config variant raises the expected error."""
        # Non-existing clients should be caught
        self.assertRaises(
            exceptions.PluginNotFound,
            api_versions.OpenStackAPIVersions.validate,
            {"invalid": {"service_type": "some_type"}})

        # Additional properties should be restricted
        self.assertRaises(
            jsonschema.ValidationError,
            api_versions.OpenStackAPIVersions.validate,
            {"nova": {"some_key": "some_value"}})

        # Setting service_type is allowed only
        # for those clients, which support it
        self.assertRaises(
            exceptions.ValidationError,
            api_versions.OpenStackAPIVersions.validate,
            {"keystone": {"service_type": "identity"}})

        # Setting service_name is allowed only
        # for those clients, which support it
        self.assertRaises(
            exceptions.ValidationError,
            api_versions.OpenStackAPIVersions.validate,
            {"keystone": {"service_name": "keystone"}})

        # Setting version is allowed only
        # for those clients, which support it
        self.assertRaises(
            exceptions.ValidationError,
            api_versions.OpenStackAPIVersions.validate,
            {"keystone": {"version": 1}})

        # Unsupported version should be caught
        self.assertRaises(
            exceptions.ValidationError,
            api_versions.OpenStackAPIVersions.validate,
            {"nova": {"version": 666}})

    def test_setup_with_wrong_service_name(self):
        """An unknown service_name fails setup (admin path checks both

        the service catalog and the keystone service list).
        """
        context = {
            "config": {api_versions.OpenStackAPIVersions.get_name(): {
                "nova": {"service_name": "service_name"}}},
            "admin": {"credential": mock.MagicMock()},
            "users": [{"credential": mock.MagicMock()}]}
        ctx = api_versions.OpenStackAPIVersions(context)
        self.assertRaises(exceptions.ValidationError, ctx.setup)
        self.service_catalog.get_endpoints.assert_called_once_with()
        self.mock_kc.services.list.assert_called_once_with()

    def test_setup_with_wrong_service_name_and_without_admin(self):
        """Without admin credentials service_name cannot be resolved."""
        context = {
            "config": {api_versions.OpenStackAPIVersions.get_name(): {
                "nova": {"service_name": "service_name"}}},
            "users": [{"credential": mock.MagicMock()}]}
        ctx = api_versions.OpenStackAPIVersions(context)
        self.assertRaises(exceptions.BenchmarkSetupFailure, ctx.setup)
        self.service_catalog.get_endpoints.assert_called_once_with()
        # Resolving by name requires admin; the services API must not be hit.
        self.assertFalse(self.mock_kc.services.list.called)

    def test_setup_with_wrong_service_type(self):
        """An unknown service_type fails setup via the service catalog."""
        context = {
            "config": {api_versions.OpenStackAPIVersions.get_name(): {
                "nova": {"service_type": "service_type"}}},
            "users": [{"credential": mock.MagicMock()}]}
        ctx = api_versions.OpenStackAPIVersions(context)
        self.assertRaises(exceptions.ValidationError, ctx.setup)
        self.service_catalog.get_endpoints.assert_called_once_with()

    def test_setup_with_service_name(self):
        """A known service_name is resolved to its service_type."""
        self.mock_kc.services.list.return_value = [
            utils.Struct(type="computev21", name="NovaV21")]
        name = api_versions.OpenStackAPIVersions.get_name()
        context = {
            "config": {name: {"nova": {"service_name": "NovaV21"}}},
            "admin": {"credential": mock.MagicMock()},
            "users": [{"credential": mock.MagicMock()}]}
        ctx = api_versions.OpenStackAPIVersions(context)
        ctx.setup()

        self.service_catalog.get_endpoints.assert_called_once_with()
        self.mock_kc.services.list.assert_called_once_with()

        self.assertEqual(
            "computev21",
            ctx.context["config"]["api_versions"]["nova"]["service_type"])
rally-0.9.1/tests/unit/plugins/openstack/context/cleanup/0000775000567000056710000000000013073420067024713 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/cleanup/__init__.py0000664000567000056710000000000013073417717027022 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/context/cleanup/test_admin.py0000664000567000056710000001275513073417720027430 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import jsonschema
import mock

from rally.common import utils
from rally.plugins.openstack.context.cleanup import admin
from rally.plugins.openstack.context.cleanup import base
from rally.plugins.openstack import scenario
from tests.unit import test


BASE = "rally.plugins.openstack.context.cleanup.admin"


class AdminCleanupTestCase(test.TestCase):

    @mock.patch("%s.manager" % BASE)
    def test_validate(self, mock_manager):
        """Known admin-required resource names pass validation."""
        mock_manager.list_resource_names.return_value = set(["a", "b", "c"])
        admin.AdminCleanup.validate(["a"])
        mock_manager.list_resource_names.assert_called_once_with(
            admin_required=True)

    @mock.patch("%s.manager" % BASE)
    def test_validate_no_such_cleanup(self, mock_manager):
        """An unknown resource name ("d") is rejected."""
        mock_manager.list_resource_names.return_value = set(["a", "b", "c"])
        self.assertRaises(base.NoSuchCleanupResources,
                          admin.AdminCleanup.validate, ["a", "d"])
        mock_manager.list_resource_names.assert_called_once_with(
            admin_required=True)

    def test_validate_invalid_config(self):
        # The config must be a list of resource names, not a mapping.
        self.assertRaises(jsonschema.ValidationError,
                          admin.AdminCleanup.validate, {})

    @mock.patch("rally.common.plugin.discover.itersubclasses")
    @mock.patch("%s.manager.find_resource_managers" % BASE,
                return_value=[mock.MagicMock(), mock.MagicMock()])
    @mock.patch("%s.manager.SeekAndDestroy" % BASE)
    def test_cleanup(self, mock_seek_and_destroy, mock_find_resource_managers,
                     mock_itersubclasses):
        """cleanup() runs SeekAndDestroy.exterminate() once per manager."""

        class ResourceClass(utils.RandomNameGeneratorMixin):
            pass

        mock_itersubclasses.return_value = [ResourceClass]

        ctx = {
            "config": {"admin_cleanup": ["a", "b"]},
            "admin": mock.MagicMock(),
            "users": mock.MagicMock(),
            "task": {"uuid": "task_id"}
        }

        admin_cleanup = admin.AdminCleanup(ctx)
        admin_cleanup.setup()
        admin_cleanup.cleanup()

        mock_itersubclasses.assert_called_once_with(scenario.OpenStackScenario)
        # Admin cleanup looks managers up with admin_required=True.
        mock_find_resource_managers.assert_called_once_with(("a", "b"), True)
        mock_seek_and_destroy.assert_has_calls([
            mock.call(mock_find_resource_managers.return_value[0],
                      ctx["admin"],
                      ctx["users"],
                      api_versions=None,
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate(),
            mock.call(mock_find_resource_managers.return_value[1],
                      ctx["admin"],
                      ctx["users"],
                      api_versions=None,
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate()
        ])

    @mock.patch("rally.common.plugin.discover.itersubclasses")
    @mock.patch("%s.manager.find_resource_managers" % BASE,
                return_value=[mock.MagicMock(), mock.MagicMock()])
    @mock.patch("%s.manager.SeekAndDestroy" % BASE)
    def test_cleanup_admin_with_api_versions(self,
                                             mock_seek_and_destroy,
                                             mock_find_resource_managers,
                                             mock_itersubclasses):
        """api_versions from the context config reach SeekAndDestroy."""

        class ResourceClass(utils.RandomNameGeneratorMixin):
            pass

        mock_itersubclasses.return_value = [ResourceClass]

        ctx = {
            "config": {"admin_cleanup": ["a", "b"],
                       "api_versions": {"cinder": {"version": "1",
                                                   "service_type": "volume"
                                                   }
                                        }
                       },
            "admin": mock.MagicMock(),
            "users": mock.MagicMock(),
            "task": mock.MagicMock()
        }

        admin_cleanup = admin.AdminCleanup(ctx)
        admin_cleanup.setup()
        admin_cleanup.cleanup()

        mock_itersubclasses.assert_called_once_with(scenario.OpenStackScenario)
        mock_find_resource_managers.assert_called_once_with(("a", "b"), True)
        mock_seek_and_destroy.assert_has_calls([
            mock.call(mock_find_resource_managers.return_value[0],
                      ctx["admin"],
                      ctx["users"],
                      api_versions=ctx["config"]["api_versions"],
                      resource_classes=[ResourceClass],
                      task_id=ctx["task"]["uuid"]),
            mock.call().exterminate(),
            mock.call(mock_find_resource_managers.return_value[1],
                      ctx["admin"],
                      ctx["users"],
                      api_versions=ctx["config"]["api_versions"],
                      resource_classes=[ResourceClass],
                      task_id=ctx["task"]["uuid"]),
            mock.call().exterminate()
        ])
import jsonschema
import mock

from rally.common import utils
from rally.plugins.openstack.context.cleanup import base
from rally.plugins.openstack.context.cleanup import user
from rally.plugins.openstack import scenario
from tests.unit import test


BASE = "rally.plugins.openstack.context.cleanup.user"


class UserCleanupTestCase(test.TestCase):

    @mock.patch("%s.manager" % BASE)
    def test_validate(self, mock_manager):
        """Known user-level resource names pass validation."""
        mock_manager.list_resource_names.return_value = set(["a", "b", "c"])
        user.UserCleanup.validate(["a"])
        mock_manager.list_resource_names.assert_called_once_with(
            admin_required=False)

    @mock.patch("%s.manager" % BASE)
    def test_validate_no_such_cleanup(self, mock_manager):
        """An unknown resource name ("d") is rejected."""
        mock_manager.list_resource_names.return_value = set(["a", "b", "c"])
        self.assertRaises(base.NoSuchCleanupResources,
                          user.UserCleanup.validate, ["a", "b", "d"])
        mock_manager.list_resource_names.assert_called_once_with(
            admin_required=False)

    def test_validate_invalid_config(self):
        # The config must be a list of resource names, not a mapping.
        self.assertRaises(jsonschema.ValidationError,
                          user.UserCleanup.validate, {})

    @mock.patch("rally.common.plugin.discover.itersubclasses")
    @mock.patch("%s.manager.find_resource_managers" % BASE,
                return_value=[mock.MagicMock(), mock.MagicMock()])
    @mock.patch("%s.manager.SeekAndDestroy" % BASE)
    def test_cleanup(self, mock_seek_and_destroy,
                     mock_find_resource_managers, mock_itersubclasses):
        """cleanup() destroys via each manager with no admin client."""

        class ResourceClass(utils.RandomNameGeneratorMixin):
            pass

        mock_itersubclasses.return_value = [ResourceClass]

        ctx = {
            "config": {"cleanup": ["a", "b"]},
            "users": mock.MagicMock(),
            "task": {"uuid": "task_id"}
        }

        admin_cleanup = user.UserCleanup(ctx)
        admin_cleanup.setup()
        admin_cleanup.cleanup()

        mock_itersubclasses.assert_called_once_with(scenario.OpenStackScenario)
        # User cleanup looks managers up with admin_required=False.
        mock_find_resource_managers.assert_called_once_with(("a", "b"), False)
        mock_seek_and_destroy.assert_has_calls([
            mock.call(mock_find_resource_managers.return_value[0],
                      None,
                      ctx["users"],
                      api_versions=None,
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate(),
            mock.call(mock_find_resource_managers.return_value[1],
                      None,
                      ctx["users"],
                      api_versions=None,
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate()
        ])

    @mock.patch("rally.common.plugin.discover.itersubclasses")
    @mock.patch("%s.manager.find_resource_managers" % BASE,
                return_value=[mock.MagicMock(), mock.MagicMock()])
    @mock.patch("%s.manager.SeekAndDestroy" % BASE)
    def test_cleanup_user_with_api_versions(
            self,
            mock_seek_and_destroy,
            mock_find_resource_managers,
            mock_itersubclasses):
        """api_versions from the context config reach SeekAndDestroy."""

        class ResourceClass(utils.RandomNameGeneratorMixin):
            pass

        mock_itersubclasses.return_value = [ResourceClass]

        # NOTE(review): this config uses the "admin_cleanup" key (and an
        # unused "admin" entry), apparently copy-pasted from the admin
        # test.  UserCleanup therefore sees no "cleanup" entry, which is
        # why find_resource_managers is asserted below with {} -- confirm
        # this is the intended scenario rather than an oversight.
        ctx = {
            "config": {"admin_cleanup": ["a", "b"],
                       "api_versions": {"cinder": {"version": "1",
                                                   "service_type": "volume"
                                                   }
                                        }
                       },
            "admin": mock.MagicMock(),
            "users": mock.MagicMock(),
            "task": {"uuid": "task_id"}
        }

        user_cleanup = user.UserCleanup(ctx)
        user_cleanup.setup()
        user_cleanup.cleanup()

        mock_itersubclasses.assert_called_once_with(scenario.OpenStackScenario)
        mock_find_resource_managers.assert_called_once_with({}, False)
        mock_seek_and_destroy.assert_has_calls([
            mock.call(mock_find_resource_managers.return_value[0],
                      None,
                      ctx["users"],
                      api_versions=ctx["config"]["api_versions"],
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate(),
            mock.call(mock_find_resource_managers.return_value[1],
                      None,
                      ctx["users"],
                      api_versions=ctx["config"]["api_versions"],
                      resource_classes=[ResourceClass],
                      task_id="task_id"),
            mock.call().exterminate()
        ])
rally-0.9.1/tests/unit/plugins/openstack/scenarios/0000775000567000056710000000000013073420067023566 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/watcher/0000775000567000056710000000000013073420067025223 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/watcher/__init__.py0000664000567000056710000000000013073417717027332 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/watcher/test_utils.py0000664000567000056710000000745013073417717030012 0ustar jenkinsjenkins00000000000000# Copyright 2016: Servionica LTD. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock
from oslo_config import cfg

from rally.plugins.openstack.scenarios.watcher import utils
from tests.unit import test

CONF = cfg.CONF


class WatcherScenarioTestCase(test.ScenarioTestCase):

    def test_create_audit_template(self):
        """_create_audit_template() forwards args and times the action."""
        watcher_scenario = utils.WatcherScenario(self.context)
        watcher_scenario.generate_random_name = mock.MagicMock(
            return_value="mock_name")
        watcher_scenario._create_audit_template("fake_goal", "fake_strategy")
        self.admin_clients(
            "watcher").audit_template.create.assert_called_once_with(
            goal="fake_goal", strategy="fake_strategy",
            name="mock_name")
        self._test_atomic_action_timer(watcher_scenario.atomic_actions(),
                                       "watcher.create_audit_template")

    def test_list_audit_templates(self):
        """_list_audit_templates() returns the client's listing."""
        audit_templates_list = []
        watcher_scenario = utils.WatcherScenario(self.context)
        self.admin_clients(
            "watcher").audit_template.list.return_value = audit_templates_list
        return_audit_templates_list = watcher_scenario._list_audit_templates()
        self.assertEqual(audit_templates_list, return_audit_templates_list)
        self._test_atomic_action_timer(watcher_scenario.atomic_actions(),
                                       "watcher.list_audit_templates")

    def test_delete_audit_template(self):
        """_delete_audit_template() deletes by template id."""
        watcher_scenario = utils.WatcherScenario(self.context)
        watcher_scenario._delete_audit_template("fake_audit_template")
        self.admin_clients(
            "watcher").audit_template.delete.assert_called_once_with(
            "fake_audit_template")
        self._test_atomic_action_timer(watcher_scenario.atomic_actions(),
                                       "watcher.delete_audit_template")

    def test_create_audit(self):
        """_create_audit() launches a ONESHOT audit and waits for it."""
        mock_audit_template = mock.Mock()
        watcher_scenario = utils.WatcherScenario(self.context)
        audit = watcher_scenario._create_audit(mock_audit_template)
        # The scenario must poll until the audit reaches SUCCEEDED (or
        # bail out on FAILED) using the configured interval/timeout.
        self.mock_wait_for_status.mock.assert_called_once_with(
            audit,
            ready_statuses=["SUCCEEDED"],
            failure_statuses=["FAILED"],
            status_attr="state",
            update_resource=self.mock_get_from_manager.mock.return_value,
            check_interval=CONF.benchmark.watcher_audit_launch_poll_interval,
            timeout=CONF.benchmark.watcher_audit_launch_timeout,
            id_attr="uuid")
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.admin_clients("watcher").audit.create.assert_called_once_with(
            audit_template_uuid=mock_audit_template, audit_type="ONESHOT")
        self._test_atomic_action_timer(watcher_scenario.atomic_actions(),
                                       "watcher.create_audit")

    def test_delete_audit(self):
        """_delete_audit() deletes by the audit's uuid."""
        mock_audit = mock.Mock()
        watcher_scenario = utils.WatcherScenario(self.context)
        watcher_scenario._delete_audit(mock_audit)
        self.admin_clients("watcher").audit.delete.assert_called_once_with(
            mock_audit.uuid)
        self._test_atomic_action_timer(watcher_scenario.atomic_actions(),
                                       "watcher.delete_audit")
import mock

from rally.plugins.openstack.scenarios.watcher import basic
from tests.unit import test


class WatcherTestCase(test.ScenarioTestCase):
    """Tests for the basic watcher scenarios."""

    def test_create_audit_template_and_delete(self):
        """The created template must be deleted by its uuid."""
        template = mock.Mock()
        sc = basic.CreateAuditTemplateAndDelete(self.context)
        sc._create_audit_template = mock.MagicMock(return_value=template)
        sc._delete_audit_template = mock.MagicMock()

        sc.run("goal", "strategy")

        sc._create_audit_template.assert_called_once_with("goal", "strategy")
        sc._delete_audit_template.assert_called_once_with(template.uuid)

    def test_list_audit_template(self):
        """run() lists templates with every filter defaulted."""
        sc = basic.ListAuditTemplates(self.context)
        sc._list_audit_templates = mock.MagicMock()

        sc.run()

        sc._list_audit_templates.assert_called_once_with(
            detail=False, goal=None, limit=None, name=None,
            sort_dir=None, sort_key=None, strategy=None)

    def test_create_audit_and_delete(self):
        """run() creates an audit, sleeps, then deletes that audit."""
        audit = mock.MagicMock()
        sc = basic.CreateAuditAndDelete(self.context)
        sc.context = mock.MagicMock()
        sc._create_audit = mock.MagicMock(return_value=audit)
        sc.sleep_between = mock.MagicMock()
        sc._delete_audit = mock.MagicMock()

        sc.run()

        sc._create_audit.assert_called_once_with(mock.ANY)
        sc._delete_audit.assert_called_once_with(audit)
import ddt
import mock

from rally import exceptions
from rally.plugins.openstack.scenarios.magnum import clusters
from tests.unit import test


@ddt.ddt
class MagnumClustersTestCase(test.ScenarioTestCase):
    """Tests for the magnum cluster scenarios."""

    @staticmethod
    def _get_context():
        # Minimal context carrying the per-tenant cluster_template that
        # the scenarios fall back to when no explicit uuid is given.
        context = test.get_test_context()
        context.update({
            "tenant": {
                "id": "rally_tenant_id",
                "cluster_template": "rally_cluster_template_uuid"
            }
        })
        return context

    @ddt.data(
        {"kwargs": {}},
        {"kwargs": {"fakearg": "f"}})
    @ddt.unpack
    def test_list_clusters(self, kwargs):
        """run() forwards its kwargs verbatim to _list_clusters().

        BUG FIX: @ddt.unpack was missing, so each datum dict was passed
        whole as the ``kwargs`` parameter and the scenario was actually
        invoked as run(kwargs=...), never exercising the intended empty
        and "fakearg" cases.  The sibling test_cluster_templates.py
        module already unpacks the identical data set.
        """
        scenario = clusters.ListClusters()
        scenario._list_clusters = mock.Mock()
        scenario.run(**kwargs)
        scenario._list_clusters.assert_called_once_with(**kwargs)

    def test_create_cluster_with_existing_ct_and_list_clusters(self):
        """An explicit cluster_template_uuid overrides the tenant's."""
        scenario = clusters.CreateAndListClusters()
        kwargs = {"cluster_template_uuid": "existing_cluster_template_uuid",
                  "fakearg": "f"}
        fake_cluster = mock.Mock()
        scenario._create_cluster = mock.Mock(return_value=fake_cluster)
        scenario._list_clusters = mock.Mock(return_value=[fake_cluster,
                                                          mock.Mock(),
                                                          mock.Mock()])

        # Positive case
        scenario.run(2, **kwargs)
        scenario._create_cluster.assert_called_once_with(
            "existing_cluster_template_uuid", 2, **kwargs)
        scenario._list_clusters.assert_called_once_with(**kwargs)

        # Negative case1: cluster isn't created
        scenario._create_cluster.return_value = None
        self.assertRaises(exceptions.RallyAssertionError,
                          scenario.run, 2, **kwargs)
        scenario._create_cluster.assert_called_with(
            "existing_cluster_template_uuid", 2, **kwargs)

        # Negative case2: created cluster not in the list of available
        # clusters
        scenario._create_cluster.return_value = mock.MagicMock()
        self.assertRaises(exceptions.RallyAssertionError,
                          scenario.run, 2, **kwargs)
        scenario._create_cluster.assert_called_with(
            "existing_cluster_template_uuid", 2, **kwargs)
        scenario._list_clusters.assert_called_with(**kwargs)

    def test_create_and_list_clusters(self):
        """Without an explicit uuid, the tenant's template is used."""
        context = self._get_context()
        scenario = clusters.CreateAndListClusters(context)
        fake_cluster = mock.Mock()
        kwargs = {"fakearg": "f"}
        scenario._create_cluster = mock.Mock(return_value=fake_cluster)
        scenario._list_clusters = mock.Mock(return_value=[fake_cluster,
                                                          mock.Mock(),
                                                          mock.Mock()])

        # Positive case
        scenario.run(2, **kwargs)
        scenario._create_cluster.assert_called_once_with(
            "rally_cluster_template_uuid", 2, **kwargs)
        scenario._list_clusters.assert_called_once_with(**kwargs)

        # Negative case1: cluster isn't created
        scenario._create_cluster.return_value = None
        self.assertRaises(exceptions.RallyAssertionError,
                          scenario.run, 2, **kwargs)
        scenario._create_cluster.assert_called_with(
            "rally_cluster_template_uuid", 2, **kwargs)

        # Negative case2: created cluster not in the list of available
        # clusters
        scenario._create_cluster.return_value = mock.MagicMock()
        self.assertRaises(exceptions.RallyAssertionError,
                          scenario.run, 2, **kwargs)
        scenario._create_cluster.assert_called_with(
            "rally_cluster_template_uuid", 2, **kwargs)
        scenario._list_clusters.assert_called_with(**kwargs)
import mock

from rally.plugins.openstack.scenarios.magnum import utils
from tests.unit import test

CONF = utils.CONF


class MagnumScenarioTestCase(test.ScenarioTestCase):

    def setUp(self):
        super(MagnumScenarioTestCase, self).setUp()
        # Shared fakes reused by the individual tests below.
        self.cluster_template = mock.Mock()
        self.cluster = mock.Mock()
        self.scenario = utils.MagnumScenario(self.context)

    def test_list_cluster_templates(self):
        """_list_cluster_templates() returns the client's listing."""
        fake_list = [self.cluster_template]

        self.clients("magnum").cluster_templates.list.return_value = fake_list
        return_ct_list = self.scenario._list_cluster_templates()
        self.assertEqual(fake_list, return_ct_list)

        self.clients("magnum").cluster_templates.list.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "magnum.list_cluster_templates")

    def test_create_cluster_template(self):
        """_create_cluster_template() names the template randomly."""
        self.scenario.generate_random_name = mock.Mock(
            return_value="generated_name")
        fake_ct = self.cluster_template
        self.clients("magnum").cluster_templates.create.return_value = fake_ct

        return_cluster_template = self.scenario._create_cluster_template(
            image="test_image",
            keypair="test_key",
            external_network="public",
            dns_nameserver="8.8.8.8",
            flavor="m1.large",
            docker_volume_size=50,
            network_driver="docker",
            coe="swarm")

        self.assertEqual(fake_ct, return_cluster_template)
        # Only the generated name is checked; the rest is passed through.
        _, kwargs = self.clients("magnum").cluster_templates.create.call_args
        self.assertEqual("generated_name", kwargs["name"])

        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "magnum.create_cluster_template")

    def test_list_clusters(self):
        """_list_clusters() forwards pagination args to the client."""
        return_clusters_list = self.scenario._list_clusters(limit="foo1")
        client = self.clients("magnum")
        client.clusters.list.assert_called_once_with(limit="foo1")
        self.assertEqual(client.clusters.list.return_value,
                         return_clusters_list)
        self._test_atomic_action_timer(
            self.scenario.atomic_actions(), "magnum.list_clusters")

    def test_create_cluster(self):
        """_create_cluster() creates and waits for CREATE_COMPLETE."""
        self.scenario.generate_random_name = mock.Mock(
            return_value="generated_name")
        self.clients("magnum").clusters.create.return_value = self.cluster
        return_cluster = self.scenario._create_cluster(
            cluster_template="generated_uuid", node_count=2)
        # The scenario must poll until the cluster is CREATE_COMPLETE
        # using the configured poll interval and timeout.
        self.mock_wait_for_status.mock.assert_called_once_with(
            self.cluster,
            ready_statuses=["CREATE_COMPLETE"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            check_interval=CONF.benchmark.
            magnum_cluster_create_poll_interval,
            timeout=CONF.benchmark.magnum_cluster_create_timeout,
            id_attr="uuid")
        _, kwargs = self.clients("magnum").clusters.create.call_args
        self.assertEqual("generated_name", kwargs["name"])
        self.assertEqual("generated_uuid", kwargs["cluster_template_id"])
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.assertEqual(
            self.mock_wait_for_status.mock.return_value, return_cluster)
        self._test_atomic_action_timer(
            self.scenario.atomic_actions(), "magnum.create_cluster")
import ddt
import mock

from rally.plugins.openstack.scenarios.magnum import cluster_templates
from tests.unit import test


@ddt.ddt
class MagnumClusterTemplatesTestCase(test.TestCase):
    """Tests for the ListClusterTemplates scenario."""

    @ddt.data(
        {"kwargs": {}},
        {"kwargs": {"fakearg": "f"}})
    @ddt.unpack
    def test_list_cluster_templates(self, kwargs):
        # run() must forward every keyword argument untouched to the
        # _list_cluster_templates() helper.
        list_mock = mock.Mock()
        scenario = cluster_templates.ListClusterTemplates()
        scenario._list_cluster_templates = list_mock

        scenario.run(**kwargs)

        list_mock.assert_called_once_with(**kwargs)
import mock

from rally.plugins.openstack.scenarios.cinder import volume_backups
from tests.unit import test


class CinderBackupTestCase(test.ScenarioTestCase):

    def test_create_incremental_volume_backup(self):
        """The scenario creates one volume plus two backups (full +

        incremental) and, with do_delete=True, removes them all.
        """
        fake_volume = mock.MagicMock()
        fake_backup = mock.MagicMock()
        scenario = volume_backups.CreateIncrementalVolumeBackup(self.context)
        scenario._create_volume = mock.MagicMock(return_value=fake_volume)
        scenario._create_backup = mock.MagicMock(return_value=fake_backup)
        scenario._delete_volume = mock.MagicMock()
        scenario._delete_backup = mock.MagicMock()

        volume_kwargs = {"some_var": "zaq"}
        backup_kwargs = {"incremental": True}

        scenario.run(1, do_delete=True, create_volume_kwargs=volume_kwargs,
                     create_backup_kwargs=backup_kwargs)

        self.assertEqual(2, scenario._create_backup.call_count)
        scenario._create_volume.assert_called_once_with(1, **volume_kwargs)
        # BUG FIX: the original called assert_has_calls(fake_backup),
        # passing the bare mock instead of a list of calls; iterating a
        # MagicMock yields an empty sequence, so that assertion checked
        # nothing.  Both created backups are ``fake_backup``, so two
        # delete calls with it are expected.
        scenario._delete_backup.assert_has_calls(
            [mock.call(fake_backup), mock.call(fake_backup)])
        scenario._delete_volume.assert_called_once_with(fake_volume)
import mock
from oslo_config import cfg

from rally import exceptions
from rally import osclients
from rally.plugins.openstack.scenarios.cinder import utils
from tests.unit import fakes
from tests.unit import test

CINDER_UTILS = "rally.plugins.openstack.scenarios.cinder.utils"
CONF = cfg.CONF


class CinderScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for the CinderScenario atomic-action helpers.

    Each test drives one ``_<action>`` helper against fully mocked
    cinder/glance clients and then verifies two things: the expected
    client (or wrapper) call was made, and the matching atomic-action
    timer was recorded.  ``self.mock_wait_for``/``self.mock_wait_for_status``
    and ``self.mock_get_from_manager`` are fixtures provided by
    ScenarioTestCase.
    """

    def setUp(self):
        super(CinderScenarioTestCase, self).setUp()
        # The cinder wrapper is patched for the whole test case: helpers
        # that create/update resources go through the wrapper rather than
        # the raw client, so assertions below target ``self.mock_wrap``.
        wrap = mock.patch("rally.plugins.openstack.wrappers.cinder.wrap")
        self.mock_wrap = wrap.start()
        self.addCleanup(self.mock_wrap.stop)
        self.scenario = utils.CinderScenario(
            self.context,
            clients=osclients.Clients(
                fakes.FakeUserContext.user["credential"]))

    def test__list_volumes(self):
        return_volumes_list = self.scenario._list_volumes()
        self.assertEqual(self.clients("cinder").volumes.list.return_value,
                         return_volumes_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_volumes")

    def test__list_types(self):
        return_types_list = self.scenario._list_types()
        self.assertEqual(self.clients("cinder").volume_types.list.return_value,
                         return_types_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_types")

    def test__get_volume(self):
        volume = fakes.FakeVolume()
        self.assertEqual(self.clients("cinder").volumes.get.return_value,
                         self.scenario._get_volume(volume.id))
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.get_volume")

    def test__list_snapshots(self):
        return_snapshots_list = self.scenario._list_snapshots()
        self.assertEqual(
            self.clients("cinder").volume_snapshots.list.return_value,
            return_snapshots_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_snapshots")

    def test__list_transfers(self):
        return_transfers_list = self.scenario._list_transfers()
        self.assertEqual(
            self.clients("cinder").transfers.list.return_value,
            return_transfers_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_transfers")

    def test__set_metadata(self):
        # sets=2/set_size=4 must translate into exactly 2 set_metadata
        # calls of 4 keys each, all on the same volume.
        volume = fakes.FakeVolume()
        self.scenario._set_metadata(volume, sets=2, set_size=4)

        calls = self.clients("cinder").volumes.set_metadata.call_args_list
        self.assertEqual(len(calls), 2)

        for call in calls:
            call_volume, metadata = call[0]
            self.assertEqual(call_volume, volume)
            self.assertEqual(len(metadata), 4)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.set_4_metadatas_2_times")

    def test__delete_metadata(self):
        volume = fakes.FakeVolume()

        keys = ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"]
        self.scenario._delete_metadata(volume, keys, deletes=3, delete_size=4)

        calls = self.clients("cinder").volumes.delete_metadata.call_args_list
        self.assertEqual(len(calls), 3)

        # Every deleted key must come from ``keys`` and no key may be
        # deleted twice across the three calls.
        all_deleted = []
        for call in calls:
            call_volume, del_keys = call[0]
            self.assertEqual(call_volume, volume)
            self.assertEqual(len(del_keys), 4)
            for key in del_keys:
                self.assertIn(key, keys)
                self.assertNotIn(key, all_deleted)
                all_deleted.append(key)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.delete_4_metadatas_3_times")

    def test__delete_metadata_not_enough_keys(self):
        # 2 deletes of 3 keys each need 6 keys; only 5 are supplied.
        volume = fakes.FakeVolume()

        keys = ["a", "b", "c", "d", "e"]
        self.assertRaises(exceptions.InvalidArgumentsException,
                          self.scenario._delete_metadata, volume, keys,
                          deletes=2, delete_size=3)

    def test__create_volume(self):
        return_volume = self.scenario._create_volume(1)
        self.mock_wait_for.mock.assert_called_once_with(
            self.mock_wrap.return_value.create_volume.return_value,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=CONF.benchmark.cinder_volume_create_timeout,
            check_interval=CONF.benchmark.cinder_volume_create_poll_interval
        )
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.assertEqual(self.mock_wait_for.mock.return_value, return_volume)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_volume")

    @mock.patch("rally.plugins.openstack.scenarios.cinder.utils.random")
    def test__create_volume_with_size_range(self, mock_random):
        # A dict size means "pick a random int in [min, max]"; the mocked
        # randint pins the choice to 3.
        mock_random.randint.return_value = 3
        return_volume = self.scenario._create_volume(
            size={"min": 1, "max": 5},
            display_name="TestVolume")

        self.mock_wrap.return_value.create_volume.assert_called_once_with(
            3, display_name="TestVolume")

        self.mock_wait_for.mock.assert_called_once_with(
            self.mock_wrap.return_value.create_volume.return_value,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=CONF.benchmark.cinder_volume_create_timeout,
            check_interval=CONF.benchmark.cinder_volume_create_poll_interval
        )
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.assertEqual(self.mock_wait_for.mock.return_value, return_volume)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_volume")

    def test__update_volume(self):
        fake_volume = mock.MagicMock()
        volume_update_args = {"display_name": "_updated",
                              "display_description": "_updated"}

        self.scenario._update_volume(fake_volume, **volume_update_args)
        self.mock_wrap.return_value.update_volume.assert_called_once_with(
            fake_volume,
            display_name="_updated",
            display_description="_updated")
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.update_volume")

    def test__update_readonly_flag(self):
        fake_volume = mock.MagicMock()
        self.scenario._update_readonly_flag(fake_volume, "fake_flag")
        self.clients(
            "cinder").volumes.update_readonly_flag.assert_called_once_with(
            fake_volume, "fake_flag")
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.update_readonly_flag")

    def test__delete_volume(self):
        cinder = mock.Mock()
        self.scenario._delete_volume(cinder)
        cinder.delete.assert_called_once_with()
        # Deletion reuses the volume-create timeout/poll settings.
        self.mock_wait_for_status.mock.assert_called_once_with(
            cinder,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)
        self.mock_get_from_manager.mock.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.delete_volume")

    @mock.patch("rally.plugins.openstack.scenarios.cinder.utils.random")
    def test__extend_volume_with_size_range(self, mock_random):
        volume = mock.Mock()
        mock_random.randint.return_value = 3
        self.clients("cinder").volumes.extend.return_value = volume

        self.scenario._extend_volume(volume, new_size={"min": 1, "max": 5})

        volume.extend.assert_called_once_with(volume, 3)
        self.mock_wait_for.mock.assert_called_once_with(
            volume,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=CONF.benchmark.cinder_volume_create_timeout,
            check_interval=CONF.benchmark.cinder_volume_create_poll_interval
        )
        self.mock_get_from_manager.mock.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.extend_volume")

    def test__extend_volume(self):
        volume = mock.Mock()
        self.clients("cinder").volumes.extend.return_value = volume
        self.scenario._extend_volume(volume, 2)
        self.mock_wait_for.mock.assert_called_once_with(
            volume,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=CONF.benchmark.cinder_volume_create_timeout,
            check_interval=CONF.benchmark.cinder_volume_create_poll_interval
        )
        self.mock_get_from_manager.mock.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.extend_volume")

    @mock.patch("rally.plugins.openstack.wrappers.glance.wrap")
    def test__upload_volume_to_image(self, mock_wrap):
        # upload_to_image returns (response, body); the body carries the
        # new glance image id which the helper then waits on.
        volume = mock.Mock()
        image = {"os-volume_upload_image": {"image_id": 1}}
        volume.upload_to_image.return_value = (None, image)
        self.clients("cinder").images.get.return_value = image

        self.scenario.generate_random_name = mock.Mock(
            return_value="test_vol")
        self.scenario._upload_volume_to_image(volume, False,
                                              "container", "disk")

        volume.upload_to_image.assert_called_once_with(False, "test_vol",
                                                       "container", "disk")
        # Two waits: volume back to "available", then image "active".
        self.mock_wait_for.mock.assert_has_calls([
            mock.call(
                volume,
                ready_statuses=["available"],
                update_resource=self.mock_get_from_manager.mock.return_value,
                timeout=CONF.benchmark.cinder_volume_create_timeout,
                check_interval=CONF.benchmark.
                cinder_volume_create_poll_interval),
            mock.call(
                self.clients("glance").images.get.return_value,
                ready_statuses=["active"],
                update_resource=mock_wrap.return_value.get_image,
                timeout=CONF.benchmark.glance_image_create_timeout,
                check_interval=CONF.benchmark.
                glance_image_create_poll_interval)
        ])
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.clients("glance").images.get.assert_called_once_with(1)

    def test__create_snapshot(self):
        return_snapshot = self.scenario._create_snapshot("uuid", False)

        self.mock_wait_for.mock.assert_called_once_with(
            self.mock_wrap.return_value.create_snapshot.return_value,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.assertEqual(self.mock_wait_for.mock.return_value,
                         return_snapshot)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_snapshot")

    def test__delete_snapshot(self):
        snapshot = mock.Mock()
        self.scenario._delete_snapshot(snapshot)
        snapshot.delete.assert_called_once_with()
        self.mock_wait_for_status.mock.assert_called_once_with(
            snapshot,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)
        self.mock_get_from_manager.mock.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.delete_snapshot")

    def test__create_backup(self):
        return_backup = self.scenario._create_backup("uuid")

        self.mock_wait_for.mock.assert_called_once_with(
            self.clients("cinder").backups.create.return_value,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)
        self.mock_get_from_manager.mock.assert_called_once_with()
        self.assertEqual(self.mock_wait_for.mock.return_value, return_backup)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_backup")

    def test__delete_backup(self):
        backup = mock.Mock()
        self.scenario._delete_backup(backup)
        backup.delete.assert_called_once_with()
        self.mock_wait_for_status.mock.assert_called_once_with(
            backup,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=cfg.CONF.benchmark.cinder_volume_create_timeout,
            check_interval=cfg.CONF.benchmark
            .cinder_volume_create_poll_interval)
        self.mock_get_from_manager.mock.assert_called_once_with()
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.delete_backup")

    def test__restore_backup(self):
        # NOTE(mdovgal): added for pep8 visual indent test passing
        bench_cfg = cfg.CONF.benchmark
        backup = mock.Mock()
        restore = mock.Mock()
        self.clients("cinder").restores.restore.return_value = backup
        self.clients("cinder").backups.get.return_value = backup
        self.clients("cinder").volumes.get.return_value = restore

        return_restore = self.scenario._restore_backup(backup.id, None)

        # Two waits: the backup becoming available again, then the newly
        # restored volume becoming available.
        self.mock_wait_for.mock.assert_has_calls([
            mock.call(
                backup,
                ready_statuses=["available"],
                update_resource=self.mock_get_from_manager.mock.return_value,
                timeout=bench_cfg.cinder_backup_restore_timeout,
                check_interval=bench_cfg.cinder_backup_restore_poll_interval),
            mock.call(
                restore,
                ready_statuses=["available"],
                update_resource=self.mock_get_from_manager.mock.return_value,
                timeout=bench_cfg.cinder_volume_create_timeout,
                check_interval=bench_cfg.cinder_volume_create_poll_interval)
        ])
        self.mock_get_from_manager.mock.assert_has_calls([mock.call(),
                                                          mock.call()])
        self.assertEqual(self.mock_wait_for.mock.return_value,
                         return_restore)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.restore_backup")

    def test__list_backups(self):
        return_backups_list = self.scenario._list_backups()
        self.assertEqual(
            self.clients("cinder").backups.list.return_value,
            return_backups_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_backups")

    def test__get_random_server(self):
        servers = [1, 2, 3]
        context = {"user": {"tenant_id": "fake"},
                   "users": [{"tenant_id": "fake",
                              "users_per_tenant": 1}],
                   "tenant": {"id": "fake", "servers": servers}}
        self.scenario.context = context
        self.scenario.clients = mock.Mock()
        # servers.get echoes its argument so the returned id must be one
        # of the server ids stored in the tenant context.
        self.scenario.clients("nova").servers.get = mock.Mock(
            side_effect=lambda arg: arg)

        server_id = self.scenario.get_random_server()

        self.assertIn(server_id, servers)

    def test__create_volume_type(self, **kwargs):
        random_name = "random_name"
        self.scenario.generate_random_name = mock.Mock(
            return_value=random_name)

        result = self.scenario._create_volume_type()
        self.assertEqual(
            self.admin_clients("cinder").volume_types.create.return_value,
            result)
        admin_clients = self.admin_clients("cinder")
        admin_clients.volume_types.create.assert_called_once_with(
            name="random_name")
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_volume_type")

    def test__delete_encryption_type(self):
        # Deleting encryption types is expected to fail with a dedicated
        # exception in this scenario helper.
        volume_type = mock.Mock()
        self.assertRaises(exceptions.EncryptionTypeDeleteException,
                          self.scenario._delete_encryption_type,
                          volume_type)

    def test__create_encryption_type(self):
        volume_type = mock.Mock()
        specs = {
            "provider": "foo_pro",
            "cipher": "foo_cip",
            "key_size": 512,
            "control_location": "foo_con"
        }
        result = self.scenario._create_encryption_type(volume_type, specs)

        self.assertEqual(
            self.admin_clients(
                "cinder").volume_encryption_types.create.return_value, result)
        self.admin_clients(
            "cinder").volume_encryption_types.create.assert_called_once_with(
            volume_type, specs)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.create_encryption_type")

    def test__list_encryption_type(self):
        return_encryption_types_list = self.scenario._list_encryption_type()
        client = self.admin_clients("cinder")
        self.assertEqual(client.volume_encryption_types.list.return_value,
                         return_encryption_types_list)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.list_encryption_type")

    def test__delete_volume_type(self):
        volume_type = mock.Mock()
        self.scenario._delete_volume_type(volume_type)
        admin_clients = self.admin_clients("cinder")
        admin_clients.volume_types.delete.assert_called_once_with(
            volume_type)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.delete_volume_type")

    def test__transfer_create(self):
        fake_volume = mock.MagicMock()
        random_name = "random_name"
        self.scenario.generate_random_name = mock.MagicMock(
            return_value=random_name)
        result = self.scenario._transfer_create(fake_volume.id)
        self.assertEqual(
            self.clients("cinder").transfers.create.return_value,
            result)
        self.clients("cinder").transfers.create.assert_called_once_with(
            fake_volume.id, random_name)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.transfer_create")

    def test__transfer_accept(self):
        fake_transfer = mock.MagicMock()
        result = self.scenario._transfer_accept(fake_transfer.id, "fake_key")
        self.assertEqual(
            self.clients("cinder").transfers.accept.return_value,
            result)
        self.clients("cinder").transfers.accept.assert_called_once_with(
            fake_transfer.id, "fake_key")
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.transfer_accept")

    def test__set_volume_type_keys(self):
        volume_type = mock.MagicMock()
        volume_type.set_keys = mock.MagicMock()
        volume_type_key = {"volume_backend_name": "LVM_iSCSI"}
        result = self.scenario._set_volume_type_keys(volume_type,
                                                     volume_type_key)

        self.assertEqual(volume_type.set_keys.return_value, result)
        volume_type.set_keys.assert_called_once_with(volume_type_key)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "cinder.set_volume_type_keys")
# Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt import mock from rally.plugins.openstack.scenarios.cinder import volumes from tests.unit import test CINDER_VOLUMES = ("rally.plugins.openstack.scenarios.cinder.volumes" ".CinderVolumes") class fake_type(object): name = "fake" @ddt.ddt class CinderServersTestCase(test.ScenarioTestCase): def _get_context(self): context = test.get_test_context() context.update({ "user": {"tenant_id": "fake", "credential": mock.MagicMock()}, "tenant": {"id": "fake", "name": "fake", "volumes": [{"id": "uuid", "size": 1}], "servers": [1]}}) return context def test_create_and_list_volume(self): scenario = volumes.CreateAndListVolume(self.context) scenario._create_volume = mock.MagicMock() scenario._list_volumes = mock.MagicMock() scenario.run(1, True, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") scenario._list_volumes.assert_called_once_with(True) def test_create_and_get_volume(self): scenario = volumes.CreateAndGetVolume(self.context) scenario._create_volume = mock.MagicMock() scenario._get_volume = mock.MagicMock() scenario.run(1, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") scenario._get_volume.assert_called_once_with( scenario._create_volume.return_value.id) def test_list_volumes(self): scenario = volumes.ListVolumes(self.context) scenario._list_volumes = mock.MagicMock() scenario.run(True) scenario._list_volumes.assert_called_once_with(True) def test_list_types(self): scenario = volumes.ListTypes(self.context) scenario._list_types = mock.MagicMock() scenario.run(None, None) scenario._list_types.assert_called_once_with(None, None) def test_list_transfers(self): scenario = volumes.ListTransfers(self.context) scenario._list_transfers = mock.MagicMock() scenario.run(True, None) scenario._list_transfers.assert_called_once_with(True, None) def test_create_and_update_volume(self): volume_update_args = {"dispaly_name": "_updated"} scenario = volumes.CreateAndUpdateVolume(self.context) fake_volume = mock.MagicMock() 
scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._update_volume = mock.MagicMock() scenario.run(1, update_volume_kwargs=volume_update_args) scenario._create_volume.assert_called_once_with(1) scenario._update_volume.assert_called_once_with(fake_volume, **volume_update_args) def test_create_volume_and_update_readonly_flag(self): scenario = volumes.CreateVolumeAndUpdateReadonlyFlag(self.context) fake_volume = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._update_readonly_flag = mock.MagicMock() scenario.run(1, None, True, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") scenario._update_readonly_flag.assert_called_once_with( fake_volume.id, True) def test_create_and_delete_volume(self): fake_volume = mock.MagicMock() scenario = volumes.CreateAndDeleteVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.sleep_between = mock.MagicMock() scenario._delete_volume = mock.MagicMock() scenario.run(size=1, min_sleep=10, max_sleep=20, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") scenario.sleep_between.assert_called_once_with(10, 20) scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_volume(self): fake_volume = mock.MagicMock() scenario = volumes.CreateVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.run(1, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") def test_create_volume_and_modify_metadata(self): scenario = volumes.ModifyVolumeMetadata(self._get_context()) scenario._set_metadata = mock.Mock() scenario._delete_metadata = mock.Mock() scenario.run(sets=5, set_size=4, deletes=3, delete_size=2) scenario._set_metadata.assert_called_once_with("uuid", 5, 4) scenario._delete_metadata.assert_called_once_with( "uuid", scenario._set_metadata.return_value, 3, 2) def test_create_and_extend_volume(self): 
fake_volume = mock.MagicMock() scenario = volumes.CreateAndExtendVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._extend_volume = mock.MagicMock(return_value=fake_volume) scenario.sleep_between = mock.MagicMock() scenario._delete_volume = mock.MagicMock() scenario.run(1, 2, 10, 20, fakearg="f") scenario._create_volume.assert_called_once_with(1, fakearg="f") self.assertTrue(scenario._extend_volume.called) scenario.sleep_between.assert_called_once_with(10, 20) scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_from_image_and_delete_volume(self): fake_volume = mock.MagicMock() scenario = volumes.CreateAndDeleteVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario.run(1, image="fake_image") scenario._create_volume.assert_called_once_with(1, imageRef="fake_image") scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_volume_from_image(self): fake_volume = mock.MagicMock() scenario = volumes.CreateVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.run(1, image="fake_image") scenario._create_volume.assert_called_once_with(1, imageRef="fake_image") def test_create_volume_from_image_and_list(self): fake_volume = mock.MagicMock() scenario = volumes.CreateAndListVolume(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._list_volumes = mock.MagicMock() scenario.run(1, True, "fake_image") scenario._create_volume.assert_called_once_with(1, imageRef="fake_image") scenario._list_volumes.assert_called_once_with(True) def test_create_from_volume_and_delete_volume(self): fake_volume = mock.MagicMock() vol_size = 1 scenario = volumes.CreateFromVolumeAndDeleteVolume(self._get_context()) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario.run(vol_size) 
scenario._create_volume.assert_called_once_with(1, source_volid="uuid") scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_and_delete_snapshot(self): fake_snapshot = mock.MagicMock() scenario = volumes.CreateAndDeleteSnapshot(self._get_context()) scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario.sleep_between = mock.MagicMock() scenario._delete_snapshot = mock.MagicMock() scenario.run(False, 10, 20, fakearg="f") scenario._create_snapshot.assert_called_once_with("uuid", force=False, fakearg="f") scenario.sleep_between.assert_called_once_with(10, 20) scenario._delete_snapshot.assert_called_once_with(fake_snapshot) def test_create_and_list_snapshots(self): fake_snapshot = mock.MagicMock() scenario = volumes.CreateAndListSnapshots(self._get_context()) scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._list_snapshots = mock.MagicMock() scenario.run(False, True, fakearg="f") scenario._create_snapshot.assert_called_once_with("uuid", force=False, fakearg="f") scenario._list_snapshots.assert_called_once_with(True) def test_create_and_attach_volume(self): fake_volume = mock.MagicMock() fake_server = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateAndAttachVolume(self.context) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() volume_args = {"some_key": "some_val"} vm_args = {"some_key": "some_val"} scenario.run(10, "img", "0", create_volume_params=volume_args, create_vm_params=vm_args) scenario._attach_volume.assert_called_once_with(fake_server, fake_volume) scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attach) 
scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_server.assert_called_once_with(fake_server) def test_create_and_upload_volume_to_image(self): fake_volume = mock.Mock() fake_image = mock.Mock() scenario = volumes.CreateAndUploadVolumeToImage(self.context) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._upload_volume_to_image = mock.MagicMock( return_value=fake_image) scenario._delete_volume = mock.MagicMock() scenario._delete_image = mock.MagicMock() scenario.run(2, image="img", container_format="fake", disk_format="disk", do_delete=False, fakeargs="fakeargs") scenario._create_volume.assert_called_once_with(2, imageRef="img", fakeargs="fakeargs") scenario._upload_volume_to_image.assert_called_once_with(fake_volume, False, "fake", "disk") scenario._create_volume.reset_mock() scenario._upload_volume_to_image.reset_mock() scenario.run(1, image=None, do_delete=True, fakeargs="fakeargs") scenario._create_volume.assert_called_once_with(1, fakeargs="fakeargs") scenario._upload_volume_to_image.assert_called_once_with(fake_volume, False, "bare", "raw") scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_image.assert_called_once_with(fake_image) def test_create_snapshot_and_attach_volume(self): fake_volume = mock.MagicMock() fake_snapshot = mock.MagicMock() fake_server = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateSnapshotAndAttachVolume(self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() self.clients("nova").servers.get = mock.MagicMock( 
return_value=fake_server) scenario.run() self.assertTrue(scenario._create_volume.called) scenario._create_snapshot.assert_called_once_with(fake_volume.id, False) scenario._delete_snapshot.assert_called_once_with(fake_snapshot) scenario._attach_volume.assert_called_once_with(fake_server, fake_volume) scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attach) scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_snapshot_and_attach_volume_use_volume_type_with_name(self): fake_volume = mock.MagicMock() fake_snapshot = mock.MagicMock() fake_server = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateSnapshotAndAttachVolume(self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() fake = fake_type() self.clients("cinder").volume_types.list = mock.MagicMock( return_value=[fake]) self.clients("nova").servers.get = mock.MagicMock( return_value=fake_server) scenario.run(volume_type="fake_volume_type") # Make sure create volume's second arg was the correct volume type. 
# fake or none (randomly selected) self.assertTrue(scenario._create_volume.called) vol_type = scenario._create_volume.call_args_list[0][1]["volume_type"] self.assertEqual(vol_type, "fake_volume_type") scenario._create_snapshot.assert_called_once_with(fake_volume.id, False) scenario._delete_snapshot.assert_called_once_with(fake_snapshot) scenario._attach_volume.assert_called_once_with(fake_server, fake_volume) scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attach) scenario._delete_volume.assert_called_once_with(fake_volume) def test_create_nested_snapshots_and_attach_volume(self): fake_volume = mock.MagicMock() fake_snapshot = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( context=self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() scenario.run() volume_count = scenario._create_volume.call_count snapshots_count = scenario._create_snapshot.call_count attached_count = scenario._attach_volume.call_count self.assertEqual(scenario._delete_volume.call_count, volume_count) self.assertEqual(scenario._delete_snapshot.call_count, snapshots_count) self.assertEqual(scenario._detach_volume.call_count, attached_count) def test_create_nested_snapshots_and_attach_volume_kwargs(self): fake_volume = mock.MagicMock() fake_snapshot = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( context=self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = 
mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() volume_kwargs = {"volume_type": "type1"} scenario.run(size={"min": 1, "max": 1}, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) self.assertEqual(fake_volume, scenario._create_volume.return_value) def test_create_nested_snapshots_and_attach_volume_snapshot_kwargs(self): fake_volume = mock.MagicMock() fake_volume.id = "FAKE_ID" fake_snapshot = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( context=self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() volume_kwargs = {"volume_type": "type1"} snapshot_kwargs = {"name": "snapshot1", "description": "snaphot one"} scenario.run(size={"min": 1, "max": 1}, create_volume_kwargs=volume_kwargs, create_snapshot_kwargs=snapshot_kwargs) scenario._create_snapshot.assert_called_once_with(fake_volume.id, False, **snapshot_kwargs) self.assertEqual(fake_snapshot, scenario._create_snapshot.return_value) def test_create_nested_snapshots_and_attach_volume_deprecate_kwargs(self): fake_volume = mock.MagicMock() fake_volume.id = "FAKE_ID" fake_snapshot = mock.MagicMock() fake_attach = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=fake_attach) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) 
scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() volume_kwargs = {"volume_type": "type1"} snapshot_kwargs = {"name": "snapshot1", "description": "snaphot one"} scenario.run(size={"min": 1, "max": 1}, create_volume_kwargs=volume_kwargs, **snapshot_kwargs) scenario._create_snapshot.assert_called_once_with(fake_volume.id, False, **snapshot_kwargs) self.assertEqual(fake_snapshot, scenario._create_snapshot.return_value) def test_create_nested_snapshots_calls_order(self): fake_volume1 = mock.MagicMock() fake_volume2 = mock.MagicMock() fake_snapshot1 = mock.MagicMock() fake_snapshot2 = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( self._get_context()) scenario._attach_volume = mock.MagicMock(return_value=mock.MagicMock()) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock( side_effect=[fake_volume1, fake_volume2]) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock( side_effect=[fake_snapshot1, fake_snapshot2]) scenario._delete_snapshot = mock.MagicMock() scenario.run(nested_level=2) vol_delete_calls = [mock.call(fake_volume2), mock.call(fake_volume1)] snap_delete_calls = [mock.call(fake_snapshot2), mock.call(fake_snapshot1)] scenario._delete_volume.assert_has_calls(vol_delete_calls) scenario._delete_snapshot.assert_has_calls(snap_delete_calls) @mock.patch("rally.plugins.openstack.scenarios.cinder.volumes.random") def test_create_nested_snapshots_check_resources_size(self, mock_random): mock_random.randint.return_value = 3 fake_volume = mock.MagicMock() fake_snapshot = mock.MagicMock() fake_server = mock.MagicMock() scenario = volumes.CreateNestedSnapshotsAndAttachVolume( self._get_context()) scenario.get_random_server = mock.MagicMock(return_value=fake_server) scenario._attach_volume = 
mock.MagicMock(return_value=mock.MagicMock()) scenario._detach_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._delete_snapshot = mock.MagicMock() scenario.run(nested_level=2) # NOTE: One call for random size random_call_count = mock_random.randint.call_count self.assertEqual(1, random_call_count) calls = scenario._create_volume.mock_calls expected_calls = [mock.call(3)] expected_calls.extend( [mock.call(3, snapshot_id=fake_snapshot.id)]) self.assertEqual(expected_calls, calls) def test_create_volume_backup(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() scenario = volumes.CreateVolumeBackup(self.context) self._get_scenario(scenario, fake_volume, fake_backup) volume_kwargs = {"some_var": "zaq"} scenario.run(1, do_delete=True, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_backup.assert_called_once_with(fake_backup) def test_create_volume_backup_no_delete(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() scenario = volumes.CreateVolumeBackup(self.context) self._get_scenario(scenario, fake_volume, fake_backup) volume_kwargs = {"some_var": "zaq"} scenario.run(1, do_delete=False, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) self.assertFalse(scenario._delete_volume.called) self.assertFalse(scenario._delete_backup.called) def _get_scenario(self, scenario, fake_volume, fake_backup, fake_restore=None): scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._create_backup = 
mock.MagicMock(return_value=fake_backup) scenario._restore_backup = mock.MagicMock(return_value=fake_restore) scenario._list_backups = mock.MagicMock() scenario._delete_volume = mock.MagicMock() scenario._delete_backup = mock.MagicMock() def test_create_and_restore_volume_backup(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() fake_restore = mock.MagicMock() scenario = volumes.CreateAndRestoreVolumeBackup(self.context) self._get_scenario(scenario, fake_volume, fake_backup, fake_restore) volume_kwargs = {"some_var": "zaq"} scenario.run(1, do_delete=True, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) scenario._restore_backup.assert_called_once_with(fake_backup.id) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_backup.assert_called_once_with(fake_backup) def test_create_and_restore_volume_backup_no_delete(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() fake_restore = mock.MagicMock() scenario = volumes.CreateAndRestoreVolumeBackup(self.context) self._get_scenario(scenario, fake_volume, fake_backup, fake_restore) volume_kwargs = {"some_var": "zaq"} scenario.run(1, do_delete=False, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) scenario._restore_backup.assert_called_once_with(fake_backup.id) self.assertFalse(scenario._delete_volume.called) self.assertFalse(scenario._delete_backup.called) def test_create_and_list_volume_backups(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() scenario = volumes.CreateAndListVolumeBackups(self.context) self._get_scenario(scenario, fake_volume, fake_backup) volume_kwargs = {"some_var": "zaq"} scenario.run(1, detailed=True, do_delete=True, create_volume_kwargs=volume_kwargs) 
scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) scenario._list_backups.assert_called_once_with(True) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_backup.assert_called_once_with(fake_backup) def test_create_and_list_volume_backups_no_delete(self): fake_volume = mock.MagicMock() fake_backup = mock.MagicMock() scenario = volumes.CreateAndListVolumeBackups(self.context) self._get_scenario(scenario, fake_volume, fake_backup) volume_kwargs = {"some_var": "zaq"} scenario.run(1, detailed=True, do_delete=False, create_volume_kwargs=volume_kwargs) scenario._create_volume.assert_called_once_with(1, **volume_kwargs) scenario._create_backup.assert_called_once_with(fake_volume.id) scenario._list_backups.assert_called_once_with(True) self.assertFalse(scenario._delete_volume.called) self.assertFalse(scenario._delete_backup.called) @ddt.data({}, {"nested_level": 2}, {"image": "img"}) @ddt.unpack def test_create_volume_and_clone(self, nested_level=1, image=None): create_volumes_count = nested_level + 1 fake_volumes = [mock.Mock(size=1) for i in range(create_volumes_count)] scenario = volumes.CreateVolumeAndClone(self.context) scenario._create_volume = mock.MagicMock(side_effect=fake_volumes) scenario.run(1, image=image, nested_level=nested_level, fakearg="fake") expected = [mock.call(1, imageRef=image, fakearg="fake") if image else mock.call(1, fakearg="fake")] for i in range(nested_level): expected.append(mock.call(fake_volumes[i].size, source_volid=fake_volumes[i].id, atomic_action=False, fakearg="fake") ) self._test_atomic_action_timer(scenario.atomic_actions(), "cinder.clone_volume") scenario._create_volume.assert_has_calls(expected) def test_create_volume_from_snapshot(self): fake_snapshot = mock.MagicMock(id=1) fake_volume = mock.MagicMock() create_snapshot_args = {"force": False} scenario = volumes.CreateVolumeFromSnapshot(self._get_context()) 
scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_snapshot = mock.MagicMock() scenario._delete_volume = mock.MagicMock() scenario.run(fakearg="f") scenario._create_snapshot.assert_called_once_with("uuid") scenario._create_volume.assert_called_once_with( 1, snapshot_id=fake_snapshot.id, fakearg="f") scenario._delete_snapshot.assert_called_once_with(fake_snapshot) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._create_snapshot.reset_mock() scenario._create_volume.reset_mock() scenario._delete_snapshot.reset_mock() scenario._delete_volume.reset_mock() scenario.run(do_delete=False, create_snapshot_kwargs=create_snapshot_args, fakearg="f") scenario._create_snapshot.assert_called_once_with( "uuid", **create_snapshot_args) scenario._create_volume.assert_called_once_with( 1, snapshot_id=fake_snapshot.id, fakearg="f") self.assertFalse(scenario._delete_snapshot.called) self.assertFalse(scenario._delete_volume.called) @ddt.data({}, {"image": "img"}) @ddt.unpack def test_create_and_accept_transfer(self, image=None): fake_volume = mock.MagicMock() fake_transfer = mock.MagicMock() scenario = volumes.CreateAndAcceptTransfer(self._get_context()) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._transfer_create = mock.MagicMock(return_value=fake_transfer) scenario._transfer_accept = mock.MagicMock() scenario.run(1, image=image, fakearg="fake") expected = [mock.call(1, imageRef=image, fakearg="fake") if image else mock.call(1, fakearg="fake")] scenario._create_volume.assert_has_calls(expected) scenario._transfer_create.assert_called_once_with(fake_volume.id) scenario._transfer_accept.assert_called_once_with( fake_transfer.id, fake_transfer.auth_key) rally-0.9.1/tests/unit/plugins/openstack/scenarios/cinder/test_volume_types.py0000664000567000056710000000761613073417720031212 0ustar jenkinsjenkins00000000000000# All Rights 
Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.cinder import volume_types from tests.unit import test class fake_type(object): name = "fake" class CinderVolumeTypesTestCase(test.ScenarioTestCase): def _get_context(self): context = test.get_test_context() context.update({ "volume_types": [{"id": "fake_id", "name": "fake_name"}]}) return context def test_create_and_delete_volume_type(self): scenario = volume_types.CreateAndDeleteVolumeType(self.context) scenario._create_volume_type = mock.Mock() scenario._delete_volume_type = mock.Mock() scenario.run(fakeargs="fakeargs") scenario._create_volume_type.assert_called_once_with( fakeargs="fakeargs") scenario._delete_volume_type.assert_called_once_with( scenario._create_volume_type.return_value) def test_create_and_delete_encryption_type(self): scenario = volume_types.CreateAndDeleteEncryptionType( self._get_context()) scenario._create_encryption_type = mock.Mock() scenario._delete_encryption_type = mock.Mock() scenario.run(create_specs="fakecreatespecs") scenario._create_encryption_type.assert_called_once_with( "fake_id", "fakecreatespecs") scenario._delete_encryption_type.assert_called_once_with( "fake_id") def test_create_volume_type_and_encryption_type(self): scenario = volume_types.CreateVolumeTypeAndEncryptionType(self.context) scenario._create_volume_type = mock.Mock() scenario._create_encryption_type = mock.Mock() scenario.run(specs="fakespecs", fakeargs="fakeargs") 
scenario._create_volume_type.assert_called_once_with( fakeargs="fakeargs") scenario._create_encryption_type.assert_called_once_with( scenario._create_volume_type.return_value, "fakespecs") def test_create_and_list_encryption_type(self): scenario = volume_types.CreateAndListEncryptionType(self.context) scenario._create_volume_type = mock.Mock() scenario._create_encryption_type = mock.Mock() scenario._list_encryption_type = mock.Mock() scenario.run(specs="fakespecs", search_opts="fakeopts", fakeargs="fakeargs") scenario._create_volume_type.assert_called_once_with( fakeargs="fakeargs") scenario._create_encryption_type.assert_called_once_with( scenario._create_volume_type.return_value, "fakespecs") scenario._list_encryption_type.assert_called_once_with( "fakeopts") def test_create_and_set_volume_type_keys(self): scenario = volume_types.CreateAndSetVolumeTypeKeys(self.context) volume_type = mock.MagicMock() volume_type_key = {"volume_backend_name": "LVM_iSCSI"} scenario._create_volume_type = mock.MagicMock() scenario._set_volume_type_keys = mock.MagicMock() scenario._create_volume_type.return_value = volume_type scenario.run(volume_type_key, fakeargs="fakeargs") scenario._create_volume_type.assert_called_once_with( fakeargs="fakeargs") scenario._set_volume_type_keys.assert_called_once_with(volume_type, volume_type_key) rally-0.9.1/tests/unit/plugins/openstack/scenarios/ec2/0000775000567000056710000000000013073420067024237 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/ec2/__init__.py0000664000567000056710000000000013073417717026346 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/ec2/test_servers.py0000664000567000056710000000231413073417717027351 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.ec2 import servers from tests.unit import test class EC2ServersTestCase(test.ScenarioTestCase): def test_list_servers(self): scenario = servers.ListServers(self.context) scenario._list_servers = mock.MagicMock() scenario.run() scenario._list_servers.assert_called_once_with() def test_boot_server(self): scenario = servers.BootServer(self.context) scenario._boot_servers = mock.Mock() scenario.run("foo_image", "foo_flavor", foo="bar") scenario._boot_servers.assert_called_once_with( "foo_image", "foo_flavor", foo="bar") rally-0.9.1/tests/unit/plugins/openstack/scenarios/ec2/test_utils.py0000664000567000056710000000543213073417717027024 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from oslo_config import cfg from rally.plugins.openstack.scenarios.ec2 import utils from tests.unit import test CONF = cfg.CONF class EC2ScenarioTestCase(test.ScenarioTestCase): def setUp(self): super(EC2ScenarioTestCase, self).setUp() self.server1 = mock.MagicMock() self.server2 = mock.MagicMock() self.reservations = mock.MagicMock(instances=[self.server1, self.server2]) def test__list_servers(self): servers_list = [] self.clients("ec2").get_only_instances.return_value = servers_list ec2_scenario = utils.EC2Scenario() return_servers_list = ec2_scenario._list_servers() self.assertEqual(servers_list, return_servers_list) self._test_atomic_action_timer(ec2_scenario.atomic_actions(), "ec2.list_servers") def test__update_resource(self): resource = mock.MagicMock() scenario = utils.EC2Scenario(self.context) self.assertEqual(scenario._update_resource(resource), resource) resource.update.assert_called_once_with() def test__boot_servers(self): self.clients("ec2").run_instances.return_value = self.reservations ec2_scenario = utils.EC2Scenario(context={}) ec2_scenario._update_resource = mock.Mock() ec2_scenario._boot_servers("image", "flavor", 2) expected = [ mock.call( self.server1, ready_statuses=["RUNNING"], update_resource=ec2_scenario._update_resource, check_interval=CONF.benchmark.ec2_server_boot_poll_interval, timeout=CONF.benchmark.ec2_server_boot_timeout ), mock.call( self.server2, ready_statuses=["RUNNING"], update_resource=ec2_scenario._update_resource, check_interval=CONF.benchmark.ec2_server_boot_poll_interval, timeout=CONF.benchmark.ec2_server_boot_timeout ) ] self.mock_wait_for.mock.assert_has_calls(expected) self._test_atomic_action_timer(ec2_scenario.atomic_actions(), "ec2.boot_servers") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/0000775000567000056710000000000013073420067024531 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_flavors.py0000664000567000056710000001247713073417717027641 
0ustar jenkinsjenkins00000000000000# Copyright: 2015. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import flavors from tests.unit import test @ddt.ddt class NovaFlavorsTestCase(test.TestCase): def test_list_flavors(self): scenario = flavors.ListFlavors() scenario._list_flavors = mock.Mock() scenario.run(detailed=True, fakearg="fakearg") scenario._list_flavors.assert_called_once_with(True, fakearg="fakearg") @ddt.data({}, {"is_public": True}, {"is_public": False}, {"fakeargs": "fakeargs"}, {"is_public": False, "fakeargs": "fakeargs"}) @ddt.unpack def test_create_and_list_flavor_access(self, **kwargs): # Common parameters ram = 100 vcpus = 1 disk = 1 scenario = flavors.CreateAndListFlavorAccess() scenario._create_flavor = mock.Mock() scenario._list_flavor_access = mock.Mock() # Positive case: scenario.run(ram, vcpus, disk, **kwargs) kwargs.pop("is_public", None) scenario._create_flavor.assert_called_once_with(ram, vcpus, disk, is_public=False, **kwargs) scenario._list_flavor_access.assert_called_once_with( scenario._create_flavor.return_value.id) # Negative case1: flavor wasn't created scenario._create_flavor.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, ram, vcpus, disk, **kwargs) scenario._create_flavor.assert_called_with(ram, vcpus, disk, is_public=False, **kwargs) def test_create_flavor_add_tenant_access(self, **kwargs): 
flavor = mock.MagicMock() context = {"user": {"tenant_id": "fake"}, "tenant": {"id": "fake"}} scenario = flavors.CreateFlavorAndAddTenantAccess() scenario.context = context scenario.generate_random_name = mock.MagicMock() scenario._create_flavor = mock.MagicMock(return_value=flavor) scenario._add_tenant_access = mock.MagicMock() # Positive case: scenario.run(ram=100, vcpus=1, disk=1, **kwargs) scenario._create_flavor.assert_called_once_with(100, 1, 1, **kwargs) scenario._add_tenant_access.assert_called_once_with(flavor.id, "fake") # Negative case1: flavor wasn't created scenario._create_flavor.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, 100, 1, 1, **kwargs) scenario._create_flavor.assert_called_with(100, 1, 1, **kwargs) def test_create_flavor(self): scenario = flavors.CreateFlavor() scenario._create_flavor = mock.MagicMock() scenario.run(ram=100, vcpus=1, disk=1, fakeargs="fakeargs") scenario._create_flavor.assert_called_once_with(100, 1, 1, fakeargs="fakeargs") def test_create_and_get_flavor(self, **kwargs): scenario = flavors.CreateAndGetFlavor() scenario._create_flavor = mock.Mock() scenario._get_flavor = mock.Mock() scenario.run(ram=100, vcpus=1, disk=1, **kwargs) scenario._create_flavor.assert_called_once_with(100, 1, 1, **kwargs) scenario._get_flavor.assert_called_once_with( scenario._create_flavor.return_value.id) def test_create_and_delete_flavor(self, **kwargs): scenario = flavors.CreateAndDeleteFlavor() scenario._create_flavor = mock.Mock() scenario._delete_flavor = mock.Mock() scenario.run(ram=100, vcpus=1, disk=1, **kwargs) scenario._create_flavor.assert_called_once_with(100, 1, 1, **kwargs) scenario._delete_flavor.assert_called_once_with( scenario._create_flavor.return_value.id) def test_create_flavor_and_set_keys(self): scenario = flavors.CreateFlavorAndSetKeys() scenario._create_flavor = mock.MagicMock() scenario._set_flavor_keys = mock.MagicMock() specs_args = {"fakeargs": "foo"} scenario.run(ram=100, vcpus=1, 
disk=1, extra_specs=specs_args, fakeargs="fakeargs") scenario._create_flavor.assert_called_once_with(100, 1, 1, fakeargs="fakeargs") scenario._set_flavor_keys.assert_called_once_with( scenario._create_flavor.return_value, specs_args) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_hosts.py0000664000567000056710000000312013073417720027300 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.nova import hosts from tests.unit import test class NovaHostsTestCase(test.TestCase): def test_list_hosts(self): scenario = hosts.ListHosts() scenario._list_hosts = mock.Mock() scenario.run(zone=None) scenario._list_hosts.assert_called_once_with(None) def test_list_and_get_hosts(self): fake_hosts = [mock.Mock(host_name="fake_hostname")] scenario = hosts.ListAndGetHosts() scenario._list_hosts = mock.create_autospec(scenario._list_hosts, return_value=fake_hosts) scenario._get_host = mock.create_autospec(scenario._get_host, "fake_hostname") scenario.run(zone="nova") scenario._list_hosts.assert_called_once_with("nova", service="compute") scenario._get_host.assert_called_once_with( "fake_hostname", atomic_action=False) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/__init__.py0000664000567000056710000000000013073417717026640 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_server_groups.py0000775000567000056710000000525713073417720031065 0ustar jenkinsjenkins00000000000000# Copyright 2017: Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally import exceptions as rally_exceptions from rally.plugins.openstack.scenarios.nova import server_groups from tests.unit import test SERVER_GROUPS_MODULE = "rally.plugins.openstack.scenarios.nova.server_groups" NOVA_SERVER_GROUPS = SERVER_GROUPS_MODULE + ".NovaServerGroups" @ddt.ddt class NovaServerGroupsTestCase(test.ScenarioTestCase): def test_create_and_list_server_groups(self): scenario = server_groups.CreateAndListServerGroups(self.context) gen_name = mock.MagicMock() scenario.generate_random_name = gen_name all_projects = False create_args = {"policies": ["fake_policy"]} fake_server_group = mock.MagicMock() scenario._create_server_group = mock.MagicMock() scenario._list_server_groups = mock.MagicMock() scenario._list_server_groups.return_value = [mock.MagicMock(), fake_server_group, mock.MagicMock()] # Positive case scenario._create_server_group.return_value = fake_server_group scenario.run(kwargs=create_args) scenario._create_server_group.assert_called_once_with(**create_args) scenario._list_server_groups.assert_called_once_with(all_projects) # Negative case1: server group isn't created scenario._create_server_group.return_value = None 
self.assertRaises(rally_exceptions.RallyAssertionError, scenario.run, kwargs=create_args) scenario._create_server_group.assert_called_with(**create_args) # Negative case2: server group not in the list of available server # groups scenario._create_server_group.return_value = mock.MagicMock() self.assertRaises(rally_exceptions.RallyAssertionError, scenario.run, kwargs=create_args) scenario._create_server_group.assert_called_with(**create_args) scenario._list_server_groups.assert_called_with(all_projects) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_floating_ips_bulk.py0000664000567000056710000000717113073417717031653 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import floating_ips_bulk from tests.unit import test class NovaFloatingIPsBulkTestCase(test.ScenarioTestCase): def test_create_and_list_floating_ips_bulk(self): # Positive case: scenario = floating_ips_bulk.CreateAndListFloatingIpsBulk(self.context) scenario._create_floating_ips_bulk = mock.MagicMock() scenario._list_floating_ips_bulk = mock.MagicMock() start_cidr = "10.2.0.0/24" scenario._list_floating_ips_bulk.return_value = ["10.2.0.0", "10.2.0.1", "10.2.0.2", "10.2.0.3", "10.2.0.4", "10.2.0.5"] # Positive case scenario._create_floating_ips_bulk.return_value = ["10.2.0.1", "10.2.0.2"] scenario.run(start_cidr=start_cidr, fakearg="fakearg") scenario._create_floating_ips_bulk.assert_called_once_with( start_cidr, fakearg="fakearg") scenario._list_floating_ips_bulk.assert_called_once_with() # Negative case1: IPs aren't created scenario._create_floating_ips_bulk.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, start_cidr=start_cidr, fakearg="fakearg") scenario._create_floating_ips_bulk.assert_called_with( start_cidr, fakearg="fakearg") # Negative case2: list doesn't contain new ips scenario._create_floating_ips_bulk.return_value = ["10.2.0.1", "10.2.0.2", "10.2.0.6"] self.assertRaises(exceptions.RallyAssertionError, scenario.run, start_cidr=start_cidr, fakearg="fakearg") scenario._create_floating_ips_bulk.assert_called_with( start_cidr, fakearg="fakearg") scenario._list_floating_ips_bulk.assert_called_with() def test_create_and_delete_floating_ips_bulk(self): scenario = floating_ips_bulk.CreateAndDeleteFloatingIpsBulk( self.context) fake_floating_ips_bulk = mock.MagicMock() fake_floating_ips_bulk.ip_range = "10.2.0.0/24" scenario._create_floating_ips_bulk = mock.MagicMock( return_value=fake_floating_ips_bulk) scenario._delete_floating_ips_bulk = mock.MagicMock() start_cidr = "10.2.0.0/24" scenario.run(start_cidr=start_cidr, fakearg="fakearg") 
scenario._create_floating_ips_bulk.assert_called_once_with( start_cidr, fakearg="fakearg") scenario._delete_floating_ips_bulk.assert_called_once_with( fake_floating_ips_bulk.ip_range) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_keypairs.py0000664000567000056710000001027713073417717030010 0ustar jenkinsjenkins00000000000000# Copyright 2015: Hewlett-Packard Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import keypairs from tests.unit import fakes from tests.unit import test class NovaKeypairTestCase(test.ScenarioTestCase): def test_create_and_list_keypairs(self): fake_nova_client = fakes.FakeNovaClient() fake_nova_client.keypairs.create("keypair") fake_keypair = list(fake_nova_client.keypairs.cache.values())[0] scenario = keypairs.CreateAndListKeypairs(self.context) scenario._create_keypair = mock.MagicMock() scenario._list_keypairs = mock.MagicMock() scenario._list_keypairs.return_value = [fake_keypair] * 3 # Positive case: scenario._create_keypair.return_value = fake_keypair.id scenario.run(fakearg="fakearg") scenario._create_keypair.assert_called_once_with(fakearg="fakearg") scenario._list_keypairs.assert_called_once_with() # Negative case1: keypair isn't created scenario._create_keypair.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, fakearg="fakearg") 
scenario._create_keypair.assert_called_with(fakearg="fakearg") # Negative case2: new keypair not in the list of keypairs scenario._create_keypair.return_value = "fake_keypair" self.assertRaises(exceptions.RallyAssertionError, scenario.run, fakearg="fakearg") scenario._create_keypair.assert_called_with(fakearg="fakearg") scenario._list_keypairs.assert_called_with() def test_create_and_get_keypair(self): scenario = keypairs.CreateAndGetKeypair(self.context) fake_keypair = mock.MagicMock() scenario._create_keypair = mock.MagicMock() scenario._get_keypair = mock.MagicMock() scenario._create_keypair.return_value = fake_keypair scenario.run(fakearg="fakearg") scenario._create_keypair.assert_called_once_with(fakearg="fakearg") scenario._get_keypair.assert_called_once_with(fake_keypair) def test_create_and_delete_keypair(self): scenario = keypairs.CreateAndDeleteKeypair(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._create_keypair = mock.MagicMock(return_value="foo_keypair") scenario._delete_keypair = mock.MagicMock() scenario.run(fakearg="fakearg") scenario._create_keypair.assert_called_once_with(fakearg="fakearg") scenario._delete_keypair.assert_called_once_with("foo_keypair") def test_boot_and_delete_server_with_keypair(self): scenario = keypairs.BootAndDeleteServerWithKeypair(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._create_keypair = mock.MagicMock(return_value="foo_keypair") scenario._boot_server = mock.MagicMock(return_value="foo_server") scenario._delete_server = mock.MagicMock() scenario._delete_keypair = mock.MagicMock() fake_server_args = { "foo": 1, "bar": 2, } scenario.run("img", 1, boot_server_kwargs=fake_server_args, fake_arg1="foo", fake_arg2="bar") scenario._create_keypair.assert_called_once_with( fake_arg1="foo", fake_arg2="bar") scenario._boot_server.assert_called_once_with( "img", 1, foo=1, bar=2, key_name="foo_keypair") 
scenario._delete_server.assert_called_once_with("foo_server") scenario._delete_keypair.assert_called_once_with("foo_keypair") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_aggregates.py0000664000567000056710000001717113073417717030272 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import aggregates from tests.unit import test class NovaAggregatesTestCase(test.ScenarioTestCase): def test_list_aggregates(self): scenario = aggregates.ListAggregates() scenario._list_aggregates = mock.Mock() scenario.run() scenario._list_aggregates.assert_called_once_with() def test_create_and_list_aggregates(self): # Positive case scenario = aggregates.CreateAndListAggregates() scenario._create_aggregate = mock.Mock(return_value="agg1") scenario._list_aggregates = mock.Mock(return_value=("agg1", "agg2")) scenario.run(availability_zone="nova") scenario._create_aggregate.assert_called_once_with("nova") scenario._list_aggregates.assert_called_once_with() # Negative case 1: aggregate isn't created scenario._create_aggregate.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, availability_zone="nova") scenario._create_aggregate.assert_called_with("nova") # Negative case 2: aggregate was created but not included into list scenario._create_aggregate.return_value = "agg3" 
self.assertRaises(exceptions.RallyAssertionError, scenario.run, availability_zone="nova") scenario._create_aggregate.assert_called_with("nova") scenario._list_aggregates.assert_called_with() def test_create_and_delete_aggregate(self): scenario = aggregates.CreateAndDeleteAggregate() scenario._create_aggregate = mock.Mock() scenario._delete_aggregate = mock.Mock() scenario.run(availability_zone="nova") scenario._create_aggregate.assert_called_once_with("nova") aggregate = scenario._create_aggregate.return_value scenario._delete_aggregate.assert_called_once_with(aggregate) def test_create_and_update_aggregate(self): scenario = aggregates.CreateAndUpdateAggregate() scenario._create_aggregate = mock.Mock() scenario._update_aggregate = mock.Mock() scenario.run(availability_zone="nova") scenario._create_aggregate.assert_called_once_with("nova") aggregate = scenario._create_aggregate.return_value scenario._update_aggregate.assert_called_once_with(aggregate) def test_create_aggregate_add_and_remove_host(self): fake_aggregate = "fake_aggregate" fake_hosts = [mock.Mock(service={"host": "fake_host_name"})] scenario = aggregates.CreateAggregateAddAndRemoveHost() scenario._create_aggregate = mock.MagicMock( return_value=fake_aggregate) scenario._list_hypervisors = mock.MagicMock(return_value=fake_hosts) scenario._aggregate_add_host = mock.MagicMock() scenario._aggregate_remove_host = mock.MagicMock() scenario.run(availability_zone="nova") scenario._create_aggregate.assert_called_once_with( "nova") scenario._list_hypervisors.assert_called_once_with() scenario._aggregate_add_host.assert_called_once_with( "fake_aggregate", "fake_host_name") scenario._aggregate_remove_host.assert_called_once_with( "fake_aggregate", "fake_host_name") def test_create_and_get_aggregate_details(self): scenario = aggregates.CreateAndGetAggregateDetails() scenario._create_aggregate = mock.Mock() scenario._get_aggregate_details = mock.Mock() scenario.run(availability_zone="nova") 
scenario._create_aggregate.assert_called_once_with("nova") aggregate = scenario._create_aggregate.return_value scenario._get_aggregate_details.assert_called_once_with(aggregate) def test_create_aggregate_add_host_and_boot_server(self): fake_aggregate = mock.Mock() fake_hosts = [mock.Mock(service={"host": "fake_host_name"})] fake_flavor = mock.MagicMock(id="flavor-id-0", ram=512, disk=1, vcpus=1) fake_metadata = {"test_metadata": "true"} fake_server = mock.MagicMock(id="server-id-0") setattr(fake_server, "OS-EXT-SRV-ATTR:hypervisor_hostname", "fake_host_name") fake_aggregate_kwargs = {"fake_arg1": "f"} scenario = aggregates.CreateAggregateAddHostAndBootServer() scenario._create_aggregate = mock.MagicMock( return_value=fake_aggregate) scenario._list_hypervisors = mock.MagicMock(return_value=fake_hosts) scenario._aggregate_add_host = mock.MagicMock() scenario._aggregate_set_metadata = mock.MagicMock() scenario._create_flavor = mock.MagicMock(return_value=fake_flavor) scenario._boot_server = mock.MagicMock(return_value=fake_server) self.admin_clients("nova").servers.get.return_value = fake_server scenario.run("img", fake_metadata, availability_zone="nova", boot_server_kwargs=fake_aggregate_kwargs) scenario._create_aggregate.assert_called_once_with("nova") scenario._list_hypervisors.assert_called_once_with() scenario._aggregate_set_metadata.assert_called_once_with( fake_aggregate, fake_metadata) scenario._aggregate_add_host(fake_aggregate, "fake_host_name") scenario._create_flavor.assert_called_once_with(512, 1, 1) fake_flavor.set_keys.assert_called_once_with(fake_metadata) scenario._boot_server.assert_called_once_with("img", "flavor-id-0", **fake_aggregate_kwargs) self.admin_clients("nova").servers.get.assert_called_once_with( "server-id-0") self.assertEqual(getattr( fake_server, "OS-EXT-SRV-ATTR:hypervisor_hostname"), "fake_host_name") def test_create_aggregate_add_host_and_boot_server_failure(self): fake_aggregate = mock.Mock() fake_hosts = 
[mock.Mock(service={"host": "fake_host_name"})] fake_flavor = mock.MagicMock(id="flavor-id-0", ram=512, disk=1, vcpus=1) fake_metadata = {"test_metadata": "true"} fake_server = mock.MagicMock(id="server-id-0") setattr(fake_server, "OS-EXT-SRV-ATTR:hypervisor_hostname", "wrong_host_name") fake_boot_server_kwargs = {"fake_arg1": "f"} scenario = aggregates.CreateAggregateAddHostAndBootServer() scenario._create_aggregate = mock.MagicMock( return_value=fake_aggregate) scenario._list_hypervisors = mock.MagicMock(return_value=fake_hosts) scenario._aggregate_add_host = mock.MagicMock() scenario._aggregate_set_metadata = mock.MagicMock() scenario._create_flavor = mock.MagicMock(return_value=fake_flavor) scenario._boot_server = mock.MagicMock(return_value=fake_server) self.admin_clients("nova").servers.get.return_value = fake_server self.assertRaises(exceptions.RallyException, scenario.run, "img", fake_metadata, "nova", fake_boot_server_kwargs) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_security_group.py0000664000567000056710000002362513073417717031245 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import security_group from tests.unit import fakes from tests.unit import test class FakeNeutronScenario(object): def __enter__(self): return {} def __exit__(self, exc_type, exc_val, exc_tb): pass class NovaSecurityGroupTestCase(test.ScenarioTestCase): def test_create_and_delete_security_groups(self): fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] scenario = security_group.CreateAndDeleteSecgroups(self.context) scenario._create_security_groups = mock.MagicMock( return_value=fake_secgroups) scenario._create_rules_for_security_group = mock.MagicMock() scenario._delete_security_groups = mock.MagicMock() security_group_count = 2 rules_per_security_group = 10 scenario.run(security_group_count, rules_per_security_group) scenario._create_security_groups.assert_called_once_with( security_group_count) scenario._create_rules_for_security_group.assert_called_once_with( fake_secgroups, rules_per_security_group) scenario._delete_security_groups.assert_called_once_with( fake_secgroups) def test_create_and_update_security_groups(self): fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] scenario = security_group.CreateAndUpdateSecgroups(self.context) scenario._create_security_groups = mock.MagicMock( return_value=fake_secgroups) scenario._update_security_groups = mock.MagicMock() scenario._generate_random_name = mock.Mock( return_value="_updated") security_group_count = 2 scenario.run(security_group_count) scenario._create_security_groups.assert_called_once_with( security_group_count) scenario._update_security_groups.assert_called_once_with( fake_secgroups) def test_create_and_list_secgroups(self): fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] scenario = 
security_group.CreateAndListSecgroups(self.context) scenario._create_security_groups = mock.MagicMock() scenario._create_rules_for_security_group = mock.MagicMock() scenario._list_security_groups = mock.MagicMock() scenario._list_security_groups.return_value = fake_secgroups scenario._list_security_groups.return_value.append( fakes.FakeSecurityGroup(None, None, 3, "uuid3")) scenario._list_security_groups.return_value.append( fakes.FakeSecurityGroup(None, None, 4, "uuid4")) security_group_count = 2 rules_per_security_group = 10 # Positive case: scenario._create_security_groups.return_value = fake_secgroups scenario.run( security_group_count, rules_per_security_group) scenario._create_security_groups.assert_called_once_with( security_group_count) scenario._create_rules_for_security_group.assert_called_once_with( fake_secgroups, rules_per_security_group) scenario._list_security_groups.assert_called_once_with() # Negative case1: groups aren't created scenario._create_security_groups.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, security_group_count, rules_per_security_group) scenario._create_security_groups.assert_called_with( security_group_count) # Negative case2: new groups are not present in the list of groups fake_secgroups = [fakes.FakeSecurityGroup(None, None, 6, "uuid6")] scenario._create_security_groups.return_value = fake_secgroups scenario._create_rules_for_security_group = mock.MagicMock() self.assertRaises(exceptions.RallyAssertionError, scenario.run, security_group_count, rules_per_security_group) scenario._create_security_groups.assert_called_with( security_group_count) scenario._create_rules_for_security_group.assert_called_with( fake_secgroups, rules_per_security_group) scenario._list_security_groups.assert_called_with() def _generate_fake_server_with_sg(self, number_of_secgroups): sg_list = [] for i in range(number_of_secgroups): sg_list.append( fakes.FakeSecurityGroup(None, None, i, "uuid%s" % i)) return 
mock.MagicMock( list_security_group=mock.MagicMock(return_value=sg_list)), sg_list def _test_boot_and_delete_server_with_secgroups(self): fake_server, sg_list = self._generate_fake_server_with_sg(2) scenario = security_group.BootAndDeleteServerWithSecgroups( self.context) scenario._create_security_groups = mock.MagicMock( return_value=sg_list) scenario._create_rules_for_security_group = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.generate_random_name = mock.MagicMock( return_value="name") scenario._delete_server = mock.MagicMock() scenario._delete_security_groups = mock.MagicMock() image = "img" flavor = 1 security_group_count = 2 rules_per_security_group = 10 scenario.run( image, flavor, security_group_count, rules_per_security_group, fakearg="fakearg") scenario._create_security_groups.assert_called_once_with( security_group_count) scenario.generate_random_name.assert_called_once_with() scenario._create_rules_for_security_group.assert_called_once_with( sg_list, rules_per_security_group) scenario._boot_server.assert_called_once_with( "name", image, flavor, security_groups=[sg.name for sg in sg_list], fakearg="fakearg") fake_server.list_security_group.assert_called_once_with() scenario._delete_server.assert_called_once_with(fake_server) scenario._delete_security_groups.assert_called_once_with(sg_list) def _test_boot_and_delete_server_with_sg_not_attached(self): fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] fake_server, sg_list = self._generate_fake_server_with_sg(1) scenario = security_group.BootAndDeleteServerWithSecgroups() scenario._create_security_groups = mock.MagicMock( return_value=fake_secgroups) scenario._create_rules_for_security_group = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.generate_random_name = mock.MagicMock( return_value="name") scenario._delete_server = mock.MagicMock() 
scenario._delete_security_groups = mock.MagicMock() image = "img" flavor = 1 security_group_count = 2 rules_per_security_group = 10 self.assertRaises(security_group.NovaSecurityGroupException, scenario.run, image, flavor, security_group_count, rules_per_security_group) scenario._create_security_groups.assert_called_once_with( security_group_count) scenario.generate_random_name.assert_called_once_with() scenario._create_rules_for_security_group.assert_called_once_with( fake_secgroups, rules_per_security_group) scenario._boot_server.assert_called_once_with( "name", image, flavor, security_groups=[sg.name for sg in fake_secgroups]) fake_server.list_security_group.assert_called_once_with() scenario._delete_server.assert_called_once_with(fake_server) scenario._delete_security_groups.assert_called_once_with( fake_secgroups) def test_boot_server_and_add_secgroups(self): fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] nova_scenario = security_group.BootServerAndAddSecgroups(self.context) nova_scenario._create_security_groups = mock.MagicMock( return_value=fake_secgroups) nova_scenario._create_rules_for_security_group = mock.MagicMock() nova_scenario._boot_server = mock.MagicMock() nova_scenario.add_server_secgroup = mock.MagicMock() image = "img" flavor = 1 security_group_count = 2 rules_per_security_group = 10 nova_scenario.run( image, flavor, security_group_count, rules_per_security_group, fakearg="fakearg") nova_scenario._create_security_groups.assert_called_once_with( security_group_count) nova_scenario._create_rules_for_security_group.assert_called_once_with( fake_secgroups, rules_per_security_group) nova_scenario._boot_server.assert_called_once_with(image, flavor, fakearg="fakearg") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_servers.py0000775000567000056710000011506313073417720027646 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally import exceptions as rally_exceptions from rally.plugins.openstack.scenarios.nova import servers from tests.unit import fakes from tests.unit import test NOVA_SERVERS_MODULE = "rally.plugins.openstack.scenarios.nova.servers" NOVA_SERVERS = NOVA_SERVERS_MODULE + ".NovaServers" @ddt.ddt class NovaServersTestCase(test.ScenarioTestCase): @ddt.data(("rescue_unrescue", ["_rescue_server", "_unrescue_server"], 1), ("stop_start", ["_stop_server", "_start_server"], 2), ("pause_unpause", ["_pause_server", "_unpause_server"], 3), ("suspend_resume", ["_suspend_server", "_resume_server"], 4), ("lock_unlock", ["_lock_server", "_unlock_server"], 5), ("shelve_unshelve", ["_shelve_server", "_unshelve_server"], 6)) @ddt.unpack def test_action_pair(self, action_pair, methods, nof_calls): actions = [{action_pair: nof_calls}] fake_server = mock.MagicMock() scenario = servers.BootAndBounceServer(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario.generate_random_name = mock.MagicMock(return_value="name") for method in methods: setattr(scenario, method, mock.MagicMock()) scenario.run("img", 1, actions=actions) scenario._boot_server.assert_called_once_with("img", 1) server_calls = [] for i in range(nof_calls): server_calls.append(mock.call(fake_server)) for method in methods: mocked_method = getattr(scenario, method) self.assertEqual(nof_calls, 
mocked_method.call_count, "%s not called %d times" % (method, nof_calls)) mocked_method.assert_has_calls(server_calls) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_multiple_bounce_actions(self): actions = [{"hard_reboot": 5}, {"stop_start": 8}, {"rescue_unrescue": 3}, {"pause_unpause": 2}, {"suspend_resume": 4}, {"lock_unlock": 6}, {"shelve_unshelve": 7}] fake_server = mock.MagicMock() scenario = servers.BootAndBounceServer(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario._reboot_server = mock.MagicMock() scenario._stop_and_start_server = mock.MagicMock() scenario._rescue_and_unrescue_server = mock.MagicMock() scenario._pause_and_unpause_server = mock.MagicMock() scenario._suspend_and_resume_server = mock.MagicMock() scenario._lock_and_unlock_server = mock.MagicMock() scenario._shelve_and_unshelve_server = mock.MagicMock() scenario.generate_random_name = mock.MagicMock(return_value="name") scenario.run("img", 1, actions=actions) scenario._boot_server.assert_called_once_with("img", 1) server_calls = [] for i in range(5): server_calls.append(mock.call(fake_server)) self.assertEqual(5, scenario._reboot_server.call_count, "Reboot not called 5 times") scenario._reboot_server.assert_has_calls(server_calls) server_calls = [] for i in range(8): server_calls.append(mock.call(fake_server)) self.assertEqual(8, scenario._stop_and_start_server.call_count, "Stop/Start not called 8 times") scenario._stop_and_start_server.assert_has_calls(server_calls) server_calls = [] for i in range(3): server_calls.append(mock.call(fake_server)) self.assertEqual(3, scenario._rescue_and_unrescue_server.call_count, "Rescue/Unrescue not called 3 times") scenario._rescue_and_unrescue_server.assert_has_calls(server_calls) server_calls = [] for i in range(2): server_calls.append(mock.call(fake_server)) self.assertEqual(2, scenario._pause_and_unpause_server.call_count, "Pause/Unpause 
not called 2 times") scenario._pause_and_unpause_server.assert_has_calls(server_calls) server_calls = [] for i in range(4): server_calls.append(mock.call(fake_server)) self.assertEqual(4, scenario._suspend_and_resume_server.call_count, "Suspend/Resume not called 4 times") scenario._suspend_and_resume_server.assert_has_calls(server_calls) server_calls = [] for i in range(6): server_calls.append(mock.call(fake_server)) self.assertEqual(6, scenario._lock_and_unlock_server.call_count, "Lock/Unlock not called 6 times") scenario._lock_and_unlock_server.assert_has_calls(server_calls) server_calls = [] for i in range(7): server_calls.append(mock.call(fake_server)) self.assertEqual(7, scenario._shelve_and_unshelve_server.call_count, "Shelve/Unshelve not called 7 times") scenario._shelve_and_unshelve_server.assert_has_calls(server_calls) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_boot_lock_unlock_and_delete(self): server = fakes.FakeServer() image = fakes.FakeImage() flavor = fakes.FakeFlavor() scenario = servers.BootLockUnlockAndDelete(self.context) scenario._boot_server = mock.Mock(return_value=server) scenario._lock_server = mock.Mock(side_effect=lambda s: s.lock()) scenario._unlock_server = mock.Mock(side_effect=lambda s: s.unlock()) scenario._delete_server = mock.Mock( side_effect=lambda s, **kwargs: self.assertFalse(getattr(s, "OS-EXT-STS:locked", False))) scenario.run(image, flavor, fakearg="fakearg") scenario._boot_server.assert_called_once_with(image, flavor, fakearg="fakearg") scenario._lock_server.assert_called_once_with(server) scenario._unlock_server.assert_called_once_with(server) scenario._delete_server.assert_called_once_with(server, force=False) @ddt.data("hard_reboot", "soft_reboot", "stop_start", "rescue_unrescue", "pause_unpause", "suspend_resume", "lock_unlock", "shelve_unshelve") def test_validate_actions(self, action): scenario = servers.BootAndBounceServer(self.context) 
self.assertRaises(rally_exceptions.InvalidConfigException, scenario.run, 1, 1, actions=[{action: "no"}]) self.assertRaises(rally_exceptions.InvalidConfigException, scenario.run, 1, 1, actions=[{action: -1}]) self.assertRaises(rally_exceptions.InvalidConfigException, scenario.run, 1, 1, actions=[{action: 0}]) def test_validate_actions_additional(self): scenario = servers.BootAndBounceServer(self.context) self.assertRaises(rally_exceptions.InvalidConfigException, scenario.run, 1, 1, actions=[{"not_existing_action": "no"}]) # NOTE: next should fail because actions parameter is a just a # dictionary, not an array of dictionaries self.assertRaises(rally_exceptions.InvalidConfigException, scenario.run, 1, 1, actions={"hard_reboot": 1}) def _verify_reboot(self, soft=True): actions = [{"soft_reboot" if soft else "hard_reboot": 5}] fake_server = mock.MagicMock() scenario = servers.BootAndBounceServer(self.context) scenario._reboot_server = mock.MagicMock() scenario._soft_reboot_server = mock.MagicMock() scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario.generate_random_name = mock.MagicMock(return_value="name") scenario.run("img", 1, actions=actions) scenario._boot_server.assert_called_once_with("img", 1) server_calls = [] for i in range(5): server_calls.append(mock.call(fake_server)) if soft: self.assertEqual(5, scenario._soft_reboot_server.call_count, "Reboot not called 5 times") scenario._soft_reboot_server.assert_has_calls(server_calls) else: self.assertEqual(5, scenario._reboot_server.call_count, "Reboot not called 5 times") scenario._reboot_server.assert_has_calls(server_calls) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_boot_soft_reboot(self): self._verify_reboot(soft=True) def test_boot_hard_reboot(self): self._verify_reboot(soft=False) def test_boot_and_delete_server(self): fake_server = object() scenario = servers.BootAndDeleteServer(self.context) 
scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario.sleep_between = mock.MagicMock() scenario.run("img", 0, 10, 20, fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg") scenario.sleep_between.assert_called_once_with(10, 20) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_boot_and_delete_multiple_servers(self): scenario = servers.BootAndDeleteMultipleServers(self.context) scenario._boot_servers = mock.Mock() scenario._delete_servers = mock.Mock() scenario.sleep_between = mock.Mock() scenario.run("img", "flavor", count=15, min_sleep=10, max_sleep=20, fakearg="fakearg") scenario._boot_servers.assert_called_once_with("img", "flavor", 1, instances_amount=15, fakearg="fakearg") scenario.sleep_between.assert_called_once_with(10, 20) scenario._delete_servers.assert_called_once_with( scenario._boot_servers.return_value, force=False) def test_boot_and_list_server(self): scenario = servers.BootAndListServer(self.context) # scenario.generate_random_name = mock.MagicMock(return_value="name") img_name = "img" flavor_uuid = 0 details = True fake_server_name = mock.MagicMock() scenario._boot_server = mock.MagicMock() scenario._list_servers = mock.MagicMock() scenario._list_servers.return_value = [mock.MagicMock(), fake_server_name, mock.MagicMock()] # Positive case scenario._boot_server.return_value = fake_server_name scenario.run(img_name, flavor_uuid, fakearg="fakearg") scenario._boot_server.assert_called_once_with(img_name, flavor_uuid, fakearg="fakearg") scenario._list_servers.assert_called_once_with(details) # Negative case1: server isn't created scenario._boot_server.return_value = None self.assertRaises(rally_exceptions.RallyAssertionError, scenario.run, img_name, flavor_uuid, fakearg="fakearg") scenario._boot_server.assert_called_with(img_name, flavor_uuid, 
fakearg="fakearg") # Negative case2: server not in the list of available servers scenario._boot_server.return_value = mock.MagicMock() self.assertRaises(rally_exceptions.RallyAssertionError, scenario.run, img_name, flavor_uuid, fakearg="fakearg") scenario._boot_server.assert_called_with(img_name, flavor_uuid, fakearg="fakearg") scenario._list_servers.assert_called_with(details) def test_suspend_and_resume_server(self): fake_server = object() scenario = servers.SuspendAndResumeServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._suspend_server = mock.MagicMock() scenario._resume_server = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.run("img", 0, fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg") scenario._suspend_server.assert_called_once_with(fake_server) scenario._resume_server.assert_called_once_with(fake_server) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_pause_and_unpause_server(self): fake_server = object() scenario = servers.PauseAndUnpauseServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._pause_server = mock.MagicMock() scenario._unpause_server = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.run("img", 0, fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg") scenario._pause_server.assert_called_once_with(fake_server) scenario._unpause_server.assert_called_once_with(fake_server) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_shelve_and_unshelve_server(self): fake_server = mock.MagicMock() scenario = servers.ShelveAndUnshelveServer(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._shelve_server = 
mock.MagicMock() scenario._unshelve_server = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.run("img", 0, fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg") scenario._shelve_server.assert_called_once_with(fake_server) scenario._unshelve_server.assert_called_once_with(fake_server) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_list_servers(self): scenario = servers.ListServers(self.context) scenario._list_servers = mock.MagicMock() scenario.run(True) scenario._list_servers.assert_called_once_with(True) def test_boot_server_from_volume(self): fake_server = object() scenario = servers.BootServerFromVolume(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) fake_volume = fakes.FakeVolumeManager().create() fake_volume.id = "volume_id" scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.run("img", 0, 5, volume_type=None, auto_assign_nic=False, fakearg="f") scenario._create_volume.assert_called_once_with(5, imageRef="img", volume_type=None) scenario._boot_server.assert_called_once_with( None, 0, auto_assign_nic=False, block_device_mapping={"vda": "volume_id:::1"}, fakearg="f") def test_boot_server_from_volume_and_delete(self): fake_server = object() scenario = servers.BootServerFromVolumeAndDelete(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.sleep_between = mock.MagicMock() scenario._delete_server = mock.MagicMock() fake_volume = fakes.FakeVolumeManager().create() fake_volume.id = "volume_id" scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.run("img", 0, 5, None, 10, 20, fakearg="f") scenario._create_volume.assert_called_once_with(5, imageRef="img", volume_type=None) scenario._boot_server.assert_called_once_with( None, 0, block_device_mapping={"vda": "volume_id:::1"}, fakearg="f") scenario.sleep_between.assert_called_once_with(10, 20) 
scenario._delete_server.assert_called_once_with(fake_server, force=False) def _prepare_boot(self, nic=None, assert_nic=False): fake_server = mock.MagicMock() scenario = servers.BootServer(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.generate_random_name = mock.MagicMock(return_value="name") kwargs = {"fakearg": "f"} expected_kwargs = {"fakearg": "f"} assert_nic = nic or assert_nic if nic: kwargs["nics"] = nic if assert_nic: self.clients("nova").networks.create("net-1") expected_kwargs["nics"] = nic or [{"net-id": "net-2"}] return scenario, kwargs, expected_kwargs def _verify_boot_server(self, nic=None, assert_nic=False): scenario, kwargs, expected_kwargs = self._prepare_boot( nic=nic, assert_nic=assert_nic) scenario.run("img", 0, **kwargs) scenario._boot_server.assert_called_once_with( "img", 0, auto_assign_nic=False, **expected_kwargs) def test_boot_server_no_nics(self): self._verify_boot_server(nic=None, assert_nic=False) def test_boot_server_with_nic(self): self._verify_boot_server(nic=[{"net-id": "net-1"}], assert_nic=True) def test_snapshot_server(self): fake_server = object() fake_image = fakes.FakeImageManager()._create() fake_image.id = "image_id" scenario = servers.SnapshotServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._create_image = mock.MagicMock(return_value=fake_image) scenario._delete_server = mock.MagicMock() scenario._delete_image = mock.MagicMock() scenario.run("i", 0, fakearg=2) scenario._boot_server.assert_has_calls([ mock.call("i", 0, fakearg=2), mock.call("image_id", 0, fakearg=2)]) scenario._create_image.assert_called_once_with(fake_server) scenario._delete_server.assert_has_calls([ mock.call(fake_server, force=False), mock.call(fake_server, force=False)]) scenario._delete_image.assert_called_once_with(fake_image) def _test_resize(self, confirm=False): fake_server = object() fake_image 
= fakes.FakeImageManager()._create() fake_image.id = "image_id" flavor = mock.MagicMock() to_flavor = mock.MagicMock() scenario = servers.ResizeServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._resize_confirm = mock.MagicMock() scenario._resize_revert = mock.MagicMock() scenario._resize = mock.MagicMock() scenario._delete_server = mock.MagicMock() kwargs = {"confirm": confirm} scenario.run(fake_image, flavor, to_flavor, **kwargs) scenario._resize.assert_called_once_with(fake_server, to_flavor) if confirm: scenario._resize_confirm.assert_called_once_with(fake_server) else: scenario._resize_revert.assert_called_once_with(fake_server) def test_resize_with_confirm(self): self._test_resize(confirm=True) def test_resize_with_revert(self): self._test_resize(confirm=False) @ddt.data({"confirm": True}, {"confirm": False}) @ddt.unpack def test_resize_shoutoff_server(self, confirm=False): fake_server = object() flavor = mock.MagicMock() to_flavor = mock.MagicMock() scenario = servers.ResizeShutoffServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._stop_server = mock.MagicMock() scenario._resize_confirm = mock.MagicMock() scenario._resize_revert = mock.MagicMock() scenario._resize = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.run("img", flavor, to_flavor, confirm=confirm) scenario._boot_server.assert_called_once_with("img", flavor) scenario._stop_server.assert_called_once_with(fake_server) scenario._resize.assert_called_once_with(fake_server, to_flavor) if confirm: scenario._resize_confirm.assert_called_once_with(fake_server, "SHUTOFF") else: scenario._resize_revert.assert_called_once_with(fake_server, "SHUTOFF") scenario._delete_server.assert_called_once_with(fake_server, force=False) @ddt.data({"confirm": True, "do_delete": 
True}, {"confirm": False, "do_delete": True}) @ddt.unpack def test_boot_server_attach_created_volume_and_resize(self, confirm=False, do_delete=False): fake_volume = mock.MagicMock() fake_server = mock.MagicMock() flavor = mock.MagicMock() to_flavor = mock.MagicMock() fake_attachment = mock.MagicMock() scenario = servers.BootServerAttachCreatedVolumeAndResize(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._attach_volume = mock.MagicMock(return_value=fake_attachment) scenario._resize_confirm = mock.MagicMock() scenario._resize_revert = mock.MagicMock() scenario._resize = mock.MagicMock() scenario._detach_volume = mock.MagicMock() scenario._delete_volume = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.sleep_between = mock.MagicMock() volume_size = 10 scenario.run("img", flavor, to_flavor, volume_size, min_sleep=10, max_sleep=20, confirm=confirm, do_delete=do_delete) scenario._boot_server.assert_called_once_with("img", flavor) scenario._create_volume.assert_called_once_with(volume_size) scenario._attach_volume.assert_called_once_with(fake_server, fake_volume) scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attachment) scenario.sleep_between.assert_called_once_with(10, 20) scenario._resize.assert_called_once_with(fake_server, to_flavor) if confirm: scenario._resize_confirm.assert_called_once_with(fake_server) else: scenario._resize_revert.assert_called_once_with(fake_server) if do_delete: scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attachment) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_server.assert_called_once_with(fake_server, force=False) @ddt.data({"confirm": True, "do_delete": True}, {"confirm": False, "do_delete": True}) @ddt.unpack def 
test_boot_server_from_volume_and_resize(self, confirm=False, do_delete=False): fake_server = object() flavor = mock.MagicMock() to_flavor = mock.MagicMock() scenario = servers.BootServerFromVolumeAndResize(self.context) scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._resize_confirm = mock.MagicMock() scenario._resize_revert = mock.MagicMock() scenario._resize = mock.MagicMock() scenario.sleep_between = mock.MagicMock() scenario._delete_server = mock.MagicMock() fake_volume = fakes.FakeVolumeManager().create() fake_volume.id = "volume_id" scenario._create_volume = mock.MagicMock(return_value=fake_volume) volume_size = 10 scenario.run("img", flavor, to_flavor, volume_size, min_sleep=10, max_sleep=20, confirm=confirm, do_delete=do_delete) scenario._create_volume.assert_called_once_with(10, imageRef="img") scenario._boot_server.assert_called_once_with( None, flavor, block_device_mapping={"vda": "volume_id:::1"}) scenario.sleep_between.assert_called_once_with(10, 20) scenario._resize.assert_called_once_with(fake_server, to_flavor) if confirm: scenario._resize_confirm.assert_called_once_with(fake_server) else: scenario._resize_revert.assert_called_once_with(fake_server) if do_delete: scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_boot_and_live_migrate_server(self): fake_server = mock.MagicMock() scenario = servers.BootAndLiveMigrateServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.sleep_between = mock.MagicMock() scenario._find_host_to_migrate = mock.MagicMock( return_value="host_name") scenario._live_migrate = mock.MagicMock() scenario._delete_server = mock.MagicMock() scenario.run("img", 0, min_sleep=10, max_sleep=20, fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg") 
scenario.sleep_between.assert_called_once_with(10, 20) scenario._find_host_to_migrate.assert_called_once_with(fake_server) scenario._live_migrate.assert_called_once_with(fake_server, "host_name", False, False) scenario._delete_server.assert_called_once_with(fake_server) def test_boot_server_from_volume_and_live_migrate(self): fake_server = mock.MagicMock() scenario = servers.BootServerFromVolumeAndLiveMigrate(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario.sleep_between = mock.MagicMock() scenario._find_host_to_migrate = mock.MagicMock( return_value="host_name") scenario._live_migrate = mock.MagicMock() scenario._delete_server = mock.MagicMock() fake_volume = fakes.FakeVolumeManager().create() fake_volume.id = "volume_id" scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario.run("img", 0, 5, volume_type=None, min_sleep=10, max_sleep=20, fakearg="f") scenario._create_volume.assert_called_once_with(5, imageRef="img", volume_type=None) scenario._boot_server.assert_called_once_with( None, 0, block_device_mapping={"vda": "volume_id:::1"}, fakearg="f") scenario.sleep_between.assert_called_once_with(10, 20) scenario._find_host_to_migrate.assert_called_once_with(fake_server) scenario._live_migrate.assert_called_once_with(fake_server, "host_name", False, False) scenario._delete_server.assert_called_once_with(fake_server, force=False) def test_boot_server_attach_created_volume_and_live_migrate(self): fake_volume = mock.MagicMock() fake_server = mock.MagicMock() fake_attachment = mock.MagicMock() scenario = servers.BootServerAttachCreatedVolumeAndLiveMigrate( self.context) scenario._attach_volume = mock.MagicMock(return_value=fake_attachment) scenario._detach_volume = mock.MagicMock() scenario.sleep_between = mock.MagicMock() scenario._find_host_to_migrate = mock.MagicMock( return_value="host_name") scenario._live_migrate = mock.MagicMock() 
scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._delete_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) scenario._delete_volume = mock.MagicMock() image = "img" flavor = "flavor" size = 5 boot_kwargs = {"some_var": "asd"} scenario.run(image, flavor, size, min_sleep=10, max_sleep=20, boot_server_kwargs=boot_kwargs) scenario._boot_server.assert_called_once_with(image, flavor, **boot_kwargs) scenario._create_volume.assert_called_once_with(size) scenario._attach_volume.assert_called_once_with(fake_server, fake_volume) scenario._detach_volume.assert_called_once_with(fake_server, fake_volume, fake_attachment) scenario.sleep_between.assert_called_once_with(10, 20) scenario._live_migrate.assert_called_once_with(fake_server, "host_name", False, False) scenario._delete_volume.assert_called_once_with(fake_volume) scenario._delete_server.assert_called_once_with(fake_server) def _test_boot_and_migrate_server(self, confirm=False): fake_server = mock.MagicMock() scenario = servers.BootAndMigrateServer(self.context) scenario.generate_random_name = mock.MagicMock(return_value="name") scenario._boot_server = mock.MagicMock(return_value=fake_server) scenario._migrate = mock.MagicMock() scenario._resize_confirm = mock.MagicMock() scenario._resize_revert = mock.MagicMock() scenario._delete_server = mock.MagicMock() kwargs = {"confirm": confirm} scenario.run("img", 0, fakearg="fakearg", **kwargs) scenario._boot_server.assert_called_once_with("img", 0, fakearg="fakearg", confirm=confirm) scenario._migrate.assert_called_once_with(fake_server) if confirm: scenario._resize_confirm.assert_called_once_with(fake_server, status="ACTIVE") else: scenario._resize_revert.assert_called_once_with(fake_server, status="ACTIVE") scenario._delete_server.assert_called_once_with(fake_server) def test_boot_and_migrate_server_with_confirm(self): self._test_boot_and_migrate_server(confirm=True) def 
test_boot_and_migrate_server_with_revert(self): self._test_boot_and_migrate_server(confirm=False) def test_boot_and_rebuild_server(self): scenario = servers.BootAndRebuildServer(self.context) scenario._boot_server = mock.Mock() scenario._rebuild_server = mock.Mock() scenario._delete_server = mock.Mock() from_image = "img1" to_image = "img2" flavor = "flavor" scenario.run(from_image, to_image, flavor, fakearg="fakearg") scenario._boot_server.assert_called_once_with(from_image, flavor, fakearg="fakearg") server = scenario._boot_server.return_value scenario._rebuild_server.assert_called_once_with(server, to_image) scenario._delete_server.assert_called_once_with(server) def test_boot_and_show_server(self): server = fakes.FakeServer() image = fakes.FakeImage() flavor = fakes.FakeFlavor() scenario = servers.BootAndShowServer(self.context) scenario._boot_server = mock.MagicMock(return_value=server) scenario._show_server = mock.MagicMock() scenario.run(image, flavor, fakearg="fakearg") scenario._boot_server.assert_called_once_with(image, flavor, fakearg="fakearg") scenario._show_server.assert_called_once_with(server) @ddt.data({"length": None}, {"length": 10}) @ddt.unpack def test_boot_and_get_console_server(self, length): server = fakes.FakeServer() image = fakes.FakeImage() flavor = fakes.FakeFlavor() kwargs = {"fakearg": "fakearg"} scenario = servers.BootAndGetConsoleOutput(self.context) scenario._boot_server = mock.MagicMock(return_value=server) scenario._get_server_console_output = mock.MagicMock() scenario.run(image, flavor, length, **kwargs) scenario._boot_server.assert_called_once_with(image, flavor, **kwargs) scenario._get_server_console_output.assert_called_once_with(server, length) @mock.patch(NOVA_SERVERS_MODULE + ".network_wrapper.wrap") def test_boot_and_associate_floating_ip(self, mock_wrap): scenario = servers.BootAndAssociateFloatingIp(self.context) server = mock.Mock() scenario._boot_server = mock.Mock(return_value=server) scenario._associate_floating_ip 
= mock.Mock() image = "img" flavor = "flavor" scenario.run(image, flavor, fakearg="fakearg") scenario._boot_server.assert_called_once_with(image, flavor, fakearg="fakearg") net_wrap = mock_wrap.return_value net_wrap.create_floating_ip.assert_called_once_with( tenant_id=server.tenant_id) scenario._associate_floating_ip.assert_called_once_with( server, net_wrap.create_floating_ip.return_value["ip"]) @mock.patch(NOVA_SERVERS_MODULE + ".network_wrapper.wrap") def test_boot_server_associate_and_dissociate_floating_ip(self, mock_wrap): scenario = servers.BootServerAssociateAndDissociateFloatingIP( self.context) server = mock.Mock() scenario._boot_server = mock.Mock(return_value=server) scenario._associate_floating_ip = mock.Mock() scenario._dissociate_floating_ip = mock.Mock() image = "img" flavor = "flavor" scenario.run(image, flavor, fakearg="fakearg") scenario._boot_server.assert_called_once_with(image, flavor, fakearg="fakearg") net_wrap = mock_wrap.return_value net_wrap.create_floating_ip.assert_called_once_with( tenant_id=server.tenant_id) scenario._associate_floating_ip.assert_called_once_with( server, net_wrap.create_floating_ip.return_value["ip"]) scenario._dissociate_floating_ip.assert_called_once_with( server, net_wrap.create_floating_ip.return_value["ip"]) def test_boot_and_update_server(self): scenario = servers.BootAndUpdateServer(self.context) scenario._boot_server = mock.Mock() scenario._update_server = mock.Mock() scenario.run("img", "flavor", "desp", fakearg="fakearg") scenario._boot_server.assert_called_once_with("img", "flavor", fakearg="fakearg") scenario._update_server.assert_called_once_with( scenario._boot_server.return_value, "desp") def test_boot_server_from_volume_snapshot(self): fake_volume = mock.MagicMock(id="volume_id") fake_snapshot = mock.MagicMock(id="snapshot_id") scenario = servers.BootServerFromVolumeSnapshot(self.context) scenario._boot_server = mock.MagicMock() scenario._create_volume = mock.MagicMock(return_value=fake_volume) 
scenario._create_snapshot = mock.MagicMock(return_value=fake_snapshot) scenario.run("img", "flavor", 1, volume_type=None, auto_assign_nic=False, fakearg="f") scenario._create_volume.assert_called_once_with(1, imageRef="img", volume_type=None) scenario._create_snapshot.assert_called_once_with("volume_id", False) scenario._boot_server.assert_called_once_with( None, "flavor", auto_assign_nic=False, block_device_mapping={"vda": "snapshot_id:snap::1"}, fakearg="f") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_networks.py0000664000567000056710000000567313073417717030041 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.plugins.openstack.scenarios.nova import networks from tests.unit import test class NovaNetworksTestCase(test.TestCase): def test_create_and_list_networks(self): scenario = networks.CreateAndListNetworks() scenario._create_network = mock.MagicMock() scenario._list_networks = mock.MagicMock() scenario._list_networks.return_value = ["fake_network_uuid", "new_network_uuid", "first_intergalactic_network"] start_cidr = "10.2.0.0/24" # Positive case: scenario._create_network.return_value = "new_network_uuid" scenario.run(start_cidr=start_cidr, fakearg="fakearg") scenario._create_network.assert_called_once_with( start_cidr, fakearg="fakearg") scenario._list_networks.assert_called_once_with() # Negative case 1: network isn't created scenario._create_network.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, start_cidr=start_cidr, fakearg="fakearg") scenario._create_network.assert_called_with( start_cidr, fakearg="fakearg") # Negative case 2: new network isn't in the list of available networks scenario._create_network.return_value = "universal_network" self.assertRaises(exceptions.RallyAssertionError, scenario.run, start_cidr=start_cidr, fakearg="fakearg") scenario._create_network.assert_called_with( start_cidr, fakearg="fakearg") scenario._list_networks.assert_called_with() def test_create_and_delete_network(self): scenario = networks.CreateAndDeleteNetwork() fake_network = mock.MagicMock() fake_network.cidr = "10.2.0.0/24" start_cidr = "10.2.0.0/24" scenario._create_network = mock.MagicMock(return_value=fake_network) scenario._delete_network = mock.MagicMock() scenario.run(start_cidr=start_cidr, fakearg="fakearg") scenario._create_network.assert_called_once_with( start_cidr, fakearg="fakearg") scenario._delete_network.assert_called_once_with( fake_network) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_utils.py0000775000567000056710000016604713073417720027325 0ustar 
jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from oslo_config import cfg from rally import exceptions as rally_exceptions from rally.plugins.openstack.scenarios.nova import utils from tests.unit import fakes from tests.unit import test BM_UTILS = "rally.task.utils" NOVA_UTILS = "rally.plugins.openstack.scenarios.nova.utils" CONF = cfg.CONF @ddt.ddt class NovaScenarioTestCase(test.ScenarioTestCase): def setUp(self): super(NovaScenarioTestCase, self).setUp() self.server = mock.Mock() self.server1 = mock.Mock() self.volume = mock.Mock() self.floating_ip = mock.Mock() self.image = mock.Mock() self.context["iteration"] = 3 self.context["config"] = {"users": {"tenants": 2}} def _context_with_networks(self, networks): retval = {"tenant": {"networks": networks}} retval.update(self.context) return retval def _context_with_secgroup(self, secgroup): retval = {"user": {"secgroup": secgroup, "credential": mock.MagicMock()}} retval.update(self.context) return retval def test__list_servers(self): servers_list = [] self.clients("nova").servers.list.return_value = servers_list nova_scenario = utils.NovaScenario(self.context) return_servers_list = nova_scenario._list_servers(True) self.assertEqual(servers_list, return_servers_list) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_servers") def test__pick_random_nic(self): context = {"tenant": {"networks": [{"id": 
"net_id_1"}, {"id": "net_id_2"}]}, "iteration": 0} nova_scenario = utils.NovaScenario(context=context) nic1 = nova_scenario._pick_random_nic() self.assertEqual(nic1, [{"net-id": "net_id_1"}]) context["iteration"] = 1 nova_scenario = utils.NovaScenario(context=context) nic2 = nova_scenario._pick_random_nic() # balance to net 2 self.assertEqual(nic2, [{"net-id": "net_id_2"}]) context["iteration"] = 2 nova_scenario = utils.NovaScenario(context=context) nic3 = nova_scenario._pick_random_nic() # balance again, get net 1 self.assertEqual(nic3, [{"net-id": "net_id_1"}]) @ddt.data( {}, {"kwargs": {"auto_assign_nic": True}}, {"kwargs": {"auto_assign_nic": True, "nics": [{"net-id": "baz_id"}]}}, {"context": {"user": {"secgroup": {"name": "test"}}}}, {"context": {"user": {"secgroup": {"name": "new8"}}}, "kwargs": {"security_groups": ["test8"]}}, {"context": {"user": {"secgroup": {"name": "test1"}}}, "kwargs": {"security_groups": ["test1"]}}, ) @ddt.unpack def test__boot_server(self, context=None, kwargs=None): self.clients("nova").servers.create.return_value = self.server if context is None: context = self.context context.setdefault("user", {}).setdefault("credential", mock.MagicMock()) context.setdefault("config", {}) nova_scenario = utils.NovaScenario(context=context) nova_scenario.generate_random_name = mock.Mock() nova_scenario._pick_random_nic = mock.Mock() if kwargs is None: kwargs = {} kwargs["fakearg"] = "fakearg" return_server = nova_scenario._boot_server("image_id", "flavor_id", **kwargs) self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_boot_poll_interval, timeout=CONF.benchmark.nova_server_boot_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self.assertEqual(self.mock_wait_for_status.mock.return_value, return_server) expected_kwargs = {"fakearg": "fakearg"} if "nics" in kwargs: 
expected_kwargs["nics"] = kwargs["nics"] elif "auto_assign_nic" in kwargs: expected_kwargs["nics"] = (nova_scenario._pick_random_nic. return_value) expected_secgroups = set() if "security_groups" in kwargs: expected_secgroups.update(kwargs["security_groups"]) if "secgroup" in context["user"]: expected_secgroups.add(context["user"]["secgroup"]["name"]) if expected_secgroups: expected_kwargs["security_groups"] = list(expected_secgroups) self.clients("nova").servers.create.assert_called_once_with( nova_scenario.generate_random_name.return_value, "image_id", "flavor_id", **expected_kwargs) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.boot_server") def test__boot_server_with_network_exception(self): self.clients("nova").servers.create.return_value = self.server nova_scenario = utils.NovaScenario( context=self._context_with_networks(None)) self.assertRaises(TypeError, nova_scenario._boot_server, "image_id", "flavor_id", auto_assign_nic=True) def test__suspend_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._suspend_server(self.server) self.server.suspend.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["SUSPENDED"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_suspend_poll_interval, timeout=CONF.benchmark.nova_server_suspend_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.suspend_server") def test__resume_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._resume_server(self.server) self.server.resume.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_resume_poll_interval, 
timeout=CONF.benchmark.nova_server_resume_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.resume_server") def test__pause_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._pause_server(self.server) self.server.pause.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["PAUSED"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_pause_poll_interval, timeout=CONF.benchmark.nova_server_pause_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.pause_server") def test__unpause_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._unpause_server(self.server) self.server.unpause.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_unpause_poll_interval, timeout=CONF.benchmark.nova_server_unpause_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.unpause_server") def test__shelve_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._shelve_server(self.server) self.server.shelve.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["SHELVED_OFFLOADED"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_shelve_poll_interval, timeout=CONF.benchmark.nova_server_shelve_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.shelve_server") def 
test__unshelve_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._unshelve_server(self.server) self.server.unshelve.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_unshelve_poll_interval, timeout=CONF.benchmark.nova_server_unshelve_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.unshelve_server") def test__create_image(self): self.clients("nova").images.get.return_value = self.image nova_scenario = utils.NovaScenario(context=self.context) return_image = nova_scenario._create_image(self.server) self.mock_wait_for_status.mock.assert_called_once_with( self.image, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark. nova_server_image_create_poll_interval, timeout=CONF.benchmark.nova_server_image_create_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self.assertEqual(self.mock_wait_for_status.mock.return_value, return_image) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.create_image") def test__default_delete_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._delete_server(self.server) self.server.delete.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["deleted"], check_deletion=True, update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_delete_poll_interval, timeout=CONF.benchmark.nova_server_delete_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.delete_server") def test__force_delete_server(self): nova_scenario = 
utils.NovaScenario(context=self.context) nova_scenario._delete_server(self.server, force=True) self.server.force_delete.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["deleted"], check_deletion=True, update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_delete_poll_interval, timeout=CONF.benchmark.nova_server_delete_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.force_delete_server") def test__reboot_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._reboot_server(self.server) self.server.reboot.assert_called_once_with(reboot_type="HARD") self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_reboot_poll_interval, timeout=CONF.benchmark.nova_server_reboot_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.reboot_server") def test__soft_reboot_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._soft_reboot_server(self.server) self.server.reboot.assert_called_once_with(reboot_type="SOFT") self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_reboot_poll_interval, timeout=CONF.benchmark.nova_server_reboot_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.soft_reboot_server") def test__rebuild_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._rebuild_server(self.server, "img", fakearg="fakearg") 
self.server.rebuild.assert_called_once_with("img", fakearg="fakearg") self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_rebuild_poll_interval, timeout=CONF.benchmark.nova_server_rebuild_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.rebuild_server") def test__start_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._start_server(self.server) self.server.start.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_start_poll_interval, timeout=CONF.benchmark.nova_server_start_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.start_server") def test__stop_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._stop_server(self.server) self.server.stop.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["SHUTOFF"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_stop_poll_interval, timeout=CONF.benchmark.nova_server_stop_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.stop_server") def test__rescue_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._rescue_server(self.server) self.server.rescue.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["RESCUE"], update_resource=self.mock_get_from_manager.mock.return_value, 
check_interval=CONF.benchmark.nova_server_rescue_poll_interval, timeout=CONF.benchmark.nova_server_rescue_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.rescue_server") def test__unrescue_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._unrescue_server(self.server) self.server.unrescue.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_unrescue_poll_interval, timeout=CONF.benchmark.nova_server_unrescue_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.unrescue_server") def _test_delete_servers(self, force=False): servers = [self.server, self.server1] nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._delete_servers(servers, force=force) check_interval = CONF.benchmark.nova_server_delete_poll_interval expected = [] for server in servers: expected.append(mock.call( server, ready_statuses=["deleted"], check_deletion=True, update_resource=self.mock_get_from_manager.mock.return_value, check_interval=check_interval, timeout=CONF.benchmark.nova_server_delete_timeout)) if force: server.force_delete.assert_called_once_with() self.assertFalse(server.delete.called) else: server.delete.assert_called_once_with() self.assertFalse(server.force_delete.called) self.mock_wait_for_status.mock.assert_has_calls(expected) timer_name = "nova.%sdelete_servers" % ("force_" if force else "") self._test_atomic_action_timer(nova_scenario.atomic_actions(), timer_name) def test__default_delete_servers(self): self._test_delete_servers() def test__force_delete_servers(self): self._test_delete_servers(force=True) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def 
test__delete_image(self, mock_wrap): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._clients = mock.Mock() nova_scenario._delete_image(self.image) self.clients("glance").images.delete.assert_called_once_with( self.image.id) self.mock_wait_for_status.mock.assert_called_once_with( self.image, ready_statuses=["deleted", "pending_delete"], check_deletion=True, update_resource=mock_wrap.return_value.get_image, check_interval=CONF.benchmark. nova_server_image_delete_poll_interval, timeout=CONF.benchmark.nova_server_image_delete_timeout) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.delete_image") @ddt.data( {"requests": 1}, {"requests": 25}, {"requests": 2, "instances_amount": 100, "auto_assign_nic": True, "fakearg": "fake"}, {"auto_assign_nic": True, "nics": [{"net-id": "foo"}]}, {"auto_assign_nic": False, "nics": [{"net-id": "foo"}]}) @ddt.unpack def test__boot_servers(self, image_id="image", flavor_id="flavor", requests=1, instances_amount=1, auto_assign_nic=False, **kwargs): servers = [mock.Mock() for i in range(instances_amount)] self.clients("nova").servers.list.return_value = servers scenario = utils.NovaScenario(context=self.context) scenario.generate_random_name = mock.Mock() scenario._pick_random_nic = mock.Mock() scenario._boot_servers(image_id, flavor_id, requests, instances_amount=instances_amount, auto_assign_nic=auto_assign_nic, **kwargs) expected_kwargs = dict(kwargs) if auto_assign_nic and "nics" not in kwargs: expected_kwargs["nics"] = scenario._pick_random_nic.return_value create_calls = [ mock.call( "%s_%d" % (scenario.generate_random_name.return_value, i), image_id, flavor_id, min_count=instances_amount, max_count=instances_amount, **expected_kwargs) for i in range(requests)] self.clients("nova").servers.create.assert_has_calls(create_calls) wait_for_status_calls = [ mock.call( servers[i], ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, 
check_interval=CONF.benchmark.nova_server_boot_poll_interval, timeout=CONF.benchmark.nova_server_boot_timeout) for i in range(instances_amount)] self.mock_wait_for_status.mock.assert_has_calls(wait_for_status_calls) self.mock_get_from_manager.mock.assert_has_calls( [mock.call() for i in range(instances_amount)]) self._test_atomic_action_timer(scenario.atomic_actions(), "nova.boot_servers") def test__show_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._show_server(self.server) self.clients("nova").servers.get.assert_called_once_with( self.server ) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.show_server") def test__get_console_server(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._get_server_console_output(self.server) self.clients( "nova").servers.get_console_output.assert_called_once_with( self.server, length=None) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.get_console_output_server") def test__associate_floating_ip(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._associate_floating_ip(self.server, self.floating_ip) self.server.add_floating_ip.assert_called_once_with(self.floating_ip, fixed_address=None) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.associate_floating_ip") def test__associate_floating_ip_with_no_atomic_action(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._associate_floating_ip(self.server, self.floating_ip, atomic_action=False) self.server.add_floating_ip.assert_called_once_with(self.floating_ip, fixed_address=None) def test__dissociate_floating_ip(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._dissociate_floating_ip(self.server, self.floating_ip) self.server.remove_floating_ip.assert_called_once_with( self.floating_ip) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.dissociate_floating_ip") def 
test__dissociate_floating_ip_with_no_atomic_action(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._dissociate_floating_ip(self.server, self.floating_ip, atomic_action=False) self.server.remove_floating_ip.assert_called_once_with( self.floating_ip) def test__check_ip_address(self): nova_scenario = utils.NovaScenario(context=self.context) fake_server = fakes.FakeServerManager().create("test_server", "image_id_01", "flavor_id_01") fake_server.addresses = { "private": [ {"version": 4, "addr": "1.2.3.4"}, ]} floating_ip = fakes.FakeFloatingIP() floating_ip.ip = "10.20.30.40" # Also test function check_ip_address accept a string as attr self.assertFalse( nova_scenario.check_ip_address(floating_ip.ip)(fake_server)) self.assertTrue( nova_scenario.check_ip_address(floating_ip.ip, must_exist=False) (fake_server)) fake_server.addresses["private"].append( {"version": 4, "addr": floating_ip.ip} ) # Also test function check_ip_address accept an object with attr ip self.assertTrue( nova_scenario.check_ip_address(floating_ip) (fake_server)) self.assertFalse( nova_scenario.check_ip_address(floating_ip, must_exist=False) (fake_server)) def test__list_networks(self): network_list = [] self.clients("nova").networks.list.return_value = network_list nova_scenario = utils.NovaScenario(context=self.context) return_network_list = nova_scenario._list_networks() self.assertEqual(network_list, return_network_list) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_networks") def test__resize(self): nova_scenario = utils.NovaScenario(context=self.context) to_flavor = mock.Mock() nova_scenario._resize(self.server, to_flavor) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.resize") def test__resize_confirm(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._resize_confirm(self.server) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.resize_confirm") @ddt.data({}, {"status": 
"SHUTOFF"}) @ddt.unpack def test__resize_revert(self, status=None): nova_scenario = utils.NovaScenario(context=self.context) if status is None: nova_scenario._resize_revert(self.server) status = "ACTIVE" else: nova_scenario._resize_revert(self.server, status=status) self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=[status], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark. nova_server_resize_revert_poll_interval, timeout=CONF.benchmark.nova_server_resize_revert_timeout) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.resize_revert") def test__attach_volume(self): expect_attach = mock.MagicMock() device = None (self.clients("nova").volumes.create_server_volume .return_value) = expect_attach nova_scenario = utils.NovaScenario(context=self.context) attach = nova_scenario._attach_volume(self.server, self.volume, device) (self.clients("nova").volumes.create_server_volume .assert_called_once_with(self.server.id, self.volume.id, device)) self.assertEqual(expect_attach, attach) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.attach_volume") def test__detach_volume(self): attach = mock.MagicMock(id="attach_id") self.clients("nova").volumes.delete_server_volume.return_value = None nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._detach_volume(self.server, self.volume, attach) (self.clients("nova").volumes.delete_server_volume .assert_called_once_with(self.server.id, attach.id)) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.detach_volume") def test__detach_volume_no_attach(self): self.clients("nova").volumes.delete_server_volume.return_value = None nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._detach_volume(self.server, self.volume, None) (self.clients("nova").volumes.delete_server_volume .assert_called_once_with(self.server.id, self.volume.id)) 
self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.detach_volume") def test__live_migrate_server(self): fake_host = mock.MagicMock() self.admin_clients("nova").servers.get(return_value=self.server) nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._live_migrate(self.server, fake_host, block_migration=False, disk_over_commit=False, skip_host_check=True) self.mock_wait_for_status.mock.assert_called_once_with( self.server, ready_statuses=["ACTIVE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark. nova_server_live_migrate_poll_interval, timeout=CONF.benchmark.nova_server_live_migrate_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.live_migrate") def test__find_host_to_migrate(self): fake_server = self.server fake_host = {"nova-compute": {"available": True}} fake_host_compute_off = {"nova-compute": {"available": False}} fake_host_no_compute = {"nova-conductor": {"available": True}} self.admin_clients("nova").servers.get.return_value = fake_server self.admin_clients("nova").availability_zones.list.return_value = [ mock.MagicMock(zoneName="a", hosts={"a1": fake_host, "a2": fake_host, "a3": fake_host}), mock.MagicMock(zoneName="b", hosts={"b1": fake_host, "b2": fake_host, "b3": fake_host, "b4": fake_host_compute_off, "b5": fake_host_no_compute}), mock.MagicMock(zoneName="c", hosts={"c1": fake_host, "c2": fake_host, "c3": fake_host}) ] setattr(fake_server, "OS-EXT-SRV-ATTR:host", "b2") setattr(fake_server, "OS-EXT-AZ:availability_zone", "b") nova_scenario = utils.NovaScenario(context=self.context) self.assertIn( nova_scenario._find_host_to_migrate(fake_server), ["b1", "b3"]) def test__migrate_server(self): fake_server = self.server setattr(fake_server, "OS-EXT-SRV-ATTR:host", "a1") self.clients("nova").servers.get(return_value=fake_server) nova_scenario = utils.NovaScenario(context=self.context) 
nova_scenario._migrate(fake_server, skip_host_check=True) self.mock_wait_for_status.mock.assert_called_once_with( fake_server, ready_statuses=["VERIFY_RESIZE"], update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.nova_server_migrate_poll_interval, timeout=CONF.benchmark.nova_server_migrate_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.migrate") self.assertRaises(rally_exceptions.MigrateException, nova_scenario._migrate, fake_server, skip_host_check=False) def test__create_security_groups(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario.generate_random_name = mock.MagicMock() security_group_count = 5 sec_groups = nova_scenario._create_security_groups( security_group_count) self.assertEqual(security_group_count, len(sec_groups)) self.assertEqual(security_group_count, nova_scenario.generate_random_name.call_count) self.assertEqual( security_group_count, self.clients("nova").security_groups.create.call_count) self._test_atomic_action_timer( nova_scenario.atomic_actions(), "nova.create_%s_security_groups" % security_group_count) def test__create_rules_for_security_group(self): nova_scenario = utils.NovaScenario(context=self.context) fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] rules_per_security_group = 10 nova_scenario._create_rules_for_security_group( fake_secgroups, rules_per_security_group) self.assertEqual( len(fake_secgroups) * rules_per_security_group, self.clients("nova").security_group_rules.create.call_count) self._test_atomic_action_timer( nova_scenario.atomic_actions(), "nova.create_%s_rules" % (rules_per_security_group * len(fake_secgroups))) def test__update_security_groups(self): nova_scenario = utils.NovaScenario(context=self.context) fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), 
fakes.FakeSecurityGroup(None, None, 2, "uuid2")] nova_scenario._update_security_groups(fake_secgroups) self.assertEqual( len(fake_secgroups), self.clients("nova").security_groups.update.call_count) self._test_atomic_action_timer( nova_scenario.atomic_actions(), "nova.update_%s_security_groups" % len(fake_secgroups)) def test__delete_security_groups(self): nova_scenario = utils.NovaScenario(context=self.context) fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1"), fakes.FakeSecurityGroup(None, None, 2, "uuid2")] nova_scenario._delete_security_groups(fake_secgroups) self.assertSequenceEqual( map(lambda x: mock.call(x.id), fake_secgroups), self.clients("nova").security_groups.delete.call_args_list) self._test_atomic_action_timer( nova_scenario.atomic_actions(), "nova.delete_%s_security_groups" % len(fake_secgroups)) def test__list_security_groups(self): nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._list_security_groups() self.clients("nova").security_groups.list.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_security_groups") def test__add_server_secgroups(self): server = mock.Mock() fake_secgroups = [fakes.FakeSecurityGroup(None, None, 1, "uuid1")] nova_scenario = utils.NovaScenario() security_group = fake_secgroups[0] result = nova_scenario._add_server_secgroups(server, security_group.name) self.assertEqual( self.clients("nova").servers.add_security_group.return_value, result) (self.clients("nova").servers.add_security_group. 
assert_called_once_with(server, security_group.name)) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.add_server_secgroups") def test__list_keypairs(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_keypairs() self.assertEqual(self.clients("nova").keypairs.list.return_value, result) self.clients("nova").keypairs.list.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_keypairs") def test__create_keypair(self): nova_scenario = utils.NovaScenario() nova_scenario.generate_random_name = mock.Mock( return_value="rally_nova_keypair_fake") result = nova_scenario._create_keypair(fakeargs="fakeargs") self.assertEqual( self.clients("nova").keypairs.create.return_value.name, result) self.clients("nova").keypairs.create.assert_called_once_with( "rally_nova_keypair_fake", fakeargs="fakeargs") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.create_keypair") def test__create_server_group(self): nova_scenario = utils.NovaScenario() result = nova_scenario._create_server_group(fakeargs="fakeargs") self.assertEqual( self.clients("nova").server_groups.create.return_value, result) self.clients("nova").server_groups.create.assert_called_once_with( fakeargs="fakeargs") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.create_server_group") def test__list_server_groups(self): nova_scenario = utils.NovaScenario() result1 = nova_scenario._list_server_groups(all_projects=False) result2 = nova_scenario._list_server_groups(all_projects=True) self.assertEqual(self.clients("nova").server_groups.list.return_value, result1) admcli = self.admin_clients("nova") self.assertEqual(admcli.server_groups.list.return_value, result2) self.clients("nova").server_groups.list.assert_called_once_with( False) self.admin_clients("nova").server_groups.list.assert_called_once_with( True) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_server_groups") def 
test__delete_keypair(self): nova_scenario = utils.NovaScenario() nova_scenario._delete_keypair("fake_keypair") self.clients("nova").keypairs.delete.assert_called_once_with( "fake_keypair") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.delete_keypair") def test__get_keypair(self): nova_scenario = utils.NovaScenario() nova_scenario._get_keypair("fake_keypair") self.clients("nova").keypairs.get.assert_called_once_with( "fake_keypair") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.get_keypair") def test__list_floating_ips_bulk(self): floating_ips_bulk_list = ["foo_floating_ips_bulk"] self.admin_clients("nova").floating_ips_bulk.list.return_value = ( floating_ips_bulk_list) nova_scenario = utils.NovaScenario(context=self.context) return_floating_ips_bulk_list = nova_scenario._list_floating_ips_bulk() self.assertEqual(floating_ips_bulk_list, return_floating_ips_bulk_list) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_floating_ips_bulk") @mock.patch(NOVA_UTILS + ".network_wrapper.generate_cidr") def test__create_floating_ips_bulk(self, mock_generate_cidr): fake_cidr = "10.2.0.0/24" fake_pool = "test1" fake_floating_ips_bulk = mock.MagicMock() fake_floating_ips_bulk.ip_range = fake_cidr fake_floating_ips_bulk.pool = fake_pool self.admin_clients("nova").floating_ips_bulk.create.return_value = ( fake_floating_ips_bulk) nova_scenario = utils.NovaScenario(context=self.context) return_iprange = nova_scenario._create_floating_ips_bulk(fake_cidr) mock_generate_cidr.assert_called_once_with(start_cidr=fake_cidr) self.assertEqual(return_iprange, fake_floating_ips_bulk) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.create_floating_ips_bulk") def test__delete_floating_ips_bulk(self): fake_cidr = "10.2.0.0/24" nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._delete_floating_ips_bulk(fake_cidr) self.admin_clients( 
"nova").floating_ips_bulk.delete.assert_called_once_with(fake_cidr) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.delete_floating_ips_bulk") def test__list_hypervisors(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_hypervisors(detailed=False) self.assertEqual( self.admin_clients("nova").hypervisors.list.return_value, result) self.admin_clients("nova").hypervisors.list.assert_called_once_with( False) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_hypervisors") def test__statistics_hypervisors(self): nova_scenario = utils.NovaScenario() result = nova_scenario._statistics_hypervisors() self.assertEqual( self.admin_clients("nova").hypervisors.statistics.return_value, result) (self.admin_clients("nova").hypervisors.statistics. assert_called_once_with()) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.statistics_hypervisors") def test__get_hypervisor(self): hypervisor = mock.Mock() nova_scenario = utils.NovaScenario() result = nova_scenario._get_hypervisor(hypervisor) self.assertEqual( self.admin_clients("nova").hypervisors.get.return_value, result) self.admin_clients("nova").hypervisors.get.assert_called_once_with( hypervisor) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.get_hypervisor") def test__search_hypervisors(self): nova_scenario = utils.NovaScenario() nova_scenario._search_hypervisors("fake_hostname", servers=False) self.admin_clients("nova").hypervisors.search.assert_called_once_with( "fake_hostname", servers=False) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.search_hypervisors") def test__get_host(self): nova_scenario = utils.NovaScenario() result = nova_scenario._get_host("host_name") self.assertEqual( self.admin_clients("nova").hosts.get.return_value, result) self.admin_clients("nova").hosts.get.assert_called_once_with( "host_name") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.get_host") def 
test__list_images(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_images(detailed=False, fakearg="fakearg") self.assertEqual(self.clients("nova").images.list.return_value, result) self.clients("nova").images.list.assert_called_once_with( False, fakearg="fakearg") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_images") def test__lock_server(self): server = mock.Mock() nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._lock_server(server) server.lock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.lock_server") def test__unlock_server(self): server = mock.Mock() nova_scenario = utils.NovaScenario(context=self.context) nova_scenario._unlock_server(server) server.unlock.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.unlock_server") def test__delete_network(self): nova_scenario = utils.NovaScenario() result = nova_scenario._delete_network("fake_net_id") self.assertEqual( self.admin_clients("nova").networks.delete.return_value, result) self.admin_clients("nova").networks.delete.assert_called_once_with( "fake_net_id") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.delete_network") @mock.patch(NOVA_UTILS + ".network_wrapper.generate_cidr") def test__create_network(self, mock_generate_cidr): nova_scenario = utils.NovaScenario() nova_scenario.generate_random_name = mock.Mock( return_value="rally_novanet_fake") result = nova_scenario._create_network("fake_start_cidr", fakearg="fakearg") mock_generate_cidr.assert_called_once_with( start_cidr="fake_start_cidr") self.assertEqual( self.admin_clients("nova").networks.create.return_value, result) self.admin_clients("nova").networks.create.assert_called_once_with( label="rally_novanet_fake", cidr=mock_generate_cidr.return_value, fakearg="fakearg") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.create_network") def 
test__list_flavors(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_flavors(detailed=True, fakearg="fakearg") self.assertEqual(self.clients("nova").flavors.list.return_value, result) self.clients("nova").flavors.list.assert_called_once_with( True, fakearg="fakearg") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_flavors") def test__set_flavor_keys(self): flavor = mock.MagicMock() nova_scenario = utils.NovaScenario() extra_specs = {"fakeargs": "foo"} flavor.set_keys = mock.MagicMock() result = nova_scenario._set_flavor_keys(flavor, extra_specs) self.assertEqual(flavor.set_keys.return_value, result) flavor.set_keys.assert_called_once_with(extra_specs) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.set_flavor_keys") @ddt.data({}, {"hypervisor": "foo_hypervisor"}) @ddt.unpack def test__list_agents(self, hypervisor=None): nova_scenario = utils.NovaScenario() result = nova_scenario._list_agents(hypervisor) self.assertEqual( self.admin_clients("nova").agents.list.return_value, result) self.admin_clients("nova").agents.list.assert_called_once_with( hypervisor) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_agents") def test__list_aggregates(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_aggregates() self.assertEqual( self.admin_clients("nova").aggregates.list.return_value, result) self.admin_clients("nova").aggregates.list.assert_called_once_with() self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_aggregates") def test__list_availability_zones(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_availability_zones(detailed=True) self.assertEqual( self.admin_clients("nova").availability_zones.list.return_value, result) avail_zones_client = self.admin_clients("nova").availability_zones avail_zones_client.list.assert_called_once_with(True) self._test_atomic_action_timer(nova_scenario.atomic_actions(), 
"nova.list_availability_zones") @ddt.data({}, {"zone": "foo_zone"}, {"zone": "foo_zone", "service": "some"}) @ddt.unpack def test__list_hosts(self, zone=None, service=None): hosts = [mock.Mock(service="foo"), mock.Mock(service="some")] self.admin_clients("nova").hosts.list.return_value = hosts nova_scenario = utils.NovaScenario() result = nova_scenario._list_hosts(zone, service=service) if service: hosts = [h for h in hosts if h.service == service] self.assertEqual(hosts, result) self.admin_clients("nova").hosts.list.assert_called_once_with(zone) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_hosts") @ddt.data({}, {"host": "foo_host"}, {"binary": "foo_binary"}, {"host": "foo_host", "binary": "foo_binary"}) @ddt.unpack def test__list_services(self, host=None, binary=None): nova_scenario = utils.NovaScenario() result = nova_scenario._list_services(host=host, binary=binary) self.assertEqual(self.admin_clients("nova").services.list.return_value, result) self.admin_clients("nova").services.list.assert_called_once_with( host, binary) self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_services") def test__list_flavor_access(self): nova_scenario = utils.NovaScenario() result = nova_scenario._list_flavor_access("foo_id") self.assertEqual( self.admin_clients("nova").flavor_access.list.return_value, result) self.admin_clients("nova").flavor_access.list.assert_called_once_with( flavor="foo_id") self._test_atomic_action_timer(nova_scenario.atomic_actions(), "nova.list_flavor_access") def test__add_tenant_access(self): tenant = mock.Mock() flavor = mock.Mock() nova_scenario = utils.NovaScenario() admin_clients = self.admin_clients("nova") result = nova_scenario._add_tenant_access(flavor.id, tenant.id) self.assertEqual( admin_clients.flavor_access.add_tenant_access.return_value, result) admin_clients.flavor_access.add_tenant_access.assert_called_once_with( flavor.id, tenant.id) 
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.add_tenant_access")

    def test__create_flavor(self):
        """_create_flavor must pass the generated random name to novaclient
        and record the "nova.create_flavor" atomic action."""
        nova_scenario = utils.NovaScenario()
        random_name = "random_name"
        nova_scenario.generate_random_name = mock.Mock(
            return_value=random_name)
        result = nova_scenario._create_flavor(500, 1, 1, fakearg="fakearg")
        self.assertEqual(
            self.admin_clients("nova").flavors.create.return_value, result)
        self.admin_clients("nova").flavors.create.assert_called_once_with(
            random_name, 500, 1, 1, fakearg="fakearg")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.create_flavor")

    def test__get_flavor(self):
        """_get_flavor is a thin wrapper over admin flavors.get."""
        nova_scenario = utils.NovaScenario()
        result = nova_scenario._get_flavor("foo_flavor_id")
        self.assertEqual(
            self.admin_clients("nova").flavors.get.return_value, result)
        self.admin_clients("nova").flavors.get.assert_called_once_with(
            "foo_flavor_id")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.get_flavor")

    def test__delete_flavor(self):
        """_delete_flavor is a thin wrapper over admin flavors.delete."""
        nova_scenario = utils.NovaScenario()
        result = nova_scenario._delete_flavor("foo_flavor_id")
        self.assertEqual(
            self.admin_clients("nova").flavors.delete.return_value, result)
        self.admin_clients("nova").flavors.delete.assert_called_once_with(
            "foo_flavor_id")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.delete_flavor")

    def test__update_server(self):
        """_update_server always renames the server; an optional description
        kwarg is forwarded unchanged."""
        server = mock.Mock()
        nova_scenario = utils.NovaScenario()
        nova_scenario.generate_random_name = mock.Mock(
            return_value="new_name")
        server.update = mock.Mock()

        # Case 1: rename only.
        result = nova_scenario._update_server(server)
        self.assertEqual(result, server.update.return_value)
        nova_scenario.generate_random_name.assert_called_once_with()
        server.update.assert_called_once_with(name="new_name")

        # Case 2: rename plus explicit description.
        nova_scenario.generate_random_name.reset_mock()
        server.update.reset_mock()
        result = nova_scenario._update_server(server, description="desp")
        self.assertEqual(result, server.update.return_value)
        nova_scenario.generate_random_name.assert_called_once_with()
        server.update.assert_called_once_with(name="new_name",
                                              description="desp")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.update_server")

    def test_create_aggregate(self):
        """_create_aggregate names the aggregate with a generated name."""
        nova_scenario = utils.NovaScenario(context=self.context)
        random_name = "random_name"
        nova_scenario.generate_random_name = mock.Mock(
            return_value=random_name)
        result = nova_scenario._create_aggregate("nova")
        self.assertEqual(
            self.admin_clients("nova").aggregates.create.return_value,
            result)
        self.admin_clients("nova").aggregates.create.assert_called_once_with(
            random_name, "nova")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.create_aggregate")

    def test_delete_aggregate(self):
        """_delete_aggregate is a thin wrapper over aggregates.delete."""
        nova_scenario = utils.NovaScenario(context=self.context)
        result = nova_scenario._delete_aggregate("fake_aggregate")
        self.assertEqual(
            self.admin_clients("nova").aggregates.delete.return_value,
            result)
        self.admin_clients("nova").aggregates.delete.assert_called_once_with(
            "fake_aggregate")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.delete_aggregate")

    def test_get_aggregate_details(self):
        """_get_aggregate_details wraps aggregates.get_details."""
        nova_scenario = utils.NovaScenario(context=self.context)
        result = nova_scenario._get_aggregate_details("fake_aggregate")
        self.assertEqual(
            self.admin_clients("nova").aggregates.get_details.return_value,
            result)
        self.admin_clients(
            "nova").aggregates.get_details.assert_called_once_with(
            "fake_aggregate")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.get_aggregate_details")

    def test_update_aggregate(self):
        """_update_aggregate resets both name and availability zone to the
        same generated random name."""
        aggregate = mock.Mock()
        nova_scenario = utils.NovaScenario(context=self.context)
        nova_scenario.generate_random_name = mock.Mock(
            return_value="random_name")
        values = {"name": "random_name",
                  "availability_zone": "random_name"}
        result = nova_scenario._update_aggregate(aggregate=aggregate)
        self.assertEqual(
            self.admin_clients("nova").aggregates.update.return_value,
            result)
        self.admin_clients("nova").aggregates.update.assert_called_once_with(
            aggregate, values)
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.update_aggregate")

    def test_aggregate_add_host(self):
        """_aggregate_add_host wraps aggregates.add_host."""
        nova_scenario = utils.NovaScenario(context=self.context)
        result = nova_scenario._aggregate_add_host("fake_agg", "fake_host")
        self.assertEqual(
            self.admin_clients("nova").aggregates.add_host.return_value,
            result)
        self.admin_clients("nova").aggregates.add_host.assert_called_once_with(
            "fake_agg", "fake_host")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.aggregate_add_host")

    def test_aggregate_remove_host(self):
        """_aggregate_remove_host wraps aggregates.remove_host."""
        nova_scenario = utils.NovaScenario(context=self.context)
        result = nova_scenario._aggregate_remove_host("fake_agg", "fake_host")
        self.assertEqual(
            self.admin_clients("nova").aggregates.remove_host.return_value,
            result)
        self.admin_clients(
            "nova").aggregates.remove_host.assert_called_once_with(
            "fake_agg", "fake_host")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.aggregate_remove_host")

    def test__uptime_hypervisor(self):
        """_uptime_hypervisor wraps hypervisors.uptime (no return checked)."""
        nova_scenario = utils.NovaScenario()
        nova_scenario._uptime_hypervisor("fake_hostname")
        self.admin_clients("nova").hypervisors.uptime.assert_called_once_with(
            "fake_hostname")
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.uptime_hypervisor")

    def test_aggregate_set_metadata(self):
        """_aggregate_set_metadata wraps aggregates.set_metadata."""
        nova_scenario = utils.NovaScenario(context=self.context)
        fake_metadata = {"test_metadata": "true"}
        result = nova_scenario._aggregate_set_metadata("fake_aggregate",
                                                       fake_metadata)
        self.assertEqual(
            self.admin_clients("nova").aggregates.set_metadata.return_value,
            result)
        self.admin_clients(
            "nova").aggregates.set_metadata.assert_called_once_with(
            "fake_aggregate", fake_metadata)
        self._test_atomic_action_timer(nova_scenario.atomic_actions(),
                                       "nova.aggregate_set_metadata")
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally.plugins.openstack.scenarios.nova import hypervisors
from tests.unit import test


class NovaHypervisorsTestCase(test.ScenarioTestCase):
    """Unit tests for the nova hypervisor scenarios.

    Each scenario helper (_list_hypervisors etc.) is replaced with a mock
    so only the scenario's own delegation logic is exercised.
    """

    def test_list_hypervisors(self):
        scenario = hypervisors.ListHypervisors(self.context)
        scenario._list_hypervisors = mock.Mock()
        scenario.run(detailed=False)
        scenario._list_hypervisors.assert_called_once_with(False)

    def test_list_and_get_hypervisors(self):
        scenario = hypervisors.ListAndGetHypervisors(self.context)
        scenario._list_hypervisors = mock.MagicMock(detailed=False)
        scenario._get_hypervisor = mock.MagicMock()
        scenario.run(detailed=False)
        scenario._list_hypervisors.assert_called_once_with(False)
        # NOTE(review): iterating a bare MagicMock attribute yields no items
        # by default, so this loop body may never execute -- confirm the
        # per-hypervisor assertion is actually exercised.
        for hypervisor in scenario._list_hypervisors.return_value:
            scenario._get_hypervisor.assert_called_once_with(hypervisor)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "nova.get_hypervisor")

    def test_statistics_hypervisors(self):
        scenario = hypervisors.StatisticsHypervisors(self.context)
        scenario._statistics_hypervisors = mock.Mock()
        scenario.run()
        scenario._statistics_hypervisors.assert_called_once_with()

    def test_list_and_get_uptime_hypervisors(self):
        scenario = hypervisors.ListAndGetUptimeHypervisors(self.context)
        scenario._list_hypervisors = mock.MagicMock(detailed=False)
        scenario._uptime_hypervisor = mock.MagicMock()
        scenario.run(detailed=False)
        scenario._list_hypervisors.assert_called_once_with(False)
        # NOTE(review): same MagicMock-iteration caveat as above.
        for hypervisor in scenario._list_hypervisors.return_value:
            scenario._uptime_hypervisor.assert_called_once_with(hypervisor)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "nova.uptime_hypervisor")

    def test_list_and_search_hypervisors(self):
        # The search step must be fed each listed hypervisor's hostname.
        fake_hypervisors = [mock.Mock(hypervisor_hostname="fake_hostname")]
        scenario = hypervisors.ListAndSearchHypervisors(self.context)
        scenario._list_hypervisors = mock.MagicMock(
            return_value=fake_hypervisors)
        scenario._search_hypervisors = mock.MagicMock()
        scenario.run(detailed=False)
        scenario._list_hypervisors.assert_called_once_with(False)
        scenario._search_hypervisors.assert_called_once_with(
            "fake_hostname", atomic_action=False)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "nova.search_1_hypervisors")
import mock from rally.plugins.openstack.scenarios.nova import services from tests.unit import test class NovaServicesTestCase(test.TestCase): def test_list_services(self): scenario = services.ListServices() scenario._list_services = mock.Mock() scenario.run(host="foo_host", binary="foo_hypervisor") scenario._list_services.assert_called_once_with("foo_host", "foo_hypervisor") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_agents.py0000664000567000056710000000172513073417717027440 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.nova import agents from tests.unit import test class NovaAgentsTestCase(test.TestCase): def test_list_agents(self): scenario = agents.ListAgents() scenario._list_agents = mock.Mock() scenario.run(hypervisor=None) scenario._list_agents.assert_called_once_with(None) rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_images.py0000664000567000056710000000200013073417717027407 0ustar jenkinsjenkins00000000000000# Copyright: 2015 Workday, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.nova import images from tests.unit import test class NovaImagesTestCase(test.TestCase): def test_list_images(self): scenario = images.ListImages() scenario._list_images = mock.Mock() scenario.run(detailed=False, fakearg="fakearg") scenario._list_images.assert_called_once_with(False, fakearg="fakearg") rally-0.9.1/tests/unit/plugins/openstack/scenarios/nova/test_availability_zones.py0000664000567000056710000000204713073417717032045 0ustar jenkinsjenkins00000000000000# Copyright 2016 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.scenarios.nova import availability_zones from tests.unit import test class NovaAvailabilityZonesTestCase(test.TestCase): def test_list_availability_zones(self): scenario = availability_zones.ListAvailabilityZones() scenario._list_availability_zones = mock.Mock() scenario.run(detailed=False) scenario._list_availability_zones.assert_called_once_with(False) rally-0.9.1/tests/unit/plugins/openstack/scenarios/glance/0000775000567000056710000000000013073420067025017 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/glance/__init__.py0000664000567000056710000000000013073417717027126 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/glance/test_utils.py0000664000567000056710000000733313073417717027606 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import tempfile

import ddt
import mock

from rally.plugins.openstack.scenarios.glance import utils
from tests.unit import test

# Dotted path of the module under test, for building mock.patch targets.
GLANCE_UTILS = "rally.plugins.openstack.scenarios.glance.utils"


@ddt.ddt
class GlanceScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for GlanceScenario helpers (_list/_create/_delete image)."""

    def setUp(self):
        super(GlanceScenarioTestCase, self).setUp()
        self.image = mock.Mock()
        self.image1 = mock.Mock()
        # Fake clients object pinned to glance API version 1.
        self.scenario_clients = mock.Mock()
        self.scenario_clients.glance.choose_version.return_value = 1

    def test_list_images(self):
        scenario = utils.GlanceScenario(context=self.context)
        return_images_list = scenario._list_images()
        self.clients("glance").images.list.assert_called_once_with()
        self.assertEqual(list(self.clients("glance").images.list.return_value),
                         return_images_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "glance.list_images")

    @ddt.data({}, {"name": "foo"}, {"name": None}, {"name": ""},
              {"name": "bar", "fakearg": "fakearg"},
              {"fakearg": "fakearg"})
    @mock.patch("rally.plugins.openstack.wrappers.glance.wrap")
    def test_create_image(self, create_args, mock_wrap):
        # A falsy or missing "name" must be replaced with a generated
        # random name; all other kwargs are forwarded untouched.
        image_location = tempfile.NamedTemporaryFile()
        mock_wrap.return_value.create_image.return_value = self.image
        scenario = utils.GlanceScenario(context=self.context,
                                        clients=self.scenario_clients)
        scenario.generate_random_name = mock.Mock()
        return_image = scenario._create_image("container_format",
                                              image_location.name,
                                              "disk_format",
                                              **create_args)
        expected_args = dict(create_args)
        if not expected_args.get("name"):
            expected_args["name"] = scenario.generate_random_name.return_value

        self.assertEqual(self.image, return_image)
        mock_wrap.assert_called_once_with(scenario._clients.glance, scenario)
        mock_wrap.return_value.create_image.assert_called_once_with(
            "container_format", image_location.name, "disk_format",
            **expected_args)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "glance.create_image")

    @mock.patch("rally.plugins.openstack.wrappers.glance.wrap")
    def test_delete_image(self, mock_wrap):
        # Two successive get_image results simulate the wait-for-deletion
        # sequence: first the live image, then one in DELETED status.
        deleted_image = mock.Mock(status="DELETED")
        wrapper = mock_wrap.return_value
        wrapper.get_image.side_effect = [self.image, deleted_image]
        scenario = utils.GlanceScenario(context=self.context,
                                        clients=self.scenario_clients)
        scenario._delete_image(self.image)
        self.clients("glance").images.delete.assert_called_once_with(
            self.image.id)
        mock_wrap.assert_called_once_with(scenario._clients.glance, scenario)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "glance.delete_image")
import mock

from rally import exceptions
from rally.plugins.openstack.scenarios.glance import images
from tests.unit import fakes
from tests.unit import test

# Dotted path of the module under test, for building mock.patch targets.
BASE = "rally.plugins.openstack.scenarios.glance.images"


class GlanceImagesTestCase(test.ScenarioTestCase):
    """Unit tests for the glance image scenarios."""

    @mock.patch("%s.CreateAndListImage._list_images" % BASE)
    @mock.patch("%s.CreateAndListImage._create_image" % BASE)
    def test_create_and_list_image(self, mock_create_image,
                                   mock_list_images):
        # NOTE: decorators apply bottom-up, so the bottom patch
        # (_create_image) binds to the first mock argument.
        fake_image = fakes.FakeImage(id=1, name="img_name1")
        mock_create_image.return_value = fake_image
        mock_list_images.return_value = [
            fakes.FakeImage(id=0, name="img_name1"),
            fake_image,
            fakes.FakeImage(id=2, name="img_name1")
        ]

        # Positive case
        images.CreateAndListImage(self.context).run(
            "cf", "url", "df", fakearg="f")
        mock_create_image.assert_called_once_with(
            "cf", "url", "df", fakearg="f")
        mock_list_images.assert_called_once_with()

        # Negative case: image isn't created
        mock_create_image.return_value = None
        self.assertRaises(exceptions.RallyAssertionError,
                          images.CreateAndListImage(self.context).run,
                          "cf", "url", "df", fakearg="f")
        mock_create_image.assert_called_with(
            "cf", "url", "df", fakearg="f")

        # Negative case: created image not in the list of available images
        mock_create_image.return_value = fakes.FakeImage(
            id=12, name="img_nameN")
        self.assertRaises(exceptions.RallyAssertionError,
                          images.CreateAndListImage(self.context).run,
                          "cf", "url", "df", fakearg="f")
        mock_create_image.assert_called_with(
            "cf", "url", "df", fakearg="f")
        mock_list_images.assert_called_with()

    @mock.patch("%s.ListImages._list_images" % BASE)
    def test_list_images(self, mock_list_images__list_images):
        images.ListImages(self.context).run()
        mock_list_images__list_images.assert_called_once_with()

    @mock.patch("%s.CreateAndDeleteImage._delete_image" % BASE)
    @mock.patch("%s.CreateAndDeleteImage._create_image" % BASE)
    @mock.patch("%s.CreateAndDeleteImage.generate_random_name" % BASE,
                return_value="test-rally-image")
    def test_create_and_delete_image(self, mock_random_name,
                                     mock_create_image,
                                     mock_delete_image):
        # The created image must be handed straight to _delete_image.
        fake_image = object()
        mock_create_image.return_value = fake_image
        images.CreateAndDeleteImage(self.context).run(
            "cf", "url", "df", fakearg="f")
        mock_create_image.assert_called_once_with(
            "cf", "url", "df", fakearg="f")
        mock_delete_image.assert_called_once_with(fake_image)

    @mock.patch("%s.CreateImageAndBootInstances._boot_servers" % BASE)
    @mock.patch("%s.CreateImageAndBootInstances._create_image" % BASE)
    def test_create_image_and_boot_instances(self, mock_create_image,
                                             mock_boot_servers):
        fake_image = fakes.FakeImage()
        fake_servers = [mock.Mock() for i in range(5)]
        mock_create_image.return_value = fake_image
        mock_boot_servers.return_value = fake_servers
        create_image_kwargs = {"fakeimagearg": "f"}
        boot_server_kwargs = {"fakeserverarg": "f"}

        images.CreateImageAndBootInstances(self.context).run(
            "cf", "url", "df", "fid", 5,
            create_image_kwargs=create_image_kwargs,
            boot_server_kwargs=boot_server_kwargs)
        mock_create_image.assert_called_once_with("cf", "url", "df",
                                                  **create_image_kwargs)
        # "image-id-0" is presumably FakeImage's default id -- verify
        # against tests.unit.fakes.FakeImage.
        mock_boot_servers.assert_called_once_with("image-id-0", "fid", 5,
                                                  **boot_server_kwargs)
See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.ceilometer import traits from tests.unit import test class CeilometerTraitsTestCase(test.ScenarioTestCase): def setUp(self): super(CeilometerTraitsTestCase, self).setUp() patch = mock.patch( "rally.plugins.openstack.services.identity.identity.Identity") self.addCleanup(patch.stop) self.mock_identity = patch.start() def get_test_context(self): context = super(CeilometerTraitsTestCase, self).get_test_context() context["admin"] = {"id": "fake_user_id", "credential": mock.MagicMock() } return context def test_list_traits(self): scenario = traits.CreateUserAndListTraits(self.context) scenario._list_event_traits = mock.MagicMock() scenario._list_events = mock.MagicMock( return_value=[mock.Mock( event_type="fake_event_type", traits=[{"name": "fake_trait_name"}]) ]) scenario.run() self.mock_identity.return_value.create_user.assert_called_once_with() scenario._list_events.assert_called_with() scenario._list_event_traits.assert_called_once_with( event_type="fake_event_type", trait_name="fake_trait_name") def test_list_trait_descriptions(self): scenario = traits.CreateUserAndListTraitDescriptions( self.context) scenario._list_event_trait_descriptions = mock.MagicMock() scenario._list_events = mock.MagicMock( return_value=[mock.Mock( event_type="fake_event_type") ]) scenario.run() self.mock_identity.return_value.create_user.assert_called_once_with() scenario._list_events.assert_called_with() scenario._list_event_trait_descriptions.assert_called_once_with( event_type="fake_event_type") rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/__init__.py0000664000567000056710000000000013073417717030025 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/test_resources.py0000664000567000056710000001105113073417717031347 0ustar jenkinsjenkins00000000000000# All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally import exceptions
from rally.plugins.openstack.scenarios.ceilometer import resources
from tests.unit import test

# Dotted path used when building mock.patch targets.
BASE = "rally.plugins.openstack.scenarios.ceilometer"


class CeilometerResourcesTestCase(test.ScenarioTestCase):
    """Unit tests for the ceilometer resource-listing scenarios."""

    @mock.patch("%s.resources.ListMatchedResources.run" % BASE)
    def test_all_resource_list_queries(
            self, mock_list_matched_resources_run):
        # ListResources must fan out into one ListMatchedResources.run
        # call per supported filter variant.
        metadata_query = {"a": "test"}
        start_time = "fake start time"
        end_time = "fake end time"
        limit = 100

        scenario = resources.ListResources(self.context)
        scenario.run(metadata_query, start_time, end_time, limit)
        mock_list_matched_resources_run.assert_any_call(limit=100)
        mock_list_matched_resources_run.assert_any_call(start_time=start_time,
                                                        end_time=end_time)
        mock_list_matched_resources_run.assert_any_call(end_time=end_time)
        mock_list_matched_resources_run.assert_any_call(start_time=start_time)
        mock_list_matched_resources_run.assert_any_call(
            metadata_query=metadata_query)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_user_id=True)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_project_id=True)
        mock_list_matched_resources_run.assert_any_call(
            filter_by_resource_id=True)

    def test_list_matched_resources(self):
        # All enabled filters must be rendered into the ceilometer
        # query-field list, in this exact order.
        scenario = resources.ListMatchedResources(self.context)
        scenario._list_resources = mock.MagicMock()
        context = {"user": {"tenant_id": "fake", "id": "fake_id"},
                   "tenant": {"id": "fake_id",
                              "resources": ["fake_resource"]}}
        scenario.context = context

        metadata_query = {"a": "test"}
        start_time = "2015-09-09T00:00:00"
        end_time = "2015-09-10T00:00:00"
        limit = 100
        scenario.run(True, True, True, metadata_query, start_time,
                     end_time, limit)
        scenario._list_resources.assert_called_once_with(
            [{"field": "user_id", "value": "fake_id", "op": "eq"},
             {"field": "project_id", "value": "fake_id", "op": "eq"},
             {"field": "resource_id", "value": "fake_resource", "op": "eq"},
             {"field": "metadata.a", "value": "test", "op": "eq"},
             {"field": "timestamp", "value": "2015-09-09T00:00:00",
              "op": ">="},
             {"field": "timestamp", "value": "2015-09-10T00:00:00",
              "op": "<="}
             ],
            100)

    def test_get_tenant_resources(self):
        # Every resource id recorded in the tenant context must be fetched.
        scenario = resources.GetTenantResources(self.context)
        resource_list = ["id1", "id2", "id3", "id4"]
        context = {"user": {"tenant_id": "fake"},
                   "tenant": {"id": "fake", "resources": resource_list}}
        scenario.context = context
        scenario._get_resource = mock.MagicMock()
        scenario.run()
        for resource_id in resource_list:
            scenario._get_resource.assert_any_call(resource_id)

    @mock.patch("%s.resources.ListMatchedResources.run" % BASE)
    def test_resource_list_queries_without_limit_and_metadata(
            self, mock_list_matched_resources_run):
        # With no limit/metadata, only the three per-id filters are run,
        # in this order.
        scenario = resources.ListResources()
        scenario.run()
        expected_call_args_list = [
            mock.call(filter_by_project_id=True),
            mock.call(filter_by_user_id=True),
            mock.call(filter_by_resource_id=True)
        ]
        self.assertSequenceEqual(
            expected_call_args_list,
            mock_list_matched_resources_run.call_args_list)

    def test_get_tenant_resources_with_exception(self):
        # An empty tenant resource list is a scenario failure.
        scenario = resources.GetTenantResources(self.context)
        resource_list = []
        context = {"user": {"tenant_id": "fake"},
                   "tenant": {"id": "fake", "resources": resource_list}}
        scenario.context = context
        self.assertRaises(exceptions.RallyAssertionError, scenario.run)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.ceilometer import stats from tests.unit import test class CeilometerStatsTestCase(test.ScenarioTestCase): def test_get_stats(self): scenario = stats.GetStats(self.context) scenario._get_stats = mock.MagicMock() context = {"user": {"tenant_id": "fake", "id": "fake_id"}, "tenant": {"id": "fake_id", "resources": ["fake_resource"]}} metadata_query = {"a": "test"} period = 10 groupby = "user_id" aggregates = "sum" scenario.context = context scenario.run("fake_meter", True, True, True, metadata_query, period, groupby, aggregates) scenario._get_stats.assert_called_once_with( "fake_meter", [{"field": "user_id", "value": "fake_id", "op": "eq"}, {"field": "project_id", "value": "fake_id", "op": "eq"}, {"field": "resource_id", "value": "fake_resource", "op": "eq"}, {"field": "metadata.a", "value": "test", "op": "eq"}], 10, "user_id", "sum" ) rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/test_queries.py0000664000567000056710000001147613073417717031025 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import json

import mock

from rally.plugins.openstack.scenarios.ceilometer import queries
from tests.unit import test


class CeilometerQueriesTestCase(test.ScenarioTestCase):
    """Unit tests for the ceilometer complex-query scenarios.

    Filters are JSON-encoded before being passed to the query helpers;
    a None filter must be forwarded as None (not "null").
    """

    def test_create_and_query_alarms(self):
        scenario = queries.CeilometerQueriesCreateAndQueryAlarms(self.context)
        scenario._create_alarm = mock.MagicMock()
        scenario._query_alarms = mock.MagicMock()
        scenario.run("fake_meter_name", 100, "fake_filter",
                     "fake_orderby_attribute", 10, fakearg="f")
        scenario._create_alarm.assert_called_once_with("fake_meter_name", 100,
                                                       {"fakearg": "f"})
        # The filter is serialized with json.dumps before querying.
        scenario._query_alarms.assert_called_once_with(
            json.dumps("fake_filter"), "fake_orderby_attribute", 10)

    def test_create_and_query_alarms_no_filter(self):
        scenario = queries.CeilometerQueriesCreateAndQueryAlarms(self.context)
        scenario._create_alarm = mock.MagicMock()
        scenario._query_alarms = mock.MagicMock()
        scenario.run("fake_meter_name", 100, None,
                     "fake_orderby_attribute", 10, fakearg="f")
        scenario._create_alarm.assert_called_once_with("fake_meter_name", 100,
                                                       {"fakearg": "f"})
        scenario._query_alarms.assert_called_once_with(
            None, "fake_orderby_attribute", 10)

    def test_create_and_query_alarm_history(self):
        # The history query filter is built from the created alarm's id.
        fake_alarm = mock.MagicMock()
        fake_alarm.alarm_id = "fake_alarm_id"
        scenario = queries.CeilometerQueriesCreateAndQueryAlarmHistory(
            self.context)
        scenario._create_alarm = mock.MagicMock(return_value=fake_alarm)
        scenario._query_alarm_history = mock.MagicMock()

        fake_filter = json.dumps({"=": {"alarm_id": fake_alarm.alarm_id}})
        scenario.run("fake_meter_name", 100, "fake_orderby_attribute", 10,
                     fakearg="f")
        scenario._create_alarm.assert_called_once_with("fake_meter_name", 100,
                                                       {"fakearg": "f"})
        scenario._query_alarm_history.assert_called_once_with(
            fake_filter, "fake_orderby_attribute", 10)

    def test_create_and_query_samples(self):
        scenario = queries.CeilometerQueriesCreateAndQuerySamples(self.context)
        scenario._create_sample = mock.MagicMock()
        scenario._query_samples = mock.MagicMock()
        scenario.run("fake_counter_name", "fake_counter_type",
                     "fake_counter_unit", "fake_counter_volume",
                     "fake_resource_id", "fake_filter",
                     "fake_orderby_attribute", 10, fakearg="f")
        scenario._create_sample.assert_called_once_with("fake_counter_name",
                                                        "fake_counter_type",
                                                        "fake_counter_unit",
                                                        "fake_counter_volume",
                                                        "fake_resource_id",
                                                        fakearg="f")
        scenario._query_samples.assert_called_once_with(
            json.dumps("fake_filter"), "fake_orderby_attribute", 10)

    def test_create_and_query_samples_no_filter(self):
        scenario = queries.CeilometerQueriesCreateAndQuerySamples(self.context)
        scenario._create_sample = mock.MagicMock()
        scenario._query_samples = mock.MagicMock()
        scenario.run("fake_counter_name", "fake_counter_type",
                     "fake_counter_unit", "fake_counter_volume",
                     "fake_resource_id", None,
                     "fake_orderby_attribute", 10, fakearg="f")
        scenario._create_sample.assert_called_once_with("fake_counter_name",
                                                        "fake_counter_type",
                                                        "fake_counter_unit",
                                                        "fake_counter_volume",
                                                        "fake_resource_id",
                                                        fakearg="f")
        scenario._query_samples.assert_called_once_with(
            None, "fake_orderby_attribute", 10)
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally import exceptions
from rally.plugins.openstack.scenarios.ceilometer import events
from tests.unit import test


class CeilometerEventsTestCase(test.ScenarioTestCase):
    """Unit tests for the ceilometer event scenarios.

    Each scenario first creates a user via the (mocked) Identity service,
    then lists/gets events; an empty listing is a scenario failure.
    """

    def setUp(self):
        super(CeilometerEventsTestCase, self).setUp()
        # Patch the Identity service so user creation is a no-op mock.
        patch = mock.patch(
            "rally.plugins.openstack.services.identity.identity.Identity")
        self.addCleanup(patch.stop)
        self.mock_identity = patch.start()

    def get_test_context(self):
        # These scenarios require an admin credential in the context.
        context = super(CeilometerEventsTestCase, self).get_test_context()
        context["admin"] = {"id": "fake_user_id",
                            "credential": mock.MagicMock()
                            }
        return context

    def test_list_events(self):
        scenario = events.CeilometerEventsCreateUserAndListEvents(self.context)
        scenario._list_events = mock.MagicMock()

        scenario.run()

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_events.assert_called_once_with()

    def test_list_events_fails(self):
        scenario = events.CeilometerEventsCreateUserAndListEvents(self.context)
        scenario._list_events = mock.MagicMock(return_value=[])

        self.assertRaises(exceptions.RallyException, scenario.run)

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_events.assert_called_once_with()

    def test_list_event_types(self):
        scenario = events.CeilometerEventsCreateUserAndListEventTypes(
            self.context)
        scenario._list_event_types = mock.MagicMock()

        scenario.run()

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_event_types.assert_called_once_with()

    def test_list_event_types_fails(self):
        scenario = events.CeilometerEventsCreateUserAndListEventTypes(
            self.context)
        scenario._list_event_types = mock.MagicMock(return_value=[])

        self.assertRaises(exceptions.RallyException, scenario.run)

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_event_types.assert_called_once_with()

    def test_get_event(self):
        # The get step must be driven by the first listed event's id.
        scenario = events.CeilometerEventsCreateUserAndGetEvent(self.context)
        scenario._get_event = mock.MagicMock()
        scenario._list_events = mock.MagicMock(
            return_value=[mock.Mock(message_id="fake_id")])

        scenario.run()

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_events.assert_called_with()
        scenario._get_event.assert_called_with(event_id="fake_id")

    def test_get_event_fails(self):
        # With no events, the scenario fails before _get_event is called.
        scenario = events.CeilometerEventsCreateUserAndGetEvent(self.context)
        scenario._list_events = mock.MagicMock(return_value=[])
        scenario._get_event = mock.MagicMock()

        self.assertRaises(exceptions.RallyException, scenario.run)

        self.mock_identity.return_value.create_user.assert_called_once_with()
        scenario._list_events.assert_called_with()
        self.assertFalse(scenario._get_event.called)
import mock

from rally.plugins.openstack.scenarios.ceilometer import samples
from tests.unit import test

BASE = "rally.plugins.openstack.scenarios.ceilometer"


class CeilometerSamplesTestCase(test.ScenarioTestCase):
    """Unit tests for the Ceilometer samples scenarios."""

    @mock.patch("%s.samples.ListMatchedSamples.run" % BASE)
    def test_all_list_samples(self, mock_list_matched_samples_run):
        # With both a metadata query and a limit, every filter variant
        # must be exercised exactly once.
        metadata_query = {"a": "test"}
        limit = 10
        scenario = samples.ListSamples(self.context)

        scenario.run(metadata_query, limit)

        mock_list_matched_samples_run.assert_any_call(limit=10)
        mock_list_matched_samples_run.assert_any_call(
            metadata_query=metadata_query)
        mock_list_matched_samples_run.assert_any_call(
            filter_by_resource_id=True)
        mock_list_matched_samples_run.assert_any_call(
            filter_by_user_id=True)
        mock_list_matched_samples_run.assert_any_call(
            filter_by_project_id=True)

    @mock.patch("%s.samples.ListMatchedSamples.run" % BASE)
    def test_list_samples_without_limit_and_metadata(
            self, mock_list_matched_samples_run):
        # Without limit/metadata only the three id filters are used, in
        # this exact order.
        scenario = samples.ListSamples()

        scenario.run()

        expected_call_args_list = [
            mock.call(filter_by_project_id=True),
            mock.call(filter_by_user_id=True),
            mock.call(filter_by_resource_id=True)
        ]
        self.assertSequenceEqual(
            expected_call_args_list,
            mock_list_matched_samples_run.call_args_list)

    def test_list_matched_samples(self):
        scenario = samples.ListMatchedSamples()
        scenario._list_samples = mock.MagicMock()
        scenario.context = {"user": {"tenant_id": "fake", "id": "fake_id"},
                            "tenant": {"id": "fake_id",
                                       "resources": ["fake_resource"]}}
        metadata_query = {"a": "test"}
        limit = 10

        scenario.run(True, True, True, metadata_query, limit)

        scenario._list_samples.assert_called_once_with(
            [{"field": "user_id", "value": "fake_id", "op": "eq"},
             {"field": "project_id", "value": "fake_id", "op": "eq"},
             {"field": "resource_id", "value": "fake_resource", "op": "eq"},
             {"field": "metadata.a", "value": "test", "op": "eq"}],
            10)
rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/test_alarms.py0000664000567000056710000001141713073417717030622 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.ceilometer import alarms from tests.unit import test class CeilometerAlarmsTestCase(test.ScenarioTestCase): def test_create_alarm(self): scenario = alarms.CreateAlarm(self.context) scenario._create_alarm = mock.MagicMock() scenario.run("fake_meter_name", "fake_threshold", fakearg="f") scenario._create_alarm.assert_called_once_with("fake_meter_name", "fake_threshold", {"fakearg": "f"}) def test_list_alarm(self): scenario = alarms.ListAlarms(self.context) scenario._list_alarms = mock.MagicMock() scenario.run() scenario._list_alarms.assert_called_once_with() def test_create_and_list_alarm(self): fake_alarm = mock.MagicMock() scenario = alarms.CreateAndListAlarm(self.context) scenario._create_alarm = mock.MagicMock(return_value=fake_alarm) scenario._list_alarms = mock.MagicMock() scenario.run("fake_meter_name", "fake_threshold", fakearg="f") scenario._create_alarm.assert_called_once_with("fake_meter_name", "fake_threshold", {"fakearg": "f"}) scenario._list_alarms.assert_called_once_with(fake_alarm.alarm_id) def test_create_and_get_alarm(self): fake_alarm = mock.MagicMock() scenario = alarms.CreateAndGetAlarm(self.context) scenario._create_alarm = mock.MagicMock(return_value=fake_alarm) scenario._get_alarm = 
mock.MagicMock() scenario.run("fake_meter_name", "fake_threshold", fakearg="f") scenario._create_alarm.assert_called_once_with("fake_meter_name", "fake_threshold", {"fakearg": "f"}) scenario._get_alarm.assert_called_once_with(fake_alarm.alarm_id) def test_create_and_update_alarm(self): fake_alram_dict_diff = {"description": "Changed Test Description"} fake_alarm = mock.MagicMock() scenario = alarms.CreateAndUpdateAlarm(self.context) scenario._create_alarm = mock.MagicMock(return_value=fake_alarm) scenario._update_alarm = mock.MagicMock() scenario.run("fake_meter_name", "fake_threshold", fakearg="f") scenario._create_alarm.assert_called_once_with("fake_meter_name", "fake_threshold", {"fakearg": "f"}) scenario._update_alarm.assert_called_once_with(fake_alarm.alarm_id, fake_alram_dict_diff) def test_create_and_delete_alarm(self): fake_alarm = mock.MagicMock() scenario = alarms.CreateAndDeleteAlarm(self.context) scenario._create_alarm = mock.MagicMock(return_value=fake_alarm) scenario._delete_alarm = mock.MagicMock() scenario.run("fake_meter_name", "fake_threshold", fakearg="f") scenario._create_alarm.assert_called_once_with("fake_meter_name", "fake_threshold", {"fakearg": "f"}) scenario._delete_alarm.assert_called_once_with(fake_alarm.alarm_id) def test_create_and_get_alarm_history(self): alarm = mock.Mock(alarm_id="foo_id") scenario = alarms.CreateAlarmAndGetHistory( self.context) scenario._create_alarm = mock.MagicMock(return_value=alarm) scenario._get_alarm_state = mock.MagicMock() scenario._get_alarm_history = mock.MagicMock() scenario._set_alarm_state = mock.MagicMock() scenario.run("meter_name", "threshold", "state", 60, fakearg="f") scenario._create_alarm.assert_called_once_with( "meter_name", "threshold", {"fakearg": "f"}) scenario._get_alarm_state.assert_called_once_with("foo_id") scenario._get_alarm_history.assert_called_once_with("foo_id") scenario._set_alarm_state.assert_called_once_with(alarm, "state", 60) 
rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/test_utils.py0000664000567000056710000004500713073417717030505 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import datetime as dt from dateutil import parser import mock from rally import exceptions from rally.plugins.openstack.scenarios.ceilometer import utils from tests.unit import test CEILOMETER_UTILS = "rally.plugins.openstack.scenarios.ceilometer.utils" class CeilometerScenarioTestCase(test.ScenarioTestCase): def setUp(self): super(CeilometerScenarioTestCase, self).setUp() self.scenario = utils.CeilometerScenario(self.context) def test__make_samples_no_batch_size(self): self.scenario.generate_random_name = mock.Mock( return_value="fake_resource") test_timestamp = dt.datetime(2015, 10, 20, 14, 18, 40) result = list(self.scenario._make_samples(count=2, interval=60, timestamp=test_timestamp)) self.assertEqual(1, len(result)) expected = {"counter_name": "cpu_util", "counter_type": "gauge", "counter_unit": "%", "counter_volume": 1, "resource_id": "fake_resource", "timestamp": test_timestamp.isoformat()} self.assertEqual(expected, result[0][0]) samples_int = (parser.parse(result[0][0]["timestamp"]) - parser.parse(result[0][1]["timestamp"])).seconds self.assertEqual(60, samples_int) def test__make_samples_batch_size(self): self.scenario.generate_random_name = mock.Mock( return_value="fake_resource") test_timestamp = dt.datetime(2015, 10, 20, 14, 18, 
40) result = list(self.scenario._make_samples(count=4, interval=60, batch_size=2, timestamp=test_timestamp)) self.assertEqual(2, len(result)) expected = {"counter_name": "cpu_util", "counter_type": "gauge", "counter_unit": "%", "counter_volume": 1, "resource_id": "fake_resource", "timestamp": test_timestamp.isoformat()} self.assertEqual(expected, result[0][0]) samples_int = (parser.parse(result[0][-1]["timestamp"]) - parser.parse(result[1][0]["timestamp"])).seconds # NOTE(idegtiarov): here we check that interval between last sample in # first batch and first sample in second batch is equal 60 sec. self.assertEqual(60, samples_int) def test__make_timestamp_query(self): start_time = "2015-09-09T00:00:00" end_time = "2015-09-10T00:00:00" expected_start = [ {"field": "timestamp", "value": "2015-09-09T00:00:00", "op": ">="}] expected_end = [ {"field": "timestamp", "value": "2015-09-10T00:00:00", "op": "<="} ] actual = self.scenario._make_timestamp_query(start_time, end_time) self.assertEqual(expected_start + expected_end, actual) self.assertRaises(exceptions.InvalidArgumentsException, self.scenario._make_timestamp_query, end_time, start_time) self.assertEqual( expected_start, self.scenario._make_timestamp_query(start_time=start_time)) self.assertEqual( expected_end, self.scenario._make_timestamp_query(end_time=end_time)) def test__list_alarms_by_id(self): self.assertEqual(self.clients("ceilometer").alarms.get.return_value, self.scenario._list_alarms("alarm-id")) self.clients("ceilometer").alarms.get.assert_called_once_with( "alarm-id") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_alarms") def test__list_alarms(self): self.assertEqual(self.clients("ceilometer").alarms.list.return_value, self.scenario._list_alarms()) self.clients("ceilometer").alarms.list.assert_called_once_with() self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_alarms") def test__get_alarm(self): 
self.assertEqual(self.clients("ceilometer").alarms.get.return_value, self.scenario._get_alarm("alarm-id")) self.clients("ceilometer").alarms.get.assert_called_once_with( "alarm-id") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_alarm") def test__create_alarm(self): alarm_dict = {"alarm_id": "fake-alarm-id"} orig_alarm_dict = copy.copy(alarm_dict) self.scenario.generate_random_name = mock.Mock() self.assertEqual(self.scenario._create_alarm("fake-meter-name", 100, alarm_dict), self.clients("ceilometer").alarms.create.return_value) self.clients("ceilometer").alarms.create.assert_called_once_with( meter_name="fake-meter-name", threshold=100, description="Test Alarm", alarm_id="fake-alarm-id", name=self.scenario.generate_random_name.return_value) # ensure that _create_alarm() doesn't modify the alarm dict as # a side-effect self.assertEqual(alarm_dict, orig_alarm_dict) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.create_alarm") def test__delete_alarms(self): self.scenario._delete_alarm("alarm-id") self.clients("ceilometer").alarms.delete.assert_called_once_with( "alarm-id") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.delete_alarm") def test__update_alarm(self): alarm_diff = {"description": "Changed Test Description"} orig_alarm_diff = copy.copy(alarm_diff) self.scenario._update_alarm("alarm-id", alarm_diff) self.clients("ceilometer").alarms.update.assert_called_once_with( "alarm-id", **alarm_diff) # ensure that _create_alarm() doesn't modify the alarm dict as # a side-effect self.assertEqual(alarm_diff, orig_alarm_diff) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.update_alarm") def test__get_alarm_history(self): self.assertEqual( self.scenario._get_alarm_history("alarm-id"), self.clients("ceilometer").alarms.get_history.return_value) self.clients("ceilometer").alarms.get_history.assert_called_once_with( "alarm-id") 
self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_alarm_history") def test__get_alarm_state(self): self.assertEqual( self.scenario._get_alarm_state("alarm-id"), self.clients("ceilometer").alarms.get_state.return_value) self.clients("ceilometer").alarms.get_state.assert_called_once_with( "alarm-id") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_alarm_state") def test__set_alarm_state(self): alarm = mock.Mock() self.clients("ceilometer").alarms.create.return_value = alarm return_alarm = self.scenario._set_alarm_state(alarm, "ok", 100) self.mock_wait_for.mock.assert_called_once_with( alarm, ready_statuses=["ok"], update_resource=self.mock_get_from_manager.mock.return_value, timeout=100, check_interval=1) self.mock_get_from_manager.mock.assert_called_once_with() self.assertEqual(self.mock_wait_for.mock.return_value, return_alarm) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.set_alarm_state") def test__list_events(self): self.assertEqual( self.scenario._list_events(), self.admin_clients("ceilometer").events.list.return_value ) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_events") def test__get_events(self): self.assertEqual( self.scenario._get_event(event_id="fake_id"), self.admin_clients("ceilometer").events.get.return_value ) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_event") def test__list_event_types(self): self.assertEqual( self.scenario._list_event_types(), self.admin_clients("ceilometer").event_types.list.return_value ) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_event_types") def test__list_event_traits(self): self.assertEqual( self.scenario._list_event_traits( event_type="fake_event_type", trait_name="fake_trait_name"), self.admin_clients("ceilometer").traits.list.return_value ) self._test_atomic_action_timer(self.scenario.atomic_actions(), 
"ceilometer.list_event_traits") def test__list_event_trait_descriptions(self): self.assertEqual( self.scenario._list_event_trait_descriptions( event_type="fake_event_type" ), self.admin_clients("ceilometer").trait_descriptions.list. return_value ) self._test_atomic_action_timer( self.scenario.atomic_actions(), "ceilometer.list_event_trait_descriptions") def test__list_meters(self): self.assertEqual(self.scenario._list_meters(), self.clients("ceilometer").meters.list.return_value) self.clients("ceilometer").meters.list.assert_called_once_with( q=None, limit=None) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_meters") def test__list_resources(self): self.assertEqual( self.scenario._list_resources(), self.clients("ceilometer").resources.list.return_value) self.clients("ceilometer").resources.list.assert_called_once_with( q=None, limit=None) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_resources") def test__list_samples(self): self.assertEqual( self.scenario._list_samples(), self.clients("ceilometer").new_samples.list.return_value) self.clients("ceilometer").new_samples.list.assert_called_once_with( q=None, limit=None) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_samples") def test__list_samples_with_query(self): self.assertEqual( self.scenario._list_samples(query=[{"field": "user_id", "volume": "fake_id"}], limit=10), self.clients("ceilometer").new_samples.list.return_value) self.clients("ceilometer").new_samples.list.assert_called_once_with( q=[{"field": "user_id", "volume": "fake_id"}], limit=10) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.list_samples:limit&user_id") def test__get_resource(self): self.assertEqual(self.scenario._get_resource("fake-resource-id"), self.clients("ceilometer").resources.get.return_value) self.clients("ceilometer").resources.get.assert_called_once_with( "fake-resource-id") 
self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_resource") def test__get_stats(self): self.assertEqual( self.scenario._get_stats("fake-meter"), self.clients("ceilometer").statistics.list.return_value) self.clients("ceilometer").statistics.list.assert_called_once_with( "fake-meter", q=None, period=None, groupby=None, aggregates=None) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.get_stats") def test__create_meter(self): self.scenario.generate_random_name = mock.Mock() self.assertEqual( self.scenario._create_meter(fakearg="fakearg"), self.clients("ceilometer").samples.create.return_value[0]) self.clients("ceilometer").samples.create.assert_called_once_with( counter_name=self.scenario.generate_random_name.return_value, fakearg="fakearg") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.create_meter") def test__query_alarms(self): self.assertEqual( self.scenario._query_alarms("fake-filter", "fake-orderby", 10), self.clients("ceilometer").query_alarms.query.return_value) self.clients("ceilometer").query_alarms.query.assert_called_once_with( "fake-filter", "fake-orderby", 10) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.query_alarms") def test__query_alarm_history(self): self.assertEqual( self.scenario._query_alarm_history( "fake-filter", "fake-orderby", 10), self.clients("ceilometer").query_alarm_history.query.return_value) self.clients( "ceilometer").query_alarm_history.query.assert_called_once_with( "fake-filter", "fake-orderby", 10) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.query_alarm_history") def test__query_samples(self): self.assertEqual( self.scenario._query_samples("fake-filter", "fake-orderby", 10), self.clients("ceilometer").query_samples.query.return_value) self.clients("ceilometer").query_samples.query.assert_called_once_with( "fake-filter", "fake-orderby", 10) 
self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.query_samples") def test__create_sample_no_resource_id(self): self.scenario.generate_random_name = mock.Mock() created_sample = self.scenario._create_sample("test-counter-name", "test-counter-type", "test-counter-unit", "test-counter-volume") self.assertEqual( created_sample, self.clients("ceilometer").samples.create.return_value) self.clients("ceilometer").samples.create.assert_called_once_with( counter_name="test-counter-name", counter_type="test-counter-type", counter_unit="test-counter-unit", counter_volume="test-counter-volume", resource_id=self.scenario.generate_random_name.return_value) self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.create_sample") def test__create_sample(self): created_sample = self.scenario._create_sample("test-counter-name", "test-counter-type", "test-counter-unit", "test-counter-volume", "test-resource-id") self.assertEqual( created_sample, self.clients("ceilometer").samples.create.return_value) self.clients("ceilometer").samples.create.assert_called_once_with( counter_name="test-counter-name", counter_type="test-counter-type", counter_unit="test-counter-unit", counter_volume="test-counter-volume", resource_id="test-resource-id") self._test_atomic_action_timer(self.scenario.atomic_actions(), "ceilometer.create_sample") def test__make_general_query(self): self.scenario.context = { "user": {"tenant_id": "fake", "id": "fake_id"}, "tenant": {"id": "fake_id", "resources": ["fake_resource"]}} metadata = {"fake_field": "boo"} expected = [ {"field": "user_id", "value": "fake_id", "op": "eq"}, {"field": "project_id", "value": "fake_id", "op": "eq"}, {"field": "resource_id", "value": "fake_resource", "op": "eq"}, {"field": "metadata.fake_field", "value": "boo", "op": "eq"}, ] actual = self.scenario._make_general_query(True, True, True, metadata) self.assertEqual(expected, actual) def test__make_query_item(self): expected = {"field": "foo", "op": 
"eq", "value": "bar"} self.assertEqual(expected, self.scenario._make_query_item("foo", value="bar")) def test__make_profiler_key(self): query = [ {"field": "test_field1", "op": "eq", "value": "bar"}, {"field": "test_field2", "op": "==", "value": None} ] limit = 100 method = "fake_method" actual = self.scenario._make_profiler_key(method, query, limit) self.assertEqual("fake_method:limit&test_field1&test_field2", actual) actual = self.scenario._make_profiler_key(method, query, None) self.assertEqual("fake_method:test_field1&test_field2", actual) self.assertEqual(method, self.scenario._make_profiler_key(method, None, None)) rally-0.9.1/tests/unit/plugins/openstack/scenarios/ceilometer/test_meters.py0000664000567000056710000000574413073417717030650 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.scenarios.ceilometer import meters from tests.unit import test BASE = "rally.plugins.openstack.scenarios.ceilometer" class CeilometerMetersTestCase(test.ScenarioTestCase): @mock.patch("%s.meters.ListMatchedMeters.run" % BASE) def test_all_meter_list_queries( self, mock_list_matched_meters_run): scenario = meters.ListMeters(self.context) metadata_query = {"a": "test"} limit = 100 scenario.run(metadata_query, limit) mock_list_matched_meters_run.assert_any_call(limit=100) mock_list_matched_meters_run.assert_any_call( metadata_query=metadata_query) mock_list_matched_meters_run.assert_any_call(filter_by_user_id=True) mock_list_matched_meters_run.assert_any_call(filter_by_project_id=True) mock_list_matched_meters_run.assert_any_call( filter_by_resource_id=True) @mock.patch("%s.meters.ListMatchedMeters.run" % BASE) def test_meter_list_queries_without_limit_and_metadata( self, mock_list_matched_meters_run): scenario = meters.ListMeters(self.context) scenario.run() expected_call_args_list = [ mock.call(filter_by_project_id=True), mock.call(filter_by_user_id=True), mock.call(filter_by_resource_id=True) ] self.assertSequenceEqual( expected_call_args_list, mock_list_matched_meters_run.call_args_list) @mock.patch("%s.meters.ListMatchedMeters._list_meters" % BASE) def test_list_matched_meters( self, mock_list_matched_meters__list_meters): mock_func = mock_list_matched_meters__list_meters scenario = meters.ListMatchedMeters(self.context) context = {"user": {"tenant_id": "fake", "id": "fake_id"}, "tenant": {"id": "fake_id", "resources": ["fake_resource"]}} scenario.context = context metadata_query = {"a": "test"} limit = 100 scenario.run(True, True, True, metadata_query, limit) mock_func.assert_called_once_with( [{"field": "user_id", "value": "fake_id", "op": "eq"}, {"field": "project_id", "value": "fake_id", "op": "eq"}, {"field": "resource_id", "value": "fake_resource", "op": "eq"}, {"field": "metadata.a", "value": "test", "op": "eq"}], 
100) rally-0.9.1/tests/unit/plugins/openstack/scenarios/fuel/0000775000567000056710000000000013073420067024521 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/fuel/__init__.py0000664000567000056710000000000013073417717026630 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/fuel/test_environments.py0000664000567000056710000000400013073417717030663 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.scenarios.fuel import environments from tests.unit import test class FuelEnvironmentsTestCase(test.ScenarioTestCase): def test_create_and_list_environments(self): scenario = environments.CreateAndListEnvironments(self.context) scenario._create_environment = mock.Mock() scenario._list_environments = mock.Mock() scenario.run( release_id=2, network_provider="test_neutron", deployment_mode="test_mode", net_segment_type="test_type") scenario._create_environment.assert_called_once_with( release_id=2, network_provider="test_neutron", deployment_mode="test_mode", net_segment_type="test_type") scenario._list_environments.assert_called_once_with() def test_create_and_delete_environments(self): scenario = environments.CreateAndDeleteEnvironment(self.context) scenario._create_environment = mock.Mock(return_value=42) scenario._delete_environment = mock.Mock() scenario.run( release_id=2, network_provider="test_neutron", deployment_mode="test_mode", net_segment_type="test_type") scenario._create_environment.assert_called_once_with( release_id=2, network_provider="test_neutron", deployment_mode="test_mode", net_segment_type="test_type") scenario._delete_environment.assert_called_once_with(42, 5) rally-0.9.1/tests/unit/plugins/openstack/scenarios/fuel/test_nodes.py0000664000567000056710000000341713073417717027257 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally.plugins.openstack.scenarios.fuel import nodes from tests.unit import test class FuelNodesTestCase(test.TestCase): context = {"fuel": {"environments": ["1"]}} def test_add_and_remove_node(self): scenario = nodes.AddAndRemoveNode(self.context) scenario._list_node_ids = mock.Mock(return_value=["1"]) scenario._node_is_assigned = mock.Mock(return_value=False) scenario._add_node = mock.Mock() scenario._remove_node = mock.Mock() scenario.run(node_roles="some_role") scenario._list_node_ids.assert_called_once_with() scenario._node_is_assigned.assert_called_once_with("1") scenario._add_node.assert_called_once_with("1", ["1"], "some_role") scenario._remove_node.assert_called_once_with("1", "1") def test_add_and_remove_nodes_error(self): scenario = nodes.AddAndRemoveNode(self.context) scenario._list_node_ids = mock.Mock(return_value=["1"]) scenario._node_is_assigned = mock.Mock(return_value=True) scenario._add_node = mock.Mock() scenario._remove_node = mock.Mock() self.assertRaises(RuntimeError, scenario.run, node_roles="some_role") rally-0.9.1/tests/unit/plugins/openstack/scenarios/fuel/test_utils.py0000664000567000056710000002467213073417717027315 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.fuel import utils from tests.unit import test UTILS = "rally.plugins.openstack.scenarios.fuel.utils." 
class ModuleTestCase(test.TestCase):
    """Tests for the Fuel OSClient plugin declared in utils."""

    @mock.patch(UTILS + "six")
    @mock.patch(UTILS + "FuelClient", return_value="fuel_client")
    def test_fuel(self, mock_fuel_client, mock_six):
        # The hostname is extracted from the auth URL through six's
        # urlparse wrapper.
        mock_six.moves.urllib.parse.urlparse().hostname = "foo_host"

        credential = mock.Mock(username="foo_user", password="foo_pass")
        client = utils.Fuel(credential, {}, {}).create_client()

        mock_fuel_client.assert_called_once_with(
            version="v1", server_address="foo_host", server_port=8000,
            username="foo_user", password="foo_pass")
        self.assertEqual("fuel_client", client)


class FuelEnvTestCase(test.TestCase):
    """Tests for utils.FuelEnvManager."""

    # Keyword arguments used by the create() tests; the manager is
    # expected to pass them positionally to the underlying client.
    _CREATE_KWARGS = {
        "release_id": 42,
        "network_provider": "testprov",
        "deployment_mode": "some_mode",
        "net_segment_type": "bar",
    }

    def test___init__(self):
        manager = utils.FuelEnvManager("some_client")
        self.assertEqual("some_client", manager.client)

    def test_get(self):
        env_client = mock.Mock()
        manager = utils.FuelEnvManager(env_client)

        result = manager.get("some_id")
        env_client.get_by_id.assert_called_once_with("some_id")
        self.assertEqual(result, env_client.get_by_id("some_id"))

        # Any failure of the underlying lookup is reported as None.
        env_client.get_by_id.side_effect = BaseException
        self.assertIsNone(manager.get("some_id"))

    def test_list(self):
        env_client = mock.Mock()
        expected = [{"name": "one"}, {"name": "two"}, {"name": "three"}]
        env_client.get_all.return_value = expected
        manager = utils.FuelEnvManager(env_client)
        self.assertEqual(expected, manager.list())

    def test_list_exception(self):
        env_client = mock.Mock()
        env_client.get_all = mock.Mock(side_effect=SystemExit)
        manager = utils.FuelEnvManager(env_client)
        self.assertRaises(RuntimeError, manager.list)

    def test_create(self):
        env_client = mock.Mock()
        env_client.create.return_value = "env"
        manager = utils.FuelEnvManager(env_client)

        self.assertEqual("env",
                         manager.create("some_env", **self._CREATE_KWARGS))
        env_client.create.assert_called_once_with("some_env", 42, "testprov",
                                                  "some_mode", "bar")

        env_client.create.side_effect = SystemExit
        self.assertRaises(RuntimeError, manager.create, "some_env",
                          **self._CREATE_KWARGS)

    def test_create_env_not_returned(self):
        # A falsy result from the client means creation failed.
        env_client = mock.Mock()
        env_client.create.return_value = None
        manager = utils.FuelEnvManager(env_client)
        self.assertRaises(RuntimeError, manager.create, "some_env",
                          **self._CREATE_KWARGS)

    @mock.patch(UTILS + "scenario.OpenStackScenario")
    def test_delete(self, mock_open_stack_scenario):
        mock_open_stack_scenario.RESOURCE_NAME_PREFIX = ""
        env_client = mock.Mock()
        env_client.get_all.return_value = [{"id": "some_one", "name": "one"}]
        env_client.delete_by_id.side_effect = SystemExit
        manager = utils.FuelEnvManager(env_client)

        self.assertRaises(RuntimeError, manager.delete, "some_one", retries=2)
        # One initial attempt plus two retries.
        self.assertEqual(3, len(env_client.delete_by_id.mock_calls))

    @mock.patch(UTILS + "scenario.OpenStackScenario")
    def test_delete_error(self, mock_open_stack_scenario):
        mock_open_stack_scenario.RESOURCE_NAME_PREFIX = ""
        env_client = mock.Mock()
        env_client.delete_by_id.side_effect = SystemExit
        env_client.get_all.return_value = [{"id": "some_one", "name": "one"}]
        manager = utils.FuelEnvManager(env_client)

        self.assertRaises(RuntimeError, manager.delete, "some_one", retries=1)
        # One initial attempt plus one retry.
        self.assertEqual(2, len(env_client.delete_by_id.mock_calls))


class FuelClientTestCase(test.TestCase):
    """Tests for the utils.FuelClient wrapper."""

    @mock.patch(UTILS + "FuelEnvManager")
    @mock.patch(UTILS + "os")
    def test___init__(self, mock_os, mock_fuel_env_manager):
        mock_os.environ = {}
        # get_client echoes its arguments so we can assert what the
        # wrapper requested from the fuelclient library.
        mock_fuelclient = mock.Mock(
            get_client=lambda *args, **kw: [args, kw])

        with mock.patch.dict("sys.modules",
                             {"fuelclient": mock_fuelclient}):
            client = utils.FuelClient(version="foo_version",
                                      server_address="foo_address",
                                      server_port=1234,
                                      username="foo_user",
                                      password="foo_pass")

            # Connection settings must be exported via the environment.
            expected_environ = {"KEYSTONE_PASS": "foo_pass",
                                "KEYSTONE_USER": "foo_user",
                                "LISTEN_PORT": "1234",
                                "SERVER_ADDRESS": "foo_address"}
            self.assertEqual(expected_environ, mock_os.environ)

            self.assertEqual(mock_fuel_env_manager.return_value,
                             client.environment)
            self.assertEqual([("node",), {"version": "foo_version"}],
                             client.node)
            self.assertEqual([("task",), {"version": "foo_version"}],
                             client.task)
            mock_fuel_env_manager.assert_called_once_with(
                [("environment",), {"version": "foo_version"}])


class FuelScenarioTestCase(test.ScenarioTestCase):
    """Tests for the atomic actions of utils.FuelScenario."""

    def test__list_environments(self):
        scenario = utils.FuelScenario(self.context)
        self.assertEqual(
            scenario._list_environments(),
            self.admin_clients("fuel").environment.list.return_value)
        self.admin_clients("fuel").environment.list.assert_called_once_with()
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "fuel.list_environments")

    def test__create_environment(self):
        self.admin_clients("fuel").environment.create.return_value = {
            "id": 42}
        scenario = utils.FuelScenario()
        scenario.admin_clients = self.admin_clients
        scenario.generate_random_name = mock.Mock()

        result = scenario._create_environment()

        self.assertEqual(
            self.admin_clients("fuel").environment.create.return_value["id"],
            result)
        create_mock = self.admin_clients("fuel").environment.create
        create_mock.assert_called_once_with(
            scenario.generate_random_name.return_value,
            1, "neutron", "ha_compact", "vlan")

    def test__delete_environment(self):
        scenario = utils.FuelScenario()
        scenario.admin_clients = self.admin_clients

        scenario._delete_environment(42, 33)

        fuel_client = scenario.admin_clients("fuel")
        fuel_client.environment.delete.assert_called_once_with(42, 33)

    def test__add_nodes(self):
        scenario = utils.FuelScenario()
        scenario.admin_clients = mock.Mock()

        scenario._add_node("1", ["42"], node_roles=["some_role"])

        env_client = scenario.admin_clients.return_value.environment.client
        env_client.add_nodes.assert_called_once_with("1", ["42"],
                                                     ["some_role"])

    def test__add_nodes_error(self):
        scenario = utils.FuelScenario()
        scenario.admin_clients = mock.Mock()
        env_client = scenario.admin_clients.return_value.environment.client
        env_client.add_nodes.side_effect = BaseException
        self.assertRaises(RuntimeError, scenario._add_node,
                          "1", "42", node_roles="some_role")

    @mock.patch(UTILS + "FuelClient")
    def test__remove_nodes(self, mock_fuel_client):
        objects = mock_fuel_client.fuelclient_module.objects
        mock_env = objects.environment.Environment
        mock_env.return_value = mock.Mock()

        scenario = utils.FuelScenario()
        scenario._remove_node("1", "2")

        mock_env.assert_called_once_with("1")
        mock_env.return_value.unassign.assert_called_once_with(["2"])

    @mock.patch(UTILS + "FuelClient")
    def test__remove_nodes_error(self, mock_fuel_client):
        objects = mock_fuel_client.fuelclient_module.objects
        mock_env = objects.environment.Environment
        mock_env.return_value = mock.Mock()
        mock_env.return_value.unassign.side_effect = BaseException

        scenario = utils.FuelScenario()
        self.assertRaises(RuntimeError, scenario._remove_node, "1", "2")

    def test__list_node_ids(self):
        scenario = utils.FuelScenario()
        scenario.admin_clients = mock.Mock()
        scenario.admin_clients.return_value.node.get_all.return_value = [
            {"id": "id1"}, {"id": "id2"}]

        self.assertEqual(["id1", "id2"], scenario._list_node_ids("env"))

        get_all = scenario.admin_clients.return_value.node.get_all
        get_all.assert_called_once_with(environment_id="env")

    def test__node_is_assigned(self):
        scenario = utils.FuelScenario()
        scenario.admin_clients = mock.Mock()
        node = {"id": "id1", "cluster": "some_id"}
        scenario.admin_clients.return_value.node.get_by_id.return_value = node

        self.assertTrue(scenario._node_is_assigned("id1"))

        # An empty cluster id means the node is free.
        node["cluster"] = ""
        self.assertFalse(scenario._node_is_assigned("id2"))

    @mock.patch(UTILS + "FuelScenario._node_is_assigned",
                return_value=False)
    @mock.patch(UTILS + "FuelScenario._list_node_ids",
                return_value=["id1", "id2"])
    def test__get_free_node_id(self, mock__list_node_ids,
                               mock__node_is_assigned):
        node_id = utils.FuelScenario()._get_free_node_id()
        self.assertIn(node_id, mock__list_node_ids.return_value)

    @mock.patch(UTILS + "FuelScenario._node_is_assigned",
                return_value=True)
    @mock.patch(UTILS + "FuelScenario._list_node_ids",
                return_value=["id1", "id2"])
    def test__get_free_node_id_exception(self, mock__list_node_ids,
                                         mock__node_is_assigned):
        # Every node is assigned, so no free node can be found.
        self.assertRaises(RuntimeError,
                          utils.FuelScenario()._get_free_node_id)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally.plugins.openstack.scenarios.senlin import clusters
from tests.unit import test


class SenlinClustersTestCase(test.ScenarioTestCase):
    """Tests for the Senlin create-and-delete cluster scenario."""

    def test_create_and_delete_cluster(self):
        fake_cluster = mock.Mock()
        # The profile id comes from the per-tenant context.
        self.context["tenant"] = {"profile": "fake_profile_id"}
        scenario = clusters.CreateAndDeleteCluster(self.context)
        scenario._create_cluster = mock.Mock(return_value=fake_cluster)
        scenario._delete_cluster = mock.Mock()

        scenario.run(desired_capacity=1, min_size=0, max_size=3,
                     timeout=60, metadata={"k2": "v2"})

        scenario._create_cluster.assert_called_once_with(
            "fake_profile_id", 1, 0, 3, 60, {"k2": "v2"})
        scenario._delete_cluster.assert_called_once_with(fake_cluster)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from oslo_config import cfg

from rally import exceptions
from rally.plugins.openstack.scenarios.senlin import utils
from tests.unit import test

SENLIN_UTILS = "rally.plugins.openstack.scenarios.senlin.utils."
CONF = cfg.CONF


class SenlinScenarioTestCase(test.ScenarioTestCase):
    """Tests for the atomic actions of utils.SenlinScenario."""

    def test_list_cluster(self):
        cluster_list = ["cluster1", "cluster2"]
        senlin_admin = self.admin_clients("senlin")
        senlin_admin.clusters.return_value = cluster_list
        scenario = utils.SenlinScenario(self.context)

        self.assertEqual(list(cluster_list), scenario._list_clusters())
        senlin_admin.clusters.assert_called_once_with()

    def test_list_cluster_with_queries(self):
        cluster_list = ["cluster1", "cluster2"]
        senlin_admin = self.admin_clients("senlin")
        senlin_admin.clusters.return_value = cluster_list
        scenario = utils.SenlinScenario(self.context)

        # Extra keyword arguments are passed through as API queries.
        self.assertEqual(list(cluster_list),
                         scenario._list_clusters(status="ACTIVE"))
        senlin_admin.clusters.assert_called_once_with(status="ACTIVE")

    @mock.patch(SENLIN_UTILS + "SenlinScenario.generate_random_name",
                return_value="test_cluster")
    def test_create_cluster(self, mock_generate_random_name):
        created_cluster = mock.Mock(id="fake_cluster_id")
        ready_cluster = mock.Mock()
        senlin_admin = self.admin_clients("senlin")
        senlin_admin.create_cluster.return_value = created_cluster
        self.mock_wait_for_status.mock.return_value = ready_cluster
        scenario = utils.SenlinScenario(self.context)

        result = scenario._create_cluster("fake_profile_id",
                                          desired_capacity=1,
                                          min_size=0,
                                          max_size=3,
                                          metadata={"k1": "v1"},
                                          timeout=60)

        # The scenario returns the cluster after it reaches ACTIVE.
        self.assertEqual(ready_cluster, result)
        senlin_admin.create_cluster.assert_called_once_with(
            profile_id="fake_profile_id", name="test_cluster",
            desired_capacity=1, min_size=0, max_size=3,
            metadata={"k1": "v1"}, timeout=60)
        self.mock_wait_for_status.mock.assert_called_once_with(
            created_cluster,
            ready_statuses=["ACTIVE"],
            failure_statuses=["ERROR"],
            update_resource=scenario._get_cluster,
            timeout=CONF.benchmark.senlin_action_timeout)
        mock_generate_random_name.assert_called_once_with()
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "senlin.create_cluster")

    def test_get_cluster(self):
        fake_cluster = mock.Mock(id="fake_cluster_id")
        scenario = utils.SenlinScenario(context=self.context)

        scenario._get_cluster(fake_cluster)

        self.admin_clients("senlin").get_cluster.assert_called_once_with(
            "fake_cluster_id")

    def test_get_cluster_notfound(self):
        # A 404 from the API is translated into GetResourceNotFound.
        fake_cluster = mock.Mock(id="fake_cluster_id")
        error = Exception()
        error.code = 404
        self.admin_clients("senlin").get_cluster.side_effect = error
        scenario = utils.SenlinScenario(context=self.context)

        self.assertRaises(exceptions.GetResourceNotFound,
                          scenario._get_cluster, fake_cluster)
        self.admin_clients("senlin").get_cluster.assert_called_once_with(
            "fake_cluster_id")

    def test_get_cluster_failed(self):
        # Any other error code becomes GetResourceFailure.
        fake_cluster = mock.Mock(id="fake_cluster_id")
        error = Exception()
        error.code = 500
        self.admin_clients("senlin").get_cluster.side_effect = error
        scenario = utils.SenlinScenario(context=self.context)

        self.assertRaises(exceptions.GetResourceFailure,
                          scenario._get_cluster, fake_cluster)
        self.admin_clients("senlin").get_cluster.assert_called_once_with(
            "fake_cluster_id")

    def test_delete_cluster(self):
        fake_cluster = mock.Mock()
        scenario = utils.SenlinScenario(context=self.context)

        scenario._delete_cluster(fake_cluster)

        self.admin_clients("senlin").delete_cluster.assert_called_once_with(
            fake_cluster)
        self.mock_wait_for_status.mock.assert_called_once_with(
            fake_cluster,
            ready_statuses=["DELETED"],
            failure_statuses=["ERROR"],
            check_deletion=True,
            update_resource=scenario._get_cluster,
            timeout=CONF.benchmark.senlin_action_timeout)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "senlin.delete_cluster")

    @mock.patch(SENLIN_UTILS + "SenlinScenario.generate_random_name",
                return_value="test_profile")
    def test_create_profile(self, mock_generate_random_name):
        spec = {
            "version": "1.0",
            "type": "test_type",
            "properties": {
                "key1": "value1"
            }
        }
        scenario = utils.SenlinScenario(self.context)

        result = scenario._create_profile(spec, metadata={"k2": "v2"})

        self.assertEqual(
            self.clients("senlin").create_profile.return_value, result)
        self.clients("senlin").create_profile.assert_called_once_with(
            spec=spec, name="test_profile", metadata={"k2": "v2"})
        mock_generate_random_name.assert_called_once_with()
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "senlin.create_profile")

    def test_delete_profile(self):
        fake_profile = mock.Mock()
        scenario = utils.SenlinScenario(context=self.context)

        scenario._delete_profile(fake_profile)

        self.clients("senlin").delete_profile.assert_called_once_with(
            fake_profile)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "senlin.delete_profile")
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock

from rally.plugins.openstack.scenarios.monasca import metrics
from tests.unit import test


@ddt.ddt
class MonascaMetricsTestCase(test.ScenarioTestCase):
    """Tests for the Monasca list-metrics scenario."""

    @ddt.data(
        {"region": None},
        {"region": "fake_region"},
    )
    @ddt.unpack
    def test_list_metrics(self, region=None):
        # The ddt parameter is used directly; the previous version stored
        # it on ``self`` for no reason.
        scenario = metrics.ListMetrics(self.context)
        scenario._list_metrics = mock.MagicMock()

        scenario.run(region=region)

        scenario._list_metrics.assert_called_once_with(region=region)
import ddt

from rally.plugins.openstack.scenarios.monasca import utils
from tests.unit import test


@ddt.ddt
class MonascaScenarioTestCase(test.ScenarioTestCase):
    """Tests for the atomic actions of utils.MonascaScenario."""

    def setUp(self):
        super(MonascaScenarioTestCase, self).setUp()
        self.scenario = utils.MonascaScenario(self.context)
        # Dimensions forwarded to metric creation in the tests below.
        self.kwargs = {
            "dimensions": {
                "region": "fake_region",
                "hostname": "fake_host_name",
                "service": "fake_service",
                "url": "fake_url",
            }
        }

    def test_list_metrics(self):
        listed = self.scenario._list_metrics()

        self.assertEqual(listed,
                         self.clients("monasca").metrics.list.return_value)
        self._test_atomic_action_timer(self.scenario.atomic_actions(),
                                       "monasca.list_metrics")

    @ddt.data(
        {"name": ""},
        {"name": "fake_metric"},
    )
    @ddt.unpack
    def test_create_metrics(self, name=None):
        self.name = name

        self.scenario._create_metrics(name=self.name, kwargs=self.kwargs)

        self.assertEqual(1,
                         self.clients("monasca").metrics.create.call_count)
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <endre.karlson@hpe.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock

from rally.plugins.openstack.scenarios.designate import utils
from tests.unit import test

DESIGNATE_UTILS = "rally.plugins.openstack.scenarios.designate.utils."


@ddt.ddt
class DesignateScenarioTestCase(test.ScenarioTestCase):
    """Tests for the atomic actions of utils.DesignateScenario.

    Covers both the v1 API (domains, records, servers) and the v2 API
    (zones, recordsets). Fixed: ``test_create_zone`` and
    ``test_create_recordset`` each built a throwaway scenario object that
    was immediately discarded and re-created with a context — the dead
    instantiation has been removed.
    """

    def setUp(self):
        super(DesignateScenarioTestCase, self).setUp()
        self.domain = mock.Mock()
        self.zone = mock.Mock()
        self.server = mock.Mock()
        # v2 API client used by the zone/recordset tests.
        self.client = self.clients("designate", version="2")

    @ddt.data(
        {},
        {"email": "root@zone.name"})
    def test_create_domain(self, domain_data):
        random_name = "foo"
        scenario = utils.DesignateScenario(context=self.context)
        scenario.generate_random_name = mock.Mock(return_value=random_name)
        self.clients("designate").domains.create.return_value = self.domain

        expected = {"email": "root@random.name"}
        expected.update(domain_data)
        expected["name"] = "%s.name." % random_name

        domain = scenario._create_domain(domain_data)
        self.clients("designate").domains.create.assert_called_once_with(
            expected)
        self.assertEqual(self.domain, domain)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.create_domain")

    def test_list_domains(self):
        scenario = utils.DesignateScenario(context=self.context)
        return_domains_list = scenario._list_domains()
        self.assertEqual(self.clients("designate").domains.list.return_value,
                         return_domains_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.list_domains")

    def test_delete_domain(self):
        scenario = utils.DesignateScenario(context=self.context)
        domain = scenario._create_domain()
        scenario._delete_domain(domain["id"])
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.delete_domain")

    def test_update_domain(self):
        scenario = utils.DesignateScenario(context=self.context)
        domain = scenario._create_domain()
        self.clients("designate").domains.update.return_value = self.domain

        updated_domain = scenario._update_domain(domain)

        self.clients("designate").domains.update.assert_called_once_with(
            domain)
        self.assertEqual(self.domain, updated_domain)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.update_domain")

    @ddt.data(
        {},
        {"data": "127.0.0.1"})
    def test_create_record(self, record_data):
        random_name = "foo"
        domain_name = "zone.name."
        domain = {"name": domain_name, "id": "123"}
        record_name = "%s.%s" % (random_name, domain_name)

        scenario = utils.DesignateScenario(context=self.context)
        scenario.generate_random_name = mock.Mock(return_value=random_name)

        expected = {"type": "A", "data": "10.0.0.1"}
        expected.update(record_data)
        expected["name"] = record_name

        scenario._create_record(domain, record=record_data)
        self.clients("designate").records.create.assert_called_once_with(
            domain["id"], expected)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.create_record")

    def test_list_records(self):
        scenario = utils.DesignateScenario(context=self.context)
        return_records_list = scenario._list_records("123")
        self.assertEqual(self.clients("designate").records.list.return_value,
                         return_records_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.list_records")

    def test_delete_record(self):
        scenario = utils.DesignateScenario(context=self.context)
        domain_id = mock.Mock()
        record_id = mock.Mock()

        scenario._delete_record(domain_id, record_id)
        self.clients("designate").records.delete.assert_called_once_with(
            domain_id, record_id)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.delete_record")

        # With atomic_action=False the call is made without a timer.
        self.clients("designate").records.delete.reset_mock()
        scenario._delete_record(domain_id, record_id, atomic_action=False)
        self.clients("designate").records.delete.assert_called_once_with(
            domain_id, record_id)

    def test_create_server(self):
        scenario = utils.DesignateScenario(context=self.context)
        random_name = "foo"
        scenario.generate_random_name = mock.Mock(return_value=random_name)
        explicit_name = "bar.io."

        self.admin_clients(
            "designate").servers.create.return_value = self.server

        # Check that the defaults / randoms are used if nothing is specified
        server = scenario._create_server()
        self.admin_clients("designate").servers.create.assert_called_once_with(
            {"name": "name.%s." % random_name})
        self.assertEqual(self.server, server)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.create_server")

        self.admin_clients("designate").servers.create.reset_mock()

        # Check that when specifying server name defaults are not used...
        data = {"name": explicit_name}
        server = scenario._create_server(data)
        self.admin_clients(
            "designate").servers.create.assert_called_once_with(data)
        self.assertEqual(self.server, server)

    def test_delete_server(self):
        scenario = utils.DesignateScenario(context=self.context)

        scenario._delete_server("foo_id")
        self.admin_clients("designate").servers.delete.assert_called_once_with(
            "foo_id")
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.delete_server")

    # NOTE: API V2
    @ddt.data(
        {},
        {"email": "root@zone.name"},
        {"name": "example.name."},
        {
            "email": "root@zone.name",
            "name": "example.name."
        })
    def test_create_zone(self, zone_data):
        random_name = "foo"
        scenario = utils.DesignateScenario(context=self.context)
        scenario.generate_random_name = mock.Mock(return_value=random_name)
        self.client.zones.create.return_value = self.zone

        expected = {
            "email": "root@random.name",
            "name": "%s.name." % random_name,
            "type_": "PRIMARY"
        }
        expected.update(zone_data)

        # Check that the defaults / randoms are used if nothing is specified
        zone = scenario._create_zone(**zone_data)
        self.client.zones.create.assert_called_once_with(
            description=None, ttl=None, **expected)
        self.assertEqual(self.zone, zone)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.create_zone")

    def test_list_zones(self):
        scenario = utils.DesignateScenario(context=self.context)
        return_zones_list = scenario._list_zones()
        self.assertEqual(self.client.zones.list.return_value,
                         return_zones_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.list_zones")

    def test_delete_zone(self):
        scenario = utils.DesignateScenario(context=self.context)
        zone = scenario._create_zone()
        scenario._delete_zone(zone["id"])
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.delete_zone")

    def test_list_recordsets(self):
        scenario = utils.DesignateScenario(context=self.context)
        return_recordsets_list = scenario._list_recordsets("123")
        self.assertEqual(self.client.recordsets.list.return_value,
                         return_recordsets_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.list_recordsets")

    @ddt.data(
        {},
        {"data": "127.0.0.1"})
    def test_create_recordset(self, recordset_data):
        random_name = "foo"
        zone_name = "zone.name."
        random_recordset_name = "%s.%s" % (random_name, zone_name)

        scenario = utils.DesignateScenario(context=self.context)
        scenario.generate_random_name = mock.Mock(return_value=random_name)

        zone = {"name": zone_name, "id": "123"}

        # Create with randoms (name and type)
        scenario._create_recordset(zone)
        self.client.recordsets.create.assert_called_once_with(
            zone["id"], name=random_recordset_name, type_="A",
            records=["10.0.0.1"])
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.create_recordset")

        self.client.recordsets.create.reset_mock()

        # Specify name
        recordset = {"name": "www.zone.name.", "type_": "ASD"}
        scenario._create_recordset(zone, recordset)
        self.client.recordsets.create.assert_called_once_with(
            zone["id"], name="www.zone.name.", type_="ASD",
            records=["10.0.0.1"])

        self.client.recordsets.create.reset_mock()

        # Specify type without underscore
        scenario._create_recordset(zone, {"type": "A"})
        self.client.recordsets.create.assert_called_once_with(
            zone["id"], name="foo.zone.name.", type_="A",
            records=["10.0.0.1"])

    def test_delete_recordset(self):
        scenario = utils.DesignateScenario(context=self.context)
        zone_id = mock.Mock()
        recordset_id = mock.Mock()

        scenario._delete_recordset(zone_id, recordset_id)
        self.client.recordsets.delete.assert_called_once_with(
            zone_id, recordset_id)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "designate.delete_recordset")

        # With atomic_action=False the call is made without a timer.
        self.client.recordsets.delete.reset_mock()
        scenario._delete_recordset(zone_id, recordset_id, atomic_action=False)
        self.client.recordsets.delete.assert_called_once_with(
            zone_id, recordset_id)
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.scenarios.designate import basic from tests.unit import test BASE = "rally.plugins.openstack.scenarios.designate.basic" class DesignateBasicTestCase(test.ScenarioTestCase): @mock.patch("%s.CreateAndListDomains._list_domains" % BASE) @mock.patch("%s.CreateAndListDomains._create_domain" % BASE) def test_create_and_list_domains(self, mock__create_domain, mock__list_domains): mock__create_domain.return_value = "fake_domain.xyz" mock__list_domains.return_value = (["fake_domain.org", "fake_domain.xyz", "ultimate_question.net"]) basic.CreateAndListDomains(self.context).run() mock__create_domain.assert_called_once_with() mock__list_domains.assert_called_once_with() @mock.patch("%s.CreateAndListDomains._list_domains" % BASE) @mock.patch("%s.CreateAndListDomains._create_domain" % BASE) def test_create_and_list_domains_fails(self, mock__create_domain, mock__list_domains): mock__list_domains.return_value = (["fake_domain.org", "fake_domain.xyz", "ultimate_question.net"]) scenario = basic.CreateAndListDomains(self.context) self.assertRaises(exceptions.RallyAssertionError, scenario.run) mock__create_domain.assert_called_once_with() mock__create_domain.return_value = "fake_not_existed_domain.xyz" self.assertRaises(exceptions.RallyAssertionError, scenario.run) mock__create_domain.assert_called_with() mock__list_domains.assert_called_with() @mock.patch("%s.CreateAndDeleteDomain._delete_domain" % BASE) @mock.patch("%s.CreateAndDeleteDomain._create_domain" % BASE, return_value={"id": "123"}) def 
test_create_and_delete_domain(self, mock__create_domain, mock__delete_domain): basic.CreateAndDeleteDomain(self.context).run() mock__create_domain.assert_called_once_with() mock__delete_domain.assert_called_once_with("123") @mock.patch("%s.CreateAndUpdateDomain._update_domain" % BASE) @mock.patch("%s.CreateAndUpdateDomain._create_domain" % BASE) def test_create_and_update_domain(self, mock__create_domain, mock__update_domain): domain = { "name": "zone.name", "email": "email@zone.name", "id": "123"} mock__create_domain.return_value = domain basic.CreateAndUpdateDomain(self.context).run() mock__update_domain.assert_called_once_with(domain) @mock.patch("%s.ListDomains._list_domains" % BASE) def test_list_domains(self, mock__list_domains): basic.ListDomains(self.context).run() mock__list_domains.assert_called_once_with() @mock.patch("%s.CreateAndListRecords._list_records" % BASE) @mock.patch("%s.CreateAndListRecords._create_record" % BASE) @mock.patch("%s.CreateAndListRecords._create_domain" % BASE) def test_create_and_list_records(self, mock__create_domain, mock__create_record, mock__list_records): domain = { "name": "zone.name", "email": "email@zone.name", "id": "123"} mock__create_domain.return_value = domain records_per_domain = 5 return_value = mock.call(domain, atomic_action=False) mock__create_record.return_value = return_value mock__list_records.return_value = [return_value] * records_per_domain basic.CreateAndListRecords(self.context).run( records_per_domain=records_per_domain) mock__create_domain.assert_called_once_with() self.assertEqual(mock__create_record.mock_calls, [return_value] * records_per_domain) mock__list_records.assert_called_once_with(domain["id"]) @mock.patch("%s.CreateAndDeleteRecords._delete_record" % BASE) @mock.patch("%s.CreateAndDeleteRecords._create_record" % BASE) @mock.patch("%s.CreateAndDeleteRecords._create_domain" % BASE) def test_create_and_delete_records(self, mock__create_domain, mock__create_record, mock__delete_record): domain = 
{ "name": "zone.name", "email": "email@zone.name", "id": "123"} mock__create_domain.return_value = domain mock__create_record.return_value = {"id": "321"} records_per_domain = 5 basic.CreateAndDeleteRecords(self.context).run( records_per_domain=records_per_domain) mock__create_domain.assert_called_once_with() self.assertEqual(mock__create_record.mock_calls, [mock.call(domain, atomic_action=False)] * records_per_domain) self.assertEqual(mock__delete_record.mock_calls, [mock.call(domain["id"], "321", atomic_action=False)] * records_per_domain) @mock.patch("%s.ListRecords._list_records" % BASE) def test_list_records(self, mock__list_records): basic.ListRecords(self.context).run("123") mock__list_records.assert_called_once_with("123") @mock.patch("%s.CreateAndListServers._list_servers" % BASE) @mock.patch("%s.CreateAndListServers._create_server" % BASE) def test_create_and_list_servers(self, mock__create_server, mock__list_servers): mock__create_server.return_value = "fake_server" mock__list_servers.return_value = ["fake_srv1", "fake_srv2", "fake_server"] # Positive case: basic.CreateAndListServers(self.context).run() mock__create_server.assert_called_once_with() mock__list_servers.assert_called_once_with() # Negative case: server isn't created mock__create_server.return_value = None self.assertRaises(exceptions.RallyAssertionError, basic.CreateAndListServers(self.context).run) mock__create_server.assert_called_with() # Negative case: server not found in the list of existed servers mock__create_server.return_value = "The_main_server_of_the_universe" self.assertRaises(exceptions.RallyAssertionError, basic.CreateAndListServers(self.context).run) mock__create_server.assert_called_with() mock__list_servers.assert_called_with() @mock.patch("%s.CreateAndDeleteServer._delete_server" % BASE) @mock.patch("%s.CreateAndDeleteServer._create_server" % BASE, return_value={"id": "123"}) def test_create_and_delete_server(self, mock__create_server, mock__delete_server): 
basic.CreateAndDeleteServer(self.context).run() mock__create_server.assert_called_once_with() mock__delete_server.assert_called_once_with("123") @mock.patch("%s.ListServers._list_servers" % BASE) def test_list_servers(self, mock__list_servers): basic.ListServers(self.context).run() mock__list_servers.assert_called_once_with() # NOTE: API V2 @mock.patch("%s.CreateAndListZones._list_zones" % BASE) @mock.patch("%s.CreateAndListZones._create_zone" % BASE) def test_create_and_list_zones(self, mock__create_zone, mock__list_zones): mock__create_zone.return_value = "Area_51" mock__list_zones.return_value = ["Area_51", "Siachen", "Bagram"] # Positive case: basic.CreateAndListZones(self.context).run() mock__create_zone.assert_called_once_with() mock__list_zones.assert_called_once_with() # Negative case: zone isn't created mock__create_zone.return_value = None self.assertRaises(exceptions.RallyAssertionError, basic.CreateAndListZones(self.context).run) mock__create_zone.assert_called_with() # Negative case: created zone not in the list of available zones mock__create_zone.return_value = "HAARP" self.assertRaises(exceptions.RallyAssertionError, basic.CreateAndListZones(self.context).run) mock__create_zone.assert_called_with() mock__list_zones.assert_called_with() @mock.patch("%s.CreateAndDeleteZone._delete_zone" % BASE) @mock.patch("%s.CreateAndDeleteZone._create_zone" % BASE, return_value={"id": "123"}) def test_create_and_delete_zone(self, mock__create_zone, mock__delete_zone): basic.CreateAndDeleteZone(self.context).run() mock__create_zone.assert_called_once_with() mock__delete_zone.assert_called_once_with("123") @mock.patch("%s.ListZones._list_zones" % BASE) def test_list_zones(self, mock_list_zones__list_zones): basic.ListZones(self.context).run() mock_list_zones__list_zones.assert_called_once_with() @mock.patch("%s.ListRecordsets._list_recordsets" % BASE) def test_list_recordsets(self, mock__list_recordsets): basic.ListRecordsets(self.context).run("123") 
mock__list_recordsets.assert_called_once_with("123") @mock.patch("%s.CreateAndDeleteRecordsets._delete_recordset" % BASE) @mock.patch("%s.CreateAndDeleteRecordsets._create_recordset" % BASE, return_value={"id": "321"}) def test_create_and_delete_recordsets(self, mock__create_recordset, mock__delete_recordset): zone = {"id": "1234"} self.context.update({ "tenant": { "zones": [zone] } }) recordsets_per_zone = 5 basic.CreateAndDeleteRecordsets(self.context).run( recordsets_per_zone=recordsets_per_zone) self.assertEqual(mock__create_recordset.mock_calls, [mock.call(zone, atomic_action=False)] * recordsets_per_zone) self.assertEqual(mock__delete_recordset.mock_calls, [mock.call(zone["id"], "321", atomic_action=False)] * recordsets_per_zone) @mock.patch("%s.CreateAndListRecordsets._list_recordsets" % BASE) @mock.patch("%s.CreateAndListRecordsets._create_recordset" % BASE) def test_create_and_list_recordsets(self, mock__create_recordset, mock__list_recordsets): zone = {"id": "1234"} self.context.update({ "tenant": { "zones": [zone] } }) recordsets_per_zone = 5 basic.CreateAndListRecordsets(self.context).run( recordsets_per_zone=recordsets_per_zone) self.assertEqual(mock__create_recordset.mock_calls, [mock.call(zone, atomic_action=False)] * recordsets_per_zone) mock__list_recordsets.assert_called_once_with(zone["id"]) rally-0.9.1/tests/unit/plugins/openstack/scenarios/ironic/0000775000567000056710000000000013073420067025051 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/ironic/__init__.py0000664000567000056710000000000013073417717027160 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/ironic/test_nodes.py0000664000567000056710000000560013073417720027575 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.openstack.scenarios.ironic import nodes from tests.unit import test class IronicNodesTestCase(test.ScenarioTestCase): def test_create_and_list_node(self): class Node(object): def __init__(self, name): self.name = name scenario = nodes.CreateAndListNode(self.context) scenario._create_node = mock.Mock(return_value=Node("node_obj1")) scenario._list_nodes = mock.Mock( return_value=[Node(name) for name in ("node_obj1", "node_obj2", "node_obj3")]) driver = "foo" fake_params = { "sort_dir": "foo1", "associated": "foo2", "detail": True, "maintenance": "foo5", "fake_parameter1": "foo7" } # Positive case: scenario.run(driver, **fake_params) scenario._create_node.assert_called_once_with(driver, fake_parameter1="foo7") scenario._list_nodes.assert_called_once_with( sort_dir="foo1", associated="foo2", detail=True, maintenance="foo5") # Negative case: created node not in the list of available nodes scenario._create_node = mock.Mock(uuid="foooo") self.assertRaises(exceptions.RallyAssertionError, scenario.run, driver, **fake_params) scenario._create_node.assert_called_with(driver, fake_parameter1="foo7") scenario._list_nodes.assert_called_with( sort_dir="foo1", associated="foo2", detail=True, maintenance="foo5") def test_create_and_delete_node(self): fake_node = mock.Mock(uuid="fake_uuid") scenario = nodes.CreateAndDeleteNode(self.context) scenario._create_node = mock.Mock(return_value=fake_node) scenario._delete_node = mock.Mock() driver = "fake" scenario.run(driver, fake_parameter1="fake1", fake_parameter2="fake2") 
scenario._create_node.assert_called_once_with( driver, fake_parameter1="fake1", fake_parameter2="fake2") scenario._delete_node.assert_called_once_with( scenario._create_node.return_value) rally-0.9.1/tests/unit/plugins/openstack/scenarios/ironic/test_utils.py0000664000567000056710000000554013073417720027630 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.ironic import utils from tests.unit import test IRONIC_UTILS = "rally.plugins.openstack.scenarios.ironic.utils" class IronicScenarioTestCase(test.ScenarioTestCase): @mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS) def test__create_node(self, mock_wait_for_status): self.admin_clients("ironic").node.create.return_value = "fake_node" scenario = utils.IronicScenario(self.context) scenario.generate_random_name = mock.Mock() scenario._create_node(driver="fake", fake_param="foo") self.admin_clients("ironic").node.create.assert_called_once_with( driver="fake", fake_param="foo", name=scenario.generate_random_name.return_value) self.assertTrue(mock_wait_for_status.called) self._test_atomic_action_timer(scenario.atomic_actions(), "ironic.create_node") @mock.patch("%s.utils.wait_for_status" % IRONIC_UTILS) def test__delete_node(self, mock_wait_for_status): mock_node_delete = mock.Mock() self.admin_clients("ironic").node.delete = mock_node_delete scenario = utils.IronicScenario(self.context) 
scenario._delete_node(mock.Mock(uuid="fake_id")) self.assertTrue(mock_wait_for_status.called) self.admin_clients("ironic").node.delete.assert_called_once_with( "fake_id") self._test_atomic_action_timer(scenario.atomic_actions(), "ironic.delete_node") def test__list_nodes(self): self.admin_clients("ironic").node.list.return_value = ["fake"] scenario = utils.IronicScenario(self.context) fake_params = { "sort_dir": "foo1", "associated": "foo2", "detail": True, "maintenance": "foo5" } return_nodes_list = scenario._list_nodes(**fake_params) self.assertEqual(["fake"], return_nodes_list) self.admin_clients("ironic").node.list.assert_called_once_with( sort_dir="foo1", associated="foo2", detail=True, maintenance="foo5") self._test_atomic_action_timer(scenario.atomic_actions(), "ironic.list_nodes") rally-0.9.1/tests/unit/plugins/openstack/scenarios/swift/0000775000567000056710000000000013073420067024722 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/swift/__init__.py0000664000567000056710000000000013073417717027031 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/swift/test_objects.py0000664000567000056710000002125213073417717027776 0ustar jenkinsjenkins00000000000000# Copyright 2015 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally.plugins.openstack.scenarios.swift import objects from tests.unit import test @ddt.ddt class SwiftObjectsTestCase(test.ScenarioTestCase): def test_create_container_and_object_then_list_objects(self): scenario = objects.CreateContainerAndObjectThenListObjects( self.context) scenario._create_container = mock.MagicMock(return_value="AA") scenario._upload_object = mock.MagicMock() scenario._list_objects = mock.MagicMock() scenario.run(objects_per_container=5, object_size=100) self.assertEqual(1, scenario._create_container.call_count) self.assertEqual(5, scenario._upload_object.call_count) scenario._list_objects.assert_called_once_with("AA") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_5_objects") def test_create_container_and_object_then_delete_all(self): scenario = objects.CreateContainerAndObjectThenDeleteAll(self.context) scenario._create_container = mock.MagicMock(return_value="BB") scenario._upload_object = mock.MagicMock( side_effect=[("etaaag", "ooobj_%i" % i) for i in range(3)]) scenario._delete_object = mock.MagicMock() scenario._delete_container = mock.MagicMock() scenario.run(objects_per_container=3, object_size=10) self.assertEqual(1, scenario._create_container.call_count) self.assertEqual(3, scenario._upload_object.call_count) scenario._delete_object.assert_has_calls( [mock.call("BB", "ooobj_%i" % i, atomic_action=False) for i in range(3)]) scenario._delete_container.assert_called_once_with("BB") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_3_objects") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.delete_3_objects") def test_create_container_and_object_then_download_object(self): scenario = objects.CreateContainerAndObjectThenDownloadObject( self.context ) scenario._create_container = mock.MagicMock(return_value="CC") scenario._upload_object = mock.MagicMock( side_effect=[("etaaaag", "obbbj_%i" % i) for i in range(2)]) scenario._download_object = 
mock.MagicMock() scenario.run(objects_per_container=2, object_size=50) self.assertEqual(1, scenario._create_container.call_count) self.assertEqual(2, scenario._upload_object.call_count) scenario._download_object.assert_has_calls( [mock.call("CC", "obbbj_%i" % i, atomic_action=False) for i in range(2)]) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_2_objects") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.download_2_objects") @ddt.data(1, 5) def test_list_objects_in_containers(self, num_cons): con_list = [{"name": "cooon_%s" % i} for i in range(num_cons)] scenario = objects.ListObjectsInContainers(self.context) scenario._list_containers = mock.MagicMock(return_value=("header", con_list)) scenario._list_objects = mock.MagicMock() scenario.run() scenario._list_containers.assert_called_once_with() con_calls = [mock.call(container["name"], atomic_action=False) for container in con_list] scenario._list_objects.assert_has_calls(con_calls) key_suffix = "container" if num_cons > 1: key_suffix = "%i_containers" % num_cons self._test_atomic_action_timer(scenario.atomic_actions(), "swift.list_objects_in_%s" % key_suffix) @ddt.data([1, 1], [1, 2], [2, 1], [3, 5]) @ddt.unpack def test_list_and_download_objects_in_containers(self, num_cons, num_objs): con_list = [{"name": "connn_%s" % i} for i in range(num_cons)] obj_list = [{"name": "ooobj_%s" % i} for i in range(num_objs)] scenario = objects.ListAndDownloadObjectsInContainers(self.context) scenario._list_containers = mock.MagicMock(return_value=("header", con_list)) scenario._list_objects = mock.MagicMock(return_value=("header", obj_list)) scenario._download_object = mock.MagicMock() scenario.run() scenario._list_containers.assert_called_once_with() con_calls = [mock.call(container["name"], atomic_action=False) for container in con_list] scenario._list_objects.assert_has_calls(con_calls) obj_calls = [] for container in con_list: for obj in obj_list: 
obj_calls.append(mock.call(container["name"], obj["name"], atomic_action=False)) scenario._download_object.assert_has_calls(obj_calls, any_order=True) list_key_suffix = "container" if num_cons > 1: list_key_suffix = "%i_containers" % num_cons self._test_atomic_action_timer( scenario.atomic_actions(), "swift.list_objects_in_%s" % list_key_suffix) download_key_suffix = "object" if num_cons * num_objs > 1: download_key_suffix = "%i_objects" % (num_cons * num_objs) self._test_atomic_action_timer( scenario.atomic_actions(), "swift.download_%s" % download_key_suffix) def test_functional_create_container_and_object_then_list_objects(self): names_list = ["AA", "BB", "CC", "DD"] scenario = objects.CreateContainerAndObjectThenListObjects( self.context) scenario.generate_random_name = mock.MagicMock(side_effect=names_list) scenario._list_objects = mock.MagicMock() scenario.run(objects_per_container=3, object_size=100) scenario._list_objects.assert_called_once_with("AA") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_3_objects") def test_functional_create_container_and_object_then_delete_all(self): names_list = ["111", "222", "333", "444", "555"] scenario = objects.CreateContainerAndObjectThenDeleteAll(self.context) scenario.generate_random_name = mock.MagicMock(side_effect=names_list) scenario._delete_object = mock.MagicMock() scenario._delete_container = mock.MagicMock() scenario.run(objects_per_container=4, object_size=240) scenario._delete_object.assert_has_calls( [mock.call("111", name, atomic_action=False) for name in names_list[1:]]) scenario._delete_container.assert_called_once_with("111") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_4_objects") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.delete_4_objects") def test_functional_create_container_and_object_then_download_object(self): names_list = ["aaa", "bbb", "ccc", "ddd", "eee", "fff"] scenario = 
objects.CreateContainerAndObjectThenDownloadObject( self.context) scenario.generate_random_name = mock.MagicMock(side_effect=names_list) scenario._download_object = mock.MagicMock() scenario.run(objects_per_container=5, object_size=750) scenario._download_object.assert_has_calls( [mock.call("aaa", name, atomic_action=False) for name in names_list[1:]]) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_5_objects") self._test_atomic_action_timer(scenario.atomic_actions(), "swift.download_5_objects") rally-0.9.1/tests/unit/plugins/openstack/scenarios/swift/test_utils.py0000664000567000056710000001432113073417717027504 0ustar jenkinsjenkins00000000000000# Copyright 2015: Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally.plugins.openstack.scenarios.swift import utils from tests.unit import test SWIFT_UTILS = "rally.plugins.openstack.scenarios.swift.utils" @ddt.ddt class SwiftScenarioTestCase(test.ScenarioTestCase): def test__list_containers(self): headers_dict = mock.MagicMock() containers_list = mock.MagicMock() self.clients("swift").get_account.return_value = (headers_dict, containers_list) scenario = utils.SwiftScenario(context=self.context) self.assertEqual((headers_dict, containers_list), scenario._list_containers(fargs="f")) kw = {"full_listing": True, "fargs": "f"} self.clients("swift").get_account.assert_called_once_with(**kw) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.list_containers") @ddt.data( {}, {"headers": {"X-fake-name": "fake-value"}}, {"public": False, "headers": {"X-fake-name": "fake-value"}}, {"public": False}) @ddt.unpack def test__create_container(self, public=True, kwargs=None, headers=None): if kwargs is None: kwargs = {"fakearg": "fake"} if headers is None: headers = {} scenario = utils.SwiftScenario(self.context) scenario.generate_random_name = mock.MagicMock() container = scenario._create_container(public=public, headers=headers, **kwargs) self.assertEqual(container, scenario.generate_random_name.return_value) kwargs["headers"] = headers kwargs["headers"]["X-Container-Read"] = ".r:*,.rlistings" self.clients("swift").put_container.assert_called_once_with( scenario.generate_random_name.return_value, **kwargs) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.create_container") def test__delete_container(self): container_name = mock.MagicMock() scenario = utils.SwiftScenario(context=self.context) scenario._delete_container(container_name, fargs="f") kw = {"fargs": "f"} self.clients("swift").delete_container.assert_called_once_with( container_name, **kw) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.delete_container") def test__list_objects(self): container_name = 
mock.MagicMock() headers_dict = mock.MagicMock() objects_list = mock.MagicMock() self.clients("swift").get_container.return_value = (headers_dict, objects_list) scenario = utils.SwiftScenario(context=self.context) self.assertEqual((headers_dict, objects_list), scenario._list_objects(container_name, fargs="f")) kw = {"full_listing": True, "fargs": "f"} self.clients("swift").get_container.assert_called_once_with( container_name, **kw) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.list_objects") def test__upload_object(self): container_name = mock.MagicMock() content = mock.MagicMock() etag = mock.MagicMock() self.clients("swift").put_object.return_value = etag scenario = utils.SwiftScenario(self.context) scenario.generate_random_name = mock.MagicMock() self.clients("swift").put_object.reset_mock() self.assertEqual((etag, scenario.generate_random_name.return_value), scenario._upload_object(container_name, content, fargs="f")) kw = {"fargs": "f"} self.clients("swift").put_object.assert_called_once_with( container_name, scenario.generate_random_name.return_value, content, **kw) self.assertEqual(1, scenario.generate_random_name.call_count) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.upload_object") def test__download_object(self): container_name = mock.MagicMock() object_name = mock.MagicMock() headers_dict = mock.MagicMock() content = mock.MagicMock() self.clients("swift").get_object.return_value = (headers_dict, content) scenario = utils.SwiftScenario(context=self.context) self.assertEqual((headers_dict, content), scenario._download_object(container_name, object_name, fargs="f")) kw = {"fargs": "f"} self.clients("swift").get_object.assert_called_once_with( container_name, object_name, **kw) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.download_object") def test__delete_object(self): container_name = mock.MagicMock() object_name = mock.MagicMock() scenario = utils.SwiftScenario(context=self.context) 
scenario._delete_object(container_name, object_name, fargs="f") kw = {"fargs": "f"} self.clients("swift").delete_object.assert_called_once_with( container_name, object_name, **kw) self._test_atomic_action_timer(scenario.atomic_actions(), "swift.delete_object") rally-0.9.1/tests/unit/plugins/openstack/scenarios/zaqar/0000775000567000056710000000000013073420067024704 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/zaqar/__init__.py0000664000567000056710000000000013073417717027013 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/zaqar/test_utils.py0000664000567000056710000000527313073417717027474 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.zaqar import utils from tests.unit import fakes from tests.unit import test UTILS = "rally.plugins.openstack.scenarios.zaqar.utils." 
class ZaqarScenarioTestCase(test.ScenarioTestCase): @mock.patch(UTILS + "ZaqarScenario.generate_random_name", return_value="kitkat") def test_queue_create(self, mock_generate_random_name): scenario = utils.ZaqarScenario(self.context) result = scenario._queue_create(fakearg="fakearg") self.assertEqual(self.clients("zaqar").queue.return_value, result) self.clients("zaqar").queue.assert_called_once_with("kitkat", fakearg="fakearg") self._test_atomic_action_timer(scenario.atomic_actions(), "zaqar.create_queue") def test_queue_delete(self): queue = fakes.FakeQueue() queue.delete = mock.MagicMock() scenario = utils.ZaqarScenario(context=self.context) scenario._queue_delete(queue) queue.delete.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "zaqar.delete_queue") def test_messages_post(self): queue = fakes.FakeQueue() queue.post = mock.MagicMock() messages = [{"body": {"id": "one"}, "ttl": 100}, {"body": {"id": "two"}, "ttl": 120}, {"body": {"id": "three"}, "ttl": 140}] min_msg_count = max_msg_count = len(messages) scenario = utils.ZaqarScenario(context=self.context) scenario._messages_post(queue, messages, min_msg_count, max_msg_count) queue.post.assert_called_once_with(messages) def test_messages_list(self): queue = fakes.FakeQueue() queue.messages = mock.MagicMock() scenario = utils.ZaqarScenario(context=self.context) scenario._messages_list(queue) queue.messages.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "zaqar.list_messages") rally-0.9.1/tests/unit/plugins/openstack/scenarios/zaqar/test_basic.py0000664000567000056710000000411113073417717027403 0ustar jenkinsjenkins00000000000000# Copyright (c) 2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.zaqar import basic from tests.unit import test BASE = "rally.plugins.openstack.scenarios.zaqar.basic" class ZaqarBasicTestCase(test.ScenarioTestCase): @mock.patch("%s.CreateQueue.generate_random_name" % BASE, return_value="fizbit") def test_create_queue(self, mock_random_name): scenario = basic.CreateQueue(self.context) scenario._queue_create = mock.MagicMock() scenario.run(fakearg="fake") scenario._queue_create.assert_called_once_with(fakearg="fake") @mock.patch("%s.CreateQueue.generate_random_name" % BASE, return_value="kitkat") def test_producer_consumer(self, mock_random_name): scenario = basic.ProducerConsumer(self.context) messages = [{"body": {"id": idx}, "ttl": 360} for idx in range(20)] queue = mock.MagicMock() scenario._queue_create = mock.MagicMock(return_value=queue) scenario._messages_post = mock.MagicMock() scenario._messages_list = mock.MagicMock() scenario._queue_delete = mock.MagicMock() scenario.run(min_msg_count=20, max_msg_count=20, fakearg="fake") scenario._queue_create.assert_called_once_with(fakearg="fake") scenario._messages_post.assert_called_once_with(queue, messages, 20, 20) scenario._messages_list.assert_called_once_with(queue) scenario._queue_delete.assert_called_once_with(queue) rally-0.9.1/tests/unit/plugins/openstack/scenarios/mistral/0000775000567000056710000000000013073420067025241 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/mistral/__init__.py0000664000567000056710000000000013073417717027350 0ustar 
# Copyright 2016: Nokia Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally.plugins.openstack.scenarios.mistral import executions
from tests.unit import test

BASE = "rally.plugins.openstack.scenarios.mistral.executions"
MISTRAL_WBS_BASE = "rally.plugins.openstack.scenarios.mistral.workbooks"

# Workbook definition with four workflows; used to exercise the
# "explicit workflow name" code path.
WB_DEFINITION = """---
version: 2.0

name: wb

workflows:
  wf1:
    type: direct
    tasks:
      noop_task:
        action: std.noop
  wf2:
    type: direct
    tasks:
      noop_task:
        action: std.noop
  wf3:
    type: direct
    tasks:
      noop_task:
        action: std.noop
  wf4:
    type: direct
    tasks:
      noop_task:
        action: std.noop
"""

# Workbook definition with a single workflow; used to exercise the
# "default workflow" code path (no workflow name passed).
WB_DEF_ONE_WF = """---
version: 2.0

name: wb

workflows:
  wf1:
    type: direct
    tasks:
      noop_task:
        action: std.noop
"""

PARAMS_EXAMPLE = {"env": {"env_param": "env_param_value"}}
INPUT_EXAMPLE = """{"input1": "value1", "some_json_input": {"a": "b"}}"""

# Lightweight stand-ins for the workbook objects returned by the client.
WB = type("obj", (object,), {"name": "wb", "definition": WB_DEFINITION})()
WB_ONE_WF = (
    type("obj", (object,), {"name": "wb", "definition": WB_DEF_ONE_WF})()
)


class MistralExecutionsTestCase(test.ScenarioTestCase):
    """Unit tests for the Mistral execution benchmark scenarios."""

    @mock.patch("%s.ListExecutions._list_executions" % BASE)
    def test_list_executions(self, mock__list_executions):
        executions.ListExecutions(self.context).run()
        # assertTrue is the idiomatic check for Mock.called (a bool).
        self.assertTrue(mock__list_executions.called)

    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    def test_create_execution(self, mock__create_workbook,
                              mock__create_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(WB_DEFINITION)

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)

    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    def test_create_execution_with_input(self, mock__create_workbook,
                                         mock__create_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEFINITION, wf_input=INPUT_EXAMPLE)

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)

    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    @mock.patch("json.loads", return_value=PARAMS_EXAMPLE)
    def test_create_execution_with_params(self, mock_loads,
                                          mock__create_workbook,
                                          mock__create_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEFINITION, params=str(PARAMS_EXAMPLE))

        # params must go through json.loads before reaching the client.
        self.assertTrue(mock_loads.called)
        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)

    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    def test_create_execution_with_wf_name(self, mock__create_workbook,
                                           mock__create_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEFINITION, "wf4")

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)

        # we concatenate workbook name with the workflow name in the test
        # the workbook name is not random because we mock the method that
        # adds the random part
        mock__create_execution.assert_called_once_with("wb.wf4", None)

    @mock.patch("%s.CreateExecutionFromWorkbook._delete_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._delete_workbook" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    def test_create_delete_execution(
            self, mock__create_workbook, mock__create_execution,
            mock__delete_workbook, mock__delete_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEFINITION, do_delete=True)

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)
        self.assertTrue(mock__delete_workbook.called)
        self.assertTrue(mock__delete_execution.called)

    @mock.patch("%s.CreateExecutionFromWorkbook._delete_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._delete_workbook" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB)
    def test_create_delete_execution_with_wf_name(
            self, mock__create_workbook, mock__create_execution,
            mock__delete_workbook, mock__delete_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEFINITION, "wf4", do_delete=True)

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)
        self.assertTrue(mock__delete_workbook.called)
        self.assertTrue(mock__delete_execution.called)

        # we concatenate workbook name with the workflow name in the test
        # the workbook name is not random because we mock the method that
        # adds the random part
        mock__create_execution.assert_called_once_with("wb.wf4", None)

    @mock.patch("%s.CreateExecutionFromWorkbook._delete_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._delete_workbook" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_execution" % BASE)
    @mock.patch("%s.CreateExecutionFromWorkbook._create_workbook" % BASE,
                return_value=WB_ONE_WF)
    def test_create_delete_execution_without_wf_name(
            self, mock__create_workbook, mock__create_execution,
            mock__delete_workbook, mock__delete_execution):
        executions.CreateExecutionFromWorkbook(self.context).run(
            WB_DEF_ONE_WF, do_delete=True)

        self.assertTrue(mock__create_workbook.called)
        self.assertTrue(mock__create_execution.called)
        self.assertTrue(mock__delete_workbook.called)
        self.assertTrue(mock__delete_execution.called)

        # we concatenate workbook name with the workflow name in the test
        # the workbook name is not random because we mock the method that
        # adds the random part
        mock__create_execution.assert_called_once_with("wb.wf1", None)
from rally.plugins.openstack.scenarios.mistral import utils
from tests.unit import fakes
from tests.unit import test

MISTRAL_UTILS = "rally.plugins.openstack.scenarios.mistral.utils"

PARAMS_EXAMPLE = {"env": {"env_param": "param_value"}}
INPUT_EXAMPLE = """{"input1": "value1", "some_json_input": {"a": "b"}}"""


class MistralScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for the low-level Mistral scenario helpers."""

    def test_list_workbooks(self):
        scenario = utils.MistralScenario(context=self.context)
        return_wbs_list = scenario._list_workbooks()
        self.assertEqual(
            self.clients("mistral").workbooks.list.return_value,
            return_wbs_list)
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.list_workbooks"
        )

    def test_create_workbook(self):
        definition = "version: \"2.0\"\nname: wb"
        scenario = utils.MistralScenario(context=self.context)
        self.assertEqual(
            self.clients("mistral").workbooks.create.return_value,
            scenario._create_workbook(definition)
        )
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.create_workbook"
        )

    def test_delete_workbook(self):
        scenario = utils.MistralScenario(context=self.context)
        scenario._delete_workbook("wb_name")
        self.clients("mistral").workbooks.delete.assert_called_once_with(
            "wb_name"
        )
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.delete_workbook"
        )

    def test_list_executions(self):
        scenario = utils.MistralScenario(context=self.context)
        return_executions_list = scenario._list_executions()
        self.assertEqual(
            return_executions_list,
            self.clients("mistral").executions.list.return_value
        )
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.list_executions"
        )

    def test_create_execution(self):
        scenario = utils.MistralScenario(context=self.context)
        mock_wait_for_status = self.mock_wait_for_status.mock
        wf_name = "fake_wf_name"
        mock_create_exec = self.clients("mistral").executions.create

        self.assertEqual(
            mock_wait_for_status.return_value,
            scenario._create_execution("%s" % wf_name)
        )
        mock_create_exec.assert_called_once_with(wf_name,
                                                 workflow_input=None)

        # The execution handle must be polled until SUCCESS or ERROR.
        args, kwargs = mock_wait_for_status.call_args
        self.assertEqual(mock_create_exec.return_value, args[0])
        self.assertEqual(["ERROR"], kwargs["failure_statuses"])
        self.assertEqual(["SUCCESS"], kwargs["ready_statuses"])
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.create_execution"
        )

    def test_create_execution_with_input(self):
        scenario = utils.MistralScenario(context=self.context)
        mock_wait_for_status = self.mock_wait_for_status.mock
        wf_name = "fake_wf_name"
        mock_create_exec = self.clients("mistral").executions.create

        self.assertEqual(
            mock_wait_for_status.return_value,
            scenario._create_execution(
                wf_name, wf_input=str(INPUT_EXAMPLE))
        )
        mock_create_exec.assert_called_once_with(
            wf_name, workflow_input=INPUT_EXAMPLE)

    def test_create_execution_with_params(self):
        scenario = utils.MistralScenario(context=self.context)
        mock_wait_for_status = self.mock_wait_for_status.mock
        wf_name = "fake_wf_name"
        mock_create_exec = self.clients("mistral").executions.create

        self.assertEqual(
            mock_wait_for_status.return_value,
            scenario._create_execution(
                wf_name, **PARAMS_EXAMPLE)
        )
        mock_create_exec.assert_called_once_with(wf_name,
                                                 workflow_input=None,
                                                 **PARAMS_EXAMPLE)

        # NOTE: the original test duplicated this whole assertion block;
        # one copy is sufficient.
        args, kwargs = mock_wait_for_status.call_args
        self.assertEqual(mock_create_exec.return_value, args[0])
        self.assertEqual(["ERROR"], kwargs["failure_statuses"])
        self.assertEqual(["SUCCESS"], kwargs["ready_statuses"])
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.create_execution"
        )

    def test_delete_execution(self):
        scenario = utils.MistralScenario(context=self.context)
        execution = fakes.FakeMistralClient().execution.create()
        scenario._delete_execution(execution)
        self.clients("mistral").executions.delete.assert_called_once_with(
            execution.id
        )
        self._test_atomic_action_timer(
            scenario.atomic_actions(), "mistral.delete_execution"
        )


import mock

from rally.plugins.openstack.scenarios.mistral import workbooks

BASE = "rally.plugins.openstack.scenarios.mistral.workbooks"


class MistralWorkbooksTestCase(test.ScenarioTestCase):
    """Unit tests for the Mistral workbook benchmark scenarios."""

    @mock.patch("%s.ListWorkbooks._list_workbooks" % BASE)
    def test_list_workbooks(self, mock_list_workbooks__list_workbooks):
        workbooks.ListWorkbooks(self.context).run()
        mock_list_workbooks__list_workbooks.assert_called_once_with()

    @mock.patch("%s.CreateWorkbook._create_workbook" % BASE)
    def test_create_workbook(self, mock_create_workbook__create_workbook):
        definition = "---\nversion: \"2.0\"\nname: wb"
        fake_wb = mock.MagicMock()
        fake_wb.name = "wb"
        mock_create_workbook__create_workbook.return_value = fake_wb
        workbooks.CreateWorkbook(self.context).run(definition)

        # assertTrue is the idiomatic check for Mock.called (a bool).
        self.assertTrue(mock_create_workbook__create_workbook.called)

    @mock.patch("%s.CreateWorkbook._delete_workbook" % BASE)
    @mock.patch("%s.CreateWorkbook._create_workbook" % BASE)
    def test_create_delete_workbook(self,
                                    mock_create_workbook__create_workbook,
                                    mock_create_workbook__delete_workbook):
        definition = "---\nversion: \"2.0\"\nname: wb"
        fake_wb = mock.MagicMock()
        fake_wb.name = "wb"
        mock_create_workbook__create_workbook.return_value = fake_wb

        workbooks.CreateWorkbook(self.context).run(definition, do_delete=True)

        self.assertTrue(mock_create_workbook__create_workbook.called)
        mock_create_workbook__delete_workbook.assert_called_once_with(
            fake_wb.name)


from rally.plugins.openstack.scenarios.murano import environments

MURANO_SCENARIO = ("rally.plugins.openstack.scenarios.murano."
                   "environments")


class MuranoEnvironmentsTestCase(test.ScenarioTestCase):
    """Unit tests for the Murano environment benchmark scenarios."""

    def _get_context(self):
        self.context.update({
            "tenant": {
                "packages": [mock.MagicMock(fully_qualified_name="fake")]
            },
            "user": {
                "tenant_id": "fake_tenant_id"
            },
            "config": {
                "murano_packages": {
                    "app_package": (
                        "rally-jobs/extra/murano/"
                        "applications/HelloReporter/"
                        "io.murano.apps.HelloReporter.zip")
                }
            }
        })
        return self.context

    def test_list_environments(self):
        TEST_TARGET = "ListEnvironments"
        list_env_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                            TEST_TARGET,
                                            "_list_environments")
        scenario = environments.ListEnvironments(self.context)
        with mock.patch(list_env_module) as mock_list_env:
            scenario.run()
            mock_list_env.assert_called_once_with()

    def test_create_and_delete_environment(self):
        TEST_TARGET = "CreateAndDeleteEnvironment"
        generate_random_name_module = "{}.{}.{}".format(
            MURANO_SCENARIO, TEST_TARGET, "generate_random_name")
        create_env_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                              TEST_TARGET,
                                              "_create_environment")
        create_session_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                                  TEST_TARGET,
                                                  "_create_session")
        delete_env_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                              TEST_TARGET,
                                              "_delete_environment")
        scenario = environments.CreateAndDeleteEnvironment(self.context)

        # One multi-manager "with" instead of four nested blocks.
        with mock.patch(generate_random_name_module) as mock_random_name, \
                mock.patch(create_env_module) as mock_create_env, \
                mock.patch(create_session_module) as mock_create_session, \
                mock.patch(delete_env_module) as mock_delete_env:
            fake_env = mock.Mock(id="fake_id")
            mock_create_env.return_value = fake_env
            mock_random_name.return_value = "foo"
            scenario.run()

            mock_create_env.assert_called_once_with()
            mock_create_session.assert_called_once_with(fake_env.id)
            mock_delete_env.assert_called_once_with(fake_env)

    def test_create_and_deploy_environment(self):
        TEST_TARGET = "CreateAndDeployEnvironment"
        create_env_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                              TEST_TARGET,
                                              "_create_environment")
        create_session_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                                  TEST_TARGET,
                                                  "_create_session")
        create_service_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                                  TEST_TARGET,
                                                  "_create_service")
        deploy_env_module = "{}.{}.{}".format(MURANO_SCENARIO,
                                              TEST_TARGET,
                                              "_deploy_environment")
        scenario = environments.CreateAndDeployEnvironment(self.context)

        with mock.patch(create_env_module) as mock_create_env, \
                mock.patch(create_session_module) as mock_create_session, \
                mock.patch(create_service_module) as mock_create_service, \
                mock.patch(deploy_env_module) as mock_deploy_env:
            fake_env = mock.MagicMock(id="fake_env_id")
            mock_create_env.return_value = fake_env

            fake_session = mock.Mock(id="fake_session_id")
            mock_create_session.return_value = fake_session

            scenario.context = self._get_context()
            scenario.context["tenants"] = {
                "fake_tenant_id": {
                    "packages": [mock.MagicMock()]
                }
            }

            scenario.run(1)

            mock_create_env.assert_called_once_with()
            mock_create_session.assert_called_once_with(fake_env.id)
            mock_create_service.assert_called_once_with(
                fake_env, fake_session, "fake", atomic_action=False)
            mock_deploy_env.assert_called_once_with(fake_env, fake_session)
            self._test_atomic_action_timer(
                scenario.atomic_actions(), "murano.create_services")
import mock
from oslo_config import cfg

from rally.plugins.openstack.scenarios.murano import utils
from tests.unit import test

MRN_UTILS = "rally.plugins.openstack.scenarios.murano.utils"
CONF = cfg.CONF


class MuranoScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for the low-level Murano scenario helpers."""

    def test_list_environments(self):
        self.clients("murano").environments.list.return_value = []
        scenario = utils.MuranoScenario(context=self.context)
        return_environments_list = scenario._list_environments()
        self.assertEqual([], return_environments_list)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.list_environments")

    def test_create_environments(self):
        self.clients("murano").environments.create = mock.Mock()
        scenario = utils.MuranoScenario(context=self.context)
        scenario.generate_random_name = mock.Mock()

        create_env = scenario._create_environment()

        self.assertEqual(
            create_env,
            self.clients("murano").environments.create.return_value)
        self.clients("murano").environments.create.assert_called_once_with(
            {"name": scenario.generate_random_name.return_value})
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.create_environment")

    def test_delete_environment(self):
        environment = mock.Mock(id="id")
        self.clients("murano").environments.delete.return_value = "ok"
        scenario = utils.MuranoScenario(context=self.context)
        scenario._delete_environment(environment)
        self.clients("murano").environments.delete.assert_called_once_with(
            environment.id
        )

    def test_create_session(self):
        self.clients("murano").sessions.configure.return_value = "sess"
        scenario = utils.MuranoScenario(context=self.context)
        create_sess = scenario._create_session("id")
        self.assertEqual("sess", create_sess)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.create_session")

    # NOTE: stray trailing comma removed from the method signature
    # ("def test__create_service(self,):" in the original).
    def test__create_service(self):
        self.clients("murano").services.post.return_value = "app"
        mock_env = mock.Mock(id="ip")
        mock_sess = mock.Mock(id="ip")
        scenario = utils.MuranoScenario(context=self.context)

        create_app = scenario._create_service(mock_env, mock_sess,
                                              "fake_full_name",
                                              atomic_action=True)

        self.assertEqual("app", create_app)
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.create_service")

    def test_deploy_environment(self):
        environment = mock.Mock(id="id")
        session = mock.Mock(id="id")
        self.clients("murano").sessions.deploy.return_value = "ok"
        scenario = utils.MuranoScenario(context=self.context)
        scenario._deploy_environment(environment, session)

        self.clients("murano").sessions.deploy.assert_called_once_with(
            environment.id, session.id
        )

        # The deploy call must poll until READY, honouring the configured
        # check interval and timeout.
        config = CONF.benchmark
        self.mock_wait_for.mock.assert_called_once_with(
            environment,
            update_resource=self.mock_get_from_manager.mock.return_value,
            ready_statuses=["READY"],
            check_interval=config.murano_deploy_environment_check_interval,
            timeout=config.murano_deploy_environment_timeout)

        self.mock_get_from_manager.mock.assert_called_once_with(
            ["DEPLOY FAILURE"])
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.deploy_environment")

    @mock.patch(MRN_UTILS + ".open",
                side_effect=mock.mock_open(read_data="Key: value"),
                create=True)
    def test_read_from_file(self, mock_open):
        utility = utils.MuranoPackageManager({"uuid": "fake_task_id"})
        data = utility._read_from_file("filename")
        expected_data = {"Key": "value"}
        self.assertEqual(expected_data, data)

    @mock.patch(MRN_UTILS + ".MuranoPackageManager._read_from_file")
    @mock.patch(MRN_UTILS + ".MuranoPackageManager._write_to_file")
    def test_change_app_fullname(
            self, mock_murano_package_manager__write_to_file,
            mock_murano_package_manager__read_from_file):

        manifest = {"FullName": "app.name_abc",
                    "Classes": {"app.name_abc": "app_class.yaml"}}
        mock_murano_package_manager__read_from_file.side_effect = (
            [manifest])

        utility = utils.MuranoPackageManager({"uuid": "fake_task_id"})
        utility._change_app_fullname("tmp/tmpfile/")

        mock_murano_package_manager__read_from_file.assert_has_calls(
            [mock.call("tmp/tmpfile/manifest.yaml")]
        )
        mock_murano_package_manager__write_to_file.assert_has_calls(
            [mock.call(manifest, "tmp/tmpfile/manifest.yaml")]
        )

    @mock.patch("zipfile.is_zipfile")
    @mock.patch("tempfile.mkdtemp")
    @mock.patch("shutil.copytree")
    @mock.patch(MRN_UTILS + ".MuranoPackageManager._change_app_fullname")
    @mock.patch("rally.common.fileutils.pack_dir")
    @mock.patch("shutil.rmtree")
    def test_prepare_zip_if_not_zip(
            self, mock_shutil_rmtree, mock_pack_dir,
            mock_murano_package_manager__change_app_fullname,
            mock_shutil_copytree, mock_tempfile_mkdtemp,
            mock_zipfile_is_zipfile):
        utility = utils.MuranoPackageManager({"uuid": "fake_task_id"})
        package_path = "tmp/tmpfile"

        mock_zipfile_is_zipfile.return_value = False
        mock_tempfile_mkdtemp.return_value = "tmp/tmpfile"
        mock_pack_dir.return_value = "tmp/tmpzipfile"

        zip_file = utility._prepare_package(package_path)

        self.assertEqual("tmp/tmpzipfile", zip_file)
        mock_tempfile_mkdtemp.assert_called_once_with()
        mock_shutil_copytree.assert_called_once_with(
            "tmp/tmpfile", "tmp/tmpfile/package/"
        )
        (mock_murano_package_manager__change_app_fullname.
            assert_called_once_with("tmp/tmpfile/package/"))
        mock_shutil_rmtree.assert_called_once_with("tmp/tmpfile")

    @mock.patch("zipfile.is_zipfile")
    def test_prepare_zip_if_zip(self, mock_zipfile_is_zipfile):
        utility = utils.MuranoPackageManager({"uuid": "fake_task_id"})
        package_path = "tmp/tmpfile.zip"
        mock_zipfile_is_zipfile.return_value = True
        zip_file = utility._prepare_package(package_path)
        self.assertEqual("tmp/tmpfile.zip", zip_file)

    def test_list_packages(self):
        scenario = utils.MuranoScenario()
        self.assertEqual(self.clients("murano").packages.list.return_value,
                         scenario._list_packages())
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.list_packages")

    @mock.patch(MRN_UTILS + ".open", create=True)
    def test_import_package(self, mock_open):
        self.clients("murano").packages.create.return_value = (
            "created_foo_package"
        )
        scenario = utils.MuranoScenario()
        mock_open.return_value = "opened_foo_package.zip"
        imp_package = scenario._import_package("foo_package.zip")
        self.assertEqual("created_foo_package", imp_package)
        self.clients("murano").packages.create.assert_called_once_with(
            {}, {"file": "opened_foo_package.zip"})
        mock_open.assert_called_once_with("foo_package.zip")
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.import_package")

    def test_delete_package(self):
        package = mock.Mock(id="package_id")
        scenario = utils.MuranoScenario()
        scenario._delete_package(package)
        self.clients("murano").packages.delete.assert_called_once_with(
            "package_id"
        )
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.delete_package")

    def test_update_package(self):
        package = mock.Mock(id="package_id")
        self.clients("murano").packages.update.return_value = "updated_package"
        scenario = utils.MuranoScenario()
        upd_package = scenario._update_package(
            package, {"tags": ["tag"]}, "add"
        )
        self.assertEqual("updated_package", upd_package)
        self.clients("murano").packages.update.assert_called_once_with(
            "package_id", {"tags": ["tag"]}, "add"
        )
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.update_package")

    def test_filter_packages(self):
        self.clients("murano").packages.filter.return_value = []
        scenario = utils.MuranoScenario()
        return_apps_list = scenario._filter_applications(
            {"category": "Web"}
        )
        self.assertEqual([], return_apps_list)
        self.clients("murano").packages.filter.assert_called_once_with(
            category="Web"
        )
        self._test_atomic_action_timer(scenario.atomic_actions(),
                                       "murano.filter_applications")


from rally.plugins.openstack.scenarios.murano import packages

MURANO_SCENARIO = ("rally.plugins.openstack.scenarios.murano."
                   "packages.MuranoPackages")


class MuranoPackagesTestCase(test.TestCase):
    """Unit tests for the Murano package benchmark scenarios."""

    def setUp(self):
        super(MuranoPackagesTestCase, self).setUp()
        # Patch os.remove for the whole test; addCleanup guarantees the
        # patcher is stopped even if setUp later fails (safer than tearDown).
        self.mock_remove = mock.patch("os.remove")
        self.mock_remove.start()
        self.addCleanup(self.mock_remove.stop)

    def mock_modules(self, scenario):
        # Replace every scenario helper with a Mock so tests only verify
        # orchestration, not the helpers themselves.
        scenario._import_package = mock.Mock()
        scenario._zip_package = mock.Mock()
        scenario._list_packages = mock.Mock()
        scenario._delete_package = mock.Mock()
        scenario._update_package = mock.Mock()
        scenario._filter_applications = mock.Mock()

    def test_make_zip_import_and_list_packages(self):
        scenario = packages.ImportAndListPackages()
        self.mock_modules(scenario)
        scenario.run("foo_package.zip")
        scenario._import_package.assert_called_once_with(
            scenario._zip_package.return_value)
        scenario._zip_package.assert_called_once_with("foo_package.zip")
        scenario._list_packages.assert_called_once_with(
            include_disabled=False)

    def test_import_and_delete_package(self):
        scenario = packages.ImportAndDeletePackage()
        self.mock_modules(scenario)
        fake_package = mock.Mock()
        scenario._import_package.return_value = fake_package
        scenario.run("foo_package.zip")
        scenario._import_package.assert_called_once_with(
            scenario._zip_package.return_value)
        scenario._delete_package.assert_called_once_with(fake_package)

    def test_package_lifecycle(self):
        scenario = packages.PackageLifecycle()
        self.mock_modules(scenario)
        fake_package = mock.Mock()
        scenario._import_package.return_value = fake_package
        scenario.run("foo_package.zip", {"category": "Web"}, "add")
        scenario._import_package.assert_called_once_with(
            scenario._zip_package.return_value)
        scenario._update_package.assert_called_once_with(
            fake_package, {"category": "Web"}, "add")
        scenario._delete_package.assert_called_once_with(fake_package)

    def test_import_and_filter_applications(self):
        scenario = packages.ImportAndFilterApplications()
        self.mock_modules(scenario)
        fake_package = mock.Mock()
        scenario._import_package.return_value = fake_package
        scenario.run("foo_package.zip", {"category": "Web"})
        scenario._import_package.assert_called_once_with(
            scenario._zip_package.return_value)
        scenario._filter_applications.assert_called_once_with(
            {"category": "Web"}
        )
import mock from rally import exceptions from rally.plugins.openstack.scenarios.heat import stacks from tests.unit import test BASE = "rally.plugins.openstack.scenarios.heat.stacks" class HeatStacksTestCase(test.ScenarioTestCase): def setUp(self): super(HeatStacksTestCase, self).setUp() self.default_template = "heat_template_version: 2013-05-23" self.default_parameters = {"dummy_param": "dummy_key"} self.default_files = ["dummy_file.yaml"] self.default_environment = {"env": "dummy_env"} self.default_output_key = "dummy_output_key" @mock.patch("%s.CreateAndListStack._list_stacks" % BASE) @mock.patch("%s.CreateAndListStack._create_stack" % BASE) def test_create_and_list_stack(self, mock__create_stack, mock__list_stacks): stack = mock.Mock() mock__create_stack.return_value = stack mock__list_stacks.return_value = [stack] * 3 # Positive case: stacks.CreateAndListStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__list_stacks.assert_called_once_with() # Negative case1: stack isn't created mock__create_stack.return_value = None self.assertRaises(exceptions.RallyAssertionError, stacks.CreateAndListStack(self.context).run, template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) # Negative case2: created stack not in the list of available stacks fake_stack = mock.Mock() mock__create_stack.return_value = fake_stack self.assertRaises(exceptions.RallyAssertionError, stacks.CreateAndListStack(self.context).run, template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, 
environment=self.default_environment) mock__create_stack.assert_called_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__list_stacks.assert_called_with() @mock.patch("%s.ListStacksAndResources._list_stacks" % BASE) def test_list_stack_and_resources(self, mock__list_stacks): stack = mock.Mock() heat_scenario = stacks.ListStacksAndResources(self.context) mock__list_stacks.return_value = [stack] heat_scenario.run() self.clients("heat").resources.list.assert_called_once_with( stack.id) self._test_atomic_action_timer(heat_scenario.atomic_actions(), "heat.list_resources_of_1_stacks") @mock.patch("%s.ListStacksAndEvents._list_stacks" % BASE) def test_list_stack_and_events(self, mock__list_stacks): stack = mock.Mock() mock__list_stacks.return_value = [stack] heat_scenario = stacks.ListStacksAndEvents(self.context) heat_scenario.run() self.clients("heat").events.list.assert_called_once_with(stack.id) self._test_atomic_action_timer( heat_scenario.atomic_actions(), "heat.list_events_of_1_stacks") @mock.patch("%s.CreateAndDeleteStack._delete_stack" % BASE) @mock.patch("%s.CreateAndDeleteStack._create_stack" % BASE) @mock.patch("%s.CreateAndDeleteStack.generate_random_name" % BASE, return_value="test-rally-stack") def test_create_and_delete_stack(self, mock_generate_random_name, mock__create_stack, mock__delete_stack): fake_stack = object() mock__create_stack.return_value = fake_stack stacks.CreateAndDeleteStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__delete_stack.assert_called_once_with(fake_stack) @mock.patch("%s.CreateCheckDeleteStack._delete_stack" % BASE) @mock.patch("%s.CreateCheckDeleteStack._check_stack" % BASE) 
@mock.patch("%s.CreateCheckDeleteStack._create_stack" % BASE) def test_create_check_delete_stack(self, mock__create_stack, mock__check_stack, mock__delete_stack): stacks.CreateCheckDeleteStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__check_stack.assert_called_once_with( mock__create_stack.return_value) mock__delete_stack.assert_called_once_with( mock__create_stack.return_value) @mock.patch("%s.CreateUpdateDeleteStack._delete_stack" % BASE) @mock.patch("%s.CreateUpdateDeleteStack._update_stack" % BASE) @mock.patch("%s.CreateUpdateDeleteStack._create_stack" % BASE) @mock.patch("%s.CreateUpdateDeleteStack.generate_random_name" % BASE, return_value="test-rally-stack") def test_create_update_delete_stack(self, mock_generate_random_name, mock__create_stack, mock__update_stack, mock__delete_stack): fake_stack = object() mock__create_stack.return_value = fake_stack stacks.CreateUpdateDeleteStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, updated_template_path=self.default_template, files=self.default_files, environment=self.default_environment ) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__update_stack.assert_called_once_with( fake_stack, self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__delete_stack.assert_called_once_with(fake_stack) def test_create_stack_and_scale(self): heat_scenario = stacks.CreateStackAndScale(self.context) stack = mock.Mock() heat_scenario._create_stack = mock.Mock(return_value=stack) heat_scenario._scale_stack = mock.Mock() heat_scenario.run( self.default_template, "key", -1, 
parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) heat_scenario._create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) heat_scenario._scale_stack.assert_called_once_with( stack, "key", -1) @mock.patch("%s.CreateSuspendResumeDeleteStack._delete_stack" % BASE) @mock.patch("%s.CreateSuspendResumeDeleteStack._resume_stack" % BASE) @mock.patch("%s.CreateSuspendResumeDeleteStack._suspend_stack" % BASE) @mock.patch("%s.CreateSuspendResumeDeleteStack._create_stack" % BASE) def test_create_suspend_resume_delete_stack(self, mock__create_stack, mock__suspend_stack, mock__resume_stack, mock__delete_stack): stacks.CreateSuspendResumeDeleteStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment ) mock__suspend_stack.assert_called_once_with( mock__create_stack.return_value) mock__resume_stack.assert_called_once_with( mock__create_stack.return_value) mock__delete_stack.assert_called_once_with( mock__create_stack.return_value) @mock.patch("%s.CreateSnapshotRestoreDeleteStack._delete_stack" % BASE) @mock.patch("%s.CreateSnapshotRestoreDeleteStack._restore_stack" % BASE) @mock.patch("%s.CreateSnapshotRestoreDeleteStack._snapshot_stack" % BASE, return_value={"id": "dummy_id"}) @mock.patch("%s.CreateSnapshotRestoreDeleteStack._create_stack" % BASE, return_value=object()) def test_create_snapshot_restore_delete_stack(self, mock__create_stack, mock__snapshot_stack, mock__restore_stack, mock__delete_stack): stacks.CreateSnapshotRestoreDeleteStack(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) 
mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__snapshot_stack.assert_called_once_with( mock__create_stack.return_value) mock__restore_stack.assert_called_once_with( mock__create_stack.return_value, "dummy_id") mock__delete_stack.assert_called_once_with( mock__create_stack.return_value) @mock.patch("%s.CreateStackAndShowOutputViaAPI" "._stack_show_output_via_API" % BASE) @mock.patch("%s.CreateStackAndShowOutputViaAPI._create_stack" % BASE) def test_create_and_show_output_via_API(self, mock__create_stack, mock__stack_show_output_api): stacks.CreateStackAndShowOutputViaAPI(self.context).run( template_path=self.default_template, output_key=self.default_output_key, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__stack_show_output_api.assert_called_once_with( mock__create_stack.return_value, self.default_output_key) @mock.patch("%s.CreateStackAndShowOutput._stack_show_output" % BASE) @mock.patch("%s.CreateStackAndShowOutput._create_stack" % BASE) def test_create_and_show_output(self, mock__create_stack, mock__stack_show_output): stacks.CreateStackAndShowOutput(self.context).run( template_path=self.default_template, output_key=self.default_output_key, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__stack_show_output.assert_called_once_with( mock__create_stack.return_value, self.default_output_key) @mock.patch("%s.CreateStackAndListOutputViaAPI" "._stack_list_output_via_API" % BASE) @mock.patch("%s.CreateStackAndListOutputViaAPI._create_stack" % BASE) def 
test_create_and_list_output_via_API(self, mock__create_stack, mock__stack_list_output_api): stacks.CreateStackAndListOutputViaAPI(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__stack_list_output_api.assert_called_once_with( mock__create_stack.return_value) @mock.patch("%s.CreateStackAndListOutput._stack_list_output" % BASE) @mock.patch("%s.CreateStackAndListOutput._create_stack" % BASE) def test_create_and_list_output(self, mock__create_stack, mock__stack_list_output): stacks.CreateStackAndListOutput(self.context).run( template_path=self.default_template, parameters=self.default_parameters, files=self.default_files, environment=self.default_environment) mock__create_stack.assert_called_once_with( self.default_template, self.default_parameters, self.default_files, self.default_environment) mock__stack_list_output.assert_called_once_with( mock__create_stack.return_value) rally-0.9.1/tests/unit/plugins/openstack/scenarios/heat/test_utils.py0000664000567000056710000003461213073417717027276 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.plugins.openstack.scenarios.heat import utils from tests.unit import test HEAT_UTILS = "rally.plugins.openstack.scenarios.heat.utils" CONF = utils.CONF class HeatScenarioTestCase(test.ScenarioTestCase): def setUp(self): super(HeatScenarioTestCase, self).setUp() self.stack = mock.Mock() self.scenario = utils.HeatScenario(self.context) self.default_template = "heat_template_version: 2013-05-23" self.dummy_parameters = {"dummy_param": "dummy_key"} self.dummy_files = ["dummy_file.yaml"] self.dummy_environment = {"dummy_env": "dummy_env_value"} self.default_output_key = "dummy_output_key" def test_list_stacks(self): scenario = utils.HeatScenario(self.context) return_stacks_list = scenario._list_stacks() self.clients("heat").stacks.list.assert_called_once_with() self.assertEqual(list(self.clients("heat").stacks.list.return_value), return_stacks_list) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.list_stacks") def test_create_stack(self): self.clients("heat").stacks.create.return_value = { "stack": {"id": "test_id"} } self.clients("heat").stacks.get.return_value = self.stack return_stack = self.scenario._create_stack(self.default_template, self.dummy_parameters, self.dummy_files, self.dummy_environment) args, kwargs = self.clients("heat").stacks.create.call_args self.assertIn(self.dummy_parameters, kwargs.values()) self.assertIn(self.default_template, kwargs.values()) self.assertIn(self.dummy_files, kwargs.values()) self.assertIn(self.dummy_environment, kwargs.values()) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["CREATE_COMPLETE"], failure_statuses=["CREATE_FAILED"], check_interval=CONF.benchmark.heat_stack_create_poll_interval, timeout=CONF.benchmark.heat_stack_create_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self.assertEqual(self.mock_wait_for.mock.return_value, return_stack) 
self._test_atomic_action_timer(self.scenario.atomic_actions(), "heat.create_stack") def test_update_stack(self): self.clients("heat").stacks.update.return_value = None scenario = utils.HeatScenario(self.context) scenario._update_stack(self.stack, self.default_template, self.dummy_parameters, self.dummy_files, self.dummy_environment) args, kwargs = self.clients("heat").stacks.update.call_args self.assertIn(self.dummy_parameters, kwargs.values()) self.assertIn(self.default_template, kwargs.values()) self.assertIn(self.dummy_files, kwargs.values()) self.assertIn(self.dummy_environment, kwargs.values()) self.assertIn(self.stack.id, args) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["UPDATE_COMPLETE"], failure_statuses=["UPDATE_FAILED"], check_interval=CONF.benchmark.heat_stack_update_poll_interval, timeout=CONF.benchmark.heat_stack_update_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.update_stack") def test_check_stack(self): scenario = utils.HeatScenario(self.context) scenario._check_stack(self.stack) self.clients("heat").actions.check.assert_called_once_with( self.stack.id) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["CHECK_COMPLETE"], failure_statuses=["CHECK_FAILED"], check_interval=CONF.benchmark.heat_stack_check_poll_interval, timeout=CONF.benchmark.heat_stack_check_timeout) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.check_stack") def test_delete_stack(self): scenario = utils.HeatScenario(self.context) scenario._delete_stack(self.stack) self.stack.delete.assert_called_once_with() self.mock_wait_for_status.mock.assert_called_once_with( self.stack, ready_statuses=["DELETE_COMPLETE"], failure_statuses=["DELETE_FAILED"], check_deletion=True, 
update_resource=self.mock_get_from_manager.mock.return_value, check_interval=CONF.benchmark.heat_stack_delete_poll_interval, timeout=CONF.benchmark.heat_stack_delete_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.delete_stack") def test_suspend_stack(self): scenario = utils.HeatScenario(self.context) scenario._suspend_stack(self.stack) self.clients("heat").actions.suspend.assert_called_once_with( self.stack.id) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["SUSPEND_COMPLETE"], failure_statuses=["SUSPEND_FAILED"], check_interval=CONF.benchmark.heat_stack_suspend_poll_interval, timeout=CONF.benchmark.heat_stack_suspend_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.suspend_stack") def test_resume_stack(self): scenario = utils.HeatScenario(self.context) scenario._resume_stack(self.stack) self.clients("heat").actions.resume.assert_called_once_with( self.stack.id) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["RESUME_COMPLETE"], failure_statuses=["RESUME_FAILED"], check_interval=CONF.benchmark.heat_stack_resume_poll_interval, timeout=CONF.benchmark.heat_stack_resume_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.resume_stack") def test_snapshot_stack(self): scenario = utils.HeatScenario(self.context) scenario._snapshot_stack(self.stack) self.clients("heat").stacks.snapshot.assert_called_once_with( self.stack.id) self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["SNAPSHOT_COMPLETE"], failure_statuses=["SNAPSHOT_FAILED"], 
check_interval=CONF.benchmark.heat_stack_snapshot_poll_interval, timeout=CONF.benchmark.heat_stack_snapshot_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.snapshot_stack") def test_restore_stack(self): scenario = utils.HeatScenario(self.context) scenario._restore_stack(self.stack, "dummy_id") self.clients("heat").stacks.restore.assert_called_once_with( self.stack.id, "dummy_id") self.mock_wait_for.mock.assert_called_once_with( self.stack, update_resource=self.mock_get_from_manager.mock.return_value, ready_statuses=["RESTORE_COMPLETE"], failure_statuses=["RESTORE_FAILED"], check_interval=CONF.benchmark.heat_stack_restore_poll_interval, timeout=CONF.benchmark.heat_stack_restore_timeout) self.mock_get_from_manager.mock.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "heat.restore_stack") def test__count_instances(self): self.clients("heat").resources.list.return_value = [ mock.Mock(resource_type="OS::Nova::Server"), mock.Mock(resource_type="OS::Nova::Server"), mock.Mock(resource_type="OS::Heat::AutoScalingGroup")] scenario = utils.HeatScenario(self.context) self.assertEqual(scenario._count_instances(self.stack), 2) self.clients("heat").resources.list.assert_called_once_with( self.stack.id, nested_depth=1) def test__scale_stack(self): scenario = utils.HeatScenario(self.context) scenario._count_instances = mock.Mock(side_effect=[3, 3, 2]) scenario._stack_webhook = mock.Mock() scenario._scale_stack(self.stack, "test_output_key", -1) scenario._stack_webhook.assert_called_once_with(self.stack, "test_output_key") self.mock_wait_for.mock.assert_called_once_with( self.stack, is_ready=mock.ANY, failure_statuses=["UPDATE_FAILED"], update_resource=self.mock_get_from_manager.mock.return_value, timeout=CONF.benchmark.heat_stack_scale_timeout, check_interval=CONF.benchmark.heat_stack_scale_poll_interval) self.mock_get_from_manager.mock.assert_called_once_with() 
self._test_atomic_action_timer(scenario.atomic_actions(), "heat.scale_with_test_output_key") @mock.patch("requests.post") def test_stack_webhook(self, mock_post): scenario = utils.HeatScenario(self.context) stack = mock.Mock(outputs=[ {"output_key": "output1", "output_value": "url1"}, {"output_key": "output2", "output_value": "url2"}]) scenario._stack_webhook(stack, "output1") mock_post.assert_called_with("url1") self._test_atomic_action_timer(scenario.atomic_actions(), "heat.output1_webhook") @mock.patch("requests.post") def test_stack_webhook_invalid_output_key(self, mock_post): scenario = utils.HeatScenario(self.context) stack = mock.Mock() stack.outputs = [{"output_key": "output1", "output_value": "url1"}, {"output_key": "output2", "output_value": "url2"}] self.assertRaises(exceptions.InvalidConfigException, scenario._stack_webhook, stack, "bogus") def test_stack_show_output(self): scenario = utils.HeatScenario(self.context) scenario._stack_show_output(self.stack, self.default_output_key) self.clients("heat").stacks.output_show.assert_called_once_with( self.stack.id, self.default_output_key) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.show_output") def test_stack_show_output_via_API(self): scenario = utils.HeatScenario(self.context) scenario._stack_show_output_via_API( self.stack, self.default_output_key) self.clients("heat").stacks.get.assert_called_once_with( stack_id=self.stack.id) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.show_output_via_API") def test_stack_list_output(self): scenario = utils.HeatScenario(self.context) scenario._stack_list_output(self.stack) self.clients("heat").stacks.output_list.assert_called_once_with( self.stack.id) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.list_output") def test_stack_list_output_via_API(self): scenario = utils.HeatScenario(self.context) scenario._stack_list_output_via_API(self.stack) self.clients("heat").stacks.get.assert_called_once_with( 
stack_id=self.stack.id) self._test_atomic_action_timer(scenario.atomic_actions(), "heat.list_output_via_API") class HeatScenarioNegativeTestCase(test.ScenarioTestCase): patch_benchmark_utils = False def test_failed_create_stack(self): self.clients("heat").stacks.create.return_value = { "stack": {"id": "test_id"} } stack = mock.Mock() resource = mock.Mock() resource.stack_status = "CREATE_FAILED" stack.manager.get.return_value = resource self.clients("heat").stacks.get.return_value = stack scenario = utils.HeatScenario(context=self.context) ex = self.assertRaises(exceptions.GetResourceErrorStatus, scenario._create_stack, "stack_name") self.assertIn("has CREATE_FAILED status", str(ex)) def test_failed_update_stack(self): stack = mock.Mock() resource = mock.Mock() resource.stack_status = "UPDATE_FAILED" stack.manager.get.return_value = resource self.clients("heat").stacks.get.return_value = stack scenario = utils.HeatScenario(context=self.context) ex = self.assertRaises(exceptions.GetResourceErrorStatus, scenario._update_stack, stack, "heat_template_version: 2013-05-23") self.assertIn("has UPDATE_FAILED status", str(ex)) rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/0000775000567000056710000000000013073420067025025 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/__init__.py0000664000567000056710000000000013073417717027134 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/test_jobs.py0000664000567000056710000002007413073417717027406 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_config import cfg from rally.plugins.openstack.scenarios.sahara import jobs from tests.unit import test CONF = cfg.CONF BASE = "rally.plugins.openstack.scenarios.sahara.jobs" class SaharaJobTestCase(test.ScenarioTestCase): def setUp(self): super(SaharaJobTestCase, self).setUp() self.context = test.get_test_context() CONF.set_override("sahara_cluster_check_interval", 0, "benchmark", enforce_type=True) CONF.set_override("sahara_job_check_interval", 0, "benchmark", enforce_type=True) @mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE) def test_create_launch_job_java(self, mock_run_job): self.clients("sahara").jobs.create.return_value = mock.MagicMock( id="42") self.context.update({ "tenant": { "sahara": { "image": "test_image", "mains": ["main_42"], "libs": ["lib_42"], "cluster": "cl_42", "input": "in_42" } } }) scenario = jobs.CreateLaunchJob(self.context) scenario.generate_random_name = mock.Mock( return_value="job_42") scenario.run(job_type="java", configs={"conf_key": "conf_val"}, job_idx=0) self.clients("sahara").jobs.create.assert_called_once_with( name="job_42", type="java", description="", mains=["main_42"], libs=["lib_42"] ) mock_run_job.assert_called_once_with( job_id="42", cluster_id="cl_42", input_id=None, output_id=None, configs={"conf_key": "conf_val"}, job_idx=0 ) @mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE) @mock.patch("%s.CreateLaunchJob._create_output_ds" % BASE, return_value=mock.MagicMock(id="out_42")) def test_create_launch_job_pig(self, mock_create_output, mock_run_job): 
self.clients("sahara").jobs.create.return_value = mock.MagicMock( id="42") self.context.update({ "tenant": { "sahara": { "image": "test_image", "mains": ["main_42"], "libs": ["lib_42"], "cluster": "cl_42", "input": "in_42" } } }) scenario = jobs.CreateLaunchJob(self.context) scenario.generate_random_name = mock.Mock(return_value="job_42") scenario.run(job_type="pig", configs={"conf_key": "conf_val"}, job_idx=0) self.clients("sahara").jobs.create.assert_called_once_with( name="job_42", type="pig", description="", mains=["main_42"], libs=["lib_42"] ) mock_run_job.assert_called_once_with( job_id="42", cluster_id="cl_42", input_id="in_42", output_id="out_42", configs={"conf_key": "conf_val"}, job_idx=0 ) @mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE) @mock.patch("%s.CreateLaunchJob.generate_random_name" % BASE, return_value="job_42") def test_create_launch_job_sequence(self, mock__random_name, mock_run_job): self.clients("sahara").jobs.create.return_value = mock.MagicMock( id="42") self.context.update({ "tenant": { "sahara": { "image": "test_image", "mains": ["main_42"], "libs": ["lib_42"], "cluster": "cl_42", "input": "in_42" } } }) scenario = jobs.CreateLaunchJobSequence(self.context) scenario.run( jobs=[ { "job_type": "java", "configs": {"conf_key": "conf_val"} }, { "job_type": "java", "configs": {"conf_key2": "conf_val2"} }]) jobs_create_call = mock.call(name="job_42", type="java", description="", mains=["main_42"], libs=["lib_42"]) self.clients("sahara").jobs.create.assert_has_calls( [jobs_create_call, jobs_create_call]) mock_run_job.assert_has_calls([ mock.call(job_id="42", cluster_id="cl_42", input_id=None, output_id=None, configs={"conf_key": "conf_val"}, job_idx=0), mock.call(job_id="42", cluster_id="cl_42", input_id=None, output_id=None, configs={"conf_key2": "conf_val2"}, job_idx=1) ]) @mock.patch("%s.CreateLaunchJob.generate_random_name" % BASE, return_value="job_42") @mock.patch("%s.CreateLaunchJobSequenceWithScaling" "._scale_cluster" % BASE) 
@mock.patch("%s.CreateLaunchJob._run_job_execution" % BASE) def test_create_launch_job_sequence_with_scaling( self, mock_run_job, mock_create_launch_job_sequence_with_scaling__scale_cluster, mock_create_launch_job_generate_random_name ): self.clients("sahara").jobs.create.return_value = mock.MagicMock( id="42") self.clients("sahara").clusters.get.return_value = mock.MagicMock( id="cl_42", status="active") self.context.update({ "tenant": { "sahara": { "image": "test_image", "mains": ["main_42"], "libs": ["lib_42"], "cluster": "cl_42", "input": "in_42" } } }) scenario = jobs.CreateLaunchJobSequenceWithScaling(self.context) scenario.run( jobs=[ { "job_type": "java", "configs": {"conf_key": "conf_val"} }, { "job_type": "java", "configs": {"conf_key2": "conf_val2"} }], deltas=[1, -1]) jobs_create_call = mock.call(name="job_42", type="java", description="", mains=["main_42"], libs=["lib_42"]) self.clients("sahara").jobs.create.assert_has_calls( [jobs_create_call, jobs_create_call]) je_0 = mock.call(job_id="42", cluster_id="cl_42", input_id=None, output_id=None, configs={"conf_key": "conf_val"}, job_idx=0) je_1 = mock.call(job_id="42", cluster_id="cl_42", input_id=None, output_id=None, configs={"conf_key2": "conf_val2"}, job_idx=1) mock_run_job.assert_has_calls([je_0, je_1, je_0, je_1, je_0, je_1]) rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/test_clusters.py0000664000567000056710000001422713073417717030320 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.sahara import clusters from tests.unit import test BASE = "rally.plugins.openstack.scenarios.sahara.clusters" class SaharaClustersTestCase(test.ScenarioTestCase): @mock.patch("%s.CreateAndDeleteCluster._delete_cluster" % BASE) @mock.patch("%s.CreateAndDeleteCluster._launch_cluster" % BASE, return_value=mock.MagicMock(id=42)) def test_create_and_delete_cluster(self, mock_launch_cluster, mock_delete_cluster): scenario = clusters.CreateAndDeleteCluster(self.context) scenario.context = { "tenant": { "sahara": { "image": "test_image", } } } scenario.run(master_flavor="test_flavor_m", worker_flavor="test_flavor_w", workers_count=5, plugin_name="test_plugin", hadoop_version="test_version") mock_launch_cluster.assert_called_once_with( flavor_id=None, master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", image_id="test_image", workers_count=5, plugin_name="test_plugin", hadoop_version="test_version", floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, use_autoconfig=True) mock_delete_cluster.assert_called_once_with( mock_launch_cluster.return_value) @mock.patch("%s.CreateAndDeleteCluster._delete_cluster" % BASE) @mock.patch("%s.CreateAndDeleteCluster._launch_cluster" % BASE, return_value=mock.MagicMock(id=42)) def test_create_and_delete_cluster_deprecated_flavor(self, mock_launch_cluster, mock_delete_cluster): scenario = clusters.CreateAndDeleteCluster(self.context) scenario.context = { "tenant": { "sahara": { "image": "test_image", } } } scenario.run(flavor="test_deprecated_arg", master_flavor=None, worker_flavor=None, workers_count=5, plugin_name="test_plugin", hadoop_version="test_version") mock_launch_cluster.assert_called_once_with( 
flavor_id="test_deprecated_arg", master_flavor_id=None, worker_flavor_id=None, image_id="test_image", workers_count=5, plugin_name="test_plugin", hadoop_version="test_version", floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, use_autoconfig=True) mock_delete_cluster.assert_called_once_with( mock_launch_cluster.return_value) @mock.patch("%s.CreateScaleDeleteCluster._delete_cluster" % BASE) @mock.patch("%s.CreateScaleDeleteCluster._scale_cluster" % BASE) @mock.patch("%s.CreateScaleDeleteCluster._launch_cluster" % BASE, return_value=mock.MagicMock(id=42)) def test_create_scale_delete_cluster(self, mock_launch_cluster, mock_scale_cluster, mock_delete_cluster): self.clients("sahara").clusters.get.return_value = mock.MagicMock( id=42, status="active" ) scenario = clusters.CreateScaleDeleteCluster(self.context) scenario.context = { "tenant": { "sahara": { "image": "test_image", } } } scenario.run(master_flavor="test_flavor_m", worker_flavor="test_flavor_w", workers_count=5, deltas=[1, -1], plugin_name="test_plugin", hadoop_version="test_version") mock_launch_cluster.assert_called_once_with( flavor_id=None, master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", image_id="test_image", workers_count=5, plugin_name="test_plugin", hadoop_version="test_version", floating_ip_pool=None, volumes_per_node=None, volumes_size=None, auto_security_group=None, security_groups=None, node_configs=None, cluster_configs=None, enable_anti_affinity=False, enable_proxy=False, use_autoconfig=True) mock_scale_cluster.assert_has_calls([ mock.call( self.clients("sahara").clusters.get.return_value, 1), mock.call( self.clients("sahara").clusters.get.return_value, -1), ]) mock_delete_cluster.assert_called_once_with( self.clients("sahara").clusters.get.return_value) 
rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/test_node_group_templates.py0000664000567000056710000000672513073417717032677 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.scenarios.sahara import (node_group_templates as ngts) from tests.unit import test BASE = "rally.plugins.openstack.scenarios.sahara.node_group_templates" class SaharaNodeGroupTemplatesTestCase(test.TestCase): def setUp(self): super(SaharaNodeGroupTemplatesTestCase, self).setUp() self.context = test.get_test_context() @mock.patch("%s.CreateAndListNodeGroupTemplates" "._list_node_group_templates" % BASE) @mock.patch("%s.CreateAndListNodeGroupTemplates" "._create_master_node_group_template" % BASE) @mock.patch("%s.CreateAndListNodeGroupTemplates" "._create_worker_node_group_template" % BASE) def test_create_and_list_node_group_templates(self, mock_create_worker, mock_create_master, mock_list_group): ngts.CreateAndListNodeGroupTemplates(self.context).run( "test_flavor", "test_plugin", "test_version") mock_create_master.assert_called_once_with( flavor_id="test_flavor", plugin_name="test_plugin", hadoop_version="test_version", use_autoconfig=True) mock_create_worker.assert_called_once_with( flavor_id="test_flavor", plugin_name="test_plugin", hadoop_version="test_version", use_autoconfig=True) mock_list_group.assert_called_once_with() 
@mock.patch("%s.CreateDeleteNodeGroupTemplates" "._delete_node_group_template" % BASE) @mock.patch("%s.CreateDeleteNodeGroupTemplates" "._create_master_node_group_template" % BASE) @mock.patch("%s.CreateDeleteNodeGroupTemplates" "._create_worker_node_group_template" % BASE) def test_create_delete_node_group_templates(self, mock_create_worker, mock_create_master, mock_delete_group): ngts.CreateDeleteNodeGroupTemplates(self.context).run( "test_flavor", "test_plugin", "test_version") mock_create_master.assert_called_once_with( flavor_id="test_flavor", plugin_name="test_plugin", hadoop_version="test_version", use_autoconfig=True) mock_create_worker.assert_called_once_with( flavor_id="test_flavor", plugin_name="test_plugin", hadoop_version="test_version", use_autoconfig=True) mock_delete_group.assert_has_calls(calls=[ mock.call(mock_create_master.return_value), mock.call(mock_create_worker.return_value)]) rally-0.9.1/tests/unit/plugins/openstack/scenarios/sahara/test_utils.py0000664000567000056710000004605113073417717027614 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from oslo_config import cfg from oslo_utils import uuidutils from saharaclient.api import base as sahara_base from rally import consts from rally import exceptions from rally.plugins.openstack.scenarios.sahara import utils from tests.unit import test CONF = cfg.CONF SAHARA_UTILS = "rally.plugins.openstack.scenarios.sahara.utils" class SaharaScenarioTestCase(test.ScenarioTestCase): # NOTE(stpierre): the Sahara utils generally do funny stuff with # wait_for() calls -- frequently the is_ready and # update_resource arguments are functions defined in the Sahara # utils themselves instead of the more standard resource_is() and # get_from_manager() calls. As a result, the tests below do more # integrated/functional testing of wait_for() calls, and we can't # just mock out wait_for and friends the way we usually do. patch_benchmark_utils = False def setUp(self): super(SaharaScenarioTestCase, self).setUp() CONF.set_override("sahara_cluster_check_interval", 0, "benchmark", enforce_type=True) CONF.set_override("sahara_job_check_interval", 0, "benchmark", enforce_type=True) def test_list_node_group_templates(self): ngts = [] self.clients("sahara").node_group_templates.list.return_value = ngts scenario = utils.SaharaScenario(self.context) return_ngts_list = scenario._list_node_group_templates() self.assertEqual(ngts, return_ngts_list) self._test_atomic_action_timer(scenario.atomic_actions(), "sahara.list_node_group_templates") @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="random_name") @mock.patch(SAHARA_UTILS + ".sahara_consts") def test_create_node_group_templates( self, mock_sahara_consts, mock_generate_random_name): scenario = utils.SaharaScenario(self.context) mock_processes = { "test_plugin": { "test_version": { "master": ["p1"], "worker": ["p2"] } } } mock_sahara_consts.NODE_PROCESSES = mock_processes scenario._create_master_node_group_template( flavor_id="test_flavor", plugin_name="test_plugin", 
hadoop_version="test_version", use_autoconfig=True ) scenario._create_worker_node_group_template( flavor_id="test_flavor", plugin_name="test_plugin", hadoop_version="test_version", use_autoconfig=True ) create_calls = [ mock.call( name="random_name", plugin_name="test_plugin", hadoop_version="test_version", flavor_id="test_flavor", node_processes=["p1"], use_autoconfig=True), mock.call( name="random_name", plugin_name="test_plugin", hadoop_version="test_version", flavor_id="test_flavor", node_processes=["p2"], use_autoconfig=True )] self.clients("sahara").node_group_templates.create.assert_has_calls( create_calls) self._test_atomic_action_timer( scenario.atomic_actions(), "sahara.create_master_node_group_template") self._test_atomic_action_timer( scenario.atomic_actions(), "sahara.create_worker_node_group_template") def test_delete_node_group_templates(self): scenario = utils.SaharaScenario(self.context) ng = mock.MagicMock(id=42) scenario._delete_node_group_template(ng) delete_mock = self.clients("sahara").node_group_templates.delete delete_mock.assert_called_once_with(42) self._test_atomic_action_timer(scenario.atomic_actions(), "sahara.delete_node_group_template") @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="random_name") @mock.patch(SAHARA_UTILS + ".sahara_consts") def test_launch_cluster(self, mock_sahara_consts, mock_generate_random_name): self.context.update({ "tenant": { "networks": [ { "id": "test_neutron_id", "router_id": "test_router_id" } ] } }) self.clients("services").values.return_value = [ consts.Service.NEUTRON ] scenario = utils.SaharaScenario(context=self.context) mock_processes = { "test_plugin": { "test_version": { "master": ["p1"], "worker": ["p2"] } } } mock_configs = { "test_plugin": { "test_version": { "target": "HDFS", "config_name": "dfs.replication" } } } floating_ip_pool_uuid = uuidutils.generate_uuid() node_groups = [ { "name": "master-ng", "flavor_id": "test_flavor_m", "node_processes": ["p1"], 
"floating_ip_pool": floating_ip_pool_uuid, "count": 1, "auto_security_group": True, "security_groups": ["g1", "g2"], "node_configs": {"HDFS": {"local_config": "local_value"}}, "use_autoconfig": True, }, { "name": "worker-ng", "flavor_id": "test_flavor_w", "node_processes": ["p2"], "floating_ip_pool": floating_ip_pool_uuid, "volumes_per_node": 5, "volumes_size": 10, "count": 42, "auto_security_group": True, "security_groups": ["g1", "g2"], "node_configs": {"HDFS": {"local_config": "local_value"}}, "use_autoconfig": True, } ] mock_sahara_consts.NODE_PROCESSES = mock_processes mock_sahara_consts.REPLICATION_CONFIGS = mock_configs self.clients("sahara").clusters.create.return_value.id = ( "test_cluster_id") self.clients("sahara").clusters.get.return_value.status = ( "active") scenario._launch_cluster( plugin_name="test_plugin", hadoop_version="test_version", master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", image_id="test_image", floating_ip_pool=floating_ip_pool_uuid, volumes_per_node=5, volumes_size=10, auto_security_group=True, security_groups=["g1", "g2"], workers_count=42, node_configs={"HDFS": {"local_config": "local_value"}}, use_autoconfig=True ) self.clients("sahara").clusters.create.assert_called_once_with( name="random_name", plugin_name="test_plugin", hadoop_version="test_version", node_groups=node_groups, default_image_id="test_image", cluster_configs={"HDFS": {"dfs.replication": 3}}, net_id="test_neutron_id", anti_affinity=None, use_autoconfig=True ) self._test_atomic_action_timer(scenario.atomic_actions(), "sahara.launch_cluster") @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="random_name") @mock.patch(SAHARA_UTILS + ".sahara_consts") def test_launch_cluster_with_proxy(self, mock_sahara_consts, mock_generate_random_name): context = { "tenant": { "networks": [ { "id": "test_neutron_id", "router_id": "test_router_id" } ] } } self.clients("services").values.return_value = [ consts.Service.NEUTRON ] 
scenario = utils.SaharaScenario(context=context) mock_processes = { "test_plugin": { "test_version": { "master": ["p1"], "worker": ["p2"] } } } mock_configs = { "test_plugin": { "test_version": { "target": "HDFS", "config_name": "dfs.replication" } } } floating_ip_pool_uuid = uuidutils.generate_uuid() node_groups = [ { "name": "master-ng", "flavor_id": "test_flavor_m", "node_processes": ["p1"], "floating_ip_pool": floating_ip_pool_uuid, "count": 1, "auto_security_group": True, "security_groups": ["g1", "g2"], "node_configs": {"HDFS": {"local_config": "local_value"}}, "is_proxy_gateway": True, "use_autoconfig": True, }, { "name": "worker-ng", "flavor_id": "test_flavor_w", "node_processes": ["p2"], "volumes_per_node": 5, "volumes_size": 10, "count": 40, "auto_security_group": True, "security_groups": ["g1", "g2"], "node_configs": {"HDFS": {"local_config": "local_value"}}, "use_autoconfig": True, }, { "name": "proxy-ng", "flavor_id": "test_flavor_w", "node_processes": ["p2"], "floating_ip_pool": floating_ip_pool_uuid, "volumes_per_node": 5, "volumes_size": 10, "count": 2, "auto_security_group": True, "security_groups": ["g1", "g2"], "node_configs": {"HDFS": {"local_config": "local_value"}}, "is_proxy_gateway": True, "use_autoconfig": True, } ] mock_sahara_consts.NODE_PROCESSES = mock_processes mock_sahara_consts.REPLICATION_CONFIGS = mock_configs self.clients("sahara").clusters.create.return_value = mock.MagicMock( id="test_cluster_id") self.clients("sahara").clusters.get.return_value = mock.MagicMock( status="active") scenario._launch_cluster( plugin_name="test_plugin", hadoop_version="test_version", master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", image_id="test_image", floating_ip_pool=floating_ip_pool_uuid, volumes_per_node=5, volumes_size=10, auto_security_group=True, security_groups=["g1", "g2"], workers_count=42, node_configs={"HDFS": {"local_config": "local_value"}}, enable_proxy=True, use_autoconfig=True ) 
self.clients("sahara").clusters.create.assert_called_once_with( name="random_name", plugin_name="test_plugin", hadoop_version="test_version", node_groups=node_groups, default_image_id="test_image", cluster_configs={"HDFS": {"dfs.replication": 3}}, net_id="test_neutron_id", anti_affinity=None, use_autoconfig=True ) self._test_atomic_action_timer(scenario.atomic_actions(), "sahara.launch_cluster") @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="random_name") @mock.patch(SAHARA_UTILS + ".sahara_consts") def test_launch_cluster_error(self, mock_sahara_consts, mock_generate_random_name): scenario = utils.SaharaScenario(self.context) mock_processes = { "test_plugin": { "test_version": { "master": ["p1"], "worker": ["p2"] } } } mock_configs = { "test_plugin": { "test_version": { "target": "HDFS", "config_name": "dfs.replication" } } } mock_sahara_consts.NODE_PROCESSES = mock_processes mock_sahara_consts.REPLICATION_CONFIGS = mock_configs self.clients("sahara").clusters.create.return_value = mock.MagicMock( id="test_cluster_id") self.clients("sahara").clusters.get.return_value = mock.MagicMock( status="error") self.assertRaises(exceptions.GetResourceErrorStatus, scenario._launch_cluster, plugin_name="test_plugin", hadoop_version="test_version", master_flavor_id="test_flavor_m", worker_flavor_id="test_flavor_w", image_id="test_image", floating_ip_pool="test_pool", volumes_per_node=5, volumes_size=10, workers_count=42, node_configs={"HDFS": {"local_config": "local_value"}}) def test_scale_cluster(self): scenario = utils.SaharaScenario(self.context) cluster = mock.MagicMock(id=42, node_groups=[{ "name": "random_master", "count": 1 }, { "name": "random_worker", "count": 41 }]) self.clients("sahara").clusters.get.return_value = mock.MagicMock( id=42, status="active") expected_scale_object = { "resize_node_groups": [{ "name": "random_worker", "count": 42 }] } scenario._scale_cluster(cluster, 1) 
self.clients("sahara").clusters.scale.assert_called_once_with( 42, expected_scale_object) def test_delete_cluster(self): scenario = utils.SaharaScenario(self.context) cluster = mock.MagicMock(id=42) self.clients("sahara").clusters.get.side_effect = [ cluster, sahara_base.APIException() ] scenario._delete_cluster(cluster) delete_mock = self.clients("sahara").clusters.delete delete_mock.assert_called_once_with(42) cl_get_expected = mock.call(42) self.clients("sahara").clusters.get.assert_has_calls([cl_get_expected, cl_get_expected]) self._test_atomic_action_timer(scenario.atomic_actions(), "sahara.delete_cluster") @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="42") def test_create_output_ds(self, mock_generate_random_name): self.context.update({ "sahara": { "output_conf": { "output_type": "hdfs", "output_url_prefix": "hdfs://test_out/" } } }) scenario = utils.SaharaScenario(self.context) scenario._create_output_ds() self.clients("sahara").data_sources.create.assert_called_once_with( name="42", description="", data_source_type="hdfs", url="hdfs://test_out/42" ) @mock.patch(SAHARA_UTILS + ".SaharaScenario.generate_random_name", return_value="42") def test_create_output_ds_swift(self, mock_generate_random_name): self.context.update({ "sahara": { "output_conf": { "output_type": "swift", "output_url_prefix": "swift://test_out/" } } }) scenario = utils.SaharaScenario(self.context) self.assertRaises(exceptions.RallyException, scenario._create_output_ds) def test_run_job_execution(self): self.clients("sahara").job_executions.get.side_effect = [ mock.MagicMock(info={"status": "pending"}, id="42"), mock.MagicMock(info={"status": "SUCCESS"}, id="42")] self.clients("sahara").job_executions.create.return_value = ( mock.MagicMock(id="42")) scenario = utils.SaharaScenario(self.context) scenario._run_job_execution(job_id="test_job_id", cluster_id="test_cluster_id", input_id="test_input_id", output_id="test_output_id", configs={"k": "v"}, job_idx=0) 
self.clients("sahara").job_executions.create.assert_called_once_with( job_id="test_job_id", cluster_id="test_cluster_id", input_id="test_input_id", output_id="test_output_id", configs={"k": "v"} ) je_get_expected = mock.call("42") self.clients("sahara").job_executions.get.assert_has_calls( [je_get_expected, je_get_expected] ) def test_run_job_execution_fail(self): self.clients("sahara").job_executions.get.side_effect = [ mock.MagicMock(info={"status": "pending"}, id="42"), mock.MagicMock(info={"status": "killed"}, id="42")] self.clients("sahara").job_executions.create.return_value = ( mock.MagicMock(id="42")) scenario = utils.SaharaScenario(self.context) self.assertRaises(exceptions.RallyException, scenario._run_job_execution, job_id="test_job_id", cluster_id="test_cluster_id", input_id="test_input_id", output_id="test_output_id", configs={"k": "v"}, job_idx=0) self.clients("sahara").job_executions.create.assert_called_once_with( job_id="test_job_id", cluster_id="test_cluster_id", input_id="test_input_id", output_id="test_output_id", configs={"k": "v"} ) rally-0.9.1/tests/unit/plugins/openstack/scenarios/vm/0000775000567000056710000000000013073420067024210 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/vm/__init__.py0000664000567000056710000000000013073417717026317 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/vm/test_vmtasks.py0000664000567000056710000003166113073417717027330 0ustar jenkinsjenkins00000000000000# Copyright 2013: Rackspace UK # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally import exceptions from rally.plugins.openstack.scenarios.vm import vmtasks from tests.unit import test BASE = "rally.plugins.openstack.scenarios.vm.vmtasks" @ddt.ddt class VMTasksTestCase(test.ScenarioTestCase): def setUp(self): super(VMTasksTestCase, self).setUp() self.context.update({"user": {"keypair": {"name": "keypair_name"}, "credential": mock.MagicMock()}}) def create_env(self, scenario): self.ip = {"id": "foo_id", "ip": "foo_ip", "is_floating": True} scenario._boot_server_with_fip = mock.Mock( return_value=("foo_server", self.ip)) scenario._wait_for_ping = mock.Mock() scenario._delete_server_with_fip = mock.Mock() scenario._create_volume = mock.Mock( return_value=mock.Mock(id="foo_volume")) scenario._run_command = mock.MagicMock( return_value=(0, "{\"foo\": 42}", "foo_err")) scenario.add_output = mock.Mock() return scenario def test_boot_runcommand_delete(self): scenario = self.create_env(vmtasks.BootRuncommandDelete(self.context)) scenario._run_command = mock.MagicMock( return_value=(0, "{\"foo\": 42}", "foo_err")) scenario.run("foo_flavor", image="foo_image", command={"script_file": "foo_script", "interpreter": "foo_interpreter"}, username="foo_username", password="foo_password", use_floating_ip="use_fip", floating_network="ext_network", force_delete="foo_force", volume_args={"size": 16}, foo_arg="foo_value") scenario._create_volume.assert_called_once_with(16, imageRef=None) scenario._boot_server_with_fip.assert_called_once_with( "foo_image", "foo_flavor", key_name="keypair_name", use_floating_ip="use_fip", 
floating_network="ext_network", block_device_mapping={"vdrally": "foo_volume:::1"}, foo_arg="foo_value") scenario._wait_for_ping.assert_called_once_with("foo_ip") scenario._run_command.assert_called_once_with( "foo_ip", 22, "foo_username", "foo_password", command={"script_file": "foo_script", "interpreter": "foo_interpreter"}) scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete="foo_force") scenario.add_output.assert_called_once_with( complete={"chart_plugin": "TextArea", "data": [ "StdErr: foo_err", "StdOut:", "{\"foo\": 42}"], "title": "Script Output"}) @ddt.data( {"output": (0, "", ""), "expected": [{"complete": {"chart_plugin": "TextArea", "data": [ "StdErr: (none)", "StdOut:", ""], "title": "Script Output"}}]}, {"output": (1, "{\"foo\": 42}", ""), "raises": exceptions.ScriptError}, {"output": ("", 1, ""), "raises": TypeError}, {"output": (0, "{\"foo\": 42}", ""), "expected": [{"complete": {"chart_plugin": "TextArea", "data": [ "StdErr: (none)", "StdOut:", "{\"foo\": 42}"], "title": "Script Output"}}]}, {"output": (0, "{\"additive\": [1, 2]}", ""), "expected": [{"complete": {"chart_plugin": "TextArea", "data": [ "StdErr: (none)", "StdOut:", "{\"additive\": [1, 2]}"], "title": "Script Output"}}]}, {"output": (0, "{\"complete\": [3, 4]}", ""), "expected": [{"complete": {"chart_plugin": "TextArea", "data": [ "StdErr: (none)", "StdOut:", "{\"complete\": [3, 4]}"], "title": "Script Output"}}]}, {"output": (0, "{\"additive\": [1, 2], \"complete\": [3, 4]}", ""), "expected": [{"additive": 1}, {"additive": 2}, {"complete": 3}, {"complete": 4}]} ) @ddt.unpack def test_boot_runcommand_delete_add_output(self, output, expected=None, raises=None): scenario = self.create_env(vmtasks.BootRuncommandDelete(self.context)) scenario._run_command.return_value = output kwargs = {"flavor": "foo_flavor", "image": "foo_image", "command": {"remote_path": "foo"}, "username": "foo_username", "password": "foo_password", "use_floating_ip": 
"use_fip", "floating_network": "ext_network", "force_delete": "foo_force", "volume_args": {"size": 16}, "foo_arg": "foo_value"} if raises: self.assertRaises(raises, scenario.run, **kwargs) self.assertFalse(scenario.add_output.called) else: scenario.run(**kwargs) calls = [mock.call(**kw) for kw in expected] scenario.add_output.assert_has_calls(calls, any_order=True) scenario._create_volume.assert_called_once_with(16, imageRef=None) scenario._boot_server_with_fip.assert_called_once_with( "foo_image", "foo_flavor", key_name="keypair_name", use_floating_ip="use_fip", floating_network="ext_network", block_device_mapping={"vdrally": "foo_volume:::1"}, foo_arg="foo_value") scenario._run_command.assert_called_once_with( "foo_ip", 22, "foo_username", "foo_password", command={"remote_path": "foo"}) scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete="foo_force") def test_boot_runcommand_delete_command_timeouts(self): scenario = self.create_env(vmtasks.BootRuncommandDelete(self.context)) scenario._run_command.side_effect = exceptions.SSHTimeout() self.assertRaises(exceptions.SSHTimeout, scenario.run, "foo_flavor", "foo_image", "foo_interpreter", "foo_script", "foo_username") scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete=False) self.assertFalse(scenario.add_output.called) def test_boot_runcommand_delete_ping_wait_timeouts(self): scenario = self.create_env(vmtasks.BootRuncommandDelete(self.context)) scenario._wait_for_ping.side_effect = exceptions.TimeoutException( resource_type="foo_resource", resource_name="foo_name", resource_id="foo_id", desired_status="foo_desired_status", resource_status="foo_resource_status") exc = self.assertRaises(exceptions.TimeoutException, scenario.run, "foo_image", "foo_flavor", "foo_interpreter", "foo_script", "foo_username", wait_for_ping=True) self.assertEqual(exc.kwargs["resource_type"], "foo_resource") self.assertEqual(exc.kwargs["resource_name"], 
"foo_name") self.assertEqual(exc.kwargs["resource_id"], "foo_id") self.assertEqual(exc.kwargs["desired_status"], "foo_desired_status") self.assertEqual(exc.kwargs["resource_status"], "foo_resource_status") scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete=False) self.assertFalse(scenario.add_output.called) @mock.patch("%s.json" % BASE) def test_boot_runcommand_delete_json_fails(self, mock_json): scenario = self.create_env(vmtasks.BootRuncommandDelete(self.context)) mock_json.loads.side_effect = ValueError() scenario.run("foo_image", "foo_flavor", "foo_interpreter", "foo_script", "foo_username") scenario.add_output.assert_called_once_with(complete={ "chart_plugin": "TextArea", "data": ["StdErr: foo_err", "StdOut:", "{\"foo\": 42}"], "title": "Script Output"}) scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete=False) def test_boot_runcommand_delete_custom_image(self): context = { "user": { "tenant_id": "tenant_id", "keypair": {"name": "foo_keypair_name"}, "credential": mock.Mock() }, "tenant": { "custom_image": {"id": "image_id"} } } scenario = self.create_env(vmtasks.BootRuncommandDelete(context)) scenario._run_command = mock.MagicMock( return_value=(0, "{\"foo\": 42}", "foo_err")) scenario.run("foo_flavor", command={"script_file": "foo_script", "interpreter": "foo_interpreter"}, username="foo_username", password="foo_password", use_floating_ip="use_fip", floating_network="ext_network", force_delete="foo_force", volume_args={"size": 16}, foo_arg="foo_value") scenario._create_volume.assert_called_once_with(16, imageRef=None) scenario._boot_server_with_fip.assert_called_once_with( "image_id", "foo_flavor", key_name="foo_keypair_name", use_floating_ip="use_fip", floating_network="ext_network", block_device_mapping={"vdrally": "foo_volume:::1"}, foo_arg="foo_value") scenario._wait_for_ping.assert_called_once_with("foo_ip") scenario._run_command.assert_called_once_with( "foo_ip", 22, 
"foo_username", "foo_password", command={"script_file": "foo_script", "interpreter": "foo_interpreter"}) scenario._delete_server_with_fip.assert_called_once_with( "foo_server", self.ip, force_delete="foo_force") scenario.add_output.assert_called_once_with( complete={"chart_plugin": "TextArea", "data": [ "StdErr: foo_err", "StdOut:", "{\"foo\": 42}"], "title": "Script Output"}) @mock.patch("%s.heat" % BASE) @mock.patch("%s.sshutils" % BASE) def test_runcommand_heat(self, mock_sshutils, mock_heat): fake_ssh = mock.Mock() fake_ssh.execute.return_value = [0, "key:val", ""] mock_sshutils.SSH.return_value = fake_ssh fake_stack = mock.Mock() fake_stack.stack.outputs = [{"output_key": "gate_node", "output_value": "ok"}] mock_heat.main.Stack.return_value = fake_stack context = { "user": {"keypair": {"name": "name", "private": "pk"}, "credential": "ok"}, "tenant": {"networks": [{"router_id": "1"}]} } scenario = vmtasks.RuncommandHeat(context) scenario.generate_random_name = mock.Mock(return_value="name") scenario.add_output = mock.Mock() workload = {"username": "admin", "resource": ["foo", "bar"]} scenario.run(workload, "template", {"file_key": "file_value"}, {"param_key": "param_value"}) expected = {"chart_plugin": "Table", "data": {"rows": [["key", "val"]], "cols": ["key", "value"]}, "description": "Data generated by workload", "title": "Workload summary"} scenario.add_output.assert_called_once_with(complete=expected) rally-0.9.1/tests/unit/plugins/openstack/scenarios/vm/test_utils.py0000664000567000056710000003024013073417717026770 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import subprocess import mock import netaddr from oslo_config import cfg from rally.plugins.openstack.scenarios.vm import utils from tests.unit import test VMTASKS_UTILS = "rally.plugins.openstack.scenarios.vm.utils" CONF = cfg.CONF class VMScenarioTestCase(test.ScenarioTestCase): @mock.patch("%s.open" % VMTASKS_UTILS, side_effect=mock.mock_open(), create=True) def test__run_command_over_ssh_script_file(self, mock_open): mock_ssh = mock.MagicMock() vm_scenario = utils.VMScenario(self.context) vm_scenario._run_command_over_ssh( mock_ssh, { "script_file": "foobar", "interpreter": ["interpreter", "interpreter_arg"], "command_args": ["arg1", "arg2"] } ) mock_ssh.execute.assert_called_once_with( ["interpreter", "interpreter_arg", "arg1", "arg2"], stdin=mock_open.side_effect()) mock_open.assert_called_once_with("foobar", "rb") @mock.patch("%s.six.moves.StringIO" % VMTASKS_UTILS) def test__run_command_over_ssh_script_inline(self, mock_string_io): mock_ssh = mock.MagicMock() vm_scenario = utils.VMScenario(self.context) vm_scenario._run_command_over_ssh( mock_ssh, { "script_inline": "foobar", "interpreter": ["interpreter", "interpreter_arg"], "command_args": ["arg1", "arg2"] } ) mock_ssh.execute.assert_called_once_with( ["interpreter", "interpreter_arg", "arg1", "arg2"], stdin=mock_string_io.return_value) mock_string_io.assert_called_once_with("foobar") def test__run_command_over_ssh_remote_path(self): mock_ssh = mock.MagicMock() vm_scenario = utils.VMScenario(self.context) vm_scenario._run_command_over_ssh( mock_ssh, { "remote_path": ["foo", "bar"], "command_args": ["arg1", 
"arg2"] } ) mock_ssh.execute.assert_called_once_with( ["foo", "bar", "arg1", "arg2"], stdin=None) def test__run_command_over_ssh_remote_path_copy(self): mock_ssh = mock.MagicMock() vm_scenario = utils.VMScenario(self.context) vm_scenario._run_command_over_ssh( mock_ssh, { "remote_path": ["foo", "bar"], "local_path": "/bin/false", "command_args": ["arg1", "arg2"] } ) mock_ssh.put_file.assert_called_once_with( "/bin/false", "bar", mode=0o755 ) mock_ssh.execute.assert_called_once_with( ["foo", "bar", "arg1", "arg2"], stdin=None) def test__wait_for_ssh(self): ssh = mock.MagicMock() vm_scenario = utils.VMScenario(self.context) vm_scenario._wait_for_ssh(ssh) ssh.wait.assert_called_once_with(120, 1) def test__wait_for_ping(self): vm_scenario = utils.VMScenario(self.context) vm_scenario._ping_ip_address = mock.Mock(return_value=True) vm_scenario._wait_for_ping(netaddr.IPAddress("1.2.3.4")) self.mock_wait_for_status.mock.assert_called_once_with( utils.Host("1.2.3.4"), ready_statuses=[utils.Host.ICMP_UP_STATUS], update_resource=utils.Host.update_status, timeout=CONF.benchmark.vm_ping_timeout, check_interval=CONF.benchmark.vm_ping_poll_interval) @mock.patch(VMTASKS_UTILS + ".VMScenario._run_command_over_ssh") @mock.patch("rally.common.sshutils.SSH") def test__run_command(self, mock_sshutils_ssh, mock_vm_scenario__run_command_over_ssh): vm_scenario = utils.VMScenario(self.context) vm_scenario.context = {"user": {"keypair": {"private": "ssh"}}} vm_scenario._run_command("1.2.3.4", 22, "username", "password", command={"script_file": "foo", "interpreter": "bar"}) mock_sshutils_ssh.assert_called_once_with( "username", "1.2.3.4", port=22, pkey="ssh", password="password") mock_sshutils_ssh.return_value.wait.assert_called_once_with(120, 1) mock_vm_scenario__run_command_over_ssh.assert_called_once_with( mock_sshutils_ssh.return_value, {"script_file": "foo", "interpreter": "bar"}) def get_scenario(self): server = mock.Mock( networks={"foo_net": "foo_data"}, addresses={"foo_net": 
[{"addr": "foo_ip"}]}, tenant_id="foo_tenant" ) scenario = utils.VMScenario(self.context) scenario._boot_server = mock.Mock(return_value=server) scenario._delete_server = mock.Mock() scenario._associate_floating_ip = mock.Mock() scenario._wait_for_ping = mock.Mock() return scenario, server def test__boot_server_with_fip_without_networks(self): scenario, server = self.get_scenario() server.networks = {} self.assertRaises(RuntimeError, scenario._boot_server_with_fip, "foo_image", "foo_flavor", foo_arg="foo_value") scenario._boot_server.assert_called_once_with( "foo_image", "foo_flavor", foo_arg="foo_value", auto_assign_nic=True) def test__boot_server_with_fixed_ip(self): scenario, server = self.get_scenario() scenario._attach_floating_ip = mock.Mock() server, ip = scenario._boot_server_with_fip( "foo_image", "foo_flavor", floating_network="ext_network", use_floating_ip=False, foo_arg="foo_value") self.assertEqual(ip, {"ip": "foo_ip", "id": None, "is_floating": False}) scenario._boot_server.assert_called_once_with( "foo_image", "foo_flavor", auto_assign_nic=True, foo_arg="foo_value") self.assertEqual(scenario._attach_floating_ip.mock_calls, []) def test__boot_server_with_fip(self): scenario, server = self.get_scenario() scenario._attach_floating_ip = mock.Mock( return_value={"id": "foo_id", "ip": "foo_ip"}) server, ip = scenario._boot_server_with_fip( "foo_image", "foo_flavor", floating_network="ext_network", use_floating_ip=True, foo_arg="foo_value") self.assertEqual(ip, {"ip": "foo_ip", "id": "foo_id", "is_floating": True}) scenario._boot_server.assert_called_once_with( "foo_image", "foo_flavor", auto_assign_nic=True, foo_arg="foo_value") scenario._attach_floating_ip.assert_called_once_with( server, "ext_network") def test__delete_server_with_fixed_ip(self): ip = {"ip": "foo_ip", "id": None, "is_floating": False} scenario, server = self.get_scenario() scenario._delete_floating_ip = mock.Mock() scenario._delete_server_with_fip(server, ip, force_delete=True) 
self.assertEqual(scenario._delete_floating_ip.mock_calls, []) scenario._delete_server.assert_called_once_with(server, force=True) def test__delete_server_with_fip(self): fip = {"ip": "foo_ip", "id": "foo_id", "is_floating": True} scenario, server = self.get_scenario() scenario._delete_floating_ip = mock.Mock() scenario._delete_server_with_fip(server, fip, force_delete=True) scenario._delete_floating_ip.assert_called_once_with(server, fip) scenario._delete_server.assert_called_once_with(server, force=True) @mock.patch(VMTASKS_UTILS + ".network_wrapper.wrap") def test__attach_floating_ip(self, mock_wrap): scenario, server = self.get_scenario() netwrap = mock_wrap.return_value netwrap.create_floating_ip.return_value = { "id": "foo_id", "ip": "foo_ip"} scenario._attach_floating_ip( server, floating_network="bar_network") mock_wrap.assert_called_once_with(scenario.clients, scenario) netwrap.create_floating_ip.assert_called_once_with( ext_network="bar_network", tenant_id="foo_tenant", fixed_ip="foo_ip") scenario._associate_floating_ip.assert_called_once_with( server, "foo_ip", fixed_address="foo_ip", atomic_action=False) @mock.patch(VMTASKS_UTILS + ".network_wrapper.wrap") def test__delete_floating_ip(self, mock_wrap): scenario, server = self.get_scenario() _check_addr = mock.Mock(return_value=True) scenario.check_ip_address = mock.Mock(return_value=_check_addr) scenario._dissociate_floating_ip = mock.Mock() scenario._delete_floating_ip( server, fip={"id": "foo_id", "ip": "foo_ip"}) scenario.check_ip_address.assert_called_once_with( "foo_ip") _check_addr.assert_called_once_with(server) scenario._dissociate_floating_ip.assert_called_once_with( server, "foo_ip", atomic_action=False) mock_wrap.assert_called_once_with(scenario.clients, scenario) mock_wrap.return_value.delete_floating_ip.assert_called_once_with( "foo_id", wait=True) class HostTestCase(test.TestCase): @mock.patch(VMTASKS_UTILS + ".sys") @mock.patch("subprocess.Popen") def test__ping_ip_address_linux(self, 
mock_popen, mock_sys): mock_popen.return_value.returncode = 0 mock_sys.platform = "linux2" host = utils.Host("1.2.3.4") self.assertEqual(utils.Host.ICMP_UP_STATUS, utils.Host.update_status(host).status) mock_popen.assert_called_once_with( ["ping", "-c1", "-w1", str(host.ip)], stderr=subprocess.PIPE, stdout=subprocess.PIPE) mock_popen.return_value.wait.assert_called_once_with() @mock.patch(VMTASKS_UTILS + ".sys") @mock.patch("subprocess.Popen") def test__ping_ip_address_linux_ipv6(self, mock_popen, mock_sys): mock_popen.return_value.returncode = 0 mock_sys.platform = "linux2" host = utils.Host("1ce:c01d:bee2:15:a5:900d:a5:11fe") self.assertEqual(utils.Host.ICMP_UP_STATUS, utils.Host.update_status(host).status) mock_popen.assert_called_once_with( ["ping6", "-c1", "-w1", str(host.ip)], stderr=subprocess.PIPE, stdout=subprocess.PIPE) mock_popen.return_value.wait.assert_called_once_with() @mock.patch(VMTASKS_UTILS + ".sys") @mock.patch("subprocess.Popen") def test__ping_ip_address_other_os(self, mock_popen, mock_sys): mock_popen.return_value.returncode = 0 mock_sys.platform = "freebsd10" host = utils.Host("1.2.3.4") self.assertEqual(utils.Host.ICMP_UP_STATUS, utils.Host.update_status(host).status) mock_popen.assert_called_once_with( ["ping", "-c1", str(host.ip)], stderr=subprocess.PIPE, stdout=subprocess.PIPE) mock_popen.return_value.wait.assert_called_once_with() @mock.patch(VMTASKS_UTILS + ".sys") @mock.patch("subprocess.Popen") def test__ping_ip_address_other_os_ipv6(self, mock_popen, mock_sys): mock_popen.return_value.returncode = 0 mock_sys.platform = "freebsd10" host = utils.Host("1ce:c01d:bee2:15:a5:900d:a5:11fe") self.assertEqual(utils.Host.ICMP_UP_STATUS, utils.Host.update_status(host).status) mock_popen.assert_called_once_with( ["ping6", "-c1", str(host.ip)], stderr=subprocess.PIPE, stdout=subprocess.PIPE) mock_popen.return_value.wait.assert_called_once_with() 
# --- archive member: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# authenticate/test_authenticate.py ---
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock

from rally.plugins.openstack.scenarios.authenticate import authenticate
from tests.unit import test


class AuthenticateTestCase(test.ScenarioTestCase):
    """Checks every Authenticate.* scenario hits the expected client call."""

    def _assert_timer(self, scenario, action_name):
        # Shared shorthand for the atomic-action timer verification.
        self._test_atomic_action_timer(scenario.atomic_actions(), action_name)

    def test_keystone(self):
        scenario = authenticate.Keystone()
        scenario.run()
        self.assertTrue(self.client_created("keystone"))
        self._assert_timer(scenario, "authenticate.keystone")

    def test_validate_glance(self):
        scenario = authenticate.ValidateGlance()
        scenario.run(5)
        # NOTE(stpierre): assert_has_calls() is unusable here because it
        # also matches calls made on the mock's return values.  Glance (and
        # Heat and Monasca, tested below) return an iterator which the
        # scenario wraps in list() to force glanceclient to issue the API
        # call, producing extra call().__iter__()/call().__len__() entries
        # that assert_has_calls() would not match.
        self.assertItemsEqual(
            self.clients("glance").images.list.call_args_list,
            [mock.call(name=mock.ANY)] * 5)
        self._assert_timer(scenario, "authenticate.validate_glance_5_times")

    def test_validate_nova(self):
        scenario = authenticate.ValidateNova()
        scenario.run(5)
        self.clients("nova").flavors.list.assert_has_calls([mock.call()] * 5)
        self._assert_timer(scenario, "authenticate.validate_nova_5_times")

    def test_validate_ceilometer(self):
        scenario = authenticate.ValidateCeilometer()
        scenario.run(5)
        self.clients("ceilometer").meters.list.assert_has_calls(
            [mock.call()] * 5)
        self._assert_timer(scenario,
                           "authenticate.validate_ceilometer_5_times")

    def test_validate_cinder(self):
        scenario = authenticate.ValidateCinder()
        scenario.run(5)
        self.clients("cinder").volume_types.list.assert_has_calls(
            [mock.call()] * 5)
        self._assert_timer(scenario, "authenticate.validate_cinder_5_times")

    def test_validate_neutron(self):
        scenario = authenticate.ValidateNeutron()
        scenario.run(5)
        self.clients("neutron").list_networks.assert_has_calls(
            [mock.call()] * 5)
        self._assert_timer(scenario, "authenticate.validate_neutron_5_times")

    def test_validate_heat(self):
        scenario = authenticate.ValidateHeat()
        scenario.run(5)
        # See the note in test_validate_glance about assert_has_calls().
        self.assertItemsEqual(
            self.clients("heat").stacks.list.call_args_list,
            [mock.call(limit=0)] * 5)
        self._assert_timer(scenario, "authenticate.validate_heat_5_times")

    def test_validate_monasca(self):
        scenario = authenticate.ValidateMonasca()
        scenario.run(5)
        # See the note in test_validate_glance about assert_has_calls().
        self.assertItemsEqual(
            self.clients("monasca").metrics.list.call_args_list,
            [mock.call(limit=0)] * 5)
        self._assert_timer(scenario, "authenticate.validate_monasca_5_times")

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# manila/test_shares.py follows (its standard Apache-2.0 header,
# "Copyright 2015 Mirantis Inc.", precedes the code on the next chunk
# line). ---
# Unit tests for the Manila "shares" scenarios: each test stubs out the
# ManilaScenario helper methods and verifies that run() forwards its
# arguments to them unchanged.  (Reformatted from an archive-flattened
# chunk; all code tokens are unchanged.)
import ddt
import mock

from rally.plugins.openstack.scenarios.manila import shares
from tests.unit import test


@ddt.ddt
class ManilaSharesTestCase(test.ScenarioTestCase):

    @ddt.data(
        {"share_proto": "nfs", "size": 3},
        {"share_proto": "cifs", "size": 4,
         "share_network": "foo", "share_type": "bar"},
    )
    def test_create_and_delete_share(self, params):
        fake_share = mock.MagicMock()
        scenario = shares.CreateAndDeleteShare(self.context)
        scenario._create_share = mock.MagicMock(return_value=fake_share)
        scenario.sleep_between = mock.MagicMock()
        scenario._delete_share = mock.MagicMock()

        scenario.run(min_sleep=3, max_sleep=4, **params)

        scenario._create_share.assert_called_once_with(**params)
        scenario.sleep_between.assert_called_once_with(3, 4)
        scenario._delete_share.assert_called_once_with(fake_share)

    @ddt.data(
        {},
        {"detailed": True},
        {"detailed": False},
        {"search_opts": None},
        {"search_opts": {}},
        {"search_opts": {"foo": "bar"}},
        {"detailed": True, "search_opts": None},
        {"detailed": False, "search_opts": None},
        {"detailed": True, "search_opts": {"foo": "bar"}},
        {"detailed": False, "search_opts": {"quuz": "foo"}},
    )
    @ddt.unpack
    def test_list_shares(self, detailed=True, search_opts=None):
        scenario = shares.ListShares(self.context)
        scenario._list_shares = mock.MagicMock()

        scenario.run(detailed=detailed, search_opts=search_opts)

        scenario._list_shares.assert_called_once_with(
            detailed=detailed, search_opts=search_opts)

    @ddt.data(
        {},
        {"description": "foo_description"},
        {"neutron_net_id": "foo_neutron_net_id"},
        {"neutron_subnet_id": "foo_neutron_subnet_id"},
        {"nova_net_id": "foo_nova_net_id"},
        {"description": "foo_description",
         "neutron_net_id": "foo_neutron_net_id",
         "neutron_subnet_id": "foo_neutron_subnet_id",
         "nova_net_id": "foo_nova_net_id"},
    )
    def test_create_share_network_and_delete(self, params):
        fake_sn = mock.MagicMock()
        scenario = shares.CreateShareNetworkAndDelete(self.context)
        scenario._create_share_network = mock.MagicMock(return_value=fake_sn)
        scenario._delete_share_network = mock.MagicMock()
        # The scenario fills every absent kwarg with None before delegating.
        expected_params = {
            "description": None,
            "neutron_net_id": None,
            "neutron_subnet_id": None,
            "nova_net_id": None,
        }
        expected_params.update(params)

        scenario.run(**params)

        scenario._create_share_network.assert_called_once_with(
            **expected_params)
        scenario._delete_share_network.assert_called_once_with(fake_sn)

    @ddt.data(
        {},
        {"description": "foo_description"},
        {"neutron_net_id": "foo_neutron_net_id"},
        {"neutron_subnet_id": "foo_neutron_subnet_id"},
        {"nova_net_id": "foo_nova_net_id"},
        {"description": "foo_description",
         "neutron_net_id": "foo_neutron_net_id",
         "neutron_subnet_id": "foo_neutron_subnet_id",
         "nova_net_id": "foo_nova_net_id"},
    )
    def test_create_share_network_and_list(self, params):
        scenario = shares.CreateShareNetworkAndList(self.context)
        fake_network = mock.Mock()
        scenario._create_share_network = mock.Mock(
            return_value=fake_network)
        scenario._list_share_networks = mock.Mock(
            return_value=[fake_network, mock.Mock(), mock.Mock()])
        expected_create_params = {
            "description": params.get("description"),
            "neutron_net_id": params.get("neutron_net_id"),
            "neutron_subnet_id": params.get("neutron_subnet_id"),
            "nova_net_id": params.get("nova_net_id"),
        }
        expected_list_params = {
            "detailed": params.get("detailed", True),
            "search_opts": params.get("search_opts"),
        }
        expected_create_params.update(params)

        scenario.run(**params)

        scenario._create_share_network.assert_called_once_with(
            **expected_create_params)
        scenario._list_share_networks.assert_called_once_with(
            **expected_list_params)

    # NOTE: no @ddt.unpack here — each dict is passed whole as the single
    # ``search_opts`` argument.
    @ddt.data(
        {},
        {"search_opts": None},
        {"search_opts": {}},
        {"search_opts": {"foo": "bar"}},
    )
    def test_list_share_servers(self, search_opts):
        scenario = shares.ListShareServers(self.context)
        # Listing share servers is an admin-only operation.
        scenario.context = {"admin": {"credential": "fake_credential"}}
        scenario._list_share_servers = mock.MagicMock()

        scenario.run(search_opts=search_opts)

        scenario._list_share_servers.assert_called_once_with(
            search_opts=search_opts)

    @ddt.data(
        {"security_service_type": "fake_type"},
        {"security_service_type": "fake_type",
         "dns_ip": "fake_dns_ip",
         "server": "fake_server",
         "domain": "fake_domain",
         "user": "fake_user",
         "password": "fake_password",
         "description": "fake_description"},
    )
    def test_create_security_service_and_delete(self, params):
        fake_ss = mock.MagicMock()
        scenario = shares.CreateSecurityServiceAndDelete(self.context)
        scenario._create_security_service = mock.MagicMock(
            return_value=fake_ss)
        scenario._delete_security_service = mock.MagicMock()
        expected_params = {
            "security_service_type": params.get("security_service_type"),
            "dns_ip": params.get("dns_ip"),
            "server": params.get("server"),
            "domain": params.get("domain"),
            "user": params.get("user"),
            "password": params.get("password"),
            "description": params.get("description"),
        }

        scenario.run(**params)

        scenario._create_security_service.assert_called_once_with(
            **expected_params)
        scenario._delete_security_service.assert_called_once_with(fake_ss)

    @ddt.data("ldap", "kerberos", "active_directory")
    def test_attach_security_service_to_share_network(self,
                                                      security_service_type):
        scenario = shares.AttachSecurityServiceToShareNetwork(self.context)
        scenario._create_share_network = mock.MagicMock()
        scenario._create_security_service = mock.MagicMock()
        scenario._add_security_service_to_share_network = mock.MagicMock()

        scenario.run(security_service_type=security_service_type)

        scenario._create_share_network.assert_called_once_with()
        scenario._create_security_service.assert_called_once_with(
            security_service_type=security_service_type)
        scenario._add_security_service_to_share_network.assert_has_calls([
            mock.call(scenario._create_share_network.return_value,
                      scenario._create_security_service.return_value)])

    @ddt.data(
        {"share_proto": "nfs", "size": 3, "detailed": True},
        {"share_proto": "cifs", "size": 4, "detailed": False,
         "share_network": "foo", "share_type": "bar"},
    )
    def test_create_and_list_share(self, params):
        scenario = shares.CreateAndListShare()
        scenario._create_share = mock.MagicMock()
        scenario.sleep_between = mock.MagicMock()
        scenario._list_shares = mock.MagicMock()

        scenario.run(min_sleep=3, max_sleep=4, **params)

        # "detailed" is consumed by run() itself, not by _create_share.
        detailed = params.pop("detailed")
        scenario._create_share.assert_called_once_with(**params)
        scenario.sleep_between.assert_called_once_with(3, 4)
        scenario._list_shares.assert_called_once_with(detailed=detailed)

    # Tuples are (run kwargs, iteration counter, index of the tenant share
    # the scenario is expected to operate on); iteration wraps modulo the
    # number of shares (3 -> 0).
    @ddt.data(
        ({}, 0, 0),
        ({}, 1, 1),
        ({}, 2, 2),
        ({}, 3, 0),
        ({"sets": 5, "set_size": 8, "delete_size": 10}, 1, 1),
    )
    @ddt.unpack
    def test_set_and_delete_metadata(self, params, iteration, share_number):
        scenario = shares.SetAndDeleteMetadata()
        share_list = [{"id": "fake_share_%s_id" % d} for d in range(3)]
        scenario.context = {"tenant": {"shares": share_list}}
        scenario.context["iteration"] = iteration
        scenario._set_metadata = mock.MagicMock()
        scenario._delete_metadata = mock.MagicMock()
        expected_set_params = {
            "share": share_list[share_number],
            "sets": params.get("sets", 10),
            "set_size": params.get("set_size", 3),
            "key_min_length": params.get("key_min_length", 1),
            "key_max_length": params.get("key_max_length", 256),
            "value_min_length": params.get("value_min_length", 1),
            "value_max_length": params.get("value_max_length", 1024),
        }

        scenario.run(**params)

        scenario._set_metadata.assert_called_once_with(**expected_set_params)
        scenario._delete_metadata.assert_called_once_with(
            share=share_list[share_number],
            keys=scenario._set_metadata.return_value,
            delete_size=params.get("delete_size", 3),
        )

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# manila/test_utils.py follows.  Its Apache-2.0 header
# ("Copyright 2015 Mirantis Inc.") begins here and continues on the next
# chunk line. ---
# (continuation of the Apache-2.0 header of manila/test_utils.py started on
# the previous chunk line)
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock

from rally import exceptions
from rally.plugins.openstack.context.manila import consts
from rally.plugins.openstack.scenarios.manila import utils
from tests.unit import test

# Prefix for patching helpers out of rally.task.utils ("benchmark utils").
BM_UTILS = "rally.task.utils."


@ddt.ddt
class ManilaScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for the ManilaScenario helper methods."""

    def setUp(self):
        super(ManilaScenarioTestCase, self).setUp()
        self.scenario = utils.ManilaScenario(self.context)

    def test__create_share(self):
        fake_share = mock.Mock()
        self.clients("manila").shares.create.return_value = fake_share
        # Two share networks in context; iteration 0 must pick the first.
        self.scenario.context = {
            "tenant": {
                consts.SHARE_NETWORKS_CONTEXT_NAME: {
                    "share_networks": [{"id": "sn_1_id"}, {"id": "sn_2_id"}],
                }
            },
            "iteration": 0,
        }
        fake_random_name = "fake_random_name_value"
        self.scenario.generate_random_name = mock.Mock(
            return_value=fake_random_name)

        self.scenario._create_share("nfs")

        self.clients("manila").shares.create.assert_called_once_with(
            "nfs", 1, name=fake_random_name,
            share_network=self.scenario.context["tenant"][
                consts.SHARE_NETWORKS_CONTEXT_NAME]["share_networks"][0]["id"])
        self.mock_wait_for.mock.assert_called_once_with(
            fake_share,
            ready_statuses=["available"],
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=300, check_interval=3)
        self.mock_get_from_manager.mock.assert_called_once_with()

    @mock.patch(BM_UTILS + "wait_for_status")
    def test__delete_share(self, mock_wait_for_status):
        fake_share = mock.MagicMock()

        self.scenario._delete_share(fake_share)

        fake_share.delete.assert_called_once_with()
        mock_wait_for_status.assert_called_once_with(
            fake_share,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=180, check_interval=2)
        # "error_deleting" is treated as a failure state during deletion.
        self.mock_get_from_manager.mock.assert_called_once_with(
            ("error_deleting", ))

    @ddt.data(
        {},
        {"detailed": False, "search_opts": None},
        {"detailed": True, "search_opts": {"name": "foo_sn"}},
        {"search_opts": {"project_id": "fake_project"}},
    )
    def test__list_shares(self, params):
        fake_shares = ["foo", "bar"]
        self.clients("manila").shares.list.return_value = fake_shares

        result = self.scenario._list_shares(**params)

        self.assertEqual(fake_shares, result)
        self.clients("manila").shares.list.assert_called_once_with(
            detailed=params.get("detailed", True),
            search_opts=params.get("search_opts"))

    def test__create_share_network(self):
        fake_sn = mock.Mock()
        self.scenario.generate_random_name = mock.Mock()
        self.clients("manila").share_networks.create.return_value = fake_sn
        data = {
            "neutron_net_id": "fake_neutron_net_id",
            "neutron_subnet_id": "fake_neutron_subnet_id",
            "nova_net_id": "fake_nova_net_id",
            "description": "fake_description",
        }
        # A generated name is added on top of the caller-supplied kwargs.
        expected = dict(data)
        expected["name"] = self.scenario.generate_random_name.return_value

        result = self.scenario._create_share_network(**data)

        self.assertEqual(fake_sn, result)
        self.clients("manila").share_networks.create.assert_called_once_with(
            **expected)

    @mock.patch(BM_UTILS + "wait_for_status")
    def test__delete_share_network(self, mock_wait_for_status):
        fake_sn = mock.MagicMock()

        self.scenario._delete_share_network(fake_sn)

        fake_sn.delete.assert_called_once_with()
        mock_wait_for_status.assert_called_once_with(
            fake_sn,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=180, check_interval=2)
        self.mock_get_from_manager.mock.assert_called_once_with()

    @ddt.data(
        {"detailed": True, "search_opts": {"name": "foo_sn"}},
        {"detailed": False, "search_opts": None},
        {},
        {"search_opts": {"project_id": "fake_project"}},
    )
    def test__list_share_networks(self, params):
        fake_share_networks = ["foo", "bar"]
        self.clients("manila").share_networks.list.return_value = (
            fake_share_networks)

        result = self.scenario._list_share_networks(**params)

        self.assertEqual(fake_share_networks, result)
        self.clients("manila").share_networks.list.assert_called_once_with(
            detailed=params.get("detailed", True),
            search_opts=params.get("search_opts"))

    @ddt.data(
        {},
        {"search_opts": None},
        {"search_opts": {"project_id": "fake_project"}},
    )
    def test__list_share_servers(self, params):
        # Share servers are listed through the *admin* client.
        fake_share_servers = ["foo", "bar"]
        self.admin_clients("manila").share_servers.list.return_value = (
            fake_share_servers)

        result = self.scenario._list_share_servers(**params)

        self.assertEqual(fake_share_servers, result)
        self.admin_clients(
            "manila").share_servers.list.assert_called_once_with(
                search_opts=params.get("search_opts"))

    @ddt.data("ldap", "kerberos", "active_directory")
    def test__create_security_service(self, ss_type):
        fake_ss = mock.Mock()
        self.clients("manila").security_services.create.return_value = fake_ss
        self.scenario.generate_random_name = mock.Mock()
        data = {
            "security_service_type": ss_type,
            "dns_ip": "fake_dns_ip",
            "server": "fake_server",
            "domain": "fake_domain",
            "user": "fake_user",
            "password": "fake_password",
            "description": "fake_description",
        }
        # The helper renames "security_service_type" to the API's "type"
        # and injects a generated name.
        expected = dict(data)
        expected["type"] = expected.pop("security_service_type")
        expected["name"] = self.scenario.generate_random_name.return_value

        result = self.scenario._create_security_service(**data)

        self.assertEqual(fake_ss, result)
        self.clients(
            "manila").security_services.create.assert_called_once_with(
                **expected)

    @mock.patch(BM_UTILS + "wait_for_status")
    def test__delete_security_service(self, mock_wait_for_status):
        fake_ss = mock.MagicMock()

        self.scenario._delete_security_service(fake_ss)

        fake_ss.delete.assert_called_once_with()
        mock_wait_for_status.assert_called_once_with(
            fake_ss,
            ready_statuses=["deleted"],
            check_deletion=True,
            update_resource=self.mock_get_from_manager.mock.return_value,
            timeout=180, check_interval=2)
        self.mock_get_from_manager.mock.assert_called_once_with()

    def test__add_security_service_to_share_network(self):
        fake_sn = mock.MagicMock()
        fake_ss = mock.MagicMock()

        result = self.scenario._add_security_service_to_share_network(
            share_network=fake_sn, security_service=fake_ss)

        self.assertEqual(
            self.clients(
                "manila").share_networks.add_security_service.return_value,
            result)
        self.clients(
            "manila").share_networks.add_security_service.assert_has_calls([
                mock.call(fake_sn, fake_ss)])

    @ddt.data(
        {"key_min_length": 5, "key_max_length": 4},
        {"value_min_length": 5, "value_max_length": 4},
    )
    def test__set_metadata_wrong_params(self, params):
        # min > max length bounds must be rejected.
        self.assertRaises(
            exceptions.InvalidArgumentsException,
            self.scenario._set_metadata,
            {"id": "fake_share_id"},
            **params)

    @ddt.data(
        {},
        {"sets": 0, "set_size": 1},
        {"sets": 1, "set_size": 1},
        {"sets": 5, "set_size": 7},
        {"sets": 5, "set_size": 2},
        {"key_min_length": 1, "key_max_length": 1},
        {"key_min_length": 1, "key_max_length": 2},
        {"key_min_length": 256, "key_max_length": 256},
        {"value_min_length": 1, "value_max_length": 1},
        {"value_min_length": 1, "value_max_length": 2},
        {"value_min_length": 1024, "value_max_length": 1024},
    )
    def test__set_metadata(self, params):
        share = {"id": "fake_share_id"}
        sets = params.get("sets", 1)
        set_size = params.get("set_size", 1)
        # Each metadata entry consumes two generated parts: key and value.
        gen_name_calls = sets * set_size * 2
        data = range(gen_name_calls)
        generator_data = iter(data)

        # Deterministic stand-in for the random-part generator: hands out
        # 0, 1, 2, ... so the produced key/value pairs are predictable.
        def fake_random_name(prefix="fake", length="fake"):
            return next(generator_data)

        scenario = self.scenario
        scenario.clients = mock.MagicMock()
        scenario._generate_random_part = mock.MagicMock(
            side_effect=fake_random_name)

        keys = scenario._set_metadata(share, **params)

        self.assertEqual(
            gen_name_calls, scenario._generate_random_part.call_count)
        self.assertEqual(
            params.get("sets", 1),
            scenario.clients.return_value.shares.set_metadata.call_count)
        # Even-indexed values become keys, odd-indexed become their values.
        scenario.clients.return_value.shares.set_metadata.assert_has_calls([
            mock.call(
                share["id"],
                dict([(j, j + 1) for j in data[
                    i * set_size * 2: (i + 1) * set_size * 2: 2]])
            ) for i in range(sets)
        ])
        self.assertEqual([i for i in range(0, gen_name_calls, 2)], keys)

    @ddt.data(None, [], {"fake_set"}, {"fake_key": "fake_value"})
    def test__delete_metadata_wrong_params(self, keys):
        # keys must be a non-empty list; None/empty/sets/dicts are invalid.
        self.assertRaises(
            exceptions.InvalidArgumentsException,
            self.scenario._delete_metadata,
            "fake_share",
            keys=keys,
        )

    @ddt.data(
        {"keys": [i for i in range(30)]},
        {"keys": list(range(7)), "delete_size": 2},
        {"keys": list(range(7)), "delete_size": 3},
        {"keys": list(range(7)), "delete_size": 4},
    )
    def test__delete_metadata(self, params):
        share = {"id": "fake_share_id"}
        delete_size = params.get("delete_size", 3)
        keys = params.get("keys", [])
        scenario = self.scenario
        scenario.clients = mock.MagicMock()

        scenario._delete_metadata(share, **params)

        # Keys are deleted in batches of delete_size.
        scenario.clients.return_value.shares.delete_metadata.assert_has_calls([
            mock.call(share["id"], keys[i:i + delete_size])
            for i in range(0, len(keys), delete_size)
        ])

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# quotas/test_quotas.py follows (standard Apache-2.0 header,
# "Copyright 2014: Kylin Cloud"). ---
import mock

from rally.plugins.openstack.scenarios.quotas import quotas
from tests.unit import test


class QuotasTestCase(test.ScenarioTestCase):
    """Verifies each quotas scenario delegates to the proper helper."""

    def setUp(self):
        super(QuotasTestCase, self).setUp()
        self.context.update({
            "user": {
                "tenant_id": "fake",
                "credential": mock.MagicMock()
            },
            "tenant": {"id": "fake"}
        })

    def test_nova_get(self):
        scn = quotas.NovaGet(self.context)
        scn._get_quotas = mock.MagicMock()
        scn.run()
        scn._get_quotas.assert_called_once_with("nova", "fake")

    def test_cinder_get(self):
        scn = quotas.CinderGet(self.context)
        scn._get_quotas = mock.MagicMock()
        scn.run()
        scn._get_quotas.assert_called_once_with("cinder", "fake")

    def test_nova_update(self):
        scn = quotas.NovaUpdate(self.context)
        scn._update_quotas = mock.MagicMock()
        scn.run(max_quota=1024)
        scn._update_quotas.assert_called_once_with("nova", "fake", 1024)

    def test_nova_update_and_delete(self):
        scn = quotas.NovaUpdateAndDelete(self.context)
        scn._update_quotas = mock.MagicMock()
        scn._delete_quotas = mock.MagicMock()
        scn.run(max_quota=1024)
        scn._update_quotas.assert_called_once_with("nova", "fake", 1024)
        scn._delete_quotas.assert_called_once_with("nova", "fake")

    def test_cinder_update(self):
        scn = quotas.CinderUpdate(self.context)
        scn._update_quotas = mock.MagicMock()
        scn.run(max_quota=1024)
        scn._update_quotas.assert_called_once_with("cinder", "fake", 1024)

    def test_cinder_update_and_delete(self):
        scn = quotas.CinderUpdateAndDelete(self.context)
        scn._update_quotas = mock.MagicMock()
        scn._delete_quotas = mock.MagicMock()
        scn.run(max_quota=1024)
        scn._update_quotas.assert_called_once_with("cinder", "fake", 1024)
        scn._delete_quotas.assert_called_once_with("cinder", "fake")

    def test_neutron_update(self):
        # Neutron additionally receives the admin client's update_quota
        # callable, so grab it from the mocked admin clients first.
        scn = quotas.NeutronUpdate(self.context)
        scn._update_quotas = mock.MagicMock()
        update_fn = self.admin_clients("neutron").update_quota
        scn.run(max_quota=1024)
        scn._update_quotas.assert_called_once_with("neutron", "fake", 1024,
                                                   update_fn)

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# quotas/test_utils.py follows (standard Apache-2.0 header,
# "Copyright 2014: Kylin Cloud"). ---
import mock

from rally.plugins.openstack.scenarios.quotas import utils
from tests.unit import test


class QuotasScenarioTestCase(test.ScenarioTestCase):
    """Unit tests for the QuotasScenario helper methods."""

    @staticmethod
    def _fake_nova_quotas():
        # Representative quota set returned by the mocked nova client.
        return {
            "metadata_items": 10,
            "key_pairs": 10,
            "injected_file_content_bytes": 1024,
            "injected_file_path_bytes": 1024,
            "ram": 5120,
            "instances": 10,
            "injected_files": 10,
            "cores": 10,
        }

    def test__update_quotas(self):
        tenant = "fake_tenant"
        sample = self._fake_nova_quotas()
        self.admin_clients("nova").quotas.update.return_value = sample
        scn = utils.QuotasScenario(self.context)
        scn._generate_quota_values = mock.MagicMock(return_value=sample)

        updated = scn._update_quotas("nova", tenant)

        self.assertEqual(sample, updated)
        self.admin_clients("nova").quotas.update.assert_called_once_with(
            tenant, **sample)
        self._test_atomic_action_timer(scn.atomic_actions(),
                                       "quotas.update_quotas")

    def test__update_quotas_fn(self):
        # A caller-supplied quota_update_fn replaces the direct client call.
        tenant = "fake_tenant"
        sample = self._fake_nova_quotas()
        self.admin_clients("nova").quotas.update.return_value = sample
        scn = utils.QuotasScenario(self.context)
        scn._generate_quota_values = mock.MagicMock(return_value=sample)
        custom_update = mock.Mock(return_value=sample)

        updated = scn._update_quotas("nova", tenant,
                                     quota_update_fn=custom_update)

        self.assertEqual(sample, updated)
        self._test_atomic_action_timer(scn.atomic_actions(),
                                       "quotas.update_quotas")

    def test__generate_quota_values_nova(self):
        limit = 1024
        scn = utils.QuotasScenario(self.context)
        # Every generated value must lie in [-1, limit].
        for value in scn._generate_quota_values(limit, "nova").values():
            self.assertGreaterEqual(value, -1)
            self.assertLessEqual(value, limit)

    def test__generate_quota_values_cinder(self):
        limit = 1024
        scn = utils.QuotasScenario(self.context)
        for value in scn._generate_quota_values(limit, "cinder").values():
            self.assertGreaterEqual(value, -1)
            self.assertLessEqual(value, limit)

    def test__generate_quota_values_neutron(self):
        limit = 1024
        scn = utils.QuotasScenario(self.context)
        generated = scn._generate_quota_values(limit, "neutron")
        # Neutron quotas are nested three levels deep; check the leaves.
        for level1 in generated.values():
            for level2 in level1.values():
                for leaf in level2.values():
                    self.assertGreaterEqual(leaf, -1)
                    self.assertLessEqual(leaf, limit)

    def test__delete_quotas(self):
        tenant = "fake_tenant"
        scn = utils.QuotasScenario(self.context)

        scn._delete_quotas("nova", tenant)

        self.admin_clients("nova").quotas.delete.assert_called_once_with(
            tenant)
        self._test_atomic_action_timer(scn.atomic_actions(),
                                       "quotas.delete_quotas")

    def test__get_quotas(self):
        tenant = "fake_tenant"
        scn = utils.QuotasScenario(self.context)

        scn._get_quotas("nova", tenant)

        self.admin_clients("nova").quotas.get.assert_called_once_with(
            tenant)
        self._test_atomic_action_timer(scn.atomic_actions(),
                                       "quotas.get_quotas")

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# neutron/test_security_groups.py follows (standard Apache-2.0 header). ---
import ddt
import mock

from rally.plugins.openstack.scenarios.neutron import security_groups
from tests.unit import test


@ddt.ddt
class NeutronSecurityGroup(test.TestCase):
    """Tests for the neutron security-group scenarios.

    Each scenario is run with its helper methods mocked out, and the test
    checks that run() forwards the caller's kwargs (normalizing None to an
    empty dict) to the right helpers in the right order.
    """

    @ddt.data(
        {},
        {"security_group_create_args": {}},
        {"security_group_create_args": {"description": "fake-description"}},
    )
    @ddt.unpack
    def test_create_and_list_security_groups(
            self, security_group_create_args=None):
        scn = security_groups.CreateAndListSecurityGroups()
        scn._create_security_group = mock.Mock()
        scn._list_security_groups = mock.Mock()
        expected_create_kwargs = security_group_create_args or {}

        scn.run(security_group_create_args=security_group_create_args)

        scn._create_security_group.assert_called_once_with(
            **expected_create_kwargs)
        scn._list_security_groups.assert_called_once_with()

    @ddt.data(
        {},
        {"security_group_create_args": {}},
        {"security_group_create_args": {"description": "fake-description"}},
    )
    @ddt.unpack
    def test_create_and_delete_security_groups(
            self, security_group_create_args=None):
        scn = security_groups.CreateAndDeleteSecurityGroups()
        scn._create_security_group = mock.Mock()
        scn._delete_security_group = mock.Mock()
        expected_create_kwargs = security_group_create_args or {}

        scn.run(security_group_create_args=security_group_create_args)

        scn._create_security_group.assert_called_once_with(
            **expected_create_kwargs)
        scn._delete_security_group.assert_called_once_with(
            scn._create_security_group.return_value)

    @ddt.data(
        {},
        {"security_group_create_args": {}},
        {"security_group_create_args": {"description": "fake-description"}},
        {"security_group_update_args": {}},
        {"security_group_update_args": {"description": "fake-updated-descr"}},
    )
    @ddt.unpack
    def test_create_and_update_security_groups(
            self, security_group_create_args=None,
            security_group_update_args=None):
        scn = security_groups.CreateAndUpdateSecurityGroups()
        scn._create_security_group = mock.Mock()
        scn._update_security_group = mock.Mock()
        expected_create_kwargs = security_group_create_args or {}
        expected_update_kwargs = security_group_update_args or {}

        scn.run(security_group_create_args=security_group_create_args,
                security_group_update_args=security_group_update_args)

        scn._create_security_group.assert_called_once_with(
            **expected_create_kwargs)
        scn._update_security_group.assert_called_once_with(
            scn._create_security_group.return_value,
            **expected_update_kwargs)

# --- archive boundary: rally-0.9.1/tests/unit/plugins/openstack/scenarios/
# neutron/test_utils.py begins here.  Its header and opening code are kept
# below because the class that follows on the next chunk line depends on
# these imports and the NEUTRON_UTILS constant. ---
# Copyright 2013: Intel Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ddt
import mock

from rally import exceptions
from rally.plugins.openstack.scenarios.neutron import utils
from tests.unit import test

NEUTRON_UTILS = "rally.plugins.openstack.scenarios.neutron.utils."
@ddt.ddt class NeutronScenarioTestCase(test.ScenarioTestCase): def setUp(self): super(NeutronScenarioTestCase, self).setUp() self.network = mock.Mock() self.scenario = utils.NeutronScenario(self.context) self.random_name = "random_name" self.scenario.generate_random_name = mock.Mock( return_value=self.random_name) def test__get_network_id(self): networks = [{"id": "foo-id", "name": "foo-network"}, {"id": "bar-id", "name": "bar-network"}] network_id = "foo-id" # Valid network-name network = "foo-network" self.scenario._list_networks = mock.Mock(return_value=networks) resultant_network_id = self.scenario._get_network_id(network) self.assertEqual(network_id, resultant_network_id) self.scenario._list_networks.assert_called_once_with( atomic_action=False) self.scenario._list_networks.reset_mock() # Valid network-id network = "foo-id" resultant_network_id = self.scenario._get_network_id(network) self.assertEqual(network_id, resultant_network_id) self.scenario._list_networks.assert_called_once_with( atomic_action=False) self.scenario._list_networks.reset_mock() # Invalid network-name network = "absent-network" self.assertRaises(exceptions.NotFoundException, self.scenario._get_network_id, network) self.scenario._list_networks.assert_called_once_with( atomic_action=False) def test_create_network(self): self.clients("neutron").create_network.return_value = self.network network_data = {"admin_state_up": False} expected_network_data = {"network": network_data} network = self.scenario._create_network(network_data) self.assertEqual(self.network, network) self.clients("neutron").create_network.assert_called_once_with( expected_network_data) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_network") def test_list_networks(self): networks_list = [] networks_dict = {"networks": networks_list} self.clients("neutron").list_networks.return_value = networks_dict # without atomic action return_networks_list = self.scenario._list_networks( 
atomic_action=False) self.assertEqual(networks_list, return_networks_list) # with atomic action return_networks_list = self.scenario._list_networks() self.assertEqual(networks_list, return_networks_list) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_networks") def test_show_network(self): network = { "network": { "id": "fake-id", "name": "fake-name", "admin_state_up": False } } return_network = self.scenario._show_network(network) self.assertEqual(self.clients("neutron").show_network.return_value, return_network) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.show_network") def test_update_network(self): expected_network = { "network": { "name": self.scenario.generate_random_name.return_value, "admin_state_up": False, "fakearg": "fake" } } self.clients("neutron").update_network.return_value = expected_network network = {"network": {"name": "network-name", "id": "network-id"}} network_update_args = {"name": "foo", "admin_state_up": False, "fakearg": "fake"} result_network = self.scenario._update_network(network, network_update_args) self.clients("neutron").update_network.assert_called_once_with( network["network"]["id"], expected_network) self.assertEqual(result_network, expected_network) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_network") def test_delete_network(self): network_create_args = {} network = self.scenario._create_network(network_create_args) self.scenario._delete_network(network) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_network") @mock.patch(NEUTRON_UTILS + "network_wrapper") def test_create_subnet(self, mock_network_wrapper): network_id = "fake-id" start_cidr = "192.168.0.0/24" mock_network_wrapper.generate_cidr.return_value = "192.168.0.0/24" network = {"network": {"id": network_id}} expected_subnet_data = { "subnet": { "network_id": network_id, "cidr": start_cidr, "ip_version": self.scenario.SUBNET_IP_VERSION, "name": 
self.scenario.generate_random_name.return_value } } # Default options subnet_data = {"network_id": network_id} self.scenario._create_subnet(network, subnet_data, start_cidr) self.clients("neutron").create_subnet.assert_called_once_with( expected_subnet_data) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_subnet") self.clients("neutron").create_subnet.reset_mock() # Custom options extras = {"cidr": "192.168.16.0/24", "allocation_pools": []} mock_network_wrapper.generate_cidr.return_value = "192.168.16.0/24" subnet_data.update(extras) expected_subnet_data["subnet"].update(extras) self.scenario._create_subnet(network, subnet_data) self.clients("neutron").create_subnet.assert_called_once_with( expected_subnet_data) def test_list_subnets(self): subnets = [{"name": "fake1"}, {"name": "fake2"}] self.clients("neutron").list_subnets.return_value = { "subnets": subnets } result = self.scenario._list_subnets() self.assertEqual(subnets, result) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_subnets") def test_update_subnet(self): expected_subnet = { "subnet": { "name": self.scenario.generate_random_name.return_value, "enable_dhcp": False, "fakearg": "fake" } } self.clients("neutron").update_subnet.return_value = expected_subnet subnet = {"subnet": {"name": "subnet-name", "id": "subnet-id"}} subnet_update_args = {"name": "foo", "enable_dhcp": False, "fakearg": "fake"} result_subnet = self.scenario._update_subnet(subnet, subnet_update_args) self.clients("neutron").update_subnet.assert_called_once_with( subnet["subnet"]["id"], expected_subnet) self.assertEqual(result_subnet, expected_subnet) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_subnet") def test_delete_subnet(self): network = self.scenario._create_network({}) subnet = self.scenario._create_subnet(network, {}) self.scenario._delete_subnet(subnet) self._test_atomic_action_timer(self.scenario.atomic_actions(), 
"neutron.delete_subnet") def test_create_router(self): router = mock.Mock() self.clients("neutron").create_router.return_value = router # Default options result_router = self.scenario._create_router({}) self.clients("neutron").create_router.assert_called_once_with({ "router": { "name": self.scenario.generate_random_name.return_value } }) self.assertEqual(result_router, router) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_router") def test_create_router_with_ext_gw(self): router = mock.Mock() external_network = [{"id": "ext-net", "router:external": True}] self.scenario._list_networks = mock.Mock(return_value=external_network) self.clients("neutron").create_router.return_value = router # External_gw options gw_info = {"network_id": external_network[0]["id"], "enable_snat": True} router_data = { "name": self.scenario.generate_random_name.return_value, "external_gateway_info": gw_info } result_router = self.scenario._create_router({}, external_gw=True) self.clients("neutron").create_router.assert_called_once_with( {"router": router_data}) self.assertEqual(result_router, router) self._test_atomic_action_timer( self.scenario.atomic_actions(), "neutron.create_router") def test_create_router_with_ext_gw_but_no_ext_net(self): router = mock.Mock() external_network = [{"id": "ext-net", "router:external": False}] self.scenario._list_networks = mock.Mock(return_value=external_network) self.clients("neutron").create_router.return_value = router # External_gw options with no external networks in list_networks() result_router = self.scenario._create_router({}, external_gw=True) self.clients("neutron").create_router.assert_called_once_with({ "router": {"name": self.scenario.generate_random_name.return_value} }) self.assertEqual(result_router, router) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_router") def test_create_router_explicit(self): router = mock.Mock() self.clients("neutron").create_router.return_value = 
router # Custom options router_data = {"name": "explicit_name", "admin_state_up": True} result_router = self.scenario._create_router(router_data) self.clients("neutron").create_router.assert_called_once_with( {"router": router_data}) self.assertEqual(result_router, router) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_router") def test_list_routers(self): routers = [mock.Mock()] self.clients("neutron").list_routers.return_value = { "routers": routers} self.assertEqual(routers, self.scenario._list_routers()) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_routers") def test_list_agents(self): agents = [mock.Mock()] self.clients("neutron").list_agents.return_value = { "agents": agents} self.assertEqual(agents, self.scenario._list_agents()) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_agents") def test_update_router(self): expected_router = { "router": { "name": self.scenario.generate_random_name.return_value, "admin_state_up": False, "fakearg": "fake" } } self.clients("neutron").update_router.return_value = expected_router router = { "router": { "id": "router-id", "name": "router-name", "admin_state_up": True } } router_update_args = {"name": "foo", "admin_state_up": False, "fakearg": "fake"} result_router = self.scenario._update_router(router, router_update_args) self.clients("neutron").update_router.assert_called_once_with( router["router"]["id"], expected_router) self.assertEqual(result_router, expected_router) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_router") def test_delete_router(self): router = self.scenario._create_router({}) self.scenario._delete_router(router) self.clients("neutron").delete_router.assert_called_once_with( router["router"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_router") def test_remove_interface_router(self): subnet = {"name": "subnet-name", "id": "subnet-id"} 
router_data = {"id": 1} router = self.scenario._create_router(router_data) self.scenario._add_interface_router(subnet, router) self.scenario._remove_interface_router(subnet, router) mock_remove_router = self.clients("neutron").remove_interface_router mock_remove_router.assert_called_once_with( router["id"], {"subnet_id": subnet["id"]}) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.remove_interface_router") def test_SUBNET_IP_VERSION(self): """Curent NeutronScenario implementation supports only IPv4.""" self.assertEqual(utils.NeutronScenario.SUBNET_IP_VERSION, 4) def test_create_port(self): net_id = "network-id" net = {"network": {"id": net_id}} expected_port_args = { "port": { "network_id": net_id, "name": self.scenario.generate_random_name.return_value } } # Defaults port_create_args = {} self.scenario._create_port(net, port_create_args) self.clients("neutron" ).create_port.assert_called_once_with(expected_port_args) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_port") self.clients("neutron").create_port.reset_mock() # Custom options port_args = {"admin_state_up": True} expected_port_args["port"].update(port_args) self.scenario._create_port(net, port_args) self.clients("neutron" ).create_port.assert_called_once_with(expected_port_args) def test_list_ports(self): ports = [{"name": "port1"}, {"name": "port2"}] self.clients("neutron").list_ports.return_value = {"ports": ports} self.assertEqual(ports, self.scenario._list_ports()) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_ports") def test_update_port(self): expected_port = { "port": { "admin_state_up": False, "fakearg": "fake", "name": self.scenario.generate_random_name.return_value } } self.clients("neutron").update_port.return_value = expected_port port = { "port": { "id": "port-id", "name": "port-name", "admin_state_up": True } } port_update_args = { "admin_state_up": False, "fakearg": "fake" } result_port = 
self.scenario._update_port(port, port_update_args) self.clients("neutron").update_port.assert_called_once_with( port["port"]["id"], expected_port) self.assertEqual(result_port, expected_port) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_port") def test_delete_port(self): network = self.scenario._create_network({}) port = self.scenario._create_port(network, {}) self.scenario._delete_port(port) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_port") @ddt.data( {"context": {"tenant": {"networks": [mock.MagicMock(), mock.MagicMock()]}}}, {"network_create_args": {"fakearg": "fake"}, "context": {"tenant": {"networks": [mock.MagicMock(), mock.MagicMock()]}}}) @ddt.unpack @mock.patch("random.choice", side_effect=lambda l: l[0]) def test_get_or_create_network(self, mock_random_choice, network_create_args=None, context=None): self.scenario.context = context self.scenario._create_network = mock.Mock( return_value={"network": mock.Mock()}) network = self.scenario._get_or_create_network(network_create_args) # ensure that the return value is the proper type either way self.assertIn("network", network) if "networks" in context["tenant"]: self.assertEqual(network, {"network": context["tenant"]["networks"][0]}) self.assertFalse(self.scenario._create_network.called) else: self.assertEqual(network, self.scenario._create_network.return_value) self.scenario._create_network.assert_called_once_with( network_create_args or {}) @mock.patch(NEUTRON_UTILS + "NeutronScenario._create_subnet") @mock.patch(NEUTRON_UTILS + "NeutronScenario._create_network") def test_create_network_and_subnets(self, mock__create_network, mock__create_subnet): mock__create_network.return_value = {"network": {"id": "fake-id"}} mock__create_subnet.return_value = { "subnet": { "name": "subnet-name", "id": "subnet-id", "enable_dhcp": False } } network_create_args = {} subnet_create_args = {} subnets_per_network = 4 # Default options 
self.scenario._create_network_and_subnets( network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnets_per_network=subnets_per_network) mock__create_network.assert_called_once_with({}) mock__create_subnet.assert_has_calls( [mock.call({"network": {"id": "fake-id"}}, {}, "1.0.0.0/24")] * subnets_per_network) mock__create_network.reset_mock() mock__create_subnet.reset_mock() # Custom options self.scenario._create_network_and_subnets( network_create_args=network_create_args, subnet_create_args={"allocation_pools": []}, subnet_cidr_start="10.10.10.0/24", subnets_per_network=subnets_per_network) mock__create_network.assert_called_once_with({}) mock__create_subnet.assert_has_calls( [mock.call({"network": {"id": "fake-id"}}, {"allocation_pools": []}, "10.10.10.0/24")] * subnets_per_network) def test_list_floating_ips(self): fips_list = [{"id": "floating-ip-id"}] fips_dict = {"floatingips": fips_list} self.clients("neutron").list_floatingips.return_value = fips_dict self.assertEqual(self.scenario._list_floating_ips(), self.clients("neutron").list_floatingips.return_value) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_floating_ips") def test_delete_floating_ip(self): fip = {"floatingip": {"id": "fake-id"}} self.scenario._delete_floating_ip(fip["floatingip"]) self.clients("neutron").delete_floatingip.assert_called_once_with( fip["floatingip"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_floating_ip") @ddt.data( {}, {"router_create_args": {"admin_state_up": False}}, {"network_create_args": {"router:external": True}, "subnet_create_args": {"allocation_pools": []}, "subnet_cidr_start": "default_cidr", "subnets_per_network": 3, "router_create_args": {"admin_state_up": False}}) @ddt.unpack def test_create_network_structure(self, network_create_args=None, subnet_create_args=None, subnet_cidr_start=None, subnets_per_network=None, router_create_args=None): network = mock.MagicMock() 
router_create_args = router_create_args or {} subnets = [] routers = [] router_create_calls = [] for i in range(subnets_per_network or 1): subnets.append(mock.MagicMock()) routers.append(mock.MagicMock()) router_create_calls.append(mock.call(router_create_args)) self.scenario._create_network = mock.Mock(return_value=network) self.scenario._create_subnets = mock.Mock(return_value=subnets) self.scenario._create_router = mock.Mock(side_effect=routers) self.scenario._add_interface_router = mock.Mock() actual = self.scenario._create_network_structure(network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) self.assertEqual(actual, (network, subnets, routers)) self.scenario._create_network.assert_called_once_with( network_create_args or {}) self.scenario._create_subnets.assert_called_once_with( network, subnet_create_args, subnet_cidr_start, subnets_per_network) self.scenario._create_router.assert_has_calls(router_create_calls) add_iface_calls = [mock.call(subnets[i]["subnet"], routers[i]["router"]) for i in range(subnets_per_network or 1)] self.scenario._add_interface_router.assert_has_calls(add_iface_calls) def test_delete_v1_pool(self): pool = {"pool": {"id": "fake-id"}} self.scenario._delete_v1_pool(pool["pool"]) self.clients("neutron").delete_pool.assert_called_once_with( pool["pool"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_pool") def test_update_pool(self): expected_pool = { "pool": { "name": self.scenario.generate_random_name.return_value, "admin_state_up": False, "fakearg": "fake" } } self.clients("neutron").update_pool.return_value = expected_pool pool = {"pool": {"name": "pool-name", "id": "pool-id"}} pool_update_args = {"name": "foo", "admin_state_up": False, "fakearg": "fake"} result_pool = self.scenario._update_v1_pool(pool, **pool_update_args) self.assertEqual(result_pool, expected_pool) self.clients("neutron").update_pool.assert_called_once_with( pool["pool"]["id"], 
expected_pool) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_pool") def test_list_v1_pools(self): pools_list = [] pools_dict = {"pools": pools_list} self.clients("neutron").list_pools.return_value = pools_dict return_pools_dict = self.scenario._list_v1_pools() self.assertEqual(pools_dict, return_pools_dict) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_pools") def test_list_v1_vips(self): vips_list = [] vips_dict = {"vips": vips_list} self.clients("neutron").list_vips.return_value = vips_dict return_vips_dict = self.scenario._list_v1_vips() self.assertEqual(vips_dict, return_vips_dict) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_vips") def test_delete_v1_vip(self): vip = {"vip": {"id": "fake-id"}} self.scenario._delete_v1_vip(vip["vip"]) self.clients("neutron").delete_vip.assert_called_once_with( vip["vip"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_vip") def test_update_v1_vip(self): expected_vip = { "vip": { "name": self.scenario.generate_random_name.return_value, "admin_state_up": False } } self.clients("neutron").update_vip.return_value = expected_vip vip = {"vip": {"name": "vip-name", "id": "vip-id"}} vip_update_args = {"name": "foo", "admin_state_up": False} result_vip = self.scenario._update_v1_vip(vip, **vip_update_args) self.assertEqual(result_vip, expected_vip) self.clients("neutron").update_vip.assert_called_once_with( vip["vip"]["id"], expected_vip) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_vip") @mock.patch(NEUTRON_UTILS + "NeutronScenario.generate_random_name") def test_create_security_group(self, mock_generate_random_name): security_group_create_args = {"description": "Fake security group"} expected_security_group = { "security_group": { "id": "fake-id", "name": self.scenario.generate_random_name.return_value, "description": "Fake security group" } } 
self.clients("neutron").create_security_group = mock.Mock( return_value=expected_security_group) security_group_data = { "security_group": {"name": "random_name", "description": "Fake security group"} } resultant_security_group = self.scenario._create_security_group( **security_group_create_args) self.assertEqual(expected_security_group, resultant_security_group) self.clients("neutron").create_security_group.assert_called_once_with( security_group_data) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_security_group") def test_list_security_groups(self): security_groups_list = [{"id": "security-group-id"}] security_groups_dict = {"security_groups": security_groups_list} self.clients("neutron").list_security_groups = mock.Mock( return_value=security_groups_dict) self.assertEqual( self.scenario._list_security_groups(), self.clients("neutron").list_security_groups.return_value) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_security_groups") def test_delete_security_group(self): security_group = {"security_group": {"id": "fake-id"}} self.scenario._delete_security_group(security_group) self.clients("neutron").delete_security_group.assert_called_once_with( security_group["security_group"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_security_group") def test_update_security_group(self): security_group = { "security_group": { "id": "security-group-id", "description": "Not updated" } } expected_security_group = { "security_group": { "id": "security-group-id", "name": self.scenario.generate_random_name.return_value, "description": "Updated" } } self.clients("neutron").update_security_group = mock.Mock( return_value=expected_security_group) result_security_group = self.scenario._update_security_group( security_group, description="Updated") self.clients("neutron").update_security_group.assert_called_once_with( security_group["security_group"]["id"], {"security_group": { 
"description": "Updated", "name": self.scenario.generate_random_name.return_value}} ) self.assertEqual(result_security_group, expected_security_group) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_security_group") @ddt.data( {"networks": [{"subnets": "subnet-id"}]}, {"pool_create_args": None, "networks": [{"subnets": ["subnet-id"]}]}, {"pool_create_args": {}, "networks": [{"subnets": ["subnet-id"]}]}, {"pool_create_args": {"name": "given-name"}, "networks": [{"subnets": ["subnet-id"]}]}, ) @ddt.unpack def test__create_v1_pools(self, networks, pool_create_args=None): pool_create_args = pool_create_args or {} pool = {"pool": {"id": "pool-id"}} self.scenario._create_lb_pool = mock.Mock(return_value=pool) resultant_pools = self.scenario._create_v1_pools( networks=networks, **pool_create_args) if networks: subnets = [] [subnets.extend(net["subnets"]) for net in networks] self.scenario._create_lb_pool.assert_has_calls( [mock.call(subnet, atomic_action=False, **pool_create_args) for subnet in subnets]) self.assertEqual(resultant_pools, [pool] * len(subnets)) self._test_atomic_action_timer( self.scenario.atomic_actions(), "neutron.create_%s_pools" % len(subnets)) @ddt.data( {"subnet_id": "foo-id"}, {"pool_create_args": None, "subnet_id": "foo-id"}, {"pool_create_args": {}, "subnet_id": "foo-id"}, {"pool_create_args": {"name": "given-name"}, "subnet_id": "foo-id"}, {"subnet_id": "foo-id", "atomic_action": False}, {"pool_create_args": None, "subnet_id": "foo-id", "atomic_action": False}, {"pool_create_args": {}, "subnet_id": "foo-id", "atomic_action": False}, {"pool_create_args": {"name": "given-name"}, "subnet_id": "foo-id", "atomic_action": False}, ) @ddt.unpack def test__create_lb_pool(self, subnet_id=None, atomic_action=True, pool_create_args=None): pool = {"pool": {"id": "pool-id"}} pool_create_args = pool_create_args or {} if pool_create_args.get("name") is None: self.generate_random_name = mock.Mock(return_value="random_name") 
self.clients("neutron").create_pool.return_value = pool args = {"lb_method": "ROUND_ROBIN", "protocol": "HTTP", "name": "random_name", "subnet_id": subnet_id} args.update(pool_create_args) expected_pool_data = {"pool": args} resultant_pool = self.scenario._create_lb_pool( subnet_id=subnet_id, atomic_action=atomic_action, **pool_create_args) self.assertEqual(resultant_pool, pool) self.clients("neutron").create_pool.assert_called_once_with( expected_pool_data) if atomic_action: self._test_atomic_action_timer( self.scenario.atomic_actions(), "neutron.create_pool") @ddt.data( {}, {"vip_create_args": {}}, {"vip_create_args": {"name": "given-name"}}, ) @ddt.unpack def test__create_v1_vip(self, vip_create_args=None): vip = {"vip": {"id": "vip-id"}} pool = {"pool": {"id": "pool-id", "subnet_id": "subnet-id"}} vip_create_args = vip_create_args or {} if vip_create_args.get("name") is None: self.scenario.generate_random_name = mock.Mock( return_value="random_name") self.clients("neutron").create_vip.return_value = vip args = {"protocol_port": 80, "protocol": "HTTP", "name": "random_name", "subnet_id": pool["pool"]["subnet_id"], "pool_id": pool["pool"]["id"]} args.update(vip_create_args) expected_vip_data = {"vip": args} resultant_vip = self.scenario._create_v1_vip(pool, **vip_create_args) self.assertEqual(resultant_vip, vip) self.clients("neutron").create_vip.assert_called_once_with( expected_vip_data) @ddt.data( {}, {"floating_ip_args": {}}, {"floating_ip_args": {"floating_ip_address": "1.0.0.1"}}, ) @ddt.unpack def test__create_floating_ip(self, floating_ip_args=None): floating_network = "floating" fip = {"floatingip": {"id": "fip-id"}} network_id = "net-id" floating_ip_args = floating_ip_args or {} self.clients("neutron").create_floatingip.return_value = fip mock_get_network_id = self.scenario._get_network_id = mock.Mock() mock_get_network_id.return_value = network_id args = {"floating_network_id": network_id} args.update(floating_ip_args) expected_fip_data = 
{"floatingip": args} resultant_fip = self.scenario._create_floatingip( floating_network, **floating_ip_args) self.assertEqual(resultant_fip, fip) self.clients("neutron").create_floatingip.assert_called_once_with( expected_fip_data) mock_get_network_id.assert_called_once_with(floating_network) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_floating_ip") @ddt.data( {}, {"healthmonitor_create_args": {}}, {"healthmonitor_create_args": {"type": "TCP"}}, {"atomic_action": False}, {"atomic_action": False, "healthmonitor_create_args": {"type": "TCP"}}, {"healthmonitor_create_args": {}, "atomic_action": False}, ) @ddt.unpack def test__create_v1_healthmonitor(self, atomic_action=True, healthmonitor_create_args=None): hm = {"health_monitor": {"id": "hm-id"}} healthmonitor_create_args = healthmonitor_create_args or {} self.clients("neutron").create_health_monitor.return_value = hm args = {"type": "PING", "delay": 20, "timeout": 10, "max_retries": 3} args.update(healthmonitor_create_args) expected_hm_data = {"health_monitor": args} resultant_hm = self.scenario._create_v1_healthmonitor( atomic_action=atomic_action, **healthmonitor_create_args) self.assertEqual(resultant_hm, hm) self.clients("neutron").create_health_monitor.assert_called_once_with( expected_hm_data) if atomic_action: self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_healthmonitor") def test_list_v1_healthmonitors(self): hm_list = [] hm_dict = {"health_monitors": hm_list} self.clients("neutron").list_health_monitors.return_value = hm_dict return_hm_dict = self.scenario._list_v1_healthmonitors() self.assertEqual(hm_dict, return_hm_dict) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_healthmonitors") def test_delete_v1_healthmonitor(self): healthmonitor = {"health_monitor": {"id": "fake-id"}} self.scenario._delete_v1_healthmonitor(healthmonitor["health_monitor"]) 
self.clients("neutron").delete_health_monitor.assert_called_once_with( healthmonitor["health_monitor"]["id"]) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.delete_healthmonitor") def test_update_healthmonitor(self): expected_hm = {"health_monitor": {"admin_state_up": False}} mock_update = self.clients("neutron").update_health_monitor mock_update.return_value = expected_hm hm = {"health_monitor": {"id": "pool-id"}} healthmonitor_update_args = {"admin_state_up": False} result_hm = self.scenario._update_v1_healthmonitor( hm, **healthmonitor_update_args) self.assertEqual(result_hm, expected_hm) mock_update.assert_called_once_with( hm["health_monitor"]["id"], expected_hm) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.update_healthmonitor") def test_update_loadbalancer_resource(self): lb = {"id": "1", "provisioning_status": "READY"} new_lb = {"id": "1", "provisioning_status": "ACTIVE"} self.clients("neutron").show_loadbalancer.return_value = { "loadbalancer": new_lb} return_lb = self.scenario.update_loadbalancer_resource(lb) self.clients("neutron").show_loadbalancer.assert_called_once_with( lb["id"]) self.assertEqual(new_lb, return_lb) def test_update_loadbalancer_resource_not_found(self): from neutronclient.common import exceptions as n_exceptions lb = {"id": "1", "provisioning_status": "READY"} self.clients("neutron").show_loadbalancer.side_effect = ( n_exceptions.NotFound) self.assertRaises(exceptions.GetResourceNotFound, self.scenario.update_loadbalancer_resource, lb) self.clients("neutron").show_loadbalancer.assert_called_once_with( lb["id"]) def test_update_loadbalancer_resource_failure(self): from neutronclient.common import exceptions as n_exceptions lb = {"id": "1", "provisioning_status": "READY"} self.clients("neutron").show_loadbalancer.side_effect = ( n_exceptions.Forbidden) self.assertRaises(exceptions.GetResourceFailure, self.scenario.update_loadbalancer_resource, lb) 
self.clients("neutron").show_loadbalancer.assert_called_once_with( lb["id"]) def test__create_lbaasv2_loadbalancer(self): neutronclient = self.clients("neutron") create_args = {"name": "s_rally", "vip_subnet_id": "1", "fake": "fake"} new_lb = {"id": "1", "provisioning_status": "ACTIVE"} self.scenario.generate_random_name = mock.Mock( return_value="s_rally") self.mock_wait_for_status.mock.return_value = new_lb return_lb = self.scenario._create_lbaasv2_loadbalancer( "1", fake="fake") neutronclient.create_loadbalancer.assert_called_once_with( {"loadbalancer": create_args}) self.assertEqual(new_lb, return_lb) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.create_lbaasv2_loadbalancer") def test__list_lbaasv2_loadbalancers(self): value = {"loadbalancer": [{"id": "1", "name": "s_rally"}]} self.clients("neutron").list_loadbalancers.return_value = value return_value = self.scenario._list_lbaasv2_loadbalancers( True, fake="fake") (self.clients("neutron").list_loadbalancers .assert_called_once_with(True, fake="fake")) self.assertEqual(value, return_value) self._test_atomic_action_timer(self.scenario.atomic_actions(), "neutron.list_lbaasv2_loadbalancers") class NeutronScenarioFunctionalTestCase(test.FakeClientsScenarioTestCase): @mock.patch(NEUTRON_UTILS + "network_wrapper.generate_cidr") def test_functional_create_network_and_subnets(self, mock_generate_cidr): scenario = utils.NeutronScenario(context=self.context) network_create_args = {} subnet_create_args = {} subnets_per_network = 5 subnet_cidr_start = "1.1.1.0/24" cidrs = ["1.1.%d.0/24" % i for i in range(subnets_per_network)] cidrs_ = iter(cidrs) mock_generate_cidr.side_effect = lambda **kw: next(cidrs_) network, subnets = scenario._create_network_and_subnets( network_create_args, subnet_create_args, subnets_per_network, subnet_cidr_start) # This checks both data (cidrs seem to be enough) and subnets number result_cidrs = sorted([s["subnet"]["cidr"] for s in subnets]) self.assertEqual(cidrs, 
result_cidrs) rally-0.9.1/tests/unit/plugins/openstack/scenarios/neutron/test_network.py0000664000567000056710000004263113073417720030372 0ustar jenkinsjenkins00000000000000# Copyright 2014: Intel Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally.plugins.openstack.scenarios.neutron import network from tests.unit import test BASE = "rally.plugins.openstack.scenarios.neutron.network" @ddt.ddt class NeutronNetworksTestCase(test.ScenarioTestCase): @ddt.data( {"network_create_args": {}}, {"network_create_args": {"name": "given-name"}}, {"network_create_args": {"provider:network_type": "vxlan"}} ) @ddt.unpack @mock.patch("%s.CreateAndListNetworks._list_networks" % BASE) @mock.patch("%s.CreateAndListNetworks._create_network" % BASE) def test_create_and_list_networks(self, mock__create_network, mock__list_networks, network_create_args): scenario = network.CreateAndListNetworks(self.context) scenario.run(network_create_args=network_create_args) mock__create_network.assert_called_once_with(network_create_args) mock__list_networks.assert_called_once_with() mock__create_network.reset_mock() mock__list_networks.reset_mock() @ddt.data( {"network_create_args": {}}, {"network_create_args": {"name": "given-name"}}, ) @ddt.unpack @mock.patch("%s.CreateAndShowNetwork._show_network" % BASE) @mock.patch("%s.CreateAndShowNetwork._create_network" % BASE) def test_create_and_show_network(self, mock__create_network, mock__show_network, 
network_create_args): scenario = network.CreateAndShowNetwork(self.context) mock_net = mock.Mock() mock__create_network.return_value = mock_net scenario.run(network_create_args=network_create_args) mock__create_network.assert_called_once_with(network_create_args) mock__show_network.assert_called_once_with(mock_net) mock__create_network.reset_mock() mock__show_network.reset_mock() @mock.patch("%s.CreateAndUpdateNetworks._update_network" % BASE) @mock.patch("%s.CreateAndUpdateNetworks._create_network" % BASE, return_value={ "network": { "id": "network-id", "name": "network-name", "admin_state_up": False } }) def test_create_and_update_networks(self, mock__create_network, mock__update_network): scenario = network.CreateAndUpdateNetworks(self.context) network_update_args = {"name": "_updated", "admin_state_up": True} # Default options scenario.run(network_update_args=network_update_args) mock__create_network.assert_called_once_with({}) mock__update_network.assert_has_calls( [mock.call( mock__create_network.return_value, network_update_args )]) mock__create_network.reset_mock() mock__update_network.reset_mock() # Explicit network name is specified network_create_args = { "name": "network-name", "admin_state_up": False } scenario.run(network_create_args=network_create_args, network_update_args=network_update_args) mock__create_network.assert_called_once_with(network_create_args) mock__update_network.assert_has_calls( [mock.call(mock__create_network.return_value, network_update_args)]) @mock.patch("%s.CreateAndDeleteNetworks._delete_network" % BASE) @mock.patch("%s.CreateAndDeleteNetworks._create_network" % BASE) def test_create_and_delete_networks(self, mock__create_network, mock__delete_network): scenario = network.CreateAndDeleteNetworks(self.context) # Default options network_create_args = {} scenario.run() mock__create_network.assert_called_once_with(network_create_args) self.assertTrue(mock__delete_network.call_count) mock__create_network.reset_mock() 
mock__delete_network.reset_mock() # Explicit network name is specified network_create_args = {"name": "given-name"} scenario.run(network_create_args=network_create_args) mock__create_network.assert_called_once_with(network_create_args) self.assertTrue(mock__delete_network.call_count) def test_create_and_list_subnets(self): network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_cidr_start = "default_cidr" subnets_per_network = 5 net = mock.MagicMock() scenario = network.CreateAndListSubnets(self.context) scenario._create_network = mock.Mock(return_value=net) scenario._create_subnets = mock.Mock() scenario._list_subnets = mock.Mock() scenario.run(network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network) scenario._create_network.assert_called_once_with( network_create_args) scenario._create_subnets.assert_called_once_with( net, subnet_create_args, subnet_cidr_start, subnets_per_network) scenario._list_subnets.assert_called_once_with() def test_create_and_update_subnets(self): network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_update_args = {"enabled_dhcp": True} subnet_cidr_start = "default_cidr" subnets_per_network = 5 net = mock.MagicMock() subnets = [mock.MagicMock() for _ in range(subnets_per_network)] scenario = network.CreateAndUpdateSubnets(self.context) scenario._create_network = mock.Mock(return_value=net) scenario._create_subnets = mock.Mock(return_value=subnets) scenario._update_subnet = mock.Mock() scenario.run(subnet_update_args, network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network) scenario._create_network.assert_called_once_with( network_create_args) scenario._create_subnets.assert_called_once_with( net, subnet_create_args, subnet_cidr_start, 
subnets_per_network) scenario._update_subnet.assert_has_calls( [mock.call(s, subnet_update_args) for s in subnets]) def test_create_and_delete_subnets(self): network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_cidr_start = "default_cidr" subnets_per_network = 5 net = mock.MagicMock() subnets = [mock.MagicMock() for _ in range(subnets_per_network)] scenario = network.CreateAndDeleteSubnets(self.context) scenario._get_or_create_network = mock.Mock(return_value=net) scenario._create_subnets = mock.Mock(return_value=subnets) scenario._delete_subnet = mock.Mock() scenario.run(network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network) scenario._get_or_create_network.assert_called_once_with( network_create_args) scenario._create_subnets.assert_called_once_with( net, subnet_create_args, subnet_cidr_start, subnets_per_network) scenario._delete_subnet.assert_has_calls( [mock.call(s) for s in subnets]) def test_create_and_list_routers(self): network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_cidr_start = "default_cidr" subnets_per_network = 5 router_create_args = {"admin_state_up": True} scenario = network.CreateAndListRouters(self.context) scenario._create_network_structure = mock.Mock() scenario._list_routers = mock.Mock() scenario.run(network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network, router_create_args=router_create_args) scenario._create_network_structure.assert_called_once_with( network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) scenario._list_routers.assert_called_once_with() def test_list_agents(self): agent_args = { "F": "id", "sort-dir": "asc" } scenario = network.ListAgents(self.context) scenario._list_agents = 
mock.Mock() scenario.run(agent_args=agent_args) scenario._list_agents.assert_called_once_with(**agent_args) def test_create_and_update_routers(self): router_update_args = {"admin_state_up": False} network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_cidr_start = "default_cidr" subnets_per_network = 5 router_create_args = {"admin_state_up": True} net = mock.MagicMock() subnets = [mock.MagicMock() for i in range(subnets_per_network)] routers = [mock.MagicMock() for i in range(subnets_per_network)] scenario = network.CreateAndUpdateRouters(self.context) scenario._create_network_structure = mock.Mock( return_value=(net, subnets, routers)) scenario._update_router = mock.Mock() scenario.run(router_update_args, network_create_args=network_create_args, subnet_create_args=subnet_create_args, subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network, router_create_args=router_create_args) scenario._create_network_structure.assert_called_once_with( network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) update_calls = [mock.call(router, router_update_args) for router in routers] scenario._update_router.assert_has_calls(update_calls) def test_create_and_delete_routers(self): network_create_args = {"router:external": True} subnet_create_args = {"allocation_pools": []} subnet_cidr_start = "default_cidr" subnets_per_network = 5 router_create_args = {"admin_state_up": True} net = mock.MagicMock() subnets = [mock.MagicMock() for i in range(subnets_per_network)] routers = [mock.MagicMock() for i in range(subnets_per_network)] scenario = network.CreateAndDeleteRouters(self.context) scenario._create_network_structure = mock.Mock( return_value=(net, subnets, routers)) scenario._remove_interface_router = mock.Mock() scenario._delete_router = mock.Mock() scenario.run(network_create_args=network_create_args, subnet_create_args=subnet_create_args, 
subnet_cidr_start=subnet_cidr_start, subnets_per_network=subnets_per_network, router_create_args=router_create_args) scenario._create_network_structure.assert_called_once_with( network_create_args, subnet_create_args, subnet_cidr_start, subnets_per_network, router_create_args) scenario._remove_interface_router.assert_has_calls([ mock.call(subnets[i]["subnet"], routers[i]["router"]) for i in range(subnets_per_network)]) scenario._delete_router.assert_has_calls( [mock.call(router) for router in routers]) def test_create_and_list_ports(self): port_create_args = {"allocation_pools": []} ports_per_network = 10 network_create_args = {"router:external": True} net = mock.MagicMock() scenario = network.CreateAndListPorts(self.context) scenario._get_or_create_network = mock.Mock(return_value=net) scenario._create_port = mock.MagicMock() scenario._list_ports = mock.Mock() scenario.run(network_create_args=network_create_args, port_create_args=port_create_args, ports_per_network=ports_per_network) scenario._get_or_create_network.assert_called_once_with( network_create_args) scenario._create_port.assert_has_calls( [mock.call(net, port_create_args) for _ in range(ports_per_network)]) scenario._list_ports.assert_called_once_with() def test_create_and_update_ports(self): port_update_args = {"admin_state_up": False}, port_create_args = {"allocation_pools": []} ports_per_network = 10 network_create_args = {"router:external": True} net = mock.MagicMock() ports = [mock.MagicMock() for _ in range(ports_per_network)] scenario = network.CreateAndUpdatePorts(self.context) scenario._get_or_create_network = mock.Mock(return_value=net) scenario._create_port = mock.Mock(side_effect=ports) scenario._update_port = mock.Mock() scenario.run(port_update_args, network_create_args=network_create_args, port_create_args=port_create_args, ports_per_network=ports_per_network) scenario._get_or_create_network.assert_called_once_with( network_create_args) scenario._create_port.assert_has_calls( 
[mock.call(net, port_create_args) for _ in range(ports_per_network)]) scenario._update_port.assert_has_calls( [mock.call(p, port_update_args) for p in ports]) def test_create_and_delete_ports(self): port_create_args = {"allocation_pools": []} ports_per_network = 10 network_create_args = {"router:external": True} net = mock.MagicMock() ports = [mock.MagicMock() for _ in range(ports_per_network)] scenario = network.CreateAndDeletePorts(self.context) scenario._get_or_create_network = mock.Mock(return_value=net) scenario._create_port = mock.Mock(side_effect=ports) scenario._delete_port = mock.Mock() scenario.run(network_create_args=network_create_args, port_create_args=port_create_args, ports_per_network=ports_per_network) scenario._get_or_create_network.assert_called_once_with( network_create_args) scenario._create_port.assert_has_calls( [mock.call(net, port_create_args) for _ in range(ports_per_network)]) scenario._delete_port.assert_has_calls( [mock.call(p) for p in ports]) @ddt.data( {"floating_network": "ext-net"}, {"floating_network": "ext-net", "floating_ip_args": {"floating_ip_address": "1.1.1.1"}}, ) @ddt.unpack def test_create_and_list_floating_ips(self, floating_network=None, floating_ip_args=None): scenario = network.CreateAndListFloatingIps(self.context) floating_ip_args = floating_ip_args or {} scenario._create_floatingip = mock.Mock() scenario._list_floating_ips = mock.Mock() scenario.run(floating_network=floating_network, floating_ip_args=floating_ip_args) scenario._create_floatingip.assert_called_once_with( floating_network, **floating_ip_args) scenario._list_floating_ips.assert_called_once_with() @ddt.data( {"floating_network": "ext-net"}, {"floating_network": "ext-net", "floating_ip_args": {"floating_ip_address": "1.1.1.1"}}, ) @ddt.unpack def test_create_and_delete_floating_ips(self, floating_network=None, floating_ip_args=None): scenario = network.CreateAndDeleteFloatingIps(self.context) floating_ip_args = floating_ip_args or {} fip = 
{"floatingip": {"id": "floating-ip-id"}} scenario._create_floatingip = mock.Mock(return_value=fip) scenario._delete_floating_ip = mock.Mock() scenario.run(floating_network=floating_network, floating_ip_args=floating_ip_args) scenario._create_floatingip.assert_called_once_with( floating_network, **floating_ip_args) scenario._delete_floating_ip.assert_called_once_with( scenario._create_floatingip.return_value["floatingip"]) rally-0.9.1/tests/unit/plugins/openstack/scenarios/neutron/test_loadbalancer_v2.py0000775000567000056710000000424513073417717031727 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
@ddt.ddt
class NeutronLoadbalancerv2TestCase(test.TestCase):
    """Tests for the LBaaS v2 create-and-list scenario."""

    def _get_context(self):
        """Build a minimal context: one user, one tenant network/subnet."""
        ctx = test.get_test_context()
        ctx["user"] = {
            "id": "fake_user",
            "tenant_id": "fake_tenant",
            "credential": mock.MagicMock()
        }
        ctx["tenant"] = {
            "id": "fake_tenant",
            "networks": [{"id": "fake_net",
                          "subnets": ["fake_subnet"]}]
        }
        return ctx

    @ddt.data(
        {},
        {"lb_create_args": None},
        {"lb_create_args": {}},
        {"lb_create_args": {"name": "given-name"}},
    )
    @ddt.unpack
    def test_create_and_list_load_balancers(self, lb_create_args=None):
        """One load balancer per tenant subnet, then a single list call."""
        context = self._get_context()
        scenario = loadbalancer_v2.CreateAndListLoadbalancers(context)
        create_kwargs = lb_create_args or {}
        scenario._create_lbaasv2_loadbalancer = mock.Mock()
        scenario._list_lbaasv2_loadbalancers = mock.Mock()

        scenario.run(lb_create_args=create_kwargs)

        expected_creates = [
            mock.call(subnet, **create_kwargs)
            for net in context["tenant"]["networks"]
            for subnet in net.get("subnets", [])
        ]
        scenario._create_lbaasv2_loadbalancer.assert_has_calls(
            expected_creates)
        scenario._list_lbaasv2_loadbalancers.assert_called_once_with()
import ddt import mock from rally.plugins.openstack.scenarios.neutron import loadbalancer_v1 from tests.unit import test @ddt.ddt class NeutronLoadbalancerv1TestCase(test.TestCase): def _get_context(self): context = test.get_test_context() context.update({ "user": { "id": "fake_user", "tenant_id": "fake_tenant", "credential": mock.MagicMock() }, "tenant": {"id": "fake_tenant", "networks": [{"id": "fake_net", "subnets": ["fake_subnet"]}]}}) return context @ddt.data( {}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-name"}}, ) @ddt.unpack def test_create_and_list_pools(self, pool_create_args=None): scenario = loadbalancer_v1.CreateAndListPools(self._get_context()) pool_data = pool_create_args or {} networks = self._get_context()["tenant"]["networks"] scenario._create_v1_pools = mock.Mock() scenario._list_v1_pools = mock.Mock() scenario.run(pool_create_args=pool_create_args) scenario._create_v1_pools.assert_called_once_with(networks, **pool_data) scenario._list_v1_pools.assert_called_once_with() @ddt.data( {}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-name"}}, ) @ddt.unpack def test_create_and_delete_pools(self, pool_create_args=None): scenario = loadbalancer_v1.CreateAndDeletePools(self._get_context()) pools = [{ "pool": { "id": "pool-id" } }] pool_data = pool_create_args or {} networks = self._get_context()["tenant"]["networks"] scenario._create_v1_pools = mock.Mock(return_value=pools) scenario._delete_v1_pool = mock.Mock() scenario.run(pool_create_args=pool_create_args) self.assertEqual([mock.call(networks, **pool_data)], scenario._create_v1_pools.mock_calls) for _ in pools: self.assertEqual(1, scenario._delete_v1_pool.call_count) @ddt.data( {}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-name"}}, {"pool_update_args": None}, {"pool_update_args": {}}, {"pool_update_args": {"name": "updated-name"}}, {"pool_create_args": None, 
"pool_update_args": None}, {"pool_create_args": {"name": "given-name"}, "pool_update_args": {"name": "updated-name"}}, {"pool_create_args": None, "pool_update_args": {"name": "updated-name"}}, {"pool_create_args": None, "pool_update_args": {}}, {"pool_create_args": {}, "pool_update_args": None}, ) @ddt.unpack def test_create_and_update_pools(self, pool_create_args=None, pool_update_args=None): scenario = loadbalancer_v1.CreateAndUpdatePools(self._get_context()) pools = [{ "pool": { "id": "pool-id" } }] updated_pool = { "pool": { "id": "pool-id", "name": "updated-pool", "admin_state_up": True } } pool_data = pool_create_args or {} pool_update_args = pool_update_args or {} pool_update_args.update({"name": "_updated", "admin_state_up": True}) scenario._create_v1_pools = mock.Mock(return_value=pools) scenario._update_v1_pool = mock.Mock(return_value=updated_pool) networks = self._get_context()["tenant"]["networks"] scenario.run(pool_create_args=pool_data, pool_update_args=pool_update_args) self.assertEqual([mock.call(networks, **pool_data)], scenario._create_v1_pools.mock_calls) for pool in pools: scenario._update_v1_pool.assert_called_once_with( pool, **pool_update_args) @ddt.data( {}, {"vip_create_args": None}, {"vip_create_args": {}}, {"vip_create_args": {"name": "given-vip-name"}}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-pool-name"}}, ) @ddt.unpack def test_create_and_list_vips(self, pool_create_args=None, vip_create_args=None): scenario = loadbalancer_v1.CreateAndListVips(self._get_context()) pools = [{ "pool": { "id": "pool-id" } }] vip_data = vip_create_args or {} pool_data = pool_create_args or {} networks = self._get_context()["tenant"]["networks"] scenario._create_v1_pools = mock.Mock(return_value=pools) scenario._create_v1_vip = mock.Mock() scenario._list_v1_vips = mock.Mock() scenario.run(pool_create_args=pool_create_args, vip_create_args=vip_create_args) 
scenario._create_v1_pools.assert_called_once_with(networks, **pool_data) scenario._create_v1_vip.assert_has_calls( [mock.call(pool, **vip_data) for pool in pools]) scenario._list_v1_vips.assert_called_once_with() @ddt.data( {}, {"vip_create_args": None}, {"vip_create_args": {}}, {"vip_create_args": {"name": "given-name"}}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-pool-name"}}, ) @ddt.unpack def test_create_and_delete_vips(self, pool_create_args=None, vip_create_args=None): scenario = loadbalancer_v1.CreateAndDeleteVips(self._get_context()) pools = [{ "pool": { "id": "pool-id" } }] vip = { "vip": { "id": "vip-id" } } vip_data = vip_create_args or {} pool_data = pool_create_args or {} networks = self._get_context()["tenant"]["networks"] scenario._create_v1_pools = mock.Mock(return_value=pools) scenario._create_v1_vip = mock.Mock(return_value=vip) scenario._delete_v1_vip = mock.Mock() scenario.run(pool_create_args=pool_create_args, vip_create_args=vip_create_args) scenario._create_v1_pools.assert_called_once_with(networks, **pool_data) scenario._create_v1_vip.assert_has_calls( [mock.call(pool, **vip_data) for pool in pools]) scenario._delete_v1_vip.assert_has_calls([mock.call(vip["vip"])]) @ddt.data( {}, {"vip_create_args": None}, {"vip_create_args": {}}, {"vip_create_args": {"name": "given-vip-name"}}, {"pool_create_args": None}, {"pool_create_args": {}}, {"pool_create_args": {"name": "given-pool-name"}}, ) @ddt.unpack def test_create_and_update_vips(self, pool_create_args=None, vip_create_args=None, vip_update_args=None): scenario = loadbalancer_v1.CreateAndUpdateVips(self._get_context()) pools = [{ "pool": { "id": "pool-id", } }] expected_vip = { "vip": { "id": "vip-id", "name": "vip-name" } } updated_vip = { "vip": { "id": "vip-id", "name": "updated-vip-name" } } vips = [expected_vip] vip_data = vip_create_args or {} vip_update_data = vip_update_args or {} pool_data = pool_create_args or {} networks = 
self._get_context()["tenant"]["networks"] scenario._create_v1_pools = mock.Mock(return_value=pools) scenario._create_v1_vip = mock.Mock(return_value=expected_vip) scenario._update_v1_vip = mock.Mock(return_value=updated_vip) scenario.run(pool_create_args=pool_create_args, vip_create_args=vip_create_args, vip_update_args=vip_update_args) scenario._create_v1_pools.assert_called_once_with(networks, **pool_data) scenario._create_v1_vip.assert_has_calls( [mock.call(pool, **vip_data) for pool in pools]) scenario._update_v1_vip.assert_has_calls( [mock.call(vip, **vip_update_data) for vip in vips]) @ddt.data( {}, {"healthmonitor_create_args": None}, {"healthmonitor_create_args": {}}, {"healthmonitor_create_args": {"name": "given-name"}}, ) @ddt.unpack def test_create_and_list_healthmonitors(self, healthmonitor_create_args=None): scenario = loadbalancer_v1.CreateAndListHealthmonitors( self._get_context()) hm_data = healthmonitor_create_args or {} scenario._create_v1_healthmonitor = mock.Mock() scenario._list_v1_healthmonitors = mock.Mock() scenario.run(healthmonitor_create_args=healthmonitor_create_args) scenario._create_v1_healthmonitor.assert_called_once_with(**hm_data) scenario._list_v1_healthmonitors.assert_called_once_with() @ddt.data( {}, {"healthmonitor_create_args": None}, {"healthmonitor_create_args": {}}, {"healthmonitor_create_args": {"name": "given-name"}}, ) @ddt.unpack def test_create_and_delete_healthmonitors(self, healthmonitor_create_args=None): scenario = loadbalancer_v1.CreateAndDeleteHealthmonitors( self._get_context()) hm = {"health_monitor": {"id": "hm-id"}} hm_data = healthmonitor_create_args or {} scenario._create_v1_healthmonitor = mock.Mock(return_value=hm) scenario._delete_v1_healthmonitor = mock.Mock() scenario.run(healthmonitor_create_args=healthmonitor_create_args) scenario._create_v1_healthmonitor.assert_called_once_with(**hm_data) scenario._delete_v1_healthmonitor.assert_called_once_with( 
scenario._create_v1_healthmonitor.return_value["health_monitor"]) @ddt.data( {}, {"healthmonitor_create_args": None}, {"healthmonitor_create_args": {}}, {"healthmonitor_create_args": {"name": "given-name"}}, ) @ddt.unpack def test_create_and_update_healthmonitors(self, healthmonitor_create_args=None, healthmonitor_update_args=None): scenario = loadbalancer_v1.CreateAndUpdateHealthmonitors( self._get_context()) mock_random = loadbalancer_v1.random = mock.Mock() hm = {"healthmonitor": {"id": "hm-id"}} hm_data = healthmonitor_create_args or {} hm_update_data = healthmonitor_update_args or { "max_retries": mock_random.choice.return_value} scenario._create_v1_healthmonitor = mock.Mock(return_value=hm) scenario._update_v1_healthmonitor = mock.Mock() scenario.run(healthmonitor_create_args=healthmonitor_create_args, healthmonitor_update_args=healthmonitor_update_args) scenario._create_v1_healthmonitor.assert_called_once_with(**hm_data) scenario._update_v1_healthmonitor.assert_called_once_with( scenario._create_v1_healthmonitor.return_value, **hm_update_data) rally-0.9.1/tests/unit/plugins/openstack/scenarios/keystone/0000775000567000056710000000000013073420067025427 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/keystone/__init__.py0000664000567000056710000000000013073417717027536 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/scenarios/keystone/test_utils.py0000664000567000056710000003734213073417717030221 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally.plugins.openstack.scenarios.keystone import utils from tests.unit import fakes from tests.unit import test UTILS = "rally.plugins.openstack.scenarios.keystone.utils." @ddt.ddt class KeystoneScenarioTestCase(test.ScenarioTestCase): @mock.patch("uuid.uuid4", return_value="pwd") def test_user_create(self, mock_uuid4): scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.Mock(return_value="foobarov") result = scenario._user_create() self.assertEqual( self.admin_clients("keystone").users.create.return_value, result) self.admin_clients("keystone").users.create.assert_called_once_with( "foobarov", password=mock_uuid4.return_value, email="foobarov@rally.me") mock_uuid4.assert_called_with() self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_user") def test_update_user_enabled(self): user = mock.Mock() enabled = mock.Mock() scenario = utils.KeystoneScenario(self.context) scenario._update_user_enabled(user, enabled) self.admin_clients( "keystone").users.update_enabled.assert_called_once_with(user, enabled) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.update_user_enabled") def test_token_validate(self): token = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._token_validate(token) self.admin_clients( "keystone").tokens.validate.assert_called_once_with(token) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.validate_token") def test_token_authenticate(self): name = mock.MagicMock() psswd = "foopsswd" tenant_id = 
mock.MagicMock() tenant_name = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._authenticate_token(name, psswd, tenant_id, tenant_name) self.admin_clients( "keystone").tokens.authenticate.assert_called_once_with( name, tenant_id, tenant_name, "foopsswd") self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.token_authenticate") @mock.patch("rally.plugins.openstack.wrappers.keystone.wrap") def test_role_create(self, mock_wrap, **kwargs): role = mock.MagicMock() mock_wrap.return_value.create_role.return_value = role scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.MagicMock() return_role = scenario._role_create(**kwargs) self.assertEqual(role, return_role) mock_wrap.assert_called_once_with(scenario.admin_clients("keystone")) mock_wrap.return_value.create_role.assert_called_once_with( scenario.generate_random_name.return_value, **kwargs) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_role") @mock.patch("rally.plugins.openstack.wrappers.keystone.wrap") def test_role_delete(self, mock_wrap): role = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._role_delete(role.id) mock_wrap.assert_called_once_with(scenario.admin_clients("keystone")) mock_wrap.return_value.delete_role.assert_called_once_with(role.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.role_delete") def test_list_roles_for_user(self): user = mock.MagicMock() tenant = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._list_roles_for_user(user, tenant) self.admin_clients( "keystone").roles.roles_for_user.assert_called_once_with(user, tenant) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.list_roles") def test_role_add(self): user = mock.MagicMock() role = mock.MagicMock() tenant = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._role_add(user=user.id, role=role.id, tenant=tenant.id) 
self.admin_clients( "keystone").roles.add_user_role.assert_called_once_with(user.id, role.id, tenant.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.add_role") def test_user_delete(self): resource = fakes.FakeResource() resource.delete = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._resource_delete(resource) resource.delete.assert_called_once_with() r = "keystone.delete_%s" % resource.__class__.__name__.lower() self._test_atomic_action_timer(scenario.atomic_actions(), r) def test_role_remove(self): user = mock.MagicMock() role = mock.MagicMock() tenant = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._role_remove(user=user, role=role, tenant=tenant) self.admin_clients( "keystone").roles.remove_user_role.assert_called_once_with(user, role, tenant) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.remove_role") def test_tenant_create(self): scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.Mock() result = scenario._tenant_create() self.assertEqual( self.admin_clients("keystone").tenants.create.return_value, result) self.admin_clients("keystone").tenants.create.assert_called_once_with( scenario.generate_random_name.return_value) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_tenant") @ddt.data( {"service_type": "service_type"}, {"service_type": None} ) def test_service_create(self, service_type): scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.Mock() result = scenario._service_create( service_type=service_type, description="description") self.assertEqual( self.admin_clients("keystone").services.create.return_value, result) if service_type == "service_type": self.admin_clients( "keystone").services.create.assert_called_once_with( scenario.generate_random_name.return_value, service_type, description="description") elif service_type is None: self.admin_clients( 
"keystone").services.create.assert_called_once_with( scenario.generate_random_name.return_value, "rally_test_type", description="description") self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_service") def test_tenant_create_with_users(self): tenant = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.Mock(return_value="foobarov") scenario._users_create(tenant, users_per_tenant=1) self.admin_clients("keystone").users.create.assert_called_once_with( "foobarov", password="foobarov", email="foobarov@rally.me", tenant_id=tenant.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_users") def test_list_users(self): scenario = utils.KeystoneScenario(self.context) scenario._list_users() self.admin_clients("keystone").users.list.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.list_users") def test_list_tenants(self): scenario = utils.KeystoneScenario(self.context) scenario._list_tenants() self.admin_clients("keystone").tenants.list.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.list_tenants") def test_list_services(self): scenario = utils.KeystoneScenario(self.context) scenario._list_services() self.admin_clients("keystone").services.list.assert_called_once_with() self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.service_list") def test_delete_service(self): service = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._delete_service(service_id=service.id) self.admin_clients("keystone").services.delete.assert_called_once_with( service.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.delete_service") def test_get_tenant(self): tenant = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._get_tenant(tenant_id=tenant.id) self.admin_clients("keystone").tenants.get.assert_called_once_with( tenant.id) 
self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.get_tenant") def test_get_user(self): user = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._get_user(user_id=user.id) self.admin_clients("keystone").users.get.assert_called_once_with( user.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.get_user") def test_get_role(self): role = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._get_role(role_id=role.id) self.admin_clients("keystone").roles.get.assert_called_once_with( role.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.get_role") def test_get_service(self): service = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._get_service(service_id=service.id) self.admin_clients("keystone").services.get.assert_called_once_with( service.id) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.get_service") def test_update_tenant(self): tenant = mock.MagicMock() description = "new description" scenario = utils.KeystoneScenario(self.context) scenario.generate_random_name = mock.Mock() scenario._update_tenant(tenant=tenant, description=description) self.admin_clients("keystone").tenants.update.assert_called_once_with( tenant.id, scenario.generate_random_name.return_value, description) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.update_tenant") def test_update_user_password(self): password = "pswd" user = mock.MagicMock() scenario = utils.KeystoneScenario(self.context) scenario._update_user_password(password=password, user_id=user.id) self.admin_clients( "keystone").users.update_password.assert_called_once_with(user.id, password) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.update_user_password") @mock.patch("rally.plugins.openstack.scenario.OpenStackScenario." 
"admin_clients") def test_update_user_password_v3(self, mock_open_stack_scenario_admin_clients): password = "pswd" user = mock.MagicMock() scenario = utils.KeystoneScenario() type(mock_open_stack_scenario_admin_clients.return_value).version = ( mock.PropertyMock(return_value="v3")) scenario._update_user_password(password=password, user_id=user.id) mock_open_stack_scenario_admin_clients( "keystone").users.update.assert_called_once_with( user.id, password=password) self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.update_user_password") def test_get_service_by_name(self): scenario = utils.KeystoneScenario(self.context) svc_foo, svc_bar = mock.Mock(), mock.Mock() scenario._list_services = mock.Mock(return_value=[svc_foo, svc_bar]) self.assertEqual(scenario._get_service_by_name(svc_bar.name), svc_bar) self.assertIsNone(scenario._get_service_by_name("spam")) @mock.patch(UTILS + "KeystoneScenario.clients") def test_create_ec2credentials(self, mock_clients): scenario = utils.KeystoneScenario(self.context) creds = mock.Mock() mock_clients("keystone").ec2.create.return_value = creds create_creds = scenario._create_ec2credentials("user_id", "tenant_id") self.assertEqual(create_creds, creds) mock_clients("keystone").ec2.create.assert_called_once_with( "user_id", "tenant_id") self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.create_ec2creds") @mock.patch(UTILS + "KeystoneScenario.clients") def test_list_ec2credentials(self, mock_clients): scenario = utils.KeystoneScenario(self.context) creds_list = mock.Mock() mock_clients("keystone").ec2.list.return_value = creds_list list_creds = scenario._list_ec2credentials("user_id") self.assertEqual(list_creds, creds_list) mock_clients("keystone").ec2.list.assert_called_once_with("user_id") self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.list_ec2creds") @mock.patch(UTILS + "KeystoneScenario.clients") def test_delete_ec2credentials(self, mock_clients): scenario = 
utils.KeystoneScenario(self.context) mock_clients("keystone").ec2.delete = mock.MagicMock() scenario._delete_ec2credential("user_id", "access") mock_clients("keystone").ec2.delete.assert_called_once_with("user_id", "access") self._test_atomic_action_timer(scenario.atomic_actions(), "keystone.delete_ec2creds") rally-0.9.1/tests/unit/plugins/openstack/scenarios/keystone/test_basic.py0000775000567000056710000003605513073417717030145 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally import exceptions from rally.plugins.openstack.scenarios.keystone import basic from tests.unit import test @ddt.ddt class KeystoneBasicTestCase(test.ScenarioTestCase): def get_test_context(self): context = super(KeystoneBasicTestCase, self).get_test_context() context.update({ "admin": { "id": "fake_user_id", "credential": mock.MagicMock() }, "user": { "id": "fake_user_id", "credential": mock.MagicMock() }, "tenant": {"id": "fake_tenant_id", "name": "fake_tenant_name"} }) return context def setUp(self): super(KeystoneBasicTestCase, self).setUp() patch = mock.patch( "rally.plugins.openstack.services.identity.identity.Identity") self.addCleanup(patch.stop) self.mock_identity = patch.start() def test_create_user(self): scenario = basic.CreateUser(self.context) scenario.run(password="tttt", project_id="id") self.mock_identity.return_value.create_user.assert_called_once_with( password="tttt", project_id="id") def test_create_delete_user(self): identity_service = self.mock_identity.return_value fake_email = "abcd" fake_user = identity_service.create_user.return_value scenario = basic.CreateDeleteUser(self.context) scenario.run(email=fake_email, enabled=True) identity_service.create_user.assert_called_once_with( email=fake_email, enabled=True) identity_service.delete_user.assert_called_once_with(fake_user.id) def test_create_user_set_enabled_and_delete(self): identity_service = self.mock_identity.return_value scenario = basic.CreateUserSetEnabledAndDelete(self.context) fake_email = "abcd" fake_user = identity_service.create_user.return_value scenario.run(enabled=True, email=fake_email) identity_service.create_user.assert_called_once_with( email=fake_email, enabled=True) identity_service.update_user.assert_called_once_with( fake_user.id, enabled=False) identity_service.delete_user.assert_called_once_with(fake_user.id) def test_user_authenticate_and_validate_token(self): identity_service = self.mock_identity.return_value scenario = 
basic.AuthenticateUserAndValidateToken(self.context) fake_token = identity_service.fetch_token.return_value scenario.run() identity_service.fetch_token.assert_called_once_with() identity_service.validate_token.assert_called_once_with(fake_token) def test_create_tenant(self): scenario = basic.CreateTenant(self.context) scenario.run(enabled=True) self.mock_identity.return_value.create_project.assert_called_once_with( enabled=True) def test_create_tenant_with_users(self): identity_service = self.mock_identity.return_value fake_project = identity_service.create_project.return_value number_of_users = 1 scenario = basic.CreateTenantWithUsers(self.context) scenario.run(users_per_tenant=number_of_users, enabled=True) identity_service.create_project.assert_called_once_with(enabled=True) identity_service.create_users.assert_called_once_with( fake_project.id, number_of_users=number_of_users) def test_create_and_list_users(self): scenario = basic.CreateAndListUsers(self.context) passwd = "tttt" project_id = "id" scenario.run(password=passwd, project_id=project_id) self.mock_identity.return_value.create_user.assert_called_once_with( password=passwd, project_id=project_id) self.mock_identity.return_value.list_users.assert_called_once_with() def test_create_and_list_tenants(self): identity_service = self.mock_identity.return_value scenario = basic.CreateAndListTenants(self.context) scenario.run(enabled=True) identity_service.create_project.assert_called_once_with(enabled=True) identity_service.list_projects.assert_called_once_with() def test_assign_and_remove_user_role(self): fake_tenant = self.context["tenant"]["id"] fake_user = self.context["user"]["id"] fake_role = mock.MagicMock() self.mock_identity.return_value.create_role.return_value = fake_role scenario = basic.AddAndRemoveUserRole(self.context) scenario.run() self.mock_identity.return_value.create_role.assert_called_once_with() self.mock_identity.return_value.add_role.assert_called_once_with( role_id=fake_role.id, 
user_id=fake_user, project_id=fake_tenant) self.mock_identity.return_value.revoke_role.assert_called_once_with( fake_role.id, user_id=fake_user, project_id=fake_tenant) def test_create_and_delete_role(self): fake_role = mock.MagicMock() self.mock_identity.return_value.create_role.return_value = fake_role scenario = basic.CreateAndDeleteRole(self.context) scenario.run() self.mock_identity.return_value.create_role.assert_called_once_with() self.mock_identity.return_value.delete_role.assert_called_once_with( fake_role.id) def test_create_and_get_role(self): fake_role = mock.MagicMock() self.mock_identity.return_value.create_role.return_value = fake_role scenario = basic.CreateAndGetRole(self.context) scenario.run() self.mock_identity.return_value.create_role.assert_called_once_with() self.mock_identity.return_value.get_role.assert_called_once_with( fake_role.id) def test_create_and_list_user_roles(self): scenario = basic.CreateAddAndListUserRoles(self.context) fake_tenant = self.context["tenant"]["id"] fake_user = self.context["user"]["id"] fake_role = mock.MagicMock() self.mock_identity.return_value.create_role.return_value = fake_role scenario.run() self.mock_identity.return_value.create_role.assert_called_once_with() self.mock_identity.return_value.add_role.assert_called_once_with( user_id=fake_user, role_id=fake_role.id, project_id=fake_tenant) self.mock_identity.return_value.list_roles.assert_called_once_with( user_id=fake_user, project_id=fake_tenant) def test_create_and_list_roles(self): # Positive case scenario = basic.CreateAddListRoles(self.context) create_kwargs = {"fakewargs": "name"} list_kwargs = {"fakewargs": "f"} self.mock_identity.return_value.create_role = mock.Mock( return_value="role1") self.mock_identity.return_value.list_roles = mock.Mock( return_value=("role1", "role2")) scenario.run(create_role_kwargs=create_kwargs, list_role_kwargs=list_kwargs) self.mock_identity.return_value.create_role.assert_called_once_with( **create_kwargs) 
self.mock_identity.return_value.list_roles.assert_called_once_with( **list_kwargs) # Negative case 1: role isn't created self.mock_identity.return_value.create_role.return_value = None self.assertRaises(exceptions.RallyAssertionError, scenario.run, create_role_kwargs=create_kwargs, list_role_kwargs=list_kwargs) self.mock_identity.return_value.create_role.assert_called_with( **create_kwargs) # Negative case 2: role was created but included into list self.mock_identity.return_value.create_role.return_value = "role3" self.assertRaises(exceptions.RallyAssertionError, scenario.run, create_role_kwargs=create_kwargs, list_role_kwargs=list_kwargs) self.mock_identity.return_value.create_role.assert_called_with( **create_kwargs) self.mock_identity.return_value.list_roles.assert_called_with( **list_kwargs) @ddt.data(None, "keystone", "fooservice") def test_get_entities(self, service_name): identity_service = self.mock_identity.return_value fake_project = identity_service.create_project.return_value fake_user = identity_service.create_user.return_value fake_role = identity_service.create_role.return_value fake_service = identity_service.create_service.return_value scenario = basic.GetEntities(self.context) scenario.run(service_name) identity_service.create_project.assert_called_once_with() identity_service.create_user.assert_called_once_with( project_id=fake_project.id) identity_service.create_role.assert_called_once_with() identity_service.get_project.assert_called_once_with(fake_project.id) identity_service.get_user.assert_called_once_with(fake_user.id) identity_service.get_role.assert_called_once_with(fake_role.id) if service_name is None: identity_service.create_service.assert_called_once_with() self.assertFalse(identity_service.get_service_by_name.called) identity_service.get_service.assert_called_once_with( fake_service.id) else: identity_service.get_service_by_name.assert_called_once_with( service_name) self.assertFalse(identity_service.create_service.called) 
identity_service.get_service.assert_called_once_with( identity_service.get_service_by_name.return_value.id) def test_create_and_delete_service(self): identity_service = self.mock_identity.return_value scenario = basic.CreateAndDeleteService(self.context) service_type = "test_service_type" description = "test_description" fake_service = identity_service.create_service.return_value scenario.run(service_type=service_type, description=description) identity_service.create_service.assert_called_once_with( service_type=service_type, description=description) identity_service.delete_service.assert_called_once_with( fake_service.id) def test_create_update_and_delete_tenant(self): identity_service = self.mock_identity.return_value scenario = basic.CreateUpdateAndDeleteTenant(self.context) gen_name = mock.MagicMock() basic.CreateUpdateAndDeleteTenant.generate_random_name = gen_name fake_project = identity_service.create_project.return_value scenario.run() identity_service.create_project.assert_called_once_with() identity_service.update_project.assert_called_once_with( fake_project.id, description=gen_name.return_value, name=gen_name.return_value) identity_service.delete_project(fake_project.id) def test_create_user_update_password(self): identity_service = self.mock_identity.return_value scenario = basic.CreateUserUpdatePassword(self.context) fake_password = "pswd" fake_user = identity_service.create_user.return_value scenario.generate_random_name = mock.MagicMock( return_value=fake_password) scenario.run() scenario.generate_random_name.assert_called_once_with() identity_service.create_user.assert_called_once_with() identity_service.update_user.assert_called_once_with( fake_user.id, password=fake_password) def test_create_and_update_user(self): identity_service = self.mock_identity.return_value scenario = basic.CreateAndUpdateUser(self.context) scenario.admin_clients("keystone").users.get = mock.MagicMock() fake_user = identity_service.create_user.return_value create_args = 
{"fakearg1": "f"} update_args = {"fakearg1": "fakearg"} setattr(self.admin_clients("keystone").users.get.return_value, "fakearg1", "fakearg") scenario.run(create_user_kwargs=create_args, update_user_kwargs=update_args) identity_service.create_user.assert_called_once_with(**create_args) identity_service.update_user.assert_called_once_with( fake_user.id, **update_args) def test_create_and_list_services(self): identity_service = self.mock_identity.return_value scenario = basic.CreateAndListServices(self.context) service_type = "test_service_type" description = "test_description" scenario.run(service_type=service_type, description=description) identity_service.create_service.assert_called_once_with( service_type=service_type, description=description) identity_service.list_services.assert_called_once_with() def test_create_and_list_ec2credentials(self): identity_service = self.mock_identity.return_value scenario = basic.CreateAndListEc2Credentials(self.context) scenario.run() identity_service.create_ec2credentials.assert_called_once_with( self.context["user"]["id"], project_id=self.context["tenant"]["id"]) identity_service.list_ec2credentials.assert_called_with( self.context["user"]["id"]) def test_create_and_delete_ec2credential(self): identity_service = self.mock_identity.return_value fake_creds = identity_service.create_ec2credentials.return_value scenario = basic.CreateAndDeleteEc2Credential(self.context) scenario.run() identity_service.create_ec2credentials.assert_called_once_with( self.context["user"]["id"], project_id=self.context["tenant"]["id"]) identity_service.delete_ec2credential.assert_called_once_with( self.context["user"]["id"], access=fake_creds.access) def test_add_and_remove_user_role(self): context = self.context tenant_id = context["tenant"]["id"] user_id = context["user"]["id"] fake_role = mock.MagicMock() self.mock_identity.return_value.create_role.return_value = fake_role scenario = basic.AddAndRemoveUserRole(context) scenario.run() 
self.mock_identity.return_value.create_role.assert_called_once_with() self.mock_identity.return_value.add_role.assert_called_once_with( role_id=fake_role.id, user_id=user_id, project_id=tenant_id) self.mock_identity.return_value.revoke_role.assert_called_once_with( fake_role.id, user_id=user_id, project_id=tenant_id) rally-0.9.1/tests/unit/plugins/openstack/cleanup/0000775000567000056710000000000013073420067023227 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/cleanup/__init__.py0000664000567000056710000000000013073417717025336 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/openstack/cleanup/test_resources.py0000775000567000056710000011376113073417720026670 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from boto import exception as boto_exception import ddt import mock from neutronclient.common import exceptions as neutron_exceptions from novaclient import exceptions as nova_exc from watcherclient.common.apiclient import exceptions as watcher_exceptions from rally import consts from rally.plugins.openstack.cleanup import resources from tests.unit import test BASE = "rally.plugins.openstack.cleanup.resources" class SynchronizedDeletionTestCase(test.TestCase): def test_is_deleted(self): self.assertTrue(resources.SynchronizedDeletion().is_deleted()) class QuotaMixinTestCase(test.TestCase): @mock.patch("%s.identity.Identity" % BASE) def test_list(self, mock_identity): quota = resources.QuotaMixin() quota.tenant_uuid = None quota.user = mock.MagicMock() self.assertEqual([], quota.list()) self.assertFalse(mock_identity.called) quota.tenant_uuid = mock.MagicMock() self.assertEqual([mock_identity.return_value.get_project.return_value], quota.list()) mock_identity.assert_called_once_with(quota.user) class MagnumMixinTestCase(test.TestCase): def test_id(self): magnum = resources.MagnumMixin() magnum._service = "magnum" magnum.raw_resource = mock.MagicMock() self.assertEqual(magnum.raw_resource.uuid, magnum.id()) def test_list(self): magnum = resources.MagnumMixin() magnum._service = "magnum" some_resources = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock(), mock.MagicMock()] magnum._manager = mock.MagicMock() magnum._manager.return_value.list.side_effect = ( some_resources[:2], some_resources[2:4], []) self.assertEqual(some_resources, magnum.list()) self.assertEqual( [mock.call(marker=None), mock.call(marker=some_resources[1].uuid), mock.call(marker=some_resources[3].uuid)], magnum._manager.return_value.list.call_args_list) class NovaServerTestCase(test.TestCase): def test_list(self): server = resources.NovaServer() server._manager = mock.MagicMock() server.list() server._manager.return_value.list.assert_called_once_with(limit=-1) def test_delete(self): 
server = resources.NovaServer() server.raw_resource = mock.Mock() server._manager = mock.Mock() server.delete() server._manager.return_value.delete.assert_called_once_with( server.raw_resource.id) def test_delete_locked(self): server = resources.NovaServer() server.raw_resource = mock.Mock() setattr(server.raw_resource, "OS-EXT-STS:locked", True) server._manager = mock.Mock() server.delete() server.raw_resource.unlock.assert_called_once_with() server._manager.return_value.delete.assert_called_once_with( server.raw_resource.id) class NovaFloatingIPsTestCase(test.TestCase): def test_name(self): fips = resources.NovaFloatingIPs() fips.raw_resource = mock.MagicMock() self.assertTrue(fips.name()) class NovaFlavorsTestCase(test.TestCase): @mock.patch("%s.base.ResourceManager._manager" % BASE) def test_is_deleted(self, mock_resource_manager__manager): exc = nova_exc.NotFound(404) mock_resource_manager__manager().get.side_effect = exc flavor = resources.NovaFlavors() flavor.raw_resource = mock.MagicMock() self.assertEqual(True, flavor.is_deleted()) @mock.patch("%s.base.ResourceManager._manager" % BASE) def test_is_deleted_fail(self, mock_resource_manager__manager): mock_resource_manager__manager().get.side_effect = TypeError() flavor = resources.NovaFlavors() flavor.raw_resource = mock.MagicMock() self.assertRaises(TypeError, flavor.is_deleted) class NovaServerGroupsTestCase(test.TestCase): @mock.patch("%s.base.ResourceManager._manager" % BASE) @mock.patch("rally.common.utils.name_matches_object") def test_list(self, mock_name_matches_object, mock_resource_manager__manager): server_groups = [mock.MagicMock(name="rally_foo1"), mock.MagicMock(name="rally_foo2"), mock.MagicMock(name="foo3")] mock_name_matches_object.side_effect = [False, True, True] mock_resource_manager__manager().list.return_value = server_groups self.assertEqual(server_groups, resources.NovaServerGroups().list()) class NovaSecurityGroupTestCase(test.TestCase): @mock.patch("%s.base.ResourceManager._manager" 
% BASE) def test_list(self, mock_resource_manager__manager): secgroups = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()] secgroups[0].name = "a" secgroups[1].name = "b" secgroups[2].name = "default" mock_resource_manager__manager().list.return_value = secgroups self.assertSequenceEqual(secgroups[:2], resources.NovaSecurityGroup().list()) class NovaFloatingIpsBulkTestCase(test.TestCase): def test_id(self): ip_range = resources.NovaFloatingIpsBulk() ip_range.raw_resource = mock.MagicMock() self.assertEqual(ip_range.raw_resource.address, ip_range.id()) def test_name(self): fips = resources.NovaFloatingIpsBulk() fips.raw_resource = mock.MagicMock() self.assertIsNone(fips.name()) class NovaNetworksTestCase(test.TestCase): def test_name(self): network = resources.NovaNetworks() network.raw_resource = mock.MagicMock() self.assertEqual(network.raw_resource.label, network.name()) class EC2MixinTestCase(test.TestCase): def get_ec2_mixin(self): ec2 = resources.EC2Mixin() ec2._service = "ec2" return ec2 def test__manager(self): ec2 = self.get_ec2_mixin() ec2.user = mock.MagicMock() self.assertEqual(ec2.user.ec2.return_value, ec2._manager()) class EC2ServerTestCase(test.TestCase): @mock.patch("%s.EC2Server._manager" % BASE) def test_is_deleted(self, mock_ec2_server__manager): raw_res1 = mock.MagicMock(state="terminated") raw_res2 = mock.MagicMock(state="terminated") resource = mock.MagicMock(id="test_id") manager = resources.EC2Server(resource=resource) mock_ec2_server__manager().get_only_instances.return_value = [raw_res1] self.assertTrue(manager.is_deleted()) raw_res1.state = "running" self.assertFalse(manager.is_deleted()) mock_ec2_server__manager().get_only_instances.return_value = [ raw_res1, raw_res2] self.assertFalse(manager.is_deleted()) raw_res1.state = "terminated" self.assertTrue(manager.is_deleted()) mock_ec2_server__manager().get_only_instances.return_value = [] self.assertTrue(manager.is_deleted()) @mock.patch("%s.EC2Server._manager" % BASE) def 
test_is_deleted_exceptions(self, mock_ec2_server__manager): mock_ec2_server__manager.side_effect = [ boto_exception.EC2ResponseError( status="fake", reason="fake", body={"Error": {"Code": "fake_code"}}), boto_exception.EC2ResponseError( status="fake", reason="fake", body={"Error": {"Code": "InvalidInstanceID.NotFound"}}) ] manager = resources.EC2Server(resource=mock.MagicMock()) self.assertFalse(manager.is_deleted()) self.assertTrue(manager.is_deleted()) @mock.patch("%s.EC2Server._manager" % BASE) def test_delete(self, mock_ec2_server__manager): resource = mock.MagicMock(id="test_id") manager = resources.EC2Server(resource=resource) manager.delete() mock_ec2_server__manager().terminate_instances.assert_called_once_with( instance_ids=["test_id"]) @mock.patch("%s.EC2Server._manager" % BASE) def test_list(self, mock_ec2_server__manager): manager = resources.EC2Server() mock_ec2_server__manager().get_only_instances.return_value = [ "a", "b", "c"] self.assertEqual(["a", "b", "c"], manager.list()) class NeutronMixinTestCase(test.TestCase): def get_neutron_mixin(self): neut = resources.NeutronMixin() neut._service = "neutron" return neut def test_manager(self): neut = self.get_neutron_mixin() neut.user = mock.MagicMock() self.assertEqual(neut.user.neutron.return_value, neut._manager()) @mock.patch("%s.NeutronMixin._manager" % BASE) def test_supports_extension(self, mock__manager): mock__manager().list_extensions.return_value = { "extensions": [{"alias": "foo"}, {"alias": "bar"}] } neut = self.get_neutron_mixin() self.assertTrue(neut.supports_extension("foo")) self.assertTrue(neut.supports_extension("bar")) self.assertFalse(neut.supports_extension("foobar")) def test_id(self): neut = self.get_neutron_mixin() neut.raw_resource = {"id": "test"} self.assertEqual("test", neut.id()) def test_name(self): neutron = self.get_neutron_mixin() neutron.raw_resource = {"id": "test_id", "name": "test_name"} self.assertEqual("test_name", neutron.name()) def test_delete(self): neut = 
self.get_neutron_mixin() neut.user = mock.MagicMock() neut._resource = "some_resource" neut.raw_resource = {"id": "42"} neut.delete() neut.user.neutron().delete_some_resource.assert_called_once_with("42") def test_list(self): neut = self.get_neutron_mixin() neut.user = mock.MagicMock() neut._resource = "some_resource" neut.tenant_uuid = "user_tenant" some_resources = [{"tenant_id": neut.tenant_uuid}, {"tenant_id": "a"}] neut.user.neutron().list_some_resources.return_value = { "some_resources": some_resources } self.assertEqual([some_resources[0]], list(neut.list())) neut.user.neutron().list_some_resources.assert_called_once_with( tenant_id=neut.tenant_uuid) class NeutronLbaasV1MixinTestCase(test.TestCase): def get_neutron_lbaasv1_mixin(self, extensions=None): if extensions is None: extensions = [] neut = resources.NeutronLbaasV1Mixin() neut._service = "neutron" neut._resource = "some_resource" neut._manager = mock.Mock() neut._manager().list_extensions.return_value = { "extensions": [{"alias": ext} for ext in extensions] } return neut def test_list_lbaas_available(self): neut = self.get_neutron_lbaasv1_mixin(extensions=["lbaas"]) neut.tenant_uuid = "user_tenant" some_resources = [{"tenant_id": neut.tenant_uuid}, {"tenant_id": "a"}] neut._manager().list_some_resources.return_value = { "some_resources": some_resources } self.assertEqual([some_resources[0]], list(neut.list())) neut._manager().list_some_resources.assert_called_once_with( tenant_id=neut.tenant_uuid) def test_list_lbaas_unavailable(self): neut = self.get_neutron_lbaasv1_mixin() self.assertEqual([], list(neut.list())) self.assertFalse(neut._manager().list_some_resources.called) class NeutronLbaasV2MixinTestCase(test.TestCase): def get_neutron_lbaasv2_mixin(self, extensions=None): if extensions is None: extensions = [] neut = resources.NeutronLbaasV2Mixin() neut._service = "neutron" neut._resource = "some_resource" neut._manager = mock.Mock() neut._manager().list_extensions.return_value = { "extensions": 
[{"alias": ext} for ext in extensions] } return neut def test_list_lbaasv2_available(self): neut = self.get_neutron_lbaasv2_mixin(extensions=["lbaasv2"]) neut.tenant_uuid = "user_tenant" some_resources = [{"tenant_id": neut.tenant_uuid}, {"tenant_id": "a"}] neut._manager().list_some_resources.return_value = { "some_resources": some_resources } self.assertEqual([some_resources[0]], list(neut.list())) neut._manager().list_some_resources.assert_called_once_with( tenant_id=neut.tenant_uuid) def test_list_lbaasv2_unavailable(self): neut = self.get_neutron_lbaasv2_mixin() self.assertEqual([], list(neut.list())) self.assertFalse(neut._manager().list_some_resources.called) class NeutronV2LoadbalancerTestCase(test.TestCase): def get_neutron_lbaasv2_lb(self): neutron_lb = resources.NeutronV2Loadbalancer() neutron_lb.raw_resource = {"id": "1", "name": "s_rally"} neutron_lb._manager = mock.Mock() return neutron_lb def test_is_deleted_true(self): from neutronclient.common import exceptions as n_exceptions neutron_lb = self.get_neutron_lbaasv2_lb() neutron_lb._manager().show_loadbalancer.side_effect = ( n_exceptions.NotFound) self.assertTrue(neutron_lb.is_deleted()) neutron_lb._manager().show_loadbalancer.assert_called_once_with( neutron_lb.id()) def test_is_deleted_false(self): from neutronclient.common import exceptions as n_exceptions neutron_lb = self.get_neutron_lbaasv2_lb() neutron_lb._manager().show_loadbalancer.return_value = ( neutron_lb.raw_resource) self.assertFalse(neutron_lb.is_deleted()) neutron_lb._manager().show_loadbalancer.assert_called_once_with( neutron_lb.id()) neutron_lb._manager().show_loadbalancer.reset_mock() neutron_lb._manager().show_loadbalancer.side_effect = ( n_exceptions.Forbidden) self.assertFalse(neutron_lb.is_deleted()) neutron_lb._manager().show_loadbalancer.assert_called_once_with( neutron_lb.id()) class NeutronFloatingIPTestCase(test.TestCase): def test_name(self): fips = resources.NeutronFloatingIP({"name": "foo"}) 
self.assertIsInstance(fips.name(), resources.base.NoName) def test_list(self): fips = {"floatingips": [{"tenant_id": "foo", "id": "foo"}]} user = mock.MagicMock() user.services.return_value = {} user.neutron.return_value.list_floatingips.return_value = fips self.assertEqual( [], resources.NeutronFloatingIP(user=user, tenant_uuid="foo").list()) self.assertFalse(user.neutron.return_value.list_floatingips.called) user.services.return_value = { consts.ServiceType.NETWORK: consts.Service.NEUTRON} self.assertEqual(fips["floatingips"], list( resources.NeutronFloatingIP(user=user, tenant_uuid="foo").list())) user.neutron.return_value.list_floatingips.assert_called_once_with( tenant_id="foo") class NeutronPortTestCase(test.TestCase): def test_delete(self): raw_res = {"device_owner": "abbabaab", "id": "some_id"} user = mock.MagicMock() resources.NeutronPort(resource=raw_res, user=user).delete() user.neutron().delete_port.assert_called_once_with(raw_res["id"]) def test_delete_port_raise_exception(self): raw_res = {"device_owner": "abbabaab", "id": "some_id"} user = mock.MagicMock() user.neutron().delete_port.side_effect = ( neutron_exceptions.PortNotFoundClient) resources.NeutronPort(resource=raw_res, user=user).delete() user.neutron().delete_port.assert_called_once_with(raw_res["id"]) def test_delete_port_device_owner(self): raw_res = { "device_owner": "network:router_interface", "id": "some_id", "device_id": "dev_id" } user = mock.MagicMock() resources.NeutronPort(resource=raw_res, user=user).delete() user.neutron().remove_interface_router.assert_called_once_with( raw_res["device_id"], {"port_id": raw_res["id"]}) def test_name(self): raw_res = { "id": "some_id", "device_id": "dev_id", } self.assertIsInstance( resources.NeutronPort(resource=raw_res, user=mock.MagicMock()).name(), resources.base.NoName) raw_res["name"] = "foo" self.assertEqual("foo", resources.NeutronPort( resource=raw_res, user=mock.MagicMock()).name()) raw_res["parent_name"] = "bar" self.assertEqual("bar", 
resources.NeutronPort( resource=raw_res, user=mock.MagicMock()).name()) del raw_res["name"] self.assertEqual("bar", resources.NeutronPort( resource=raw_res, user=mock.MagicMock()).name()) def test_list(self): tenant_uuid = "uuuu-uuuu-iiii-dddd" ports = [ # the case when 'name' is present, so 'device_owner' field is not # required {"tenant_id": tenant_uuid, "id": "id1", "name": "foo"}, # 3 different cases when router_interface is an owner {"tenant_id": tenant_uuid, "id": "id2", "device_owner": "network:router_interface", "device_id": "router-1"}, {"tenant_id": tenant_uuid, "id": "id3", "device_owner": "network:router_interface_distributed", "device_id": "router-1"}, {"tenant_id": tenant_uuid, "id": "id4", "device_owner": "network:ha_router_replicated_interface", "device_id": "router-2"}, # the case when gateway router is an owner {"tenant_id": tenant_uuid, "id": "id5", "device_owner": "network:router_gateway", "device_id": "router-3"}, # the case when gateway router is an owner, but device_id is # invalid {"tenant_id": tenant_uuid, "id": "id6", "device_owner": "network:router_gateway", "device_id": "aaaa"}, # the case when port was auto-created with floating-ip {"tenant_id": tenant_uuid, "id": "id7", "device_owner": "network:dhcp", "device_id": "asdasdasd"}, # the case when port is from another tenant {"tenant_id": "wrong tenant", "id": "id8", "name": "foo"}, # WTF port without any parent and name {"tenant_id": tenant_uuid, "id": "id9", "device_owner": ""}, ] routers = [ {"id": "router-1", "name": "Router-1", "tenant_id": tenant_uuid}, {"id": "router-2", "name": "Router-2", "tenant_id": tenant_uuid}, {"id": "router-3", "name": "Router-3", "tenant_id": tenant_uuid}, {"id": "router-4", "name": "Router-4", "tenant_id": tenant_uuid}, {"id": "router-5", "name": "Router-5", "tenant_id": tenant_uuid}, ] expected_ports = [] for port in ports: if port["tenant_id"] == tenant_uuid: expected_ports.append(copy.deepcopy(port)) if ("device_id" in port and 
port["device_id"].startswith("router")): expected_ports[-1]["parent_name"] = [ r for r in routers if r["id"] == port["device_id"]][0]["name"] class FakeNeutronClient(object): list_ports = mock.Mock() list_routers = mock.Mock() neutron = FakeNeutronClient neutron.list_ports.return_value = {"ports": ports} neutron.list_routers.return_value = {"routers": routers} user = mock.Mock(neutron=neutron) self.assertEqual(expected_ports, resources.NeutronPort( user=user, tenant_uuid=tenant_uuid).list()) neutron.list_ports.assert_called_once_with() neutron.list_routers.assert_called_once_with() @ddt.ddt class NeutronSecurityGroupTestCase(test.TestCase): @ddt.data( {"admin": mock.Mock(), "admin_required": True}, {"admin": None, "admin_required": False}) @ddt.unpack def test_list(self, admin, admin_required): sg_list = [{"tenant_id": "user_tenant", "name": "default"}, {"tenant_id": "user_tenant", "name": "foo_sg"}] neut = resources.NeutronSecurityGroup() neut.user = mock.MagicMock() neut._resource = "security_group" neut.tenant_uuid = "user_tenant" neut.user.neutron().list_security_groups.return_value = { "security_groups": sg_list } expected_result = [sg_list[1]] self.assertEqual(expected_result, list(neut.list())) neut.user.neutron().list_security_groups.assert_called_once_with( tenant_id=neut.tenant_uuid) class NeutronQuotaTestCase(test.TestCase): def test_delete(self): admin = mock.MagicMock() resources.NeutronQuota(admin=admin, tenant_uuid="fake").delete() admin.neutron.return_value.delete_quota.assert_called_once_with("fake") @ddt.ddt class GlanceImageTestCase(test.TestCase): @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__wrapper_admin(self, mock_glance_wrap): admin = mock.Mock() glance = resources.GlanceImage(admin=admin) wrapper = glance._wrapper() mock_glance_wrap.assert_called_once_with(admin.glance, glance) self.assertEqual(wrapper, mock_glance_wrap.return_value) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def 
test__wrapper_user(self, mock_glance_wrap): user = mock.Mock() glance = resources.GlanceImage(user=user) wrapper = glance._wrapper() mock_glance_wrap.assert_called_once_with(user.glance, glance) self.assertEqual(wrapper, mock_glance_wrap.return_value) @mock.patch("rally.plugins.openstack.wrappers.glance.wrap") def test__wrapper_admin_preferred(self, mock_glance_wrap): admin = mock.Mock() user = mock.Mock() glance = resources.GlanceImage(admin=admin, user=user) wrapper = glance._wrapper() mock_glance_wrap.assert_called_once_with(admin.glance, glance) self.assertEqual(wrapper, mock_glance_wrap.return_value) def test_list(self): glance = resources.GlanceImage() glance._wrapper = mock.Mock() glance.tenant_uuid = mock.Mock() self.assertEqual(glance.list(), glance._wrapper.return_value.list_images.return_value) glance._wrapper.return_value.list_images.assert_called_once_with( owner=glance.tenant_uuid) def test_delete(self): glance = resources.GlanceImage() glance._client = mock.Mock() glance._wrapper = mock.Mock() glance.raw_resource = mock.Mock() client = glance._client.return_value wrapper = glance._wrapper.return_value deleted_image = mock.Mock(status="DELETED") wrapper.get_image.side_effect = [glance.raw_resource, deleted_image] glance.delete() client().images.delete.assert_called_once_with(glance.raw_resource.id) class CeilometerTestCase(test.TestCase): def test_id(self): ceil = resources.CeilometerAlarms() ceil.raw_resource = mock.MagicMock() self.assertEqual(ceil.raw_resource.alarm_id, ceil.id()) @mock.patch("%s.CeilometerAlarms._manager" % BASE) def test_list(self, mock_ceilometer_alarms__manager): ceil = resources.CeilometerAlarms() ceil.tenant_uuid = mock.MagicMock() mock_ceilometer_alarms__manager().list.return_value = ["a", "b", "c"] mock_ceilometer_alarms__manager.reset_mock() self.assertEqual(["a", "b", "c"], ceil.list()) mock_ceilometer_alarms__manager().list.assert_called_once_with( q=[{"field": "project_id", "op": "eq", "value": ceil.tenant_uuid}]) class 
ZaqarQueuesTestCase(test.TestCase): def test_list(self): user = mock.Mock() zaqar = resources.ZaqarQueues(user=user) zaqar.list() user.zaqar().queues.assert_called_once_with() class KeystoneMixinTestCase(test.TestCase): def test_is_deleted(self): self.assertTrue(resources.KeystoneMixin().is_deleted()) def get_keystone_mixin(self): kmixin = resources.KeystoneMixin() kmixin._service = "keystone" return kmixin @mock.patch("%s.identity" % BASE) def test_manager(self, mock_identity): keystone_mixin = self.get_keystone_mixin() keystone_mixin.admin = mock.MagicMock() self.assertEqual(mock_identity.Identity.return_value, keystone_mixin._manager()) mock_identity.Identity.assert_called_once_with( keystone_mixin.admin) @mock.patch("%s.identity" % BASE) def test_delete(self, mock_identity): keystone_mixin = self.get_keystone_mixin() keystone_mixin._resource = "some_resource" keystone_mixin.id = lambda: "id_a" keystone_mixin.admin = mock.MagicMock() keystone_mixin.delete() mock_identity.Identity.assert_called_once_with(keystone_mixin.admin) identity_service = mock_identity.Identity.return_value identity_service.delete_some_resource.assert_called_once_with("id_a") @mock.patch("%s.identity" % BASE) def test_list(self, mock_identity): keystone_mixin = self.get_keystone_mixin() keystone_mixin._resource = "some_resource2" keystone_mixin.admin = mock.MagicMock() identity = mock_identity.Identity self.assertSequenceEqual( identity.return_value.list_some_resource2s.return_value, keystone_mixin.list()) identity.assert_called_once_with(keystone_mixin.admin) identity.return_value.list_some_resource2s.assert_called_once_with() class KeystoneEc2TestCase(test.TestCase): def test_user_id_property(self): user_client = mock.Mock() admin_client = mock.Mock() manager = resources.KeystoneEc2(user=user_client, admin=admin_client) self.assertEqual(user_client.keystone.auth_ref.user_id, manager.user_id) def test_list(self): user_client = mock.Mock() admin_client = mock.Mock() with 
mock.patch("%s.identity.Identity" % BASE, autospec=True) as p: identity = p.return_value manager = resources.KeystoneEc2(user=user_client, admin=admin_client) self.assertEqual(identity.list_ec2credentials.return_value, manager.list()) p.assert_called_once_with(user_client) identity.list_ec2credentials.assert_called_once_with( manager.user_id) def test_delete(self): user_client = mock.Mock() admin_client = mock.Mock() raw_resource = mock.Mock() with mock.patch("%s.identity.Identity" % BASE, autospec=True) as p: manager = resources.KeystoneEc2(user=user_client, admin=admin_client, resource=raw_resource) manager.delete() p.assert_called_once_with(user_client) p.return_value.delete_ec2credential.assert_called_once_with( manager.user_id, access=raw_resource.access) class SwiftMixinTestCase(test.TestCase): def get_swift_mixin(self): swift_mixin = resources.SwiftMixin() swift_mixin._service = "swift" return swift_mixin def test_manager(self): swift_mixin = self.get_swift_mixin() swift_mixin.user = mock.MagicMock() self.assertEqual(swift_mixin.user.swift.return_value, swift_mixin._manager()) def test_id(self): swift_mixin = self.get_swift_mixin() swift_mixin.raw_resource = mock.MagicMock() self.assertEqual(swift_mixin.raw_resource, swift_mixin.id()) def test_name(self): swift = self.get_swift_mixin() swift.raw_resource = ["name1", "name2"] self.assertEqual("name2", swift.name()) def test_delete(self): swift_mixin = self.get_swift_mixin() swift_mixin.user = mock.MagicMock() swift_mixin._resource = "some_resource" swift_mixin.raw_resource = mock.MagicMock() swift_mixin.delete() swift_mixin.user.swift().delete_some_resource.assert_called_once_with( *swift_mixin.raw_resource) class SwiftObjectTestCase(test.TestCase): @mock.patch("%s.SwiftMixin._manager" % BASE) def test_list(self, mock_swift_mixin__manager): containers = [mock.MagicMock(), mock.MagicMock()] objects = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()] mock_swift_mixin__manager().get_account.return_value = ( 
"header", containers) mock_swift_mixin__manager().get_container.return_value = ( "header", objects) self.assertEqual(len(containers), len(resources.SwiftContainer().list())) self.assertEqual(len(containers) * len(objects), len(resources.SwiftObject().list())) class SwiftContainerTestCase(test.TestCase): @mock.patch("%s.SwiftMixin._manager" % BASE) def test_list(self, mock_swift_mixin__manager): containers = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock()] mock_swift_mixin__manager().get_account.return_value = ( "header", containers) self.assertEqual(len(containers), len(resources.SwiftContainer().list())) class ManilaShareTestCase(test.TestCase): def test_list(self): share_resource = resources.ManilaShare() share_resource._manager = mock.MagicMock() share_resource.list() self.assertEqual("shares", share_resource._resource) share_resource._manager.return_value.list.assert_called_once_with() def test_delete(self): share_resource = resources.ManilaShare() share_resource._manager = mock.MagicMock() share_resource.id = lambda: "fake_id" share_resource.delete() self.assertEqual("shares", share_resource._resource) share_resource._manager.return_value.delete.assert_called_once_with( "fake_id") class ManilaShareNetworkTestCase(test.TestCase): def test_list(self): sn_resource = resources.ManilaShareNetwork() sn_resource._manager = mock.MagicMock() sn_resource.list() self.assertEqual("share_networks", sn_resource._resource) sn_resource._manager.return_value.list.assert_called_once_with() def test_delete(self): sn_resource = resources.ManilaShareNetwork() sn_resource._manager = mock.MagicMock() sn_resource.id = lambda: "fake_id" sn_resource.delete() self.assertEqual("share_networks", sn_resource._resource) sn_resource._manager.return_value.delete.assert_called_once_with( "fake_id") class ManilaSecurityServiceTestCase(test.TestCase): def test_list(self): ss_resource = resources.ManilaSecurityService() ss_resource._manager = mock.MagicMock() ss_resource.list() 
self.assertEqual("security_services", ss_resource._resource) ss_resource._manager.return_value.list.assert_called_once_with() def test_delete(self): ss_resource = resources.ManilaSecurityService() ss_resource._manager = mock.MagicMock() ss_resource.id = lambda: "fake_id" ss_resource.delete() self.assertEqual("security_services", ss_resource._resource) ss_resource._manager.return_value.delete.assert_called_once_with( "fake_id") class MistralMixinTestCase(test.TestCase): def test_delete(self): mistral = resources.MistralMixin() mistral._service = "mistral" mistral.user = mock.MagicMock() mistral._resource = "some_resources" mistral.raw_resource = {"id": "TEST_ID"} mistral.user.mistral().some_resources.delete.return_value = None mistral.delete() mistral.user.mistral().some_resources.delete.assert_called_once_with( "TEST_ID") class MistralWorkbookTestCase(test.TestCase): def test_delete(self): mistral = resources.MistralWorkbooks() mistral._service = "mistral" mistral.user = mock.MagicMock() mistral._resource = "some_resources" mistral.raw_resource = {"name": "TEST_NAME"} mistral.user.mistral().some_resources.delete.return_value = None mistral.delete() mistral.user.mistral().some_resources.delete.assert_called_once_with( "TEST_NAME") class FuelEnvironmentTestCase(test.TestCase): def test_id(self): fres = resources.FuelEnvironment() fres.raw_resource = {"id": 42, "name": "chavez"} self.assertEqual(42, fres.id()) def test_name(self): fuel = resources.FuelEnvironment() fuel.raw_resource = {"id": "test_id", "name": "test_name"} self.assertEqual("test_name", fuel.name()) @mock.patch("%s.FuelEnvironment._manager" % BASE) def test_is_deleted(self, mock__manager): mock__manager.return_value.get.return_value = None fres = resources.FuelEnvironment() fres.id = mock.Mock() self.assertTrue(fres.is_deleted()) mock__manager.return_value.get.return_value = "env" self.assertFalse(fres.is_deleted()) mock__manager.return_value.get.assert_called_with(fres.id.return_value) class 
SenlinMixinTestCase(test.TestCase): def test_id(self): senlin = resources.SenlinMixin() senlin.raw_resource = {"id": "TEST_ID"} self.assertEqual("TEST_ID", senlin.id()) def test__manager(self): senlin = resources.SenlinMixin() senlin._service = "senlin" senlin.user = mock.MagicMock() self.assertEqual(senlin.user.senlin.return_value, senlin._manager()) def test_list(self): senlin = resources.SenlinMixin() senlin._service = "senlin" senlin.user = mock.MagicMock() senlin._resource = "some_resources" some_resources = [{"name": "resource1"}, {"name": "resource2"}] senlin.user.senlin().some_resources.return_value = some_resources self.assertEqual(some_resources, senlin.list()) senlin.user.senlin().some_resources.assert_called_once_with() def test_delete(self): senlin = resources.SenlinMixin() senlin._service = "senlin" senlin.user = mock.MagicMock() senlin._resource = "some_resources" senlin.raw_resource = {"id": "TEST_ID"} senlin.user.senlin().delete_some_resource.return_value = None senlin.delete() senlin.user.senlin().delete_some_resource.assert_called_once_with( "TEST_ID") class WatcherTemplateTestCase(test.TestCase): def test_id(self): watcher = resources.WatcherTemplate() watcher.raw_resource = mock.MagicMock(uuid=100) self.assertEqual(100, watcher.id()) @mock.patch("%s.WatcherTemplate._manager" % BASE) def test_is_deleted(self, mock__manager): mock__manager.return_value.get.return_value = None watcher = resources.WatcherTemplate() watcher.id = mock.Mock() self.assertFalse(watcher.is_deleted()) mock__manager.side_effect = [watcher_exceptions.NotFound()] self.assertTrue(watcher.is_deleted()) def test_list(self): watcher = resources.WatcherTemplate() watcher._manager = mock.MagicMock() watcher.list() self.assertEqual("audit_template", watcher._resource) watcher._manager().list.assert_called_once_with(limit=0) class WatcherAuditTestCase(test.TestCase): def test_id(self): watcher = resources.WatcherAudit() watcher.raw_resource = mock.MagicMock(uuid=100) 
self.assertEqual(100, watcher.id()) def test_name(self): watcher = resources.WatcherAudit() watcher.raw_resource = mock.MagicMock(uuid="name") self.assertEqual("name", watcher.name()) @mock.patch("%s.WatcherAudit._manager" % BASE) def test_is_deleted(self, mock__manager): mock__manager.return_value.get.return_value = None watcher = resources.WatcherAudit() watcher.id = mock.Mock() self.assertFalse(watcher.is_deleted()) mock__manager.side_effect = [watcher_exceptions.NotFound()] self.assertTrue(watcher.is_deleted()) def test_list(self): watcher = resources.WatcherAudit() watcher._manager = mock.MagicMock() watcher.list() self.assertEqual("audit", watcher._resource) watcher._manager().list.assert_called_once_with(limit=0) class WatcherActionPlanTestCase(test.TestCase): def test_id(self): watcher = resources.WatcherActionPlan() watcher.raw_resource = mock.MagicMock(uuid=100) self.assertEqual(100, watcher.id()) def test_name(self): watcher = resources.WatcherActionPlan() watcher.raw_resource = mock.MagicMock(uuid="name") self.assertEqual("name", watcher.name()) @mock.patch("%s.WatcherActionPlan._manager" % BASE) def test_is_deleted(self, mock__manager): mock__manager.return_value.get.return_value = None watcher = resources.WatcherActionPlan() watcher.id = mock.Mock() self.assertFalse(watcher.is_deleted()) mock__manager.side_effect = [watcher_exceptions.NotFound()] self.assertTrue(watcher.is_deleted()) def test_list(self): watcher = resources.WatcherActionPlan() watcher._manager = mock.MagicMock() watcher.list() self.assertEqual("action_plan", watcher._resource) watcher._manager().list.assert_called_once_with(limit=0) rally-0.9.1/tests/unit/plugins/openstack/cleanup/test_base.py0000664000567000056710000000774513073417717025577 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.openstack.cleanup import base from tests.unit import test BASE = "rally.plugins.openstack.cleanup.base" class ResourceDecoratorTestCase(test.TestCase): def test_resource(self): @base.resource("service", "res") class Fake(object): pass self.assertEqual(Fake._service, "service") self.assertEqual(Fake._resource, "res") class ResourceManagerTestCase(test.TestCase): def test__manager(self): user = mock.MagicMock() user.service1().resource1 = "user_res" manager = base.ResourceManager(user=user) manager._service = "service1" manager._resource = "resource1" self.assertEqual("user_res", manager._manager()) def test__manager_admin(self): admin = mock.MagicMock() admin.service1().resource1 = "admin_res" manager = base.ResourceManager(admin=admin) manager._service = "service1" manager._resource = "resource1" manager._admin_required = True self.assertEqual("admin_res", manager._manager()) def test_id(self): resource = mock.MagicMock(id="test_id") manager = base.ResourceManager(resource=resource) self.assertEqual(resource.id, manager.id()) def test_name(self): resource = mock.MagicMock(name="test_name") manager = base.ResourceManager(resource=resource) self.assertEqual(resource.name, manager.name()) @mock.patch("%s.ResourceManager._manager" % BASE) def test_is_deleted(self, mock_resource_manager__manager): raw_res = mock.MagicMock(status="deleted") mock_resource_manager__manager().get.return_value = raw_res mock_resource_manager__manager.reset_mock() resource = mock.MagicMock(id="test_id") manager = base.ResourceManager(resource=resource) 
self.assertTrue(manager.is_deleted()) raw_res.status = "DELETE_COMPLETE" self.assertTrue(manager.is_deleted()) raw_res.status = "ACTIVE" self.assertFalse(manager.is_deleted()) mock_resource_manager__manager.assert_has_calls( [mock.call(), mock.call().get(resource.id)] * 3) self.assertEqual(mock_resource_manager__manager.call_count, 3) @mock.patch("%s.ResourceManager._manager" % BASE) def test_is_deleted_exceptions(self, mock_resource_manager__manager): class Fake500Exc(Exception): code = 500 class Fake404Exc(Exception): code = 404 mock_resource_manager__manager.side_effect = [ Exception, Fake500Exc, Fake404Exc] manager = base.ResourceManager(resource=mock.MagicMock()) self.assertFalse(manager.is_deleted()) self.assertFalse(manager.is_deleted()) self.assertTrue(manager.is_deleted()) @mock.patch("%s.ResourceManager._manager" % BASE) def test_delete(self, mock_resource_manager__manager): res = mock.MagicMock(id="test_id") manager = base.ResourceManager(resource=res) manager.delete() mock_resource_manager__manager.assert_has_calls( [mock.call(), mock.call().delete(res.id)]) @mock.patch("%s.ResourceManager._manager" % BASE) def test_list(self, mock_resource_manager__manager): base.ResourceManager().list() mock_resource_manager__manager.assert_has_calls( [mock.call(), mock.call().list()]) rally-0.9.1/tests/unit/plugins/openstack/cleanup/test_manager.py0000664000567000056710000005102613073417720026260 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.common import utils from rally.plugins.openstack.cleanup import base from rally.plugins.openstack.cleanup import manager from tests.unit import test BASE = "rally.plugins.openstack.cleanup.manager" class SeekAndDestroyTestCase(test.TestCase): def setUp(self): super(SeekAndDestroyTestCase, self).setUp() # clear out the client cache manager.SeekAndDestroy.cache = {} @mock.patch("%s.osclients.Clients" % BASE, side_effect=[mock.MagicMock(), mock.MagicMock()]) def test__get_cached_client(self, mock_clients): destroyer = manager.SeekAndDestroy(None, None, None) self.assertIsNone(destroyer._get_cached_client(None)) users = [{"credential": "a"}, {"credential": "b"}] self.assertEqual(destroyer._get_cached_client(users[0]), destroyer._get_cached_client(users[0])) # ensure that cache is used self.assertItemsEqual(mock_clients.call_args_list, [mock.call("a", api_info=None)]) mock_clients.reset_mock() self.assertEqual(destroyer._get_cached_client(users[1]), destroyer._get_cached_client(users[1])) self.assertItemsEqual(mock_clients.call_args_list, [mock.call("b", api_info=None)]) mock_clients.reset_mock() self.assertNotEqual(destroyer._get_cached_client(users[0]), destroyer._get_cached_client(users[1])) self.assertFalse(mock_clients.called) @mock.patch("%s.osclients.Clients" % BASE, side_effect=[mock.MagicMock(), mock.MagicMock()]) def test__get_cached_client_shared_cache(self, mock_clients): # ensure that cache is shared between SeekAndDestroy objects destroyer1 = manager.SeekAndDestroy(None, None, None) destroyer2 = manager.SeekAndDestroy(None, None, None) user = {"credential": "a"} self.assertEqual(destroyer1._get_cached_client(user), destroyer2._get_cached_client(user)) self.assertItemsEqual(mock_clients.call_args_list, [mock.call("a", api_info=None)]) @mock.patch("%s.osclients.Clients" % BASE, side_effect=[mock.MagicMock(), mock.MagicMock()]) def 
test__get_cached_client_shared_cache_api_versions(self, mock_clients): # ensure that cache is shared between SeekAndDestroy objects # with matching api_versions dicts api_versions = {"cinder": {"version": "1", "service_type": "volume"}} destroyer1 = manager.SeekAndDestroy(None, None, None, api_versions=api_versions) destroyer2 = manager.SeekAndDestroy(None, None, None, api_versions=api_versions) user = {"credential": "a"} self.assertEqual(destroyer1._get_cached_client(user), destroyer2._get_cached_client(user)) self.assertItemsEqual(mock_clients.call_args_list, [mock.call("a", api_info=api_versions)]) @mock.patch("%s.osclients.Clients" % BASE, side_effect=[mock.MagicMock(), mock.MagicMock()]) def test__get_cached_client_no_cache_api_versions(self, mock_clients): # ensure that cache is not shared between SeekAndDestroy # objects with different api_versions dicts api_versions = [ {"cinder": {"version": "1", "service_type": "volume"}}, {"cinder": {"version": "2", "service_type": "volumev2"}} ] destroyer1 = manager.SeekAndDestroy(None, None, None, api_versions=api_versions[0]) destroyer2 = manager.SeekAndDestroy(None, None, None, api_versions=api_versions[1]) user = {"credential": "a"} self.assertNotEqual(destroyer1._get_cached_client(user), destroyer2._get_cached_client(user)) self.assertItemsEqual(mock_clients.call_args_list, [mock.call("a", api_info=api_versions[0]), mock.call("a", api_info=api_versions[1])]) @mock.patch("%s.LOG" % BASE) def test__delete_single_resource(self, mock_log): mock_resource = mock.MagicMock(_max_attempts=3, _timeout=10, _interval=0.01) mock_resource.delete.side_effect = [Exception, Exception, True] mock_resource.is_deleted.side_effect = [False, False, True] manager.SeekAndDestroy(None, None, None)._delete_single_resource( mock_resource) mock_resource.delete.assert_has_calls([mock.call()] * 3) self.assertEqual(mock_resource.delete.call_count, 3) mock_resource.is_deleted.assert_has_calls([mock.call()] * 3) 
self.assertEqual(mock_resource.is_deleted.call_count, 3) # NOTE(boris-42): No logs and no exceptions means no bugs! self.assertEqual(0, mock_log.call_count) @mock.patch("%s.LOG" % BASE) def test__delete_single_resource_timeout(self, mock_log): mock_resource = mock.MagicMock(_max_attempts=1, _timeout=0.02, _interval=0.025) mock_resource.delete.return_value = True mock_resource.is_deleted.side_effect = [False, False, True] manager.SeekAndDestroy(None, None, None)._delete_single_resource( mock_resource) mock_resource.delete.assert_called_once_with() mock_resource.is_deleted.assert_called_once_with() self.assertEqual(1, mock_log.warning.call_count) @mock.patch("%s.LOG" % BASE) def test__delete_single_resource_excpetion_in_is_deleted(self, mock_log): mock_resource = mock.MagicMock(_max_attempts=3, _timeout=10, _interval=0) mock_resource.delete.return_value = True mock_resource.is_deleted.side_effect = [Exception] * 4 manager.SeekAndDestroy(None, None, None)._delete_single_resource( mock_resource) mock_resource.delete.assert_called_once_with() self.assertEqual(4, mock_resource.is_deleted.call_count) self.assertEqual(5, mock_log.warning.call_count) self.assertEqual(4, mock_log.exception.call_count) def _manager(self, list_side_effect, **kw): mock_mgr = mock.MagicMock() mock_mgr().list.side_effect = list_side_effect mock_mgr.reset_mock() for k, v in kw.items(): setattr(mock_mgr, k, v) return mock_mgr @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) def test__publisher_admin(self, mock__get_cached_client): mock_mgr = self._manager([Exception, Exception, [1, 2, 3]], _perform_for_admin_only=False) admin = mock.MagicMock() publish = manager.SeekAndDestroy(mock_mgr, admin, None)._publisher queue = [] publish(queue) mock__get_cached_client.assert_called_once_with(admin) mock_mgr.assert_called_once_with( admin=mock__get_cached_client.return_value) self.assertEqual(queue, [(admin, None, x) for x in range(1, 4)]) @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) 
def test__publisher_admin_only(self, mock__get_cached_client): mock_mgr = self._manager([Exception, Exception, [1, 2, 3]], _perform_for_admin_only=True) admin = mock.MagicMock() publish = manager.SeekAndDestroy( mock_mgr, admin, ["u1", "u2"])._publisher queue = [] publish(queue) mock__get_cached_client.assert_called_once_with(admin) mock_mgr.assert_called_once_with( admin=mock__get_cached_client.return_value) self.assertEqual(queue, [(admin, None, x) for x in range(1, 4)]) @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) def test__publisher_user_resource(self, mock__get_cached_client): mock_mgr = self._manager([Exception, Exception, [1, 2, 3], Exception, Exception, [4, 5]], _perform_for_admin_only=False, _tenant_resource=True) admin = mock.MagicMock() users = [{"tenant_id": 1, "id": 1}, {"tenant_id": 2, "id": 2}] publish = manager.SeekAndDestroy(mock_mgr, admin, users)._publisher queue = [] publish(queue) mock_client = mock__get_cached_client.return_value mock_mgr.assert_has_calls([ mock.call(admin=mock_client, user=mock_client, tenant_uuid=users[0]["tenant_id"]), mock.call().list(), mock.call().list(), mock.call().list(), mock.call(admin=mock_client, user=mock_client, tenant_uuid=users[1]["tenant_id"]), mock.call().list(), mock.call().list() ]) mock__get_cached_client.assert_has_calls([ mock.call(admin), mock.call(users[0]), mock.call(users[1]) ]) expected_queue = [(admin, users[0], x) for x in range(1, 4)] expected_queue += [(admin, users[1], x) for x in range(4, 6)] self.assertEqual(queue, expected_queue) @mock.patch("%s.LOG" % BASE) @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) def test__gen_publisher_tenant_resource(self, mock__get_cached_client, mock_log): mock_mgr = self._manager([Exception, [1, 2, 3], Exception, Exception, Exception, ["this shouldn't be in results"]], _perform_for_admin_only=False, _tenant_resource=True) users = [{"tenant_id": 1, "id": 1}, {"tenant_id": 1, "id": 2}, {"tenant_id": 2, "id": 3}] publish = 
manager.SeekAndDestroy( mock_mgr, None, users)._publisher queue = [] publish(queue) mock_client = mock__get_cached_client.return_value mock_mgr.assert_has_calls([ mock.call(admin=mock_client, user=mock_client, tenant_uuid=users[0]["tenant_id"]), mock.call().list(), mock.call().list(), mock.call(admin=mock_client, user=mock_client, tenant_uuid=users[2]["tenant_id"]), mock.call().list(), mock.call().list(), mock.call().list() ]) mock__get_cached_client.assert_has_calls([ mock.call(None), mock.call(users[0]), mock.call(users[2]) ]) self.assertEqual(queue, [(None, users[0], x) for x in range(1, 4)]) self.assertTrue(mock_log.warning.mock_called) self.assertTrue(mock_log.exception.mock_called) @mock.patch("rally.common.utils.name_matches_object") @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) @mock.patch("%s.SeekAndDestroy._delete_single_resource" % BASE) def test__consumer(self, mock__delete_single_resource, mock__get_cached_client, mock_name_matches_object): mock_mgr = mock.MagicMock(__name__="Test") resource_classes = [mock.Mock()] task_id = "task_id" mock_name_matches_object.return_value = True consumer = manager.SeekAndDestroy( mock_mgr, None, None, resource_classes=resource_classes, task_id=task_id)._consumer admin = mock.MagicMock() user1 = {"id": "a", "tenant_id": "uuid1"} cache = {} consumer(cache, (admin, user1, "res")) mock_mgr.assert_called_once_with( resource="res", admin=mock__get_cached_client.return_value, user=mock__get_cached_client.return_value, tenant_uuid=user1["tenant_id"]) mock__get_cached_client.assert_has_calls([ mock.call(admin), mock.call(user1) ]) mock__delete_single_resource.assert_called_once_with( mock_mgr.return_value) mock_mgr.reset_mock() mock__get_cached_client.reset_mock() mock__delete_single_resource.reset_mock() mock_name_matches_object.reset_mock() consumer(cache, (admin, None, "res2")) mock_mgr.assert_called_once_with( resource="res2", admin=mock__get_cached_client.return_value, 
user=mock__get_cached_client.return_value, tenant_uuid=None) mock__get_cached_client.assert_has_calls([ mock.call(admin), mock.call(None) ]) mock__delete_single_resource.assert_called_once_with( mock_mgr.return_value) @mock.patch("rally.common.utils.name_matches_object") @mock.patch("%s.SeekAndDestroy._get_cached_client" % BASE) @mock.patch("%s.SeekAndDestroy._delete_single_resource" % BASE) def test__consumer_with_noname_resource(self, mock__delete_single_resource, mock__get_cached_client, mock_name_matches_object): mock_mgr = mock.MagicMock(__name__="Test") mock_mgr.return_value.name.return_value = True task_id = "task_id" mock_name_matches_object.return_value = False consumer = manager.SeekAndDestroy(mock_mgr, None, None, task_id=task_id)._consumer consumer(None, (None, None, "res")) self.assertFalse(mock__delete_single_resource.called) mock_mgr.return_value.name.return_value = base.NoName("foo") consumer(None, (None, None, "res")) mock__delete_single_resource.assert_called_once_with( mock_mgr.return_value) @mock.patch("%s.broker.run" % BASE) def test_exterminate(self, mock_broker_run): manager_cls = mock.MagicMock(_threads=5) cleaner = manager.SeekAndDestroy(manager_cls, None, None) cleaner._publisher = mock.Mock() cleaner._consumer = mock.Mock() cleaner.exterminate() mock_broker_run.assert_called_once_with(cleaner._publisher, cleaner._consumer, consumers_count=5) class ResourceManagerTestCase(test.TestCase): def _get_res_mock(self, **kw): _mock = mock.MagicMock() for k, v in kw.items(): setattr(_mock, k, v) return _mock def _list_res_names_helper(self, names, admin_required, mock_iter): self.assertEqual(set(names), manager.list_resource_names(admin_required)) mock_iter.assert_called_once_with(base.ResourceManager) mock_iter.reset_mock() @mock.patch("%s.discover.itersubclasses" % BASE) def test_list_resource_names(self, mock_itersubclasses): mock_itersubclasses.return_value = [ self._get_res_mock(_service="fake", _resource="1", _admin_required=True), 
self._get_res_mock(_service="fake", _resource="2", _admin_required=False), self._get_res_mock(_service="other", _resource="2", _admin_required=False) ] self._list_res_names_helper( ["fake", "other", "fake.1", "fake.2", "other.2"], None, mock_itersubclasses) self._list_res_names_helper( ["fake", "fake.1"], True, mock_itersubclasses) self._list_res_names_helper( ["fake", "other", "fake.2", "other.2"], False, mock_itersubclasses) @mock.patch("%s.discover.itersubclasses" % BASE) def test_find_resource_managers(self, mock_itersubclasses): mock_itersubclasses.return_value = [ self._get_res_mock(_service="fake", _resource="1", _order=1, _admin_required=True), self._get_res_mock(_service="fake", _resource="2", _order=3, _admin_required=False), self._get_res_mock(_service="other", _resource="2", _order=2, _admin_required=False) ] self.assertEqual(mock_itersubclasses.return_value[0:2], manager.find_resource_managers(names=["fake"])) self.assertEqual(mock_itersubclasses.return_value[0:1], manager.find_resource_managers(names=["fake.1"])) self.assertEqual( [mock_itersubclasses.return_value[0], mock_itersubclasses.return_value[2], mock_itersubclasses.return_value[1]], manager.find_resource_managers(names=["fake", "other"])) self.assertEqual(mock_itersubclasses.return_value[0:1], manager.find_resource_managers(names=["fake"], admin_required=True)) self.assertEqual(mock_itersubclasses.return_value[1:2], manager.find_resource_managers(names=["fake"], admin_required=False)) @mock.patch("rally.common.plugin.discover.itersubclasses") @mock.patch("%s.SeekAndDestroy" % BASE) @mock.patch("%s.find_resource_managers" % BASE, return_value=[mock.MagicMock(), mock.MagicMock()]) def test_cleanup(self, mock_find_resource_managers, mock_seek_and_destroy, mock_itersubclasses): class A(utils.RandomNameGeneratorMixin): pass class B(object): pass mock_itersubclasses.return_value = [A, B] manager.cleanup(names=["a", "b"], admin_required=True, admin="admin", users=["user"], superclass=A, 
task_id="task_id") mock_find_resource_managers.assert_called_once_with(["a", "b"], True) mock_seek_and_destroy.assert_has_calls([ mock.call(mock_find_resource_managers.return_value[0], "admin", ["user"], api_versions=None, resource_classes=[A], task_id="task_id"), mock.call().exterminate(), mock.call(mock_find_resource_managers.return_value[1], "admin", ["user"], api_versions=None, resource_classes=[A], task_id="task_id"), mock.call().exterminate() ]) @mock.patch("rally.common.plugin.discover.itersubclasses") @mock.patch("%s.SeekAndDestroy" % BASE) @mock.patch("%s.find_resource_managers" % BASE, return_value=[mock.MagicMock(), mock.MagicMock()]) def test_cleanup_with_api_versions(self, mock_find_resource_managers, mock_seek_and_destroy, mock_itersubclasses): class A(utils.RandomNameGeneratorMixin): pass class B(object): pass mock_itersubclasses.return_value = [A, B] api_versions = {"cinder": {"version": "1", "service_type": "volume"}} manager.cleanup(names=["a", "b"], admin_required=True, admin="admin", users=["user"], api_versions=api_versions, superclass=utils.RandomNameGeneratorMixin, task_id="task_id") mock_find_resource_managers.assert_called_once_with(["a", "b"], True) mock_seek_and_destroy.assert_has_calls([ mock.call(mock_find_resource_managers.return_value[0], "admin", ["user"], api_versions=api_versions, resource_classes=[A], task_id="task_id"), mock.call().exterminate(), mock.call(mock_find_resource_managers.return_value[1], "admin", ["user"], api_versions=api_versions, resource_classes=[A], task_id="task_id"), mock.call().exterminate() ]) rally-0.9.1/tests/unit/plugins/common/0000775000567000056710000000000013073420067021101 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/test_types.py0000664000567000056710000000673213073417717023676 0ustar jenkinsjenkins00000000000000# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally import exceptions from rally.plugins.common import types from tests.unit import test class PathOrUrlTestCase(test.TestCase): @mock.patch("os.path.isfile") @mock.patch("requests.head") def test_transform_file(self, mock_requests_head, mock_isfile): mock_isfile.return_value = True path = types.PathOrUrl.transform(None, "fake_path") self.assertEqual("fake_path", path) @mock.patch("os.path.isfile") @mock.patch("requests.head") def test_transform_bogus(self, mock_requests_head, mock_isfile): mock_isfile.return_value = False mock_requests_head.return_value = mock.Mock(status_code=500) self.assertRaises(exceptions.InvalidScenarioArgument, types.PathOrUrl.transform, None, "fake_path") mock_requests_head.assert_called_once_with("fake_path") @mock.patch("os.path.isfile") @mock.patch("requests.head") def test_transform_url(self, mock_requests_head, mock_isfile): mock_isfile.return_value = False mock_requests_head.return_value = mock.Mock(status_code=200) path = types.PathOrUrl.transform(None, "fake_url") self.assertEqual("fake_url", path) class FileTypeTestCase(test.TestCase): @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open(read_data="file_context"), create=True) def test_transform_by_path(self, mock_open): resource_config = "file.yaml" file_context = types.FileType.transform( clients=None, resource_config=resource_config) self.assertEqual(file_context, "file_context") 
@mock.patch("six.moves.builtins.open", side_effect=IOError, create=True) def test_transform_by_path_no_match(self, mock_open): resource_config = "nonexistant.yaml" self.assertRaises(IOError, types.FileType.transform, clients=None, resource_config=resource_config) class FileTypeDictTestCase(test.TestCase): @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open(read_data="file_context"), create=True) def test_transform_by_path(self, mock_open): resource_config = ["file.yaml"] file_context = types.FileTypeDict.transform( clients=None, resource_config=resource_config) self.assertEqual(file_context, {"file.yaml": "file_context"}) @mock.patch("six.moves.builtins.open", side_effect=IOError, create=True) def test_transform_by_path_no_match(self, mock_open): resource_config = ["nonexistant.yaml"] self.assertRaises(IOError, types.FileTypeDict.transform, clients=None, resource_config=resource_config) rally-0.9.1/tests/unit/plugins/common/__init__.py0000664000567000056710000000000013073417717023210 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/trigger/0000775000567000056710000000000013073420067022544 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/trigger/__init__.py0000664000567000056710000000000013073417717024653 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/trigger/test_periodic.py0000664000567000056710000000723613073417720025765 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import ddt import jsonschema import mock from rally.plugins.common.trigger import periodic from tests.unit import test def create_config(**kwargs): return {"name": "periodic", "args": kwargs} @ddt.ddt class PeriodicTriggerTestCase(test.TestCase): def setUp(self): super(PeriodicTriggerTestCase, self).setUp() self.hook_cls = mock.MagicMock(__name__="name") self.trigger = periodic.PeriodicTrigger( {"trigger": {"name": "periodic", "args": {"unit": "iteration", "step": 2}}}, mock.MagicMock(), self.hook_cls) @ddt.data((create_config(unit="time", step=1), True), (create_config(unit="time", step=0), False), (create_config(unit="time", step=1, start=0), True), (create_config(unit="time", step=1, start=-1), False), (create_config(unit="time", step=1, start=0, end=1), True), (create_config(unit="time", step=1, start=0, end=0), False), (create_config(unit="time", wrong_prop=None), False), (create_config(unit="time"), False), (create_config(unit="iteration", step=1), True), (create_config(unit="iteration", step=0), False), (create_config(unit="iteration", step=1, start=1), True), (create_config(unit="iteration", step=1, start=0), False), (create_config(unit="iteration", step=1, start=1, end=1), True), (create_config(unit="iteration", step=1, start=1, end=0), False), (create_config(unit="iteration", wrong_prop=None), False), (create_config(unit="iteration"), False), (create_config(unit="wrong-unit", step=1), False), (create_config(step=1), False)) @ddt.unpack def test_config_schema(self, config, valid): if valid: periodic.PeriodicTrigger.validate(config) else: self.assertRaises(jsonschema.ValidationError, periodic.PeriodicTrigger.validate, config) def test_get_listening_event(self): event_type = self.trigger.get_listening_event() self.assertEqual("iteration", event_type) @ddt.data((1, True), (2, False), (3, True), (4, False), (5, True), (6, False), (7, True), (8, False), (9, 
True), (10, False)) @ddt.unpack def test_on_event(self, value, should_call): self.trigger.on_event("iteration", value) self.assertEqual(should_call, self.hook_cls.called) @ddt.data((0, False), (1, False), (2, True), (3, False), (4, False), (5, True), (6, False), (7, False), (8, True), (9, False)) @ddt.unpack def test_on_event_start_end(self, value, should_call): trigger = periodic.PeriodicTrigger( {"trigger": {"name": "periodic", "args": {"unit": "time", "step": 3, "start": 2, "end": 9}}}, mock.MagicMock(), self.hook_cls) trigger.on_event("time", value) self.assertEqual(should_call, self.hook_cls.called) rally-0.9.1/tests/unit/plugins/common/trigger/test_event.py0000664000567000056710000000566413073417720025313 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt
import jsonschema
import mock

from rally.plugins.common.trigger import event
from tests.unit import test


def create_config(**kwargs):
    """Build an "event" trigger config whose args are *kwargs*."""
    return {"name": "event", "args": kwargs}


@ddt.ddt
class EventTriggerTestCase(test.TestCase):
    """Tests for the event trigger plugin."""

    def setUp(self):
        super(EventTriggerTestCase, self).setUp()
        self.hook_cls = mock.MagicMock(__name__="name")
        # Trigger under test fires on iterations 1, 4 and 5.
        self.trigger = event.EventTrigger(
            {"trigger": {"name": "event",
                         "args": {"unit": "iteration", "at": [1, 4, 5]}}},
            mock.MagicMock(), self.hook_cls)

    @ddt.data((create_config(unit="time", at=[0, 3, 5]), True),
              (create_config(unit="time", at=[2, 2]), False),
              (create_config(unit="time", at=[-1]), False),
              (create_config(unit="time", at=[1.5]), False),
              (create_config(unit="time", at=[]), False),
              (create_config(unit="time", wrong_prop=None), False),
              (create_config(unit="time"), False),
              (create_config(unit="iteration", at=[1, 5, 13]), True),
              (create_config(unit="iteration", at=[1, 1]), False),
              (create_config(unit="iteration", at=[0]), False),
              (create_config(unit="iteration", at=[-1]), False),
              (create_config(unit="iteration", at=[1.5]), False),
              (create_config(unit="iteration", at=[]), False),
              (create_config(unit="iteration", wrong_prop=None), False),
              (create_config(unit="iteration"), False),
              (create_config(unit="wrong-unit", at=[1, 2, 3]), False),
              (create_config(at=[1, 2, 3]), False))
    @ddt.unpack
    def test_config_schema(self, config, valid):
        # "at" must be a non-empty list of unique positive integers
        # (>= 0 for "time", >= 1 for "iteration"); anything else is rejected.
        if valid:
            event.EventTrigger.validate(config)
        else:
            self.assertRaises(jsonschema.ValidationError,
                              event.EventTrigger.validate, config)

    def test_get_listening_event(self):
        event_type = self.trigger.get_listening_event()
        self.assertEqual("iteration", event_type)

    @ddt.data((1, True), (4, True), (5, True), (0, False), (2, False),
              (3, False), (6, False), (7, False))
    @ddt.unpack
    def test_on_event(self, value, should_call):
        # The hook is instantiated only for values listed in "at".
        self.trigger.on_event("iteration", value)
        self.assertEqual(should_call, self.hook_cls.called)
rally-0.9.1/tests/unit/plugins/common/hook/0000775000567000056710000000000013073420067022041 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/hook/__init__.py0000664000567000056710000000000013073417717024150 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/hook/test_sys_call.py0000664000567000056710000001302713073417720025270 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import subprocess import ddt import jsonschema import mock from rally import consts from rally.plugins.common.hook import sys_call from tests.unit import fakes from tests.unit import test @ddt.ddt class SysCallHookTestCase(test.TestCase): def test_validate(self): sys_call.SysCallHook.validate( { "name": "sys_call", "description": "list folder", "args": "ls", "trigger": { "name": "event", "args": { "unit": "iteration", "at": [10] } } } ) def test_validate_error(self): conf = { "name": "sys_call", "description": "list folder", "args": { "cmd": 50, }, "trigger": { "name": "event", "args": { "unit": "iteration", "at": [10] } } } self.assertRaises( jsonschema.ValidationError, sys_call.SysCallHook.validate, conf) @ddt.data( {"stdout": "foo output", "expected": { "additive": [], "complete": [{"chart_plugin": "TextArea", "data": ["RetCode: 0", "StdOut: foo output", "StdErr: (empty)"], "description": "Args: foo cmd", "title": "System call"}]}}, {"stdout": """{"additive": [], "complete": [ {"chart_plugin": "Pie", "title": "Bar Pie", "data": [["A", 4], ["B", 2]]}]}""", "expected": { "additive": [], "complete": [{"chart_plugin": "Pie", "data": [["A", 4], ["B", 2]], "title": "Bar Pie"}]}}) @ddt.unpack @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) @mock.patch("rally.plugins.common.hook.sys_call.subprocess.Popen") def test_run(self, mock_popen, mock_timer, stdout, expected): popen_instance = mock_popen.return_value popen_instance.returncode = 0 popen_instance.communicate.return_value = (stdout, "") hook = sys_call.SysCallHook(mock.Mock(), "foo cmd", {"iteration": 1}) hook.run_sync() self.assertEqual( {"finished_at": fakes.FakeTimer().finish_timestamp(), "output": expected, "started_at": fakes.FakeTimer().timestamp(), "status": consts.HookStatus.SUCCESS, "triggered_by": {"iteration": 1}}, hook.result()) mock_popen.assert_called_once_with(["foo", "cmd"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) @ddt.data( 
{"communicate_streams": ("foo out", "foo err"), "expected_error_details": "foo err", "expected_data_stderr": "StdErr: foo err"}, {"communicate_streams": ("foo out", ""), "expected_error_details": "stdout: foo out", "expected_data_stderr": "StdErr: (empty)"}) @ddt.unpack @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) @mock.patch("rally.plugins.common.hook.sys_call.subprocess.Popen") def test_run_error(self, mock_popen, mock_timer, communicate_streams, expected_error_details, expected_data_stderr): popen_instance = mock_popen.return_value popen_instance.communicate.return_value = communicate_streams popen_instance.returncode = 1 task = mock.MagicMock() sys_call_hook = sys_call.SysCallHook(task, "/bin/bash -c 'ls'", {"iteration": 1}) sys_call_hook.run_sync() self.assertEqual( {"error": {"details": expected_error_details, "etype": "n/a", "msg": "Subprocess returned 1"}, "finished_at": fakes.FakeTimer().finish_timestamp(), "output": { "additive": [], "complete": [{"chart_plugin": "TextArea", "data": ["RetCode: 1", "StdOut: foo out", expected_data_stderr], "description": "Args: /bin/bash -c 'ls'", "title": "System call"}]}, "started_at": fakes.FakeTimer().timestamp(), "status": "failed", "triggered_by": {"iteration": 1}}, sys_call_hook.result()) mock_popen.assert_called_once_with( ["/bin/bash", "-c", "ls"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) rally-0.9.1/tests/unit/plugins/common/verification/0000775000567000056710000000000013073420067023563 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/verification/__init__.py0000664000567000056710000000000013073417717025672 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/verification/test_reporters.py0000664000567000056710000004337413073417717027244 0ustar jenkinsjenkins00000000000000# All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import datetime as dt import os import ddt import mock from rally.common import utils from rally.plugins.common.verification import reporters from tests.unit import test PATH = "rally.plugins.common.verification.reporters" def get_verifications(): tests_1 = { "some.test.TestCase.test_foo[id=iiiidddd;smoke]": {"name": "some.test.TestCase.test_foo", "tags": ["smoke", "id"], "status": "success", "timestamp": "2001-01-01T00:01:00", "duration": "8"}, "some.test.TestCase.test_skipped": {"name": "some.test.TestCase.test_skipped", "status": "skip", "timestamp": "2001-01-01T00:02:00", "reason": "Skipped until Bug: 666 is resolved.", "duration": "0"}, "some.test.TestCase.test_xfail": {"name": "some.test.TestCase.test_xfail", "status": "xfail", "timestamp": "2001-01-01T00:03:00", "reason": "something", "traceback": "HEEELP", "duration": "3"}, "some.test.TestCase.test_uxsuccess": {"name": "some.test.TestCase.test_uxsuccess", "status": "uxsuccess", "timestamp": "2001-01-01T00:04:00", "reason": "It should fail since I expect it", "duration": "3"} } tests_2 = { "some.test.TestCase.test_foo[id=iiiidddd;smoke]": {"name": "some.test.TestCase.test_foo", "tags": ["smoke", "id"], "status": "success", "timestamp": "2001-02-01T00:01:00", "duration": "8"}, "some.test.TestCase.test_failed": {"name": "some.test.TestCase.test_failed", "status": "fail", "timestamp": "2001-02-01T00:02:00", "traceback": "HEEEEEEELP", "duration": "8"}, 
"some.test.TestCase.test_skipped": {"name": "some.test.TestCase.test_skipped", "status": "skip", "timestamp": "2001-02-01T00:03:00", "reason": "Skipped until Bug: 666 is resolved.", "duration": "0"}, "some.test.TestCase.test_xfail": {"name": "some.test.TestCase.test_xfail", "status": "xfail", "timestamp": "2001-02-01T00:04:00", "reason": "something", "traceback": "HEEELP", "duration": "4"} } tests_3 = { "some.test.TestCase.test_foo[id=iiiidddd;smoke]": {"name": "some.test.TestCase.test_foo", "tags": ["smoke", "id"], "status": "success", "timestamp": "2001-03-01T00:01:00", "duration": "8"}, "some.test.TestCase.test_failed": {"name": "some.test.TestCase.test_failed", "status": "fail", "timestamp": "2001-03-01T00:02:00", "traceback": "HEEEEEEELP", "duration": "7"}, "some.test.TestCase.test_skipped": {"name": "some.test.TestCase.test_skipped", "status": "skip", "timestamp": "2001-03-01T00:03:00", "reason": "Skipped until Bug: 666 is resolved.", "duration": "0"}, "some.test.TestCase.test_xfail": {"name": "some.test.TestCase.test_xfail", "status": "xfail", "timestamp": "2001-03-01T00:04:00", "reason": "something", "traceback": "HEEELP", "duration": "3"} } return [ utils.Struct(uuid="foo-bar-1", created_at=dt.datetime(2001, 1, 1), updated_at=dt.datetime(2001, 1, 2), status="finished", run_args="set_name=compute", tests_duration=1.111, tests_count=9, skipped=0, success=3, expected_failures=3, unexpected_success=2, failures=1, tests=tests_1), utils.Struct(uuid="foo-bar-2", created_at=dt.datetime(2002, 1, 1), updated_at=dt.datetime(2002, 1, 2), status="finished", run_args="set_name=full", tests_duration=22.222, tests_count=99, skipped=0, success=33, expected_failures=33, unexpected_success=22, failures=11, tests=tests_2), utils.Struct(uuid="foo-bar-3", created_at=dt.datetime(2003, 1, 1), updated_at=dt.datetime(2003, 1, 2), status="finished", run_args="set_name=full", tests_duration=33.333, tests_count=99, skipped=0, success=33, expected_failures=33, unexpected_success=22, 
failures=11, tests=tests_3) ] class JSONReporterTestCase(test.TestCase): def test_validate(self): # nothing should fail reporters.JSONReporter.validate(mock.Mock()) reporters.JSONReporter.validate("") reporters.JSONReporter.validate(None) def test__generate(self): reporter = reporters.JSONReporter(get_verifications(), None) report = reporter._generate() self.assertEqual( collections.OrderedDict( [("foo-bar-1", {"status": "finished", "started_at": "2001-01-01T00:00:00", "finished_at": "2001-01-02T00:00:00", "tests_duration": 1.111, "tests_count": 9, "run_args": "set_name=compute", "skipped": 0, "success": 3, "unexpected_success": 2, "expected_failures": 3, "failures": 1}), ("foo-bar-2", {"status": "finished", "started_at": "2002-01-01T00:00:00", "finished_at": "2002-01-02T00:00:00", "tests_duration": 22.222, "tests_count": 99, "run_args": "set_name=full", "skipped": 0, "success": 33, "unexpected_success": 22, "expected_failures": 33, "failures": 11}), ("foo-bar-3", {"status": "finished", "started_at": "2003-01-01T00:00:00", "finished_at": "2003-01-02T00:00:00", "tests_duration": 33.333, "tests_count": 99, "run_args": "set_name=full", "skipped": 0, "success": 33, "unexpected_success": 22, "expected_failures": 33, "failures": 11})]), report["verifications"]) self.assertEqual({ "some.test.TestCase.test_foo[id=iiiidddd;smoke]": { "by_verification": {"foo-bar-1": {"duration": "8", "status": "success"}, "foo-bar-2": {"duration": "8", "status": "success"}, "foo-bar-3": {"duration": "8", "status": "success"} }, "name": "some.test.TestCase.test_foo", "tags": ["smoke", "id"]}, "some.test.TestCase.test_failed": { "by_verification": {"foo-bar-2": {"details": "HEEEEEEELP", "duration": "8", "status": "fail"}, "foo-bar-3": {"details": "HEEEEEEELP", "duration": "7", "status": "fail"}}, "name": "some.test.TestCase.test_failed", "tags": []}, "some.test.TestCase.test_skipped": { "by_verification": { "foo-bar-1": { "details": "Skipped until Bug: https://launchpad.net/" "bugs/666 is 
resolved.", "duration": "0", "status": "skip"}, "foo-bar-2": { "details": "Skipped until Bug: https://launchpad.net/" "bugs/666 is resolved.", "duration": "0", "status": "skip"}, "foo-bar-3": { "details": "Skipped until Bug: https://launchpad.net/" "bugs/666 is resolved.", "duration": "0", "status": "skip"}}, "name": "some.test.TestCase.test_skipped", "tags": []}, "some.test.TestCase.test_xfail": { "by_verification": { "foo-bar-1": {"details": "something\n\nHEEELP", "duration": "3", "status": "xfail"}, "foo-bar-2": {"details": "something\n\nHEEELP", "duration": "4", "status": "xfail"}, "foo-bar-3": {"details": "something\n\nHEEELP", "duration": "3", "status": "xfail"}}, "name": "some.test.TestCase.test_xfail", "tags": []}, "some.test.TestCase.test_uxsuccess": { "name": "some.test.TestCase.test_uxsuccess", "tags": [], "by_verification": {"foo-bar-1": { "details": "It should fail since I expect it", "duration": "3", "status": "uxsuccess"}}}}, report["tests"]) @mock.patch("%s.json.dumps" % PATH) @mock.patch("%s.JSONReporter._generate" % PATH) def test_generate(self, mock__generate, mock_dumps): reporter = reporters.JSONReporter([], output_destination=None) self.assertEqual({"print": mock_dumps.return_value}, reporter.generate()) mock__generate.assert_called_once_with() mock_dumps.assert_called_once_with(mock__generate.return_value, indent=4) mock__generate.reset_mock() mock_dumps.reset_mock() path = "some_path" reporter = reporters.JSONReporter([], output_destination=path) self.assertEqual({"files": {path: mock_dumps.return_value}, "open": path}, reporter.generate()) mock__generate.assert_called_once_with() mock_dumps.assert_called_once_with(mock__generate.return_value, indent=4) @ddt.ddt class HTMLReporterTestCase(test.TestCase): @mock.patch("%s.utils" % PATH) @mock.patch("%s.json.dumps" % PATH) @ddt.data((reporters.HTMLReporter, False), (reporters.HTMLStaticReporter, True)) @ddt.unpack def test_generate(self, cls, include_libs, mock_dumps, mock_utils): mock_render = 
mock_utils.get_template.return_value.render reporter = cls(get_verifications(), None) self.assertEqual({"print": mock_render.return_value}, reporter.generate()) mock_render.assert_called_once_with(data=mock_dumps.return_value, include_libs=include_libs) mock_utils.get_template.assert_called_once_with( "verification/report.html") self.assertEqual(1, mock_dumps.call_count) args, kwargs = mock_dumps.call_args self.assertTrue(not kwargs) self.assertEqual(1, len(args)) ctx = args[0] self.assertEqual({"uuids", "verifications", "tests", "show_comparison_note"}, set(ctx.keys())) self.assertEqual(["foo-bar-1", "foo-bar-2", "foo-bar-3"], list(ctx["uuids"])) self.assertTrue(ctx["show_comparison_note"]) self.assertEqual({ "some.test.TestCase.test_foo[id=iiiidddd;smoke]": { "by_verification": {"foo-bar-1": {"details": None, "duration": "8", "status": "success"}, "foo-bar-2": {"details": None, "duration": "8 (+0.0)", "status": "success"}, "foo-bar-3": {"details": None, "duration": "8 (+0.0)", "status": "success"}}, "has_details": False, "name": "some.test.TestCase.test_foo", "tags": ["smoke", "id"]}, "some.test.TestCase.test_failed": { "by_verification": {"foo-bar-2": {"details": "HEEEEEEELP", "duration": "8", "status": "fail"}, "foo-bar-3": {"details": "HEEEEEEELP", "duration": "7 (-1.0)", "status": "fail"}}, "has_details": True, "name": "some.test.TestCase.test_failed", "tags": []}, "some.test.TestCase.test_skipped": { "by_verification": { "foo-bar-1": {"details": "Skipped until Bug: https://laun" "chpad.net/bugs/666 is resolved.", "duration": "", "status": "skip"}, "foo-bar-2": {"details": "Skipped until Bug: https://laun" "chpad.net/bugs/666 is resolved.", "duration": "", "status": "skip"}, "foo-bar-3": {"details": "Skipped until Bug: https://laun" "chpad.net/bugs/666 is resolved.", "duration": "", "status": "skip"}}, "has_details": True, "name": "some.test.TestCase.test_skipped", "tags": []}, "some.test.TestCase.test_xfail": { "by_verification": { "foo-bar-1": {"details": 
"something\n\nHEEELP", "duration": "3", "status": "xfail"}, "foo-bar-2": {"details": "something\n\nHEEELP", "duration": "4 (+1.0)", "status": "xfail"}, "foo-bar-3": {"details": "something\n\nHEEELP", "duration": "3 (+0.0)", "status": "xfail"}}, "has_details": True, "name": "some.test.TestCase.test_xfail", "tags": []}, "some.test.TestCase.test_uxsuccess": { "name": "some.test.TestCase.test_uxsuccess", "tags": [], "has_details": True, "by_verification": {"foo-bar-1": { "details": "It should fail since I expect it", "duration": "3", "status": "uxsuccess"}}}}, ctx["tests"]) class JUnitXMLReporterTestCase(test.TestCase): @mock.patch("%s.dt" % PATH) @mock.patch("%s.version.version_string" % PATH) def test_generate(self, mock_version_string, mock_dt): mock_dt.datetime.utcnow.return_value.strftime.return_value = "TIME" # release when junit reporter was introduced mock_version_string.return_value = "0.8.0" with open(os.path.join(os.path.dirname(__file__), "junit_report.xml")) as f: expected_report = f.read() junit_reporter = reporters.JUnitXMLReporter(get_verifications(), None) self.assertEqual({"print": expected_report}, junit_reporter.generate()) dest = "path" junit_reporter = reporters.JUnitXMLReporter(get_verifications(), dest) self.assertEqual({"open": dest, "files": {dest: expected_report}}, junit_reporter.generate()) rally-0.9.1/tests/unit/plugins/common/verification/junit_report.xml0000664000567000056710000000475513073417717027054 0ustar jenkinsjenkins00000000000000 Skipped until Bug: 666 is resolved. It is an unexpected success. The test should fail due to: It should fail since I expect it HEEEEEEELP Skipped until Bug: 666 is resolved. HEEEEEEELP Skipped until Bug: 666 is resolved. rally-0.9.1/tests/unit/plugins/common/verification/test_testr.py0000664000567000056710000002433013073417717026347 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import subprocess import mock from rally import exceptions from rally.plugins.common.verification import testr from tests.unit import test PATH = "rally.plugins.common.verification.testr" class TestrContextTestCase(test.TestCase): def setUp(self): super(TestrContextTestCase, self).setUp() self.verifier = mock.Mock() self.prepare_run_args = self.verifier.manager.prepare_run_args self.prepare_run_args.side_effect = lambda x: x def assertEqualCmd(self, expected, actual, msg=""): cmd = ["testr", "run", "--subunit"] cmd.extend(expected) self.assertEqual(cmd, actual, message=msg) def test_setup_with_concurrency(self): # default behaviour cfg = {"verifier": self.verifier} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--parallel"], cfg["testr_cmd"]) cfg = {"verifier": self.verifier, "run_args": {"concurrency": 0}} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--parallel"], cfg["testr_cmd"]) # serial mode cfg = {"verifier": self.verifier, "run_args": {"concurrency": 1}} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--concurrency", "1"], cfg["testr_cmd"]) # parallel mode cfg = {"verifier": self.verifier, "run_args": {"concurrency": 2}} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--parallel", "--concurrency", "2"], cfg["testr_cmd"]) @mock.patch("%s.common_utils.generate_random_path" % PATH) def test_setup_with_skip_and_load_lists(self, mock_generate_random_path): # with load_list, but without skip_list load_list = ["tests.foo", "tests.bar"] cfg = {"verifier": self.verifier, "run_args": 
{"load_list": load_list}} ctx = testr.TestrContext(cfg) mock_open = mock.mock_open() with mock.patch("%s.open" % PATH, mock_open): ctx.setup() mock_open.assert_called_once_with( mock_generate_random_path.return_value, "w") handle = mock_open.return_value handle.write.assert_called_once_with("\n".join(load_list)) self.assertEqualCmd(["--parallel", "--load-list", mock_generate_random_path.return_value], cfg["testr_cmd"]) self.assertFalse(self.verifier.manager.list_tests.called) # with load_list and skip_list load_list = ["tests.foo", "tests.bar"] skip_list = ["tests.foo"] cfg = {"verifier": self.verifier, "run_args": {"load_list": load_list, "skip_list": skip_list}} ctx = testr.TestrContext(cfg) mock_open = mock.mock_open() with mock.patch("%s.open" % PATH, mock_open): ctx.setup() mock_open.assert_called_once_with( mock_generate_random_path.return_value, "w") handle = mock_open.return_value handle.write.assert_called_once_with(load_list[1]) self.assertEqualCmd(["--parallel", "--load-list", mock_generate_random_path.return_value], cfg["testr_cmd"]) self.assertFalse(self.verifier.manager.list_tests.called) # with skip_list, but without load_list load_list = ["tests.foo", "tests.bar"] self.verifier.manager.list_tests.return_value = load_list skip_list = ["tests.foo"] cfg = {"verifier": self.verifier, "run_args": {"skip_list": skip_list}} ctx = testr.TestrContext(cfg) mock_open = mock.mock_open() with mock.patch("%s.open" % PATH, mock_open): ctx.setup() mock_open.assert_called_once_with( mock_generate_random_path.return_value, "w") handle = mock_open.return_value handle.write.assert_called_once_with(load_list[1]) self.assertEqualCmd(["--parallel", "--load-list", mock_generate_random_path.return_value], cfg["testr_cmd"]) self.verifier.manager.list_tests.assert_called_once_with() def test_setup_with_failing(self): cfg = {"verifier": self.verifier, "run_args": {"failed": True}} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--parallel", "--failing"], 
cfg["testr_cmd"]) def test_setup_with_pattern(self): cfg = {"verifier": self.verifier, "run_args": {"pattern": "foo"}} ctx = testr.TestrContext(cfg) ctx.setup() self.assertEqualCmd(["--parallel", "foo"], cfg["testr_cmd"]) @mock.patch("%s.os.remove" % PATH) @mock.patch("%s.os.path.exists" % PATH) def test_cleanup(self, mock_exists, mock_remove): files = {"/path/foo_1": True, "/path/bar_1": False, "/path/foo_2": False, "/path/bar_2": True} def fake_exists(path): return files.get(path, False) mock_exists.side_effect = fake_exists ctx = testr.TestrContext({"verifier": self.verifier}) ctx._tmp_files = files.keys() ctx.cleanup() self.assertEqual([mock.call(f) for f in files.keys()], mock_exists.call_args_list) self.assertEqual([mock.call(f) for f in files.keys() if files[f]], mock_remove.call_args_list) class TestrLauncherTestCase(test.TestCase): def test_run_environ_property(self): env = mock.Mock() class FakeLauncher(testr.TestrLauncher): @property def environ(self): return env self.assertEqual(env, FakeLauncher(mock.Mock()).run_environ) @mock.patch("%s.utils.check_output" % PATH) def test_list_tests(self, mock_check_output): mock_check_output.return_value = ( "logging message\n" # should be ignored "one more useless data\n" # should be ignored "tests.FooTestCase.test_something\n" # valid "tests.FooTestCase.test_another[\n" # invalid "tests.BarTestCase.test_another[id=123]\n" # valid "tests.FooTestCase.test_another[id=a2-213,smoke]\n" # valid ) verifier = mock.Mock() launcher = testr.TestrLauncher(verifier) self.assertEqual(["tests.FooTestCase.test_something", "tests.BarTestCase.test_another[id=123]", "tests.FooTestCase.test_another[id=a2-213,smoke]"], launcher.list_tests()) mock_check_output.assert_called_once_with( ["testr", "list-tests", ""], cwd=launcher.repo_dir, env=launcher.environ, debug_output=False) @mock.patch("%s.shutil.rmtree" % PATH) @mock.patch("%s.utils.check_output" % PATH) @mock.patch("%s.os.path.exists" % PATH) @mock.patch("%s.os.path.isdir" % PATH) 
def test__init_testr(self, mock_isdir, mock_exists, mock_check_output, mock_rmtree): launcher = testr.TestrLauncher(mock.Mock()) # case #1: testr already initialized mock_isdir.return_value = True launcher._init_testr() self.assertFalse(mock_check_output.called) self.assertFalse(mock_exists.called) self.assertFalse(mock_rmtree.called) # case #2: initializing testr without errors mock_isdir.return_value = False launcher._init_testr() mock_check_output.assert_called_once_with( ["testr", "init"], cwd=launcher.repo_dir, env=launcher.environ) self.assertFalse(mock_exists.called) self.assertFalse(mock_rmtree.called) mock_check_output.reset_mock() # case #3: initializing testr with error mock_check_output.side_effect = OSError test_repository_dir = os.path.join(launcher.base_dir, ".testrepository") self.assertRaises(exceptions.RallyException, launcher._init_testr) mock_check_output.assert_called_once_with( ["testr", "init"], cwd=launcher.repo_dir, env=launcher.environ) mock_exists.assert_called_once_with(test_repository_dir) mock_rmtree.assert_called_once_with(test_repository_dir) @mock.patch("%s.subunit_v2.parse" % PATH) @mock.patch("%s.subprocess.Popen" % PATH) def test_run(self, mock_popen, mock_parse): launcher = testr.TestrLauncher(mock.Mock()) ctx = {"testr_cmd": ["ls", "-la"], "run_args": {"xfail_list": mock.Mock(), "skip_list": mock.Mock()}} self.assertEqual(mock_parse.return_value, launcher.run(ctx)) mock_popen.assert_called_once_with(ctx["testr_cmd"], env=launcher.run_environ, cwd=launcher.repo_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) mock_popen.return_value.wait.assert_called_once_with() mock_parse.assert_called_once_with( mock_popen.return_value.stdout, live=True, expected_failures=ctx["run_args"]["xfail_list"], skipped_tests=ctx["run_args"]["skip_list"], logger_name=launcher.verifier.name) @mock.patch("%s.manager.VerifierManager.install" % PATH) def test_install(self, mock_verifier_manager_install): launcher = testr.TestrLauncher(mock.Mock()) 
launcher._init_testr = mock.Mock() launcher.install() mock_verifier_manager_install.assert_called_once_with() launcher._init_testr.assert_called_once_with() rally-0.9.1/tests/unit/plugins/common/sla/0000775000567000056710000000000013073420067021660 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/sla/test_failure_rate.py0000664000567000056710000001256313073417720025744 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import jsonschema from rally.plugins.common.sla import failure_rate from tests.unit import test class SLAPluginTestCase(test.TestCase): def test_validate(self): cnf = {"test_criterion": 42} failure_rate.sla.SLA.validate(cnf) def test_validate_invalid_name(self): self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"nonexistent": 42}) def test_validate_invalid_type(self): self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"test_criterion": 42.0}) @ddt.ddt class FailureRateTestCase(test.TestCase): def test_config_schema(self): self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"failure_rate": {"min": -1}}) self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"failure_rate": {"min": 100.1}}) self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"failure_rate": {"max": -0.1}}) self.assertRaises(jsonschema.ValidationError, failure_rate.FailureRate.validate, {"failure_rate": {"max": 101}}) def test_result_min(self): sla1 = failure_rate.FailureRate({"min": 80.0}) sla2 = failure_rate.FailureRate({"min": 60.5}) # 75% failure rate for sla in [sla1, sla2]: sla.add_iteration({"error": ["error"]}) sla.add_iteration({"error": []}) sla.add_iteration({"error": ["error"]}) sla.add_iteration({"error": ["error"]}) self.assertFalse(sla1.result()["success"]) # 80.0% > 75.0% self.assertTrue(sla2.result()["success"]) # 60.5% < 75.0% self.assertEqual("Failed", sla1.status()) self.assertEqual("Passed", sla2.status()) def test_result_max(self): sla1 = failure_rate.FailureRate({"max": 25.0}) sla2 = failure_rate.FailureRate({"max": 75.0}) # 50% failure rate for sla in [sla1, sla2]: sla.add_iteration({"error": ["error"]}) sla.add_iteration({"error": []}) self.assertFalse(sla1.result()["success"]) # 25.0% < 50.0% self.assertTrue(sla2.result()["success"]) # 75.0% > 50.0% self.assertEqual("Failed", sla1.status()) self.assertEqual("Passed", 
sla2.status()) def test_result_min_max(self): sla1 = failure_rate.FailureRate({"min": 50, "max": 90}) sla2 = failure_rate.FailureRate({"min": 5, "max": 20}) sla3 = failure_rate.FailureRate({"min": 24.9, "max": 25.1}) # 25% failure rate for sla in [sla1, sla2, sla3]: sla.add_iteration({"error": ["error"]}) sla.add_iteration({"error": []}) sla.add_iteration({"error": []}) sla.add_iteration({"error": []}) self.assertFalse(sla1.result()["success"]) # 25.0% < 50.0% self.assertFalse(sla2.result()["success"]) # 25.0% > 20.0% self.assertTrue(sla3.result()["success"]) # 24.9% < 25.0% < 25.1% self.assertEqual("Failed", sla1.status()) self.assertEqual("Failed", sla2.status()) self.assertEqual("Passed", sla3.status()) def test_result_no_iterations(self): sla = failure_rate.FailureRate({"max": 10.0}) self.assertTrue(sla.result()["success"]) def test_add_iteration(self): sla = failure_rate.FailureRate({"max": 35.0}) self.assertTrue(sla.add_iteration({"error": []})) self.assertTrue(sla.add_iteration({"error": []})) self.assertTrue(sla.add_iteration({"error": []})) self.assertTrue(sla.add_iteration({"error": ["error"]})) # 33% self.assertFalse(sla.add_iteration({"error": ["error"]})) # 40% @ddt.data([[0, 1, 0, 0], [0, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 0, 0, 1]]) def test_merge(self, errors): single_sla = failure_rate.FailureRate({"max": 25}) for ee in errors: for e in ee: single_sla.add_iteration({"error": ["error"] if e else []}) slas = [failure_rate.FailureRate({"max": 25}) for _ in errors] for idx, sla in enumerate(slas): for e in errors[idx]: sla.add_iteration({"error": ["error"] if e else []}) merged_sla = slas[0] for sla in slas[1:]: merged_sla.merge(sla) self.assertEqual(single_sla.success, merged_sla.success) self.assertEqual(single_sla.errors, merged_sla.errors) self.assertEqual(single_sla.total, merged_sla.total) rally-0.9.1/tests/unit/plugins/common/sla/__init__.py0000664000567000056710000000000013073417717023767 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/sla/test_ouliers.py0000664000567000056710000001256613073417720024767 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import jsonschema from rally.plugins.common.sla import outliers from tests.unit import test @ddt.ddt class OutliersTestCase(test.TestCase): def test_config_schema(self): outliers.Outliers.validate({"outliers": {"max": 0, "min_iterations": 5, "sigmas": 2.5}}) self.assertRaises(jsonschema.ValidationError, outliers.Outliers.validate, {"outliers": {"max": -1}}) self.assertRaises(jsonschema.ValidationError, outliers.Outliers.validate, {"outliers": {"max": 0, "min_iterations": 2}}) self.assertRaises(jsonschema.ValidationError, outliers.Outliers.validate, {"outliers": {"max": 0, "sigmas": 0}}) def test_result(self): sla1 = outliers.Outliers({"max": 1}) sla2 = outliers.Outliers({"max": 2}) iteration_durations = [3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 3.8, 4.3, 2.9, 10.2, 11.2, 3.4] # outliers: 10.2, 11.2 for sla in [sla1, sla2]: for d in iteration_durations: sla.add_iteration({"duration": d}) self.assertFalse(sla1.result()["success"]) # 2 outliers > 1 self.assertTrue(sla2.result()["success"]) # 2 outliers <= 2 self.assertEqual("Failed", sla1.status()) self.assertEqual("Passed", sla2.status()) def test_result_large_sigmas(self): sla = outliers.Outliers({"max": 1, "sigmas": 5}) iteration_durations = [3.1, 4.2, 3.6, 
4.5, 2.8, 3.3, 4.1, 3.8, 4.3, 2.9, 10.2, 11.2, 3.4] for d in iteration_durations: sla.add_iteration({"duration": d}) # NOTE(msdubov): No outliers registered since sigmas = 5 (not 2) self.assertTrue(sla.result()["success"]) self.assertEqual("Passed", sla.status()) def test_result_no_iterations(self): sla = outliers.Outliers({"max": 0}) self.assertTrue(sla.result()["success"]) def test_result_few_iterations_large_min_iterations(self): sla = outliers.Outliers({"max": 0, "min_iterations": 10}) iteration_durations = [3.1, 4.2, 4.7, 3.6, 15.14, 2.8] for d in iteration_durations: sla.add_iteration({"duration": d}) # NOTE(msdubov): SLA doesn't fail because it hasn't iterations < 10 self.assertTrue(sla.result()["success"]) def test_result_few_iterations_small_min_iterations(self): sla = outliers.Outliers({"max": 0, "min_iterations": 5}) iteration_durations = [3.1, 4.2, 4.7, 3.6, 15.14, 2.8] for d in iteration_durations: sla.add_iteration({"duration": d}) # NOTE(msdubov): Now this SLA can fail with >= 5 iterations self.assertFalse(sla.result()["success"]) def test_add_iteration(self): sla = outliers.Outliers({"max": 1}) # NOTE(msdubov): One outlier in the first 11 iterations first_iterations = [3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 3.8, 4.3, 2.9, 10.2] for d in first_iterations: self.assertTrue(sla.add_iteration({"duration": d})) # NOTE(msdubov): 12th iteration makes the SLA always failed self.assertFalse(sla.add_iteration({"duration": 11.2})) self.assertFalse(sla.add_iteration({"duration": 3.4})) @ddt.data([[3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 3.8, 4.3, 2.9, 10.2], [3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 20.1, 3.8, 4.3, 2.9, 24.2], [3.1, 4.2, 3.6, 4.5, 2.8, 3.3, 4.1, 30.8, 4.3, 49.9, 69.2]]) def test_merge(self, durations): single_sla = outliers.Outliers({"max": 1}) for dd in durations: for d in dd: single_sla.add_iteration({"duration": d}) slas = [outliers.Outliers({"max": 1}) for _ in durations] for idx, sla in enumerate(slas): for duration in durations[idx]: 
sla.add_iteration({"duration": duration}) merged_sla = slas[0] for sla in slas[1:]: merged_sla.merge(sla) self.assertEqual(single_sla.success, merged_sla.success) self.assertEqual(single_sla.iterations, merged_sla.iterations) # self.assertEqual(single_sla.threshold, merged_sla.threshold) # NOTE(ikhudoshyn): We are unable to implement # rally.plugins.common.sla.outliers.Outliers.merge(..) correctly # (see my comment for the method) # The assert above will fail with the majority of data # The line below passes with this particular data # but may fail as well on another data self.assertEqual(single_sla.outliers, merged_sla.outliers) rally-0.9.1/tests/unit/plugins/common/sla/test_max_average_duration_per_atomic.py0000664000567000056710000000710013073417720031657 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import jsonschema from rally.plugins.common.sla import max_average_duration_per_atomic as madpa from tests.unit import test @ddt.ddt class MaxAverageDurationPerAtomicTestCase(test.TestCase): def test_config_schema(self): properties = { "max_avg_duration_per_atomic": {"neutron.list_ports": "elf", "neutron.create_port": 1.0} } self.assertRaises( jsonschema.ValidationError, madpa.MaxAverageDurationPerAtomic.validate, properties) properties["max_avg_duration_per_atomic"]["neutron.list_ports"] = 1.0 madpa.MaxAverageDurationPerAtomic.validate(properties) def test_result(self): cls = madpa.MaxAverageDurationPerAtomic sla1 = cls({"a1": 42, "a2": 42}) sla2 = cls({"a1": 42, "a2": 2}) for sla in [sla1, sla2]: sla.add_iteration({"atomic_actions": {"a1": 3.14, "a2": 7.77}}) sla.add_iteration({"atomic_actions": {"a1": 8.14, "a2": 9.77}}) self.assertTrue(sla1.result()["success"]) self.assertFalse(sla2.result()["success"]) self.assertEqual("Passed", sla1.status()) self.assertEqual("Failed", sla2.status()) def test_result_no_iterations(self): sla = madpa.MaxAverageDurationPerAtomic({"a1": 8.14, "a2": 9.77}) self.assertTrue(sla.result()["success"]) def test_add_iteration(self): sla = madpa.MaxAverageDurationPerAtomic({"a1": 5, "a2": 10}) add = sla.add_iteration self.assertTrue(add({"atomic_actions": {"a1": 2.5, "a2": 5.0}})) self.assertTrue(add({"atomic_actions": {"a1": 5.0, "a2": 10.0}})) # the following pushes a2 over the limit self.assertFalse(add({"atomic_actions": {"a1": 5.0, "a2": 20.0}})) # bring a2 back self.assertTrue(add({"atomic_actions": {"a1": 5.0, "a2": 2.0}})) # push a1 over self.assertFalse(add({"atomic_actions": {"a1": 10.0, "a2": 2.0}})) # bring it back self.assertTrue(add({"atomic_actions": {"a1": 1.0, "a2": 2.0}})) @ddt.data([[1.0, 2.0, 1.5, 4.3], [2.1, 3.4, 1.2, 6.3, 7.2, 7.0, 1.], [1.1, 1.1, 2.2, 2.2, 3.3, 4.3]]) def test_merge(self, durations): init = {"a1": 8.14, "a2": 9.77} single_sla = madpa.MaxAverageDurationPerAtomic(init) for dd in durations: 
for d in dd: single_sla.add_iteration( {"atomic_actions": {"a1": d, "a2": d * 2}}) slas = [madpa.MaxAverageDurationPerAtomic(init) for _ in durations] for idx, sla in enumerate(slas): for d in durations[idx]: sla.add_iteration({"atomic_actions": {"a1": d, "a2": d * 2}}) merged_sla = slas[0] for sla in slas[1:]: merged_sla.merge(sla) self.assertEqual(single_sla.success, merged_sla.success) self.assertEqual(single_sla.avg_by_action, merged_sla.avg_by_action) rally-0.9.1/tests/unit/plugins/common/sla/test_max_average_duration.py0000664000567000056710000000571613073417720027470 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import jsonschema from rally.plugins.common.sla import max_average_duration from tests.unit import test @ddt.ddt class MaxAverageDurationTestCase(test.TestCase): def test_config_schema(self): properties = { "max_avg_duration": 0 } self.assertRaises(jsonschema.ValidationError, max_average_duration.MaxAverageDuration.validate, properties) def test_result(self): sla1 = max_average_duration.MaxAverageDuration(42) sla2 = max_average_duration.MaxAverageDuration(3.62) for sla in [sla1, sla2]: sla.add_iteration({"duration": 3.14}) sla.add_iteration({"duration": 6.28}) self.assertTrue(sla1.result()["success"]) # 42 > avg([3.14, 6.28]) self.assertFalse(sla2.result()["success"]) # 3.62 < avg([3.14, 6.28]) self.assertEqual("Passed", sla1.status()) self.assertEqual("Failed", sla2.status()) def test_result_no_iterations(self): sla = max_average_duration.MaxAverageDuration(42) self.assertTrue(sla.result()["success"]) def test_add_iteration(self): sla = max_average_duration.MaxAverageDuration(4.0) self.assertTrue(sla.add_iteration({"duration": 3.5})) self.assertTrue(sla.add_iteration({"duration": 2.5})) self.assertTrue(sla.add_iteration({"duration": 5.0})) # avg = 3.667 self.assertFalse(sla.add_iteration({"duration": 7.0})) # avg = 4.5 self.assertTrue(sla.add_iteration({"duration": 1.0})) # avg = 3.8 @ddt.data([[1.0, 2.0, 1.5, 4.3], [2.1, 3.4, 1.2, 6.3, 7.2, 7.0, 1.], [1.1, 1.1, 2.2, 2.2, 3.3, 4.3]]) def test_merge(self, durations): single_sla = max_average_duration.MaxAverageDuration(4.0) for dd in durations: for d in dd: single_sla.add_iteration({"duration": d}) slas = [max_average_duration.MaxAverageDuration(4.0) for _ in durations] for idx, sla in enumerate(slas): for duration in durations[idx]: sla.add_iteration({"duration": duration}) merged_sla = slas[0] for sla in slas[1:]: merged_sla.merge(sla) self.assertEqual(single_sla.success, merged_sla.success) self.assertEqual(single_sla.avg, merged_sla.avg) 
rally-0.9.1/tests/unit/plugins/common/sla/test_performance_degradation.py0000664000567000056710000000705013073417720030137 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import jsonschema from rally.plugins.common.sla import performance_degradation as perfdegr from tests.unit import test @ddt.ddt class PerformanceDegradationTestCase(test.TestCase): def setUp(self): super(PerformanceDegradationTestCase, self).setUp() self.sla = perfdegr.PerformanceDegradation({"max_degradation": 50}) def test_config_schema(self): properties = { "performance_degradation": {} } self.assertRaises( jsonschema.ValidationError, perfdegr.PerformanceDegradation.validate, properties) properties["performance_degradation"]["max_degradation"] = -1 self.assertRaises( jsonschema.ValidationError, perfdegr.PerformanceDegradation.validate, properties) properties["performance_degradation"]["max_degradation"] = 1000.0 perfdegr.PerformanceDegradation.validate(properties) @ddt.data(([39.0, 30.0, 32.0, 49.0, 47.0, 43.0], False, "Failed"), ([31.0, 30.0, 32.0, 39.0, 45.0, 43.0], True, "Passed"), ([], True, "Passed")) @ddt.unpack def test_iterations(self, durations, result, status): for duration in durations: self.sla.add_iteration({"duration": duration}) self.assertIs(self.sla.success, result) self.assertIs(self.sla.result()["success"], result) self.assertEqual(status, self.sla.status()) @ddt.data(([39.0, 30.0, 32.0], 
[49.0, 40.0, 51.0], False, "Failed"), ([31.0, 30.0, 32.0], [39.0, 45.0, 43.0], True, "Passed"), ([31.0, 30.0, 32.0], [32.0, 49.0, 30.0], False, "Failed"), ([], [31.0, 30.0, 32.0], True, "Passed"), ([31.0, 30.0, 32.0], [], True, "Passed"), ([], [], True, "Passed"), ([35.0, 30.0, 49.0], [], False, "Failed"), ([], [35.0, 30.0, 49.0], False, "Failed")) @ddt.unpack def test_merge(self, durations1, durations2, result, status): for duration in durations1: self.sla.add_iteration({"duration": duration}) sla2 = perfdegr.PerformanceDegradation({"max_degradation": 50}) for duration in durations2: sla2.add_iteration({"duration": duration}) self.sla.merge(sla2) self.assertIs(self.sla.success, result) self.assertIs(self.sla.result()["success"], result) self.assertEqual(status, self.sla.status()) def test_details(self): self.assertEqual("Current degradation: 0.0% - Passed", self.sla.details()) for duration in [39.0, 30.0, 32.0]: self.sla.add_iteration({"duration": duration}) self.assertEqual("Current degradation: 30.0% - Passed", self.sla.details()) self.sla.add_iteration({"duration": 75.0}) self.assertEqual("Current degradation: 150.0% - Failed", self.sla.details()) rally-0.9.1/tests/unit/plugins/common/sla/test_iteration_time.py0000664000567000056710000000551213073417720026312 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import jsonschema from rally.plugins.common.sla import iteration_time from tests.unit import test @ddt.ddt class IterationTimeTestCase(test.TestCase): def test_config_schema(self): properties = { "max_seconds_per_iteration": 0 } self.assertRaises(jsonschema.ValidationError, iteration_time.IterationTime.validate, properties) def test_result(self): sla1 = iteration_time.IterationTime(42) sla2 = iteration_time.IterationTime(3.62) for sla in [sla1, sla2]: sla.add_iteration({"duration": 3.14}) sla.add_iteration({"duration": 6.28}) self.assertTrue(sla1.result()["success"]) # 42 > 6.28 self.assertFalse(sla2.result()["success"]) # 3.62 < 6.28 self.assertEqual("Passed", sla1.status()) self.assertEqual("Failed", sla2.status()) def test_result_no_iterations(self): sla = iteration_time.IterationTime(42) self.assertTrue(sla.result()["success"]) def test_add_iteration(self): sla = iteration_time.IterationTime(4.0) self.assertTrue(sla.add_iteration({"duration": 3.14})) self.assertTrue(sla.add_iteration({"duration": 2.0})) self.assertTrue(sla.add_iteration({"duration": 3.99})) self.assertFalse(sla.add_iteration({"duration": 4.5})) self.assertFalse(sla.add_iteration({"duration": 3.8})) @ddt.data([[1.0, 2.0, 1.5, 4.3], [2.1, 3.4, 1.2, 6.3, 7.2, 7.0, 1.], [1.1, 1.1, 2.2, 2.2, 3.3, 4.3]]) def test_merge(self, durations): single_sla = iteration_time.IterationTime(4.0) for dd in durations: for d in dd: single_sla.add_iteration({"duration": d}) slas = [iteration_time.IterationTime(4.0) for _ in durations] for idx, sla in enumerate(slas): for duration in durations[idx]: sla.add_iteration({"duration": duration}) merged_sla = slas[0] for sla in slas[1:]: merged_sla.merge(sla) self.assertEqual(single_sla.success, merged_sla.success) self.assertEqual(single_sla.max_iteration_time, merged_sla.max_iteration_time) rally-0.9.1/tests/unit/plugins/common/context/0000775000567000056710000000000013073420067022565 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/context/__init__.py0000664000567000056710000000000013073417717024674 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/context/test_dummy.py0000664000567000056710000000252113073417717025341 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally import exceptions from rally.plugins.common.context import dummy from tests.unit import test class DummyContextTestCase(test.TestCase): def test_setup(self): dummy.DummyContext({"task": "some_task"}).setup() config = {"dummy_context": {"fail_setup": True}} self.assertRaises( exceptions.RallyException, dummy.DummyContext({"task": "some_task", "config": config}).setup) def test_cleanup(self): dummy.DummyContext({"task": "some_task"}).cleanup() config = {"dummy_context": {"fail_cleanup": True}} self.assertRaises( exceptions.RallyException, dummy.DummyContext({"task": "some_task", "config": config}).cleanup) rally-0.9.1/tests/unit/plugins/common/scenarios/0000775000567000056710000000000013073420067023067 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/__init__.py0000664000567000056710000000000013073417717025176 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/requests/0000775000567000056710000000000013073420067024742 5ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/requests/__init__.py0000664000567000056710000000000013073417717027051 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/requests/test_utils.py0000664000567000056710000000303513073417717027524 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.common.scenarios.requests import utils from tests.unit import test class RequestsTestCase(test.TestCase): @mock.patch("requests.request") def test__check_request(self, mock_request): mock_request.return_value = mock.MagicMock(status_code=200) scenario = utils.RequestScenario(test.get_test_context()) scenario._check_request(status_code=200, url="sample", method="GET") self._test_atomic_action_timer(scenario.atomic_actions(), "requests.check_request") mock_request.assert_called_once_with("GET", "sample") @mock.patch("requests.request") def test_check_wrong_request(self, mock_request): mock_request.return_value = mock.MagicMock(status_code=200) scenario = utils.RequestScenario(test.get_test_context()) self.assertRaises(ValueError, scenario._check_request, status_code=201, url="sample", method="GET") rally-0.9.1/tests/unit/plugins/common/scenarios/requests/test_http_requests.py0000664000567000056710000000333413073417717031300 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with 
the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.common.scenarios.requests import http_requests from tests.unit import test SCN = "rally.plugins.common.scenarios" class RequestScenarioTestCase(test.TestCase): @mock.patch("%s.requests.utils.RequestScenario._check_request" % SCN) def test_check_request(self, mock__check_request): Requests = http_requests.HttpRequestsCheckRequest( test.get_test_context()) Requests.run("sample_url", "GET", 200) mock__check_request.assert_called_once_with("sample_url", "GET", 200) @mock.patch("%s.requests.utils.RequestScenario._check_request" % SCN) @mock.patch("%s.requests.http_requests.random.choice" % SCN) def test_check_random_request(self, mock_choice, mock__check_request): mock_choice.return_value = {"url": "sample_url"} Requests = http_requests.HttpRequestsCheckRandomRequest( test.get_test_context()) Requests.run(status_code=200, requests=[{"url": "sample_url"}]) mock_choice.assert_called_once_with([{"url": "sample_url"}]) mock__check_request.assert_called_once_with( status_code=200, url="sample_url") rally-0.9.1/tests/unit/plugins/common/scenarios/dummy/0000775000567000056710000000000013073420067024222 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/dummy/__init__.py0000664000567000056710000000000013073417717026331 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/scenarios/dummy/test_dummy.py0000664000567000056710000002337213073417717027005 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in 
compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import mock from rally.plugins.common.scenarios.dummy import dummy from tests.unit import test DUMMY = "rally.plugins.common.scenarios.dummy.dummy." @ddt.ddt class DummyFailureTestCase(test.TestCase): @ddt.data({"iteration": 0, "kwargs": {}}, {"iteration": 0, "kwargs": {"each": 1}}, {"iteration": 5, "kwargs": {"from_iteration": 4}, "raises": False}, {"iteration": 5, "kwargs": {"from_iteration": 5, "to_iteration": 5}}, {"iteration": 5, "kwargs": {"from_iteration": 4, "to_iteration": 5}}, {"iteration": 5, "kwargs": {"from_iteration": 5, "to_iteration": 6}}, {"iteration": 5, "kwargs": {"from_iteration": 4, "to_iteration": 6}}, {"iteration": 5, "kwargs": {"from_iteration": 4, "to_iteration": 6, "sleep": 5}}, {"iteration": 5, "raises": False, "kwargs": {"from_iteration": 4, "to_iteration": 6, "sleep": 5, "each": 2}}, {"iteration": 6, "kwargs": {"from_iteration": 4, "to_iteration": 6, "sleep": 5, "each": 2}}) @ddt.unpack @mock.patch(DUMMY + "utils.interruptable_sleep") def test_run(self, mock_interruptable_sleep, iteration, kwargs, raises=True): scenario = dummy.DummyFailure( test.get_test_context(iteration=iteration)) if raises: self.assertRaises(dummy.DummyScenarioException, scenario.run, **kwargs) else: scenario.run(**kwargs) mock_interruptable_sleep.assert_called_once_with( kwargs.get("sleep", 0.1)) @ddt.ddt class DummyTestCase(test.TestCase): @mock.patch(DUMMY + "utils.interruptable_sleep") def test_dummy(self, mock_interruptable_sleep): scenario = dummy.Dummy(test.get_test_context()) scenario.sleep_between = mock.MagicMock() 
scenario.run(sleep=10) mock_interruptable_sleep.assert_called_once_with(10) @mock.patch(DUMMY + "utils.interruptable_sleep") def test_dummy_exception(self, mock_interruptable_sleep): scenario = dummy.DummyException(test.get_test_context()) size_of_message = 5 self.assertRaises(dummy.DummyScenarioException, scenario.run, size_of_message, sleep=10) mock_interruptable_sleep.assert_called_once_with(10) def test_dummy_exception_probability(self): scenario = dummy.DummyExceptionProbability(test.get_test_context()) # should not raise an exception as probability is 0 for i in range(100): scenario.run(exception_probability=0) # should always raise an exception as probability is 1 for i in range(100): self.assertRaises(dummy.DummyScenarioException, scenario.run, exception_probability=1) @mock.patch(DUMMY + "random") def test_dummy_output(self, mock_random): mock_random.randint.side_effect = lambda min_, max_: max_ desc = "This is a description text for %s" for random_range, exp in (None, 25), (1, 1), (42, 42): scenario = dummy.DummyOutput(test.get_test_context()) if random_range is None: scenario.run() else: scenario.run(random_range=random_range) expected = { "additive": [ {"chart_plugin": "StatsTable", "data": [["%s stat" % s, exp] for s in ("foo", "bar", "spam")], "description": desc % "Additive StatsTable", "title": "Additive StatsTable"}, {"chart_plugin": "StackedArea", "data": [["foo %i" % i, exp] for i in range(1, 7)], "label": "Measure this in Foo units", "title": "Additive StackedArea (no description)"}, {"chart_plugin": "Lines", "data": [["bar %i" % i, exp] for i in range(1, 4)], "description": desc % "Additive Lines", "label": "Measure this in Bar units", "title": "Additive Lines"}, {"chart_plugin": "Pie", "data": [["spam %i" % i, exp] for i in range(1, 4)], "description": desc % "Additive Pie", "title": "Additive Pie"}], "complete": [ {"axis_label": "This is a custom X-axis label", "chart_plugin": "Lines", "data": [["Foo", [[i, exp] for i in range(1, 8)]], 
["Bar", [[i, exp] for i in range(1, 8)]], ["Spam", [[i, exp] for i in range(1, 8)]]], "description": desc % "Complete Lines", "label": "Measure this is some units", "title": "Complete Lines"}, {"axis_label": "This is a custom X-axis label", "chart_plugin": "StackedArea", "data": [["alpha", [[i, exp] for i in range(50)]], ["beta", [[i, exp] for i in range(50)]], ["gamma", [[i, exp] for i in range(50)]]], "description": desc % "Complete StackedArea", "label": "Yet another measurement units", "title": "Complete StackedArea"}, {"title": "Arbitrary Text", "chart_plugin": "TextArea", "data": ["Lorem ipsum dolor sit amet, consectetur " "adipiscing elit, sed do eiusmod tempor " "incididunt ut labore et dolore magna " "aliqua." * 2] * 4}, {"chart_plugin": "Pie", "data": [[s, exp] for s in ("delta", "epsilon", "zeta", "theta", "lambda", "omega")], "title": "Complete Pie (no description)"}, {"chart_plugin": "Table", "data": {"cols": ["%s column" % s for s in ("mu", "xi", "pi", "tau", "chi")], "rows": [["%s row" % s, exp, exp, exp, exp] for s in ("iota", "nu", "rho", "phi", "psi")]}, "description": desc % "Complete Table", "title": "Complete Table"}]} self.assertEqual(expected, scenario._output) def test_dummy_random_fail_in_atomic(self): scenario = dummy.DummyRandomFailInAtomic(test.get_test_context()) for i in range(10): scenario.run(exception_probability=0) for i in range(10): self.assertRaises(KeyError, scenario.run, exception_probability=1) @ddt.data({}, {"actions_num": 5, "sleep_min": 0, "sleep_max": 2}, {"actions_num": 7, "sleep_min": 1.23, "sleep_max": 4.56}, {"actions_num": 1, "sleep_max": 4.56}, {"sleep_min": 1}) @ddt.unpack @mock.patch(DUMMY + "random") @mock.patch(DUMMY + "utils.interruptable_sleep") def test_dummy_random_action(self, mock_interruptable_sleep, mock_random, **kwargs): mock_random.uniform.side_effect = range(100) scenario = dummy.DummyRandomAction(test.get_test_context()) scenario.run(**kwargs) actions_num = kwargs.get("actions_num", 5) calls = 
[mock.call(i) for i in range(actions_num)] self.assertEqual(calls, mock_interruptable_sleep.mock_calls) calls = [mock.call(kwargs.get("sleep_min", 0), kwargs.get("sleep_max", 2)) for i in range(actions_num)] self.assertEqual(calls, mock_random.uniform.mock_calls) for i in range(actions_num): self._test_atomic_action_timer(scenario.atomic_actions(), "action_%d" % i) @ddt.data({"number_of_actions": 5, "sleep_factor": 1}, {"number_of_actions": 7, "sleep_factor": 2}, {"number_of_actions": 1, "sleep_factor": 3}) @ddt.unpack @mock.patch(DUMMY + "utils.interruptable_sleep") def test_dummy_timed_atomic_actions(self, mock_interruptable_sleep, number_of_actions, sleep_factor): dummy.DummyRandomAction(test.get_test_context()).run( number_of_actions, sleep_factor) scenario = dummy.DummyTimedAtomicAction(test.get_test_context()) scenario.run(number_of_actions, sleep_factor) for i in range(number_of_actions): self._test_atomic_action_timer(scenario.atomic_actions(), "action_%d" % i) mock_interruptable_sleep.assert_any_call(i * sleep_factor) rally-0.9.1/tests/unit/plugins/common/runners/0000775000567000056710000000000013073420067022575 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/runners/__init__.py0000664000567000056710000000000013073417717024704 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/plugins/common/runners/test_serial.py0000664000567000056710000000555513073417720025501 0ustar jenkinsjenkins00000000000000# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.plugins.common.runners import serial from tests.unit import fakes from tests.unit import test class SerialScenarioRunnerTestCase(test.TestCase): @mock.patch("rally.common.utils.DequeAsQueue") @mock.patch("rally.task.runner._run_scenario_once") def test__run_scenario(self, mock__run_scenario_once, mock_deque_as_queue): times = 5 result = {"duration": 10., "idle_duration": 0., "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}, "timestamp": 1.} mock__run_scenario_once.return_value = result deque_as_queue_inst = mock_deque_as_queue.return_value expected_results = [[result] for i in range(times)] runner = serial.SerialScenarioRunner(mock.MagicMock(), {"times": times}) runner._run_scenario(fakes.FakeScenario, "do_it", fakes.FakeContext().context, {}) self.assertEqual(len(runner.result_queue), times) results = list(runner.result_queue) self.assertEqual(results, expected_results) expected_calls = [] for i in range(times): ctxt = fakes.FakeContext().context ctxt["iteration"] = i + 1 ctxt["task"] = mock.ANY expected_calls.append( mock.call(fakes.FakeScenario, "do_it", ctxt, {}, deque_as_queue_inst) ) mock__run_scenario_once.assert_has_calls(expected_calls) mock_deque_as_queue.assert_called_once_with(runner.event_queue) def test__run_scenario_aborted(self): runner = serial.SerialScenarioRunner(mock.MagicMock(), {"times": 5}) runner.abort() runner._run_scenario(fakes.FakeScenario, "do_it", fakes.FakeContext().context, {}) self.assertEqual(len(runner.result_queue), 0) def test_abort(self): runner = serial.SerialScenarioRunner(mock.MagicMock(), {"times": 5}) self.assertFalse(runner.aborted.is_set()) runner.abort() self.assertTrue(runner.aborted.is_set()) rally-0.9.1/tests/unit/plugins/common/runners/test_rps.py0000664000567000056710000003375613073417720025032 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import jsonschema import mock from rally import exceptions from rally.plugins.common.runners import rps from rally.task import runner from tests.unit import fakes from tests.unit import test RUNNERS_BASE = "rally.task.runner." RUNNERS = "rally.plugins.common.runners." @ddt.ddt class RPSScenarioRunnerTestCase(test.TestCase): def setUp(self): super(RPSScenarioRunnerTestCase, self).setUp() self.task = mock.MagicMock() @ddt.data( { "config": { "type": "rps", "rps": { "start": 1, "end": 3, "step": 1, }, "times": 6 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 10, "step": 1, }, "times": 55 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 2, "step": 1, }, "times": 1 } }, { "config": { "type": "rps", "rps": { "start": 2, "end": 1, "step": 1, }, "times": 2 } }, { "config": { "type": "rps", "rps": { "start": 2, "end": 1, "step": 3, }, "times": 2 } }, { "config": { "type": "rps", "times": 1, "rps": 100, "max_concurrency": 50, "max_cpu_count": 8, "timeout": 1 } }, { "config": { "type": "rps", "rps": 0.000001 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 10, "step": 1, }, "times": 55 } }, ) @ddt.unpack def test_validate(self, config): if "times" not in config: self.assertRaises( jsonschema.exceptions.ValidationError, rps.RPSScenarioRunner.validate, config) elif config["times"] == 2: self.assertRaises( exceptions.InvalidTaskException, rps.RPSScenarioRunner.validate, 
config) else: rps.RPSScenarioRunner.validate(config) def test_rps_parameter_validate_failed(self): config = { "type": "rps", "rps": 0 } self.assertRaises(jsonschema.ValidationError, rps.RPSScenarioRunner.validate, config) def test_validate_failed(self): config = {"type": "rps", "a": 10} self.assertRaises(jsonschema.ValidationError, rps.RPSScenarioRunner.validate, config) @mock.patch(RUNNERS + "rps.LOG") @mock.patch(RUNNERS + "rps.time") @mock.patch(RUNNERS + "rps.threading.Thread") @mock.patch(RUNNERS + "rps.multiprocessing.Queue") @mock.patch(RUNNERS + "rps.runner") def test__worker_process(self, mock_runner, mock_queue, mock_thread, mock_time, mock_log): def time_side(): time_side.last += 0.03 time_side.count += 1 return time_side.last time_side.last = 0 time_side.count = 0 mock_time.time = time_side mock_thread_instance = mock.MagicMock( isAlive=mock.MagicMock(return_value=False)) mock_thread.return_value = mock_thread_instance mock_event = mock.MagicMock( is_set=mock.MagicMock(return_value=False)) mock_event_queue = mock.MagicMock() times = 4 max_concurrent = 3 fake_ram_int = iter(range(10)) context = {"users": [{"tenant_id": "t1", "credential": "c1", "id": "uuid1"}]} info = {"processes_to_start": 1, "processes_counter": 1} mock_runs_per_second = mock.MagicMock(return_value=10) rps._worker_process(mock_queue, fake_ram_int, 1, times, max_concurrent, context, "Dummy", "dummy", (), mock_event_queue, mock_event, mock_runs_per_second, 10, 1, info) self.assertEqual(times, mock_log.debug.call_count) self.assertEqual(times + 1, mock_thread.call_count) self.assertEqual(times + 1, mock_thread_instance.start.call_count) self.assertEqual(times + 1, mock_thread_instance.join.call_count) # NOTE(rvasilets): `times` + 1 here because `times` the number of # scenario repetition and one more need on "initialization" stage # of the thread stuff. 
self.assertEqual(1, mock_time.sleep.call_count) self.assertEqual(2, mock_thread_instance.isAlive.call_count) self.assertEqual(times * 4 - 1, mock_time.time.count) self.assertEqual(times, mock_runner._get_scenario_context.call_count) for i in range(times): scenario_context = mock_runner._get_scenario_context(i, context) call = mock.call( args=(mock_queue, "Dummy", "dummy", scenario_context, (), mock_event_queue), target=mock_runner._worker_thread, ) self.assertIn(call, mock_thread.mock_calls) @mock.patch(RUNNERS + "rps.runner._run_scenario_once") def test__worker_thread(self, mock__run_scenario_once): mock_queue = mock.MagicMock() mock_event_queue = mock.MagicMock() args = ("fake_cls", "fake_method_name", "fake_context_obj", {}, mock_event_queue) runner._worker_thread(mock_queue, *args) self.assertEqual(1, mock_queue.put.call_count) expected_calls = [mock.call(*args)] self.assertEqual(expected_calls, mock__run_scenario_once.mock_calls) @ddt.data( { "config": { "times": 20, "rps": 20, "timeout": 5, "max_concurrency": 15 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 10, "step": 1, }, "times": 55 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 10, "step": 1, }, "times": 50 } }, { "config": { "type": "rps", "rps": { "start": 1, "end": 10, "step": 1, }, "times": 75 } }, ) @ddt.unpack @mock.patch(RUNNERS + "rps.time.sleep") def test__run_scenario(self, mock_sleep, config): runner_obj = rps.RPSScenarioRunner(self.task, config) runner_obj._run_scenario(fakes.FakeScenario, "do_it", fakes.FakeContext({}).context, {}) self.assertEqual(config["times"], len(runner_obj.result_queue)) for result_batch in runner_obj.result_queue: for result in result_batch: self.assertIsNotNone(result) @mock.patch(RUNNERS + "rps.time.sleep") def test__run_scenario_exception(self, mock_sleep): config = {"times": 4, "rps": 10} runner_obj = rps.RPSScenarioRunner(self.task, config) runner_obj._run_scenario(fakes.FakeScenario, "something_went_wrong", 
fakes.FakeContext({}).context, {}) self.assertEqual(len(runner_obj.result_queue), config["times"]) for result_batch in runner_obj.result_queue: for result in result_batch: self.assertIsNotNone(result) @mock.patch(RUNNERS + "rps.time.sleep") def test__run_scenario_aborted(self, mock_sleep): config = {"times": 20, "rps": 20, "timeout": 5} runner_obj = rps.RPSScenarioRunner(self.task, config) runner_obj.abort() runner_obj._run_scenario(fakes.FakeScenario, "do_it", fakes.FakeUser().context, {}) self.assertEqual(len(runner_obj.result_queue), 0) for result in runner_obj.result_queue: self.assertIsNotNone(result) @mock.patch(RUNNERS + "constant.multiprocessing.Queue") @mock.patch(RUNNERS + "rps.multiprocessing.cpu_count") @mock.patch(RUNNERS + "rps.RPSScenarioRunner._log_debug_info") @mock.patch(RUNNERS + "rps.RPSScenarioRunner._create_process_pool") @mock.patch(RUNNERS + "rps.RPSScenarioRunner._join_processes") def test_that_cpu_count_is_adjusted_properly( self, mock__join_processes, mock__create_process_pool, mock__log_debug_info, mock_cpu_count, mock_queue): samples = [ { "input": {"times": 20, "rps": 20, "max_concurrency": 10, "max_cpu_count": 1}, "real_cpu": 2, "expected": { # max_cpu_used equals to min(max_cpu_count, real_cpu) "max_cpu_used": 1, # processes_to_start equals to # min(max_cpu_used, times, max_concurrency)) "processes_to_start": 1, "rps_per_worker": 20, "times_per_worker": 20, "times_overhead": 0, "concurrency_per_worker": 10, "concurrency_overhead": 0 } }, { "input": {"times": 20, "rps": 9, "max_concurrency": 5, "max_cpu_count": 3}, "real_cpu": 4, "expected": { "max_cpu_used": 3, "processes_to_start": 3, "rps_per_worker": 3, "times_per_worker": 6, "times_overhead": 2, "concurrency_per_worker": 1, "concurrency_overhead": 2 } }, { "input": {"times": 10, "rps": 20, "max_concurrency": 12, "max_cpu_count": 20}, "real_cpu": 20, "expected": { "max_cpu_used": 20, "processes_to_start": 10, "rps_per_worker": 2, "times_per_worker": 1, "times_overhead": 0, 
"concurrency_per_worker": 1, "concurrency_overhead": 2 } }, { "input": {"times": 20, "rps": 20, "max_concurrency": 10, "max_cpu_count": 20}, "real_cpu": 20, "expected": { "max_cpu_used": 20, "processes_to_start": 10, "rps_per_worker": 2, "times_per_worker": 2, "times_overhead": 0, "concurrency_per_worker": 1, "concurrency_overhead": 0 } } ] for sample in samples: mock__log_debug_info.reset_mock() mock_cpu_count.reset_mock() mock__create_process_pool.reset_mock() mock__join_processes.reset_mock() mock_queue.reset_mock() mock_cpu_count.return_value = sample["real_cpu"] runner_obj = rps.RPSScenarioRunner(self.task, sample["input"]) runner_obj._run_scenario(fakes.FakeScenario, "do_it", fakes.FakeUser().context, {}) mock_cpu_count.assert_called_once_with() mock__log_debug_info.assert_called_once_with( times=sample["input"]["times"], timeout=0, max_cpu_used=sample["expected"]["max_cpu_used"], processes_to_start=sample["expected"]["processes_to_start"], times_per_worker=sample["expected"]["times_per_worker"], times_overhead=sample["expected"]["times_overhead"], concurrency_per_worker=( sample["expected"]["concurrency_per_worker"]), concurrency_overhead=( sample["expected"]["concurrency_overhead"])) args, kwargs = mock__create_process_pool.call_args self.assertIn(sample["expected"]["processes_to_start"], args) self.assertIn(rps._worker_process, args) mock__join_processes.assert_called_once_with( mock__create_process_pool.return_value, mock_queue.return_value, mock_queue.return_value) def test_abort(self): config = {"times": 4, "rps": 10} runner_obj = rps.RPSScenarioRunner(self.task, config) self.assertFalse(runner_obj.aborted.is_set()) runner_obj.abort() self.assertTrue(runner_obj.aborted.is_set()) rally-0.9.1/tests/unit/plugins/common/runners/test_constant.py0000664000567000056710000003270313073417720026046 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jsonschema import mock from rally import exceptions from rally.plugins.common.runners import constant from rally.task import runner from tests.unit import fakes from tests.unit import test RUNNERS_BASE = "rally.task.runner." RUNNERS = "rally.plugins.common.runners." class ConstantScenarioRunnerTestCase(test.TestCase): def setUp(self): super(ConstantScenarioRunnerTestCase, self).setUp() self.config = {"times": 4, "concurrency": 2, "timeout": 2, "type": "constant", "max_cpu_count": 2} self.context = fakes.FakeContext({"task": {"uuid": "uuid"}}).context self.args = {"a": 1} self.task = mock.MagicMock() def test_validate(self): constant.ConstantScenarioRunner.validate(self.config) def test_validate_failed_by_additional_key(self): self.config["new_key"] = "should fail" self.assertRaises(jsonschema.ValidationError, constant.ConstantScenarioRunner.validate, self.config) def test_validate_failed_by_wrong_concurrency(self): self.config["concurrency"] = self.config["times"] + 1 self.assertRaises(exceptions.ValidationError, constant.ConstantScenarioRunner.validate, self.config) @mock.patch(RUNNERS + "constant.runner") def test__run_scenario_once_with_unpack_args(self, mock_runner): result = constant._run_scenario_once_with_unpack_args( ("FOO", ("BAR", "QUUZ"))) self.assertEqual(mock_runner._run_scenario_once.return_value, result) mock_runner._run_scenario_once.assert_called_once_with( "FOO", ("BAR", "QUUZ")) @mock.patch(RUNNERS + 
"constant.time") @mock.patch(RUNNERS + "constant.threading.Thread") @mock.patch(RUNNERS + "constant.multiprocessing.Queue") @mock.patch(RUNNERS + "constant.runner") def test__worker_process(self, mock_runner, mock_queue, mock_thread, mock_time): mock_thread_instance = mock.MagicMock( isAlive=mock.MagicMock(return_value=False)) mock_thread.return_value = mock_thread_instance mock_event = mock.MagicMock( is_set=mock.MagicMock(return_value=False)) mock_event_queue = mock.MagicMock() times = 4 fake_ram_int = iter(range(10)) context = {"users": [{"tenant_id": "t1", "credential": "c1", "id": "uuid1"}]} info = {"processes_to_start": 1, "processes_counter": 1} constant._worker_process(mock_queue, fake_ram_int, 1, 2, times, context, "Dummy", "dummy", (), mock_event_queue, mock_event, info) self.assertEqual(times + 1, mock_thread.call_count) self.assertEqual(times + 1, mock_thread_instance.start.call_count) self.assertEqual(times + 1, mock_thread_instance.join.call_count) # NOTE(rvasilets): `times` + 1 here because `times` the number of # scenario repetition and one more need on "initialization" stage # of the thread stuff. 
self.assertEqual(times, mock_runner._get_scenario_context.call_count) for i in range(times): scenario_context = mock_runner._get_scenario_context(i, context) call = mock.call( args=(mock_queue, "Dummy", "dummy", scenario_context, (), mock_event_queue), target=mock_runner._worker_thread, ) self.assertIn(call, mock_thread.mock_calls) @mock.patch(RUNNERS_BASE + "_run_scenario_once") def test__worker_thread(self, mock__run_scenario_once): mock_queue = mock.MagicMock() mock_event_queue = mock.MagicMock() args = ("fake_cls", "fake_method_name", "fake_context_obj", {}, mock_event_queue) runner._worker_thread(mock_queue, *args) self.assertEqual(1, mock_queue.put.call_count) expected_calls = [mock.call(*args)] self.assertEqual(expected_calls, mock__run_scenario_once.mock_calls) def test__run_scenario(self): runner_obj = constant.ConstantScenarioRunner(self.task, self.config) runner_obj._run_scenario( fakes.FakeScenario, "do_it", self.context, self.args) self.assertEqual(len(runner_obj.result_queue), self.config["times"]) for result_batch in runner_obj.result_queue: for result in result_batch: self.assertIsNotNone(result) def test__run_scenario_exception(self): runner_obj = constant.ConstantScenarioRunner(self.task, self.config) runner_obj._run_scenario(fakes.FakeScenario, "something_went_wrong", self.context, self.args) self.assertEqual(len(runner_obj.result_queue), self.config["times"]) for result_batch in runner_obj.result_queue: for result in result_batch: self.assertIsNotNone(result) self.assertIn("error", runner_obj.result_queue[0][0]) def test__run_scenario_aborted(self): runner_obj = constant.ConstantScenarioRunner(self.task, self.config) runner_obj.abort() runner_obj._run_scenario(fakes.FakeScenario, "do_it", self.context, self.args) self.assertEqual(len(runner_obj.result_queue), 0) @mock.patch(RUNNERS + "constant.multiprocessing.Queue") @mock.patch(RUNNERS + "constant.multiprocessing.cpu_count") @mock.patch(RUNNERS + 
"constant.ConstantScenarioRunner._log_debug_info") @mock.patch(RUNNERS + "constant.ConstantScenarioRunner._create_process_pool") @mock.patch(RUNNERS + "constant.ConstantScenarioRunner._join_processes") def test_that_cpu_count_is_adjusted_properly( self, mock__join_processes, mock__create_process_pool, mock__log_debug_info, mock_cpu_count, mock_queue): samples = [ { "input": {"times": 20, "concurrency": 20, "type": "constant", "max_cpu_count": 1}, "real_cpu": 2, "expected": { # max_cpu_used equals to min(max_cpu_count, real_cpu) "max_cpu_used": 1, # processes_to_start equals to # min(max_cpu_used, times, concurrency)) "processes_to_start": 1, "concurrency_per_worker": 20, "concurrency_overhead": 0, } }, { "input": {"times": 20, "concurrency": 15, "type": "constant", "max_cpu_count": 3}, "real_cpu": 2, "expected": { "max_cpu_used": 2, "processes_to_start": 2, "concurrency_per_worker": 7, "concurrency_overhead": 1, } }, { "input": {"times": 20, "concurrency": 1, "type": "constant", "max_cpu_count": 3}, "real_cpu": 2, "expected": { "max_cpu_used": 2, "processes_to_start": 1, "concurrency_per_worker": 1, "concurrency_overhead": 0, } }, { "input": {"times": 2, "concurrency": 5, "type": "constant", "max_cpu_count": 4}, "real_cpu": 4, "expected": { "max_cpu_used": 4, "processes_to_start": 2, "concurrency_per_worker": 2, "concurrency_overhead": 1, } } ] for sample in samples: mock__log_debug_info.reset_mock() mock_cpu_count.reset_mock() mock__create_process_pool.reset_mock() mock__join_processes.reset_mock() mock_queue.reset_mock() mock_cpu_count.return_value = sample["real_cpu"] runner_obj = constant.ConstantScenarioRunner(self.task, sample["input"]) runner_obj._run_scenario(fakes.FakeScenario, "do_it", self.context, self.args) mock_cpu_count.assert_called_once_with() mock__log_debug_info.assert_called_once_with( times=sample["input"]["times"], concurrency=sample["input"]["concurrency"], timeout=0, max_cpu_used=sample["expected"]["max_cpu_used"], 
class ConstantForDurationScenarioRunnerTestCase(test.TestCase):
    """Tests for the constant-for-duration scenario runner."""

    def setUp(self):
        super(ConstantForDurationScenarioRunnerTestCase, self).setUp()
        self.config = {"duration": 0, "concurrency": 2, "timeout": 2,
                       "type": "constant_for_duration"}
        self.context = fakes.FakeContext({"task": {"uuid": "uuid"}}).context
        self.context["iteration"] = 14
        self.args = {"a": 1}

    def _assert_single_iteration(self, runner_obj, with_error=False):
        # NOTE(mmorais): when duration is 0, scenario executes exactly 1 time
        self.assertEqual(len(runner_obj.result_queue), 1)
        for result_batch in runner_obj.result_queue:
            for result in result_batch:
                self.assertIsNotNone(result)
        if with_error:
            self.assertIn("error", runner_obj.result_queue[0][0])

    def test_validate(self):
        constant.ConstantForDurationScenarioRunner.validate(self.config)

    def test_validate_failed(self):
        # A non-numeric "times" value must be rejected by the schema.
        self.config["times"] = "gagaga"
        self.assertRaises(jsonschema.ValidationError,
                          runner.ScenarioRunner.validate, self.config)

    def test_run_scenario_constantly_for_duration(self):
        runner_obj = constant.ConstantForDurationScenarioRunner(
            None, self.config)
        runner_obj._run_scenario(fakes.FakeScenario, "do_it",
                                 self.context, self.args)
        self._assert_single_iteration(runner_obj)

    def test_run_scenario_constantly_for_duration_exception(self):
        runner_obj = constant.ConstantForDurationScenarioRunner(
            None, self.config)
        runner_obj._run_scenario(fakes.FakeScenario, "something_went_wrong",
                                 self.context, self.args)
        self._assert_single_iteration(runner_obj, with_error=True)

    def test_run_scenario_constantly_for_duration_timeout(self):
        runner_obj = constant.ConstantForDurationScenarioRunner(
            None, self.config)
        runner_obj._run_scenario(fakes.FakeScenario, "raise_timeout",
                                 self.context, self.args)
        self._assert_single_iteration(runner_obj, with_error=True)

    def test__run_scenario_constantly_aborted(self):
        runner_obj = constant.ConstantForDurationScenarioRunner(None,
                                                                self.config)
        runner_obj.abort()
        runner_obj._run_scenario(fakes.FakeScenario, "do_it",
                                 self.context, self.args)
        # An aborted runner must not produce any results at all.
        self.assertEqual(len(runner_obj.result_queue), 0)

    def test_abort(self):
        runner_obj = constant.ConstantForDurationScenarioRunner(None,
                                                                self.config)
        self.assertFalse(runner_obj.aborted.is_set())
        runner_obj.abort()
        self.assertTrue(runner_obj.aborted.is_set())
@ddt.ddt
class FileExporterTestCase(test.TestCase):
    """Tests for the file-system task results exporter."""

    @mock.patch("rally.plugins.common.exporter.file_system.os.path.exists")
    @mock.patch.object(__builtin__, "open", autospec=True)
    @mock.patch("rally.plugins.common.exporter.file_system.json.dumps")
    @mock.patch("rally.api.Task.get")
    def test_file_exporter_export(self, mock_task_get, mock_dumps,
                                  mock_open, mock_exists):
        mock_task = mock.Mock()
        mock_exists.return_value = True
        mock_task_get.return_value = mock_task
        mock_task.get_results.return_value = [{
            "key": "fake_key",
            "data": {
                "raw": "bar_raw",
                "sla": "baz_sla",
                "hooks": "baz_hooks",
                "load_duration": "foo_load_duration",
                "full_duration": "foo_full_duration",
            }
        }]
        mock_dumps.return_value = "fake_results"
        input_mock = mock.MagicMock(spec=file)
        mock_open.return_value = input_mock

        exporter = file_system.FileExporter("file-exporter:///fake_path.json")
        exporter.export("fake_uuid")

        # The serialized results must be written through the opened file.
        mock_open().__enter__().write.assert_called_once_with("fake_results")
        mock_task_get.assert_called_once_with("fake_uuid")
        # "raw" is renamed to "result"; the rest is passed through as-is.
        expected_dict = [
            {
                "load_duration": "foo_load_duration",
                "full_duration": "foo_full_duration",
                "result": "bar_raw",
                "key": "fake_key",
                "hooks": "baz_hooks",
                "sla": "baz_sla"
            }
        ]
        mock_dumps.assert_called_once_with(expected_dict, sort_keys=False,
                                           indent=4, separators=(",", ": "))

    @mock.patch("rally.api.Task.get")
    def test_file_exporter_export_running_task(self, mock_task_get):
        # A task with no results yet (still running) cannot be exported.
        mock_task = mock.Mock()
        mock_task_get.return_value = mock_task
        mock_task.get_results.return_value = []
        exporter = file_system.FileExporter("file-exporter:///fake_path.json")
        self.assertRaises(exceptions.RallyException,
                          exporter.export, "fake_uuid")

    @ddt.data(
        {"connection": "", "raises": exceptions.InvalidConnectionString},
        {"connection": "file-exporter:///fake_path.json", "raises": None},
        {"connection": "file-exporter:///fake_path.fake",
         "raises": exceptions.InvalidConnectionString},
    )
    @ddt.unpack
    def test_file_exporter_validate(self, connection, raises):
        # NOTE(review): removed a leftover debug print(connection) that
        # polluted the test output.
        if raises:
            self.assertRaises(raises, file_system.FileExporter, connection)
        else:
            file_system.FileExporter(connection)
class SiegeTestCase(test.TestCase):
    """Tests for the siege wordpress workload helper."""

    @mock.patch("rally.plugins.workload.siege.json.load")
    def test_get_instances(self, mock_load):
        # get_instances must yield every IP found under "wp_nodes".
        mock_load.return_value = OUTPUT
        instances = list(siege.get_instances())
        self.assertEqual(["10.0.0.4", "10.0.0.5"], instances)

    @mock.patch("rally.plugins.workload.siege.get_instances")
    @mock.patch("rally.plugins.workload.siege.generate_urls_list")
    @mock.patch("rally.plugins.workload.siege.subprocess.check_output")
    def test_run(self, mock_check_output, mock_generate_urls_list,
                 mock_get_instances):
        mock_get_instances.return_value = [1, 2]
        mock_generate_urls_list.return_value = "urls"
        mock_check_output.return_value = SIEGE_OUTPUT
        mock_write = mock.MagicMock()
        mock_stdout = mock.MagicMock(write=mock_write)
        real_stdout = sys.stdout
        # BUGFIX: restore sys.stdout even if siege.run() raises; previously
        # a failure here left stdout mocked and poisoned subsequent tests.
        sys.stdout = mock_stdout
        try:
            siege.run()
        finally:
            sys.stdout = real_stdout
        # Only the transaction-rate and throughput lines are reported.
        expected = [mock.call("Transaction rate:141.46\n"),
                    mock.call("Throughput:0.29\n")]
        self.assertEqual(expected, mock_write.mock_calls)

    @mock.patch("rally.plugins.workload.siege.tempfile.NamedTemporaryFile")
    def test_generate_urls_list(self, mock_named_temporary_file):
        mock_urls = mock.MagicMock()
        mock_named_temporary_file.return_value = mock_urls
        name = siege.generate_urls_list(["foo", "bar"])
        # The helper returns the name of the temp file it wrote the URLs to.
        self.assertEqual(mock_urls.name, name)
class DDTDecoratorCheckerTestCase(test.TestCase):
    """Sanity checks for the DDTDecoratorChecker AST visitor."""

    def test_pass(self):
        # A class decorated with @ddt.ddt may freely use @ddt.data inside.
        code = """
@ddt.ddt
class Test(object):
    @ddt.data({})
    def test_func(self):
        pass
"""
        tree = ast.parse(code).body[0]
        checker = test_ddt.DDTDecoratorChecker()
        checker.visit(tree)
        self.assertEqual(checker.errors, {})

    def test_fail(self):
        # Using @ddt.data without decorating the class must be reported.
        code = """
class Test(object):
    @ddt.data({})
    def test_func(self):
        pass
"""
        tree = ast.parse(code).body[0]
        checker = test_ddt.DDTDecoratorChecker()
        checker.visit(tree)
        self.assertEqual(
            checker.errors,
            {"Test": {"lineno": 3,
                      "message": "Class Test has functions that use DDT, "
                                 "but is not decorated with `ddt.ddt`"}})
def pairwise_isinstance(*args):
    """Return True iff every (object, type) pair satisfies isinstance."""
    return all(itertools.starmap(isinstance, args))


class Variants(object):
    """A set-like wrapper around acceptable mock-argument names.

    Compares equal to either a plain list of names or another Variants
    instance, supports membership tests, and renders at most three of its
    candidates with the ``mock_`` prefix for readable assertion messages.
    """

    def __init__(self, variants, print_prefix="mock_"):
        self.variants = variants
        self.print_prefix = print_prefix

    def __repr__(self):
        # Show at most three candidates, then an ellipsis marker.
        shown = [repr(self.print_prefix + name)
                 for name in self.variants[:3]]
        tail = ", ...}" if len(self.variants) > 3 else "}"
        return "{" + ", ".join(shown) + tail

    def __eq__(self, val):
        # Accept both another Variants (compare its .variants) and a list.
        return getattr(val, "variants", val) == self.variants

    def __ne__(self, other):
        return not self.__eq__(other)

    def __contains__(self, val):
        return val in self.variants
While these are not:: @mock.patch("module.abc") def test_foobar(self, m_abc): # must be prefixed with `mock_' @mock.patch("module.abc") def test_foobar(self, mock_cba): # must contain mocked object name (`mock_abc') @mock.patch("module.abc") def test_foobar(self, mock_modulewrong_abc): # must match the module `mock_module_abc' @mock.patch("ClassName.abc") def test_foobar(self, mock_class_abc): # must match the python-styled class name + method name """ # NOTE(amaretskiy): Disable check if shortest variant is too long # because long name is not convenient and could # even be blocked by PEP8 SHORTEST_VARIANT_LEN_LIMIT = 25 def __init__(self): self.errors = [] self.globals_ = {} @classmethod def _get_name(cls, node): if isinstance(node, ast.Name): return node.id if isinstance(node, ast.Attribute): return cls._get_name(node.value) + "." + node.attr return "" def _get_value(self, node): """Get mock.patch string argument regexp. It is either a string (if we are lucky), string-format of ("%s.something" % GVAL) or (GVAL + ".something") """ val = None if isinstance(node, ast.Str): val = node.s elif isinstance(node, ast.BinOp): if pairwise_isinstance( (node.op, ast.Mod), (node.left, ast.Str), (node.right, ast.Name)): val = node.left.s % self.globals_[node.right.id] elif pairwise_isinstance( (node.op, ast.Add), (node.left, ast.Name), (node.right, ast.Str)): val = self.globals_[node.left.id] + node.right.s elif isinstance(node, ast.Name): val = self.globals_[node.id] if val is None: raise ValueError( "Unable to find value in %s, only the following are parsed: " "GLOBAL, 'pkg.foobar', '%%s.foobar' %% GLOBAL or 'GLOBAL + " "'.foobar'" % ast.dump(node)) return val CAMELCASE_SPLIT_ANY_AND_CAPITAL = re.compile("(.)([A-Z][a-z]+)") CAMELCASE_SPLIT_LOWER_AND_CAPITAL = re.compile("([a-z0-9])([A-Z])") CAMELCASE_SPLIT_REPL = r"\1_\2" @classmethod def _camelcase_to_python(cls, name): for regexp in (cls.CAMELCASE_SPLIT_ANY_AND_CAPITAL, cls.CAMELCASE_SPLIT_LOWER_AND_CAPITAL): name = 
regexp.sub(cls.CAMELCASE_SPLIT_REPL, name) return name.lower() def _get_mocked_class_value_variants(self, class_name, mocked_name): class_name = self._camelcase_to_python(class_name) mocked_name = self._camelcase_to_python(mocked_name) if class_name == self.classname_python: # Optional, since class name of the mocked package is the same as # class name of the *TestCase return [mocked_name, class_name + "_" + mocked_name] # Full class name is required otherwise return [class_name + "_" + mocked_name] def _add_pkg_optional_prefixes(self, tokens, variants): prefixed_variants = list(variants) for token in map(self._camelcase_to_python, reversed(tokens)): prefixed_variants.append(token + "_" + prefixed_variants[-1]) return prefixed_variants def _get_mocked_name_variants(self, name): tokens = name.split(".") variants = [self._camelcase_to_python(tokens.pop())] if tokens: if tokens[-1][0].isupper(): # Mocked something inside a class, check if we should require # the class name to be present in mock argument variants = self._get_mocked_class_value_variants( class_name=tokens.pop(), mocked_name=variants[0]) variants = self._add_pkg_optional_prefixes(tokens, variants) return Variants(variants) def _get_mock_decorators_variants(self, funccall): """Return all the mock.patch{,.object} decorated for function.""" mock_decorators = [] for decorator in reversed(funccall.decorator_list): if not isinstance(decorator, ast.Call): continue funcname = self._get_name(decorator.func) if funcname == "mock.patch": decname = self._get_value(decorator.args[0]) elif funcname == "mock.patch.object": decname = (self._get_name(decorator.args[0]) + "." 
+ self._get_value(decorator.args[1])) else: continue mock_decorators.append( self._get_mocked_name_variants(decname) ) return mock_decorators @staticmethod def _get_mock_args(node): """Return all the mock arguments.""" args = [] PREFIX_LENGTH = len("mock_") for arg in node.args.args: name = getattr(arg, "id", getattr(arg, "arg", None)) if not name.startswith("mock_"): continue args.append(name[PREFIX_LENGTH:]) return args def visit_Assign(self, node): """Catch all the globals.""" self.generic_visit(node) if node.col_offset == 0: mnode = ast.Module(body=[node]) code = compile(mnode, "", "exec") try: exec(code, self.globals_) except Exception: pass self.globals_.pop("__builtins__", None) self.globals_.pop("builtins", None) def visit_ClassDef(self, node): classname_camel = node.name if node.name.endswith("TestCase"): classname_camel = node.name[:-len("TestCase")] self.classname_python = self._camelcase_to_python(classname_camel) self.generic_visit(node) def check_name(self, arg, dec_vars): return (dec_vars is not None and arg in dec_vars) def visit_FunctionDef(self, node): self.generic_visit(node) mock_decs = self._get_mock_decorators_variants(node) if not mock_decs: return mock_args = self._get_mock_args(node) error_msgs = [] mismatched = False for arg, dec_vars in six.moves.zip_longest(mock_args, mock_decs): if not self.check_name(arg, dec_vars): if arg and dec_vars: sorted_by_len = sorted( dec_vars.variants, key=lambda i: len(i), reverse=True) shortest_name = sorted_by_len.pop() if len(shortest_name) <= self.SHORTEST_VARIANT_LEN_LIMIT: error_msgs.append( ("Argument '%(arg)s' misnamed; should be either " "of %(dec)s that is derived from the mock " "decorator args.\n") % {"arg": arg, "dec": dec_vars}) elif not arg: error_msgs.append( "Missing or malformed argument for %s decorator." % dec_vars) mismatched = True elif not dec_vars: error_msgs.append( "Missing or malformed decorator for '%s' argument." 
class MockUsageCheckerTestCase(test.TestCase):
    """Walks every unit-test module and validates its mock naming."""

    tests_path = os.path.join(os.path.dirname(__file__))

    def test_mock_decorators_and_args(self):
        """Ensure that mocked objects are called correctly in the arguments.

        See `FuncMockArgsDecoratorsChecker' docstring for details.
        """
        errors = []
        for dirname, dirnames, filenames in os.walk(self.tests_path):
            test_modules = [name for name in filenames
                            if name.startswith("test_")
                            and name.endswith(".py")]
            for basename in test_modules:
                path = os.path.relpath(os.path.join(dirname, basename))
                with open(path, "rb") as fh:
                    tree = ast.parse(fh.read(), path)
                checker = FuncMockArgsDecoratorsChecker()
                checker.visit(tree)
                errors.extend(dict(filename=path, **error)
                              for error in checker.errors)

        if errors:
            print(FuncMockArgsDecoratorsChecker.__doc__)
            print(
                "\n\n"
                "The following errors were found during the described check:")
            for error in errors:
                print("\n\n"
                      "Errors at file %(filename)s line %(lineno)d:\n\n"
                      "%(message)s"
                      % {"message": "\n".join(error["messages"]),
                         "filename": error["filename"],
                         "lineno": error["lineno"]})
            # NOTE(pboldin): When the STDOUT is shuted the below is the last
            # resort to know what is wrong with the mock names.
            for error in errors:
                error["messages"] = [
                    message.rstrip().replace("\n", " ").replace("\t", "")
                    for message in error["messages"]
                ]
        self.assertEqual([], errors)
class ConfigSchemasTestCase(test.TestCase):
    """Validate the CONFIG_SCHEMA of every registered plugin.

    Each JSON-schema node type has a whitelist of allowed keywords;
    anything outside that whitelist fails the test with a detailed message.
    """

    OBJECT_TYPE_KEYS = {"$schema", "type", "description", "required",
                        "properties", "patternProperties",
                        "additionalProperties", "oneOf", "anyOf"}
    ARRAY_TYPE_KEYS = {"$schema", "type", "description", "items",
                       "uniqueItems", "minItems", "maxItems",
                       "additionalItems"}
    NUMBER_TYPE_KEYS = {"$schema", "type", "description", "minimum",
                        "maximum", "exclusiveMinimum"}
    STRING_TYPE_KEYS = {"$schema", "type", "description", "pattern"}

    def fail(self, p, schema, msg):
        # Prefix every failure with the plugin identity and its schema.
        super(ConfigSchemasTestCase, self).fail(
            "Config schema of plugin '%s' (%s) is invalid. %s "
            "(Schema: %s)" % (p.get_name(),
                              "%s.%s" % (p.__module__, p.__name__),
                              msg, schema))

    def _check_anyOf_or_oneOf(self, p, schema, definitions):
        if "anyOf" in schema or "oneOf" in schema:
            key = "anyOf" if "anyOf" in schema else "oneOf"
            for case in schema[key]:
                if "description" not in case and "$ref" not in case:
                    self.fail(p, schema,
                              "Each case of '%s' should have "
                              "description." % key)
                # Merge the case into a copy of the surrounding schema and
                # validate the combined result.
                full_schema = copy.deepcopy(schema)
                full_schema.pop(key)
                full_schema.update(case)
                self._check_item(p, full_schema, definitions)

    def _check_object_type(self, p, schema, definitions):
        extra_keys = set(schema.keys()) - self.OBJECT_TYPE_KEYS
        if "definitions" in extra_keys:
            # TODO(andreykurilin): do not use definitions since it is a hard
            # task to parse and display them
            extra_keys -= {"definitions"}
        if extra_keys:
            self.fail(p, schema, ("Found unexpected key(s) for object type: "
                                  "%s." % ", ".join(extra_keys)))

        if "patternProperties" in schema:
            if "properties" in schema:
                self.fail(p, schema, "Usage both 'patternProperties' and "
                                     "'properties' in one time is "
                                     "restricted.")
            if not isinstance(schema["patternProperties"], dict):
                self.fail(p, schema, "Field 'patternProperties' should be a "
                                     "dict.")
            for pattern, subschema in schema["patternProperties"].items():
                self._check_item(p, subschema, definitions)

        if "properties" in schema:
            for property_name, subschema in schema["properties"].items():
                self._check_item(p, subschema, definitions)

    def _check_array_type(self, p, schema, definitions):
        extra_keys = set(schema.keys()) - self.ARRAY_TYPE_KEYS
        if "additionalProperties" in extra_keys:
            self.fail(p, schema, "Array type doesn't support "
                                 "'additionalProperties' field.")
        if extra_keys:
            self.fail(p, schema, ("Found unexpected key(s) for array type: "
                                  "%s." % ", ".join(extra_keys)))
        if "items" not in schema:
            self.fail(p, schema, "Expected items of array type should be "
                                 "described via 'items' field.")

        if isinstance(schema["items"], dict):
            self._check_item(p, schema["items"], definitions)
            if "additionalItems" in schema:
                self.fail(p, schema, "When items is a single schema, the "
                                     "`additionalItems` keyword is "
                                     "meaningless, and it should not be "
                                     "used.")
        elif isinstance(schema["items"], list):
            for item in schema["items"]:
                self._check_item(p, item, definitions)
        else:
            self.fail(p, schema, ("Field 'items' of array type should be a "
                                  "list or a dict, but not '%s'"
                                  % type(schema["items"])))

    def _check_string_type(self, p, schema):
        extra_keys = set(schema.keys()) - self.STRING_TYPE_KEYS
        if extra_keys:
            self.fail(p, schema, ("Found unexpected key(s) for string type: "
                                  "%s." % ", ".join(extra_keys)))

    def _check_number_type(self, p, schema):
        extra_keys = set(schema.keys()) - self.NUMBER_TYPE_KEYS
        if extra_keys:
            self.fail(p, schema, ("Found unexpected key(s) for "
                                  "integer/number type: %s."
                                  % ", ".join(extra_keys)))

    def _check_simpliest_types(self, p, schema):
        extra_keys = set(schema.keys()) - {"type", "description"}
        if extra_keys:
            self.fail(p, schema, ("Found unexpected key(s) for boolean "
                                  "type: %s." % ", ".join(extra_keys)))

    def _check_item(self, p, schema, definitions):
        # Dispatch on the schema form: anyOf/oneOf, typed, enum or $ref.
        if "anyOf" in schema or "oneOf" in schema:
            self._check_anyOf_or_oneOf(p, schema, definitions)
        elif "type" in schema:
            type_name = schema["type"]
            if type_name == "object":
                self._check_object_type(p, schema, definitions)
            elif type_name == "array":
                self._check_array_type(p, schema, definitions)
            elif type_name == "string":
                self._check_string_type(p, schema)
            elif type_name in ("number", "integer"):
                self._check_number_type(p, schema)
            elif type_name in ("boolean", "null"):
                self._check_simpliest_types(p, schema)
            else:
                self.fail(p, schema, "Wrong type is used: %s" % type_name)
        elif "enum" in schema:
            pass
        elif schema == {}:
            # NOTE(andreykurilin): an empty dict means that the user can
            # transmit whatever he want in whatever he want format. It is
            # not the case which we want to support.
            self.fail(p, schema, "Empty schema is not allowed.")
        elif "$ref" in schema:
            definition_name = schema["$ref"].replace("#/definitions/", "")
            if definition_name not in definitions:
                self.fail(p, schema, "Definition '%s' is not found."
                                     % definition_name)
        else:
            self.fail(p, schema, "Wrong format.")

    @plugins.ensure_plugins_are_loaded
    def test_schema_is_valid(self):
        for p in plugin.Plugin.get_all():
            if not hasattr(p, "CONFIG_SCHEMA"):
                continue
            # allow only top level definitions
            definitions = p.CONFIG_SCHEMA.get("definitions", {})
            for definition in definitions.values():
                self._check_item(p, definition, definitions)
            # check schema itself
            self._check_item(p, p.CONFIG_SCHEMA, definitions)
class TestFormat(testtools.TestCase):
    """Formatting checks (line length, CR, trailing spaces) for .rst docs."""

    def _check_lines_wrapping(self, doc_file, raw):
        in_code_block = False
        inside_simple_table = False
        lines = raw.split("\n")
        for idx, line in enumerate(lines):
            if in_code_block:
                # Blank/indented lines are still part of the code block.
                if not line or line.startswith(" "):
                    continue
                in_code_block = False
            if "::" in line:
                in_code_block = True
            # simple style tables also can fit >=80 symbols
            # open simple style table
            if ("===" in line or "---" in line) and not lines[idx - 1]:
                inside_simple_table = True
            if "http://" in line or "https://" in line or ":ref:" in line:
                continue
            # Allow lines which do not contain any whitespace
            if re.match("\s*[^\s]+$", line):
                continue
            if not inside_simple_table:
                self.assertTrue(
                    len(line) < 80,
                    msg="%s:%d: Line limited to a maximum of 79 characters."
                        % (doc_file, idx + 1))
            # close simple style table
            if "===" in line and not lines[idx + 1]:
                inside_simple_table = False

    def _check_no_cr(self, doc_file, raw):
        matches = re.findall("\r", raw)
        self.assertEqual(
            len(matches), 0,
            "Found %s literal carriage returns in file %s"
            % (len(matches), doc_file))

    def _check_trailing_spaces(self, doc_file, raw):
        for idx, line in enumerate(raw.split("\n")):
            trailing_spaces = re.findall("\s+$", line)
            self.assertEqual(
                len(trailing_spaces), 0,
                "Found trailing spaces on line %s of %s"
                % (idx + 1, doc_file))

    def test_lines(self):
        docs_dir = os.path.join(os.path.dirname(__file__), os.pardir,
                                os.pardir, os.pardir, "doc")
        rst_files = []
        for root, dirnames, filenames in os.walk(docs_dir):
            rst_files.extend(os.path.join(root, name)
                             for name in fnmatch.filter(filenames, "*.rst"))
        for path in rst_files:
            with io.open(path, encoding="utf-8") as f:
                data = f.read()
            self._check_lines_wrapping(path, data)
            self._check_no_cr(path, data)
            self._check_trailing_spaces(path, data)
import glob
import os
import re

import docutils.core

from tests.unit import test


class TitlesTestCase(test.TestCase):
    """Validate that every spec file follows the section layout of the
    template and obeys basic text-formatting rules.
    """

    # Directory holding template.rst plus implemented/in-progress specs.
    specs_path = os.path.join(
        os.path.dirname(__file__),
        os.pardir, os.pardir, os.pardir, "doc", "specs")

    def _get_title(self, section_tree):
        """Return {"name": <section title>, "subtitles": [...]} for a
        docutils <section> node; only direct child section titles are kept.
        """
        section = {"subtitles": []}
        for node in section_tree:
            if node.tagname == "title":
                section["name"] = node.rawsource
            elif node.tagname == "section":
                # Recurse one level; only the child's own title is used.
                subsection = self._get_title(node)
                section["subtitles"].append(subsection["name"])
        return section

    def _get_titles(self, spec):
        """Map top-level section title -> list of its subsection titles."""
        titles = {}
        for node in spec:
            if node.tagname == "section":
                # Note subsection subtitles are thrown away
                section = self._get_title(node)
                titles[section["name"]] = section["subtitles"]
        return titles

    def _check_titles(self, filename, expect, actual):
        """Fail if *actual* titles diverge from the *expect*(ed) template.

        Missing sections, extra sections and missing subsections are all
        reported together in a single failure message.
        """
        missing_sections = [x for x in expect.keys() if x not in actual.keys()]
        extra_sections = [x for x in actual.keys() if x not in expect.keys()]

        msgs = []
        if missing_sections:
            msgs.append("Missing sections: %s" % missing_sections)
        if extra_sections:
            msgs.append("Extra sections: %s" % extra_sections)

        for section in expect.keys():
            missing_subsections = [x for x in expect[section]
                                   if x not in actual.get(section, {})]
            # extra subsections are allowed
            if missing_subsections:
                msgs.append("Section '%s' is missing subsections: %s"
                            % (section, missing_subsections))

        if msgs:
            self.fail("While checking '%s':\n %s"
                      % (filename, "\n ".join(msgs)))

    def _check_lines_wrapping(self, tpl, raw):
        """Assert every line fits in 79 chars; lines with URLs are exempt."""
        for i, line in enumerate(raw.split("\n")):
            if "http://" in line or "https://" in line:
                continue
            self.assertTrue(
                len(line) < 80,
                msg="%s:%d: Line limited to a maximum of 79 characters."
                % (tpl, i+1))

    def _check_no_cr(self, tpl, raw):
        """Assert the spec contains no literal carriage returns."""
        matches = re.findall("\r", raw)
        self.assertEqual(
            len(matches), 0,
            "Found %s literal carriage returns in file %s" %
            (len(matches), tpl))

    def _check_trailing_spaces(self, tpl, raw):
        """Assert no line ends with trailing space characters."""
        for i, line in enumerate(raw.split("\n")):
            trailing_spaces = re.findall(" +$", line)
            self.assertEqual(
                len(trailing_spaces), 0,
                "Found trailing spaces on line %s of %s" % (i+1, tpl))

    def test_template(self):
        """Check each spec in implemented/ and in-progress/ against the
        template's section structure and the formatting rules above.
        """
        with open(os.path.join(self.specs_path, "template.rst")) as f:
            template = f.read()
        spec = docutils.core.publish_doctree(template)
        template_titles = self._get_titles(spec)

        for d in ["implemented", "in-progress"]:
            spec_dir = "%s/%s" % (self.specs_path, d)
            self.assertTrue(os.path.isdir(spec_dir),
                            "%s is not a directory" % spec_dir)
            for filename in glob.glob(spec_dir + "/*"):
                # README files are allowed to deviate from the template.
                if filename.endswith("README.rst"):
                    continue
                self.assertTrue(
                    filename.endswith(".rst"),
                    "spec's file must have .rst ext. Found: %s" % filename)
                with open(filename) as f:
                    data = f.read()
                titles = self._get_titles(docutils.core.publish_doctree(data))
                self._check_titles(filename, template_titles, titles)
                self._check_lines_wrapping(filename, data)
                self._check_no_cr(filename, data)
                self._check_trailing_spaces(filename, data)
from rally import plugins
from rally.common.plugin import discover
from rally.common.plugin import info
from rally.common.plugin import plugin
from rally.deployment import engine
from rally.deployment.serverprovider import provider
from rally.task import sla
from tests.unit import test

# Plugin names listed in these files are exempt from the checks below.
EXCEPTIONS_DOCSTR = "missed_docstrings.txt"
EXCEPTIONS_FORMAT = "wrong_format.txt"


class DocstringsTestCase(test.TestCase):
    """Enforce docstring conventions for all in-tree rally plugins."""

    def setUp(self):
        super(DocstringsTestCase, self).setUp()
        # Load the plugin registry so Plugin.get_all() sees everything.
        plugins.load()
        self.exceptions = self._open_file(
            EXCEPTIONS_DOCSTR) + self._open_file(EXCEPTIONS_FORMAT)

    def _open_file(self, filename):
        """Return the whitespace-separated, lowercased tokens of an
        exception-list file (relative to the repo root's cwd).
        """
        with open("./tests/unit/doc/%s" % filename) as file:
            return (file.read().lower().split())

    def _check_docstrings(self, msg_buffer):
        """Append a message to *msg_buffer* for each in-tree plugin whose
        one-line description starts with 'Test' or whose docstring is empty.
        """
        for plg_cls in plugin.Plugin.get_all():
            # Only check plugins shipped inside the rally package itself.
            if plg_cls.__module__.startswith("rally."):
                if plg_cls.get_name().lower() not in self.exceptions:
                    doc = info.parse_docstring(plg_cls.__doc__)
                    short_description = doc["short_description"]
                    if short_description.startswith("Test"):
                        msg_buffer.append("One-line description for %s"
                                          " should be declarative and not"
                                          " start with 'Test(s) ...'"
                                          % plg_cls.__name__)
                    if not plg_cls.get_info()["title"]:
                        msg = ("Class '{}' should have a docstring.")
                        inst_name = plg_cls.__name__
                        msg_buffer.append(msg.format(inst_name))

    def _check_described_params(self, msg_buffer):
        """Append a message for every ``run()`` parameter that is not
        documented with a ``:param:`` clause in the plugin docstring.
        """
        for plg_cls in plugin.Plugin.get_all():
            if plg_cls.get_name().lower() not in self.exceptions:
                # Implicit/framework parameters need no documentation.
                ignored_params = ["self", "scenario_obj"]
                if hasattr(plg_cls, "run"):
                    # Introspect run()'s positional parameter names via its
                    # code object (co_varnames holds args first).
                    code_block = plg_cls.run.__code__
                    params_count = code_block.co_argcount
                    params = code_block.co_varnames[:params_count]
                    param_data = plg_cls.get_info()["parameters"]
                    documented_params = [p["name"] for p in param_data]
                    for param in params:
                        if param not in ignored_params:
                            if param not in documented_params:
                                msg = ("Class: %(class)s Docstring for "
                                       "%(scenario)s should"
                                       " describe the '%(param)s' parameter"
                                       " in the :param : clause."
                                       % {"class": plg_cls.__name__,
                                          "scenario": plg_cls.get_name(),
                                          "param": param})
                                msg_buffer.append(msg)

    def test_all_plugins_have_docstrings(self):
        """Fail with the accumulated messages if any plugin violates the
        docstring or parameter-documentation conventions.
        """
        msg_buffer = []
        self._check_docstrings(msg_buffer)
        if msg_buffer:
            self.fail("\n%s" % "\n".join(msg_buffer))

        msg_buffer = []
        self._check_described_params(msg_buffer)
        if msg_buffer:
            self.fail("\n%s" % "\n".join(msg_buffer))
import json
import os
import re
import traceback

import mock
import yaml

from rally import api
from rally.task import engine
from rally.task import scenario
from tests.unit import test


class TaskSampleTestCase(test.TestCase):
    """Sanity checks for the sample task configs shipped under samples/tasks.

    Verifies that every sample validates against the task schema, that JSON
    samples are well-formed, that each sample exists in both JSON and YAML
    form with identical content, and that filenames use dashes.
    """

    samples_path = os.path.join(
        os.path.dirname(__file__), os.pardir, os.pardir,
        os.pardir, "samples", "tasks")

    # Matches task config files; raw string avoids the invalid "\." escape
    # sequence a plain literal would produce (SyntaxWarning on modern Python).
    _CONFIG_FILE_RE = re.compile(r"\.(ya?ml|json)$", flags=re.I)

    def setUp(self):
        super(TaskSampleTestCase, self).setUp()
        if os.environ.get("TOX_ENV_NAME") == "cover":
            self.skipTest("There is no need to check samples in coverage job.")

    @mock.patch("rally.task.engine.TaskEngine"
                "._validate_config_semantic")
    def test_schema_is_valid(self,
                             mock_task_engine__validate_config_semantic):
        """Validate every sample against the task schema and ensure each
        in-tree scenario has at least one sample.
        """
        scenarios = set()

        for dirname, dirnames, filenames in os.walk(self.samples_path):
            for filename in filenames:
                full_path = os.path.join(dirname, filename)

                # NOTE(hughsaunders): Skip non config files
                # (bug https://bugs.launchpad.net/rally/+bug/1314369)
                if not self._CONFIG_FILE_RE.search(filename):
                    continue

                with open(full_path) as task_file:
                    try:
                        task_config = yaml.safe_load(
                            api._Task.render_template(task_file.read()))
                        eng = engine.TaskEngine(task_config,
                                                mock.MagicMock(), mock.Mock())
                        eng.validate()
                    except Exception:
                        print(traceback.format_exc())
                        self.fail("Invalid task file: %s" % full_path)
                    else:
                        scenarios.update(task_config.keys())

        missing = set(s.get_name() for s in scenario.Scenario.get_all())
        missing -= scenarios
        # check missing scenario is not from plugin
        missing = [s for s in missing
                   if scenario.Scenario.get(s).__module__.startswith("rally")]
        self.assertEqual([], missing,
                         "These scenarios don't have samples: %s" % missing)

    def test_json_correct_syntax(self):
        """Every *.json sample must parse as valid JSON after rendering."""
        for dirname, dirnames, filenames in os.walk(self.samples_path):
            for filename in filenames:
                if not filename.endswith(".json"):
                    continue
                full_path = os.path.join(dirname, filename)
                with open(full_path) as task_file:
                    try:
                        json.loads(api._Task.render_template(task_file.read()))
                    except Exception:
                        print(traceback.format_exc())
                        self.fail("Invalid JSON file: %s" % full_path)

    def test_task_config_pair_existance(self):
        """Each sample must exist in both .json and .yaml form."""
        inexistent_paths = []
        for dirname, dirnames, filenames in os.walk(self.samples_path):
            # iterate over unique config names (strip the 5-char extension)
            for sample_name in set(
                    f[:-5] for f in filenames
                    if f.endswith(".json") or f.endswith(".yaml")):
                partial_path = os.path.join(dirname, sample_name)
                yaml_path = partial_path + ".yaml"
                json_path = partial_path + ".json"

                if not os.path.exists(yaml_path):
                    inexistent_paths.append(yaml_path)
                elif not os.path.exists(json_path):
                    inexistent_paths.append(json_path)

        if inexistent_paths:
            self.fail("Sample task configs are missing:\n%r"
                      % inexistent_paths)

    def test_task_config_pairs_equality(self):
        """A sample's .json and .yaml variants must render to equal data."""
        for dirname, dirnames, filenames in os.walk(self.samples_path):
            # iterate over unique config names
            for sample_name in set(
                    f[:-5] for f in filenames
                    if f.endswith(".json") or f.endswith(".yaml")):
                partial_path = os.path.join(dirname, sample_name)
                yaml_path = partial_path + ".yaml"
                json_path = partial_path + ".json"

                if os.path.exists(yaml_path) and os.path.exists(json_path):
                    with open(json_path) as json_file:
                        json_config = yaml.safe_load(
                            api._Task.render_template(json_file.read()))
                    with open(yaml_path) as yaml_file:
                        yaml_config = yaml.safe_load(
                            api._Task.render_template(yaml_file.read()))
                    self.assertEqual(json_config, yaml_config,
                                     "Sample task configs are not equal:"
                                     "\n%s\n%s" % (yaml_path, json_path))

    def test_no_underscores_in_filename(self):
        """Sample filenames must use dashes, not underscores."""
        bad_filenames = []
        for dirname, dirnames, filenames in os.walk(self.samples_path):
            for filename in filenames:
                if "_" in filename and (filename.endswith(".yaml")
                                        or filename.endswith(".json")):
                    full_path = os.path.join(dirname, filename)
                    bad_filenames.append(full_path)

        self.assertEqual([], bad_filenames,
                         "Following sample task filenames contain "
                         "underscores (_) but must use dashes (-) instead: "
                         "{}".format(bad_filenames))
rally-0.9.1/tests/unit/verification/0000775000567000056710000000000013073420067020612 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/verification/__init__.py0000664000567000056710000000000013073417717022721 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/verification/test_context.py0000664000567000056710000000336613073417717023727 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.verification import context from tests.unit import test @context.configure("fake_verifier_context", order=314) class FakeContext(context.VerifierContext): def cleanup(self): pass def setup(self): pass class VerifierContextTestCase(test.TestCase): def test__meta_get(self): data = {"key1": "value1", "key2": "value2"} for k, v in data.items(): FakeContext._meta_set(k, v) for k, v in data.items(): self.assertEqual(v, FakeContext._meta_get(k)) self.assertTrue(FakeContext.is_hidden()) class ContextManagerTestCase(test.TestCase): @mock.patch("rally.verification.context.VerifierContext") def test_validate(self, mock_verifier_context): config = {"ctx1": mock.Mock(), "ctx2": mock.Mock()} context.ContextManager.validate(config) self.assertEqual([mock.call(k, allow_hidden=True) for k, v in config.items()], mock_verifier_context.get.call_args_list) self.assertEqual( [mock.call(v) for k, v in config.items()], mock_verifier_context.get.return_value.validate.call_args_list) 
# ---- tests/unit/verification/test_manager.py ----

import os
import sys

import mock

from rally import exceptions
from rally.verification import manager
from tests.unit import test

# Defaults baked into the FakeVerifier registration below; the tests assert
# that these exact values show up in git command lines and generated docs.
DEFAULT_REPO = "https://git.example.com"
DEFAULT_VERSION = 3.14159


@manager.configure("some_fake_verifier", default_repo=DEFAULT_REPO,
                   default_version=DEFAULT_VERSION)
class FakeVerifier(manager.VerifierManager):
    """Minimal concrete VerifierManager used as the test subject."""

    @classmethod
    def _get_default_meta(cls):
        return {"fake_key1": "fake_value"}

    def run(self, verification, pattern=None, load_list=None, skip_list=None,
            xfail_list=None, concurrency=None, failed=False, extra_args=None):
        pass

    def list_tests(self, pattern=""):
        pass


class VerifierManagerTestCase(test.TestCase):
    """Tests for the base VerifierManager behavior via FakeVerifier."""

    def setUp(self):
        super(VerifierManagerTestCase, self).setUp()
        # All subprocess interaction goes through utils.check_output;
        # patch it once for every test.
        check_output_p = mock.patch("rally.verification.manager.utils."
                                    "check_output")
        self.check_output = check_output_p.start()
        self.addCleanup(check_output_p.stop)

    @mock.patch.dict(os.environ, values={"PATH": ""}, clear=True)
    def test_environ(self):
        """environ must prepend the venv bin dir unless system_wide."""
        verifier = mock.Mock(system_wide=False)
        vmanager = FakeVerifier(verifier)
        self.assertEqual({"PATH": "%s/bin:" % vmanager.venv_dir,
                          "VIRTUAL_ENV": vmanager.venv_dir}, vmanager.environ)

        verifier.system_wide = True
        self.assertEqual({"PATH": ""}, vmanager.environ)

    @mock.patch("rally.verification.manager.VerifierManager.validate_args")
    @mock.patch("rally.verification.context.ContextManager.validate")
    def test_validate(self, mock_context_manager_validate,
                      mock_validate_args):
        """validate() must validate both run args and the context config."""
        fvmanager = FakeVerifier(mock.Mock())
        args = mock.Mock()
        with mock.patch.object(FakeVerifier, "_meta_get") as mock__meta_get:
            fvmanager.validate(args)
        mock__meta_get.assert_called_once_with("context")
        mock_validate_args.assert_called_once_with(args)
        mock_context_manager_validate.assert_called_once_with(
            mock__meta_get.return_value)

    @mock.patch("rally.verification.manager.os.path.exists",
                side_effect=[False, True])
    def test__clone(self, mock_exists):
        """_clone must validate the source, clone the default repo, and
        record the checked-out branch/tag/commit on the verifier.
        """
        verifier = mock.Mock(version=None)
        vmanager = FakeVerifier(verifier)

        # Check source validation
        verifier.source = "some_source"
        e = self.assertRaises(exceptions.RallyException, vmanager._clone)
        self.assertEqual("Source path 'some_source' is not valid.", "%s" % e)
        verifier.source = None

        # Version to switch repo is provided
        verifier.version = "1.0.0"
        vmanager._clone()
        self.assertEqual(
            [mock.call(["git", "clone", DEFAULT_REPO, vmanager.repo_dir,
                        "-b", DEFAULT_VERSION]),
             mock.call(["git", "checkout", "1.0.0"], cwd=vmanager.repo_dir)],
            self.check_output.call_args_list)
        verifier.update_properties.assert_not_called()

        # Version to switch repo is not provided
        verifier.version = None
        # side_effect feeds three _clone() runs: branch, commit ID, tag.
        self.check_output.side_effect = [
            "Output from cloning", "heads/master",
            "Output from cloning", "0.1.0-72-g4a39bd4", "4a39bd4qwerty12345",
            "Output from cloning", "2.0.0", "12345qwerty4a39bd4"]

        # Case 1: verifier is switched to a branch
        self.check_output.reset_mock()
        verifier.update_properties.reset_mock()
        vmanager._clone()
        self.assertEqual(
            [mock.call(["git", "clone", DEFAULT_REPO, vmanager.repo_dir,
                        "-b", DEFAULT_VERSION]),
             mock.call(["git", "describe", "--all"], cwd=vmanager.repo_dir)],
            self.check_output.call_args_list)
        verifier.update_properties.assert_called_once_with(version="master")

        # Case 2: verifier is switched to a commit ID
        self.check_output.reset_mock()
        verifier.update_properties.reset_mock()
        vmanager._clone()
        self.assertEqual(
            [mock.call(["git", "clone", DEFAULT_REPO, vmanager.repo_dir,
                        "-b", DEFAULT_VERSION]),
             mock.call(["git", "describe", "--all"], cwd=vmanager.repo_dir),
             mock.call(["git", "rev-parse", "HEAD"], cwd=vmanager.repo_dir)],
            self.check_output.call_args_list)
        verifier.update_properties.assert_called_once_with(
            version="4a39bd4qwerty12345")

        # Case 3: verifier is switched to a tag
        self.check_output.reset_mock()
        verifier.update_properties.reset_mock()
        vmanager._clone()
        self.assertEqual(
            [mock.call(["git", "clone", DEFAULT_REPO, vmanager.repo_dir,
                        "-b", DEFAULT_VERSION]),
             mock.call(["git", "describe", "--all"], cwd=vmanager.repo_dir),
             mock.call(["git", "rev-parse", "HEAD"], cwd=vmanager.repo_dir)],
            self.check_output.call_args_list)
        verifier.update_properties.assert_called_once_with(version="2.0.0")

    @mock.patch("rally.verification.manager.VerifierManager.install_venv")
    @mock.patch("rally.verification.manager.VerifierManager.check_system_wide")
    @mock.patch("rally.verification.manager.VerifierManager._clone")
    @mock.patch("rally.verification.utils.create_dir")
    def test_install(self, mock_create_dir, mock__clone,
                     mock_check_system_wide, mock_install_venv):
        """install() must build a venv OR check system packages, per mode."""
        verifier = mock.Mock()
        vmanager = FakeVerifier(verifier)

        # venv case
        verifier.system_wide = False
        vmanager.install()
        mock__clone.assert_called_once_with()
        self.assertFalse(mock_check_system_wide.called)
        mock_install_venv.assert_called_once_with()

        # system-wide case
        mock__clone.reset_mock()
        mock_check_system_wide.reset_mock()
        mock_install_venv.reset_mock()
        verifier.system_wide = True
        vmanager.install()
        mock__clone.assert_called_once_with()
        mock_check_system_wide.assert_called_once_with()
        self.assertFalse(mock_install_venv.called)

    @mock.patch("rally.verification.manager.shutil.rmtree")
    @mock.patch("rally.verification.manager.os.path.exists",
                return_value=True)
    def test_uninstall(self, mock_exists, mock_rmtree):
        """uninstall() removes home_dir; full=True removes base_dir."""
        vmanager = FakeVerifier(mock.MagicMock())
        vmanager.uninstall()
        mock_exists.assert_called_once_with(vmanager.home_dir)
        mock_rmtree.assert_called_once_with(vmanager.home_dir)

        mock_exists.reset_mock()
        mock_rmtree.reset_mock()

        vmanager.uninstall(full=True)
        mock_exists.assert_called_once_with(vmanager.base_dir)
        mock_rmtree.assert_called_once_with(vmanager.base_dir)

    @mock.patch("rally.verification.manager.shutil.rmtree")
    @mock.patch("rally.verification.manager.os.path.exists")
    def test_install_venv(self, mock_exists, mock_rmtree):
        """install_venv() creates the venv, removing a pre-existing one."""
        mock_exists.return_value = False
        vmanager = FakeVerifier(mock.Mock())

        vmanager.install_venv()
        self.assertEqual(
            [mock.call(["virtualenv", "-p", sys.executable,
                        vmanager.venv_dir], cwd=vmanager.repo_dir,
                       msg_on_err="Failed to initialize virtual env in %s "
                                  "directory." % vmanager.venv_dir),
             mock.call(["pip", "install", "-e", "./"],
                       cwd=vmanager.repo_dir, env=vmanager.environ)
             ], self.check_output.call_args_list)
        self.assertFalse(mock_rmtree.called)

        # case: venv was created previously
        mock_exists.return_value = True
        self.check_output.reset_mock()
        vmanager.install_venv()
        self.assertEqual(
            [mock.call(["virtualenv", "-p", sys.executable,
                        vmanager.venv_dir], cwd=vmanager.repo_dir,
                       msg_on_err="Failed to initialize virtual env in %s "
                                  "directory." % vmanager.venv_dir),
             mock.call(["pip", "install", "-e", "./"],
                       cwd=vmanager.repo_dir, env=vmanager.environ)
             ], self.check_output.call_args_list)
        mock_rmtree.assert_called_once_with(vmanager.venv_dir)

    def test_check_system_wide(self):
        """check_system_wide() must compare requirements.txt against the
        installed distributions and fail on a missing package.
        """
        vmanager = FakeVerifier(mock.Mock())
        pip_module = mock.Mock()
        pip_module_gid = pip_module.get_installed_distributions
        packages = []
        for name in ("SQLAlchemy", "NumPy"):
            packages.append(mock.Mock())
            packages[-1].name = name
        pip_module.req.parse_requirements.return_value = packages
        with mock.patch.dict("sys.modules", {"pip": pip_module}):
            pip_module_gid.return_value = [mock.Mock(key="sqlalchemy"),
                                           mock.Mock(key="numpy")]
            vmanager.check_system_wide()
            pip_module.req.parse_requirements.assert_called_once_with(
                "%s/requirements.txt" % vmanager.repo_dir, session=False)
            pip_module_gid.assert_called_once_with()

            # failure
            pip_module_gid.reset_mock()
            missed_package = pip_module_gid.return_value.pop()
            e = self.assertRaises(manager.VerifierSetupFailure,
                                  vmanager.check_system_wide)
            self.assertIn("Please install '%s'." % missed_package.key,
                          "%s" % e)

    def test_checkout(self):
        """checkout() must update master, pull, then switch to the version."""
        vmanager = FakeVerifier(mock.Mock())
        version = "3.14159"
        vmanager.checkout(version)
        self.assertEqual(
            [mock.call(["git", "checkout", "master"], cwd=vmanager.repo_dir),
             mock.call(["git", "remote", "update"], cwd=vmanager.repo_dir),
             mock.call(["git", "pull"], cwd=vmanager.repo_dir),
             mock.call(["git", "checkout", version], cwd=vmanager.repo_dir)],
            self.check_output.call_args_list)

    def test_configure(self):
        """Base configure() with extra options is not implemented."""
        vmanager = FakeVerifier(mock.Mock())
        self.assertRaises(NotImplementedError, vmanager.configure,
                          extra_options={"key": "value"})

    def test_is_configured(self):
        """The base implementation always reports 'configured'."""
        vmanager = FakeVerifier(mock.Mock())
        self.assertTrue(vmanager.is_configured())

    def test_override_configuration(self):
        # coverage should be 100%...
        self.assertRaises(NotImplementedError,
                          FakeVerifier(mock.Mock()).override_configuration,
                          "something")

    def test_extend_configuration(self):
        # coverage should be 100%...
        self.assertRaises(NotImplementedError,
                          FakeVerifier(mock.Mock()).extend_configuration,
                          "something")

    def test_get_configuration(self):
        """The base implementation returns an empty configuration."""
        self.assertEqual("", FakeVerifier(mock.Mock()).get_configuration())

    def test_install_extension(self):
        # coverage should be 100%...
        self.assertRaises(NotImplementedError,
                          FakeVerifier(mock.Mock()).install_extension,
                          "source")

    def test_list_extensions(self):
        """The base implementation reports no extensions."""
        self.assertEqual([], FakeVerifier(mock.Mock()).list_extensions())

    def test_uninstall_extension(self):
        # coverage should be 100%...
        self.assertRaises(NotImplementedError,
                          FakeVerifier(mock.Mock()).uninstall_extension,
                          "name")

    @mock.patch("rally.verification.manager.six.StringIO")
    @mock.patch("rally.verification.manager.subunit_v2")
    def test_parse_results(self, mock_subunit_v2, mock_string_io):
        """parse_results() must feed the raw data to the subunit parser."""
        data = "123123"
        self.assertEqual(mock_subunit_v2.parse.return_value,
                         FakeVerifier(mock.Mock()).parse_results(data))
        mock_subunit_v2.parse.assert_called_once_with(
            mock_string_io.return_value)
        mock_string_io.assert_called_once_with(data)

    def test_validate_args(self):
        """validate_args() must accept valid run args and raise
        ValidationError with a precise message for each invalid one.
        """
        # validating "pattern" argument
        fvmanager = FakeVerifier(mock.Mock())
        fvmanager.validate_args({"pattern": "it is string"})
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"pattern": 2})
        self.assertEqual("'pattern' argument should be a string.",
                         e.kwargs["message"])

        # validating "concurrency" argument
        fvmanager.validate_args({"concurrency": 1})
        fvmanager.validate_args({"concurrency": 5})
        fvmanager.validate_args({"concurrency": 0})
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"concurrency": -1})
        self.assertEqual("'concurrency' argument should be a positive integer "
                         "or zero.", e.kwargs["message"])
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"concurrency": "bla"})
        self.assertEqual("'concurrency' argument should be a positive integer "
                         "or zero.", e.kwargs["message"])

        # validating "load_list" argument
        fvmanager.validate_args({"load_list": []})
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"load_list": "str"})
        self.assertEqual("'load_list' argument should be a list of tests.",
                         e.kwargs["message"])

        # validating "skip_list" argument
        fvmanager.validate_args({"skip_list": {}})
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"skip_list": "str"})
        self.assertEqual("'skip_list' argument should be a dict of tests where"
                         " keys are test names and values are reasons.",
                         e.kwargs["message"])

        # validating "xfail_list" argument
        fvmanager.validate_args({"xfail_list": {}})
        e = self.assertRaises(exceptions.ValidationError,
                              fvmanager.validate_args, {"xfail_list": "str"})
        self.assertEqual("'xfail_list' argument should be a dict of tests "
                         "where keys are test names and values are reasons.",
                         e.kwargs["message"])

    def test__get_doc(self):
        """_get_doc() must render running/installation argument docs with
        the configured default repo and version substituted in.
        """
        self.assertEqual(
            "\n"
            "**Running arguments**:\n"
            " * *concurrency*: Number of processes to be used for launching "
            "tests. In case of 0 value, number of processes will be equal to "
            "number of CPU cores.\n"
            " * *load_list*: a list of tests to launch.\n"
            " * *pattern*: a regular expression of tests to launch.\n"
            " * *skip_list*: a list of tests to skip (actually, it is a dict "
            "where keys are names of tests, values are reasons).\n"
            " * *xfail_list*: a list of tests that are expected to fail "
            "(actually, it is a dict where keys are names of tests, values "
            "are reasons).\n"
            "**Installation arguments**:\n"
            " * *system_wide*: Whether or not to use the system-wide "
            "environment for verifier instead of a virtual environment. "
            "Defaults to False.\n"
            " * *source*: Path or URL to the repo to clone verifier from. "
            "Defaults to https://git.example.com\n"
            " * *version*: Branch, tag or commit ID to checkout before "
            "verifier installation. Defaults to '%s'." % DEFAULT_VERSION,
            FakeVerifier._get_doc())


# ---- tests/unit/verification/test_utils.py ----

import subprocess

import mock
from six.moves import configparser

from rally.verification import utils
from tests.unit import test


class UtilsTestCase(test.TestCase):
    """Tests for rally.verification.utils helpers."""

    @mock.patch("rally.verification.utils.os.makedirs")
    @mock.patch("rally.verification.utils.os.path.isdir",
                side_effect=[False, True])
    def test_create_dir(self, mock_isdir, mock_makedirs):
        """create_dir() must only create the directory when it is absent."""
        utils.create_dir("some")
        mock_makedirs.assert_called_once_with("some")

        mock_makedirs.reset_mock()

        utils.create_dir("some")
        mock_makedirs.assert_not_called()

    @mock.patch("rally.verification.utils.encodeutils")
    @mock.patch("rally.verification.utils.LOG")
    @mock.patch("rally.verification.utils.subprocess.check_output")
    def test_check_output(self, mock_check_output, mock_log,
                          mock_encodeutils):
        """check_output() must log failures (plus the optional custom
        message) and re-raise CalledProcessError.
        """
        self.assertEqual(mock_check_output.return_value,
                         utils.check_output())
        self.assertFalse(mock_log.error.called)

        mock_check_output.side_effect = subprocess.CalledProcessError(1, None)

        self.assertRaises(subprocess.CalledProcessError, utils.check_output)
        self.assertEqual(2, mock_log.error.call_count)

        mock_log.error.reset_mock()

        msg = "bla bla bla"
        self.assertRaises(subprocess.CalledProcessError, utils.check_output,
                          msg_on_err=msg)
        self.assertEqual(3, mock_log.error.call_count)
        mock_log.error.assert_any_call(msg)

    @mock.patch("rally.verification.utils.six.StringIO")
    @mock.patch("rally.verification.utils.add_extra_options")
    @mock.patch("rally.verification.utils.configparser.ConfigParser")
    @mock.patch("six.moves.builtins.open", side_effect=mock.mock_open())
    def test_extend_configfile(self, mock_open, mock_config_parser,
                               mock_add_extra_options, mock_string_io):
        """extend_configfile() must read the config, merge extra options,
        write the result back and return its string form.
        """
        extra_options = mock.Mock()
        conf_path = "/path/to/fake/conf"
        utils.extend_configfile(extra_options, conf_path)

        conf = mock_config_parser.return_value
        conf.read.assert_called_once_with(conf_path)
        mock_add_extra_options.assert_called_once_with(extra_options, conf)
        conf = mock_add_extra_options.return_value
        conf.write.assert_has_calls([mock.call(mock_open.side_effect()),
                                     mock.call(mock_string_io.return_value)])
        mock_string_io.return_value.getvalue.assert_called_once_with()

    def test_add_extra_options(self):
        """add_extra_options() must merge nested option dicts into the
        parser so each (option, value) pair appears in its section.
        """
        conf = configparser.ConfigParser()
        extra_options = {"section": {"foo": "bar"},
                         "section2": {"option": "value"}}

        conf = utils.add_extra_options(extra_options, conf)
        expected = {"section": ("foo", "bar"),
                    "section2": ("option", "value")}
        for section, option in expected.items():
            result = conf.items(section)
            self.assertIn(option, result)
class ReporterTestCase(test.TestCase):
    """Validate the output schema enforced by VerificationReporter.make."""

    def test_make(self):
        reporter_cls = mock.Mock()

        # Every payload below conforms to the reporter output schema and
        # must therefore be accepted by make() without raising.
        valid_payloads = (
            {},
            {"files": {}},
            {"files": {"/path/foo": "content"}},
            {"open": "/path/foo"},
            {"print": "foo"},
            {"files": {"/path/foo": "content"},
             "open": "/path/foo",
             "print": "foo"},
        )
        for payload in valid_payloads:
            reporter_cls.return_value.generate.return_value = payload
            reporter.VerificationReporter.make(reporter_cls, None, None)

        # Every payload below violates the schema ("files" must be a
        # mapping of path -> string, "open"/"print" must be strings, no
        # additional properties are allowed) and must be rejected.
        invalid_payloads = (
            {"files": []},
            {"files": ""},
            {"files": {"a": {}}},
            {"open": []},
            {"print": []},
            {"additional": ""},
        )
        for payload in invalid_payloads:
            reporter_cls.return_value.generate.return_value = payload
            self.assertRaises(jsonschema.ValidationError,
                              reporter.VerificationReporter.make,
                              reporter_cls, None, None)


def make_fake_deployment(**kwargs):
    """Build a FakeDeployment pre-filled with sane defaults.

    Keyword arguments override the default uuid/config/status values.
    """
    values = dict({
        "uuid": "1359befb-8737-4f4e-bea9-492416106977",
        "config": {
            "name": "fake",
        },
        "status": consts.DeployStatus.DEPLOY_INIT,
    }, **kwargs)
    return FakeDeployment(values=values)


class FakeDeployment(object):
    """Minimal dict-backed stand-in for a Deployment DB object."""

    def __init__(self, values=None):
        self._values = {} if values is None else values

    def __getitem__(self, name):
        return self._values[name]

    def update_status(self, status):
        self._values["status"] = status

    def set_started(self):
        pass

    def set_completed(self):
        pass

    def delete(self):
        pass


@engine.configure(name="FakeEngine")
class FakeEngine(engine.Engine):
    """Fake deployment engine used only by these tests."""

    deployed = False
    cleanuped = False

    def __init__(self, deployment):
        super(FakeEngine, self).__init__(deployment)
        self.deployment = deployment

    def deploy(self):
        self.deployed = True
        return self

    def cleanup(self):
        self.cleanuped = True
""" deployed = False cleanuped = False def __init__(self, deployment): super(FakeEngine, self).__init__(deployment) self.deployment = deployment def deploy(self): self.deployed = True return self def cleanup(self): self.cleanuped = True class EngineMixIn(object): def deploy(self): pass def cleanup(self): pass class EngineTestCase(test.TestCase): def test_get_engine_not_found(self): deployment = make_fake_deployment() self.assertRaises(exceptions.PluginNotFound, engine.Engine.get_engine, "non_existing_engine", deployment) self.assertEqual(consts.DeployStatus.DEPLOY_FAILED, deployment["status"]) def test_config(self): deployment = make_fake_deployment() engine = FakeEngine(deployment) self.assertEqual(deployment["config"], engine.config) @mock.patch.object(FakeDeployment, "set_completed") @mock.patch.object(FakeDeployment, "set_started") def test_make_deploy(self, mock_fake_deployment_set_started, mock_fake_deployment_set_completed): deployment = make_fake_deployment() engine = FakeEngine(deployment) credential = engine.make_deploy() self.assertEqual(engine, credential) self.assertTrue(credential.deployed) self.assertFalse(credential.cleanuped) mock_fake_deployment_set_completed.assert_called_once_with() mock_fake_deployment_set_started.assert_called_once_with() @mock.patch.object(FakeDeployment, "set_started") @mock.patch.object(FakeEngine, "deploy") def test_make_deploy_failed(self, mock_fake_engine_deploy, mock_fake_deployment_set_started): class DeployFailed(Exception): pass deployment = make_fake_deployment() engine = FakeEngine(deployment) mock_fake_engine_deploy.side_effect = DeployFailed() self.assertRaises(DeployFailed, engine.make_deploy) mock_fake_deployment_set_started.assert_called_once_with() @mock.patch.object(FakeDeployment, "update_status") def test_make_cleanup(self, mock_fake_deployment_update_status): deployment = make_fake_deployment() engine = FakeEngine(deployment) engine.make_cleanup() self.assertTrue(engine.cleanuped) 
self.assertFalse(engine.deployed) mock_fake_deployment_update_status.assert_has_calls([ mock.call(consts.DeployStatus.CLEANUP_STARTED), mock.call(consts.DeployStatus.CLEANUP_FINISHED), ]) self.assertTrue(engine.cleanuped) @mock.patch.object(FakeDeployment, "update_status") @mock.patch.object(FakeEngine, "cleanup") def test_make_cleanup_failed(self, mock_fake_engine_cleanup, mock_fake_deployment_update_status): class CleanUpFailed(Exception): pass deployment = make_fake_deployment() engine = FakeEngine(deployment) mock_fake_engine_cleanup.side_effect = CleanUpFailed() self.assertRaises(CleanUpFailed, engine.make_cleanup) mock_fake_deployment_update_status.assert_has_calls([ mock.call(consts.DeployStatus.CLEANUP_STARTED), ]) self.assertFalse(engine.cleanuped) @mock.patch.object(FakeDeployment, "update_status") def test_with_statement(self, mock_fake_deployment_update_status): deployment = make_fake_deployment() engine = FakeEngine(deployment) with engine as deployer: self.assertEqual(engine, deployer) self.assertFalse(mock_fake_deployment_update_status.called) self.assertFalse(engine.cleanuped) self.assertFalse(engine.deployed) def test_with_statement_failed_on_init(self): self._assert_changed_status_on_error( consts.DeployStatus.DEPLOY_INIT, consts.DeployStatus.DEPLOY_FAILED) def test_with_statement_failed_on_started(self): self._assert_changed_status_on_error( consts.DeployStatus.DEPLOY_STARTED, consts.DeployStatus.DEPLOY_FAILED) def test_with_statement_failed_on_finished(self): self._assert_changed_status_on_error( consts.DeployStatus.DEPLOY_FINISHED, consts.DeployStatus.DEPLOY_INCONSISTENT) def test_with_statement_failed_on_cleanup(self): self._assert_changed_status_on_error( consts.DeployStatus.CLEANUP_STARTED, consts.DeployStatus.CLEANUP_FAILED) @mock.patch.object(FakeDeployment, "update_status") def _assert_changed_status_on_error(self, initial, final, mock_fake_deployment_update_status): class SomeError(Exception): pass def context_with_error(manager): with 
mock.patch("traceback.print_exception"): with manager: raise SomeError() deployment = make_fake_deployment(status=initial) engine = FakeEngine(deployment) self.assertRaises(SomeError, context_with_error, engine) mock_fake_deployment_update_status.assert_called_once_with(final) self.assertFalse(engine.cleanuped) self.assertFalse(engine.deployed) def test_get_engine(self): deployment = make_fake_deployment() engine_inst = engine.Engine.get_engine("FakeEngine", deployment) self.assertIsInstance(engine_inst, FakeEngine) def test_engine_factory_is_abstract(self): self.assertRaises(TypeError, engine.Engine) rally-0.9.1/tests/unit/deployment/test_lxc.py0000664000567000056710000002370513073417720022520 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.deployment import engine from tests.unit import test MOD = "rally.deployment.engines.lxc." 
class LxcEngineTestCase(test.TestCase):
    """Unit tests for the LxcEngine deployment engine."""

    def setUp(self):
        super(LxcEngineTestCase, self).setUp()
        self.config = {
            "type": "LxcEngine",
            "container_name": "rally",
            "containers_per_host": 2,
            "tunnel_to": ["1.1.1.1", "2.2.2.2"],
            "distribution": "ubuntu",
            "start_lxc_network": "10.128.128.0/28",
            "engine": {
                "name": "FakeEngine",
                "config": {
                    "key": "value",
                },
            },
            "provider": {
                "type": "DummyProvider",
                "credentials": [{"user": "root", "host": "host1.net"},
                                {"user": "root", "host": "host2.net"}]
            }
        }
        self.deployment = {
            "uuid": "test-deployment-uuid",
            "config": self.config,
        }
        self.engine = engine.Engine.get_engine("LxcEngine", self.deployment)

    def test_config(self):
        self.assertEqual(self.deployment["config"], self.engine.config)

    @mock.patch(MOD + "objects")
    @mock.patch(MOD + "engine")
    def test__deploy_first(self, mock_engine, mock_objects):
        fake_credentials = {"user": "admin", "host": "host.net"}
        fake_deployment = mock.Mock()
        fake_engine = mock.Mock()
        mock_objects.Deployment = mock.Mock(return_value=fake_deployment)
        mock_engine.Engine.get_engine = mock.Mock(
            return_value=fake_engine)
        fake_host = mock.Mock()
        fake_so = mock.Mock()
        fake_so.get_credentials.return_value = fake_credentials
        fake_host.get_server_object = mock.Mock(return_value=fake_so)

        self.engine._deploy_first(fake_host, "name", "dist", "release")

        # The host must be prepared, the container created/started, its
        # server object fetched, and finally the containers stopped.
        host_calls = [
            mock.call.prepare(),
            mock.call.create_container("name", "dist", "release"),
            mock.call.start_containers(),
            mock.call.get_server_object("name"),
            mock.call.stop_containers()]
        self.assertEqual(host_calls, fake_host.mock_calls)
        fake_engine.deploy.assert_called_once_with()
        mock_engine.Engine.get_engine.assert_called_once_with(
            "FakeEngine", fake_deployment)

        # The nested engine gets the first container as its sole provider
        # credential and the outer deployment as a parent.
        engine_config = self.config["engine"].copy()
        engine_config["provider"] = {"credentials": [fake_credentials],
                                     "type": "DummyProvider"}
        mock_objects.Deployment.assert_called_once_with(
            config=engine_config, parent_uuid="test-deployment-uuid")

    @mock.patch(MOD + "provider.ProviderFactory.get_provider")
    def test__get_provider(self, mock_provider_factory_get_provider):
        mock_provider_factory_get_provider.return_value = "fake_provider"
        provider = self.engine._get_provider()
        self.assertEqual("fake_provider", provider)
        mock_provider_factory_get_provider.assert_called_once_with(
            self.config["provider"], self.deployment)

    @mock.patch(MOD + "open", create=True)
    @mock.patch(MOD + "get_script_path", return_value="fake_sp")
    @mock.patch(MOD + "lxc.LxcHost")
    @mock.patch(MOD + "LxcEngine._deploy_first")
    @mock.patch(MOD + "LxcEngine._get_provider")
    def test_deploy(self, mock__get_provider, mock__deploy_first,
                    mock_lxc_host, mock_get_script_path, mock_open):
        mock_open.return_value = "fs"
        fake_containers = ((mock.Mock(), mock.Mock()),
                           (mock.Mock(), mock.Mock()))
        fake_hosts = mock_lxc_host.side_effect = [mock.Mock(), mock.Mock()]
        fake_hosts[0].get_server_objects.return_value = fake_containers[0]
        fake_hosts[1].get_server_objects.return_value = fake_containers[1]
        fake_hosts[0]._port_cache = {1: 2, 3: 4}
        fake_hosts[1]._port_cache = {5: 6, 7: 8}
        fake_provider = mock__get_provider.return_value
        fake_servers = [mock.Mock(), mock.Mock()]
        fake_servers[0].get_credentials.return_value = "fc1"
        fake_servers[1].get_credentials.return_value = "fc2"
        fake_provider.create_servers.return_value = fake_servers

        # Expected add_resource() calls, consumed in order below.
        add_res_calls = [
            {"provider_name": "LxcEngine",
             "info": {"host": "fc1",
                      "config": {"network": "10.128.128.0/28",
                                 "tunnel_to": ["1.1.1.1", "2.2.2.2"]},
                      "forwarded_ports": [(1, 2), (3, 4)],
                      "containers": fake_hosts[0].containers}},
            {"provider_name": "LxcEngine",
             "info": {"host": "fc2",
                      "config": {"network": "10.128.128.16/28",
                                 "tunnel_to": ["1.1.1.1", "2.2.2.2"]},
                      "forwarded_ports": [(5, 6), (7, 8)],
                      "containers": fake_hosts[1].containers}}]

        def add_resource(**actual_kwargs):
            expected_kwargs = add_res_calls.pop(0)
            self.assertEqual(expected_kwargs["provider_name"],
                             actual_kwargs["provider_name"])
            self.assertEqual(expected_kwargs["info"]["host"],
                             actual_kwargs["info"]["host"])
            self.assertEqual(expected_kwargs["info"]["config"],
                             actual_kwargs["info"]["config"])
            self.assertEqual(expected_kwargs["info"]["containers"],
                             actual_kwargs["info"]["containers"])
            self.assertSequenceEqual(
                expected_kwargs["info"]["forwarded_ports"],
                actual_kwargs["info"]["forwarded_ports"])

        fake_deployment = mock.MagicMock()
        fake_deployment.add_resource = add_resource
        fake_deployment.__getitem__.side_effect = self.deployment.__getitem__

        with mock.patch.object(self.engine, "deployment", fake_deployment):
            credentials = self.engine.deploy()

        # Only the first container's admin credential is returned.
        self.assertEqual(1, len(credentials.keys()))
        self.assertIn("openstack", credentials)
        self.assertEqual(1, len(credentials["openstack"]))
        credential = credentials["openstack"][0]
        self.assertIsInstance(credential["admin"], dict)
        self.assertEqual([], credential["users"])

        # One LxcHost per provider server, each with its own subnet.
        lxc_host_calls = [
            mock.call(fake_servers[0],
                      {"network": "10.128.128.0/28",
                       "tunnel_to": ["1.1.1.1", "2.2.2.2"]}),
            mock.call(fake_servers[1],
                      {"network": "10.128.128.16/28",
                       "tunnel_to": ["1.1.1.1", "2.2.2.2"]})]
        self.assertEqual(lxc_host_calls, mock_lxc_host.mock_calls)

        deploy_first_calls = [
            mock.call(fake_hosts[0], "rally-10-128-128-0-000",
                      "ubuntu", None),
            mock.call(fake_hosts[1], "rally-10-128-128-16-000",
                      "ubuntu", None)]
        self.assertEqual(deploy_first_calls, mock__deploy_first.mock_calls)

        host1_calls = [
            mock.call.create_clone("rally-10-128-128-0-001",
                                   "rally-10-128-128-0-000"),
            mock.call.start_containers(),
            mock.call.get_server_objects()]
        host2_calls = [
            mock.call.create_clone("rally-10-128-128-16-001",
                                   "rally-10-128-128-16-000"),
            mock.call.start_containers(),
            mock.call.get_server_objects()]
        self.assertEqual(host1_calls, fake_hosts[0].mock_calls)
        self.assertEqual(host2_calls, fake_hosts[1].mock_calls)

        # The install script is opened once per container.
        self.assertEqual([mock.call("fake_sp", "rb")] * 4,
                         mock_open.mock_calls)
        for host in fake_containers:
            for container in host:
                self.assertEqual([mock.call.ssh.run("/bin/sh -e",
                                                    stdin="fs")],
                                 container.mock_calls)

    @mock.patch(MOD + "LxcEngine._get_provider")
    @mock.patch(MOD + "lxc.LxcHost")
    @mock.patch(MOD + "provider.Server.from_credentials")
    def test_cleanup(self, mock_server_from_credentials,
                     mock_lxc_host, mock__get_provider):
        mock__get_provider.return_value = fake_provider = mock.Mock()
        mock_lxc_host.side_effect = fake_hosts = [mock.Mock(), mock.Mock()]
        mock_server_from_credentials.side_effect = ["s1", "s2"]
        fake_resources = []
        for i in range(2):
            res = mock.Mock()
            res.info = {"host": "host%d" % i,
                        "config": "fake_config%d" % i,
                        "forwarded_ports": [(1, 2), (3, 4)],
                        "containers": "fake_containers"}
            fake_resources.append(res)

        with mock.patch.object(self.engine, "deployment") as mock_deployment:
            mock_deployment.get_resources.return_value = fake_resources
            self.engine.cleanup()

        # Each recorded host is torn down: containers, ports, tunnels.
        for host in fake_hosts:
            self.assertEqual("fake_containers", host.containers)
            self.assertEqual([mock.call.destroy_containers(),
                              mock.call.destroy_ports([(1, 2), (3, 4)]),
                              mock.call.delete_tunnels()],
                             host.mock_calls)

        delete_calls = [mock.call.delete_resource(r.id)
                        for r in fake_resources]
        self.assertEqual(delete_calls,
                         mock_deployment.delete_resource.call_args_list)
        fake_provider.destroy_servers.assert_called_once_with()
# Plugin line enabled in the generated devstack local.conf.
RALLY_CONFIG = "rally https://github.com/openstack/rally master"

SAMPLE_CONFIG = {
    "type": "DevstackEngine",
    "provider": {
        "name": "ExistingServers",
        "credentials": [{"user": "root", "host": "example.com"}],
    },
    "local_conf": {
        "ADMIN_PASSWORD": "secret",
        "ENABLE_PLUGIN": [RALLY_CONFIG],
    },
}

DEVSTACK_REPO = "https://git.openstack.org/openstack-dev/devstack"


class DevstackEngineTestCase(test.TestCase):
    """Unit tests for the DevstackEngine deployment engine."""

    def setUp(self):
        super(DevstackEngineTestCase, self).setUp()
        self.deployment = {
            "uuid": "de641026-dbe3-4abe-844a-ffef930a600a",
            "config": SAMPLE_CONFIG,
        }
        self.engine = devstack.DevstackEngine(self.deployment)

    def test_invalid_config(self):
        self.deployment = SAMPLE_CONFIG.copy()
        self.deployment["config"] = {"type": 42}
        engine = devstack.DevstackEngine(self.deployment)
        self.assertRaises(jsonschema.ValidationError, engine.validate)

    def test_construct(self):
        self.assertEqual(self.engine.local_conf["ADMIN_PASSWORD"], "secret")

    @mock.patch("rally.deployment.engines.devstack.open", create=True)
    def test_prepare_server(self, mock_open):
        mock_open.return_value = "fake_file"
        server = mock.Mock()
        server.password = "secret"
        self.engine.prepare_server(server)
        calls = [
            mock.call("/bin/sh -e", stdin="fake_file"),
            mock.call("chpasswd", stdin="rally:secret"),
        ]
        self.assertEqual(calls, server.ssh.run.mock_calls)
        # The install script shipped with rally must be the file opened.
        filename = mock_open.mock_calls[0][1][0]
        self.assertTrue(filename.endswith("rally/deployment/engines/"
                                          "devstack/install.sh"))
        self.assertEqual([mock.call(filename, "rb")], mock_open.mock_calls)

    @mock.patch("rally.deployment.engine.Engine.get_provider")
    @mock.patch("rally.deployment.engines.devstack.get_updated_server")
    @mock.patch("rally.deployment.engines.devstack.get_script")
    @mock.patch("rally.deployment.serverprovider.provider.Server")
    @mock.patch("rally.deployment.engines.devstack.objects.Credential")
    def test_deploy(self, mock_credential, mock_server, mock_get_script,
                    mock_get_updated_server, mock_engine_get_provider):
        mock_engine_get_provider.return_value = fake_provider = (
            mock.Mock()
        )
        server = mock.Mock(host="host")
        mock_credential.return_value.to_dict.return_value = "fake_credential"
        mock_get_updated_server.return_value = ds_server = mock.Mock()
        mock_get_script.return_value = "fake_script"
        server.get_credentials.return_value = "fake_credentials"
        fake_provider.create_servers.return_value = [server]

        with mock.patch.object(self.engine, "deployment") as mock_deployment:
            credentials = self.engine.deploy()

        # An admin credential pointing at the freshly stacked keystone.
        self.assertEqual(
            {"openstack": [{"admin": "fake_credential", "users": []}]},
            credentials)
        mock_credential.assert_called_once_with(
            "http://host:5000/v2.0/", "admin", "secret", "admin", "admin")
        mock_credential.return_value.to_dict.assert_called_once_with(
            include_permission=True)
        mock_deployment.add_resource.assert_called_once_with(
            info="fake_credentials",
            provider_name="DevstackEngine",
            type="credentials")

        # The bootstrap script receives the repo and branch as arguments.
        cmd = "/bin/sh -e -s %s %s" % (
            mock_deployment["config"].get("devstack_repo"),
            mock_deployment["config"].get("devstack_branch"))
        server.ssh.run.assert_called_once_with(cmd, stdin="fake_script")

        ds_calls = [
            mock.call.ssh.run("cat > ~/devstack/local.conf", stdin=mock.ANY),
            mock.call.ssh.run("~/devstack/stack.sh")
        ]
        self.assertEqual(ds_calls, ds_server.mock_calls)
        local_conf = ds_server.mock_calls[0][2]["stdin"]
        self.assertIn("ADMIN_PASSWORD=secret", local_conf)
        self.assertIn("enable_plugin " + RALLY_CONFIG, local_conf)
@ddt.ddt
class TestExistingCloud(test.TestCase):
    """Unit tests for the ExistingCloud engine (keystone v2.0 and v3)."""

    def setUp(self):
        super(TestExistingCloud, self).setUp()
        # One sample deployment per supported keystone API version.
        self.deployments = {
            "v2.0": {
                "config": {
                    "type": "ExistingCloud",
                    "auth_url": "http://example.net:5000/v2.0/",
                    "region_name": "RegionOne",
                    "endpoint_type": consts.EndpointType.INTERNAL,
                    "https_insecure": False,
                    "https_cacert": "cacert",
                    "admin": {
                        "username": "admin",
                        "password": "myadminpass",
                        "tenant_name": "demo"
                    }
                }
            },
            "v3": {
                "config": {
                    "type": "ExistingCloud",
                    "auth_url": "http://example.net:5000/v3/",
                    "region_name": "RegionOne",
                    "endpoint_type": consts.EndpointType.INTERNAL,
                    "https_insecure": False,
                    "https_cacert": "cacert",
                    "admin": {
                        "username": "admin",
                        "password": "myadminpass",
                        "domain_name": "domain",
                        "project_name": "demo",
                        "project_domain_name": "Default",
                        "user_domain_name": "Default",
                    }
                }
            }
        }

    @ddt.data("v2.0", "v3")
    def test_init_and_valid_config(self, keystone_version):
        engine = existing.ExistingCloud(self.deployments[keystone_version])
        engine.validate()

    @ddt.data("v2.0", "v3")
    def test_invalid_config(self, keystone_version):
        deployment = self.deployments[keystone_version]
        deployment["config"]["admin"] = 42
        engine = existing.ExistingCloud(deployment)
        self.assertRaises(jsonschema.ValidationError, engine.validate)

    @ddt.data("v2.0", "v3")
    def test_additional_vars(self, keystone_version):
        deployment = self.deployments[keystone_version]
        # "extra" may be an empty dict, a populated dict or even a list.
        deployment["extra"] = {}
        existing.ExistingCloud(deployment).validate()

        deployment["extra"] = {"some_var": "some_value"}
        existing.ExistingCloud(deployment).validate()

        deployment["extra"] = ["item1", "item2"]
        existing.ExistingCloud(deployment).validate()

    @ddt.data("v2.0", "v3")
    def test_deploy(self, keystone_version):
        deployment = self.deployments[keystone_version]
        engine = existing.ExistingCloud(deployment)
        credentials = engine.deploy()
        credentials = credentials["openstack"][0]
        admin_credential = deployment["config"].copy()
        admin_credential.pop("type")
        admin_credential["endpoint"] = None
        admin_credential.update(admin_credential.pop("admin"))
        admin_credential["permission"] = consts.EndpointPermission.ADMIN

        actual_credentials = credentials["admin"]

        if keystone_version == "v3":
            # NOTE(andreykurilin): credentials obj uses `tenant_name` for both
            #   keystone v2 and v3. It works perfectly for rally code (no
            #   contradictions and misunderstandings), but in case of checking
            #   credentials.to_dict with data from database (where we use
            #   project_name for keystone v3 config and tenant_name for
            #   keystone v2), we need to transform vars.
            admin_credential["tenant_name"] = admin_credential.pop(
                "project_name")
        else:
            # NOTE(andreykurilin): there are no domain related variables in v2,
            #   so we need to pop them from credentials.to_dict()
            actual_credentials.pop("domain_name")
            actual_credentials.pop("user_domain_name")
            actual_credentials.pop("project_domain_name")

        self.assertEqual(admin_credential, actual_credentials)
        self.assertEqual([], credentials["users"])

    @ddt.data("v2.0", "v3")
    def test_cleanup(self, keystone_version):
        # cleanup() is a no-op for an existing cloud; it must not raise.
        existing.ExistingCloud(self.deployments[keystone_version]).cleanup()

    @ddt.data("v2.0", "v3")
    def test_is_in_factory(self, keystone_version):
        name = self.deployments[keystone_version]["config"]["type"]
        engine = deploy_engine.Engine.get_engine(
            name, self.deployments[keystone_version])
        self.assertIsInstance(engine, existing.ExistingCloud)
"""Test for vm providers.""" import mock from rally.common import sshutils from rally.deployment.serverprovider import provider from rally import exceptions from tests.unit import test class ProviderMixIn(object): def create_servers(self, image_uuid=None, amount=1): pass def destroy_servers(self): pass class ProviderA(ProviderMixIn, provider.ProviderFactory): """Fake server provider. Used for tests. """ pass class ProviderB(ProviderMixIn, provider.ProviderFactory): """Fake server provider. Used for tests. """ pass class ProviderC(ProviderB): """Fake server provider. Used for tests. """ pass FAKE_PROVIDERS = [ProviderA, ProviderB, ProviderC] class ProviderFactoryTestCase(test.TestCase): @mock.patch.object(provider.ProviderFactory, "validate") def test_init(self, mock_validate): ProviderA(None, None) mock_validate.assert_called_once_with() def test_get_provider_not_found(self): self.assertRaises(exceptions.PluginNotFound, provider.ProviderFactory.get_provider, {"type": "fail"}, None) def test_vm_prvoider_factory_is_abstract(self): self.assertRaises(TypeError, provider.ProviderFactory) class ServerTestCase(test.TestCase): def setUp(self): super(ServerTestCase, self).setUp() self.vals = ["192.168.1.1", "admin", "some_key", "pwd"] self.keys = ["host", "user", "key", "password"] def test_init_server_dto(self): server = provider.Server(*self.vals) for k, v in dict(zip(self.keys, self.vals)).items(): self.assertEqual(getattr(server, k), v) self.assertIsInstance(server.ssh, sshutils.SSH) def test_credentials(self): server_one = provider.Server(*self.vals) creds = server_one.get_credentials() server_two = provider.Server.from_credentials(creds) for k in self.keys: self.assertEqual(getattr(server_one, k), getattr(server_two, k)) class ResourceManagerTestCase(test.TestCase): def setUp(self): super(ResourceManagerTestCase, self).setUp() self.deployment = mock.Mock() self.resources = provider.ResourceManager(self.deployment, "provider") def test_create(self): 
self.resources.create("info", type="type") self.deployment.add_resource.assert_called_once_with("provider", type="type", info="info") def test_get_all(self): self.resources.get_all(type="type") self.deployment.get_resources.assert_called_once_with( provider_name="provider", type="type") def test_delete(self): self.resources.delete("resource_id") self.deployment.delete_resource.assert_called_once_with("resource_id") rally-0.9.1/tests/unit/deployment/serverprovider/providers/0000775000567000056710000000000013073420067025406 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/deployment/serverprovider/providers/test_lxc.py0000664000567000056710000004160313073417716027620 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jsonschema import mock import netaddr from rally.deployment.serverprovider.providers import lxc from rally import exceptions from tests.unit import test MOD_NAME = "rally.deployment.serverprovider.providers.lxc." 
class HelperFunctionsTestCase(test.TestCase): @mock.patch(MOD_NAME + "open", create=True, return_value="fake_script") def test__get_script(self, mock_open): script = lxc._get_script("script.sh") self.assertEqual("fake_script", script) path = mock_open.mock_calls[0][1][0] mode = mock_open.mock_calls[0][1][1] self.assertTrue(path.endswith( "rally/deployment/serverprovider/providers/lxc/script.sh")) self.assertEqual("rb", mode) @mock.patch(MOD_NAME + "_get_script", return_value="fake_script") @mock.patch(MOD_NAME + "moves.StringIO") def test__get_script_from_template(self, mock_string_io, mock__get_script): mock__get_script.return_value = fake_script = mock.Mock() fake_script.read.return_value = "fake_data {k1} {k2}" mock_string_io.return_value = "fake_formatted_script" script = lxc._get_script_from_template("fake_tpl", k1="v1", k2="v2") self.assertEqual("fake_formatted_script", script) mock_string_io.assert_called_once_with("fake_data v1 v2") class LxcHostTestCase(test.TestCase): def setUp(self): super(LxcHostTestCase, self).setUp() sample_config = {"network": "10.1.1.0/24", "forward_ssh": True, "tunnel_to": ["1.1.1.1", "2.2.2.2"]} self.server = mock.Mock() self.server.host = "fake_server_ip" self.server.get_credentials.return_value = {"ip": "3.3.3.3"} self.host = lxc.LxcHost(self.server, sample_config) @mock.patch(MOD_NAME + "provider.Server") def test__get_updated_server(self, mock_server): server = self.host._get_updated_server(host="4.4.4.4") new_server = mock_server.from_credentials({"host": "4.4.4.4"}) self.assertEqual(new_server, server) def test_backingstore_btrfs(self): self.server.ssh.execute.return_value = [0, "", ""] self.assertEqual("btrfs", self.host.backingstore) self.assertEqual("btrfs", self.host.backingstore) # second call will return cached value self.assertEqual([mock.call.ssh.execute("df -t btrfs /var/lib/lxc/")], self.server.mock_calls) def test_backingstore_none(self): self.server.ssh.execute.return_value = [-1, "", ""] self.assertEqual("", 
self.host.backingstore) @mock.patch(MOD_NAME + "moves.StringIO") @mock.patch(MOD_NAME + "_get_script", return_value="fake_script") def test_prepare(self, mock__get_script, mock_string_io): mock_string_io.return_value = fake_conf = mock.Mock() self.host.create_local_tunnels = mock.Mock() self.host.create_remote_tunnels = mock.Mock() self.host.prepare() write_calls = [ mock.call("LXC_DHCP_MAX=\"253\"\n"), mock.call("LXC_NETMASK=\"255.255.255.0\"\n"), mock.call("LXC_ADDR=\"10.1.1.1\"\n"), mock.call("LXC_DHCP_RANGE=\"10.1.1.2,10.1.1.254\"\n"), mock.call("LXC_NETWORK=\"10.1.1.0/24\"\n"), mock.call("LXC_BRIDGE=\"lxcbr0\"\n"), mock.call("USE_LXC_BRIDGE=\"true\"\n") ] fake_conf.write.assert_has_calls(write_calls, any_order=True) ssh_calls = [mock.call.run("cat > /tmp/.lxc_default", stdin=fake_conf), mock.call.run("/bin/sh", stdin="fake_script")] self.assertEqual(ssh_calls, self.server.ssh.mock_calls) self.host.create_local_tunnels.assert_called_once_with() self.host.create_remote_tunnels.assert_called_once_with() @mock.patch(MOD_NAME + "os.unlink") @mock.patch(MOD_NAME + "_get_script_from_template") def test_create_local_tunnels(self, mock__get_script_from_template, mock_unlink): mock__get_script_from_template.side_effect = ["s1", "s2"] self.host.create_local_tunnels() getscript_calls = [ mock.call("tunnel-local.sh", local="fake_server_ip", net=netaddr.IPNetwork("10.1.1.0/24"), remote="1.1.1.1"), mock.call("tunnel-local.sh", local="fake_server_ip", net=netaddr.IPNetwork("10.1.1.0/24"), remote="2.2.2.2"), ] self.assertEqual( getscript_calls, mock__get_script_from_template.mock_calls) self.assertEqual([mock.call("/bin/sh", stdin="s1"), mock.call("/bin/sh", stdin="s2")], self.server.ssh.run.mock_calls) @mock.patch(MOD_NAME + "_get_script_from_template") def test_create_remote_tunnels(self, mock__get_script_from_template): mock__get_script_from_template.side_effect = ["s1", "s2"] fake_server = mock.Mock() self.host._get_updated_server = mock.Mock(return_value=fake_server) 
self.host.create_remote_tunnels() self.assertEqual([mock.call("/bin/sh", stdin="s1"), mock.call("/bin/sh", stdin="s2")], fake_server.ssh.run.mock_calls) def test_delete_tunnels(self): s1 = mock.Mock() s2 = mock.Mock() self.host._get_updated_server = mock.Mock(side_effect=[s1, s2]) self.host.delete_tunnels() s1.ssh.execute.assert_called_once_with("ip tun del t10.1.1.0") s2.ssh.execute.assert_called_once_with("ip tun del t10.1.1.0") self.assertEqual([mock.call("ip tun del t1.1.1.1"), mock.call("ip tun del t2.2.2.2")], self.server.ssh.execute.mock_calls) @mock.patch(MOD_NAME + "time.sleep") def test_get_ip(self, mock_sleep): s1 = "link/ether fe:54:00:d3:f5:98 brd ff:ff:ff:ff:ff:ff" s2 = s1 + "\n inet 10.20.0.1/24 scope global br1" self.host.server.ssh.execute.side_effect = [(0, s1, ""), (0, s2, "")] ip = self.host.get_ip("name") self.assertEqual("10.20.0.1", ip) self.assertEqual([mock.call("lxc-attach -n name ip" " addr list dev eth0")] * 2, self.host.server.ssh.execute.mock_calls) def test_create_container(self): self.host.configure_container = mock.Mock() self.host._backingstore = "btrfs" self.host.create_container("name", "dist") self.server.ssh.run.assert_called_once_with( "lxc-create -B btrfs -n name -t dist") self.assertEqual(["name"], self.host.containers) self.host.configure_container.assert_called_once_with("name") # check with no btrfs self.host._backingstore = "" self.host.create_container("name", "dist") self.assertEqual(mock.call("lxc-create -n name -t dist"), self.server.ssh.run.mock_calls[1]) # check release self.host.create_container("name", "ubuntu", "raring") self.host.create_container("name", "debian", "woody") expected = [mock.call("lxc-create -n name -t ubuntu -- -r raring"), mock.call("SUITE=woody lxc-create -n name -t debian")] self.assertEqual(expected, self.server.ssh.run.mock_calls[2:]) def test_create_clone(self): self.host._backingstore = "btrfs" self.host.configure_container = mock.Mock() self.host.create_clone("name", "src") 
self.server.ssh.execute.assert_called_once_with("lxc-clone --snapshot" " -o src -n name") self.assertEqual(["name"], self.host.containers) # check with no btrfs self.host._backingstore = "" self.host.create_clone("name", "src") self.assertEqual(mock.call("lxc-clone -o src -n name"), self.server.ssh.execute.mock_calls[1]) @mock.patch(MOD_NAME + "os.path.join") @mock.patch(MOD_NAME + "_get_script") def test_configure_container(self, mock__get_script, mock_join): mock__get_script.return_value = "fake_script" mock_join.return_value = "fake_path" self.server.ssh.execute.return_value = 0, "", "" self.host.configure_container("name") self.server.ssh.run.assert_called_once_with( "/bin/sh -e -s fake_path", stdin="fake_script") def test_start_containers(self): self.host.containers = ["c1", "c2"] self.host.start_containers() calls = [mock.call("lxc-start -d -n c1"), mock.call("lxc-start -d -n c2")] self.assertEqual(calls, self.server.ssh.run.mock_calls) def test_stop_containers(self): self.host.containers = ["c1", "c2"] self.host.stop_containers() calls = [ mock.call("lxc-stop -n c1"), mock.call("lxc-stop -n c2"), ] self.assertEqual(calls, self.server.ssh.run.mock_calls) def test_destroy_containers(self): self.host.containers = ["c1", "c2"] self.host.destroy_containers() calls = [ mock.call("lxc-stop -n c1"), mock.call("lxc-destroy -n c1"), mock.call("lxc-stop -n c2"), mock.call("lxc-destroy -n c2"), ] self.assertEqual(calls, self.server.ssh.run.mock_calls) def test_get_server_object(self): fake_server = mock.Mock() self.host.get_ip = mock.Mock(return_value="ip") self.host.get_port = mock.Mock(return_value=42) self.host._get_updated_server = mock.Mock(return_value=fake_server) so = self.host.get_server_object("c1", wait=False) self.assertEqual(fake_server, so) self.host.get_port.assert_called_once_with("ip") self.host._get_updated_server.assert_called_once_with(port=42) self.assertFalse(fake_server.ssh.wait.mock_calls) so = self.host.get_server_object("c1", wait=True) 
fake_server.ssh.wait.assert_called_once_with(timeout=300) @mock.patch(MOD_NAME + "LxcHost.get_server_object") def test_get_server_objects(self, mock_get_server_object): mock_get_server_object.side_effect = ["s1", "s2"] self.host.containers = ["c1", "c2"] retval = list(self.host.get_server_objects(wait="wait")) self.assertEqual(["s1", "s2"], retval) self.assertEqual([mock.call("c1", "wait"), mock.call("c2", "wait")], mock_get_server_object.mock_calls) class LxcProviderTestCase(test.TestCase): def setUp(self): super(LxcProviderTestCase, self).setUp() self.config = { "type": "LxcProvider", "distribution": "ubuntu", "start_lxc_network": "10.1.1.0/29", "containers_per_host": 2, "tunnel_to": ["10.10.10.10", "20.20.20.20"], "container_name_prefix": "rally-lxc", "host_provider": { "name": "ExistingServers", "credentials": [{"user": "root", "host": "host1.net"}, {"user": "root", "host": "host2.net"}]} } self.deployment = {"uuid": "fake_uuid"} self.provider = lxc.LxcProvider(self.deployment, self.config) def test_validate(self): self.provider.validate() def test_validate_invalid_tunnel(self): config = self.config.copy() config["tunnel_to"] = "ok" self.assertRaises(jsonschema.ValidationError, lxc.LxcProvider, self.deployment, config) def test_validate_required_field(self): config = self.config.copy() del(config["host_provider"]) self.assertRaises(jsonschema.ValidationError, lxc.LxcProvider, self.deployment, config) def test_validate_too_small_network(self): config = self.config.copy() config["containers_per_host"] = 42 self.assertRaises(exceptions.InvalidConfigException, lxc.LxcProvider, self.deployment, config) @mock.patch(MOD_NAME + "LxcHost") @mock.patch(MOD_NAME + "provider.ProviderFactory.get_provider") def test_create_servers(self, mock_provider_factory_get_provider, mock_lxc_host): fake_provider = mock.Mock() fake_provider.create_servers.return_value = ["server1", "server2"] fake_hosts = [] fake_sos = [] for i in (1, 2): fake_host_sos = [mock.Mock(), mock.Mock()] 
fake_sos.extend(fake_host_sos) fake_host = mock.Mock() fake_host.containers = ["c-%d-1" % i, "c-%d-2" % i] fake_host._port_cache = {1: i, 2: i} fake_host.config = {"network": "fake-%d" % i} fake_host.server.get_credentials.return_value = {"ip": "f%d" % i} fake_host.get_server_objects.return_value = fake_host_sos fake_hosts.append(fake_host) mock_lxc_host.side_effect = fake_hosts mock_provider_factory_get_provider.return_value = fake_provider fake_info = [ {"host": {"ip": "f1"}, "config": {"network": "fake-1"}, "forwarded_ports": [(1, 1), (2, 1)], "container_names": ["c-1-1", "c-1-2"]}, {"host": {"ip": "f2"}, "config": {"network": "fake-2"}, "forwarded_ports": [(1, 2), (2, 2)], "container_names": ["c-2-1", "c-2-2"]}] def res_create(actual_info): expected_info = fake_info.pop(0) self.assertEqual(expected_info["host"], actual_info["host"]) self.assertEqual(expected_info["config"], actual_info["config"]) self.assertEqual(expected_info["container_names"], actual_info["container_names"]) self.assertSequenceEqual(expected_info["forwarded_ports"], actual_info["forwarded_ports"]) fake_res = mock.MagicMock() fake_res.create = res_create with mock.patch.object(self.provider, "resources", fake_res): servers = self.provider.create_servers() self.assertEqual(fake_sos, servers) host1_calls = [ mock.call.prepare(), mock.call.create_container("rally-lxc-000-10-1-1-0", "ubuntu", None), mock.call.create_clone("rally-lxc-001-10-1-1-0", "rally-lxc-000-10-1-1-0"), mock.call.start_containers(), mock.call.get_server_objects(), mock.call.server.get_credentials(), ] host2_calls = [ mock.call.prepare(), mock.call.create_container("rally-lxc-000-10-1-1-8", "ubuntu", None), mock.call.create_clone("rally-lxc-001-10-1-1-8", "rally-lxc-000-10-1-1-8"), mock.call.start_containers(), mock.call.get_server_objects(), mock.call.server.get_credentials(), ] self.assertEqual(host1_calls, fake_hosts[0].mock_calls) self.assertEqual(host2_calls, fake_hosts[1].mock_calls) @mock.patch(MOD_NAME + "LxcHost") 
@mock.patch(MOD_NAME + "provider.Server.from_credentials") def test_destroy_servers(self, mock_server_from_credentials, mock_lxc_host): fake_resource = {"info": {"config": "fake_config", "host": "fake_credentials", "forwarded_ports": [1, 2], "container_names": ["n1", "n2"]}} fake_resource["id"] = "fake_res_id" fake_host = mock.Mock() mock_server_from_credentials.return_value = "fake_server" mock_lxc_host.return_value = fake_host self.provider.resources = mock.Mock() self.provider.resources.get_all.return_value = [fake_resource] with mock.patch.object(self.provider, "get_host_provider") as ghp: ghp.return_value = fake_host_provider = mock.Mock() self.provider.destroy_servers() mock_lxc_host.assert_called_once_with("fake_server", "fake_config") host_calls = [mock.call.destroy_containers(), mock.call.destroy_ports([1, 2]), mock.call.delete_tunnels()] self.assertEqual(host_calls, fake_host.mock_calls) self.provider.resources.delete.assert_called_once_with("fake_res_id") fake_host_provider.destroy_servers.assert_called_once_with() rally-0.9.1/tests/unit/deployment/serverprovider/providers/__init__.py0000664000567000056710000000000013073417716027514 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/deployment/serverprovider/providers/test_virsh.py0000664000567000056710000001312013073417716030156 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import jsonschema import mock import netaddr from oslotest import mockpatch from rally.deployment.serverprovider.providers import virsh from tests.unit import test class VirshProviderTestCase(test.TestCase): def setUp(self): super(VirshProviderTestCase, self).setUp() self.deployment = mock.Mock() self.config = { "type": "VirshProvider", "connection": "user@host", "template_name": "prefix", "template_user": "user", "template_password": "password", } self.provider = virsh.VirshProvider(self.deployment, self.config) self.useFixture(mockpatch.PatchObject(self.provider, "resources")) @mock.patch( "rally.deployment.serverprovider.providers.virsh.netaddr.IPAddress") @mock.patch("rally.deployment.serverprovider.providers.virsh.subprocess") @mock.patch("time.sleep") def test_create_vm(self, mock_sleep, mock_subprocess, mock_ip_address): mock_subprocess.check_output.return_value = "10.0.0.1" mock_ip_address.return_value = "10.0.0.2" server = self.provider.create_vm("name") script_path = ("%s/virsh/get_domain_ip.sh" % os.path.split(virsh.__file__)[0]) mock_subprocess.assert_has_calls([ mock.call.check_call( ["virt-clone", "--connect=qemu+ssh://user@host/system", "-o", "prefix", "-n", "name", "--auto-clone"]), mock.call.check_call( ["virsh", "--connect=qemu+ssh://user@host/system", "start", "name"]), mock.call.check_call( ["scp", "-o StrictHostKeyChecking=no", script_path, "user@host:~/get_domain_ip.sh"]), mock.call.check_output(["ssh", "-o StrictHostKeyChecking=no", "user@host", "./get_domain_ip.sh", "name"]), ]) mock_ip_address.assert_called_once_with("10.0.0.1") self.assertEqual(server.host, "10.0.0.2") self.assertEqual(server.user, "user") self.assertIsNone(server.key) self.assertEqual(server.password, "password") self.provider.resources.create.assert_called_once_with({ "name": "name", }) @mock.patch( "rally.deployment.serverprovider.providers.virsh.netaddr.IPAddress") @mock.patch("rally.deployment.serverprovider.providers.virsh.subprocess") 
@mock.patch("time.sleep") def test_create_vm_ip_failed(self, mock_sleep, mock_subprocess, mock_ip_address): mock_ip_address.side_effect = netaddr.core.AddrFormatError server = self.provider.create_vm("name") mock_subprocess.assert_has_calls(3 * [ mock.call.check_output(["ssh", "-o StrictHostKeyChecking=no", "user@host", "./get_domain_ip.sh", "name"]), ]) self.assertEqual(server.host, "None") @mock.patch("rally.deployment.serverprovider.providers.virsh.subprocess") def test_destroy_vm(self, mock_subprocess): self.provider.destroy_vm("uuid") mock_subprocess.assert_has_calls([ mock.call.check_call( ["virsh", "--connect=qemu+ssh://user@host/system", "destroy", "uuid"]), mock.call.check_call( ["virsh", "--connect=qemu+ssh://user@host/system", "undefine", "uuid", "--remove-all-storage"]), ]) @mock.patch("rally.deployment.serverprovider.providers.virsh.uuid") @mock.patch.object(virsh.VirshProvider, "create_vm") def test_create_servers(self, mock_create_vm, mock_uuid): mock_uuid.uuid4.side_effect = ["1", "2", "3"] mock_create_vm.side_effect = ["s1", "s2", "s3"] servers = self.provider.create_servers(amount=3) self.assertEqual(servers, ["s1", "s2", "s3"]) mock_create_vm.assert_has_calls([ mock.call("1"), mock.call("2"), mock.call("3"), ]) @mock.patch.object(virsh.VirshProvider, "destroy_vm") def test_destroy_servers(self, mock_destroy_vm): self.provider.resources.get_all.return_value = [ {"info": {"name": "1"}}, {"info": {"name": "2"}}, {"info": {"name": "3"}}, ] self.provider.destroy_servers() mock_destroy_vm.assert_has_calls([ mock.call("1"), mock.call("2"), mock.call("3"), ]) self.provider.resources.get_all.assert_called_once_with() def test_invalid_config(self): self.config["type"] = 42 self.assertRaises(jsonschema.ValidationError, virsh.VirshProvider, self.deployment, self.config) def test_invalid_connection(self): self.config["connection"] = "user host" self.assertRaises(jsonschema.ValidationError, virsh.VirshProvider, self.deployment, self.config) 
rally-0.9.1/tests/unit/deployment/serverprovider/providers/test_cobbler.py0000664000567000056710000001000313073417716030430 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.deployment.serverprovider.providers import cobbler from tests.unit import test class TestCobblerProvider(test.TestCase): def setUp(self): self.config = {"type": "CobblerProvider", "host": "h1", "user": "u1", "password": "p1", "system_password": "p2", "selector": {"profile": "p1", "owners": "o1"}} self.rendered = {"ip_address_eth3": "", "ip_address_eth1": "1.1.1.1", "power_user": "fake_root", "redhat_management_key": "fake_key", "name": "fake_name"} self.system_names = ["s1", "s2"] self.token = "token" self.handle = "handle" super(TestCobblerProvider, self).setUp() def create_mocks(self, mock_server, is_no_ip, provider): mock_server.find_system = mock.Mock(return_value=self.system_names) mock_server.login = mock.Mock(return_value=self.token) mock_server.get_system_handle = mock.Mock(return_value=self.handle) mock_server.power_system = mock.Mock() if is_no_ip: self.rendered["ip_address_eth1"] = "" mock_server.get_system_as_rendered = mock.Mock( return_value=self.rendered) provider.cobbler = mock_server @mock.patch("six.moves.xmlrpc_client.Server") def test_create_servers(self, mock_server): provider = cobbler.CobblerProvider(config=self.config, deployment=None) mock_server.assert_called_once_with(uri="http://h1/cobbler_api") 
self.create_mocks(mock_server=mock_server, is_no_ip=False, provider=provider) credentials = provider.create_servers() mock_server.find_system.assert_called_once_with( self.config["selector"]) mock_server.login.assert_called_with(self.config["user"], self.config["password"]) mock_server.login.call_count = len(self.system_names) mock_server.power_system.assert_called_with(self.handle, "reboot", self.token) self.assertEqual(["1.1.1.1"] * 2, [s.host for s in credentials]) self.assertEqual(["fake_root"] * 2, [s.user for s in credentials]) self.assertEqual(["p2"] * 2, [s.password for s in credentials]) self.assertEqual(["fake_key"] * 2, [s.key for s in credentials]) self.assertEqual([22] * 2, [s.port for s in credentials]) @mock.patch("six.moves.xmlrpc_client.Server") def test_create_servers_when_selects_nothing(self, mock_server): provider = cobbler.CobblerProvider(config=self.config, deployment=None) mock_server.find_system = mock.Mock(return_value=[]) provider.cobbler = mock_server self.assertRaisesRegexp(RuntimeError, "No associated systems selected by {.*}$", provider.create_servers) @mock.patch("six.moves.xmlrpc_client.Server") def test_create_servers_when_no_ip_found(self, mock_server): provider = cobbler.CobblerProvider(config=self.config, deployment=None) self.create_mocks(mock_server=mock_server, is_no_ip=True, provider=provider) self.assertRaisesRegexp(RuntimeError, "No valid ip address found for system '.*'$", provider.create_servers) rally-0.9.1/tests/unit/deployment/serverprovider/providers/test_existing.py0000664000567000056710000000356613073417716030672 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import jsonschema from rally.deployment.serverprovider import provider from rally.deployment.serverprovider.providers import existing from tests.unit import test class ExistingServersTestCase(test.TestCase): def setUp(self): super(ExistingServersTestCase, self).setUp() self.config = {"type": "ExistingServers", "credentials": [{"user": "user", "host": "host1"}, {"user": "user", "host": "host2"}]} def test_create_servers(self): _provider = provider.ProviderFactory.get_provider(self.config, None) credentials = _provider.create_servers() self.assertEqual(["host1", "host2"], [s.host for s in credentials]) self.assertEqual(["user", "user"], [s.user for s in credentials]) def test_invalid_config(self): self.config["type"] = 42 self.assertRaises(jsonschema.ValidationError, existing.ExistingServers, None, self.config) def test_invalid_credentials(self): self.config["credentials"] = ["user@host1", "user@host2"] self.assertRaises(jsonschema.ValidationError, existing.ExistingServers, None, self.config) rally-0.9.1/tests/unit/deployment/serverprovider/providers/test_openstack.py0000664000567000056710000002666013073417720031022 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for OpenStack VM provider.""" import textwrap import jsonschema import mock from oslotest import mockpatch from rally.deployment.serverprovider.providers import openstack as provider from rally import exceptions from tests.unit import fakes from tests.unit import test MOD_NAME = "rally.deployment.serverprovider.providers.openstack" OSProvider = provider.OpenStackProvider class FakeOSClients(object): def nova(self): return "nova" def glance(self): return "glance" class OpenStackProviderTestCase(test.TestCase): def setUp(self): super(OpenStackProviderTestCase, self).setUp() self.useFixture(mockpatch.Patch( "rally.deployment.serverprovider.provider.ResourceManager")) def _get_valid_config(self): return { "image": { "url": "http://example.net/img.qcow2", "format": "qcow2", "name": "Image", "checksum": "0123456789abcdef", }, "deployment_name": "rally-dep-1", "auth_url": "urlto", "user": "name", "password": "mypass", "tenant": "tenant", "flavor_id": "22"} def _init_mock_clients(self): self.clients = mock.MagicMock() self.image = mock.MagicMock() self.image.checksum = "0123456789abcdef" self.image.get = mock.MagicMock(return_value=self.image) self.image.id = "fake-uuid" self.glance_client = mock.Mock(return_value=self.image) self.glance_client.images.create = mock.Mock(return_value=self.image) self.glance_client.images.list = mock.Mock(return_value=[self.image]) self.clients.glance = mock.Mock(return_value=self.glance_client) self.instance = mock.MagicMock() self.instance.status = "ACTIVE" self.nova_client = mock.MagicMock() self.nova_client.servers.create = 
mock.MagicMock( return_value=self.instance) self.clients.nova = mock.MagicMock(return_value=self.nova_client) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_init(self, mock_osclients): cfg = self._get_valid_config() mock_osclients.Clients = mock.MagicMock(return_value=FakeOSClients()) os_provider = OSProvider(mock.MagicMock(), cfg) self.assertEqual("nova", os_provider.nova) self.assertEqual("glance", os_provider.glance) @mock.patch("rally.osclients.Clients") def test_init_no_glance(self, mock_clients): mock_clients.return_value.glance.side_effect = KeyError("image") cfg = self._get_valid_config() provider = OSProvider(mock.MagicMock(), cfg) self.assertIsNone(provider.glance) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_init_with_invalid_conf_no_user(self, mock_osclients): cfg = self._get_valid_config() cfg.pop("user") self.assertRaises(jsonschema.ValidationError, OSProvider, mock.MagicMock(), cfg) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_init_with_invalid_conf_no_url(self, mock_osclients): cfg = self._get_valid_config() del cfg["image"]["url"] del cfg["image"]["checksum"] self.assertRaises(jsonschema.ValidationError, OSProvider, mock.MagicMock(), cfg) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_init_with_invalid_conf_extra_key( self, mock_osclients): cfg = self._get_valid_config() cfg["aaaaa"] = "bbbbb" self.assertRaises(jsonschema.ValidationError, OSProvider, mock.MagicMock(), cfg) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_init_with_invalid_conf_flavor_(self, mock_osclients): cfg = self._get_valid_config()["user"] = 1111 self.assertRaises(jsonschema.ValidationError, OSProvider, mock.MagicMock(), cfg) @mock.patch( 
"rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_with_valid_config(self, mock_osclients): cfg = self._get_valid_config() OSProvider(mock.MagicMock(), cfg) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_with_valid_config_uuid(self, mock_osclients): cfg = self._get_valid_config() cfg["image"] = dict(uuid="289D7A51-1A0C-43C4-800D-706EA8A3CDF3") OSProvider(mock.MagicMock(), cfg) @mock.patch( "rally.deployment.serverprovider.providers.openstack.osclients") def test_openstack_provider_with_valid_config_checksum(self, mock_osclients): cfg = self._get_valid_config() cfg["image"] = dict(checksum="checksum") OSProvider(mock.MagicMock(), cfg) def test_cloud_init_success_notready(self): fake_server = mock.Mock() fake_server.ssh.execute.return_value = (1, "", "") # Not ready yet -> False self.assertFalse(provider._cloud_init_success(fake_server)) def test_cloud_init_success_completed(self): fake_server = mock.Mock() result_json_text = textwrap.dedent(""" { "v1": { "errors": [], "datasource": "DataSourceFoo" } } """) fake_server.ssh.execute.return_value = (0, result_json_text, "") # Completed (with no errors) -> True self.assertTrue(provider._cloud_init_success(fake_server)) def test_cloud_init_success_errors(self): fake_server = mock.Mock() result_json_text = textwrap.dedent(""" { "v1": { "errors": ["omg!"], "datasource": "DataSourceFoo" } } """) fake_server.ssh.execute.return_value = (0, result_json_text, "") # Completed with errors -> Exception self.assertRaises(RuntimeError, provider._cloud_init_success, fake_server) @mock.patch("time.sleep") @mock.patch(MOD_NAME + ".provider.Server") @mock.patch(MOD_NAME + ".osclients") @mock.patch(MOD_NAME + ".utils") def test_create_servers(self, mock_utils, mock_osclients, mock_server, mock_sleep): fake_keypair = mock.Mock() fake_keypair.name = "fake_key_name" provider = OSProvider(mock.Mock(), self._get_valid_config()) provider.sg = 
mock.Mock(id="33") provider.config["secgroup_name"] = "some_sg" provider.nova = mock.Mock() provider.get_image_uuid = mock.Mock(return_value="fake_image_uuid") provider.get_userdata = mock.Mock(return_value="fake_userdata") provider.get_nics = mock.Mock(return_value="fake_nics") provider.create_keypair = mock.Mock(return_value=(fake_keypair, "fake_path")) mock_utils.wait_for = lambda x, **kw: x mock_server.return_value = fake_server = mock.Mock() provider.nova.servers.create.return_value = fake_instance = mock.Mock() fake_instance.accessIPv4 = None fake_instance.accessIPv6 = None fake_instance.addresses = {"private": [{"addr": "1.2.3.4"}]} servers = provider.create_servers() provider.nova.security_groups.create.assert_called_once_with( provider.config["secgroup_name"], provider.config["secgroup_name"]) mock_server.assert_called_once_with(host="1.2.3.4", user="root", key="fake_path") self.assertEqual([fake_server], servers) fake_server.ssh.wait.assert_called_once_with(interval=5, timeout=120) provider.nova.servers.create.assert_called_once_with( "rally-dep-1-0", "fake_image_uuid", "22", userdata="fake_userdata", nics="fake_nics", key_name="fake_key_name", config_drive=False, security_groups=[provider.sg.name]) @mock.patch(MOD_NAME + ".osclients") def test_get_image_found_by_checksum(self, mock_osclients): self._init_mock_clients() mock_osclients.Clients = mock.MagicMock(return_value=self.clients) prov = OSProvider(mock.MagicMock(), self._get_valid_config()) image_uuid = prov.get_image_uuid() self.assertEqual(image_uuid, "fake-uuid") @mock.patch(MOD_NAME + ".osclients") def test_get_image_download(self, mock_osclients): self._init_mock_clients() self.glance_client.images.list = mock.Mock(return_value=[]) mock_osclients.Clients = mock.MagicMock(return_value=self.clients) prov = OSProvider(mock.MagicMock(), self._get_valid_config()) image_uuid = prov.get_image_uuid() self.assertEqual(image_uuid, "fake-uuid") @mock.patch(MOD_NAME + ".osclients") def 
test_get_image_no_glance_exception( self, mock_osclients): prov = OSProvider(mock.MagicMock(), self._get_valid_config()) prov.glance = None self.assertRaises(exceptions.InvalidConfigException, prov.get_image_uuid) @mock.patch(MOD_NAME + ".osclients") def test_get_image_from_uuid_no_glance(self, mock_osclients): conf = self._get_valid_config() conf["image"]["uuid"] = "EC7A1DB7-C5BD-49A2-8066-613809CB22F5" prov = OSProvider(mock.MagicMock(), conf) prov.glance = True self.assertEqual(conf["image"]["uuid"], prov.get_image_uuid()) @mock.patch(MOD_NAME + ".osclients") def test_destroy_servers(self, mock_osclients): prov = OSProvider(mock.MagicMock(), self._get_valid_config()) prov.resources.get_all.side_effect = [ [fakes.FakeResource( id=1, items={"info": {"id": "35FC0503-FED6-419F-B6EE-B704198CE642"}} )], [fakes.FakeResource( id=2, items={"info": {"id": "keypair_name"}} )], ] prov.destroy_servers() prov.resources.get_all.assert_has_calls([ mock.call(type="server"), mock.call(type="keypair"), ]) prov.nova.servers.delete.assert_called_once_with( "35FC0503-FED6-419F-B6EE-B704198CE642") prov.nova.keypairs.delete.assert_called_once_with("keypair_name") prov.resources.delete.assert_has_calls([ mock.call(1), mock.call(2), ]) rally-0.9.1/tests/unit/deployment/test_multihost.py0000664000567000056710000001055713073417716023770 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import consts from rally.deployment import engine from tests.unit import fakes from tests.unit import test MOD = "rally.deployment.engines.multihost." class MultihostEngineTestCase(test.TestCase): def setUp(self): super(MultihostEngineTestCase, self).setUp() self.config = { "type": "MultihostEngine", "controller": { "type": "DummyEngine", "endpoint": {"auth_url": "http://h1.net"} }, "nodes": [ { "type": "DummyEngine", "endpoint": {"auth_url": "endpoint1"}, }, { "type": "DummyEngine", "endpoint": {"auth_url": "endpoint2", "cnt": "{controller_ip}"} } ] } self.deployment = fakes.FakeDeployment( uuid="905b2f16-6453-4b86-8ba5-6d32025fcfa6", config=self.config, ) self.engine = engine.Engine.get_engine("MultihostEngine", self.deployment) def test_config(self): self.assertEqual(self.deployment["config"], self.engine.config) @mock.patch(MOD + "objects.Deployment") @mock.patch(MOD + "engine.Engine") def test__deploy_node(self, mock_engine, mock_deployment): fake_credential = mock.Mock() fake_deployment = mock.Mock() fake_engine = mock.Mock() fake_engine.__enter__ = mock.Mock() fake_engine.__exit__ = mock.Mock() fake_engine.make_deploy = mock.Mock(return_value=fake_credential) mock_deployment.return_value = fake_deployment mock_engine.get_engine = mock.Mock(return_value=fake_engine) engine, credential = self.engine._deploy_node(self.config["nodes"][0]) self.assertEqual(fake_engine, engine) self.assertEqual(fake_credential, credential) mock_deployment.assert_called_once_with( config=self.config["nodes"][0], parent_uuid=self.deployment["uuid"]) fake_engine.__enter__.assert_called_once_with() fake_engine.__exit__.assert_called_once_with(None, None, None) def test__update_controller_ip(self): self.engine.controller_ip = "1.2.3.4" self.engine._update_controller_ip(self.config) expected = {"auth_url": "endpoint2", "cnt": "1.2.3.4"} self.assertEqual(expected, self.config["nodes"][1]["endpoint"]) @mock.patch(MOD + "MultihostEngine._deploy_node") @mock.patch(MOD 
+ "MultihostEngine._update_controller_ip") def test_deploy(self, mock__update_controller_ip, mock__deploy_node): fake_credentials = [mock.Mock()] fake_credentials[0].auth_url = "http://h1.net" mock__deploy_node.return_value = [mock.Mock(), fake_credentials] credentials = self.engine.deploy() self.assertEqual(self.engine.controller_ip, "h1.net") self.assertEqual(fake_credentials, credentials) expected = [ mock.call(self.config["nodes"][0]), mock.call(self.config["nodes"][1]), ] self.assertEqual(expected, mock__update_controller_ip.mock_calls) self.deployment.update_status.assert_called_once_with( consts._DeployStatus.DEPLOY_SUBDEPLOY) @mock.patch("rally.api") @mock.patch(MOD + "db") def test_cleanup(self, mock_db, mock_api): mock_db.deployment_list.return_value = [{"uuid": "uuid1"}, {"uuid": "uuid2"}] self.engine.cleanup() api_calls = [ mock.call.Deployment.destroy("uuid1"), mock.call.Deployment.destroy("uuid2"), ] self.assertEqual(api_calls, mock_api.mock_calls) rally-0.9.1/tests/unit/test_test_mock.py0000664000567000056710000003342513073417717021550 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ast

import mock

from tests.unit import test
from tests.unit import test_mock


class VariantsTestCase(test.TestCase):
    """Tests for test_mock.Variants, the set of acceptable mock arg names."""

    def setUp(self):
        self.variants = test_mock.Variants(["test", "foo", "bar"])
        super(VariantsTestCase, self).setUp()

    def test_print(self):
        # repr() prefixes every variant with "mock_".
        self.assertEqual(
            "{'mock_test', 'mock_foo', 'mock_bar'}",
            repr(self.variants)
        )

    def test_print_long(self):
        # More than three variants are elided with "...".
        variants = test_mock.Variants(["test", "foo", "bar", "buz"])
        self.assertEqual(
            "{'mock_test', 'mock_foo', 'mock_bar', ...}",
            repr(variants)
        )

    def test_equal(self):
        # Equality compares against plain lists and against objects that
        # expose a ``variants`` attribute.
        self.assertEqual(["test", "foo", "bar"], self.variants)
        self.assertEqual(self.variants, self.variants)
        mock_variants = mock.Mock(variants=["test", "foo", "bar"])
        self.assertEqual(mock_variants, self.variants)
        self.assertNotEqual(["abc"], self.variants)

    def test_contains(self):
        self.assertIn("test", self.variants)
        self.assertNotIn("abc", self.variants)


class FuncMockArgsDecoratorsCheckerTestCase(test.TestCase):
    """Tests for the AST visitor that matches mock.patch decorators
    against the mock_* argument names of decorated test functions.
    """

    # Fixture source parsed by the visitor under test.  NOTE(review): the
    # archive dump destroyed the literal's internal line breaks; this layout
    # is reconstructed so the decorated function starts on line 2, which the
    # ``lineno == 2`` assertions below require.
    code = """
@mock.patch("os.path.join")
@mock.patch("pkg.module1.OtherObject.method")
@mock.patch("pkg.module2.MyClassObject.method")
@mock.patch.object(pkg.SomeKindOfObject, "abc")
@mock.patch.object(pkg.MyClassObject, "abc")
def test_func(self, mock_args, mock_args2, mock_some_longer_args):
    pass
"""
    # Expected acceptable-name variants for each decorator above, innermost
    # (bottom) decorator first -- matching mock.patch argument order.
    code_mock_decorators = [
        ["abc", "my_class_object_abc", "pkg_my_class_object_abc"],
        ["some_kind_of_object_abc", "pkg_some_kind_of_object_abc"],
        [
            "method", "my_class_object_method",
            "module2_my_class_object_method",
            "pkg_module2_my_class_object_method"
        ],
        [
            "other_object_method",
            "module1_other_object_method",
            "pkg_module1_other_object_method",
        ],
        [
            "join", "path_join", "os_path_join"
        ],
    ]
    # The mock_* argument names of test_func, with the "mock_" prefix
    # stripped.
    code_mock_args = ["args", "args2", "some_longer_args"]

    def setUp(self):
        super(FuncMockArgsDecoratorsCheckerTestCase, self).setUp()
        self.visitor = test_mock.FuncMockArgsDecoratorsChecker()
        self.visitor.classname_python = ""
        # Pre-seed a module-level "global" so _get_value can resolve EXPR.
        self.visitor.globals_["EXPR"] = "expression"
        self.tree = self._parse_expr(self.code)

    def _parse_expr(self, code):
        """Parse *code* and return its first statement (unwrapped if Expr)."""
        firstbody = ast.parse(code).body[0]
        if isinstance(firstbody, ast.Expr):
            return firstbody.value
        return firstbody

    def test__get_name(self):
        # Dotted attribute access is rendered back to a dotted string.
        self.assertEqual(
            "os.path.join",
            self.visitor._get_name(self._parse_expr("os.path.join"))
        )

    def test__get_value_str(self):
        self.assertEqual(
            "not.your.fault",
            self.visitor._get_value(self._parse_expr("'not.your.fault'"))
        )

    def test__get_value_mod(self):
        # '%' formatting with a known global is evaluated statically.
        self.assertEqual(
            "some.crazy.mod.expression",
            self.visitor._get_value(
                self._parse_expr("'some.crazy.mod.%s' % EXPR")
            )
        )

    def test__get_value_add(self):
        # String concatenation with a known global is evaluated statically.
        self.assertEqual(
            "expression.some.crazy.add",
            self.visitor._get_value(
                self._parse_expr("EXPR + '.some.crazy.add'")
            )
        )

    def test__get_value_global(self):
        self.assertEqual(
            "expression",
            self.visitor._get_value(
                self._parse_expr("EXPR")
            )
        )

    def test__get_value_none(self):
        # Nodes that cannot represent a string value raise ValueError.
        self.assertRaises(
            ValueError,
            self.visitor._get_value,
            ast.parse("import abc")
        )

    def test__get_value_asserts(self):
        # Unsupported operand orderings also raise ValueError.
        self.assertRaises(
            ValueError,
            self.visitor._get_value,
            self._parse_expr("EXPR % 'abc'")
        )
        self.assertRaises(
            ValueError,
            self.visitor._get_value,
            self._parse_expr("'abc' + EXPR")
        )

    def test__camelcase_to_python_camel(self):
        self.assertEqual(
            "some_class_name",
            self.visitor._camelcase_to_python("SomeClassName")
        )

    def test__camelcase_to_python_python(self):
        # Already-pythonic names pass through unchanged.
        self.assertEqual(
            "some_python_name",
            self.visitor._camelcase_to_python("some_python_name")
        )

    def test__get_mocked_class_value_variants_matches_class(self):
        # When the mocked class is the class under test, the bare attribute
        # name is also acceptable.
        self.visitor.classname_python = "foo_class"
        self.assertEqual(
            ["mocked_obj", "foo_class_mocked_obj"],
            self.visitor._get_mocked_class_value_variants(
                class_name="FooClass",
                mocked_name="MockedObj"
            )
        )

    def test__get_mocked_class_value_variants_different_class(self):
        # Otherwise only the class-prefixed variant is acceptable.
        self.visitor.classname_python = "foo_class"
        self.assertEqual(
            ["bar_class_mocked_obj"],
            self.visitor._get_mocked_class_value_variants(
                class_name="BarClass",
                mocked_name="MockedObj"
            )
        )

    def test__add_pkg_optional_prefixes(self):
        # Package path components are prepended progressively.
        self.assertEqual(
            ["foo", "bar", "bar_class_bar",
             "pkg_bar_class_bar", "some_pkg_bar_class_bar"],
            self.visitor._add_pkg_optional_prefixes(
                "some.pkg.BarClass".split("."),
                ["foo", "bar"]
            )
        )

    def test__get_mocked_name_variants_single(self):
        self.assertEqual(
            ["foo_bar"],
            self.visitor._get_mocked_name_variants(
                "FooBar"
            )
        )
        self.assertEqual(
            ["foobar"],
            self.visitor._get_mocked_name_variants(
                "foobar"
            )
        )

    def test__get_mocked_name_variants_classname(self):
        # The bare method name is allowed only when the mocked class is the
        # class currently under test.
        self.visitor.classname_python = "foo_bar"
        self.assertEqual(
            ["method", "foo_bar_method", "pkg_foo_bar_method"],
            self.visitor._get_mocked_name_variants(
                "pkg.FooBar.method"
            )
        )
        self.visitor.classname_python = ""
        self.assertEqual(
            ["foo_bar_method", "pkg_foo_bar_method"],
            self.visitor._get_mocked_name_variants(
                "pkg.FooBar.method"
            )
        )

    def test__get_mocked_name_variants_pkg(self):
        self.assertEqual(
            ["method", "pkg_method", "long_pkg_method",
             "some_long_pkg_method"],
            self.visitor._get_mocked_name_variants(
                "some.long.pkg.method"
            )
        )

    def test__get_mock_decorators_variants(self):
        self.visitor.classname_python = "my_class_object"
        self.assertEqual(
            self.code_mock_decorators,
            self.visitor._get_mock_decorators_variants(self.tree)
        )

    def test__get_mock_args(self):
        self.assertEqual(
            self.code_mock_args,
            self.visitor._get_mock_args(self.tree)
        )

    def test_visit_Assign(self):
        # Statically-evaluable module assignments are recorded in globals_.
        self.visitor.globals_ = {}
        self.visitor.visit_Assign(
            self._parse_expr("ABC = '20' + '40'")
        )
        self.assertEqual(
            {"ABC": "2040"},
            self.visitor.globals_
        )
        self.visitor.visit_Assign(
            self._parse_expr("abc = 20 + 40")
        )
        self.assertEqual(
            {"ABC": "2040", "abc": 60},
            self.visitor.globals_
        )

    def test_visit_ClassDef(self):
        # The class under test is remembered in python_case, with a
        # trailing "TestCase" suffix stripped.
        self.visitor.visit_ClassDef(
            self._parse_expr("class MyObject(object): pass")
        )
        self.assertEqual(
            "my_object",
            self.visitor.classname_python
        )
        self.visitor.visit_ClassDef(
            self._parse_expr("class YourObjectTestCase(object): pass")
        )
        self.assertEqual(
            "your_object",
            self.visitor.classname_python
        )

    def test_visit_FunctionDef_empty_decs(self):
        # No mock decorators -> nothing to check, no errors reported.
        self.visitor._get_mock_decorators_variants = mock.Mock(
            return_value=[]
        )
        self.assertIsNone(self.visitor.visit_FunctionDef(self.tree))
        self.assertEqual([], self.visitor.errors)
        self.visitor._get_mock_decorators_variants.assert_called_once_with(
            self.tree
        )

    def test_visit_FunctionDef_good(self):
        # Argument name matches one of the decorator's variants -> no error.
        self.visitor._get_mock_decorators_variants = mock.Mock(
            return_value=[
                ["foo", "foo_bar", "pkg_foo_bar"]
            ]
        )
        self.visitor._get_mock_args = mock.Mock(
            return_value=["pkg_foo_bar"]
        )
        self.assertIsNone(self.visitor.visit_FunctionDef(self.tree))
        self.assertEqual([], self.visitor.errors)
        self.visitor._get_mock_decorators_variants.assert_called_once_with(
            self.tree
        )
        self.visitor._get_mock_args.assert_called_once_with(
            self.tree
        )

    def test_visit_FunctionDef_misnamed(self):
        # A mock argument that matches no variant is reported with the
        # acceptable alternatives.
        variants = test_mock.Variants(
            ["foo", "foo_bar", "pkg_foo_bar", "a"]
        )
        self.visitor._get_mock_decorators_variants = mock.Mock(
            return_value=[variants]
        )
        self.visitor._get_mock_args = mock.Mock(
            return_value=["bar_foo_misnamed"]
        )
        self.assertIsNone(self.visitor.visit_FunctionDef(self.tree))
        self.assertEqual(
            [
                {
                    "lineno": 2,
                    "messages": [
                        "Argument 'bar_foo_misnamed' misnamed; should be "
                        "either of %s that is derived from the mock decorator "
                        "args.\n" % variants
                    ],
                    "mismatch_pairs": [
                        ("bar_foo_misnamed", variants)
                    ]
                }
            ],
            self.visitor.errors)
        self.visitor._get_mock_decorators_variants.assert_called_once_with(
            self.tree
        )
        self.visitor._get_mock_args.assert_called_once_with(
            self.tree
        )

    def test_visit_FunctionDef_mismatch_args(self):
        # More args than decorators: the surplus arg is reported as missing
        # its decorator, and the raw args/decs are attached to the error.
        variants = test_mock.Variants(
            ["foo", "foo_bar", "pkg_foo_bar", "a"]
        )
        self.visitor._get_mock_decorators_variants = mock.Mock(
            return_value=[variants]
        )
        self.visitor._get_mock_args = mock.Mock(
            return_value=["bar_foo_misnamed", "mismatched"]
        )
        self.assertIsNone(self.visitor.visit_FunctionDef(self.tree))
        self.assertEqual(
            [
                {
                    "lineno": 2,
                    "messages": [
                        "Argument 'bar_foo_misnamed' misnamed; should be "
                        "either of %s that is derived from the mock decorator "
                        "args.\n" % variants,
                        "Missing or malformed decorator for 'mismatched' "
                        "argument."
                    ],
                    "args": self.visitor._get_mock_args.return_value,
                    "decs": [variants]
                }
            ],
            self.visitor.errors)
        self.visitor._get_mock_decorators_variants.assert_called_once_with(
            self.tree
        )
        self.visitor._get_mock_args.assert_called_once_with(
            self.tree
        )

    def test_visit_FunctionDef_mismatch_decs(self):
        # More decorators than args: the surplus decorator is reported as
        # missing its argument.
        variants = test_mock.Variants(
            ["foo", "foo_bar", "pkg_foo_bar", "a"]
        )
        self.visitor._get_mock_decorators_variants = mock.Mock(
            return_value=[variants]
        )
        self.visitor._get_mock_args = mock.Mock(
            return_value=[]
        )
        self.assertIsNone(self.visitor.visit_FunctionDef(self.tree))
        self.assertEqual(
            [
                {
                    "lineno": 2,
                    "messages": [
                        "Missing or malformed argument for {'mock_foo', "
                        "'mock_foo_bar', 'mock_pkg_foo_bar', ...} decorator."
                    ],
                    "args": self.visitor._get_mock_args.return_value,
                    "decs": [variants]
                }
            ],
            self.visitor.errors)
        self.visitor._get_mock_decorators_variants.assert_called_once_with(
            self.tree
        )
        self.visitor._get_mock_args.assert_called_once_with(
            self.tree
        )

    def test_visit(self):
        # End-to-end: the fixture's deliberately misnamed args are reported.
        self.visitor.classname_python = "my_class_object"
        self.visitor.visit(self.tree)
        self.assertEqual(
            self.code_mock_args,
            self.visitor.errors[0]["args"]
        )
        self.assertEqual(
            self.code_mock_decorators,
            self.visitor.errors[0]["decs"]
        )
        self.assertEqual(2, self.visitor.errors[0]["lineno"])

    def test_visit_ok(self):
        # End-to-end: correctly named mock args produce no errors.
        # NOTE(review): fixture layout reconstructed from the mangled dump;
        # indentation chosen to be the minimal valid Python form.
        self.visitor.classname_python = "my_class_object"
        self.visitor.visit(
            self._parse_expr(
                """
class MyClassObjectTestCase(object):
    @mock.patch("foo.bar.MyClassObject.yep")
    @mock.patch("foo.bar.ClassName.ok")
    @mock.patch.object(pkg.FooClass, "method")
    def test_mockings(self, mock_pkg_foo_class_method,
                      mock_class_name_ok, mock_yep):
        pass
""")
        )
        self.assertEqual(
            [],
            self.visitor.errors
        )
rally-0.9.1/tests/unit/task/0000775000567000056710000000000013073420067017072 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/task/test_engine.py0000664000567000056710000012525713073417720021766 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Tests for the Test engine.""" import collections import copy import threading import jsonschema import mock from rally.common import objects from rally import consts from rally import exceptions from rally.task import engine from tests.unit import fakes from tests.unit import test class TestException(exceptions.RallyException): msg_fmt = "TestException" class TaskEngineTestCase(test.TestCase): @mock.patch("rally.task.engine.TaskConfig") def test_init(self, mock_task_config): config = mock.MagicMock() task = mock.MagicMock() mock_task_config.return_value = fake_task_instance = mock.MagicMock() eng = engine.TaskEngine(config, task, mock.Mock()) mock_task_config.assert_has_calls([mock.call(config)]) self.assertEqual(eng.config, fake_task_instance) self.assertEqual(eng.task, task) def test_init_empty_config(self): config = None task = mock.Mock() exception = self.assertRaises(exceptions.InvalidTaskException, engine.TaskEngine, config, task, mock.Mock()) self.assertIn("Input task is empty", str(exception)) self.assertTrue(task.set_failed.called) @mock.patch("rally.task.engine.TaskConfig") @mock.patch("jsonschema.validate") def test_validate(self, mock_validate, mock_task_config): mock_task_config.return_value = config = mock.MagicMock() eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) mock_validate = mock.MagicMock() eng._validate_config_scenarios_name = mock_validate.names eng._validate_config_syntax = 
mock_validate.syntax eng._validate_config_semantic = mock_validate.semantic eng.validate() expected_calls = [ mock.call.names(config), mock.call.syntax(config), mock.call.semantic(config) ] mock_validate.assert_has_calls(expected_calls) def test_validate__wrong_schema(self): config = { "wrong": True } task = mock.MagicMock() self.assertRaises(exceptions.InvalidTaskException, engine.TaskEngine, config, task, mock.Mock()) self.assertTrue(task.set_failed.called) @mock.patch("rally.task.engine.TaskConfig") def test_validate__wrong_scenarios_name(self, mock_task_config): task = mock.MagicMock() eng = engine.TaskEngine(mock.MagicMock(), task, mock.Mock()) eng._validate_config_scenarios_name = mock.MagicMock( side_effect=exceptions.NotFoundScenarios) self.assertRaises(exceptions.InvalidTaskException, eng.validate) self.assertTrue(task.set_failed.called) @mock.patch("rally.task.engine.TaskConfig") def test_validate__wrong_syntax(self, mock_task_config): task = mock.MagicMock() eng = engine.TaskEngine(mock.MagicMock(), task, mock.Mock()) eng._validate_config_scenarios_name = mock.MagicMock() eng._validate_config_syntax = mock.MagicMock( side_effect=exceptions.InvalidTaskConfig) self.assertRaises(exceptions.InvalidTaskException, eng.validate) self.assertTrue(task.set_failed.called) @mock.patch("rally.task.engine.TaskConfig") def test_validate__wrong_semantic(self, mock_task_config): task = mock.MagicMock() eng = engine.TaskEngine(mock.MagicMock(), task, mock.Mock()) eng._validate_config_scenarios_name = mock.MagicMock() eng._validate_config_syntax = mock.MagicMock() eng._validate_config_semantic = mock.MagicMock( side_effect=exceptions.InvalidTaskConfig) self.assertRaises(exceptions.InvalidTaskException, eng.validate) self.assertTrue(task.set_failed.called) @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.scenario.Scenario.get_all") def test__validate_config_scenarios_name( self, mock_scenario_get_all, mock_task_config): mock_task_instance = 
mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = [ engine.Workload({"name": "a"}, 0), engine.Workload({"name": "b"}, 1) ] mock_task_instance.subtasks = [mock_subtask] mock_scenario_get_all.return_value = [ mock.MagicMock(get_name=lambda: "e"), mock.MagicMock(get_name=lambda: "b"), mock.MagicMock(get_name=lambda: "a") ] eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) eng._validate_config_scenarios_name(mock_task_instance) @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.scenario.Scenario") def test__validate_config_scenarios_name_non_exsisting( self, mock_scenario, mock_task_config): mock_task_instance = mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = [ engine.Workload({"name": "exist"}, 0), engine.Workload({"name": "nonexist1"}, 1), engine.Workload({"name": "nonexist2"}, 2) ] mock_task_instance.subtasks = [mock_subtask] mock_scenario.get_all.return_value = [ mock.Mock(get_name=lambda: "exist"), mock.Mock(get_name=lambda: "aaa")] eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) exc = self.assertRaises(exceptions.NotFoundScenarios, eng._validate_config_scenarios_name, mock_task_instance) self.assertEqual("There are no benchmark scenarios with names: " "`nonexist2, nonexist1`.", str(exc)) @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.hook.Hook.validate") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.runner.ScenarioRunner.validate") @mock.patch("rally.task.engine.context.ContextManager.validate") def test__validate_config_syntax( self, mock_context_manager_validate, mock_scenario_runner_validate, mock_task_config, mock_hook_validate, mock_scenario_get ): default_context = {"foo": 1} scenario_cls = mock_scenario_get.return_value scenario_cls.get_default_context.return_value = default_context mock_task_instance = mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = 
[ engine.Workload({"name": "sca", "context": "a"}, 0), engine.Workload({"name": "sca", "runner": "b"}, 1), engine.Workload({"name": "sca", "hooks": ["c"]}, 2), ] mock_task_instance.subtasks = [mock_subtask] eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) eng._validate_config_syntax(mock_task_instance) mock_scenario_runner_validate.assert_has_calls( [mock.call({}), mock.call("b")], any_order=True) mock_context_manager_validate.assert_has_calls( [mock.call("a"), mock.call(default_context, allow_hidden=True), mock.call({}), mock.call(default_context, allow_hidden=True), mock.call({}), mock.call(default_context, allow_hidden=True)], any_order=True ) mock_hook_validate.assert_called_once_with("c") @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.runner.ScenarioRunner") @mock.patch("rally.task.engine.context.ContextManager.validate") def test__validate_config_syntax__wrong_runner( self, mock_context_manager_validate, mock_scenario_runner, mock_task_config, mock_scenario_get): scenario_cls = mock_scenario_get.return_value scenario_cls.get_default_context.return_value = {} mock_task_instance = mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = [ engine.Workload({"name": "sca", "context": "a"}, 0), engine.Workload({"name": "sca", "runner": "b"}, 1) ] mock_task_instance.subtasks = [mock_subtask] eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) mock_scenario_runner.validate = mock.MagicMock( side_effect=jsonschema.ValidationError("a")) self.assertRaises(exceptions.InvalidTaskConfig, eng._validate_config_syntax, mock_task_instance) @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.runner.ScenarioRunner.validate") @mock.patch("rally.task.engine.context.ContextManager") def test__validate_config_syntax__wrong_context( self, mock_context_manager, 
mock_scenario_runner_validate, mock_task_config, mock_scenario_get): scenario_cls = mock_scenario_get.return_value scenario_cls.get_default_context.return_value = {} mock_task_instance = mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = [ engine.Workload({"name": "sca", "context": "a"}, 0), engine.Workload({"name": "sca", "runner": "b"}, 1) ] mock_task_instance.subtasks = [mock_subtask] eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) mock_context_manager.validate = mock.MagicMock( side_effect=jsonschema.ValidationError("a")) self.assertRaises(exceptions.InvalidTaskConfig, eng._validate_config_syntax, mock_task_instance) @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.engine.TaskConfig") def test__validate_config_semantic_helper(self, mock_task_config, mock_scenario_get): deployment = mock.Mock() eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), deployment) mock_scenario = mock_scenario_get.return_value workloads = [engine.Workload( {"name": "name", "runner": "runner", "args": "args"}, 0)] user_context = mock.MagicMock() user_context.__enter__.return_value.context = { "users": [{"foo": "user1"}]} eng._validate_config_semantic_helper("admin", user_context, workloads, deployment) mock_scenario.validate.assert_called_once_with( "name", {"runner": "runner", "args": "args"}, admin="admin", users=[{"foo": "user1"}], deployment=deployment) @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.engine.TaskConfig") def test__validate_config_semanitc_helper_invalid_arg( self, mock_task_config, mock_scenario_get): eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), mock.Mock()) mock_scenario = mock_scenario_get.return_value mock_scenario.validate.side_effect = exceptions.InvalidScenarioArgument user_context = mock.MagicMock() workloads = [engine.Workload({"name": "name"}, 0)] self.assertRaises(exceptions.InvalidTaskConfig, 
eng._validate_config_semantic_helper, "a", user_context, workloads, "fake_deployment") @mock.patch("rally.osclients.Clients") @mock.patch("rally.task.engine.scenario.Scenario.get") @mock.patch("rally.task.engine.context.Context") @mock.patch("rally.task.engine.objects.Credential") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.TaskEngine" "._validate_config_semantic_helper") @mock.patch("rally.task.engine.objects.Deployment.get", return_value="FakeDeployment") def test__validate_config_semantic( self, mock_deployment_get, mock__validate_config_semantic_helper, mock_task_config, mock_credential, mock_context, mock_scenario_get, mock_clients): deployment = fakes.FakeDeployment( uuid="deployment_uuid", admin={"foo": "admin"}, users=[{"bar": "user1"}]) scenario_cls = mock_scenario_get.return_value scenario_cls.get_namespace.return_value = "default" mock_task_instance = mock.MagicMock() mock_subtask1 = mock.MagicMock() wconf1 = engine.Workload({"name": "a", "runner": "ra", "context": {"users": {}}}, 0) wconf2 = engine.Workload({"name": "a", "runner": "rb"}, 1) mock_subtask1.workloads = [wconf1, wconf2] mock_subtask2 = mock.MagicMock() wconf3 = engine.Workload({"name": "b", "runner": "ra"}, 0) mock_subtask2.workloads = [wconf3] mock_task_instance.subtasks = [mock_subtask1, mock_subtask2] fake_task = mock.MagicMock() eng = engine.TaskEngine(mock_task_instance, fake_task, deployment) eng._validate_config_semantic(mock_task_instance) admin = mock_credential.return_value user_context = mock_context.get.return_value.return_value mock_clients.assert_called_once_with(admin) mock_clients.return_value.verified_keystone.assert_called_once_with() mock__validate_config_semantic_helper.assert_has_calls([ mock.call(admin, user_context, [wconf1], deployment), mock.call(admin, user_context, [wconf2, wconf3], deployment), ], any_order=True) @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.TaskConfig") 
@mock.patch("rally.task.engine.ResultConsumer") @mock.patch("rally.task.engine.context.ContextManager.cleanup") @mock.patch("rally.task.engine.context.ContextManager.setup") @mock.patch("rally.task.engine.scenario.Scenario") @mock.patch("rally.task.engine.runner.ScenarioRunner") def test_run__update_status( self, mock_scenario_runner, mock_scenario, mock_context_manager_setup, mock_context_manager_cleanup, mock_result_consumer, mock_task_config, mock_task_get_status): task = mock.MagicMock() mock_task_get_status.return_value = consts.TaskStatus.ABORTING eng = engine.TaskEngine(mock.MagicMock(), task, mock.Mock()) eng.run() task.update_status.assert_has_calls([ mock.call(consts.TaskStatus.RUNNING), mock.call(consts.TaskStatus.FINISHED) ]) @mock.patch("rally.task.engine.objects.Credential") @mock.patch("rally.task.engine.objects.task.Task.get_status") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.LOG") @mock.patch("rally.task.engine.ResultConsumer") @mock.patch("rally.task.engine.scenario.Scenario") @mock.patch("rally.task.engine.runner.ScenarioRunner") @mock.patch("rally.task.engine.context.ContextManager.cleanup") @mock.patch("rally.task.engine.context.ContextManager.setup") def test_run_exception_is_logged( self, mock_context_manager_setup, mock_context_manager_cleanup, mock_scenario_runner, mock_scenario, mock_result_consumer, mock_log, mock_task_config, mock_task_get_status, mock_credential): scenario_cls = mock_scenario.get.return_value scenario_cls.get_namespace.return_value = "openstack" mock_context_manager_setup.side_effect = Exception mock_result_consumer.is_task_in_aborting_status.return_value = False mock_task_instance = mock.MagicMock() mock_subtask = mock.MagicMock() mock_subtask.workloads = [ engine.Workload( {"name": "a.task", "context": {"context_a": {"a": 1}}}, 0), engine.Workload( {"name": "b.task", "context": {"context_b": {"b": 2}}}, 1) ] mock_task_instance.subtasks = [mock_subtask] mock_task_config.return_value = 
mock_task_instance deployment = fakes.FakeDeployment( uuid="deployment_uuid", admin={"foo": "admin"}) eng = engine.TaskEngine(mock.MagicMock(), mock.MagicMock(), deployment) eng.run() self.assertEqual(2, mock_log.exception.call_count) @mock.patch("rally.task.engine.objects.Credential") @mock.patch("rally.task.engine.ResultConsumer") @mock.patch("rally.task.engine.context.ContextManager.cleanup") @mock.patch("rally.task.engine.context.ContextManager.setup") @mock.patch("rally.task.engine.scenario.Scenario") @mock.patch("rally.task.engine.runner.ScenarioRunner") def test_run__task_soft_aborted( self, mock_scenario_runner, mock_scenario, mock_context_manager_setup, mock_context_manager_cleanup, mock_result_consumer, mock_credential): scenario_cls = mock_scenario.get.return_value scenario_cls.get_namespace.return_value = "openstack" task = mock.MagicMock() mock_result_consumer.is_task_in_aborting_status.side_effect = [False, False, True] config = { "a.task": [{"runner": {"type": "a", "b": 1}}], "b.task": [{"runner": {"type": "a", "b": 1}}], "c.task": [{"runner": {"type": "a", "b": 1}}] } fake_runner_cls = mock.MagicMock() fake_runner = mock.MagicMock() fake_runner_cls.return_value = fake_runner mock_scenario_runner.get.return_value = fake_runner_cls deployment = fakes.FakeDeployment( uuid="deployment_uuid", admin={"foo": "admin"}) eng = engine.TaskEngine(config, task, deployment) eng.run() self.assertEqual(2, fake_runner.run.call_count) self.assertEqual(mock.call(consts.TaskStatus.ABORTED), task.update_status.mock_calls[-1]) @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer") @mock.patch("rally.task.engine.context.ContextManager.cleanup") @mock.patch("rally.task.engine.context.ContextManager.setup") @mock.patch("rally.task.engine.scenario.Scenario") @mock.patch("rally.task.engine.runner.ScenarioRunner") def test_run__task_aborted( self, mock_scenario_runner, mock_scenario, mock_context_manager_setup, 
mock_context_manager_cleanup, mock_result_consumer, mock_task_get_status): task = mock.MagicMock(spec=objects.Task) config = { "a.task": [{"runner": {"type": "a", "b": 1}}], "b.task": [{"runner": {"type": "a", "b": 1}}], "c.task": [{"runner": {"type": "a", "b": 1}}] } fake_runner_cls = mock.MagicMock() fake_runner = mock.MagicMock() fake_runner_cls.return_value = fake_runner mock_task_get_status.return_value = consts.TaskStatus.SOFT_ABORTING mock_scenario_runner.get.return_value = fake_runner_cls eng = engine.TaskEngine(config, task, mock.Mock()) eng.run() self.assertEqual(mock.call(consts.TaskStatus.ABORTED), task.update_status.mock_calls[-1]) @mock.patch("rally.task.engine.objects.Credential") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.scenario.Scenario.get") def test__prepare_context(self, mock_scenario_get, mock_task_config, mock_credential): default_context = {"a": 1, "b": 2} mock_scenario = mock_scenario_get.return_value mock_scenario.get_default_context.return_value = default_context mock_scenario.get_namespace.return_value = "openstack" task = mock.MagicMock() name = "a.task" context = {"b": 3, "c": 4} config = { "a.task": [{"context": {"context_a": {"a": 1}}}], } deployment = fakes.FakeDeployment( uuid="deployment_uuid", admin={"foo": "admin"}) eng = engine.TaskEngine(config, task, deployment) result = eng._prepare_context(context, name) expected_context = copy.deepcopy(default_context) expected_context.setdefault("users", {}) expected_context.update(context) expected_result = { "task": task, "admin": {"credential": mock_credential.return_value}, "scenario_name": name, "config": expected_context } self.assertEqual(result, expected_result) mock_scenario_get.assert_called_once_with(name) @mock.patch("rally.task.engine.objects.Credential") @mock.patch("rally.task.engine.TaskConfig") @mock.patch("rally.task.engine.scenario.Scenario.get") def test__prepare_context_with_existing_users(self, mock_scenario_get, mock_task_config, 
mock_credential): mock_scenario = mock_scenario_get.return_value mock_scenario.get_default_context.return_value = {} mock_scenario.get_namespace.return_value = "default" task = mock.MagicMock() name = "a.task" context = {"b": 3, "c": 4} config = { "a.task": [{"context": {"context_a": {"a": 1}}}], } deployment = fakes.FakeDeployment( uuid="deployment_uuid", admin={"foo": "admin"}, users=[{"bar": "user1"}]) eng = engine.TaskEngine(config, task, deployment) result = eng._prepare_context(context, name) expected_context = {"existing_users": [{"bar": "user1"}]} expected_context.update(context) expected_result = { "task": task, "admin": {"credential": mock_credential.return_value}, "scenario_name": name, "config": expected_context } self.assertEqual(result, expected_result) mock_scenario_get.assert_called_once_with(name) class ResultConsumerTestCase(test.TestCase): @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer.wait_and_abort") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results( self, mock_sla_checker, mock_result_consumer_wait_and_abort, mock_task_get_status): mock_sla_instance = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance mock_task_get_status.return_value = consts.TaskStatus.RUNNING key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() results = [ [{"duration": 1, "timestamp": 3}], [{"duration": 2, "timestamp": 2}] ] runner.result_queue = collections.deque(results) runner.event_queue = collections.deque() with engine.ResultConsumer( key, task, subtask, workload, runner, False) as consumer_obj: pass mock_sla_instance.add_iteration.assert_has_calls([ mock.call({"duration": 1, "timestamp": 3}), mock.call({"duration": 2, "timestamp": 2})]) self.assertEqual([{"duration": 2, "timestamp": 2}, {"duration": 1, "timestamp": 3}], consumer_obj.results) 
@mock.patch("rally.task.hook.HookExecutor") @mock.patch("rally.task.engine.LOG") @mock.patch("rally.task.engine.time.time") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer.wait_and_abort") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_no_iteration( self, mock_sla_checker, mock_result_consumer_wait_and_abort, mock_task_get_status, mock_time, mock_log, mock_hook_executor): mock_time.side_effect = [0, 1] mock_sla_instance = mock.MagicMock() mock_sla_results = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance mock_sla_instance.results.return_value = mock_sla_results mock_task_get_status.return_value = consts.TaskStatus.RUNNING key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() results = [] runner.result_queue = collections.deque(results) runner.event_queue = collections.deque() with engine.ResultConsumer( key, task, subtask, workload, runner, False): pass self.assertFalse(workload.add_workload_data.called) workload.set_results.assert_called_once_with({ "full_duration": 1, "sla": mock_sla_results, "load_duration": 0 }) @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer.wait_and_abort") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_sla_failure_abort( self, mock_sla_checker, mock_result_consumer_wait_and_abort, mock_task_get_status): mock_sla_instance = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance mock_sla_instance.add_iteration.side_effect = [True, True, False, False] key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() runner.result_queue = collections.deque( [[{"duration": 1, "timestamp": 1}, {"duration": 2, "timestamp": 2}]] * 
4) with engine.ResultConsumer(key, task, subtask, workload, runner, True): pass self.assertTrue(runner.abort.called) task.update_status.assert_called_once_with( consts.TaskStatus.SOFT_ABORTING) @mock.patch("rally.task.hook.HookExecutor") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.threading.Thread") @mock.patch("rally.task.engine.threading.Event") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_abort_manually(self, mock_sla_checker, mock_event, mock_thread, mock_task_get_status, mock_hook_executor): runner = mock.MagicMock(result_queue=False) is_done = mock.MagicMock() is_done.isSet.side_effect = (False, True) task = mock.MagicMock() mock_task_get_status.return_value = consts.TaskStatus.ABORTED subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} mock_hook_executor_instance = mock_hook_executor.return_value with engine.ResultConsumer(key, task, subtask, workload, runner, True): pass mock_sla_checker.assert_called_once_with(key["kw"]) mock_hook_executor.assert_called_once_with(key["kw"], task) self.assertFalse(mock_hook_executor_instance.on_iteration.called) mocked_set_aborted = mock_sla_checker.return_value.set_aborted_manually mocked_set_aborted.assert_called_once_with() @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_sla_failure_continue(self, mock_sla_checker, mock_task_get_status): mock_sla_instance = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance mock_task_get_status.return_value = consts.TaskStatus.CRASHED mock_sla_instance.add_iteration.side_effect = [True, True, False, False] key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() runner.result_queue = collections.deque( [[{"duration": 1, 
"timestamp": 4}]] * 4) runner.event_queue = collections.deque() with engine.ResultConsumer(key, task, subtask, workload, runner, False): pass self.assertEqual(0, runner.abort.call_count) @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.threading.Thread") @mock.patch("rally.task.engine.threading.Event") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_with_unexpected_failure(self, mock_sla_checker, mock_event, mock_thread, mock_task_get_status): mock_sla_instance = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() runner.result_queue = collections.deque([1]) runner.event_queue = collections.deque() exc = TestException() try: with engine.ResultConsumer(key, task, subtask, workload, runner, False): raise exc except TestException: pass mock_sla_instance.set_unexpected_failure.assert_has_calls( [mock.call(exc)]) @mock.patch("rally.task.engine.CONF") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer.wait_and_abort") @mock.patch("rally.task.sla.SLAChecker") def test_consume_results_chunked( self, mock_sla_checker, mock_result_consumer_wait_and_abort, mock_task_get_status, mock_conf): mock_conf.raw_result_chunk_size = 2 mock_sla_instance = mock.MagicMock() mock_sla_checker.return_value = mock_sla_instance mock_task_get_status.return_value = consts.TaskStatus.RUNNING key = {"kw": {"fake": 2}, "name": "fake", "pos": 0} task = mock.MagicMock(spec=objects.Task) subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = mock.MagicMock() results = [ [{"duration": 1, "timestamp": 3}, {"duration": 2, "timestamp": 2}, {"duration": 3, "timestamp": 3}], [{"duration": 4, "timestamp": 2}, {"duration": 5, "timestamp": 3}], [{"duration": 6, "timestamp": 
2}], [{"duration": 7, "timestamp": 1}], ] runner.result_queue = collections.deque(results) runner.event_queue = collections.deque() with engine.ResultConsumer( key, task, subtask, workload, runner, False) as consumer_obj: pass mock_sla_instance.add_iteration.assert_has_calls([ mock.call({"duration": 1, "timestamp": 3}), mock.call({"duration": 2, "timestamp": 2}), mock.call({"duration": 3, "timestamp": 3}), mock.call({"duration": 4, "timestamp": 2}), mock.call({"duration": 5, "timestamp": 3}), mock.call({"duration": 6, "timestamp": 2}), mock.call({"duration": 7, "timestamp": 1})]) self.assertEqual([{"duration": 7, "timestamp": 1}], consumer_obj.results) workload.add_workload_data.assert_has_calls([ mock.call(0, {"raw": [{"duration": 2, "timestamp": 2}, {"duration": 1, "timestamp": 3}]}), mock.call(1, {"raw": [{"duration": 4, "timestamp": 2}, {"duration": 3, "timestamp": 3}]}), mock.call(2, {"raw": [{"duration": 6, "timestamp": 2}, {"duration": 5, "timestamp": 3}]}), mock.call(3, {"raw": [{"duration": 7, "timestamp": 1}]})]) @mock.patch("rally.task.engine.LOG") @mock.patch("rally.task.hook.HookExecutor") @mock.patch("rally.task.engine.time.time") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.ResultConsumer.wait_and_abort") @mock.patch("rally.task.sla.SLAChecker") def test_consume_events( self, mock_sla_checker, mock_result_consumer_wait_and_abort, mock_task_get_status, mock_time, mock_hook_executor, mock_log): mock_time.side_effect = [0, 1] mock_sla_instance = mock_sla_checker.return_value mock_sla_results = mock_sla_instance.results.return_value mock_hook_executor_instance = mock_hook_executor.return_value mock_hook_results = mock_hook_executor_instance.results.return_value mock_task_get_status.return_value = consts.TaskStatus.RUNNING key = {"kw": {"fake": 2, "hooks": []}, "name": "fake", "pos": 0} task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) runner = 
mock.MagicMock() events = [ {"type": "iteration", "value": 1}, {"type": "iteration", "value": 2}, {"type": "iteration", "value": 3} ] runner.result_queue = collections.deque() runner.event_queue = collections.deque(events) consumer_obj = engine.ResultConsumer(key, task, subtask, workload, runner, False) stop_event = threading.Event() def set_stop_event(event_type, value): if not runner.event_queue: stop_event.set() mock_hook_executor_instance.on_event.side_effect = set_stop_event with consumer_obj: stop_event.wait(1) mock_hook_executor_instance.on_event.assert_has_calls([ mock.call(event_type="iteration", value=1), mock.call(event_type="iteration", value=2), mock.call(event_type="iteration", value=3) ]) self.assertFalse(workload.add_workload_data.called) workload.set_results.assert_called_once_with({ "full_duration": 1, "sla": mock_sla_results, "hooks": mock_hook_results, "load_duration": 0 }) @mock.patch("rally.task.engine.threading.Thread") @mock.patch("rally.task.engine.threading.Event") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.TaskEngine._prepare_context") @mock.patch("rally.task.engine.time.sleep") @mock.patch("rally.task.engine.TaskEngine._get_runner") def test_wait_and_abort_on_abort( self, mock_task_engine__get_runner, mock_sleep, mock_task_engine__prepare_context, mock_task_get_status, mock_event, mock_thread): runner = mock.MagicMock() key = mock.MagicMock() task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) mock_task_get_status.side_effect = (consts.TaskStatus.RUNNING, consts.TaskStatus.RUNNING, consts.TaskStatus.ABORTING) mock_is_done = mock.MagicMock() mock_event.return_value = mock_is_done mock_is_done.isSet.return_value = False res = engine.ResultConsumer(key, task, subtask, workload, runner, True) res.wait_and_abort() runner.abort.assert_called_with() # test task.get_status is checked until is_done is not set self.assertEqual(3, 
mock_task_get_status.call_count) @mock.patch("rally.task.engine.threading.Thread") @mock.patch("rally.task.engine.threading.Event") @mock.patch("rally.common.objects.Task.get_status") @mock.patch("rally.task.engine.TaskEngine._prepare_context") @mock.patch("rally.task.engine.time.sleep") @mock.patch("rally.task.engine.TaskEngine._get_runner") def test_wait_and_abort_on_no_abort( self, mock_task_engine__get_runner, mock_sleep, mock_task_engine__prepare_context, mock_task_get_status, mock_event, mock_thread): runner = mock.MagicMock() key = mock.MagicMock() task = mock.MagicMock() subtask = mock.Mock(spec=objects.Subtask) workload = mock.Mock(spec=objects.Workload) mock_task_get_status.return_value = consts.TaskStatus.RUNNING mock_is_done = mock.MagicMock() mock_event.return_value = mock_is_done mock_is_done.isSet.side_effect = [False, False, False, False, True] res = engine.ResultConsumer(key, task, subtask, workload, runner, True) res.wait_and_abort() # check method don't abort runner if task is not aborted self.assertFalse(runner.abort.called) # test task.get_status is checked until is_done is not set self.assertEqual(4, mock_task_get_status.call_count) class TaskTestCase(test.TestCase): @mock.patch("jsonschema.validate") def test_validate_json(self, mock_validate): config = {} engine.TaskConfig(config) mock_validate.assert_has_calls([ mock.call(config, engine.TaskConfig.CONFIG_SCHEMA_V1)]) @mock.patch("jsonschema.validate") @mock.patch("rally.task.engine.TaskConfig._make_subtasks") def test_validate_json_v2(self, mock_task_config__make_subtasks, mock_validate): config = {"version": 2} engine.TaskConfig(config) mock_validate.assert_has_calls([ mock.call(config, engine.TaskConfig.CONFIG_SCHEMA_V2)]) @mock.patch("rally.task.engine.TaskConfig._get_version") @mock.patch("rally.task.engine.TaskConfig._validate_json") @mock.patch("rally.task.engine.TaskConfig._make_subtasks") def test_validate_version(self, mock_task_config__make_subtasks, 
mock_task_config__validate_json, mock_task_config__get_version): mock_task_config__get_version.return_value = 1 engine.TaskConfig(mock.MagicMock()) @mock.patch("rally.task.engine.TaskConfig._get_version") @mock.patch("rally.task.engine.TaskConfig._validate_json") @mock.patch("rally.task.engine.TaskConfig._make_subtasks") def test_validate_version_wrong_version( self, mock_task_config__make_subtasks, mock_task_config__validate_json, mock_task_config__get_version): mock_task_config__get_version.return_value = "wrong" self.assertRaises(exceptions.InvalidTaskException, engine.TaskConfig, mock.MagicMock) @mock.patch("rally.task.engine.SubTask") @mock.patch("rally.task.engine.TaskConfig._get_version") @mock.patch("rally.task.engine.TaskConfig._validate_json") def test_make_subtasks_v1(self, mock_task_config__validate_json, mock_task_config__get_version, mock_sub_task): mock_task_config__get_version.return_value = 1 config = {"a.task": [{"s": 1}, {"s": 2}], "b.task": [{"s": 3}]} self.assertEqual(3, len(engine.TaskConfig(config).subtasks)) mock_sub_task.assert_has_calls([ mock.call({ "title": "a.task", "workloads": [{"s": 1, "name": "a.task"}] }), mock.call({ "title": "a.task", "workloads": [{"s": 2, "name": "a.task"}] }), mock.call({ "title": "b.task", "workloads": [{"s": 3, "name": "b.task"}] }) ], any_order=True) @mock.patch("rally.task.engine.SubTask") @mock.patch("rally.task.engine.TaskConfig._get_version") @mock.patch("rally.task.engine.TaskConfig._validate_json") def test_make_subtasks_v2(self, mock_task_config__validate_json, mock_task_config__get_version, mock_sub_task): mock_task_config__get_version.return_value = 2 subtask_conf1 = mock.MagicMock() subtask_conf2 = mock.MagicMock() config = {"subtasks": [subtask_conf1, subtask_conf2]} self.assertEqual(2, len(engine.TaskConfig(config).subtasks)) mock_sub_task.assert_has_calls([ mock.call(subtask_conf1), mock.call(subtask_conf2)]) class WorkloadTestCase(test.TestCase): def setUp(self): super(WorkloadTestCase, 
self).setUp() self.wconf = engine.Workload({ "name": "n", "runner": "r", "context": "c", "sla": "s", "hooks": "h", "args": "a" }, 0) def test_to_dict(self): expected_dict = { "runner": "r", "context": "c", "sla": "s", "hooks": "h", "args": "a" } self.assertEqual(expected_dict, self.wconf.to_dict()) def test_to_task(self): expected_dict = { "runner": "r", "context": "c", "sla": "s", "hooks": "h", "args": "a" } self.assertEqual(expected_dict, self.wconf.to_task()) def test_make_key(self): expected_key = { "name": "n", "pos": 0, "kw": { "runner": "r", "context": "c", "sla": "s", "hooks": "h", "args": "a" } } self.assertEqual(expected_key, self.wconf.make_key()) def test_make_exception_args(self): expected_args = { "name": "n", "pos": 0, "reason": "r", "config": { "runner": "r", "context": "c", "sla": "s", "hooks": "h", "args": "a" } } self.assertEqual(expected_args, self.wconf.make_exception_args("r")) rally-0.9.1/tests/unit/task/test_types.py0000664000567000056710000001060213073417717021656 0ustar jenkinsjenkins00000000000000# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from rally.task import scenario from rally.task import types from tests.unit import test class TestConvertPlugin(scenario.Scenario): @types.convert(bar={"type": "test_bar"}) @scenario.configure() def one_arg(self, bar): """Dummy docstring. 
:param bar: dummy parameter """ pass @types.convert(bar={"type": "test_bar"}, baz={"type": "test_baz"}) @scenario.configure() def two_args(self, bar, baz): """Dummy docstring. :param bar: dummy parameter :param baz: dummy parameter """ pass class ConvertTestCase(test.TestCase): # NOTE(stpierre): These cases test types.convert(), # types._get_preprocessor_loader(), and bits of # types.preprocess(). This may not look very elegant, but it's the # easiest way to test both convert() and # _get_preprocessor_loader() without getting so fine-grained that # the tests are basically tests that the computer is on. @mock.patch("rally.task.types.ResourceType.get", create=True) def test_convert(self, mock_resource_type_get): mock_transform = mock_resource_type_get.return_value.transform args = types.preprocess("TestConvertPlugin.one_arg", mock.MagicMock(), {"bar": "bar_config"}) mock_resource_type_get.assert_called_once_with("test_bar") mock_transform.assert_called_once_with(clients=mock.ANY, resource_config="bar_config") self.assertDictEqual(args, {"bar": mock_transform.return_value}) @mock.patch("rally.task.types.ResourceType.get", create=True) def test_convert_multiple(self, mock_resource_type_get): loaders = {"test_bar": mock.Mock(), "test_baz": mock.Mock()} mock_resource_type_get.side_effect = lambda p: loaders[p] args = types.preprocess("TestConvertPlugin.two_args", mock.MagicMock(), {"bar": "bar_config", "baz": "baz_config"}) mock_resource_type_get.assert_has_calls([mock.call("test_bar"), mock.call("test_baz")], any_order=True) loaders["test_bar"].transform.assert_called_once_with( clients=mock.ANY, resource_config="bar_config") loaders["test_baz"].transform.assert_called_once_with( clients=mock.ANY, resource_config="baz_config") self.assertDictEqual( args, {"bar": loaders["test_bar"].transform.return_value, "baz": loaders["test_baz"].transform.return_value}) class PreprocessTestCase(test.TestCase): @mock.patch("rally.task.types.scenario.Scenario.get") 
@mock.patch("rally.task.types.osclients") def test_preprocess(self, mock_osclients, mock_scenario_get): name = "some_plugin" context = { "a": 1, "b": 2, "admin": {"credential": mock.MagicMock()} } args = {"a": 10, "b": 20} class Preprocessor(types.ResourceType): @classmethod def transform(cls, clients, resource_config): return resource_config * 2 mock_scenario_get.return_value._meta_get.return_value = { "a": Preprocessor } result = types.preprocess(name, context, args) mock_scenario_get.assert_called_once_with(name) mock_scenario_get.return_value._meta_get.assert_called_once_with( "preprocessors", default={}) mock_osclients.Clients.assert_called_once_with( context["admin"]["credential"]) self.assertEqual({"a": 20, "b": 20}, result) rally-0.9.1/tests/unit/task/test_sla.py0000664000567000056710000001420013073417720021261 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt import mock from rally.common.plugin import plugin from rally.task import sla from tests.unit import test @plugin.configure(name="test_criterion") class TestCriterion(sla.SLA): CONFIG_SCHEMA = {"type": "integer"} def add_iteration(self, iteration): self.success = self.criterion_value == iteration return self.success def merge(self, other): raise NotImplementedError() def details(self): return "detail" @ddt.ddt class SLACheckerTestCase(test.TestCase): def test_add_iteration_and_results(self): sla_checker = sla.SLAChecker({"sla": {"test_criterion": 42}}) iteration = {"key": {"name": "fake", "pos": 0}, "data": 42} self.assertTrue(sla_checker.add_iteration(iteration["data"])) expected_result = [{"criterion": "test_criterion", "detail": "detail", "success": True}] self.assertEqual(expected_result, sla_checker.results()) iteration["data"] = 43 self.assertFalse(sla_checker.add_iteration(iteration["data"])) expected_result = [{"criterion": "test_criterion", "detail": "detail", "success": False}] self.assertEqual(expected_result, sla_checker.results()) def test_set_unexpected_failure(self): exc = "error;(" sla_checker = sla.SLAChecker({"sla": {}}) self.assertEqual([], sla_checker.results()) sla_checker.set_unexpected_failure(exc) self.assertEqual([{"criterion": "something_went_wrong", "success": False, "detail": "Unexpected error: %s" % exc}], sla_checker.results()) def test_set_aborted_on_sla(self): sla_checker = sla.SLAChecker({"sla": {}}) self.assertEqual([], sla_checker.results()) sla_checker.set_aborted_on_sla() self.assertEqual( [{"criterion": "aborted_on_sla", "success": False, "detail": "Task was aborted due to SLA failure(s)."}], sla_checker.results()) def test_set_aborted_manually(self): sla_checker = sla.SLAChecker({"sla": {}}) self.assertEqual([], sla_checker.results()) sla_checker.set_aborted_manually() self.assertEqual( [{"criterion": "aborted_manually", "success": False, "detail": "Task was aborted due to abort signal."}], sla_checker.results()) 
def test__format_result(self): name = "some_name" success = True detail = "some details" self.assertEqual({"criterion": name, "success": success, "detail": detail}, sla._format_result(name, success, detail)) def test__validate_config_positive(self): sla_checker = sla.SLAChecker({"sla": {}}) another_sla_checker = sla.SLAChecker({"sla": {}}) sla_checker._validate_config(another_sla_checker) def test__validate_config_negative(self): sla_checker = sla.SLAChecker({"sla": {}}) another_sla_checker = sla.SLAChecker({"sla": {"test_criterion": 42}}) self.assertRaises(TypeError, sla_checker._validate_config, another_sla_checker) def test__validate_sla_types(self): sla_checker = sla.SLAChecker({"sla": {}}) mock_sla1 = mock.MagicMock() mock_sla2 = mock.MagicMock() sla_checker.sla_criteria = [mock_sla1, mock_sla2] another_sla_checker = sla.SLAChecker({"sla": {}}) mock_sla3 = mock.MagicMock() mock_sla4 = mock.MagicMock() another_sla_checker.sla_criteria = [mock_sla3, mock_sla4] sla_checker._validate_sla_types(another_sla_checker) mock_sla1.assert_has_calls([ mock.call.validate_type(mock_sla3) ]) mock_sla1.validate_type.assert_called_once_with(mock_sla3) mock_sla2.validate_type.assert_called_once_with(mock_sla4) @ddt.data({"merge_result1": True, "merge_result2": True, "result": True}, {"merge_result1": True, "merge_result2": False, "result": False}, {"merge_result1": False, "merge_result2": False, "result": False}) @ddt.unpack def test_merge(self, merge_result1, merge_result2, result): sla_checker = sla.SLAChecker({"sla": {}}) mock_sla1 = mock.MagicMock() mock_sla2 = mock.MagicMock() sla_checker.sla_criteria = [mock_sla1, mock_sla2] mock_sla1.merge.return_value = merge_result1 mock_sla2.merge.return_value = merge_result2 another_sla_checker = sla.SLAChecker({"sla": {}}) mock_sla3 = mock.MagicMock() mock_sla4 = mock.MagicMock() another_sla_checker.sla_criteria = [mock_sla3, mock_sla4] sla_checker._validate_config = mock.MagicMock() sla_checker._validate_sla_types = mock.MagicMock() 
self.assertEqual(result, sla_checker.merge(another_sla_checker)) mock_sla1.merge.assert_called_once_with(mock_sla3) mock_sla2.merge.assert_called_once_with(mock_sla4) class SLATestCase(test.TestCase): def test_validate_type_positive(self): sla1 = TestCriterion(0) sla2 = TestCriterion(0) sla1.validate_type(sla2) def test_validate_type_negative(self): sla1 = TestCriterion(0) class AnotherTestCriterion(TestCriterion): pass sla2 = AnotherTestCriterion(0) self.assertRaises(TypeError, sla1.validate_type, sla2) rally-0.9.1/tests/unit/task/__init__.py0000664000567000056710000000000013073417717021201 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/task/test_atomic.py0000664000567000056710000001024213073417720021760 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections import mock from rally.task import atomic from tests.unit import test class ActionTimerMixinTestCase(test.TestCase): def test_atomic_actions(self): inst = atomic.ActionTimerMixin() self.assertEqual(inst._atomic_actions, inst.atomic_actions()) class AtomicActionTestCase(test.TestCase): @mock.patch("time.time", side_effect=[1, 3, 6, 10, 15, 21]) def test_action_timer_context(self, mock_time): inst = atomic.ActionTimerMixin() with atomic.ActionTimer(inst, "test"): with atomic.ActionTimer(inst, "test"): with atomic.ActionTimer(inst, "some"): pass expected = [("test", 20), ("test (2)", 12), ("some", 4)] self.assertEqual(collections.OrderedDict(expected), inst.atomic_actions()) @mock.patch("time.time", side_effect=[1, 3]) def test_action_timer_context_with_exception(self, mock_time): inst = atomic.ActionTimerMixin() class TestException(Exception): pass try: with atomic.ActionTimer(inst, "test"): raise TestException("test") except TestException: pass expected = [("test", 2)] self.assertEqual(collections.OrderedDict(expected), inst.atomic_actions()) @mock.patch("time.time", side_effect=[1, 3]) def test_action_timer_decorator(self, mock_time): class Some(atomic.ActionTimerMixin): @atomic.action_timer("some") def some_func(self, a, b): return a + b inst = Some() self.assertEqual(5, inst.some_func(2, 3)) self.assertEqual(collections.OrderedDict({"some": 2}), inst.atomic_actions()) @mock.patch("time.time", side_effect=[1, 3]) def test_action_timer_decorator_with_exception(self, mock_time): class TestException(Exception): pass class TestTimer(atomic.ActionTimerMixin): @atomic.action_timer("test") def some_func(self): raise TestException("test") inst = TestTimer() self.assertRaises(TestException, inst.some_func) self.assertEqual(collections.OrderedDict({"test": 2}), inst.atomic_actions()) @mock.patch("time.time", side_effect=[1, 3, 1, 3]) def test_optional_action_timer_decorator(self, mock_time): class TestAtomicTimer(atomic.ActionTimerMixin): 
@atomic.optional_action_timer("some") def some_func(self, a, b): return a + b @atomic.optional_action_timer("some", argument_name="foo", default=False) def other_func(self, a, b): return a + b inst = TestAtomicTimer() self.assertEqual(5, inst.some_func(2, 3)) self.assertEqual(collections.OrderedDict({"some": 2}), inst.atomic_actions()) inst = TestAtomicTimer() self.assertEqual(5, inst.some_func(2, 3, atomic_action=False)) self.assertEqual(collections.OrderedDict(), inst.atomic_actions()) inst = TestAtomicTimer() self.assertEqual(5, inst.other_func(2, 3)) self.assertEqual(collections.OrderedDict(), inst.atomic_actions()) inst = TestAtomicTimer() self.assertEqual(5, inst.other_func(2, 3, foo=True)) self.assertEqual(collections.OrderedDict({"some": 2}), inst.atomic_actions()) rally-0.9.1/tests/unit/task/test_trigger.py0000664000567000056710000000560613073417720022157 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for Trigger base class.""" import ddt import jsonschema import mock from rally.task import trigger from tests.unit import test @trigger.configure(name="dummy_trigger") class DummyTrigger(trigger.Trigger): CONFIG_SCHEMA = {"type": "array", "minItems": 1, "uniqueItems": True, "items": { "type": "integer", "minimum": 0, }} def get_listening_event(self): return "dummy" def on_event(self, event_type, value=None): if value not in self.config: return super(DummyTrigger, self).on_event(event_type, value) @ddt.ddt class TriggerTestCase(test.TestCase): @ddt.data(({"name": "dummy_trigger", "args": [5]}, True), ({"name": "dummy_trigger", "args": ["str"]}, False)) @ddt.unpack def test_validate(self, config, valid): if valid: trigger.Trigger.validate(config) else: self.assertRaises(jsonschema.ValidationError, trigger.Trigger.validate, config) def test_on_event_and_get_results(self): # get_results requires launched hooks, so if we want to test it, we # need to duplicate all calls on_event. It is redundant, so let's merge # test_on_event and test_get_results in one test. right_values = [5, 7, 12, 13] cfg = {"trigger": {"args": right_values}} task = mock.MagicMock() hook_cls = mock.MagicMock(__name__="fake") dummy_trigger = DummyTrigger(cfg, task, hook_cls) for i in range(0, 20): dummy_trigger.on_event("fake", i) self.assertEqual( [mock.call(task, {}, {"event_type": "fake", "value": i}) for i in right_values], hook_cls.call_args_list) self.assertEqual(len(right_values), hook_cls.return_value.run_async.call_count) hook_status = hook_cls.return_value.result.return_value["status"] self.assertEqual( {"config": cfg, "results": [hook_cls.return_value.result.return_value] * len(right_values), "summary": {hook_status: len(right_values)}}, dummy_trigger.get_results()) rally-0.9.1/tests/unit/task/test_validation.py0000775000567000056710000012572413073417720022655 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import ddt from glanceclient import exc as glance_exc import mock from novaclient import exceptions as nova_exc import six from rally.common.plugin import plugin from rally import consts from rally import exceptions import rally.osclients from rally.task import validation from tests.unit import fakes from tests.unit import test MODULE = "rally.task.validation." class ValidationUtilsTestCase(test.TestCase): def test_validator(self): @plugin.from_func() def scenario(): pass scenario._meta_init() def validator_func(config, clients, deployment, a, b, c, d): return (config, clients, deployment, a, b, c, d) validator = validation.validator(validator_func) self.assertEqual(scenario, validator("a", "b", "c", d=1)(scenario)) self.assertEqual(1, len(scenario._meta_get("validators"))) self.assertEqual( ("conf", "client", "deploy", "a", "b", "c", 1), scenario._meta_get("validators")[0]("conf", "client", "deploy")) @ddt.ddt class ValidatorsTestCase(test.TestCase): def _unwrap_validator(self, validator, *args, **kwargs): @plugin.from_func() def func(): pass func._meta_init() validator(*args, **kwargs)(func) return func._meta_get("validators")[0] def test_number_not_nullable(self): validator = self._unwrap_validator(validation.number, param_name="n") self.assertFalse(validator({}, None, None).is_valid) def test_number_nullable(self): validator = self._unwrap_validator(validation.number, param_name="n", nullable=True) 
self.assertTrue(validator({}, None, None).is_valid) def test_number_min_max_value(self): validator = self._unwrap_validator(validation.number, param_name="a", minval=4, maxval=10) result = validator({"args": {"a": 3.9}}, None, None) self.assertFalse(result.is_valid, result.msg) result = validator({"args": {"a": 4.1}}, None, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"a": 11}}, None, None) self.assertFalse(result.is_valid, result.msg) def test_number_integer_only(self): validator = self._unwrap_validator(validation.number, param_name="b", integer_only=True) result = validator({"args": {"b": 3.9}}, None, None) self.assertFalse(result.is_valid, result.msg) result = validator({"args": {"b": 3}}, None, None) self.assertTrue(result.is_valid, result.msg) @mock.patch(MODULE + "os.access") def test__file_access_ok(self, mock_access): mock_access.return_value = True result = validation._file_access_ok( "foobar", os.R_OK, "p", False) self.assertTrue(result.is_valid, result.msg) @mock.patch(MODULE + "os.access") def test__file_access_not_found(self, mock_access): mock_access.return_value = False result = validation._file_access_ok( "foobar", os.R_OK, "p", False) self.assertFalse(result.is_valid, result.msg) @mock.patch(MODULE + "_file_access_ok") def test_file_exists(self, mock__file_access_ok): mock__file_access_ok.return_value = "foobar" validator = self._unwrap_validator(validation.file_exists, param_name="p", required=False) result = validator({"args": {"p": "test_file"}}, None, None) self.assertEqual("foobar", result) mock__file_access_ok.assert_called_once_with( "test_file", os.R_OK, "p", False) @ddt.data({"raises_message": "Command must be a dictionary"}, {"command": "foo", "raises_message": "Command must be a dictionary"}, {"command": {"interpreter": "foobar", "script_file": "foo", "script_inline": "bar"}, "raises_message": "Exactly one of "}, {"command": {"script_file": "foobar"}, "raises_message": "Supplied dict specifies no"}, 
{"command": {"script_inline": "foobar", "interpreter": "foo", "local_path": "bar"}, "raises_message": "When uploading an interpreter its path"}, {"command": {"interpreter": "/bin/bash", "script_path": "foo"}, "raises_message": ("Unexpected command parameters: " "script_path")}, {"command": {"script_inline": "foobar", "interpreter": ["ENV=bar", "/bin/foo"], "local_path": "bar", "remote_path": "/bin/foo"}}, {"command": {"script_inline": "foobar", "interpreter": "foo"}}) @ddt.unpack def test_check_command_dict(self, command=None, raises_message=None): if raises_message: e = self.assertRaises( ValueError, validation.check_command_dict, command) self.assertIn(raises_message, str(e)) else: self.assertIsNone(validation.check_command_dict(command)) @mock.patch("rally.task.validation._file_access_ok") def test_valid_command(self, mock__file_access_ok): validator = self._unwrap_validator(validation.valid_command, param_name="p") mock__file_access_ok.return_value = validation.ValidationResult(True) command = {"script_file": "foobar", "interpreter": "foo"} result = validator({"args": {"p": command}}, None, None) self.assertTrue(result.is_valid, result.msg) mock__file_access_ok.assert_called_once_with( filename="foobar", mode=os.R_OK, param_name="p.script_file", required=True) def test_valid_command_not_required(self): validator = self._unwrap_validator(validation.valid_command, param_name="p", required=False) result = validator({"args": {"p": None}}, None, None) self.assertTrue(result.is_valid) def test_valid_command_required(self): validator = self._unwrap_validator(validation.valid_command, param_name="p") result = validator({"args": {"p": None}}, None, None) self.assertFalse(result.is_valid, result.msg) @mock.patch("rally.task.validation._file_access_ok") def test_valid_command_unreadable_script_file(self, mock__file_access_ok): mock__file_access_ok.return_value = validation.ValidationResult(False) validator = self._unwrap_validator(validation.valid_command, param_name="p") 
command = {"script_file": "foobar", "interpreter": "foo"} result = validator({"args": {"p": command}}, None, None) self.assertFalse(result.is_valid, result.msg) @mock.patch("rally.task.validation.check_command_dict") def test_valid_command_fail_check_command_dict(self, mock_check_command_dict): validator = self._unwrap_validator(validation.valid_command, param_name="p") mock_check_command_dict.side_effect = ValueError("foobar") command = {"foo": "bar"} result = validator({"args": {"p": command}}, None, None) self.assertFalse(result.is_valid, result.msg) self.assertEqual("foobar", result.msg) def test_valid_command_script_inline(self): validator = self._unwrap_validator(validation.valid_command, param_name="p") command = {"script_inline": "bar", "interpreter": "/bin/sh"} result = validator({"args": {"p": command}}, None, None) self.assertTrue(result.is_valid, result.msg) @mock.patch("rally.task.validation._file_access_ok") def test_valid_command_local_path(self, mock__file_access_ok): mock__file_access_ok.return_value = validation.ValidationResult(False) validator = self._unwrap_validator(validation.valid_command, param_name="p") command = {"remote_path": "bar", "local_path": "foobar"} result = validator({"args": {"p": command}}, None, None) self.assertFalse(result.is_valid, result.msg) mock__file_access_ok.assert_called_once_with( filename="foobar", mode=os.R_OK, param_name="p.local_path", required=True) def test__get_validated_image_no_value_in_config(self): result = validation._get_validated_image({}, None, "non_existing") self.assertFalse(result[0].is_valid, result[0].msg) def test__get_validated_image_from_context(self): clients = mock.MagicMock() image = { "size": 0, "min_ram": 0, "min_disk": 0 } result = validation._get_validated_image({"args": { "image": {"name": "foo"}}, "context": { "images": { "image_name": "foo"} }}, clients, "image") self.assertTrue(result[0].is_valid, result[0].msg) self.assertEqual(result[1], image) result = 
validation._get_validated_image({"args": { "image": {"regex": r"^foo$"}}, "context": { "images": { "image_name": "foo"} }}, clients, "image") self.assertTrue(result[0].is_valid, result[0].msg) self.assertEqual(result[1], image) @mock.patch(MODULE + "openstack_types.GlanceImage.transform", return_value="image_id") def test__get_validated_image(self, mock_glance_image_transform): clients = mock.MagicMock() clients.glance().images.get().to_dict.return_value = { "image": "image_id"} result = validation._get_validated_image({"args": {"a": "test"}, "context": { "image_name": "foo"}}, clients, "a") self.assertTrue(result[0].is_valid, result[0].msg) self.assertEqual({"image": "image_id", "min_disk": 0, "min_ram": 0, "size": 0}, result[1]) mock_glance_image_transform.assert_called_once_with( clients=clients, resource_config="test") clients.glance().images.get.assert_called_with("image_id") @mock.patch(MODULE + "openstack_types.GlanceImage.transform", side_effect=exceptions.InvalidScenarioArgument) def test__get_validated_image_transform_error( self, mock_glance_image_transform): result = validation._get_validated_image({"args": {"a": "test"}}, None, "a") self.assertFalse(result[0].is_valid, result[0].msg) @mock.patch(MODULE + "openstack_types.GlanceImage.transform") def test__get_validated_image_not_found( self, mock_glance_image_transform): clients = mock.MagicMock() clients.glance().images.get().to_dict.side_effect = ( glance_exc.HTTPNotFound("")) result = validation._get_validated_image({"args": {"a": "test"}}, clients, "a") self.assertFalse(result[0].is_valid, result[0].msg) def test__get_validated_flavor_no_value_in_config(self): result = validation._get_validated_flavor({}, None, "non_existing") self.assertFalse(result[0].is_valid, result[0].msg) @mock.patch(MODULE + "openstack_types.Flavor.transform", return_value="flavor_id") def test__get_validated_flavor( self, mock_flavor_transform): clients = mock.MagicMock() clients.nova().flavors.get.return_value = "flavor" 
result = validation._get_validated_flavor({"args": {"a": "test"}}, clients, "a") self.assertTrue(result[0].is_valid, result[0].msg) self.assertEqual(result[1], "flavor") mock_flavor_transform.assert_called_once_with( clients=clients, resource_config="test") clients.nova().flavors.get.assert_called_once_with(flavor="flavor_id") @mock.patch(MODULE + "openstack_types.Flavor.transform", side_effect=exceptions.InvalidScenarioArgument) def test__get_validated_flavor_transform_error( self, mock_flavor_transform): result = validation._get_validated_flavor({"args": {"a": "test"}}, None, "a") self.assertFalse(result[0].is_valid, result[0].msg) @mock.patch(MODULE + "openstack_types.Flavor.transform") def test__get_validated_flavor_not_found( self, mock_flavor_transform): clients = mock.MagicMock() clients.nova().flavors.get.side_effect = nova_exc.NotFound("") result = validation._get_validated_flavor({"args": {"a": "test"}}, clients, "a") self.assertFalse(result[0].is_valid, result[0].msg) @mock.patch(MODULE + "openstack_types.Flavor.transform") def test__get_validated_flavor_from_context( self, mock_flavor_transform): clients = mock.MagicMock() clients.nova().flavors.get.side_effect = nova_exc.NotFound("") config = { "args": {"flavor": {"name": "test"}}, "context": { "flavors": [{ "name": "test", "ram": 32, }] } } result = validation._get_validated_flavor(config, clients, "flavor") self.assertTrue(result[0].is_valid, result[0].msg) @mock.patch(MODULE + "openstack_types.Flavor.transform") def test__get_validated_flavor_from_context_failed( self, mock_flavor_transform): clients = mock.MagicMock() clients.nova().flavors.get.side_effect = nova_exc.NotFound("") config = { "args": {"flavor": {"name": "test"}}, "context": { "flavors": [{ "name": "othername", "ram": 32, }] } } result = validation._get_validated_flavor(config, clients, "flavor") self.assertFalse(result[0].is_valid, result[0].msg) config = { "args": {"flavor": {"name": "test"}}, } result = 
validation._get_validated_flavor(config, clients, "flavor") self.assertFalse(result[0].is_valid, result[0].msg) @ddt.data("nfS", "Cifs", "GLUSTERFS", "hdfs") def test_validate_share_proto_valid(self, share_proto): validator = self._unwrap_validator(validation.validate_share_proto) result = validator( {"args": {"share_proto": share_proto}}, "clients", "deployment") self.assertTrue(result.is_valid, result.msg) @ddt.data( *([{"args": {"share_proto": v}} for v in ( None, "", "nfsfoo", "foonfs", "nfscifs", )] + [{}, {"args": {}}]) ) def test_validate_share_proto_invalid(self, config): validator = self._unwrap_validator(validation.validate_share_proto) result = validator(config, "clients", "deployment") self.assertFalse(result.is_valid, result.msg) def test_image_exists(self): validator = self._unwrap_validator(validation.image_exists, "param") result = validator({}, "clients", "deployment") self.assertFalse(result.is_valid, result.msg) def test_image_exists_nullable(self): validator = self._unwrap_validator(validation.image_exists, "param", nullable=True) result = validator({}, "clients", "deployment") self.assertTrue(result.is_valid, result.msg) def test_image_exists_failed(self): validator = self._unwrap_validator(validation.image_exists, "param") result = validator({}, None, None) self.assertFalse(result.is_valid) def test_flavor_exists(self): validator = self._unwrap_validator(validation.flavor_exists, "param") result = validator({}, "clients", "deployment") self.assertFalse(result.is_valid, result.msg) def test_image_valid_on_flavor_flavor_or_image_not_specified(self): validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image") result = validator({}, None, None) self.assertFalse(result.is_valid, result.msg) result = validator({"args": {"flavor": {"id": 11}}}, mock.MagicMock(), None) self.assertFalse(result.is_valid, result.msg) @mock.patch(MODULE + "_get_validated_image") @mock.patch(MODULE + "_get_validated_flavor") def 
test_image_valid_on_flavor(self, mock__get_validated_flavor, mock__get_validated_image): image = { "id": "fake_id", "min_ram": None, "size": 2, "min_disk": 0 } flavor = mock.MagicMock() success = validation.ValidationResult(True) fail = validation.ValidationResult(False) mock__get_validated_flavor.return_value = (success, flavor) mock__get_validated_image.return_value = (fail, image) validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image") result = validator(None, None, None) self.assertFalse(result.is_valid, result.msg) mock__get_validated_image.return_value = (success, image) # test flavor.disk None validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image") flavor.disk = None flavor.ram = 2 image["min_ram"] = 4 result = validator(None, None, None) self.assertFalse(result.is_valid, result.msg) image["min_ram"] = 1 result = validator(None, None, None) self.assertTrue(result.is_valid, result.msg) # test validate_disk false validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image", False) flavor.disk = 1 flavor.ram = 2 image["min_ram"] = 4 result = validator(None, None, None) self.assertFalse(result.is_valid, result.msg) image["min_ram"] = 1 result = validator(None, None, None) self.assertTrue(result.is_valid, result.msg) # test validate_disk true and flavor.disk not None validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image") image["size"] = 2 image["min_disk"] = 0 flavor.disk = 5.0 / (1024 ** 3) result = validator(None, None, None) self.assertTrue(result.is_valid, result.msg) image["min_disk"] = flavor.disk * 2 result = validator(None, None, None) self.assertFalse(result.is_valid, result.msg) image["min_disk"] = flavor.disk / 4 image["size"] = 1000 result = validator(None, None, None) self.assertFalse(result.is_valid, result.msg) @mock.patch(MODULE + "openstack_types.Flavor.transform") @mock.patch(MODULE + "_get_validated_image") def 
test_image_valid_on_flavor_context( self, mock__get_validated_image, mock_flavor_transform): clients = mock.MagicMock() clients.nova().flavors.get.side_effect = nova_exc.NotFound("") image = {"min_ram": 24, "id": "fake_id"} success = validation.ValidationResult(True) mock__get_validated_image.return_value = (success, image) validator = self._unwrap_validator(validation.image_valid_on_flavor, "flavor", "image") config = { "args": {"flavor": {"name": "test"}}, "context": { "flavors": [{ "name": "test", "ram": 32, }] } } # test ram image["min_ram"] = 64 result = validator(config, clients, None) self.assertFalse(result.is_valid, result.msg) def test_network_exists(self): validator = self._unwrap_validator(validation.network_exists, "net") net1 = mock.MagicMock() net1.label = "private" net2 = mock.MagicMock() net2.label = "custom" clients = mock.MagicMock() clients.nova().networks.list.return_value = [net1, net2] result = validator({}, clients, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"net": "custom"}}, clients, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"net": "custom2"}}, clients, None) self.assertFalse(result.is_valid, result.msg) def test_external_network_exists(self): validator = self._unwrap_validator( validation.external_network_exists, "name") result = validator({"args": {}}, None, None) self.assertTrue(result.is_valid, result.msg) clients = mock.MagicMock() net1 = mock.MagicMock() net2 = mock.MagicMock() clients.nova().floating_ip_pools.list.return_value = [net1, net2] net1.name = "public" net2.name = "custom" result = validator({}, clients, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"name": "custom"}}, clients, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"name": "non_exist"}}, clients, None) self.assertFalse(result.is_valid, result.msg) net1.name = {"name": "public"} net2.name = {"name": "custom"} result = 
validator({"args": {"name": "custom"}}, clients, None) self.assertTrue(result.is_valid, result.msg) def test_required_parameters(self): validator = self._unwrap_validator(validation.required_parameters, "a", "b") result = validator({"args": {"a": 1, "b": 2, "c": 3}}, None, None) self.assertTrue(result.is_valid, result.msg) result = validator({"args": {"a": 1, "c": 3}}, None, None) self.assertFalse(result.is_valid, result.msg) @mock.patch("rally.common.objects.Credential") def test_required_service(self, mock_credential): validator = self._unwrap_validator(validation.required_services, consts.Service.KEYSTONE, consts.Service.NOVA, consts.Service.NOVA_NET) clients = mock.MagicMock() clients.services().values.return_value = [consts.Service.KEYSTONE, consts.Service.NOVA, consts.Service.NOVA_NET] fake_service = mock.Mock(binary="nova-network", status="enabled") with mock.patch("rally.osclients.Clients") as clients_cls: nova_client = clients_cls.return_value.nova.return_value nova_client.services.list.return_value = [fake_service] deployment = fakes.FakeDeployment(admin={"info": "admin"}) result = validator({}, clients, deployment) clients_cls.assert_called_once_with(mock_credential.return_value) mock_credential.assert_called_once_with(info="admin") self.assertTrue(result.is_valid, result.msg) validator = self._unwrap_validator(validation.required_services, consts.Service.KEYSTONE, consts.Service.NOVA) clients.services().values.return_value = [consts.Service.KEYSTONE] with mock.patch("rally.osclients.Clients") as clients_cls: result = validator({}, clients, None) self.assertFalse(clients_cls.called) self.assertFalse(result.is_valid, result.msg) def test_required_service_wrong_service(self): validator = self._unwrap_validator(validation.required_services, consts.Service.KEYSTONE, consts.Service.NOVA, "lol") clients = mock.MagicMock() result = validator({}, clients, None) self.assertFalse(result.is_valid, result.msg) def test_required_contexts(self): validator = 
self._unwrap_validator(validation.required_contexts, "c1", "c2", "c3") result = validator({"context": {"a": 1}}, None, None) self.assertFalse(result.is_valid, result.msg) result = validator({"context": {"c1": 1, "c2": 2, "c3": 3}}, None, None) self.assertTrue(result.is_valid, result.msg) result = validator({"context": {"c1": 1, "c2": 2, "c3": 3, "a": 1}}, None, None) self.assertTrue(result.is_valid, result.msg) def test_required_contexts_with_or(self): validator = self._unwrap_validator(validation.required_contexts, ("a1", "a2"), "c1", ("b1", "b2"), "c2") result = validator({"context": {"c1": 1, "c2": 2}}, None, None) self.assertFalse(result.is_valid, result.msg) result = validator({"context": {"c1": 1, "c2": 2, "c3": 3, "b1": 1, "a1": 1}}, None, None) self.assertTrue(result.is_valid, result.msg) result = validator({"context": {"c1": 1, "c2": 2, "c3": 3, "b1": 1, "b2": 2, "a1": 1}}, None, None) self.assertTrue(result.is_valid, result.msg) def test_required_param_or_context(self): validator = self._unwrap_validator( validation.required_param_or_context, "image", "custom_image") result = validator({"args": {"image": {"name": ""}}, "context": {"custom_image": { "name": "fake_image"}}}, None, None) self.assertTrue(result.is_valid) result = validator({"context": {"custom_image": { "name": "fake_image"}}}, None, None) self.assertTrue(result.is_valid) validator = self._unwrap_validator( validation.required_param_or_context, "image", "custom_image") result = validator({"args": {"image": {"name": "fake_image"}}, "context": {"custom_image": ""}}, None, None) self.assertTrue(result.is_valid) result = validator({"args": {"image": {"name": "fake_image"}}}, None, None) self.assertTrue(result.is_valid) validator = self._unwrap_validator( validation.required_param_or_context, "image", "custom_image") result = validator({"args": {"image": {"name": ""}}, "context": {"custom_image": {"name": ""}}}, None, None) self.assertTrue(result.is_valid) validator = self._unwrap_validator( 
validation.required_param_or_context, "image", "custom_image") result = validator({}, None, None) self.assertFalse(result.is_valid) def test_required_openstack_with_admin(self): validator = self._unwrap_validator(validation.required_openstack, admin=True) # admin presented in deployment fake_deployment = fakes.FakeDeployment(admin="admin_credential") self.assertTrue(validator(None, None, fake_deployment).is_valid) # admin not presented in deployment fake_deployment = fakes.FakeDeployment(users=["u1", "h2"]) self.assertFalse(validator(None, None, fake_deployment).is_valid) def test_required_openstack_with_users(self): validator = self._unwrap_validator(validation.required_openstack, users=True) # users presented in deployment fake_deployment = fakes.FakeDeployment( admin=None, users=["u_credential"]) self.assertTrue(validator({}, None, fake_deployment).is_valid) # admin and users presented in deployment fake_deployment = fakes.FakeDeployment(admin="a", users=["u1", "h2"]) self.assertTrue(validator({}, None, fake_deployment).is_valid) # admin and user context fake_deployment = fakes.FakeDeployment(admin="a", users=[]) context = {"context": {"users": True}} self.assertTrue(validator(context, None, fake_deployment).is_valid) # just admin presented fake_deployment = fakes.FakeDeployment(admin="a", users=[]) self.assertFalse(validator({}, None, fake_deployment).is_valid) def test_required_openstack_with_admin_and_users(self): validator = self._unwrap_validator(validation.required_openstack, admin=True, users=True) fake_deployment = fakes.FakeDeployment(admin="a", users=[]) self.assertFalse(validator({}, None, fake_deployment).is_valid) fake_deployment = fakes.FakeDeployment(admin="a", users=["u"]) self.assertTrue(validator({}, None, fake_deployment).is_valid) # admin and user context fake_deployment = fakes.FakeDeployment(admin="a", users=[]) context = {"context": {"users": True}} self.assertTrue(validator(context, None, fake_deployment).is_valid) def 
test_required_openstack_invalid(self): validator = self._unwrap_validator(validation.required_openstack) self.assertFalse(validator(None, None, None).is_valid) def test_volume_type_exists(self): validator = self._unwrap_validator(validation.volume_type_exists, param_name="volume_type") clients = mock.MagicMock() clients.cinder().volume_types.list.return_value = [] context = {"args": {"volume_type": False}} result = validator(context, clients, mock.MagicMock()) self.assertTrue(result.is_valid, result.msg) def test_volume_type_exists_check_types(self): validator = self._unwrap_validator(validation.volume_type_exists, param_name="volume_type") clients = mock.MagicMock() clients.cinder().volume_types.list.return_value = ["type"] context = {"args": {"volume_type": True}} result = validator(context, clients, mock.MagicMock()) self.assertTrue(result.is_valid, result.msg) def test_volume_type_exists_check_types_no_types_exist(self): validator = self._unwrap_validator(validation.volume_type_exists, param_name="volume_type") clients = mock.MagicMock() clients.cinder().volume_types.list.return_value = [] context = {"args": {"volume_type": True}} result = validator(context, clients, mock.MagicMock()) self.assertFalse(result.is_valid, result.msg) @mock.patch(MODULE + "osclients") def test_required_clients(self, mock_osclients): validator = self._unwrap_validator(validation.required_clients, "keystone", "nova") clients = mock.MagicMock() clients.keystone.return_value = "keystone" clients.nova.return_value = "nova" deployment = fakes.FakeDeployment() result = validator({}, clients, deployment) self.assertTrue(result.is_valid, result.msg) self.assertFalse(mock_osclients.Clients.called) clients.nova.side_effect = ImportError result = validator({}, clients, deployment) self.assertFalse(result.is_valid, result.msg) @mock.patch(MODULE + "objects") @mock.patch(MODULE + "osclients") def test_required_clients_with_admin(self, mock_osclients, mock_objects): validator = 
self._unwrap_validator(validation.required_clients, "keystone", "nova", admin=True) clients = mock.Mock() clients.keystone.return_value = "keystone" clients.nova.return_value = "nova" mock_osclients.Clients.return_value = clients mock_objects.Credential.return_value = "foo_credential" deployment = fakes.FakeDeployment(admin={"foo": "bar"}) result = validator({}, clients, deployment) self.assertTrue(result.is_valid, result.msg) mock_objects.Credential.assert_called_once_with(foo="bar") mock_osclients.Clients.assert_called_once_with("foo_credential") clients.nova.side_effect = ImportError result = validator({}, clients, deployment) self.assertFalse(result.is_valid, result.msg) @ddt.data( {"ext_validate": "existing_extension", "validation_result": True}, {"ext_validate": "absent_extension", "validation_result": False}, ) @ddt.unpack def test_required_neutron_extensions(self, ext_validate, validation_result): validator = self._unwrap_validator( validation.required_neutron_extensions, ext_validate) clients = mock.Mock() clients.neutron.return_value.list_extensions.return_value = ( {"extensions": [{"alias": "existing_extension"}]}) result = validator({}, clients, {}) self.assertEqual(result.is_valid, validation_result) def test_required_cinder_services(self): validator = self._unwrap_validator( validation.required_cinder_services, service_name=six.text_type("cinder-service")) with mock.patch.object(rally.osclients.Cinder, "create_client") as c: fake_service = mock.Mock(binary="cinder-service", state="up") cinder_client = mock.Mock() services = mock.Mock() services.list.return_value = [fake_service] cinder_client.services = services c.return_value = cinder_client deployment = fakes.FakeDeployment( admin={"auth_url": "fake_credential", "username": "username", "password": "password"}) result = validator({}, None, deployment) self.assertTrue(result.is_valid, result.msg) fake_service.state = "down" result = validator({}, None, deployment) self.assertFalse(result.is_valid, 
result.msg) def test_restricted_parameters(self): validator = self._unwrap_validator( validation.restricted_parameters, ["param_name"]) result = validator({"args": {}}, None, None) self.assertTrue(result.is_valid, result.msg) def test_restricted_parameters_negative(self): validator = self._unwrap_validator( validation.restricted_parameters, ["param_name"]) result = validator({"args": {"param_name": "value"}}, None, None) self.assertFalse(result.is_valid, result.msg) def test_restricted_parameters_in_dict(self): validator = self._unwrap_validator( validation.restricted_parameters, ["param_name"], "subdict") result = validator({"args": {"subdict": {}}}, None, None) self.assertTrue(result.is_valid, result.msg) def test_restricted_parameters_in_dict_negative(self): validator = self._unwrap_validator( validation.restricted_parameters, ["param_name"], "subdict") result = validator({"args": {"subdict": {"param_name": "value"}}}, None, None) self.assertFalse(result.is_valid, result.msg) def test_restricted_parameters_string_param_names(self): validator = self._unwrap_validator( validation.restricted_parameters, "param_name") result = validator({"args": {}}, None, None) self.assertTrue(result.is_valid, result.msg) @ddt.data( {"exception_msg": "Heat template validation failed on fake_path1. 
" "Original error message: fake_msg."}, {"exception_msg": None} ) @ddt.unpack @mock.patch(MODULE + "os.path.exists", return_value=True) @mock.patch(MODULE + "open", side_effect=mock.mock_open(), create=True) def test_validate_heat_template(self, mock_open, mock_exists, exception_msg): validator = self._unwrap_validator( validation.validate_heat_template, "template_path1", "template_path2") clients = mock.MagicMock() mock_open().__enter__().read.side_effect = ["fake_template1", "fake_template2"] heat_validator = mock.MagicMock() if exception_msg: heat_validator.side_effect = Exception("fake_msg") clients.heat().stacks.validate = heat_validator context = {"args": {"template_path1": "fake_path1", "template_path2": "fake_path2"}} result = validator(context, clients, mock.MagicMock()) if not exception_msg: heat_validator.assert_has_calls([ mock.call(template="fake_template1"), mock.call(template="fake_template2") ]) mock_open.assert_has_calls([ mock.call("fake_path1", "r"), mock.call("fake_path2", "r") ], any_order=True) self.assertTrue(result.is_valid, result.msg) else: heat_validator.assert_called_once_with(template="fake_template1") self.assertEqual("Heat template validation failed on fake_path1." 
" Original error message: fake_msg.", result.msg) self.assertFalse(result.is_valid) def _get_keystone_v2_mock_client(self): keystone = mock.Mock() del keystone.projects keystone.tenants = mock.Mock() return keystone def _get_keystone_v3_mock_client(self): keystone = mock.Mock() del keystone.tenants keystone.projects = mock.Mock() return keystone def test_required_api_versions_keystonev2(self): validator = self._unwrap_validator( validation.required_api_versions, component="keystone", versions=[2.0]) clients = mock.MagicMock() clients.keystone.return_value = self._get_keystone_v3_mock_client() self.assertFalse(validator({}, clients, None).is_valid) clients.keystone.return_value = self._get_keystone_v2_mock_client() self.assertTrue(validator({}, clients, None).is_valid) def test_required_api_versions_keystonev3(self): validator = self._unwrap_validator( validation.required_api_versions, component="keystone", versions=[3]) clients = mock.MagicMock() clients.keystone.return_value = self._get_keystone_v2_mock_client() self.assertFalse(validator({}, clients, None).is_valid) clients.keystone.return_value = self._get_keystone_v3_mock_client() self.assertTrue(validator({}, clients, None).is_valid) def test_required_api_versions_keystone_all_versions(self): validator = self._unwrap_validator( validation.required_api_versions, component="keystone", versions=[2.0, 3]) clients = mock.MagicMock() clients.keystone.return_value = self._get_keystone_v3_mock_client() self.assertTrue(validator({}, clients, None).is_valid) clients.keystone.return_value = self._get_keystone_v2_mock_client() self.assertTrue(validator({}, clients, None).is_valid) @ddt.data({"nova_version": 2, "required_versions": [2], "valid": True}, {"nova_version": 3, "required_versions": [2], "valid": False}, {"nova_version": None, "required_versions": [2], "valid": False}, {"nova_version": 2, "required_versions": [2, 3], "valid": True}, {"nova_version": 4, "required_versions": [2, 3], "valid": False}) @ddt.unpack def 
test_required_api_versions_choose_version(self, nova_version=None, required_versions=(2,), valid=False): validator = self._unwrap_validator( validation.required_api_versions, component="nova", versions=required_versions) clients = mock.MagicMock() clients.nova.choose_version.return_value = nova_version self.assertEqual(validator({}, clients, None).is_valid, valid) @ddt.data({"required_version": 2, "valid": True}, {"required_version": 3, "valid": False}) @ddt.unpack def test_required_api_versions_context(self, required_version=None, valid=False): validator = self._unwrap_validator( validation.required_api_versions, component="nova", versions=[required_version]) clients = mock.MagicMock() config = {"context": {"api_versions": {"nova": {"version": 2}}}} self.assertEqual(validator(config, clients, None).is_valid, valid) @mock.patch( "rally.common.yamlutils.safe_load", return_value={ "version": "2.0", "name": "wb", "workflows": { "wf1": { "type": "direct", "tasks": { "t1": { "action": "std.noop" } } } } } ) @mock.patch(MODULE + "os.access") @mock.patch(MODULE + "open") def test_workbook_contains_workflow(self, mock_open, mock_access, mock_safe_load): validator = self._unwrap_validator( validation.workbook_contains_workflow, "definition", "workflow_name") clients = mock.MagicMock() context = { "args": { "definition": "fake_path1", "workflow_name": "wf1" } } result = validator(context, clients, None) self.assertTrue(result.is_valid) self.assertEqual(1, mock_open.called) self.assertEqual(1, mock_access.called) self.assertEqual(1, mock_safe_load.called) rally-0.9.1/tests/unit/task/test_context.py0000664000567000056710000002264713073417720022204 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import ddt import jsonschema import mock from rally import exceptions from rally.task import context from tests.unit import fakes from tests.unit import test @ddt.ddt class BaseContextTestCase(test.TestCase): @ddt.data({"config": {"bar": "spam"}, "expected": {"bar": "spam"}}, {"config": {"bar": "spam"}, "expected": {"bar": "spam"}}, {"config": {}, "expected": {}}, {"config": None, "expected": None}, {"config": 42, "expected": 42}, {"config": "foo str", "expected": "foo str"}, {"config": [], "expected": ()}, {"config": [11, 22, 33], "expected": (11, 22, 33)}) @ddt.unpack def test_init(self, config, expected): ctx = {"config": {"foo": 42, "fake": config}, "task": "foo_task"} ins = fakes.FakeContext(ctx) self.assertEqual(ins.config, expected) self.assertEqual(ins.task, "foo_task") self.assertEqual(ins.context, ctx) def test_init_with_default_config(self): @context.configure(name="foo", order=1) class FooContext(fakes.FakeContext): DEFAULT_CONFIG = {"alpha": "beta", "delta": "gamma"} ctx = {"config": {"foo": {"ab": "cd"}, "bar": 42}, "task": "foo_task"} ins = FooContext(ctx) self.assertEqual({"ab": "cd", "alpha": "beta", "delta": "gamma"}, ins.config) def test_init_empty_context(self): ctx0 = { "task": mock.MagicMock(), "config": {"fake": {"foo": 42}} } ctx = fakes.FakeContext(ctx0) self.assertEqual(ctx.config, ctx0["config"]["fake"]) self.assertEqual(ctx.task, ctx0["task"]) self.assertEqual(ctx.context, ctx0) def test_validate__context(self): fakes.FakeContext.validate({"test": 2}) def test_validate__wrong_context(self): self.assertRaises(jsonschema.ValidationError, 
fakes.FakeContext.validate, {"nonexisting": 2}) def test_setup_is_abstract(self): @context.configure("test_abstract_setup", 0) class A(context.Context): def cleanup(self): pass self.assertRaises(TypeError, A) def test_cleanup_is_abstract(self): @context.configure("test_abstract_cleanup", 0) class A(context.Context): def setup(self): pass self.assertRaises(TypeError, A) def test_with_statement(self): ctx0 = { "task": mock.MagicMock() } ctx = fakes.FakeContext(ctx0) ctx.setup = mock.MagicMock() ctx.cleanup = mock.MagicMock() with ctx as entered_ctx: self.assertEqual(ctx, entered_ctx) ctx.cleanup.assert_called_once_with() def test_lt(self): @context.configure(name="lt", order=fakes.FakeContext.get_order() - 1) class FakeLowerContext(fakes.FakeContext): pass ctx = mock.MagicMock() self.assertTrue(FakeLowerContext(ctx) < fakes.FakeContext(ctx)) self.assertFalse(fakes.FakeContext(ctx) < FakeLowerContext(ctx)) self.assertFalse(fakes.FakeContext(ctx) < fakes.FakeContext(ctx)) def test_gt(self): @context.configure(name="f", order=fakes.FakeContext.get_order() + 1) class FakeBiggerContext(fakes.FakeContext): pass ctx = mock.MagicMock() self.assertTrue(FakeBiggerContext(ctx) > fakes.FakeContext(ctx)) self.assertFalse(fakes.FakeContext(ctx) > FakeBiggerContext(ctx)) self.assertFalse(fakes.FakeContext(ctx) > fakes.FakeContext(ctx)) def test_eq(self): @context.configure(name="fake2", order=fakes.FakeContext.get_order() + 1) class FakeOtherContext(fakes.FakeContext): pass ctx = mock.MagicMock() self.assertFalse(FakeOtherContext(ctx) == fakes.FakeContext(ctx)) self.assertTrue(FakeOtherContext(ctx) == FakeOtherContext(ctx)) class ContextManagerTestCase(test.TestCase): @mock.patch("rally.task.context.Context.get") def test_validate(self, mock_context_get): config = { "ctx1": mock.MagicMock(), "ctx2": mock.MagicMock() } context.ContextManager.validate(config) for ctx in ("ctx1", "ctx2"): mock_context_get.assert_has_calls([ mock.call(ctx, allow_hidden=False), 
mock.call().validate(config[ctx]), ]) @mock.patch("rally.task.context.Context.get") def test_validate_hidden(self, mock_context_get): config = { "ctx1": mock.MagicMock(), "ctx2": mock.MagicMock() } context.ContextManager.validate(config, allow_hidden=True) for ctx in ("ctx1", "ctx2"): mock_context_get.assert_has_calls([ mock.call(ctx, allow_hidden=True), mock.call().validate(config[ctx]), ]) def test_validate__non_existing_context(self): config = { "nonexisting": {"nonexisting": 2} } self.assertRaises(exceptions.PluginNotFound, context.ContextManager.validate, config) @mock.patch("rally.task.context.Context.get") def test_setup(self, mock_context_get): mock_context = mock.MagicMock() mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True) mock_context_get.return_value = mock_context ctx_object = {"config": {"a": [], "b": []}} manager = context.ContextManager(ctx_object) result = manager.setup() self.assertEqual(result, ctx_object) mock_context_get.assert_has_calls( [mock.call("a", allow_hidden=True), mock.call("b", allow_hidden=True)], any_order=True) mock_context.assert_has_calls( [mock.call(ctx_object), mock.call(ctx_object)], any_order=True) self.assertEqual([mock_context(), mock_context()], manager._visited) mock_context.return_value.assert_has_calls( [mock.call.setup(), mock.call.setup()], any_order=True) @mock.patch("rally.task.context.Context.get") def test_cleanup(self, mock_context_get): mock_context = mock.MagicMock() mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True) mock_context_get.return_value = mock_context ctx_object = {"config": {"a": [], "b": []}} manager = context.ContextManager(ctx_object) manager.cleanup() mock_context_get.assert_has_calls( [mock.call("a", allow_hidden=True), mock.call("b", allow_hidden=True)], any_order=True) mock_context.assert_has_calls( [mock.call(ctx_object), mock.call(ctx_object)], any_order=True) mock_context.return_value.assert_has_calls( [mock.call.cleanup(), mock.call.cleanup()], 
any_order=True) @mock.patch("rally.task.context.Context.get") def test_cleanup_exception(self, mock_context_get): mock_context = mock.MagicMock() mock_context.return_value = mock.MagicMock(__lt__=lambda x, y: True) mock_context.cleanup.side_effect = Exception() mock_context_get.return_value = mock_context ctx_object = {"config": {"a": [], "b": []}} manager = context.ContextManager(ctx_object) manager.cleanup() mock_context_get.assert_has_calls( [mock.call("a", allow_hidden=True), mock.call("b", allow_hidden=True)], any_order=True) mock_context.assert_has_calls( [mock.call(ctx_object), mock.call(ctx_object)], any_order=True) mock_context.return_value.assert_has_calls( [mock.call.cleanup(), mock.call.cleanup()], any_order=True) @mock.patch("rally.task.context.ContextManager.cleanup") @mock.patch("rally.task.context.ContextManager.setup") def test_with_statement( self, mock_context_manager_setup, mock_context_manager_cleanup): with context.ContextManager(mock.MagicMock()): mock_context_manager_setup.assert_called_once_with() mock_context_manager_setup.reset_mock() self.assertFalse(mock_context_manager_cleanup.called) self.assertFalse(mock_context_manager_setup.called) mock_context_manager_cleanup.assert_called_once_with() @mock.patch("rally.task.context.ContextManager.cleanup") @mock.patch("rally.task.context.ContextManager.setup") def test_with_statement_excpetion_during_setup( self, mock_context_manager_setup, mock_context_manager_cleanup): mock_context_manager_setup.side_effect = Exception("abcdef") try: with context.ContextManager(mock.MagicMock()): pass except Exception: pass finally: mock_context_manager_setup.assert_called_once_with() mock_context_manager_cleanup.assert_called_once_with() rally-0.9.1/tests/unit/task/processing/0000775000567000056710000000000013073420067021246 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/unit/task/processing/__init__.py0000664000567000056710000000000013073417717023355 0ustar 
jenkinsjenkins00000000000000rally-0.9.1/tests/unit/task/processing/test_plot.py0000664000567000056710000005714013073417720023646 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import json import ddt import mock from rally.task.processing import plot from tests.unit import test PLOT = "rally.task.processing.plot." @ddt.ddt class PlotTestCase(test.TestCase): @mock.patch(PLOT + "charts") def test__process_scenario(self, mock_charts): for mock_ins, ret in [ (mock_charts.MainStatsTable, "main_stats"), (mock_charts.MainStackedAreaChart, "main_stacked"), (mock_charts.AtomicStackedAreaChart, "atomic_stacked"), (mock_charts.OutputStackedAreaDeprecatedChart, "output_stacked"), (mock_charts.LoadProfileChart, "load_profile"), (mock_charts.MainHistogramChart, "main_histogram"), (mock_charts.AtomicHistogramChart, "atomic_histogram"), (mock_charts.AtomicAvgChart, "atomic_avg")]: setattr(mock_ins.return_value.render, "return_value", ret) iterations = [ {"timestamp": i + 2, "error": [], "duration": i + 5, "idle_duration": i, "output": {"additive": [], "complete": []}, "atomic_actions": {"foo_action": i + 10}} for i in range(10)] data = {"iterations": iterations, "sla": [], "key": {"kw": {"runner": {"type": "constant"}}, "name": "Foo.bar", "pos": 0}, "info": {"atomic": {"foo_action": {"max_duration": 19, "min_duration": 10}}, "full_duration": 40, "load_duration": 32, "iterations_count": 10, 
"iterations_passed": 10, "max_duration": 14, "min_duration": 5, "output_names": [], "tstamp_end": 25, "tstamp_start": 2}, "created_at": "xxx_time", "hooks": []} result = plot._process_scenario(data, 1) self.assertEqual( {"cls": "Foo", "met": "bar", "name": "bar [2]", "pos": "1", "runner": "constant", "config": json.dumps( {"Foo.bar": [{"runner": {"type": "constant"}}]}, indent=2), "created_at": "xxx_time", "full_duration": 40, "load_duration": 32, "hooks": [], "atomic": {"histogram": "atomic_histogram", "iter": "atomic_stacked", "pie": "atomic_avg"}, "iterations": {"histogram": "main_histogram", "iter": "main_stacked", "pie": [("success", 10), ("errors", 0)]}, "iterations_count": 10, "errors": [], "load_profile": "load_profile", "additive_output": [], "complete_output": [[], [], [], [], [], [], [], [], [], []], "has_output": False, "output_errors": [], "sla": [], "sla_success": True, "table": "main_stats"}, result) @ddt.data( {"hooks": [], "expected": []}, {"hooks": [ {"config": { "trigger": {"args": {"at": [2, 5], "unit": "iteration"}, "name": "event"}, "args": "foo cmd", "description": "Foo", "name": "sys_call"}, "results": [ {"status": "success", "finished_at": 1475589987.525735, "triggered_by": {"event_type": "iteration", "value": 2}, "started_at": 1475589987.433399, "output": { "additive": [ {"chart_plugin": "StatsTable", "title": "Foo table", "data": [["A", 158], ["B", 177]]}], "complete": []}}, {"status": "success", "finished_at": 1475589993.457818, "triggered_by": {"event_type": "iteration", "value": 5}, "started_at": 1475589993.432734, "output": { "additive": [ {"chart_plugin": "StatsTable", "title": "Foo table", "data": [["A", 243], ["B", 179]]}], "complete": []}}], "summary": {"success": 2}}, {"config": {"trigger": {"args": {"at": [1, 2, 4], "unit": "time"}, "name": "event"}, "args": "bar cmd", "name": "sys_call"}, "results": [ {"status": "success", "finished_at": 1475589988.437791, "triggered_by": {"event_type": "time", "value": 1}, "started_at": 
1475589988.434244, "output": {"additive": [], "complete": [ {"chart_plugin": "Pie", "title": "Bar Pie", "data": [["F", 4], ["G", 2]]}]}}, {"status": "success", "finished_at": 1475589989.437589, "triggered_by": {"event_type": "time", "value": 2}, "started_at": 1475589989.433964, "output": {"additive": [], "complete": [ {"chart_plugin": "Pie", "title": "Bar Pie", "data": [["F", 42], ["G", 24]]}]}}], "summary": {"success": 2}}], "expected": [ {"additive": [ {"data": {"cols": ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg (sec)", "Count"], "rows": [["A", 158.0, 200.5, 234.5, 238.75, 243.0, 100.75, 2], ["B", 177.0, 178.0, 178.8, 178.9, 179.0, 89.5, 2]]}, "axis_label": "", "description": "", "label": "", "title": "Foo table", "widget": "Table"}], "complete": [], "desc": "Foo", "name": "sys_call"}, {"additive": [], "complete": [ {"charts": [{"data": [["F", 4], ["G", 2]], "title": "Bar Pie", "widget": "Pie"}], "finished_at": "2016-10-04 14:06:28", "started_at": "2016-10-04 14:06:28", "status": "success", "triggered_by": "time: 1"}, {"charts": [{"data": [["F", 42], ["G", 24]], "title": "Bar Pie", "widget": "Pie"}], "finished_at": "2016-10-04 14:06:29", "started_at": "2016-10-04 14:06:29", "status": "success", "triggered_by": "time: 2"}], "desc": "", "name": "sys_call"}]}) @ddt.unpack def test__process_hooks(self, hooks, expected): self.assertEqual(expected, plot._process_hooks(hooks)) @mock.patch(PLOT + "_process_scenario") @mock.patch(PLOT + "json.dumps", return_value="json_data") def test__process_tasks(self, mock_json_dumps, mock__process_scenario): tasks_results = [{"key": {"name": i, "kw": "kw_" + i}} for i in ("a", "b", "c", "b")] mock__process_scenario.side_effect = lambda a, b: ( {"cls": "%s_cls" % a["key"]["name"], "name": str(b), "met": "dummy", "pos": str(b)}) source, tasks = plot._process_tasks(tasks_results) self.assertEqual(source, "json_data") mock_json_dumps.assert_called_once_with( {"a": ["kw_a"], "b": ["kw_b", 
"kw_b"], "c": ["kw_c"]}, sort_keys=True, indent=2) self.assertEqual( tasks, [{"cls": "a_cls", "met": "dummy", "name": "0", "pos": "0"}, {"cls": "b_cls", "met": "dummy", "name": "0", "pos": "0"}, {"cls": "b_cls", "met": "dummy", "name": "1", "pos": "1"}, {"cls": "c_cls", "met": "dummy", "name": "0", "pos": "0"}]) @ddt.data({}, {"include_libs": True}, {"include_libs": False}) @ddt.unpack @mock.patch(PLOT + "_process_tasks") @mock.patch(PLOT + "_extend_results") @mock.patch(PLOT + "ui_utils.get_template") @mock.patch(PLOT + "json.dumps", side_effect=lambda s: "json_" + s) @mock.patch("rally.common.version.version_string", return_value="42.0") def test_plot(self, mock_version_string, mock_dumps, mock_get_template, mock__extend_results, mock__process_tasks, **ddt_kwargs): mock__process_tasks.return_value = "source", "scenarios" mock_get_template.return_value.render.return_value = "tasks_html" mock__extend_results.return_value = ["extended_result"] html = plot.plot("tasks_results", **ddt_kwargs) self.assertEqual(html, "tasks_html") mock__extend_results.assert_called_once_with("tasks_results") mock_get_template.assert_called_once_with("task/report.html") mock__process_tasks.assert_called_once_with(["extended_result"]) if "include_libs" in ddt_kwargs: mock_get_template.return_value.render.assert_called_once_with( version="42.0", data="json_scenarios", source="json_source", include_libs=ddt_kwargs["include_libs"]) else: mock_get_template.return_value.render.assert_called_once_with( version="42.0", data="json_scenarios", source="json_source", include_libs=False) @mock.patch(PLOT + "objects.Task.extend_results") def test__extend_results(self, mock_task_extend_results): mock_task_extend_results.side_effect = iter( [["extended_foo"], ["extended_bar"], ["extended_spam"]]) tasks_results = [ {"key": "%s_key" % k, "sla": "%s_sla" % k, "hooks": "%s_hooks" % k, "full_duration": "%s_full_duration" % k, "load_duration": "%s_load_duration" % k, "created_at": "%s_time" % k, "result": 
"%s_result" % k} for k in ("foo", "bar", "spam")] generic_results = [ {"id": None, "created_at": None, "updated_at": None, "task_uuid": None, "key": "%s_key" % k, "data": {"raw": "%s_result" % k, "full_duration": "%s_full_duration" % k, "load_duration": "%s_load_duration" % k, "hooks": "%s_hooks" % k, "sla": "%s_sla" % k}, "created_at": "%s_time" % k} for k in ("foo", "bar", "spam")] results = plot._extend_results(tasks_results) self.assertEqual([mock.call([r], True) for r in generic_results], mock_task_extend_results.mock_calls) self.assertEqual(["extended_foo", "extended_bar", "extended_spam"], results) def test__extend_results_empty(self): self.assertEqual([], plot._extend_results([])) @mock.patch(PLOT + "Trends") @mock.patch(PLOT + "ui_utils.get_template") @mock.patch(PLOT + "_extend_results") @mock.patch("rally.common.version.version_string", return_value="42.0") def test_trends(self, mock_version_string, mock__extend_results, mock_get_template, mock_trends): mock__extend_results.return_value = ["foo", "bar"] trends = mock.Mock() trends.get_data.return_value = ["foo", "bar"] mock_trends.return_value = trends template = mock.Mock() template.render.return_value = "trends html" mock_get_template.return_value = template self.assertEqual("trends html", plot.trends("tasks_results")) self.assertEqual([mock.call("foo"), mock.call("bar")], trends.add_result.mock_calls) mock_get_template.assert_called_once_with("task/trends.html") template.render.assert_called_once_with(version="42.0", data="[\"foo\", \"bar\"]") @ddt.ddt class TrendsTestCase(test.TestCase): def test___init__(self): trends = plot.Trends() self.assertEqual({}, trends._data) self.assertRaises(TypeError, plot.Trends, 42) @ddt.data({"args": [None], "result": "None"}, {"args": [""], "result": ""}, {"args": [" str value "], "result": "str value"}, {"args": [" 42 "], "result": "42"}, {"args": ["42"], "result": "42"}, {"args": [42], "result": "42"}, {"args": [42.00], "result": "42.0"}, {"args": [[3.2, 1, " foo 
", None]], "result": "1,3.2,None,foo"}, {"args": [(" def", "abc", [22, 33])], "result": "22,33,abc,def"}, {"args": [{}], "result": ""}, {"args": [{1: 2, "a": " b c "}], "result": "1:2|a:b c"}, {"args": [{"foo": "bar", (1, 2): [5, 4, 3]}], "result": "1,2:3,4,5|foo:bar"}, {"args": [1, 2], "raises": TypeError}, {"args": [set()], "raises": TypeError}) @ddt.unpack def test__to_str(self, args, result=None, raises=None): trends = plot.Trends() if raises: self.assertRaises(raises, trends._to_str, *args) else: self.assertEqual(result, trends._to_str(*args)) @mock.patch(PLOT + "hashlib") def test__make_hash(self, mock_hashlib): mock_hashlib.md5.return_value.hexdigest.return_value = "md5_digest" trends = plot.Trends() trends._to_str = mock.Mock() trends._to_str.return_value.encode.return_value = "foo_str" self.assertEqual("md5_digest", trends._make_hash("foo_obj")) trends._to_str.assert_called_once_with("foo_obj") trends._to_str.return_value.encode.assert_called_once_with("utf8") mock_hashlib.md5.assert_called_once_with("foo_str") def _make_result(self, salt, sla_success=True, with_na=False): if with_na: atomic = {"a": "n/a", "b": "n/a"} stat_rows = [ ["a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 4], ["b", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 4], ["total", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 4]] else: atomic = {"a": 123, "b": 456} stat_rows = [["a", 0.7, 0.85, 0.9, 0.87, 1.25, 0.67, "100.0%", 4], ["b", 0.5, 0.75, 0.85, 0.9, 1.1, 0.58, "100.0%", 4], ["total", 1.2, 1.55, 1.7, 1.8, 1.5, 0.8, "100.0%", 4]] return { "key": {"kw": "kw_%d" % salt, "name": "Scenario.name_%d" % salt}, "sla": [{"success": sla_success}], "info": {"iterations_count": 4, "atomic": atomic, "tstamp_start": 123456.789 + salt, "stat": {"rows": stat_rows, "cols": ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg (sec)", "Success", "Count"]}}, "iterations": ["", "", "", ""]} def _sort_trends(self, trends_result): for idx in 
range(len(trends_result)): trends_result[idx]["durations"].sort() for a_idx in range(len(trends_result[idx]["actions"])): trends_result[idx]["actions"][a_idx]["durations"].sort() return trends_result def test_add_result_and_get_data(self): trends = plot.Trends() for i in 0, 1: trends.add_result(self._make_result(i)) expected = [ {"actions": [{"durations": [("90%ile", [(123456789, 0.9)]), ("95%ile", [(123456789, 0.87)]), ("avg", [(123456789, 0.67)]), ("max", [(123456789, 1.25)]), ("median", [(123456789, 0.85)]), ("min", [(123456789, 0.7)])], "name": "a", "success": [("success", [(123456789, 100.0)])]}, {"durations": [("90%ile", [(123456789, 0.85)]), ("95%ile", [(123456789, 0.9)]), ("avg", [(123456789, 0.58)]), ("max", [(123456789, 1.1)]), ("median", [(123456789, 0.75)]), ("min", [(123456789, 0.5)])], "name": "b", "success": [("success", [(123456789, 100.0)])]}], "cls": "Scenario", "config": "\"kw_0\"", "durations": [("90%ile", [(123456789, 1.7)]), ("95%ile", [(123456789, 1.8)]), ("avg", [(123456789, 0.8)]), ("max", [(123456789, 1.5)]), ("median", [(123456789, 1.55)]), ("min", [(123456789, 1.2)])], "length": 1, "met": "name_0", "name": "Scenario.name_0", "sla_failures": 0, "stat": {"avg": 1.425, "max": 1.8, "min": 0.8}, "success": [("success", [(123456789, 100.0)])]}, {"actions": [{"durations": [("90%ile", [(123457789, 0.9)]), ("95%ile", [(123457789, 0.87)]), ("avg", [(123457789, 0.67)]), ("max", [(123457789, 1.25)]), ("median", [(123457789, 0.85)]), ("min", [(123457789, 0.7)])], "name": "a", "success": [("success", [(123457789, 100.0)])]}, {"durations": [("90%ile", [(123457789, 0.85)]), ("95%ile", [(123457789, 0.9)]), ("avg", [(123457789, 0.58)]), ("max", [(123457789, 1.1)]), ("median", [(123457789, 0.75)]), ("min", [(123457789, 0.5)])], "name": "b", "success": [("success", [(123457789, 100.0)])]}], "cls": "Scenario", "config": "\"kw_1\"", "durations": [("90%ile", [(123457789, 1.7)]), ("95%ile", [(123457789, 1.8)]), ("avg", [(123457789, 0.8)]), ("max", [(123457789, 
1.5)]), ("median", [(123457789, 1.55)]), ("min", [(123457789, 1.2)])], "length": 1, "met": "name_1", "name": "Scenario.name_1", "sla_failures": 0, "stat": {"avg": 1.425, "max": 1.8, "min": 0.8}, "success": [("success", [(123457789, 100.0)])]}] self.assertEqual(expected, self._sort_trends(trends.get_data())) def test_add_result_once_and_get_data(self): trends = plot.Trends() trends.add_result(self._make_result(42, sla_success=False)) expected = [ {"actions": [{"durations": [("90%ile", [(123498789, 0.9)]), ("95%ile", [(123498789, 0.87)]), ("avg", [(123498789, 0.67)]), ("max", [(123498789, 1.25)]), ("median", [(123498789, 0.85)]), ("min", [(123498789, 0.7)])], "name": "a", "success": [("success", [(123498789, 100.0)])]}, {"durations": [("90%ile", [(123498789, 0.85)]), ("95%ile", [(123498789, 0.9)]), ("avg", [(123498789, 0.58)]), ("max", [(123498789, 1.1)]), ("median", [(123498789, 0.75)]), ("min", [(123498789, 0.5)])], "name": "b", "success": [("success", [(123498789, 100.0)])]}], "cls": "Scenario", "config": "\"kw_42\"", "durations": [("90%ile", [(123498789, 1.7)]), ("95%ile", [(123498789, 1.8)]), ("avg", [(123498789, 0.8)]), ("max", [(123498789, 1.5)]), ("median", [(123498789, 1.55)]), ("min", [(123498789, 1.2)])], "length": 1, "met": "name_42", "name": "Scenario.name_42", "sla_failures": 1, "stat": {"avg": 1.425, "max": 1.8, "min": 0.8}, "success": [("success", [(123498789, 100.0)])]}] self.assertEqual(expected, self._sort_trends(trends.get_data())) def test_add_result_with_na_and_get_data(self): trends = plot.Trends() trends.add_result( self._make_result(42, sla_success=False, with_na=True)) expected = [ {"actions": [{"durations": [("90%ile", [(123498789, "n/a")]), ("95%ile", [(123498789, "n/a")]), ("avg", [(123498789, "n/a")]), ("max", [(123498789, "n/a")]), ("median", [(123498789, "n/a")]), ("min", [(123498789, "n/a")])], "name": "a", "success": [("success", [(123498789, 0)])]}, {"durations": [("90%ile", [(123498789, "n/a")]), ("95%ile", [(123498789, "n/a")]), 
("avg", [(123498789, "n/a")]), ("max", [(123498789, "n/a")]), ("median", [(123498789, "n/a")]), ("min", [(123498789, "n/a")])], "name": "b", "success": [("success", [(123498789, 0)])]}], "cls": "Scenario", "config": "\"kw_42\"", "durations": [("90%ile", [(123498789, "n/a")]), ("95%ile", [(123498789, "n/a")]), ("avg", [(123498789, "n/a")]), ("max", [(123498789, "n/a")]), ("median", [(123498789, "n/a")]), ("min", [(123498789, "n/a")])], "length": 1, "met": "name_42", "name": "Scenario.name_42", "sla_failures": 1, "stat": {"avg": None, "max": None, "min": None}, "success": [("success", [(123498789, 0)])]}] self.assertEqual(expected, self._sort_trends(trends.get_data())) def test_get_data_no_results_added(self): trends = plot.Trends() self.assertEqual([], trends.get_data()) rally-0.9.1/tests/unit/task/processing/test_utils.py0000664000567000056710000000431413073417717024031 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import ddt from rally.task.processing import utils from tests.unit import test @ddt.ddt class GraphZipperTestCase(test.TestCase): @ddt.data({"data_stream": list(range(1, 11)), "zipped_size": 8, "expected": [[1, 1.2], [3, 2.4], [4, 3.6], [5, 4.8], [7, 6.2], [8, 7.4], [9, 8.6], [10, 9.8]]}, {"data_stream": [.005, .8, 22, .004, .7, 12, .5, .07, .02] * 10, "zipped_size": 8, "expected": [ [1, 3.769244444444445], [18, 4.706933333333334], [29, 4.339911111111111], [40, 3.2279111111111116], [52, 3.769244444444445], [63, 4.706933333333334], [74, 4.339911111111111], [90, 3.2279111111111116]]}, {"data_stream": list(range(1, 100)), "zipped_size": 1000, "expected": [[i, i] for i in range(1, 100)]}, {"data_stream": [1, 4, 11, None, 42], "zipped_size": 1000, "expected": [[1, 1], [2, 4], [3, 11], [4, 0], [5, 42]]}) @ddt.unpack def test_add_point_and_get_zipped_graph(self, data_stream=None, zipped_size=None, expected=None): merger = utils.GraphZipper(len(data_stream), zipped_size) [merger.add_point(value) for value in data_stream] self.assertEqual(expected, merger.get_zipped_graph()) def test_add_point_raises(self): merger = utils.GraphZipper(10, 8) self.assertRaises(TypeError, merger.add_point) [merger.add_point(1) for value in range(10)] self.assertRaises(RuntimeError, merger.add_point, 1) rally-0.9.1/tests/unit/task/processing/test_charts.py0000664000567000056710000007470713073417717024172 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import collections import ddt import mock from rally.common.plugin import plugin from rally.task.processing import charts from tests.unit import test CHARTS = "rally.task.processing.charts." class ChartTestCase(test.TestCase): class Chart(charts.Chart): widget = "FooWidget" def _map_iteration_values(self, iteration): return [("foo_" + k, iteration[k]) for k in ["a", "b"]] @property def wload_info(self): return {"iterations_count": 42, "atomic": {"a": {}, "b": {}, "c": {}}} def test___init__(self): self.assertRaises(TypeError, charts.Chart, self.wload_info) chart = self.Chart(self.wload_info) self.assertIsInstance(chart, plugin.Plugin) self.assertEqual({}, chart._data) self.assertEqual(42, chart.base_size) self.assertEqual(1000, chart.zipped_size) chart = self.Chart(self.wload_info, zipped_size=24) self.assertEqual({}, chart._data) self.assertEqual(42, chart.base_size) self.assertEqual(24, chart.zipped_size) @mock.patch(CHARTS + "utils.GraphZipper") def test_add_iteration_and_render(self, mock_graph_zipper): gzipper_a = mock.Mock(get_zipped_graph=lambda: "a_points") gzipper_b = mock.Mock(get_zipped_graph=lambda: "b_points") mock_graph_zipper.side_effect = [gzipper_a, gzipper_b] chart = self.Chart(self.wload_info, 24) self.assertEqual([], chart.render()) [chart.add_iteration(itr) for itr in [{"a": 1, "b": 2}, {"a": 3, "b": 4}]] self.assertEqual([mock.call(42, 24), mock.call(42, 24)], mock_graph_zipper.mock_calls) self.assertEqual(2, len(chart._data)) self.assertEqual([mock.call(1), mock.call(3)], chart._data["foo_a"].add_point.mock_calls) self.assertEqual([mock.call(2), mock.call(4)], chart._data["foo_b"].add_point.mock_calls) self.assertEqual([("foo_a", "a_points"), ("foo_b", "b_points")], chart.render()) def test__fix_atomic_actions(self): chart = self.Chart(self.wload_info) self.assertEqual( {"atomic_actions": {"a": 5, "b": 6, "c": 0}}, 
chart._fix_atomic_actions({"atomic_actions": {"a": 5, "b": 6}})) class MainStackedAreaChartTestCase(test.TestCase): def test_add_iteration_and_render(self): chart = charts.MainStackedAreaChart({"iterations_count": 3, "iterations_failed": 0}, 10) self.assertIsInstance(chart, charts.Chart) [chart.add_iteration(itr) for itr in ( {"duration": 1.1, "idle_duration": 2.2, "error": []}, {"error": [], "duration": 1.1, "idle_duration": 0.5}, {"duration": 1.3, "idle_duration": 3.4, "error": []})] expected = [("duration", [[1, 1.1], [2, 1.1], [3, 1.3]]), ("idle_duration", [[1, 2.2], [2, 0.5], [3, 3.4]])] self.assertEqual(expected, chart.render()) def test_add_iteration_and_render_with_failed_iterations(self): chart = charts.MainStackedAreaChart({"iterations_count": 3, "iterations_failed": 2}, 10) self.assertIsInstance(chart, charts.Chart) [chart.add_iteration(itr) for itr in ( {"duration": 1.1, "idle_duration": 2.2, "error": []}, {"error": ["foo_err"], "duration": 1.1, "idle_duration": 0.5}, {"duration": 1.3, "idle_duration": 3.4, "error": ["foo_err"]})] expected = [("duration", [[1, 1.1], [2, 0], [3, 0]]), ("idle_duration", [[1, 2.2], [2, 0], [3, 0]]), ("failed_duration", [[1, 0], [2, 1.6], [3, 4.7]])] self.assertEqual(expected, chart.render()) class AtomicStackedAreaChartTestCase(test.TestCase): def test_add_iteration_and_render(self): iterations = ( {"atomic_actions": {"foo": 1.1}, "error": []}, {"atomic_actions": {"foo": 1.1, "bar": 1.2}, "error": [], "duration": 40, "idle_duration": 2}, {"atomic_actions": {"bar": 1.2}, "error": [], "duration": 5.5, "idle_duration": 2.5}) expected = [("bar", [[1, 0], [2, 1.2], [3, 1.2]]), ("foo", [[1, 1.1], [2, 1.1], [3, 0]])] chart = charts.AtomicStackedAreaChart( {"iterations_count": 3, "iterations_failed": 0, "atomic": {"foo": {}, "bar": {}}}, 10) self.assertIsInstance(chart, charts.Chart) [chart.add_iteration(iteration) for iteration in iterations] self.assertEqual(expected, sorted(chart.render())) def 
test_add_iteration_and_render_with_failed_iterations(self): iterations = ( {"atomic_actions": {"foo": 1.1}, "error": []}, {"atomic_actions": {"foo": 1.1, "bar": 1.2}, "error": ["foo_err"], "duration": 40, "idle_duration": 2}, {"atomic_actions": {"bar": 1.2}, "error": ["foo_err"], "duration": 5.5, "idle_duration": 2.5}) expected = [("bar", [[1, 0], [2, 1.2], [3, 1.2]]), ("failed_duration", [[1, 0], [2, 39.7], [3, 6.8]]), ("foo", [[1, 1.1], [2, 1.1], [3, 0]])] chart = charts.AtomicStackedAreaChart( {"iterations_count": 3, "iterations_failed": 2, "atomic": {"foo": {}, "bar": {}}}, 10) self.assertIsInstance(chart, charts.Chart) [chart.add_iteration(iteration) for iteration in iterations] self.assertEqual(expected, sorted(chart.render())) class AvgChartTestCase(test.TestCase): class AvgChart(charts.AvgChart): def _map_iteration_values(self, iteration): return iteration["foo"].items() def test_add_iteration_and_render(self): self.assertRaises(TypeError, charts.AvgChart, {"iterations_count": 3}) chart = self.AvgChart({"iterations_count": 3}) self.assertIsInstance(chart, charts.AvgChart) [chart.add_iteration({"foo": x}) for x in ({"a": 1.3, "b": 4.3}, {"a": 2.4, "b": 5.4}, {"a": 3.5, "b": 7.7})] self.assertEqual([("a", 2.4), ("b", 5.8)], sorted(chart.render())) class AtomicAvgChartTestCase(test.TestCase): def test_add_iteration_and_render(self): chart = charts.AtomicAvgChart({"iterations_count": 3, "atomic": {"foo": {}, "bar": {}}}) self.assertIsInstance(chart, charts.AvgChart) [chart.add_iteration({"atomic_actions": collections.OrderedDict(a)}) for a in ([("foo", 2), ("bar", 5)], [("foo", 4)], [("bar", 7)])] self.assertEqual([("bar", 4.0), ("foo", 2.0)], sorted(chart.render())) @ddt.ddt class LoadProfileChartTestCase(test.TestCase): @ddt.data( {"info": {"iterations_count": 9, "tstamp_start": 0.0, "load_duration": 8.0}, "iterations": [(0.0, 0.5), (0.5, 0.5), (2.0, 4.0), (2.0, 2.0), (4.0, 2.0), (6.0, 0.5), (6.5, 0.5), (7.5, 0.5), (7.5, 1.5)], "kwargs": {"scale": 8}, 
"expected": [("parallel iterations", [(0.0, 0), (1.25, 0.8), (2.5, 0.8), (3.75, 2), (5.0, 2.0), (6.25, 1.8), (7.5, 0.6000000000000001), (8.75, 1.4), (10.0, 0.2)])]}, {"info": {"iterations_count": 6, "tstamp_start": 0.0, "load_duration": 12.0}, "iterations": [(0.0, 0.75), (0.75, 0.75), (1.5, 0.375), (3.0, 5.0), (3.75, 4.25), (10.0, 1.0)], "kwargs": {"name": "Custom name", "scale": 8}, "expected": [("Custom name", [(0.0, 0), (1.875, 1.0), (3.75, 0.4), (5.625, 2.0), (7.5, 2), (9.375, 0.5333333333333333), (11.25, 0.5333333333333333), (13.125, 0), (15.0, 0)])]}, {"info": {"iterations_count": 2, "tstamp_start": 0.0, "load_duration": 1.0}, "iterations": [(0.0, 0.5), (0.5, 0.5)], "kwargs": {"scale": 4}, "expected": [("parallel iterations", [(0.0, 0), (0.375, 1.0), (0.75, 1.0), (1.125, 0.6666666666666666), (1.5, 0)])]}) @ddt.unpack def test_add_iteration_and_render(self, info, iterations, kwargs, expected): chart = charts.LoadProfileChart(info, **kwargs) self.assertIsInstance(chart, charts.Chart) for iteration in iterations: ts, duration = iteration chart.add_iteration({"timestamp": ts, "duration": duration}) self.assertEqual(expected, chart.render()) @ddt.ddt class HistogramChartTestCase(test.TestCase): class HistogramChart(charts.HistogramChart): def __init__(self, workload_info): super(HistogramChartTestCase.HistogramChart, self).__init__(workload_info) self._data["bar"] = {"views": self._init_views(1.2, 4.2), "disabled": None} def _map_iteration_values(self, iteration): return iteration["foo"].items() def test_add_iteration_and_render(self): self.assertRaises(TypeError, charts.HistogramChart, {"iterations_count": 3}) chart = self.HistogramChart({"iterations_count": 3}) self.assertIsInstance(chart, charts.HistogramChart) [chart.add_iteration({"foo": x}) for x in ({"bar": 1.2}, {"bar": 2.4}, {"bar": 4.2})] expected = { "data": [ [{"disabled": None, "key": "bar", "view": "Square Root Choice", "values": [{"x": 2.7, "y": 2}, {"x": 4.2, "y": 1}]}], [{"disabled": None, "key": 
"bar", "view": "Sturges Formula", "values": [{"x": 2.2, "y": 1}, {"x": 3.2, "y": 1}, {"x": 4.2, "y": 1}]}], [{"disabled": None, "key": "bar", "view": "Rice Rule", "values": [{"x": 2.2, "y": 1}, {"x": 3.2, "y": 1}, {"x": 4.2, "y": 1}]}]], "views": [{"id": 0, "name": "Square Root Choice"}, {"id": 1, "name": "Sturges Formula"}, {"id": 2, "name": "Rice Rule"}]} self.assertEqual(expected, chart.render()) @ddt.data( {"base_size": 2, "min_value": 1, "max_value": 4, "expected": [{"bins": 2, "view": "Square Root Choice", "x": [2.5, 4.0], "y": [0, 0]}, {"bins": 2, "view": "Sturges Formula", "x": [2.5, 4.0], "y": [0, 0]}, {"bins": 3, "view": "Rice Rule", "x": [2.0, 3.0, 4.0], "y": [0, 0, 0]}]}, {"base_size": 100, "min_value": 27, "max_value": 42, "expected": [ {"bins": 10, "view": "Square Root Choice", "x": [28.5, 30.0, 31.5, 33.0, 34.5, 36.0, 37.5, 39.0, 40.5, 42.0], "y": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}, {"bins": 8, "view": "Sturges Formula", "x": [28.875, 30.75, 32.625, 34.5, 36.375, 38.25, 40.125, 42.0], "y": [0, 0, 0, 0, 0, 0, 0, 0]}, {"bins": 10, "view": "Rice Rule", "x": [28.5, 30.0, 31.5, 33.0, 34.5, 36.0, 37.5, 39.0, 40.5, 42.0], "y": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]}]}) @ddt.unpack def test_views(self, base_size=None, min_value=None, max_value=None, expected=None): chart = self.HistogramChart({"iterations_count": base_size}) self.assertEqual(expected, chart._init_views(min_value, max_value)) class MainHistogramChartTestCase(test.TestCase): def test_add_iteration_and_render(self): chart = charts.MainHistogramChart( {"iterations_count": 3, "min_duration": 2, "max_duration": 7}) self.assertIsInstance(chart, charts.HistogramChart) [chart.add_iteration(itr) for itr in ( {"duration": 1.1, "idle_duration": 2.2, "error": None}, {"error": True}, {"duration": 1.3, "idle_duration": 3.4, "error": None})] expected = { "data": [ [{"disabled": None, "key": "task", "values": [{"x": 4.5, "y": 3}, {"x": 7.0, "y": 0}], "view": "Square Root Choice"}], [{"disabled": None, "key": "task", 
"view": "Sturges Formula", "values": [{"x": 3.666666666666667, "y": 3}, {"x": 5.333333333333334, "y": 0}, {"x": 7.0, "y": 0}]}], [{"disabled": None, "key": "task", "view": "Rice Rule", "values": [{"x": 3.666666666666667, "y": 3}, {"x": 5.333333333333334, "y": 0}, {"x": 7.0, "y": 0}]}]], "views": [{"id": 0, "name": "Square Root Choice"}, {"id": 1, "name": "Sturges Formula"}, {"id": 2, "name": "Rice Rule"}]} self.assertEqual(expected, chart.render()) class AtomicHistogramChartTestCase(test.TestCase): def test_add_iteration_and_render(self): chart = charts.AtomicHistogramChart( {"iterations_count": 3, "atomic": collections.OrderedDict( [("foo", {"min_duration": 1.6, "max_duration": 2.8}), ("bar", {"min_duration": 3.1, "max_duration": 5.5})])}) self.assertIsInstance(chart, charts.HistogramChart) [chart.add_iteration({"atomic_actions": a}) for a in ({"foo": 1.6, "bar": 3.1}, {"foo": 2.8}, {"bar": 5.5})] expected = { "data": [ [{"disabled": 0, "key": "foo", "view": "Square Root Choice", "values": [{"x": 2.2, "y": 2}, {"x": 2.8, "y": 1}]}, {"disabled": 1, "key": "bar", "view": "Square Root Choice", "values": [{"x": 4.3, "y": 2}, {"x": 5.5, "y": 1}]}], [{"disabled": 0, "key": "foo", "view": "Sturges Formula", "values": [{"x": 2.0, "y": 2}, {"x": 2.4, "y": 0}, {"x": 2.8, "y": 1}]}, {"disabled": 1, "key": "bar", "view": "Sturges Formula", "values": [{"x": 3.9, "y": 2}, {"x": 4.7, "y": 0}, {"x": 5.5, "y": 1}]}], [{"disabled": 0, "key": "foo", "view": "Rice Rule", "values": [{"x": 2.0, "y": 2}, {"x": 2.4, "y": 0}, {"x": 2.8, "y": 1}]}, {"disabled": 1, "key": "bar", "view": "Rice Rule", "values": [{"x": 3.9, "y": 2}, {"x": 4.7, "y": 0}, {"x": 5.5, "y": 1}]}]], "views": [{"id": 0, "name": "Square Root Choice"}, {"id": 1, "name": "Sturges Formula"}, {"id": 2, "name": "Rice Rule"}]} self.assertEqual(expected, chart.render()) class TableTestCase(test.TestCase): class Table(charts.Table): columns = ["Name", "Min", "Max", "Max rounded by 2"] def __init__(self, *args, **kwargs): 
super(TableTestCase.Table, self).__init__(*args, **kwargs) for name in "foo", "bar": self._data[name] = [ [charts.streaming.MinComputation(), None], [charts.streaming.MaxComputation(), None], [charts.streaming.MaxComputation(), lambda st, has_result: round(st.result(), 2) if has_result else "n/a"]] def _map_iteration_values(self, iteration): return iteration def add_iteration(self, iteration): for name, value in self._map_iteration_values(iteration).items(): for i, dummy in enumerate(self._data[name]): self._data[name][i][0].add(value) def test___init__(self): self.assertRaises(TypeError, charts.Table, {"iterations_count": 42}) def test__round(self): table = self.Table({"iterations_count": 4}) streaming_ins = mock.Mock() streaming_ins.result.return_value = 42.424242 self.assertRaises(TypeError, table._round, streaming_ins) self.assertEqual("n/a", table._round(streaming_ins, False)) self.assertEqual(round(42.424242, 3), table._round(streaming_ins, True)) def test__row_has_results(self): table = self.Table({"iterations_count": 1}) for st_cls in (charts.streaming.MinComputation, charts.streaming.MaxComputation, charts.streaming.MeanComputation): st = st_cls() self.assertFalse(table._row_has_results([(st, None)])) st.add(0) self.assertTrue(table._row_has_results([(st, None)])) def test__row_has_results_and_get_rows(self): table = self.Table({"iterations_count": 3}) self.assertFalse(table._row_has_results(table._data["foo"])) self.assertFalse(table._row_has_results(table._data["bar"])) self.assertEqual( [["foo", "n/a", "n/a", "n/a"], ["bar", "n/a", "n/a", "n/a"]], table.get_rows()) for i in range(3): table.add_iteration({"foo": i + 1.2, "bar": i + 3.456}) self.assertTrue(table._row_has_results(table._data["foo"])) self.assertTrue(table._row_has_results(table._data["bar"])) self.assertEqual( [["foo", 1.2, 3.2, 3.2], ["bar", 3.456, 5.456, 5.46]], table.get_rows()) def test_render(self): table = self.Table({"iterations_count": 42}) table.get_rows = lambda: "rows data" 
self.assertEqual({"cols": ["Name", "Min", "Max", "Max rounded by 2"], "rows": "rows data"}, table.render()) def generate_iteration(duration, error, *actions): return { "atomic_actions": collections.OrderedDict(actions), "duration": duration, "error": error } @ddt.ddt class MainStatsTableTestCase(test.TestCase): @ddt.data( { "info": { "iterations_count": 1, "atomic": collections.OrderedDict([("foo", {}), ("bar", {})]) }, "data": [ generate_iteration(10.0, False, ("foo", 1.0), ("bar", 2.0)) ], "expected_rows": [ ["foo", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, "100.0%", 1], ["bar", 2.0, 2.0, 2.0, 2.0, 2.0, 2.0, "100.0%", 1], ["total", 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, "100.0%", 1]] }, { "info": {"iterations_count": 2, "atomic": {"foo": {}}}, "data": [ generate_iteration(10.0, True, ("foo", 1.0)), generate_iteration(10.0, True, ("foo", 2.0)) ], "expected_rows": [ ["foo", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 2], ["total", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 2]] }, { "info": {"iterations_count": 2, "atomic": {"foo": {}}}, "data": [ generate_iteration(10.0, False, ("foo", 1.0)), generate_iteration(20.0, True, ("foo", 2.0)) ], "expected_rows": [ ["foo", 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, "50.0%", 2], ["total", 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, "50.0%", 2]] }, { "info": { "iterations_count": 4, "atomic": collections.OrderedDict([("foo", {}), ("bar", {})]) }, "data": [ generate_iteration(10.0, False, ("foo", 1.0), ("bar", 4.0)), generate_iteration(20.0, False, ("foo", 2.0), ("bar", 4.0)), generate_iteration(30.0, False, ("foo", 3.0), ("bar", 4.0)), generate_iteration(40.0, True, ("foo", 4.0), ("bar", 4.0)) ], "expected_rows": [ ["foo", 1.0, 2.0, 2.8, 2.9, 3.0, 2.0, "75.0%", 4], ["bar", 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, "75.0%", 4], ["total", 10.0, 20.0, 28.0, 29.0, 30.0, 20.0, "75.0%", 4]] }, { "info": { "iterations_count": 0, "atomic": collections.OrderedDict() }, "data": [], "expected_rows": [ ["total", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", "n/a", 0]] }, { 
"info": {"iterations_count": 4, "atomic": collections.OrderedDict([("foo", {}), ("bar", {})])}, "data": [ generate_iteration(1.6, True, ("foo", 1.2)), generate_iteration(5.2, False, ("foo", 1.2)), generate_iteration(5.0, True, ("bar", 4.8)), generate_iteration(12.3, False, ("foo", 4.2), ("bar", 5.6)) ], "expected_rows": [ ["foo", 1.2, 2.7, 3.9, 4.05, 4.2, 2.7, "66.7%", 3], ["bar", 5.6, 5.6, 5.6, 5.6, 5.6, 5.6, "50.0%", 2], ["total", 5.2, 8.75, 11.59, 11.945, 12.3, 8.75, "50.0%", 4]] } ) @ddt.unpack def test_add_iteration_and_render(self, info, data, expected_rows): table = charts.MainStatsTable(info) for el in data: table.add_iteration(el) expected = {"cols": ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg (sec)", "Success", "Count"], "rows": expected_rows} self.assertEqual(expected, table.render()) class OutputChartTestCase(test.TestCase): class OutputChart(charts.OutputChart): widget = "FooWidget" def test___init__(self): self.assertRaises(TypeError, charts.OutputChart, {"iterations_count": 42}) chart = self.OutputChart({"iterations_count": 42}) self.assertIsInstance(chart, charts.Chart) def test__map_iteration_values(self): chart = self.OutputChart({"iterations_count": 42}) self.assertEqual("foo data", chart._map_iteration_values("foo data")) def test_render(self): chart = self.OutputChart({"iterations_count": 42}) self.assertEqual( {"widget": "FooWidget", "data": [], "title": "", "description": "", "label": "", "axis_label": ""}, chart.render()) chart = self.OutputChart({"iterations_count": 42}, title="foo title", description="Test!", label="Foo label", axis_label="Axis label") self.assertEqual( {"widget": "FooWidget", "data": [], "label": "Foo label", "axis_label": "Axis label", "title": "foo title", "description": "Test!"}, chart.render()) class OutputStackedAreaChartTestCase(test.TestCase): def test___init__(self): self.assertEqual("StackedArea", charts.OutputStackedAreaChart.widget) chart = 
charts.OutputStackedAreaChart({"iterations_count": 42}) self.assertIsInstance(chart, charts.OutputChart) def test_render(self): # Explicit label chart = charts.OutputStackedAreaChart( {"iterations_count": 2}, label="Label", axis_label="Axis label") chart.add_iteration((("foo", 10), ("bar", 20))) # One iteration is transformed to Table self.assertEqual({"axis_label": "Axis label", "data": {"cols": ["Name", "Label"], "rows": [["foo", 10], ["bar", 20]]}, "description": "", "label": "Label", "title": "", "widget": "Table"}, chart.render()) chart.add_iteration((("foo", 11), ("bar", 21))) # StackedArea for more iterations self.assertEqual({"axis_label": "Axis label", "data": [("foo", [[1, 10], [2, 11]]), ("bar", [[1, 20], [2, 21]])], "description": "", "label": "Label", "title": "", "widget": "StackedArea"}, chart.render()) # No label chart = charts.OutputStackedAreaChart({"iterations_count": 1}) chart.add_iteration((("foo", 10), ("bar", 20))) self.assertEqual({"axis_label": "", "data": {"cols": ["Name", "Value"], "rows": [["foo", 10], ["bar", 20]]}, "description": "", "label": "", "title": "", "widget": "Table"}, chart.render()) class OutputAvgChartTestCase(test.TestCase): def test___init__(self): self.assertEqual("Pie", charts.OutputAvgChart.widget) chart = charts.OutputAvgChart({"iterations_count": 42}) self.assertIsInstance(chart, charts.OutputChart) self.assertIsInstance(chart, charts.AvgChart) class OutputTableTestCase(test.TestCase): class OutputTable(charts.OutputTable): columns = [] def add_iteration(self, iteration): pass def test___init__(self): self.assertEqual("Table", charts.OutputTable.widget) self.assertRaises(TypeError, charts.OutputTable, {"iterations_count": 42}) self.OutputTable({"iterations_count": 42}) @ddt.ddt class OutputStatsTableTestCase(test.TestCase): def test___init__(self): self.assertEqual("Table", charts.OutputStatsTable.widget) self.assertEqual( ["Action", "Min (sec)", "Median (sec)", "90%ile (sec)", "95%ile (sec)", "Max (sec)", "Avg 
(sec)", "Count"], charts.OutputStatsTable.columns) table = charts.OutputStatsTable({"iterations_count": 42}) self.assertIsInstance(table, charts.Table) @ddt.data( {"title": "Foo title", "description": "", "iterations": [], "expected": []}, {"title": "Foo title", "description": "Test description!", "iterations": [[("a", 11), ("b", 22)], [("a", 5.6), ("b", 7.8)], [("a", 42), ("b", 24)]], "expected": [["a", 5.6, 11.0, 35.8, 38.9, 42.0, 10.267, 3], ["b", 7.8, 22.0, 23.6, 23.8, 24.0, 9.467, 3]]}) @ddt.unpack def test_add_iteration_and_render(self, title, description, iterations, expected): table = charts.OutputStatsTable({"iterations_count": len(iterations)}, title=title, description=description) for iteration in iterations: table.add_iteration(iteration) self.assertEqual({"title": title, "description": description, "widget": "Table", "data": {"cols": charts.OutputStatsTable.columns, "rows": expected}, "label": "", "axis_label": ""}, table.render()) class OutputTextAreaTestCase(test.TestCase): def test_class(self): self.assertTrue(issubclass(charts.OutputTextArea, charts.OutputChart)) self.assertEqual("TextArea", charts.OutputTextArea.widget) @ddt.ddt class ModuleTestCase(test.TestCase): @ddt.data({"args": ["unexpected_foo", {}], "expected": ("unexpected output type: 'unexpected_foo', " "should be in ('additive', 'complete')")}, {"args": ["additive", 42], "expected": ("additive output item has wrong type 'int', " "must be 'dict'")}, {"args": ["additive", {}], "expected": "additive output missing key 'title'"}, {"args": ["additive", {"title": "foo"}], "expected": "additive output missing key 'chart_plugin'"}, {"args": ["additive", {"title": "a", "chart_plugin": "b"}], "expected": "additive output missing key 'data'"}, {"args": ["additive", {"title": "a", "chart_plugin": "b", "data": "c"}], "expected": ("Value of additive output data has wrong type " "'str', should be in ('list', 'dict')")}, {"args": ["additive", {"title": "a", "chart_plugin": "b", "data": []}]}, {"args": 
["additive", {"title": "a", "chart_plugin": "b", "data": [], "unexpected_foo": 42}], "expected": ("additive output has unexpected key " "'unexpected_foo'")}, {"args": ["complete", {}], "expected": "complete output missing key 'title'"}, {"args": ["complete", {"title": "foo"}], "expected": "complete output missing key 'chart_plugin'"}, {"args": ["complete", {"title": "a", "chart_plugin": "b"}], "expected": "complete output missing key 'data'"}, {"args": ["complete", {"title": "a", "chart_plugin": "b", "data": "c"}], "expected": ("Value of complete output data has wrong type " "'str', should be in ('list', 'dict')")}, {"args": ["complete", {"title": "a", "chart_plugin": "b", "data": {"foo": "bar"}}]}, {"args": ["complete", {"title": "a", "chart_plugin": "b", "data": [], "unexpected": "bar"}], "expected": ("complete output has unexpected key " "'unexpected'")}) @ddt.unpack def test_validate_output(self, args, expected=None): self.assertEqual(expected, charts.validate_output(*args)) rally-0.9.1/tests/unit/task/test_functional.py0000664000567000056710000002241113073417717022655 0ustar jenkinsjenkins00000000000000# Copyright 2015: Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import sys import testtools from rally import exceptions from rally.task import functional from tests.unit import test class FunctionalMixinTestCase(test.TestCase): def test_asserts(self): class A(functional.FunctionalMixin): def __init__(self): super(A, self).__init__() a = A() a.assertEqual(1, 1) self.assertRaises(exceptions.RallyAssertionError, a.assertEqual, "a", "b") a.assertNotEqual(1, 2) self.assertRaises(exceptions.RallyAssertionError, a.assertNotEqual, "a", "a") a.assertTrue(True) self.assertRaises(exceptions.RallyAssertionError, a.assertTrue, False) a.assertFalse(False) self.assertRaises(exceptions.RallyAssertionError, a.assertFalse, True) a.assertIs("a", "a") self.assertRaises(exceptions.RallyAssertionError, a.assertIs, "a", "b") a.assertIsNot("a", "b") self.assertRaises(exceptions.RallyAssertionError, a.assertIsNot, "a", "a") a.assertIsNone(None) self.assertRaises(exceptions.RallyAssertionError, a.assertIsNone, "a") a.assertIsNotNone("a") self.assertRaises(exceptions.RallyAssertionError, a.assertIsNotNone, None) a.assertIn("1", ["1", "2", "3"]) self.assertRaises(exceptions.RallyAssertionError, a.assertIn, "4", ["1", "2", "3"]) a.assertNotIn("4", ["1", "2", "3"]) self.assertRaises(exceptions.RallyAssertionError, a.assertNotIn, "1", ["1", "2", "3"]) a.assertIsSubset(["1", "2"], ["1", "2", "3"]) self.assertRaises(exceptions.RallyAssertionError, a.assertIsSubset, ["1", "4"], ["1", "2", "3"]) a.assertIsNotSubset(["1", "4"], ["1", "2", "3"]) self.assertRaises(exceptions.RallyAssertionError, a.assertIsNotSubset, ["1", "2"], ["1", "2", "3"]) a.assertIsInstance("a", str) self.assertRaises(exceptions.RallyAssertionError, a.assertIsInstance, "a", int) a.assertIsNotInstance("a", int) self.assertRaises(exceptions.RallyAssertionError, a.assertIsNotInstance, "a", str) a.assertLessEqual(len(["1", "2"]), len(["3", "4", "5"])) a.assertLessEqual(len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertLessEqual, len(["1", "2", 
"3"]), len(["3", "4"])) a.assertLess(len(["1", "2"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertLess, len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertLess, len(["1", "2", "3"]), len(["3", "4"])) a.assertGreaterEqual(len(["1", "2", "9", "8"]), len(["3", "4", "5"])) a.assertGreaterEqual(len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertGreaterEqual, len(["1", "2"]), len(["3", "4", "5"])) a.assertGreater(len(["1", "2", "9", "8"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertGreater, len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertGreater, len(["1", "2"]), len(["3", "4", "5"])) @testtools.skipIf(sys.version_info < (2, 7), "assertRaises as context not supported") def test_assert_with_custom_message(self): class A(functional.FunctionalMixin): def __init__(self): super(A, self).__init__() a = A() custom_message = "A custom message" assert_message = "Assertion error: .+\\. 
" + custom_message a.assertEqual(1, 1, "It's equal") message = self._catch_exception_message(a.assertEqual, "a", "b", custom_message) self.assertRegex(message, assert_message) a.assertNotEqual(1, 2) message = self._catch_exception_message(a.assertNotEqual, "a", "a", custom_message) self.assertRegex(message, assert_message) a.assertTrue(True) message = self._catch_exception_message(a.assertTrue, False, custom_message) self.assertRegex(message, assert_message) a.assertFalse(False) message = self._catch_exception_message(a.assertFalse, True, custom_message) self.assertRegex(message, assert_message) a.assertIs("a", "a") message = self._catch_exception_message(a.assertIs, "a", 1, custom_message) self.assertRegex(message, assert_message) a.assertIsNot("a", "b") message = self._catch_exception_message(a.assertIsNot, "a", "a", custom_message) self.assertRegex(message, assert_message) a.assertIsNone(None) message = self._catch_exception_message(a.assertIsNone, "a", custom_message) self.assertRegex(message, assert_message) a.assertIsNotNone("a") message = self._catch_exception_message(a.assertIsNotNone, None, custom_message) self.assertRegex(message, assert_message) a.assertIn("1", ["1", "2", "3"]) message = self._catch_exception_message(a.assertIn, "1", ["2", "3", "4"], custom_message) self.assertRegex(message, assert_message) a.assertNotIn("4", ["1", "2", "3"]) message = self._catch_exception_message(a.assertNotIn, "1", ["1", "2", "3"], custom_message) self.assertRegex(message, assert_message) a.assertIsInstance("a", str) message = self._catch_exception_message(a.assertIsInstance, "a", int, custom_message) self.assertRegex(message, assert_message) a.assertIsNotInstance("a", int) message = self._catch_exception_message(a.assertIsNotInstance, "a", str, custom_message) self.assertRegex(message, assert_message) a.assertLessEqual(len(["1", "2"]), len(["3", "4", "5"])) a.assertLessEqual(len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, 
a.assertLessEqual, len(["1", "2", "3"]), len(["3", "4"]), custom_message) a.assertLess(len(["1", "2"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertLess, len(["1", "2", "3"]), len(["3", "4", "5"]), custom_message) self.assertRaises(exceptions.RallyAssertionError, a.assertLess, len(["1", "2", "3"]), len(["3", "4"]), custom_message) a.assertGreaterEqual(len(["1", "2", "9", "8"]), len(["3", "4", "5"])) a.assertGreaterEqual(len(["1", "2", "3"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertGreaterEqual, len(["1", "2"]), len(["3", "4", "5"]), custom_message) a.assertGreater(len(["1", "2", "9", "8"]), len(["3", "4", "5"])) self.assertRaises(exceptions.RallyAssertionError, a.assertGreater, len(["1", "2", "3"]), len(["3", "4", "5"]), custom_message) self.assertRaises(exceptions.RallyAssertionError, a.assertGreater, len(["1", "2"]), len(["3", "4", "5"]), custom_message) def _catch_exception_message(self, func, *args): try: func(*args) except Exception as e: return str(e) rally-0.9.1/tests/unit/task/test_hook.py0000664000567000056710000002577213073417720021462 0ustar jenkinsjenkins00000000000000# Copyright 2016: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for HookExecutor and Hook classes.""" import jsonschema import mock from rally import consts from rally.task import hook from tests.unit import fakes from tests.unit import test @hook.configure(name="dummy_hook") class DummyHook(hook.Hook): CONFIG_SCHEMA = { "type": "object", "properties": { "status": {"type": "string"}, "error": {"type": "object"}, "output": {"type": "object"}, }, "required": ["status"], "additionalProperties": False, } def run(self): self.set_status(self.config["status"]) error = self.config.get("error") if error: self.set_error(*error) output = self.config.get("output") if output: self.add_output(**output) class HookExecutorTestCase(test.TestCase): def setUp(self): super(HookExecutorTestCase, self).setUp() self.conf = { "hooks": [ { "name": "dummy_hook", "description": "dummy_action", "args": { "status": consts.HookStatus.SUCCESS, }, "trigger": { "name": "event", "args": { "unit": "iteration", "at": [1], } } } ] } self.task = mock.MagicMock() @mock.patch("rally.task.hook.HookExecutor._timer_method") @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_results(self, mock_timer, mock__timer_method): hook_executor = hook.HookExecutor(self.conf, self.task) hook_executor.on_event(event_type="iteration", value=1) self.assertEqual( [{"config": self.conf["hooks"][0], "results": [{ "triggered_by": {"event_type": "iteration", "value": 1}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS}], "summary": {consts.HookStatus.SUCCESS: 1}}], hook_executor.results()) @mock.patch("rally.task.hook.HookExecutor._timer_method") @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_result_optional(self, mock_timer, mock__timer_method): hook_args = self.conf["hooks"][0]["args"] hook_args["error"] = ["Exception", "Description", "Traceback"] hook_args["output"] = {"additive": None, "complete": None} hook_executor = 
hook.HookExecutor(self.conf, self.task) hook_executor.on_event(event_type="iteration", value=1) self.assertEqual( [{"config": self.conf["hooks"][0], "results": [{ "triggered_by": {"event_type": "iteration", "value": 1}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "error": {"details": "Traceback", "etype": "Exception", "msg": "Description"}, "output": {"additive": [], "complete": []}, "status": consts.HookStatus.FAILED}], "summary": {consts.HookStatus.FAILED: 1}}], hook_executor.results()) def test_empty_result(self): hook_executor = hook.HookExecutor(self.conf, self.task) self.assertEqual([{"config": self.conf["hooks"][0], "results": [], "summary": {}}], hook_executor.results()) @mock.patch("rally.task.hook.HookExecutor._timer_method") @mock.patch.object(DummyHook, "run", side_effect=Exception("My err msg")) @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_failed_result(self, mock_timer, mock_dummy_hook_run, mock__timer_method): hook_executor = hook.HookExecutor(self.conf, self.task) hook_executor.on_event(event_type="iteration", value=1) self.assertEqual( [{"config": self.conf["hooks"][0], "results": [{ "triggered_by": {"event_type": "iteration", "value": 1}, "error": {"etype": "Exception", "msg": mock.ANY, "details": mock.ANY}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.FAILED}], "summary": {consts.HookStatus.FAILED: 1}}], hook_executor.results()) @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_time_event(self, mock_timer): trigger_args = self.conf["hooks"][0]["trigger"]["args"] trigger_args["unit"] = "time" hook_executor = hook.HookExecutor(self.conf, self.task) hook_executor.on_event(event_type="time", value=1) self.assertEqual( [{"config": self.conf["hooks"][0], "results": [{ "triggered_by": {"event_type": "time", "value": 1}, "started_at": 
fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS}], "summary": {consts.HookStatus.SUCCESS: 1}}], hook_executor.results()) @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_time_periodic(self, mock_timer): self.conf["hooks"][0]["trigger"] = { "name": "periodic", "args": {"unit": "time", "step": 2}} hook_executor = hook.HookExecutor(self.conf, self.task) for i in range(1, 7): hook_executor.on_event(event_type="time", value=i) self.assertEqual( [{ "config": self.conf["hooks"][0], "results":[ { "triggered_by": {"event_type": "time", "value": 2}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS }, { "triggered_by": {"event_type": "time", "value": 4}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS }, { "triggered_by": {"event_type": "time", "value": 6}, "started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS } ], "summary": {consts.HookStatus.SUCCESS: 3} }], hook_executor.results()) @mock.patch("rally.common.utils.Stopwatch", autospec=True) @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_timer_thread(self, mock_timer, mock_stopwatch): trigger_args = self.conf["hooks"][0]["trigger"]["args"] trigger_args["unit"] = "time" hook_executor = hook.HookExecutor(self.conf, self.task) def stop_timer(sec): if sec == 3: hook_executor._timer_stop_event.set() stopwatch_inst = mock_stopwatch.return_value stopwatch_inst.sleep.side_effect = stop_timer hook_executor.on_event(event_type="iteration", value=1) self.assertTrue(hook_executor._timer_stop_event.wait(1)) self.assertEqual( [{"config": self.conf["hooks"][0], "results": [{ "triggered_by": {"event_type": "time", "value": 1}, "started_at": 
fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "status": consts.HookStatus.SUCCESS}], "summary": {consts.HookStatus.SUCCESS: 1} }], hook_executor.results()) stopwatch_inst.start.assert_called_once_with() stopwatch_inst.sleep.assert_has_calls([ mock.call(1), mock.call(2), mock.call(3), ]) class HookTestCase(test.TestCase): def test_validate(self): hook.Hook.validate( { "name": "dummy_hook", "description": "dummy_action", "args": { "status": consts.HookStatus.SUCCESS, }, "trigger": { "name": "event", "args": { "unit": "iteration", "at": [1], } } } ) def test_validate_error(self): conf = { "name": "dummy_hook", "description": "dummy_action", "args": 3, "trigger": { "name": "event", "args": { "unit": "iteration", "at": [1], } } } self.assertRaises(jsonschema.ValidationError, hook.Hook.validate, conf) @mock.patch("rally.common.utils.Timer", side_effect=fakes.FakeTimer) def test_result(self, mock_timer): task = mock.MagicMock() triggered_by = {"event_type": "iteration", "value": 1} dummy_hook = DummyHook(task, {"status": consts.HookStatus.SUCCESS}, triggered_by) dummy_hook.run_sync() self.assertEqual( {"started_at": fakes.FakeTimer().timestamp(), "finished_at": fakes.FakeTimer().finish_timestamp(), "triggered_by": triggered_by, "status": consts.HookStatus.SUCCESS}, dummy_hook.result()) def test_result_not_started(self): task = mock.MagicMock() triggered_by = {"event_type": "iteration", "value": 1} dummy_hook = DummyHook(task, {"status": consts.HookStatus.SUCCESS}, triggered_by) self.assertEqual( {"started_at": 0.0, "finished_at": 0.0, "triggered_by": triggered_by, "status": consts.HookStatus.SUCCESS}, dummy_hook.result()) rally-0.9.1/tests/unit/task/test_scenario.py0000664000567000056710000003121213073417720022307 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import traceback import uuid import mock from rally import consts from rally import exceptions from rally.task import context from rally.task import scenario from rally.task import validation from tests.unit import fakes from tests.unit import test class ScenarioConfigureTestCase(test.TestCase): def test_configure(self): @scenario.configure("fooscenario.name", "testing") def some_func(): pass self.assertEqual("fooscenario.name", some_func.get_name()) self.assertEqual("testing", some_func.get_namespace()) self.assertFalse(some_func.is_classbased) some_func.unregister() def test_configure_default_name(self): @scenario.configure(namespace="testing", context={"any": 42}) def some_func(): pass self.assertIsNone(some_func._meta_get("name")) self.assertEqual("testing", some_func.get_namespace()) self.assertEqual({"any": 42}, some_func.get_default_context()) self.assertFalse(some_func.is_classbased) some_func.unregister() def test_configure_cls(self): class ScenarioPluginCls(scenario.Scenario): @scenario.configure(namespace="any", context={"any": 43}) def some(self): pass self.assertEqual("ScenarioPluginCls.some", ScenarioPluginCls.some.get_name()) self.assertEqual("any", ScenarioPluginCls.some.get_namespace()) self.assertEqual({"any": 43}, ScenarioPluginCls.some.get_default_context()) self.assertFalse(ScenarioPluginCls.some.is_classbased) ScenarioPluginCls.some.unregister() def test_configure_classbased(self): @scenario.configure(name="fooscenario.name", namespace="testing") class SomeScenario(scenario.Scenario): def run(self): pass self.assertEqual("fooscenario.name", 
SomeScenario.get_name()) self.assertTrue(SomeScenario.is_classbased) SomeScenario.unregister() class ScenarioTestCase(test.TestCase): def test__validate_helper(self): validators = [ mock.MagicMock(return_value=validation.ValidationResult(True)), mock.MagicMock(return_value=validation.ValidationResult(True)) ] clients = mock.MagicMock() config = {"a": 1, "b": 2} deployment = mock.MagicMock() scenario.Scenario._validate_helper(validators, clients, config, deployment) for validator in validators: validator.assert_called_with(config, clients=clients, deployment=deployment) @mock.patch("rally.task.scenario.LOG") def test__validate_helper_somethingwent_wrong(self, mock_log): validator = mock.MagicMock() validator.side_effect = Exception() self.assertRaises(exceptions.InvalidScenarioArgument, scenario.Scenario._validate_helper, [validator], "cl", "config", "deployment") validator.assert_called_once_with("config", clients="cl", deployment="deployment") self.assertTrue(mock_log.exception.called) def test__validate_helper__no_valid(self): validators = [ mock.MagicMock(return_value=validation.ValidationResult(True)), mock.MagicMock( return_value=validation.ValidationResult(is_valid=False) ) ] clients = mock.MagicMock() args = {"a": 1, "b": 2} self.assertRaises(exceptions.InvalidScenarioArgument, scenario.Scenario._validate_helper, validators, clients, args, "fake_uuid") def test_validate__no_validators(self): class Testing(fakes.FakeScenario): @scenario.configure() def validate__no_validators(self): pass scenario.Scenario.validate("Testing.validate__no_validators", {"a": 1, "b": 2}) Testing.validate__no_validators.unregister() @mock.patch("rally.task.scenario.Scenario._validate_helper") def test_validate__admin_validators(self, mock_scenario__validate_helper): class Testing(fakes.FakeScenario): @scenario.configure(namespace="testing") def validate_admin_validators(self): pass validators = [mock.MagicMock(), mock.MagicMock()] for validator in validators: validator.permission = 
consts.EndpointPermission.ADMIN Testing.validate_admin_validators._meta_set( "validators", validators) deployment = mock.MagicMock() args = {"a": 1, "b": 2} scenario.Scenario.validate("Testing.validate_admin_validators", args, admin="admin", deployment=deployment) mock_scenario__validate_helper.assert_called_once_with( validators, "admin", args, deployment) Testing.validate_admin_validators.unregister() @mock.patch("rally.task.scenario.Scenario._validate_helper") def test_validate_user_validators(self, mock_scenario__validate_helper): class Testing(fakes.FakeScenario): @scenario.configure() def validate_user_validators(self): pass validators = [mock.MagicMock(), mock.MagicMock()] for validator in validators: validator.permission = consts.EndpointPermission.USER Testing.validate_user_validators._meta_set("validators", validators) args = {"a": 1, "b": 2} scenario.Scenario.validate( "Testing.validate_user_validators", args, users=["u1", "u2"]) mock_scenario__validate_helper.assert_has_calls([ mock.call(validators, "u1", args, None), mock.call(validators, "u2", args, None) ]) Testing.validate_user_validators.unregister() def test__validate_scenario_args(self): class Testing(fakes.FakeScenario): @scenario.configure() def fake_scenario_to_validate_scenario_args(self, arg1, arg2, arg3=None): pass name = "Testing.fake_scenario_to_validate_scenario_args" scen = scenario.Scenario.get(name) # check case when argument is missed e = self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario._validate_scenario_args, scen, name, {"args": {"arg1": 3}}) self.assertIn("Argument(s) 'arg2' should be specified in task config.", e.format_message()) # check case when one argument is redundant e = self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario._validate_scenario_args, scen, name, {"args": {"arg1": 1, "arg2": 2, "arg4": 4}}) self.assertIn("Unexpected argument(s) found ['arg4']", e.format_message()) def 
test__validate_scenario_args_with_class_based_scenario(self): name = "%s.need_dot" % uuid.uuid4() @scenario.configure(name=name) class Testing(scenario.Scenario): def run(self, arg): pass e = self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario._validate_scenario_args, Testing, name, {}) self.assertIn("Argument(s) 'arg' should be specified in task config.", e.format_message()) def test_sleep_between_invalid_args(self): self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario().sleep_between, 15, 5) self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario().sleep_between, -1, 0) self.assertRaises(exceptions.InvalidArgumentsException, scenario.Scenario().sleep_between, 0, -2) def test_sleep_between(self): scenario_inst = scenario.Scenario() scenario_inst.sleep_between(0.001, 0.002) self.assertGreaterEqual(scenario_inst.idle_duration(), 0.001) self.assertLessEqual(scenario_inst.idle_duration(), 0.002) def test_sleep_beetween_multi(self): scenario_inst = scenario.Scenario() scenario_inst.sleep_between(0.001, 0.001) scenario_inst.sleep_between(0.004, 0.004) self.assertEqual(scenario_inst.idle_duration(), 0.005) @mock.patch("rally.common.utils.interruptable_sleep") @mock.patch("rally.task.scenario.random.uniform") def test_sleep_between_internal(self, mock_uniform, mock_interruptable_sleep): scenario_inst = scenario.Scenario() mock_uniform.return_value = 1.5 scenario_inst.sleep_between(1, 2) mock_interruptable_sleep.assert_called_once_with( mock_uniform.return_value, 0.1) self.assertEqual(scenario_inst.idle_duration(), mock_uniform.return_value) def test_scenario_context_are_valid(self): for s in scenario.Scenario.get_all(): try: context.ContextManager.validate(s.get_default_context(), allow_hidden=True) except Exception: print(traceback.format_exc()) self.fail("Scenario `%s` has wrong context" % scenario) def test_add_output(self): scenario_inst = scenario.Scenario() self.assertEqual({"additive": [], "complete": 
[]}, scenario_inst._output) additive1 = {"title": "Additive 1", "chart_plugin": "Plugin1", "description": "Foo description", "data": [["foo", 1], ["bar", 2]]} additive2 = {"title": "Additive 2", "chart_plugin": "Plugin2", "description": "Bar description", "data": [["foo", 42], ["bar", 24]]} complete1 = {"title": "Complete 1", "chart_plugin": "Plugin3", "description": "Complete description", "data": [["ab", 1], ["cd", 2]]} complete2 = {"title": "Complete 2", "chart_plugin": "Plugin4", "description": "Another complete description", "data": [["vx", 1], ["yz", 2]]} scenario_inst.add_output(additive=additive1) self.assertEqual({"additive": [additive1], "complete": []}, scenario_inst._output) scenario_inst.add_output(complete=complete1) self.assertEqual({"additive": [additive1], "complete": [complete1]}, scenario_inst._output) scenario_inst.add_output(additive=additive2, complete=complete2) self.assertEqual({"additive": [additive1, additive2], "complete": [complete1, complete2]}, scenario_inst._output) def test_add_output_raises(self): additive = {"title": "Foo title", "chart_plugin": "Plugin1", "description": "Foo description", "data": [["ab", 1], ["cd", 2]]} complete = {"title": "Bar title", "chart_plugin": "Plugin2", "description": "Bar description", "data": [["ef", 1], ["jh", 2]]} scenario_inst = scenario.Scenario() scenario_inst.add_output(additive=additive, complete=complete) for key in "title", "chart_plugin", "data": broken_additive = additive.copy() del broken_additive[key] self.assertRaises(exceptions.RallyException, scenario_inst.add_output, additive=broken_additive) broken_complete = complete.copy() del broken_complete[key] self.assertRaises(exceptions.RallyException, scenario_inst.add_output, complete=broken_complete) rally-0.9.1/tests/unit/task/test_runner.py0000664000567000056710000003622313073417720022024 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import multiprocessing import ddt import mock from rally.plugins.common.runners import serial from rally.task import runner from rally.task import scenario from tests.unit import fakes from tests.unit import test BASE = "rally.task.runner." class ScenarioRunnerHelpersTestCase(test.TestCase): @mock.patch(BASE + "utils.format_exc") def test_format_result_on_timeout(self, mock_format_exc): mock_exc = mock.MagicMock() expected = { "duration": 100, "idle_duration": 0, "output": {"additive": [], "complete": []}, "atomic_actions": {}, "error": mock_format_exc.return_value } self.assertEqual(runner.format_result_on_timeout(mock_exc, 100), expected) mock_format_exc.assert_called_once_with(mock_exc) def test_get_scenario_context(self): context_obj = {"foo": "bar"} result = runner._get_scenario_context(13, context_obj) self.assertEqual(result, {"foo": "bar", "iteration": 14}) def test_run_scenario_once_internal_logic(self): context = runner._get_scenario_context( 12, fakes.FakeContext({}).context) scenario_cls = mock.MagicMock() event_queue = mock.MagicMock() runner._run_scenario_once( scenario_cls, "test", context, {}, event_queue) expected_calls = [ mock.call(context), mock.call().test(), mock.call().idle_duration(), mock.call().idle_duration(), mock.call().atomic_actions() ] scenario_cls.assert_has_calls(expected_calls, any_order=True) event_queue.put.assert_called_once_with( {"type": "iteration", "value": 13}) 
@mock.patch(BASE + "rutils.Timer", side_effect=fakes.FakeTimer) def test_run_scenario_once_without_scenario_output(self, mock_timer): result = runner._run_scenario_once( fakes.FakeScenario, "do_it", mock.MagicMock(), {}, mock.MagicMock()) expected_result = { "duration": fakes.FakeTimer().duration(), "timestamp": fakes.FakeTimer().timestamp(), "idle_duration": 0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {} } self.assertEqual(expected_result, result) @mock.patch(BASE + "rutils.Timer", side_effect=fakes.FakeTimer) def test_run_scenario_once_with_added_scenario_output(self, mock_timer): result = runner._run_scenario_once( fakes.FakeScenario, "with_add_output", mock.MagicMock(), {}, mock.MagicMock()) expected_result = { "duration": fakes.FakeTimer().duration(), "timestamp": fakes.FakeTimer().timestamp(), "idle_duration": 0, "error": [], "output": {"additive": [{"chart_plugin": "FooPlugin", "description": "Additive description", "data": [["a", 1]], "title": "Additive"}], "complete": [{"data": [["a", [[1, 2], [2, 3]]]], "description": "Complete description", "title": "Complete", "chart_plugin": "BarPlugin"}]}, "atomic_actions": {} } self.assertEqual(expected_result, result) @mock.patch(BASE + "rutils.Timer", side_effect=fakes.FakeTimer) def test_run_scenario_once_exception(self, mock_timer): result = runner._run_scenario_once( fakes.FakeScenario, "something_went_wrong", mock.MagicMock(), {}, mock.MagicMock()) expected_error = result.pop("error") expected_result = { "duration": fakes.FakeTimer().duration(), "timestamp": fakes.FakeTimer().timestamp(), "idle_duration": 0, "output": {"additive": [], "complete": []}, "atomic_actions": {} } self.assertEqual(expected_result, result) self.assertEqual(expected_error[:2], ["Exception", "Something went wrong"]) @ddt.ddt class ScenarioRunnerTestCase(test.TestCase): @mock.patch(BASE + "rutils.Timer.duration", return_value=10) def test_run(self, mock_timer_duration): runner_obj = 
serial.SerialScenarioRunner( mock.MagicMock(), mock.MagicMock()) runner_obj._run_scenario = mock.MagicMock() scenario_name = "NovaServers.boot_server_from_volume_and_delete" config_kwargs = {"image": {"id": 1}, "flavor": {"id": 1}} context_obj = { "task": runner_obj.task, "scenario_name": scenario_name, "admin": {"credential": mock.MagicMock()}, "config": { "cleanup": ["nova", "cinder"], "some_ctx": 2, "users": {} } } result = runner_obj.run(scenario_name, context_obj, config_kwargs) self.assertIsNone(result) self.assertEqual(runner_obj.run_duration, mock_timer_duration.return_value) self.assertEqual(list(runner_obj.result_queue), []) plugin_cls, method_name = scenario.Scenario.get(scenario_name), "run" self.assertTrue(plugin_cls.is_classbased) expected_config_kwargs = {"image": 1, "flavor": 1} runner_obj._run_scenario.assert_called_once_with( plugin_cls, method_name, context_obj, expected_config_kwargs) @mock.patch(BASE + "rutils.Timer.duration", return_value=10) def test_run_classbased(self, mock_timer_duration): scenario_class = fakes.FakeClassBasedScenario runner_obj = serial.SerialScenarioRunner( mock.MagicMock(), mock.MagicMock()) runner_obj._run_scenario = mock.Mock() context_obj = {"task": runner_obj.task, "scenario_name": "classbased.fooscenario", "admin": {"credential": "foo_credentials"}, "config": {}} result = runner_obj.run("classbased.fooscenario", context_obj, {"foo": 11, "bar": "spam"}) self.assertIsNone(result) self.assertEqual(runner_obj.run_duration, mock_timer_duration.return_value) self.assertEqual([], list(runner_obj.result_queue)) runner_obj._run_scenario.assert_called_once_with( scenario_class, "run", context_obj, {"foo": 11, "bar": "spam"}) def test_abort(self): runner_obj = serial.SerialScenarioRunner( mock.MagicMock(), mock.MagicMock()) self.assertFalse(runner_obj.aborted.is_set()) runner_obj.abort() self.assertTrue(runner_obj.aborted.is_set()) def test__create_process_pool(self): runner_obj = serial.SerialScenarioRunner( 
mock.MagicMock(), mock.MagicMock()) processes_to_start = 10 def worker_process(i): pass counter = ((i,) for i in range(100)) process_pool = runner_obj._create_process_pool( processes_to_start, worker_process, counter) self.assertEqual(processes_to_start, len(process_pool)) for process in process_pool: self.assertIsInstance(process, multiprocessing.Process) @mock.patch(BASE + "ScenarioRunner._send_result") def test__join_processes(self, mock_scenario_runner__send_result): process = mock.MagicMock(is_alive=mock.MagicMock(return_value=False)) processes = 10 process_pool = collections.deque([process] * processes) mock_result_queue = mock.MagicMock( empty=mock.MagicMock(return_value=True)) mock_event_queue = mock.MagicMock( empty=mock.MagicMock(return_value=True)) runner_obj = serial.SerialScenarioRunner( mock.MagicMock(), mock.MagicMock()) runner_obj._join_processes( process_pool, mock_result_queue, mock_event_queue) self.assertEqual(processes, process.join.call_count) mock_result_queue.close.assert_called_once_with() def _get_runner(self, task="mock_me", config="mock_me", batch_size=0): class ScenarioRunner(runner.ScenarioRunner): def _run_scenario(self, *args, **kwargs): raise NotImplementedError("Do not run me!") task = task if task != "mock_me" else mock.Mock() config = config if config != "mock_me" else mock.Mock() scenario_runner = ScenarioRunner(task, config, batch_size) scenario_runner._meta_init() scenario_runner._meta_set("name", "FakePlugin_%s" % id(ScenarioRunner)) return scenario_runner @ddt.data( {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "output": {"additive": [], "complete": []}, "error": ["err1", "err2"], "atomic_actions": {}}, "expected": True}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 4.2}}, "expected": True}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": ["a1", 
"a2"], "complete": ["c1", "c2"]}, "atomic_actions": {"foo": 4.2}}, "validate_output_calls": [("additive", "a1"), ("additive", "a2"), ("complete", "c1"), ("complete", "c2")], "expected": True}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": ["a1", "a2"], "complete": ["c1", "c2"]}, "atomic_actions": {"foo": 4.2}}, "validate_output_return_value": "validation error message"}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [42], "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 4.2}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": 42}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {"foo": "non-float"}}}, {"data": {"duration": 1, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": "foo", "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {}, "atomic_actions": {}}}, {"data": {"timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, 
"atomic_actions": {}}}, {"data": {"duration": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "error": [], "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "output": {"additive": [], "complete": []}, "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "atomic_actions": {}}}, {"data": {"duration": 1.0, "timestamp": 1.0, "idle_duration": 1.0, "error": [], "output": {"additive": [], "complete": []}}}, {"data": []}, {"data": {}}, {"data": "foo"}) @ddt.unpack @mock.patch("rally.task.runner.LOG") @mock.patch(BASE + "charts.validate_output") def test__result_has_valid_schema(self, mock_validate_output, mock_log, data, expected=False, validate_output_return_value=None, validate_output_calls=None): runner_ = self._get_runner(task={"uuid": "foo_uuid"}) mock_validate_output.return_value = validate_output_return_value self.assertEqual(expected, runner_._result_has_valid_schema(data), message=repr(data)) if validate_output_calls: mock_validate_output.assert_has_calls( [mock.call(*args) for args in validate_output_calls], any_order=True) def test__send_result(self): runner_ = self._get_runner(task={"uuid": "foo_uuid"}) result = {"timestamp": 42} runner_._result_has_valid_schema = mock.Mock(return_value=True) self.assertIsNone(runner_._send_result(result)) self.assertEqual([], runner_.result_batch) self.assertEqual(collections.deque([[result]]), runner_.result_queue) @mock.patch("rally.task.runner.LOG") def test__send_result_with_invalid_schema(self, mock_log): runner_ = self._get_runner(task={"uuid": "foo_uuid"}) result = {"timestamp": 42} runner_._result_has_valid_schema = mock.Mock(return_value=False) self.assertIsNone(runner_._send_result(result)) runner_._result_has_valid_schema.assert_called_once_with(result) 
self.assertTrue(mock_log.warning.called) self.assertEqual([], runner_.result_batch) self.assertEqual(collections.deque([]), runner_.result_queue) rally-0.9.1/tests/unit/task/test_utils.py0000775000567000056710000005467113073417720021665 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime as dt from jsonschema import exceptions as schema_exceptions import mock from rally import exceptions from rally.task import utils from tests.unit import fakes from tests.unit import test class BenchmarkUtilsTestCase(test.TestCase): def test_wait_for_delete(self): def update_resource(self): raise exceptions.GetResourceNotFound(resource=None) resource = mock.MagicMock() utils.wait_for_delete(resource, update_resource=update_resource) @mock.patch("time.sleep") @mock.patch("time.time") def test_wait_for_delete_fails(self, mock_time, mock_sleep): def update_resource(self): pass mock_time.side_effect = [1, 2, 3, 4] resource = mock.MagicMock() self.assertRaises(exceptions.TimeoutException, utils.wait_for_delete, resource, update_resource=update_resource, timeout=1) def test_resource_is(self): is_active = utils.resource_is("ACTIVE") self.assertEqual(is_active.status_getter, utils.get_status) self.assertTrue(is_active(fakes.FakeResource(status="active"))) self.assertTrue(is_active(fakes.FakeResource(status="aCtIvE"))) self.assertFalse(is_active(fakes.FakeResource(status="ERROR"))) def 
test_resource_is_with_fake_status_getter(self): fake_getter = mock.MagicMock(return_value="LGTM") fake_res = mock.MagicMock() is_lgtm = utils.resource_is("LGTM", fake_getter) self.assertTrue(is_lgtm(fake_res)) fake_getter.assert_called_once_with(fake_res) def test_infinite_run_args_generator(self): args = lambda x: (x, "a", "b", 123) for i, real_args in enumerate(utils.infinite_run_args_generator(args)): self.assertEqual((i, "a", "b", 123), real_args) if i > 5: break def test_manager_list_sizes(self): manager = fakes.FakeManager() def lst(): return [1] * 10 manager.list = lst manager_list_size = utils.manager_list_size([5]) self.assertFalse(manager_list_size(manager)) manager_list_size = utils.manager_list_size([10]) self.assertTrue(manager_list_size(manager)) def test_get_from_manager(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() resource = fakes.FakeResource(manager=manager) manager._cache(resource) self.assertEqual(get_from_manager(resource), resource) def test_get_from_manager_with_uuid_field(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() resource = fakes.FakeResource(manager=manager) manager._cache(resource) self.assertEqual(get_from_manager(resource, id_attr="uuid"), resource) def test_get_from_manager_in_error_state(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() resource = fakes.FakeResource(manager=manager, status="ERROR") manager._cache(resource) self.assertRaises(exceptions.GetResourceFailure, get_from_manager, resource) def test_get_from_manager_in_deleted_state(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() resource = fakes.FakeResource(manager=manager, status="DELETED") manager._cache(resource) self.assertRaises(exceptions.GetResourceNotFound, get_from_manager, resource) def test_get_from_manager_in_deleted_state_for_heat_resource(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() 
resource = fakes.FakeResource(manager=manager) resource.stack_status = "DELETE_COMPLETE" manager._cache(resource) self.assertRaises(exceptions.GetResourceNotFound, get_from_manager, resource) def test_get_from_manager_in_deleted_state_for_ceilometer_resource(self): get_from_manager = utils.get_from_manager() manager = fakes.FakeManager() resource = fakes.FakeResource(manager=manager) resource.state = "DELETED" manager._cache(resource) self.assertRaises(exceptions.GetResourceNotFound, get_from_manager, resource) def test_get_from_manager_not_found(self): get_from_manager = utils.get_from_manager() manager = mock.MagicMock() resource = fakes.FakeResource(manager=manager, status="ERROR") class NotFoundException(Exception): http_status = 404 manager.get = mock.MagicMock(side_effect=NotFoundException) self.assertRaises(exceptions.GetResourceNotFound, get_from_manager, resource) def test_get_from_manager_http_exception(self): get_from_manager = utils.get_from_manager() manager = mock.MagicMock() resource = fakes.FakeResource(manager=manager, status="ERROR") class HTTPException(Exception): pass manager.get = mock.MagicMock(side_effect=HTTPException) self.assertRaises(exceptions.GetResourceFailure, get_from_manager, resource) def test_check_service_status(self): class service(object): def __init__(self, name): self.status = "enabled" self.state = "up" self.name = name def __str__(self): return self.name client = mock.MagicMock() client.services.list.return_value = [service("nova-compute"), service("nova-network"), service("glance-api")] ret = utils.check_service_status(client, "nova-network") self.assertTrue(ret) self.assertTrue(client.services.list.called) def test_check_service_status_fail(self): class service(object): def __init__(self, name): self.status = "enabled" self.state = "down" self.name = name def __str__(self): return self.name client = mock.MagicMock() client.services.list.return_value = [service("nova-compute"), service("nova-network"), 
service("glance-api")] ret = utils.check_service_status(client, "nova-network") self.assertFalse(ret) self.assertTrue(client.services.list.called) class WaitForTestCase(test.TestCase): def setUp(self): super(WaitForTestCase, self).setUp() self.resource = fakes.FakeResource() self.load_secs = 0.01 self.fake_checker_delayed = self.get_fake_checker_delayed( seconds=self.load_secs) def get_fake_checker_delayed(self, **delay): deadline = dt.datetime.now() + dt.timedelta(**delay) return lambda obj: dt.datetime.now() > deadline def fake_checker_false(self, obj): return False def fake_updater(self, obj): return obj def test_wait_for_with_updater(self): loaded_resource = utils.wait_for(self.resource, is_ready=self.fake_checker_delayed, update_resource=self.fake_updater, timeout=1, check_interval=self.load_secs / 3) self.assertEqual(loaded_resource, self.resource) def test_wait_for_no_updater(self): loaded_resource = utils.wait_for(self.resource, is_ready=self.fake_checker_delayed, update_resource=None, timeout=1, check_interval=self.load_secs / 3) self.assertEqual(loaded_resource, self.resource) def test_wait_for_timeout_failure(self): self.resource.name = "fake_name" self.resource.id = "fake_id" self.resource.status = "fake_stale_status" is_ready = utils.resource_is("fake_new_status") exc = self.assertRaises( exceptions.TimeoutException, utils.wait_for, self.resource, is_ready=is_ready, update_resource=self.fake_updater, timeout=self.load_secs, check_interval=self.load_secs / 3) self.assertEqual(exc.kwargs["resource_name"], "fake_name") self.assertEqual(exc.kwargs["resource_id"], "fake_id") self.assertEqual(exc.kwargs["desired_status"], "fake_new_status") self.assertEqual(exc.kwargs["resource_status"], "FAKE_STALE_STATUS") self.assertIn("FakeResource", str(exc)) self.assertIn("fake_new_status", str(exc)) def action_one(self, *args, **kwargs): pass def action_two(self, *args, **kwargs): pass class ActionBuilderTestCase(test.TestCase): def setUp(self): 
super(ActionBuilderTestCase, self).setUp() self.mock_one = "%s.action_one" % __name__ self.mock_two = "%s.action_two" % __name__ def test_invalid_keyword(self): builder = utils.ActionBuilder(["action_one", "action_two"]) self.assertRaises(schema_exceptions.ValidationError, builder.build_actions, [{"missing": 1}]) def test_invalid_bind(self): builder = utils.ActionBuilder(["action_one"]) self.assertRaises(schema_exceptions.ValidationError, builder.bind_action, "missing", action_one) def test_invalid_schema(self): builder = utils.ActionBuilder(["action_one", "action_two"]) self.assertRaises(schema_exceptions.ValidationError, builder.validate, [{"action_oone": 1}, {"action_twoo": 2}]) self.assertRaises(schema_exceptions.ValidationError, builder.validate, [{"action_one": -1}, {"action_two": 2}]) self.assertRaises(schema_exceptions.ValidationError, builder.validate, [{"action_one": 0}, {"action_two": 2}]) self.assertRaises(schema_exceptions.ValidationError, builder.validate, [{1: 0}, {"action_two": 2}]) self.assertRaises(schema_exceptions.ValidationError, builder.validate, [{"action_two": "action_two"}]) def test_positional_args(self): with mock.patch(self.mock_one) as mock_action_one: with mock.patch(self.mock_two) as mock_action_two: builder = utils.ActionBuilder(["action_one", "action_two"]) builder.bind_action("action_one", mock_action_one, "a", "b") builder.bind_action("action_two", mock_action_two, "c") actions = builder.build_actions([{"action_two": 3}, {"action_one": 4}]) for action in actions: action() self.assertEqual(4, mock_action_one.call_count, "action one not called 4 times") mock_calls = [] for i in range(4): mock_calls.append(mock.call("a", "b")) mock_action_one.assert_has_calls(mock_calls) self.assertEqual(3, mock_action_two.call_count, "action two not called 3 times") mock_calls = [] for i in range(3): mock_calls.append(mock.call("c")) mock_action_two.assert_has_calls(mock_calls) with mock.patch(self.mock_one) as mock_action_one: with 
mock.patch(self.mock_two) as mock_action_two: builder = utils.ActionBuilder(["action_one", "action_two"]) builder.bind_action("action_one", mock_action_one, "a", "b") builder.bind_action("action_two", mock_action_two, "c") actions = builder.build_actions([{"action_two": 3}, {"action_one": 4}], "d", 5) for action in actions: action() self.assertEqual(4, mock_action_one.call_count, "action one not called 4 times") mock_calls = [] for i in range(4): mock_calls.append(mock.call("a", "b", "d", 5)) mock_action_one.assert_has_calls(mock_calls) self.assertEqual(3, mock_action_two.call_count, "action two not called 3 times") mock_calls = [] for i in range(3): mock_calls.append(mock.call("c", "d", 5)) mock_action_two.assert_has_calls(mock_calls) def test_kwargs(self): with mock.patch(self.mock_one) as mock_action_one: with mock.patch(self.mock_two) as mock_action_two: builder = utils.ActionBuilder(["action_one", "action_two"]) builder.bind_action("action_one", mock_action_one, a=1, b=2) builder.bind_action("action_two", mock_action_two, c=3) actions = builder.build_actions([{"action_two": 3}, {"action_one": 4}]) for action in actions: action() self.assertEqual(4, mock_action_one.call_count, "action one not called 4 times") mock_calls = [] for i in range(4): mock_calls.append(mock.call(a=1, b=2)) mock_action_one.assert_has_calls(mock_calls) self.assertEqual(3, mock_action_two.call_count, "action two not called 3 times") mock_calls = [] for i in range(3): mock_calls.append(mock.call(c=3)) mock_action_two.assert_has_calls(mock_calls) with mock.patch(self.mock_one) as mock_action_one: with mock.patch(self.mock_two) as mock_action_two: builder = utils.ActionBuilder(["action_one", "action_two"]) builder.bind_action("action_one", mock_action_one, a=1, b=2) builder.bind_action("action_two", mock_action_two, c=3) actions = builder.build_actions([{"action_two": 3}, {"action_one": 4}], d=4, e=5) for action in actions: action() self.assertEqual(4, mock_action_one.call_count, "action one 
not called 4 times") mock_calls = [] for i in range(4): mock_calls.append(mock.call(a=1, b=2, d=4, e=5)) mock_action_one.assert_has_calls(mock_calls) self.assertEqual(3, mock_action_two.call_count, "action two not called 3 times") mock_calls = [] for i in range(3): mock_calls.append(mock.call(c=3, d=4, e=5)) mock_action_two.assert_has_calls(mock_calls) def test_mixed_args(self): with mock.patch(self.mock_one) as mock_action_one: with mock.patch(self.mock_two) as mock_action_two: builder = utils.ActionBuilder(["action_one", "action_two"]) builder.bind_action("action_one", mock_action_one, "one", a=1, b=2) builder.bind_action("action_two", mock_action_two, "two", c=3) actions = builder.build_actions([{"action_two": 3}, {"action_one": 4}], "three", d=4) for action in actions: action() self.assertEqual(4, mock_action_one.call_count, "action one not called 4 times") mock_calls = [] for i in range(4): mock_calls.append(mock.call("one", "three", a=1, b=2, d=4)) mock_action_one.assert_has_calls(mock_calls) self.assertEqual(3, mock_action_two.call_count, "action two not called 3 times") mock_calls = [] for i in range(3): mock_calls.append(mock.call("two", "three", c=3, d=4)) mock_action_two.assert_has_calls(mock_calls) class WaitForStatusTestCase(test.TestCase): def test_wrong_ready_statuses_type(self): self.assertRaises(ValueError, utils.wait_for, {}, ready_statuses="abc") def test_wrong_failure_statuses_type(self): self.assertRaises(ValueError, utils.wait_for, {}, ready_statuses=["abc"], failure_statuses="abc") def test_no_ready_statuses(self): self.assertRaises(ValueError, utils.wait_for, {}, ready_statuses=[]) def test_no_update(self): self.assertRaises(ValueError, utils.wait_for, {}, ready_statuses=["ready"]) @mock.patch("rally.task.utils.time.sleep") def test_exit_instantly(self, mock_sleep): res = {"status": "ready"} upd = mock.MagicMock(return_value=res) utils.wait_for(resource=res, ready_statuses=["ready"], update_resource=upd) upd.assert_called_once_with(res) 
self.assertFalse(mock_sleep.called) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", return_value=1) def test_wait_successful(self, mock_time, mock_sleep): res = {"status": "not_ready"} upd = mock.MagicMock(side_effect=[{"status": "not_ready"}, {"status": "not_ready_yet"}, {"status": "still_not_ready"}, {"status": "almost_ready"}, {"status": "ready"}]) utils.wait_for(resource=res, ready_statuses=["ready"], update_resource=upd) upd.assert_has_calls([mock.call({"status": "not_ready"}), mock.call({"status": "not_ready"}), mock.call({"status": "not_ready_yet"}), mock.call({"status": "still_not_ready"}), mock.call({"status": "almost_ready"})]) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", return_value=1) def test_wait_successful_with_uuid(self, mock_time, mock_sleep): res = {"status": "not_ready"} upd = mock.MagicMock(side_effect=[{"status": "not_ready"}, {"status": "not_ready_yet"}, {"status": "still_not_ready"}, {"status": "almost_ready"}, {"status": "ready"}]) utils.wait_for(resource=res, ready_statuses=["ready"], update_resource=upd, id_attr="uuid") upd.assert_has_calls([mock.call({"status": "not_ready"}, id_attr="uuid"), mock.call({"status": "not_ready"}, id_attr="uuid"), mock.call({"status": "not_ready_yet"}, id_attr="uuid"), mock.call({"status": "still_not_ready"}, id_attr="uuid"), mock.call({"status": "almost_ready"}, id_attr="uuid")]) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", return_value=1) def test_wait_failure(self, mock_time, mock_sleep): res = {"status": "not_ready"} upd = mock.MagicMock(side_effect=[{"status": "not_ready"}, {"status": "fail"}]) self.assertRaises(exceptions.GetResourceErrorStatus, utils.wait_for, resource=res, ready_statuses=["ready"], failure_statuses=["fail"], update_resource=upd) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", return_value=1) def test_wait_deletion_404(self, 
mock_time, mock_sleep): # resource manager returns 404, wait_for_status catch and accept that res = mock.MagicMock() notfound = exceptions.GetResourceNotFound(resource=None) upd = mock.MagicMock(side_effect=notfound) ret = utils.wait_for_status(resource=res, ready_statuses=["deleted"], check_deletion=True, update_resource=upd) self.assertIsNone(ret) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", return_value=1) def test_wait_deletion_deleted(self, mock_time, mock_sleep): # resource manager return resouce with "deleted" status sometime, # wait_for_status return the resouce instance. res = {"status": "deleted"} upd = mock.MagicMock(side_effect=[{"status": "deleted"}]) ret = utils.wait_for_status(resource=res, ready_statuses=["deleted"], check_deletion=True, update_resource=upd) self.assertEqual(res, ret) @mock.patch("rally.task.utils.time.sleep") @mock.patch("rally.task.utils.time.time", side_effect=[1, 2, 3, 4]) def test_wait_timeout(self, mock_time, mock_sleep): res = {"status": "not_ready"} upd = mock.MagicMock(side_effect=[{"status": "not_ready"}, {"status": "fail"}]) self.assertRaises(exceptions.TimeoutException, utils.wait_for_status, resource=res, ready_statuses=["ready"], update_resource=upd, timeout=2, id_attr="uuid") rally-0.9.1/tests/unit/task/test_services.py0000664000567000056710000002336613073417720022342 0ustar jenkinsjenkins00000000000000# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import mock from rally import exceptions from rally.task import atomic from rally.task import service from tests.unit import test PATH = "rally.task.service" class ServiceTestCase(test.TestCase): def setUp(self): super(ServiceTestCase, self).setUp() self.clients = mock.MagicMock() self.clients.cc.choose_version.return_value = 1 @mock.patch("%s.atomic" % PATH) def test_atomic(self, mock_atomic): @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass SomeV1Service(mock.MagicMock(), atomic_inst=mock.MagicMock()) self.assertFalse(mock_atomic.ActionTimerMixin.called) # ensure that previous call will no affect anything mock_atomic.ActionTimerMixin.reset_mock() SomeV1Service(mock.MagicMock()) self.assertTrue(mock_atomic.ActionTimerMixin.called) def test_generate_random_name(self): @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass self.assertRaises(exceptions.RallyException, SomeV1Service(self.clients).generate_random_name) name_generator = mock.MagicMock() impl = SomeV1Service(self.clients, name_generator=name_generator) self.assertEqual(name_generator.return_value, impl.generate_random_name()) name_generator.assert_called_once_with() def test_version(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class UnifiedSomeV1Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "1" self.assertEqual("1", SomeService(clients).version) self.assertEqual("1", SomeV1Service(clients).version) self.assertEqual("1", UnifiedSomeV1Service(clients).version) def test_is_applicable(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class 
UnifiedSomeV1Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "1" self.assertFalse(SomeService.is_applicable(clients)) self.assertTrue(UnifiedSomeV1Service.is_applicable(clients)) clients.some_service.choose_version.return_value = "2" self.assertFalse(SomeService.is_applicable(clients)) self.assertFalse(UnifiedSomeV1Service.is_applicable(clients)) class ServiceMetaTestCase(test.TestCase): def test_servicemeta_fail_on_missed_public_function(self): def init_classes(): class SomeService(service.UnifiedService): @service.should_be_overridden def foo(self): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class UnifiedSomeV1Service(SomeService): pass e = self.assertRaises(exceptions.RallyException, init_classes) self.assertIn("Missed method(s): foo", str(e)) class DiscoverTestCase(test.TestCase): def test_discover_impl_based_on_version(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class UnifiedSomeV1Service(SomeService): pass @service.service("some_service", "some_type", version="2") class SomeV2Service(service.Service): pass @service.compat_layer(SomeV2Service) class UnifiedSomeV2Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "1" self.assertIsInstance(SomeService(clients)._impl, UnifiedSomeV1Service) clients.some_service.choose_version.return_value = "2" self.assertIsInstance(SomeService(clients)._impl, UnifiedSomeV2Service) self.assertFalse(clients.services.called) def test_discover_impl_based_on_service(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class 
UnifiedSomeV1Service(SomeService): pass @service.service("another_impl_of_some_service", "another_type", version="2") class AnotherSomeV2Service(service.Service): pass @service.compat_layer(AnotherSomeV2Service) class UnifiedAnotherSomeV2Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "1" clients.another_impl_of_some_service.choose_version.return_value = "2" clients.services.return_value = {"some_type": "some_service"} self.assertIsInstance(SomeService(clients)._impl, UnifiedSomeV1Service) clients.services.return_value = { "another_type": "another_impl_of_some_service"} self.assertIsInstance(SomeService(clients)._impl, UnifiedAnotherSomeV2Service) def test_discover_impl_fail_with_wrong_version(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(service.Service): pass @service.compat_layer(SomeV1Service) class UnifiedSomeV1Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "2" e = self.assertRaises(exceptions.RallyException, SomeService, clients) self.assertEqual("There is no proper implementation for " "SomeService.", str(e)) def test_discover_impl_fail_with_unavailable_service(self): class SomeService(service.UnifiedService): pass @service.service("some_service", "some_type", version="1") class SomeV1Service(SomeService): pass @service.compat_layer(SomeV1Service) class UnifiedSomeV1Service(SomeService): pass @service.service("another_service", "another_type", version="2") class AnotherSomeV2Service(SomeService): pass @service.compat_layer(AnotherSomeV2Service) class UnifiedAnotherSomeV2Service(SomeService): pass clients = mock.MagicMock() clients.some_service.choose_version.return_value = "1" clients.another_service.choose_version.return_value = "2" clients.services.return_value = {} e = self.assertRaises(exceptions.RallyException, SomeService, clients) 
self.assertEqual("There is no proper implementation for SomeService.", str(e)) class MethodWrapperTestCase(test.TestCase): def test_positional(self): class Some(object): @service.method_wrapper def foo(slf, *args, **kwargs): if len(args) > 1: self.fail("`method_wrapper` should fail when number of " "positional arguments are bigger than 1.") Some().foo() Some().foo(some=2, another=3) Some().foo(1, some=2, another=3) self.assertRaises(TypeError, Some().foo, 1, 2) def test_disabling_atomics(self): class Some(service.UnifiedService): def discover_impl(self): return mock.MagicMock, None @atomic.action_timer("some") def foo(slf): pass def bar(slf): pass some = Some(mock.MagicMock(version="777")) some.foo(no_atomic=True) self.assertNotIn("some", some._atomic_actions) # check that we are working with correct variable some.foo() self.assertIn("some", some._atomic_actions) class ServiceWithoutAtomicTestCase(test.TestCase): def test_access(self): class Some(atomic.ActionTimerMixin): def __getattr__(self, attr): return self some_cls = Some() # add something to atomic actions dict to simplify comparison # (empty fake dict != not empty _atomic_actions dict) with atomic.ActionTimer(some_cls, "some"): pass wrapped_service = service._ServiceWithoutAtomic(some_cls) self.assertNotEqual(some_cls.atomic_actions(), wrapped_service.atomic_actions()) self.assertNotEqual(some_cls._atomic_actions, wrapped_service._atomic_actions) self.assertEqual(some_cls, wrapped_service.some_var) rally-0.9.1/tests/unit/task/test_exporter.py0000664000567000056710000000206213073417717022363 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from rally.task import exporter from tests.unit import test @exporter.configure(name="test-exporter") class TestExporter(exporter.Exporter): def validate(self): pass def export(self, task, connection_string): pass class ExporterTestCase(test.TestCase): def test_task_export(self): self.assertRaises(TypeError, exporter.Exporter, "fake_connection") def test_task_export_instantiate(self): TestExporter("fake_connection") rally-0.9.1/tests/unit/test.py0000664000567000056710000001616413073417720017473 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import uuid import mock from oslo_config import fixture from oslotest import base from oslotest import mockpatch from rally.common import db from rally import plugins from tests.unit import fakes class DatabaseFixture(fixture.Config): """Create clean DB before starting test.""" def setUp(self): super(DatabaseFixture, self).setUp() db_url = os.environ.get("RALLY_UNITTEST_DB_URL", "sqlite://") db.engine_reset() self.conf.set_default("connection", db_url, group="database") db.schema_cleanup() db.schema_create() class TestCase(base.BaseTestCase): """Test case base class for all unit tests.""" def setUp(self): super(TestCase, self).setUp() self.addCleanup(mock.patch.stopall) plugins.load() def _test_atomic_action_timer(self, atomic_actions, name): action_duration = atomic_actions.get(name) self.assertIsNotNone(action_duration) self.assertIsInstance(action_duration, float) def assertSequenceEqual(self, iterable_1, iterable_2, msg=None): self.assertEqual(tuple(iterable_1), tuple(iterable_2), msg) class DBTestCase(TestCase): """Base class for tests which use DB.""" def setUp(self): super(DBTestCase, self).setUp() self.useFixture(DatabaseFixture()) # TODO(boris-42): This should be moved to test.plugins.test module # or similar class ScenarioTestCase(TestCase): """Base class for Scenario tests using mocked self.clients.""" benchmark_utils = "rally.task.utils" patch_benchmark_utils = True def client_factory(self, client_type, version=None, admin=False): """Create a new client object.""" return mock.MagicMock(client_type=client_type, version=version, admin=admin) def clients(self, client_type, version=None, admin=False): """Get a mocked client.""" key = (client_type, version, admin) if key not in self._clients: self._clients[key] = self.client_factory(client_type, version=version, admin=admin) return self._clients[key] def admin_clients(self, client_type, version=None): """Get a mocked admin client.""" return self.clients(client_type, version=version, admin=True) def 
client_created(self, client_type, version=None, admin=False): """Determine if a client has been created. This can be used to see if a scenario calls 'self.clients("foo")', without checking to see what was done with the client object returned by that call. """ key = (client_type, version, admin) return key in self._clients def get_client_mocks(self): base_path = "rally.plugins.openstack" return [ mock.patch( "%s.scenario.OpenStackScenario.clients" % base_path, mock.Mock(side_effect=self.clients)), mock.patch( "%s.scenario.OpenStackScenario.admin_clients" % base_path, mock.Mock(side_effect=self.admin_clients)) ] def get_test_context(self): return get_test_context() def setUp(self): super(ScenarioTestCase, self).setUp() if self.patch_benchmark_utils: self.mock_resource_is = mockpatch.Patch( self.benchmark_utils + ".resource_is") self.mock_get_from_manager = mockpatch.Patch( self.benchmark_utils + ".get_from_manager") self.mock_wait_for = mockpatch.Patch( self.benchmark_utils + ".wait_for") self.mock_wait_for_delete = mockpatch.Patch( self.benchmark_utils + ".wait_for_delete") self.mock_wait_for_status = mockpatch.Patch( self.benchmark_utils + ".wait_for_status") self.useFixture(self.mock_resource_is) self.useFixture(self.mock_get_from_manager) self.useFixture(self.mock_wait_for) self.useFixture(self.mock_wait_for_delete) self.useFixture(self.mock_wait_for_status) self.mock_sleep = mockpatch.Patch("time.sleep") self.useFixture(self.mock_sleep) self._clients = {} self._client_mocks = self.get_client_mocks() for patcher in self._client_mocks: patcher.start() self.context = self.get_test_context() def tearDown(self): for patcher in self._client_mocks: patcher.stop() super(ScenarioTestCase, self).tearDown() class ContextClientAdapter(object): def __init__(self, endpoint, test_case): self.endpoint = endpoint self.test_case = test_case def mock_client(self, name, version=None): admin = self.endpoint.startswith("admin") client = self.test_case.clients(name, version=version, 
admin=admin) if not isinstance(client.return_value, mock.Mock): return client.return_value if client.side_effect is not None: # NOTE(pboldin): if a client has side_effects that means the # user wants some of the returned values overrided (look at # the test_existing_users for instance) return client() return client def __getattr__(self, name): # NOTE(pboldin): __getattr__ magic is called last, after the value # were looked up for in __dict__ return lambda version=None: self.mock_client(name, version) class ContextTestCase(ScenarioTestCase): def setUp(self): super(ContextTestCase, self).setUp() self._adapters = {} def context_client(self, endpoint, api_info=None): if endpoint not in self._adapters: self._adapters[endpoint] = ContextClientAdapter(endpoint, self) return self._adapters[endpoint] def get_client_mocks(self): return [ mock.patch( "rally.osclients.Clients", mock.Mock(side_effect=self.context_client)) ] class FakeClientsScenarioTestCase(ScenarioTestCase): """Base class for Scenario tests using fake (not mocked) self.clients.""" def client_factory(self, client_type, version=None, admin=False): return getattr(self._fake_clients, client_type)() def setUp(self): super(FakeClientsScenarioTestCase, self).setUp() self._fake_clients = fakes.FakeClients() def get_test_context(**kwargs): kwargs["task"] = {"uuid": str(uuid.uuid4())} return kwargs rally-0.9.1/tests/unit/test_ddt.py0000664000567000056710000001043013073417717020322 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import ast import os from tests.unit import test class DDTDecoratorChecker(ast.NodeVisitor): """Visit an AST tree looking for classes lacking the ddt.ddt decorator. DDT uses decorators on test case functions to supply different test data, but if the class that those functions are members of is not decorated with @ddt.ddt, then the data expansion never happens and the tests are incomplete. This is very easy to miss both when writing and when reviewing code, so this visitor ensures that every class that contains a function decorated with a @ddt.* decorator is itself decorated with @ddt.ddt """ def __init__(self): self.classes = [] self.errors = {} @classmethod def _get_name(cls, node): if isinstance(node, ast.Name): return node.id if isinstance(node, ast.Attribute): return cls._get_name(node.value) + "." + node.attr return "" def _is_ddt(self, cls_node): return "ddt.ddt" in (self._get_name(d) for d in cls_node.decorator_list) def visit_ClassDef(self, node): self.classes.append(node) self.generic_visit(node) self.classes.pop() def visit_FunctionDef(self, node): if not self.classes: # NOTE(stpierre): we only care about functions that are # defined inside of classes return cls = self.classes[-1] if cls.name in self.errors: # NOTE(stpierre): if this class already has been found to # be in error, ignore the rest of its functions return for decorator in node.decorator_list: if not isinstance(decorator, ast.Call): continue funcname = self._get_name(decorator.func) if funcname.startswith("ddt."): if not self._is_ddt(cls): msg = ("Class %s has functions that use DDT, " "but is not decorated with `ddt.ddt`" % cls.name) self.errors[cls.name] = { "lineno": node.lineno, "message": msg } class DDTDecoratorCheckerTestCase(test.TestCase): tests_path = os.path.join(os.path.dirname(__file__)) def test_ddt_class_decorator(self): """Classes with DDT-decorated functions have ddt.ddt 
class decorator. """ errors = [] for dirname, dirnames, filenames in os.walk(self.tests_path): for filename in filenames: if not (filename.startswith("test_") and filename.endswith(".py")): continue filename = os.path.relpath(os.path.join(dirname, filename)) with open(filename, "rb") as fh: try: tree = ast.parse(fh.read(), filename) except TypeError as err: errors.append({"message": str(err), "filename": filename, "lineno": -1}) visitor = DDTDecoratorChecker() visitor.visit(tree) errors.extend( dict(filename=filename, **error) for error in visitor.errors.values()) if errors: msg = [""] for error in errors: msg.extend([ "Errors at %(filename)s line %(lineno)d: %(message)s" % { "message": error["message"], "filename": error["filename"], "lineno": error["lineno"]}, ""]) self.fail("\n".join(msg)) rally-0.9.1/tests/unit/test_pytest_launcher.py0000664000567000056710000000477413073417717022776 0ustar jenkinsjenkins00000000000000# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os

import mock

from tests.ci import pytest_launcher
from tests.unit import test

# Dotted path of the module under test, used to build patch targets.
PATH = "tests.ci.pytest_launcher"


class ExitError(Exception):
    """Raised by the patched exit() so tests can observe termination."""
    pass


class PyTestLauncherTestCase(test.TestCase):
    """Unit tests for the pytest_launcher CI helper script."""

    def _start_patcher(self, target, **kwargs):
        # Start a mock.patch and guarantee it is undone after the test.
        patcher = mock.patch(target, **kwargs)
        mocked = patcher.start()
        self.addCleanup(patcher.stop)
        return mocked

    def setUp(self):
        super(PyTestLauncherTestCase, self).setUp()
        self.sp = self._start_patcher("%s.subprocess" % PATH)
        self.exit = self._start_patcher("%s.exit" % PATH,
                                        side_effect=ExitError)
        self.os = self._start_patcher("%s.os" % PATH)
        # emulate local run by default
        self.os.environ = {}
        # Path helpers must behave like the real os.path so the
        # launcher's path arithmetic still works under the mock.
        self.os.path.join.side_effect = os.path.join
        self.os.path.abspath.side_effect = os.path.abspath
        self.os.path.expanduser.side_effect = os.path.expanduser

    def test_wrong_posargs(self):
        # Quoted --posargs containing spaces must abort the launcher
        # before it touches the filesystem or spawns anything.
        self.assertRaises(
            ExitError, pytest_launcher.main,
            ["script name", "test_path",
             "--posargs='posargs with spaces'"])
        self.assertFalse(self.sp.called)
        self.assertFalse(self.os.called)

    def test_parsing_path(self):
        dpath = "some/path/to/some/test"

        def fake_exists(path):
            # Pretend only the dpath hierarchy and its module.py exist.
            return dpath.startswith(path) or path == "%s/module.py" % dpath

        self.os.path.exists.side_effect = fake_exists
        pytest_launcher.main(
            ["script_name", "some/path",
             "--posargs=some.path.to.some.test.module.TestCase.test"])
        expected_path = os.path.abspath(
            "some/path/to/some/test/module.py::TestCase::test")
        self.assertEqual(1, self.sp.check_call.call_count)
        # Last positional argument handed to subprocess.check_call must
        # be the dotted posargs rewritten as a pytest node id.
        positional = self.sp.check_call.call_args_list[0][0]
        self.assertEqual(expected_path, positional[0][-1])
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Test for api.""" import copy import os import ddt import jsonschema from keystoneclient import exceptions as keystone_exceptions import mock from oslo_config import cfg from rally import api from rally.common import objects from rally import consts from rally import exceptions from tests.unit import fakes from tests.unit import test FAKE_DEPLOYMENT_CONFIG = { # TODO(akscram): A fake engine is more suitable for that. "type": "ExistingCloud", "auth_url": "http://example.net:5000/v2.0/", "admin": { "username": "admin", "password": "myadminpass", "tenant_name": "demo", "domain_name": None, "project_domain_name": "Default", "user_domain_name": "Default", }, "region_name": "RegionOne", "endpoint_type": consts.EndpointType.INTERNAL, } @ddt.ddt class TaskAPITestCase(test.TestCase): def setUp(self): super(TaskAPITestCase, self).setUp() self.task_uuid = "b0d9cd6c-2c94-4417-a238-35c7019d0257" self.task = { "uuid": self.task_uuid, } @mock.patch("rally.api.objects.Task") @mock.patch("rally.api.objects.Deployment.get", return_value=fakes.FakeDeployment(uuid="deployment_uuid", admin="fake_admin", users=["fake_user"])) @mock.patch("rally.api.engine.TaskEngine") def test_validate( self, mock_task_engine, mock_deployment_get, mock_task): api._Task.validate(mock_deployment_get.return_value["uuid"], "config") mock_task_engine.assert_has_calls([ mock.call("config", mock_task.return_value, mock_deployment_get.return_value), mock.call().validate() ]) mock_task.assert_called_once_with( temporary=True, deployment_uuid=mock_deployment_get.return_value["uuid"]) 
mock_deployment_get.assert_called_once_with( mock_deployment_get.return_value["uuid"]) @mock.patch("rally.api.objects.Task") @mock.patch("rally.api.objects.Deployment", return_value=fakes.FakeDeployment(uuid="deployment_uuid")) @mock.patch("rally.api.engine.TaskEngine") def test_validate_engine_exception(self, mock_task_engine, mock_deployment, mock_task): excpt = exceptions.InvalidTaskException() mock_task_engine.return_value.validate.side_effect = excpt self.assertRaises(exceptions.InvalidTaskException, api._Task.validate, mock_deployment.return_value["uuid"], "config") def test_render_template(self): self.assertEqual( "3 = 3", api._Task.render_template("{{a + b}} = {{c}}", a=1, b=2, c=3)) def test_render_template_default_values(self): template = "{% set a = a or 1 %}{{a + b}} = {{c}}" self.assertEqual("3 = 3", api._Task.render_template(template, b=2, c=3)) self.assertEqual( "5 = 5", api._Task.render_template(template, a=2, b=3, c=5)) def test_render_template_default_filter(self): template = "{{ c | default(3) }}" self.assertEqual("3", api._Task.render_template(template)) self.assertEqual("5", api._Task.render_template(template, c=5)) def test_render_template_builtin(self): template = "{% for i in range(4) %}{{i}}{% endfor %}" self.assertEqual("0123", api._Task.render_template(template)) def test_render_template_missing_args(self): self.assertRaises(TypeError, api._Task.render_template, "{{a}}") def test_render_template_include_other_template(self): other_template_path = os.path.join( os.path.dirname(__file__), "..", "..", "samples/tasks/scenarios/nova/boot.json") template = "{%% include \"%s\" %%}" % os.path.basename( other_template_path) with open(other_template_path) as f: other_template = f.read() expect = api._Task.render_template(other_template) actual = api._Task.render_template( template, os.path.dirname(other_template_path)) self.assertEqual(expect, actual) def test_render_template_min(self): template = "{{ min(1, 2)}}" self.assertEqual("1", 
api._Task.render_template(template)) def test_render_template_max(self): template = "{{ max(1, 2)}}" self.assertEqual("2", api._Task.render_template(template)) def test_render_template_ceil(self): template = "{{ ceil(2.2)}}" self.assertEqual("3", api._Task.render_template(template)) def test_render_template_round(self): template = "{{ round(2.2)}}" self.assertEqual("2", api._Task.render_template(template)) @mock.patch("rally.common.objects.Deployment.get", return_value={ "uuid": "b0d9cd6c-2c94-4417-a238-35c7019d0257", "status": consts.DeployStatus.DEPLOY_FINISHED}) @mock.patch("rally.common.objects.Task") def test_create(self, mock_task, mock_deployment_get): tag = "a" api._Task.create(mock_deployment_get.return_value["uuid"], tag) mock_task.assert_called_once_with( deployment_uuid=mock_deployment_get.return_value["uuid"], tag=tag) @mock.patch("rally.common.objects.Deployment.get", return_value={ "name": "xxx_name", "uuid": "u_id", "status": consts.DeployStatus.DEPLOY_INIT}) def test_create_on_unfinished_deployment(self, mock_deployment_get): deployment_id = mock_deployment_get.return_value["uuid"] tag = "a" self.assertRaises(exceptions.DeploymentNotFinishedStatus, api._Task.create, deployment_id, tag) @mock.patch("rally.api.objects.Task", return_value=fakes.FakeTask(uuid="some_uuid")) @mock.patch("rally.api.objects.Deployment.get", return_value=fakes.FakeDeployment(uuid="deployment_uuid", admin="fake_admin", users=["fake_user"])) @mock.patch("rally.api.engine.TaskEngine") def test_start(self, mock_task_engine, mock_deployment_get, mock_task): api._Task.start(mock_deployment_get.return_value["uuid"], "config") mock_task_engine.assert_has_calls([ mock.call("config", mock_task.return_value, mock_deployment_get.return_value, abort_on_sla_failure=False), mock.call().run(), ]) mock_task.assert_called_once_with( deployment_uuid=mock_deployment_get.return_value["uuid"]) mock_deployment_get.assert_called_once_with( mock_deployment_get.return_value["uuid"]) 
@mock.patch("rally.api.objects.Task", return_value=fakes.FakeTask(uuid="some_uuid", task={}, temporary=True)) @mock.patch("rally.api.objects.Deployment.get", return_value=fakes.FakeDeployment(uuid="deployment_uuid", admin="fake_admin", users=["fake_user"])) def test_start_temporary_task(self, mock_deployment_get, mock_task): self.assertRaises(ValueError, api._Task.start, mock_deployment_get.return_value["uuid"], "config") @mock.patch("rally.api.objects.Task") @mock.patch("rally.api.objects.Deployment.get") @mock.patch("rally.api.engine.TaskEngine") def test_start_exception(self, mock_task_engine, mock_deployment_get, mock_task): mock_task.return_value.is_temporary = False mock_task_engine.return_value.run.side_effect = TypeError self.assertRaises(TypeError, api._Task.start, "deployment_uuid", "config") mock_deployment_get().update_status.assert_called_once_with( consts.DeployStatus.DEPLOY_INCONSISTENT) @ddt.data(True, False) @mock.patch("rally.api.time") @mock.patch("rally.api.objects.Task") def test_abort_sync(self, soft, mock_task, mock_time): mock_task.get_status.side_effect = ( consts.TaskStatus.INIT, consts.TaskStatus.VALIDATING, consts.TaskStatus.RUNNING, consts.TaskStatus.ABORTING, consts.TaskStatus.SOFT_ABORTING, consts.TaskStatus.ABORTED) some_uuid = "ca441749-0eb9-4fcc-b2f6-76d314c55404" api._Task.abort(some_uuid, soft=soft, async=False) mock_task.get.assert_called_once_with(some_uuid) mock_task.get.return_value.abort.assert_called_once_with(soft=soft) self.assertEqual([mock.call(some_uuid)] * 6, mock_task.get_status.call_args_list) self.assertTrue(mock_time.sleep.called) @ddt.data(True, False) @mock.patch("rally.api.time") @mock.patch("rally.api.objects.Task") def test_abort_async(self, soft, mock_task, mock_time): some_uuid = "133695fb-400d-4988-859c-30bfaa0488ce" api._Task.abort(some_uuid, soft=soft, async=True) mock_task.get.assert_called_once_with(some_uuid) mock_task.get.return_value.abort.assert_called_once_with(soft=soft) 
self.assertFalse(mock_task.get_status.called) self.assertFalse(mock_time.sleep.called) @ddt.data({"task_status": "strange value", "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.INIT, "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.VALIDATING, "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.ABORTING, "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.SOFT_ABORTING, "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.RUNNING, "expected_status": consts.TaskStatus.FINISHED}, {"task_status": consts.TaskStatus.ABORTED, "expected_status": None}, {"task_status": consts.TaskStatus.FINISHED, "expected_status": None}, {"task_status": consts.TaskStatus.CRASHED, "expected_status": None}, {"task_status": "strange value", "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.INIT, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.VALIDATING, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.RUNNING, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.ABORTING, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.SOFT_ABORTING, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.ABORTED, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.FINISHED, "force": True, "expected_status": None}, {"task_status": consts.TaskStatus.CRASHED, "force": True, "expected_status": None}) @ddt.unpack @mock.patch("rally.api.objects.Task.get_status") @mock.patch("rally.api.objects.Task.delete_by_uuid") def test_delete(self, mock_task_delete_by_uuid, mock_task_get_status, task_status, expected_status, force=False, raises=None): mock_task_get_status.return_value = task_status api._Task.delete(self.task_uuid, force=force) if force: self.assertFalse(mock_task_get_status.called) else: 
class BaseDeploymentTestCase(test.TestCase):
    """Shared fixture: a fake deployment record plus derived credentials."""

    def setUp(self):
        super(BaseDeploymentTestCase, self).setUp()
        self.deployment_uuid = "599bdf1d-fe77-461a-a810-d59b1490f4e3"
        self.deployment_config = copy.deepcopy(FAKE_DEPLOYMENT_CONFIG)

        # Build the admin credential the API is expected to derive from
        # the deployment config: drop the engine-only "type" key and
        # flatten the "admin" section into the top level.
        creds = copy.deepcopy(FAKE_DEPLOYMENT_CONFIG)
        creds.pop("type")
        creds["endpoint"] = None
        creds.update(creds.pop("admin"))
        creds.update({
            "permission": consts.EndpointPermission.ADMIN,
            "https_insecure": False,
            "https_cacert": None,
        })
        self.credentials = {"admin": creds, "users": []}
        self.deployment = {
            "uuid": self.deployment_uuid,
            "name": "fake_name",
            "config": self.deployment_config,
            "credentials": {"openstack": [self.credentials]},
        }
mock_deployment_update): mock_deployment_create.return_value = self.deployment mock_deployment_update.return_value = self.deployment dep = api._Deployment.create(self.deployment_config, "fake_deployment") self.assertIsInstance(dep, objects.Deployment) mock_deployment_create.assert_called_once_with({ "name": "fake_deployment", "config": self.deployment_config, }) mock_engine_validate.assert_called_once_with() mock_deployment_update.assert_has_calls([ mock.call(self.deployment_uuid, {"credentials": {"openstack": [self.credentials]}}) ]) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_create") @mock.patch("rally.deployment.engine.Engine.validate", side_effect=jsonschema.ValidationError("ValidationError")) def test_create_validation_error( self, mock_engine_validate, mock_deployment_create, mock_deployment_update): mock_deployment_create.return_value = self.deployment self.assertRaises(jsonschema.ValidationError, api._Deployment.create, self.deployment_config, "fake_deployment") mock_deployment_update.assert_called_once_with( self.deployment_uuid, {"status": consts.DeployStatus.DEPLOY_FAILED}) @mock.patch("rally.api.LOG") @mock.patch("rally.common.objects.deploy.db.deployment_create", side_effect=exceptions.DeploymentNameExists( deployment="fake_deploy")) def test_create_duplication_error(self, mock_deployment_create, mock_log): self.assertRaises(exceptions.DeploymentNameExists, api._Deployment.create, self.deployment_config, "fake_deployment") @mock.patch("rally.api._Verifier.delete") @mock.patch("rally.api._Verifier.list") @mock.patch("rally.common.objects.deploy.db.deployment_delete") @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_destroy(self, mock_deployment_get, mock_deployment_update, mock_deployment_delete, mock___verifier_list, mock___verifier_delete): mock_deployment_get.return_value = self.deployment 
mock_deployment_update.return_value = self.deployment list_verifiers = [mock.Mock(), mock.Mock()] mock___verifier_list.return_value = list_verifiers api._Deployment.destroy(self.deployment_uuid) mock_deployment_get.assert_called_once_with(self.deployment_uuid) mock_deployment_delete.assert_called_once_with(self.deployment_uuid) mock___verifier_list.assert_called_once_with() self.assertEqual( [mock.call(m.name, self.deployment["name"], force=True) for m in list_verifiers], mock___verifier_delete.call_args_list) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_recreate(self, mock_deployment_get, mock_deployment_update): mock_deployment_get.return_value = self.deployment mock_deployment_update.return_value = self.deployment api._Deployment.recreate(self.deployment_uuid) mock_deployment_get.assert_called_once_with(self.deployment_uuid) mock_deployment_update.assert_has_calls([ mock.call( self.deployment_uuid, {"credentials": {"openstack": [{"admin": self.credentials["admin"], "users": self.credentials["users"]}]}}) ]) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_recreate_config(self, mock_deployment_get, mock_deployment_update): mock_deployment_get.return_value = self.deployment mock_deployment_update.return_value = self.deployment config = copy.deepcopy(self.deployment_config) config["admin"] = {"username": "admin", "password": "pass1", "tenant_name": "demo"} config["users"] = [{"username": "user1", "password": "pass2", "tenant_name": "demo"}] api._Deployment.recreate(self.deployment_uuid, config) mock_deployment_get.assert_called_once_with(self.deployment_uuid) mock_deployment_update.assert_has_calls([ mock.call(self.deployment_uuid, {"config": config}), ]) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_get") def 
test_recreate_config_invalid(self, mock_deployment_get, mock_deployment_update): mock_deployment_get.return_value = self.deployment mock_deployment_update.return_value = self.deployment config = copy.deepcopy(self.deployment_config) config["admin"] = {"foo": "bar"} self.assertRaises(jsonschema.ValidationError, api._Deployment.recreate, self.deployment_uuid, config) @mock.patch("rally.common.objects.deploy.db.deployment_update") @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_recreate_config_wrong_type(self, mock_deployment_get, mock_deployment_update): mock_deployment_get.return_value = self.deployment mock_deployment_update.return_value = self.deployment config = copy.deepcopy(self.deployment_config) config["type"] = "foo" self.assertRaises(exceptions.RallyException, api._Deployment.recreate, self.deployment_uuid, config) @mock.patch("rally.common.objects.deploy.db.deployment_get") def test_get(self, mock_deployment_get): deployment_id = "aaaa-bbbb-cccc-dddd" mock_deployment_get.return_value = self.deployment ret = api._Deployment.get(deployment_id) for key in self.deployment: self.assertEqual(ret[key], self.deployment[key]) @mock.patch("rally.common.objects.Deployment.list") def test_list(self, mock_deployment_list): mock_deployment_list.return_value = self.deployment ret = api._Deployment.list() for key in self.deployment: self.assertEqual(ret[key], self.deployment[key]) @mock.patch("rally.osclients.Clients.services") @mock.patch("rally.osclients.Keystone.create_client") def test_deployment_check(self, mock_keystone_create_client, mock_clients_services): sample_credential = objects.Credential("http://192.168.1.1:5000/v2.0/", "admin", "adminpass").to_dict() deployment = mock.Mock(spec=objects.Deployment) deployment.get_credentials_for.return_value = { "admin": sample_credential, "users": [sample_credential]} api._Deployment.check(deployment) mock_keystone_create_client.assert_called_with() mock_clients_services.assert_called_once_with() def 
test_deployment_check_raise(self): sample_credential = objects.Credential("http://192.168.1.1:5000/v2.0/", "admin", "adminpass").to_dict() sample_credential["not-exist-key"] = "error" deployment = mock.Mock(spec=objects.Deployment) self.assertRaises(TypeError, api._Deployment.check, deployment) @mock.patch("rally.osclients.Clients.services") def test_deployment_check_connect_failed(self, mock_clients_services): sample_credential = objects.Credential("http://192.168.1.1:5000/v2.0/", "admin", "adminpass").to_dict() deployment = mock.Mock(spec=objects.Deployment) deployment.get_credentials_for.return_value = { "admin": sample_credential, "users": []} refused = keystone_exceptions.ConnectionRefused() mock_clients_services.side_effect = refused self.assertRaises( keystone_exceptions.ConnectionRefused, api._Deployment.check, deployment) class APITestCase(test.TestCase): @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.database_revision", return_value={"revision": "foobar", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_config_args(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): api_ = api.API(config_args=["foo", "bar", "baz"]) mock_conf.assert_called_once_with( ["foo", "bar", "baz"], default_config_files=None, project="rally", version="0.0.0") self.assertIs(api_.deployment, api._Deployment) self.assertIs(api_.task, api._Task) @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.database_revision", return_value={"revision": "foobar", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_config_file(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): api_ = api.API(config_file="myfile.conf") mock_conf.assert_called_once_with( [], 
default_config_files=["myfile.conf"], project="rally", version="0.0.0") self.assertIs(api_.deployment, api._Deployment) self.assertIs(api_.task, api._Task) @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.database_revision", return_value={"revision": "foobar", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_no_default_config_file(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): api.API() mock_conf.assert_called_once_with( [], default_config_files=None, project="rally", version="0.0.0") @mock.patch("os.path.isfile") @mock.patch("rally.common.version.database_revision", return_value={"revision": "foobar", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_default_config_file(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): mock_isfile.side_effect = lambda f: f == "/etc/rally/rally.conf" api.API() mock_conf.assert_called_once_with( [], default_config_files=["/etc/rally/rally.conf"], project="rally", version="0.0.0") @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_exception(self, mock_conf, mock_version_string, mock_isfile): mock_conf.side_effect = cfg.ConfigFilesNotFoundError(["file1", "file2"]) self.assertRaises(exceptions.RallyException, api.API) mock_conf.assert_called_once_with( [], default_config_files=None, project="rally", version="0.0.0") @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.plugin.discover.load_plugins") @mock.patch("rally.common.version.database_revision", return_value={"revision": "foobar", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", 
return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_plugin_path(self, mock_conf, mock_version_string, mock_database_revision, mock_load_plugins, mock_isfile): mock_conf.__contains__.return_value = True mock_conf.get.side_effect = ( lambda a: ["/path/from/args"] if a == "plugin_paths" else None) api.API(plugin_paths=["/my/path"]) mock_conf.assert_called_once_with([], default_config_files=None, project="rally", version="0.0.0") mock_load_plugins.assert_has_calls([ mock.call("/my/path"), mock.call("/path/from/args"), ]) @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.database_revision", return_value={"revision": "spam", "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_check_revision_exception(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): exc = self.assertRaises(exceptions.RallyException, api.API) self.assertIn("rally-manage db upgrade", str(exc)) mock_conf.assert_called_once_with( [], default_config_files=None, project="rally", version="0.0.0") @mock.patch("os.path.isfile", return_value=False) @mock.patch("rally.common.version.database_revision", return_value={"revision": None, "current_head": "foobar"}) @mock.patch("rally.common.version.version_string", return_value="0.0.0") @mock.patch("rally.api.CONF", spec=cfg.CONF) def test_init_check_revision_exception_no_db(self, mock_conf, mock_version_string, mock_database_revision, mock_isfile): exc = self.assertRaises(exceptions.RallyException, api.API) self.assertIn("rally-manage db create", str(exc)) mock_conf.assert_called_once_with( [], default_config_files=None, project="rally", version="0.0.0") def test_init_rally_endpoint(self): self.assertRaises(NotImplementedError, api.API, rally_endpoint="foo") class FakeVerifierManager(object): NAME = "fake_verifier" NAMESPACE = "tests" TITLE = "Fake verifier which is used only 
for testing purpose" @classmethod def get_name(cls): return cls.NAME @classmethod def get_namespace(cls): return cls.NAMESPACE @classmethod def get_info(cls): return {"title": cls.TITLE} class VerifierAPITestCase(test.TestCase): @mock.patch("rally.api.vmanager.VerifierManager.get_all") def test_list_plugins(self, mock_verifier_manager_get_all): mock_verifier_manager_get_all.return_value = [FakeVerifierManager] namespace = "some" self.assertEqual( [{"name": FakeVerifierManager.NAME, "namespace": FakeVerifierManager.NAMESPACE, "description": FakeVerifierManager.TITLE, "location": "%s.%s" % (FakeVerifierManager.__module__, FakeVerifierManager.__name__)}], api._Verifier.list_plugins(namespace)) mock_verifier_manager_get_all.assert_called_once_with( namespace=namespace) @mock.patch("rally.api.objects.Verifier.get") def test_get(self, mock_verifier_get): uuid = "some" self.assertEqual(mock_verifier_get.return_value, api._Verifier.get(uuid)) mock_verifier_get.assert_called_once_with(uuid) @mock.patch("rally.api.objects.Verifier.list") def test_list(self, mock_verifier_list): status = "some_special_status" self.assertEqual(mock_verifier_list.return_value, api._Verifier.list(status)) mock_verifier_list.assert_called_once_with(status) @mock.patch("rally.api.objects.Verifier.create") @mock.patch("rally.api._Verifier.get") @mock.patch("rally.api.vmanager.VerifierManager.get") def test_create(self, mock_verifier_manager_get, mock___verifier_get, mock_verifier_create): mock___verifier_get.side_effect = exceptions.ResourceNotFound(id="1") name = "SomeVerifier" vtype = "fake_verifier" namespace = "tests" source = "https://example.com" version = "3.1415" system_wide = True extra_settings = {"verifier_specific_option": "value_for_it"} verifier_obj = mock_verifier_create.return_value verifier_obj.manager._meta_get.side_effect = [namespace, source] verifier_uuid = api._Verifier.create( name, vtype=vtype, version=version, system_wide=system_wide, extra_settings=extra_settings) 
mock_verifier_manager_get.assert_called_once_with(vtype, namespace=None) mock___verifier_get.assert_called_once_with(name) mock_verifier_create.assert_called_once_with( name=name, source=None, system_wide=system_wide, version=version, vtype=vtype, namespace=None, extra_settings=extra_settings) self.assertEqual(verifier_obj.uuid, verifier_uuid) verifier_obj.update_properties.assert_called_once_with( namespace=namespace, source=source) self.assertEqual([mock.call(consts.VerifierStatus.INSTALLING), mock.call(consts.VerifierStatus.INSTALLED)], verifier_obj.update_status.call_args_list) verifier_obj.manager.install.assert_called_once_with() @mock.patch("rally.api.objects.Verifier.create") @mock.patch("rally.api._Verifier.get") @mock.patch("rally.api.vmanager.VerifierManager.get") def test_create_fails_on_existing_verifier( self, mock_verifier_manager_get, mock___verifier_get, mock_verifier_create): name = "SomeVerifier" vtype = "fake_verifier" namespace = "tests" source = "https://example.com" version = "3.1415" system_wide = True extra_settings = {"verifier_specific_option": "value_for_it"} self.assertRaises(exceptions.RallyException, api._Verifier.create, name=name, vtype=vtype, namespace=namespace, source=source, version=version, system_wide=system_wide, extra_settings=extra_settings) mock_verifier_manager_get.assert_called_once_with(vtype, namespace=namespace) mock___verifier_get.assert_called_once_with(name) self.assertFalse(mock_verifier_create.called) @mock.patch("rally.api.objects.Verifier.create") @mock.patch("rally.api._Verifier.get") @mock.patch("rally.api.vmanager.VerifierManager.get") def test_create_fails_on_install_step( self, mock_verifier_manager_get, mock___verifier_get, mock_verifier_create): mock___verifier_get.side_effect = exceptions.ResourceNotFound(id="1") verifier_obj = mock_verifier_create.return_value verifier_obj.manager.install.side_effect = RuntimeError name = "SomeVerifier" vtype = "fake_verifier" namespace = "tests" source = 
"https://example.com" version = "3.1415" system_wide = True extra_settings = {"verifier_specific_option": "value_for_it"} self.assertRaises(RuntimeError, api._Verifier.create, name=name, vtype=vtype, namespace=namespace, source=source, version=version, system_wide=system_wide, extra_settings=extra_settings) mock_verifier_manager_get.assert_called_once_with(vtype, namespace=namespace) mock___verifier_get.assert_called_once_with(name) mock_verifier_create.assert_called_once_with( name=name, source=source, system_wide=system_wide, version=version, vtype=vtype, namespace=namespace, extra_settings=extra_settings) self.assertEqual([mock.call(consts.VerifierStatus.INSTALLING), mock.call(consts.VerifierStatus.FAILED)], verifier_obj.update_status.call_args_list) verifier_obj.manager.install.assert_called_once_with() @mock.patch("rally.api.objects.Verifier.delete") @mock.patch("rally.api._Verification.list") @mock.patch("rally.api._Verifier.get") def test_delete_no_verifications(self, mock___verifier_get, mock___verification_list, mock_verifier_delete): mock___verification_list.return_value = [] verifier_obj = mock___verifier_get.return_value verifier_id = "uuuiiiddd" deployment_id = "deployment" # remove just deployment specific data api._Verifier.delete(verifier_id, deployment_id=deployment_id) self.assertFalse(mock_verifier_delete.called) mock___verification_list.assert_called_once_with( verifier_id, deployment_id) verifier_obj.set_deployment.assert_called_once_with(deployment_id) verifier_obj.manager.uninstall.assert_called_once_with() mock___verification_list.reset_mock() verifier_obj.set_deployment.reset_mock() verifier_obj.manager.uninstall.reset_mock() # remove the whole verifier api._Verifier.delete(verifier_id) mock___verification_list.assert_called_once_with(verifier_id, None) self.assertFalse(verifier_obj.set_deployment.called) verifier_obj.manager.uninstall.assert_called_once_with(full=True) mock_verifier_delete.assert_called_once_with(verifier_id) 
    # delete() must refuse when verifications exist, unless force=True,
    # in which case each verification is deleted first.
    @mock.patch("rally.api.objects.Verifier.delete")
    @mock.patch("rally.api._Verification.delete")
    @mock.patch("rally.api._Verification.list")
    @mock.patch("rally.api._Verifier.get")
    def test_delete_with_verifications(
            self, mock___verifier_get, mock___verification_list,
            mock___verification_delete, mock_verifier_delete):
        verifications = [mock.Mock(), mock.Mock()]
        mock___verification_list.return_value = verifications
        verifier_id = "uuuiiiddd"

        self.assertRaises(exceptions.RallyException, api._Verifier.delete,
                          verifier_id)
        mock___verification_list.assert_called_once_with(verifier_id, None)
        self.assertFalse(mock___verification_delete.called)

        mock___verification_list.reset_mock()

        api._Verifier.delete(verifier_id, force=True)
        mock___verification_list.assert_called_once_with(verifier_id, None)
        self.assertEqual([mock.call(v.uuid) for v in verifications],
                         mock___verification_delete.call_args_list)

    # update() must reject: no-op calls, non-INSTALLED verifiers, and
    # contradictory system_wide/update_venv combinations.
    @mock.patch("rally.api.utils.BackupHelper")
    @mock.patch("rally.api._Verifier.get")
    def test_update_failed(self, mock___verifier_get, mock_backup_helper):
        verifier_obj = mock___verifier_get.return_value
        verifier_obj.system_wide = False
        uuid = "uuuuiiiidddd"
        # no update parameters at all -> error
        e = self.assertRaises(exceptions.RallyException, api._Verifier.update,
                              uuid)
        self.assertIn("At least one of the following parameters should be",
                      "%s" % e)
        # any status other than INSTALLED -> error
        for status in consts.VerifierStatus:
            if status != consts.VerifierStatus.INSTALLED:
                verifier_obj.status = status
                e = self.assertRaises(exceptions.RallyException,
                                      api._Verifier.update, uuid,
                                      system_wide=True)
                self.assertIn("because verifier is in '%s' status" % status,
                              "%s" % e)

        verifier_obj.status = consts.VerifierStatus.INSTALLED
        msg = "It is impossible to update the virtual environment for verifier"
        # update_venv makes no sense together with (or for) system-wide mode
        e = self.assertRaises(exceptions.RallyException, api._Verifier.update,
                              uuid, system_wide=True, update_venv=True)
        self.assertIn(msg, "%s" % e)
        verifier_obj.system_wide = True
        e = self.assertRaises(exceptions.RallyException, api._Verifier.update,
                              uuid, update_venv=True)
        self.assertIn(msg, "%s" % e)

    # Happy-path update() scenarios: version checkout, system-wide check,
    # and switching between system-wide and venv modes.
    @mock.patch("rally.api.utils.BackupHelper")
    @mock.patch("rally.api._Verifier.get")
    def test_update(self, mock___verifier_get, mock_backup_helper):
        verifier_obj = mock___verifier_get.return_value
        verifier_obj.system_wide = False
        verifier_obj.status = consts.VerifierStatus.INSTALLED
        uuid = "uuuuiiiidddd"
        version = "3.1415"

        # check updating just version
        api._Verifier.update(uuid, version=version)
        verifier_obj.manager.checkout.assert_called_once_with(version)
        self.assertFalse(verifier_obj.manager.check_system_wide.called)
        verifier_obj.update_properties.assert_called_once_with(
            status=verifier_obj.status, version=version)
        verifier_obj.update_status.assert_called_once_with(
            consts.VerifierStatus.UPDATING)
        self.assertFalse(verifier_obj.manager.install_venv.called)

        verifier_obj.manager.checkout.reset_mock()
        verifier_obj.manager.check_system_wide.reset_mock()
        verifier_obj.update_properties.reset_mock()
        verifier_obj.update_status.reset_mock()

        # check system_wide
        api._Verifier.update(uuid, version=version, system_wide=True)
        verifier_obj.manager.checkout.assert_called_once_with(version)
        verifier_obj.manager.check_system_wide.assert_called_once_with()
        verifier_obj.update_properties.assert_called_once_with(
            status=verifier_obj.status, version=version, system_wide=True)
        verifier_obj.update_status.assert_called_once_with(
            consts.VerifierStatus.UPDATING)
        self.assertFalse(verifier_obj.manager.install_venv.called)

        verifier_obj.manager.checkout.reset_mock()
        verifier_obj.manager.check_system_wide.reset_mock()
        verifier_obj.update_properties.reset_mock()
        verifier_obj.update_status.reset_mock()

        # check switching from system-wide to virtual environment
        verifier_obj.system_wide = True

        api._Verifier.update(uuid, system_wide=False)
        verifier_obj.manager.install_venv.assert_called_once_with()
        self.assertFalse(verifier_obj.manager.check_system_wide.called)
        verifier_obj.update_status.assert_called_once_with(
            consts.VerifierStatus.UPDATING)
verifier_obj.update_properties.assert_called_once_with( status=verifier_obj.status, system_wide=False) verifier_obj.manager.check_system_wide.reset_mock() verifier_obj.update_properties.reset_mock() verifier_obj.update_status.reset_mock() verifier_obj.manager.install_venv.reset_mock() # check updating virtual environment verifier_obj.system_wide = False api._Verifier.update(uuid, update_venv=True) verifier_obj.manager.install_venv.assert_called_once_with() self.assertFalse(verifier_obj.manager.check_system_wide.called) verifier_obj.update_status.assert_called_once_with( consts.VerifierStatus.UPDATING) verifier_obj.update_properties.assert_called_once_with( status=verifier_obj.status) verifier_obj.manager.check_system_wide.reset_mock() verifier_obj.update_properties.reset_mock() verifier_obj.update_status.reset_mock() verifier_obj.manager.install_venv.reset_mock() # check switching from virtual environment to system-wide verifier_obj.system_wide = False api._Verifier.update(uuid, system_wide=True) self.assertFalse(verifier_obj.manager.install_venv.called) verifier_obj.manager.check_system_wide.assert_called_once_with() verifier_obj.update_status.assert_called_once_with( consts.VerifierStatus.UPDATING) verifier_obj.update_properties.assert_called_once_with( status=verifier_obj.status, system_wide=True) @mock.patch("rally.api._Verifier.get") def test_configure_with_wrong_state_of_verifier(self, mock___verifier_get): verifier_obj = mock___verifier_get.return_value verifier_id = "uuiiiidd" deployment_id = "deployment" for status in consts.VerifierStatus: if status != consts.VerifierStatus.INSTALLED: verifier_obj.status = status e = self.assertRaises(exceptions.RallyException, api._Verifier.configure, verifier_id, deployment_id) self.assertIn("because verifier is in '%s' status" % status, "%s" % e) @mock.patch("rally.plugins.openstack.verification.tempest.manager." 
"os.path.exists") @mock.patch("rally.api._Verifier.get") def test_configure_when_it_is_already_configured(self, mock___verifier_get, mock_exists): verifier_obj = mock___verifier_get.return_value verifier_id = "uuiiiidd" deployment_id = "deployment" extra = {"key": "value"} verifier_obj.status = consts.VerifierStatus.INSTALLED # no recreate and no extra options self.assertEqual(verifier_obj.manager.get_configuration.return_value, api._Verifier.configure(verifier_id, deployment_id, reconfigure=False)) self.assertFalse(verifier_obj.manager.extend_configuration.called) self.assertFalse(verifier_obj.manager.configure.called) self.assertFalse(verifier_obj.update_status.called) # no recreate, just extend existing configuration self.assertEqual(verifier_obj.manager.get_configuration.return_value, api._Verifier.configure(verifier_id, deployment_id, reconfigure=False, extra_options=extra)) verifier_obj.manager.extend_configuration.assert_called_once_with( extra) self.assertFalse(verifier_obj.manager.configure.called) verifier_obj.update_status.reset_mock() verifier_obj.manager.extend_configuration.reset_mock() # recreate with extra options self.assertEqual(verifier_obj.manager.configure.return_value, api._Verifier.configure(verifier_id, deployment_id, reconfigure=True, extra_options=extra)) self.assertFalse(verifier_obj.manager.extend_configuration.called) verifier_obj.manager.configure.asset_called_once_with( extra_options=extra) @mock.patch("rally.api._Verifier.get") def test_override_config_with_wrong_state_of_verifier(self, mock___verifier_get): verifier_obj = mock___verifier_get.return_value verifier_id = "uuiiiidd" deployment_id = "deployment" new_content = {} for status in consts.VerifierStatus: if status != consts.VerifierStatus.INSTALLED: verifier_obj.status = status e = self.assertRaises(exceptions.RallyException, api._Verifier.override_configuration, verifier_id, deployment_id, new_content) self.assertIn("because verifier %s is in '%s' status" % (verifier_obj, 
                                 status), "%s" % e)

    # Successful override: the new configuration is handed straight to the
    # manager.
    @mock.patch("rally.plugins.openstack.verification.tempest.manager."
                "os.path.exists")
    @mock.patch("rally.api._Verifier.get")
    def test_override_config_when_it_is_already_configured(
            self, mock___verifier_get, mock_exists):
        verifier_obj = mock___verifier_get.return_value
        verifier_id = "uuiiiidd"
        deployment_id = "deployment"
        new_config = {"key": "value"}
        verifier_obj.status = consts.VerifierStatus.INSTALLED

        api._Verifier.override_configuration(verifier_id, deployment_id,
                                             new_configuration=new_config)
        verifier_obj.manager.override_configuration.assert_called_once_with(
            new_config)

    # list_tests() fails for a non-INSTALLED verifier and otherwise proxies
    # to manager.list_tests(pattern).
    @mock.patch("rally.api._Verifier.get")
    def test_list_tests(self, mock___verifier_get):
        verifier_obj = mock___verifier_get.return_value
        verifier_id = "uuiiiidd"
        pattern = "some"
        verifier_obj.status = consts.VerifierStatus.INIT
        e = self.assertRaises(exceptions.RallyException,
                              api._Verifier.list_tests, verifier_id,
                              pattern=pattern)
        self.assertIn("because verifier %s is in '%s' status"
                      % (verifier_obj, verifier_obj.status), "%s" % e)
        self.assertFalse(verifier_obj.manager.list_tests.called)

        verifier_obj.status = consts.VerifierStatus.INSTALLED
        self.assertEqual(verifier_obj.manager.list_tests.return_value,
                         api._Verifier.list_tests(verifier_id, pattern))
        verifier_obj.manager.list_tests.assert_called_once_with(pattern)

    # add_extension() requires INSTALLED status and transitions through
    # EXTENDING back to the previous status, even on failure.
    @mock.patch("rally.api._Verifier.get")
    def test_add_extension(self, mock___verifier_get):
        verifier_obj = mock___verifier_get.return_value
        verifier_id = "uuiiiidd"
        source = "example.com"
        version = 3.14159
        extra_settings = {}
        for status in consts.VerifierStatus:
            if status != consts.VerifierStatus.INSTALLED:
                verifier_obj.status = status
                e = self.assertRaises(exceptions.RallyException,
                                      api._Verifier.add_extension,
                                      verifier_id, source, version=version,
                                      extra_settings=extra_settings)
                self.assertIn("because verifier %s is in '%s' status"
                              % (verifier_obj, status), "%s" % e)

        verifier_obj.status = consts.VerifierStatus.INSTALLED
        api._Verifier.add_extension(verifier_id, source,
                                    version=version,
                                    extra_settings=extra_settings)
        verifier_obj.manager.install_extension.assert_called_once_with(
            source, version=version, extra_settings=extra_settings)
        self.assertEqual([mock.call(consts.VerifierStatus.EXTENDING),
                          mock.call(verifier_obj.status)],
                         verifier_obj.update_status.call_args_list)

        # check status will be updated in case of failure at installation step
        verifier_obj.update_status.reset_mock()
        verifier_obj.manager.install_extension.side_effect = RuntimeError
        self.assertRaises(RuntimeError, api._Verifier.add_extension,
                          verifier_id, source, version=version,
                          extra_settings=extra_settings)
        self.assertEqual([mock.call(consts.VerifierStatus.EXTENDING),
                          mock.call(verifier_obj.status)],
                         verifier_obj.update_status.call_args_list)

    # list_extensions() requires INSTALLED status; otherwise proxies to the
    # manager.
    @mock.patch("rally.api._Verifier.get")
    def test_list_extensions(self, mock___verifier_get):
        verifier_obj = mock___verifier_get.return_value
        verifier_id = "uuiiiidd"
        for status in consts.VerifierStatus:
            if status != consts.VerifierStatus.INSTALLED:
                verifier_obj.status = status
                e = self.assertRaises(exceptions.RallyException,
                                      api._Verifier.list_extensions,
                                      verifier_id)
                self.assertIn("because verifier %s is in '%s' status"
                              % (verifier_obj, status), "%s" % e)
                self.assertFalse(
                    verifier_obj.manager.list_extensions.called)

        verifier_obj.status = consts.VerifierStatus.INSTALLED
        self.assertEqual(verifier_obj.manager.list_extensions.return_value,
                         api._Verifier.list_extensions(verifier_id))
        verifier_obj.manager.list_extensions.assert_called_once_with()

    # delete_extension() requires INSTALLED status; otherwise proxies to
    # manager.uninstall_extension(name).
    @mock.patch("rally.api._Verifier.get")
    def test_delete_extension(self, mock___verifier_get):
        verifier_obj = mock___verifier_get.return_value
        verifier_id = "uuiiiidd"
        name = "some"
        for status in consts.VerifierStatus:
            if status != consts.VerifierStatus.INSTALLED:
                verifier_obj.status = status
                e = self.assertRaises(exceptions.RallyException,
                                      api._Verifier.delete_extension,
                                      verifier_id, name)
                self.assertIn("because verifier %s is in '%s' status"
                              % (verifier_obj, status), "%s" % e)
                self.assertFalse(verifier_obj.manager.list_tests.called)

        verifier_obj.status = consts.VerifierStatus.INSTALLED
        api._Verifier.delete_extension(verifier_id, name)
        verifier_obj.manager.uninstall_extension.assert_called_once_with(name)


# Tests for the verification-facing part of the Rally API
# (rally.api._Verification).
class VerificationAPITestCase(test.TestCase):

    # get() is a thin proxy over objects.Verification.get.
    @mock.patch("rally.api.objects.Verification.get")
    def test_get(self, mock_verification_get):
        verification_uuid = "uuiiiidd"
        self.assertEqual(mock_verification_get.return_value,
                         api._Verification.get(verification_uuid))
        mock_verification_get.assert_called_once_with(verification_uuid)

    # delete() fetches the verification and deletes it.
    @mock.patch("rally.api.objects.Verification.get")
    def test_delete(self, mock_verification_get):
        verification_uuid = "uuiiiidd"
        api._Verification.delete(verification_uuid)
        mock_verification_get.assert_called_once_with(verification_uuid)
        mock_verification_get.return_value.delete.assert_called_once_with()

    # list() forwards all filters to objects.Verification.list.
    @mock.patch("rally.api.objects.Verification.list")
    def test_list(self, mock_verification_list):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        tags = ["foo", "bar"]
        status = "some_status"
        self.assertEqual(mock_verification_list.return_value,
                         api._Verification.list(verifier_id,
                                                deployment_id=deployment_id,
                                                tags=tags, status=status))
        mock_verification_list.assert_called_once_with(
            verifier_id, deployment_id=deployment_id, tags=tags,
            status=status)

    # report() resolves the reporter class, validates the destination and
    # feeds it the fetched verification objects.
    @mock.patch("rally.api.vreporter.VerificationReporter")
    @mock.patch("rally.api.objects.Verification.get")
    def test_report(self, mock_verification_get, mock_verification_reporter):
        verifications = ["uuid-1", "uuid-2"]
        output_type = mock.Mock()
        output_dest = mock.Mock()
        reporter = mock_verification_reporter.get.return_value

        self.assertEqual(mock_verification_reporter.make.return_value,
                         api._Verification.report(verifications,
                                                  output_type=output_type,
                                                  output_dest=output_dest))
        mock_verification_reporter.get.assert_called_once_with(output_type)
        reporter.validate.assert_called_once_with(output_dest)
        mock_verification_reporter.make.assert_called_once_with(
            reporter,
            [mock_verification_get.return_value,
             mock_verification_get.return_value],
            output_dest)
        self.assertEqual([mock.call(u) for u in verifications],
                         mock_verification_get.call_args_list)

    # import_results() parses external data via the manager; a parse failure
    # must mark the verification as failed instead of finishing it.
    @mock.patch("rally.api.objects.Verification.create")
    @mock.patch("rally.api._Verifier.get")
    def test_import_results(self, mock___verifier_get,
                            mock_verification_create):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        data = "contest of file with results"
        run_args = {"set_name": "compute"}
        verifier_obj = mock___verifier_get.return_value

        averification, aresults = api._Verification.import_results(
            verifier_id, deployment_id=deployment_id, data=data, **run_args)
        self.assertEqual(mock_verification_create.return_value, averification)
        self.assertEqual(verifier_obj.manager.parse_results.return_value,
                         aresults)

        mock___verifier_get.assert_called_once_with(verifier_id)
        verifier_obj.set_deployment.assert_called_once_with(deployment_id)
        verifier_obj.manager.validate_args.assert_called_once_with(run_args)
        mock_verification_create.assert_called_once_with(
            verifier_id, deployment_id=deployment_id, run_args=run_args)
        averification.update_status.assert_called_once_with(
            consts.VerificationStatus.RUNNING)
        verifier_obj.manager.parse_results.assert_called_once_with(data)
        averification.finish.assert_called_once_with(aresults.totals,
                                                     aresults.tests)

        # check setting failed
        self.assertFalse(averification.set_failed.called)
        averification.finish.reset_mock()
        verifier_obj.manager.parse_results.side_effect = RuntimeError
        self.assertRaises(RuntimeError, api._Verification.import_results,
                          verifier_id, deployment_id=deployment_id, data=data,
                          **run_args)
        self.assertFalse(averification.finish.called)
        self.assertTrue(averification.set_failed.called)

    # start() must refuse for every verifier status except INSTALLED.
    @mock.patch("rally.api._Verifier.get")
    @mock.patch("rally.api.objects.Deployment.get",
                return_value=fakes.FakeDeployment(
                    uuid="deployment_uuid",
                    status=consts.DeployStatus.DEPLOY_FINISHED))
    def test_start_failed_due_to_wrong_status_of_verifier(
            self, mock_deployment_get,
            mock___verifier_get):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        verifier_obj = mock___verifier_get.return_value
        for status in consts.VerifierStatus:
            if status != consts.VerifierStatus.INSTALLED:
                verifier_obj.status = status
                e = self.assertRaises(exceptions.RallyException,
                                      api._Verification.start, verifier_id,
                                      deployment_id)
                self.assertIn(
                    "Failed to start verification because verifier %s is in "
                    "'%s' status" % (verifier_obj, verifier_obj.status),
                    "%s" % e)

    # start() configures the verifier first when it is not yet configured.
    @mock.patch("rally.api.objects.Verification.create")
    @mock.patch("rally.api._Verifier.configure")
    @mock.patch("rally.api._Verifier.get")
    @mock.patch("rally.api.objects.Deployment.get",
                return_value=fakes.FakeDeployment(
                    uuid="deployment_uuid",
                    status=consts.DeployStatus.DEPLOY_FINISHED))
    def test_start_with_configuring(self, mock_deployment_get,
                                    mock___verifier_get, mock_configure,
                                    mock_verification_create):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        verifier_obj = mock___verifier_get.return_value
        verifier_obj.status = consts.VerifierStatus.INSTALLED
        verifier_obj.manager.is_configured.return_value = False

        api._Verification.start(verifier_id, deployment_id)
        mock_deployment_get.assert_called_once_with(deployment_id)
        verifier_obj.set_deployment.assert_called_once_with(deployment_id)
        mock_configure.assert_called_once_with(verifier_obj, deployment_id)

    # Happy path of start(): validates run_args, creates a verification,
    # runs the manager with the assembled context and finishes with the
    # run results.
    @mock.patch("rally.api.objects.Verification.create")
    @mock.patch("rally.api._Verifier.configure")
    @mock.patch("rally.api._Verifier.get")
    @mock.patch("rally.api.objects.Deployment.get",
                return_value=fakes.FakeDeployment(
                    uuid="deployment_uuid",
                    status=consts.DeployStatus.DEPLOY_FINISHED))
    def test_start(self, mock_deployment_get, mock___verifier_get,
                   mock_configure, mock_verification_create):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        tags = ["foo", "bar"]
        run_args = {"arg": "value"}
        verifier_obj = mock___verifier_get.return_value
        verifier_obj.status = consts.VerifierStatus.INSTALLED
        verification_obj = mock_verification_create.return_value

        api._Verification.start(verifier_id, deployment_id, tags=tags,
                                **run_args)
        mock_deployment_get.assert_called_once_with(deployment_id)
        verifier_obj.set_deployment.assert_called_once_with(deployment_id)
        verifier_obj.manager.validate.assert_called_once_with(run_args)
        mock_verification_create.assert_called_once_with(
            verifier_id=verifier_id, deployment_id=deployment_id, tags=tags,
            run_args=run_args)
        verification_obj.update_status.assert_called_once_with(
            consts.VerificationStatus.RUNNING)

        context = {"config": verifier_obj.manager._meta_get.return_value,
                   "run_args": run_args,
                   "verification": verification_obj,
                   "verifier": verifier_obj}
        verifier_obj.manager.run.assert_called_once_with(context)

        results = verifier_obj.manager.run.return_value
        verification_obj.finish.assert_called_once_with(results.totals,
                                                        results.tests)

        self.assertFalse(mock_configure.called)

    # start() must refuse when the deployment is not in a finished state.
    @mock.patch("rally.api.objects.Deployment.get",
                return_value=fakes.FakeDeployment(
                    name="xxx_name",
                    uuid="deployment_uuid",
                    status=consts.DeployStatus.DEPLOY_INIT))
    def test_start_on_unfinished_deployment(self, mock_deployment_get):
        verifier_id = "v_id"
        deployment_id = mock_deployment_get.return_value["uuid"]
        tags = ["foo", "bar"]
        run_args = {"arg": "value"}
        self.assertRaises(exceptions.DeploymentNotFinishedStatus,
                          api._Verification.start, verifier_id,
                          deployment_id, tags=tags, **run_args)

    # A manager.run() failure propagates and the verification is never
    # finished.
    @mock.patch("rally.api.objects.Verification.create")
    @mock.patch("rally.api._Verifier.configure")
    @mock.patch("rally.api._Verifier.get")
    @mock.patch("rally.api.objects.Deployment.get",
                return_value=fakes.FakeDeployment(
                    uuid="deployment_uuid",
                    status=consts.DeployStatus.DEPLOY_FINISHED))
    def test_start_failed_to_run(self, mock_deployment_get,
                                 mock___verifier_get, mock_configure,
                                 mock_verification_create):
        verifier_id = "vuuuiiddd"
        deployment_id = "duuuiidd"
        tags = ["foo", "bar"]
        run_args = {"arg": "value"}
        verifier_obj = mock___verifier_get.return_value
        verifier_obj.status = consts.VerifierStatus.INSTALLED
        verification_obj = mock_verification_create.return_value
        verifier_obj.manager.run.side_effect = RuntimeError

        self.assertRaises(RuntimeError, api._Verification.start,
                          verifier_id, deployment_id, tags=tags, **run_args)
        verifier_obj.set_deployment.assert_called_once_with(deployment_id)
        verifier_obj.manager.validate.assert_called_once_with(run_args)
        mock_verification_create.assert_called_once_with(
            verifier_id=verifier_id, deployment_id=deployment_id, tags=tags,
            run_args=run_args)
        verification_obj.update_status.assert_called_once_with(
            consts.VerificationStatus.RUNNING)

        context = {"config": verifier_obj.manager._meta_get.return_value,
                   "run_args": run_args,
                   "verification": verification_obj,
                   "verifier": verifier_obj}
        verifier_obj.manager.run.assert_called_once_with(context)

        self.assertFalse(verification_obj.finish.called)
        self.assertFalse(mock_configure.called)

    # rerun() without filters restarts every test of the verification.
    @mock.patch("rally.api._Verification.start")
    @mock.patch("rally.api._Deployment.get")
    @mock.patch("rally.api._Verification.get")
    def test_rerun(self, mock___verification_get, mock___deployment_get,
                   mock___verification_start):
        tests = {"test_1": {"status": "success"},
                 "test_2": {"status": "fail"}}
        mock___verification_get.return_value = mock.Mock(
            uuid="uuid", verifier_uuid="v_uuid", deployment_uuid="d_uuid",
            tests=tests)
        mock___deployment_get.return_value = {"name": "d_name",
                                              "uuid": "d_uuid"}

        api._Verification.rerun("uuid")
        mock___verification_start.assert_called_once_with(
            "v_uuid", "d_uuid", load_list=tests.keys(), tags=None)

    # rerun(failed=True) restarts only the tests that failed.
    @mock.patch("rally.api._Verification.start")
    @mock.patch("rally.api._Deployment.get")
    @mock.patch("rally.api._Verification.get")
    def test_rerun_failed_tests(
            self, mock___verification_get, mock___deployment_get,
            mock___verification_start):
        tests = {"test_1": {"status": "success"},
                 "test_2": {"status": "fail"},
                 "test_3": {"status": "fail"}}
        mock___verification_get.return_value = mock.Mock(
            uuid="uuid", verifier_uuid="v_uuid", deployment_uuid="d_uuid",
            tests=tests)
        mock___deployment_get.return_value = {"name":
"d_name", "uuid": "d_uuid"} api._Verification.rerun("uuid", failed=True) expected_tests = [t for t, r in tests.items() if r["status"] == "fail"] mock___verification_start.assert_called_once_with( "v_uuid", "d_uuid", load_list=expected_tests, tags=None) @mock.patch("rally.api._Verification.get") def test_rerun_failed_tests_raise_exc( self, mock___verification_get): tests = {"test_1": {"status": "success"}, "test_2": {"status": "success"}, "test_3": {"status": "skip"}} mock___verification_get.return_value = mock.Mock( uuid="uuid", verifier_uuid="v_uuid", deployment_uuid="d_uuid", tests=tests) e = self.assertRaises(exceptions.RallyException, api._Verification.rerun, "uuid", failed=True) self.assertEqual("There are no failed tests from verification " "(UUID=uuid).", "%s" % e) rally-0.9.1/tests/README.rst0000664000567000056710000000454613073417716016660 0ustar jenkinsjenkins00000000000000Testing ======= Please, don't hesitate to write tests ;) Unit tests ---------- *Files: /tests/unit/** The goal of unit tests is to ensure that internal parts of the code work properly. All internal methods should be fully covered by unit tests with a reasonable mocks usage. 
About Rally unit tests:

- All `unit tests <http://en.wikipedia.org/wiki/Unit_testing>`_ are located
  inside /tests/unit/*
- Tests are written on top of: *testtools* and *mock* libs
- `Tox <https://tox.readthedocs.org/en/latest/>`_ is used to run unit tests

To run unit tests locally::

  $ pip install tox
  $ tox

To run py27, py34, py35 or pep8 only::

  $ tox -e <name>

  # NOTE: <name> is one of py27, py34, py35 or pep8

To run py27/py34/py35 against mysql or psql::

  $ export RALLY_UNITTEST_DB_URL="mysql://user:secret@localhost/rally"
  $ tox -epy27

To run specific test of py27/py34/py35::

  $ tox -e py27 -- tests.unit.test_osclients

To get test coverage::

  $ tox -e cover

  # NOTE: Results will be in ./cover/index.html

To generate docs::

  $ tox -e docs

  # NOTE: Documentation will be in doc/source/_build/html/index.html

Functional tests
----------------

*Files: /tests/functional/**

The goal of
`functional tests <https://en.wikipedia.org/wiki/Functional_testing>`_
is to check that everything works well together.

Functional tests use Rally API only and check responses without touching
internal parts.

To run functional tests locally::

  $ source openrc
  $ rally deployment create --fromenv --name testing
  $ tox -e cli

  # NOTE: openrc file with OpenStack admin credentials

Output of every Rally execution will be collected under some reports root in
a directory structure like: reports_root/ClassName/MethodName_suffix.extension

This functionality is implemented in the tests.functional.utils.Rally.__call__
method. Use the 'gen_report_path' method of the 'Rally' class to get an
automatically generated file path and name if you need one. You can use it to
publish html reports generated during tests. The reports root can be passed
through the environment variable 'REPORTS_ROOT'. Default is
'rally-cli-output-files'.

Rally CI scripts
----------------

*Files: /tests/ci/**

This directory contains scripts and files related to the Rally CI system.

Rally Style Commandments
------------------------

*File: /tests/hacking/checks.py*

This module contains Rally specific hacking rules for checking commandments.
rally-0.9.1/tests/hacking/0000775000567000056710000000000013073420067016555 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/hacking/__init__.py0000664000567000056710000000000013073417716020663 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/hacking/checks.py0000664000567000056710000005033613073417720020400 0ustar jenkinsjenkins00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Guidelines for writing new hacking checks - Use only for Rally specific tests. OpenStack general tests should be submitted to the common 'hacking' module. - Pick numbers in the range N3xx. Find the current test with the highest allocated number and then pick the next value. - Keep the test method code in the source file ordered based on the N3xx value. 
- List the new rule in the top level HACKING.rst file - Add test cases for each new rule to tests/unit/test_hacking.py """ import functools import re import tokenize re_assert_true_instance = re.compile( r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " r"(\w|\.|\'|\"|\[|\])+\)\)") re_assert_equal_type = re.compile( r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " r"(\w|\.|\'|\"|\[|\])+\)") re_assert_equal_end_with_none = re.compile(r"assertEqual\(.*?,\s+None\)$") re_assert_equal_start_with_none = re.compile(r"assertEqual\(None,") re_assert_not_equal_end_with_none = re.compile( r"assertNotEqual\(.*?,\s+None\)$") re_assert_not_equal_start_with_none = re.compile(r"assertNotEqual\(None,") re_assert_true_false_with_in_or_not_in = re.compile( r"assert(True|False)\(" r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])+(, .*)?\)") re_assert_true_false_with_in_or_not_in_spaces = re.compile( r"assert(True|False)\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|[][.'\", ])+" r"[\[|'|\"](, .*)?\)") re_assert_equal_in_end_with_true_or_false = re.compile( r"assertEqual\((\w|[][.'\"])+( not)? in (\w|[][.'\", ])+, (True|False)\)") re_assert_equal_in_start_with_true_or_false = re.compile( r"assertEqual\((True|False), (\w|[][.'\"])+( not)? in (\w|[][.'\", ])+\)") re_no_construct_dict = re.compile( r"\sdict\(\)") re_no_construct_list = re.compile( r"\slist\(\)") re_str_format = re.compile(r""" % # start of specifier \(([^)]+)\) # mapping key, in group 1 [#0 +\-]? # optional conversion flag (?:-?\d*)? # optional minimum field width (?:\.\d*)? # optional precision [hLl]? 
                            # optional length modifier
    [A-z%]                  # conversion modifier
    """, re.X)
re_raises = re.compile(
    r"\s:raise[^s] *.*$|\s:raises *:.*$|\s:raises *[^:]+$")
re_db_import = re.compile(r"^from rally.common import db")
re_objects_import = re.compile(r"^from rally.common import objects")
re_old_type_class = re.compile(r"^\s*class \w+(\(\))?:")
re_datetime_alias = re.compile(r"^(from|import) datetime(?!\s+as\s+dt$)")
re_log_warn = re.compile(r"(.)*LOG\.(warn)\(\s*('|\"|_)")


def skip_ignored_lines(func):
    # Decorator for check functions: silently skips blank lines, comment
    # lines and lines ending with "# noqa".
    @functools.wraps(func)
    def wrapper(logical_line, physical_line, filename):
        line = physical_line.strip()
        if not line or line.startswith("#") or line.endswith("# noqa"):
            return
        # NOTE(review): next() forwards only the FIRST error the wrapped
        # check yields; if the check yields nothing, the StopIteration just
        # ends this generator (pre-PEP 479 semantics — under Python 3.7+
        # this would surface as RuntimeError; TODO confirm supported
        # interpreter versions).
        yield next(func(logical_line, physical_line, filename))

    return wrapper


def _parse_assert_mock_str(line):
    # Split "obj.assert_xxx(" into (position, method name, object name);
    # returns (None, None, None) when the line contains no such call.
    point = line.find(".assert_")

    if point == -1:
        point = line.find(".called_once_with(")

    if point != -1:
        end_pos = line[point:].find("(") + point
        return point, line[point + 1: end_pos], line[: point]
    else:
        return None, None, None


@skip_ignored_lines
def check_assert_methods_from_mock(logical_line, physical_line, filename):
    """Ensure that ``assert_*`` methods from ``mock`` library is used correctly

    N301 - base error number
    N302 - related to nonexistent "assert_called"
    N303 - related to nonexistent "assert_called_once"
    N304 - related to nonexistent "called_once_with"
    """
    correct_names = ["assert_any_call", "assert_called_once_with",
                     "assert_called_with", "assert_has_calls",
                     "assert_not_called"]
    ignored_files = ["./tests/unit/test_hacking.py"]

    if filename.startswith("./tests") and filename not in ignored_files:
        pos, method_name, obj_name = _parse_assert_mock_str(logical_line)

        if pos:
            if method_name not in correct_names:
                error_number = "N301"
                msg = ("%(error_number)s:'%(method)s' is not present in `mock`"
                       " library. %(custom_msg)s For more details, visit "
                       "http://www.voidspace.org.uk/python/mock/ .")

                if method_name == "assert_called":
                    error_number = "N302"
                    custom_msg = ("Maybe, you should try to use "
                                  "'assertTrue(%s.called)' instead." %
                                  obj_name)
                elif method_name == "assert_called_once":
                    # For more details, see a bug in Rally:
                    #    https://bugs.launchpad.net/rally/+bug/1305991
                    error_number = "N303"
                    custom_msg = ("Maybe, you should try to use "
                                  "'assertEqual(1, %s.call_count)' "
                                  "or '%s.assert_called_once_with()'"
                                  " instead." % (obj_name, obj_name))
                elif method_name == "called_once_with":
                    error_number = "N304"
                    custom_msg = ("Maybe, you should try to use "
                                  "'%s.assert_called_once_with()'"
                                  " instead." % obj_name)
                else:
                    custom_msg = ("Correct 'assert_*' methods: '%s'."
                                  % "', '".join(correct_names))

                yield (pos, msg % {
                    "error_number": error_number,
                    "method": method_name,
                    "custom_msg": custom_msg})


@skip_ignored_lines
def check_import_of_logging(logical_line, physical_line, filename):
    """Check correctness import of logging module

    N310
    """
    excluded_files = ["./rally/common/logging.py",
                      "./tests/unit/test_logging.py",
                      "./tests/ci/rally_verify.py",
                      "./tests/ci/sync_requirements.py"]

    forbidden_imports = ["from oslo_log",
                         "import oslo_log",
                         "import logging"]

    if filename not in excluded_files:
        for forbidden_import in forbidden_imports:
            if logical_line.startswith(forbidden_import):
                yield (0, "N310 Wrong module for logging is imported. Please "
                          "use `rally.common.logging` instead.")


@skip_ignored_lines
def no_translate_debug_logs(logical_line, physical_line, filename):
    """Check for "LOG.debug(_("

    As per our translation policy,
    https://wiki.openstack.org/wiki/LoggingStandards#Log_Translation
    we shouldn't translate debug level logs.

    * This check assumes that 'LOG' is a logger.
    * Use filename so we can start enforcing this in specific folders
      instead of needing to do so all at once.
    N311
    """
    if logical_line.startswith("LOG.debug(_("):
        yield(0, "N311 Don't translate debug level logs")


@skip_ignored_lines
def no_use_conf_debug_check(logical_line, physical_line, filename):
    """Check for "cfg.CONF.debug"

    Rally has two DEBUG level:
     - Full DEBUG, which include all debug-messages from all OpenStack
       services
     - Rally DEBUG, which include only Rally debug-messages
    so we should use custom check to know debug-mode, instead of CONF.debug

    N312
    """
    excluded_files = ["./rally/common/logging.py"]

    point = logical_line.find("CONF.debug")

    if point != -1 and filename not in excluded_files:
        yield(point, "N312 Don't use `CONF.debug`. "
                     "Function `rally.common.logging.is_debug` "
                     "should be used instead.")


@skip_ignored_lines
def assert_true_instance(logical_line, physical_line, filename):
    """Check for assertTrue(isinstance(a, b)) sentences

    N320
    """
    if re_assert_true_instance.match(logical_line):
        yield (0, "N320 assertTrue(isinstance(a, b)) sentences not allowed, "
               "you should use assertIsInstance(a, b) instead.")


@skip_ignored_lines
def assert_equal_type(logical_line, physical_line, filename):
    """Check for assertEqual(type(A), B) sentences

    N321
    """
    if re_assert_equal_type.match(logical_line):
        yield (0, "N321 assertEqual(type(A), B) sentences not allowed, "
               "you should use assertIsInstance(a, b) instead.")


@skip_ignored_lines
def assert_equal_none(logical_line, physical_line, filename):
    """Check for assertEqual(A, None) or assertEqual(None, A) sentences

    N322
    """
    res = (re_assert_equal_start_with_none.search(logical_line) or
           re_assert_equal_end_with_none.search(logical_line))
    if res:
        yield (0, "N322 assertEqual(A, None) or assertEqual(None, A) "
               "sentences not allowed, you should use assertIsNone(A) "
               "instead.")


@skip_ignored_lines
def assert_true_or_false_with_in(logical_line, physical_line, filename):
    """Check assertTrue/False(A in/not in B) with collection contents

    Check for assertTrue/False(A in B), assertTrue/False(A not in B),
    assertTrue/False(A in B, message)
    or assertTrue/False(A not in B, message) sentences.

    N323
    """
    res = (re_assert_true_false_with_in_or_not_in.search(logical_line) or
           re_assert_true_false_with_in_or_not_in_spaces.search(
               logical_line))
    if res:
        yield (0, "N323 assertTrue/assertFalse(A in/not in B)sentences not "
               "allowed, you should use assertIn(A, B) or assertNotIn(A, B)"
               " instead.")


@skip_ignored_lines
def assert_equal_in(logical_line, physical_line, filename):
    """Check assertEqual(A in/not in B, True/False) with collection contents

    Check for assertEqual(A in B, True/False), assertEqual(True/False, A in B),
    assertEqual(A not in B, True/False) or assertEqual(True/False, A not in B)
    sentences.

    N324
    """
    res = (re_assert_equal_in_end_with_true_or_false.search(logical_line) or
           re_assert_equal_in_start_with_true_or_false.search(logical_line))
    if res:
        yield (0, "N324: Use assertIn/NotIn(A, B) rather than "
               "assertEqual(A in/not in B, True/False) when checking "
               "collection contents.")


@skip_ignored_lines
def assert_not_equal_none(logical_line, physical_line, filename):
    """Check for assertNotEqual(A, None) or assertEqual(None, A) sentences

    N325
    """
    res = (re_assert_not_equal_start_with_none.search(logical_line) or
           re_assert_not_equal_end_with_none.search(logical_line))
    if res:
        yield (0, "N325 assertNotEqual(A, None) or assertNotEqual(None, A) "
               "sentences not allowed, you should use assertIsNotNone(A) "
               "instead.")


@skip_ignored_lines
def check_no_direct_rally_objects_import(logical_line, physical_line,
                                         filename):
    """Check if rally.common.objects are properly imported.

    If you import "from rally.common import objects" you are able to use
    objects directly like objects.Task.

    N340
    """
    # The objects package itself (and its endpoint module) may of course
    # import its own submodules directly.
    if filename == "./rally/common/objects/__init__.py":
        return
    if filename == "./rally/common/objects/endpoint.py":
        return
    if (logical_line.startswith("from rally.common.objects") or
            logical_line.startswith("import rally.common.objects.")):
        yield (0, "N340: Import objects module:"
               "`from rally.common import objects`. "
               "After that you can use directly objects e.g. objects.Task")


@skip_ignored_lines
def check_no_oslo_deprecated_import(logical_line, physical_line, filename):
    """Check if oslo.foo packages are not imported instead of oslo_foo ones.

    Libraries from oslo.foo namespace are deprecated because of namespace
    problems.

    N341
    """
    if (logical_line.startswith("from oslo.") or
            logical_line.startswith("import oslo.")):
        yield (0, "N341: Import oslo module: `from oslo_xyz import ...`. "
               "The oslo.xyz namespace was deprecated, use oslo_xyz "
               "instead")


@skip_ignored_lines
def check_quotes(logical_line, physical_line, filename):
    """Check that single quotation marks are not used

    N350
    """
    # Hand-rolled scanner: walks the line char by char, tracking whether we
    # are inside a double-quoted string or a triple-quoted string, so that
    # single quotes inside double-quoted strings are not flagged.
    in_string = False
    in_multiline_string = False
    single_quotas_are_used = False

    # True when line[i:i+3] is three consecutive `char` characters
    # (i.e. a triple-quote delimiter).
    check_tripple = (
        lambda line, i, char: (
            i + 2 < len(line) and
            (char == line[i] == line[i + 1] == line[i + 2])
        )
    )

    i = 0
    while i < len(logical_line):
        char = logical_line[i]

        if in_string:
            if char == "\"":
                in_string = False
            if char == "\\":
                i += 1  # ignore next char
        elif in_multiline_string:
            if check_tripple(logical_line, i, "\""):
                i += 2  # skip next 2 chars
                in_multiline_string = False
        elif char == "#":
            # rest of the line is a comment
            break
        elif char == "'":
            single_quotas_are_used = True
            break
        elif char == "\"":
            if check_tripple(logical_line, i, "\""):
                in_multiline_string = True
                i += 3
                continue
            in_string = True
        i += 1

    if single_quotas_are_used:
        yield (i, "N350 Remove Single quotes")


@skip_ignored_lines
def check_no_constructor_data_struct(logical_line, physical_line, filename):
    """Check that data structs (lists, dicts) are declared using literals

    N351
    """
    match = re_no_construct_dict.search(logical_line)
    if match:
        yield (0, "N351 Remove dict() construct and use literal {}")
    match = re_no_construct_list.search(logical_line)
    if match:
        yield (0, "N351 Remove list() construct and use literal []")


def check_dict_formatting_in_string(logical_line, tokens):
    """Check that strings do not use dict-formatting with a single replacement

    N352
    """
    # NOTE(stpierre): Can't
use @skip_ignored_lines here because it's # a stupid decorator that only works on functions that take # (logical_line, filename) as arguments. if (not logical_line or logical_line.startswith("#") or logical_line.endswith("# noqa")): return current_string = "" in_string = False for token_type, text, start, end, line in tokens: if token_type == tokenize.STRING: if not in_string: current_string = "" in_string = True current_string += text.strip("\"") elif token_type == tokenize.OP: if not current_string: continue # NOTE(stpierre): The string formatting operator % has # lower precedence than +, so we assume that the logical # string has concluded whenever we hit an operator of any # sort. (Most operators don't work for strings anyway.) # Some string operators do have higher precedence than %, # though, so you can technically trick this check by doing # things like: # # "%(foo)s" * 1 % {"foo": 1} # "%(foo)s"[:] % {"foo": 1} # # It also will produce false positives if you use explicit # parenthesized addition for two strings instead of # concatenation by juxtaposition, e.g.: # # ("%(foo)s" + "%(bar)s") % vals # # But if you do any of those things, then you deserve all # of the horrible things that happen to you, and probably # many more. in_string = False if text == "%": format_keys = set() for match in re_str_format.finditer(current_string): format_keys.add(match.group(1)) if len(format_keys) == 1: yield (0, "N353 Do not use mapping key string formatting " "with a single key") if text != ")": # NOTE(stpierre): You can have a parenthesized string # followed by %, so a closing paren doesn't obviate # the possibility for a substitution operator like # every other operator does. 
current_string = "" elif token_type in (tokenize.NL, tokenize.COMMENT): continue else: in_string = False if token_type == tokenize.NEWLINE: current_string = "" @skip_ignored_lines def check_using_unicode(logical_line, physical_line, filename): """Check crosspython unicode usage N353 """ if re.search(r"\bunicode\(", logical_line): yield (0, "N353 'unicode' function is absent in python3. Please " "use 'six.text_type' instead.") def check_raises(physical_line, filename): """Check raises usage N354 """ ignored_files = ["./tests/unit/test_hacking.py", "./tests/hacking/checks.py"] if filename not in ignored_files: if re_raises.search(physical_line): return (0, "N354 ':Please use ':raises Exception: conditions' " "in docstrings.") @skip_ignored_lines def check_old_type_class(logical_line, physical_line, filename): """Use new-style Python classes N355 """ if re_old_type_class.search(logical_line): yield (0, "N355 This class does not inherit from anything and thus " "will be an old-style class by default. 
Try to inherit from " "``object`` or another new-style class.") @skip_ignored_lines def check_datetime_alias(logical_line, physical_line, filename): """Ensure using ``dt`` as alias for ``datetime`` N356 """ if re_datetime_alias.search(logical_line): yield (0, "N356 Please use ``dt`` as alias for ``datetime``.") @skip_ignored_lines def check_no_six_iteritems(logical_line, physical_line, filename): """Check no six.iteritems N357 """ if re.search(r"\six.iteritems\(\)", logical_line): yield (0, "N357 Use dict.items() instead of six.iteritems()") @skip_ignored_lines def check_db_imports_in_cli(logical_line, physical_line, filename): """Ensure that CLI modules do not use ``rally.common.db`` N360 """ if (not filename.startswith("./rally/cli") or filename == "./rally/cli/manage.py"): return if re_db_import.search(logical_line): yield (0, "N360 CLI modules do not allow to work with " "`rally.common.db``.") @skip_ignored_lines def check_objects_imports_in_cli(logical_line, physical_line, filename): """Ensure that CLI modules do not use ``rally.common.objects`` N361 """ if not filename.startswith("./rally/cli"): return if re_objects_import.search(logical_line): yield (0, "N361 CLI modules do not allow to work with " "`rally.common.objects``.") @skip_ignored_lines def check_log_warn(logical_line, physical_line, filename): if re_log_warn.search(logical_line): yield(0, "N313 LOG.warn is deprecated, please use LOG.warning") def factory(register): register(check_assert_methods_from_mock) register(check_import_of_logging) register(no_translate_debug_logs) register(no_use_conf_debug_check) register(assert_true_instance) register(assert_equal_type) register(assert_equal_none) register(assert_true_or_false_with_in) register(assert_equal_in) register(check_no_direct_rally_objects_import) register(check_no_oslo_deprecated_import) register(check_quotes) register(check_no_constructor_data_struct) register(check_dict_formatting_in_string) register(check_using_unicode) 
register(check_raises) register(check_datetime_alias) register(check_db_imports_in_cli) register(check_objects_imports_in_cli) register(check_old_type_class) register(check_no_six_iteritems) register(check_log_warn) rally-0.9.1/tests/hacking/README.rst0000664000567000056710000000442513073417720020253 0ustar jenkinsjenkins00000000000000Rally Style Commandments ======================== - Step 1: Read the OpenStack Style Commandments http://docs.openstack.org/developer/hacking/ - Step 2: Read on Rally Specific Commandments --------------------------- * [N30x] - Reserved for rules related to ``mock`` library * [N301] - Ensure that ``assert_*`` methods from ``mock`` library is used correctly * [N302] - Ensure that nonexistent "assert_called" is not used * [N303] - Ensure that nonexistent "assert_called_once" is not used * [N310-N314] - Reserved for rules related to logging * [N310] - Ensure that ``rally.common.log`` is used as logging module * [N311] - Validate that debug level logs are not translated * [N312] - Validate correctness of debug on check. * [N313] - Validate that LOG.warning is used instead of deprecated LOG.warn. 
* [N32x] - Reserved for rules related to assert* methods * [N320] - Ensure that ``assertTrue(isinstance(A, B))`` is not used * [N321] - Ensure that ``assertEqual(type(A), B)`` is not used * [N322] - Ensure that ``assertEqual(A, None)`` and ``assertEqual(None, A)`` are not used * [N323] - Ensure that ``assertTrue/assertFalse(A in/not in B)`` are not used with collection contents * [N324] - Ensure that ``assertEqual(A in/not in B, True/False)`` and ``assertEqual(True/False, A in/not in B)`` are not used with collection contents * [N325] - Ensure that ``assertNotEqual(A, None)`` and ``assertNotEqual(None, A)`` are not used * [N340] - Ensure that we are importing always ``from rally import objects`` * [N341] - Ensure that we are importing oslo_xyz packages instead of deprecated oslo.xyz ones * [N350] - Ensure that single quotes are not used * [N351] - Ensure that data structs (i.e Lists and Dicts) are declared literally rather than using constructors * [N352] - Ensure that string formatting only uses a mapping if multiple mapping keys are used. * [N353] - Ensure that unicode() function is not uset because of absence in py3 * [N354] - Ensure that ``:raises: Exception`` is not used * [N355] - Ensure that we use only "new-style" Python classes * [N356] - Ensure using ``dt`` as alias for ``datetime`` * [N360-N370] - Reserved for rules related to CLI * [N360] - Ensure that CLI modules do not use ``rally.common.db`` * [N361] - Ensure that CLI modules do not use ``rally.common.objects`` rally-0.9.1/tests/functional/0000775000567000056710000000000013073420067017313 5ustar jenkinsjenkins00000000000000rally-0.9.1/tests/functional/__init__.py0000664000567000056710000000000013073417716021421 0ustar jenkinsjenkins00000000000000rally-0.9.1/tests/functional/test_cli_verify.py0000664000567000056710000000217313073417716023071 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. ############################################################################## # # THIS MODULE IS DEPRECATED. # DON'T ADD TESTS FOR "rally verify" HERE. # # This module is no longer used for testing "rally verify" command. # Functional testing for this command is moved to separate job. # https://review.openstack.org/#/c/137232 # # Please look at tests/ci/rally-verify.sh for more details. # ############################################################################## pass rally-0.9.1/tests/functional/test_cli_plugin.py0000664000567000056710000000501513073417716023061 0ustar jenkinsjenkins00000000000000# Copyright 2015: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import unittest from tests.functional import utils class PluginTestCase(unittest.TestCase): def setUp(self): super(PluginTestCase, self).setUp() self.rally = utils.Rally() def test_show_one(self): result = self.rally("plugin show Dummy.dummy") self.assertIn("NAME", result) self.assertIn("NAMESPACE", result) self.assertIn("Dummy.dummy", result) self.assertIn("MODULE", result) def test_show_multiple(self): result = self.rally("plugin show Dummy") self.assertIn("Multiple plugins found:", result) self.assertIn("Dummy.dummy", result) self.assertIn("Dummy.dummy_exception", result) self.assertIn("Dummy.dummy_random_fail_in_atomic", result) def test_show_not_found(self): name = "Dummy666666" result = self.rally("plugin show %s" % name) self.assertIn("There is no plugin: %s" % name, result) def test_show_not_found_in_specific_namespace(self): name = "Dummy" namespace = "non_existing" result = self.rally( "plugin show --name %(name)s --namespace %(namespace)s" % {"name": name, "namespace": namespace}) self.assertIn( "There is no plugin: %(name)s in %(namespace)s namespace" % {"name": name, "namespace": namespace}, result) def test_list(self): result = self.rally("plugin list Dummy") self.assertIn("Dummy.dummy", result) self.assertIn("Dummy.dummy_exception", result) self.assertIn("Dummy.dummy_random_fail_in_atomic", result) def test_list_not_found_namespace(self): result = self.rally("plugin list --namespace some") self.assertIn("There is no plugin namespace: some", result) def test_list_not_found_name(self): result = self.rally("plugin list Dummy2222") self.assertIn("There is no plugin: Dummy2222", result) rally-0.9.1/tests/functional/test_certification_task.py0000664000567000056710000000263613073417716024607 0ustar jenkinsjenkins00000000000000# Copyright 2014: Mirantis Inc. # Copyright 2014: Catalyst IT Ltd. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import traceback import unittest from tests.functional import utils class TestCertificationTask(unittest.TestCase): def test_task_samples_is_valid(self): rally = utils.Rally() full_path = os.path.join( os.path.dirname(__file__), os.pardir, os.pardir, "certification", "openstack") task_path = os.path.join(full_path, "task.yaml") args_path = os.path.join(full_path, "task_arguments.yaml") try: rally("task validate --task %s --task-args-file %s" % (task_path, args_path)) except Exception: print(traceback.format_exc()) self.assertTrue(False, "Wrong task config %s" % full_path) rally-0.9.1/tests/functional/test_cli_deployment.py0000664000567000056710000001117713073417720023744 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import json import re import unittest from tests.functional import utils class DeploymentTestCase(unittest.TestCase): def setUp(self): super(DeploymentTestCase, self).setUp() self.rally = utils.Rally() def test_create_fromenv_list_show(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") self.assertIn("t_create_env", self.rally("deployment list")) self.assertIn(utils.TEST_ENV["OS_AUTH_URL"], self.rally("deployment show")) def test_create_fromfile(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") with open("/tmp/.tmp.deployment", "w") as f: f.write(self.rally("deployment config")) self.rally("deployment create --name t_create_file " "--filename /tmp/.tmp.deployment") self.assertIn("t_create_file", self.rally("deployment list")) def test_config(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") config = json.loads(self.rally("deployment config")) self.assertEqual(utils.TEST_ENV["OS_USERNAME"], config["admin"]["username"]) self.assertEqual(utils.TEST_ENV["OS_PASSWORD"], config["admin"]["password"]) if "project_name" in config["admin"]: # keystone v3 self.assertEqual(utils.TEST_ENV["OS_TENANT_NAME"], config["admin"]["project_name"]) else: # keystone v2 self.assertEqual(utils.TEST_ENV["OS_TENANT_NAME"], config["admin"]["tenant_name"]) self.assertEqual(utils.TEST_ENV["OS_AUTH_URL"], config["auth_url"]) def test_destroy(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") self.assertIn("t_create_env", self.rally("deployment list")) self.rally("deployment destroy") self.assertNotIn("t_create_env", self.rally("deployment list")) def test_check_success(self): self.assertTrue(self.rally("deployment check")) def test_check_fail(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") 
self.assertRaises(utils.RallyCliError, self.rally, ("deployment check")) def test_recreate(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") self.rally("deployment recreate --deployment t_create_env") self.assertIn("t_create_env", self.rally("deployment list")) def test_recreate_from_file(self): self.rally.env.update(utils.TEST_ENV) self.rally("deployment create --name t_create_env --fromenv") config = json.loads(self.rally("deployment config")) config["auth_url"] = "http://foo/" file = utils.JsonTempFile(config) self.rally("deployment recreate --deployment t_create_env " "--filename %s" % file.filename) self.assertIn("t_create_env", self.rally("deployment list")) self.assertEqual(config, json.loads(self.rally("deployment config"))) self.assertIn("http://foo/", self.rally("deployment show")) def test_use(self): self.rally.env.update(utils.TEST_ENV) output = self.rally( "deployment create --name t_create_env1 --fromenv") uuid = re.search(r"Using deployment: (?P[0-9a-f\-]{36})", output).group("uuid") self.rally("deployment create --name t_create_env2 --fromenv") self.rally("deployment use --deployment %s" % uuid) current_deployment = utils.get_global("RALLY_DEPLOYMENT", self.rally.env) self.assertEqual(uuid, current_deployment) rally-0.9.1/tests/functional/utils.py0000664000567000056710000001646013073417716021043 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. import copy import errno import inspect import json import os import shutil import subprocess import tempfile from oslo_utils import encodeutils from six.moves import configparser TEST_ENV = { "OS_USERNAME": "admin", "OS_PASSWORD": "admin", "OS_TENANT_NAME": "admin", "OS_AUTH_URL": "http://fake/", } DEPLOYMENT_FILE = "/tmp/rally_functests_main_deployment.json" class RallyCliError(Exception): def __init__(self, cmd, code, output): self.command = cmd self.code = code self.output = encodeutils.safe_decode(output) self.msg = "Command: %s Code: %d Output: %s\n" % (self.command, self.code, self.output) def __str__(self): return self.msg def __unicode__(self): return self.msg class JsonTempFile(object): def __init__(self, config): config_file = tempfile.NamedTemporaryFile(delete=False) config_file.write(encodeutils.safe_encode(json.dumps(config))) config_file.close() self.filename = config_file.name def __del__(self): os.unlink(self.filename) class TaskConfig(JsonTempFile): pass class Rally(object): """Create and represent separate rally installation. 
Usage: rally = Rally() rally("deployment", "create", "--name", "Some Deployment Name") output = rally("deployment list") """ def __init__(self, fake=False, force_new_db=False): if not os.path.exists(DEPLOYMENT_FILE): subprocess.call(["rally", "deployment", "config"], stdout=open(DEPLOYMENT_FILE, "w")) # NOTE(sskripnick): we should change home dir to avoid races # and do not touch any user files in ~/.rally self.tmp_dir = tempfile.mkdtemp() self.env = copy.deepcopy(os.environ) self.env["HOME"] = self.tmp_dir self.config_filename = None if force_new_db or ("RCI_KEEP_DB" not in os.environ): config_filename = os.path.join(self.tmp_dir, "conf") config = configparser.RawConfigParser() config.add_section("database") config.set("database", "connection", "sqlite:///%s/db" % self.tmp_dir) with open(config_filename, "w") as conf: config.write(conf) self.args = ["rally", "--config-file", config_filename] subprocess.call(["rally-manage", "--config-file", config_filename, "db", "recreate"], env=self.env) self.config_filename = config_filename else: self.args = ["rally"] subprocess.call(["rally-manage", "db", "recreate"], env=self.env) self.reports_root = os.environ.get("REPORTS_ROOT", "rally-cli-output-files") self._created_files = [] self("deployment create --file %s --name MAIN" % DEPLOYMENT_FILE, write_report=False) def __del__(self): shutil.rmtree(self.tmp_dir) def _safe_make_dirs(self, dirs): try: os.makedirs(dirs) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(dirs): pass else: raise def gen_report_path(self, suffix=None, extension=None, keep_old=False): """Report file path/name modifier :param suffix: suffix that will be appended to filename. It will be appended before extension :param extension: file extension. 
:param keep_old: if True, previous reports will not be deleted, but rename to 'nameSuffix.old*.extension' :return: complete report name to write report """ caller_frame = inspect.currentframe().f_back if caller_frame.f_code.co_name == "__call__": caller_frame = caller_frame.f_back method_name = caller_frame.f_code.co_name test_object = caller_frame.f_locals["self"] class_name = test_object.__class__.__name__ self._safe_make_dirs("%s/%s" % (self.reports_root, class_name)) suff = suffix or "" ext = extension or "txt" path = "%s/%s/%s%s.%s" % (self.reports_root, class_name, method_name, suff, ext) if path not in self._created_files: if os.path.exists(path): if not keep_old: os.remove(path) else: path_list = path.split(".") old_suff = "old" path_list.insert(-1, old_suff) new_path = ".".join(path_list) count = 0 while os.path.exists(new_path): count += 1 path_list[-2] = "old%d" % count new_path = ".".join(path_list) os.rename(path, new_path) self._created_files.append(path) return path def __call__(self, cmd, getjson=False, report_path=None, raw=False, suffix=None, extension=None, keep_old=False, write_report=True): """Call rally in the shell :param cmd: rally command :param getjson: in cases, when rally prints JSON, you can catch output deserialized :param report_path: if present, rally command and its output will be written to file with passed file name :param raw: don't write command itself to report file. 
Only output will be written """ if not isinstance(cmd, list): cmd = cmd.split(" ") try: output = encodeutils.safe_decode(subprocess.check_output( self.args + cmd, stderr=subprocess.STDOUT, env=self.env)) if write_report: if not report_path: report_path = self.gen_report_path( suffix=suffix, extension=extension, keep_old=keep_old) with open(report_path, "a") as rep: if not raw: rep.write("\n%s:\n" % " ".join(self.args + cmd)) rep.write("%s\n" % output) if getjson: return json.loads(output) return output except subprocess.CalledProcessError as e: raise RallyCliError(cmd, e.returncode, e.output) def get_global(global_key, env): home_dir = env.get("HOME") with open("%s/.rally/globals" % home_dir) as f: for line in f.readlines(): if line.startswith("%s=" % global_key): key, value = line.split("=") return value.rstrip() return "" rally-0.9.1/tests/functional/test_cli_task.py0000664000567000056710000013462713073417720022534 0ustar jenkinsjenkins00000000000000# Copyright 2013: Mirantis Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import json import os import re import threading import time import unittest import mock from tests.functional import utils FAKE_TASK_UUID = "87ab639d-4968-4638-b9a1-07774c32484a" class TaskTestCase(unittest.TestCase): def _get_sample_task_config(self): return { "Dummy.dummy_random_fail_in_atomic": [ { "runner": { "type": "constant", "times": 100, "concurrency": 5 } } ] } def _get_sample_task_config_v2(self): return { "version": 2, "title": "Dummy task", "tags": ["dummy", "functional_test"], "subtasks": [ { "title": "first-subtask", "group": "Dummy group", "description": "The first subtask in dummy task", "tags": ["dummy", "functional_test"], "run_in_parallel": False, "workloads": [{ "name": "Dummy.dummy", "args": { "sleep": 0 }, "runner": { "type": "constant", "times": 10, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } } }] }, { "title": "second-subtask", "group": "Dummy group", "description": "The second subtask in dummy task", "tags": ["dummy", "functional_test"], "run_in_parallel": False, "workloads": [{ "name": "Dummy.dummy", "args": { "sleep": 1 }, "runner": { "type": "constant", "times": 10, "concurrency": 2 }, "context": { "users": { "tenants": 3, "users_per_tenant": 2 } } }] } ] } def _get_deployment_uuid(self, output): return re.search( r"Using deployment: (?P[0-9a-f\-]{36})", output).group("uuid") def test_status(self): rally = utils.Rally() cfg = self._get_sample_task_config() config = utils.TaskConfig(cfg) rally("task start --task %s" % config.filename) self.assertIn("finished", rally("task status")) def test_detailed(self): rally = utils.Rally() cfg = self._get_sample_task_config() config = utils.TaskConfig(cfg) rally("task start --task %s" % config.filename) detailed = rally("task detailed") self.assertIn("Dummy.dummy_random_fail_in_atomic", detailed) self.assertIn("dummy_fail_test (2)", detailed) detailed_iterations_data = rally("task detailed --iterations-data") self.assertIn(". 
dummy_fail_test (2)", detailed_iterations_data) self.assertNotIn("n/a", detailed_iterations_data) def test_detailed_with_errors(self): rally = utils.Rally() cfg = { "Dummy.dummy_exception": [ { "runner": { "type": "constant", "times": 1, "concurrency": 1 } } ] } config = utils.TaskConfig(cfg) output = rally("task start --task %s" % config.filename) uuid = re.search( r"(?P[0-9a-f\-]{36}): started", output).group("uuid") output = rally("task detailed") self.assertIn("Task %s has 1 error(s)" % uuid, output) def test_detailed_no_atomic_actions(self): rally = utils.Rally() cfg = { "Dummy.dummy": [ { "runner": { "type": "constant", "times": 100, "concurrency": 5 } } ] } config = utils.TaskConfig(cfg) rally("task start --task %s" % config.filename) detailed = rally("task detailed") self.assertIn("Dummy.dummy", detailed) detailed_iterations_data = rally("task detailed --iterations-data") self.assertNotIn("n/a", detailed_iterations_data) def test_start_with_empty_config(self): rally = utils.Rally() config = utils.TaskConfig(None) with self.assertRaises(utils.RallyCliError) as err: rally("task start --task %s" % config.filename) self.assertIn("Input task is empty", err.exception.output) def test_results(self): rally = utils.Rally() cfg = self._get_sample_task_config() config = utils.TaskConfig(cfg) rally("task start --task %s" % config.filename) self.assertIn("result", rally("task results")) def test_results_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task results --uuid %s" % FAKE_TASK_UUID) def test_abort_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task abort --uuid %s" % FAKE_TASK_UUID) def test_delete_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task delete --uuid %s" % FAKE_TASK_UUID) def test_detailed_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task detailed --uuid %s" 
% FAKE_TASK_UUID) def test_report_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task report --tasks %s" % FAKE_TASK_UUID) def test_sla_check_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task sla-check --uuid %s" % FAKE_TASK_UUID) def test_status_with_wrong_task_id(self): rally = utils.Rally() self.assertRaises(utils.RallyCliError, rally, "task status --uuid %s" % FAKE_TASK_UUID) def _assert_html_report_libs_are_embedded(self, file_path, expected=True): embedded_signatures = ["Copyright (c) 2011-2014 Novus Partners, Inc.", "AngularJS v1.3.3", "Copyright (c) 2010-2015, Michael Bostock"] external_signatures = ["