masakari-9.0.0/0000775000175000017500000000000013656750011013336 5ustar zuulzuul00000000000000masakari-9.0.0/doc/0000775000175000017500000000000013656750011014103 5ustar zuulzuul00000000000000masakari-9.0.0/doc/requirements.txt0000664000175000017500000000075213656747723017413 0ustar zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. sphinx!=1.6.6,!=1.6.7,>=1.6.5,<2.0.0;python_version=='2.7' # BSD sphinx!=1.6.6,!=1.6.7,!=2.1.0,>=1.6.5;python_version>='3.4' # BSD openstackdocstheme>=1.24.0 # Apache-2.0 os-api-ref>=1.4.0 # Apache-2.0 sphinxcontrib-apidoc>=0.2.0 # BSD # releasenotes reno>=2.5.0 # Apache-2.0 masakari-9.0.0/doc/notification_samples/0000775000175000017500000000000013656750011020315 5ustar zuulzuul00000000000000masakari-9.0.0/doc/notification_samples/update-segment-start.json0000664000175000017500000000124713656747723025311 0ustar zuulzuul00000000000000{ "event_type": "segment.update.start", "timestamp": "2018-11-27 14:32:20.396940", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e6322900-025d-4dd6-a3a1-3e0e1e9badeb" }masakari-9.0.0/doc/notification_samples/process-notification-error.json0000664000175000017500000000266413656747723026531 0ustar zuulzuul00000000000000{ "event_type": "notification.process.error", "timestamp": "2018-12-20 06:21:19.315761", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "0adb94e0-8283-4702-9793-186d4ed914e8", "status": "running", 
"source_host_uuid": "6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": { "masakari_object.name": "ExceptionPayload", "masakari_object.data": { "module_name": "masakari.engine.manager", "exception": "str", "traceback": "Traceback (most recent call last):\n File \"/opt/stack/masakari/masakari/engine/manager.py\", line ...", "exception_message": "Failed to execute process recovery workflow.", "function_name": "_handle_notification_type_process" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "id": 51, "generated_time": "2017-06-13T15:34:55Z", "type": "PROCESS", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "ERROR", "publisher_id": "masakari-engine:fake-mini", "message_id": "5f3c9705-b3fb-41f9-a4e0-4868db93178c" }masakari-9.0.0/doc/notification_samples/delete-segment-end.json0000664000175000017500000000121713656747723024677 0ustar zuulzuul00000000000000{ "event_type": "segment.delete.end", "timestamp": "2018-11-27 14:36:07.457369", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "service_type": "compute", "name": "test2" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "00184d05-7a96-4021-b44e-03912a6c0b0d" }masakari-9.0.0/doc/notification_samples/create-host-start.json0000664000175000017500000000127413656747723024605 0ustar zuulzuul00000000000000{ "event_type": "host.create.start", "timestamp": "2018-11-27 13:09:30.716747", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "name": "fake-mini", "on_maintenance": false, "control_attributes": "TEST", "fault": null, "type": "COMPUTE", "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, 
"masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "0ed836cc-353a-40bc-b86b-d89e6632d838" }masakari-9.0.0/doc/notification_samples/create-segment-start.json0000664000175000017500000000107613656747723025272 0ustar zuulzuul00000000000000{ "event_type": "segment.create.start", "timestamp": "2018-11-22 09:25:12.393979", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "service_type": "compute", "fault": null, "recovery_method": "auto", "description": null, "name": "test" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "publisher_id": "masakari-api:fake-mini", "message_id": "e44cb15b-dcba-409e-b0e1-9ee103b9a168" } masakari-9.0.0/doc/notification_samples/delete-segment-start.json0000664000175000017500000000125013656747723025263 0ustar zuulzuul00000000000000{ "event_type": "segment.delete.start", "timestamp": "2018-11-27 14:36:07.442538", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test2", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e6c32ecb-eacc-433d-ba8c-6390ea3da6d2" }masakari-9.0.0/doc/notification_samples/error-exception.json0000664000175000017500000000234313656747723024357 0ustar zuulzuul00000000000000{ "event_type": "segment.create.error", "timestamp": "2018-11-28 14:24:27.902437", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "service_type": "compute", "fault": { "masakari_object.name": "ExceptionPayload", "masakari_object.data": { "module_name": "pymysql.err", "exception": "DBError", "traceback": "Traceback (most recent call last):\n File 
\"/opt/stack/masakari/masakari/ha/api.py\", line ...", "exception_message": "(pymysql.err.Internal Error) (1054, u\"Unknown column 'name' in 'field list'\" ...", "function_name": "raise_mysql_exception" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "recovery_method": "auto", "description": null, "name": "testT6" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "ERROR", "publisher_id": "masakari-api:fake-mini", "message_id": "e5405591-1d19-4a8c-aa92-4d551165d863" }masakari-9.0.0/doc/notification_samples/update-host-start.json0000664000175000017500000000273413656747723024626 0ustar zuulzuul00000000000000{ "event_type": "host.update.start", "timestamp": "2018-11-27 13:13:25.298007", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "d1a3ae84-7f41-4884-bc3f-fa34c7cd1424" }masakari-9.0.0/doc/notification_samples/update-host-end.json0000664000175000017500000000275613656747723024243 0ustar zuulzuul00000000000000{ "event_type": "host.update.end", "timestamp": "2018-11-27 13:13:25.361394", "payload": { "masakari_object.name": "HostApiPayload", 
"masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "e7f85d49-7d02-4713-b90b-433f8e447558" }masakari-9.0.0/doc/notification_samples/create-host-end.json0000664000175000017500000000273213656747723024216 0ustar zuulzuul00000000000000{ "event_type": "host.create.end", "timestamp": "2018-11-27 13:09:30.737034", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "d6a2d900-1977-48fd-aa52-ad7a41fc068b", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 70, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", 
"publisher_id": "masakari-api:fake-mini", "message_id": "e437834a-73e1-4c47-939a-83f6aca2e7ac" }masakari-9.0.0/doc/notification_samples/process-notification-start.json0000664000175000017500000000145113656747723026526 0ustar zuulzuul00000000000000{ "event_type": "notification.process.start", "timestamp": "2018-12-20 05:26:05.002421", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "15505a8c-8856-4f3d-9747-55b6e899c0f5", "status": "new", "source_host_uuid": "6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": null, "id": 47, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-engine:fake-mini", "message_id": "285be756-ac29-4b78-9e2b-9756f5077012" }masakari-9.0.0/doc/notification_samples/create-segment-end.json0000664000175000017500000000124513656747723024701 0ustar zuulzuul00000000000000{ "event_type": "segment.create.end", "timestamp": "2018-11-22 09:25:12.813483", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 850, "uuid": "5cce639c-da08-4e78-b615-66c88aa49d50" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "b8478b31-5943-4495-8867-e8291655f660" }masakari-9.0.0/doc/notification_samples/process-notification-end.json0000664000175000017500000000144613656747723026143 0ustar zuulzuul00000000000000{ "event_type": "notification.process.end", "timestamp": "2018-12-20 05:26:05.075917", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "15505a8c-8856-4f3d-9747-55b6e899c0f5", "status": "ignored", "source_host_uuid": 
"6bfaf80d-7592-4ea8-ad12-60d45476d056", "fault": null, "id": 47, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-engine:fake-mini", "message_id": "c081eb25-7450-4fa2-bb19-ae6d4466e14e" }masakari-9.0.0/doc/notification_samples/create-notification-end.json0000664000175000017500000000143513656747723025726 0ustar zuulzuul00000000000000{ "event_type": "notification.create.end", "timestamp": "2018-11-27 13:46:25.496514", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "notification_uuid": "e6b1996f-7792-4a65-83c3-23f2d4721eb0", "status": "new", "source_host_uuid": "d4ffe3a4-b2a8-41f3-a2b0-bae3b06fc1a3", "fault": null, "id": 1, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "500447b9-4797-4090-9189-b56bc3521b75" }masakari-9.0.0/doc/notification_samples/update-segment-end.json0000664000175000017500000000124613656747723024721 0ustar zuulzuul00000000000000{ "event_type": "segment.update.end", "timestamp": "2018-11-27 14:32:20.417745", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test2", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "3fbe50a5-9175-4161-85f0-e502f9024657" }masakari-9.0.0/doc/notification_samples/create-notification-start.json0000664000175000017500000000130113656747723026305 0ustar zuulzuul00000000000000{ "event_type": 
"notification.create.start", "timestamp": "2018-11-27 13:46:23.060352", "payload": { "masakari_object.name": "NotificationApiPayload", "masakari_object.data": { "status": "new", "source_host_uuid": "d4ffe3a4-b2a8-41f3-a2b0-bae3b06fc1a3", "fault": null, "generated_time": "2017-06-13T15:34:55Z", "type": "VM", "payload": {"process_name": "nova-compute"} }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "5e2e4699-0bbd-4583-b1e2-a87c458f84eb" }masakari-9.0.0/doc/notification_samples/delete-host-end.json0000664000175000017500000000273013656747723024213 0ustar zuulzuul00000000000000{ "event_type": "host.delete.end", "timestamp": "2018-11-27 13:35:09.882636", "payload": { "masakari_object.name": "HostApiPayload", "masakari_object.data": { "reserved": false, "uuid": "3d8d1751-9cab-4a48-8801-96f102200077", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "64d61bcf-c875-41c3-b795-19a076f6de96" }masakari-9.0.0/doc/notification_samples/delete-host-start.json0000664000175000017500000000276013656747723024605 0ustar zuulzuul00000000000000{ "event_type": "host.delete.start", "timestamp": "2018-11-27 13:31:47.451466", "payload": { "masakari_object.name": "HostApiPayload", 
"masakari_object.data": { "reserved": false, "uuid": "3d8d1751-9cab-4a48-8801-96f102200077", "on_maintenance": false, "control_attributes": "TEST", "name": "fake-mini", "failover_segment": { "masakari_object.name": "FailoverSegment", "masakari_object.data": { "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e", "deleted": false, "created_at": "2018-11-27T09:26:30Z", "recovery_method": "auto", "updated_at": "2018-11-27T09:54:50Z", "name": "test", "service_type": "compute", "deleted_at": null, "id": 877, "description": null }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "fault": null, "type": "COMPUTE", "id": 71, "failover_segment_id": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:fake-mini", "message_id": "b5914f94-99dd-42fa-aaf3-3cedacda6b67" }masakari-9.0.0/doc/source/0000775000175000017500000000000013656750011015403 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/cli/0000775000175000017500000000000013656750011016152 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/cli/openstack-masakari.rst0000664000175000017500000000035313656747723022502 0ustar zuulzuul00000000000000================== openstack masakari ================== To control and manage masakari operations, the extended `command list `_ available in openstack command. masakari-9.0.0/doc/source/cli/masakari-status.rst0000664000175000017500000000367613656747723022051 0ustar zuulzuul00000000000000=============== masakari-status =============== ------------------------------------------ CLI interface for Masakari status commands ------------------------------------------ Synopsis ======== :: masakari-status [] Description =========== :program:`masakari-status` is a tool that provides routines for checking the status of a Masakari deployment. 
Options ======= The standard pattern for executing a :program:`masakari-status` command is:: masakari-status [] Run without arguments to see a list of available command categories:: masakari-status Categories are: * ``upgrade`` Detailed descriptions are below: You can also run with a category argument such as ``upgrade`` to see a list of all commands in that category:: masakari-status upgrade These sections describe the available categories and arguments for :program:`masakari-status`. Upgrade ~~~~~~~ .. _masakari-status-checks: ``masakari-status upgrade check`` Performs a release-specific readiness check before restarting services with new code. For example, missing or changed configuration options, incompatible object states, or other conditions that could lead to failures while upgrading. **Return Codes** .. list-table:: :widths: 20 80 :header-rows: 1 * - Return code - Description * - 0 - All upgrade readiness checks passed successfully and there is nothing to do. * - 1 - At least one check encountered an issue and requires further investigation. This is considered a warning but the upgrade may be OK. * - 2 - There was an upgrade status check failure that needs to be investigated. This should be considered something that stops an upgrade. * - 255 - An unexpected error occurred. **History of Checks** **7.0.0 (Stein)** * Sample check to be filled in with checks as they are added in Stein. masakari-9.0.0/doc/source/cli/index.rst0000664000175000017500000000037713656747723020042 0ustar zuulzuul00000000000000========================== Masakari CLI Documentation ========================== In this section you will find information on Masakari’s command line interface. .. 
toctree:: :maxdepth: 1 masakari-status masakari-manage openstack-masakari masakari-9.0.0/doc/source/cli/masakari-manage.rst0000664000175000017500000000223213656747723021741 0ustar zuulzuul00000000000000=============== masakari-manage =============== ------------------------------------- Control and manage masakari database ------------------------------------- Synopsis ======== :: masakari-manage [] Description =========== :program:`masakari-manage` controls DB by managing various admin-only aspects of masakari. Options ======= The standard pattern for executing a masakari-manage command is:: masakari-manage [] Run without arguments to see a list of available command categories:: masakari-manage You can also run with a category argument such as db to see a list of all commands in that category:: masakari-manage db These sections describe the available categories and arguments for masakari-manage. Masakari Database ~~~~~~~~~~~~~~~~~ ``masakari-manage db version`` Print the current main database version. ``masakari-manage db sync [--version ]`` Upgrade the main database schema up to the most recent version or ``--version`` if specified. ``masakari-manage db purge`` Deleting rows older than 30 day(s) from table hosts, failover_segments and notifications. masakari-9.0.0/doc/source/_static/0000775000175000017500000000000013656750011017031 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/_static/Masakari_spec_process.svg0000664000175000017500000013253513656747723024103 0ustar zuulzuul00000000000000 launchpad create a bug create a blueprint create a blueprint End states out of scope code merged bug fix? idea REST API change? submit spec for review a feature? spec merged blueprint approved for release spec required? 
add link on masakari meeting agenda blueprint hit by feature freeze re-submit for next release blueprint unapproved apply procedural -2 upload code for review remove procedural -2 review blueprint in masakari meeting no yes masakari-9.0.0/doc/source/_static/architecture.png0000664000175000017500000015515413656747723022254 0ustar zuulzuul00000000000000PNG  IHDR*sRGBgAMA a pHYsodIDATx^wSU?pt,ǵSwQПeޥ* *"Ә{}3$S2I&m>?9&9o9DDDDDDADDDDDADDDDDADDDDDADDDDDADDDDDADDDDDADDDDDK/=cqK.?񣨨h=t*.?/^RfϞ#""""sႥ-\1~K'tRCC?w1w\`X!""""|GGG[X!""""*}@µ⇫> DDDDDvZcɒ%z1jԨzW8nM̙3P(**:{D?** H G\(~,^K"""""۹J0ԇK> DDDDD6r!_x%J"""""D҇׻dCBDDDDd ᚥ-?ܥ!X!""""ㇻ> DDDDDVsrp҇`:NI'u?LJ> V~'"""Ǚä.e@hÆ j wy>}8U~7֬Y!?K:}bj.4{Э_^ѡp<=NXj䫯:|j"""""pZ0/}nsn-H9sCTSW 0`ɒ%\'~8'~YPG@?8'~{C`mRQ QVV6dȐc666J nL4iȑqqqa[ϟ/5kVKKZ}:w޾:n`8ڑp㏗qlmG5|p}XY-0~Ȼd+ d~3f455-[۷/Vm Jٿg}r۸+R܋OypJDDD ?.5g?7|}.@pCCՏM6acʕ[~ĉQ I${7a4^uU:Pm DJ_'⇞=֮]+/rk})V;ՓVXfMHH%ijpw J>w\6p@chWRXxވЂ6"w!qƌ';nĈiii:gq iu0OMM6 9nH #I&<$!;3Yw}W娩 %h)n,ZHOz*xpWƚEoZK~k?e.ZV ?xٮ؃L5j Y ƍ`sXu &="~`.\#V#""[Oȁ|z,W^)__;xf̘1k,<3ta;)뮻]Wq`ԩJK?b.}LMlذAVٿ?/b?qDm#$``C QM@ټΟ?hTg8~a7&t,+0+0Aw 6>q 8#GVX_ꁄ,aR2M]J 3]u7$HۮfiWCC  {п~ :ol* ,}U̙3G-y猥\(~DEE{pHaaAԑs̻QWPn5 ,}εGtt @X """" ׊>⇫@X """"R>IS"""""[bcEADDDDd#&W\4~>: 䦛nSGFDDDDDVqADDDDD񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈񃈈9񣰰p…Ǹ;#**J=""""""ď9sKc """".rNPsyuG@DDDDD]\[޽2~Y"DDDDDs/""""" \"~z{GDDDDDDb<""""""YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`?ADDDDdk0~YDDDDDV`F?c>}|}}DDDDD]a ďkĈh&Mu222Tkwڼy3NVZTv?IV^ݯ_?TMDDDDd?p@0` 6֣I ÇOOOWݩ믿ݮY7xo߾_~ÇU5?裏N0aܸqMMMjA+)}Ԍ1­pXXd{ 8pun6""""r pd ōYfh޼y:Νk?ʆ "¶---jF$KqUm o;sL&CVƽ-Ç7n,۷/6+Ȯl"ѪUHϚ;o35"ϛ6n4?|g m +ԟ.KCC,;S?ӧ9k82~m6rH%``)f&C@1C_~zYx`1dY*|s=`s}6Z&=fY+0y cǎm~ ۆ <1Ƙ!N8ᄤ$[߿_r#I/RUW]ercƌ9k`ҩ!cӦM?rJIƍ &MBz0i]ܲeKdMװ!C&[CC C}g9t萼jѢExچO!+˧юo>psԩRVF2#\lADrkIѹ0Ĭ"//SRoo>sLQ]FZCW_*^=$k͝;W@TUVгt<~̟?ͽ̹볰з1ƀaL&&#} }ڵkwAƥz1_*$- GaU,&~2"T'CIEBRW\?/XYoE9f̘JlXa;!!7Dd""05|̭H+i|'06JKK Fh4iWX^{?뛘y = ><55-ϰaF!Dg}V`:Z$p}Dž8"C83kjjd&Xyȑ)))o]xyycY6%o< wމ=`w,Wۧ?F'< ~EFUW].\_ hYf 
'(#ilAD*慘l555QaI?d+'Nl8##C5i05k+La@?ɓ'coiiiT`oތ 2a|СCcot۸)S;V;v`Q10kF+lj+¯m)hF 0ϕW^CI#miH@"""LNg '`E_"SZ,PXXpB̟??44Tu2""*ؐ=jjj(@f̘Y#&&L8ai7n̳1'?`cVR'.{ t4۟k"HIZYYLR8l}OwtL>!p0JHbWa̘1UUUj_]+O?t=~DFF"-Z(??ӸN֊jڀNFD\"~`9:f꘯c(~W鵷ď^z ALOō7b5ď䂂L1ѥ+q2 N;Mp'N,7GW\`|&_&-!ٳlcE_ ,Yog/o3ϩcY6 e)_ވk֬џgDWv/D`ZO#ICJ5u?p{N: -{N3!ߴ+)KmWڗF!v+7n-^Ǐ?~3O”xOBddñ1+"TDd"$.1a>VV";RDnn.f=c0E>>---//Zm)PBs#Zj/jA,\͜vHΤf|dF5ـ< N K Tgee!$&&&%%!x䠱 "0jƧQMPhT ?ȓp rECCCUUUII|~"xj@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@FD.5Ө& 4DI8K4jj"ydDDR3>j@lADQԌO,6Ш&"0~'@e-"tjƧQMPhT ?ȓp RLJ~xu׽'sS3>j@lADE6v٧Oc5GOZZZB r ncŒm۶]s5[y7nĶ}o¶/e݃)QMPhT ?ȓ8 ü\za!((( i[`9w3333sm#00GHۀ_L#u`< hoڡv]v-QNxj@lADU2Z4ۋ{ꎏ&$ ɑ ĉ=رcǾ;w[BDnD4jj"yd@@aVVVrr|GNN(|Q3>j@lADY'%+ yKJJ/OmEԌO,6Ш&"0~'@9  $ j ͆b dRDd"$Ȉ {f|r "$Ȉ@@hiiywzǪ^fmADd"$ȈCCM6ͤkll6Ш&"0~'@FD=ǰaze^ X< 2">,ӧ=FomQ__AADP=n_5 @A`< 2":(z^/44T?AAD,R/͋&M_|||MKK < 2"Arssg͚շo_ccРA=333ejj"yd]f\ C}ףG6/zlڴI/z8p 55)TWhTSxќuY?oF[nY~}]]]{5-ZҢZ=ydjhhXt9s>Z ߃AD'l^^^E__ߠ 9qEE&P@z͛7qw}wa-?c{h/!T׿1~do߶mo'"g*EN:hhQ]]Ѐs\Q{@z3fnl!/ҥK_iu饗 3I DDnYpknn^h.2kn!&&#"WsӒGdddBBԄЂe}vjF5 xG=uԡCƀ;3?^n1D~ 'y9iFAD8uWM\GO_ILqq1.= ?⒏I+&vD.GlllJJJNNNII^쁝m4jPhTSϰf3W_}u}駟TkyKƀ={AD8uW&LJ pAk + |2_Q[ڦl2mҍ1"33S*<<)d׫&oM3Cm=c۷X0/z?K jjNgsQQΝ;%WL2xA'xDZAD8uÇ744`jʦM&NOh@;i$ipRmoA02^ `s=ְ+rq8g\X^ѣY/z}1~tERRe$<+^< ;w?q""7*L˕$@ Zmc\1MAիWz n qoV1sLc@{1&K,a5vFi#re8ʙ"f~x{FDDHѣvmmyC'{jj^u)#@?.«*-:sgDD䶜?b˒:$*{$ ˛jm.`j?UMǜƤÆ [reUUc2\.oB s9r$&Xwa 3fԩ2c#$$$&&&99ӢNv"Ts9x{2K^qƙ|p\i4imaB-#"r7N4b天I jll qXdzs{駟nql˭\,H?@{(\G.-ɓ7k,]VV/1/ƏUVmżF E///GD߿>(G||a\^xHuGݵk׾}###i261[%?dc06|yU7o- 6l؀4i10I:%#""b̘1LʇLJ udxG+CCCѽzyV!'PMPhTE8qiӮny'Ƶ^k2(1~tp#ȑ?d؈/fTm]U4559`},:JӧOz)w^^z hWMIP[a5+$& fojU VB2T@jdWď?޸96|g79NI_GF 6O1V.T233F1SOs)))%''_ƏK1~ xcl7I qCDHN4h!FN`5F,5n;`},ŀ)]>l^z_|pqoשyJmWHЯ.Bd?Ll!C[O?Ih/~M6Ʉ@|WxdM"klԶ?߿'nݺ"8yDSkۛdF5y2\w^V3x:yBV^V-`7to<+NHH1\}80w@Ddο4balZhjY[GSڢ8qUyx勳fee]G ULeGSˎOJ.?uSMD6IUMwTkAAAF,}M&|B5Y@mQMj0+/B\`Gзq 
Gzz:HŌH}W]uULLLnnnee% D\xdjF-kZhjY[GS`!}ܑ4仳Bp#11|L̄0BD6 դ}C7R|{n ?^ F 񫀡R/b*kEDDb3WrݾX 2˜0cfO}2331c=//q'#V\{ 1Bٗ_&βl2Ky%z 0\tzQ&55FYY "ƍC D?:!S|cXAzDv SjD:$/iT^~kꫯիؿ^-Zoooi;0Fe?\5,qw}7DZ$d9;.\G'0NMM f@zXW>(==ko>\&~59wdN& 4>'B]]]4??& $!! \ /p1 "AذaI'$o "\sPfrB5Y@mQMD60 BOF''I#00+O(?~|AAy'"r^ȡ0L믿=z^SN .--tEfK4jj"y8G+++򒒒"""LBD.F"r~嗣KJJj[yj@l^) o>fQxi$"Gq%lٲsďbwW9}Q,6Ш&"t?TUUUPPgϞ/\?>C=3sHD3#)z$''ggg#ȧ]SC,6Ш&"t5~N(91 ?sꩧq&2HD6sNc#===??_YЩGQMPhT gεJďLܲe)q"`M=ȓHD|ӧ1x=0...onnֳڗKRG,6Ш&"X?gN7~}87md@O 1xi$"k`RWW/0s}=uzHdF5L/4LNN ظq13&55U?7DD݀F"&4UUUǏݻL_c:4jj"-pMJJ@k zj```EE D4Qav QSS/_fEsݱc=ݺ術&TDd XWWWVVe˖ &ܹ3##+BDݍF"2Ns=Fz뭷EiTDdę'8U5qqq%%%555ׯ_|9k D}xi$Srd}61iiioCf|dF5. $///555111!!A/xWX!nK#u #ˁbbb*++ݺS3>j@l`z(,,AςO?T ^z-[e";^0#sL`jjj!ȄO& 4v8@pz6h !liiiII/HD*,,lsځF) `_=術yPMPhT ?@NR8p斗|߱cǰaÌ dҥL Dd_4Q0?ȶm0M1yP1/AިFSSg=t2 jj"}?СC dʕL DdG4Q0HLLk))f> 1}A䐊~Nf`B5Y@mQMD6{8U-EUUGJJJTTԿ!C_|BDK#-"†9f,X:iQ=Lj@lMnii-..NOO ^~I/4WYF"?[g_|>}`ځ)|LPڛy:#PMPhT /~NWiWTT'uLȎ;0xNDK#)U:t<55/6m~~~UUU Nx dF5٠[H4;;;!!!<<1,X#߂ED㥑L))HVVVhhh||j@lpjOMM@"""nVTT;w=m4 "{ᥑ444TUUa4R[[ꇅDd@bcccbbE o>}_=삗F(33S: DryysL5z;.ԌO,6Ш&"8&~@jkkq"ugNN~馛zu͛F,"/D=f g2d 2y!N`f|dF5adhnn_QQQRRR)**Bݻ]Xw֭[&u /D=L)ѣG+@h3+#r nLj@l2\ȟ!jkk+++ˋ?@ad?su/D3o1C\Ӌ6]ȯjY#,,6Ш&"88~zCA|7K jc"H0bǎgq]pew#gES=YdF5GR]]]TTꫯ'yژ}4y,LJJJz!cO?Ʉ|1= dF5)CH(XGppҥK dĈ-'ՖDD॑a^㢋.RPPP]]#ԳQMPhT ?áC***rssHPPPT""K#iѯ_?4zyyaa yj@lzcKfff\\\hh1 0`˖-H)Oc4y\+z;>BOHH*--㛯:>j@l!cKSS 5]C ߂ED㥑Cz_SSE;w쑙fdhz5jj"H1~ $444 W^^WWWUUU^^QyoщȈF"7#=p뭷E"=Yj@ljd B@R[[h"c9=> q0uDnF"w7o=dy\ᦛnںukEy6[SOF5Y@mQMD6p2Hbdee!ls=rgi<"sxN,\;nFJ;#!r%wiljjz'FaxQ1lذ3gfffQL6o߾)7dѣ;3,TDd׌ DR HӧO7O \p!&GÐo߾W_}բE暫oWneka 1G'u?ùߥqɒ%F=֌3Tpu:t馛~oo0=K=dF5`l Ѧ"777111<<[ouGM[{OBw!qbܸqM4I?/AeϞ=x,.I'D=fX#nfƢ".~c`S Kgׯ:Gzj\&e,zȿ3džrQ.Z& 4.? 
ڒ 达& d̙:rP:2nudݚ40~[CѣcSNfK/-5vر~kV3fP6l: :+Ϛ5kٲe.yBQϔF=DfF>Hs *466~}Q^צM.yyy,zؑz5YLmQ;"3p 1 6& /0!!1OG&ׇ#l{ߑ4^8s.͛'⭷ޒ,~8~W)/r= rA VZ@(OdH~::zFZz׮]« uw4K't&d<zK/}\qQuWٳgϾER\WlHуW {?~t6-`l@bcc1Ryyy%&&b/ʺc>r{=1a„4 j<2 ^'x}L=_˖- ً\MJ><ԦVA&zH\Aު/{cǪf#"K >q;..SXď˗aWv{;fϞmL]w݋/^eCM{#'v?8n)3繰/('cd57X sL  2G',=ydz?<\tKldR__?_(bE-fŲ8qExxYĘ] WX@ܫchq{#pmn_Mz?+\/MLLtvΝm& XdاNskb|SL 1yzSSSĶ2"K@C[@YSS5n8tɓ'㒌nMPMD6p񚛛1w1tM XA" Ȑ=K'x ~]w駟TTT`LKNNްa< z\c&%%!(祊K1-ȅy üov 999&"}R&"x@AO )))QQQ۷o7&{ڒwhii)Rl֭[wvi.'|տ[ff&IQQBg}&}ذa`6wO>$;;[XgedXas\y=999~"~_)T < ~>ɟ]<( _y֣>*ߙkɟa0#O?pCx{{曳f2"bȐ!$v /Im|||0,=OXXBK/twO:NRO"r["u߿?։/..ƓϿu\w c,̴|\$C DdψaE :J ٵkıbV(--O0XqdyrrrrdC.55uͯڃ>xib &3`./J> ~.Qoߎc˗haƌ }N<{cܸqKV?g @Y@П1|2VTTTPPRVV~;B5cH^^^JJ !/G~~>?6GB4~0bD'|l$i$~>+E]rW"n&c]t*tO2I QjӦMaC-?!vd,}\èEWYCB z$R`w_j"'FH$dDt4i&p}oa<|< dcؤM^^^x'^zܝ83-plCѾ7nر#33S=*3Yمd,}0ZKq{Y*!r H#@PMD6z*)))((@@vΜ9 oVRLh zjqq1JJJ''++KMm]YI?FUVIЅ!T#zT6c pŁꪘ.=i R߶@(U+ DdϋFbB***1xw"JRńZ@nKDdcT1B {yy;VB1{u?Piiik d X 8K%K <#XFO^xj"xQz H yoÇR${W^t 7$%%ʯH }sd#X `tVwk'_lR7nܺupA_sTgҨNA0$ ď 6 6̘@/^e8tИh1~]@)cK0]ט=[=إ=D@u2ڴ4AAAk֬1I O>dXX^~ìB݇µ2>YE}رc?cooo\>'?~ DvuI'Nq00bxĠ]QQW_no߾kRdɒB݇…Kd;@:(zy;v onnf vލ:`+dj1`fee%$$ dׯкh"/M^{-z?.\(~A8"DDDM#Fx뭷p >x GIIIMMM٫p"tIa<}XdtEm6K%vik֬  Zn!Cx{{4Nd?X {F۷~{СzZ7t֭[}||pqqqʐqAņ؜z8_gR!icuǥ}g0`F߾}'OO"o c" ?p"ԝ?.\%~A ."TL6M]aĈoqEu1***)))++ seee}}=DgΈꂂ$*u)=^w~tI#q9ѣo+Vܹ>8ˍ6d.?1AbCR6lI_~A955ƖD ˊl㩔Y^^/Kelʔ)ƾ!"Qo߾:W^ypV?Gzꩧ~zڵ4t|` ԖΝ=,]EbمKď{c#,}0Z^=+E\0e=@@>OOO&@n/--ũcGi… O;4cF|||RRfgupG.CÓ'O^%8қo){ .Dv`apݑg{S ™n1d}綾^¥ "D˗d)Ad/E1D`@9;;;===99r ؿݻo߾u͛7K/ zpH|瑝qvAΙnM 5551qU5rH\rrr˱+&N(Kw^__ߠH,zY:)V ȕ!r0bŸoI/B8"L%8ի3#~U^ygk⪤~.}"Vg` pZ`äةq X]VVV]]]UUvyq]tE6l U &=lAëBC0bp.//G)((IOOOJJ ܻw"d %8?$<ֶr7|3&5!Af2 gΜZiäNuȭ륏#F`H J\ےׯ_u222X YBV>҂(h,#`:"(** 3cg<~|DzygIسg#<"N>d\:wc pN5fРALb"6@N}w x0nݺ5666333,++MOO(555++KXĢ?:2F`@?op!HǏYfɻڳk.hwܩ1~ ,|߾}Ny: '>"##n޼yIIIyyyaJYCCCuu|ұ?3!honnfуFd; !---555u<>s}'Nc c'F֭;Seʥ6s8 G>ǚsN4+Kp{[us5jDd- =~`#3u${Z /ȚcǎEP0?fΜilx\׿p: 9櫯^8Kx1os@W&޽{'$$ ~`„ʾJd; 
tIV-p4=~HxPmeӧzhxh\rɑy1K,^مCKx`UUU^z CBhlld  t+Z̞=[7o^{t7 >0ihhcOL3a„{7(( SFe"` h3~`x+Wj5s뭷jÇ 4-?"""TѦN*?Kb p\` A)͚󓓓1t"_~'K䍐 $H;>>>555//\)ـrǹ#X`ĉO;4^燀l7|>}|g; مKR\';; $...&&[n ^X V@ʅ7_ ?.ڌ#8ΌB~Ս=zѢE!!!m$RO?TL,d,} ӝD;wwq!3׼< XT &IDAv!ûI{? "`Ͼz`m= :Tms>}L6NAv9 zCKKKcc#:Eee%EyyyϠr-` G*1{:y~.ڂvͽ``NK\lr"%*-/G̐JdGd$*&C}?.=~AN. 0hyzdƌIeM# "}d;tc t<z!9 SMD݃<~ț0~]to`\z2s̆EC>&A݇KbbbX!r$6G``|V E7>OBHdNAv]r,9E8u=K3Y!r6ㇿ?un,}`a?.#99Yp_!Pwضm[߾}q2~%~A"` n4gt#GE!ڰaÅ^'?y-?X wc0~]})--ҤIQ/^{9lxN:i8yJֱ`z) DAv!uyyyfffTT|Ayꫯ~饗6m(U[n}嗧O~'ʤo߾s###322%JVs` DAvN!*///!!!88xݛ7o~衇ƍ' ?1|7|#<l߾G=ӳg>3$rsx),\믿޹s'qqq999x?:v,}B_e1~ ~ϔfdd8p 00p޽O?_>cW^yaô/?7|LslܸIlɒ%sτ 0䙑g c߾}`)2ɏ?믿ڵ/***-- *&x#YǞrw,u7tꢢ"$P__={ر-[lڴ{Gg͚u9wo $ӦM.\ML?ؿ?YfyOdN3f8pC뮻Nݽ'@?☑1pz㒤GcݻwE C8p@bb"PܢrMMMx=FfWyO}[f0~ݡj.FR̞%ddd&%%!b-nI0X9 蘩K8]'zD~ݺu7?t۰a÷~"N I*য়~‘%`qHVx{#FG`&y#99iM"GAAG*$:: x9#'~<?An X5k DAG!R 7:Uuu5&xFrrrH0p%`j.3uL%`o>L"W_}5fA+[.C@~ %$N !Q@GcJ1(3$l !I3& ϛ<ӓlgy@ u7m|$DfL*>5fjcWXXLY8%cj.=SvL%WN0ҥK̙3k֬3fL>k޽U>Z߾}?_YA FH;K@ I!!!k$ D 1$`q8~< x\]yyyxxx6+>bÎ;~7Ν;xJ"s֭[ߏ,+(^#A$NNAB0-p<8*lj1q8XHxMz %[z6KL jY 3r>>H)eee8${I PwIY,}PO^1~BD{$v0nܹ3777==cǎC LIN:uݑiii55773Y?X";b 7cn//'&~A= z]w?-DGGhS//KwK{= {?B@FX|ď~_+VVV666SL,8t{nȐ!555999rĉ?Cooo\-233:#/Yd̙| 9pV(--Eۮ?X "1~B*@BXbʼn'(So+**c"""‚*"##SSSsssBee%Juu5ngddaش4d,jjjۮ?X̤QW1~ BCzѣ333?%p9E'ؼA>\^^@Ap޼pu-~A=,و\ :$駟޻wo-w_DyW3221C9usrr$x W477hтp =+"х D6b ׁo9s̓/^PAE!5 ے@|!{mWD "\ZX!999Ç6۹sghhh\\ Y)B Z"c=V' ]pu"#K=>"[0~IH@x@x'(oR)t H<~5҂~nsDddixX \EX!9őLx&### `~IQ7000<<<>>>==Jd nFZ&QdQwdzA$X!9hČO.,,D@#,,,&&R1{k"E* >p%c:0H!]?_xtB'Hh/++>SDvy҇\"`:xc͞=#'']#!!#//  "<~HCNW>F,Y"O__o0V"@A &Cu/E]'/񣼼RSQQ DIC="[|ʘ9zh_ HLLE@o?~HM:|Pkך @SL|W}ȱgu}]\U+9 D]Aނ ?vEE†mO^jhh *++O<3<a,}tIw#sʳ}šoZɁ=X!t>OݻSO=JMM :Rbb ^~e___?MPPPDDD||" :.!މ۴?LJjk/~|'22dHBB?oVǽ,,u =믿>s3pq]^^^XXqtlLo*++ %{`WoDn0)}Ig׷_~hϝ;wʨ$\+@ard tyĉxĎ^>(EAAAFFFbb""Glll||<:[VVb IMM s??ay10uxw޽jժ3gޠYbŦM0Ś{LjZͼ{7tŋqD d3fKKKCz_y.˗E`l5?x O?}7b< r{_t)lr~~~x8O<1po`:=^{~ӭQTT^ݻ7 D` W߿ҤI&}EDDA~~>BGss3Fi=)ڎ?~VKC-&qs̭167$!<ӫ ׿₪ocǎķ98p@s'ï 
B'XQQbE8䘱~!K/ᎰH'n׾} m̧7dQ<'ܸ-gΜ9x6|ЎM `GV[PPV /wމG'/ٳY!jY] Y\䚢vJJJB@/*Ӕ'. D.fC-&d:uTLی y\ ks9K,y8 `g}vuut=쳲}_}]>#Fx`,1 d$xXm7.^D"+N>}on@@@hh(.RN Ⴈأ\Jcjp1vj,z߃4tPL;^}U?`$>O???wV7|IBXz/_g̘!;wҞNK/$ǚlڴ Z?0Hpx&EXaҥƻ^bͤSO=%~ѭ-[L$3F?KgΜYYYYQQN?#rt,-//]ȕ?"""@{MLLZA_\s5`J\V ,MZ yIyk Uq%_Zr䬳2Ƙ6u?̙#{۷o1Z˒~mc`q駷l޼Yx&[?4wu 6Ż+Ǽa|ߜoZ@DGc6IXdɐ!Cpb/--RDFFb hnn.zNUU.`=~?pΚ5KN{7oZAvڬSO=϶9i'&]w| /PVxW>̹sJ~\ԩSu 9W]uMވ4~pL|7zy cƌ |||c+$z^vE K.5ޝË>N:BA&pyBXbСC%x;۷oGujj* K𨬬ĥW+"7F/F{ʜ?6g'x|I)+caǎvkV T 4o6~bO?7&:7 0k.y_o __`!Amg}1#ǪU<)YBGe~W!Ev"vc-#Gh>~viړqCS ܖOcʞ?|yYYqe I.\8p@S˷rɤof?ď!A ۶mSOG;MbB,a0p9ƅ =p!~饗O?p2]M?I'I@KmmW\!\uUHڣGmaT+Wƕ544Tbܹs2~0kpj ?:Cz󟲓˗[ǵ^+Ol\\!c,)>,Qnw^# 80~S/2V]ˊ+).~~~Oڵ W=Ehhh&Kj7~d6)xm?p-/Ţg}Nw}l'%$$zcyɽQaN6ˍSNǍ7(Ǽy1,uت=,}u Fsʻk9%&2@$4"` cu_`\8p9 z7FXp+(GeeeWZI ND&h9mٲTO?/+VȢgy` 4,,L[Q***w XlH⋍'|Rd7cTܑZ`mXj$z'X d3fdgg'&&GBB&R+vqDS^>t[N4 7nDE0//>Ν;qeA ?p@JJs.+;mcdܹs?… |C 'bd$6}WoCZkg_W\?33S>$A|С¶mی+{r}Y̛{.~|hdzdra@@7| >7l؀1~אAO>dee%F$siۿu]W^ߋ/ "777)))22244W\< FTKDZi/~.}+}~؛4H°X?C=$x?Ks>:Q' D7oapc^:%%UMM U+""sxM/4̙3/.Gqq1FNN*~Cry2B[$~G@:z񳥥~;S(d1zyyS?+V?-{O߹>^2Á<rǕ4hZ|G1g:~ȳc[~;Ņ.mf"s paIOOmСYy睧e^xaxx8㇛+KF{&ꫯ޸q#zBZZ$ Myy6 W="Wɑ>ZGm2;ǯpgffb)&˖-`0b5+0!!ͫ)\<447??cт>߶m+u֌ 8!Evj_uTTj{m67dG YYYmF\?tϞ=X~d3\:~܅IZ@Dpc(hhh0R9Ri]v-(P?`js$Wzw1׬Y}.zxq=l <~@p:dhsFI-[Hcԩd;k3`Xc}:=` :؛,`?XuQ>,gGUUU~~~RRRhhOpBn.p!̌ 3ܷonYpAx9|cƌу'|rnn.^;ďFEEEGG#xRsg_"1,K>İU\p I$B0|e˖},}5> V;+**0=p)SxyyEGGgdd \ ^/7*k7x~8<<&^>())A]Z9/D=Vb5 5>q855S;v1B)~w!!!Du 0 :~pBu?6 "o$I"2ZX,}q2777>>>00^ ]wU^^٪LUՖ<ㅐ3 ~د_ >>>ȓ ux 92xt>34B9ƎchUMt-~ 3Ad#\,0 տ__ /pGDDlEI 7ܰxbu9@'tRo-[#...== +#r m^Gn(_lC=oaS wX0ZpD2P\ .H 3f۷o^;8꒒um߾? 
"&&&))IGee%d#CvMc„ {%O.`z>z9sWȮƏ:ku#..n޼ydYYYqq12F||<aaa'&&a+jԝX {@VXeٲe,}Lu\GZZߏN뮻#77%%%!u3NNܨbEEEG,}(U'9݈>!Czm<&N7dff"{H)((@@Kee%A'cR;֍ V,PGO&PMd3\/0UeVWz:{o}VE+GQ]]m<y>z8yj";92cBH:7~Fx裏p^K9Nj/~daܫzPK\'N?Xf͚/DτO?ׯtѣGe.̦l,}z5쵕jhΊxzc)))IHH y___ \=Q=Ccm۶M/')<<ZF=OCCYg%wiiiQ\M@ȃA Dd\6lp"x z @(,,Dƈ x8p 999;;_yH88xw,}^}+:BkT ?p9Ff1[Ճo_ ~dff&%%!rǧdeeFp9+`̹] X  T ?p!ƔTW}Q$b  MNNGYY|CJ^]Kd}_e<&7xIj"._y;ܵkWbb"GyobpM,}P{L o+@0a`bQ Qdڒ/嫯zڵ>>><=ssstMR/.@vAe<KdF5i!//oC ?_ve?S@|d SXY] vb2) wIOv.]tС2%o^{,BHKKC䈌OMM-..F>=~ɅA@p=/&z<7ȽA&TWШ&"?pIEZ]|QCvi?CII l$\UTT`s_y0{ >rnQ X j"WX$>`̘1SNYr%Fnn.GiiK nWVVJhP{'Y}O?'3Yr_,}94-\PmFd'ƍ3#G|"##SRR0ը2ӿQA]pgKAqF4@MA?d9sm ǓO>k.__ߐt%ox/>C\bH޽Y!`RXjKChTDW?S񇷷+\j1п CRb郬 pH,{yWX s%jڀ<;wE MII)))ill4JI+@?r/,}P{TШ& 4*z>|u\R9BCCcbb233 zP • riĨ>=+j@b?75Ȑ!X]]]__aX [l.92>[hTDd\:'#777;;;''@ C3AsHw]1+-A!TDd-\O񣢢?ab+ ?>v3YK# XQMPhTLGcccCC~|Z<Kd/YK#FC@űA!TDdc@ސY1xx &Av4BdF5?pթYmۆ)Kd/.XK#G@eA!TDdcPMX BqHd,kb,F5Y@mQMD6`9XjnX!YBzPMPhT ?z>;Z jX .QMPhT ?z>T jX I'jj"Gu*8h c\Kd9K4jj"GOu7)8h c\+ү_?,}PǤdF5ـ'`郺@7! <񊊊N>duLuj@lLJ+-u)8n Éd,̟?9I飠@- jt,6Ш&"0~x'Kh;_}'zOvJġ \,}PWQMPhT ?<?AT__o,;/8t c*-B5Y@mQMD6`l,}#B ,,}TwѨ& 4r<HssZX!ga郬 F& 4r<@0BYGj@lX gqn  x,}udF5ـSAs2@ȑK~#/i4jj"Gb郜ˉ d,#AVn#TDd9 NX!`lF5Y@mQMD6`<,}+pViy_-#>sj@lyX W32HTT=/>FhTDd@9Bݍt,6Ш&"0~x>u8䁌>,}TѨ& 4r5/8y cKd;?B5Y@mQMD6`$,}AOYq@ubc_g郬:F5Y@mQMD6`,}krp ^}U1j(>:҅j@l13gA.X!bE!j@l~7>e9 d_,}H/jj"g`\# 2[0 d >ȎT7Ҩ& 4r}+@pΜ9,X ;$TDd>@\h clٗIdF5ـݱA1X!۱A%}I& 4n„ ,}[pLĵ2@,}ݩΤQMPhT ?o @\k clٝt',6Ш&"0~ &>9r d>;QMPhT ?K䎺r d>;Hjj"bQw@\q c&KiTDd7n-@u+uTB5Y@mQMD6`pS,}d,劋G-X ;RJ,6Ш&"0~#>u_E2@r,}P~%TDdw+@@n:F5Y@mQMD6`p;&>uSu2gO BLJj=Hjj"ۙ8q) JO> r] .=Bc郺[dF5ـýꃳr_Qq遌Kݤw dF5ـýA; >Ba@u/j@lFX cd,P{X &TDۻ`X0JR@AgTJQQB*GZYaۨЩB%"f"w *.M|/?7{}gZ{?0ޟZk{}!ʏSYz%Hd4@PqW@w;,P >!GQ@*i\s5Y]Hd4@PaP >!GQ@mh}n& j)F"8Z͌R (? [  Id4@`h}> j)L"CdwP >!G@+UHOlZ3P~4@J {:x(xȀ#h}uX]k, hq>P~J'23Zh56mkZ`E7\|B! ʏК6noO Yb5J'2'ZheY $2HdYx(xȀ#h}]n eU|B! ʏtB"DRR (?{H $2Hd)x(xȀ#o<@ZtB"DOR (?r{"]h S|B! 
ʏ\9b!@"KC)C@a>`[ $2Hdx(x[m+WtK#?h}嶵B"DIR Dk!C/4kW=W^y%i/WN<d…g}5[;u]{ya\pAu?K.M:ȗ^z6nܸ =C/Uf͚.1)?C)m=dV9U60%y6c $2Hdx(xA~E? [Ŋ+kӧGk,Mע!*Bþ/&ٲeO~*Dqmdş;ŠAsBtEN5@ve $j.H~xgpRs[s{ٲew]"e^ _{A6Yitimm5;͛7[tR ,upҗ /h斷>|Lsdbz V/?ۏ:(m*~'>_WteَN=ԊwQ}]]ӓO>yיmJ7IwQ,uD=R &&F /P\~Ƕ곿˟d ⃛knaKme%:ɶ3:ɝc˖-A>z z/_/Zh=t;찃YW^uŗ>Is5@LIJϥ^zϞ={ҤI{ŵU].{W~vm6OwkU:T(.J,% I VȲP >!PD>ΝeV^xPd޼yJ.*guSN))IC-%m7@ҔG}>h$NGW z#hVѝE]nyN5>o{Xj՚5kU.^*S>餓oP"V飏>jK.raXBU]z}8GJ+(zk<;U6k̙J~k;SWiP~Ng/lE,u5HT~O}>|xsyqƅ7_ -*cǎ?(ڵkG2"G;q6{'~ 4GтFٮO~z.ZFtn"+:tjC=TKtz{Q!3kvi'*+}Hsx$Pطo_U˚^r}WiӦEl{zI۾JAM,߸(@ .?[CITjzt+Hdx(xA#g?ْ@Yh-=R%#GN:dЫ;oڥ/;=\儊ی矷E8(]3ƣ[Q~@"KC)C gqƖ-[\⺭bݯ|+=7nI駟nw|G{0_AF>LS~vivN*ѮtE˵ ̛7G{6U-Y_~(ut~VZ|KKZ+X{YGc~WUoPŢHjU6mcwׯ=:vɣ=z\ꉳdb?%_%$xb+'47zuwqᅧjΜ9Iی5jHi֬Y}3?$x(xACDO͊凬X¾ mZ$0acօqr.{!:ȏ}c珞\ޔ߶[B[PI駟nkkRShzCU/iƫBPcC͛ivuW~vF<>}vC{~/*y :so &Z~_OGq0`@=~FWM=P{Jm#*GUv*q| Wۃ#xcVȲP >!PԶ^h'ꍤo-h5^ HME]d{5j-kX~2:^~|ԩwߜ9s,XIE;#z2ejoϩ5@ʏ>چ=IIL[rV/?T]շzovt%wtرڂvWrWD(?(o}׍g|Bm!of9sܹs+ZF_Һk[~2j̘1CYCt?e}-_z-6-W\{i]ݕ\/-tq]}-t6BޮENY*r-1U*_veZ9͞=[ &:]\}>nkֲsZ9X~r)O=fݏ}'^~~6n5zh<uFwݥq1ح;z6tHd x(xAj^~mps̩X~h%rG,;JԼAjhxz/Ci-ֲ8;mG;Cƍund] 6hׯʬx Z_+x͚5SSLb=o}/C=E-Zetؚh {Dϩq=ˏ/Ɣ#!c.d!C}կk{䯽s9Ƕd,i}$jJ&m kixb{HU?H{{{~4gϞ=.ho馛'I47ވN8y睺 T(?KHI4P >!PtG!ZC-ZU 3F˵հ^{*h!Cױ;e%>kPtzc`ݟZG[7|usΟ?_U,\𩧞RPwֲk֬ޙ4i_\ҩ4{iԔ:'t1UmmmsCW;<=WDC9ľ$\yKߑ%*D;0{I(?KHdix(xAc#G`dON 9s1}N?裟ԧlD >}uQ&L8:蠞={F]Cy]wSQ(Sk/?OLm^v|hd-+׮]}ҥO?*'xB֔lG|_6mMR>њUJ6]wuر^z_/yȐ!TU :tM`[_,X`?=1J7xhwWDG5w}/I̙_EuZ~>4;n%Y֢]ˏӧ[!v<i!RsJtn-ju7v/ʎJ m95b;R^~ /YC*W$i+hW7+Qk9aG^Ǭ`<jKUo} Y&5J'j-N=T}WWc>6mZe>TvWOƊ$}UW`UW7awQ>RE7x '~vU?[n"ʅP;vm_em+]|Pņ'?1c|=ȑ#5wʕmSCHdSi|BG>Z]c}Z\x mMSRqz#A_BJ&GvvhaÆu֩X~ʉ£kM$=R{U9KzIFVy PK&]NϿƥʑEGBOⶩ!$2Hdx(xȀ&l_Fהv[Zܶ>D<J'21TlhydL4)*?hmkCHdP >!GmYظqi]h} yB D,P~ ,!@"ϬR (?ZPև V|B! ʏ<V!$2H5P >!GAK9ó>D,P~ Yf3RZB"D_P~ߤIc]vf> Y5J'2? h%sv!$2H)6P >!G!AӫICHdȒP >!G!AsUCHdȳl|B! ʏ&Vև Y|B! ʏfUև 'C)C@BM!@"KC)C@Bͧ&G y D<J'2( h5o} Y5J'2( h$x(xȀJ *H@P8$kD,P~m ,N>D<J'2(4 (nm} Y5J'2(4 (n} y dWr[B"DR (?N AAա!$2H 8P >!GBCCHdRP >!GOCHdsp|B! ʏ@9Wև ea|B! 
ʏ@yVև C)C@Mrn!@"KC)C@M򩞭!@"āR (? P=[B"DR (? }Wև 'C)C@Mrέ!@"KC)C@M!@"ǁR (?ĉȮJrGFQ:::D"DR (?ƍH5k֬>}M(sέCCHdSr|B! ʏD אև %e|B! ʏDըև gC)C@͊Q!@"C)C@͊!@"OA|B2hb4@ѣ$ԜoȀA5!$2H4l0O]2hn4@Pgm} ͞={v5! ʏFTxz!@yR<d@hnԘu! ʏW ԘC@j.!@Yf72h4@rԘC@-UNZB"n<d@"h!$21 (?Z t>D5fxȀu j"W!@y<d@RhrԘx! ʏBև<2h54@PCyk} j̲GQ1h  .!@y<d@т&MD!$21KC@-gCHdPcP~& (!@Y72hM4@En[B"d2) ,!@Ydh 4@5yn} jlذay&Cˠ.sC(?fϞnYjo߾ZMA*/ &GqD[B_4@ެY+/5 H)?a(?rMxdԩ4@PC>; T!ZBw4@P]QZBP4@PEQZBP4@@( @( (W#0hDICWAqk}@Ap( 0El}@4@TiAEl}@4@LB!ol}bzeW_ 7@ B8qbA5jTZBPHh!C,^Xx@P~uBN(? :P'@] #IENDB`masakari-9.0.0/doc/source/conf.py0000775000175000017500000000617313656747723016734 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import os import sys sys.path.insert(0, os.path.abspath('../..')) sys.path.insert(0, os.path.abspath('../')) # -- General configuration ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 
# Sphinx extension modules used to build the masakari documentation.
extensions = [
    'openstackdocstheme',
    'sphinx.ext.autodoc',
    'ext.versioned_notifications',
    'oslo_config.sphinxconfiggen',
    'oslo_config.sphinxext',
    'oslo_policy.sphinxpolicygen',
    'oslo_policy.sphinxext',
]

# (input generator config, output basename) pairs consumed by the
# oslo.config sphinxconfiggen extension to render the sample config files.
config_generator_config_file = [
    ('../../etc/masakari/masakari-config-generator.conf',
     '_static/masakari'),
    ('../../etc/masakari/masakari-customized-recovery-flow-config-generator.conf',
     '_static/masakari-custom-recovery-methods'),
]
sample_config_basename = '_static/masakari'

# Input/output pair for the oslo.policy sphinxpolicygen extension, which
# renders the sample policy file.
policy_generator_config_file = [
    ('../../etc/masakari/masakari-policy-generator.conf', '_static/masakari'),
]
sample_policy_basename = '_static/masakari'

# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable

# The suffix of source filenames.
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'masakari'
copyright = u'2016, OpenStack Foundation'

# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
# html_static_path = ['static']
html_theme = 'openstackdocs'

# openstackdocstheme options
repository_name = 'openstack/masakari'

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [ ('index', '%s.tex' % project, u'%s Documentation' % project, u'OpenStack Foundation', 'manual'), ] # Example configuration for intersphinx: refer to the Python standard library. #intersphinx_mapping = {'http://docs.python.org/': None} masakari-9.0.0/doc/source/install/0000775000175000017500000000000013656750011017051 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/install/development.environment.rst0000664000175000017500000000554213656747723024516 0ustar zuulzuul00000000000000.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ====================== Development Quickstart ====================== This page describes how to setup and use a working Python development environment that can be used in developing masakari on Ubuntu. These instructions assume you're already familiar with git. Following these instructions will allow you to build the documentation and run the masakari unit tests. .. note:: For how to contribute to Masakari, refer: http://docs.openstack.org/infra/manual/developers.html Masakari uses the Gerrit code review system, refer: http://docs.openstack.org/infra/manual/developers.html#development-workflow Setup ===== There are two ways to create a development environment: using DevStack, or explicitly installing and cloning just what you need. Using DevStack -------------- To enable Masakari in DevStack, perform the following steps: Download DevStack ~~~~~~~~~~~~~~~~~ .. 
sourcecode:: bash export DEVSTACK_DIR=~/devstack git clone https://opendev.org/openstack/devstack.git $DEVSTACK_DIR Enable the Masakari plugin ~~~~~~~~~~~~~~~~~~~~~~~~~~ Enable the plugin by adding the following section to ``$DEVSTACK_DIR/local.conf`` .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Optionally, a git refspec (branch or tag or commit) may be provided as follows: .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Run the DevStack utility ~~~~~~~~~~~~~~~~~~~~~~~~ .. sourcecode:: bash cd $DEVSTACK_DIR ./stack.sh Explicit Install/Clone ---------------------- DevStack installs a complete OpenStack environment. Alternatively, to clone and install Masakari explicitly refer: :doc:`install_and_configure_ubuntu` Building the Documentation ========================== For a full documentation build, issue the following command from the masakari directory .. code-block:: bash tox -e docs That will create a Python virtual environment, install the needed Python prerequisites in that environment, and build all the documentation in that environment. Running unit tests ================== See `Running Python Unit Tests `_masakari-9.0.0/doc/source/install/overview.rst0000664000175000017500000000221513656747723021471 0ustar zuulzuul00000000000000========================= Masakari service overview ========================= Masakari provides Virtual Machines High Availability(VMHA), and rescues KVM-based Virtual Machines(VM) from a failure events described below: * VM process down - restart vm (use nova stop API, and nova start API). Libvirt events will be also emitted by other failures. * Provisioning process down - restarts process, changes nova-compute service status to maintenance mode (use nova service-disable). * nova-compute host failure - evacuate all the VMs from failure host to reserved host (use nova evacuate API). 
The services below enable deployers to integrate with Masakari directly or through custom plug-ins. The Masakari service consists of the following components: ``masakari-api`` An OpenStack-native REST API that processes API requests by sending them to the ``masakari-engine`` over `Remote Procedure Call (RPC)`. ``masakari-engine`` Processes the notifications received from ``masakari-api`` by executing the recovery workflow in an asynchronous way. masakari-9.0.0/doc/source/install/install_and_configure_ubuntu.rst0000664000175000017500000002311613656747723025551 0ustar zuulzuul00000000000000.. _install-ubuntu: Install and configure for Ubuntu ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This section describes how to install and configure Masakari for Ubuntu 18.04 (bionic). Prerequisites ------------- Before you install and configure the masakari service, you must create databases, service credentials, and API endpoints. #. To create the masakari database, follow these steps: * Use the database access client to connect to the database server as the ``root`` user: .. code-block:: console # mysql * Create the ``masakari`` database: .. code-block:: console mysql> CREATE DATABASE masakari CHARACTER SET utf8; * Grant access privileges to the databases: .. code-block:: console mysql> GRANT ALL PRIVILEGES ON masakari.* TO 'username'@'localhost' \ IDENTIFIED BY 'MASAKARI_DBPASS'; mysql> GRANT ALL PRIVILEGES ON masakari.* TO 'username'@'%' \ IDENTIFIED BY 'MASAKARI_DBPASS'; Replace ``MASAKARI_DBPASS`` with a suitable password. * Exit the database access client. #. Source the ``admin`` credentials to gain access to admin-only CLI commands: .. code-block:: console $ . admin-openrc #. Create the Masakari service credentials: * Create the ``masakari`` user with password as ``masakari``: ..
code-block:: console $ openstack user create --password-prompt masakari User Password: Repeat User Password: +---------------------+----------------------------------+ | Field | Value | +---------------------+----------------------------------+ | domain_id | default | | enabled | True | | id | 8a7dbf5279404537b1c7b86c033620fe | | name | masakari | | options | {} | | password_expires_at | None | +---------------------+----------------------------------+ * Add the ``admin`` role to the ``masakari`` user: .. code-block:: console $ openstack role add --project service --user masakari admin * Create the ``masakari`` service entity: .. code-block:: console $ openstack service create --name masakari \ --description "masakari high availability" instance-ha +-------------+----------------------------------+ | Field | Value | +-------------+----------------------------------+ | description | masakari high availability | | enabled | True | | id | 060d59eac51b4594815603d75a00aba2 | | name | masakari | | type | instance-ha | +-------------+----------------------------------+ #. Create the Masakari API service endpoints: .. 
code-block:: console $ openstack endpoint create --region RegionOne \ masakari public http:// /instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | public | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ $ openstack endpoint create --region RegionOne \ masakari internal http:// /instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | internal | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ $ openstack endpoint create --region RegionOne \ masakari admin http:///instance-ha/v1/$\(tenant_id\)s +--------------+-------------------------------------------------------+ | Field | Value | +--------------+-------------------------------------------------------+ | enabled | True | | id | 38f7af91666a47cfb97b4dc790b94424 | | interface | admin | | region | RegionOne | | region_id | RegionOne | | service_id | 060d59eac51b4594815603d75a00aba2 | | service_name | masakari | | service_type | instance-ha | | url | http:///instance-ha/v1/$(tenant_id)s | +--------------+-------------------------------------------------------+ Install and configure Masakari ------------------------------ .. 
note:: * You must install Masakari on the Controller Nodes only. #. Clone masakari using: .. code-block:: console # git clone https://opendev.org/openstack/masakari.git #. Prepare the masakari configuration files: #. Generate via tox: Go to ‘opt/stack/masakari’ and execute the command below, this will generate ``masakari.conf.sample``, sample configuration file at ``/opt/stack/masakari/etc/masakari/`` .. code-block:: console # tox -egenconfig #. Download from: # :download:`masakari.conf.sample ` #. Rename ``masakari.conf.sample`` file to ``masakari.conf``, and edit sections as shown below: .. code-block:: none [default] transport_url = rabbit://stackrabbit:admin@:5672/ graceful_shutdown_timeout = 5 os_privileged_user_tenant = service os_privileged_user_password = admin os_privileged_user_auth_url = http:///identity os_privileged_user_name = nova logging_exception_prefix = %(color)s%(asctime)s.%(msecs)03d TRACE %(name)s [01;35m%(instance)s[00m logging_debug_format_suffix = [00;33mfrom (pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d[00m logging_default_format_string = %(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s [[00;36m-%(color)s] [01;35m%(instance)s%(color)s%(message)s[00m logging_context_format_string = %(asctime)s.%(msecs)03d %(color)s%(levelname)s %(name)s [[01;36m%(request_id)s [00;36m%(project_name)s %(user_name)s%(color)s] [01;35m%(instance)s%(color)s%(message)s[00m use_syslog = False debug = True masakari_api_workers = 2 [database] connection = mysql+pymysql://root:admin@1/masakari?charset=utf8 [keystone_authtoken] memcached_servers = localhost:11211 cafile = /opt/stack/data/ca-bundle.pem project_domain_name = Default project_name = service user_domain_name = Default password = username = masakari auth_url = http:///identity auth_type = password [taskflow] connection = mysql+pymysql://root:admin@/masakari?charset=utf8 .. note:: Replace ``CONTROLLER_IP`` with the IP address of controller node. 
Replace ``MASAKARI_PASS`` with the password you chose for the ``masakari`` user in the Identity service. #. Create ``masakari`` directory in /etc/: Copy ``masakari.conf`` file to ``/etc/masakari/`` .. code-block:: console # cp -p etc/masakari/masakari.conf.sample /etc/masakari/masakari.conf #. To install masakari run setup.py from masakari: .. code-block:: console # cd masakari # sudo python setup.py install #. Run below db command to sync database: .. code-block:: console # masakari-manage db sync Finalize installation --------------------- * Start masakari services: .. code-block:: console # masakari-api # masakari-engine masakari-9.0.0/doc/source/install/verify.rst0000664000175000017500000001123613656747723021132 0ustar zuulzuul00000000000000Verify operation ~~~~~~~~~~~~~~~~ Verify Masakari installation. #. Source the ``admin`` credentials to gain access to admin-only CLI commands: .. code-block:: console $ . admin-openrc #. List API endpoints in the Identity service to verify connectivity with the Identity service: .. note:: Below endpoints list may differ depending on the installation of OpenStack components. .. 
code-block:: console $ openstack endpoint list +-------------+----------------+--------------------------------------------------------+ | Name | Type | Endpoints | +-------------+----------------+--------------------------------------------------------+ | nova_legacy | compute_legacy | RegionOne | | | | public: http://controller/compute/v2/ | | | | | | nova | compute | RegionOne | | | | public: http://controller/compute/v2.1 | | | | | | cinder | block-storage | RegionOne | | | | public: http://controller/volume/v3/ | | | | | | glance | image | RegionOne | | | | public: http://controller/image | | | | | | cinderv3 | volumev3 | RegionOne | | | | public: http://controller/volume/v3/ | | | | | | masakari | instance-ha | RegionOne | | | | internal: http://controller/instance-ha/v1/ | | | | RegionOne | | | | admin: http://controller/instance-ha/v1/ | | | | RegionOne | | | | public: http://controller/instance-ha/v1/ | | | | | | keystone | identity | RegionOne | | | | public: http://controller/identity | | | | RegionOne | | | | admin: http://controller/identity | | | | | | cinderv2 | volumev2 | RegionOne | | | | public: http://controller/volume/v2/ | | | | | | placement | placement | RegionOne | | | | public: http://controller/placement | | | | | | neutron | network | RegionOne | | | | public: http://controller:9696/ | | | | | +-------------+----------------+--------------------------------------------------------+ #. Run ``segment list`` command to verify masakari-api is running properly. This will return empty segment list as you haven't yet configured ``Failover segments``. .. code-block:: console $ openstack segment list .. note:: Since ``Failover segments`` are not configured, there is no way to verify masakari-engine is running properly as the notification cannot be sent from masakari-api to masakari-engine. 
masakari-9.0.0/doc/source/install/index.rst0000664000175000017500000000016413656747723020733 0ustar zuulzuul00000000000000================= Masakari services ================= .. toctree:: overview install_and_configure verify masakari-9.0.0/doc/source/install/install_and_configure.rst0000664000175000017500000000210013656747723024145 0ustar zuulzuul00000000000000.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ===================== Install and configure ===================== This section describes how to install and configure Masakari services on the compute node. This section assumes that you already have a :doc:`working OpenStack environment ` with the following components installed: Nova, Glance, Cinder, Neutron and Identity. The installation and configuration vary by distribution. .. toctree:: :maxdepth: 1 install_and_configure_ubuntu masakari-9.0.0/doc/source/user/0000775000175000017500000000000013656750011016361 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/user/how_to_get_involved.rst0000664000175000017500000002767513656747723023220 0ustar zuulzuul00000000000000.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _getting_involved: ======================================== How to get (more) involved with Masakari ======================================== So you want to get more involved with Masakari? Or you are new to Masakari and wondering where to start? We are working on building easy ways for you to get help and ideas on how to learn more about Masakari and how the Masakari community works. How do I get started? ===================== There are quite a few global docs on this: - http://www.openstack.org/assets/welcome-guide/OpenStackWelcomeGuide.pdf - https://wiki.openstack.org/wiki/How_To_Contribute - http://www.openstack.org/community/ There is more general info, non Masakari specific info here: - https://wiki.openstack.org/wiki/Mentors - https://wiki.openstack.org/wiki/OpenStack_Upstream_Training What should I work on? ~~~~~~~~~~~~~~~~~~~~~~ So you are starting out your Masakari journey, where is a good place to start? If you'd like to learn how Masakari works before changing anything (good idea!), we recommend looking for reviews with -1s and -2s and seeing why they got down voted. Once you have some understanding, start reviewing patches. It's OK to ask people to explain things you don't understand. It's also OK to see some potential problems but put a +0. Once you're ready to write code, take a look at some of the work already marked as low-hanging fruit: * https://bugs.launchpad.net/masakari/+bugs?field.tag=low-hanging-fruit How do I get my feature in? ~~~~~~~~~~~~~~~~~~~~~~~~~~~ The best way of getting your feature in is... well it depends. 
First concentrate on solving your problem and/or use case, don't fixate on getting the code you have working merged. It’s likely things will need significant re-work after you discuss how your needs match up with all the existing ways Masakari is currently being used. The good news, is this process should leave you with a feature that's more flexible and doesn't lock you into your current way of thinking. A key part of getting code merged, is helping with reviewing other people's code. Great reviews of others code will help free up more core reviewer time to look at your own patches. In addition, you will understand how the review is thinking when they review your code. Also, work out if any ongoing efforts are blocking your feature and helping out speeding those up. The spec review process should help with this effort. For more details on our process, please see: :ref:`process`. What is expected of a good contributor? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ TODO - need more info on this Top Tips for working with the Masakari community ================================================ Here are some top tips around engaging with the Masakari community: - IRC - we talk a lot in #openstack-masakari - do ask us questions in there, and we will try to help you - not sure about asking questions? feel free to listen in around other people's questions - we recommend you setup an IRC bouncer: https://wiki.openstack.org/wiki/IRC - Email - Use the [masakari] tag in the mailing lists - Filtering on [masakari] and [all] can help tame the list - Be Open - i.e. don't review your teams code in private, do it publicly in gerrit - i.e. be ready to talk about openly about problems you are having, not "theoretical" issues - that way you can start to gain the trust of the wider community - Got a problem? Please ask! - Please raise any problems and ask questions early - we want to help you before you are frustrated or annoyed - unsure who to ask? Just ask in IRC. 
- Talk about problems first, then solutions - Don't think about "merging your patch", instead think about "solving your problem" - conversations are more productive that way - It's not the decision that's important, it's the reason behind it that's important - Don't like the way the community is going? - Please ask why we were going that way, and please engage with the debate - If you don't, we are unable to learn from what you have to offer - No one will decide, this is stuck, who can help me? - it's rare, but it happens - ...but if you don't ask, it's hard for them to help you Process ======= It can feel like you are faced with a wall of process. We are a big community, to make sure the right communication happens, we do use a minimal amount of process. If you find something that doesn't make sense, please: - ask questions to find out \*why\* it happens - if you know of a better way to do it, please speak up - one "better way" might be to remove the process if it no longer helps To learn more about Masakari's process, please read :ref:`process`. Why bother with any process? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Why is it worth creating a bug or blueprint to track your code review? This may seem like silly process, but there is usually a good reason behind it. We have lots of code to review, and we have tools to try and get to really important code reviews first. If yours is really important, but not picked up by our tools, it's possible you just get lost in the bottom of a big queue. If you have a bug fix, you have done loads of work to identify the issue, and test out your fix, and submit it. By adding a bug report, you are making it easier for other folks who hit the same problem to find your work, possibly saving them the hours of pain you went through. With any luck that gives all those people the time to fix different bugs, all that might have affected you, if you had not given them the time go fix it. It's similar with blueprints. 
You have worked out how to scratch your itch, lets tell others about that great new feature you have added, so they can use that. Also, it stops someone with a similar idea going through all the pain of creating a feature only to find you already have that feature ready and up for review, or merged into the latest release. Hopefully this gives you an idea why we have applied a small layer of process to what we are doing. Having said all this, we need to unlearn old habits to move forward, there may be better ways to do things, and we are open to trying them. Please help be part of the solution. .. _why_plus1: Why do code reviews if I am not in masakari-core? ================================================= Code reviews are the life blood of the developer community. There is a good discussion on how you do good reviews, and how anyone can be a reviewer: http://docs.openstack.org/infra/manual/developers.html#peer-review In the draft process guide, I discuss how doing reviews can help get your code merged faster: :ref:`process`. Let’s look at some of the top reasons why participating with code reviews really helps you: - Doing more reviews, and seeing what other reviewers notice, will help you better understand what is expected of code that gets merged into master - Having more non-core people do great reviews, leaves less review work for the core reviewers to do, so we are able get more code merged - Empathy is one of the keys to a happy community. If you are used to doing code reviews, you will better understand the comments you get when people review your code. As you do more code reviews, and see what others notice, you will get a better idea of what people are looking for when then apply a +2 to your code. What are the most useful types of code review comments? Well here are a few to the top ones: - Fundamental flaws are the biggest thing to spot. Does the patch break a whole set of existing users, or an existing feature? 
- Consistency of behavior is really important. Does this bit of code do things differently to where similar things happen elsewhere in Masakari? - Is the code easy to maintain, well tested and easy to read? Code is read order of magnitude times more than it is written, so optimize for the reader of the code, not the writer. Let's look at some problems people hit when starting out doing code reviews: - My +1 doesn't mean anything, why should I bother? - So your +1 really does help. Some really useful -1 votes that lead to a +1 vote helps get code into a position - When to use -1 vs 0 vs +1 - Please see the guidelines here: http://docs.openstack.org/infra/manual/developers.html#peer-review - I have already reviewed this code internally, no point in adding a +1 externally? - Please talk to your company about doing all code reviews in the public, that is a much better way to get involved. Showing how the code has evolved upstream, is much better than trying to 'perfect' code internally, before uploading for public review. You can use Draft mode, and mark things as WIP if you prefer, but please do the reviews upstream. - Where do I start? What should I review? - There are various tools, but a good place to start is: https://etherpad.openstack.org/p/masakari-pike-workitems - Depending on the time in the cycle, it's worth looking at NeedsCodeReview blueprints: https://blueprints.launchpad.net/masakari/ - Maybe take a look at things you want to see merged, bug fixes and features, or little code fixes - Look for things that have been waiting a long time for a review: - If you get through the above lists, try other tools, such as: http://status.openstack.org/reviews How to do great code reviews? ============================= http://docs.openstack.org/infra/manual/developers.html#peer-review For more tips, please see: `Why do code reviews if I am not in masakari-core?`_ How do I become masakari-core? 
============================== You don't have to be masakari-core to be a valued member of the Masakari community. There are many, many ways you can help. Every quality review that helps someone get their patch closer to being ready to merge helps everyone get their code merged faster. The first step to becoming masakari-core is learning how to be an active member of the Masakari community, including learning how to do great code reviews. If you feel like you have the time to commit to all the masakari-core membership expectations, reach out to the Masakari PTL who will be able to find you an existing member of masakari-core to help mentor you. If all goes well, and you seem like a good candidate, your mentor will contact the rest of the masakari-core team to ask them to start looking at your reviews, so they are able to vote for you, if you get nominated to join masakari-core. We encourage all mentoring, where possible, to occur on #openstack-masakari so everyone can learn and benefit from your discussions. The above mentoring is available to everyone who wants to learn how to do better code reviews, even if you don't ever want to commit to becoming masakari-core. If you already have a mentor, that's great, the process is only there for folks who are still trying to find a mentor. Being admitted to the mentoring program in no way guarantees you will become a member of masakari-core eventually; it's here to help you improve, and help you have the sort of involvement and conversations that can lead to becoming a member of masakari-core. masakari-9.0.0/doc/source/user/notifications.rst0000664000175000017500000001241013656747723022002 0ustar zuulzuul00000000000000.. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Notifications in Masakari ========================== Similar to other OpenStack services Masakari emits notifications to the message bus with the Notifier class provided by `oslo.messaging-doc`_. From the notification consumer point of view a notification consists of two parts: an envelope with a fixed structure defined by oslo.messaging and a payload defined by the service emitting the notification. The envelope format is the following:: { "priority": , "event_type": , "timestamp": , "publisher_id": , "message_id": , "payload": } oslo.messaging provides below choices of notification drivers: =============== ========================================================================== Driver Description =============== ========================================================================== messaging Send notifications using the 1.0 message format messagingv2 Send notifications using the 2.0 message format (with a message envelope) routing Configurable routing notifier (by priority or event_type) log Publish notifications via Python logging infrastructure test Store notifications in memory for test verification noop Disable sending notifications entirely =============== ========================================================================== So notifications can be completely disabled by setting the following in Masakari configuration file: .. code-block:: ini [oslo_messaging_notifications] driver = noop Masakari supports only Versioned notifications. 
Versioned notifications ----------------------- Masakari code uses the masakari.rpc.get_notifier call to get a configured oslo.messaging Notifier object and it uses the oslo provided functions on the Notifier object to emit notifications. The configuration of the returned Notifier object depends on the parameters of the get_notifier call and the value of the oslo.messaging configuration options ``driver`` and ``topics``. In a versioned notification the payload is not a free form dictionary but a serialized `oslo.versionedobjects-doc`_. .. _service.update: For example the wire format of the ``segment.update`` notification looks like the following:: { "event_type": "segment.update.start", "timestamp": "2018-11-27 14:32:20.396940", "payload": { "masakari_object.name": "SegmentApiPayload", "masakari_object.data": { "description": null, "fault": null, "recovery_method": "auto", "name": "test", "service_type": "compute", "id": 877, "uuid": "89597691-bebd-4860-a93e-1b6e9de34b9e" }, "masakari_object.version": "1.0", "masakari_object.namespace": "masakari" }, "priority": "INFO", "publisher_id": "masakari-api:test-virtualbox", "message_id": "e6322900-025d-4dd6-a3a1-3e0e1e9badeb" } The serialized oslo versionedobject as a payload provides a version number to the consumer so the consumer can detect if the structure of the payload is changed. Masakari provides the following contract regarding the versioned notification payload: * the payload version defined by the ``masakari_object.version`` field of the payload will be increased only if the syntax or the semantics of the ``masakari_object.data`` field of the payload is changed. * a minor version bump indicates a backward compatible change which means that only new fields are added to the payload so a well written consumer can still consume the new payload without any change. * a major version bump indicates a backward incompatible change of the payload which can mean removed fields, type change, etc in the payload.
* there is an additional field 'masakari_object.name' for every payload besides 'masakari_object.data' and 'masakari_object.version'. This field contains the name of the Masakari internal representation of the payload type. Client code should not depend on this name. Existing versioned notifications ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * This provides the list of existing versioned notifications with sample payloads. .. versioned_notifications:: .. _`oslo.messaging-doc`: http://docs.openstack.org/developer/oslo.messaging/notifier.html .. _`oslo.versionedobjects-doc`: http://docs.openstack.org/developer/oslo.versionedobjects/ masakari-9.0.0/doc/source/user/process.rst0000664000175000017500000001622313656747723020605 0ustar zuulzuul00000000000000.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. .. _process: ===================== Masakari team process ===================== Masakari is always evolving its processes to ensure easy and productive communication between all members of our community. OpenStack Wide Patterns ======================= Masakari follows most of the generally adopted norms for OpenStack projects. You can get more details here: * https://docs.openstack.org/infra/manual/developers.html * https://docs.openstack.org/project-team-guide/ If you are new to Masakari, please read this first: :ref:`getting_involved`. How do I get my code merged? ============================ OK, so you are new to Masakari, and you have been given a feature to implement.
How do I make that happen? You can get most of your questions answered here: - https://docs.openstack.org/infra/manual/developers.html But let's put a Masakari specific twist on things... Overview ~~~~~~~~ .. image:: /_static/Masakari_spec_process.svg :alt: Flow chart showing the Masakari bug/feature process Where do you track bugs? ~~~~~~~~~~~~~~~~~~~~~~~~ We track bugs here: - https://bugs.launchpad.net/masakari If you fix an issue, please raise a bug so others who spot that issue can find the fix you kindly created for them. Also before submitting your patch it's worth checking to see if someone has already fixed it for you (Launchpad helps you with that, at little, when you create the bug report). When do I need a blueprint vs. a spec? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ To understand this question, we need to understand why blueprints and specs are useful. But here is the rough idea: - if it needs a spec, it will need a blueprint. - if it's an API change, it needs a spec. - if it's a single small patch that touches a small amount of code, with limited deployer and doc impact, it probably doesn't need a spec. If you are unsure, please ask PTL or one of the other masakari-core on IRC. How do I get my blueprint approved? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ So you need your blueprint approved? Here is how: - if you don't need a spec, please add a link to your blueprint to the agenda for the next masakari meeting: https://wiki.openstack.org/wiki/Meetings/Masakari - be sure your blueprint description has enough context for the review in that meeting. - if you need a spec, then please submit a masakari-spec for review. Got any more questions? Contact samP or one of the other masakari-core who are awake at the same time as you. IRC is best as you will often get an immediate response, if they are too busy send him/her an email. How do I get a procedural -2 removed from my patch? 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ When feature freeze hits, any patches for blueprints that are still in review get a procedural -2 to stop them merging. In Masakari a blueprint is only approved for a single release. To have the -2 removed, you need to get the blueprint approved for the current release (see `How do I get my blueprint approved?`_). My code review seems stuck, what can I do? ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ First and foremost - address any -1s and -2s! A few tips: - Be precise. Ensure you're not talking at cross purposes. - Try to understand where the reviewer is coming from. They may have a very different perspective and/or use-case to you. - If you don't understand the problem, ask them to explain - this is common and helpful behavior. - Be positive. Everyone's patches have issues, including core reviewers. No-one cares once the issues are fixed. - Try not to flip-flop. When two reviewers are pulling you in different directions, stop pushing code and negotiate the best way forward. - If the reviewer does not respond to replies left on the patchset, reach out to them on IRC or email. If they still don't respond, you can try to ask their colleagues if they're on holiday (or simply wait). Finally, you can ask for mediation in the Masakari meeting by adding it to the agenda (https://wiki.openstack.org/wiki/Meetings/Masakari). This is also what you should do if you are unable to negotiate a resolution to an issue. Eventually you should get some +1s from people working through the review queue. Expect to get -1s as well. You can ask for reviews within your company, 1-2 are useful (not more), especially if those reviewers are known to give good reviews. You can spend some time while you wait reviewing other people's code - they may reciprocate and you may learn something (:ref:`Why do code reviews when I'm not core? `). If you've waited an appropriate amount of time and you haven't had any +1s, you can ask on IRC for reviews. 
Please don't ask for core review straight away, especially not directly (IRC or email). Core reviewer time is very valuable and gaining some +1s is a good way to show your patch meets basic quality standards. Once you have a few +1s, be patient. Remember the average wait times. You can ask for reviews each week in IRC, it helps to ask when cores are awake. Bugs ---- It helps to apply correct tracking information. - Put "Closes-Bug", "Partial-Bug" or "Related-Bug" in the commit message tags as necessary. - If you have to raise a bug in Launchpad first, do it - this helps someone else find your fix. - Make sure the bug has the correct priority and tag set. Features -------- Again, it helps to apply correct tracking information. For blueprint-only features: - Put your blueprint in the commit message, EG "blueprint simple-feature". - Mark the blueprint as NeedsCodeReview if you are finished. - Maintain the whiteboard on the blueprint so it's easy to understand which patches need reviews. - Use a single topic for all related patches. All patches for one blueprint should share a topic. For blueprint and spec features, do everything for blueprint-only features and also: - If it's a project or subteam priority, add it to: https://etherpad.openstack.org/p/masakari-pike-workitems - Ensure your spec is approved for the current release cycle. If it's not a priority, your blueprint/spec has been approved for the cycle and you have been patient, you can raise it during the Masakari meeting. The outcome may be that your spec gets unapproved for the cycle, so that priority items can take focus. If this happens to you, sorry - it should not have been approved in the first place, Masakari team bit off more than they could chew, it is their mistake not yours. You can re-propose it for the next cycle. If it's not a priority and your spec has not been approved, your code will not merge this cycle. Please re-propose your spec for the next cycle. 
masakari-9.0.0/doc/source/user/architecture.rst0000664000175000017500000000333413656747723021610 0ustar zuulzuul00000000000000.. Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Masakari System Architecture ============================ Masakari comprises two services, api and engine, each performing different functions. The user-facing interface is a REST API, while internally Masakari communicates via an RPC message passing mechanism. The API servers process REST requests, which typically involve database reads/writes, sending RPC messages to the Masakari engine, and generating responses to the REST calls. RPC messaging is done via the **oslo.messaging** library, an abstraction on top of message queues. The Masakari engine will run on the same host where the Masakari api is running, and has a `manager` that is listening for `RPC` messages. The manager also runs periodic tasks. Components ---------- Below you will find a helpful explanation of the key components of a typical Masakari deployment. .. image:: /_static/architecture.png :width: 100% * DB: sql database for data storage. * API: component that receives HTTP requests, converts commands and communicates with masakari engine via the **oslo.messaging** queue. * Engine: Executes recovery workflow and communicates with nova via HTTP. masakari-9.0.0/doc/source/index.rst0000664000175000017500000000635713656747723017273 0ustar zuulzuul00000000000000..
Copyright 2017 NTT DATA Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================== Welcome to Masakari's documentation! ==================================== Masakari is an OpenStack project designed to ensure high availability of instances and compute processes running on hosts. This documentation is intended to help explain the current scope of the Masakari project and the architectural decisions made to support this scope. The documentation will include the future architectural roadmap and the current development process and policies. Masakari API References ======================= The `Masakari API `_ is extensive. We provide a concept guide which gives some of the high level details, as well as a more detailed API reference. Operator Guide ============== Architecture Overview --------------------- * :doc:`Masakari architecture `: An overview of how all the components in masakari work together. Installation ------------ A detailed install guide for masakari. .. toctree:: :maxdepth: 2 install/index Reference Material ------------------ * :doc:`Configuration Guide `: Information on configuration files. * :doc:`Custom Recovery Workflow Configuration Guide ` * :doc:`CLI Commands for Masakari `: The complete command reference for Masakari. * :doc:`Versioned Notifications `: This provides the list of existing versioned notifications with sample payloads. 
* :doc:`Masakari team process ` * :doc:`Getting started with Masakari `: This will help newcomers understand basics of Masakari * `Nova Docs `_: A collection of guides for Nova. .. # NOTE(shilpasd): This is the section where we hide things that we don't # actually want in the table of contents but sphinx build would fail if # they aren't in the toctree somewhere. .. toctree:: :hidden: cli/index configuration/api-paste.ini.rst configuration/config.rst configuration/index.rst configuration/policy.rst configuration/recovery_config.rst configuration/recovery_workflow_custom_task.rst configuration/recovery_workflow_sample_config.rst configuration/sample_config.rst configuration/sample_policy.rst install/development.environment.rst user/architecture.rst user/how_to_get_involved.rst user/notifications.rst user/process.rst Search ====== * :ref:`search`: Search the contents of this document. * `OpenStack wide search `_: Search the wider set of OpenStack documentation, including forums. masakari-9.0.0/doc/source/configuration/0000775000175000017500000000000013656750011020252 5ustar zuulzuul00000000000000masakari-9.0.0/doc/source/configuration/config.rst0000664000175000017500000000044413656747723022273 0ustar zuulzuul00000000000000===================== Configuration Options ===================== The following is an overview of all available configuration options in Masakari. For a sample configuration file, refer to :doc:`sample_config`. .. 
show-options:: :config-file: etc/masakari/masakari-config-generator.conf masakari-9.0.0/doc/source/configuration/recovery_workflow_custom_task.rst0000664000175000017500000000471213656747723027234 0ustar zuulzuul00000000000000================================================ Guide for Custom Recovery Workflow Configuration ================================================ If operator wants customized recovery workflow, so here is guidelines mentioned for how to associate custom tasks from Third Party Library along with standard recovery workflows in Masakari.: #. First make sure required Third Party Library is installed on the Masakari engine node. Below is the sample custom task file. For example: .. code-block:: bash from oslo_log import log as logging from taskflow import task LOG = logging.getLogger(__name__) class Noop(task.Task): def __init__(self, novaclient): self.novaclient = novaclient super(Noop, self).__init__() def execute(self, **kwargs): LOG.info("Custom task executed successfully..!!") return #. Configure custom task in Third Party Library's setup.cfg as below: For example, Third Party Library's setup.cfg will have following entry points .. code-block:: bash masakari.task_flow.tasks = custom_pre_task = custom_main_task = custom_post_task = Note: Entry point in Third Party Library's setup.cfg should have same key as in Masakari setup.cfg for respective failure recovery. #. Configure custom task in Masakari's new conf file custom-recovery-methods.conf with same name which was given in the setup.cfg to locate class path. For example(custom task added in host auto failure config option): .. code-block:: bash host_auto_failure_recovery_tasks = { 'pre': ['disable_compute_service_task', 'custom_pre_task'], 'main': ['custom_main_task', 'prepare_HA_enabled_instances_task'], 'post': ['evacuate_instances_task', 'custom_post_task']} #. 
If there are any configuration parameters required for custom task, then add them into custom-recovery-methods.conf under the same group/section where they are registered in Third Party Library. All config parameters related to recovery method customization should be part of newly added conf file. Operator will be responsible to generate masakari.conf and related configuration files by themselves. #. Operator should ensure output of each task should be made available to the next tasks needing them. masakari-9.0.0/doc/source/configuration/recovery_workflow_sample_config.rst0000664000175000017500000000100713656747723027500 0ustar zuulzuul00000000000000=========================================================== Masakari Customized Recovery Workflow Configuration Options =========================================================== masakari-custom-recovery-methods.conf.sample The following is a sample Masakari recovery workflow configuration for adaptation and use. The sample configuration can also be downloaded from :download:`here `. .. literalinclude:: /_static/masakari-custom-recovery-methods.conf.sample masakari-9.0.0/doc/source/configuration/api-paste.ini.rst0000664000175000017500000000027413656747723023470 0ustar zuulzuul00000000000000============= api-paste.ini ============= The masakari service stores its API configuration settings in the ``api-paste.ini`` file. .. literalinclude:: /../../etc/masakari/api-paste.ini masakari-9.0.0/doc/source/configuration/sample_policy.rst0000664000175000017500000000065513656747723023672 0ustar zuulzuul00000000000000=========================== Sample Masakari Policy File =========================== The following is a sample masakari policy file for adaptation and use. The sample policy can also be viewed in :download:`file form `. .. important:: The sample policy file is auto-generated from masakari when this documentation is built. .. 
literalinclude:: /_static/masakari.policy.yaml.sample masakari-9.0.0/doc/source/configuration/policy.rst0000664000175000017500000000041213656747723022320 0ustar zuulzuul00000000000000================= Masakari Policies ================= The following is an overview of all available policies in masakari. For a sample configuration file, refer to :doc:`sample_policy`. .. show-policy:: :config-file: etc/masakari/masakari-policy-generator.conf masakari-9.0.0/doc/source/configuration/index.rst0000664000175000017500000000230713656747723022135 0ustar zuulzuul00000000000000=================== Configuration Guide =================== The configuration for masakari lies in below described files. Configuration ------------- Masakari has two main config files: ``masakari.conf`` and ``recovery_workflow_sample_config.conf``. * :doc:`Config Reference `: A complete reference of all config points in masakari and what they impact. * :doc:`Sample Config File `: A sample config file with inline documentation. * :doc:`Recovery Config Reference `: A complete reference of all config points in masakari and what they impact. * :doc:`Sample recovery workflow File `: A complete reference of defining the monitoring processes. Policy ------ Masakari, like most OpenStack projects, uses a policy language to restrict permissions on REST API actions. * :doc:`Policy Reference `: A complete reference of all policy points in masakari and what they impact. * :doc:`Sample policy File `: A sample policy file with inline documentation. API configuration settings -------------------------- * :doc:`API configuration `: A complete reference of API configuration settings. masakari-9.0.0/doc/source/configuration/sample_config.rst0000664000175000017500000000340113656747723023630 0ustar zuulzuul00000000000000============================== Masakari Configuration Options ============================== The following is a sample Masakari configuration for adaptation and use. 
It is auto-generated from Masakari when this documentation is built, so if you are having issues with an option, please compare your version of Masakari with the version of this documentation. The sample configuration can also be downloaded from :download:`here `. .. literalinclude:: /_static/masakari.conf.sample Minimal Configuration ===================== Edit the ``/etc/masakari/masakari.conf`` file and complete the following actions In the ``[DEFAULT]`` section, set following options: .. code-block:: bash auth_strategy = keystone masakari_topic = ha_engine os_privileged_user_tenant = service os_privileged_user_auth_url = http://controller/identity os_privileged_user_name = nova os_privileged_user_password = PRIVILEGED_USER_PASS Replace ``PRIVILEGED_USER_PASS`` with the password you chose for the privileged user in the Identity service. In the ``[database]`` section, configure database access: .. code-block:: bash connection = mysql+pymysql://root:MASAKARI_DBPASS@controller/masakari?charset=utf8 In the ``[keystone_authtoken]`` sections, configure Identity service access: .. code-block:: bash auth_url = http://controller/identity memcached_servers = controller:11211 signing_dir = /var/cache/masakari project_domain_name = Default user_domain_name = Default project_name = service username = masakari password = MASAKARI_PASS auth_type = password cafile = /opt/stack/data/ca-bundle.pem Replace ``MASAKARI_PASS`` with the password you chose for the ``masakari`` user in the Identity service. masakari-9.0.0/doc/source/configuration/recovery_config.rst0000664000175000017500000000051713656747723024212 0ustar zuulzuul00000000000000===================== Configuration Options ===================== The following is an overview of all available configuration options in Masakari. For a sample configuration file, refer to :doc:`recovery_workflow_sample_config`. .. 
show-options:: :config-file: etc/masakari/masakari-customized-recovery-flow-config-generator.conf masakari-9.0.0/doc/api_samples/0000775000175000017500000000000013656750011016400 5ustar zuulzuul00000000000000masakari-9.0.0/doc/api_samples/versions/0000775000175000017500000000000013656750011020250 5ustar zuulzuul00000000000000masakari-9.0.0/doc/api_samples/versions/versions-get-resp.json0000664000175000017500000000060413656747723024557 0ustar zuulzuul00000000000000{ "versions": [ { "id": "v1.0", "links": [ { "href": "http://openstack.example.com/v1/", "rel": "self" } ], "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z" } ] }masakari-9.0.0/doc/api_samples/versions/v1-version-get-resp.json0000664000175000017500000000122613656747723024721 0ustar zuulzuul00000000000000{ "version": { "id": "v1", "links": [ { "href": "http://openstack.example.com/v1/", "rel": "self" }, { "href": "http://docs.openstack.org/", "rel": "describedby", "type": "text/html" } ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.masakari+json;version=1" } ], "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z" } }masakari-9.0.0/doc/api_samples/notifications/0000775000175000017500000000000013656750011021251 5ustar zuulzuul00000000000000masakari-9.0.0/doc/api_samples/notifications/host-notification-create-resp.json0000664000175000017500000000107613656747723030041 0ustar zuulzuul00000000000000{ "notification": { "notification_uuid": "9e66b95d-45da-4695-bfb6-ace68b35d955", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-24T06:37:37.396994", "updated_at": null, "id": 4, "generated_time": "2017-04-24T08:34:46.000000", "deleted_at": null, "type": "COMPUTE_HOST", "payload": { "host_status": "UNKNOWN", "event": "STOPPED", "cluster_status": "OFFLINE" } } } 
masakari-9.0.0/doc/api_samples/notifications/host-notification-create-req.json0000664000175000017500000000045213656747723027654 0ustar zuulzuul00000000000000{ "notification": { "type": "COMPUTE_HOST", "generated_time": "2017-04-24 08:34:46", "payload": { "event": "STOPPED", "host_status": "UNKNOWN", "cluster_status": "OFFLINE" }, "hostname": "openstack-VirtualBox" } } masakari-9.0.0/doc/api_samples/notifications/notification-get-resp.json0000664000175000017500000000432113656747723026376 0ustar zuulzuul00000000000000{ "notification": { "notification_uuid": "07a331b8-df15-4582-b121-73ed3541a408", "status": "finished", "source_host_uuid": "b5bc49be-ea6f-472d-9240-968f75d7a16a", "deleted": false, "created_at": "2019-02-28T07:19:49.000000", "updated_at": "2019-02-28T07:19:59.000000", "payload": { "instance_uuid": "b9837317-a5b8-44f4-93b4-45500c562bb8", "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "recovery_workflow_details": [ { "progress": 1.0, "state": "SUCCESS", "name": "StopInstanceTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:28.842031", "message": "Stopping instance: df528f02-2415-4a40-bad8-453ad6a519f7", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:34.442617", "message": "Stopped instance: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "1.0"} ] }, { "progress": 1.0, "state": "SUCCESS", "name": "StartInstanceTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:34.531755", "message": "Starting instance: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:35.930430", "message": "Instance started: 'df528f02-2415-4a40-bad8-453ad6a519f7'", "progress": "1.0"} ] }, { "progress": 1.0, "state": "SUCCESS", "name": "ConfirmInstanceActiveTask", "progress_details": [ {"timestamp": "2019-03-07 13:54:36.019208", "message": "Confirming instance 'df528f02-2415-4a40-bad8-453ad6a519f7' vm_state is ACTIVE", "progress": "0.0"}, {"timestamp": "2019-03-07 13:54:38.569373", "message": "Confirmed 
instance 'df528f02-2415-4a40-bad8-453ad6a519f7' vm_state is ACTIVE", "progress": "1.0"} ] } ], "generated_time": "2017-06-13T15:34:55.000000", "deleted_at": null, "type": "VM", "id": 13 } } masakari-9.0.0/doc/api_samples/notifications/process-notification-create-req.json0000664000175000017500000000040213656747723030350 0ustar zuulzuul00000000000000{ "notification": { "type": "PROCESS", "generated_time": "2017-04-21 17:29:55", "payload": { "process_name": "nova-compute", "event": "stopped" }, "hostname": "openstack-VirtualBox" } } masakari-9.0.0/doc/api_samples/notifications/vm-notification-create-resp.json0000664000175000017500000000114113656747723027477 0ustar zuulzuul00000000000000{ "notification": { "notification_uuid": "f4836386-7648-4395-89b6-75a2c5ca7ff2", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-24T06:22:47.569979", "updated_at": null, "id": 3, "generated_time": "2017-04-23T07:18:51.523726", "deleted_at": null, "type": "VM", "payload": { "instance_uuid": "96ab1c42-668c-4f2d-8689-afa3301d4ee9", "vir_domain_event": "STOPPED_DESTROYED", "event": "LIFECYCLE" } } } masakari-9.0.0/doc/api_samples/notifications/process-notification-create-resp.json0000664000175000017500000000102613656747723030535 0ustar zuulzuul00000000000000{ "notification": { "notification_uuid": "2b412acf-c55a-442d-8fd2-e823ec0d827f", "status": "new", "source_host_uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-24T06:05:29.387678", "updated_at": null, "id": 2, "generated_time": "2017-04-21T17:29:55.000000", "deleted_at": null, "type": "PROCESS", "payload": { "process_name": "nova-compute", "event": "stopped" } } } masakari-9.0.0/doc/api_samples/notifications/notifcations-list-resp.json0000664000175000017500000000114313656747723026603 0ustar zuulzuul00000000000000{ "notifications": [ { "notification_uuid": "32bc95ac-858d-460a-b562-7e365391be64", "status": "new", "source_host_uuid": 
"083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "created_at": "2017-04-21T12:09:44.000000", "updated_at": null, "id": 1, "generated_time": "2017-04-21T17:29:55.000000", "deleted_at": null, "type": "PROCESS", "payload": { "process_name": "nova-compute", "event": "stopped" } } ] } masakari-9.0.0/doc/api_samples/notifications/vm-notification-create-req.json0000664000175000017500000000052413656747723027321 0ustar zuulzuul00000000000000{ "notification": { "type": "VM", "generated_time": "2017-04-23T07:18:51.523726", "payload": { "instance_uuid": "96ab1c42-668c-4f2d-8689-afa3301d4ee9", "vir_domain_event": "STOPPED_DESTROYED", "event": "LIFECYCLE" }, "hostname": "openstack-VirtualBox" } } masakari-9.0.0/doc/api_samples/hosts/0000775000175000017500000000000013656750011017540 5ustar zuulzuul00000000000000masakari-9.0.0/doc/api_samples/hosts/host-get-resp.json0000664000175000017500000000156313656747723023161 0ustar zuulzuul00000000000000{ "host": { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } }masakari-9.0.0/doc/api_samples/hosts/hosts-list-resp.json0000664000175000017500000000175013656747723023536 0ustar zuulzuul00000000000000{ "hosts": [ { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": 
{ "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } ] }masakari-9.0.0/doc/api_samples/hosts/host-create-req.json0000664000175000017500000000017713656747723023463 0ustar zuulzuul00000000000000{ "host": { "control_attributes": "SSH", "type": "COMPUTE", "name": "openstack-VirtualBox" } } masakari-9.0.0/doc/api_samples/hosts/host-create-resp.json0000664000175000017500000000156313656747723023645 0ustar zuulzuul00000000000000{ "host": { "reserved": false, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": null, "name": "openstack-VirtualBox", "failover_segment": { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 2, "name": "segment2" }, "deleted_at": null, "type": "COMPUTE_HOST", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } }masakari-9.0.0/doc/api_samples/hosts/host-update-req.json0000664000175000017500000000006313656747723023474 0ustar zuulzuul00000000000000{ "host": { "reserved": "True" } } masakari-9.0.0/doc/api_samples/hosts/host-update-resp.json0000664000175000017500000000161113656747723023656 0ustar zuulzuul00000000000000{ "host": { "reserved": true, "uuid": "083a8474-22c0-407f-b89b-c569134c3bfd", "deleted": false, "on_maintenance": false, "created_at": "2017-04-21T10:09:20.000000", "control_attributes": "SSH", "updated_at": "2017-04-21T11:12:43.351320", "name": "openstack-VirtualBox", "failover_segment": { "uuid": 
"9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": null, "recovery_method": "auto", "updated_at": null, "service_type": "Compute", "deleted_at": null, "id": 2, "name": "new_segment" }, "deleted_at": null, "type": "COMPUTE", "id": 1, "failover_segment_id": "9e800031-6946-4b43-bf09-8b3d1cab792b" } } masakari-9.0.0/doc/api_samples/segments/0000775000175000017500000000000013656750011020225 5ustar zuulzuul00000000000000masakari-9.0.0/doc/api_samples/segments/segment-update-req.json0000664000175000017500000000007113656747723024645 0ustar zuulzuul00000000000000{ "segment": { "name": "new_segment" } } masakari-9.0.0/doc/api_samples/segments/segments-list-resp.json0000664000175000017500000000065213656747723024710 0ustar zuulzuul00000000000000{ "segments": [ { "uuid": "9e800031-6946-4b43-bf09-8b3d1cab792b", "deleted": false, "created_at": "2017-04-20T10:17:17.000000", "description": "Segment1", "recovery_method": "auto", "updated_at": null, "service_type": "Compute", "deleted_at": null, "id": 1, "name": "segment2" } ] }masakari-9.0.0/doc/api_samples/segments/segment-create-resp.json0000664000175000017500000000055313656747723025015 0ustar zuulzuul00000000000000{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:53.991030", "description": null, "recovery_method": "AUTO", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment" } } masakari-9.0.0/doc/api_samples/segments/segment-update-resp.json0000664000175000017500000000060313656747723025030 0ustar zuulzuul00000000000000{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:54.000000", "description": null, "recovery_method": "AUTO", "updated_at": "2017-04-21T09:47:03.748028", "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment" } } 
masakari-9.0.0/doc/api_samples/segments/segment-get-resp.json0000664000175000017500000000055313656747723024331 0ustar zuulzuul00000000000000{ "segment": { "uuid": "5fd9f925-0379-40db-a7f8-786a0b655b2a", "deleted": false, "created_at": "2017-04-21T08:59:53.991030", "description": null, "recovery_method": "AUTO", "updated_at": null, "service_type": "COMPUTE", "deleted_at": null, "id": 4, "name": "new_segment" } } masakari-9.0.0/doc/api_samples/segments/segment-create-req.json0000664000175000017500000000017713656747723024635 0ustar zuulzuul00000000000000{ "segment" : { "service_type": "COMPUTE", "recovery_method": "AUTO", "name": "new_segment" } }masakari-9.0.0/doc/ext/0000775000175000017500000000000013656750011014703 5ustar zuulzuul00000000000000masakari-9.0.0/doc/ext/__init__.py0000664000175000017500000000000013656747723017022 0ustar zuulzuul00000000000000masakari-9.0.0/doc/ext/versioned_notifications.py0000664000175000017500000001246613656747723022235 0ustar zuulzuul00000000000000# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ This provides a sphinx extension able to list the implemented versioned notifications into the developer documentation. It is used via a single directive in the .rst file .. 
versioned_notifications:: """ import os from docutils import nodes from docutils.parsers import rst import importlib from oslo_serialization import jsonutils import pkgutil from masakari.notifications.objects import base as notification from masakari.objects import base from masakari.tests import json_ref import masakari.utils class VersionedNotificationDirective(rst.Directive): SAMPLE_ROOT = 'doc/notification_samples/' TOGGLE_SCRIPT = """ """ def run(self): notifications = self._collect_notifications() return self._build_markup(notifications) def _import_all_notification_packages(self): list(map(lambda module: importlib.import_module(module), ('masakari.notifications.objects.' + name for _, name, _ in pkgutil.iter_modules(masakari.notifications.objects.__path__)))) def _collect_notifications(self): self._import_all_notification_packages() base.MasakariObjectRegistry.register_notification_objects() notifications = {} ovos = base.MasakariObjectRegistry.obj_classes() for name, cls in ovos.items(): cls = cls[0] if (issubclass(cls, notification.NotificationBase) and cls != notification.NotificationBase): payload_name = cls.fields['payload'].objname payload_cls = ovos[payload_name][0] for sample in cls.samples: if sample in notifications: raise ValueError('Duplicated usage of %s ' 'sample file detected' % sample) notifications[sample] = ((cls.__name__, payload_cls.__name__, sample)) return sorted(notifications.values()) def _build_markup(self, notifications): content = [] cols = ['Event type', 'Notification class', 'Payload class', 'Sample'] table = nodes.table() content.append(table) group = nodes.tgroup(cols=len(cols)) table.append(group) head = nodes.thead() group.append(head) for _ in cols: group.append(nodes.colspec(colwidth=1)) body = nodes.tbody() group.append(body) # fill the table header row = nodes.row() body.append(row) for col_name in cols: col = nodes.entry() row.append(col) text = nodes.strong(text=col_name) col.append(text) # fill the table content, one 
notification per row for name, payload, sample_file in notifications: event_type = sample_file[0: -5].replace('-', '.') row = nodes.row() body.append(row) col = nodes.entry() row.append(col) text = nodes.literal(text=event_type) col.append(text) col = nodes.entry() row.append(col) text = nodes.literal(text=name) col.append(text) col = nodes.entry() row.append(col) text = nodes.literal(text=payload) col.append(text) col = nodes.entry() row.append(col) with open(os.path.join(self.SAMPLE_ROOT, sample_file), 'r') as f: sample_content = f.read() sample_obj = jsonutils.loads(sample_content) sample_obj = json_ref.resolve_refs( sample_obj, base_path=os.path.abspath(self.SAMPLE_ROOT)) sample_content = jsonutils.dumps(sample_obj, sort_keys=True, indent=4, separators=(',', ': ')) event_type = sample_file[0: -5] html_str = self.TOGGLE_SCRIPT % ((event_type, ) * 3) html_str += ("" % event_type) html_str += ("
%s
" % (event_type, sample_content)) raw = nodes.raw('', html_str, format="html") col.append(raw) return content def setup(app): app.add_directive('versioned_notifications', VersionedNotificationDirective) masakari-9.0.0/requirements.txt0000664000175000017500000000202713656747723016643 0ustar zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. Babel!=2.4.0,>=2.3.4 # BSD iso8601>=0.1.11 # MIT jsonschema>=2.6.0 # MIT keystoneauth1>=3.4.0 # Apache-2.0 keystonemiddleware>=4.17.0 # Apache-2.0 WebOb>=1.7.1 # MIT microversion-parse>=0.2.1 # Apache-2.0 oslo.config>=5.2.0 # Apache-2.0 oslo.context>=2.19.2 # Apache-2.0 oslo.db>=4.27.0 # Apache-2.0 oslo.messaging>=5.29.0 # Apache-2.0 oslo.i18n>=3.15.3 # Apache-2.0 oslo.log>=3.36.0 # Apache-2.0 oslo.middleware>=3.31.0 # Apache-2.0 oslo.policy>=1.30.0 # Apache-2.0 oslo.service!=1.28.1,>=1.24.0 # Apache-2.0 oslo.upgradecheck>=0.1.0 # Apache-2.0 oslo.utils>=3.33.0 # Apache-2.0 oslo.versionedobjects>=1.31.2 # Apache-2.0 pbr!=2.1.0,>=2.0.0 # Apache-2.0 python-novaclient>=9.1.0 # Apache-2.0 six>=1.10.0 # MIT stevedore>=1.20.0 # Apache-2.0 SQLAlchemy>=1.2.19 # MIT SQLAlchemy-Utils>=0.33.10 # Apache-2.0 taskflow>=2.16.0 # Apache-2.0 masakari-9.0.0/setup.cfg0000664000175000017500000000612713656750011015165 0ustar zuulzuul00000000000000[metadata] name = masakari summary = Virtual Machine High Availability (VMHA) service for OpenStack description-file = README.rst author = OpenStack author-email = openstack-discuss@lists.openstack.org home-page = https://docs.openstack.org/masakari/latest/ classifier = Environment :: OpenStack Intended Audience :: Information Technology Intended Audience :: System Administrators License :: OSI Approved :: Apache Software License Operating System :: POSIX :: Linux Programming Language :: Python Programming Language :: Python :: 3 
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.7 [files] data_files = etc/masakari = etc/masakari/api-paste.ini etc/masakari/masakari-custom-recovery-methods.conf packages = masakari [entry_points] oslo.config.opts = masakari.conf = masakari.conf.opts:list_opts customized_recovery_flow_opts = masakari.conf.opts:list_recovery_workflow_opts oslo.config.opts.defaults = masakari.api = masakari.common.config:set_middleware_defaults oslo.policy.enforcer = masakari = masakari.policy:get_enforcer oslo.policy.policies = masakari = masakari.policies:list_rules console_scripts = masakari-api = masakari.cmd.api:main masakari-engine = masakari.cmd.engine:main masakari-manage = masakari.cmd.manage:main masakari-status = masakari.cmd.status:main wsgi_scripts = masakari-wsgi = masakari.cmd.api:initialize_application masakari.database.migration_backend = sqlalchemy = oslo_db.sqlalchemy.migration masakari.api.v1.extensions = versions = masakari.api.openstack.ha.versionsV1:Versions extension_info = masakari.api.openstack.ha.extension_info:ExtensionInfo segments = masakari.api.openstack.ha.segments:Segments hosts = masakari.api.openstack.ha.hosts:Hosts notifications = masakari.api.openstack.ha.notifications:Notifications masakari.driver = taskflow_driver = masakari.engine.drivers.taskflow:TaskFlowDriver masakari.task_flow.tasks = disable_compute_service_task = masakari.engine.drivers.taskflow.host_failure:DisableComputeServiceTask prepare_HA_enabled_instances_task = masakari.engine.drivers.taskflow.host_failure:PrepareHAEnabledInstancesTask evacuate_instances_task = masakari.engine.drivers.taskflow.host_failure:EvacuateInstancesTask stop_instance_task = masakari.engine.drivers.taskflow.instance_failure:StopInstanceTask start_instance_task = masakari.engine.drivers.taskflow.instance_failure:StartInstanceTask confirm_instance_active_task = masakari.engine.drivers.taskflow.instance_failure:ConfirmInstanceActiveTask disable_compute_node_task = 
masakari.engine.drivers.taskflow.process_failure:DisableComputeNodeTask confirm_compute_node_disabled_task = masakari.engine.drivers.taskflow.process_failure:ConfirmComputeNodeDisabledTask no_op = masakari.engine.drivers.taskflow.no_op:Noop [compile_catalog] directory = masakari/locale domain = masakari [update_catalog] domain = masakari output_dir = masakari/locale input_file = masakari/locale/masakari.pot [extract_messages] keywords = _ gettext ngettext l_ lazy_gettext mapping_file = babel.cfg output_file = masakari/locale/masakari.pot [egg_info] tag_build = tag_date = 0 masakari-9.0.0/tox.ini0000664000175000017500000000710213656747723014671 0ustar zuulzuul00000000000000[tox] minversion = 3.1.1 envlist = pep8,py36,py37 skipsdist = True ignore_basepython_conflict = True [testenv] basepython = python3 usedevelop = True setenv = VIRTUAL_ENV={envdir} LANGUAGE=en_US LC_ALL=en_US.utf-8 deps = -c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt whitelist_externals = bash find rm env # By default stestr will set concurrency # to ncpu, to specify something else use # the concurrency= option. # call ie: 'tox -epy27 -- --concurrency=4' commands = find . 
-type f -name "*.pyc" -delete passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY OS_DEBUG GENERATE_HASHES [testenv:py36] basepython = python3.6 commands = {[testenv]commands} stestr run {posargs} [testenv:py37] basepython = python3.7 commands = {[testenv]commands} stestr run {posargs} [testenv:functional] commands = {[testenv]commands} stestr --test-path=./masakari/tests/functional run --concurrency=1 --slowest {posargs} [testenv:genconfig] commands = oslo-config-generator --config-file=etc/masakari/masakari-config-generator.conf oslo-config-generator --config-file=etc/masakari/masakari-customized-recovery-flow-config-generator.conf [testenv:genpolicy] commands = oslopolicy-sample-generator --config-file=etc/masakari/masakari-policy-generator.conf [testenv:pep8] commands = flake8 {posargs} [testenv:venv] commands = {posargs} [testenv:cover] setenv = VIRTUAL_ENV={envdir} PYTHON=coverage run --source masakari --parallel-mode commands = stestr run {posargs} coverage combine coverage html -d cover coverage xml -o cover/coverage.xml [testenv:docs] deps = -r{toxinidir}/doc/requirements.txt commands = sphinx-build -W -b html doc/source doc/build/html [testenv:releasenotes] deps = -r{toxinidir}/doc/requirements.txt commands = rm -fr releasenotes/build sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html [testenv:debug] basepython = python3 commands = oslo_debug_helper {posargs} [testenv:api-ref] # This environment is called from CI scripts to test and publish # the API Ref to docs.openstack.org. commands = rm -rf api-ref/build sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html [flake8] # E123, E125 skipped as they are invalid PEP-8. show-source = True # The below hacking rules by default are disabled should be enabled: # [H106] Don't put vim configuration in source files. # [H203] Use assertIs(Not)None to check for None. 
# [H904] Delay string interpolations at logging calls. enable-extensions = H106,H203,H904 ignore = E123,E125,E128,E731,H405 builtins = _ exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build [hacking] local-check-factory = masakari.hacking.checks.factory import_exceptions = masakari.i18n [testenv:lower-constraints] deps = -c{toxinidir}/lower-constraints.txt -r{toxinidir}/test-requirements.txt -r{toxinidir}/requirements.txt [testenv:bindep] # Do not install any requirements. We want this to be fast and work even if # system dependencies are missing, since it's used to tell you what system # dependencies are missing! This also means that bindep must be installed # separately, outside of the requirements files, and develop mode disabled # explicitly to avoid unnecessarily installing the checked-out repo too (this # further relies on "tox.skipsdist = True" above). deps = bindep commands = bindep test usedevelop = False masakari-9.0.0/ChangeLog0000664000175000017500000002655413656750010015123 0ustar zuulzuul00000000000000CHANGES ======= 9.0.0 ----- * Use unittest.mock instead of third party mock * Ignoring host recovery if host\_status is \`UNKNOWN\` * Fix the functional test devstack job * Drop use of SQLAlchemy-Utils * HostNotFound name->id * HypervisorNotFoundByName host\_name->hypervisor\_name * Add ignore\_basepython\_conflict in tox.ini * fix a typo notiification => notification * update segment api doc * Correct indentation for spaces for doc files * Fix constraints URL enforcement for lower-constraints * Sync Sphinx requirement * [ussuri][goal] Drop python 2.7 support and testing * Reorganize masakari documentation * Support noauth2 auth\_strategy * Update master for stable/train * Cleanup instances test data * Notification functional tests fails randomly 8.0.0 ----- * Remove references of novaclient.exceptions.EndpointNotFound * Add Python 3 Train unit tests * add libpq-dev and python-dev to build psycopg2 * Replace git.openstack.org URLs with opendev.org 
URLs * Update api-ref location * Resolve functional gate job failure * Resolve gate job failure * OpenDev Migration Patch * Switch from oslosphinx to openstackdocstheme * Replace openstack.org git:// URLs with https:// * Update master for stable/stein * Add devstack support for masakari-monitors 7.0.0 ----- * Updated rh host workflow for recovery workflow details * Handle KeyError: 'progress\_details' * Migrate multinode jobs to Ubuntu Bionic * fix oslo-config-generator conf * Functional tests to check recovery workflow details * Functional tests for notification APIs * Configure taskflow connection * Add progress details for recovery workflows * Improve logging error messages * Functional tests for host APIs * Add functional CI job and tests for segments * Send notifications for all API changes * Add common methods to use for sending notification * Define objects used for notification * Add necessary constants and methods for RPC notification * Run all jobs by default using python3 * Py3: Ensure wsgi headers are not bytes type * Recover resized instance(power\_state=SHUTDOWN) with vm\_state=stopped * Fix parsing api version with correct service\_type * Update hacking version >1.1.0 * Allow updating host name only if it exists in nova * Add masakari-systemfault-integration-ci to .zuul.yaml * Use template for lower-constraints * Return 400 if notification payload is incorrect * change nova.services.disable use service\_uuid * Cleanup testrepository * Update coverage environment and use template for cover * Uninstall policy.json from openstack\_dashboard correctly 7.0.0.0b1 --------- * Allow adding host only if it exists in nova * Update the HTTP links to HTTPS * Change openstack-dev to openstack-discuss * Update README * Fix datetime microsecond discrepancy issue * Add framework for masakari-status upgrade check * Fix: TestMasakariAPI tests skipped due to missing \_\_init\_\_.py * Increment versioning with pbr instruction * fix tox python3 overrides * Fix docs 
create service for masakari Currently openstacksdk use instance-ha type service instead of masakari * switch documentation job to new PTI * import zuul job settings from project-config * Add masakari-custom-recovery-methods.conf in etc/masakari * Update reno for stable/rocky 6.0.0.0rc1 ---------- * Add hooks for recovery method customization plugins * Added devstack support for masakari-dashboard * Fix masakari installation failure for policy.json * Register and Document policy in code * Removed unnecessary parantheses in yield statements * Add line for PyCharm IDE in gitignore file 6.0.0.0b3 --------- * fix tox python3 overrides * Remove testrepository * Switch to using stestr * Avoid recovery from failure twice * Fix Context test cases 6.0.0.0b2 --------- * Fix module has no attribute 'parse\_args' * Deploy masakari-api with uwsgi * Generalize DB conf group copying * Make accept-language tests work with webob 1.8.x * Enable mutable config in Masakari * Update "auth\_url" * Fix typo in api-ref/source/failover-segments.inc file 6.0.0.0b1 --------- * add lower-constraints job * Add db purge support * Completely remove mox from Masakari unittest * Fix stable branch releasenotes * Introspective Instance Monitoring through QEMU Guest Agent * Make readme more readable * Ship files like other openstack projects * Fix the docs page link * Use method validate\_integer from oslo.utils * Updated from global requirements * Updated from global requirements * Drop all qpid related explanations * Fix for difference in tzinfo.tzname value between py27 and py35 * Changed default value of 'wait\_period\_after\_power\_off' 5.0.0 ----- * Fix default value of 'nova\_catalog\_admin\_info' * Updated from global requirements 5.0.0.0b3 --------- * Remove use of unsupported TEMPEST\_SERVICES variable * Change masakari service-type from 'ha' to 'instance-ha' * Ignore '409 Conflict' when adding reserved\_host to aggregate * Stop using deprecated 'message' attribute in Exception * Improve the 
home-page url in setup.cfg * Evacuates instances which task\_state is not none * Remove unnecessary sleep * Updated from global requirements * Masakari operator's documentation * Masakari developer's documentation * Masakari API Specifications * Remove setting of version/release from releasenotes * Updated from global requirements * Updated from global requirements * Make eventlet hub use a monotonic clock * Fix devstack installation of masakari * Modify unit testcases according to oslo.context * Use service\_type and service\_name of nova\_catalog\_admin\_info * Add ssl support for masakari plugin * Hacking: do not translate log messages * Upgrade oslo.db and Replace DbMigrationError with DBMigrationError * iso8601.is8601.Utc No Longer Exists 4.0.0 ----- * Make 'error' instances recovery configurable * Add missing domain name to novaclient * Make provision to evacuate all instances * Enable global hacking checks and remove local checks * Fix 'host delete' delete's host of another segment * conf: Deprecate 'masakari\_topic' RPC options * Send global\_request\_id to nova when calls are made * Update the documentation link for doc migration * Remove 'on\_shared\_storage' parameter from nova evacuate * Remove unused methods * Remove support for py34 * Use os-testr instead of testr * Enable code coverage report in console output * Use get\_rpc\_transport instead of get\_transport * Fix race condition between evacuation and its confirmation * Updated from global requirements * Exclude on maintenance reserved\_hosts for host\_failure recovery * Optimize the link address * Fix identity\_admin * Revert "Fix os\_privileged\_user\_auth\_url" * Fix failing test cases * Fix os\_privileged\_user\_auth\_url * Ignore instance recovery for 'paused' or 'rescued' instance * Use DDT to reduce test code duplication * Updated from global requirements * Remove log translations * Implement auto\_priority and rh\_priority recovery\_methods * Updated from global requirements * Delete the 
failure host from reserved\_host * Don't pass argument sqlite\_db in set\_defaults() * Use HostAddressOpt for opts that accept IP and hostnames * Add license information in test-requirements.txt * Fix test method which has incorrect reference * Change nova\_catalog\_admin\_info to default to publicURL * Update to a newer hacking library * Prevent 404 error when adding reserved\_host to aggregate 3.0.0.0rc1 ---------- * Add reserved\_host to failed\_host's aggregate * Add host\_failure workflow for 'reserved\_host' recovery\_method * Tolerate jsonschema==2.6.0 * Fix release notes formatting * Release note for \_process\_unfinished\_notifications periodic task * Boolean filters are not working for host list api * InstanceFailure: Ignore notifications for certain events * Set access\_policy for messaging's dispatcher * Add ha enabled config options * Implement \_process\_unfinished\_notifications periodic tasks * Sync tox environment with upper-constraint of requirements * Add business rules for host and failover\_segment * Bump taskflow version to minimum 2.7.0 * Added engine manager and rpcapi related unit tests * Stop logging unnecessary warning on context create * Fix incorrect test class name * Update to match latest global-requirements * Use dict.items() and dict.values() as per guidelines * Add unit tests for notification business rules * Remove deps under pep8 section to enable hacking checks * Fix bad request error for valid marker in hosts * Extracted HTTP response codes to constants * Add unit tests for process failure flow * Remove unused test\_masakari module * [Devstack] Fix devstack plugin compatibility * Fix failing gate jobs due to removal of nova\_client.service\_catalog * Avoid printing traceback if VM is not HA\_Enabled * Stop instance only if it's HA\_Enabled * Fix spelling mistake and log message * Add database related test cases * Fix spelling mistake * Change default topic name of masakari-engine * Return correct response codes * Add unit tests 
for instance failure flow * Add unit tests for host failure workflow * Add missing namespaces in masakari-config-generator.conf * Add admin role for masakari user * Fix invalid port reference in devstack settings * Add unit tests for nova support * Set admin user as the default policy * Fix 500 error while create/update host 2.0.0 ----- * Add instance and process failure workflow * Add host failure workflow * Add nova support in masakari * Add TaskFlowDriver support to process notifications * Add business rule for process\_notification * Convert boolean parameters in host api * Remove pypy from tox env list * Return HTTPBadRequest instead of HTTPNotFound * Drop MANIFEST.in - it's not needed by pbr * Add process\_notification cast call * Add authentication libraries to requirements.txt * Add RPC layer for masakari * Enable release notes translation * Fix 500 if you pass invalid sort\_key and sort\_dir * Remove unused method * Enable masakari-engine in devstack * Add masakari-engine support * Add Masakari Object Serializer * Add notifications controller for masakari * Remove wrong reference to cinder * Remove reference of Nova * Added host controller for masakari * Added failover segments controller for masakari * Add notification related objects * Remove redundant str typecasting * Using assertIsNone() instead of assertIs(None, ..) 
* py33 is no longer supported by Infra's CI * Remove unexpected method argument * Clean imports in code * TrivialFix: Merge imports in code * TrivialFix: Remove logging import unused * Alter nullable constraint on created\_at column * Removed unnecessary sort\_keys and sort\_dirs * Add notification db apis and migration script * Don't attempt to escalate masakari-manage privileges * Dictionary creation could be rewritten with dictionary literal * Added masakari objects * Refactor: Move post method to APIValidationTestCase base class * Add test cases for 'extension\_info' module * Correct API Version String format * Return NotFound exception for delete db api * Cleanup tox.ini: Remove obsolete constraints * Add testing for WSGI Framework * Add schema framework for masakari * Added masakari db apis * Update unique constraint of hosts table * Add devstack plugin * Use %()d for integer substitution * Make use of oslo-config-generator * Dictionary creation could be rewritten with dictionary literal * Add db sync support for masakari * Add a hacking rule to enforce use of LOG.warning * Add a hacking rule for string interpolation at logging * Add hacking checks * Add support for wsgi framework * Add project description to README.rst * Initial Cookiecutter Commit * Added .gitreview masakari-9.0.0/.mailmap0000664000175000017500000000013113656747723014772 0ustar zuulzuul00000000000000# Format is: # # masakari-9.0.0/MANIFEST.in0000664000175000017500000000000013656747723015102 0ustar zuulzuul00000000000000masakari-9.0.0/etc/0000775000175000017500000000000013656750011014111 5ustar zuulzuul00000000000000masakari-9.0.0/etc/masakari/0000775000175000017500000000000013656750011015701 5ustar zuulzuul00000000000000masakari-9.0.0/etc/masakari/masakari-policy-generator.conf0000664000175000017500000000011513656747723023636 0ustar zuulzuul00000000000000[DEFAULT] output_file = etc/masakari/policy.yaml.sample namespace = masakari 
masakari-9.0.0/etc/masakari/masakari-config-generator.conf0000664000175000017500000000060713656747723023612 0ustar zuulzuul00000000000000[DEFAULT] output_file = etc/masakari/masakari.conf.sample wrap_width = 80 namespace = keystonemiddleware.auth_token namespace = masakari.conf namespace = oslo.db namespace = oslo.db.concurrency namespace = oslo.log namespace = oslo.messaging namespace = oslo.middleware namespace = oslo.policy namespace = oslo.service.service namespace = oslo.service.wsgi namespace = oslo.versionedobjects masakari-9.0.0/etc/masakari/masakari-custom-recovery-methods.conf0000664000175000017500000000677713656747723025206 0ustar zuulzuul00000000000000[DEFAULT] [taskflow_driver_recovery_flows] # # From customized_recovery_flow_opts # # # This option allows operator to customize tasks to be executed for host failure # auto recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the host failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. disable_compute_service_task # 2. prepare_HA_enabled_instances_task # 3. evacuate_instances_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) host_auto_failure_recovery_tasks = main:['prepare_HA_enabled_instances_task'],post:['evacuate_instances_task'],pre:['disable_compute_service_task'] # # This option allows operator to customize tasks to be executed for host failure # reserved_host recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the host failure recovery workflow. 
The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. disable_compute_service_task # 2. prepare_HA_enabled_instances_task # 3. evacuate_instances_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) host_rh_failure_recovery_tasks = main:['prepare_HA_enabled_instances_task', 'evacuate_instances_task'],post:[],pre:['disable_compute_service_task'] # # This option allows operator to customize tasks to be executed for instance # failure recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the instance failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. # # By default below three tasks will be part of this config option:- # 1. stop_instance_task # 2. start_instance_task # 3. confirm_instance_active_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) instance_failure_recovery_tasks = main:['start_instance_task'],post:['confirm_instance_active_task'],pre:['stop_instance_task'] # # This option allows operator to customize tasks to be executed for process # failure recovery workflow. # # Provide list of strings reflecting to the task classes that should be included # to the process failure recovery workflow. The full classname path of all task # classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and # these classes may be implemented by OpenStack Masaskari project team, deployer # or third party. 
# # By default below two tasks will be part of this config option:- # 1. disable_compute_node_task # 2. confirm_compute_node_disabled_task # # The allowed values for this option is comma separated dictionary of object # names in between ``{`` and ``}``. (dict value) process_failure_recovery_tasks = main:['confirm_compute_node_disabled_task'],post:[],pre:['disable_compute_node_task'] masakari-9.0.0/etc/masakari/api-paste.ini0000664000175000017500000000261613656747723020312 0ustar zuulzuul00000000000000[composite:masakari_api] use = call:masakari.api.urlmap:urlmap_factory /: apiversions /v1: masakari_api_v1 [composite:masakari_api_v1] use = call:masakari.api.auth:pipeline_factory_v1 keystone = cors http_proxy_to_wsgi request_id faultwrap sizelimit authtoken keystonecontext osapi_masakari_app_v1 noauth2 = cors http_proxy_to_wsgi request_id faultwrap sizelimit noauth2 osapi_masakari_app_v1 # filters [filter:cors] paste.filter_factory = oslo_middleware.cors:filter_factory oslo_config_project = masakari [filter:http_proxy_to_wsgi] paste.filter_factory = oslo_middleware.http_proxy_to_wsgi:HTTPProxyToWSGI.factory [filter:request_id] paste.filter_factory = oslo_middleware:RequestId.factory [filter:faultwrap] paste.filter_factory = masakari.api.openstack:FaultWrapper.factory [filter:sizelimit] paste.filter_factory = oslo_middleware:RequestBodySizeLimiter.factory [filter:authtoken] paste.filter_factory = keystonemiddleware.auth_token:filter_factory [filter:keystonecontext] paste.filter_factory = masakari.api.auth:MasakariKeystoneContext.factory [filter:noauth2] paste.filter_factory = masakari.api.auth:NoAuthMiddleware.factory # apps [app:osapi_masakari_app_v1] paste.app_factory = masakari.api.openstack.ha:APIRouterV1.factory [pipeline:apiversions] pipeline = faultwrap http_proxy_to_wsgi apiversionsapp [app:apiversionsapp] paste.app_factory = masakari.api.openstack.ha.versions:Versions.factory 
masakari-9.0.0/etc/masakari/masakari-customized-recovery-flow-config-generator.conf0000664000175000017500000000021313656747723030570 0ustar zuulzuul00000000000000[DEFAULT] wrap_width = 80 output_file = etc/masakari/masakari-custom-recovery-methods.conf.sample namespace = customized_recovery_flow_optsmasakari-9.0.0/etc/masakari/README-masakari.conf.txt0000664000175000017500000000021013656747723022122 0ustar zuulzuul00000000000000To generate the sample masakari.conf file, run the following command from the top level of the masakari directory: tox -egenconfig masakari-9.0.0/etc/masakari/masakari.conf0000664000175000017500000000121213656747723020354 0ustar zuulzuul00000000000000[DEFAULT] enabled_apis = masakari_api # Enable to specify listening IP other than default masakari_api_listen = 127.0.0.1 # Enable to specify port other than default #masakari_api_listen_port = 15868 debug = False auth_strategy=keystone [wsgi] # The paste configuration file path api_paste_config = /etc/masakari/api-paste.ini [keystone_authtoken] www_authenticate_uri = http://127.0.0.1:5000 auth_url = http://127.0.0.1:5000 auth_type = password project_domain_id = default user_domain_id = default project_name = service username = masakari password = masakari [database] connection = mysql+pymysql://root:admin@127.0.0.1/masakari?charset=utf8 masakari-9.0.0/HACKING.rst0000664000175000017500000000455713656747723015167 0ustar zuulzuul00000000000000masakari Style Commandments =========================== - Step 1: Read the OpenStack Style Commandments https://docs.openstack.org/hacking/latest/ - Step 2: Read on Masakari Specific Commandments ------------------------------ - [M301] no db session in public API methods (disabled) This enforces a guideline defined in ``oslo.db.sqlalchemy.session`` - [M302] timeutils.utcnow() wrapper must be used instead of direct calls to datetime.datetime.utcnow() to make it easy to override its return value in tests - [M303] capitalize help string Config parameter help 
strings should have a capitalized first letter - [M305] Change assertTrue(isinstance(A, B)) by optimal assert like assertIsInstance(A, B). - [M306] Change assertEqual(type(A), B) by optimal assert like assertIsInstance(A, B) - [M308] Validate that log messages are not translated. - [M309] Don't import translation in tests - [M310] Setting CONF.* attributes directly in tests is forbidden. Use self.flags(option=value) instead. - [M315] Method's default argument shouldn't be mutable - [M316] Ensure that the _() function is explicitly imported to ensure proper translations. - [M317] Ensure that jsonutils.%(fun)s must be used instead of json.%(fun)s - [M318] Change assertTrue/False(A in/not in B, message) to the more specific assertIn/NotIn(A, B, message) - [M319] Check for usage of deprecated assertRaisesRegexp - [M320] Must use a dict comprehension instead of a dict constructor with a sequence of key-value pairs. - [M321] Change assertEqual(A in B, True), assertEqual(True, A in B), assertEqual(A in B, False) or assertEqual(False, A in B) to the more specific assertIn/NotIn(A, B) - [M322] Check masakari.utils.spawn() is used instead of greenthread.spawn() and eventlet.spawn() - [M323] contextlib.nested is deprecated - [M324] Config options should be in the central location ``masakari/conf/`` - [M325] Check for common double word typos - [M326] Python 3: do not use dict.iteritems. - [M327] Python 3: do not use dict.iterkeys. - [M328] Python 3: do not use dict.itervalues. - [M329] Deprecated library function os.popen() - [M331] LOG.warn is deprecated. Enforce use of LOG.warning. - [M332] Yield must always be followed by a space when yielding a value. - [M333] Policy registration should be in the central location ``masakari/policies/`` - [M334] Do not use the oslo_policy.policy.Enforcer.enforce() method. 
masakari-9.0.0/releasenotes/0000775000175000017500000000000013656750011016027 5ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/notes/0000775000175000017500000000000013656750011017157 5ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml0000664000175000017500000000073513656747723027272 0ustar zuulzuul00000000000000--- prelude: > Added new tool ``masakari-status upgrade check``. features: - | New framework for ``masakari-status upgrade check`` command is added. This framework allows adding various checks which can be run before a Masakari upgrade to ensure if the upgrade can be performed safely. upgrade: - | Operator can now use new CLI tool ``masakari-status upgrade check`` to check if Masakari deployment can be safely upgraded from N-1 to N release. masakari-9.0.0/releasenotes/notes/.placeholder0000664000175000017500000000000013656747723021450 0ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml0000664000175000017500000000174613656747723025615 0ustar zuulzuul00000000000000--- features: - | Added _process_unfinished_notifications to process notifications which are in error or new state. This periodic task will execute at regular interval defined by new config option 'process_unfinished_notifications_interval' defaults to 120 seconds. The notifications which are in ‘new’ status will be picked up based on a new config option ‘retry_notification_new_status_interval’ defaults to 60 seconds. 
To change the default execution time of periodic task, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] process_unfinished_notifications_interval = 120 To change the default identification time of notifications which are stuck in 'NEW' state, following config option needs to be set with desirable time under 'DEFAULT' section in 'masakari.conf' file:: [DEFAULT] retry_notification_new_status_interval = 60 masakari-9.0.0/releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml0000664000175000017500000000031613656747723024332 0ustar zuulzuul00000000000000--- upgrade: - | Python 2.7 support has been dropped. Last release of Masakari to support python 2.7 is OpenStack Train. The minimum version of Python now supported by Masakari is Python 3.6. masakari-9.0.0/releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml0000664000175000017500000000125613656747723032347 0ustar zuulzuul00000000000000--- features: - | Operators can decide whether error instances should be allowed for evacuation along with other instances from a failed source compute node or not. Added a new config option ``ignore_instances_in_error_state`` to achieve this. When set to True, masakari will skip the recovery of error instances otherwise it will evacuate error instances as well from a failed source compute node. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] ignore_instances_in_error_state = False The default value for this config option is set to False. masakari-9.0.0/releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml0000664000175000017500000000067413656747723026721 0ustar zuulzuul00000000000000--- features: - | Added following new REST API's for masakari operators: - GET /v1/segments - Returns list of all failover segments. - GET /v1/segments/ - Returns specific failover segment with uuid. 
- POST /v1/segments - Creates a new failover segment - PUT /v1/segments/ - Updates a failover segment by uuid - DELETE /v1/segments/ - Delete a failover segment by uuid masakari-9.0.0/releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml0000664000175000017500000000270513656747723030114 0ustar zuulzuul00000000000000--- features: - | Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be allowed for evacuation from a failed source compute node. When set to True, it will evacuate all instances from a failed source compute node. First preference will be given to those instances which contain 'HA_Enabled=True' metadata key, and then it will evacuate the remaining ones. When set to False, it will evacuate only those instances which contain 'HA_Enabled=True' metadata key. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] evacuate_all_instances = True - | Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be taken into account to recover from instance failure events. When set to True, it will execute instance failure recovery actions for an instance irrespective of whether that particular instance contains metadata key 'HA_Enabled=True' or not. When set to False, it will only execute instance failure recovery action for an instance which contain metadata key 'HA_Enabled=True'. To use this feature, following config option need to be set under ``instance_failure`` section in 'masakari.conf' file:: [instance_failure] process_all_instances = True masakari-9.0.0/releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml0000664000175000017500000000066213656747723025570 0ustar zuulzuul00000000000000--- features: - | Operators can now purge the soft-deleted records from the database tables. 
Added below command to purge the records: ``masakari-manage db purge --age_in_days --max_rows `` NOTE: ``notifications`` db records will be purged on the basis of ``update_at`` and ``status`` fields (finished, ignored, failed) as these records will not be automatically soft-deleted by the system. masakari-9.0.0/releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml0000664000175000017500000000037213656747723025734 0ustar zuulzuul00000000000000--- features: - | Masakari has been enabled for mutable config. Below option may be reloaded by sending SIGHUP to the correct process. 'retry_notification_new_status_interval' option will apply to process unfinished notifications. masakari-9.0.0/releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml0000664000175000017500000000050713656747723030111 0ustar zuulzuul00000000000000--- other: - | Adopt oslo-config-generator to generate sample config files. New config options from masakari code should register with masakari/conf/opts.py. A deprecated option should add a deprecated group even if it didn't alter its group, otherwise the deprecated group will use 'DEFAULT' by default. masakari-9.0.0/releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml0000664000175000017500000000061013656747723030422 0ustar zuulzuul00000000000000--- features: - | Implemented workflow for 'reserved_host' recovery method in case of host failure. Now operator can create or update failover segment with 'reserved_host' recovery method along with the existing 'auto' method. When 'reserved_host' recovery_method is set to a failover segment, operators should also add one or more hosts with reserved flag set as True. 
masakari-9.0.0/releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml0000664000175000017500000000232313656747723031405 0ustar zuulzuul00000000000000--- features: - | Added support to record the recovery workflow details of the notification which will be returned in a new microversion 1.1 in `GET /notifications/{notification_id}` API. For example, GET /notifications/ response will contain `recovery_workflow_details` parameter as shown here `notification_details`_ Added a new config section in Masakari conf file for configuring the back end to be used by taskflow driver:: [taskflow] # The back end for storing recovery_workflow details of the notification. # (string value) connection = mysql+pymysql://root:admin@127.0.0.1/?charset=utf8 # Where db_name, can be a new database or you can also specify masakari # database. Operator should run `masakari-manage db sync` command to add new db tables required for storing recovery_workflow_details. Note: When you run `masakari-manage db sync`, make sure you have `notification_driver=taskflow_driver` set in masakari.conf. .. _`notification_details`: https://developer.openstack.org/api-ref/instance-ha/?expanded=show-notification-details-detail#show-notification-details masakari-9.0.0/releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml0000664000175000017500000000020213656747723025776 0ustar zuulzuul00000000000000--- deprecations: - | The ``masakari_topic`` config option is now deprecated and will be removed in the Queens release. masakari-9.0.0/releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml0000664000175000017500000000154113656747723030162 0ustar zuulzuul00000000000000--- features: - | Operator can now customize workflows to process each type of failure notifications (hosts, instance and process) as per their requirements. 
Added below new config section for customized recovery flow in a new conf file masakari-custom-recovery-methods.conf - [taskflow_driver_recovery_flows] Under [taskflow_driver_recovery_flows] is added below five new config options - 'instance_failure_recovery_tasks' is a dict of tasks which will recover instance failure. - 'process_failure_recovery_tasks' is a dict of tasks which will recover process failure. - 'host_auto_failure_recovery_tasks' is a dict of tasks which will recover host failure for auto recovery. - 'host_rh_failure_recovery_tasks' is a dict of tasks which will recover host failure for rh recovery on failure host. masakari-9.0.0/releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml0000664000175000017500000000053613656747723025762 0ustar zuulzuul00000000000000--- upgrade: - | WSGI application script ``masakari-wsgi`` is now available. It allows running the masakari APIs using a WSGI server of choice (for example nginx and uwsgi, apache2 with mod_proxy_uwsgi or gunicorn). The eventlet-based servers are still available, but the WSGI options will allow greater deployment flexibility. masakari-9.0.0/releasenotes/notes/policy-in-code-8740d51624055044.yaml0000664000175000017500000000217413656747723024532 0ustar zuulzuul00000000000000--- features: - | Masakari now support policy in code, which means if operators doesn't need to modify any of the default policy rules, they do not need a policy file. Operators can modify/generate a ``policy.yaml.sample`` file which will override specific policy rules from their defaults. Masakari is now configured to work with two oslo.policy CLI scripts that have been added: - The first of these can be called like ``oslopolicy-list-redundant --namespace masakari`` and will output a list of policy rules in policy.[json|yaml] that match the project defaults. These rules can be removed from the policy file as they have no effect there. 
- The second script can be called like ``oslopolicy-policy-generator --namespace masakari --output-file policy-merged.yaml`` and will populate the policy-merged.yaml file with the effective policy. This is the merged results of project defaults and config file overrides. NOTE: Default `policy.json` file is now removed as Masakari now uses default policies. A policy file is only needed if overriding one of the defaults. ././@LongLink0000000000000000000000000000014700000000000011217 Lustar 00000000000000masakari-9.0.0/releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yamlmasakari-9.0.0/releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.ya0000664000175000017500000000127313656747723033641 0ustar zuulzuul00000000000000--- features: - | Implemented workflow for 'auto_priority' and 'rh_priority' recovery methods in case of host failure recovery. Operators can set failover_segment's recovery_method as 'auto_priority' and 'rh_priority' now. In case of 'auto_priority' the 'auto' workflow will be executed first to recover the instances from failed compute host. If 'auto' workflow fails to recover the instances then 'reserved_host' workflow will be tried. In case of 'rh_priority' the 'reserved_host' workflow will be executed first to recover the instances from failed compute host. If 'reserved_host' workflow fails to recover the instances then 'auto' workflow will be tried.masakari-9.0.0/releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml0000664000175000017500000000116113656747723024300 0ustar zuulzuul00000000000000--- features: - | Added following new REST API's for masakari operators: - GET /v1/segments//hosts - Returns list of all hosts associated with failover segment. - GET /v1/segments//hosts/ - Returns specific host from the failover segment with uuid. 
- POST /v1/segments//hosts - Creates a new host in failover segment - PUT /v1/segments//hosts/ - Updates a host in failover segment by uuid - DELETE /v1/segments//hosts/ - Delete a host from failover segment by uuid masakari-9.0.0/releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml0000664000175000017500000000125113656747723026772 0ustar zuulzuul00000000000000--- fixes: - | Fixes `bug 1645699`_ which will return correct response codes for below apis: - POST /v1/notification - old_response: 200, new_response: 202 - DELETE /v1/notification - old_response: 404, new_response: 405 - PUT /v1/notification/ - old_response: 404, new_response: 405 - POST /v1/host - old_response: 200, new_response: 201 - DELETE /v1/host/ - old_response: 200, new_response: 204 - POST /v1/segment - old_response: 200, new_response: 201 - DELETE /v1/segment/ - old_response: 200, new_response: 204 .. _bug 1645699: https://bugs.launchpad.net/masakari/+bug/1645699 masakari-9.0.0/releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml0000664000175000017500000000064013656747723030522 0ustar zuulzuul00000000000000--- features: - | Operators can now decide based on the new config option 'add_reserved_host_to_aggregate' whether to add or not a reserved_host to all host aggregates which failed compute host belongs to. To use this feature, following config option need to be set under ``host_failure`` section in 'masakari.conf' file:: [host_failure] add_reserved_host_to_aggregate = True masakari-9.0.0/releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml0000664000175000017500000000043413656747723027254 0ustar zuulzuul00000000000000--- prelude: > Domain name is needed when using keystone v3 to create keystone session, if not provided, InvalidInput exception will be raised. Two new options "os_user_domain_name" and "os_project_domain_name" with default value "default" are added to fix the issue. 
masakari-9.0.0/releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml0000664000175000017500000000120213656747723027053 0ustar zuulzuul00000000000000--- fixes: - | Fixes `bug 1693728`_ which will fix the race condition where after evacuation of an instance to other host user might perform some actions on that instance which gives wrong instance vm_state to ConfirmEvacuationTask that results into notification failure. To fix this issue, following config option is added under ``DEFAULT`` section in 'masakari.conf' file:: [DEFAULT] host_failure_recovery_threads = 3 This config option decides the number of threads going to be used for evacuating the instances. .. _`bug 1693728`: https://bugs.launchpad.net/masakari/+bug/1693728 masakari-9.0.0/releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml0000664000175000017500000000045713656747723026122 0ustar zuulzuul00000000000000--- features: - | Added following new REST API's related to notifications: - GET /v1/notifications - Returns list of all notifications. - GET /v1/notifications/ - Returns specific notification with uuid. - POST /v1/notifications - Creates a new notification. masakari-9.0.0/releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml0000664000175000017500000000122213656747723027440 0ustar zuulzuul00000000000000--- features: - | Added support to emit event notifications whenever user interacts with Masakari restFul APIs. The emitted notifications are documented at `sample_payloads`_. To enable this feature one should set `driver` config option under the `oslo_messaging_notifications` section as shown below:: [oslo_messaging_notifications] driver = log Note: Possible values are `messaging`, `messagingv2`, `routing`, `log`, `test`, `noop`. Notifications can be completely disabled by setting `driver` value as `noop` .. 
_`sample_payloads`: https://docs.openstack.org/masakari/latest/#versioned-notifications masakari-9.0.0/releasenotes/source/0000775000175000017500000000000013656750011017327 5ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/source/train.rst0000664000175000017500000000017613656747723021222 0ustar zuulzuul00000000000000========================== Train Series Release Notes ========================== .. release-notes:: :branch: stable/train masakari-9.0.0/releasenotes/source/_static/0000775000175000017500000000000013656750011020755 5ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/source/_static/.placeholder0000664000175000017500000000000013656747723023246 0ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/source/conf.py0000664000175000017500000002144313656747723020652 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # Masakari Release Notes documentation build configuration file, created by # sphinx-quickstart on Tue Jun 28 9:58 AM 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. 
If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'openstackdocstheme', 'reno.sphinxext', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Masakari Release Notes' copyright = u'2016, OpenStack Foundation' # Release notes are version independent. # The short X.Y version. version = '' # The full version, including alpha/beta/rc tags. release = '' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. 
They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # openstackdocstheme options repository_name = 'openstack/masakari' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'openstackdocs' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. html_use_index = False # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'MasakariReleaseNotesdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ('index', 'MasakariReleaseNotes.tex', u'Masakari Release Notes Documentation', u'Masakari Developers', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'masakarireleasenotes', u'Masakari Release Notes Documentation', [u'Masakari Developers'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'MasakariReleaseNotes', u'Masakari Release Notes Documentation', u'Masakari Developers', 'MasakariReleaseNotes', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False # -- Options for Internationalization output ------------------------------ locale_dirs = ['locale/'] masakari-9.0.0/releasenotes/source/stein.rst0000664000175000017500000000022113656747723021216 0ustar zuulzuul00000000000000=================================== Stein Series Release Notes =================================== .. release-notes:: :branch: stable/stein masakari-9.0.0/releasenotes/source/queens.rst0000664000175000017500000000021513656747723021377 0ustar zuulzuul00000000000000============================ Queens Series Release Notes ============================ .. release-notes:: :branch: origin/stable/queens masakari-9.0.0/releasenotes/source/unreleased.rst0000664000175000017500000000016013656747723022225 0ustar zuulzuul00000000000000============================== Current Series Release Notes ============================== .. release-notes:: masakari-9.0.0/releasenotes/source/rocky.rst0000664000175000017500000000022113656747723021223 0ustar zuulzuul00000000000000=================================== Rocky Series Release Notes =================================== .. release-notes:: :branch: stable/rocky masakari-9.0.0/releasenotes/source/index.rst0000664000175000017500000000033113656747723021205 0ustar zuulzuul00000000000000Welcome to Masakari Release Notes documentation! ================================================ Contents ======== .. toctree:: :maxdepth: 1 unreleased train stein rocky queens pike ocata masakari-9.0.0/releasenotes/source/ocata.rst0000664000175000017500000000021113656747723021162 0ustar zuulzuul00000000000000=========================== Ocata Series Release Notes =========================== .. 
release-notes:: :branch: origin/stable/ocata masakari-9.0.0/releasenotes/source/_templates/0000775000175000017500000000000013656750011021464 5ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/source/_templates/.placeholder0000664000175000017500000000000013656747723023755 0ustar zuulzuul00000000000000masakari-9.0.0/releasenotes/source/pike.rst0000664000175000017500000000020513656747723021026 0ustar zuulzuul00000000000000========================== Pike Series Release Notes ========================== .. release-notes:: :branch: origin/stable/pike masakari-9.0.0/masakari.egg-info/0000775000175000017500000000000013656750011016620 5ustar zuulzuul00000000000000masakari-9.0.0/masakari.egg-info/entry_points.txt0000664000175000017500000000372713656750010022126 0ustar zuulzuul00000000000000[console_scripts] masakari-api = masakari.cmd.api:main masakari-engine = masakari.cmd.engine:main masakari-manage = masakari.cmd.manage:main masakari-status = masakari.cmd.status:main [masakari.api.v1.extensions] extension_info = masakari.api.openstack.ha.extension_info:ExtensionInfo hosts = masakari.api.openstack.ha.hosts:Hosts notifications = masakari.api.openstack.ha.notifications:Notifications segments = masakari.api.openstack.ha.segments:Segments versions = masakari.api.openstack.ha.versionsV1:Versions [masakari.database.migration_backend] sqlalchemy = oslo_db.sqlalchemy.migration [masakari.driver] taskflow_driver = masakari.engine.drivers.taskflow:TaskFlowDriver [masakari.task_flow.tasks] confirm_compute_node_disabled_task = masakari.engine.drivers.taskflow.process_failure:ConfirmComputeNodeDisabledTask confirm_instance_active_task = masakari.engine.drivers.taskflow.instance_failure:ConfirmInstanceActiveTask disable_compute_node_task = masakari.engine.drivers.taskflow.process_failure:DisableComputeNodeTask disable_compute_service_task = masakari.engine.drivers.taskflow.host_failure:DisableComputeServiceTask evacuate_instances_task = 
masakari.engine.drivers.taskflow.host_failure:EvacuateInstancesTask no_op = masakari.engine.drivers.taskflow.no_op:Noop prepare_HA_enabled_instances_task = masakari.engine.drivers.taskflow.host_failure:PrepareHAEnabledInstancesTask start_instance_task = masakari.engine.drivers.taskflow.instance_failure:StartInstanceTask stop_instance_task = masakari.engine.drivers.taskflow.instance_failure:StopInstanceTask [oslo.config.opts] customized_recovery_flow_opts = masakari.conf.opts:list_recovery_workflow_opts masakari.conf = masakari.conf.opts:list_opts [oslo.config.opts.defaults] masakari.api = masakari.common.config:set_middleware_defaults [oslo.policy.enforcer] masakari = masakari.policy:get_enforcer [oslo.policy.policies] masakari = masakari.policies:list_rules [wsgi_scripts] masakari-wsgi = masakari.cmd.api:initialize_application masakari-9.0.0/masakari.egg-info/dependency_links.txt0000664000175000017500000000000113656750010022665 0ustar zuulzuul00000000000000 masakari-9.0.0/masakari.egg-info/requires.txt0000664000175000017500000000103413656750010021215 0ustar zuulzuul00000000000000Babel!=2.4.0,>=2.3.4 iso8601>=0.1.11 jsonschema>=2.6.0 keystoneauth1>=3.4.0 keystonemiddleware>=4.17.0 WebOb>=1.7.1 microversion-parse>=0.2.1 oslo.config>=5.2.0 oslo.context>=2.19.2 oslo.db>=4.27.0 oslo.messaging>=5.29.0 oslo.i18n>=3.15.3 oslo.log>=3.36.0 oslo.middleware>=3.31.0 oslo.policy>=1.30.0 oslo.service!=1.28.1,>=1.24.0 oslo.upgradecheck>=0.1.0 oslo.utils>=3.33.0 oslo.versionedobjects>=1.31.2 pbr!=2.1.0,>=2.0.0 python-novaclient>=9.1.0 six>=1.10.0 stevedore>=1.20.0 SQLAlchemy>=1.2.19 SQLAlchemy-Utils>=0.33.10 taskflow>=2.16.0 masakari-9.0.0/masakari.egg-info/pbr.json0000664000175000017500000000005613656750010020276 0ustar zuulzuul00000000000000{"git_version": "9a59610", "is_release": true}masakari-9.0.0/masakari.egg-info/PKG-INFO0000664000175000017500000001060613656750010017717 0ustar zuulzuul00000000000000Metadata-Version: 1.1 Name: masakari Version: 9.0.0 Summary: Virtual Machine 
High Availability (VMHA) service for OpenStack Home-page: https://docs.openstack.org/masakari/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: ======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. * Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. 
code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s --adminurl http://:/v1/%\(tenant_id\)s --internalurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.json`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. code-block:: shell-session masakari-api Configure masakari database --------------------------- #. Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. code-block:: shell-session masakari-manage db sync Features -------- * TODO Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 masakari-9.0.0/masakari.egg-info/top_level.txt0000664000175000017500000000001113656750010021341 0ustar zuulzuul00000000000000masakari masakari-9.0.0/masakari.egg-info/not-zip-safe0000664000175000017500000000000113656750010021045 0ustar zuulzuul00000000000000 masakari-9.0.0/masakari.egg-info/SOURCES.txt0000664000175000017500000003263513656750011020515 0ustar zuulzuul00000000000000.coveragerc .mailmap .stestr.conf .zuul.yaml AUTHORS CONTRIBUTING.rst ChangeLog HACKING.rst LICENSE MANIFEST.in README.rst babel.cfg bindep.txt lower-constraints.txt requirements.txt setup.cfg setup.py 
test-requirements.txt tox.ini api-ref/source/conf.py api-ref/source/failover-segments.inc api-ref/source/hosts.inc api-ref/source/index.rst api-ref/source/notifications.inc api-ref/source/parameters.yaml api-ref/source/status.yaml api-ref/source/versions.inc devstack/README.rst devstack/plugin.sh devstack/settings doc/requirements.txt doc/api_samples/hosts/host-create-req.json doc/api_samples/hosts/host-create-resp.json doc/api_samples/hosts/host-get-resp.json doc/api_samples/hosts/host-update-req.json doc/api_samples/hosts/host-update-resp.json doc/api_samples/hosts/hosts-list-resp.json doc/api_samples/notifications/host-notification-create-req.json doc/api_samples/notifications/host-notification-create-resp.json doc/api_samples/notifications/notifcations-list-resp.json doc/api_samples/notifications/notification-get-resp.json doc/api_samples/notifications/process-notification-create-req.json doc/api_samples/notifications/process-notification-create-resp.json doc/api_samples/notifications/vm-notification-create-req.json doc/api_samples/notifications/vm-notification-create-resp.json doc/api_samples/segments/segment-create-req.json doc/api_samples/segments/segment-create-resp.json doc/api_samples/segments/segment-get-resp.json doc/api_samples/segments/segment-update-req.json doc/api_samples/segments/segment-update-resp.json doc/api_samples/segments/segments-list-resp.json doc/api_samples/versions/v1-version-get-resp.json doc/api_samples/versions/versions-get-resp.json doc/ext/__init__.py doc/ext/versioned_notifications.py doc/notification_samples/create-host-end.json doc/notification_samples/create-host-start.json doc/notification_samples/create-notification-end.json doc/notification_samples/create-notification-start.json doc/notification_samples/create-segment-end.json doc/notification_samples/create-segment-start.json doc/notification_samples/delete-host-end.json doc/notification_samples/delete-host-start.json doc/notification_samples/delete-segment-end.json 
doc/notification_samples/delete-segment-start.json doc/notification_samples/error-exception.json doc/notification_samples/process-notification-end.json doc/notification_samples/process-notification-error.json doc/notification_samples/process-notification-start.json doc/notification_samples/update-host-end.json doc/notification_samples/update-host-start.json doc/notification_samples/update-segment-end.json doc/notification_samples/update-segment-start.json doc/source/conf.py doc/source/index.rst doc/source/_static/Masakari_spec_process.svg doc/source/_static/architecture.png doc/source/cli/index.rst doc/source/cli/masakari-manage.rst doc/source/cli/masakari-status.rst doc/source/cli/openstack-masakari.rst doc/source/configuration/api-paste.ini.rst doc/source/configuration/config.rst doc/source/configuration/index.rst doc/source/configuration/policy.rst doc/source/configuration/recovery_config.rst doc/source/configuration/recovery_workflow_custom_task.rst doc/source/configuration/recovery_workflow_sample_config.rst doc/source/configuration/sample_config.rst doc/source/configuration/sample_policy.rst doc/source/install/development.environment.rst doc/source/install/index.rst doc/source/install/install_and_configure.rst doc/source/install/install_and_configure_ubuntu.rst doc/source/install/overview.rst doc/source/install/verify.rst doc/source/user/architecture.rst doc/source/user/how_to_get_involved.rst doc/source/user/notifications.rst doc/source/user/process.rst etc/masakari/README-masakari.conf.txt etc/masakari/api-paste.ini etc/masakari/masakari-config-generator.conf etc/masakari/masakari-custom-recovery-methods.conf etc/masakari/masakari-customized-recovery-flow-config-generator.conf etc/masakari/masakari-policy-generator.conf etc/masakari/masakari.conf masakari/__init__.py masakari/config.py masakari/context.py masakari/exception.py masakari/i18n.py masakari/manager.py masakari/policy.py masakari/rpc.py masakari/safe_utils.py masakari/service.py masakari/test.py 
masakari/utils.py masakari/version.py masakari/wsgi.py masakari.egg-info/PKG-INFO masakari.egg-info/SOURCES.txt masakari.egg-info/dependency_links.txt masakari.egg-info/entry_points.txt masakari.egg-info/not-zip-safe masakari.egg-info/pbr.json masakari.egg-info/requires.txt masakari.egg-info/top_level.txt masakari/api/__init__.py masakari/api/api_version_request.py masakari/api/auth.py masakari/api/urlmap.py masakari/api/utils.py masakari/api/versioned_method.py masakari/api/openstack/__init__.py masakari/api/openstack/common.py masakari/api/openstack/extensions.py masakari/api/openstack/wsgi.py masakari/api/openstack/ha/__init__.py masakari/api/openstack/ha/extension_info.py masakari/api/openstack/ha/hosts.py masakari/api/openstack/ha/notifications.py masakari/api/openstack/ha/segments.py masakari/api/openstack/ha/versions.py masakari/api/openstack/ha/versionsV1.py masakari/api/openstack/ha/schemas/__init__.py masakari/api/openstack/ha/schemas/hosts.py masakari/api/openstack/ha/schemas/notifications.py masakari/api/openstack/ha/schemas/payload.py masakari/api/openstack/ha/schemas/segments.py masakari/api/openstack/ha/views/__init__.py masakari/api/openstack/ha/views/versions.py masakari/api/validation/__init__.py masakari/api/validation/parameter_types.py masakari/api/validation/validators.py masakari/cmd/__init__.py masakari/cmd/api.py masakari/cmd/engine.py masakari/cmd/manage.py masakari/cmd/status.py masakari/common/__init__.py masakari/common/config.py masakari/compute/__init__.py masakari/compute/nova.py masakari/conf/__init__.py masakari/conf/api.py masakari/conf/base.py masakari/conf/database.py masakari/conf/engine.py masakari/conf/engine_driver.py masakari/conf/exceptions.py masakari/conf/nova.py masakari/conf/opts.py masakari/conf/osapi_v1.py masakari/conf/paths.py masakari/conf/service.py masakari/conf/ssl.py masakari/conf/wsgi.py masakari/db/__init__.py masakari/db/api.py masakari/db/migration.py masakari/db/sqlalchemy/__init__.py 
masakari/db/sqlalchemy/api.py masakari/db/sqlalchemy/migration.py masakari/db/sqlalchemy/models.py masakari/db/sqlalchemy/migrate_repo/README.txt masakari/db/sqlalchemy/migrate_repo/__init__.py masakari/db/sqlalchemy/migrate_repo/manage.py masakari/db/sqlalchemy/migrate_repo/migrate.cfg masakari/db/sqlalchemy/migrate_repo/versions/001_add_failover_segments_table.py masakari/db/sqlalchemy/migrate_repo/versions/002_add_hosts_table.py masakari/db/sqlalchemy/migrate_repo/versions/003_update_unique_constraint_hosts.py masakari/db/sqlalchemy/migrate_repo/versions/004_add_notifications_table.py masakari/db/sqlalchemy/migrate_repo/versions/005_remove_nullable_mismatch.py masakari/db/sqlalchemy/migrate_repo/versions/006_add_persistence_tables.py masakari/engine/__init__.py masakari/engine/driver.py masakari/engine/instance_events.py masakari/engine/manager.py masakari/engine/rpcapi.py masakari/engine/utils.py masakari/engine/drivers/__init__.py masakari/engine/drivers/taskflow/__init__.py masakari/engine/drivers/taskflow/base.py masakari/engine/drivers/taskflow/driver.py masakari/engine/drivers/taskflow/host_failure.py masakari/engine/drivers/taskflow/instance_failure.py masakari/engine/drivers/taskflow/no_op.py masakari/engine/drivers/taskflow/process_failure.py masakari/ha/__init__.py masakari/ha/api.py masakari/hacking/__init__.py masakari/hacking/checks.py masakari/notifications/__init__.py masakari/notifications/objects/__init__.py masakari/notifications/objects/base.py masakari/notifications/objects/exception.py masakari/notifications/objects/notification.py masakari/objects/__init__.py masakari/objects/base.py masakari/objects/fields.py masakari/objects/host.py masakari/objects/notification.py masakari/objects/segment.py masakari/policies/__init__.py masakari/policies/base.py masakari/policies/extension_info.py masakari/policies/hosts.py masakari/policies/notifications.py masakari/policies/segments.py masakari/policies/versions.py masakari/tests/__init__.py 
masakari/tests/base.py masakari/tests/fixtures.py masakari/tests/json_ref.py masakari/tests/uuidsentinel.py masakari/tests/functional/__init__.py masakari/tests/functional/base.py masakari/tests/functional/notification_base.py masakari/tests/functional/test_hosts.py masakari/tests/functional/test_process_notifications.py masakari/tests/functional/test_segments.py masakari/tests/functional/test_vm_notifications.py masakari/tests/unit/__init__.py masakari/tests/unit/conf_fixture.py masakari/tests/unit/fake_notifier.py masakari/tests/unit/fake_policy.py masakari/tests/unit/fakes.py masakari/tests/unit/matchers.py masakari/tests/unit/policy_fixture.py masakari/tests/unit/test_api_validation.py masakari/tests/unit/test_conf.py masakari/tests/unit/test_context.py masakari/tests/unit/test_exception.py masakari/tests/unit/test_hacking.py masakari/tests/unit/test_masakari_manage.py masakari/tests/unit/test_policy.py masakari/tests/unit/test_rpc.py masakari/tests/unit/test_safeutils.py masakari/tests/unit/test_service.py masakari/tests/unit/test_utils.py masakari/tests/unit/test_versions.py masakari/tests/unit/test_wsgi.py masakari/tests/unit/utils.py masakari/tests/unit/api/__init__.py masakari/tests/unit/api/test_api_version_request.py masakari/tests/unit/api/test_auth.py masakari/tests/unit/api/test_utils.py masakari/tests/unit/api/openstack/__init__.py masakari/tests/unit/api/openstack/fakes.py masakari/tests/unit/api/openstack/test_common.py masakari/tests/unit/api/openstack/test_extensions.py masakari/tests/unit/api/openstack/test_wsgi.py masakari/tests/unit/api/openstack/ha/__init__.py masakari/tests/unit/api/openstack/ha/test_extension_info.py masakari/tests/unit/api/openstack/ha/test_hosts.py masakari/tests/unit/api/openstack/ha/test_notifications.py masakari/tests/unit/api/openstack/ha/test_segments.py masakari/tests/unit/api/openstack/ha/test_versions.py masakari/tests/unit/cmd/__init__.py masakari/tests/unit/cmd/test_masakari_api.py 
masakari/tests/unit/cmd/test_status.py masakari/tests/unit/compute/__init__.py masakari/tests/unit/compute/test_nova.py masakari/tests/unit/db/__init__.py masakari/tests/unit/db/test_db_api.py masakari/tests/unit/db/test_migrations.py masakari/tests/unit/db/test_purge.py masakari/tests/unit/engine/__init__.py masakari/tests/unit/engine/fake_engine.py masakari/tests/unit/engine/test_engine_mgr.py masakari/tests/unit/engine/test_rpcapi.py masakari/tests/unit/engine/test_utils.py masakari/tests/unit/engine/drivers/__init__.py masakari/tests/unit/engine/drivers/taskflow/__init__.py masakari/tests/unit/engine/drivers/taskflow/test_host_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_instance_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_process_failure_flow.py masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py masakari/tests/unit/ha/__init__.py masakari/tests/unit/ha/test_api.py masakari/tests/unit/monkey_patch_example/__init__.py masakari/tests/unit/monkey_patch_example/example_a.py masakari/tests/unit/monkey_patch_example/example_b.py masakari/tests/unit/notifications/__init__.py masakari/tests/unit/notifications/objects/__init__.py masakari/tests/unit/notifications/objects/test_notification.py masakari/tests/unit/objects/__init__.py masakari/tests/unit/objects/fake_args.py masakari/tests/unit/objects/test_fields.py masakari/tests/unit/objects/test_hosts.py masakari/tests/unit/objects/test_notifications.py masakari/tests/unit/objects/test_objects.py masakari/tests/unit/objects/test_segments.py playbooks/devstack/post.yaml playbooks/devstack/pre.yaml playbooks/devstack/run.yaml releasenotes/notes/.placeholder releasenotes/notes/add-periodic-tasks-0c96d6f620502a75.yaml releasenotes/notes/add-upgrade-check-framework-52268130b25317ab.yaml releasenotes/notes/add_evacuate_error_instances_conf_option-5b4d1906137395f0.yaml releasenotes/notes/add_ha_enabled_config_options-54a9270a5993d20a.yaml 
releasenotes/notes/add_reserved_host_to_aggregates-5f506d08354ec148.yaml releasenotes/notes/adopt-oslo-config-generator-cf2fdb17cf7f13db.yaml releasenotes/notes/auto_priority_and_rh_priority_recovery_methods-b88cc00041fa2c4d.yaml releasenotes/notes/bp-mutable-config-57efdd467c01aa7b.yaml releasenotes/notes/bug-add-missing-domain-name-5181c02f3f033a22.yaml releasenotes/notes/correct_response_code-df8b43a201efa1b4.yaml releasenotes/notes/db-purge-support-7a33e2ea5d2a624b.yaml releasenotes/notes/deprecate-topic-opt-af83f82143143c61.yaml releasenotes/notes/drop-py-2-7-059d3cd5e7cb4e1a.yaml releasenotes/notes/evacuation_in_threads-cc9c79b10acfb5f6.yaml releasenotes/notes/failover_segment_apis-f5bea1cd6d103048.yaml releasenotes/notes/host-apis-46a87fcd56d8ed30.yaml releasenotes/notes/notifications-in-masakari-f5d79838fc23cb9b.yaml releasenotes/notes/notifications_apis-3c3d5055ae9c6649.yaml releasenotes/notes/policy-in-code-8740d51624055044.yaml releasenotes/notes/progress-details-recovery-workflows-5b14b7b3f87374f4.yaml releasenotes/notes/recovery-method-customization-3438b0e26e322b88.yaml releasenotes/notes/reserved_host_recovery_method-d2de1f205136c8d5.yaml releasenotes/notes/wsgi-applications-3ed7d6b89f1a5785.yaml releasenotes/source/conf.py releasenotes/source/index.rst releasenotes/source/ocata.rst releasenotes/source/pike.rst releasenotes/source/queens.rst releasenotes/source/rocky.rst releasenotes/source/stein.rst releasenotes/source/train.rst releasenotes/source/unreleased.rst releasenotes/source/_static/.placeholder releasenotes/source/_templates/.placeholder roles/devstack-config/tasks/main.ymlmasakari-9.0.0/api-ref/0000775000175000017500000000000013656750011014661 5ustar zuulzuul00000000000000masakari-9.0.0/api-ref/source/0000775000175000017500000000000013656750011016161 5ustar zuulzuul00000000000000masakari-9.0.0/api-ref/source/hosts.inc0000664000175000017500000001542313656747723020041 0ustar zuulzuul00000000000000.. 
-*- rst -*- ============== Hosts (hosts) ============== **Hosts** A host belongs to segment. Host can be any kind of virtual machine which can have compute service running on it. Lists, creates, shows details for, updates, and deletes hosts. List Hosts ========== .. rest_method:: GET /segments/{segment_id}/hosts Lists IDs, names, type, reserved, on_maintenance for all hosts. You can filter on the type, on_maintenance and reserved when you complete a list hosts request. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - limit: limit - marker: marker - on_maintenance: on_maintenance_query_host - reserved: reserved_query_host - sort_dir: sort_dir - sort_key: sort_key_host - type: type_query_host Response -------- .. rest_parameters:: parameters.yaml - hosts: hosts - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example List hosts** .. literalinclude:: ../../doc/api_samples/hosts/hosts-list-resp.json :language: javascript Create Host =========== .. rest_method:: POST /segments/{segment_id}/hosts Creates a host under given segment. Creates a Host under given segment with name, type, control_attributes. User can set sepcific hosts as reserved by setting reserved attribute to True. By default `on_maintenance` mode which indicates whether host is under maintenance or not is False when host is created. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 201 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. 
A conflict(409) is returned if host with same name is already present under given segment. BadRequest (400) is returned if host doesn't exists in nova. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host: host - type: host_type - name: host_name - reserved: reserved - on_maintenance: on_maintenance **Example Create Host** .. literalinclude:: ../../doc/api_samples/hosts/host-create-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Create Host** .. literalinclude:: ../../doc/api_samples/hosts/host-create-resp.json :language: javascript Show Host Details ================= .. rest_method:: GET /segments/{segment_id}/hosts/{host_id} Shows details for a host. **Preconditions** The segment must exist. The host must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Show Host Details** .. literalinclude:: ../../doc/api_samples/hosts/host-get-resp.json :language: javascript Update Host =========== .. rest_method:: PUT /segments/{segment_id}hosts/{host_id} Updates the editable attributes of an existing host. 
**Preconditions** - The segment must exist. - The host must exist. - User can not update host if that host or any host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to update host name which is already assigned to host under given segment or user tries to update the host or any other host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. BadRequest (400) is returned if host doesn't exists in nova. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path - type: host_type - name: segment_name - on_maintenance: on_maintenance - reserved: reserved **Example Update host reserved flag** .. literalinclude:: ../../doc/api_samples/hosts/host-update-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - host: host - name: host_name - uuid: host_uuid - failover_segment_id: segment_uuid - deleted: deleted - on_maintenance: on_maintenance - reserved: reserved - created_at: created - control_attributes: control_attributes - updated_at: updated - failover_segment: segment - type: host_type - id: host_id **Example Update host reserved flag** .. literalinclude:: ../../doc/api_samples/hosts/host-update-resp.json :language: javascript Delete Host =========== .. rest_method:: DELETE /segments/{segment_id}hosts/{host_id} Deletes a host from given segment. **Preconditions** - The segment must exist. - The host must exist. - User can not delete host if that host or any host from the failover segment has any usage in the notification table i.e. 
any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 204 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to delete the host or any other host from the failover segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path - host_id: host_id_path Response -------- There is no body content for the response of a successful DELETE query.masakari-9.0.0/api-ref/source/notifications.inc0000664000175000017500000001246113656747723021551 0ustar zuulzuul00000000000000.. -*- rst -*- ============================== Notifications (notifications) ============================== **Notifications** A notification is a kind of alert provided by monitoring services (masakari-monitors) for failure of either host, process or instance. Lists, creates and shows details for notifications. List Notifications ================== .. rest_method:: GET /notifications Lists IDs, notification types, host_name, generated_time, payload and status for all notifications. Notifications contain a `status` attribute that indicates the current notification state. You can filter on the notification `status` when you complete a list notifications request. The notification `status` is returned in the response body. The possible notification `status` values are: - ``NEW``. The notification is in new state and yet to be processed. - ``RUNNING``. The notification is in progress. - ``FINISHED``. The notification is completed successfully. - ``ERROR``. The notification is ended up in error. - ``FAILED``. The notification is not processed successfully after failed once. - ``IGNORED``. The notification is ignored by masakari engine. 
You can also filter on the basis of `source_host_uuid`, `generated_since` and `type` when you complete a list notifications request. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - generated_since: generated_since_query_notifications - limit: limit - marker: marker - sort_dir: sort_dir - sort_key: sort_key_notification - source_host_uuid: source_host_uuid_query_notifications - type: type_query_notifications Response -------- .. rest_parameters:: parameters.yaml - notifications: notifications - notification_uuid: notification_uuid - deleted: deleted - created_at: created - updated_at: updated - status: notification_status - uuid: notification_uuid - source_host_uuid: source_host_uuid - generated_time: generated_time - type: notification_type - payload: notification_payload - id: notification_id **Example List Notifications** .. literalinclude:: ../../doc/api_samples/notifications/notifcations-list-resp.json :language: javascript Create Notification =================== .. rest_method:: POST /notifications Creates a notification. Response Codes -------------- .. rest_status_code:: success status.yaml - 202 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 409 .. A conflict(409) is returned if notification with same payload is exists or host for which notification is generated is under maintenance. BadRequest (400) is returned if notification payload is incorrect. Request ------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - host_name: notification_host_name **Example create Process failure notification** .. literalinclude:: ../../doc/api_samples/notifications/process-notification-create-req.json :language: javascript **Example create VM failure notification** .. 
literalinclude:: ../../doc/api_samples/notifications/vm-notification-create-req.json :language: javascript **Example create COMPUTE_HOST failure notification** .. literalinclude:: ../../doc/api_samples/notifications/host-notification-create-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - source_host_uuid: source_host_uuid - uuid: notification_uuid - deleted: deleted - created_at: created - status: notification_status - updated_at: updated - id: notification_id **Example create Process failure notification** .. literalinclude:: ../../doc/api_samples/notifications/process-notification-create-resp.json :language: javascript **Example create VM failure notification** .. literalinclude:: ../../doc/api_samples/notifications/vm-notification-create-resp.json :language: javascript **Example create COMPUTE_HOST failure notification** .. literalinclude:: ../../doc/api_samples/notifications/host-notification-create-resp.json :language: javascript Show Notification Details ========================= .. rest_method:: GET /notifications/{notification_id} Shows details for a notification. **Preconditions** The notification must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - notification_id: notification_id_path Response -------- .. rest_parameters:: parameters.yaml - notification: notification - type: notification_type - generated_time: generated_time - payload: notification_payload - source_host_uuid: source_host_uuid - uuid: notification_uuid - deleted: deleted - created_at: created - status: notification_status - updated_at: updated - recovery_workflow_details: recovery_workflow_details - id: notification_id **Example Show Notification Details** .. 
literalinclude:: ../../doc/api_samples/notifications/notification-get-resp.json :language: javascript masakari-9.0.0/api-ref/source/failover-segments.inc0000664000175000017500000001642613656747723022337 0ustar zuulzuul00000000000000.. -*- rst -*- ============================ FailoverSegments (segments) ============================ **Segments** System can be zoned from top to down levels, into Regions, Availability Zones and Host Aggregates (or Cells). Within those zones, one or more pacemaker/pacemaker-remote clusters may exist. In addition to those boundaries, shared storage boundary is also important to decide the optimal host for fail-over. Openstack zoned boundaries (such as Regions, AZ, Host Aggregates, etc..) can be managed by the nova scheduler. However, shared storage boundaries are difficult to manage. Moreover, the operator may want to use other types of boundary such as rack layout and powering. Therefore, operator may want to define the segment of hypervisor hosts and assign the failover host/hosts for each of them. Those segment can be define based on the shared storage boundaries or any other limitations may critical for selection of the failover host. Lists, creates, shows details for, updates, and deletes segments. List FailoverSegments ===================== .. rest_method:: GET /segments Lists IDs, names, description, recovery_method, service_type for all segments. Segments contains `service_type` and `recovery_method` attributes. `service_type` attribute indicates for which service (e.g. compute, cinder etc) this segment belongs to. `recovery_method` attribute indicates the recovery action to be followed when any host in a segment goes down. The possible `recovery_method` values are: - ``AUTO``. Auto recovery action. - ``RESERVED_HOST``. Reserved host recovery action. - ``AUTO_PRIORITY``. First executes auto and if auto fails then retried with reserved host recover action. - ``RH_PRIORITY``. 
First executes reserved host and if it fails then retried with auto recovery action.
rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - updated: updated - uuid: segment_uuid **Example Create Segment** .. literalinclude:: ../../doc/api_samples/segments/segment-create-resp.json :language: javascript Show Segment Details ==================== .. rest_method:: GET /segments/{segment_id} Shows details for a segment. **Preconditions** The segment must exist. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 401 - 403 - 404 Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path Response -------- .. rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - updated: updated - uuid: segment_uuid **Example Show Segment Details** .. literalinclude:: ../../doc/api_samples/segments/segment-get-resp.json :language: javascript Update Segment ============== .. rest_method:: PUT /segments/{segment_id} Updates the editable attributes of an existing segment. **Preconditions** - The segment must exist. - User can not update segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 200 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to update segment name which is already assigned to segment or if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. 
rest_parameters:: parameters.yaml - segment_id: segment_id_path - description: segment_description - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type **Example Update segment name** .. literalinclude:: ../../doc/api_samples/segments/segment-update-req.json :language: javascript Response -------- .. rest_parameters:: parameters.yaml - segment: segment - created: created - description: segment_description - id: segment_id - name: segment_name - recovery_method: segment_recovery_method - service_type: segment_service_type - updated: updated - uuid: segment_uuid **Example Update Segment name** .. literalinclude:: ../../doc/api_samples/segments/segment-update-resp.json :language: javascript Delete Segment ============== .. rest_method:: DELETE /segments/{segment_id} Deletes a segment. **Preconditions** - The segment must exist. - User can not delete segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Response Codes -------------- .. rest_status_code:: success status.yaml - 204 .. rest_status_code:: error status.yaml - 400 - 401 - 403 - 404 - 409 .. A conflict(409) is returned if user tries to delete the segment if any host from the segment has any usage in the notification table i.e. any host from the failover segment has notification status as new/error/running. Request ------- .. rest_parameters:: parameters.yaml - segment_id: segment_id_path Response -------- There is no body content for the response of a successful DELETE query. masakari-9.0.0/api-ref/source/parameters.yaml0000664000175000017500000002437513656747723021243 0ustar zuulzuul00000000000000# variables in path api_version: in: path required: true type: string description: > The API version as returned in the links from the ``GET /`` call. host_id_path: description: | The UUID of the host. 
in: path required: true type: string notification_id_path: description: | The UUID of the notification. in: path required: true type: string segment_id_path: description: | The UUID of the segment. in: path required: true type: string # variables in query generated_since_query_notifications: description: | Filter the notifications list result by notification generated time. in: query required: false type: string limit: description: | Requests a page size of items. Returns a number of items up to a limit value. Use the ``limit`` parameter to make an initial limited request and use the ID of the last-seen item from the response as the ``marker`` parameter value in a subsequent limited request. in: query required: false type: integer marker: description: | The ID of the last-seen item. Use the ``limit`` parameter to make an initial limited request and use the ID of the last-seen item from the response as the ``marker`` parameter value in a subsequent limited request. in: query required: false type: string on_maintenance_query_host: description: | Filter the host list result by on_maintenance. in: query required: false type: boolean recovery_method_query_segment: description: | Filter the segment list result by recovery_method. in: query required: false type: string reserved_query_host: description: | Filter the host list result by reserved flag. in: query required: false type: boolean service_type_query_segment: description: | Filter the segment list result by service_type. in: query required: false type: string sort_dir: description: | Sort direction. A valid value is ``asc`` (ascending) or ``desc`` (descending). Default is ``desc``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the direction of the segment ``sort_key`` attribute. in: query required: false type: string sort_key_host: description: | Sorts by a hosts attribute. 
Default attribute is ``created``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``type`` - ``name`` - ``updated_at`` - ``uuid`` - ``reserved`` - ``on_maintenance`` in: query required: false type: string sort_key_notification: description: | Sorts by a notification attribute. Default attribute is ``created``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``type`` - ``generated_time`` - ``updated_at`` - ``uuid`` - ``payload`` - ``status`` - ``source_host_uuid`` in: query required: false type: string sort_key_segment: description: | Sorts by a segment attribute. Default attribute is ``created``. You can specify multiple pairs of sort key and sort direction query parameters. If you omit the sort direction in a pair, the API uses the natural sorting direction of the segment ``sort_key`` attribute. The sort keys are limited to: - ``created_at`` - ``description`` - ``name`` - ``updated_at`` - ``uuid`` - ``recovery_method`` - ``service_type`` in: query required: false type: string source_host_uuid_query_notifications: description: | Filter the notifications list result by source_host_uuid. in: query required: false type: string type_query_host: description: | Filter the host list result by type of host. in: query required: false type: boolean type_query_notifications: description: | Filter the notifications list result by notification type. in: query required: false type: string # variables in body control_attributes: description: | Attributes to control host. 
in: body required: true type: string created: description: | The date and time when the resource was created. The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string deleted: description: | A boolean indicates whether this resource is deleted or not, if it has not been deleted, ``false`` will appear. in: body required: true type: boolean generated_time: description: | The date and time when the notification was created. The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string host: description: | A ``host`` object. in: body required: true type: object host_id: description: | ID of host. in: body required: true type: string host_name: description: | The host name. in: body required: true type: string host_type: description: | Type of host. in: body required: true type: string host_uuid: description: | The UUID of the host. in: body required: true type: string hosts: description: | A list of ``host`` objects. in: body required: true type: array links: description: | Links to the resources in question. in: body required: true type: array notification: description: | A ``notification`` object. in: body required: true type: object notification_host_name: description: | A name of host for which notification is created. in: body required: true type: object notification_id: description: | ID of notification. in: body required: true type: string notification_payload: description: | Payload for notification. .. note:: This is a JSON string. 
in: body required: true type: string notification_status: description: | The notification status. in: body required: true type: string notification_type: description: | Type of notification, can be either ``PROCESS``, ``COMPUTE_HOST`` or ``VM``. in: body required: true type: string notification_uuid: description: | The UUID of the notification. in: body required: true type: string notifications: description: | A list of ``notification`` objects. in: body required: true type: array on_maintenance: description: | A boolean indicates whether this host is on maintenance or not, if it is not on maintenance mode, ``false`` will appear. in: body required: false type: boolean recovery_workflow_details: description: | Recovery workflow details of the notification. This is a list of dictionary. ``New in version 1.1`` in: body required: true type: array reserved: description: | A boolean indicates whether this host is reserved or not, if it is not reserved, ``false`` will appear. in: body required: false type: boolean segment: description: | A ``segment`` object. in: body required: true type: object segment_description: type: string in: body required: false description: | A free form description of the segment. Limited to 255 characters in length. segment_id: description: | The Id of the segment. in: body required: true type: string segment_name: description: | The segment name. in: body required: true type: string segment_recovery_method: type: string in: body required: true description: | Type of recovery if any host in this segment goes down. User can mention either 'AUTO', 'RESERVED_HOST', 'AUTO_PRIORITY' or 'RH_PRIORITY'. segment_service_type: type: string in: body required: true description: | The name of service which will be deployed in this segment. As of now user can mention 'COMPUTE' as service_type. segment_uuid: description: | The UUID of the segment. in: body required: true type: string segments: description: | A list of ``segment`` objects. 
in: body required: true type: array source_host_uuid: description: | The UUID of host for which notification is generated. in: body required: true type: string updated: description: | The date and time when the resource was updated. The date and time stamp format is `ISO 8601 `_ :: CCYY-MM-DDThh:mm:ss±hh:mm For example, ``2017-04-21T09:49:58-05:00``. The ``±hh:mm`` value, if included, is the time zone as an offset from UTC. In the previous example, the offset value is ``-05:00``. in: body required: true type: string version: description: | The version. in: body required: true type: string version_id: type: string in: body required: true description: > A common name for the version in question. Informative only, it has no real semantic meaning. version_max: type: string in: body required: true description: > The maximum version supported by API. version_min: type: string in: body required: true description: > The minimum version supported by API. version_status: type: string in: body required: true description: | The status of this API version. This can be one of: - ``CURRENT``: this is the preferred version of the API to use - ``SUPPORTED``: this is an older, but still supported version of the API - ``DEPRECATED``: a deprecated version of the API that is slated for removal versions: type: array in: body required: true description: > A list of version objects that describe the API versions available. masakari-9.0.0/api-ref/source/conf.py0000664000175000017500000001727013656747723017507 0ustar zuulzuul00000000000000# Copyright (c) 2017 NTT Data # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # masakari documentation build configuration file. # # This file is execfile()d with the current directory set to # its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import subprocess import sys import warnings import openstackdocstheme from masakari.version import version_info extensions = [ 'os_api_ref', ] html_theme = 'openstackdocs' html_theme_path = [openstackdocstheme.get_html_theme_path()] html_theme_options = { "sidebar_mode": "toc", } html_context = {'bug_project': 'masakari', 'bug_tag': 'api-ref'} # If extensions (or modules to document with autodoc) are in another # directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown # here. sys.path.insert(0, os.path.abspath('../../')) sys.path.insert(0, os.path.abspath('../')) sys.path.insert(0, os.path.abspath('./')) # -- General configuration # ---------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # # source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. 
project = u'Masakari API Reference'
copyright = u'2017-present, OpenStack Foundation'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = version_info.release_string()
# The short X.Y version.
version = version_info.version_string()

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'

# The reST default role (used for this markup: `text`) to use
# for all documents.
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = False

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for man page output
# ----------------------------------------------

# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'

# -- Options for HTML output
# --------------------------------------------------

# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". # html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local", "-n1"] try: html_last_updated_fmt = subprocess.Popen( git_cmd, stdout=subprocess.PIPE).communicate()[0].decode() except Exception: warnings.warn('Cannot get last updated time from git repository. ' 'Not setting "html_last_updated_fmt".') # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page # names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_use_modindex = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. 
# html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all # pages will # contain a tag referring to it. The value of this option must # be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. # ".xhtml"). # html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'masakaridoc' # -- Options for LaTeX output # ------------------------------------------------- # The paper size ('letter' or 'a4'). # latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). # latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). latex_documents = [('index', 'Masakari.tex', u'OpenStack Masakari API Documentation', u'OpenStack Foundation', 'manual'), ] # The name of an image file (relative to this directory) to place at # the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are # parts, # not chapters. # latex_use_parts = False # Additional stuff for the LaTeX preamble. # latex_preamble = '' # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_use_modindex = True masakari-9.0.0/api-ref/source/status.yaml0000664000175000017500000000337713656747723020422 0ustar zuulzuul00000000000000################# # Success Codes # ################# 200: default: | Request was successful. 201: default: | Resource was created and is ready to use. 202: default: | Request was accepted for processing, but the processing has not been completed. A 'location' header is included in the response which contains a link to check the progress of the request. 
204: default: | The server has fulfilled the request by deleting the resource. 300: default: | There are multiple choices for resources. The request has to be more specific to successfully retrieve one of these resources. 302: default: | The response is about a redirection hint. The header of the response usually contains a 'location' value where requesters can check to track the real location of the resource. ################# # Error Codes # ################# 400: default: | Some content in the request was invalid. resource_signal: | The target resource doesn't support receiving a signal. 401: default: | User must authenticate before making a request. 403: default: | Policy does not allow current user to do this operation. 404: default: | The requested resource could not be found. 405: default: | Method is not valid for this endpoint. 409: default: | This operation conflicted with another operation on this resource. duplicate_zone: | There is already a zone with this name. 500: default: | Something went wrong inside the service. This should not happen usually. If it does happen, it means the server has experienced some serious problems. 503: default: | Service is not available. This is mostly caused by service configuration errors which prevents the service from successful start up.masakari-9.0.0/api-ref/source/index.rst0000664000175000017500000000044313656747723020043 0ustar zuulzuul00000000000000:tocdepth: 2 ============== Masakari API ============== This is a reference for the OpenStack Masakari API which is provided by the Masakari project. .. rest_expand_all:: .. include:: versions.inc .. include:: failover-segments.inc .. include:: hosts.inc .. include:: notifications.inc masakari-9.0.0/api-ref/source/versions.inc0000664000175000017500000000421713656747723020550 0ustar zuulzuul00000000000000.. -*- rst -*- ============== API Versions ============== In order to bring new features to users over time, the Masakari API supports versioning. 
- ``major versions``, which have dedicated URLs
literalinclude:: /../../doc/api_samples/versions/v1-version-get-resp.json :language: javascriptmasakari-9.0.0/masakari/0000775000175000017500000000000013656750011015126 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/utils.py0000664000175000017500000002222413656747723016662 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" import contextlib import functools import inspect import pyclbr import shutil import sys import tempfile import eventlet from oslo_concurrency import lockutils from oslo_context import context as common_context from oslo_log import log as logging from oslo_utils import importutils from oslo_utils import strutils from oslo_utils import timeutils import six import masakari.conf from masakari import exception from masakari.i18n import _ from masakari import safe_utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def utf8(value): """Try to turn a string into utf-8 if possible. 
The original code was copied from the utf8 function in http://github.com/facebook/tornado/blob/master/tornado/escape.py """ if value is None or isinstance(value, six.binary_type): return value if not isinstance(value, six.text_type): value = six.text_type(value) return value.encode('utf-8') def check_isinstance(obj, cls): """Checks that obj is of type cls, and lets PyLint infer types.""" if isinstance(obj, cls): return obj raise Exception(_('Expected object of type: %s') % (str(cls))) def monkey_patch(): """If the CONF.monkey_patch set as True, this function patches a decorator for all functions in specified modules. You can set decorators for each modules using CONF.monkey_patch_modules. The format is "Module path:Decorator function". name - name of the function function - object of the function """ # If CONF.monkey_patch is not True, this function do nothing. if not CONF.monkey_patch: return if six.PY2: is_method = inspect.ismethod else: def is_method(obj): # Unbound methods became regular functions on Python 3 return inspect.ismethod(obj) or inspect.isfunction(obj) # Get list of modules and decorators for module_and_decorator in CONF.monkey_patch_modules: module, decorator_name = module_and_decorator.split(':') # import decorator function decorator = importutils.import_class(decorator_name) __import__(module) # Retrieve module information using pyclbr module_data = pyclbr.readmodule_ex(module) for key, value in module_data.items(): # set the decorator for the class methods if isinstance(value, pyclbr.Class): clz = importutils.import_class("%s.%s" % (module, key)) for method, func in inspect.getmembers(clz, is_method): setattr(clz, method, decorator("%s.%s.%s" % (module, key, method), func)) # set the decorator for the function if isinstance(value, pyclbr.Function): func = importutils.import_class("%s.%s" % (module, key)) setattr(sys.modules[module], key, decorator("%s.%s" % (module, key), func)) def walk_class_hierarchy(clazz, encountered=None): """Walk class 
hierarchy, yielding most derived classes first.""" if not encountered: encountered = [] for subclass in clazz.__subclasses__(): if subclass not in encountered: encountered.append(subclass) # drill down to leaves first for subsubclass in walk_class_hierarchy(subclass, encountered): yield subsubclass yield subclass def expects_func_args(*args): def _decorator_checker(dec): @functools.wraps(dec) def _decorator(f): base_f = safe_utils.get_wrapped_function(f) arg_names, a, kw, _default = inspect.getargspec(base_f) if a or kw or set(args) <= set(arg_names): # NOTE : We can't really tell if correct stuff will # be passed if it's a function with *args or **kwargs so # we still carry on and hope for the best return dec(f) else: raise TypeError("Decorated function %(f_name)s does not " "have the arguments expected by the " "decorator %(d_name)s" % {'f_name': base_f.__name__, 'd_name': dec.__name__}) return _decorator return _decorator_checker def isotime(at=None): """Current time as ISO string, as timeutils.isotime() is deprecated :returns: Current time in ISO format """ if not at: at = timeutils.utcnow() date_string = at.strftime("%Y-%m-%dT%H:%M:%S") tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' date_string += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return date_string def strtime(at): return at.strftime("%Y-%m-%dT%H:%M:%S.%f") class ExceptionHelper(object): """Class to wrap another and translate the ClientExceptions raised by its function calls to the actual ones. """ def __init__(self, target): self._target = target def __getattr__(self, name): func = getattr(self._target, name) @functools.wraps(func) def wrapper(*args, **kwargs): try: return func(*args, **kwargs) except Exception as e: six.reraise(*e.exc_info) return wrapper def spawn(func, *args, **kwargs): """Passthrough method for eventlet.spawn. This utility exists so that it can be stubbed for testing without interfering with the service spawns. 
It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. """ _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn it won't be # available for the logger to pull from threadlocal storage. if _context is not None: _context.update_store() return func(*args, **kwargs) return eventlet.spawn(context_wrapper, *args, **kwargs) def spawn_n(func, *args, **kwargs): """Passthrough method for eventlet.spawn_n. This utility exists so that it can be stubbed for testing without interfering with the service spawns. It will also grab the context from the threadlocal store and add it to the store on the new thread. This allows for continuity in logging the context when using this method to spawn a new thread. """ _context = common_context.get_current() @functools.wraps(func) def context_wrapper(*args, **kwargs): # NOTE: If update_store is not called after spawn_n it won't be # available for the logger to pull from threadlocal storage. 
if _context is not None: _context.update_store() func(*args, **kwargs) eventlet.spawn_n(context_wrapper, *args, **kwargs) @contextlib.contextmanager def tempdir(**kwargs): argdict = kwargs.copy() if 'dir' not in argdict: argdict['dir'] = CONF.tempdir tmpdir = tempfile.mkdtemp(**argdict) try: yield tmpdir finally: try: shutil.rmtree(tmpdir) except OSError as e: LOG.error('Could not remove tmpdir: %s', e) def validate_integer(value, name, min_value=None, max_value=None): """Make sure that value is a valid integer, potentially within range.""" try: return strutils.validate_integer(value, name, min_value, max_value) except ValueError as e: raise exception.InvalidInput(reason=e) def synchronized(name, semaphores=None, blocking=False): def wrap(f): @six.wraps(f) def inner(*args, **kwargs): lock_name = 'masakari-%s' % name int_lock = lockutils.internal_lock(lock_name, semaphores=semaphores) LOG.debug("Acquiring lock: %(lock_name)s on resource: " "%(resource)s", {'lock_name': lock_name, 'resource': f.__name__}) if not int_lock.acquire(blocking=blocking): raise exception.LockAlreadyAcquired(resource=name) try: return f(*args, **kwargs) finally: LOG.debug("Releasing lock: %(lock_name)s on resource: " "%(resource)s", {'lock_name': lock_name, 'resource': f.__name__}) int_lock.release() return inner return wrap masakari-9.0.0/masakari/__init__.py0000664000175000017500000000211213656747723017253 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. """ :mod:`masakari` -- Cloud IaaS Platform =================================== .. automodule:: masakari :platform: Unix :synopsis: Infrastructure-as-a-Service Cloud platform. """ import os os.environ['EVENTLET_NO_GREENDNS'] = 'yes' # NOTE(rpodolyaka): import oslo_service first, so that it makes eventlet hub # use a monotonic clock to avoid issues with drifts of system time (see # LP 1510234 for details) import oslo_service # noqa import eventlet # noqa masakari-9.0.0/masakari/version.py0000664000175000017500000000453313656747723017212 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from pbr import version as pbr_version MASAKARI_VENDOR = "OpenStack Foundation" MASAKARI_PRODUCT = "OpenStack Masakari" MASAKARI_PACKAGE = None # OS distro package version suffix loaded = False version_info = pbr_version.VersionInfo('masakari') version_string = version_info.version_string def _load_config(): # Don't load in global context, since we can't assume # these modules are accessible when distutils uses # this module from six.moves import configparser from oslo_config import cfg from oslo_log import log as logging global loaded, MASAKARI_VENDOR, MASAKARI_PRODUCT, MASAKARI_PACKAGE if loaded: return loaded = True cfgfile = cfg.CONF.find_file("release") if cfgfile is None: return try: cfg = configparser.RawConfigParser() cfg.read(cfgfile) if cfg.has_option("Masakari", "vendor"): MASAKARI_VENDOR = cfg.get("Masakari", "vendor") if cfg.has_option("Masakari", "product"): MASAKARI_PRODUCT = cfg.get("Masakari", "product") if cfg.has_option("Masakari", "package"): MASAKARI_PACKAGE = cfg.get("Masakari", "package") except Exception as ex: LOG = logging.getLogger(__name__) LOG.error("Failed to load %(cfgfile)s: %(ex)s", {'cfgfile': cfgfile, 'ex': ex}) def vendor_string(): _load_config() return MASAKARI_VENDOR def product_string(): _load_config() return MASAKARI_PRODUCT def package_string(): _load_config() return MASAKARI_PACKAGE def version_string_with_package(): if package_string() is None: return version_info.version_string() else: return "%s-%s" % (version_info.version_string(), package_string()) masakari-9.0.0/masakari/safe_utils.py0000664000175000017500000000256413656747723017665 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions that won't produce circular imports.""" def get_wrapped_function(function): """Get the method at the bottom of a stack of decorators.""" if not hasattr(function, '__closure__') or not function.__closure__: return function def _get_wrapped_function(function): if not hasattr(function, '__closure__') or not function.__closure__: return None for closure in function.__closure__: func = closure.cell_contents deeper_func = _get_wrapped_function(func) if deeper_func: return deeper_func elif hasattr(closure.cell_contents, '__call__'): return closure.cell_contents return _get_wrapped_function(function) masakari-9.0.0/masakari/common/0000775000175000017500000000000013656750011016416 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/common/__init__.py0000664000175000017500000000000013656747723020535 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/common/config.py0000664000175000017500000000315213656747723020256 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from oslo_middleware import cors def set_middleware_defaults(): """Update default configuration options for oslo.middleware.""" # CORS Defaults cfg.set_defaults(cors.CORS_OPTS, allow_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Identity-Status', 'X-Roles', 'X-Service-Catalog', 'X-User-Id', 'X-Tenant-Id'], expose_headers=['X-Auth-Token', 'X-Openstack-Request-Id', 'X-Subject-Token', 'X-Service-Token'], allow_methods=['GET', 'PUT', 'POST', 'DELETE', 'PATCH'] ) masakari-9.0.0/masakari/policies/0000775000175000017500000000000013656750011016735 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/policies/__init__.py0000664000175000017500000000215013656747723021064 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import itertools from masakari.policies import base from masakari.policies import extension_info from masakari.policies import hosts from masakari.policies import notifications from masakari.policies import segments from masakari.policies import versions def list_rules(): return itertools.chain( base.list_rules(), extension_info.list_rules(), hosts.list_rules(), notifications.list_rules(), segments.list_rules(), versions.list_rules() ) masakari-9.0.0/masakari/policies/hosts.py0000664000175000017500000000506113656747723020471 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base HOSTS = 'os_masakari_api:os-hosts:%s' rules = [ policy.DocumentedRuleDefault( name=HOSTS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, names, type, reserved, on_maintenance for all" " hosts.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}/hosts' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a host.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a host under given segment.", operations=[ { 'method': 'POST', 'path': '/segments/{segment_id}/hosts' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'update', check_str=base.RULE_ADMIN_API, description="Updates the editable attributes of an existing host.", operations=[ { 'method': 'PUT', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.DocumentedRuleDefault( name=HOSTS % 'delete', check_str=base.RULE_ADMIN_API, description="Deletes a host from given segment.", operations=[ { 'method': 'DELETE', 'path': '/segments/{segment_id}/hosts/{host_id}' } ]), policy.RuleDefault( name=HOSTS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Host API extensions to change the API.", ), ] def list_rules(): return rules 
masakari-9.0.0/masakari/policies/segments.py0000664000175000017500000000474413656747723021165 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base SEGMENTS = 'os_masakari_api:segments:%s' rules = [ policy.DocumentedRuleDefault( name=SEGMENTS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, names, description, recovery_method, " "service_type for all segments.", operations=[ { 'method': 'GET', 'path': '/segments' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a segment.", operations=[ { 'method': 'GET', 'path': '/segments/{segment_id}' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a segment.", operations=[ { 'method': 'POST', 'path': '/segments' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'update', check_str=base.RULE_ADMIN_API, description="Updates the editable attributes of an existing host.", operations=[ { 'method': 'PUT', 'path': '/segments/{segment_id}' } ]), policy.DocumentedRuleDefault( name=SEGMENTS % 'delete', check_str=base.RULE_ADMIN_API, description="Deletes a segment.", operations=[ { 'method': 'DELETE', 'path': '/segments/{segment_id}' } ]), policy.RuleDefault( name=SEGMENTS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Segment API extensions 
to change the API.", ), ] def list_rules(): return rules masakari-9.0.0/masakari/policies/extension_info.py0000664000175000017500000000306213656747723022357 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base EXTENSIONS = 'os_masakari_api:extensions:%s' rules = [ policy.DocumentedRuleDefault( name=EXTENSIONS % 'index', check_str=base.RULE_ADMIN_API, description="List available extensions.", operations=[ { 'method': 'GET', 'path': '/extensions' } ]), policy.DocumentedRuleDefault( name=EXTENSIONS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows information for an extension.", operations=[ { 'method': 'GET', 'path': '/extensions/{extensions_id}' } ]), policy.RuleDefault( name=EXTENSIONS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Extension Info API extensions to change the API.", ), ] def list_rules(): return rules masakari-9.0.0/masakari/policies/notifications.py0000664000175000017500000000371513656747723022206 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base NOTIFICATIONS = 'os_masakari_api:notifications:%s' rules = [ policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'index', check_str=base.RULE_ADMIN_API, description="Lists IDs, notification types, host_name, generated_time," " payload and status for all notifications.", operations=[ { 'method': 'GET', 'path': '/notifications' } ]), policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'detail', check_str=base.RULE_ADMIN_API, description="Shows details for a notification.", operations=[ { 'method': 'GET', 'path': '/notifications/{notification_id}' } ]), policy.DocumentedRuleDefault( name=NOTIFICATIONS % 'create', check_str=base.RULE_ADMIN_API, description="Creates a notification.", operations=[ { 'method': 'POST', 'path': '/notifications' } ]), policy.RuleDefault( name=NOTIFICATIONS % 'discoverable', check_str=base.RULE_ADMIN_API, description="Notification API extensions to change the API.", ), ] def list_rules(): return rules masakari-9.0.0/masakari/policies/versions.py0000664000175000017500000000231213656747723021175 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from masakari.policies import base VERSIONS = 'os_masakari_api:versions:%s' rules = [ policy.DocumentedRuleDefault( name=VERSIONS % 'index', check_str=base.RULE_ANY, description="List all versions.", operations=[ { 'method': 'GET', 'path': '/' } ]), policy.RuleDefault( name=VERSIONS % 'discoverable', check_str=base.RULE_ANY, description="Version API extensions to change the API.", ), ] def list_rules(): return rules masakari-9.0.0/masakari/policies/base.py0000664000175000017500000000235313656747723020244 0ustar zuulzuul00000000000000# Copyright (C) 2018 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_policy import policy MASAKARI_API = 'os_masakari_api' RULE_ADMIN_OR_OWNER = 'rule:admin_or_owner' RULE_ADMIN_API = 'rule:admin_api' RULE_ANY = '@' rules = [ policy.RuleDefault( "context_is_admin", "role:admin", "Decides what is required for the 'is_admin:True' check to succeed."), policy.RuleDefault( "admin_or_owner", "is_admin:True or project_id:%(project_id)s", "Default rule for most non-Admin APIs."), policy.RuleDefault( "admin_api", "is_admin:True", "Default rule for most Admin APIs.") ] def list_rules(): return rules masakari-9.0.0/masakari/config.py0000664000175000017500000000271113656747723016766 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log from masakari.common import config import masakari.conf from masakari.db.sqlalchemy import api as sqlalchemy_api from masakari import rpc from masakari import version CONF = masakari.conf.CONF def parse_args(argv, default_config_files=None, configure_db=True, init_rpc=True): log.register_options(CONF) # We use the oslo.log default log levels which includes suds=INFO # and add only the extra levels that Masakari needs log.set_defaults(default_log_levels=log.get_default_log_levels()) rpc.set_defaults(control_exchange='masakari') config.set_middleware_defaults() CONF(argv[1:], project='masakari', version=version.version_string(), default_config_files=default_config_files) if init_rpc: rpc.init(CONF) if configure_db: sqlalchemy_api.configure(CONF) masakari-9.0.0/masakari/hacking/0000775000175000017500000000000013656750011016532 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/hacking/__init__.py0000664000175000017500000000000013656747723020651 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/hacking/checks.py0000664000175000017500000004042313656747723020367 0ustar zuulzuul00000000000000# Copyright (c) 2016, NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re """ Guidelines for writing new hacking checks - Use only for Masakari specific tests. OpenStack general tests should be submitted to the common 'hacking' module. - Pick numbers in the range M3xx. 
Find the current test with the highest allocated number and then pick the next value. - Keep the test method code in the source file ordered based on the M3xx value. - List the new rule in the top level HACKING.rst file - Add test cases for each new rule to masakari/tests/unit/test_hacking.py """ UNDERSCORE_IMPORT_FILES = [] session_check = re.compile(r"\w*def [a-zA-Z0-9].*[(].*session.*[)]") cfg_re = re.compile(r".*\scfg\.") cfg_opt_re = re.compile(r".*[\s\[]cfg\.[a-zA-Z]*Opt\(") rule_default_re = re.compile(r".*RuleDefault\(") policy_enforce_re = re.compile(r".*_ENFORCER\.enforce\(") asse_trueinst_re = re.compile( r"(.)*assertTrue\(isinstance\((\w|\.|\'|\"|\[|\])+, " "(\w|\.|\'|\"|\[|\])+\)\)") asse_equal_type_re = re.compile( r"(.)*assertEqual\(type\((\w|\.|\'|\"|\[|\])+\), " "(\w|\.|\'|\"|\[|\])+\)") asse_equal_in_end_with_true_or_false_re = re.compile( r"assertEqual\("r"(\w|[][.'\"])+ in (\w|[][.'\", ])+, (True|False)\)") asse_equal_in_start_with_true_or_false_re = re.compile( r"assertEqual\("r"(True|False), (\w|[][.'\"])+ in (\w|[][.'\", ])+\)") asse_equal_end_with_none_re = re.compile( r"assertEqual\(.*?,\s+None\)$") asse_equal_start_with_none_re = re.compile( r"assertEqual\(None,") # NOTE(abhishekk): Next two regexes weren't united to one for more readability. # asse_true_false_with_in_or_not_in regex checks # assertTrue/False(A in B) cases where B argument has no spaces # asse_true_false_with_in_or_not_in_spaces regex checks cases # where B argument has spaces and starts/ends with [, ', ". # For example: [1, 2, 3], "some string", 'another string'. # We have to separate these regexes to escape a false positives # results. B argument should have spaces only if it starts # with [, ", '. Otherwise checking of string # "assertFalse(A in B and C in D)" will be false positives. # In this case B argument is "B and C in D". asse_true_false_with_in_or_not_in = re.compile( r"assert(True|False)\("r"(\w|[][.'\"])+( not)? 
# NOTE(review): the opening lines of this regex live above this chunk; the
# visible tail matches the canonical upstream form reconstructed here --
# confirm against the full file.
asse_true_false_with_in_or_not_in = re.compile(
    r"assert(True|False)\("
    r"(\w|[][.'\"])+( not)? in (\w|[][.'\",])"
    r"+(, .*)?\)")
asse_true_false_with_in_or_not_in_spaces = re.compile(
    r"assert(True|False)"
    r"\((\w|[][.'\"])+( not)? in [\[|'|\"](\w|"
    r"[][.'\", ])+[\[|'|\"](, .*)?\)")
asse_raises_regexp = re.compile(r"assertRaisesRegexp\(")
conf_attribute_set_re = re.compile(r"CONF\.[a-z0-9_.]+\s*=\s*\w")
# NOTE: the second fragment was a plain string literal containing "\(" and
# "\s", which are invalid escape sequences in str literals (DeprecationWarning
# on modern Python); both fragments are now raw strings.
translated_log = re.compile(
    r"(.)*LOG\.(audit|error|info|critical|exception)"
    r"\(\s*_\(\s*('|\")")
mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
string_translation = re.compile(r"[^_]*_\(\s*('|\")")
underscore_import_check = re.compile(r"(.)*import _(.)*")
import_translation_for_log_or_exception = re.compile(
    r"(.)*(from\smasakari.i18n\simport)\s_")
# We need this for cases where they have created their own _ function.
custom_underscore_check = re.compile(r"(.)*_\s*=\s*(.)*")
dict_constructor_with_list_copy_re = re.compile(r".*\bdict\((\[)?(\(|\[)")
http_not_implemented_re = re.compile(r"raise .*HTTPNotImplemented\(")
# NOTE: the named-group marker "<spawn_part>" had been lost from this pattern
# ("(?Pspawn(_n)?)" is not valid regex syntax); check_greenthread_spawns()
# below reads match.group('spawn_part'), so the group name is restored.
spawn_re = re.compile(
    r".*(eventlet|greenthread)\.(?P<spawn_part>spawn(_n)?)\(.*\)")
contextlib_nested = re.compile(r"^with (contextlib\.)?nested\(")
doubled_words_re = re.compile(
    r"\b(then?|[iao]n|i[fst]|but|f?or|at|and|[dt]o)\s+\1\b")
yield_not_followed_by_space = re.compile(r"^\s*yield(?:\(|{|\[|\"|').*$")

_all_log_levels = {'critical', 'error', 'exception', 'info', 'warning',
                   'debug'}
_all_hints = {'_', '_LE', '_LI', '_LW', '_LC'}

log_translation_re = re.compile(
    r".*LOG\.(%(levels)s)\(\s*(%(hints)s)\(" % {
        'levels': '|'.join(_all_log_levels),
        'hints': '|'.join(_all_hints),
    })


def no_db_session_in_public_api(logical_line, filename):
    """M301: public db api methods may not accept a session argument.

    NOTE(review): relies on ``session_check``, defined earlier in this
    module (above this chunk).
    """
    if "db/api.py" in filename:
        if session_check.match(logical_line):
            yield (0, "M301: public db api methods may not accept"
                      " session")


def use_timeutils_utcnow(logical_line, filename):
    """M302: timeutils.utcnow() must be used instead of datetime.(utc)now()."""
    # tools are OK to use the standard datetime module
    if "/tools/" in filename:
        return

    msg = ("M302: timeutils.utcnow() must be used instead of "
           "datetime.%s()")

    datetime_funcs = ['now', 'utcnow']
    for f in datetime_funcs:
        pos = logical_line.find('datetime.%s' % f)
        if pos != -1:
            yield (pos, msg % f)


def capital_cfg_help(logical_line, tokens):
    """M303: config option help strings should start with a capital letter.

    NOTE(review): relies on ``cfg_re``, defined earlier in this module.
    """
    msg = "M303: capitalize help string"

    if cfg_re.match(logical_line):
        for t in range(len(tokens)):
            if tokens[t][1] == "help":
                # tokens[t + 2] is the string literal following "help="
                txt = tokens[t + 2][1]
                if len(txt) > 1 and txt[1].islower():
                    yield (0, msg)


def assert_true_instance(logical_line):
    """Check for assertTrue(isinstance(a, b)) sentences

    M305
    """
    if asse_trueinst_re.match(logical_line):
        yield (0, "M305: assertTrue(isinstance(a, b)) sentences "
                  "not allowed")


def assert_equal_type(logical_line):
    """Check for assertEqual(type(A), B) sentences

    M306
    """
    if asse_equal_type_re.match(logical_line):
        yield (0, "M306: assertEqual(type(A), B) sentences not allowed")


def no_translate_logs(logical_line):
    """Check for 'LOG.*(_*("'

    OpenStack no longer supports log translation, so we shouldn't
    translate logs.

    * This check assumes that 'LOG' is a logger.

    M308
    """
    if log_translation_re.match(logical_line):
        yield (0, "M308: Log messages should not be translated")


def no_import_translation_in_tests(logical_line, filename):
    """Check for 'from masakari.i18n import _'

    M309
    """
    if 'masakari/tests/' in filename:
        res = import_translation_for_log_or_exception.match(logical_line)
        if res:
            yield (0, "M309 Don't import translation in tests")


def no_setting_conf_directly_in_tests(logical_line, filename):
    """Check for setting CONF.* attributes directly in tests

    The value can leak out of tests affecting how subsequent tests run.
    Using self.flags(option=value) is the preferred method to temporarily
    set config options in tests.

    M310
    """
    if 'masakari/tests/' in filename:
        res = conf_attribute_set_re.match(logical_line)
        if res:
            yield (0, "M310: Setting CONF.* attributes directly in "
                      "tests is forbidden. Use self.flags(option=value) "
                      "instead")


def no_mutable_default_args(logical_line):
    """M315: method default arguments must not be mutable ({} or [])."""
    msg = "M315: Method's default argument shouldn't be mutable!"
    if mutable_default_args.match(logical_line):
        yield (0, msg)


def check_explicit_underscore_import(logical_line, filename):
    """Check for explicit import of the _ function

    We need to ensure that any files that are using the _() function
    to translate logs are explicitly importing the _ function.  We
    can't trust unit test to catch whether the import has been
    added so we need to check for it here.

    NOTE(review): ``UNDERSCORE_IMPORT_FILES`` is a module-level list defined
    earlier in this module; it persists across files within one flake8 run.
    """
    # Build a list of the files that have _ imported.  No further
    # checking needed once it is found.
    if filename in UNDERSCORE_IMPORT_FILES:
        pass
    elif (underscore_import_check.match(logical_line) or
          custom_underscore_check.match(logical_line)):
        UNDERSCORE_IMPORT_FILES.append(filename)
    elif (translated_log.match(logical_line) or
          string_translation.match(logical_line)):
        yield (0, "M316: Found use of _() without explicit "
                  "import of _ !")


def use_jsonutils(logical_line, filename):
    """M317: oslo jsonutils must be used instead of the json module."""
    # tools are OK to use the standard json module
    if "/tools/" in filename:
        return

    msg = "M317: jsonutils.%(fun)s must be used instead of json.%(fun)s"

    if "json." in logical_line:
        json_funcs = ['dumps(', 'dump(', 'loads(', 'load(']
        for f in json_funcs:
            pos = logical_line.find('json.%s' % f)
            if pos != -1:
                yield (pos, msg % {'fun': f[:-1]})


def assert_true_or_false_with_in(logical_line):
    """Check for assertTrue/False(A in B), assertTrue/False(A not in B),

    assertTrue/False(A in B, message) or assertTrue/False(A not in B, message)
    sentences.

    M318
    """
    res = (asse_true_false_with_in_or_not_in.search(logical_line) or
           asse_true_false_with_in_or_not_in_spaces.search(logical_line))
    if res:
        yield (0, "M318: Use assertIn/NotIn(A, B) rather than "
                  "assertTrue/False(A in/not in B) when checking collection "
                  "contents.")


def assert_raises_regexp(logical_line):
    """Check for usage of deprecated assertRaisesRegexp

    M319
    """
    res = asse_raises_regexp.search(logical_line)
    if res:
        yield (0, "M319: assertRaisesRegex must be used instead "
                  "of assertRaisesRegexp")


def dict_constructor_with_list_copy(logical_line):
    """M320: prefer a dict comprehension over dict() on key-value pairs."""
    msg = ("M320: Must use a dict comprehension instead of a dict "
           "constructor with a sequence of key-value pairs.")
    if dict_constructor_with_list_copy_re.match(logical_line):
        yield (0, msg)


def assert_equal_in(logical_line):
    """Check for assertEqual(A in B, True), assertEqual(True, A in B),

    assertEqual(A in B, False) or assertEqual(False, A in B) sentences

    M321

    NOTE(review): relies on ``asse_equal_in_start_with_true_or_false_re`` and
    ``asse_equal_in_end_with_true_or_false_re``, defined earlier in this
    module.
    """
    res = (asse_equal_in_start_with_true_or_false_re.search(logical_line) or
           asse_equal_in_end_with_true_or_false_re.search(logical_line))
    if res:
        yield (0, "M321: Use assertIn/NotIn(A, B) rather than "
                  "assertEqual(A in B, True/False) when checking collection "
                  "contents.")


def check_greenthread_spawns(logical_line, physical_line, filename):
    """Check for use of greenthread.spawn(), greenthread.spawn_n(),

    eventlet.spawn(), and eventlet.spawn_n()

    M322
    """
    msg = ("M322: Use masakari.utils.%(spawn)s() rather than "
           "greenthread.%(spawn)s() and eventlet.%(spawn)s()")
    if "masakari/utils.py" in filename or "masakari/tests/" in filename:
        return

    match = re.match(spawn_re, logical_line)

    if match:
        yield (0, msg % {'spawn': match.group('spawn_part')})


def check_no_contextlib_nested(logical_line, filename):
    """M323: contextlib.nested is deprecated."""
    # NOTE: the first two fragments previously concatenated to
    # "...Python 2.7and later..."; the missing space is restored.
    msg = ("M323: contextlib.nested is deprecated. With Python 2.7 "
           "and later the with-statement supports multiple nested objects. "
           "See https://docs.python.org/2/library/contextlib.html"
           "#contextlib.nested for more information. "
           "masakari.test.nested() is an alternative as well.")

    if contextlib_nested.match(logical_line):
        yield (0, msg)


def check_config_option_in_central_place(logical_line, filename):
    """M324: config options belong under masakari/conf/.

    NOTE(review): relies on ``cfg_opt_re``, defined earlier in this module.
    """
    msg = ("M324: Config options should be in the central location "
           "'/masakari/conf/*'. Do not declare new config options outside "
           "of that folder.")
    # That's the correct location
    if "masakari/conf/" in filename:
        return

    # (pooja_jadhav) All config options (with exceptions that are clarified
    # in the list below) were moved to the central place. List below is for
    # all options that were impossible to move without doing a major impact
    # on code. Add full path to a module or folder.
    conf_exceptions = [
        # CLI opts are allowed to be outside of masakari/conf directory
        'masakari/cmd/manage.py',
    ]

    if any(f in filename for f in conf_exceptions):
        return

    if cfg_opt_re.match(logical_line):
        yield (0, msg)


def check_doubled_words(physical_line, filename):
    """Check for the common doubled-word typos

    M325
    """
    msg = ("M325: Doubled word '%(word)s' typo found")

    match = re.search(doubled_words_re, physical_line)

    if match:
        # This is a physical-line check, so a single (offset, message)
        # tuple is returned rather than yielded.
        return (0, msg % {'word': match.group(1)})


def check_python3_no_iteritems(logical_line):
    """M326: dict.iteritems() does not exist on Python 3."""
    msg = ("M326: Use dict.items() instead of dict.iteritems().")

    if re.search(r".*\.iteritems\(\)", logical_line):
        yield (0, msg)


def check_python3_no_iterkeys(logical_line):
    """M327: dict.iterkeys() does not exist on Python 3."""
    msg = ("M327: Use 'for key in dict' instead of 'for key in "
           "dict.iterkeys()'.")

    if re.search(r".*\.iterkeys\(\)", logical_line):
        yield (0, msg)


def check_python3_no_itervalues(logical_line):
    """M328: dict.itervalues() does not exist on Python 3."""
    msg = ("M328: Use dict.values() instead of dict.itervalues().")

    if re.search(r".*\.itervalues\(\)", logical_line):
        yield (0, msg)


def no_os_popen(logical_line):
    """Disallow 'os.popen('

    Deprecated library function os.popen() Replace it using subprocess
    https://bugs.launchpad.net/tempest/+bug/1529836

    M329
    """
    if 'os.popen(' in logical_line:
        yield (0, 'M329 Deprecated library function os.popen(). '
                  'Replace it using subprocess module. ')


def no_log_warn(logical_line):
    """Disallow 'LOG.warn('

    Deprecated LOG.warn(), instead use LOG.warning
    https://bugs.launchpad.net/senlin/+bug/1508442

    M331
    """

    msg = ("M331: LOG.warn is deprecated, please use LOG.warning!")
    if "LOG.warn(" in logical_line:
        yield (0, msg)


def yield_followed_by_space(logical_line):
    """Yield should be followed by a space.

    Yield should be followed by a space to clarify that yield is
    not a function. Adding a space may force the developer to rethink
    if there are unnecessary parentheses in the written code.

    Not correct: yield(x), yield(a, b)
    Correct: yield x, yield (a, b), yield a, b

    M332
    """
    if yield_not_followed_by_space.match(logical_line):
        yield (0, "M332: Yield keyword should be followed by a space.")


def check_policy_registration_in_central_place(logical_line, filename):
    """M333: policies must be registered under masakari/policies/.

    NOTE(review): relies on ``rule_default_re``, defined earlier in this
    module.
    """
    msg = ('M333: Policy registration should be in the central location '
           '"/masakari/policies/*".')
    # This is where registration should happen
    if "masakari/policies/" in filename:
        return
    # A couple of policy tests register rules
    if "masakari/tests/unit/test_policy.py" in filename:
        return

    if rule_default_re.match(logical_line):
        yield (0, msg)


def check_policy_enforce(logical_line, filename):
    """Look for uses of masakari.policy._ENFORCER.enforce()

    Now that policy defaults are registered in code the _ENFORCER.authorize
    method should be used. That ensures that only registered policies are
    used. Uses of _ENFORCER.enforce could allow unregistered policies to be
    used, so this check looks for uses of that method.

    M334

    NOTE(review): the docstring previously said M333, but the check emits
    M334; relies on ``policy_enforce_re``, defined earlier in this module.
    """

    msg = ('M334: masakari.policy._ENFORCER.enforce() should not be used. '
           'Use the authorize() method instead.')

    if policy_enforce_re.match(logical_line):
        yield (0, msg)


def factory(register):
    """Register every masakari hacking check with the flake8 plugin."""
    register(no_db_session_in_public_api)
    register(use_timeutils_utcnow)
    register(capital_cfg_help)
    register(no_import_translation_in_tests)
    register(assert_true_instance)
    register(assert_equal_type)
    register(assert_raises_regexp)
    register(no_translate_logs)
    register(no_setting_conf_directly_in_tests)
    register(no_mutable_default_args)
    register(check_explicit_underscore_import)
    register(use_jsonutils)
    register(assert_true_or_false_with_in)
    register(dict_constructor_with_list_copy)
    register(assert_equal_in)
    register(check_no_contextlib_nested)
    register(check_greenthread_spawns)
    register(check_config_option_in_central_place)
    register(check_doubled_words)
    register(check_python3_no_iteritems)
    register(check_python3_no_iterkeys)
    register(check_python3_no_itervalues)
    register(no_os_popen)
    register(no_log_warn)
    register(yield_followed_by_space)
    register(check_policy_registration_in_central_place)
    register(check_policy_enforce)
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
from oslo_serialization import jsonutils

import masakari.context
import masakari.exception
from masakari.objects import base

__all__ = [
    'init',
    'cleanup',
    'set_defaults',
    'add_extra_exmods',
    'clear_extra_exmods',
    'get_allowed_exmods',
    'RequestContextSerializer',
    'get_client',
    'get_server',
]

# NOTE(review): masakari.conf is not imported explicitly here; this works
# only because importing masakari.context (above) makes masakari.conf
# importable as a package attribute -- confirm, an explicit
# "import masakari.conf" would be safer.
CONF = masakari.conf.CONF

# Module-level singletons set up by init() and torn down by cleanup().
TRANSPORT = None
NOTIFICATION_TRANSPORT = None
NOTIFIER = None

# Exception modules whose exceptions may be re-raised across RPC.
ALLOWED_EXMODS = [
    masakari.exception.__name__,
]
EXTRA_EXMODS = []


def init(conf):
    """Initialise the RPC and notification transports and the notifier.

    Must be called once at service startup before get_client()/get_server()
    or get_versioned_notifier() are used.
    """
    global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER
    exmods = get_allowed_exmods()
    TRANSPORT = create_transport(get_transport_url())
    NOTIFICATION_TRANSPORT = messaging.get_notification_transport(
        conf, allowed_remote_exmods=exmods)
    serializer = RequestContextSerializer(JsonPayloadSerializer())
    NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
                                  serializer=serializer,
                                  topics=['versioned_notifications'])


def initialized():
    # True only when init() has run and cleanup() has not.
    return None not in [TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER]


def cleanup():
    """Tear down the transports created by init()."""
    global TRANSPORT, NOTIFICATION_TRANSPORT, NOTIFIER
    assert TRANSPORT is not None
    assert NOTIFICATION_TRANSPORT is not None
    assert NOTIFIER is not None
    TRANSPORT.cleanup()
    NOTIFICATION_TRANSPORT.cleanup()
    TRANSPORT = NOTIFICATION_TRANSPORT = NOTIFIER = None


def set_defaults(control_exchange):
    # Set the default exchange under which topics are scoped.
    messaging.set_transport_defaults(control_exchange)


def add_extra_exmods(*args):
    # Allow additional exception modules to cross the RPC boundary.
    EXTRA_EXMODS.extend(args)


def clear_extra_exmods():
    del EXTRA_EXMODS[:]


def get_allowed_exmods():
    return ALLOWED_EXMODS + EXTRA_EXMODS


def get_transport_url(url_str=None):
    # Parse a transport URL, falling back to the configured default.
    return messaging.TransportURL.parse(CONF, url_str)


def create_transport(url):
    exmods = get_allowed_exmods()
    return messaging.get_rpc_transport(CONF,
                                       url=url,
                                       allowed_remote_exmods=exmods)


class JsonPayloadSerializer(messaging.NoOpSerializer):
    """Serialize payload entities to JSON-safe primitives."""

    @staticmethod
    def serialize_entity(context, entity):
        return jsonutils.to_primitive(entity, convert_instances=True)


class RequestContextSerializer(messaging.Serializer):
    """Wrap another serializer and add RequestContext (de)serialization."""

    def __init__(self, base):
        # base may be None, in which case entities pass through untouched.
        self._base = base

    def serialize_entity(self, context, entity):
        if not self._base:
            return entity
        return self._base.serialize_entity(context, entity)

    def deserialize_entity(self, context, entity):
        if not self._base:
            return entity
        return self._base.deserialize_entity(context, entity)

    def serialize_context(self, context):
        return context.to_dict()

    def deserialize_context(self, context):
        return masakari.context.RequestContext.from_dict(context)


def get_client(target, version_cap=None, serializer=None):
    """Build an RPCClient for ``target``; init() must have run first."""
    assert TRANSPORT is not None
    serializer = RequestContextSerializer(serializer)
    return messaging.RPCClient(TRANSPORT,
                               target,
                               version_cap=version_cap,
                               serializer=serializer)


def get_server(target, endpoints, serializer=None):
    """Build an eventlet-executor RPC server; init() must have run first."""
    assert TRANSPORT is not None
    access_policy = dispatcher.DefaultRPCAccessPolicy
    serializer = RequestContextSerializer(serializer)
    return messaging.get_rpc_server(TRANSPORT,
                                    target,
                                    endpoints,
                                    executor='eventlet',
                                    serializer=serializer,
                                    access_policy=access_policy)


def get_versioned_notifier(publisher_id):
    # Returns the global notifier bound to the given publisher id.
    assert NOTIFIER is not None
    return NOTIFIER.prepare(publisher_id=publisher_id)


class RPCAPI(object):
    """Mixin class aggregating methods related to RPC API compatibility."""

    # Subclasses override these to identify their RPC topic/version.
    RPC_API_VERSION = '1.0'
    TOPIC = ''
    BINARY = ''

    def __init__(self):
        target = messaging.Target(topic=self.TOPIC,
                                  version=self.RPC_API_VERSION)
        serializer = base.MasakariObjectSerializer()
        self.client = get_client(target, serializer=serializer)
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Masakari base exception handling.

Includes decorator for re-raising Masakari-type exceptions.

SHOULD include dedicated exception logging.
"""

import functools
import inspect
import sys

from oslo_log import log as logging
from oslo_utils import excutils
import six
from six.moves import http_client as http
import webob.exc
from webob import util as woutil

import masakari.conf
from masakari.i18n import _
from masakari import safe_utils

LOG = logging.getLogger(__name__)

CONF = masakari.conf.CONF


class ConvertedException(webob.exc.WSGIHTTPException):
    """WSGI exception built from an arbitrary HTTP code/title/explanation."""

    def __init__(self, code, title="", explanation=""):
        self.code = code
        # There is a strict rule about constructing status line for HTTP:
        # '...Status-Line, consisting of the protocol version followed by a
        # numeric status code and its associated textual phrase, with each
        # element separated by SP characters'
        # (http://www.faqs.org/rfcs/rfc2616.html)
        # 'code' and 'title' can not be empty because they correspond
        # to numeric status code and its associated text
        if title:
            self.title = title
        else:
            try:
                self.title = woutil.status_reasons[self.code]
            except KeyError:
                # Unknown code: fall back to the generic reason for the
                # code's class (e.g. 5xx -> "Server Error").
                msg = "Improper or unknown HTTP status code used: %d"
                LOG.error(msg, code)
                self.title = woutil.status_generic_reasons[self.code // 100]
        self.explanation = explanation
        super(ConvertedException, self).__init__()


def _cleanse_dict(original):
    """Strip all admin_password, new_pass, rescue_pass keys from a dict."""
    return {k: v for k, v in original.items() if "_pass" not in k}


def wrap_exception(notifier=None, get_notifier=None):
    """This decorator wraps a method to catch any exceptions that may

    get thrown. It also optionally sends the exception to the notification
    system.
    """
    def inner(f):
        def wrapped(self, context, *args, **kw):
            # Don't store self or context in the payload, it now seems to
            # contain confidential information.
            try:
                return f(self, context, *args, **kw)
            except Exception as e:
                with excutils.save_and_reraise_exception():
                    if notifier or get_notifier:
                        payload = dict(exception=e)
                        wrapped_func = safe_utils.get_wrapped_function(f)
                        call_dict = inspect.getcallargs(wrapped_func, self,
                                                        context, *args, **kw)
                        # self can't be serialized and shouldn't be in the
                        # payload
                        call_dict.pop('self', None)
                        cleansed = _cleanse_dict(call_dict)
                        payload.update({'args': cleansed})

                        # If f has multiple decorators, they must use
                        # functools.wraps to ensure the name is
                        # propagated.
                        event_type = f.__name__

                        (notifier or get_notifier()).error(context,
                                                           event_type,
                                                           payload)

        return functools.wraps(f)(wrapped)
    return inner


class MasakariException(Exception):
    """Base Masakari Exception

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property. That msg_fmt will get printf'd
    with the keyword arguments provided to the constructor.
    """
    msg_fmt = _("An unknown exception occurred.")
    code = http.INTERNAL_SERVER_ERROR
    headers = {}
    safe = False

    def __init__(self, message=None, **kwargs):
        self.kwargs = kwargs

        if 'code' not in self.kwargs:
            try:
                self.kwargs['code'] = self.code
            except AttributeError:
                pass

        if not message:
            try:
                message = self.msg_fmt % kwargs
            except Exception:
                exc_info = sys.exc_info()
                # kwargs doesn't match a variable in the message
                # log the issue and the kwargs
                LOG.exception('Exception in string format operation')
                for name, value in kwargs.items():
                    LOG.error("%s: %s" % (name, value))  # noqa

                if CONF.fatal_exception_format_errors:
                    six.reraise(*exc_info)
                else:
                    # at least get the core message out if something happened
                    message = self.msg_fmt

        self.message = message
        super(MasakariException, self).__init__(message)

    def format_message(self):
        # NOTE: use the first argument to the python Exception object
        # which should be our full MasakariException message, (see __init__)
        return self.args[0]


class APIException(MasakariException):
    msg_fmt = _("Error while requesting %(service)s API.")

    def __init__(self, message=None, **kwargs):
        # Default the service name so msg_fmt always formats cleanly.
        if 'service' not in kwargs:
            kwargs['service'] = 'unknown'
        super(APIException, self).__init__(message, **kwargs)


class APITimeout(APIException):
    msg_fmt = _("Timeout while requesting %(service)s API.")


class Conflict(MasakariException):
    msg_fmt = _("Conflict")
    code = http.CONFLICT


class Invalid(MasakariException):
    msg_fmt = _("Bad Request - Invalid Parameters")
    code = http.BAD_REQUEST


class InvalidName(Invalid):
    msg_fmt = _("An invalid 'name' value was provided. "
                "The name must be: %(reason)s")


class InvalidInput(Invalid):
    msg_fmt = _("Invalid input received: %(reason)s")


class InvalidAPIVersionString(Invalid):
    msg_fmt = _("API Version String %(version)s is of invalid format. Must "
                "be of format MajorNum.MinorNum.")
class MalformedRequestBody(MasakariException):
    msg_fmt = _("Malformed message body: %(reason)s")


# NOTE: NotFound should only be used when a 404 error is
# appropriate to be returned
class NotFound(MasakariException):
    msg_fmt = _("Resource could not be found.")
    code = http.NOT_FOUND


class ConfigNotFound(NotFound):
    msg_fmt = _("Could not find config at %(path)s")


class Forbidden(MasakariException):
    msg_fmt = _("Forbidden")
    code = http.FORBIDDEN


class AdminRequired(Forbidden):
    msg_fmt = _("User does not have admin privileges")


class PolicyNotAuthorized(Forbidden):
    msg_fmt = _("Policy doesn't allow %(action)s to be performed.")


class PasteAppNotFound(MasakariException):
    msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")


class InvalidContentType(Invalid):
    msg_fmt = _("Invalid content type %(content_type)s.")


class VersionNotFoundForAPIMethod(Invalid):
    msg_fmt = _("API version %(version)s is not supported on this method.")


class InvalidGlobalAPIVersion(Invalid):
    msg_fmt = _("Version %(req_ver)s is not supported by the API. Minimum "
                "is %(min_ver)s and maximum is %(max_ver)s.")


class ApiVersionsIntersect(Invalid):
    # NOTE: the placeholders previously lacked the trailing 's' conversion
    # ("%(name) %(min_ver) ..."), so %-formatting always raised and the
    # exception fell back to the raw template text; fixed here.
    msg_fmt = _("Version of %(name)s %(min_ver)s %(max_ver)s intersects "
                "with another versions.")


class ValidationError(Invalid):
    msg_fmt = "%(detail)s"


class InvalidSortKey(Invalid):
    msg_fmt = _("Sort key supplied was not valid.")


class MarkerNotFound(NotFound):
    msg_fmt = _("Marker %(marker)s could not be found.")


class FailoverSegmentNotFound(NotFound):
    msg_fmt = _("No failover segment with id %(id)s.")


class HostNotFound(NotFound):
    msg_fmt = _("No host with id %(id)s.")


class NotificationNotFound(NotFound):
    msg_fmt = _("No notification with id %(id)s.")


class FailoverSegmentNotFoundByName(FailoverSegmentNotFound):
    msg_fmt = _("Failover segment with name %(segment_name)s could not "
                "be found.")


class HostNotFoundByName(HostNotFound):
    msg_fmt = _("Host with name %(host_name)s could not be found.")


class HypervisorNotFoundByName(NotFound):
    msg_fmt = _("Hypervisor with name %(hypervisor_name)s could not be found.")


class FailoverSegmentExists(MasakariException):
    msg_fmt = _("Failover segment with name %(name)s already exists.")


class HostExists(MasakariException):
    msg_fmt = _("Host with name %(name)s already exists.")


class Unauthorized(MasakariException):
    msg_fmt = _("Not authorized.")
    code = http.UNAUTHORIZED


class ObjectActionError(MasakariException):
    msg_fmt = _('Object action %(action)s failed because: %(reason)s')


class OrphanedObjectError(MasakariException):
    msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')


class DuplicateNotification(Invalid):
    msg_fmt = _('Duplicate notification received for type: %(type)s')
    code = http.CONFLICT


class HostOnMaintenanceError(Invalid):
    msg_fmt = _('Host %(host_name)s is already under maintenance.')
    code = http.CONFLICT


class HostRecoveryFailureException(MasakariException):
    msg_fmt = _('Failed to execute host recovery.')


class InstanceRecoveryFailureException(MasakariException):
    msg_fmt = _('Failed to execute instance recovery workflow.')


class SkipInstanceRecoveryException(MasakariException):
    msg_fmt = _('Skipping execution of instance recovery workflow.')


class SkipProcessRecoveryException(MasakariException):
    msg_fmt = _('Skipping execution of process recovery workflow.')


class SkipHostRecoveryException(MasakariException):
    msg_fmt = _('Skipping execution of host recovery workflow.')


class ProcessRecoveryFailureException(MasakariException):
    msg_fmt = _('Failed to execute process recovery workflow.')


class DBNotAllowed(MasakariException):
    msg_fmt = _('%(binary)s attempted direct database access which is '
                'not allowed by policy')


class FailoverSegmentInUse(Conflict):
    msg_fmt = _("Failover segment %(uuid)s can't be updated as it is in-use "
                "to process notifications.")


class HostInUse(Conflict):
    msg_fmt = _("Host %(uuid)s can't be updated as it is in-use to process "
                "notifications.")


class ReservedHostsUnavailable(MasakariException):
    msg_fmt = _('No reserved_hosts available for evacuation.')


class LockAlreadyAcquired(MasakariException):
    msg_fmt = _('Lock is already acquired on %(resource)s.')


class IgnoreInstanceRecoveryException(MasakariException):
    msg_fmt = _('Instance recovery is ignored.')


class HostNotFoundUnderFailoverSegment(HostNotFound):
    msg_fmt = _("Host '%(host_uuid)s' under failover_segment "
                "'%(segment_uuid)s' could not be found.")
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import errno
import platform
import socket
import sys

import masakari.conf
import masakari.context
import masakari.utils

CONF = masakari.conf.CONF


def get_test_admin_context():
    """Return an admin RequestContext suitable for unit tests."""
    return masakari.context.get_admin_context()


def is_linux():
    """Return True when the test host runs Linux."""
    return platform.system() == 'Linux'


def coreutils_readlink_available():
    """Return True when a GNU-style ``readlink -nm`` is usable."""
    _out, err = masakari.utils.trycmd('readlink', '-nm', '/')
    return err == ''


def is_ipv6_supported():
    """Best-effort check that the host has working IPv6 support."""
    supported = socket.has_ipv6
    try:
        probe = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
        probe.close()
    except socket.error as exc:
        if exc.errno != errno.EAFNOSUPPORT:
            raise
        supported = False

    # Even with socket-level support, require at least one interface
    # carrying an IPv6 address (Linux exposes these via /proc/net/if_inet6).
    if supported and sys.platform.startswith('linux'):
        try:
            with open('/proc/net/if_inet6') as ifaces:
                if not ifaces.read():
                    supported = False
        except IOError:
            supported = False

    return supported


def get_api_version(request):
    """Extract the major API version digit from a '/vN/...' path, if any."""
    version_char = request.path[2:3]
    if version_char.isdigit():
        return int(version_char)


def _compare_args(args1, args2, cmp):
    """Return True when cmp() holds for every zipped positional pair."""
    for left, right in zip(args1, args2):
        if not cmp(left, right):
            return False
    return True


def _compare_kwargs(kwargs1, kwargs2, cmp):
    """Return True when cmp() holds for every key in either kwargs dict."""
    every_key = set(kwargs1) | set(kwargs2)
    return all(cmp(kwargs1[k], kwargs2[k]) for k in every_key)
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import fixture as config_fixture from oslo_policy import opts as policy_opts import masakari.conf from masakari.conf import paths from masakari import config CONF = masakari.conf.CONF class ConfFixture(config_fixture.Config): """Fixture to manage global conf settings.""" def setUp(self): super(ConfFixture, self).setUp() self.conf.set_default('api_paste_config', paths.state_path_def('etc/masakari/api-paste.ini' ), group='wsgi') config.parse_args([], default_config_files=[], configure_db=False, init_rpc=False) self.conf.set_default('connection', "sqlite://", group='database') policy_opts.set_defaults(self.conf) masakari-9.0.0/masakari/tests/unit/__init__.py0000664000175000017500000000151113656747723021376 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""
:mod:`masakari.tests.unit` -- Masakari Unittests
=====================================================

.. automodule:: masakari.tests.unit
   :platform: Unix
"""

import eventlet

eventlet.monkey_patch(os=False)


# --- masakari/tests/unit/monkey_patch_example/__init__.py -----------------
# Copyright 2016 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Example Module for testing utils.monkey_patch()."""

# Records the ``name`` of every patched function that gets invoked, in
# call order; inspected by the monkey-patch unit tests.
CALLED_FUNCTION = []


def example_decorator(name, function):
    """decorator for notify which is used from utils.monkey_patch()

    :param name: name of the function
    :param function: - object of the function
    :returns: function -- decorated function
    """
    def _recording_wrapper(*args, **kwargs):
        CALLED_FUNCTION.append(name)
        return function(*args, **kwargs)

    return _recording_wrapper
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Example Module B for testing utils.monkey_patch()."""


def example_function_b():
    """Free function used as a monkey-patch target."""
    return 'Example function'


class ExampleClassB(object):
    """Class whose methods are monkey-patch targets."""

    def example_method(self):
        """Return a fixed marker string."""
        return 'Example method'

    def example_method_add(self, arg1, arg2):
        """Return the sum of the two arguments."""
        total = arg1 + arg2
        return total


# --- masakari/tests/unit/monkey_patch_example/example_a.py ----------------
# Copyright 2016 NTT DATA
# All Rights Reserved.
# (Apache License 2.0; see header above.)
"""Example Module A for testing utils.monkey_patch()."""


def example_function_a():
    """Free function used as a monkey-patch target."""
    return 'Example function'


class ExampleClassA(object):
    """Class whose methods are monkey-patch targets."""

    def example_method(self):
        """Return a fixed marker string."""
        return 'Example method'

    def example_method_add(self, arg1, arg2):
        """Return the sum of the two arguments."""
        total = arg1 + arg2
        return total
# You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import textwrap
from unittest import mock

import ddt
import pep8

from masakari.hacking import checks
from masakari import test


@ddt.ddt
class HackingTestCase(test.NoDBTestCase):
    """This class tests the hacking checks in masakari.hacking.checks by

    passing strings to the check methods like the pep8/flake8 parser would.
    The parser loops over each line in the file and then passes the
    parameters to the check method. The parameter names in the check method
    dictate what type of object is passed to the check method. The
    parameter types are::

        logical_line: A processed line with the following modifications:
            - Multi-line statements converted to a single line.
            - Stripped left and right.
            - Contents of strings replaced with "xxx" of same length.
            - Comments removed.
        physical_line: Raw line of text from the input file.
        lines: a list of the raw lines from the input file
        tokens: the tokens that contribute to this logical line
        line_number: line number in the input file
        total_lines: number of lines in the input file
        blank_lines: blank lines before this one
        indent_char: indentation character in this file (" " or "\t")
        indent_level: indentation (with tabs expanded to multiples of 8)
        previous_indent_level: indentation on previous line
        previous_logical: previous logical line
        filename: Path of the file being run through pep8

    When running a test on a check method the return will be False/None if
    there is no violation in the sample input. If there is an error a tuple
    is returned with a position in the line, and a message. So to check the
    result just assertTrue if the check is expected to fail and assertFalse
    if it should pass.
    """

    def test_assert_true_instance(self):
        # M305 fires on assertTrue(isinstance(...)) and only on that form.
        self.assertEqual(len(list(checks.assert_true_instance(
            "self.assertTrue(isinstance(e, "
            "exception.BuildAbortException))"))), 1)

        self.assertEqual(
            len(list(checks.assert_true_instance("self.assertTrue()"))), 0)

    def test_assert_equal_type(self):
        # M306 fires on assertEqual(type(A), B).
        self.assertEqual(len(list(checks.assert_equal_type(
            "self.assertEqual(type(als['QuicAssist']), list)"))), 1)

        self.assertEqual(
            len(list(checks.assert_equal_type("self.assertTrue()"))), 0)

    def test_assert_equal_in(self):
        # M321 fires when a membership test is compared against True/False
        # on either side, but not when the argument is a generator call.
        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(a in b, True)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual('str' in 'string', True)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(any(a==1 for a in b), True)"))), 0)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(True, a in b)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(True, 'str' in 'string')"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(True, any(a==1 for a in b))"))), 0)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(a in b, False)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual('str' in 'string', False)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(any(a==1 for a in b), False)"))), 0)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(False, a in b)"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(False, 'str' in 'string')"))), 1)

        self.assertEqual(len(list(checks.assert_equal_in(
            "self.assertEqual(False, any(a==1 for a in b))"))), 0)

    def test_assert_true_or_false_with_in_or_not_in(self):
        # M318 fires on assertTrue/assertFalse of "in"/"not in" expressions,
        # with or without a message, but not on generator expressions or
        # compound boolean conditions.
        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in B)"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertFalse(A in B)"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A not in B)"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertFalse(A not in B)"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in B, 'some message')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertFalse(A in B, 'some message')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A not in B, 'some message')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertFalse(A not in B, 'some message')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in 'some string with spaces')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in 'some string with spaces')"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in ['1', '2', '3'])"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(A in [1, 2, 3])"))), 1)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(any(A > 5 for A in B))"))), 0)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertTrue(any(A > 5 for A in B), 'some message')"))), 0)

        self.assertEqual(len(list(checks.assert_true_or_false_with_in(
            "self.assertFalse(some in list1 and some2 in list2)"))), 0)

    # NOTE(review): test_no_setting_conf_directly_in_tests continues past
    # the end of this chunk and is therefore not reproduced here.
1) # Shouldn't fail with comparisons self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option == 'foo'", "masakari/tests/test_foo.py"))), 0) self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option != 1", "masakari/tests/test_foo.py"))), 0) # Shouldn't fail since not in masakari/tests/ self.assertEqual(len(list(checks.no_setting_conf_directly_in_tests( "CONF.option = 1", "masakari/compute/foo.py"))), 0) def test_no_mutable_default_args(self): self.assertEqual(1, len(list(checks.no_mutable_default_args( "def get_info_from_bdm(virt_type, bdm, mapping=[])")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined = []")))) self.assertEqual(0, len(list(checks.no_mutable_default_args( "defined, undefined = [], {}")))) def test_check_explicit_underscore_import(self): self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "masakari/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files.py"))), 1) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from masakari.i18n import _", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "LOG.info(_('My info message'))", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "from masakari.i18n import _", "masakari/tests/other_files2.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = _('My message')", "masakari/tests/other_files2.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "_ = translations.ugettext", "masakari/tests/other_files3.py"))), 0) self.assertEqual(len(list(checks.check_explicit_underscore_import( "msg = 
_('My message')", "masakari/tests/other_files3.py"))), 0) # We are patching pep8 so that only the check under test is actually # installed. @mock.patch('pep8._checks', {'physical_line': {}, 'logical_line': {}, 'tree': {}}) def _run_check(self, code, checker, filename=None): pep8.register_check(checker) lines = textwrap.dedent(code).lstrip().splitlines(True) checker = pep8.Checker(filename=filename, lines=lines) checker.check_all() checker.report._deferred_print.sort() return checker.report._deferred_print def _assert_has_errors(self, code, checker, expected_errors=None, filename=None): actual_errors = [e[:3] for e in self._run_check(code, checker, filename)] self.assertEqual(expected_errors or [], actual_errors) def _assert_has_no_errors(self, code, checker, filename=None): self._assert_has_errors(code, checker, filename=filename) def test_oslo_assert_raises_regexp(self): code = """ self.assertRaisesRegexp(ValueError, "invalid literal for.*XYZ'$", int, 'XYZ') """ self._assert_has_errors(code, checks.assert_raises_regexp, expected_errors=[(1, 0, "M319")]) def test_dict_constructor_with_list_copy(self): self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([(i, connect_info[i])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " attrs = dict([(k, _from_json(v))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " type_names = dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict((value, key) for key, value in")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( "foo(param=dict((k, v) for k, v in bar.items()))")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dict([[i,i] for i in range(3)])")))) self.assertEqual(1, len(list(checks.dict_constructor_with_list_copy( " dd = dict([i,i] for i in range(3))")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " create_kwargs = 
dict(snapshot=snapshot,")))) self.assertEqual(0, len(list(checks.dict_constructor_with_list_copy( " self._render_dict(xml, data_el, data.__dict__)")))) def test_check_contextlib_use(self): code = """ with test.nested( mock.patch.object(network_model.NetworkInfo, 'hydrate'), mock.patch.object(objects.InstanceInfoCache, 'save'), ) as ( hydrate_mock, save_mock ) """ filename = "masakari/api/openstack/ha/test.py" self._assert_has_no_errors(code, checks.check_no_contextlib_nested, filename=filename) code = """ with contextlib.nested( mock.patch.object(network_model.NetworkInfo, 'hydrate'), mock.patch.object(objects.InstanceInfoCache, 'save'), ) as ( hydrate_mock, save_mock ) """ filename = "masakari/api/openstack/compute/ha/test.py" errors = [(1, 0, 'M323')] self._assert_has_errors(code, checks.check_no_contextlib_nested, expected_errors=errors, filename=filename) def test_check_greenthread_spawns(self): errors = [(1, 0, "M322")] code = "greenthread.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "greenthread.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.greenthread.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "eventlet.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_errors(code, checks.check_greenthread_spawns, expected_errors=errors) code = "masakari.utils.spawn(func, arg1, kwarg1=kwarg1)" self._assert_has_no_errors(code, checks.check_greenthread_spawns) code = "masakari.utils.spawn_n(func, arg1, kwarg1=kwarg1)" self._assert_has_no_errors(code, checks.check_greenthread_spawns) def test_config_option_regex_match(self): def should_match(code): 
self.assertTrue(checks.cfg_opt_re.match(code)) def should_not_match(code): self.assertFalse(checks.cfg_opt_re.match(code)) should_match("opt = cfg.StrOpt('opt_name')") should_match("opt = cfg.IntOpt('opt_name')") should_match("opt = cfg.DictOpt('opt_name')") should_match("opt = cfg.Opt('opt_name')") should_match("opts=[cfg.Opt('opt_name')]") should_match(" cfg.Opt('opt_name')") should_not_match("opt_group = cfg.OptGroup('opt_group_name')") def test_check_config_option_in_central_place(self): errors = [(1, 0, "M324")] code = """ opts = [ cfg.StrOpt('random_opt', default='foo', help='I am here to do stuff'), ] """ # option at the right place in the tree self._assert_has_no_errors(code, checks.check_config_option_in_central_place, filename="masakari/conf/serial_console.py") self._assert_has_errors(code, checks.check_config_option_in_central_place, filename="masakari/cmd/serialproxy.py", expected_errors=errors) def test_check_doubled_words(self): errors = [(1, 0, "M325")] # Explicit addition of line-ending here and below since this isn't a # block comment and without it we trigger #1804062. 
Artificial break is # necessary to stop flake8 detecting the test code = "'This is the" + " the best comment'\n" self._assert_has_errors(code, checks.check_doubled_words, expected_errors=errors) code = "'This is the then best comment'\n" self._assert_has_no_errors(code, checks.check_doubled_words) def test_dict_iteritems(self): self.assertEqual(1, len(list(checks.check_python3_no_iteritems( "obj.iteritems()")))) self.assertEqual(0, len(list(checks.check_python3_no_iteritems( "ob.items()")))) def test_dict_iterkeys(self): self.assertEqual(1, len(list(checks.check_python3_no_iterkeys( "for key in obj.iterkeys()")))) self.assertEqual(0, len(list(checks.check_python3_no_iterkeys( "for key in ob")))) def test_dict_itervalues(self): self.assertEqual(1, len(list(checks.check_python3_no_itervalues( "obj.itervalues()")))) self.assertEqual(0, len(list(checks.check_python3_no_itervalues( "ob.values()")))) def test_no_os_popen(self): code = """ import os foobar_cmd = "foobar -get -beer" answer = os.popen(foobar_cmd).read() if answer == nok": try: os.popen(os.popen('foobar -beer -please')).read() except ValueError: go_home() """ errors = [(4, 0, 'M329'), (8, 8, 'M329')] self._assert_has_errors(code, checks.no_os_popen, expected_errors=errors) def test_no_log_warn(self): code = """ LOG.warn("LOG.warn is deprecated") """ errors = [(1, 0, 'M331')] self._assert_has_errors(code, checks.no_log_warn, expected_errors=errors) code = """ LOG.warning("LOG.warn is deprecated") """ self._assert_has_no_errors(code, checks.no_log_warn) @ddt.data('LOG.info(_LI("Bad"))', 'LOG.warning(_LW("Bad"))', 'LOG.error(_LE("Bad"))', 'LOG.exception(_("Bad"))', 'LOG.debug(_("Bad"))', 'LOG.critical(_LC("Bad"))') def test_no_translate_logs(self, log_statement): self.assertEqual(1, len(list(checks.no_translate_logs(log_statement)))) errors = [(1, 0, 'M308')] self._assert_has_errors(log_statement, checks.no_translate_logs, expected_errors=errors) def test_yield_followed_by_space(self): code = """ yield(x, y) 
yield{"type": "test"} yield[a, b, c] yield"test" yield'test' """ errors = [(x + 1, 0, 'M332') for x in range(5)] self._assert_has_errors(code, checks.yield_followed_by_space, expected_errors=errors) code = """ yield x yield (x, y) yield {"type": "test"} yield [a, b, c] yield "test" yield 'test' yieldx_func(a, b) """ self._assert_has_no_errors(code, checks.yield_followed_by_space) def test_check_policy_registration_in_central_place(self): errors = [(3, 0, "M333")] code = """ from masakari import policy policy.RuleDefault('context_is_admin', 'role:admin') """ # registration in the proper place self._assert_has_no_errors( code, checks.check_policy_registration_in_central_place, filename="masakari/policies/base.py") # option at a location which is not in scope right now self._assert_has_errors( code, checks.check_policy_registration_in_central_place, filename="masakari/api/openstack/ha/non_existent.py", expected_errors=errors) def test_check_policy_enforce(self): errors = [(3, 0, "M334")] code = """ from masakari import policy policy._ENFORCER.enforce('context_is_admin', target, credentials) """ self._assert_has_errors(code, checks.check_policy_enforce, expected_errors=errors) def test_check_policy_enforce_does_not_catch_other_enforce(self): # Simulate a different enforce method defined in masakari code = """ from masakari import foo foo.enforce() """ self._assert_has_no_errors(code, checks.check_policy_enforce) masakari-9.0.0/masakari/tests/unit/test_masakari_manage.py0000664000175000017500000000612513656747723024004 0ustar zuulzuul00000000000000# Copyright 2017 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from unittest import mock from masakari.cmd import manage from masakari import context from masakari.db import api as db_api from masakari import test class DBCommandsTestCase(test.TestCase): def setUp(self): super(DBCommandsTestCase, self).setUp() self.commands = manage.DbCommands() self.context = context.get_admin_context() sys.argv = ['masakari-manage'] @mock.patch.object(db_api, 'purge_deleted_rows') @mock.patch.object(context, 'get_admin_context') def test_purge_command(self, mock_context, mock_db_purge): mock_context.return_value = self.context self.commands.purge(0, 100) mock_db_purge.assert_called_once_with(self.context, 0, 100) def test_purge_negative_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, -1, 100) self.assertEqual("Must supply a non-negative value for age.", ex.code) def test_purge_invalid_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, "test", 100) self.assertEqual("Invalid value for age, test", ex.code) def test_purge_command_exceeded_age_in_days(self): ex = self.assertRaises(SystemExit, self.commands.purge, 1000000, 50) self.assertEqual("Maximal age is count of days since epoch.", ex.code) def test_purge_invalid_max_rows(self): ex = self.assertRaises(SystemExit, self.commands.purge, 0, 0) self.assertEqual("Must supply value greater than 0 for max_rows.", ex.code) def test_purge_negative_max_rows(self): ex = self.assertRaises(SystemExit, self.commands.purge, 0, -5) self.assertEqual("Invalid input received: max_rows must be >= -1", ex.code) @mock.patch.object(db_api, 
'purge_deleted_rows') @mock.patch.object(context, 'get_admin_context') def test_purge_max_rows(self, mock_context, mock_db_purge): mock_context.return_value = self.context value = (2 ** 31) - 1 self.commands.purge(age_in_days=1, max_rows=value) mock_db_purge.assert_called_once_with(self.context, 1, value) def test_purge_command_exceeded_maximum_rows(self): # value(2 ** 31) is greater than max_rows(2147483647) by 1. value = 2 ** 31 ex = self.assertRaises(SystemExit, self.commands.purge, age_in_days=1, max_rows=value) expected = "Invalid input received: max_rows must be <= 2147483647" self.assertEqual(expected, ex.code) masakari-9.0.0/masakari/tests/unit/matchers.py0000664000175000017500000000332013656747723021445 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Matcher classes to be used inside of the testtools assertThat framework.""" import six import testtools.matchers class EncodedByUTF8(object): def match(self, obj): if isinstance(obj, six.binary_type): if hasattr(obj, "decode"): try: obj.decode("utf-8") except UnicodeDecodeError: return testtools.matchers.Mismatch( "%s is not encoded in UTF-8." % obj) elif isinstance(obj, six.text_type): try: obj.encode("utf-8", "strict") except UnicodeDecodeError: return testtools.matchers.Mismatch("%s cannot be " "encoded in UTF-8." % obj) else: reason = ("Type of '%(obj)s' is '%(obj_type)s', " "should be '%(correct_type)s'." 
% { "obj": obj, "obj_type": type(obj).__name__, "correct_type": six.binary_type.__name__ }) return testtools.matchers.Mismatch(reason) masakari-9.0.0/masakari/tests/unit/test_api_validation.py0000664000175000017500000005231313656747723023667 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import re import fixtures from jsonschema import exceptions as jsonschema_exc import six from six.moves import http_client as http import sys from masakari.api import api_version_request as api_version from masakari.api import validation from masakari.api.validation import parameter_types from masakari.api.validation import validators from masakari import exception from masakari import test class FakeRequest(object): api_version_request = api_version.APIVersionRequest("1.0") environ = {} class ValidationRegex(test.NoDBTestCase): def test_build_regex_range(self): def _get_all_chars(): for i in range(0x7F): yield six.unichr(i) self.useFixture(fixtures.MonkeyPatch( 'masakari.api.validation.parameter_types._get_all_chars', _get_all_chars)) r = parameter_types._build_regex_range(ws=False) self.assertEqual(r, re.escape('!') + '-' + re.escape('~')) # if we allow whitespace the range starts earlier r = parameter_types._build_regex_range(ws=True) self.assertEqual(r, re.escape(' ') + '-' + re.escape('~')) # excluding a character will give us 2 ranges r = parameter_types._build_regex_range(ws=True, exclude=['A']) 
self.assertEqual(r, re.escape(' ') + '-' + re.escape('@') + 'B' + '-' + re.escape('~')) # inverting which gives us all the initial unprintable characters. r = parameter_types._build_regex_range(ws=False, invert=True) self.assertEqual(r, re.escape('\x00') + '-' + re.escape(' ')) # excluding characters that create a singleton. Naively this would be: # ' -@B-BD-~' which seems to work, but ' -@BD-~' is more natural. r = parameter_types._build_regex_range(ws=True, exclude=['A', 'C']) self.assertEqual(r, re.escape(' ') + '-' + re.escape('@') + 'B' + 'D' + '-' + re.escape('~')) # ws=True means the positive regex has printable whitespaces, # so the inverse will not. The inverse will include things we # exclude. r = parameter_types._build_regex_range( ws=True, exclude=['A', 'B', 'C', 'Z'], invert=True) self.assertEqual(r, re.escape('\x00') + '-' + re.escape('\x1f') + 'A-CZ') class APIValidationTestCase(test.NoDBTestCase): def setUp(self, schema=None): super(APIValidationTestCase, self).setUp() self.post = None if schema is not None: @validation.schema(request_body_schema=schema) def post(req, body): return 'Validation succeeded.' 
self.post = post def check_validation_error(self, method, body, expected_detail, req=None): if not req: req = FakeRequest() try: method(body=body, req=req,) except exception.ValidationError as ex: self.assertEqual(http.BAD_REQUEST, ex.kwargs['code']) if isinstance(expected_detail, list): self.assertIn(ex.kwargs['detail'], expected_detail, 'Exception details did not match expected') elif not re.match(expected_detail, ex.kwargs['detail']): self.assertEqual(expected_detail, ex.kwargs['detail'], 'Exception details did not match expected') except Exception as ex: self.fail('An unexpected exception happens: %s' % ex) else: self.fail('Any exception does not happen.') class FormatCheckerTestCase(test.NoDBTestCase): def test_format_checker_failed(self): format_checker = validators.FormatChecker() exc = self.assertRaises(jsonschema_exc.FormatError, format_checker.check, " ", "name") self.assertIsInstance(exc.cause, exception.InvalidName) self.assertEqual("An invalid 'name' value was provided. The name must " "be: printable characters. " "Can not start or end with whitespace.", exc.cause.format_message()) def test_format_checker_failed_with_non_string(self): checks = ["name"] format_checker = validators.FormatChecker() for check in checks: exc = self.assertRaises(jsonschema_exc.FormatError, format_checker.check, None, "name") self.assertIsInstance(exc.cause, exception.InvalidName) self.assertEqual("An invalid 'name' value was provided. The name " "must be: printable characters. 
" "Can not start or end with whitespace.", exc.cause.format_message()) class RequiredDisableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, } super(RequiredDisableTestCase, self).setUp(schema=schema) def test_validate_required_disable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'abc': 1}, req=FakeRequest()), 'Validation succeeded.') class RequiredEnableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'] } super(RequiredEnableTestCase, self).setUp(schema=schema) def test_validate_required_enable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') def test_validate_required_enable_fails(self): detail = "'foo' is a required property" self.check_validation_error(self.post, body={'abc': 1}, expected_detail=detail) class AdditionalPropertiesEnableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'], } super(AdditionalPropertiesEnableTestCase, self).setUp(schema=schema) def test_validate_additionalProperties_enable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': 1, 'ext': 1}, req=FakeRequest()), 'Validation succeeded.') class AdditionalPropertiesDisableTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'integer', }, }, 'required': ['foo'], 'additionalProperties': False, } super(AdditionalPropertiesDisableTestCase, self).setUp(schema=schema) def test_validate_additionalProperties_disable(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') def 
test_validate_additionalProperties_disable_fails(self): detail = "Additional properties are not allowed ('ext' was unexpected)" self.check_validation_error(self.post, body={'foo': 1, 'ext': 1}, expected_detail=detail) class PatternPropertiesTestCase(APIValidationTestCase): def setUp(self): schema = { 'patternProperties': { '^[a-zA-Z0-9]{1,10}$': { 'type': 'string' }, }, 'additionalProperties': False, } super(PatternPropertiesTestCase, self).setUp(schema=schema) def test_validate_patternProperties(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': 'bar'}, req=FakeRequest())) def test_validate_patternProperties_fails(self): details = [ "Additional properties are not allowed ('__' was unexpected)", "'__' does not match any of the regexes: '^[a-zA-Z0-9]{1,10}$'" ] self.check_validation_error(self.post, body={'__': 'bar'}, expected_detail=details) details = [ "'' does not match any of the regexes: '^[a-zA-Z0-9]{1,10}$'", "Additional properties are not allowed ('' was unexpected)" ] self.check_validation_error(self.post, body={'': 'bar'}, expected_detail=details) details = [ ("'0123456789a' does not match any of the regexes: " "'^[a-zA-Z0-9]{1,10}$'"), ("Additional properties are not allowed ('0123456789a' was" " unexpected)") ] self.check_validation_error(self.post, body={'0123456789a': 'bar'}, expected_detail=details) if sys.version[:3] in ['3.5', '3.6', '3.7']: detail = "expected string or bytes-like object" else: detail = "expected string or buffer" self.check_validation_error(self.post, body={None: 'bar'}, expected_detail=detail) class StringTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', }, }, } super(StringTestCase, self).setUp(schema=schema) def test_validate_string(self): self.assertEqual(self.post(body={'foo': 'abc'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0'}, req=FakeRequest()), 'Validation succeeded.') 
self.assertEqual(self.post(body={'foo': ''}, req=FakeRequest()), 'Validation succeeded.') def test_validate_string_fails(self): detail = ("Invalid input for field/attribute foo. Value: 1." " 1 is not of type 'string'") self.check_validation_error(self.post, body={'foo': 1}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.5." " 1.5 is not of type 'string'") self.check_validation_error(self.post, body={'foo': 1.5}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: True." " True is not of type 'string'") self.check_validation_error(self.post, body={'foo': True}, expected_detail=detail) class StringLengthTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', 'minLength': 1, 'maxLength': 10, }, }, } super(StringLengthTestCase, self).setUp(schema=schema) def test_validate_string_length(self): self.assertEqual(self.post(body={'foo': '0'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0123456789'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_string_length_fails(self): detail = ("Invalid input for field/attribute foo. Value: ." " '' is too short") self.check_validation_error(self.post, body={'foo': ''}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0123456789a." 
" '0123456789a' is too long") self.check_validation_error(self.post, body={'foo': '0123456789a'}, expected_detail=detail) class IntegerTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': ['integer', 'string'], 'pattern': '^[0-9]+$', }, }, } super(IntegerTestCase, self).setUp(schema=schema) def test_validate_integer(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '1'}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '0123456789'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_integer_fails(self): detail = ("Invalid input for field/attribute foo. Value: abc." " 'abc' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': 'abc'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: True." " True is not of type 'integer', 'string'") self.check_validation_error(self.post, body={'foo': True}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0xffff." " '0xffff' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': '0xffff'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.0." " 1.0 is not of type 'integer', 'string'") self.check_validation_error(self.post, body={'foo': 1.0}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 1.0." 
" '1.0' does not match '^[0-9]+$'") self.check_validation_error(self.post, body={'foo': '1.0'}, expected_detail=detail) class IntegerRangeTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': ['integer', 'string'], 'pattern': '^[0-9]+$', 'minimum': 1, 'maximum': 10, }, }, } super(IntegerRangeTestCase, self).setUp(schema=schema) def test_validate_integer_range(self): self.assertEqual(self.post(body={'foo': 1}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': 10}, req=FakeRequest()), 'Validation succeeded.') self.assertEqual(self.post(body={'foo': '1'}, req=FakeRequest()), 'Validation succeeded.') def test_validate_integer_range_fails(self): detail = ("Invalid input for field/attribute foo. Value: 0." " 0(.0)? is less than the minimum of 1") self.check_validation_error(self.post, body={'foo': 0}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 11." " 11(.0)? is greater than the maximum of 10") self.check_validation_error(self.post, body={'foo': 11}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 0." " 0(.0)? is less than the minimum of 1") self.check_validation_error(self.post, body={'foo': '0'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 11." " 11(.0)? 
is greater than the maximum of 10") self.check_validation_error(self.post, body={'foo': '11'}, expected_detail=detail) class BooleanTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': parameter_types.boolean, }, } super(BooleanTestCase, self).setUp(schema=schema) def test_validate_boolean(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': True}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': False}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'True'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'False'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '1'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': '0'}, req=FakeRequest())) def test_validate_boolean_fails(self): enum_boolean = ("[True, 'True', 'TRUE', 'true', '1', 'ON', 'On'," " 'on', 'YES', 'Yes', 'yes'," " False, 'False', 'FALSE', 'false', '0', 'OFF', 'Off'," " 'off', 'NO', 'No', 'no']") detail = ("Invalid input for field/attribute foo. Value: bar." " 'bar' is not one of %s") % enum_boolean self.check_validation_error(self.post, body={'foo': 'bar'}, expected_detail=detail) detail = ("Invalid input for field/attribute foo. Value: 2." 
" '2' is not one of %s") % enum_boolean self.check_validation_error(self.post, body={'foo': '2'}, expected_detail=detail) class NameTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': parameter_types.name, }, } super(NameTestCase, self).setUp(schema=schema) def test_validate_name(self): self.assertEqual('Validation succeeded.', self.post(body={'foo': 'm1.small'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'my server'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': 'a'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': u'\u0434'}, req=FakeRequest())) self.assertEqual('Validation succeeded.', self.post(body={'foo': u'\u0434\u2006\ufffd'}, req=FakeRequest())) def test_validate_name_fails(self): error = ("An invalid 'name' value was provided. The name must be: " "printable characters. " "Can not start or end with whitespace.") should_fail = (' ', ' segment', 'segment ', u'a\xa0', # trailing unicode space u'\uffff', # non-printable unicode ) for item in should_fail: self.check_validation_error(self.post, body={'foo': item}, expected_detail=error) # four-byte unicode, if supported by this python build try: self.check_validation_error(self.post, body={'foo': u'\U00010000'}, expected_detail=error) except ValueError: pass class DatetimeTestCase(APIValidationTestCase): def setUp(self): schema = { 'type': 'object', 'properties': { 'foo': { 'type': 'string', 'format': 'date-time', }, }, } super(DatetimeTestCase, self).setUp(schema=schema) def test_validate_datetime(self): self.assertEqual('Validation succeeded.', self.post(body={ 'foo': '2016-01-14T01:00:00Z'}, req=FakeRequest() )) masakari-9.0.0/masakari/tests/unit/test_utils.py0000664000175000017500000002175113656747723022046 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not 
use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import importlib from unittest import mock import eventlet from oslo_config import cfg from oslo_context import context as common_context from oslo_context import fixture as context_fixture import six import masakari from masakari import context from masakari import exception from masakari import test from masakari import utils CONF = cfg.CONF class UTF8TestCase(test.NoDBTestCase): def test_none_value(self): self.assertIsInstance(utils.utf8(None), type(None)) def test_bytes_value(self): some_value = b"fake data" return_value = utils.utf8(some_value) # check that type of returned value doesn't changed self.assertIsInstance(return_value, type(some_value)) self.assertEqual(some_value, return_value) def test_not_text_type(self): return_value = utils.utf8(1) self.assertEqual(b"1", return_value) self.assertIsInstance(return_value, six.binary_type) def test_text_type_with_encoding(self): some_value = 'test\u2026config' self.assertEqual(some_value, utils.utf8(some_value).decode("utf-8")) class MonkeyPatchTestCase(test.NoDBTestCase): """Unit test for utils.monkey_patch().""" def setUp(self): super(MonkeyPatchTestCase, self).setUp() self.example_package = 'masakari.tests.unit.monkey_patch_example.' 
self.flags( monkey_patch=True, monkey_patch_modules=[self.example_package + 'example_a' + ':' + self.example_package + 'example_decorator']) def test_monkey_patch(self): utils.monkey_patch() masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION = [] from masakari.tests.unit.monkey_patch_example import example_a from masakari.tests.unit.monkey_patch_example import example_b self.assertEqual('Example function', example_a.example_function_a()) exampleA = example_a.ExampleClassA() exampleA.example_method() ret_a = exampleA.example_method_add(3, 5) self.assertEqual(ret_a, 8) self.assertEqual('Example function', example_b.example_function_b()) exampleB = example_b.ExampleClassB() exampleB.example_method() ret_b = exampleB.example_method_add(3, 5) self.assertEqual(ret_b, 8) package_a = self.example_package + 'example_a.' self.assertIn(package_a + 'example_function_a', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) self.assertIn(package_a + 'ExampleClassA.example_method', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) self.assertIn(package_a + 'ExampleClassA.example_method_add', masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION) package_b = self.example_package + 'example_b.' 
self.assertNotIn(package_b + 'example_function_b', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) self.assertNotIn(package_b + 'ExampleClassB.example_method', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) self.assertNotIn(package_b + 'ExampleClassB.example_method_add', ( masakari.tests.unit.monkey_patch_example.CALLED_FUNCTION)) class MonkeyPatchDefaultTestCase(test.NoDBTestCase): """Unit test for default monkey_patch_modules value.""" def setUp(self): super(MonkeyPatchDefaultTestCase, self).setUp() self.flags( monkey_patch=True) def test_monkey_patch_default_mod(self): # monkey_patch_modules is defined to be # : # Here we check that both parts of the default values are # valid for module in CONF.monkey_patch_modules: m = module.split(':', 1) # Check we can import the module to be patched importlib.import_module(m[0]) # check the decorator is valid decorator_name = m[1].rsplit('.', 1) decorator_module = importlib.import_module(decorator_name[0]) getattr(decorator_module, decorator_name[1]) class ExpectedArgsTestCase(test.NoDBTestCase): def test_passes(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f @dec def func(foo, bar, baz="lol"): pass # Call to ensure nothing errors func(None, None) def test_raises(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f def func(bar, baz): pass self.assertRaises(TypeError, dec, func) def test_var_no_of_args(self): @utils.expects_func_args('foo') def dec(f): return f @dec def func(bar, *args, **kwargs): pass # Call to ensure nothing errors func(None) def test_more_layers(self): @utils.expects_func_args('foo', 'baz') def dec(f): return f def dec_2(f): def inner_f(*a, **k): return f() return inner_f @dec_2 def func(bar, baz): pass self.assertRaises(TypeError, dec, func) class SpawnNTestCase(test.NoDBTestCase): def setUp(self): super(SpawnNTestCase, self).setUp() self.useFixture(context_fixture.ClearRequestContext()) self.spawn_name = 'spawn_n' def 
test_spawn_n_no_context(self): self.assertIsNone(common_context.get_current()) def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual('test', args[0]) def fake(arg): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, 'test') self.assertIsNone(common_context.get_current()) def test_spawn_n_context(self): self.assertIsNone(common_context.get_current()) ctxt = context.RequestContext('user', 'project') def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual(ctxt, args[0]) self.assertEqual('test', kwargs['kwarg1']) def fake(context, kwarg1=None): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, ctxt, kwarg1='test') self.assertEqual(ctxt, common_context.get_current()) def test_spawn_n_context_different_from_passed(self): self.assertIsNone(common_context.get_current()) ctxt = context.RequestContext('user', 'project') ctxt_passed = context.RequestContext('user', 'project', overwrite=False) self.assertEqual(ctxt, common_context.get_current()) def _fake_spawn(func, *args, **kwargs): # call the method to ensure no error is raised func(*args, **kwargs) self.assertEqual(ctxt_passed, args[0]) self.assertEqual('test', kwargs['kwarg1']) def fake(context, kwarg1=None): pass with mock.patch.object(eventlet, self.spawn_name, _fake_spawn): getattr(utils, self.spawn_name)(fake, ctxt_passed, kwarg1='test') self.assertEqual(ctxt, common_context.get_current()) class SpawnTestCase(SpawnNTestCase): def setUp(self): super(SpawnTestCase, self).setUp() self.spawn_name = 'spawn' class ValidateIntegerTestCase(test.NoDBTestCase): def test_exception_converted(self): self.assertRaises(exception.InvalidInput, utils.validate_integer, "im-not-an-int", "not-an-int") self.assertRaises(exception.InvalidInput, utils.validate_integer, 3.14, "Pie") 
self.assertRaises(exception.InvalidInput, utils.validate_integer, "299", "Sparta no-show", min_value=300, max_value=300) self.assertRaises(exception.InvalidInput, utils.validate_integer, 55, "doing 55 in a 54", max_value=54) self.assertRaises(exception.InvalidInput, utils.validate_integer, six.unichr(129), "UnicodeError", max_value=1000) masakari-9.0.0/masakari/tests/unit/test_safeutils.py0000664000175000017500000000476013656747723022706 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import functools from masakari import safe_utils from masakari import test class WrappedCodeTestCase(test.NoDBTestCase): """Test the get_wrapped_function utility method.""" def _wrapper(self, function): @functools.wraps(function) def decorated_function(self, *args, **kwargs): function(self, *args, **kwargs) return decorated_function def test_single_wrapped(self): @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) def test_double_wrapped(self): @self._wrapper @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) def test_triple_wrapped(self): @self._wrapper @self._wrapper @self._wrapper def wrapped(self, instance, red=None, blue=None): pass func = safe_utils.get_wrapped_function(wrapped) func_code = func.__code__ self.assertEqual(4, len(func_code.co_varnames)) self.assertIn('self', func_code.co_varnames) self.assertIn('instance', func_code.co_varnames) self.assertIn('red', func_code.co_varnames) self.assertIn('blue', func_code.co_varnames) masakari-9.0.0/masakari/tests/unit/test_rpc.py0000664000175000017500000002511713656747723021472 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock import fixtures import oslo_messaging as messaging from oslo_messaging.rpc import dispatcher from oslo_serialization import jsonutils import testtools from masakari import context from masakari import rpc from masakari import test class FakeAPI(rpc.RPCAPI): RPC_API_VERSION = '1.0' TOPIC = 'engine' BINARY = 'masakari-engine' class RPCAPITestCase(test.TestCase): """Tests RPCAPI mixin aggregating stuff related to RPC compatibility.""" def setUp(self): super(RPCAPITestCase, self).setUp() @mock.patch('masakari.rpc.get_client') def test_init(self, get_client): def fake_get_client(target, version_cap=None, serializer=None): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) get_client.side_effect = fake_get_client FakeAPI() @mock.patch('masakari.rpc.get_client') def test_init_cached_caps(self, get_client): def fake_get_client(target, version_cap=None, serializer=None): self.assertEqual(FakeAPI.TOPIC, target.topic) self.assertEqual(FakeAPI.RPC_API_VERSION, target.version) get_client.side_effect = fake_get_client FakeAPI() @mock.patch.object(messaging, 'set_transport_defaults') def test_set_defaults(self, mock_set): control_exchange = mock.Mock() rpc.set_defaults(control_exchange) mock_set.assert_called_once_with(control_exchange) def test_add_extra_exmods(self): rpc.EXTRA_EXMODS = [] rpc.add_extra_exmods('foo', 'bar') self.assertEqual(['foo', 'bar'], rpc.EXTRA_EXMODS) def test_clear_extra_exmods(self): rpc.EXTRA_EXMODS = ['foo', 'bar'] rpc.clear_extra_exmods() self.assertEqual(0, 
len(rpc.EXTRA_EXMODS)) def test_get_allowed_exmods(self): rpc.ALLOWED_EXMODS = ['foo'] rpc.EXTRA_EXMODS = ['bar'] exmods = rpc.get_allowed_exmods() self.assertEqual(['foo', 'bar'], exmods) @mock.patch.object(rpc, 'RequestContextSerializer') @mock.patch.object(messaging, 'RPCClient') def test_get_client(self, mock_client, mock_ser): rpc.TRANSPORT = mock.Mock() tgt = mock.Mock() ser = mock.Mock() mock_client.return_value = 'client' mock_ser.return_value = ser client = rpc.get_client(tgt, version_cap='1.0', serializer='foo') mock_ser.assert_called_once_with('foo') mock_client.assert_called_once_with(rpc.TRANSPORT, tgt, version_cap='1.0', serializer=ser) self.assertEqual('client', client) @mock.patch.object(rpc, 'RequestContextSerializer') @mock.patch.object(messaging, 'get_rpc_server') def test_get_server(self, mock_get, mock_ser): rpc.TRANSPORT = mock.Mock() ser = mock.Mock() tgt = mock.Mock() ends = mock.Mock() mock_ser.return_value = ser mock_get.return_value = 'server' access_policy = dispatcher.DefaultRPCAccessPolicy server = rpc.get_server(tgt, ends, serializer='foo') mock_ser.assert_called_once_with('foo') mock_get.assert_called_once_with(rpc.TRANSPORT, tgt, ends, executor='eventlet', serializer=ser, access_policy=access_policy) self.assertEqual('server', server) class RPCResetFixture(fixtures.Fixture): def _setUp(self): self.trans = copy.copy(rpc.TRANSPORT) self.noti_trans = copy.copy(rpc.NOTIFICATION_TRANSPORT) self.noti = copy.copy(rpc.NOTIFIER) self.all_mods = copy.copy(rpc.ALLOWED_EXMODS) self.ext_mods = copy.copy(rpc.EXTRA_EXMODS) self.addCleanup(self._reset_everything) def _reset_everything(self): rpc.TRANSPORT = self.trans rpc.NOTIFICATION_TRANSPORT = self.noti_trans rpc.NOTIFIER = self.noti rpc.ALLOWED_EXMODS = self.all_mods rpc.EXTRA_EXMODS = self.ext_mods class TestRPC(testtools.TestCase): def setUp(self): super(TestRPC, self).setUp() self.useFixture(RPCResetFixture()) @mock.patch.object(rpc, 'get_allowed_exmods') @mock.patch.object(rpc, 
'RequestContextSerializer') @mock.patch.object(messaging, 'get_notification_transport') @mock.patch.object(messaging, 'Notifier') def test_init_versioned(self, mock_notif, mock_noti_trans, mock_ser, mock_exmods): expected = [{'topics': ['versioned_notifications']}] self._test_init(mock_notif, mock_noti_trans, mock_ser, mock_exmods, 'versioned', expected) def test_cleanup_transport_null(self): rpc.TRANSPORT = None rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.NOTIFIER = mock.Mock() self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup_notification_transport_null(self): rpc.TRANSPORT = mock.Mock() rpc.NOTIFICATION_TRANSPORT = None rpc.NOTIFIER = mock.Mock() self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup_notifier_null(self): rpc.TRANSPORT = mock.Mock() rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.NOTIFIER = None self.assertRaises(AssertionError, rpc.cleanup) def test_cleanup(self): rpc.NOTIFIER = mock.Mock() rpc.NOTIFICATION_TRANSPORT = mock.Mock() rpc.TRANSPORT = mock.Mock() trans_cleanup = mock.Mock() not_trans_cleanup = mock.Mock() rpc.TRANSPORT.cleanup = trans_cleanup rpc.NOTIFICATION_TRANSPORT.cleanup = not_trans_cleanup rpc.cleanup() trans_cleanup.assert_called_once_with() not_trans_cleanup.assert_called_once_with() self.assertIsNone(rpc.TRANSPORT) self.assertIsNone(rpc.NOTIFICATION_TRANSPORT) self.assertIsNone(rpc.NOTIFIER) def test_get_versioned_notifier(self): rpc.NOTIFIER = mock.Mock() mock_prep = mock.Mock() mock_prep.return_value = 'notifier' rpc.NOTIFIER.prepare = mock_prep notifier = rpc.get_versioned_notifier('service.foo') mock_prep.assert_called_once_with(publisher_id='service.foo') self.assertEqual('notifier', notifier) def _test_init(self, mock_notif, mock_noti_trans, mock_ser, mock_exmods, notif_format, expected_driver_topic_kwargs, versioned_notification_topics=['versioned_notifications']): notifier = mock.Mock() notif_transport = mock.Mock() transport = mock.Mock() serializer = mock.Mock() conf = mock.Mock() 
conf.transport_url = None conf.notification_format = notif_format mock_exmods.return_value = ['foo'] conf.notifications.versioned_notifications_topics = ( versioned_notification_topics) mock_noti_trans.return_value = notif_transport mock_ser.return_value = serializer mock_notif.side_effect = [notifier] @mock.patch.object(rpc, 'CONF', new=conf) @mock.patch.object(rpc, 'create_transport') @mock.patch.object(rpc, 'get_transport_url') def _test(get_url, create_transport): create_transport.return_value = transport rpc.init(conf) create_transport.assert_called_once_with(get_url.return_value) _test() self.assertTrue(mock_exmods.called) self.assertIsNotNone(rpc.TRANSPORT) self.assertIsNotNone(rpc.NOTIFIER) self.assertEqual(notifier, rpc.NOTIFIER) expected_calls = [] for kwargs in expected_driver_topic_kwargs: expected_kwargs = {'serializer': serializer} expected_kwargs.update(kwargs) expected_calls.append(((notif_transport,), expected_kwargs)) self.assertEqual(expected_calls, mock_notif.call_args_list, "The calls to messaging.Notifier() did not create " "the versioned notifiers properly.") class TestJsonPayloadSerializer(test.NoDBTestCase): def test_serialize_entity(self): with mock.patch.object(jsonutils, 'to_primitive') as mock_prim: rpc.JsonPayloadSerializer.serialize_entity('context', 'entity') mock_prim.assert_called_once_with('entity', convert_instances=True) class TestRequestContextSerializer(test.NoDBTestCase): def setUp(self): super(TestRequestContextSerializer, self).setUp() self.mock_base = mock.Mock() self.ser = rpc.RequestContextSerializer(self.mock_base) self.ser_null = rpc.RequestContextSerializer(None) def test_serialize_entity(self): self.mock_base.serialize_entity.return_value = 'foo' ser_ent = self.ser.serialize_entity('context', 'entity') self.mock_base.serialize_entity.assert_called_once_with('context', 'entity') self.assertEqual('foo', ser_ent) def test_serialize_entity_null_base(self): ser_ent = self.ser_null.serialize_entity('context', 'entity') 
self.assertEqual('entity', ser_ent) def test_deserialize_entity(self): self.mock_base.deserialize_entity.return_value = 'foo' deser_ent = self.ser.deserialize_entity('context', 'entity') self.mock_base.deserialize_entity.assert_called_once_with('context', 'entity') self.assertEqual('foo', deser_ent) def test_deserialize_entity_null_base(self): deser_ent = self.ser_null.deserialize_entity('context', 'entity') self.assertEqual('entity', deser_ent) def test_serialize_context(self): context = mock.Mock() self.ser.serialize_context(context) context.to_dict.assert_called_once_with() @mock.patch.object(context, 'RequestContext') def test_deserialize_context(self, mock_req): self.ser.deserialize_context('context') mock_req.from_dict.assert_called_once_with('context') masakari-9.0.0/masakari/tests/unit/test_context.py0000664000175000017500000001375713656747723022401 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_context import context as o_context from oslo_context import fixture as o_fixture from masakari import context from masakari import test class ContextTestCase(test.NoDBTestCase): def setUp(self): super(ContextTestCase, self).setUp() self.useFixture(o_fixture.ClearRequestContext()) def test_request_context_elevated(self): user_ctxt = context.RequestContext('111', '222', is_admin=False) self.assertFalse(user_ctxt.is_admin) admin_ctxt = user_ctxt.elevated() self.assertTrue(admin_ctxt.is_admin) self.assertIn('admin', admin_ctxt.roles) self.assertFalse(user_ctxt.is_admin) self.assertNotIn('admin', user_ctxt.roles) def test_request_context_sets_is_admin(self): ctxt = context.RequestContext('111', '222', roles=['admin', 'weasel']) self.assertTrue(ctxt.is_admin) def test_request_context_sets_is_admin_upcase(self): ctxt = context.RequestContext('111', '222', roles=['Admin', 'weasel']) self.assertTrue(ctxt.is_admin) def test_request_context_read_deleted(self): ctxt = context.RequestContext('111', '222', read_deleted='yes') self.assertEqual('yes', ctxt.read_deleted) ctxt.read_deleted = 'no' self.assertEqual('no', ctxt.read_deleted) def test_request_context_read_deleted_invalid(self): self.assertRaises(ValueError, context.RequestContext, '111', '222', read_deleted=True) ctxt = context.RequestContext('111', '222') self.assertRaises(ValueError, setattr, ctxt, 'read_deleted', True) def test_service_catalog_default(self): ctxt = context.RequestContext('111', '222') self.assertEqual([], ctxt.service_catalog) ctxt = context.RequestContext('111', '222', service_catalog=[]) self.assertEqual([], ctxt.service_catalog) ctxt = context.RequestContext('111', '222', service_catalog=None) self.assertEqual([], ctxt.service_catalog) def test_store_when_no_overwrite(self): # If no context exists we store one even if overwrite is false # (since we are not overwriting anything). 
ctx = context.RequestContext('111', '222', overwrite=False) self.assertIs(o_context.get_current(), ctx) def test_no_overwrite(self): # If there is already a context in the cache a new one will # not overwrite it if overwrite=False. ctx1 = context.RequestContext('111', '222', overwrite=True) context.RequestContext('333', '444', overwrite=False) self.assertIs(o_context.get_current(), ctx1) def test_admin_no_overwrite(self): # If there is already a context in the cache creating an admin # context will not overwrite it. ctx1 = context.RequestContext('111', '222', overwrite=True) context.get_admin_context() self.assertIs(o_context.get_current(), ctx1) def test_convert_from_rc_to_dict(self): ctx = context.RequestContext( 111, 222, request_id='req-679033b7-1755-4929-bf85-eb3bfaef7e0b', timestamp='2016-03-02T22:31:56.641629') values2 = ctx.to_dict() expected_values = {'is_admin': False, 'project_id': 222, 'project_name': None, 'read_deleted': 'no', 'remote_address': None, 'request_id': 'req-679033b7-1755-4929-bf85-eb3bfaef7e0b', 'service_catalog': [], 'timestamp': '2016-03-02T22:31:56.641629', 'user_id': 111, 'user_name': None} self.assertDictContainsSubset(expected_values, values2) def test_convert_from_dict_then_to_dict(self): values = {'is_admin': False, 'project_id': '222', 'project_name': None, 'read_deleted': 'no', 'remote_address': None, 'request_id': 'req-679033b7-1755-4929-bf85-eb3bfaef7e0b', 'service_catalog': [], 'timestamp': '2016-03-02T22:31:56.641629', 'user_id': '111', 'user_name': None} ctx = context.RequestContext.from_dict(values) self.assertEqual('111', ctx.user) self.assertEqual('222', ctx.tenant) self.assertEqual('111', ctx.user_id) self.assertEqual('111', ctx.user_id) self.assertEqual('222', ctx.project_id) values2 = ctx.to_dict() self.assertDictContainsSubset(values, values2) masakari-9.0.0/masakari/tests/unit/test_exception.py0000664000175000017500000001342313656747723022701 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # Administrator of 
the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import inspect import six from six.moves import http_client as http from webob.util import status_reasons from masakari import exception from masakari import test class MasakariExceptionTestCase(test.NoDBTestCase): def test_default_error_msg(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message" exc = FakeMasakariException() self.assertEqual('default message', six.text_type(exc)) def test_error_msg(self): self.assertEqual('test', six.text_type(exception.MasakariException('test'))) def test_default_error_msg_with_kwargs(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message: %(code)s" exc = FakeMasakariException(code=int(http.INTERNAL_SERVER_ERROR)) self.assertEqual('default message: 500', six.text_type(exc)) self.assertEqual('default message: 500', exc.message) def test_error_msg_exception_with_kwargs(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "default message: %(misspelled_code)s" exc = FakeMasakariException(code=int(http.INTERNAL_SERVER_ERROR), misspelled_code='blah') self.assertEqual('default message: blah', six.text_type(exc)) self.assertEqual('default message: blah', exc.message) def test_default_error_code(self): class FakeMasakariException(exception.MasakariException): code = http.NOT_FOUND exc = FakeMasakariException() 
self.assertEqual(http.NOT_FOUND, exc.kwargs['code']) def test_error_code_from_kwarg(self): class FakeMasakariException(exception.MasakariException): code = http.INTERNAL_SERVER_ERROR exc = FakeMasakariException(code=http.NOT_FOUND) self.assertEqual(exc.kwargs['code'], http.NOT_FOUND) def test_format_message_local(self): class FakeMasakariException(exception.MasakariException): msg_fmt = "some message" exc = FakeMasakariException() self.assertEqual(six.text_type(exc), exc.format_message()) def test_format_message_remote(self): class FakeMasakariException_Remote(exception.MasakariException): msg_fmt = "some message" if six.PY2: def __unicode__(self): return u"print the whole trace" else: def __str__(self): return "print the whole trace" exc = FakeMasakariException_Remote() self.assertEqual(u"print the whole trace", six.text_type(exc)) self.assertEqual("some message", exc.format_message()) def test_format_message_remote_error(self): class FakeMasakariException_Remote(exception.MasakariException): msg_fmt = "some message %(somearg)s" def __unicode__(self): return u"print the whole trace" self.flags(fatal_exception_format_errors=False) exc = FakeMasakariException_Remote(lame_arg='lame') self.assertEqual("some message %(somearg)s", exc.format_message()) class ConvertedExceptionTestCase(test.NoDBTestCase): def test_instantiate(self): exc = exception.ConvertedException(int(http.BAD_REQUEST), 'Bad Request', 'reason') self.assertEqual(exc.code, http.BAD_REQUEST) self.assertEqual(exc.title, 'Bad Request') self.assertEqual(exc.explanation, 'reason') def test_instantiate_without_title_known_code(self): exc = exception.ConvertedException(int(http.INTERNAL_SERVER_ERROR)) self.assertEqual(exc.title, status_reasons[http.INTERNAL_SERVER_ERROR]) def test_instantiate_without_title_unknown_code(self): exc = exception.ConvertedException(499) self.assertEqual(exc.title, 'Unknown Client Error') def test_instantiate_bad_code(self): self.assertRaises(KeyError, exception.ConvertedException, 
10) class ExceptionTestCase(test.NoDBTestCase): @staticmethod def _raise_exc(exc): raise exc(int(http.INTERNAL_SERVER_ERROR)) def test_exceptions_raise(self): # NOTE(Dinesh_Bhor): disable format errors since we are not passing # kwargs self.flags(fatal_exception_format_errors=False) for name in dir(exception): exc = getattr(exception, name) if isinstance(exc, type): self.assertRaises(exc, self._raise_exc, exc) class ExceptionValidMessageTestCase(test.NoDBTestCase): def test_messages(self): failures = [] for name, obj in inspect.getmembers(exception): if name in ['MasakariException', 'InstanceFaultRollback']: continue if not inspect.isclass(obj): continue if not issubclass(obj, exception.MasakariException): continue e = obj if e.msg_fmt == "An unknown exception occurred.": failures.append('%s needs a more specific msg_fmt' % name) if failures: self.fail('\n'.join(failures)) masakari-9.0.0/masakari/tests/unit/api/0000775000175000017500000000000013656750011020020 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/__init__.py0000664000175000017500000000000013656747723022137 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/test_api_version_request.py0000664000175000017500000000733213656747723025544 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.api import api_version_request from masakari import exception from masakari import test from masakari.tests.unit.api.openstack import fakes class APIVersionRequestTests(test.NoDBTestCase): def test_valid_version_strings(self): def _test_string(version, exp_major, exp_minor): v = api_version_request.APIVersionRequest(version) self.assertEqual(v.ver_major, exp_major) self.assertEqual(v.ver_minor, exp_minor) _test_string("1.0", 1, 0) def test_null_version(self): v = api_version_request.APIVersionRequest() self.assertTrue(v.is_null()) def test_invalid_version_strings(self): self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "2") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "200") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "2.1.4") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "200.23.66.3") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "5 .3") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "5. 
3") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "5.03") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "02.1") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "2.001") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, " 2.1") self.assertRaises(exception.InvalidAPIVersionString, api_version_request.APIVersionRequest, "2.1 ") def test_get_string(self): vers1_string = "1.0" vers1 = api_version_request.APIVersionRequest(vers1_string) self.assertEqual(vers1_string, vers1.get_string()) self.assertRaises(ValueError, api_version_request.APIVersionRequest().get_string) def test_is_supported_min_version(self): req = fakes.HTTPRequest.blank('/fake', version='1.0') self.assertTrue(api_version_request.is_supported( req, min_version='1.0')) self.assertFalse(api_version_request.is_supported( req, min_version='2.6')) def test_is_supported_max_version(self): req = fakes.HTTPRequest.blank('/fake', version='2.4') self.assertFalse(api_version_request.is_supported( req, max_version='1.0')) self.assertTrue(api_version_request.is_supported( req, max_version='2.6')) masakari-9.0.0/masakari/tests/unit/api/test_utils.py0000664000175000017500000001701513656747723022615 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. 
# See the License for the specific language governing permissions and # limitations under the License. import socket import testtools from unittest import mock from masakari.api import utils as api_utils from masakari.notifications.objects import base as notification_base from masakari.notifications.objects import exception as notification_exception from masakari.notifications.objects import notification as event_notification from masakari import objects from masakari.objects import fields from masakari.objects import host as host_obj from masakari.objects import notification as notification_obj class TestApiUtils(testtools.TestCase): def setUp(self): super(TestApiUtils, self).setUp() @mock.patch.object(notification_base, 'EventType') @mock.patch.object(notification_base, 'NotificationPublisher') @mock.patch.object(event_notification, 'SegmentApiNotification') @mock.patch.object(event_notification, 'SegmentApiPayload') @mock.patch.object(notification_exception.ExceptionPayload, 'from_exc_and_traceback') def test_notify_about_segment_api( self, mock_from_exception, mock_SegmentApiPayload, mock_SegmentApiNotification, mock_NotificationPublisher, mock_EventType): mock_fault = mock.Mock() mock_from_exception.return_value = mock_fault mock_payload = mock.Mock() mock_SegmentApiPayload.return_value = mock_payload mock_api_notification = mock.Mock() mock_SegmentApiNotification.return_value = mock_api_notification mock_api_notification.emit.return_value = None mock_publisher = mock.Mock() mock_NotificationPublisher.return_value = mock_publisher mock_event_type = mock.Mock() mock_EventType.return_value = mock_event_type mock_context = mock.Mock() segment = objects.FailoverSegment() action = fields.EventNotificationAction.SEGMENT_CREATE phase = fields.EventNotificationPhase.ERROR e = Exception() api_utils.notify_about_segment_api(mock_context, segment, action=action, phase=phase, exception=e) mock_from_exception.assert_called_once_with(e, None) 
mock_SegmentApiPayload.assert_called_once_with( segment=segment, fault=mock_fault) mock_SegmentApiNotification.assert_called_once_with( context=mock_context, priority=fields.EventNotificationPriority.ERROR, publisher=mock_publisher, event_type=mock_event_type, payload=mock_payload) mock_NotificationPublisher.assert_called_once_with( context=mock_context, host=socket.gethostname(), binary='masakari-api') mock_EventType.assert_called_once_with( action=action, phase=phase) mock_api_notification.emit.assert_called_once_with(mock_context) @mock.patch.object(notification_base, 'EventType') @mock.patch.object(notification_base, 'NotificationPublisher') @mock.patch.object(event_notification, 'HostApiNotification') @mock.patch.object(event_notification, 'HostApiPayload') @mock.patch.object(notification_exception.ExceptionPayload, 'from_exc_and_traceback') def test_notify_about_host_api( self, mock_from_exception, mock_HostApiPayload, mock_HostApiNotification, mock_NotificationPublisher, mock_EventType): mock_fault = mock.Mock() mock_from_exception.return_value = mock_fault mock_payload = mock.Mock() mock_HostApiPayload.return_value = mock_payload mock_api_notification = mock.Mock() mock_HostApiNotification.return_value = mock_api_notification mock_api_notification.emit.return_value = None mock_publisher = mock.Mock() mock_NotificationPublisher.return_value = mock_publisher mock_event_type = mock.Mock() mock_EventType.return_value = mock_event_type mock_context = mock.Mock() host = host_obj.Host() action = fields.EventNotificationAction.HOST_CREATE phase = fields.EventNotificationPhase.ERROR e = Exception() api_utils.notify_about_host_api(mock_context, host, action=action, phase=phase, exception=e) mock_from_exception.assert_called_once_with(e, None) mock_HostApiPayload.assert_called_once_with( host=host, fault=mock_fault) mock_HostApiNotification.assert_called_once_with( context=mock_context, priority=fields.EventNotificationPriority.ERROR, publisher=mock_publisher, 
event_type=mock_event_type, payload=mock_payload) mock_NotificationPublisher.assert_called_once_with( context=mock_context, host=socket.gethostname(), binary='masakari-api') mock_api_notification.emit.assert_called_once_with(mock_context) mock_EventType.assert_called_once_with( action=action, phase=phase) @mock.patch.object(notification_base, 'EventType') @mock.patch.object(notification_base, 'NotificationPublisher') @mock.patch.object(event_notification, 'NotificationApiNotification') @mock.patch.object(event_notification, 'NotificationApiPayload') @mock.patch.object(notification_exception.ExceptionPayload, 'from_exc_and_traceback') def test_notify_about_notification_api( self, mock_from_exception, mock_NotificationApiPayload, mock_NotificationApiNotification, mock_NotificationPublisher, mock_EventType): mock_fault = mock.Mock() mock_from_exception.return_value = mock_fault mock_payload = mock.Mock() mock_NotificationApiPayload.return_value = mock_payload mock_api_notification = mock.Mock() mock_NotificationApiNotification.return_value = mock_api_notification mock_api_notification.emit.return_value = None mock_publisher = mock.Mock() mock_NotificationPublisher.return_value = mock_publisher mock_event_type = mock.Mock() mock_EventType.return_value = mock_event_type mock_context = mock.Mock() notification = notification_obj.Notification() action = fields.EventNotificationAction.NOTIFICATION_CREATE phase = fields.EventNotificationPhase.ERROR e = Exception() api_utils.notify_about_notification_api(mock_context, notification, action=action, phase=phase, exception=e) mock_from_exception.assert_called_once_with(e, None) mock_NotificationApiPayload.assert_called_once_with( notification=notification, fault=mock_fault) mock_NotificationApiNotification.assert_called_once_with( context=mock_context, priority=fields.EventNotificationPriority.ERROR, publisher=mock_publisher, event_type=mock_event_type, payload=mock_payload) mock_NotificationPublisher.assert_called_once_with( 
context=mock_context, host=socket.gethostname(), binary='masakari-api') mock_api_notification.emit.assert_called_once_with(mock_context) masakari-9.0.0/masakari/tests/unit/api/test_auth.py0000664000175000017500000001723613656747723022423 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_middleware import request_id from oslo_serialization import jsonutils from six.moves import http_client as http import webob import webob.exc import masakari.api.auth import masakari.conf from masakari import test CONF = masakari.conf.CONF class TestMasakariKeystoneContextMiddleware(test.NoDBTestCase): def setUp(self): super(TestMasakariKeystoneContextMiddleware, self).setUp() @webob.dec.wsgify() def fake_app(req): self.context = req.environ['masakari.context'] return webob.Response() self.context = None self.middleware = masakari.api.auth.MasakariKeystoneContext(fake_app) self.request = webob.Request.blank('/') self.request.headers['X_TENANT_ID'] = 'testtenantid' self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken' self.request.headers['X_SERVICE_CATALOG'] = jsonutils.dumps({}) def test_no_user_or_user_id(self): response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.UNAUTHORIZED) def test_user_id_only(self): self.request.headers['X_USER_ID'] = 'testuserid' response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) 
self.assertEqual(self.context.user_id, 'testuserid') def test_user_only(self): self.request.headers['X_USER'] = 'testuser' response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) self.assertEqual(self.context.user_id, 'testuser') def test_user_id_trumps_user(self): self.request.headers['X_USER_ID'] = 'testuserid' self.request.headers['X_USER'] = 'testuser' response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) self.assertEqual(self.context.user_id, 'testuserid') def test_invalid_service_catalog(self): self.request.headers['X_USER'] = 'testuser' self.request.headers['X_SERVICE_CATALOG'] = "bad json" response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.INTERNAL_SERVER_ERROR) def test_request_id_extracted_from_env(self): req_id = 'dummy-request-id' self.request.headers['X_PROJECT_ID'] = 'testtenantid' self.request.headers['X_USER_ID'] = 'testuserid' self.request.environ[request_id.ENV_REQUEST_ID] = req_id self.request.get_response(self.middleware) self.assertEqual(req_id, self.context.request_id) class TestNoAuthMiddleware(test.NoDBTestCase): def setUp(self): super(TestNoAuthMiddleware, self).setUp() @webob.dec.wsgify() def fake_app(req): self.context = req.environ['masakari.context'] return webob.Response() self.context = None self.middleware = masakari.api.auth.NoAuthMiddleware(fake_app) self.request = webob.Request.blank('/') self.request.headers['X_SERVICE_CATALOG'] = jsonutils.dumps({}) def test_no_user_or_user_id(self): response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) def test_user_id_only(self): self.request.headers['X_USER_ID'] = 'testuserid' response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) self.assertEqual(self.context.user_id, 'testuserid') def test_user_only(self): self.request.headers['X_USER'] = 'testuser' response = 
self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) self.assertEqual(self.context.user_id, 'testuser') def test_user_id_trumps_user(self): self.request.headers['X_USER_ID'] = 'testuserid' self.request.headers['X_USER'] = 'testuser' response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.OK) self.assertEqual(self.context.user_id, 'testuserid') def test_invalid_service_catalog(self): self.request.headers['X_USER'] = 'testuser' self.request.headers['X_SERVICE_CATALOG'] = "bad json" response = self.request.get_response(self.middleware) self.assertEqual(response.status_int, http.INTERNAL_SERVER_ERROR) def test_request_id_extracted_from_env(self): req_id = 'dummy-request-id' self.request.headers['X_PROJECT_ID'] = 'testtenantid' self.request.headers['X_USER_ID'] = 'testuserid' self.request.environ[request_id.ENV_REQUEST_ID] = req_id self.request.get_response(self.middleware) self.assertEqual(req_id, self.context.request_id) class TestKeystoneMiddlewareRoles(test.NoDBTestCase): def setUp(self): super(TestKeystoneMiddlewareRoles, self).setUp() @webob.dec.wsgify() def role_check_app(req): context = req.environ['masakari.context'] if "knight" in context.roles and "bad" not in context.roles: return webob.Response(status="200 Role Match") elif not context.roles: return webob.Response(status="200 No Roles") else: raise webob.exc.HTTPBadRequest("unexpected role header") self.middleware = ( masakari.api.auth.MasakariKeystoneContext(role_check_app)) self.request = webob.Request.blank('/') self.request.headers['X_USER'] = 'testuser' self.request.headers['X_TENANT_ID'] = 'testtenantid' self.request.headers['X_AUTH_TOKEN'] = 'testauthtoken' self.request.headers['X_SERVICE_CATALOG'] = jsonutils.dumps({}) self.roles = "pawn, knight, rook" def test_roles(self): self.request.headers['X_ROLES'] = 'pawn,knight,rook' response = self.request.get_response(self.middleware) self.assertEqual(response.status, '200 Role 
Match') class TestPipeLineFactory(test.NoDBTestCase): class FakeFilter(object): def __init__(self, name): self.name = name self.obj = None def __call__(self, obj): self.obj = obj return self class FakeApp(object): def __init__(self, name): self.name = name class FakeLoader(object): def get_filter(self, name): return TestPipeLineFactory.FakeFilter(name) def get_app(self, name): return TestPipeLineFactory.FakeApp(name) def _test_pipeline(self, pipeline, app): for p in pipeline.split()[:-1]: self.assertEqual(app.name, p) self.assertIsInstance(app, TestPipeLineFactory.FakeFilter) app = app.obj self.assertEqual(app.name, pipeline.split()[-1]) self.assertIsInstance(app, TestPipeLineFactory.FakeApp) def test_pipeline_factory_v1(self): fake_pipeline = 'test1 test2 test3' CONF.set_override('auth_strategy', 'noauth2') app = masakari.api.auth.pipeline_factory_v1( TestPipeLineFactory.FakeLoader(), None, noauth2=fake_pipeline) self._test_pipeline(fake_pipeline, app) masakari-9.0.0/masakari/tests/unit/api/openstack/0000775000175000017500000000000013656750011022007 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/openstack/__init__.py0000664000175000017500000000000013656747723024126 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/openstack/test_extensions.py0000664000175000017500000000642313656747723025644 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from unittest import mock from oslo_config import cfg from six.moves import http_client as http import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack import ha from masakari.api.openstack.ha import extension_info from masakari import exception from masakari import test CONF = cfg.CONF class fake_bad_extension(object): name = "fake_bad_extension" alias = "fake-bad" class ExtensionLoadingTestCase(test.NoDBTestCase): @mock.patch('masakari.rpc.get_client') def test_extensions_loaded(self, mock_get_client): app = ha.APIRouterV1() self.assertIn('extensions', app._loaded_extension_info.extensions) def test_check_bad_extension(self): loaded_ext_info = extension_info.LoadedExtensionInfo() self.assertFalse(loaded_ext_info._check_extension(fake_bad_extension)) @mock.patch('masakari.rpc.get_client') @mock.patch('masakari.api.openstack.APIRouterV1._register_resources_list') def test_extensions_inherit(self, mock_register, mock_get_client): app = ha.APIRouterV1() self.assertIn('extensions', app._loaded_extension_info.extensions) ext_no_inherits = mock_register.call_args_list[0][0][0] mock_register.assert_called_with(mock.ANY, mock.ANY) name_list = [ext.obj.alias for ext in ext_no_inherits] self.assertIn('extensions', name_list) def test_extensions_expected_error(self): @extensions.expected_errors(http.NOT_FOUND) def fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_expected_error_from_list(self): @extensions.expected_errors((http.NOT_FOUND, http.FORBIDDEN)) def fake_func(): raise webob.exc.HTTPNotFound() self.assertRaises(webob.exc.HTTPNotFound, fake_func) def test_extensions_unexpected_error(self): @extensions.expected_errors(http.NOT_FOUND) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_error_from_list(self): @extensions.expected_errors((http.NOT_FOUND, 
http.REQUEST_ENTITY_TOO_LARGE)) def fake_func(): raise webob.exc.HTTPConflict() self.assertRaises(webob.exc.HTTPInternalServerError, fake_func) def test_extensions_unexpected_policy_not_authorized_error(self): @extensions.expected_errors(http.NOT_FOUND) def fake_func(): raise exception.PolicyNotAuthorized(action="foo") self.assertRaises(exception.PolicyNotAuthorized, fake_func) masakari-9.0.0/masakari/tests/unit/api/openstack/ha/0000775000175000017500000000000013656750011022377 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/openstack/ha/__init__.py0000664000175000017500000000000013656747723024516 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/api/openstack/ha/test_extension_info.py0000664000175000017500000000767313656747723027074 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from unittest import mock from masakari.api.openstack.ha import extension_info from masakari import policy from masakari import test from masakari.tests.unit.api.openstack import fakes class fake_extension(object): def __init__(self, name, alias, description, version): self.name = name self.alias = alias self.__doc__ = description self.version = version fake_extensions = { 'ext1-alias': fake_extension('ext1', 'ext1-alias', 'ext1 description', 1), 'ext2-alias': fake_extension('ext2', 'ext2-alias', 'ext2 description', 2), 'ext3-alias': fake_extension('ext3', 'ext3-alias', 'ext3 description', 1) } simulated_extension_list = { 'segments': fake_extension('Segments', 'segments', 'Segments.', 1), 'hosts': fake_extension('Hosts', 'hosts', 'Hosts.', 2), 'os-fake': fake_extension('Cells', 'os-fake', 'Cells description', 1) } def fake_policy_authorize_selective(context, action, target): return action != 'os_masakari_api:ext1-alias:discoverable' class ExtensionInfoTest(test.NoDBTestCase): def setUp(self): super(ExtensionInfoTest, self).setUp() ext_info = extension_info.LoadedExtensionInfo() ext_info.extensions = fake_extensions self.controller = extension_info.ExtensionInfoController(ext_info) def _filter_extensions(self, res_dict): for e in [x for x in res_dict['extensions'] if '-alias' in x['alias']]: self.assertIn(e['alias'], fake_extensions) self.assertEqual(e['name'], fake_extensions[e['alias']].name) self.assertEqual(e['alias'], fake_extensions[e['alias']].alias) self.assertEqual(e['description'], fake_extensions[e['alias']].__doc__) self.assertEqual(e['updated'], "") self.assertEqual(e['links'], []) self.assertEqual(6, len(e)) @mock.patch.object(policy, 'authorize', mock.Mock(return_value=True)) def test_extension_info_list(self): req = fakes.HTTPRequest.blank('/extensions') res_dict = self.controller.index(req) self.assertGreaterEqual(len(res_dict['extensions']), 3) self._filter_extensions(res_dict) @mock.patch.object(policy, 'authorize', mock.Mock(return_value=True)) 
def test_extension_info_show(self): req = fakes.HTTPRequest.blank('/extensions/ext1-alias') res_dict = self.controller.show(req, 'ext1-alias') self.assertEqual(1, len(res_dict)) self.assertEqual(res_dict['extension']['name'], fake_extensions['ext1-alias'].name) self.assertEqual(res_dict['extension']['alias'], fake_extensions['ext1-alias'].alias) self.assertEqual(res_dict['extension']['description'], fake_extensions['ext1-alias'].__doc__) self.assertEqual(res_dict['extension']['updated'], "") self.assertEqual(res_dict['extension']['links'], []) self.assertEqual(6, len(res_dict['extension'])) @mock.patch.object(policy, 'authorize') def test_extension_info_list_not_all_discoverable(self, mock_authorize): mock_authorize.side_effect = fake_policy_authorize_selective req = fakes.HTTPRequest.blank('/extensions') res_dict = self.controller.index(req) self.assertGreaterEqual(len(res_dict['extensions']), 2) self._filter_extensions(res_dict) masakari-9.0.0/masakari/tests/unit/api/openstack/ha/test_segments.py0000664000175000017500000003451213656747723025662 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Tests for the failover segment api.""" from unittest import mock import ddt from oslo_serialization import jsonutils from six.moves import http_client as http from webob import exc from masakari.api.openstack.ha import segments from masakari import exception from masakari.ha import api as ha_api from masakari.objects import base as obj_base from masakari.objects import segment as segment_obj from masakari import test from masakari.tests.unit.api.openstack import fakes from masakari.tests import uuidsentinel def _make_segment_obj(segment_dict): return segment_obj.FailoverSegment(**segment_dict) def _make_segments_list(segments_list): return segment_obj.FailoverSegment(objects=[ _make_segment_obj(a) for a in segments_list]) FAILOVER_SEGMENT_LIST = [ {"name": "segment1", "id": "1", "service_type": "COMPUTE", "recovery_method": "auto", "uuid": uuidsentinel.fake_segment, "description": "failover_segment for compute"}, {"name": "segment2", "id": "2", "service_type": "CINDER", "recovery_method": "reserved_host", "uuid": uuidsentinel.fake_segment2, "description": "failover_segment for cinder"} ] FAILOVER_SEGMENT_LIST = _make_segments_list(FAILOVER_SEGMENT_LIST) FAILOVER_SEGMENT = {"name": "segment1", "id": "1", "service_type": "COMPUTE", "recovery_method": "auto", "uuid": uuidsentinel.fake_segment, "description": "failover_segment for compute"} FAILOVER_SEGMENT = _make_segment_obj(FAILOVER_SEGMENT) @ddt.ddt class FailoverSegmentTestCase(test.TestCase): """Test Case for failover segment api.""" bad_request = exception.ValidationError def setUp(self): super(FailoverSegmentTestCase, self).setUp() self.controller = segments.SegmentsController() self.req = fakes.HTTPRequest.blank('/v1/segments', use_admin_context=True) self.context = self.req.environ['masakari.context'] @property def app(self): return fakes.wsgi_app_v1(init_only='segments') def _assert_segment_data(self, expected, actual): self.assertTrue(obj_base.obj_equal_prims(expected, actual), "The failover segment 
objects were not equal") @mock.patch.object(ha_api.FailoverSegmentAPI, 'get_all') def test_index(self, mock_get_all): mock_get_all.return_value = FAILOVER_SEGMENT_LIST result = self.controller.index(self.req) result = result['segments'] self._assert_segment_data(FAILOVER_SEGMENT_LIST, _make_segments_list(result)) @mock.patch.object(ha_api.FailoverSegmentAPI, 'get_all') def test_index_marker_not_found(self, mock_get_all): fake_request = fakes.HTTPRequest.blank('/v1/segments?marker=12345', use_admin_context=True) mock_get_all.side_effect = exception.MarkerNotFound(marker="12345") self.assertRaises(exc.HTTPBadRequest, self.controller.index, fake_request) @ddt.data( # limit negative 'limit=-1', # invalid sort key 'sort_key=abcd', # invalid sort dir 'sort_dir=abcd') def test_index_invalid(self, param): req = fakes.HTTPRequest.blank("/v1/segments?%s" % param, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @mock.patch.object(ha_api.FailoverSegmentAPI, 'create_segment') def test_create(self, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } mock_create.return_value = FAILOVER_SEGMENT result = self.controller.create(self.req, body=body) result = result['segment'] self._assert_segment_data(FAILOVER_SEGMENT, _make_segment_obj(result)) @mock.patch.object(ha_api.FailoverSegmentAPI, 'create_segment') def test_create_with_duplicate_segment_name(self, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } mock_create.side_effect = (exception. 
FailoverSegmentExists(name='segment1')) self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, body=body) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.FailoverSegmentAPI, 'create_segment') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(http.CREATED, resp.status_code) @ddt.data( # no segment {"body": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}, # no name {"body": { "segment": { "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # name with leading trailing spaces {"body": { "segment": { "name": " segment1 ", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # null name {"body": { "segment": { "name": "", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # name too long {"body": { "segment": { "name": "segment1" * 255, "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute"}}}, # extra invalid args {"body": { "segment": { "name": "segment1" * 255, "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute", "foo": "fake_foo"}}} ) @ddt.unpack def test_create_failure(self, body): self.assertRaises(self.bad_request, self.controller.create, self.req, body=body) @mock.patch.object(ha_api.FailoverSegmentAPI, 'get_segment') def test_show(self, mock_get_segment): mock_get_segment.return_value = FAILOVER_SEGMENT result = 
self.controller.show(self.req, uuidsentinel.fake_segment) result = result['segment'] self._assert_segment_data(FAILOVER_SEGMENT, _make_segment_obj(result)) @mock.patch.object(ha_api.FailoverSegmentAPI, 'get_segment') def test_show_with_non_existing_id(self, mock_get_segment): mock_get_segment.side_effect = exception.FailoverSegmentNotFound( id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, "2") @ddt.data( {"body": {"segment": {"name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto"}}}, # with name only {"body": {"segment": {"name": "segment1"}}} ) @ddt.unpack @mock.patch.object(ha_api.FailoverSegmentAPI, 'update_segment') def test_update(self, mock_update_segment, body): mock_update_segment.return_value = FAILOVER_SEGMENT result = self.controller.update(self.req, uuidsentinel.fake_segment, body=body) result = result['segment'] self._assert_segment_data(FAILOVER_SEGMENT, _make_segment_obj(result)) @ddt.data( # no updates {"test_data": {"segment": {}}}, # no update key {"test_data": {"asdf": {}}}, # wrong updates {"test_data": {"segment": {"name": "disable", "foo": "bar"}}}, # null name {"test_data": {"segment": {"name": ""}}}, # name too long {"test_data": {"segment": {"name": "x" * 256}}} ) @ddt.unpack def test_update_failure(self, test_data): self.assertRaises(self.bad_request, self.controller.update, self.req, uuidsentinel.fake_segment, body=test_data) @mock.patch.object(ha_api.FailoverSegmentAPI, 'update_segment') def test_update_with_non_exising_segment(self, mock_update_segment): test_data = {"segment": {"name": "segment11"}} mock_update_segment.side_effect = exception.FailoverSegmentNotFound( id="2") self.assertRaises(exc.HTTPNotFound, self.controller.update, self.req, "2", body=test_data) @mock.patch.object(ha_api.FailoverSegmentAPI, 'update_segment') def test_update_with_duplicated_name(self, mock_update_segment): test_data = {"segment": {"name": "segment1"}} mock_update_segment.side_effect = 
exception.FailoverSegmentExists( name="segment1") self.assertRaises(exc.HTTPConflict, self.controller.update, self.req, uuidsentinel.fake_segment, body=test_data) @mock.patch.object(ha_api.FailoverSegmentAPI, 'delete_segment') def test_delete_segment(self, mock_delete): self.controller.delete(self.req, uuidsentinel.fake_segment) self.assertTrue(mock_delete.called) @mock.patch.object(ha_api.FailoverSegmentAPI, 'delete_segment') def test_delete_segment_not_found(self, mock_delete): mock_delete.side_effect = exception.FailoverSegmentNotFound( id=uuidsentinel.fake_segment) self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.FailoverSegmentAPI, 'delete_segment') def test_delete_segment_with_204_status(self, mock_client, mock_delete): url = '/v1/segments/%s' % uuidsentinel.fake_segment fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True) fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'DELETE' resp = fake_req.get_response(self.app) self.assertEqual(http.NO_CONTENT, resp.status_code) class FailoverSegmentTestCasePolicyNotAuthorized(test.NoDBTestCase): """Test Case for failover segment non admin.""" def setUp(self): super(FailoverSegmentTestCasePolicyNotAuthorized, self).setUp() self.controller = segments.SegmentsController() self.req = fakes.HTTPRequest.blank('/v1/segments') self.context = self.req.environ['masakari.context'] def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." 
% rule_name, exc.format_message()) def test_index_no_admin(self): rule_name = "os_masakari_api:segments:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req) self._check_rule(exc, rule_name) def test_create_no_admin(self): rule_name = "os_masakari_api:segments:create" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.create, self.req, body=body) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:segments:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, uuidsentinel.fake_segment) self._check_rule(exc, rule_name) def test_update_no_admin(self): rule_name = "os_masakari_api:segments:update" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "segment": { "name": "segment1", "service_type": "COMPUTE", "recovery_method": "auto", "description": "failover_segment for compute" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.update, self.req, uuidsentinel.fake_segment, body=body) self._check_rule(exc, rule_name) def test_delete_no_admin(self): rule_name = "os_masakari_api:segments:delete" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.delete, self.req, uuidsentinel.fake_segment) self._check_rule(exc, rule_name) masakari-9.0.0/masakari/tests/unit/api/openstack/ha/test_notifications.py0000664000175000017500000005206513656747723026711 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # All Rights Reserved. 
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Tests for the notifications api."""

import copy
from unittest import mock

import ddt
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from six.moves import http_client as http
from webob import exc

from masakari.api.openstack.ha import notifications
from masakari.engine import rpcapi as engine_rpcapi
from masakari import exception
from masakari.ha import api as ha_api
from masakari.objects import base as obj_base
from masakari.objects import fields
from masakari.objects import notification as notification_obj
from masakari import test
from masakari.tests.unit.api.openstack import fakes
from masakari.tests.unit.objects import test_objects
from masakari.tests import uuidsentinel

# Fixed "now" shared by all fixtures; microseconds stripped so values
# round-trip through the API layer unchanged.
NOW = timeutils.utcnow().replace(microsecond=0)

# Fields that may legitimately be absent when comparing objects (only
# populated by the v1.1 recovery-workflow-details API).
OPTIONAL = ['recovery_workflow_details']


def _make_notification_obj(notification_dict):
    # Build a Notification versioned object from a plain fixture dict.
    return notification_obj.Notification(**notification_dict)


def _make_notification_progress_details_obj(progress_details):
    # Build a NotificationProgressDetails object from a plain fixture dict.
    return notification_obj.NotificationProgressDetails(**progress_details)


def _make_notifications_list(notifications_list):
    # NOTE(review): this wraps the items in Notification(objects=[...]);
    # presumably a list object class is meant - confirm against
    # masakari.objects.notification.
    return notification_obj.Notification(objects=[
        _make_notification_obj(a) for a in notifications_list])


# Canonical VM-type notification fixture used by most tests below.
NOTIFICATION_DATA = {"type": "VM", "id": 1,
                     "payload": {'event': 'STOPPED',
                                 'host_status': 'NORMAL',
                                 'cluster_status': 'ONLINE'},
                     "source_host_uuid": uuidsentinel.fake_host,
                     "generated_time": NOW,
                     "status": "running",
                     "notification_uuid": uuidsentinel.fake_notification,
                     "created_at": NOW,
                     "updated_at": None,
                     "deleted_at": None,
                     "deleted": 0
                     }

NOTIFICATION = _make_notification_obj(NOTIFICATION_DATA)

# Recovery-workflow progress fixture for the v1.1 "show" tests.
RECOVERY_DETAILS = {"progress": 1.0,
                    "state": "SUCCESS",
                    "name": "StopInstanceTask",
                    "progress_details": [
                        {"timestamp": "2019-03-07 13:54:28",
                         "message": "Stopping instance",
                         "progress": "0.0"},
                    ]}

NOTI_DATA_WITH_DETAILS = copy.deepcopy(NOTIFICATION_DATA)

NOTIFICATION_WITH_PROGRESS_DETAILS = _make_notification_obj(
    NOTI_DATA_WITH_DETAILS)
RECOVERY_OBJ = _make_notification_progress_details_obj(RECOVERY_DETAILS)
NOTIFICATION_WITH_PROGRESS_DETAILS.recovery_workflow_details = [RECOVERY_OBJ]

NOTIFICATION_LIST = [
    {"type": "VM", "id": 1, "payload": {'event': 'STOPPED',
                                        'host_status': 'NORMAL',
                                        'cluster_status': 'ONLINE'},
     "source_host_uuid": uuidsentinel.fake_host, "generated_time": NOW,
     "status": "running",
     "notification_uuid": uuidsentinel.fake_notification,
     "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0},

    {"type": "PROCESS", "id": 2, "payload": {'event': 'STOPPED',
                                             'process_name': 'fake_process'},
     "source_host_uuid": uuidsentinel.fake_host1, "generated_time": NOW,
     "status": "running",
     "notification_uuid": uuidsentinel.fake_notification1,
     "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0},
]

NOTIFICATION_LIST = _make_notifications_list(NOTIFICATION_LIST)


@ddt.ddt
class NotificationTestCase(test.TestCase):
    """Test Case for notifications api."""

    # Exception type the API schema validation is expected to raise.
    bad_request = exception.ValidationError

    @mock.patch.object(engine_rpcapi, 'EngineAPI')
    def setUp(self, mock_rpc):
        # EngineAPI is patched so no RPC client is created during setUp.
        super(NotificationTestCase, self).setUp()
        self.controller = notifications.NotificationsController()
        self.req = fakes.HTTPRequest.blank('/v1/notifications',
                                           use_admin_context=True)
        self.context = self.req.environ['masakari.context']

    @property
    def app(self):
        # NOTE(review): init_only='os-hosts' appears copied from the hosts
        # tests - confirm whether the notifications extension is the one
        # that should be initialised here.
        return fakes.wsgi_app_v1(init_only='os-hosts')

    def _assert_notification_data(self, expected, actual):
        self.assertTrue(obj_base.obj_equal_prims(expected,
actual), "The notifications objects were not equal") @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index(self, mock_get_all): mock_get_all.return_value = NOTIFICATION_LIST result = self.controller.index(self.req) result = result['notifications'] self._assert_notification_data(NOTIFICATION_LIST, _make_notifications_list(result)) @ddt.data( # limit negative "limit=-1", # invalid sort key "sort_key=abcd", # invalid sort dir "sort_dir=abcd") def test_index_invalid(self, param): req = fakes.HTTPRequest.blank("/v1/notifications?%s" % param, use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index_marker_not_found(self, mock_get_all): fake_request = fakes.HTTPRequest.blank('/v1/notifications?marker=1234', use_admin_context=True) mock_get_all.side_effect = exception.MarkerNotFound(marker="1234") self.assertRaises(exc.HTTPBadRequest, self.controller.index, fake_request) def test_index_invalid_generated_since(self): req = fakes.HTTPRequest.blank('/v1/notifications?generated-since=abcd', use_admin_context=True) self.assertRaises(exc.HTTPBadRequest, self.controller.index, req) @mock.patch.object(ha_api.NotificationAPI, 'get_all') def test_index_valid_generated_since(self, mock_get_all): url = '/v1/notifications?generated-since=%s' % str(NOW) req = fakes.HTTPRequest.blank(url, use_admin_context=True) mock_get_all.return_value = NOTIFICATION_LIST result = self.controller.index(req) result = result['notifications'] self._assert_notification_data(NOTIFICATION_LIST, _make_notifications_list(result)) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create(self, mock_create): mock_create.return_value = NOTIFICATION result = self.controller.create(self.req, body={ "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", 
"generated_time": "2016-09-13T09:11:21.656788"}}) result = result['notification'] test_objects.compare_obj(self, result, NOTIFICATION_DATA, allow_missing=OPTIONAL) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_process_notification(self, mock_create): mock_create.return_value = NOTIFICATION result = self.controller.create(self.req, body={ "notification": { "hostname": "fake_host", "payload": { "process_name": "nova-compute", "event": "STOPPED" }, "type": "PROCESS", "generated_time": "2016-09-13T09:11:21.656788"}}) result = result['notification'] test_objects.compare_obj(self, result, NOTIFICATION_DATA, allow_missing=OPTIONAL) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", "generated_time": NOW } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(http.ACCEPTED, resp.status_code) @mock.patch.object(ha_api.NotificationAPI, 'create_notification') def test_create_host_not_found(self, mock_create): body = { "notification": { "hostname": "fake_host", "payload": { "instance_uuid": uuidsentinel.instance_uuid, "vir_domain_event": "STOPPED_FAILED", "event": "LIFECYCLE" }, "type": "VM", "generated_time": "2016-09-13T09:11:21.656788" } } mock_create.side_effect = exception.HostNotFoundByName( host_name="fake_host") self.assertRaises(exc.HTTPBadRequest, self.controller.create, self.req, body=body) @ddt.data( # invalid type {"body": { "notification": {"hostname": "fake_host", "payload": {"event": "STOPPED", "host_status": "NORMAL", "cluster_status": "ONLINE"}, "type": "Fake", 
"generated_time": "2016-09-13T09:11:21.656788"}}},
        # no notification in body
        {"body": {"hostname": "fake_host",
                  "payload": {"event": "STOPPED",
                              "host_status": "NORMAL",
                              "cluster_status": "ONLINE"},
                  "type": "VM",
                  "generated_time": "2016-09-13T09:11:21.656788"}},
        # no payload
        {"body": {"notification": {"hostname": "fake_host",
                                   "type": "VM",
                                   "generated_time":
                                       "2016-09-13T09:11:21.656788"}}},
        # no hostname
        {"body": {"notification": {"payload": {"event": "STOPPED",
                                               "host_status": "NORMAL",
                                               "cluster_status": "ONLINE"},
                                   "type": "VM",
                                   "generated_time":
                                       "2016-09-13T09:11:21.656788"}}},
        # no type
        {"body": {"notification": {"hostname": "fake_host",
                                   "payload": {"event": "STOPPED",
                                               "host_status": "NORMAL",
                                               "cluster_status": "ONLINE"},
                                   "generated_time":
                                       "2016-09-13T09:11:21.656788"}}},
        # no generated time
        {"body": {"notification": {"hostname": "fake_host",
                                   "payload": {"event": "STOPPED",
                                               "host_status": "NORMAL",
                                               "cluster_status": "ONLINE"},
                                   "type": "VM",
                                   }}},
        # hostname too long
        {"body": {
            "notification": {"hostname": "fake_host" * 255,
                             "payload": {"event": "STOPPED",
                                         "host_status": "NORMAL",
                                         "cluster_status": "ONLINE"},
                             "type": "VM",
                             "generated_time":
                                 "2016-09-13T09:11:21.656788"}}},
        # extra invalid args
        {"body": {
            "notification": {"hostname": "fake_host",
                             "payload": {"event": "STOPPED",
                                         "host_status": "NORMAL",
                                         "cluster_status": "ONLINE"},
                             "type": "VM",
                             "generated_time": "2016-09-13T09:11:21.656788",
                             "invalid_extra": "non_expected_parameter"}}}
    )
    @ddt.unpack
    def test_create_failure(self, body):
        # Every malformed body above must fail schema validation.
        self.assertRaises(self.bad_request, self.controller.create,
                          self.req, body=body)

    @ddt.data(
        # invalid event for PROCESS type
        {"params": {"payload": {"event": "invalid",
                                "process_name": "nova-compute"},
                    "type": fields.NotificationType.PROCESS}},
        # invalid event for VM type
        {"params": {"payload": {"event": "invalid",
                                "host_status": fields.HostStatusType.NORMAL,
                                "cluster_status":
                                    fields.ClusterStatusType.ONLINE},
                    "type": fields.NotificationType.VM}},
        # invalid event for HOST_COMPUTE type
        {"params": {"payload": {"event": "invalid"},
                    "type": fields.NotificationType.COMPUTE_HOST}},
        # empty payload
        {"params": {"payload": {},
                    "type": fields.NotificationType.COMPUTE_HOST}},
        # empty process_name
        {"params": {"payload": {"event": fields.EventType.STOPPED,
                                "process_name": ""},
                    "type": fields.NotificationType.PROCESS}},
        # process_name too long value
        {"params": {"payload": {"event": fields.EventType.STOPPED,
                                "process_name": "a" * 4097},
                    "type": fields.NotificationType.PROCESS}},
        # process_name invalid data_type
        {"params": {"payload": {"event": fields.EventType.STOPPED,
                                "process_name": 123},
                    "type": fields.NotificationType.PROCESS}}
    )
    @ddt.unpack
    def test_create_with_invalid_payload(self, params):
        # The payload schema depends on the notification type; each case
        # above violates the per-type payload rules.
        body = {
            "notification": {"hostname": "fake_host",
                             "generated_time": "2016-09-13T09:11:21.656788"
                             }
        }

        body['notification']['payload'] = params['payload']
        body['notification']['type'] = params['type']
        self.assertRaises(self.bad_request, self.controller.create,
                          self.req, body=body)

    @mock.patch.object(ha_api.NotificationAPI, 'create_notification')
    def test_create_duplicate_notification(self, mock_create_notification):
        # A duplicate notification maps to HTTP 409 Conflict.
        mock_create_notification.side_effect = exception.DuplicateNotification(
            type="COMPUTE_HOST")
        body = {
            "notification": {"hostname": "fake_host",
                             "payload": {"event": "STOPPED",
                                         "host_status": "NORMAL",
                                         "cluster_status": "ONLINE"},
                             "type": "COMPUTE_HOST",
                             "generated_time": str(NOW)}}
        self.assertRaises(exc.HTTPConflict, self.controller.create,
                          self.req, body=body)

    @mock.patch.object(ha_api.NotificationAPI, 'create_notification')
    def test_create_host_on_maintenance(self, mock_create_notification):
        # Notifications for hosts under maintenance also map to 409.
        mock_create_notification.side_effect = (
            exception.HostOnMaintenanceError(host_name="fake_host"))
        body = {
            "notification": {"hostname": "fake_host",
                             "payload": {"event": "STOPPED",
                                         "host_status": "NORMAL",
                                         "cluster_status": "ONLINE"},
                             "type": "COMPUTE_HOST",
                             "generated_time": str(NOW)}}
        self.assertRaises(exc.HTTPConflict, self.controller.create,
                          self.req, body=body)
    @mock.patch.object(ha_api.NotificationAPI, 'get_notification')
    def test_show(self, mock_get_notification):
        mock_get_notification.return_value = NOTIFICATION

        result = self.controller.show(self.req,
                                      uuidsentinel.fake_notification)
        result = result['notification']
        self._assert_notification_data(NOTIFICATION,
                                       _make_notification_obj(result))

    @mock.patch.object(ha_api.NotificationAPI, 'get_notification')
    def test_show_with_non_existing_uuid(self, mock_get_notification):
        # Unknown notification uuids must surface as HTTP 404.
        mock_get_notification.side_effect = exception.NotificationNotFound(
            id="2")
        self.assertRaises(exc.HTTPNotFound,
                          self.controller.show, self.req, "2")

    @ddt.data('DELETE', 'PUT')
    @mock.patch('masakari.rpc.get_client')
    def test_delete_and_update_notification(self, method, mock_client):
        # Notifications are immutable through the API: both DELETE and PUT
        # must yield 405 Method Not Allowed.
        url = '/v1/notifications/%s' % uuidsentinel.fake_notification
        fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True)
        fake_req.headers['Content-Type'] = 'application/json'
        fake_req.method = method
        resp = fake_req.get_response(self.app)
        self.assertEqual(http.METHOD_NOT_ALLOWED, resp.status_code)


class NotificationCasePolicyNotAuthorized(test.NoDBTestCase):
    """Test Case for notifications non admin."""

    @mock.patch.object(engine_rpcapi, 'EngineAPI')
    def setUp(self, mock_rpc):
        super(NotificationCasePolicyNotAuthorized, self).setUp()
        self.controller = notifications.NotificationsController()
        # Note: no admin context here - these tests exercise policy denial.
        self.req = fakes.HTTPRequest.blank('/v1/notifications')
        self.context = self.req.environ['masakari.context']

    def _check_rule(self, exc, rule_name):
        # 'exc' is the caught PolicyNotAuthorized (shadows the webob import
        # within this helper).
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    def test_create_no_admin(self):
        rule_name = "os_masakari_api:notifications:create"
        self.policy.set_rules({rule_name: "project:non_fake"})
        body = {
            "notification": {"hostname": "fake_host",
                             "payload": {"event": "STOPPED",
                                         "host_status": "NORMAL",
                                         "cluster_status": "ONLINE"},
                             "type": "VM",
                             "generated_time":
                                 "2016-09-13T09:11:21.656788"}}
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller.create,
                                self.req, body=body)
        self._check_rule(exc, rule_name)

    def test_show_no_admin(self):
        rule_name = "os_masakari_api:notifications:detail"
        self.policy.set_rules({rule_name: "project:non_fake"})
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller.show,
                                self.req, uuidsentinel.fake_notification)
        self._check_rule(exc, rule_name)

    def test_index_no_admin(self):
        rule_name = "os_masakari_api:notifications:index"
        self.policy.set_rules({rule_name: "project:non_fake"})
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller.index,
                                self.req)
        self._check_rule(exc, rule_name)


class NotificationV1_1_TestCase(NotificationTestCase):
    """Test Case for notifications api for 1.1 API"""
    # Microversion under test; 1.1 adds recovery_workflow_details to show().
    api_version = '1.1'

    @mock.patch.object(engine_rpcapi, 'EngineAPI')
    def setUp(self, mock_rpc):
        super(NotificationV1_1_TestCase, self).setUp()
        self.controller = notifications.NotificationsController()
        self.req = fakes.HTTPRequest.blank('/v1/notifications',
                                           use_admin_context=True,
                                           version=self.api_version)
        self.context = self.req.environ['masakari.context']

    @mock.patch.object(ha_api.NotificationAPI,
                       'get_notification_recovery_workflow_details')
    def test_show(self, mock_get_notification_recovery_workflow_details):
        (mock_get_notification_recovery_workflow_details
         .return_value) = NOTIFICATION_WITH_PROGRESS_DETAILS

        result = self.controller.show(self.req,
                                      uuidsentinel.fake_notification)
        result = result['notification']
        # NOTE(review): assertItemsEqual was removed from unittest in
        # Python 3; presumably the test base class provides it - confirm,
        # otherwise assertCountEqual is the py3 spelling.
        self.assertItemsEqual([RECOVERY_OBJ],
                              result.recovery_workflow_details)
        self._assert_notification_data(NOTIFICATION_WITH_PROGRESS_DETAILS,
                                       _make_notification_obj(result))
masakari-9.0.0/masakari/tests/unit/api/openstack/ha/test_hosts.py0000664000175000017500000005357413656747723025176 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Tests for the hosts api."""

from unittest import mock

import ddt
from oslo_serialization import jsonutils
from six.moves import http_client as http
from webob import exc

from masakari.api.openstack.ha import hosts
from masakari import exception
from masakari.ha import api as ha_api
from masakari.objects import base as obj_base
from masakari.objects import host as host_obj
from masakari.objects import segment as segment_obj
from masakari import test
from masakari.tests.unit.api.openstack import fakes
from masakari.tests import uuidsentinel


def _make_host_obj(host_dict):
    # Build a Host versioned object from a plain fixture dict.
    return host_obj.Host(**host_dict)


def _make_hosts_list(hosts_list):
    # NOTE(review): wraps items in Host(objects=[...]); presumably a list
    # object class is meant - confirm against masakari.objects.host.
    return host_obj.Host(objects=[
        _make_host_obj(a) for a in hosts_list])


# Two-host fixture shared by the index tests below.
HOST_LIST = [
    {"name": "host_1", "id": "1", "reserved": False,
     "on_maintenance": False, "type": "fake",
     "control_attributes": "fake-control_attributes",
     "uuid": uuidsentinel.fake_host_1,
     "failover_segment_id": uuidsentinel.fake_segment1},

    {"name": "host_2", "id": "2", "reserved": False,
     "on_maintenance": False, "type": "fake",
     "control_attributes": "fake-control_attributes",
     "uuid": uuidsentinel.fake_host_2,
     "failover_segment_id":
uuidsentinel.fake_segment1}
]

HOST_LIST = _make_hosts_list(HOST_LIST)

# Single-host fixture used by the create/show/update tests.
HOST = {
    "name": "host_1", "id": "1", "reserved": False,
    "on_maintenance": False, "type": "fake",
    "control_attributes": "fake-control_attributes",
    "uuid": uuidsentinel.fake_host_1,
    "failover_segment_id": uuidsentinel.fake_segment1
}

HOST = _make_host_obj(HOST)


@ddt.ddt
class HostTestCase(test.TestCase):
    """Test Case for host api."""

    # Exception type the API schema validation is expected to raise.
    bad_request = exception.ValidationError

    def _set_up(self):
        self.controller = hosts.HostsController()
        self.req = fakes.HTTPRequest.blank(
            '/v1/segments/%s/hosts' % uuidsentinel.fake_segment1,
            use_admin_context=True)
        self.context = self.req.environ['masakari.context']

    def setUp(self):
        super(HostTestCase, self).setUp()
        self._set_up()

    @property
    def app(self):
        return fakes.wsgi_app_v1(init_only='os-hosts')

    def _assert_host_data(self, expected, actual):
        self.assertTrue(obj_base.obj_equal_prims(expected, actual),
                        "The host objects were not equal")

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid')
    @mock.patch.object(ha_api.HostAPI, 'get_all')
    def test_index(self, mock_get_all, mock_segment):
        mock_segment.return_value = mock.Mock()
        mock_get_all.return_value = HOST_LIST

        result = self.controller.index(self.req,
                                       uuidsentinel.fake_segment1)
        result = result['hosts']
        self._assert_host_data(HOST_LIST, _make_hosts_list(result))

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid')
    @mock.patch.object(ha_api.HostAPI, 'get_all')
    def test_index_valid_on_maintenance(self, mock_get_all, mock_segment):
        # Every truthy spelling accepted by the boolean query parameter.
        host_list = [{"name": "host_1", "id": "1", "on_maintenance": True},
                     {"name": "host_2", "id": "2", "on_maintenance": True}]
        mock_get_all.return_value = host_list
        for parameter in ['1', 't', 'true', 'on', 'y', 'yes']:
            req = fakes.HTTPRequest.blank(
                '/v1/segments/%s/hosts?on_maintenance=''%s' % (
                    uuidsentinel.fake_segment1, parameter),
                use_admin_context=True)
            result = self.controller.index(req,
                                           uuidsentinel.fake_segment1)
            self.assertIn('hosts', result)
            self.assertEqual(len(host_list), len(result['hosts']))
            for host in result['hosts']:
                self.assertTrue(host['on_maintenance'])

        # And every falsy spelling.
        host_list = [{"name": "host_1", "id": "1", "on_maintenance": False},
                     {"name": "host_2", "id": "2", "on_maintenance": False}]
        mock_get_all.return_value = host_list
        for parameter in ['0', 'f', 'false', 'off', 'n', 'no']:
            req = fakes.HTTPRequest.blank(
                '/v1/segments/%s/hosts?on_maintenance=''%s' % (
                    uuidsentinel.fake_segment1, parameter),
                use_admin_context=True)
            result = self.controller.index(req,
                                           uuidsentinel.fake_segment1)
            self.assertIn('hosts', result)
            self.assertEqual(len(host_list), len(result['hosts']))
            for host in result['hosts']:
                self.assertFalse(host['on_maintenance'])

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid',
                       return_value=mock.Mock())
    def test_index_invalid_on_maintenance(self, mock_segment):
        req = fakes.HTTPRequest.blank(
            '/v1/segments/%s/hosts?on_maintenance='
            'abcd' % uuidsentinel.fake_segment1,
            use_admin_context=True)
        self.assertRaises(exc.HTTPBadRequest, self.controller.index, req,
                          uuidsentinel.fake_segment1)

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid')
    @mock.patch.object(ha_api.HostAPI, 'get_all')
    def test_index_valid_reserved(self, mock_get_all, mock_segment):
        # Same truthy/falsy matrix as on_maintenance, for ?reserved=.
        host_list = [{"name": "host_1", "id": "1", "reserved": True},
                     {"name": "host_2", "id": "2", "reserved": True}]
        mock_get_all.return_value = host_list
        for parameter in ['1', 't', 'true', 'on', 'y', 'yes']:
            req = fakes.HTTPRequest.blank(
                '/v1/segments/%s/hosts?reserved=''%s' % (
                    uuidsentinel.fake_segment1, parameter),
                use_admin_context=True)
            result = self.controller.index(req,
                                           uuidsentinel.fake_segment1)
            self.assertIn('hosts', result)
            self.assertEqual(len(host_list), len(result['hosts']))
            for host in result['hosts']:
                self.assertTrue(host['reserved'])

        host_list = [{"name": "host_1", "id": "1", "reserved": False},
                     {"name": "host_2", "id": "2", "reserved": False}]
        mock_get_all.return_value = host_list
        for parameter in ['0', 'f', 'false', 'off', 'n', 'no']:
            req = fakes.HTTPRequest.blank(
                '/v1/segments/%s/hosts?reserved=''%s' % (
                    uuidsentinel.fake_segment1, parameter),
                use_admin_context=True)
            result = self.controller.index(req,
                                           uuidsentinel.fake_segment1)
            self.assertIn('hosts', result)
            self.assertEqual(len(host_list), len(result['hosts']))
            for host in result['hosts']:
                self.assertFalse(host['reserved'])

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid',
                       return_value=mock.Mock())
    def test_index_invalid_reserved(self, mock_segment):
        req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?reserved='
                                      'abcd' % uuidsentinel.fake_segment1,
                                      use_admin_context=True)
        self.assertRaises(exc.HTTPBadRequest, self.controller.index, req,
                          uuidsentinel.fake_segment1)

    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid')
    @mock.patch.object(ha_api.HostAPI, 'get_all')
    def test_index_marker_not_found(self, mock_get_all, mock_segment):
        req = fakes.HTTPRequest.blank(
            '/v1/segments/%s/hosts?marker=123456' % (
                uuidsentinel.fake_segment1), use_admin_context=True)
        mock_segment.return_value = mock.Mock()
        mock_get_all.side_effect = exception.MarkerNotFound(marker="123456")
        self.assertRaises(exc.HTTPBadRequest, self.controller.index, req,
                          uuidsentinel.fake_segment1)

    def test_get_all_marker_negative(self):
        req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?limit=-1' % (
            uuidsentinel.fake_segment1), use_admin_context=True)
        self.assertRaises(exc.HTTPBadRequest, self.controller.index, req,
                          uuidsentinel.fake_segment1)

    @ddt.data('sort_key', 'sort_dir')
    @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid',
                       return_value=mock.Mock())
    def test_index_invalid(self, sort_by, mock_segment):
        req = fakes.HTTPRequest.blank('/v1/segments/%s/hosts?%s=abcd' % (
            uuidsentinel.fake_segment1, sort_by), use_admin_context=True)
        self.assertRaises(exc.HTTPBadRequest, self.controller.index, req,
                          uuidsentinel.fake_segment1)

    @ddt.data([exception.MarkerNotFound(marker="123456"),
               "/v1/segments/%s/hosts?marker=123456",
exc.HTTPBadRequest], [exception.FailoverSegmentNotFound( id=uuidsentinel.fake_segment1), "/v1/segments/%s/hosts", exc.HTTPNotFound]) @ddt.unpack @mock.patch.object(segment_obj.FailoverSegment, 'get_by_uuid') @mock.patch.object(ha_api.HostAPI, 'get_all') def test_index_not_found(self, masakari_exc, url, exc, mock_get_all, mock_segment): mock_segment.return_value = mock.Mock() mock_get_all.side_effect = masakari_exc req = fakes.HTTPRequest.blank(url % uuidsentinel.fake_segment1, use_admin_context=True) self.assertRaises(exc, self.controller.index, req, uuidsentinel.fake_segment1) @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create(self, mock_create): mock_create.return_value = HOST body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } result = self.controller.create(self.req, uuidsentinel.fake_segment1, body=body) result = result['host'] self._assert_host_data(HOST, _make_host_obj(result)) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create_success_with_201_response_code( self, mock_client, mock_create): body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } fake_req = self.req fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'POST' fake_req.body = jsonutils.dump_as_bytes(body) resp = fake_req.get_response(self.app) self.assertEqual(http.CREATED, resp.status_code) @mock.patch.object(ha_api.HostAPI, 'create_host') def test_create_with_duplicate_host_name(self, mock_create): mock_create.side_effect = (exception. 
HostExists(name='host-1')) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } self.assertRaises(exc.HTTPConflict, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) @ddt.data( # no_host {"body": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}, # no_name {"body": { "host": { "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # name_with_leading_trailing_spaces {"body": { "host": { "name": " host-1 ", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # null_name {"body": { "host": { "name": "", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # name_too_long {"body": { "host": { "name": "host-1" * 255, "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # extra_invalid_arg {"body": { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes", "foo": "bar"}}}, # type too long {"body": { "host": { "name": "host-1", "type": "x" * 256, "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # type special characters {"body": { "host": { "name": "host-1", "type": "x_y", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}} ) @ddt.unpack def test_create_failure(self, body): self.assertRaises(self.bad_request, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) @mock.patch.object(ha_api.HostAPI, 'get_host') def test_show(self, mock_get_host): mock_get_host.return_value = HOST result = self.controller.show(self.req, uuidsentinel.fake_segment1, 
uuidsentinel.fake_host_1) result = result['host'] self._assert_host_data(HOST, _make_host_obj(result)) @mock.patch.object(ha_api.HostAPI, 'get_host') def test_show_with_non_existing_id(self, mock_get_host): mock_get_host.side_effect = exception.HostNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.show, self.req, uuidsentinel.fake_segment1, "2") @ddt.data( {"body": { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes"}}}, # only name {"body": {"host": {"name": "host-1"}}} ) @ddt.unpack @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update(self, mock_update_host, body): mock_update_host.return_value = HOST result = self.controller.update(self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=body) result = result['host'] self._assert_host_data(HOST, _make_host_obj(result)) @ddt.data( # no updates {"test_data": {"host": {}}}, # no update key {"test_data": {"asdf": {}}}, # wrong updates {"test_data": {"host": {"name": "disable", "foo": "bar"}}}, # null name {"test_data": {"host": {"name": ""}}}, # name too long {"test_data": {"host": {"name": "x" * 256}}}, # type too long {"test_data": {"host": {"type": "x" * 256}}}, # type with special characters {"test_data": {"host": {"type": "x_y"}}} ) @ddt.unpack def test_update_failure(self, test_data): self.assertRaises(self.bad_request, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=test_data) @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update_with_non_exising_host(self, mock_update_host): test_data = {"host": {"name": "host11"}} mock_update_host.side_effect = exception.HostNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.update, self.req, uuidsentinel.fake_segment1, "2", body=test_data) @mock.patch.object(ha_api.HostAPI, 'update_host') def test_update_with_duplicated_name(self, mock_update_host): test_data = {"host": 
{"name": "host-1"}} mock_update_host.side_effect = exception.HostExists(name="host-1") self.assertRaises(exc.HTTPConflict, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=test_data) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host(self, mock_delete): self.controller.delete(self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self.assertTrue(mock_delete.called) @mock.patch('masakari.rpc.get_client') @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_with_204_status(self, mock_client, mock_delete): url = '/v1/segments/%(segment)s/hosts/%(host)s' % { 'segment': uuidsentinel.fake_segment1, 'host': uuidsentinel.fake_host_1 } fake_req = fakes.HTTPRequest.blank(url, use_admin_context=True) fake_req.headers['Content-Type'] = 'application/json' fake_req.method = 'DELETE' resp = fake_req.get_response(self.app) self.assertEqual(http.NO_CONTENT, resp.status_code) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_not_found(self, mock_delete): mock_delete.side_effect = exception.HostNotFound(id="2") self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3) @mock.patch.object(ha_api.HostAPI, 'delete_host') def test_delete_host_not_found_for_failover_segment(self, mock_delete): mock_delete.side_effect = exception.HostNotFoundUnderFailoverSegment( host_uuid=uuidsentinel.fake_host_3, segment_uuid=uuidsentinel.fake_segment1) self.assertRaises(exc.HTTPNotFound, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_3) class HostTestCasePolicyNotAuthorized(test.NoDBTestCase): """Test Case for host non admin.""" def _set_up(self): self.controller = hosts.HostsController() self.req = fakes.HTTPRequest.blank( '/v1/segments/%s/hosts' % uuidsentinel.fake_segment1) self.context = self.req.environ['masakari.context'] def setUp(self): super(HostTestCasePolicyNotAuthorized, 
self).setUp() self._set_up() def _check_rule(self, exc, rule_name): self.assertEqual( "Policy doesn't allow %s to be performed." % rule_name, exc.format_message()) def test_index_no_admin(self): rule_name = "os_masakari_api:os-hosts:index" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.index, self.req, uuidsentinel.fake_segment1) self._check_rule(exc, rule_name) def test_create_no_admin(self): rule_name = "os_masakari_api:os-hosts:create" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes" } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.create, self.req, uuidsentinel.fake_segment1, body=body) self._check_rule(exc, rule_name) def test_show_no_admin(self): rule_name = "os_masakari_api:os-hosts:detail" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.show, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self._check_rule(exc, rule_name) def test_update_no_admin(self): rule_name = "os_masakari_api:os-hosts:update" self.policy.set_rules({rule_name: "project:non_fake"}) body = { "host": { "name": "host-1", "type": "fake", "reserved": False, "on_maintenance": False, "control_attributes": "fake-control_attributes", } } exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.update, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1, body=body) self._check_rule(exc, rule_name) def test_delete_no_admin(self): rule_name = "os_masakari_api:os-hosts:delete" self.policy.set_rules({rule_name: "project:non_fake"}) exc = self.assertRaises(exception.PolicyNotAuthorized, self.controller.delete, self.req, uuidsentinel.fake_segment1, uuidsentinel.fake_host_1) self._check_rule(exc, rule_name) 
masakari-9.0.0/masakari/tests/unit/api/openstack/ha/test_versions.py0000664000175000017500000001571613656747723025712 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock from oslo_serialization import jsonutils from six.moves import http_client as http import webob from masakari.api import api_version_request as avr from masakari.api.openstack.ha.views import versions from masakari import test from masakari.tests.unit.api.openstack import fakes NS = { 'atom': 'http://www.w3.org/2005/Atom', 'ns': 'http://docs.openstack.org/common/api/v1.0' } MAX_API_VERSION = avr.max_api_version().get_string() EXP_LINKS = {'v1.0': {'html': 'http://docs.openstack.org/', }} EXP_VERSIONS = { "v1.0": { "id": "v1.0", "status": "SUPPORTED", "version": "", "min_version": "", "updated": "2011-01-21T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.ha+json;version=1", }, ], }, "v1": { "id": "v1", "status": "CURRENT", "version": MAX_API_VERSION, "min_version": "1.0", "updated": "2013-07-23T11:33:21Z", "links": [ { "rel": "self", "href": "http://localhost/v1/", }, { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.ha+json;version=1.0", } ], } } 
def _get_self_href(response): """Extract the URL to self from response data.""" data = jsonutils.loads(response.body) for link in data['versions'][0]['links']: if link['rel'] == 'self': return link['href'] return '' class VersionsViewBuilderTests(test.NoDBTestCase): def test_view_builder(self): base_url = "http://example.org/" version_data = { "v3.2.1": { "id": "3.2.1", "status": "CURRENT", "version": "1", "min_version": "1.0", "updated": "2011-07-18T11:30:00Z", } } expected = { "versions": [ { "id": "3.2.1", "status": "CURRENT", "version": "1", "min_version": "1.0", "updated": "2011-07-18T11:30:00Z", "links": [ { "rel": "self", "href": "http://example.org/v1/", }, ], } ] } builder = versions.ViewBuilder(base_url) output = builder.build_versions(version_data) self.assertEqual(expected, output) def _test_view_builder_osapi_ha_link_prefix(self, href=None): base_url = "http://example.org/v1/" if href is None: href = base_url version_data = { "id": "v1", "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2013-07-23T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": EXP_LINKS['v1.0']['html'], } ], "media-types": [ { "base": "application/json", "type": ("application/vnd.openstack." 
"ha+json;version=1.0") } ], } expected_data = copy.deepcopy(version_data) expected = {'version': expected_data} expected['version']['links'].insert(0, {"rel": "self", "href": href, }) builder = versions.ViewBuilder(base_url) output = builder.build_version(version_data) self.assertEqual(expected, output) def test_view_builder_without_osapi_ha_link_prefix(self): self._test_view_builder_osapi_ha_link_prefix() def test_generate_href(self): base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href('v1') self.assertEqual(expected, actual) def test_generate_href_unknown(self): base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href('foo') self.assertEqual(expected, actual) def test_generate_href_with_path(self): path = "random/path" base_url = "http://example.org/app/" expected = "http://example.org/app/v1/%s" % path builder = versions.ViewBuilder(base_url) actual = builder.generate_href("v1", path) self.assertEqual(actual, expected) def test_generate_href_with_empty_path(self): path = "" base_url = "http://example.org/app/" expected = "http://example.org/app/v1/" builder = versions.ViewBuilder(base_url) actual = builder.generate_href("v1", path) self.assertEqual(actual, expected) class VersionsTest(test.NoDBTestCase): exp_versions = copy.deepcopy(EXP_VERSIONS) exp_versions['v1.0']['links'].insert(0, { 'href': 'http://localhost/v1/', 'rel': 'self'}, ) @property def wsgi_app(self): return fakes.wsgi_app_v1(init_only=('versions',)) @mock.patch('masakari.rpc.get_client') def test_get_version_list_302(self, mock_get_client): req = webob.Request.blank('/v1') req.accept = "application/json" res = req.get_response(self.wsgi_app) self.assertEqual(http.FOUND, res.status_int) redirect_req = webob.Request.blank('/v1/') self.assertEqual(redirect_req.url, res.location) 
@mock.patch('masakari.rpc.get_client') def test_get_version_1_versions_invalid(self, mock_get_client): req = webob.Request.blank('/v1/versions/1234/foo') req.accept = "application/json" res = req.get_response(self.wsgi_app) self.assertEqual(http.NOT_FOUND, res.status_int) masakari-9.0.0/masakari/tests/unit/api/openstack/test_common.py0000664000175000017500000003417213656747723024737 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Test suites for 'common' code used throughout the OpenStack HTTP API. 
""" from testtools import matchers from unittest import mock import webob from masakari.api.openstack import common from masakari import test from masakari.tests.unit.api.openstack import fakes from masakari.tests import uuidsentinel class MiscFunctionsTest(test.TestCase): def test_remove_trailing_version_from_href(self): fixture = 'http://www.testsite.com/v1.1' expected = 'http://www.testsite.com' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_2(self): fixture = 'http://www.testsite.com/ha/v1.1' expected = 'http://www.testsite.com/ha' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_3(self): fixture = 'http://www.testsite.com/v1.1/images/v10.5' expected = 'http://www.testsite.com/v1.1/images' actual = common.remove_trailing_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_trailing_version_from_href_bad_request(self): fixture = 'http://www.testsite.com/v1.1/images' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) def test_remove_trailing_version_from_href_bad_request_2(self): fixture = 'http://www.testsite.com/images/v' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) def test_remove_trailing_version_from_href_bad_request_3(self): fixture = 'http://www.testsite.com/v1.1images' self.assertRaises(ValueError, common.remove_trailing_version_from_href, fixture) class TestCollectionLinks(test.NoDBTestCase): """Tests the _get_collection_links method.""" @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_less_than_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=10)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, "ignored", 
"uuid") self.assertFalse(href_link_mock.called) self.assertThat(results, matchers.HasLength(0)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_given_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=1)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict()) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('masakari.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit_with_given(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() # Given limit is greater than default max, only return default max params = mock.PropertyMock(return_value=dict(limit=2)) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) class LinkPrefixTest(test.NoDBTestCase): def test_update_link_prefix(self): vb = common.ViewBuilder() result = vb._update_link_prefix("http://192.168.0.243:24/", "http://127.0.0.1/ha") self.assertEqual("http://127.0.0.1/ha", result) result = 
vb._update_link_prefix("http://foo.x.com/v1", "http://new.prefix.com") self.assertEqual("http://new.prefix.com/v1", result) result = vb._update_link_prefix("http://foo.x.com/v1", "http://new.prefix.com:20455/" "new_extra_prefix") self.assertEqual("http://new.prefix.com:20455/new_extra_prefix/v1", result) class UrlJoinTest(test.NoDBTestCase): def test_url_join(self): pieces = ["one", "two", "three"] joined = common.url_join(*pieces) self.assertEqual("one/two/three", joined) def test_url_join_extra_slashes(self): pieces = ["one/", "/two//", "/three/"] joined = common.url_join(*pieces) self.assertEqual("one/two/three", joined) def test_url_join_trailing_slash(self): pieces = ["one", "two", "three", ""] joined = common.url_join(*pieces) self.assertEqual("one/two/three/", joined) def test_url_join_empty_list(self): pieces = [] joined = common.url_join(*pieces) self.assertEqual("", joined) def test_url_join_single_empty_string(self): pieces = [""] joined = common.url_join(*pieces) self.assertEqual("", joined) def test_url_join_single_slash(self): pieces = ["/"] joined = common.url_join(*pieces) self.assertEqual("", joined) class ViewBuilderLinkTest(test.NoDBTestCase): project_id = uuidsentinel.fake_project_id api_version = "1.0" def setUp(self): super(ViewBuilderLinkTest, self).setUp() self.request = self.req("/%s" % self.project_id) self.vb = common.ViewBuilder() def req(self, url, use_admin_context=False): return fakes.HTTPRequest.blank(url, use_admin_context=use_admin_context, version=self.api_version) def test_get_project_id(self): proj_id = self.vb._get_project_id(self.request) self.assertEqual(self.project_id, proj_id) def test_get_next_link(self): identifier = "identifier" collection = "collection" next_link = self.vb._get_next_link(self.request, identifier, collection) expected = "/".join((self.request.url, "%s?marker=%s" % (collection, identifier))) self.assertEqual(expected, next_link) def test_get_href_link(self): identifier = "identifier" collection = 
"collection" href_link = self.vb._get_href_link(self.request, identifier, collection) expected = "/".join((self.request.url, collection, identifier)) self.assertEqual(expected, href_link) def test_get_bookmark_link(self): identifier = "identifier" collection = "collection" bookmark_link = self.vb._get_bookmark_link(self.request, identifier, collection) bmk_url = ( common.remove_trailing_version_from_href(( self.request.application_url))) expected = "/".join((bmk_url, self.project_id, collection, identifier)) self.assertEqual(expected, bookmark_link) class PaginationParamsTest(test.NoDBTestCase): """Unit tests for the `masakari.api.openstack.common.get_pagination_params` method which takes in a request object and returns 'marker' and 'limit' GET params. """ def test_no_params(self): # Test no params. req = webob.Request.blank('/') self.assertEqual(common.get_pagination_params(req), {}) def test_valid_marker(self): # Test valid marker param. req = webob.Request.blank('/?marker=263abb28-1de6-412f-b00' 'b-f0ee0c4333c2') self.assertEqual(common.get_pagination_params(req), {'marker': '263abb28-1de6-412f-b00b-f0ee0c4333c2'}) def test_valid_limit(self): # Test valid limit param. req = webob.Request.blank('/?limit=10') self.assertEqual(common.get_pagination_params(req), {'limit': 10}) def test_invalid_limit(self): # Test invalid limit param. req = webob.Request.blank('/?limit=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_marker(self): # Test valid limit and marker parameters. marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2' req = webob.Request.blank('/?limit=20&marker=%s' % marker) self.assertEqual(common.get_pagination_params(req), {'marker': marker, 'limit': 20}) def test_valid_page_size(self): # Test valid page_size param. req = webob.Request.blank('/?page_size=10') self.assertEqual(common.get_pagination_params(req), {'page_size': 10}) def test_invalid_page_size(self): # Test invalid page_size param. 
req = webob.Request.blank('/?page_size=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_page_size(self): # Test valid limit and page_size parameters. req = webob.Request.blank('/?limit=20&page_size=5') self.assertEqual(common.get_pagination_params(req), {'page_size': 5, 'limit': 20}) class SortParamTest(test.NoDBTestCase): def test_get_sort_params_defaults(self): # Verifies the default sort key and direction. sort_keys, sort_dirs = common.get_sort_params({}) self.assertEqual(['created_at'], sort_keys) self.assertEqual(['desc'], sort_dirs) def test_get_sort_params_override_defaults(self): # Verifies that the defaults can be overriden. sort_keys, sort_dirs = common.get_sort_params({}, default_key='key1', default_dir='dir1') self.assertEqual(['key1'], sort_keys) self.assertEqual(['dir1'], sort_dirs) sort_keys, sort_dirs = common.get_sort_params({}, default_key=None, default_dir=None) self.assertEqual([], sort_keys) self.assertEqual([], sort_dirs) def test_get_sort_params_single_value(self): # Verifies a single sort key and direction. params = webob.multidict.MultiDict() params.add('sort_key', 'key1') params.add('sort_dir', 'dir1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1'], sort_keys) self.assertEqual(['dir1'], sort_dirs) def test_get_sort_params_single_with_default(self): # Verifies a single sort value with a default. 
params = webob.multidict.MultiDict() params.add('sort_key', 'key1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1'], sort_keys) # sort_key was supplied, sort_dir should be defaulted self.assertEqual(['desc'], sort_dirs) params = webob.multidict.MultiDict() params.add('sort_dir', 'dir1') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['created_at'], sort_keys) # sort_dir was supplied, sort_key should be defaulted self.assertEqual(['dir1'], sort_dirs) def test_get_sort_params_multiple_values(self): # Verifies multiple sort parameter values. params = webob.multidict.MultiDict() params.add('sort_key', 'key1') params.add('sort_key', 'key2') params.add('sort_key', 'key3') params.add('sort_dir', 'dir1') params.add('sort_dir', 'dir2') params.add('sort_dir', 'dir3') sort_keys, sort_dirs = common.get_sort_params(params) self.assertEqual(['key1', 'key2', 'key3'], sort_keys) self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dirs) # Also ensure that the input parameters are not modified sort_key_vals = [] sort_dir_vals = [] while 'sort_key' in params: sort_key_vals.append(params.pop('sort_key')) while 'sort_dir' in params: sort_dir_vals.append(params.pop('sort_dir')) self.assertEqual(['key1', 'key2', 'key3'], sort_key_vals) self.assertEqual(['dir1', 'dir2', 'dir3'], sort_dir_vals) self.assertEqual(0, len(params)) masakari-9.0.0/masakari/tests/unit/api/openstack/fakes.py0000664000175000017500000001006413656747723023473 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import timeutils import routes import webob.dec from masakari.api import api_version_request as api_version from masakari.api import auth as api_auth from masakari.api import openstack as openstack_api from masakari.api.openstack import ha from masakari.api.openstack.ha import versions from masakari.api.openstack import wsgi as os_wsgi from masakari.api import urlmap from masakari import context from masakari.tests import uuidsentinel from masakari import wsgi @webob.dec.wsgify def fake_wsgi(self, req): return self.application def wsgi_app_v1(fake_auth_context=None, init_only=None): inner_app_v1 = ha.APIRouterV1() if fake_auth_context is not None: ctxt = fake_auth_context else: ctxt = context.RequestContext('fake', 'fake', auth_token=True) api_v1 = ( openstack_api.FaultWrapper(api_auth.InjectContext(ctxt, inner_app_v1))) mapper = urlmap.URLMap() mapper['/v1'] = api_v1 mapper['/'] = openstack_api.FaultWrapper(versions.Versions()) return mapper class FakeToken(object): id_count = 0 def __getitem__(self, key): return getattr(self, key) def __init__(self, **kwargs): FakeToken.id_count += 1 self.id = FakeToken.id_count for k, v in kwargs.items(): setattr(self, k, v) class FakeRequestContext(context.RequestContext): def __init__(self, *args, **kwargs): kwargs['auth_token'] = kwargs.get('auth_token', 'fake_auth_token') super(FakeRequestContext, self).__init__(*args, **kwargs) class HTTPRequest(os_wsgi.Request): @staticmethod def blank(*args, **kwargs): kwargs['base_url'] = 'http://localhost/v1' use_admin_context = kwargs.pop('use_admin_context', False) project_id = kwargs.pop('project_id', uuidsentinel.fake_project_id) version = kwargs.pop('version', os_wsgi.DEFAULT_API_VERSION) out = os_wsgi.Request.blank(*args, **kwargs) out.environ['masakari.context'] = FakeRequestContext( user_id=uuidsentinel.fake_user_id, project_id=project_id, 
is_admin=use_admin_context) out.api_version_request = api_version.APIVersionRequest(version) return out class TestRouter(wsgi.Router): def __init__(self, controller, mapper=None): if not mapper: mapper = routes.Mapper() mapper.resource("test", "tests", controller=os_wsgi.Resource(controller)) super(TestRouter, self).__init__(mapper) class FakeAuthDatabase(object): data = {} @staticmethod def auth_token_get(context, token_hash): return FakeAuthDatabase.data.get(token_hash, None) @staticmethod def auth_token_create(context, token): fake_token = FakeToken(created_at=timeutils.utcnow(), **token) FakeAuthDatabase.data[fake_token.token_hash] = fake_token FakeAuthDatabase.data['id_%i' % fake_token.id] = fake_token return fake_token @staticmethod def auth_token_destroy(context, token_id): token = FakeAuthDatabase.data.get('id_%i' % token_id) if token and token.token_hash in FakeAuthDatabase.data: del FakeAuthDatabase.data[token.token_hash] del FakeAuthDatabase.data['id_%i' % token_id] def fake_get_available_languages(): existing_translations = ['en_GB', 'en_AU', 'de', 'zh_CN', 'en_US'] return existing_translations def fake_not_implemented(*args, **kwargs): raise NotImplementedError() masakari-9.0.0/masakari/tests/unit/api/openstack/test_wsgi.py0000664000175000017500000011217513656747723024420 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import inspect from unittest import mock import six from six.moves import http_client as http import testscenarios import webob from oslo_serialization import jsonutils from masakari.api import api_version_request as api_version from masakari.api.openstack import extensions from masakari.api.openstack import wsgi from masakari.api import versioned_method from masakari import exception from masakari import test from masakari.tests.unit.api.openstack import fakes from masakari.tests.unit import matchers class MicroversionedTest(testscenarios.WithScenarios, test.NoDBTestCase): header_name = 'OpenStack-API-Version' def _make_microversion_header(self, value): return {self.header_name: 'instance-ha %s' % value} class RequestTest(MicroversionedTest): def test_content_type_missing(self): request = wsgi.Request.blank('/tests/123', method='POST') request.body = b"" self.assertIsNone(request.get_content_type()) def test_content_type_unsupported(self): request = wsgi.Request.blank('/tests/123', method='POST') request.headers["Content-Type"] = "text/html" request.body = b"asdf
" self.assertRaises(exception.InvalidContentType, request.get_content_type) def test_content_type_with_charset(self): request = wsgi.Request.blank('/tests/123') request.headers["Content-Type"] = "application/json; charset=UTF-8" result = request.get_content_type() self.assertEqual(result, "application/json") def test_content_type_accept_default(self): request = wsgi.Request.blank('/tests/123.unsupported') request.headers["Accept"] = "application/unsupported1" result = request.best_match_content_type() self.assertEqual(result, "application/json") def test_from_request(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'bogus;q=1, en-gb;q=0.7,en-us,en;q=0.5,*;q=0.7' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_US') def test_asterisk(self): # asterisk should match first available if there # are not any other available matches self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = '*,es;q=0.5' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_GB') def test_prefix(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'zh' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'zh_CN') def test_secondary(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'nn,en-gb;q=0.5' request.headers = {'Accept-Language': accepted} self.assertEqual(request.best_match_language(), 'en_GB') def test_none_found(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = 'nb-no' request.headers = {'Accept-Language': 
accepted} self.assertIsNone(request.best_match_language()) def test_no_lang_header(self): self.stub_out('masakari.i18n.get_available_languages', fakes.fake_get_available_languages) request = wsgi.Request.blank('/') accepted = '' request.headers = {'Accept-Language': accepted} self.assertIsNone(request.best_match_language()) def test_api_version_request_header_none(self): request = wsgi.Request.blank('/') request.set_api_version_request() self.assertEqual(api_version.APIVersionRequest( api_version.DEFAULT_API_VERSION), request.api_version_request) @mock.patch("masakari.api.api_version_request.max_api_version") def test_api_version_request_header(self, mock_maxver): mock_maxver.return_value = api_version.APIVersionRequest("1.0") request = wsgi.Request.blank('/') request.headers = self._make_microversion_header('1.0') request.set_api_version_request() self.assertEqual(api_version.APIVersionRequest("1.0"), request.api_version_request) def test_api_version_request_header_invalid(self): request = wsgi.Request.blank('/') request.headers = self._make_microversion_header('1.1.1') self.assertRaises(exception.InvalidAPIVersionString, request.set_api_version_request) class ActionDispatcherTest(test.NoDBTestCase): def test_dispatch(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' self.assertEqual(serializer.dispatch({}, action='create'), 'pants') def test_dispatch_action_None(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' serializer.default = lambda x: 'trousers' self.assertEqual(serializer.dispatch({}, action=None), 'trousers') def test_dispatch_default(self): serializer = wsgi.ActionDispatcher() serializer.create = lambda x: 'pants' serializer.default = lambda x: 'trousers' self.assertEqual(serializer.dispatch({}, action='update'), 'trousers') class JSONDictSerializerTest(test.NoDBTestCase): def test_json(self): input_dict = dict(segments=dict(a=(2, 3))) expected_json = '{"segments":{"a":[2,3]}}' serializer = 
wsgi.JSONDictSerializer() result = serializer.serialize(input_dict) result = result.replace('\n', '').replace(' ', '') self.assertEqual(result, expected_json) class JSONDeserializerTest(test.NoDBTestCase): def test_json(self): data = """{"a": { "a1": "1", "a2": "2", "bs": ["1", "2", "3", {"c": {"c1": "1"}}], "d": {"e": "1"}, "f": "1"}}""" as_dict = { 'body': { 'a': { 'a1': '1', 'a2': '2', 'bs': ['1', '2', '3', {'c': {'c1': '1'}}], 'd': {'e': '1'}, 'f': '1', }, }, } deserializer = wsgi.JSONDeserializer() self.assertEqual(deserializer.deserialize(data), as_dict) def test_json_valid_utf8(self): data = b"""{"segment": {"recovery_method": "auto", "name": "\xe6\xa6\x82\xe5\xbf\xb5", "service_type": "COMPUTE_HOST" }} """ as_dict = { 'body': { u'segment': {u'recovery_method': 'auto', u'name': u'\u6982\u5ff5', u'service_type': u'COMPUTE_HOST' } } } deserializer = wsgi.JSONDeserializer() self.assertEqual(deserializer.deserialize(data), as_dict) def test_json_invalid_utf8(self): """Send invalid utf-8 to JSONDeserializer.""" data = b"""{"segment": { "name": "\xf0\x28\x8c\x28", "recovery_method": "auto", "description": "compute hosts with shared storage enabled." 
"service_type": "COMPUTE_HOST"}} """ deserializer = wsgi.JSONDeserializer() self.assertRaises(exception.MalformedRequestBody, deserializer.deserialize, data) class ResourceTest(MicroversionedTest): def get_req_id_header_name(self, request): return 'x-openstack-request-id' def test_resource_call_with_method_get(self): class Controller(object): def index(self, req): return 'success' app = fakes.TestRouter(Controller()) # the default method is GET req = webob.Request.blank('/tests') response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, http.OK) req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, http.OK) req.content_type = 'application/json' response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, http.OK) def test_resource_call_with_method_post(self): class Controller(object): @extensions.expected_errors(http.BAD_REQUEST) def create(self, req, body): if expected_body != body: msg = "The request body invalid" raise webob.exc.HTTPBadRequest(explanation=msg) return "success" # verify the method: POST app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests', method="POST", content_type='application/json') req.body = b'{"body": {"key": "value"}}' expected_body = {'body': { "key": "value" } } response = req.get_response(app) self.assertEqual(response.status_int, http.OK) self.assertEqual(b'success', response.body) # verify without body expected_body = None req.body = None response = req.get_response(app) self.assertEqual(response.status_int, http.OK) self.assertEqual(b'success', response.body) # the body is validated in the controller expected_body = {'body': None} response = req.get_response(app) expected_unsupported_type_body = {'badRequest': {'message': 'The request body invalid', 'code': http.BAD_REQUEST}} 
self.assertEqual(response.status_int, http.BAD_REQUEST) self.assertEqual(expected_unsupported_type_body, jsonutils.loads(response.body)) def test_resource_call_with_method_put(self): class Controller(object): def update(self, req, id, body): if expected_body != body: msg = "The request body invalid" raise webob.exc.HTTPBadRequest(explanation=msg) return "success" # verify the method: PUT app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests/test_id', method="PUT", content_type='application/json') req.body = b'{"body": {"key": "value"}}' expected_body = {'body': { "key": "value" } } response = req.get_response(app) self.assertEqual(b'success', response.body) self.assertEqual(response.status_int, http.OK) req.body = None expected_body = None response = req.get_response(app) self.assertEqual(response.status_int, http.OK) # verify no content_type is contained in the request req = webob.Request.blank('/tests/test_id', method="PUT", content_type='application/xml') req.content_type = 'application/xml' req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) expected_unsupported_type_body = {'badMediaType': {'message': 'Unsupported Content-Type', 'code': http.UNSUPPORTED_MEDIA_TYPE}} self.assertEqual(response.status_int, http.UNSUPPORTED_MEDIA_TYPE) self.assertEqual(expected_unsupported_type_body, jsonutils.loads(response.body)) def test_resource_call_with_method_delete(self): class Controller(object): def delete(self, req, id): return "success" # verify the method: DELETE app = fakes.TestRouter(Controller()) req = webob.Request.blank('/tests/test_id', method="DELETE") response = req.get_response(app) self.assertEqual(response.status_int, http.OK) self.assertEqual(b'success', response.body) # ignore the body req.body = b'{"body": {"key": "value"}}' response = req.get_response(app) self.assertEqual(response.status_int, http.OK) self.assertEqual(b'success', response.body) def test_resource_not_authorized(self): class Controller(object): def 
index(self, req): raise exception.Forbidden() req = webob.Request.blank('/tests') app = fakes.TestRouter(Controller()) response = req.get_response(app) self.assertEqual(response.status_int, http.FORBIDDEN) def test_dispatch(self): class Controller(object): def index(self, req, pants=None): return pants controller = Controller() resource = wsgi.Resource(controller) method, extensions = resource.get_method(None, 'index', None, '') actual = resource.dispatch(method, None, {'pants': 'off'}) expected = 'off' self.assertEqual(actual, expected) def test_get_method_unknown_controller_method(self): class Controller(object): def index(self, req, pants=None): return pants controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(AttributeError, resource.get_method, None, 'create', None, '') def test_get_method_action_json(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) method, extensions = resource.get_method(None, 'action', 'application/json', '{"fooAction": true}') self.assertEqual(controller._action_foo, method) def test_get_method_action_bad_body(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(exception.MalformedRequestBody, resource.get_method, None, 'action', 'application/json', '{}') def test_get_method_unknown_controller_action(self): class Controller(wsgi.Controller): @wsgi.action('fooAction') def _action_foo(self, req, id, body): return body controller = Controller() resource = wsgi.Resource(controller) self.assertRaises(KeyError, resource.get_method, None, 'action', 'application/json', '{"barAction": true}') def test_get_method_action_method(self): class Controller(object): def action(self, req, pants=None): return pants controller = Controller() resource = 
wsgi.Resource(controller) method, extensions = resource.get_method(None, 'action', 'application/xml', 'true= 3.7 import sqlite3 tup = sqlite3.sqlite_version_info if tup[0] > 3 or (tup[0] == 3 and tup[1] >= 7): self.conn.execute("PRAGMA foreign_keys = ON") def test_purge_deleted_rows_old(self): # Purge at 30 days old, should only delete 2 rows db.purge_deleted_rows(self.context, age_in_days=30, max_rows=10) notifications_rows = self.conn.execute( self.notifications.count()).scalar() failover_segments_rows = self.conn.execute( self.failover_segments.count()).scalar() hosts_rows = self.conn.execute(self.hosts.count()).scalar() # Verify that we only deleted 2 self.assertEqual(4, notifications_rows) self.assertEqual(4, failover_segments_rows) self.assertEqual(4, hosts_rows) def test_purge_all_deleted_rows(self): db.purge_deleted_rows(self.context, age_in_days=20, max_rows=-1) notifications_rows = self.conn.execute( self.notifications.count()).scalar() failover_segments_rows = self.conn.execute( self.failover_segments.count()).scalar() hosts_rows = self.conn.execute(self.hosts.count()).scalar() # Verify that we have purged all deleted rows self.assertEqual(2, notifications_rows) self.assertEqual(2, failover_segments_rows) self.assertEqual(2, hosts_rows) def test_purge_maximum_rows_partial_deleted_records(self): db.purge_deleted_rows(self.context, age_in_days=60, max_rows=3) notifications_rows = self.conn.execute( self.notifications.count()).scalar() failover_segments_rows = self.conn.execute( self.failover_segments.count()).scalar() hosts_rows = self.conn.execute(self.hosts.count()).scalar() # Verify that we have deleted 3 rows only self.assertEqual(4, notifications_rows) self.assertEqual(5, hosts_rows) self.assertEqual(6, failover_segments_rows) masakari-9.0.0/masakari/tests/unit/objects/0000775000175000017500000000000013656750011020700 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/objects/__init__.py0000664000175000017500000000000013656747723023017 
0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/objects/test_objects.py0000664000175000017500000010065513656747723023771 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import datetime import inspect import os import pprint from unittest import mock from oslo_versionedobjects import exception as ovo_exc from oslo_versionedobjects import fixture from masakari import objects from masakari.objects import base from masakari.objects import fields from masakari.objects import segment from masakari import test from masakari.tests.unit.objects import fake_args class MyOwnedObject(base.MasakariPersistentObject, base.MasakariObject): VERSION = '1.0' fields = {'baz': fields.IntegerField()} class MyObj(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): VERSION = '1.6' fields = {'foo': fields.IntegerField(default=1), 'bar': fields.StringField(), 'missing': fields.StringField(), 'readonly': fields.IntegerField(read_only=True), 'rel_object': fields.ObjectField('MyOwnedObject', nullable=True), 'rel_objects': fields.ListOfObjectsField('MyOwnedObject', nullable=True), 'mutable_default': fields.ListOfStringsField(default=[]), } @staticmethod def _from_db_object(context, obj, db_obj): self = MyObj() self.foo = db_obj['foo'] self.bar = db_obj['bar'] self.missing = db_obj['missing'] self.readonly = 1 self._context = context return self def obj_load_attr(self, 
attrname): setattr(self, attrname, 'loaded!') @base.remotable_classmethod def query(cls, context): obj = cls(context=context, foo=1, bar='bar') obj.obj_reset_changes() return obj @base.remotable def marco(self): return 'polo' @base.remotable def _update_test(self): self.bar = 'updated' @base.remotable def save(self): self.obj_reset_changes() @base.remotable def refresh(self): self.foo = 321 self.bar = 'refreshed' self.obj_reset_changes() @base.remotable def modify_save_modify(self): self.bar = 'meow' self.save() self.foo = 42 self.rel_object = MyOwnedObject(baz=42) def obj_make_compatible(self, primitive, target_version): super(MyObj, self).obj_make_compatible(primitive, target_version) if target_version == '1.0' and 'bar' in primitive: primitive['bar'] = 'old%s' % primitive['bar'] class TestObjMakeList(test.NoDBTestCase): def test_obj_make_list(self): class MyList(base.ObjectListBase, base.MasakariObject): fields = { 'objects': fields.ListOfObjectsField('MyObj'), } db_objs = [{'foo': 1, 'bar': 'baz', 'missing': 'banana'}, {'foo': 2, 'bar': 'bat', 'missing': 'apple'}, ] mylist = base.obj_make_list('ctxt', MyList(), MyObj, db_objs) self.assertEqual(2, len(mylist)) self.assertEqual('ctxt', mylist._context) for index, item in enumerate(mylist): self.assertEqual(db_objs[index]['foo'], item.foo) self.assertEqual(db_objs[index]['bar'], item.bar) self.assertEqual(db_objs[index]['missing'], item.missing) def compare_obj(test, obj, db_obj, subs=None, allow_missing=None, comparators=None): """Compare a MasakariObject and a dict-like database object. This automatically converts TZ-aware datetimes and iterates over the fields of the object. 
:param:test: The TestCase doing the comparison :param:obj: The MasakariObject to examine :param:db_obj: The dict-like database object to use as reference :param:subs: A dict of objkey=dbkey field substitutions :param:allow_missing: A list of fields that may not be in db_obj :param:comparators: Map of comparator functions to use for certain fields """ if subs is None: subs = {} if allow_missing is None: allow_missing = [] if comparators is None: comparators = {} for key in obj.fields: if key in allow_missing and not obj.obj_attr_is_set(key): continue obj_val = getattr(obj, key) db_key = subs.get(key, key) db_val = db_obj[db_key] if isinstance(obj_val, datetime.datetime): obj_val = obj_val.replace(tzinfo=None) if key in comparators: comparator = comparators[key] comparator(db_val, obj_val) else: test.assertEqual(db_val, obj_val) class _BaseTestCase(test.TestCase): def setUp(self): super(_BaseTestCase, self).setUp() self.user_id = 'fake-user' self.project_id = 'fake-project' self.context = 'masakari-context' base.MasakariObjectRegistry.register(MyObj) base.MasakariObjectRegistry.register(MyOwnedObject) def compare_obj(self, obj, db_obj, subs=None, allow_missing=None, comparators=None): compare_obj(self, obj, db_obj, subs=subs, allow_missing=allow_missing, comparators=comparators) def str_comparator(self, expected, obj_val): """Compare an object field to a string in the db by performing a simple coercion on the object field value. """ self.assertEqual(expected, str(obj_val)) class _LocalTest(_BaseTestCase): def setUp(self): super(_LocalTest, self).setUp() class _TestObject(object): def test_object_attrs_in_init(self): # Now check the test one in this file. 
Should be newest version self.assertEqual('1.6', objects.MyObj.VERSION) def test_hydration_type_error(self): primitive = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.5', 'masakari_object.data': {'foo': 'a'}} self.assertRaises(ValueError, MyObj.obj_from_primitive, primitive) def test_hydration(self): primitive = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.5', 'masakari_object.data': {'foo': 1}} real_method = MyObj._obj_from_primitive def _obj_from_primitive(*args): return real_method(*args) with mock.patch.object(MyObj, '_obj_from_primitive') as ofp: ofp.side_effect = _obj_from_primitive obj = MyObj.obj_from_primitive(primitive) ofp.assert_called_once_with(None, '1.5', primitive) self.assertEqual(obj.foo, 1) def test_hydration_version_different(self): primitive = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.2', 'masakari_object.data': {'foo': 1}} obj = MyObj.obj_from_primitive(primitive) self.assertEqual(obj.foo, 1) self.assertEqual('1.2', obj.VERSION) def test_hydration_bad_ns(self): primitive = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'foo', 'masakari_object.version': '1.5', 'masakari_object.data': {'foo': 1}} self.assertRaises(ovo_exc.UnsupportedObjectError, MyObj.obj_from_primitive, primitive) def test_hydration_additional_unexpected_stuff(self): primitive = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.5.1', 'masakari_object.data': { 'foo': 1, 'unexpected_thing': 'foobar'}} obj = MyObj.obj_from_primitive(primitive) self.assertEqual(1, obj.foo) self.assertFalse(hasattr(obj, 'unexpected_thing')) self.assertEqual('1.5.1', obj.VERSION) def test_dehydration(self): expected = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.6', 'masakari_object.data': 
{'foo': 1}} obj = MyObj(foo=1) obj.obj_reset_changes() self.assertEqual(obj.obj_to_primitive(), expected) def test_object_property(self): obj = MyObj(foo=1) self.assertEqual(obj.foo, 1) def test_object_property_type_error(self): obj = MyObj() def fail(): obj.foo = 'a' self.assertRaises(ValueError, fail) def test_load(self): obj = MyObj() self.assertEqual(obj.bar, 'loaded!') def test_load_in_base(self): @base.MasakariObjectRegistry.register_if(False) class Foo(base.MasakariObject): fields = {'foobar': fields.IntegerField()} obj = Foo() with self.assertRaisesRegex(NotImplementedError, ".*foobar.*"): obj.foobar def test_loaded_in_primitive(self): obj = MyObj(foo=1) obj.obj_reset_changes() self.assertEqual(obj.bar, 'loaded!') expected = {'masakari_object.name': 'MyObj', 'masakari_object.namespace': 'masakari', 'masakari_object.version': '1.6', 'masakari_object.changes': ['bar'], 'masakari_object.data': {'foo': 1, 'bar': 'loaded!'}} self.assertEqual(obj.obj_to_primitive(), expected) def test_changes_in_primitive(self): obj = MyObj(foo=123) self.assertEqual(obj.obj_what_changed(), set(['foo'])) primitive = obj.obj_to_primitive() self.assertIn('masakari_object.changes', primitive) obj2 = MyObj.obj_from_primitive(primitive) self.assertEqual(obj2.obj_what_changed(), set(['foo'])) obj2.obj_reset_changes() self.assertEqual(obj2.obj_what_changed(), set()) def test_orphaned_object(self): obj = MyObj.query(self.context) obj._context = None self.assertRaises(ovo_exc.OrphanedObjectError, obj._update_test) def test_changed_1(self): obj = MyObj.query(self.context) obj.foo = 123 self.assertEqual(obj.obj_what_changed(), set(['foo'])) obj._update_test() self.assertEqual(obj.obj_what_changed(), set(['foo', 'bar'])) self.assertEqual(obj.foo, 123) def test_changed_2(self): obj = MyObj.query(self.context) obj.foo = 123 self.assertEqual(obj.obj_what_changed(), set(['foo'])) obj.save() self.assertEqual(obj.obj_what_changed(), set([])) self.assertEqual(obj.foo, 123) def test_changed_3(self): 
obj = MyObj.query(self.context) obj.foo = 123 self.assertEqual(obj.obj_what_changed(), set(['foo'])) obj.refresh() self.assertEqual(obj.obj_what_changed(), set([])) self.assertEqual(obj.foo, 321) self.assertEqual(obj.bar, 'refreshed') def test_changed_4(self): obj = MyObj.query(self.context) obj.bar = 'something' self.assertEqual(obj.obj_what_changed(), set(['bar'])) obj.modify_save_modify() self.assertEqual(obj.obj_what_changed(), set(['foo', 'rel_object'])) self.assertEqual(obj.foo, 42) self.assertEqual(obj.bar, 'meow') self.assertIsInstance(obj.rel_object, MyOwnedObject) def test_changed_with_sub_object(self): @base.MasakariObjectRegistry.register_if(False) class ParentObject(base.MasakariObject): fields = {'foo': fields.IntegerField(), 'bar': fields.ObjectField('MyObj'), } obj = ParentObject() self.assertEqual(set(), obj.obj_what_changed()) obj.foo = 1 self.assertEqual(set(['foo']), obj.obj_what_changed()) bar = MyObj() obj.bar = bar self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed()) obj.obj_reset_changes() self.assertEqual(set(), obj.obj_what_changed()) bar.foo = 1 self.assertEqual(set(['bar']), obj.obj_what_changed()) def test_static_result(self): obj = MyObj.query(self.context) self.assertEqual(obj.bar, 'bar') result = obj.marco() self.assertEqual(result, 'polo') def test_updates(self): obj = MyObj.query(self.context) self.assertEqual(obj.foo, 1) obj._update_test() self.assertEqual(obj.bar, 'updated') def test_contains(self): obj = MyObj() self.assertNotIn('foo', obj) obj.foo = 1 self.assertIn('foo', obj) self.assertNotIn('does_not_exist', obj) def test_obj_attr_is_set(self): obj = MyObj(foo=1) self.assertTrue(obj.obj_attr_is_set('foo')) self.assertFalse(obj.obj_attr_is_set('bar')) self.assertRaises(AttributeError, obj.obj_attr_is_set, 'bang') def test_obj_reset_changes_recursive(self): obj = MyObj(rel_object=MyOwnedObject(baz=123), rel_objects=[MyOwnedObject(baz=456)]) self.assertEqual(set(['rel_object', 'rel_objects']), obj.obj_what_changed()) 
obj.obj_reset_changes() self.assertEqual(set(['rel_object']), obj.obj_what_changed()) self.assertEqual(set(['baz']), obj.rel_object.obj_what_changed()) self.assertEqual(set(['baz']), obj.rel_objects[0].obj_what_changed()) obj.obj_reset_changes(recursive=True, fields=['foo']) self.assertEqual(set(['rel_object']), obj.obj_what_changed()) self.assertEqual(set(['baz']), obj.rel_object.obj_what_changed()) self.assertEqual(set(['baz']), obj.rel_objects[0].obj_what_changed()) obj.obj_reset_changes(recursive=True) self.assertEqual(set([]), obj.rel_object.obj_what_changed()) self.assertEqual(set([]), obj.obj_what_changed()) def test_get(self): obj = MyObj(foo=1) # Foo has value, should not get the default self.assertEqual(obj.get('foo', 2), 1) # Foo has value, should return the value without error self.assertEqual(obj.get('foo'), 1) # Bar is not loaded, so we should get the default self.assertEqual(obj.get('bar', 'not-loaded'), 'not-loaded') # Bar without a default should lazy-load self.assertEqual(obj.get('bar'), 'loaded!') # Bar now has a default, but loaded value should be returned self.assertEqual(obj.get('bar', 'not-loaded'), 'loaded!') # Invalid attribute should raise AttributeError self.assertRaises(AttributeError, obj.get, 'nothing') # ...even with a default self.assertRaises(AttributeError, obj.get, 'nothing', 3) def test_get_changes(self): obj = MyObj() self.assertEqual({}, obj.obj_get_changes()) obj.foo = 123 self.assertEqual({'foo': 123}, obj.obj_get_changes()) obj.bar = 'test' self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes()) obj.obj_reset_changes() self.assertEqual({}, obj.obj_get_changes()) def test_obj_fields(self): @base.MasakariObjectRegistry.register_if(False) class TestObj(base.MasakariObject): fields = {'foo': fields.IntegerField()} obj_extra_fields = ['bar'] @property def bar(self): return 'this is bar' obj = TestObj() self.assertEqual(['foo', 'bar'], obj.obj_fields) def test_obj_constructor(self): obj = MyObj(context=self.context, 
foo=123, bar='abc') self.assertEqual(123, obj.foo) self.assertEqual('abc', obj.bar) self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed()) def test_obj_read_only(self): obj = MyObj(context=self.context, foo=123, bar='abc') obj.readonly = 1 self.assertRaises(ovo_exc.ReadOnlyFieldError, setattr, obj, 'readonly', 2) def test_obj_mutable_default(self): obj = MyObj(context=self.context, foo=123, bar='abc') obj.mutable_default = None obj.mutable_default.append('s1') self.assertEqual(obj.mutable_default, ['s1']) obj1 = MyObj(context=self.context, foo=123, bar='abc') obj1.mutable_default = None obj1.mutable_default.append('s2') self.assertEqual(obj1.mutable_default, ['s2']) def test_obj_mutable_default_set_default(self): obj1 = MyObj(context=self.context, foo=123, bar='abc') obj1.obj_set_defaults('mutable_default') self.assertEqual(obj1.mutable_default, []) obj1.mutable_default.append('s1') self.assertEqual(obj1.mutable_default, ['s1']) obj2 = MyObj(context=self.context, foo=123, bar='abc') obj2.obj_set_defaults('mutable_default') self.assertEqual(obj2.mutable_default, []) obj2.mutable_default.append('s2') self.assertEqual(obj2.mutable_default, ['s2']) def test_obj_repr(self): obj = MyObj(foo=123) self.assertEqual('MyObj(bar=,created_at=,deleted=,' 'deleted_at=,foo=123,missing=,' 'mutable_default=,readonly=,rel_object=,' 'rel_objects=,updated_at=)', repr(obj)) def test_obj_make_obj_compatible(self): subobj = MyOwnedObject(baz=1) subobj.VERSION = '1.2' obj = MyObj(rel_object=subobj) obj.obj_relationships = { 'rel_object': [('1.5', '1.1'), ('1.7', '1.2')], } orig_primitive = obj.obj_to_primitive()['masakari_object.data'] with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.8', 'rel_object') self.assertFalse(mock_compat.called) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) 
obj._obj_make_obj_compatible(primitive, '1.7', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.2') with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.6', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.1') self.assertEqual('1.1', primitive[ 'rel_object']['masakari_object.version']) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.5', 'rel_object') mock_compat.assert_called_once_with( primitive['rel_object']['masakari_object.data'], '1.1') self.assertEqual('1.1', primitive[ 'rel_object']['masakari_object.version']) with mock.patch.object(subobj, 'obj_make_compatible') as mock_compat: primitive = copy.deepcopy(orig_primitive) obj._obj_make_obj_compatible(primitive, '1.4', 'rel_object') self.assertFalse(mock_compat.called) self.assertNotIn('rel_object', primitive) def test_obj_make_compatible_hits_sub_objects(self): subobj = MyOwnedObject(baz=1) obj = MyObj(foo=123, rel_object=subobj) obj.obj_relationships = {'rel_object': [('1.0', '1.0')]} with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat: obj.obj_make_compatible({'rel_object': 'foo'}, '1.10') mock_compat.assert_called_once_with({'rel_object': 'foo'}, '1.10', 'rel_object') def test_obj_make_compatible_skips_unset_sub_objects(self): obj = MyObj(foo=123) obj.obj_relationships = {'rel_object': [('1.0', '1.0')]} with mock.patch.object(obj, '_obj_make_obj_compatible') as mock_compat: obj.obj_make_compatible({'rel_object': 'foo'}, '1.10') self.assertFalse(mock_compat.called) def test_obj_make_compatible_doesnt_skip_falsey_sub_objects(self): @base.MasakariObjectRegistry.register_if(False) class MyList(base.ObjectListBase, base.MasakariObject): VERSION = '1.2' fields = {'objects': 
fields.ListOfObjectsField('MyObjElement')} obj_relationships = { 'objects': [('1.1', '1.1'), ('1.2', '1.2')], } mylist = MyList(objects=[]) @base.MasakariObjectRegistry.register_if(False) class MyOwner(base.MasakariObject): VERSION = '1.2' fields = {'mylist': fields.ObjectField('MyList')} obj_relationships = { 'mylist': [('1.1', '1.1')], } myowner = MyOwner(mylist=mylist) primitive = myowner.obj_to_primitive('1.1') self.assertIn('mylist', primitive['masakari_object.data']) def test_obj_make_compatible_handles_list_of_objects(self): subobj = MyOwnedObject(baz=1) obj = MyObj(rel_objects=[subobj]) obj.obj_relationships = {'rel_objects': [('1.0', '1.123')]} def fake_make_compat(primitive, version): self.assertEqual('1.123', version) self.assertIn('baz', primitive) with mock.patch.object(subobj, 'obj_make_compatible') as mock_mc: mock_mc.side_effect = fake_make_compat obj.obj_to_primitive('1.0') self.assertTrue(mock_mc.called) def test_delattr(self): obj = MyObj(bar='foo') del obj.bar # Should appear unset now self.assertFalse(obj.obj_attr_is_set('bar')) # Make sure post-delete, references trigger lazy loads self.assertEqual('loaded!', getattr(obj, 'bar')) def test_delattr_unset(self): obj = MyObj() self.assertRaises(AttributeError, delattr, obj, 'bar') class TestObject(_LocalTest, _TestObject): def test_set_defaults(self): obj = MyObj() obj.obj_set_defaults('foo') self.assertTrue(obj.obj_attr_is_set('foo')) self.assertEqual(1, obj.foo) def test_set_defaults_no_default(self): obj = MyObj() self.assertRaises(ovo_exc.ObjectActionError, obj.obj_set_defaults, 'bar') def test_set_all_defaults(self): obj = MyObj() obj.obj_set_defaults() self.assertEqual(set(['deleted', 'foo', 'mutable_default']), obj.obj_what_changed()) self.assertEqual(1, obj.foo) def test_set_defaults_not_overwrite(self): obj = MyObj(deleted=True) obj.obj_set_defaults() self.assertEqual(1, obj.foo) self.assertTrue(obj.deleted) class TestRegistry(test.NoDBTestCase): 
@mock.patch('masakari.objects.base.objects') def test_hook_chooses_newer_properly(self, mock_objects): reg = base.MasakariObjectRegistry() reg.registration_hook(MyObj, 0) class MyNewerObj(object): VERSION = '1.123' @classmethod def obj_name(cls): return 'MyObj' self.assertEqual(MyObj, mock_objects.MyObj) reg.registration_hook(MyNewerObj, 0) self.assertEqual(MyNewerObj, mock_objects.MyObj) @mock.patch('masakari.objects.base.objects') def test_hook_keeps_newer_properly(self, mock_objects): reg = base.MasakariObjectRegistry() reg.registration_hook(MyObj, 0) class MyOlderObj(object): VERSION = '1.1' @classmethod def obj_name(cls): return 'MyObj' self.assertEqual(MyObj, mock_objects.MyObj) reg.registration_hook(MyOlderObj, 0) self.assertEqual(MyObj, mock_objects.MyObj) # NOTE(Dinesh_Bhor): The hashes in this list should only be changed if # they come with a corresponding version bump in the affected # objects object_data = { 'FailoverSegment': '1.0-5e8b8bc8840b35439b5f2b621482d15d', 'FailoverSegmentList': '1.0-dfc5c6f5704d24dcaa37b0bbb03cbe60', 'Host': '1.1-3fc4d548fa220c76906426095e5971fc', 'HostList': '1.0-25ebe1b17fbd9f114fae8b6a10d198c0', 'Notification': '1.1-91e3a051078e35300e325a3e2ae5fde5', 'NotificationProgressDetails': '1.0-fc611ac932b719fbc154dbe34bb8edee', 'NotificationList': '1.0-25ebe1b17fbd9f114fae8b6a10d198c0', 'EventType': '1.0-d1d2010a7391fa109f0868d964152607', 'ExceptionNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'ExceptionPayload': '1.0-96f178a12691e3ef0d8e3188fc481b90', 'HostApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'HostApiPayload': '1.0-ca9035d81cec6697f12dd4cac4c8f027', 'HostApiPayloadBase': '1.0-211379087a876212df6194b011207339', 'NotificationApiPayload': '1.0-c050869a1f4aed23e7645bd4d1830ecd', 'NotificationApiPayloadBase': '1.0-cda8d53a77e64f83e3782fc9c4d499bb', 'NotificationApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'NotificationPublisher': '1.0-bbbc1402fb0e443a3eb227cc52b61545', 'MyObj': 
'1.6-ee7b607402fbfb3390a92ab7199e0d88', 'MyOwnedObject': '1.0-fec853730bd02d54cc32771dd67f08a0', 'SegmentApiNotification': '1.0-1187e93f564c5cca692db76a66cda2a6', 'SegmentApiPayload': '1.0-4c85836a1c2e4069b9dc84fa029a4657', 'SegmentApiPayloadBase': '1.0-93a7c8b78d0e9ea3f6811d4ed75fa799' } def get_masakari_objects(): """Get masakari versioned objects This returns a dict of versioned objects which are in the Masakari project namespace only. ie excludes objects from os-vif and other 3rd party modules :return: a dict mapping class names to lists of versioned objects """ all_classes = base.MasakariObjectRegistry.obj_classes() masakari_classes = {} for name in all_classes: objclasses = all_classes[name] if (objclasses[0].OBJ_PROJECT_NAMESPACE != ( base.MasakariObject.OBJ_PROJECT_NAMESPACE)): continue masakari_classes[name] = objclasses return masakari_classes class TestObjectVersions(test.NoDBTestCase, _BaseTestCase): def setUp(self): super(test.NoDBTestCase, self).setUp() base.MasakariObjectRegistry.register_notification_objects() def test_versions(self): checker = fixture.ObjectVersionChecker( get_masakari_objects()) fingerprints = checker.get_hashes() if os.getenv('GENERATE_HASHES'): open('object_hashes.txt', 'w').write( pprint.pformat(fingerprints)) raise test.TestingException( 'Generated hashes in object_hashes.txt') expected, actual = checker.test_hashes(object_data) self.assertEqual(expected, actual, 'Some objects have changed; please make sure the ' 'versions have been bumped, and then update their ' 'hashes here.') def test_obj_make_compatible(self): base.MasakariObjectRegistry.register(segment.FailoverSegment) # Iterate all object classes and verify that we can run # obj_make_compatible with every older version than current. # This doesn't actually test the data conversions, but it at least # makes sure the method doesn't blow up on something basic like # expecting the wrong version format. 
# Hold a dictionary of args/kwargs that need to get passed into # __init__() for specific classes. The key in the dictionary is # the obj_class that needs the init args/kwargs. init_args = fake_args.init_args init_kwargs = fake_args.init_kwargs checker = fixture.ObjectVersionChecker( base.MasakariObjectRegistry.obj_classes()) checker.test_compatibility_routines(use_manifest=True, init_args=init_args, init_kwargs=init_kwargs) def test_list_obj_make_compatible(self): @base.MasakariObjectRegistry.register_if(False) class TestObj(base.MasakariObject): VERSION = '1.4' fields = {'foo': fields.IntegerField()} @base.MasakariObjectRegistry.register_if(False) class TestListObj(base.ObjectListBase, base.MasakariObject): VERSION = '1.5' fields = {'objects': fields.ListOfObjectsField('TestObj')} obj_relationships = { 'objects': [('1.0', '1.1'), ('1.1', '1.2'), ('1.3', '1.3'), ('1.5', '1.4')] } my_list = TestListObj() my_obj = TestObj(foo=1) my_list.objects = [my_obj] primitive = my_list.obj_to_primitive(target_version='1.5') primitive_data = primitive['masakari_object.data'] obj_primitive = my_obj.obj_to_primitive(target_version='1.4') obj_primitive_data = obj_primitive['masakari_object.data'] with mock.patch.object(TestObj, 'obj_make_compatible') as comp: my_list.obj_make_compatible(primitive_data, '1.1') comp.assert_called_with(obj_primitive_data, '1.2') def test_list_obj_make_compatible_when_no_objects(self): # Test to make sure obj_make_compatible works with no 'objects' # If a List object ever has a version that did not contain the # 'objects' key, we need to make sure converting back to that version # doesn't cause backporting problems. 
@base.MasakariObjectRegistry.register_if(False) class TestObj(base.MasakariObject): VERSION = '1.1' fields = {'foo': fields.IntegerField()} @base.MasakariObjectRegistry.register_if(False) class TestListObj(base.ObjectListBase, base.MasakariObject): VERSION = '1.1' fields = {'objects': fields.ListOfObjectsField('TestObj')} obj_relationships = { 'objects': [('1.1', '1.1')] } my_list = TestListObj() my_list.objects = [TestObj(foo=1)] primitive = my_list.obj_to_primitive(target_version='1.1') primitive_data = primitive['masakari_object.data'] my_list.obj_make_compatible(primitive_data, target_version='1.0') self.assertNotIn('objects', primitive_data, "List was backported to before 'objects' existed." " 'objects' should not be in the primitive.") class TestObjEqualPrims(_BaseTestCase): def test_object_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='goodbye') obj2.obj_reset_changes() obj2.bar = 'goodbye' # obj2 will be marked with field 'three' updated self.assertTrue(base.obj_equal_prims(obj1, obj2), "Objects that differ only because one a is marked " "as updated should be equal") def test_object_not_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='hello') obj2.obj_reset_changes() self.assertFalse(base.obj_equal_prims(obj1, obj2), "Objects that differ in any field " "should not be equal") def test_object_ignore_equal(self): obj1 = MyObj(foo=1, bar='goodbye') obj1.obj_reset_changes() obj2 = MyObj(foo=1, bar='hello') obj2.obj_reset_changes() self.assertTrue(base.obj_equal_prims(obj1, obj2, ['bar']), "Objects that only differ in an ignored field " "should be equal") class TestObjMethodOverrides(test.NoDBTestCase): def test_obj_reset_changes(self): args = inspect.getargspec(base.MasakariObject.obj_reset_changes) obj_classes = base.MasakariObjectRegistry.obj_classes() for obj_name in obj_classes: obj_class = obj_classes[obj_name][0] self.assertEqual(args, 
inspect.getargspec(obj_class.obj_reset_changes)) masakari-9.0.0/masakari/tests/unit/objects/fake_args.py0000664000175000017500000000525713656747723023225 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from masakari.notifications.objects import base from masakari.notifications.objects import exception from masakari.notifications.objects import notification as event_notification from masakari.objects import host as host_obj from masakari.objects import notification as notification_obj from masakari.objects import segment as segment_obj segment = segment_obj.FailoverSegment() host = host_obj.Host() notification = notification_obj.Notification() fault = None init_args = { event_notification.SegmentApiPayloadBase: [segment], event_notification.SegmentApiPayload: [segment, fault], event_notification.HostApiPayloadBase: [host], event_notification.HostApiPayload: [host, fault], event_notification.NotificationApiPayloadBase: [notification], event_notification.NotificationApiPayload: [notification, fault], event_notification.SegmentApiNotification: [], event_notification.HostApiNotification: [], event_notification.NotificationApiNotification: [], exception.ExceptionPayload: [], exception.ExceptionNotification: [], base.EventType: [], base.NotificationPublisher: [], segment_obj.FailoverSegment: [], segment_obj.FailoverSegmentList: [], host_obj.Host: [], host_obj.HostList: [], notification_obj.Notification: [], 
notification_obj.NotificationList: [], } init_kwargs = { event_notification.SegmentApiPayloadBase: {}, event_notification.SegmentApiPayload: {}, event_notification.HostApiPayloadBase: {}, event_notification.HostApiPayload: {}, event_notification.NotificationApiPayloadBase: {}, event_notification.NotificationApiPayload: {}, event_notification.SegmentApiNotification: {}, event_notification.HostApiNotification: {}, event_notification.NotificationApiNotification: {}, exception.ExceptionPayload: {}, exception.ExceptionNotification: {}, base.EventType: {}, base.NotificationPublisher: {}, segment_obj.FailoverSegment: {}, segment_obj.FailoverSegmentList: {}, host_obj.Host: {}, host_obj.HostList: {}, notification_obj.Notification: {}, notification_obj.NotificationList: {}, } masakari-9.0.0/masakari/tests/unit/objects/test_segments.py0000664000175000017500000003003013656747723024152 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from unittest import mock from oslo_utils import timeutils from masakari.api import utils as api_utils from masakari import exception from masakari.objects import fields from masakari.objects import segment from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) fake_segment = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'service_type': 'COMPUTE', 'description': 'fake-description', 'recovery_method': 'auto' } class TestFailoverSegmentObject(test_objects._LocalTest): @mock.patch('masakari.db.failover_segment_get_by_name') def test_get_by_name(self, mock_api_get): mock_api_get.return_value = fake_segment segment_obj = segment.FailoverSegment.get_by_name(self.context, 'foo-segment') self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, 'foo-segment') @mock.patch('masakari.db.failover_segment_get_by_uuid') def test_get_by_uuid(self, mock_api_get): mock_api_get.return_value = fake_segment segment_obj = (segment.FailoverSegment. 
get_by_uuid(self.context, uuidsentinel.fake_segment)) self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, uuidsentinel.fake_segment) @mock.patch('masakari.db.failover_segment_get_by_id') def test_get_by_id(self, mock_api_get): mock_api_get.return_value = fake_segment fake_id = 123 segment_obj = segment.FailoverSegment.get_by_id(self.context, fake_id) self.compare_obj(segment_obj, fake_segment) mock_api_get.assert_called_once_with(self.context, fake_id) def _segment_create_attribute(self): segment_obj = segment.FailoverSegment(context=self.context) segment_obj.name = 'foo-segment' segment_obj.description = 'keydata' segment_obj.service_type = 'fake-user' segment_obj.recovery_method = 'auto' segment_obj.uuid = uuidsentinel.fake_segment return segment_obj @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_create') def test_create(self, mock_segment_create, mock_notify_about_segment_api): mock_segment_create.return_value = fake_segment segment_obj = self._segment_create_attribute() segment_obj.create() self.compare_obj(segment_obj, fake_segment) mock_segment_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'description': 'keydata', 'service_type': 'fake-user', 'recovery_method': 'auto'}) action = fields.EventNotificationAction.SEGMENT_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_create') def test_recreate_fails(self, mock_segment_create, mock_notify_about_segment_api): mock_segment_create.return_value = fake_segment segment_obj = 
self._segment_create_attribute() segment_obj.create() self.assertRaises(exception.ObjectActionError, segment_obj.create) mock_segment_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_segment, 'name': 'foo-segment', 'description': 'keydata', 'service_type': 'fake-user', 'recovery_method': 'auto'}) action = fields.EventNotificationAction.SEGMENT_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_delete') def test_destroy(self, mock_segment_destroy, mock_notify_about_segment_api): segment_obj = self._segment_create_attribute() segment_obj.id = 123 segment_obj.destroy() mock_segment_destroy.assert_called_once_with( self.context, uuidsentinel.fake_segment) action = fields.EventNotificationAction.SEGMENT_DELETE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_obj, action=action, phase=phase_start), mock.call(self.context, segment_obj, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_delete') def test_destroy_failover_segment_found(self, mock_segment_destroy, mock_notify_about_segment_api): mock_segment_destroy.side_effect = exception.FailoverSegmentNotFound( id=123) segment_obj = self._segment_create_attribute() segment_obj.id = 123 self.assertRaises(exception.FailoverSegmentNotFound, segment_obj.destroy) action = fields.EventNotificationAction.SEGMENT_DELETE phase_start = fields.EventNotificationPhase.START notify_calls = [ 
mock.call(self.context, segment_obj, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_segment_by_recovery_method(self, mock_api_get): fake_segment2 = copy.deepcopy(fake_segment) fake_segment2['name'] = 'fake_segment2' mock_api_get.return_value = [fake_segment2, fake_segment] segment_result = (segment.FailoverSegmentList. get_all(self.context, filters={'recovery_method': 'auto'})) self.assertEqual(2, len(segment_result)) self.compare_obj(segment_result[0], fake_segment2) self.compare_obj(segment_result[1], fake_segment) mock_api_get.assert_called_once_with(self.context, filters={ 'recovery_method': 'auto' }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_segment_by_service_type(self, mock_api_get): fake_segment2 = copy.deepcopy(fake_segment) fake_segment2['name'] = 'fake_segment' mock_api_get.return_value = [fake_segment2, fake_segment] segment_result = (segment.FailoverSegmentList. 
get_all(self.context, filters={'service_type': 'COMPUTE'})) self.assertEqual(2, len(segment_result)) self.compare_obj(segment_result[0], fake_segment2) self.compare_obj(segment_result[1], fake_segment) mock_api_get.assert_called_once_with(self.context, filters={ 'service_type': 'COMPUTE' }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch('masakari.db.failover_segment_get_all_by_filters') def test_get_limit_and_marker_invalid_marker(self, mock_api_get): segment_name = 'unknown_segment' mock_api_get.side_effect = exception.MarkerNotFound(marker=segment_name ) self.assertRaises(exception.MarkerNotFound, segment.FailoverSegmentList.get_all, self.context, limit=5, marker=segment_name) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.return_value = fake_segment segment_object = segment.FailoverSegment(context=self.context) segment_object.name = "foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment segment_object.save() self.compare_obj(segment_object, fake_segment) self.assertTrue(mock_segment_update.called) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start), mock.call(self.context, segment_object, action=action, phase=phase_end)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save_failover_segment_not_found(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.side_effect = ( exception.FailoverSegmentNotFound(id=uuidsentinel.fake_segment)) segment_object = segment.FailoverSegment(context=self.context) segment_object.name = 
"foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment self.assertRaises(exception.FailoverSegmentNotFound, segment_object.save) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_segment_api') @mock.patch('masakari.db.failover_segment_update') def test_save_failover_segment_already_exists(self, mock_segment_update, mock_notify_about_segment_api): mock_segment_update.side_effect = ( exception.FailoverSegmentExists(name="foo-segment")) segment_object = segment.FailoverSegment(context=self.context) segment_object.name = "foo-segment" segment_object.id = 123 segment_object.uuid = uuidsentinel.fake_segment self.assertRaises(exception.FailoverSegmentExists, segment_object.save) action = fields.EventNotificationAction.SEGMENT_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, segment_object, action=action, phase=phase_start)] mock_notify_about_segment_api.assert_has_calls(notify_calls) masakari-9.0.0/masakari/tests/unit/objects/test_notifications.py0000664000175000017500000002601013656747723025201 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import copy from unittest import mock from oslo_utils import timeutils from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari.objects import fields from masakari.objects import notification from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) OPTIONAL = ['recovery_workflow_details'] def _fake_db_notification(**kwargs): fake_notification = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'notification_uuid': uuidsentinel.fake_notification, 'generated_time': NOW, 'type': 'COMPUTE_HOST', 'payload': '{"fake_key": "fake_value"}', 'status': 'new', 'source_host_uuid': uuidsentinel.fake_host, } fake_notification.update(kwargs) return fake_notification def _fake_object_notification(**kwargs): fake_notification = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'notification_uuid': uuidsentinel.fake_notification, 'generated_time': NOW, 'type': 'COMPUTE_HOST', 'payload': {"fake_key": "fake_value"}, 'status': 'new', 'source_host_uuid': uuidsentinel.fake_host, } fake_notification.update(kwargs) return fake_notification fake_object_notification = _fake_object_notification() fake_db_notification = _fake_db_notification() class TestNotificationObject(test_objects._LocalTest): def _test_query(self, db_method, obj_method, *args, **kwargs): with mock.patch.object(db, db_method) as mock_db: db_exception = kwargs.pop('db_exception', None) if db_exception: mock_db.side_effect = db_exception else: mock_db.return_value = fake_db_notification obj = getattr(notification.Notification, obj_method )(self.context, *args, **kwargs) if db_exception: self.assertIsNone(obj) self.compare_obj(obj, fake_object_notification, allow_missing=OPTIONAL) def test_get_by_id(self): self._test_query('notification_get_by_id', 'get_by_id', 123) def 
test_get_by_uuid(self): self._test_query('notification_get_by_uuid', 'get_by_uuid', uuidsentinel.fake_segment) def _notification_create_attributes(self, skip_uuid=False): notification_obj = notification.Notification(context=self.context) notification_obj.generated_time = NOW notification_obj.type = "COMPUTE_HOST" notification_obj.payload = {'fake_key': 'fake_value'} notification_obj.status = "new" if not skip_uuid: notification_obj.notification_uuid = uuidsentinel.fake_notification notification_obj.source_host_uuid = uuidsentinel.fake_host return notification_obj @mock.patch.object(api_utils, 'notify_about_notification_api') @mock.patch.object(db, 'notification_create') def test_create(self, mock_db_create, mock_notify_about_notification_api): mock_db_create.return_value = fake_db_notification notification_obj = self._notification_create_attributes() notification_obj.create() self.compare_obj(notification_obj, fake_object_notification, allow_missing=OPTIONAL) mock_db_create.assert_called_once_with(self.context, { 'source_host_uuid': uuidsentinel.fake_host, 'notification_uuid': uuidsentinel.fake_notification, 'generated_time': NOW, 'status': 'new', 'type': 'COMPUTE_HOST', 'payload': '{"fake_key": "fake_value"}'}) action = fields.EventNotificationAction.NOTIFICATION_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification_obj, action=action, phase=phase_start), mock.call(self.context, notification_obj, action=action, phase=phase_end)] mock_notify_about_notification_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_notification_api') @mock.patch.object(db, 'notification_create') def test_recreate_fails(self, mock_notification_create, mock_notify_about_notification_api): mock_notification_create.return_value = fake_db_notification notification_obj = self._notification_create_attributes() notification_obj.create() 
self.assertRaises(exception.ObjectActionError, notification_obj.create) mock_notification_create.assert_called_once_with(self.context, { 'source_host_uuid': uuidsentinel.fake_host, 'notification_uuid': uuidsentinel.fake_notification, 'generated_time': NOW, 'status': 'new', 'type': 'COMPUTE_HOST', 'payload': '{"fake_key": "fake_value"}'}) action = fields.EventNotificationAction.NOTIFICATION_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification_obj, action=action, phase=phase_start), mock.call(self.context, notification_obj, action=action, phase=phase_end)] mock_notify_about_notification_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_notification_api') @mock.patch.object(db, 'notification_create') @mock.patch.object(uuidutils, 'generate_uuid') def test_create_without_passing_uuid_in_updates(self, mock_generate_uuid, mock_db_create, mock_notify_about_notification_api): mock_db_create.return_value = fake_db_notification mock_generate_uuid.return_value = uuidsentinel.fake_notification notification_obj = self._notification_create_attributes(skip_uuid=True) notification_obj.create() self.compare_obj(notification_obj, fake_object_notification, allow_missing=OPTIONAL) mock_db_create.assert_called_once_with(self.context, { 'source_host_uuid': uuidsentinel.fake_host, 'notification_uuid': uuidsentinel.fake_notification, 'generated_time': NOW, 'status': 'new', 'type': 'COMPUTE_HOST', 'payload': '{"fake_key": "fake_value"}'}) self.assertTrue(mock_generate_uuid.called) action = fields.EventNotificationAction.NOTIFICATION_CREATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, notification_obj, action=action, phase=phase_start)] mock_notify_about_notification_api.assert_has_calls(notify_calls) @mock.patch.object(db, 'notification_delete') def test_destroy(self, mock_notification_delete): notification_obj = 
self._notification_create_attributes() notification_obj.id = 123 notification_obj.destroy() (mock_notification_delete. assert_called_once_with(self.context, uuidsentinel.fake_notification)) self.assertRaises(NotImplementedError, lambda: notification_obj.id) @mock.patch.object(db, 'notification_delete') def test_destroy_without_id(self, mock_destroy): notification_obj = self._notification_create_attributes() self.assertRaises(exception.ObjectActionError, notification_obj.destroy) self.assertFalse(mock_destroy.called) @mock.patch.object(db, 'notification_delete') def test_destroy_without_notification_uuid(self, mock_destroy): notification_obj = self._notification_create_attributes(skip_uuid=True) notification_obj.id = 123 self.assertRaises(exception.ObjectActionError, notification_obj.destroy) self.assertFalse(mock_destroy.called) @mock.patch.object(db, 'notifications_get_all_by_filters') def test_get_notification_by_filters(self, mock_api_get): fake_db_notification2 = copy.deepcopy(fake_db_notification) fake_db_notification2['type'] = 'PROCESS' fake_db_notification2['id'] = 124 fake_db_notification2[ 'notification_uuid'] = uuidsentinel.fake_db_notification2 mock_api_get.return_value = [fake_db_notification2, fake_db_notification] filters = {'status': 'new'} notification_result = (notification.NotificationList. get_all(self.context, filters=filters)) self.assertEqual(2, len(notification_result)) mock_api_get.assert_called_once_with(self.context, filters={ 'status': 'new' }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch.object(db, 'notifications_get_all_by_filters') def test_get_limit_and_marker_invalid_marker(self, mock_api_get): notification_uuid = uuidsentinel.fake_notification mock_api_get.side_effect = (exception. 
MarkerNotFound(marker=notification_uuid)) self.assertRaises(exception.MarkerNotFound, notification.NotificationList.get_all, self.context, limit=5, marker=notification_uuid) @mock.patch.object(db, 'notification_update') def test_save(self, mock_notification_update): mock_notification_update.return_value = fake_db_notification notification_obj = self._notification_create_attributes() notification_obj.id = 123 notification_obj.save() self.compare_obj(notification_obj, fake_object_notification, allow_missing=OPTIONAL) (mock_notification_update. assert_called_once_with(self.context, uuidsentinel.fake_notification, {'source_host_uuid': uuidsentinel.fake_host, 'notificati' 'on_uuid': uuidsentinel.fake_notification, 'status': 'new', 'generated_time': NOW, 'payload': {'fake_key': u'fake_value'}, 'type': 'COMPUTE_HOST'} )) masakari-9.0.0/masakari/tests/unit/objects/test_fields.py0000664000175000017500000001763713656747723023615 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import iso8601 from oslo_versionedobjects import exception as ovo_exc import six from masakari.objects import fields from masakari import test from masakari import utils class FakeFieldType(fields.FieldType): def coerce(self, obj, attr, value): return '*%s*' % value def to_primitive(self, obj, attr, value): return '!%s!' 
% value def from_primitive(self, obj, attr, value): return value[1:-1] class FakeEnum(fields.Enum): FROG = "frog" PLATYPUS = "platypus" ALLIGATOR = "alligator" ALL = (FROG, PLATYPUS, ALLIGATOR) def __init__(self, **kwargs): super(FakeEnum, self).__init__(valid_values=FakeEnum.ALL, **kwargs) class FakeEnumAlt(fields.Enum): FROG = "frog" PLATYPUS = "platypus" AARDVARK = "aardvark" ALL = (FROG, PLATYPUS, AARDVARK) def __init__(self, **kwargs): super(FakeEnumAlt, self).__init__(valid_values=FakeEnumAlt.ALL, **kwargs) class FakeEnumField(fields.BaseEnumField): AUTO_TYPE = FakeEnum() class FakeEnumAltField(fields.BaseEnumField): AUTO_TYPE = FakeEnumAlt() class TestField(test.NoDBTestCase): def setUp(self): super(TestField, self).setUp() self.field = fields.Field(FakeFieldType()) self.coerce_good_values = [('foo', '*foo*')] self.coerce_bad_values = [] self.to_primitive_values = [('foo', '!foo!')] self.from_primitive_values = [('!foo!', 'foo')] def test_coerce_good_values(self): for in_val, out_val in self.coerce_good_values: self.assertEqual(out_val, self.field.coerce('obj', 'attr', in_val)) def test_coerce_bad_values(self): for in_val in self.coerce_bad_values: self.assertRaises((TypeError, ValueError), self.field.coerce, 'obj', 'attr', in_val) def test_to_primitive(self): for in_val, prim_val in self.to_primitive_values: self.assertEqual(prim_val, self.field.to_primitive('obj', 'attr', in_val)) def test_from_primitive(self): class ObjectLikeThing(object): _context = 'context' for prim_val, out_val in self.from_primitive_values: self.assertEqual(out_val, self.field.from_primitive(ObjectLikeThing, 'attr', prim_val)) def test_stringify(self): self.assertEqual('123', self.field.stringify(123)) class TestString(TestField): def setUp(self): super(TestString, self).setUp() self.field = fields.StringField() self.coerce_good_values = [('foo', 'foo'), (1, '1'), (True, 'True')] if six.PY2: self.coerce_good_values.append((int(1), '1')) self.coerce_bad_values = [None] 
self.to_primitive_values = self.coerce_good_values[0:1] self.from_primitive_values = self.coerce_good_values[0:1] def test_stringify(self): self.assertEqual("'123'", self.field.stringify(123)) class TestBaseEnum(TestField): def setUp(self): super(TestBaseEnum, self).setUp() self.field = FakeEnumField() self.coerce_good_values = [('frog', 'frog'), ('platypus', 'platypus'), ('alligator', 'alligator')] self.coerce_bad_values = ['aardvark', 'wookie'] self.to_primitive_values = self.coerce_good_values[0:1] self.from_primitive_values = self.coerce_good_values[0:1] def test_stringify(self): self.assertEqual("'platypus'", self.field.stringify('platypus')) def test_stringify_invalid(self): self.assertRaises(ValueError, self.field.stringify, 'aardvark') def test_fingerprint(self): field1 = FakeEnumField() field2 = FakeEnumAltField() self.assertNotEqual(str(field1), str(field2)) class TestEnum(TestField): def setUp(self): super(TestEnum, self).setUp() self.field = fields.EnumField( valid_values=['foo', 'bar', 1, 1, True]) self.coerce_good_values = [('foo', 'foo'), (1, '1'), (True, 'True')] if six.PY2: self.coerce_good_values.append((int(1), '1')) self.coerce_bad_values = ['boo', 2, False] self.to_primitive_values = self.coerce_good_values[0:1] self.from_primitive_values = self.coerce_good_values[0:1] def test_stringify(self): self.assertEqual("'foo'", self.field.stringify('foo')) def test_stringify_invalid(self): self.assertRaises(ValueError, self.field.stringify, '123') def test_fingerprint(self): field1 = fields.EnumField(valid_values=['foo', 'bar']) field2 = fields.EnumField(valid_values=['foo', 'bar1']) self.assertNotEqual(str(field1), str(field2)) def test_without_valid_values(self): self.assertRaises(ovo_exc.EnumValidValuesInvalidError, fields.EnumField, 1) def test_with_empty_values(self): self.assertRaises(ovo_exc.EnumRequiresValidValuesError, fields.EnumField, []) class TestDictOfStrings(TestField): def setUp(self): super(TestDictOfStrings, self).setUp() self.field = 
fields.DictOfStringsField() self.coerce_good_values = [({'foo': 'bar'}, {'foo': 'bar'}), ({'foo': 1}, {'foo': '1'})] self.coerce_bad_values = [{1: 'bar'}, {'foo': None}, 'foo'] self.to_primitive_values = [({'foo': 'bar'}, {'foo': 'bar'})] self.from_primitive_values = [({'foo': 'bar'}, {'foo': 'bar'})] def test_stringify(self): self.assertEqual("{key='val'}", self.field.stringify({'key': 'val'})) class TestInteger(TestField): def setUp(self): super(TestInteger, self).setUp() self.field = fields.IntegerField() self.coerce_good_values = [(1, 1), ('1', 1)] self.coerce_bad_values = ['foo', None] self.to_primitive_values = self.coerce_good_values[0:1] self.from_primitive_values = self.coerce_good_values[0:1] class TestBoolean(TestField): def setUp(self): super(TestBoolean, self).setUp() self.field = fields.BooleanField() self.coerce_good_values = [(True, True), (False, False), (1, True), ('foo', True), (0, False), ('', False)] self.coerce_bad_values = [] self.to_primitive_values = self.coerce_good_values[0:2] self.from_primitive_values = self.coerce_good_values[0:2] class TestDateTime(TestField): def setUp(self): super(TestDateTime, self).setUp() self.dt = datetime.datetime(2016, 11, 5, tzinfo=iso8601.UTC) self.field = fields.DateTimeField() self.coerce_good_values = [(self.dt, self.dt), (utils.isotime(self.dt), self.dt)] self.coerce_bad_values = [1, 'foo'] self.to_primitive_values = [(self.dt, utils.isotime(self.dt))] self.from_primitive_values = [(utils.isotime(self.dt), self.dt)] def test_stringify(self): self.assertEqual( '2016-11-05T18:00:00Z', self.field.stringify( datetime.datetime(2016, 11, 5, 18, 0, 0, tzinfo=iso8601.iso8601.UTC))) masakari-9.0.0/masakari/tests/unit/objects/test_hosts.py0000664000175000017500000003015213656747723023472 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from unittest import mock from oslo_utils import timeutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari.objects import fields from masakari.objects import host from masakari.objects import segment from masakari.tests.unit.objects import test_objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) fake_segment_dict = { 'name': 'fake_segment', 'recovery_method': 'auto', 'description': 'fake', 'service_type': 'CINDER', 'id': 123, 'uuid': uuidsentinel.fake_segment, 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False } def _fake_host(**kwargs): fake_host = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'uuid': uuidsentinel.fake_host, 'name': 'fake-host', 'reserved': False, 'on_maintenance': False, 'control_attributes': 'fake_control_attributes', 'type': 'SSH', 'failover_segment': fake_segment_dict, 'failover_segment_id': uuidsentinel.fake_segment, } fake_host.update(kwargs) return fake_host fake_host = _fake_host() class TestHostObject(test_objects._LocalTest): def _compare_segment_and_host_data(self, obj): self.compare_obj(obj.failover_segment, fake_segment_dict) self.assertEqual(obj.name, fake_host.get('name')) self.assertEqual(obj.reserved, fake_host.get('reserved')) self.assertEqual(obj.on_maintenance, fake_host.get('on_maintenance')) self.assertEqual(obj.type, fake_host.get('type')) self.assertEqual(obj.control_attributes, fake_host.get('control_attri' 'butes')) 
self.assertEqual(obj.id, 123) def _test_query(self, db_method, obj_method, *args, **kwargs): with mock.patch.object(db, db_method) as mock_db: db_exception = kwargs.pop('db_exception', None) if db_exception: mock_db.side_effect = db_exception else: mock_db.return_value = fake_host obj = getattr(host.Host, obj_method)(self.context, *args, **kwargs) if db_exception: self.assertIsNone(obj) self._compare_segment_and_host_data(obj) def test_get_by_id(self): self._test_query('host_get_by_id', 'get_by_id', 123) def test_get_by_uuid(self): self._test_query('host_get_by_uuid', 'get_by_uuid', uuidsentinel.fake_segment) def test_get_by_name(self): self._test_query('host_get_by_name', 'get_by_name', 'fake-host') def _host_create_attributes(self): host_obj = host.Host(context=self.context) host_obj.name = 'foo-host' host_obj.failover_segment_id = uuidsentinel.fake_segment host_obj.type = 'fake-type' host_obj.reserved = False host_obj.on_maintenance = False host_obj.control_attributes = 'fake_attributes' host_obj.uuid = uuidsentinel.fake_host return host_obj @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_create') def test_create(self, mock_db_create, mock_notify_about_host_api): mock_db_create.return_value = fake_host host_obj = self._host_create_attributes() host_obj.create() self._compare_segment_and_host_data(host_obj) mock_db_create.assert_called_once_with(self.context, { 'failover_segment_id': uuidsentinel.fake_segment, 'on_maintenance': False, 'uuid': uuidsentinel.fake_host, 'reserved': False, 'name': u'foo-host', 'control_attributes': u'fake_attributes', 'type': u'fake-type'}) action = fields.EventNotificationAction.HOST_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, host_obj, action=action, phase=phase_start), mock.call(self.context, host_obj, action=action, phase=phase_end)] mock_notify_about_host_api.assert_has_calls(notify_calls) 
@mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_create') def test_recreate_fails(self, mock_host_create, mock_notify_about_host_api): mock_host_create.return_value = fake_host host_obj = self._host_create_attributes() host_obj.create() self.assertRaises(exception.ObjectActionError, host_obj.create) action = fields.EventNotificationAction.HOST_CREATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, host_obj, action=action, phase=phase_start), mock.call(self.context, host_obj, action=action, phase=phase_end)] mock_notify_about_host_api.assert_has_calls(notify_calls) mock_host_create.assert_called_once_with(self.context, { 'uuid': uuidsentinel.fake_host, 'name': 'foo-host', 'failover_segment_id': uuidsentinel.fake_segment, 'type': 'fake-type', 'reserved': False, 'on_maintenance': False, 'control_attributes': 'fake_attributes'}) @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_delete') def test_destroy(self, mock_host_destroy, mock_notify_about_host_api): host_obj = self._host_create_attributes() host_obj.id = 123 host_obj.destroy() mock_host_destroy.assert_called_once_with( self.context, uuidsentinel.fake_host) action = fields.EventNotificationAction.HOST_DELETE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, host_obj, action=action, phase=phase_start), mock.call(self.context, host_obj, action=action, phase=phase_end)] mock_notify_about_host_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_delete') def test_destroy_host_not_found(self, mock_host_destroy, mock_notify_about_host_api): mock_host_destroy.side_effect = exception.HostNotFound(id=123) host_obj = self._host_create_attributes() host_obj.id = 123 self.assertRaises(exception.HostNotFound, 
host_obj.destroy) action = fields.EventNotificationAction.HOST_DELETE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, host_obj, action=action, phase=phase_start)] mock_notify_about_host_api.assert_has_calls(notify_calls) @mock.patch.object(db, 'host_get_all_by_filters') def test_get_host_by_filters(self, mock_api_get): fake_host2 = copy.deepcopy(fake_host) fake_host2['name'] = 'fake_host22' mock_api_get.return_value = [fake_host2, fake_host] filters = {'reserved': False} host_result = host.HostList.get_all(self.context, filters=filters) self.assertEqual(2, len(host_result)) mock_api_get.assert_called_once_with(self.context, filters={ 'reserved': False }, limit=None, marker=None, sort_dirs=None, sort_keys=None) @mock.patch.object(db, 'host_get_all_by_filters') def test_get_limit_and_marker_invalid_marker(self, mock_api_get): host_name = 'fake-host' mock_api_get.side_effect = exception.MarkerNotFound(marker=host_name) self.assertRaises(exception.MarkerNotFound, host.HostList.get_all, self.context, limit=5, marker=host_name) @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_update') def test_save(self, mock_host_update, mock_notify_about_host_api): mock_host_update.return_value = fake_host host_obj = self._host_create_attributes() host_obj.id = 123 host_obj.save() self._compare_segment_and_host_data(host_obj) (mock_host_update. 
assert_called_once_with(self.context, uuidsentinel.fake_host, {'control_attributes': u'fake_attributes', 'type': u'fake-type', 'failover_segment_id': uuidsentinel.fake_segment, 'name': u'foo-host', 'uuid': uuidsentinel.fake_host, 'reserved': False, 'on_maintenance': False })) action = fields.EventNotificationAction.HOST_UPDATE phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, host_obj, action=action, phase=phase_start), mock.call(self.context, host_obj, action=action, phase=phase_end)] mock_notify_about_host_api.assert_has_calls(notify_calls) @mock.patch.object(db, 'host_update') def test_save_lazy_attribute_changed(self, mock_host_update): mock_host_update.return_value = fake_host host_obj = self._host_create_attributes() host_obj.failover_segment = (segment. FailoverSegment(context=self.context)) host_obj.id = 123 self.assertRaises(exception.ObjectActionError, host_obj.save) @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_update') def test_save_host_already_exists(self, mock_host_update, mock_notify_about_host_api): mock_host_update.side_effect = exception.HostExists(name="foo-host") host_object = host.Host(context=self.context) host_object.name = "foo-host" host_object.id = 123 host_object.uuid = uuidsentinel.fake_host self.assertRaises(exception.HostExists, host_object.save) action = fields.EventNotificationAction.HOST_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, host_object, action=action, phase=phase_start)] mock_notify_about_host_api.assert_has_calls(notify_calls) @mock.patch.object(api_utils, 'notify_about_host_api') @mock.patch.object(db, 'host_update') def test_save_host_not_found(self, mock_host_update, mock_notify_about_host_api): mock_host_update.side_effect = exception.HostNotFound(id="foo-host") host_object = host.Host(context=self.context) host_object.name = "foo-host" 
host_object.id = 123 host_object.uuid = uuidsentinel.fake_host self.assertRaises(exception.HostNotFound, host_object.save) action = fields.EventNotificationAction.HOST_UPDATE phase_start = fields.EventNotificationPhase.START notify_calls = [ mock.call(self.context, host_object, action=action, phase=phase_start)] mock_notify_about_host_api.assert_has_calls(notify_calls) masakari-9.0.0/masakari/tests/unit/test_wsgi.py0000664000175000017500000002001313656747723021645 0ustar zuulzuul00000000000000# Copyright 2011 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Unit tests for `masakari.wsgi`.""" import os.path import socket import tempfile from unittest import mock import eventlet import eventlet.wsgi from oslo_config import cfg import requests import testtools import masakari.exception from masakari import test from masakari.tests.unit import utils import masakari.wsgi SSL_CERT_DIR = os.path.normpath(os.path.join( os.path.dirname(os.path.abspath(__file__)), 'ssl_cert')) CONF = cfg.CONF class TestLoaderNothingExists(test.NoDBTestCase): """Loader tests where os.path.exists always returns False.""" def setUp(self): super(TestLoaderNothingExists, self).setUp() self.stub_out('os.path.exists', lambda _: False) def test_relpath_config_not_found(self): self.flags(api_paste_config='api-paste.ini', group='wsgi') self.assertRaises( masakari.exception.ConfigNotFound, masakari.wsgi.Loader, ) def test_asbpath_config_not_found(self): self.flags(api_paste_config='/etc/masakari/api-paste.ini', group='wsgi') self.assertRaises( masakari.exception.ConfigNotFound, masakari.wsgi.Loader, ) class TestLoaderNormalFilesystem(test.NoDBTestCase): """Loader tests with normal filesystem (unmodified os.path module).""" _paste_config = """ [app:test_app] use = egg:Paste#static document_root = /tmp """ def setUp(self): super(TestLoaderNormalFilesystem, self).setUp() self.config = tempfile.NamedTemporaryFile(mode="w+t") self.config.write(self._paste_config.lstrip()) self.config.seek(0) self.config.flush() self.loader = masakari.wsgi.Loader(self.config.name) def test_config_found(self): self.assertEqual(self.config.name, self.loader.config_path) def test_app_not_found(self): self.assertRaises( masakari.exception.PasteAppNotFound, self.loader.load_app, "nonexistent app", ) def test_app_found(self): url_parser = self.loader.load_app("test_app") self.assertEqual("/tmp", url_parser.directory) def tearDown(self): self.config.close() super(TestLoaderNormalFilesystem, self).tearDown() class TestWSGIServer(test.NoDBTestCase): """WSGI server tests.""" def 
test_no_app(self): server = masakari.wsgi.Server("test_app", None) self.assertEqual("test_app", server.name) def test_custom_max_header_line(self): self.flags(max_header_line=4096, group='wsgi') # Default is 16384 masakari.wsgi.Server("test_custom_max_header_line", None) self.assertEqual(CONF.wsgi.max_header_line, eventlet.wsgi.MAX_HEADER_LINE) def test_start_random_port(self): server = masakari.wsgi.Server("test_random_port", None, host="127.0.0.1", port=0) server.start() self.assertNotEqual(0, server.port) server.stop() server.wait() @testtools.skipIf(not utils.is_ipv6_supported(), "no ipv6 support") def test_start_random_port_with_ipv6(self): server = masakari.wsgi.Server("test_random_port", None, host="::1", port=0) server.start() self.assertEqual("::1", server.host) self.assertNotEqual(0, server.port) server.stop() server.wait() @testtools.skipIf(not utils.is_linux(), 'SO_REUSEADDR behaves differently ' 'on OSX and BSD, see bugs ' '1436895 and 1467145') def test_socket_options_for_simple_server(self): # test normal socket options has set properly self.flags(tcp_keepidle=500, group='wsgi') server = masakari.wsgi.Server("test_socket_options", None, host="127.0.0.1", port=0) server.start() sock = server._socket self.assertEqual(1, sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR)) self.assertEqual(1, sock.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE)) if hasattr(socket, 'TCP_KEEPIDLE'): self.assertEqual(CONF.wsgi.tcp_keepidle, sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE)) server.stop() server.wait() def test_server_pool_waitall(self): # test pools waitall method gets called while stopping server server = masakari.wsgi.Server("test_server", None, host="127.0.0.1") server.start() with mock.patch.object(server._pool, 'waitall') as mock_waitall: server.stop() server.wait() mock_waitall.assert_called_once_with() def test_uri_length_limit(self): server = masakari.wsgi.Server("test_uri_length_limit", None, host="127.0.0.1", max_url_len=16384) 
server.start() uri = "http://127.0.0.1:%d/%s" % (server.port, 10000 * 'x') resp = requests.get(uri, proxies={"http": ""}) eventlet.sleep(0) self.assertNotEqual(resp.status_code, requests.codes.REQUEST_URI_TOO_LARGE) uri = "http://127.0.0.1:%d/%s" % (server.port, 20000 * 'x') resp = requests.get(uri, proxies={"http": ""}) eventlet.sleep(0) self.assertEqual(resp.status_code, requests.codes.REQUEST_URI_TOO_LARGE) server.stop() server.wait() def test_reset_pool_size_to_default(self): server = masakari.wsgi.Server("test_resize", None, host="127.0.0.1", max_url_len=16384) server.start() # Stopping the server, which in turn sets pool size to 0 server.stop() self.assertEqual(server._pool.size, 0) # Resetting pool size to default server.reset() server.start() self.assertEqual(server._pool.size, CONF.wsgi.default_pool_size) def test_client_socket_timeout(self): self.flags(client_socket_timeout=5, group='wsgi') # mocking eventlet spawn method to check it is called with # configured 'client_socket_timeout' value. with mock.patch.object(eventlet, 'spawn') as mock_spawn: server = masakari.wsgi.Server("test_app", None, host="127.0.0.1", port=0) server.start() _, kwargs = mock_spawn.call_args self.assertEqual(CONF.wsgi.client_socket_timeout, kwargs['socket_timeout']) server.stop() def test_keep_alive(self): self.flags(keep_alive=False, group='wsgi') # mocking eventlet spawn method to check it is called with # configured 'keep_alive' value. with mock.patch.object(eventlet, 'spawn') as mock_spawn: server = masakari.wsgi.Server("test_app", None, host="127.0.0.1", port=0) server.start() _, kwargs = mock_spawn.call_args self.assertEqual(CONF.wsgi.keep_alive, kwargs['keepalive']) server.stop() masakari-9.0.0/masakari/tests/unit/test_service.py0000664000175000017500000001465113656747723022347 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. 
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit Tests for remote procedure calls using queue """ from unittest import mock from oslo_concurrency import processutils from oslo_config import cfg from oslo_service import service as _service from masakari import exception from masakari import manager from masakari import rpc from masakari import service from masakari import test CONF = cfg.CONF class FakeManager(manager.Manager): """Fake manager for tests.""" def test_method(self): return 'manager' class ServiceManagerTestCase(test.NoDBTestCase): """Test cases for Services.""" @mock.patch.object(rpc, 'init') def test_message_gets_to_manager(self, mock_rpc_init): serv = service.Service('test', 'test', 'test', 'masakari.tests.unit.test_service.FakeManager') self.assertEqual('manager', serv.test_method()) class ServiceTestCase(test.NoDBTestCase): """Test cases for Services.""" def setUp(self): super(ServiceTestCase, self).setUp() self.host = 'foo' self.binary = 'masakari-engine' self.topic = 'fake' @mock.patch.object(rpc, 'init') def test_create(self, mock_rpc_init): app = service.Service.create(host=self.host, binary=self.binary, topic=self.topic) self.assertTrue(app) @mock.patch.object(rpc, 'init') def test_repr(self, mock_rpc_init): # Test if a Service object is correctly represented, for example in # log files. 
serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') exp = "" self.assertEqual(exp, repr(serv)) @mock.patch.object(_service.Service, 'stop') @mock.patch.object(rpc, 'init') @mock.patch.object(rpc, 'get_server') def test_parent_graceful_shutdown(self, mock_rpc, mock_rpc_init, mock_stop): serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') serv.manager = mock.Mock() serv.manager.service_name = self.topic serv.start() serv.stop() serv.rpcserver.start.assert_called_once_with() serv.rpcserver.stop.assert_called_once_with() mock_stop.assert_called_once_with() @mock.patch.object(rpc, 'init') def test_reset(self, mock_rpc_init): serv = service.Service(self.host, self.binary, self.topic, 'masakari.tests.unit.test_service.FakeManager') with mock.patch.object(serv.manager, 'reset') as mock_reset: serv.reset() mock_reset.assert_called_once_with() class TestWSGIService(test.NoDBTestCase): def setUp(self): super(TestWSGIService, self).setUp() self.stub_out('masakari.wsgi.Loader.load_app', mock.MagicMock()) def test_workers_set_default(self): test_service = service.WSGIService("masakari_api") self.assertEqual(test_service.workers, processutils.get_worker_count()) def test_workers_set_good_user_setting(self): CONF.set_override('masakari_api_workers', 8) test_service = service.WSGIService("masakari_api") self.assertEqual(test_service.workers, 8) def test_workers_set_zero_user_setting(self): CONF.set_override('masakari_api_workers', 0) test_service = service.WSGIService("masakari_api") # If a value less than 1 is used, defaults to number of procs available self.assertEqual(test_service.workers, processutils.get_worker_count()) def test_service_start_with_illegal_workers(self): CONF.set_override("masakari_api_workers", -1) self.assertRaises(exception.InvalidInput, service.WSGIService, "masakari_api") def test_reset_pool_size_to_default(self): test_service = 
service.WSGIService("test_service") test_service.start() # Stopping the service, which in turn sets pool size to 0 test_service.stop() self.assertEqual(test_service.server._pool.size, 0) # Resetting pool size to default test_service.reset() test_service.start() self.assertEqual(test_service.server._pool.size, CONF.wsgi.default_pool_size) class TestLauncher(test.NoDBTestCase): @mock.patch.object(_service, 'launch') def test_launch_app(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service) mock_launch.assert_called_once_with(mock.ANY, mock.sentinel.service, workers=None, restart_method='mutate') @mock.patch.object(_service, 'launch') def test_launch_app_with_workers(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service, workers=mock.sentinel.workers) mock_launch.assert_called_once_with(mock.ANY, mock.sentinel.service, workers=mock.sentinel.workers, restart_method='mutate') @mock.patch.object(_service, 'launch') def test_launch_app_more_than_once_raises(self, mock_launch): service._launcher = None service.serve(mock.sentinel.service) self.assertRaises(RuntimeError, service.serve, mock.sentinel.service) masakari-9.0.0/masakari/tests/unit/cmd/0000775000175000017500000000000013656750011020012 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/cmd/__init__.py0000664000175000017500000000000013656747723022131 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/cmd/test_masakari_api.py0000664000175000017500000000404513656747723024067 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock from masakari.cmd import api from masakari import config from masakari import exception from masakari import test @mock.patch.object(config, 'parse_args', new=lambda *args, **kwargs: None) class TestMasakariAPI(test.NoDBTestCase): def test_continues_without_failure(self): fake_server = mock.MagicMock() fake_server.workers = 123 def fake_service(api, **kw): return fake_server with mock.patch.object(api, 'service') as mock_service: mock_service.WSGIService.side_effect = fake_service api.main() mock_service.WSGIService.assert_has_calls([ mock.call('masakari_api', use_ssl=False), ]) launcher = mock_service.process_launcher.return_value launcher.launch_service.assert_called_once_with( fake_server, workers=123) self.assertTrue(launcher.wait.called) @mock.patch('sys.exit') def test_fails_if_all_failed(self, mock_exit): mock_exit.side_effect = exception.MasakariException with mock.patch.object(api, 'service') as mock_service: mock_service.WSGIService.side_effect = exception.PasteAppNotFound( name='masakari_api', path='/') self.assertRaises(exception.MasakariException, api.main) mock_exit.assert_called_once_with(1) launcher = mock_service.process_launcher.return_value self.assertFalse(launcher.wait.called) masakari-9.0.0/masakari/tests/unit/cmd/test_status.py0000664000175000017500000000176613656747723023000 0ustar zuulzuul00000000000000# Copyright (c) 2018 NEC, Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_upgradecheck.upgradecheck import Code from masakari.cmd import status from masakari import test class TestUpgradeChecks(test.TestCase): def setUp(self): super(TestUpgradeChecks, self).setUp() self.cmd = status.Checks() def test__sample_check(self): check_result = self.cmd._sample_check() self.assertEqual( Code.SUCCESS, check_result.code) masakari-9.0.0/masakari/tests/unit/test_versions.py0000664000175000017500000000420113656747723022545 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg import six from six.moves import builtins from masakari import test from masakari import version class VersionTestCase(test.NoDBTestCase): """Test cases for Versions code.""" def test_version_string_with_package_is_good(self): """Ensure uninstalled code get version string.""" self.stub_out('masakari.version.version_info.version_string', lambda: '5.5.5.5') self.stub_out('masakari.version.MASAKARI_PACKAGE', 'g9ec3421') self.assertEqual("5.5.5.5-g9ec3421", version.version_string_with_package()) def test_release_file(self): version.loaded = False real_open = builtins.open real_find_file = cfg.CONF.find_file def fake_find_file(self, name): if name == "release": return "/etc/masakari/release" return real_find_file(self, name) def fake_open(path, *args, **kwargs): if path == "/etc/masakari/release": data = """[Masakari] vendor = ACME Corporation product = ACME Masakari package = 1337""" return six.StringIO(data) return real_open(path, *args, **kwargs) self.stub_out('six.moves.builtins.open', fake_open) self.stub_out('oslo_config.cfg.ConfigOpts.find_file', fake_find_file) self.assertEqual(version.vendor_string(), "ACME Corporation") self.assertEqual(version.product_string(), "ACME Masakari") self.assertEqual(version.package_string(), "1337") masakari-9.0.0/masakari/tests/unit/engine/0000775000175000017500000000000013656750011020514 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/__init__.py0000664000175000017500000000000013656747723022633 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/drivers/0000775000175000017500000000000013656750011022172 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/drivers/__init__.py0000664000175000017500000000000013656747723024311 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/0000775000175000017500000000000013656750011024024 5ustar 
zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/__init__.py0000664000175000017500000000000013656747723026143 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/test_process_failure_flow.py0000664000175000017500000001444413656747723031700 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit Tests for process failure TaskFlow """ from unittest import mock from masakari.compute import nova from masakari import context from masakari.engine.drivers.taskflow import process_failure from masakari import exception from masakari import test from masakari.tests.unit import fakes class ProcessFailureTestCase(test.TestCase): def setUp(self): super(ProcessFailureTestCase, self).setUp() self.ctxt = context.get_admin_context() self.process_name = "nova-compute" self.service_host = "fake-host" self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() # overriding 'wait_period_after_service_update' to 2 seconds # to reduce the wait period. self.override_config('wait_period_after_service_update', 2) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_compute_process_failure_flow(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.services.create("1", host=self.service_host, binary="nova-compute", status="enabled") # test DisableComputeNodeTask task = process_failure.DisableComputeNodeTask(self.ctxt, self.novaclient) task.execute(self.process_name, self.service_host) # test ConfirmComputeNodeDisabledTask task = process_failure.ConfirmComputeNodeDisabledTask(self.ctxt, self.novaclient) task.execute(self.process_name, self.service_host) # verify service is disabled self.assertTrue(self.novaclient.is_service_down(self.ctxt, self.service_host, self.process_name)) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call("Confirming compute service is disabled on host: " "'fake-host'"), mock.call("Confirmed compute service is disabled on host: " "'fake-host'", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_compute_process_failure_flow_disabled_process(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.services.create("1", host=self.service_host, binary="nova-compute", status="disabled") # test DisableComputeNodeTask task = process_failure.DisableComputeNodeTask(self.ctxt, self.novaclient) with mock.patch.object( self.novaclient, 'enable_disable_service') as mock_enable_disabled: task.execute(self.process_name, self.service_host) # ensure that enable_disable_service method is not called self.assertEqual(0, mock_enable_disabled.call_count) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call('Skipping recovery for process nova-compute as it is ' 'already disabled', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 'update_details') def test_compute_process_failure_flow_compute_service_disabled_failed( self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.services.create("1", host=self.service_host, binary="nova-compute", status="enabled") def fake_is_service_down(context, host_name, binary): # assume that service is not disabled return False # test DisableComputeNodeTask task = process_failure.DisableComputeNodeTask(self.ctxt, self.novaclient) task.execute(self.process_name, self.service_host) with mock.patch.object(self.novaclient, 'is_service_down', fake_is_service_down): # test ConfirmComputeNodeDisabledTask task = process_failure.ConfirmComputeNodeDisabledTask( self.ctxt, self.novaclient) self.assertRaises(exception.ProcessRecoveryFailureException, task.execute, self.process_name, self.service_host) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service 
on host: 'fake-host'", 1.0), mock.call("Confirming compute service is disabled on host: " "'fake-host'"), mock.call('Failed to disable service nova-compute', 1.0) ]) masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/test_taskflow_driver.py0000664000175000017500000004476313656747723030700 0ustar zuulzuul00000000000000# Copyright 2017 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock from oslo_utils import timeutils from taskflow.persistence import models from taskflow.persistence import path_based from masakari import context from masakari.engine.drivers.taskflow import base from masakari.engine.drivers.taskflow import driver from masakari.engine.drivers.taskflow import host_failure from masakari import exception from masakari.objects import fields from masakari import test from masakari.tests.unit import fakes from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) class FakeFlow(object): """Fake flow class of taskflow.""" def run(self): # run method which actually runs the flow pass class TaskflowDriverTestCase(test.TestCase): def setUp(self): super(TaskflowDriverTestCase, self).setUp() self.taskflow_driver = driver.TaskFlowDriver() self.ctxt = context.get_admin_context() @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_auto_priority_recovery_flow_auto_success( self, mock_rh_flow, 
mock_auto_flow, mock_listener): mock_auto_flow.return_value = FakeFlow FakeFlow.run = mock.Mock(return_value=None) self.taskflow_driver.execute_host_failure( self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=[ 'host-1', 'host-2']) # Ensures that 'auto' flow executes successfully self.assertTrue(mock_auto_flow.called) # Ensures that 'reserved_host' flow will not execute self.assertFalse(mock_rh_flow.called) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_auto_priority_recovery_flow_rh_success( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_auto_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.HostRecoveryFailureException) self.taskflow_driver.execute_host_failure( self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=[ 'host-1', 'host-2']) # Ensures that 'auto' flow fails to recover instances self.assertTrue(mock_auto_flow.called) # Ensures that 'reserved_host' flow executes as 'auto' flow fails self.assertTrue(mock_rh_flow.called) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_rh_priority_recovery_flow_rh_success( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_rh_flow.return_value = FakeFlow FakeFlow.run = mock.Mock(return_value=None) self.taskflow_driver.execute_host_failure( self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.RH_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=[ 'host-1', 'host-2']) # Ensures that 'reserved_host' flow executes successfully self.assertTrue(mock_rh_flow.called) # Ensures that 'auto' flow will not execute self.assertFalse(mock_auto_flow.called) @mock.patch.object(base, 'DynamicLogListener') 
@mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_rh_priority_recovery_flow_auto_success( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_rh_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.HostRecoveryFailureException) self.taskflow_driver.execute_host_failure( self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.RH_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=[ 'host-1', 'host-2']) # Ensures that 'reserved_host' flow fails to recover instances self.assertTrue(mock_rh_flow.called) # Ensures that 'auto' flow executes as 'reserved_host' flow fails self.assertTrue(mock_auto_flow.called) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_complete_auto_priority_recovery_flow_failure( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_auto_flow.return_value = FakeFlow mock_rh_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.HostRecoveryFailureException) # Ensures that both 'auto' and 'reserved_host' flow fails to # evacuate instances self.assertRaises( exception.HostRecoveryFailureException, self.taskflow_driver.execute_host_failure, self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=['host-1', 'host-2']) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_complete_rh_priority_recovery_flow_failure( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_rh_flow.return_value = FakeFlow mock_auto_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.HostRecoveryFailureException) # Ensures that both 'reserved_host' and 'auto' flow fails to # evacuate instances self.assertRaises( exception.HostRecoveryFailureException, 
self.taskflow_driver.execute_host_failure, self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.RH_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=['host-1', 'host-2']) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_rh_priority_recovery_flow_skip_recovery( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_rh_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.SkipHostRecoveryException) self.assertRaises( exception.SkipHostRecoveryException, self.taskflow_driver.execute_host_failure, self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.RH_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=['host-1', 'host-2']) # Ensures that 'reserved_host' flow executes but skip the host # recovery self.assertTrue(mock_rh_flow.called) # Ensures that 'auto' flow will not execute self.assertFalse(mock_auto_flow.called) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def test_auto_priority_recovery_flow_skip_recovery( self, mock_rh_flow, mock_auto_flow, mock_listener): mock_auto_flow.return_value = FakeFlow FakeFlow.run = mock.Mock( side_effect=exception.SkipHostRecoveryException) self.assertRaises( exception.SkipHostRecoveryException, self.taskflow_driver.execute_host_failure, self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY, uuidsentinel.fake_notification, reserved_host_list=['host-1', 'host-2']) # Ensures that 'auto' flow executes but skip the host recovery self.assertTrue(mock_auto_flow.called) # Ensures that 'reserved_host' flow will not execute self.assertFalse(mock_rh_flow.called) @mock.patch.object(base, 'DynamicLogListener') @mock.patch.object(host_failure, 'get_auto_flow') @mock.patch.object(host_failure, 'get_rh_flow') def 
test_rh_priority_recovery_flow_reserved_hosts_not_available( self, mock_rh_flow, mock_auto_flow, mock_listener): self.taskflow_driver.execute_host_failure( self.ctxt, 'fake_host', fields.FailoverSegmentRecoveryMethod.RH_PRIORITY, uuidsentinel.fake_notification) # Ensures that if there are no reserved_hosts for recovery # 'reserved_host' flow will not execute self.assertFalse(mock_rh_flow.called) # Ensures that 'auto' flow executes as 'reserved_host' flow fails self.assertTrue(mock_auto_flow.called) @mock.patch.object(path_based.PathBasedConnection, 'get_atoms_for_flow') @mock.patch.object(path_based.PathBasedConnection, 'get_flows_for_book') def test_get_notification_recovery_workflow_details( self, mock_get_flows_for_book, mock_get_atoms_for_flow): notification = fakes.create_fake_notification( payload={ 'event': 'LIFECYCLE', 'instance_uuid': uuidsentinel.fake_ins, 'vir_domain_event': 'STOPPED_FAILED'}, source_host_uuid=uuidsentinel.fake_host, notification_uuid=uuidsentinel.fake_notification) fd = models.FlowDetail('test', uuid=notification.notification_uuid) atom1 = models.TaskDetail('StopInstanceTask', uuid=uuidsentinel.atom_id_1) atom1.meta = { 'progress': 1.0, 'progress_details': { 'at_progress': 1.0, 'details': { 'progress_details': [ {'timestamp': '2019-03-11 05:22:20.329171', 'message': 'Stopping instance: ' '87c8ebc3-2a70-49f0-9280-d34662dc203d', 'progress': 0.0}, {'timestamp': '2019-03-11 05:22:28.902665', 'message': "Stopped instance: " "'87c8ebc3-2a70-49f0-9280-d34662dc203d'", 'progress': 1.0}]}}} atom1.state = 'SUCCESS' atom2 = models.TaskDetail('ConfirmInstanceActiveTask', uuid=uuidsentinel.atom_id_2) atom2.meta = { 'progress': 1.0, 'progress_details': { 'at_progress': 1.0, 'details': { 'progress_details': [ {'timestamp': '2019-03-11 05:22:29.597303', 'message': "Confirming instance " "'87c8ebc3-2a70-49f0-9280-d34662dc203d' " "vm_state is ACTIVE", 'progress': 0.0}, {'timestamp': '2019-03-11 05:22:31.916620', 'message': "Confirmed instance " 
"'87c8ebc3-2a70-49f0-9280-d34662dc203d'" " vm_state is ACTIVE", 'progress': 1.0}] }}} atom2.state = 'SUCCESS' atom3 = models.TaskDetail('StartInstanceTask', uuid=uuidsentinel.atom_id_3) atom3.meta = { 'progress': 1.0, 'progress_details': { 'at_progress': 1.0, 'details': {'progress_details': [ {'timestamp': '2019-03-11 05:22:29.130876', 'message': "Starting instance: " "'87c8ebc3-2a70-49f0-9280-d34662dc203d'", 'progress': 0.0}, {'timestamp': '2019-03-11 05:22:29.525882', 'message': "Instance started: " "'87c8ebc3-2a70-49f0-9280-d34662dc203d'", 'progress': 1.0}]}}} atom3.state = 'SUCCESS' def fd_generator(): yield fd def atom_detail_generator(): for atom in [atom1, atom2, atom3]: yield atom flow_details = fd_generator() atom_details = atom_detail_generator() mock_get_flows_for_book.return_value = flow_details mock_get_atoms_for_flow.return_value = atom_details driver.PERSISTENCE_BACKEND = 'memory://' progress_details = ( self.taskflow_driver.get_notification_recovery_workflow_details( self.ctxt, 'auto', notification)) # list of NotificationProgressDetails object expected_result = [] expected_result.append(( fakes.create_fake_notification_progress_details( name=atom1.name, uuid=atom1.uuid, progress=atom1.meta['progress'], state=atom1.state, progress_details=atom1.meta['progress_details'] ['details']['progress_details']))) expected_result.append(( fakes.create_fake_notification_progress_details( name=atom3.name, uuid=atom3.uuid, progress=atom3.meta['progress'], state=atom3.state, progress_details=atom3.meta['progress_details'] ['details']['progress_details']))) expected_result.append(( fakes.create_fake_notification_progress_details( name=atom2.name, uuid=atom2.uuid, progress=atom2.meta['progress'], state=atom2.state, progress_details=atom2.meta['progress_details'] ['details']['progress_details']))) self.assertIsNotNone(progress_details) mock_get_flows_for_book.assert_called_once() mock_get_atoms_for_flow.assert_called_once() self.assertObjectList(expected_result, 
progress_details) @mock.patch.object(path_based.PathBasedConnection, 'get_atoms_for_flow') @mock.patch.object(path_based.PathBasedConnection, 'get_flows_for_book') def test_get_notification_recovery_workflow_details_raises_keyerror( self, mock_get_flows_for_book, mock_get_atoms_for_flow): notification = fakes.create_fake_notification( payload={ 'event': 'LIFECYCLE', 'instance_uuid': uuidsentinel.fake_ins, 'vir_domain_event': 'STOPPED_FAILED'}, source_host_uuid=uuidsentinel.fake_host, notification_uuid=uuidsentinel.fake_notification) fd = models.FlowDetail('test', uuid=notification.notification_uuid) atom1 = models.TaskDetail('StopInstanceTask', uuid=uuidsentinel.atom_id_1) atom1.meta = { 'progress': 1.0, 'progress_details': { 'at_progress': 1.0, 'details': { 'progress_details': [ {'timestamp': '2019-03-11 05:22:20.329171', 'message': 'Stopping instance: ' '87c8ebc3-2a70-49f0-9280-d34662dc203d', 'progress': 0.0}, {'timestamp': '2019-03-11 05:22:28.902665', 'message': "Stopped instance: " "'87c8ebc3-2a70-49f0-9280-d34662dc203d'", 'progress': 1.0}]}}} atom1.state = 'SUCCESS' atom2 = models.TaskDetail('ConfirmInstanceActiveTask', uuid=uuidsentinel.atom_id_2) atom2.meta = { 'progress': 1.0, 'progress_details': { 'at_progress': 1.0, 'details': { 'progress_details': [ {'timestamp': '2019-03-11 05:22:29.597303', 'message': "Confirming instance " "'87c8ebc3-2a70-49f0-9280-d34662dc203d' " "vm_state is ACTIVE", 'progress': 0.0}, {'timestamp': '2019-03-11 05:22:31.916620', 'message': "Confirmed instance " "'87c8ebc3-2a70-49f0-9280-d34662dc203d'" " vm_state is ACTIVE", 'progress': 1.0}] }}} atom2.state = 'SUCCESS' atom3 = models.TaskDetail('StartInstanceTask', uuid=uuidsentinel.atom_id_3) atom3.state = 'RUNNING' def fd_generator(): yield fd def atom_detail_generator(): for atom in [atom1, atom2, atom3]: yield atom flow_details = fd_generator() atom_details = atom_detail_generator() mock_get_flows_for_book.return_value = flow_details mock_get_atoms_for_flow.return_value = 
atom_details driver.PERSISTENCE_BACKEND = 'memory://' progress_details = ( self.taskflow_driver.get_notification_recovery_workflow_details( self.ctxt, 'auto', notification)) # list of NotificationProgressDetails object expected_result = [] expected_result.append(( fakes.create_fake_notification_progress_details( name=atom1.name, uuid=atom1.uuid, progress=atom1.meta['progress'], state=atom1.state, progress_details=atom1.meta['progress_details'] ['details']['progress_details']))) expected_result.append(( fakes.create_fake_notification_progress_details( name=atom2.name, uuid=atom2.uuid, progress=atom2.meta['progress'], state=atom2.state, progress_details=atom2.meta['progress_details'] ['details']['progress_details']))) self.assertIsNotNone(progress_details) mock_get_flows_for_book.assert_called_once() mock_get_atoms_for_flow.assert_called_once() self.assertObjectList(expected_result, progress_details) masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/test_host_failure_flow.py0000664000175000017500000007546713656747723031213 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Unit Tests for host failure TaskFlow """ import copy from unittest import mock import ddt from masakari.compute import nova from masakari import conf from masakari import context from masakari.engine.drivers.taskflow import host_failure from masakari.engine import manager from masakari import exception from masakari import test from masakari.tests.unit import fakes CONF = conf.CONF @ddt.ddt @mock.patch.object(nova.API, "enable_disable_service") @mock.patch.object(nova.API, "lock_server") @mock.patch.object(nova.API, "unlock_server") class HostFailureTestCase(test.TestCase): def setUp(self): super(HostFailureTestCase, self).setUp() self.ctxt = context.get_admin_context() # overriding 'wait_period_after_evacuation' and # 'wait_period_after_service_update' to 2 seconds to # reduce the wait period. self.override_config("wait_period_after_evacuation", 2) self.override_config("wait_period_after_service_update", 2) self.override_config("evacuate_all_instances", False, "host_failure") self.instance_host = "fake-host" self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() def _verify_instance_evacuated(self, old_instance_list): for server in old_instance_list: instance = self.novaclient.get_server(self.ctxt, server) if getattr(instance, 'OS-EXT-STS:vm_state') in \ ['active', 'stopped', 'error']: self.assertIn(getattr(instance, 'OS-EXT-STS:vm_state'), ['active', 'stopped', 'error']) else: if getattr(instance, 'OS-EXT-STS:vm_state') == 'resized' and \ getattr(instance, 'OS-EXT-STS:power_state') != 4: self.assertEqual('active', getattr(instance, 'OS-EXT-STS:vm_state')) else: self.assertEqual('stopped', getattr(instance, 'OS-EXT-STS:vm_state')) if CONF.host_failure.ignore_instances_in_error_state and getattr( instance, 'OS-EXT-STS:vm_state') == 'error': self.assertEqual( self.instance_host, getattr( instance, 'OS-EXT-SRV-ATTR:hypervisor_hostname')) else: self.assertNotEqual( self.instance_host, getattr( instance, 'OS-EXT-SRV-ATTR:hypervisor_hostname')) def 
_test_disable_compute_service(self, mock_enable_disable): task = host_failure.DisableComputeServiceTask(self.ctxt, self.novaclient) task.execute(self.instance_host) mock_enable_disable.assert_called_once_with( self.ctxt, self.instance_host) def _test_instance_list(self, instances_evacuation_count): task = host_failure.PrepareHAEnabledInstancesTask(self.ctxt, self.novaclient) instances = task.execute(self.instance_host) instance_uuid_list = [] for instance_id in instances['instance_list']: instance = self.novaclient.get_server(self.ctxt, instance_id) if CONF.host_failure.ignore_instances_in_error_state: self.assertNotEqual("error", getattr(instance, "OS-EXT-STS:vm_state")) if not CONF.host_failure.evacuate_all_instances: self.assertTrue(instance.metadata.get('HA_Enabled', False)) instance_uuid_list.append(instance.id) self.assertEqual(instances_evacuation_count, len(instances['instance_list'])) return { "instance_list": instance_uuid_list, } def _evacuate_instances(self, instance_list, mock_enable_disable, reserved_host=None): task = host_failure.EvacuateInstancesTask( self.ctxt, self.novaclient, update_host_method=manager.update_host_method) old_instance_list = copy.deepcopy(instance_list['instance_list']) if reserved_host: task.execute(self.instance_host, instance_list['instance_list'], reserved_host=reserved_host) self.assertTrue(mock_enable_disable.called) else: task.execute( self.instance_host, instance_list['instance_list']) # make sure instance is active and has different host self._verify_instance_evacuated(old_instance_list) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_for_auto_recovery( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client self.override_config("evacuate_all_instances", True, "host_failure") # create test data self.fake_client.servers.create(id="1", host=self.instance_host, ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host) # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask instance_list = self._test_instance_list(2) # execute EvacuateInstancesTask self._evacuate_instances(instance_list, mock_enable_disable) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 2" "", 0.3), mock.call("Total HA Enabled instances count: '1'", 0.6), mock.call("Total Non-HA Enabled instances count: '1'", 0.7), mock.call("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. Total count is: '2'", 0.8), mock.call("Instances to be evacuated are: '1,2'", 1.0), mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1,2'"), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Successfully evacuate instances '1,2' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_for_reserved_host_recovery( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client self.override_config("evacuate_all_instances", True, "host_failure") self.override_config("add_reserved_host_to_aggregate", True, "host_failure") # create test data self.fake_client.servers.create(id="1", host=self.instance_host, ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host) reserved_host = fakes.create_fake_host(name="fake-reserved-host", reserved=True) self.fake_client.aggregates.create(id="1", name='fake_agg', hosts=[self.instance_host]) # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask instance_list = self._test_instance_list(2) # execute EvacuateInstancesTask with mock.patch.object(manager, "update_host_method") as mock_save: self._evacuate_instances( instance_list, mock_enable_disable, reserved_host=reserved_host.name) self.assertEqual(1, mock_save.call_count) self.assertIn(reserved_host.name, self.fake_client.aggregates.get('1').hosts) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 2" "", 0.3), mock.call("Total HA Enabled instances count: '1'", 0.6), mock.call("Total Non-HA Enabled instances count: '1'", 0.7), mock.call("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. 
Total count is: '2'", 0.8), mock.call("Instances to be evacuated are: '1,2'", 1.0), mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1,2'"), mock.call("Enabling reserved host: 'fake-reserved-host'", 0.1), mock.call('Add host fake-reserved-host to aggregate fake_agg', 0.2), mock.call('Added host fake-reserved-host to aggregate fake_agg', 0.3), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Successfully evacuate instances '1,2' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @mock.patch.object(nova.API, 'add_host_to_aggregate') @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 'update_details') @mock.patch('masakari.engine.drivers.taskflow.host_failure.LOG') def test_host_failure_flow_ignores_conflict_error( self, mock_log, _mock_notify, _mock_novaclient, mock_add_host, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client mock_add_host.side_effect = exception.Conflict self.override_config("add_reserved_host_to_aggregate", True, "host_failure") # create test data self.fake_client.servers.create(id="1", host=self.instance_host, ha_enabled=True) reserved_host = fakes.create_fake_host(name="fake-reserved-host", reserved=True) self.fake_client.aggregates.create(id="1", name='fake_agg', hosts=[self.instance_host, reserved_host.name]) expected_msg_format = ("Host '%(reserved_host)s' already has been " "added to aggregate '%(aggregate)s'.") % { 'reserved_host': 'fake-reserved-host', 'aggregate': 'fake_agg' } # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask instance_list = self._test_instance_list(1) # execute EvacuateInstancesTask with mock.patch.object(manager, "update_host_method") as mock_save: self._evacuate_instances( 
instance_list, mock_enable_disable, reserved_host=reserved_host.name) self.assertEqual(1, mock_save.call_count) self.assertIn(reserved_host.name, self.fake_client.aggregates.get('1').hosts) mock_log.info.assert_any_call(expected_msg_format) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 1" "", 0.3), mock.call("Total HA Enabled instances count: '1'", 0.6), mock.call("Instances to be evacuated are: '1'", 1.0), mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1'"), mock.call("Enabling reserved host: 'fake-reserved-host'", 0.1), mock.call('Add host fake-reserved-host to aggregate fake_agg', 0.2), mock.call("Host 'fake-reserved-host' already has been added to " "aggregate 'fake_agg'.", 1.0), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Successfully evacuate instances '1' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @ddt.data('rescued', 'paused', 'shelved', 'suspended', 'error', 'resized', 'active', 'resized', 'stopped') @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_all_instances( self, vm_state, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client # create ha_enabled test data power_state = 4 if vm_state == 'resized' else None self.fake_client.servers.create(id="1", host=self.instance_host, vm_state=vm_state, power_state=power_state, ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host, vm_state=vm_state, power_state=power_state, ha_enabled=True) instance_uuid_list = [] for instance in self.fake_client.servers.list(): instance_uuid_list.append(instance.id) instance_list = { "instance_list": instance_uuid_list, } # execute EvacuateInstancesTask self._evacuate_instances(instance_list, mock_enable_disable) # verify progress details _mock_notify.assert_has_calls([ mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1,2'"), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Successfully evacuate instances '1,2' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_ignore_error_instances( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): self.override_config("ignore_instances_in_error_state", True, "host_failure") self.override_config("evacuate_all_instances", True, "host_failure") _mock_novaclient.return_value = self.fake_client # create ha_enabled test data self.fake_client.servers.create(id="1", host=self.instance_host, vm_state='error', ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host, vm_state='active', ha_enabled=True) # execute PrepareHAEnabledInstancesTask instance_list = self._test_instance_list(1) # execute EvacuateInstancesTask self._evacuate_instances(instance_list, mock_enable_disable) # verify progress details _mock_notify.assert_has_calls([ mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 2" "", 0.3), mock.call("Ignoring recovery of HA_Enabled instance '1' as it is " "in 'error' state.", 0.4), mock.call("Total HA Enabled instances count: '1'", 0.6), mock.call("Total Non-HA Enabled instances count: '0'", 0.7), mock.call("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. Total count is: '1'", 0.8), mock.call("Instances to be evacuated are: '2'", 1.0), mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '2'"), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Successfully evacuate instances '2' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_ignore_error_instances_raise_skip_host_recovery( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): self.override_config("ignore_instances_in_error_state", True, "host_failure") self.override_config("evacuate_all_instances", False, "host_failure") _mock_novaclient.return_value = self.fake_client # create ha_enabled test data self.fake_client.servers.create(id="1", host=self.instance_host, vm_state='error', ha_enabled=True) # execute PrepareHAEnabledInstancesTask task = host_failure.PrepareHAEnabledInstancesTask(self.ctxt, self.novaclient) self.assertRaises(exception.SkipHostRecoveryException, task.execute, self.instance_host) # verify progress details _mock_notify.assert_has_calls([ mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 1" "", 0.3), mock.call("Ignoring recovery of HA_Enabled instance '1' as it is " "in 'error' state.", 0.4), mock.call("Total HA Enabled instances count: '0'", 0.6), mock.call("Skipped host 'fake-host' recovery as no instances needs" " to be evacuated", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_no_ha_enabled_instances( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(id="1", host=self.instance_host) self.fake_client.servers.create(id="2", host=self.instance_host) # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask task = host_failure.PrepareHAEnabledInstancesTask(self.ctxt, self.novaclient) self.assertRaises(exception.SkipHostRecoveryException, task.execute, self.instance_host) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 2" "", 0.3), mock.call("Total HA Enabled instances count: '0'", 0.6), mock.call("Skipped host 'fake-host' recovery as no instances needs" " to be evacuated", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 'update_details') def test_host_failure_flow_evacuation_failed( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): # overriding 'wait_period_after_power_off' to 2 seconds to reduce the # wait period, default is 180 seconds. 
self.override_config("wait_period_after_power_off", 2) _mock_novaclient.return_value = self.fake_client # create ha_enabled test data server = self.fake_client.servers.create(id="1", vm_state='active', host=self.instance_host, ha_enabled=True) instance_uuid_list = [] for instance in self.fake_client.servers.list(): instance_uuid_list.append(instance.id) instance_list = { "instance_list": instance_uuid_list, } def fake_get_server(context, host): # assume that while evacuating instance goes into error state fake_server = copy.deepcopy(server) setattr(fake_server, 'OS-EXT-STS:vm_state', "error") return fake_server with mock.patch.object(self.novaclient, "get_server", fake_get_server): # execute EvacuateInstancesTask self.assertRaises( exception.HostRecoveryFailureException, self._evacuate_instances, instance_list, mock_enable_disable) # verify progress details _mock_notify.assert_has_calls([ mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1'"), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Failed to evacuate instances '1' from host 'fake-host'" "", 0.7) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_no_instances_on_host( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client self.override_config("evacuate_all_instances", True, "host_failure") # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask task = host_failure.PrepareHAEnabledInstancesTask(self.ctxt, self.novaclient) self.assertRaises(exception.SkipHostRecoveryException, task.execute, self.instance_host) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 0" "", 0.3), mock.call("Total HA Enabled instances count: '0'", 0.6), mock.call("Total Non-HA Enabled instances count: '0'", 0.7), mock.call("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. Total count is: '0'", 0.8), mock.call("Skipped host 'fake-host' recovery as no instances needs" " to be evacuated", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_for_task_state_not_none( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client # create ha_enabled test data self.fake_client.servers.create(id="1", host=self.instance_host, vm_state='active', task_state='fake_task_state', power_state=None, ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host, vm_state='stopped', task_state='fake_task_state', power_state=None, ha_enabled=True) self.fake_client.servers.create(id="3", host=self.instance_host, vm_state='error', task_state='fake_task_state', power_state=None, ha_enabled=True) instance_uuid_list = [] for instance in self.fake_client.servers.list(): instance_uuid_list.append(instance.id) instance_list = { "instance_list": instance_uuid_list, } # execute EvacuateInstancesTask self._evacuate_instances(instance_list, mock_enable_disable) reset_calls = [('1', 'active'), ('2', 'stopped'), ('3', 'error'), ('3', 'stopped')] stop_calls = ['2', '3'] self.assertEqual(reset_calls, self.fake_client.servers.reset_state_calls) self.assertEqual(stop_calls, self.fake_client.servers.stop_calls) # verify progress details _mock_notify.assert_has_calls([ mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1,2,3'"), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Evacuation of instance started: '3'", 0.5), mock.call("Successfully evacuate instances '1,2,3' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_host_failure_flow_for_RH_recovery( self, _mock_notify, _mock_novaclient, mock_unlock, mock_lock, mock_enable_disable): _mock_novaclient.return_value = self.fake_client self.override_config("evacuate_all_instances", True, "host_failure") self.override_config("add_reserved_host_to_aggregate", True, "host_failure") # create test data self.fake_client.servers.create(id="1", host=self.instance_host, ha_enabled=True) self.fake_client.servers.create(id="2", host=self.instance_host) reserved_host = fakes.create_fake_host(name="fake-reserved-host", reserved=True) self.fake_client.aggregates.create(id="1", name='fake_agg', hosts=[self.instance_host]) # execute DisableComputeServiceTask self._test_disable_compute_service(mock_enable_disable) # execute PrepareHAEnabledInstancesTask instance_list = self._test_instance_list(2) # execute EvacuateInstancesTask with mock.patch.object(manager, "update_host_method") as mock_save: self._evacuate_instances( instance_list, mock_enable_disable, reserved_host=reserved_host.name) self.assertEqual(1, mock_save.call_count) self.assertIn(reserved_host.name, self.fake_client.aggregates.get('1').hosts) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call('Preparing instances for evacuation'), mock.call("Total instances running on failed host 'fake-host' is 2" "", 0.3), mock.call("Total HA Enabled instances count: '1'", 0.6), mock.call("Total Non-HA Enabled instances count: '1'", 0.7), mock.call("All instances (HA Enabled/Non-HA Enabled) should be " "considered for evacuation. 
Total count is: '2'", 0.8), mock.call("Instances to be evacuated are: '1,2'", 1.0), mock.call("Start evacuation of instances from failed host " "'fake-host', instance uuids are: '1,2'"), mock.call("Enabling reserved host: 'fake-reserved-host'", 0.1), mock.call('Add host fake-reserved-host to aggregate fake_agg', 0.2), mock.call('Added host fake-reserved-host to aggregate fake_agg', 0.3), mock.call("Evacuation of instance started: '1'", 0.5), mock.call("Evacuation of instance started: '2'", 0.5), mock.call("Successfully evacuate instances '1,2' from host " "'fake-host'", 0.7), mock.call('Evacuation process completed!', 1.0) ]) masakari-9.0.0/masakari/tests/unit/engine/drivers/taskflow/test_instance_failure_flow.py0000664000175000017500000002776513656747723032040 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Unit Tests for instance failure TaskFlow """ from unittest import mock from masakari.compute import nova from masakari import context from masakari.engine.drivers.taskflow import instance_failure from masakari import exception from masakari import test from masakari.tests.unit import fakes class InstanceFailureTestCase(test.TestCase): def setUp(self): super(InstanceFailureTestCase, self).setUp() self.ctxt = context.get_admin_context() self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() self.instance_id = "1" # overriding 'wait_period_after_power_off' and # 'wait_period_after_power_on' to 2 seconds to # reduce the wait period. self.override_config('wait_period_after_power_off', 2) self.override_config('wait_period_after_power_on', 2) self.override_config("process_all_instances", False, "instance_failure") def _test_stop_instance(self): task = instance_failure.StopInstanceTask(self.ctxt, self.novaclient) task.execute(self.instance_id) # verify instance is stopped instance = self.novaclient.get_server(self.ctxt, self.instance_id) self.assertEqual('stopped', getattr(instance, 'OS-EXT-STS:vm_state')) def _test_confirm_instance_is_active(self): task = instance_failure.ConfirmInstanceActiveTask(self.ctxt, self.novaclient) task.execute(self.instance_id) # verify instance is in active state instance = self.novaclient.get_server(self.ctxt, self.instance_id) self.assertEqual('active', getattr(instance, 'OS-EXT-STS:vm_state')) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_instance_failure_flow(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(self.instance_id, host="fake-host", ha_enabled=True) # test StopInstanceTask self._test_stop_instance() # test StartInstanceTask task = instance_failure.StartInstanceTask(self.ctxt, self.novaclient) task.execute(self.instance_id) # test ConfirmInstanceActiveTask self._test_confirm_instance_is_active() # verify progress details _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + self.instance_id), mock.call("Stopped instance: '" + self.instance_id + "'", 1.0), mock.call("Starting instance: '" + self.instance_id + "'"), mock.call("Instance started: '" + self.instance_id + "'", 1.0), mock.call("Confirming instance '" + self.instance_id + "' vm_state is ACTIVE"), mock.call("Confirmed instance '" + self.instance_id + "' vm_state is ACTIVE", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_instance_failure_flow_resized_instance(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(self.instance_id, host="fake-host", ha_enabled=True, vm_state="resized") # test StopInstanceTask self._test_stop_instance() # test StartInstanceTask task = instance_failure.StartInstanceTask(self.ctxt, self.novaclient) task.execute(self.instance_id) # test ConfirmInstanceActiveTask self._test_confirm_instance_is_active() # verify progress details _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + self.instance_id), mock.call("Stopped instance: '" + self.instance_id + "'", 1.0), mock.call("Starting instance: '" + self.instance_id + "'"), mock.call("Instance started: '" + self.instance_id + "'", 1.0), mock.call("Confirming instance '" + self.instance_id + "' vm_state is ACTIVE"), mock.call("Confirmed instance '" + self.instance_id + "' vm_state is ACTIVE", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_instance_failure_flow_stop_failed(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data server = self.fake_client.servers.create(self.instance_id, host="fake-host", ha_enabled=True) def fake_stop_server(context, uuid): # assume that while stopping instance goes into error state setattr(server, 'OS-EXT-STS:vm_state', "error") return server # test StopInstanceTask task = instance_failure.StopInstanceTask(self.ctxt, self.novaclient) with mock.patch.object(self.novaclient, 'stop_server', fake_stop_server): self.assertRaises( exception.InstanceRecoveryFailureException, task.execute, self.instance_id) # verify progress details _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + self.instance_id), mock.call('Failed to stop instance ' + self.instance_id, 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 'update_details') def test_instance_failure_flow_not_ha_enabled(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(self.instance_id, host="fake-host") # test StopInstanceTask task = instance_failure.StopInstanceTask(self.ctxt, self.novaclient) self.assertRaises( exception.SkipInstanceRecoveryException, task.execute, self.instance_id) # verify progress details _mock_notify.assert_has_calls([ mock.call('Skipping recovery for instance: ' + self.instance_id + ' as it is not Ha_Enabled', 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_instance_failure_flow_vm_in_paused_state(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(self.instance_id, host="fake-host", ha_enabled=True, vm_state="paused") # test StopInstanceTask task = instance_failure.StopInstanceTask(self.ctxt, self.novaclient) self.assertRaises( exception.IgnoreInstanceRecoveryException, task.execute, self.instance_id) # verify progress details _mock_notify.assert_has_calls([ mock.call("Recovery of instance '" + self.instance_id + "' is ignored as it is in 'paused' state.", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 'update_details') def test_instance_failure_flow_not_ha_enabled_but_conf_option_is_set( self, _mock_notify, _mock_novaclient): # Setting this config option to True indicates masakari has to recover # the instance irrespective of whether it is HA_Enabled or not. 
self.override_config("process_all_instances", True, "instance_failure") _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(self.instance_id, host="fake-host", vm_state="resized") # test StopInstanceTask self._test_stop_instance() # test StartInstanceTask task = instance_failure.StartInstanceTask(self.ctxt, self.novaclient) task.execute(self.instance_id) # test ConfirmInstanceActiveTask self._test_confirm_instance_is_active() # verify progress details _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + self.instance_id), mock.call("Stopped instance: '" + self.instance_id + "'", 1.0), mock.call("Starting instance: '" + self.instance_id + "'"), mock.call("Instance started: '" + self.instance_id + "'", 1.0), mock.call("Confirming instance '" + self.instance_id + "' vm_state is ACTIVE"), mock.call("Confirmed instance '" + self.instance_id + "' vm_state is ACTIVE", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') def test_instance_failure_flow_start_failed(self, _mock_notify, _mock_novaclient): _mock_novaclient.return_value = self.fake_client # create test data server = self.fake_client.servers.create(self.instance_id, host="fake-host", ha_enabled=True) # test StopInstanceTask self._test_stop_instance() def fake_start_server(context, uuid): # assume that while starting instance goes into error state setattr(server, 'OS-EXT-STS:vm_state', "error") return server # test StartInstanceTask task = instance_failure.StartInstanceTask(self.ctxt, self.novaclient) with mock.patch.object(self.novaclient, 'start_server', fake_start_server): task.execute(self.instance_id) # test ConfirmInstanceActiveTask task = instance_failure.ConfirmInstanceActiveTask(self.ctxt, self.novaclient) self.assertRaises( exception.InstanceRecoveryFailureException, task.execute, self.instance_id) # verify progress details _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + self.instance_id), mock.call("Stopped instance: '" + self.instance_id + "'", 1.0), mock.call("Starting instance: '" + self.instance_id + "'"), mock.call("Instance started: '" + self.instance_id + "'", 1.0), mock.call("Confirming instance '" + self.instance_id + "' vm_state is ACTIVE"), mock.call('Failed to start instance 1', 1.0) ]) masakari-9.0.0/masakari/tests/unit/engine/test_engine_mgr.py0000664000175000017500000016063513656747723024272 0ustar zuulzuul00000000000000# All Rights Reserved. # Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from unittest import mock from oslo_utils import importutils from oslo_utils import timeutils from masakari.compute import nova import masakari.conf from masakari import context from masakari.engine import manager from masakari.engine import utils as engine_utils from masakari import exception from masakari.objects import fields from masakari.objects import host as host_obj from masakari.objects import notification as notification_obj from masakari import rpc from masakari import test from masakari.tests.unit import fakes from masakari.tests import uuidsentinel CONF = masakari.conf.CONF NOW = timeutils.utcnow().replace(microsecond=0) def _get_vm_type_notification(status="new"): return fakes.create_fake_notification( type="VM", id=1, payload={ 'event': 'LIFECYCLE', 'instance_uuid': uuidsentinel.fake_ins, 'vir_domain_event': 'STOPPED_FAILED' }, source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status=status, notification_uuid=uuidsentinel.fake_notification) @mock.patch.object(notification_obj.Notification, "get_by_uuid") class EngineManagerUnitTestCase(test.NoDBTestCase): def setUp(self): super(EngineManagerUnitTestCase, self).setUp() rpc.init(CONF) self.engine = importutils.import_object(CONF.engine_manager) self.context = context.RequestContext() def _fake_notification_workflow(self, exc=None): if exc: return exc # else the workflow executed successfully def _get_process_type_notification(self): return fakes.create_fake_notification( type="PROCESS", id=1, payload={ 'event': 'stopped', 'process_name': 'fake_service' }, source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status="new", notification_uuid=uuidsentinel.fake_notification) def _get_compute_host_type_notification(self): return fakes.create_fake_notification( type="COMPUTE_HOST", id=1, payload={ 'event': 'stopped', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE' }, 
source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status="new", notification_uuid=uuidsentinel.fake_notification) @mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_instance_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') def test_process_notification_type_vm_success(self, mock_notify_about_notification_update, mock_save, mock_instance_failure, mock_notification_get): mock_instance_failure.side_effect = self._fake_notification_workflow() notification = _get_vm_type_notification() mock_notification_get.return_value = notification self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) mock_instance_failure.assert_called_once_with( self.context, notification.payload.get('instance_uuid'), notification.notification_uuid) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_instance_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_vm_error(self, mock_format, mock_notify_about_notification_update, mock_save, mock_instance_failure, mock_notification_get): mock_format.return_value = mock.ANY mock_instance_failure.side_effect = self._fake_notification_workflow( exc=exception.InstanceRecoveryFailureException) notification = _get_vm_type_notification() mock_notification_get.return_value = notification self.engine.process_notification(self.context, notification=notification) self.assertEqual("error", notification.status) mock_instance_failure.assert_called_once_with( self.context, notification.payload.get('instance_uuid'), notification.notification_uuid) e = exception.InstanceRecoveryFailureException('Failed to execute ' 'instance recovery workflow.') action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(notification_obj.Notification, "save") def test_process_notification_type_vm_error_event_unmatched( self, mock_save, mock_notification_get): notification = fakes.create_fake_notification( type="VM", id=1, payload={ 'event': 'fake_event', 'instance_uuid': uuidsentinel.fake_ins, 'vir_domain_event': 'fake_vir_domain_event' }, source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status="new", notification_uuid=uuidsentinel.fake_notification) mock_notification_get.return_value = notification self.engine.process_notification(self.context, notification=notification) 
self.assertEqual("ignored", notification.status) @mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_instance_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_vm_skip_recovery( self, mock_format, mock_notify_about_notification_update, mock_save, mock_instance_failure, mock_notification_get): mock_format.return_value = mock.ANY notification = _get_vm_type_notification() mock_notification_get.return_value = notification mock_instance_failure.side_effect = self._fake_notification_workflow( exc=exception.SkipInstanceRecoveryException) self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) mock_instance_failure.assert_called_once_with( self.context, notification.payload.get('instance_uuid'), notification.notification_uuid) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_process_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') def test_process_notification_type_process_event_stopped( self, mock_notify_about_notification_update, mock_notification_save, mock_process_failure, mock_host_save, mock_host_obj, mock_notification_get): notification = self._get_process_type_notification() mock_notification_get.return_value = notification mock_process_failure.side_effect = self._fake_notification_workflow() fake_host = fakes.create_fake_host() mock_host_obj.return_value = fake_host self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) mock_host_save.assert_called_once() mock_process_failure.assert_called_once_with( self.context, notification.payload.get('process_name'), fake_host.name, notification.notification_uuid) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_process_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_process_skip_recovery( self, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_process_failure, mock_host_save, mock_host_obj, mock_notification_get): mock_format.return_value = mock.ANY notification = self._get_process_type_notification() mock_notification_get.return_value = notification fake_host = fakes.create_fake_host() mock_host_obj.return_value = fake_host mock_process_failure.side_effect = self._fake_notification_workflow( exc=exception.SkipProcessRecoveryException) self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) mock_host_save.assert_called_once() action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_process_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_process_recovery_failure( self, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_process_failure, mock_host_save, mock_host_obj, mock_notification_get): mock_format.return_value = mock.ANY notification = self._get_process_type_notification() mock_notification_get.return_value = notification fake_host = fakes.create_fake_host() mock_host_obj.return_value = fake_host mock_process_failure.side_effect = self._fake_notification_workflow( exc=exception.ProcessRecoveryFailureException) self.engine.process_notification(self.context, notification=notification) self.assertEqual("error", notification.status) mock_host_save.assert_called_once() e = exception.ProcessRecoveryFailureException('Failed to execute ' 'process recovery workflow.') action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_process_failure") def test_process_notification_type_process_event_started( self, mock_process_failure, mock_notify_about_notification_update, mock_notification_save, mock_host_obj, mock_notification_get): notification = self._get_process_type_notification() mock_notification_get.return_value = notification notification.payload['event'] = 'started' fake_host = fakes.create_fake_host() mock_host_obj.return_value = fake_host self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) self.assertFalse(mock_process_failure.called) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_process_failure") def test_process_notification_type_process_event_other( self, mock_process_failure, mock_notify_about_notification_update, mock_notification_save, mock_notification_get): notification = self._get_process_type_notification() mock_notification_get.return_value = notification notification.payload['event'] = 'other' self.engine.process_notification(self.context, notification=notification) self.assertEqual("ignored", notification.status) self.assertFalse(mock_process_failure.called) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_host_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') def test_process_notification_type_compute_host_event_stopped( self, mock_notify_about_notification_update, mock_notification_save, mock_host_failure, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification mock_host_failure.side_effect = self._fake_notification_workflow() fake_host = fakes.create_fake_host() mock_get_all.return_value = None fake_host.failover_segment = fakes.create_fake_failover_segment() mock_host_obj.return_value = fake_host self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("finished", notification.status) mock_host_failure.assert_called_once_with( self.context, fake_host.name, fake_host.failover_segment.recovery_method, notification.notification_uuid, reserved_host_list=None, update_host_method=manager.update_host_method) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') 
def test_process_notification_host_failure_without_reserved_hosts( self, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): mock_format.return_value = mock.ANY fake_host = fakes.create_fake_host() fake_host.failover_segment = fakes.create_fake_failover_segment( recovery_method='reserved_host') mock_host_obj.return_value = fake_host notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("error", notification.status) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR e = exception.ReservedHostsUnavailable( 'No reserved_hosts available for evacuation.') notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_host_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') def test_process_notification_host_failure_with_reserved_hosts( self, mock_notify_about_notification_update, mock_notification_save, mock_host_failure, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): fake_host = fakes.create_fake_host() fake_host.failover_segment = fakes.create_fake_failover_segment( recovery_method='reserved_host') reserved_host_object_list = [fake_host] mock_get_all.return_value = reserved_host_object_list mock_host_obj.return_value = fake_host reserved_host_list = [host.name for host in reserved_host_object_list] notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification mock_host_failure.side_effect = self._fake_notification_workflow() self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("finished", notification.status) mock_host_failure.assert_called_once_with( self.context, fake_host.name, fake_host.failover_segment.recovery_method, notification.notification_uuid, reserved_host_list=reserved_host_list, update_host_method=manager.update_host_method) mock_get_all.assert_called_once_with(self.context, filters={ 'failover_segment_id': fake_host.failover_segment.uuid, 'reserved': True, 'on_maintenance': False}) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) 
@mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_host_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') def test_process_notification_reserved_host_failure( self, mock_notify_about_notification_update, mock_notification_save, mock_host_failure, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): fake_host = fakes.create_fake_host(reserved=True) fake_host.failover_segment = fakes.create_fake_failover_segment( recovery_method='reserved_host') reserved_host_object_list = [fake_host] mock_get_all.return_value = reserved_host_object_list mock_host_obj.return_value = fake_host notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification mock_host_failure.side_effect = self._fake_notification_workflow() reserved_host_list = [host.name for host in reserved_host_object_list] self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, 'reserved': False, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("finished", notification.status) mock_host_failure.assert_called_once_with( self.context, fake_host.name, fake_host.failover_segment.recovery_method, notification.notification_uuid, reserved_host_list=reserved_host_list, update_host_method=manager.update_host_method) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, 
phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_host_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_compute_host_recovery_exception( self, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_host_failure, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): mock_format.return_value = mock.ANY notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification fake_host = fakes.create_fake_host() mock_get_all.return_value = None fake_host.failover_segment = fakes.create_fake_failover_segment() mock_host_obj.return_value = fake_host mock_host_failure.side_effect = self._fake_notification_workflow( exc=exception.HostRecoveryFailureException) self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("error", notification.status) e = exception.HostRecoveryFailureException('Failed to execute host ' 'recovery.') action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] mock_notify_about_notification_update.assert_has_calls(notify_calls) 
@mock.patch.object(host_obj.Host, "get_by_uuid") @mock.patch.object(host_obj.Host, "save") @mock.patch.object(host_obj.Host, "update") @mock.patch.object(host_obj.HostList, "get_all") @mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_host_failure") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') def test_process_notification_type_compute_host_skip_host_recovery( self, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_host_failure, mock_get_all, mock_host_update, mock_host_save, mock_host_obj, mock_notification_get): mock_format.return_value = mock.ANY notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification fake_host = fakes.create_fake_host() mock_get_all.return_value = None fake_host.failover_segment = fakes.create_fake_failover_segment() mock_host_obj.return_value = fake_host # mock_host_failure.side_effect = str(e) mock_host_failure.side_effect = self._fake_notification_workflow( exc=exception.SkipHostRecoveryException) self.engine.process_notification(self.context, notification=notification) update_data_by_host_failure = { 'on_maintenance': True, } mock_host_update.assert_called_once_with(update_data_by_host_failure) mock_host_save.assert_called_once() self.assertEqual("finished", notification.status) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') 
@mock.patch("masakari.engine.drivers.taskflow." "TaskFlowDriver.execute_host_failure") def test_process_notification_type_compute_host_event_started( self, mock_host_failure, mock_notify_about_notification_update, mock_notification_save, mock_notification_get): notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification notification.payload['event'] = 'started' self.engine.process_notification(self.context, notification=notification) self.assertEqual("finished", notification.status) self.assertFalse(mock_host_failure.called) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch("masakari.engine.drivers.taskflow." 
"TaskFlowDriver.execute_host_failure") def test_process_notification_type_compute_host_event_other( self, mock_host_failure, mock_notify_about_notification_update, mock_notification_save, mock_notification_get): notification = self._get_compute_host_type_notification() mock_notification_get.return_value = notification notification.payload['event'] = 'other' self.engine.process_notification(self.context, notification=notification) self.assertEqual("ignored", notification.status) self.assertFalse(mock_host_failure.called) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_end = fields.EventNotificationPhase.END notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_end)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch("masakari.compute.nova.API.stop_server") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') @mock.patch("masakari.compute.nova.API.get_server") def test_process_notification_type_vm_ignore_instance_in_paused( self, mock_get_server, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_stop_server, mock_notification_get): mock_format.return_value = mock.ANY notification = _get_vm_type_notification() mock_notification_get.return_value = notification mock_get_server.return_value = fakes.FakeNovaClient.Server( id=1, uuid=uuidsentinel.fake_ins, host='fake_host', vm_state='paused', ha_enabled=True) self.engine.process_notification(self.context, notification=notification) self.assertEqual("ignored", notification.status) self.assertFalse(mock_stop_server.called) msg = ("Recovery of instance '%(instance_uuid)s' is ignored as it is " "in '%(vm_state)s' state.") % {'instance_uuid': uuidsentinel.fake_ins, 'vm_state': 'paused'} e = 
exception.IgnoreInstanceRecoveryException(msg) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] mock_notify_about_notification_update.assert_has_calls(notify_calls) @mock.patch("masakari.compute.nova.API.stop_server") @mock.patch.object(notification_obj.Notification, "save") @mock.patch.object(engine_utils, 'notify_about_notification_update') @mock.patch('traceback.format_exc') @mock.patch("masakari.compute.nova.API.get_server") def test_process_notification_type_vm_ignore_instance_in_rescued( self, mock_get_server, mock_format, mock_notify_about_notification_update, mock_notification_save, mock_stop_server, mock_notification_get): mock_format.return_value = mock.ANY notification = _get_vm_type_notification() mock_notification_get.return_value = notification mock_get_server.return_value = fakes.FakeNovaClient.Server( id=1, uuid=uuidsentinel.fake_ins, host='fake_host', vm_state='rescued', ha_enabled=True) self.engine.process_notification(self.context, notification=notification) self.assertEqual("ignored", notification.status) self.assertFalse(mock_stop_server.called) msg = ("Recovery of instance '%(instance_uuid)s' is ignored as it is " "in '%(vm_state)s' state.") % {'instance_uuid': uuidsentinel.fake_ins, 'vm_state': 'rescued'} e = exception.IgnoreInstanceRecoveryException(msg) action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase_start = fields.EventNotificationPhase.START phase_error = fields.EventNotificationPhase.ERROR notify_calls = [ mock.call(self.context, notification, action=action, phase=phase_start), mock.call(self.context, notification, action=action, phase=phase_error, exception=str(e), tb=mock.ANY)] 
mock_notify_about_notification_update.assert_has_calls(notify_calls) def test_process_notification_stop_from_recovery_failure(self, mock_get_noti): noti_new = _get_vm_type_notification() mock_get_noti.return_value = _get_vm_type_notification( status="failed") with mock.patch("masakari.engine.manager.LOG.warning") as mock_log: self.engine.process_notification(self.context, notification=noti_new) mock_log.assert_called_once() args = mock_log.call_args[0] expected_log = ("Processing of notification is skipped to avoid " "recovering from failure twice. " "Notification received is '%(uuid)s' " "and it's status is '%(new_status)s' and the " "current status of same notification in db " "is '%(old_status)s'") expected_log_args_1 = {'uuid': noti_new.notification_uuid, 'new_status': noti_new.status, 'old_status': "failed"} self.assertEqual(expected_log, args[0]) self.assertEqual(expected_log_args_1, args[1]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'DisableComputeServiceTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'PrepareHAEnabledInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'EvacuateInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.no_op.LOG') def test_host_failure_custom_flow_for_auto_recovery( self, _mock_log, _mock_task1, _mock_task2, _mock_task3, _mock_novaclient, _mock_notification_get): # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.override_config( "host_auto_failure_recovery_tasks", {'pre': ['disable_compute_service_task', 'no_op'], 'main': ['prepare_HA_enabled_instances_task'], 'post': ['evacuate_instances_task']}, "taskflow_driver_recovery_flows") expected_msg_format = "Custom task executed successfully..!!" 
self.engine.driver.execute_host_failure( self.context, 'fake_host', fields.FailoverSegmentRecoveryMethod.AUTO, uuidsentinel.fake_notification) # Ensure custom_task added to the 'host_auto_failure_recovery_tasks' # is executed. _mock_log.info.assert_called_with(expected_msg_format) @mock.patch.object(notification_obj.Notification, "save") def test_process_notification_host_failure_with_host_status_unknown( self, mock_get_noti, mock_notification_save): notification = fakes.create_fake_notification( type="COMPUTE_HOST", id=1, payload={ 'event': 'stopped', 'host_status': 'UNKNOWN', 'cluster_status': 'ONLINE' }, source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status=fields.NotificationStatus.NEW, notification_uuid=uuidsentinel.fake_notification) mock_get_noti.return_value = notification notification_new = notification.obj_clone() notification_new.status = fields.NotificationStatus.IGNORED mock_notification_save.side_effect = [notification, notification_new] with mock.patch("masakari.engine.manager.LOG.warning") as mock_log: self.engine.process_notification(self.context, notification=notification) mock_log.assert_called_once() args = mock_log.call_args[0] expected_log = ("Notification '%(uuid)s' ignored as host_status" "is '%(host_status)s'") expected_log_args_1 = { 'uuid': notification.notification_uuid, 'host_status': fields.HostStatusType.UNKNOWN} self.assertEqual(expected_log, args[0]) self.assertEqual(expected_log_args_1, args[1]) self.assertEqual( fields.NotificationStatus.IGNORED, notification.status) @mock.patch('masakari.compute.nova.novaclient') @mock.patch.object(nova.API, "enable_disable_service") @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'PrepareHAEnabledInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'EvacuateInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') @mock.patch('masakari.engine.drivers.taskflow.host_failure.LOG') def test_host_failure_flow_for_auto_recovery(self, _mock_log, _mock_notify, _mock_novaclient, _mock_enable_disable, _mock_task2, _mock_task3, _mock_notification_get): self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() self.override_config("wait_period_after_evacuation", 2) self.override_config("wait_period_after_service_update", 2) self.override_config("evacuate_all_instances", True, "host_failure") _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(id="1", host="fake-host", ha_enabled=True) self.fake_client.servers.create(id="2", host="fake-host") instance_uuid_list = [] for instance in self.fake_client.servers.list(): instance_uuid_list.append(instance.id) instance_list = { "instance_list": ','.join(instance_uuid_list), } _mock_task2.return_value = instance_list # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.engine.driver.execute_host_failure( self.context, "fake-host", fields.FailoverSegmentRecoveryMethod.AUTO, uuidsentinel.fake_notification) # make sure instance is active and has different host for server in instance_uuid_list: instance = self.novaclient.get_server(self.context, server) if CONF.host_failure.ignore_instances_in_error_state and getattr( instance, 'OS-EXT-STS:vm_state') == 'error': self.assertEqual( "fake-host", getattr( instance, 'OS-EXT-SRV-ATTR:hypervisor_hostname')) else: self.assertNotEqual( "fake-host", getattr( instance, 'OS-EXT-SRV-ATTR:hypervisor_hostname')) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 
'DisableComputeServiceTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'PrepareHAEnabledInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'EvacuateInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.no_op.LOG') def test_host_failure_custom_flow_for_rh_recovery(self, _mock_log, _mock_task1, _mock_task2, _mock_task3, _mock_novaclient, _mock_notification_get): # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.override_config( "host_rh_failure_recovery_tasks", {'pre': ['disable_compute_service_task'], 'main': [], 'post': ['no_op']}, "taskflow_driver_recovery_flows") expected_msg_format = "Custom task executed successfully..!!" self.engine.driver.execute_host_failure( self.context, 'fake_host', fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, uuidsentinel.fake_notification, update_host_method=manager.update_host_method, reserved_host_list=['host-1', 'host-2']) # Ensure custom_task added to the 'host_rh_failure_recovery_tasks' # is executed. _mock_log.info.assert_called_with(expected_msg_format) @mock.patch('masakari.compute.nova.novaclient') @mock.patch.object(nova.API, "enable_disable_service") @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'PrepareHAEnabledInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.host_failure.' 'EvacuateInstancesTask.execute') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') @mock.patch('masakari.engine.drivers.taskflow.host_failure.LOG') def test_host_failure_flow_for_rh_recovery(self, _mock_log, _mock_notify, _mock_novaclient, _mock_enable_disable, _mock_task2, _mock_task3, _mock_notification_get): self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() self.override_config("wait_period_after_evacuation", 2) self.override_config("wait_period_after_service_update", 2) self.override_config("evacuate_all_instances", True, "host_failure") _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(id="1", host="fake-host", ha_enabled=True) self.fake_client.servers.create(id="2", host="fake-host") instance_uuid_list = [] for instance in self.fake_client.servers.list(): instance_uuid_list.append(instance.id) instance_list = { "instance_list": ','.join(instance_uuid_list), } _mock_task2.return_value = instance_list # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.engine.driver.execute_host_failure( self.context, "fake-host", fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, uuidsentinel.fake_notification, update_host_method=manager.update_host_method, reserved_host_list=['host-1', 'host-2']) # make sure instance is active and has different host for server in instance_uuid_list: instance = self.novaclient.get_server(self.context, server) self.assertNotEqual( "fake-host", getattr( instance, 'OS-EXT-SRV-ATTR:hypervisor_hostname')) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.instance_failure.' 'StopInstanceTask.execute') @mock.patch('masakari.engine.drivers.taskflow.instance_failure.' 
'StartInstanceTask.execute') @mock.patch('masakari.engine.drivers.taskflow.instance_failure.' 'ConfirmInstanceActiveTask.execute') @mock.patch('masakari.engine.drivers.taskflow.no_op.LOG') def test_instance_failure_custom_flow_recovery( self, _mock_log, _mock_task1, _mock_task2, _mock_task3, _mock_novaclient, _mock_notification_get): # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.override_config( "instance_failure_recovery_tasks", {'pre': ['stop_instance_task', 'no_op'], 'main': ['start_instance_task'], 'post': ['confirm_instance_active_task']}, "taskflow_driver_recovery_flows") expected_msg_format = "Custom task executed successfully..!!" self.engine.driver.execute_instance_failure( self.context, uuidsentinel.fake_ins, uuidsentinel.fake_notification) # Ensure custom_task added to the 'instance_failure_recovery_tasks' # is executed. _mock_log.info.assert_called_with(expected_msg_format) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') @mock.patch('masakari.engine.drivers.taskflow.instance_failure.LOG') def test_instance_failure_flow_recovery(self, _mock_log, _mock_notify, _mock_novaclient, _mock_notification_get): self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() self.override_config('wait_period_after_power_off', 2) self.override_config('wait_period_after_power_on', 2) instance_id = uuidsentinel.fake_ins _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.servers.create(instance_id, host="fake-host", ha_enabled=True) # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.engine.driver.execute_instance_failure( self.context, instance_id, uuidsentinel.fake_notification) # verify instance is in active state instance = self.novaclient.get_server(self.context, instance_id) self.assertEqual('active', getattr(instance, 'OS-EXT-STS:vm_state')) _mock_notify.assert_has_calls([ mock.call('Stopping instance: ' + instance_id), mock.call("Stopped instance: '" + instance_id + "'", 1.0), mock.call("Starting instance: '" + instance_id + "'"), mock.call("Instance started: '" + instance_id + "'", 1.0), mock.call("Confirming instance '" + instance_id + "' vm_state is ACTIVE"), mock.call("Confirmed instance '" + instance_id + "' vm_state is ACTIVE", 1.0) ]) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.process_failure.' 'DisableComputeNodeTask.execute') @mock.patch('masakari.engine.drivers.taskflow.process_failure.' 
'ConfirmComputeNodeDisabledTask.execute') @mock.patch('masakari.engine.drivers.taskflow.no_op.LOG') def test_process_failure_custom_flow_recovery( self, _mock_log, _mock_task1, _mock_task2, _mock_novaclient, _mock_notification_get): # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.override_config( "process_failure_recovery_tasks", {'pre': ['disable_compute_node_task', 'no_op'], 'main': ['confirm_compute_node_disabled_task'], 'post': []}, "taskflow_driver_recovery_flows") expected_msg_format = "Custom task executed successfully..!!" self.engine.driver.execute_process_failure( self.context, 'nova-compute', 'fake_host', uuidsentinel.fake_notification, ) _mock_log.info.assert_any_call(expected_msg_format) # Ensure custom_task added to the 'process_failure_recovery_tasks' # is executed. _mock_log.info.assert_called_with(expected_msg_format) @mock.patch('masakari.compute.nova.novaclient') @mock.patch('masakari.engine.drivers.taskflow.base.MasakariTask.' 
'update_details') @mock.patch('masakari.engine.drivers.taskflow.process_failure.LOG') def test_process_failure_flow_recovery(self, _mock_log, _mock_notify, _mock_novaclient, _mock_notification_get): self.novaclient = nova.API() self.fake_client = fakes.FakeNovaClient() _mock_novaclient.return_value = self.fake_client # create test data self.fake_client.services.create("1", host="fake-host", binary="nova-compute", status="enabled") # For testing purpose setting BACKEND as memory masakari.engine.drivers.taskflow.base.PERSISTENCE_BACKEND = 'memory://' self.engine.driver.execute_process_failure( self.context, "nova-compute", "fake-host", uuidsentinel.fake_notification) # verify service is disabled self.assertTrue(self.novaclient.is_service_down(self.context, "fake-host", "nova-compute")) # verify progress details _mock_notify.assert_has_calls([ mock.call("Disabling compute service on host: 'fake-host'"), mock.call("Disabled compute service on host: 'fake-host'", 1.0), mock.call("Confirming compute service is disabled on host: " "'fake-host'"), mock.call("Confirmed compute service is disabled on host: " "'fake-host'", 1.0) ]) @mock.patch.object(notification_obj.Notification, "save") @mock.patch('masakari.engine.drivers.taskflow.driver.TaskFlowDriver.' 
'get_notification_recovery_workflow_details') def test_get_notification_recovery_workflow_details(self, mock_progress_details, mock_save, mock_notification_get): notification = fakes.create_fake_notification( type="VM", id=1, payload={ 'event': 'fake_event', 'instance_uuid': uuidsentinel.fake_ins, 'vir_domain_event': 'fake_vir_domain_event' }, source_host_uuid=uuidsentinel.fake_host, generated_time=NOW, status="new", notification_uuid=uuidsentinel.fake_notification,) mock_notification_get.return_value = notification self.engine.driver.get_notification_recovery_workflow_details( self.context, notification) mock_progress_details.assert_called_once_with( self.context, notification) masakari-9.0.0/masakari/tests/unit/engine/fake_engine.py0000664000175000017500000000353113656747723023343 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_utils import timeutils from oslo_versionedobjects import fields from masakari import objects from masakari.tests import uuidsentinel NOW = timeutils.utcnow().replace(microsecond=0) def fake_db_notification(**updates): db_notification = { "type": "VM", "id": 1, "payload": {'event': 'STOPPED', 'host_status': 'NORMAL', 'cluster_status': 'ONLINE' }, "source_host_uuid": uuidsentinel.fake_host, "generated_time": NOW, "status": "running", "notification_uuid": uuidsentinel.fake_notification, "created_at": NOW, "updated_at": None, "deleted_at": None, "deleted": 0 } for name, field in objects.Notification.fields.items(): if name in db_notification: continue if field.nullable: db_notification[name] = None elif field.default != fields.UnspecifiedDefault: db_notification[name] = field.default else: raise Exception('fake_db_notification needs help with %s' % name) if updates: db_notification.update(updates) return db_notification def fake_notification_obj(context, **updates): return objects.Notification(**updates) masakari-9.0.0/masakari/tests/unit/engine/test_utils.py0000664000175000017500000000641513656747723023313 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
import socket import testtools from unittest import mock from masakari.engine import utils as engine_utils from masakari.notifications.objects import base as notification_base from masakari.notifications.objects import exception as notification_exception from masakari.notifications.objects import notification as event_notification from masakari.objects import fields from masakari.objects import notification as notification_obj class TestApiUtils(testtools.TestCase): def setUp(self): super(TestApiUtils, self).setUp() @mock.patch.object(notification_base, 'EventType') @mock.patch.object(notification_base, 'NotificationPublisher') @mock.patch.object(event_notification, 'NotificationApiNotification') @mock.patch.object(event_notification, 'NotificationApiPayload') @mock.patch.object(notification_exception.ExceptionPayload, 'from_exc_and_traceback') def test_notify_about_notification_update( self, mock_from_exception, mock_NotificationApiPayload, mock_NotificationApiNotification, mock_NotificationPublisher, mock_EventType): mock_fault = mock.Mock() mock_from_exception.return_value = mock_fault mock_payload = mock.Mock() mock_NotificationApiPayload.return_value = mock_payload mock_engine_notification = mock.Mock() mock_NotificationApiNotification.return_value = ( mock_engine_notification) mock_engine_notification.emit.return_value = None mock_publisher = mock.Mock() mock_NotificationPublisher.return_value = mock_publisher mock_event_type = mock.Mock() mock_EventType.return_value = mock_event_type mock_context = mock.Mock() notification = notification_obj.Notification() action = fields.EventNotificationAction.NOTIFICATION_PROCESS phase = fields.EventNotificationPhase.ERROR e = Exception() engine_utils.notify_about_notification_update(mock_context, notification, action=action, phase=phase, exception=e) mock_from_exception.assert_called_once_with(e, None) mock_NotificationApiPayload.assert_called_once_with( notification=notification, fault=mock_fault) 
mock_NotificationApiNotification.assert_called_once_with( context=mock_context, priority=fields.EventNotificationPriority.ERROR, publisher=mock_publisher, event_type=mock_event_type, payload=mock_payload) mock_NotificationPublisher.assert_called_once_with( context=mock_context, host=socket.gethostname(), binary='masakari-engine') mock_engine_notification.emit.assert_called_once_with(mock_context) masakari-9.0.0/masakari/tests/unit/engine/test_rpcapi.py0000664000175000017500000000727113656747723023432 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" Unit Tests for masakari.engine.rpcapi """ import copy from unittest import mock from masakari import context from masakari.engine import rpcapi as engine_rpcapi from masakari import objects from masakari import test from masakari.tests.unit.engine import fake_engine class EngineRpcAPITestCase(test.TestCase): def setUp(self): super(EngineRpcAPITestCase, self).setUp() self.context = context.RequestContext() self.fake_notification_obj = fake_engine.fake_notification_obj( self.context) def _test_engine_api(self, method, rpc_method, server=None, fanout=False, **kwargs): rpcapi = engine_rpcapi.EngineAPI() expected_retval = 'foo' if rpc_method == 'call' else None target = { "server": server, "fanout": fanout, "version": kwargs.pop('version', rpcapi.RPC_API_VERSION) } expected_msg = copy.deepcopy(kwargs) self.fake_args = None self.fake_kwargs = None def _fake_prepare_method(*args, **kwds): for kwd in kwds: self.assertEqual(target[kwd], kwds[kwd]) return rpcapi.client def _fake_rpc_method(*args, **kwargs): self.fake_args = args self.fake_kwargs = kwargs if expected_retval: return expected_retval with mock.patch.object(rpcapi.client, "prepare") as mock_prepared: mock_prepared.side_effect = _fake_prepare_method with mock.patch.object(rpcapi.client, rpc_method) as mock_method: mock_method.side_effect = _fake_rpc_method retval = getattr(rpcapi, method)(self.context, **kwargs) self.assertEqual(expected_retval, retval) expected_args = [self.context, method, expected_msg] for arg, expected_arg in zip(self.fake_args, expected_args): self.assertEqual(expected_arg, arg) for kwarg, value in self.fake_kwargs.items(): if isinstance(value, objects.Notification): expected_back = expected_msg[kwarg].obj_to_primitive() backup = value.obj_to_primitive() self.assertEqual(expected_back, backup) else: self.assertEqual(expected_msg[kwarg], value) @mock.patch("masakari.rpc.get_client") def test_process_notification(self, mock_get_client): self._test_engine_api('process_notification', 
rpc_method='cast', notification=self.fake_notification_obj, version='1.0') @mock.patch("masakari.rpc.get_client") def test_get_notification_recovery_workflow_details(self, mock_get_client): self._test_engine_api('get_notification_recovery_workflow_details', rpc_method='call', notification=self.fake_notification_obj, version='1.1') masakari-9.0.0/masakari/tests/unit/fake_notifier.py0000664000175000017500000000544513656747723022456 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import collections
import functools

import oslo_messaging as messaging
from oslo_serialization import jsonutils

# Notifications captured by the fake notifiers, in emission order.
NOTIFICATIONS = []
VERSIONED_NOTIFICATIONS = []


def reset():
    """Forget every notification captured so far."""
    NOTIFICATIONS.clear()
    VERSIONED_NOTIFICATIONS.clear()


FakeMessage = collections.namedtuple(
    'Message',
    ['publisher_id', 'priority', 'event_type', 'payload', 'context'])


class FakeNotifier(object):
    """Test double for an oslo.messaging Notifier that records messages
    into the module-level NOTIFICATIONS list instead of sending them.
    """

    _PRIORITIES = ('debug', 'info', 'warn', 'error', 'critical')

    def __init__(self, transport, publisher_id, serializer=None):
        self.transport = transport
        self.publisher_id = publisher_id
        self._serializer = serializer or messaging.serializer.NoOpSerializer()

        # Expose one bound helper per priority, e.g. notifier.info(...),
        # each delegating to _notify with the upper-cased priority.
        for level in self._PRIORITIES:
            setattr(self, level,
                    functools.partial(self._notify, level.upper()))

    def prepare(self, publisher_id=None):
        """Return a new notifier, optionally rebound to another publisher."""
        chosen_id = self.publisher_id if publisher_id is None else publisher_id
        return self.__class__(self.transport, chosen_id,
                              serializer=self._serializer)

    def _notify(self, priority, ctxt, event_type, payload):
        serialized = self._serializer.serialize_entity(ctxt, payload)
        # NOTE(Dinesh_Bhor): simulate the kombu serializer
        # this permit to raise an exception if something have not
        # been serialized correctly
        jsonutils.to_primitive(serialized)
        # NOTE(Dinesh_Bhor): Try to serialize the context, as the rpc would.
        # An exception will be raised if something is wrong
        # with the context.
        self._serializer.serialize_context(ctxt)
        NOTIFICATIONS.append(FakeMessage(self.publisher_id, priority,
                                         event_type, serialized, ctxt))


class FakeVersionedNotifier(FakeNotifier):
    """Variant that records versioned notifications as plain dicts in the
    module-level VERSIONED_NOTIFICATIONS list.
    """

    def _notify(self, priority, ctxt, event_type, payload):
        serialized = self._serializer.serialize_entity(ctxt, payload)
        VERSIONED_NOTIFICATIONS.append({'publisher_id': self.publisher_id,
                                        'priority': priority,
                                        'event_type': event_type,
                                        'payload': serialized})
from oslo_service import loopingcall

from masakari.objects import fields
from masakari.tests.functional import base


class NotificationTestBase(base.BaseFunctionalTest):
    """Common setup and polling helpers for notification functional tests.

    Creates a failover segment with one host (the first nova hypervisor)
    and discovers the image/flavor/network needed to boot test servers.
    """

    SERVICE_TYPE = "COMPUTE"
    HOST_TYPE = "COMPUTE"
    CONTROL_ATTRIBUTES = "SSH"

    # Polling cadence (seconds) used when waiting for server status changes.
    SERVER_WAIT_INTERVAL = 1
    SERVER_WAIT_PERIOD = 300

    def setUp(self, ha_api_version="1.0", recovery_method="auto"):
        super(NotificationTestBase, self).setUp(ha_api_version=ha_api_version)
        self.recovery_method = recovery_method

        if not self.hypervisors:
            self.skipTest("Skip Test as there are no hypervisors "
                          "configured in nova")

        # Get image, flavor and network to create server
        self.image_uuids = [image.id for image in self.conn.compute.images()]
        self.flavors = [flavor.id for flavor in self.conn.compute.flavors()]
        # Falls back to '' when no network named 'private' exists; the
        # skip below then triggers.
        self.private_net = next((
            net.id for net in self.conn.network.networks()
            if net.name == 'private'), '')

        if not self.image_uuids:
            self.skipTest("Skip Test as there are no images "
                          "configured in glance")
        if not self.flavors:
            self.skipTest("Skip Test as there are no flavors "
                          "configured in nova")
        if not self.private_net:
            self.skipTest("Skip Test as there is no private network "
                          "configured in neutron")

        # Create segment
        self.segment = self.admin_conn.ha.create_segment(
            name=self.getUniqueString(),
            recovery_method=self.recovery_method,
            service_type=self.SERVICE_TYPE)

        # Create valid host
        host_name = self.hypervisors[0]['name']
        self.host = self.admin_conn.ha.create_host(
            segment_id=self.segment.uuid, name=host_name,
            type=self.HOST_TYPE,
            control_attributes=self.CONTROL_ATTRIBUTES)

        # Delete segment which delete all hosts associated with it
        self.addCleanup(self.admin_conn.ha.delete_segment, self.segment.uuid)

    def check_notification_status(self, notification, wait_interval,
                                  wait_period):
        """Poll until the notification reaches FINISHED or fail on timeout.

        :param notification: notification resource to watch
        :param wait_interval: seconds between polls
        :param wait_period: overall timeout in seconds
        """
        def wait_for_notification_status_finished():
            result = self.admin_conn.ha.get_notification(
                notification.notification_uuid)
            if result.status == fields.NotificationStatus.FINISHED:
                raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalWithTimeoutLoopingCall(
            wait_for_notification_status_finished)

        try:
            timer.start(interval=wait_interval, initial_delay=1,
                        timeout=wait_period).wait()
        except loopingcall.LoopingCallTimeOut:
            self.fail("Timed out: Notification is not processed and "
                      "it's not in the finished status")

    def check_server_status(self, server, status):
        """Poll until the server reaches ``status`` or fail on timeout.

        :param server: server resource to watch
        :param status: expected nova status string, e.g. 'ACTIVE'
        """
        def wait_for_server_status_change():
            instance = self.admin_conn.compute.get_server(server.id)
            if instance.status == status:
                raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalWithTimeoutLoopingCall(
            wait_for_server_status_change)

        try:
            timer.start(interval=self.SERVER_WAIT_INTERVAL,
                        timeout=self.SERVER_WAIT_PERIOD).wait()
        except loopingcall.LoopingCallTimeOut:
            self.fail("Timed out: Instance is not in the expected"
                      " status: %s" % status)
from oslo_utils import timeutils

from masakari.objects import fields
from masakari.tests.functional import notification_base as base


class NotificationProcessTestCase(base.NotificationTestBase):
    """Functional tests for PROCESS-type notifications (API 1.0)."""

    NOTIFICATION_TYPE = "PROCESS"
    NOTIFICATION_WAIT_INTERVAL = 1
    NOTIFICATION_WAIT_PERIOD = 120

    def setUp(self, ha_api_version="1.0"):
        super(NotificationProcessTestCase, self).setUp(ha_api_version)

    def _test_create_notification_event_stopped(self):
        # Test to create notification for process with 'STOPPED' event type
        notification = self.admin_conn.ha.create_notification(
            type=self.NOTIFICATION_TYPE,
            hostname=self.host.name,
            generated_time=timeutils.utcnow().replace(microsecond=0),
            payload={"process_name": "nova-compute",
                     "event": fields.EventType.STOPPED})

        self.check_notification_status(notification,
                                       self.NOTIFICATION_WAIT_INTERVAL,
                                       self.NOTIFICATION_WAIT_PERIOD)

        notification = self.admin_conn.ha.get_notification(
            notification.notification_uuid)
        self.assertEqual(fields.NotificationStatus.FINISHED,
                         notification.status)

        # Process recovery puts the failed host into maintenance mode.
        host = self.admin_conn.ha.get_host(self.host.uuid, self.segment.uuid)
        self.assertEqual(True, host.on_maintenance)

        # Re-enable the nova-compute service which was disabled by the
        # DisableComputeNodetask of the process recovery notification
        # created above.
        # BUGFIX: previously the method returned from inside this loop, so
        # it implicitly returned None when no matching service was found;
        # the notification is now always returned.
        for service in self.admin_conn.compute.services():
            if (service.binary == 'nova-compute' and
                    service.host == self.host.name):
                self.admin_conn.compute.enable_service(
                    service, service.host, service.binary)
                break

        return notification

    def _test_create_notification_event_start(self):
        # Test to create notification for process with 'STARTED' event type
        notification = self.admin_conn.ha.create_notification(
            type=self.NOTIFICATION_TYPE,
            hostname=self.host.name,
            generated_time=timeutils.utcnow().replace(microsecond=0),
            payload={"process_name": "nova-compute",
                     "event": fields.EventType.STARTED})

        self.check_notification_status(notification,
                                       self.NOTIFICATION_WAIT_INTERVAL,
                                       self.NOTIFICATION_WAIT_PERIOD)

        notification = self.admin_conn.ha.get_notification(
            notification.notification_uuid)
        self.assertEqual(fields.NotificationStatus.FINISHED,
                         notification.status)
        return notification

    def test_create_notification_event_stopped(self):
        # Test to create notification for process with 'STOPPED' event type
        self._test_create_notification_event_stopped()

    def test_create_notification_event_start(self):
        # Test to create notification for process with 'STARTED' event type
        self._test_create_notification_event_start()


class NotificationProcessTestCase_V1_1(NotificationProcessTestCase):
    """Same scenarios against API 1.1, which exposes recovery details."""

    def setUp(self):
        # Deliberately names NotificationProcessTestCase so its own setUp is
        # skipped and the base class is initialised with ha_api_version 1.1.
        super(NotificationProcessTestCase, self).setUp("1.1")

    def test_create_notification_event_stopped(self):
        # Test to create notification for process with 'STOPPED' event type
        notification = self._test_create_notification_event_stopped()
        self.assertIsNotNone(notification.recovery_workflow_details)
        recovery_details = notification.recovery_workflow_details
        # check the status of each task is successful
        for details in recovery_details:
            self.assertEqual("SUCCESS", details.state)

    def test_create_notification_event_start(self):
        # Test to create notification for process with 'STARTED' event type
        notification = self._test_create_notification_event_start()
        self.assertIsNotNone(notification.recovery_workflow_details)
        recovery_details = notification.recovery_workflow_details
        # check the status of each task is successful
        for details in recovery_details:
            self.assertEqual("SUCCESS", details.state)


# --- masakari/tests/functional/test_segments.py ----------------------------

from openstack import exceptions

from masakari.tests.functional import base


class TestSegments(base.BaseFunctionalTest):
    """Functional CRUD tests for failover segments."""

    def _assert_dict_contains_subset(self, expected, actual):
        # Replacement for unittest's assertDictContainsSubset, which has
        # been deprecated since Python 3.2.
        for key, value in expected.items():
            self.assertIn(key, actual)
            self.assertEqual(value, actual[key])

    def test_create_get_delete(self):
        # This test will create, get and delete a segment
        segment_data = {
            'name': self.getUniqueString(),
            'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO,
            'service_type': 'COMPUTE'}
        segment = self.admin_conn.ha.create_segment(**segment_data)
        self._assert_dict_contains_subset(segment_data, segment)

        result = self.admin_conn.ha.get_segment(segment.uuid)
        self.assertEqual(segment.name, result.name)
        self.assertEqual(segment.recovery_method, result.recovery_method)
        self.assertEqual(segment.service_type, result.service_type)

        self.admin_conn.ha.delete_segment(segment.uuid)
        self.assertRaises(exceptions.ResourceNotFound,
                          self.admin_conn.ha.get_segment, segment.uuid)

    def test_create_delete_with_host(self):
        # This test is for deleting a segment with hosts
        if not self.hypervisors:
            self.skipTest("Skipped as there are no hypervisors "
                          "configured in nova")

        segment = self.admin_conn.ha.create_segment(
            name=self.getUniqueString(),
            recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO,
            service_type='COMPUTE')

        # Create valid host
        host_name = self.hypervisors[0]['name']
        host = self.admin_conn.ha.create_host(segment_id=segment.uuid,
                                              name=host_name,
                                              type='COMPUTE',
                                              control_attributes='SSH')

        result = self.admin_conn.ha.get_segment(segment.uuid)
        self.assertEqual(segment.name, result.name)

        # Delete segment, which should delete hosts as well
        self.admin_conn.ha.delete_segment(segment['uuid'])
        self.assertRaises(exceptions.ResourceNotFound,
                          self.admin_conn.ha.get_segment, segment.uuid)
        self.assertRaises(exceptions.ResourceNotFound,
                          self.admin_conn.ha.get_host, host.uuid,
                          segment.uuid)

    def test_list(self):
        # This test is for listing all segments
        segment_data_1 = {
            'name': self.getUniqueString(),
            'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO,
            'service_type': 'COMPUTE'}
        segment_data_2 = {
            'name': self.getUniqueString(),
            'recovery_method':
                fields.FailoverSegmentRecoveryMethod.RESERVED_HOST,
            'service_type': 'COMPUTE'}

        # Create segments
        segment_1 = self.admin_conn.ha.create_segment(**segment_data_1)
        segment_2 = self.admin_conn.ha.create_segment(**segment_data_2)

        # Delete segments
        self.addCleanup(self.admin_conn.ha.delete_segment, segment_1.uuid)
        self.addCleanup(self.admin_conn.ha.delete_segment, segment_2.uuid)

        segments = self.admin_conn.ha.segments()
        # assertItemsEqual was removed from unittest in Python 3;
        # assertCountEqual is its direct replacement.
        self.assertCountEqual([segment_1, segment_2], segments)

    def test_list_with_filter(self):
        # This test is for listing segments using filters
        segment_data_1 = {
            'name': self.getUniqueString(),
            'recovery_method': fields.FailoverSegmentRecoveryMethod.AUTO,
            'service_type': 'COMPUTE'}
        segment_data_2 = {
            'name': self.getUniqueString(),
            'recovery_method':
                fields.FailoverSegmentRecoveryMethod.RESERVED_HOST,
            'service_type': 'COMPUTE'}

        # Create segments
        segment_1 = self.admin_conn.ha.create_segment(**segment_data_1)
        segment_2 = self.admin_conn.ha.create_segment(**segment_data_2)

        # Delete segments
        self.addCleanup(self.admin_conn.ha.delete_segment, segment_1.uuid)
        self.addCleanup(self.admin_conn.ha.delete_segment, segment_2.uuid)

        for seg_object in self.admin_conn.ha.segments(
                recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO):
            self._assert_dict_contains_subset(segment_data_1, seg_object)

        for seg_object in self.admin_conn.ha.segments(
                recovery_method=fields.FailoverSegmentRecoveryMethod.
                RESERVED_HOST):
            self._assert_dict_contains_subset(segment_data_2, seg_object)

    def test_update_with_host(self):
        # This test is for updating segment with host
        if not self.hypervisors:
            self.skipTest("Skipped as there are no hypervisors "
                          "configured in nova")

        segment = self.admin_conn.ha.create_segment(
            name=self.getUniqueString(),
            recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO,
            service_type='COMPUTE')
        # Delete segment
        self.addCleanup(self.admin_conn.ha.delete_segment, segment.uuid)

        # Create valid host
        host_name = self.hypervisors[0]['name']
        self.admin_conn.ha.create_host(segment_id=segment.uuid,
                                       name=host_name,
                                       type='COMPUTE',
                                       control_attributes='SSH')

        # Update segment
        segment_1 = self.admin_conn.ha.update_segment(
            segment.uuid,
            name=self.getUniqueString(),
            recovery_method=fields.FailoverSegmentRecoveryMethod.
            RESERVED_HOST,
            service_type='CONTROLLER')

        result = self.admin_conn.ha.get_segment(segment.uuid)
        self.assertEqual(segment_1.name, result.name)
        self.assertEqual(segment_1.recovery_method, result.recovery_method)
        self.assertEqual(segment_1.service_type, result.service_type)
# See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_utils import timeutils

from masakari.objects import fields
from masakari.tests.functional import notification_base as base


class NotificationVMTestCase(base.NotificationTestBase):
    """Functional tests for VM-type notifications (API 1.0)."""

    NOTIFICATION_TYPE = "VM"
    NOTIFICATION_WAIT_INTERVAL = 1
    NOTIFICATION_WAIT_PERIOD = 600

    def setUp(self, ha_api_version="1.0"):
        super(NotificationVMTestCase, self).setUp(ha_api_version)

    def _test_create_notification(self):
        """Boot a server, stop it, then recover it via a VM notification.

        :returns: the processed notification resource
        """
        # Create server
        server = self.conn.compute.create_server(
            name='masakari_test',
            flavorRef=self.flavors[0],
            imageRef=self.image_uuids[0],
            networks=[{'uuid': self.private_net}],
            metadata={'HA_Enabled': 'True'})
        self.addCleanup(self.conn.compute.delete_server, server)
        self.check_server_status(server, 'ACTIVE')

        self.admin_conn.compute.stop_server(server.id)
        self.check_server_status(server, 'SHUTOFF')

        notification = self.admin_conn.ha.create_notification(
            type=self.NOTIFICATION_TYPE,
            hostname=self.host.name,
            generated_time=timeutils.utcnow().replace(microsecond=0),
            payload={"instance_uuid": server.id,
                     "vir_domain_event": "STOPPED_FAILED",
                     "event": "LIFECYCLE"})

        self.check_notification_status(notification,
                                       self.NOTIFICATION_WAIT_INTERVAL,
                                       self.NOTIFICATION_WAIT_PERIOD)

        # CONSISTENCY FIX: the original used the 'instance_ha' proxy here
        # while every other call in the functional suite uses 'ha'.
        notification = self.admin_conn.ha.get_notification(
            notification.notification_uuid)

        result = self.admin_conn.compute.get_server(server.id)

        self.assertEqual(fields.NotificationStatus.FINISHED,
                         notification.status)
        # The VM recovery workflow should have restarted the stopped server.
        self.assertEqual('ACTIVE', result.status)

        return notification

    def test_create_notification(self):
        # Test to create notification for VM notification type
        self._test_create_notification()


class NotificationVMTestCase_V1_1(NotificationVMTestCase):
    """Same scenario against API 1.1, which exposes recovery details."""

    def setUp(self):
        # Deliberately names NotificationVMTestCase so its own setUp is
        # skipped and the base class is initialised with ha_api_version 1.1.
        super(NotificationVMTestCase, self).setUp("1.1")

    def test_create_notification(self):
        notification = self._test_create_notification()
        self.assertIsNotNone(notification.recovery_workflow_details)
        recovery_details = notification.recovery_workflow_details
        # check the status of each task is successful
        for details in recovery_details:
            self.assertEqual("SUCCESS", details.state)
TEST_CLOUD_NAME = os.getenv('OS_CLOUD', 'devstack-admin')


class BaseFunctionalTest(base.TestCase):
    """Base class for masakari functional tests.

    Builds an admin connection (pinned to a ha API microversion) plus a
    regular user connection, and discovers the available hypervisors.
    """

    def setUp(self, ha_api_version="1.0"):
        super(BaseFunctionalTest, self).setUp()
        # Route openstacksdk and keystoneauth debug logging to stdout so
        # HTTP-level traces appear in the test output.
        _log_stream = sys.stdout
        handler = logging.StreamHandler(_log_stream)
        formatter = logging.Formatter('%(asctime)s %(name)-32s %(message)s')
        handler.setFormatter(formatter)
        logger = logging.getLogger('openstack')
        logger.setLevel(logging.DEBUG)
        logger.addHandler(handler)
        # Enable HTTP level tracing
        logger = logging.getLogger('keystoneauth')
        logger.setLevel(logging.DEBUG)
        logger.addHandler(handler)
        logger.propagate = False

        # Admin connection, pinned to the requested ha API microversion.
        config = openstack.config.get_cloud_region(cloud=TEST_CLOUD_NAME)
        self.admin_conn = connection.Connection(
            region_name=config.region_name,
            auth=config.auth,
            ha_api_version=ha_api_version)

        # NOTE(review): this reads the same OS_CLOUD variable used for
        # TEST_CLOUD_NAME but with a different default ('devstack'); when
        # OS_CLOUD is set, both connections point at the same cloud —
        # presumably intentional, but worth confirming.
        devstack_user = os.getenv('OS_CLOUD', 'devstack')
        devstack_region = openstack.config.get_cloud_region(
            cloud=devstack_user)
        self.conn = connection.Connection(config=devstack_region)

        self.hypervisors = self._hypervisors()

    def _hypervisors(self):
        # Lists hypervisors via a fresh admin connection built from config.
        hypervisors = connection.Connection.list_hypervisors(
            connection.from_config(cloud_name=TEST_CLOUD_NAME))
        return hypervisors
import ddt

from masakari.objects import fields
from masakari.tests.functional import base


@ddt.ddt
class TestHosts(base.BaseFunctionalTest):
    """Functional CRUD tests for hosts within a failover segment."""

    def setUp(self):
        super(TestHosts, self).setUp()
        if not self.hypervisors:
            self.skipTest("Skipped as there are no hypervisors "
                          "configured in nova")

        # Create segment
        self.segment = self.admin_conn.ha.create_segment(
            name=self.getUniqueString(),
            recovery_method=fields.FailoverSegmentRecoveryMethod.AUTO,
            service_type='COMPUTE')
        # Delete segment which deletes host/s associated with it
        self.addCleanup(self.admin_conn.ha.delete_segment, self.segment.uuid)

    def _assert_dict_contains_subset(self, expected, actual):
        # Replacement for unittest's assertDictContainsSubset, which has
        # been deprecated since Python 3.2.
        for key, value in expected.items():
            self.assertIn(key, actual)
            self.assertEqual(value, actual[key])

    def test_create_get(self):
        # This test is for testing hosts create/get
        # Create valid host
        host_name = self.hypervisors[0]['name']
        host_data = {'name': host_name,
                     'type': 'COMPUTE',
                     'on_maintenance': False,
                     'reserved': False,
                     'control_attributes': 'SSH'}
        host = self.admin_conn.ha.create_host(self.segment.uuid, **host_data)
        self._assert_dict_contains_subset(host_data, host)

        result = self.admin_conn.ha.get_host(host.uuid, self.segment.uuid)
        self.assertEqual('COMPUTE', result.type)
        self.assertEqual(False, result.on_maintenance)
        self.assertEqual(False, result.reserved)
        self.assertEqual('SSH', result.control_attributes)

    def test_list(self):
        # This test is for testing host/s creation and listing the same.
        expected_hosts = []
        for host in self.hypervisors:
            host_obj = self.admin_conn.ha.create_host(
                segment_id=self.segment.uuid,
                name=host.name,
                type='COMPUTE',
                on_maintenance=False,
                reserved=False,
                control_attributes='SSH')
            # Deleting 'segment_id' as in GET list call of host 'segment_id'
            # is not there in response
            del host_obj['segment_id']
            expected_hosts.append(host_obj)

        hosts = self.admin_conn.ha.hosts(self.segment.uuid)
        # assertItemsEqual was removed from unittest in Python 3;
        # assertCountEqual is its direct replacement.
        self.assertCountEqual(expected_hosts, hosts)

    @ddt.data(
        {'on_maintenance': False, 'host_type': 'COMPUTE', 'reserved': False,
         'control_attributes': 'SSH'},
        {'on_maintenance': True, 'host_type': 'CONTROLLER', 'reserved': True,
         'control_attributes': 'TCP'}
    )
    @ddt.unpack
    def test_create_list_with_filter(self, on_maintenance, host_type,
                                     reserved, control_attributes):
        # This test is for testing host/s creation and listing
        # the same based on filters.
        if len(self.hypervisors) == 1:
            self.skipTest("Skipped as there is only one hypervisor "
                          "configured in nova")

        host_data_1 = {'name': self.hypervisors[0].name,
                       'type': 'COMPUTE',
                       'on_maintenance': False,
                       'reserved': False,
                       'control_attributes': 'SSH'}
        host_data_2 = {'name': self.hypervisors[1].name,
                       'type': 'CONTROLLER',
                       'on_maintenance': True,
                       'reserved': True,
                       'control_attributes': 'TCP'}
        self.admin_conn.ha.create_host(self.segment.uuid, **host_data_1)
        self.admin_conn.ha.create_host(self.segment.uuid, **host_data_2)

        expected_host_data = {'on_maintenance': on_maintenance,
                              'type': host_type,
                              'reserved': reserved,
                              'control_attributes': control_attributes}

        # Returns list of hosts based on filters
        for host in self.admin_conn.ha.hosts(self.segment.uuid,
                                             on_maintenance=on_maintenance,
                                             type=host_type,
                                             reserved=reserved):
            self._assert_dict_contains_subset(expected_host_data, host)

    def test_update_get_delete(self):
        # This test is for updating created host and deletion of same
        host_name = self.hypervisors[0]['name']
        # NOTE(review): on_maintenance/reserved are sent as the strings
        # 'False'/'True', relying on the API to coerce them — left
        # unchanged to preserve what the test actually exercises.
        host = self.admin_conn.ha.create_host(segment_id=self.segment.uuid,
                                              name=host_name,
                                              on_maintenance='False',
                                              reserved='False',
                                              type='COMPUTE',
                                              control_attributes='SSH')

        self.admin_conn.ha.update_host(host['uuid'],
                                       segment_id=self.segment.uuid,
                                       on_maintenance='True',
                                       control_attributes='TCP',
                                       reserved='True')

        result = self.admin_conn.ha.get_host(host.uuid,
                                             host.failover_segment_id)
        # Confirm host update
        self.assertEqual(True, result.on_maintenance)
        self.assertEqual(True, result.reserved)
        self.assertEqual('TCP', result.control_attributes)

    def test_update_host_name(self):
        # This test is for updating host name
        if len(self.hypervisors) == 1:
            self.skipTest("Skipped as there is only one hypervisor "
                          "configured in nova")

        host = self.admin_conn.ha.create_host(
            segment_id=self.segment.uuid,
            name=self.hypervisors[0]['name'],
            type='COMPUTE',
            control_attributes='SSH')

        # Update host name
        updated_host = self.admin_conn.ha.update_host(
            host['uuid'],
            segment_id=self.segment.uuid,
            name=self.hypervisors[1]['name'])

        result = self.admin_conn.ha.get_host(host.uuid,
                                             host.failover_segment_id)
        self.assertEqual(result.name, updated_host.name)
import sys


class UUIDSentinels(object):
    """Attribute-access factory for stable, named test UUIDs.

    Accessing ``uuidsentinel.foo`` generates a UUID on first use and
    returns that same value on every later access of the same name.
    """

    def __init__(self):
        # Imported lazily so merely importing this module does not pull in
        # oslo_utils at interpreter start-up.
        from oslo_utils import uuidutils
        self._uuid_module = uuidutils
        self._sentinels = {}

    def __getattr__(self, name):
        # Underscored names would shadow the internals above; refuse them.
        if name.startswith('_'):
            raise ValueError('Sentinels must not start with _')
        if name not in self._sentinels:
            self._sentinels[name] = self._uuid_module.generate_uuid()
        return self._sentinels[name]


# Replace the module object itself with a singleton instance so that
# ``from masakari.tests import uuidsentinel; uuidsentinel.anything`` works.
sys.modules[__name__] = UUIDSentinels()


# --- masakari/tests/fixtures.py ---------------------------------------------
# Copyright (c) 2016 NTT DATA
# Licensed under the Apache License, Version 2.0; see
# http://www.apache.org/licenses/LICENSE-2.0 for the full text.

"""Fixtures for Masakari tests."""

from __future__ import absolute_import

import fixtures
from oslo_config import cfg

from masakari.db import migration
from masakari.db.sqlalchemy import api as session
from masakari import exception

CONF = cfg.CONF
# Cached SQL dump of the migrated schema, keyed by database name.
DB_SCHEMA = {'main': ""}
# Guard so session.configure() runs at most once per process.
SESSION_CONFIGURED = False


class Timeout(fixtures.Fixture):
    """Setup per test timeouts.

    In order to avoid test deadlocks we support setting up a test
    timeout parameter read from the environment. In almost all
    cases where the timeout is reached this means a deadlock.

    A class level TIMEOUT_SCALING_FACTOR also exists, which allows
    extremely long tests to specify they need more time.
    """

    def __init__(self, timeout, scaling=1):
        super(Timeout, self).__init__()
        try:
            self.test_timeout = int(timeout)
        except ValueError:
            # If timeout value is invalid do not set a timeout.
            self.test_timeout = 0
        if scaling >= 1:
            self.test_timeout *= scaling
        else:
            raise ValueError('scaling value must be >= 1')

    def setUp(self):
        super(Timeout, self).setUp()
        if self.test_timeout > 0:
            # gentle=True raises in-test instead of killing the process.
            self.useFixture(fixtures.Timeout(self.test_timeout, gentle=True))


class BannedDBSchemaOperations(fixtures.Fixture):
    """Ban some operations for migrations"""

    def __init__(self, banned_resources=None):
        super(BannedDBSchemaOperations, self).__init__()
        self._banned_resources = banned_resources or []

    @staticmethod
    def _explode(resource, op):
        # Any banned sqlalchemy drop/alter call lands here and raises.
        raise exception.DBNotAllowed(
            'Operation %s.%s() is not allowed in a database migration' % (
                resource, op))

    def setUp(self):
        super(BannedDBSchemaOperations, self).setUp()
        # Monkey-patch drop/alter on each banned sqlalchemy resource so any
        # attempt to use them during a migration raises DBNotAllowed.
        for thing in self._banned_resources:
            self.useFixture(fixtures.MonkeyPatch(
                'sqlalchemy.%s.drop' % thing,
                lambda *a, **k: self._explode(thing, 'drop')))
            self.useFixture(fixtures.MonkeyPatch(
                'sqlalchemy.%s.alter' % thing,
                lambda *a, **k: self._explode(thing, 'alter')))


class DatabasePoisonFixture(fixtures.Fixture):
    # Fails any test that touches oslo_db session machinery while claiming
    # not to use the database.

    def setUp(self):
        super(DatabasePoisonFixture, self).setUp()
        self.useFixture(fixtures.MonkeyPatch(
            'oslo_db.sqlalchemy.enginefacade._TransactionFactory.'
            '_create_session',
            self._poison_configure))

    def _poison_configure(self, *a, **k):
        raise Exception('This test uses methods that set internal oslo_db '
                        'state, but it does not claim to use the database. '
                        'This will conflict with the setup of tests that '
                        'do use the database and cause failures later.')


class Database(fixtures.Fixture):

    def __init__(self, database='main', connection=None):
        """Create a database fixture.

        :param database: The type of database, 'main'
        :param connection: The connection string to use
        """
        super(Database, self).__init__()
        global SESSION_CONFIGURED
        if not SESSION_CONFIGURED:
            session.configure(CONF)
            SESSION_CONFIGURED = True
        self.database = database
        if connection is not None:
            ctxt_mgr = session.create_context_manager(
                connection=connection)
            facade = ctxt_mgr.get_legacy_facade()
            self.get_engine = facade.get_engine
        else:
            self.get_engine = session.get_engine

    def _cache_schema(self):
        # Run migrations once and cache the resulting schema as SQL text;
        # later resets replay the dump instead of re-running migrations.
        global DB_SCHEMA
        if not DB_SCHEMA[self.database]:
            engine = self.get_engine()
            conn = engine.connect()
            migration.db_sync()
            DB_SCHEMA[self.database] = "".join(
                line for line in conn.connection.iterdump())
            engine.dispose()

    def cleanup(self):
        engine = self.get_engine()
        engine.dispose()

    def reset(self):
        # Rebuild a pristine schema from the cached dump before each test.
        self._cache_schema()
        engine = self.get_engine()
        engine.dispose()
        conn = engine.connect()
        conn.connection.executescript(DB_SCHEMA[self.database])

    def setUp(self):
        super(Database, self).setUp()
        self.reset()
        self.addCleanup(self.cleanup)
import os from oslo_serialization import jsonutils def _resolve_ref(ref, base_path): file_path, _, json_path = ref.partition('#') if json_path: raise NotImplementedError('JSON refs with JSON path after the "#" is ' 'not yet supported') path = os.path.join(base_path, file_path) # binary mode is needed due to bug/1515231 with open(path, 'r+b') as f: ref_value = jsonutils.load(f) base_path = os.path.dirname(path) res = resolve_refs(ref_value, base_path) return res def resolve_refs(obj_with_refs, base_path): if isinstance(obj_with_refs, list): for i, item in enumerate(obj_with_refs): obj_with_refs[i] = resolve_refs(item, base_path) elif isinstance(obj_with_refs, dict): if '$ref' in obj_with_refs.keys(): ref = obj_with_refs.pop('$ref') resolved_ref = _resolve_ref(ref, base_path) # the rest of the ref dict contains overrides for the ref. Resolve # refs in the overrides then apply those overrides recursively # here. resolved_overrides = resolve_refs(obj_with_refs, base_path) _update_dict_recursively(resolved_ref, resolved_overrides) return resolved_ref else: for key, value in obj_with_refs.items(): obj_with_refs[key] = resolve_refs(value, base_path) else: # scalar, nothing to do pass return obj_with_refs def _update_dict_recursively(d, update): """Update dict d recursively with data from dict update""" for k, v in update.items(): if k in d and isinstance(d[k], dict) and isinstance(v, dict): _update_dict_recursively(d[k], v) else: d[k] = v masakari-9.0.0/masakari/tests/base.py0000664000175000017500000000131713656747723017576 0ustar zuulzuul00000000000000# -*- coding: utf-8 -*- # Copyright (c) 2016 NTT Data. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
class TestCase(base.BaseTestCase):
    """Test case base class for all unit tests.

    Thin wrapper over oslotest's BaseTestCase; masakari unit tests inherit
    from this so project-wide test behavior can be adjusted in one place.
    """
class Server(service.ServiceBase):
    """Server class to manage a WSGI server, serving a WSGI application."""

    # Default greenthread pool size, read from config at class-definition time.
    default_pool_size = CONF.wsgi.default_pool_size

    def __init__(self, name, app, host='0.0.0.0', port=0, pool_size=None,
                 protocol=eventlet.wsgi.HttpProtocol, backlog=128,
                 use_ssl=False, max_url_len=None):
        """Initialize, but do not start, a WSGI server.

        :param name: Pretty name for logging.
        :param app: The WSGI application to serve.
        :param host: IP address to serve the application.
        :param port: Port number to server the application.
        :param pool_size: Maximum number of eventlets to spawn concurrently.
        :param backlog: Maximum number of queued connections.
        :param max_url_len: Maximum length of permitted URLs.
        :returns: None
        :raises: masakari.exception.InvalidInput
        """
        # Allow operators to customize http requests max header line size.
        eventlet.wsgi.MAX_HEADER_LINE = CONF.wsgi.max_header_line
        self.name = name
        self.app = app
        self._server = None
        self._protocol = protocol
        self.pool_size = pool_size or self.default_pool_size
        self._pool = eventlet.GreenPool(self.pool_size)
        self._logger = logging.getLogger("masakari.%s.wsgi.server" % self.name)
        self._use_ssl = use_ssl
        self._max_url_len = max_url_len
        # A configured timeout of 0 means "no timeout" to eventlet (None).
        self.client_socket_timeout = CONF.wsgi.client_socket_timeout or None

        if backlog < 1:
            raise exception.InvalidInput(
                reason=_('The backlog must be more than 0'))

        bind_addr = (host, port)
        # Resolve the bind address; on failure fall back to IPv4 with the
        # original (host, port) pair.
        try:
            info = socket.getaddrinfo(bind_addr[0],
                                      bind_addr[1],
                                      socket.AF_UNSPEC,
                                      socket.SOCK_STREAM)[0]
            family = info[0]
            bind_addr = info[-1]
        except Exception:
            family = socket.AF_INET

        try:
            self._socket = eventlet.listen(bind_addr, family, backlog=backlog)
        except EnvironmentError:
            LOG.error("Could not bind to %(host)s:%(port)d",
                      {'host': host, 'port': port})
            raise

        # Record the actual bound address (port may have been 0 = ephemeral).
        (self.host, self.port) = self._socket.getsockname()[0:2]
        LOG.info("%(name)s listening on %(host)s:%(port)d",
                 {'name': self.name, 'host': self.host, 'port': self.port})
    def start(self):
        """Start serving a WSGI application.

        :returns: None
        """
        # The server socket object will be closed after server exits,
        # but the underlying file descriptor will remain open, and will
        # give bad file descriptor error. So duplicating the socket object,
        # to keep file descriptor usable.
        dup_socket = self._socket.dup()
        dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # sockets can hang around forever without keepalive
        dup_socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)

        # This option isn't available in the OS X version of eventlet
        if hasattr(socket, 'TCP_KEEPIDLE'):
            dup_socket.setsockopt(socket.IPPROTO_TCP,
                                  socket.TCP_KEEPIDLE,
                                  CONF.wsgi.tcp_keepidle)

        if self._use_ssl:
            try:
                ca_file = CONF.wsgi.ssl_ca_file
                cert_file = CONF.wsgi.ssl_cert_file
                key_file = CONF.wsgi.ssl_key_file

                # Fail early with a clear message if any configured file is
                # missing on disk.
                if cert_file and not os.path.exists(cert_file):
                    raise RuntimeError(
                        _("Unable to find cert_file : %s") % cert_file)

                if ca_file and not os.path.exists(ca_file):
                    raise RuntimeError(
                        _("Unable to find ca_file : %s") % ca_file)

                if key_file and not os.path.exists(key_file):
                    raise RuntimeError(
                        _("Unable to find key_file : %s") % key_file)

                if self._use_ssl and (not cert_file or not key_file):
                    raise RuntimeError(
                        _("When running server in SSL mode, you must "
                          "specify both a cert_file and key_file "
                          "option value in your configuration file"))

                ssl_kwargs = {
                    'server_side': True,
                    'certfile': cert_file,
                    'keyfile': key_file,
                    'cert_reqs': ssl.CERT_NONE,
                }

                # Client certificate verification is only requested when a CA
                # bundle is configured.
                if CONF.wsgi.ssl_ca_file:
                    ssl_kwargs['ca_certs'] = ca_file
                    ssl_kwargs['cert_reqs'] = ssl.CERT_REQUIRED

                dup_socket = eventlet.wrap_ssl(dup_socket,
                                               **ssl_kwargs)
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.error("Failed to start %(name)s on %(host)s"
                              ":%(port)d with SSL support",
                              {'name': self.name, 'host': self.host,
                               'port': self.port})

        wsgi_kwargs = {
            'func': eventlet.wsgi.server,
            'sock': dup_socket,
            'site': self.app,
            'protocol': self._protocol,
            'custom_pool': self._pool,
            'log': self._logger,
            'log_format': CONF.wsgi.wsgi_log_format,
            'debug': False,
            'keepalive': CONF.wsgi.keep_alive,
            'socket_timeout': self.client_socket_timeout
        }

        if self._max_url_len:
            wsgi_kwargs['url_length_limit'] = self._max_url_len

        # The server runs in a green thread; stop()/wait() manage it later.
        self._server = utils.spawn(**wsgi_kwargs)
class Application(object):
    """Base WSGI application wrapper.  Subclasses need to implement __call__."""

    @classmethod
    def factory(cls, global_config, **local_config):
        """Paste app factory entry point.

        Used for paste app factories in paste.deploy config files.  Any
        local configuration (values under the ``[app:APPNAME]`` section of
        the paste config) is forwarded to ``__init__`` as keyword
        arguments, e.g.::

            [app:wadl]
            latest_version = 1.3
            paste.app_factory = masakari.api.fancy_api:Wadl.factory

        results in ``Wadl(latest_version='1.3')``.  Subclasses may override
        this, but the kwarg passing usually makes that unnecessary.
        """
        return cls(**local_config)

    def __call__(self, environ, start_response):
        r"""Entry point for WSGI requests; subclasses must override.

        A typical implementation::

            @webob.dec.wsgify(RequestClass=Request)
            def __call__(self, req):
                # any of: a plain string, a webob exception, a webob
                # Response, another WSGI app, or
                # req.get_response(self.application)
                res = 'message\n'
                return res   # ... or set req.response and return None.

        See the end of http://pythonpaste.org/webob/modules/dec.html for
        more info.
        """
        raise NotImplementedError(_('You must implement __call__'))
class Debug(Middleware):
    """Helper class for debugging a WSGI application.

    Can be inserted into any WSGI application chain to get information
    about the request and response.
    """

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        print(('*' * 40) + ' REQUEST ENVIRON')
        for env_key, env_value in req.environ.items():
            print(env_key, '=', env_value)
        print()

        resp = req.get_response(self.application)

        print(('*' * 40) + ' RESPONSE HEADERS')
        for (hdr_name, hdr_value) in resp.headers.items():
            print(hdr_name, '=', hdr_value)
        print()

        # Wrap the body iterator so its chunks are echoed as they stream out.
        resp.app_iter = self.print_generator(resp.app_iter)
        return resp

    @staticmethod
    def print_generator(app_iter):
        """Iterator that prints the contents of a wrapper string."""
        print(('*' * 40) + ' BODY')
        for part in app_iter:
            sys.stdout.write(part)
            sys.stdout.flush()
            yield part
        print()
class Loader(object):
    """Used to load WSGI applications from paste configurations."""

    def __init__(self, config_path=None):
        """Initialize the loader, and attempt to find the config.

        Resolution order: an absolute path is checked directly; a relative
        path (or the configured default) is searched for via CONF.find_file.

        :param config_path: Full or relative path to the paste config.
        :returns: None
        :raises: masakari.exception.ConfigNotFound if no config is found
        """
        self.config_path = None

        config_path = config_path or CONF.wsgi.api_paste_config
        if not os.path.isabs(config_path):
            self.config_path = CONF.find_file(config_path)
        elif os.path.exists(config_path):
            self.config_path = config_path

        if not self.config_path:
            raise exception.ConfigNotFound(path=config_path)

    def load_app(self, name):
        """Return the paste URLMap wrapped WSGI application.

        :param name: Name of the application to load.
        :returns: Paste URLMap object wrapping the requested application.
        :raises: `masakari.exception.PasteAppNotFound`
        """
        try:
            LOG.debug("Loading app %(name)s from %(path)s",
                      {'name': name, 'path': self.config_path})
            return deploy.loadapp("config:%s" % self.config_path, name=name)
        except LookupError:
            # paste raises LookupError when the named app is not defined in
            # the config; translate to a masakari-specific exception.
            LOG.exception("Couldn't lookup app: %s", name)
            raise exception.PasteAppNotFound(name=name, path=self.config_path)
class PeriodicTasks(periodic_task.PeriodicTasks):
    """Bind oslo.service periodic tasks to masakari's configuration."""

    def __init__(self):
        super(PeriodicTasks, self).__init__(CONF)


class Manager(PeriodicTasks):
    """Base class for masakari managers.

    Provides the common host/service bookkeeping plus a set of no-op
    lifecycle hooks that concrete managers override as needed.
    """

    def __init__(self, host=None, service_name='undefined'):
        # Fall back to the configured host when none is supplied.
        self.host = host or CONF.host
        self.service_name = service_name
        super(Manager, self).__init__()

    def periodic_tasks(self, context, raise_on_error=False):
        """Tasks to be run at a periodic interval."""
        return self.run_periodic_tasks(context, raise_on_error=raise_on_error)

    def init_host(self):
        """Hook for extra initialization when the service is started.

        Called before any service record is created.  Child classes should
        override this method.
        """
        pass

    def cleanup_host(self):
        """Hook to do cleanup work when the service shuts down.

        Child classes should override this method.
        """
        pass

    def pre_start_hook(self):
        """Hook run before any RPC queues/consumers are created.

        Called after other initialization has succeeded and a service
        record is created.  Child classes should override this method.
        """
        pass

    def post_start_hook(self):
        """Hook run right after RPC consumers are created and the service
        starts 'running'.  Child classes should override this method.
        """
        pass

    def reset(self):
        """Hook called on SIGHUP to re-read dynamic configuration or do any
        reconfiguration tasks.
        """
        pass
def notify_about_host_api(context, host, action, phase=None,
                          binary='masakari-api', exception=None, tb=None):
    """Send versioned notification about a host API.

    :param host: Host object
    :param action: the name of the action
    :param phase: the phase of the action
    :param binary: the binary emitting the notification
    :param exception: the thrown exception (used in error notifications)
    :param tb: the traceback (used in error notifications)
    """
    fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb)
    publisher = notification_base.NotificationPublisher(
        context=context, host=socket.gethostname(), binary=binary)
    event_type = notification_base.EventType(action=action, phase=phase)
    payload = event_notification.HostApiPayload(host=host, fault=fault)
    host_notification = event_notification.HostApiNotification(
        context=context,
        priority=priority,
        publisher=publisher,
        event_type=event_type,
        payload=payload)
    host_notification.emit(context)


def notify_about_notification_api(context, notification, action, phase=None,
                                  binary='masakari-api', exception=None,
                                  tb=None):
    """Send versioned notification about a notification api.

    :param notification: Notification object
    :param action: the name of the action
    :param phase: the phase of the action
    :param binary: the binary emitting the notification
    :param exception: the thrown exception (used in error notifications)
    :param tb: the traceback (used in error notifications)
    """
    fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb)
    publisher = notification_base.NotificationPublisher(
        context=context, host=socket.gethostname(), binary=binary)
    event_type = notification_base.EventType(action=action, phase=phase)
    payload = event_notification.NotificationApiPayload(
        notification=notification, fault=fault)
    api_notification = event_notification.NotificationApiNotification(
        context=context,
        priority=priority,
        publisher=publisher,
        event_type=event_type,
        payload=payload)
    api_notification.emit(context)
class VersionedMethod(object):
    """Record associating an API method with its supported version range."""

    def __init__(self, name, start_version, end_version, func):
        """Versioning information for a single method.

        :param name: Name of the method
        :param start_version: Minimum acceptable version (inclusive)
        :param end_version: Maximum acceptable version (inclusive)
        :param func: Method to call
        """
        self.name, self.func = name, func
        self.start_version, self.end_version = start_version, end_version

    def __str__(self):
        details = (self.name, self.start_version, self.end_version)
        return "Version Method %s: min: %s, max: %s" % details
def unquote_header_value(value):
    """Unquotes a header value, the way browsers actually do it.

    :param value: the header value to unquote.
    """
    if not value:
        return value
    if value[0] == '"' and value[-1] == '"':
        # Not RFC-exact on purpose: doing the full RFC unquoting would
        # break with Internet Explorer (and probably other browsers), e.g.
        # file uploads sent as "C:\foo\bar.txt".
        return value[1:-1]
    return value


def parse_list_header(value):
    """Parse lists as described by RFC 2068 Section 2.

    Parses a comma-separated list whose elements may be quoted-strings (a
    quoted-string may contain a comma; a non-quoted string may contain
    quotes in the middle).  Quotes are stripped after parsing::

        >>> parse_list_header('token, "quoted value"')
        ['token', 'quoted value']

    :param value: a string with a list header.
    :return: :class:`list`
    """
    items = []
    for element in urllib2.parse_http_list(value):
        if element[:1] == element[-1:] == '"':
            element = unquote_header_value(element[1:-1])
        items.append(element)
    return items
class Accept(object):
    """Parsed representation of an HTTP Accept header.

    Holds the list of (media-type mask, params) pairs from the header and
    picks the best supported content type from them.
    """

    def __init__(self, value):
        # Each entry is (content_mask, params) as returned by
        # parse_options_header, e.g. ('application/json', {'q': '0.8'}).
        self._content_types = [parse_options_header(v) for v in
                               parse_list_header(value)]

    def best_match(self, supported_content_types):
        """Return (best content type, its params) or (None, {}).

        Ranking: higher 'q' quality wins; on a quality tie, the more
        specific mask (fewer '*' wildcards) wins.
        """
        best_quality = -1
        best_content_type = None
        best_params = {}
        best_match = '*/*'
        for content_type in supported_content_types:
            for content_mask, params in self._content_types:
                try:
                    quality = float(params.get('q', 1))
                except ValueError:
                    # Malformed quality value — ignore this entry.
                    continue
                if quality < best_quality:
                    continue
                elif best_quality == quality:
                    # Equal quality: only replace the current best if this
                    # mask is strictly more specific.
                    if best_match.count('*') <= content_mask.count('*'):
                        continue
                if self._match_mask(content_mask, content_type):
                    best_quality = quality
                    best_content_type = content_type
                    best_params = params
                    best_match = content_mask
        return best_content_type, best_params

    def _match_mask(self, mask, content_type):
        # Exact match, full wildcard, or major-type wildcard ('text/*').
        if '*' not in mask:
            return content_type == mask
        if mask == '*/*':
            return True
        mask_major = mask[:-2]
        content_type_major = content_type.split('/', 1)[0]
        return content_type_major == mask_major
    def _match(self, host, port, path_info):
        """Find longest match for a given URL path."""
        for (domain, app_url), app in self.applications:
            # Skip apps bound to a different domain (with or without port).
            if domain and domain != host and domain != host + ':' + port:
                continue
            if (path_info == app_url or
                    path_info.startswith(app_url + '/')):
                return app, app_url
        return None, None

    def _set_script_name(self, app, app_url):
        # Wrap 'app' so the matched prefix is appended to SCRIPT_NAME.
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            return app(environ, start_response)
        return wrap

    def _munge_path(self, app, path_info, app_url):
        # Like _set_script_name, but also strips the prefix from PATH_INFO.
        def wrap(environ, start_response):
            environ['SCRIPT_NAME'] += app_url
            environ['PATH_INFO'] = path_info[len(app_url):]
            return app(environ, start_response)
        return wrap

    def _path_strategy(self, host, port, path_info):
        """Check path suffix for MIME type and path prefix for API version."""
        mime_type = app = app_url = None

        # Suffix like '.json' selects the response MIME type.
        parts = path_info.rsplit('.', 1)
        if len(parts) > 1:
            possible_type = 'application/' + parts[1]
            if possible_type in wsgi.get_supported_content_types():
                mime_type = possible_type

        # Prefix like '/v1.1/...' selects the application.
        parts = path_info.split('/')
        if len(parts) > 1:
            possible_app, possible_app_url = self._match(host, port,
                                                         path_info)
            # Don't use prefix if it ends up matching default
            if possible_app and possible_app_url:
                app_url = possible_app_url
                app = self._munge_path(possible_app, path_info, app_url)

        return mime_type, app, app_url

    def _content_type_strategy(self, host, port, environ):
        """Check Content-Type header for API version."""
        app = None
        # e.g. 'application/json;version=1.1' selects the /v1.1 app.
        params = parse_options_header(environ.get('CONTENT_TYPE', ''))[1]
        if 'version' in params:
            app, app_url = self._match(host, port, '/v' + params['version'])
            if app:
                app = self._set_script_name(app, app_url)
        return app

    def _accept_strategy(self, host, port, environ, supported_content_types):
        """Check Accept header for best matching MIME type and API version."""
        accept = Accept(environ.get('HTTP_ACCEPT', ''))
        app = None
        # Find the best match in the Accept header
        mime_type, params = accept.best_match(supported_content_types)
        if 'version' in params:
            app, app_url = self._match(host, port, '/v' + params['version'])
            if app:
                app = self._set_script_name(app, app_url)
        return mime_type, app

    def __call__(self, environ, start_response):
        # Split HTTP_HOST into host and port, defaulting the port from the
        # URL scheme when none is given.
        host = environ.get('HTTP_HOST', environ.get('SERVER_NAME')).lower()
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            if environ['wsgi.url_scheme'] == 'http':
                port = '80'
            else:
                port = '443'

        path_info = environ['PATH_INFO']
        path_info = self.normalize_url(path_info, False)[1]

        # The MIME type for the response is determined in one of two ways:
        # 1) URL path suffix (eg /servers/detail.json)
        # 2) Accept header (eg application/json;q=0.8, application/xml;q=0.2)

        # The API version is determined in one of three ways:
        # 1) URL path prefix (eg /v1.1/tenant/servers/detail)
        # 2) Content-Type header (eg application/json;version=1.1)
        # 3) Accept header (eg application/json;q=0.8;version=1.1)

        supported_content_types = list(wsgi.get_supported_content_types())

        mime_type, app, app_url = self._path_strategy(host, port, path_info)

        # Accept application/atom+xml for the index query of each API
        # version mount point as well as the root index
        if (app_url and app_url + '/' == path_info) or path_info == '/':
            supported_content_types.append('application/atom+xml')

        if not app:
            app = self._content_type_strategy(host, port, environ)

        if not mime_type or not app:
            possible_mime_type, possible_app = self._accept_strategy(
                host, port, environ, supported_content_types)
            if possible_mime_type and not mime_type:
                mime_type = possible_mime_type
            if possible_app and not app:
                app = possible_app

        if not mime_type:
            mime_type = 'application/json'

        if not app:
            # Didn't match a particular version, probably matches default
            app, app_url = self._match(host, port, path_info)
            if app:
                app = self._munge_path(app, path_info, app_url)

        if app:
            environ['masakari.best_content_type'] = mime_type
            return app(environ, start_response)

        LOG.debug('Could not find application for %s', environ['PATH_INFO'])
        environ['paste.urlmap_object'] = self
        return self.not_found_application(environ, start_response)
masakari-9.0.0/masakari/api/validation/0000775000175000017500000000000013656750011020031 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/validation/__init__.py0000664000175000017500000000366613656747723022175 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Request Body validating middleware. """ import functools from masakari.api import api_version_request as api_version from masakari.api.validation import validators def schema(request_body_schema, min_version=None, max_version=None): """Register a schema to validate request body. Registered schema will be used for validating request body just before API method executing. :argument dict request_body_schema: a schema to validate request body """ def add_validator(func): @functools.wraps(func) def wrapper(*args, **kwargs): min_ver = api_version.APIVersionRequest(min_version) max_ver = api_version.APIVersionRequest(max_version) if 'req' in kwargs: ver = kwargs['req'].api_version_request else: ver = args[1].api_version_request ver.matches(min_ver, max_ver) # Only validate against the schema if it lies within # the version range specified. Note that if both min # and max are not specified the validator will always # be run. 
schema_validator = validators._SchemaValidator(request_body_schema) schema_validator.validate(kwargs['body']) return func(*args, **kwargs) return wrapper return add_validator masakari-9.0.0/masakari/api/validation/validators.py0000664000175000017500000001765513656747723022611 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Internal implementation of request Body validating middleware. """ import re import jsonschema from jsonschema import exceptions as jsonschema_exc from oslo_utils import timeutils from oslo_utils import uuidutils import six from masakari.api.validation import parameter_types from masakari import exception from masakari.i18n import _ def _soft_validate_additional_properties(validator, additional_properties_value, instance, schema): """Validator function. If there are not any properties on the instance that are not specified in the schema, this will return without any effect. If there are any such extra properties, they will be handled as follows: - if the validator passed to the method is not of type "object", this method will return without any effect. - if the 'additional_properties_value' parameter is True, this method will return without any effect. - if the schema has an additionalProperties value of True, the extra properties on the instance will not be touched. 
- if the schema has an additionalProperties value of False and there aren't patternProperties specified, the extra properties will be stripped from the instance. - if the schema has an additionalProperties value of False and there are patternProperties specified, the extra properties will not be touched and raise validation error if pattern doesn't match. """ if (not validator.is_type(instance, "object") or additional_properties_value): return properties = schema.get("properties", {}) patterns = "|".join(schema.get("patternProperties", {})) extra_properties = set() for prop in instance: if prop not in properties: if patterns: if not re.search(patterns, prop): extra_properties.add(prop) else: extra_properties.add(prop) if not extra_properties: return if patterns: error = "Additional properties are not allowed (%s %s unexpected)" if len(extra_properties) == 1: verb = "was" else: verb = "were" yield jsonschema_exc.ValidationError( error % (", ".join(repr(extra) for extra in extra_properties), verb)) else: for prop in extra_properties: del instance[prop] @jsonschema.FormatChecker.cls_checks('date-time') def _validate_datetime_format(instance): try: timeutils.parse_isotime(instance) except ValueError: return False else: return True @jsonschema.FormatChecker.cls_checks('uuid') def _validate_uuid_format(instance): return uuidutils.is_uuid_like(instance) @jsonschema.FormatChecker.cls_checks('name', exception.InvalidName) def _validate_name(instance): regex = parameter_types.valid_name_regex try: if re.search(regex.regex, instance): return True except TypeError: # The name must be string type. If instance isn't string type, the # TypeError will be raised at here. pass raise exception.InvalidName(reason=regex.reason) class FormatChecker(jsonschema.FormatChecker): """A FormatChecker can output the message from cause exception We need understandable validation errors messages for users. 
    def check(self, instance, format):
        """Check whether the instance conforms to the given format.

        :argument instance: the instance to check
        :type: any primitive type (str, number, bool)
        :argument str format: the format that instance should conform to
        :raises: :exc:`FormatError` if instance does not conform to format
        """
        # Unknown formats are silently accepted, matching the base
        # jsonschema FormatChecker behaviour.
        if format not in self.checkers:
            return

        # For safety reasons custom checkers can be registered with
        # allowed exception types. Anything else will fall into the
        # default formatter.
        func, raises = self.checkers[format]
        result, cause = None, None

        try:
            result = func(instance)
        except raises as e:
            # Keep the checker's exception as the FormatError cause so its
            # message (e.g. from InvalidName) reaches the user.
            cause = e
        if not result:
            msg = "%r is not a %r" % (instance, format)
            raise jsonschema_exc.FormatError(msg, cause=cause)
" Value: %(value)s. %(message)s") % { 'path': ex.path.pop(), 'value': ex.instance, 'message': ex.message } else: detail = ex.message raise exception.ValidationError(detail=detail) except TypeError as ex: # NOTE: If passing non string value to patternProperties parameter, # TypeError happens. Here is for catching the TypeError. detail = six.text_type(ex) raise exception.ValidationError(detail=detail) def _number_from_str(self, instance): try: value = int(instance) except (ValueError, TypeError): try: value = float(instance) except (ValueError, TypeError): return None return value def _validate_minimum(self, validator, minimum, instance, schema): instance = self._number_from_str(instance) if instance is None: return return self.validator_org.VALIDATORS['minimum'](validator, minimum, instance, schema) def _validate_maximum(self, validator, maximum, instance, schema): instance = self._number_from_str(instance) if instance is None: return return self.validator_org.VALIDATORS['maximum'](validator, maximum, instance, schema) masakari-9.0.0/masakari/api/validation/parameter_types.py0000664000175000017500000001224413656747723023632 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Common parameter types for validating request Body. 
""" import re import unicodedata import six from masakari.i18n import _ class ValidationRegex(object): def __init__(self, regex, reason): self.regex = regex self.reason = reason def _is_printable(char): """determine if a unicode code point is printable. This checks if the character is either "other" (mostly control codes), or a non-horizontal space. All characters that don't match those criteria are considered printable; that is: letters; combining marks; numbers; punctuation; symbols; (horizontal) space separators. """ category = unicodedata.category(char) return (not category.startswith("C") and (not category.startswith("Z") or category == "Zs")) def _get_all_chars(): for i in range(0xFFFF): yield six.unichr(i) # build a regex that matches all printable characters. This allows # spaces in the middle of the name. Also note that the regexp below # deliberately allows the empty string. This is so only the constraint # which enforces a minimum length for the name is triggered when an # empty string is tested. Otherwise it is not deterministic which # constraint fails and this causes issues for some unittests when # PYTHONHASHSEED is set randomly. def _build_regex_range(ws=True, invert=False, exclude=None): """Build a range regex for a set of characters in utf8. This builds a valid range regex for characters in utf8 by iterating the entire space and building up a set of x-y ranges for all the characters we find which are valid. :param ws: should we include whitespace in this range. :param exclude: any characters we want to exclude :param invert: invert the logic The inversion is useful when we want to generate a set of ranges which is everything that's not a certain class. For instance, produce all all the non printable characters as a set of ranges. 
""" if exclude is None: exclude = [] regex = "" # are we currently in a range in_range = False # last character we found, for closing ranges last = None # last character we added to the regex, this lets us know that we # already have B in the range, which means we don't need to close # it out with B-B. While the later seems to work, it's kind of bad form. last_added = None def valid_char(char): if char in exclude: result = False elif ws: result = _is_printable(char) else: # Zs is the unicode class for space characters, of which # there are about 10 in this range. result = (_is_printable(char) and unicodedata.category(char) != "Zs") if invert is True: return not result return result # iterate through the entire character range. in_ for c in _get_all_chars(): if valid_char(c): if not in_range: regex += re.escape(c) last_added = c in_range = True else: if in_range and last != last_added: regex += "-" + re.escape(last) in_range = False last = c else: if in_range: regex += "-" + re.escape(c) return regex valid_description_regex_base = '^[%s]*$' valid_name_regex_base = '^(?![%s])[%s]*(?= req.api_version_request >= APIVersionRequest(min_version)) class APIVersionRequest(object): """This class represents an API Version Request with convenience methods for manipulation and comparison of version numbers that we need to do to implement microversions. """ def __init__(self, version_string=None): """Create an API version request object. :param version_string: String representation of APIVersionRequest. Correct format is 'X.Y', where 'X' and 'Y' are int values. 
None value should be used to create Null APIVersionRequest, which is equal to 0.0 """ self.ver_major = 0 self.ver_minor = 0 if version_string is not None: match = re.match(r"^([1-9]\d*)\.([1-9]\d*|0)$", version_string) if match: self.ver_major = int(match.group(1)) self.ver_minor = int(match.group(2)) else: raise exception.InvalidAPIVersionString(version=version_string) def __str__(self): """Debug/Logging representation of object.""" return ("API Version Request Major: %s, Minor: %s" % (self.ver_major, self.ver_minor)) def is_null(self): return self.ver_major == 0 and self.ver_minor == 0 def _format_type_error(self, other): return TypeError(_("'%(other)s' should be an instance of '%(cls)s'") % {"other": other, "cls": self.__class__}) def __lt__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) < (other.ver_major, other.ver_minor)) def __eq__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) == (other.ver_major, other.ver_minor)) def __gt__(self, other): if not isinstance(other, APIVersionRequest): raise self._format_type_error(other) return ((self.ver_major, self.ver_minor) > (other.ver_major, other.ver_minor)) def __le__(self, other): return self < other or self == other def __ne__(self, other): return not self.__eq__(other) def __ge__(self, other): return self > other or self == other def matches(self, min_version, max_version): """Returns whether the version object represents a version greater than or equal to the minimum version and less than or equal to the maximum version. @param min_version: Minimum acceptable version. @param max_version: Maximum acceptable version. @returns: boolean If min_version is null then there is no minimum limit. If max_version is null then there is no maximum limit. 
If self is null then raise ValueError """ if self.is_null(): raise ValueError if max_version.is_null() and min_version.is_null(): return True elif max_version.is_null(): return min_version <= self elif min_version.is_null(): return self <= max_version else: return min_version <= self <= max_version def get_string(self): """Converts object to string representation which if used to create an APIVersionRequest object results in the same version request. """ if self.is_null(): raise ValueError return "%s.%s" % (self.ver_major, self.ver_minor) masakari-9.0.0/masakari/api/openstack/0000775000175000017500000000000013656750011017666 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/__init__.py0000664000175000017500000002370413656747723022025 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ WSGI middleware for OpenStack API controllers. 
""" from oslo_log import log as logging import routes import six import stevedore import webob.dec import webob.exc from masakari.api.openstack import wsgi import masakari.conf from masakari.i18n import translate from masakari import utils from masakari import wsgi as base_wsgi LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF class FaultWrapper(base_wsgi.Middleware): """Calls down the middleware stack, making exceptions into faults.""" _status_to_type = {} @staticmethod def status_to_type(status): if not FaultWrapper._status_to_type: for clazz in utils.walk_class_hierarchy(webob.exc.HTTPError): FaultWrapper._status_to_type[clazz.code] = clazz return FaultWrapper._status_to_type.get( status, webob.exc.HTTPInternalServerError)() def _error(self, inner, req): LOG.exception("Caught error: %s", six.text_type(inner)) safe = getattr(inner, 'safe', False) headers = getattr(inner, 'headers', None) status = getattr(inner, 'code', 500) if status is None: status = 500 msg_dict = dict(url=req.url, status=status) LOG.info("%(url)s returned with HTTP %(status)d", msg_dict) outer = self.status_to_type(status) if headers: outer.headers = headers if safe: user_locale = req.best_match_language() inner_msg = translate(inner.message, user_locale) outer.explanation = '%s: %s' % (inner.__class__.__name__, inner_msg) return wsgi.Fault(outer) @webob.dec.wsgify(RequestClass=wsgi.Request) def __call__(self, req): try: return req.get_response(self.application) except Exception as ex: return self._error(ex, req) class APIMapper(routes.Mapper): def routematch(self, url=None, environ=None): if url == "": result = self._match("", environ) return result[0], result[1] return routes.Mapper.routematch(self, url, environ) def connect(self, *args, **kargs): kargs.setdefault('requirements', {}) if not kargs['requirements'].get('format'): kargs['requirements']['format'] = 'json|xml' return routes.Mapper.connect(self, *args, **kargs) class ProjectMapper(APIMapper): def resource(self, 
member_name, collection_name, **kwargs): # NOTE(abhishekk): project_id parameter is only valid if its hex # or hex + dashes (note, integers are a subset of this). This # is required to hand our overlaping routes issues. project_id_regex = '[0-9a-f\-]+' if CONF.osapi_v1.project_id_regex: project_id_regex = CONF.osapi_v1.project_id_regex project_id_token = '{project_id:%s}' % project_id_regex if 'parent_resource' not in kwargs: kwargs['path_prefix'] = '%s/' % project_id_token else: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/%s/:%s_id' % ( project_id_token, p_collection, p_member) routes.Mapper.resource( self, member_name, collection_name, **kwargs) # while we are in transition mode, create additional routes # for the resource that do not include project_id. if 'parent_resource' not in kwargs: del kwargs['path_prefix'] else: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/:%s_id' % (p_collection, p_member) routes.Mapper.resource(self, member_name, collection_name, **kwargs) class PlainMapper(APIMapper): def resource(self, member_name, collection_name, **kwargs): if 'parent_resource' in kwargs: parent_resource = kwargs['parent_resource'] p_collection = parent_resource['collection_name'] p_member = parent_resource['member_name'] kwargs['path_prefix'] = '%s/:%s_id' % (p_collection, p_member) routes.Mapper.resource(self, member_name, collection_name, **kwargs) class APIRouterV1(base_wsgi.Router): """Routes requests on the OpenStack v1 API to the appropriate controller and method. """ @classmethod def factory(cls, global_config, **local_config): """Simple paste factory :class:`masakari.wsgi.Router` doesn't have one. 
""" return cls() @staticmethod def api_extension_namespace(): return 'masakari.api.v1.extensions' def __init__(self, init_only=None): def _check_load_extension(ext): return self._register_extension(ext) self.api_extension_manager = stevedore.enabled.EnabledExtensionManager( namespace=self.api_extension_namespace(), check_func=_check_load_extension, invoke_on_load=True, invoke_kwds={"extension_info": self.loaded_extension_info}) mapper = ProjectMapper() self.resources = {} if list(self.api_extension_manager): self._register_resources_check_inherits(mapper) self.api_extension_manager.map(self._register_controllers) LOG.info("Loaded extensions: %s", sorted(self.loaded_extension_info.get_extensions().keys())) super(APIRouterV1, self).__init__(mapper) def _register_resources_list(self, ext_list, mapper): for ext in ext_list: self._register_resources(ext, mapper) def _register_resources_check_inherits(self, mapper): ext_has_inherits = [] ext_no_inherits = [] for ext in self.api_extension_manager: for resource in ext.obj.get_resources(): if resource.inherits: ext_has_inherits.append(ext) break else: ext_no_inherits.append(ext) self._register_resources_list(ext_no_inherits, mapper) self._register_resources_list(ext_has_inherits, mapper) @property def loaded_extension_info(self): raise NotImplementedError() def _register_extension(self, ext): raise NotImplementedError() def _register_resources(self, ext, mapper): """Register resources defined by the extensions Extensions define what resources they want to add through a get_resources function """ handler = ext.obj LOG.debug("Running _register_resources on %s", ext.obj) for resource in handler.get_resources(): LOG.debug('Extended resource: %s', resource.collection) inherits = None if resource.inherits: inherits = self.resources.get(resource.inherits) if not resource.controller: resource.controller = inherits.controller wsgi_resource = wsgi.ResourceV1(resource.controller, inherits=inherits) self.resources[resource.collection] 
= wsgi_resource kargs = dict( controller=wsgi_resource, collection=resource.collection_actions, member=resource.member_actions) if resource.parent: kargs['parent_resource'] = resource.parent # non core-API plugins use the collection name as the # member name, but the core-API plugins use the # singular/plural convention for member/collection names if resource.member_name: member_name = resource.member_name else: member_name = resource.collection mapper.resource(member_name, resource.collection, **kargs) if resource.custom_routes_fn: resource.custom_routes_fn(mapper, wsgi_resource) def _register_controllers(self, ext): """Register controllers defined by the extensions Extensions define what resources they want to add through a get_controller_extensions function """ handler = ext.obj LOG.debug("Running _register_controllers on %s", ext.obj) for extension in handler.get_controller_extensions(): ext_name = extension.extension.name collection = extension.collection controller = extension.controller if collection not in self.resources: LOG.warning('Extension %(ext_name)s: Cannot extend ' 'resource %(collection)s: No such resource', {'ext_name': ext_name, 'collection': collection}) continue LOG.debug('Extension %(ext_name)s extending resource: ' '%(collection)s', {'ext_name': ext_name, 'collection': collection}) resource = self.resources[collection] resource.register_actions(controller) resource.register_extensions(controller) masakari-9.0.0/masakari/api/openstack/common.py0000664000175000017500000002213713656747723021555 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
def remove_trailing_version_from_href(href):
    """Removes the api version from the href.

    Given: 'http://www.masakari.com/ha/v1.1'
    Returns: 'http://www.masakari.com/ha'

    Given: 'http://www.masakari.com/v1.1'
    Returns: 'http://www.masakari.com'
    """
    parsed_url = urlparse.urlsplit(href)
    path_parts = parsed_url.path.rsplit('/', 1)
    last_segment = path_parts.pop()

    # NOTE: this should match vX.X or vX
    version_re = re.compile(r'^v([0-9]+|[0-9]+\.[0-9]+)(/.*|$)')
    if not version_re.match(last_segment):
        LOG.debug('href %s does not contain version', href)
        raise ValueError(_('href %s does not contain version') % href)

    rebuilt = list(parsed_url)
    rebuilt[2] = url_join(*path_parts)
    return urlparse.urlunsplit(rebuilt)


def url_join(*parts):
    """Convenience method for joining parts of a URL

    Any leading and trailing '/' characters are removed, and the parts
    joined together with '/' as a separator. If last element of 'parts'
    is an empty string, the returned URL will have a trailing slash.
    """
    parts = parts or [""]
    cleaned = [piece.strip("/") for piece in parts if piece]
    if not parts[-1]:
        # An empty final element requests a trailing slash.
        cleaned.append("")
    return "/".join(cleaned)
2) 'limit' param is specified but it exceeds CONF.osapi_max_limit, in this case the number of items is CONF.osapi_max_limit. 3) 'limit' param is NOT specified but the number of items is CONF.osapi_max_limit. """ links = [] max_items = min( int(request.params.get("limit", CONF.osapi_max_limit)), CONF.osapi_max_limit) if max_items and max_items == len(items): last_item = items[-1] if id_key in last_item: last_item_id = last_item[id_key] elif 'id' in last_item: last_item_id = last_item["id"] else: last_item_id = last_item["flavorid"] links.append({ "rel": "next", "href": self._get_next_link(request, last_item_id, collection_name), }) return links def _update_link_prefix(self, orig_url, prefix): if not prefix: return orig_url url_parts = list(urlparse.urlsplit(orig_url)) prefix_parts = list(urlparse.urlsplit(prefix)) url_parts[0:2] = prefix_parts[0:2] url_parts[2] = prefix_parts[2] + url_parts[2] return urlparse.urlunsplit(url_parts).rstrip('/') def _update_masakari_link_prefix(self, orig_url): return self._update_link_prefix(orig_url, CONF.osapi_masakari_link_prefix) def _get_int_param(request, param): """Extract integer param from request or fail.""" try: int_param = utils.validate_integer(request.GET[param], param, min_value=0) except exception.InvalidInput as e: raise webob.exc.HTTPBadRequest(explanation=e.format_message()) return int_param def _get_marker_param(request): """Extract marker id from request or fail.""" return request.GET['marker'] def get_pagination_params(request): """Return marker, limit tuple from request. :param request: `wsgi.Request` possibly containing 'marker' and 'limit' GET variables. 'marker' is the id of the last element the client has seen, and 'limit' is the maximum number of items to return. If 'limit' is not specified, 0, or > max_limit, we default to max_limit. Negative values for either marker or limit will cause exc.HTTPBadRequest() exceptions to be raised. 
""" params = {} if 'limit' in request.GET: params['limit'] = _get_int_param(request, 'limit') if 'page_size' in request.GET: params['page_size'] = _get_int_param(request, 'page_size') if 'marker' in request.GET: params['marker'] = _get_marker_param(request) return params def get_limit_and_marker(request, max_limit=CONF.osapi_max_limit): """get limited parameter from request.""" params = get_pagination_params(request) limit = params.get('limit', max_limit) limit = min(max_limit, limit) marker = params.get('marker') return limit, marker def get_sort_params(input_params, default_key='created_at', default_dir='desc'): """Retrieves sort keys/directions parameters. Processes the parameters to create a list of sort keys and sort directions that correspond to the 'sort_key' and 'sort_dir' parameter values. These sorting parameters can be specified multiple times in order to generate the list of sort keys and directions. The input parameters are not modified. :param input_params: webob.multidict of request parameters (from masakari.wsgi.Request.params) :param default_key: default sort key value, added to the list if no 'sort_key' parameters are supplied :param default_dir: default sort dir value, added to the list if no 'sort_dir' parameters are supplied :returns: list of sort keys, list of sort dirs """ params = input_params.copy() sort_keys = [] sort_dirs = [] while 'sort_key' in params: sort_keys.append(params.pop('sort_key').strip()) while 'sort_dir' in params: sort_dirs.append(params.pop('sort_dir').strip()) if len(sort_keys) == 0 and default_key: sort_keys.append(default_key) if len(sort_dirs) == 0 and default_dir: sort_dirs.append(default_dir) return sort_keys, sort_dirs masakari-9.0.0/masakari/api/openstack/wsgi.py0000664000175000017500000011361013656747723021233 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import inspect import microversion_parse from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import strutils import six from six.moves import http_client as http import webob from masakari.api import api_version_request as api_version from masakari.api import versioned_method from masakari import exception from masakari import i18n from masakari.i18n import _ from masakari import utils from masakari import wsgi LOG = logging.getLogger(__name__) _SUPPORTED_CONTENT_TYPES = ( 'application/json', 'application/vnd.openstack.masakari+json', ) _MEDIA_TYPE_MAP = { 'application/vnd.openstack.masakari+json': 'json', 'application/json': 'json', } # These are typically automatically created by routes as either defaults # collection or member methods. _ROUTES_METHODS = [ 'create', 'delete', 'show', 'update', ] _METHODS_WITH_BODY = [ 'POST', 'PUT', ] # The default api version request if none is requested in the headers DEFAULT_API_VERSION = "1.0" # name of attribute to keep version method information VER_METHOD_ATTR = 'versioned_methods' # Names of headers used by clients to request a specific version # of the REST API API_VERSION_REQUEST_HEADER = 'OpenStack-API-Version' def get_supported_content_types(): return _SUPPORTED_CONTENT_TYPES def get_media_map(): return dict(_MEDIA_TYPE_MAP.items()) # NOTE: This function allows a get on both a dict-like and an # object-like object. 
cache_db_items() is used on both versioned objects and # dicts, so the function can't be totally changed over to [] syntax, nor # can it be changed over to use getattr(). def item_get(item, item_key): if hasattr(item, '__getitem__'): return item[item_key] else: return getattr(item, item_key) class Request(wsgi.Request): """Add some OpenStack API-specific logic to the base webob.Request.""" def __init__(self, *args, **kwargs): super(Request, self).__init__(*args, **kwargs) self._extension_data = {'db_items': {}} if not hasattr(self, 'api_version_request'): self.api_version_request = api_version.APIVersionRequest() def cache_db_items(self, key, items, item_key='id'): """Allow API methods to store objects from a DB query to be used by API extensions within the same API request. An instance of this class only lives for the lifetime of a single API request, so there's no need to implement full cache management. """ db_items = self._extension_data['db_items'].setdefault(key, {}) for item in items: db_items[item_get(item, item_key)] = item def get_db_items(self, key): """Allow an API extension to get previously stored objects within the same API request. Note that the object data will be slightly stale. """ return self._extension_data['db_items'][key] def get_db_item(self, key, item_key): """Allow an API extension to get a previously stored object within the same API request. Note that the object data will be slightly stale. 
""" return self.get_db_items(key).get(item_key) def best_match_content_type(self): """Determine the requested response content-type.""" if 'masakari.best_content_type' not in self.environ: # Calculate the best MIME type content_type = None # Check URL path suffix parts = self.path.rsplit('.', 1) if len(parts) > 1: possible_type = 'application/' + parts[1] if possible_type in get_supported_content_types(): content_type = possible_type if not content_type: content_type = self.accept.best_match( get_supported_content_types()) self.environ['masakari.best_content_type'] = (content_type or 'application/json') return self.environ['masakari.best_content_type'] def get_content_type(self): """Determine content type of the request body. Does not do any body introspection, only checks header """ if "Content-Type" not in self.headers: return None content_type = self.content_type # NOTE: text/plain is the default for eventlet and # other webservers which use mimetools.Message.gettype() # whereas twisted defaults to ''. if not content_type or content_type == 'text/plain': return None if content_type not in get_supported_content_types(): raise exception.InvalidContentType(content_type=content_type) return content_type def best_match_language(self): """Determine the best available language for the request. :returns: the best language match or None if the 'Accept-Language' header was not available in the request. 
""" if not self.accept_language: return None return self.accept_language.best_match(i18n.get_available_languages()) def set_api_version_request(self): """Set API version request based on the request header information.""" hdr_string = microversion_parse.get_version( self.headers, service_type='instance-ha') if hdr_string is None: self.api_version_request = api_version.APIVersionRequest( api_version.DEFAULT_API_VERSION) elif hdr_string == 'latest': # 'latest' is a special keyword which is equivalent to # requesting the maximum version of the API supported self.api_version_request = api_version.max_api_version() else: self.api_version_request = api_version.APIVersionRequest( hdr_string) # Check that the version requested is within the global # minimum/maximum of supported API versions if not self.api_version_request.matches( api_version.min_api_version(), api_version.max_api_version()): raise exception.InvalidGlobalAPIVersion( req_ver=self.api_version_request.get_string(), min_ver=api_version.min_api_version().get_string(), max_ver=api_version.max_api_version().get_string()) class ActionDispatcher(object): """Maps method name to local methods through action name.""" def dispatch(self, *args, **kwargs): """Find and call local method.""" action = kwargs.pop('action', 'default') action_method = getattr(self, str(action), self.default) return action_method(*args, **kwargs) def default(self, data): raise NotImplementedError() class JSONDeserializer(ActionDispatcher): def _from_json(self, datastring): try: return jsonutils.loads(datastring) except ValueError: msg = _("cannot understand JSON") raise exception.MalformedRequestBody(reason=msg) def deserialize(self, datastring, action='default'): return self.dispatch(datastring, action=action) def default(self, datastring): return {'body': self._from_json(datastring)} class JSONDictSerializer(ActionDispatcher): """Default JSON request body serialization.""" def serialize(self, data, action='default'): return 
self.dispatch(data, action=action) def default(self, data): return six.text_type(jsonutils.dumps(data)) def response(code): """Attaches response code to a method. This decorator associates a response code with a method. Note that the function attributes are directly manipulated; the method is not wrapped. """ def decorator(func): func.wsgi_code = code return func return decorator class ResponseObject(object): """Bundles a response object Object that app methods may return in order to allow its response to be modified by extensions in the code. Its use is optional (and should only be used if you really know what you are doing). """ def __init__(self, obj, code=None, headers=None): """Builds a response object.""" self.obj = obj self._default_code = http.OK self._code = code self._headers = headers or {} self.serializer = JSONDictSerializer() def __getitem__(self, key): """Retrieves a header with the given name.""" return self._headers[key.lower()] def __setitem__(self, key, value): """Sets a header with the given name to the given value.""" self._headers[key.lower()] = value def __delitem__(self, key): """Deletes the header with the given name.""" del self._headers[key.lower()] def serialize(self, request, content_type): """Serializes the wrapped object. Utility method for serializing the wrapped object. Returns a webob.Response object. """ serializer = self.serializer body = None if self.obj is not None: body = serializer.serialize(self.obj) response = webob.Response(body=body) if response.headers.get('Content-Length'): if six.PY3: response.headers['Content-Length'] = (str( response.headers['Content-Length'])) else: # NOTE: we need to encode 'Content-Length' header, since # webob.Response auto sets it if "body" attr is presented. 
# github.com/Pylons/webob/blob/1.5.0b0/webob/response.py#L147 response.headers['Content-Length'] = utils.utf8( response.headers['Content-Length']) response.status_int = self.code for hdr, value in self._headers.items(): if six.PY3: response.headers[hdr] = str(value) else: response.headers[hdr] = utils.utf8(value) if six.PY3: response.headers['Content-Type'] = str(content_type) else: response.headers['Content-Type'] = utils.utf8(content_type) return response @property def code(self): """Retrieve the response status.""" return self._code or self._default_code @property def headers(self): """Retrieve the headers.""" return self._headers.copy() def action_peek(body): """Determine action to invoke. This looks inside the json body and fetches out the action method name. """ try: decoded = jsonutils.loads(body) except ValueError: msg = _("cannot understand JSON") raise exception.MalformedRequestBody(reason=msg) # Make sure there's exactly one key... if len(decoded) != 1: msg = _("too many body keys") raise exception.MalformedRequestBody(reason=msg) # Return the action name return list(decoded.keys())[0] class ResourceExceptionHandler(object): """Context manager to handle Resource exceptions. Used when processing exceptions generated by API implementation methods (or their extensions). Converts most exceptions to Fault exceptions, with the appropriate logging. 
""" def __enter__(self): return None def __exit__(self, ex_type, ex_value, ex_traceback): if not ex_value: return True if isinstance(ex_value, exception.Forbidden): raise Fault(webob.exc.HTTPForbidden( explanation=ex_value.format_message())) elif isinstance(ex_value, exception.VersionNotFoundForAPIMethod): raise elif isinstance(ex_value, exception.Invalid): raise Fault(exception.ConvertedException( code=ex_value.code, explanation=ex_value.format_message())) elif isinstance(ex_value, TypeError): exc_info = (ex_type, ex_value, ex_traceback) LOG.error('Exception handling resource: %s', ex_value, exc_info=exc_info) raise Fault(webob.exc.HTTPBadRequest()) elif isinstance(ex_value, Fault): LOG.info("Fault thrown: %s", ex_value) raise ex_value elif isinstance(ex_value, webob.exc.HTTPException): LOG.info("HTTP exception thrown: %s", ex_value) raise Fault(ex_value) # We didn't handle the exception return False class Resource(wsgi.Application): """WSGI app that handles (de)serialization and controller dispatch. WSGI app that reads routing information supplied by RoutesMiddleware and calls the requested action method upon its controller. All controller action methods must accept a 'req' argument, which is the incoming wsgi.Request. If the operation is a PUT or POST, the controller method must also accept a 'body' argument (the deserialized request body). They may raise a webob.exc exception or return a dict, which will be serialized by requested content type. Exceptions derived from webob.exc.HTTPException will be automatically wrapped in Fault() to provide API friendly error responses. """ support_api_request_version = False def __init__(self, controller, inherits=None): """:param controller: object that implement methods created by routes lib :param inherits: another resource object that this resource should inherit extensions from. Any action extensions that are applied to the parent resource will also apply to this resource. 
""" self.controller = controller self.default_serializers = dict(json=JSONDictSerializer) # Copy over the actions dictionary self.wsgi_actions = {} if controller: self.register_actions(controller) # Save a mapping of extensions self.wsgi_extensions = {} self.wsgi_action_extensions = {} self.inherits = inherits def register_actions(self, controller): """Registers controller actions with this resource.""" actions = getattr(controller, 'wsgi_actions', {}) for key, method_name in actions.items(): self.wsgi_actions[key] = getattr(controller, method_name) def register_extensions(self, controller): """Registers controller extensions with this resource.""" extensions = getattr(controller, 'wsgi_extensions', []) for method_name, action_name in extensions: # Look up the extending method extension = getattr(controller, method_name) if action_name: # Extending an action... if action_name not in self.wsgi_action_extensions: self.wsgi_action_extensions[action_name] = [] self.wsgi_action_extensions[action_name].append(extension) else: # Extending a regular method if method_name not in self.wsgi_extensions: self.wsgi_extensions[method_name] = [] self.wsgi_extensions[method_name].append(extension) def get_action_args(self, request_environment): """Parse dictionary created by routes library.""" # NOTE: Check for get_action_args() override in the # controller if hasattr(self.controller, 'get_action_args'): return self.controller.get_action_args(request_environment) try: args = request_environment['wsgiorg.routing_args'][1].copy() except (KeyError, IndexError, AttributeError): return {} try: del args['controller'] except KeyError: pass try: del args['format'] except KeyError: pass return args def get_body(self, request): content_type = request.get_content_type() return content_type, request.body def deserialize(self, body): return JSONDeserializer().deserialize(body) def pre_process_extensions(self, extensions, request, action_args): # List of callables for post-processing extensions 
        post = []
        for ext in extensions:
            if inspect.isgeneratorfunction(ext):
                response = None

                # If it's a generator function, the part before the
                # yield is the preprocessing stage
                try:
                    with ResourceExceptionHandler():
                        gen = ext(req=request, **action_args)
                        response = next(gen)
                except Fault as ex:
                    response = ex

                # We had a response...
                if response:
                    # Short-circuit: a preprocessing response replaces the
                    # normal dispatch, and no post-processing runs.
                    return response, []

                # No response, queue up generator for post-processing
                post.append(gen)
            else:
                # Regular functions only perform post-processing
                post.append(ext)

        # None is response, it means we keep going. We reverse the
        # extension list for post-processing.
        return None, reversed(post)

    def post_process_extensions(self, extensions, resp_obj, request,
                                action_args):
        """Run the post-processing half of the extension pipeline.

        ``extensions`` holds generators suspended at their ``yield``
        (their second half is resumed with the response object) plus
        plain callables; any of them may produce a replacement response.
        """
        for ext in extensions:
            response = None
            if inspect.isgenerator(ext):
                # If it's a generator, run the second half of
                # processing
                try:
                    with ResourceExceptionHandler():
                        response = ext.send(resp_obj)
                except StopIteration:
                    # Normal exit of generator
                    continue
                except Fault as ex:
                    response = ex
            else:
                # Regular functions get post-processing...
                try:
                    with ResourceExceptionHandler():
                        response = ext(req=request, resp_obj=resp_obj,
                                       **action_args)
                except exception.VersionNotFoundForAPIMethod:
                    # If an attached extension (@wsgi.extends) for the
                    # method has no version match its not an error. We
                    # just don't run the extends code
                    continue
                except Fault as ex:
                    response = ex

            # We had a response...
if response: return response return None def _should_have_body(self, request): return request.method in _METHODS_WITH_BODY @webob.dec.wsgify(RequestClass=Request) def __call__(self, request): """WSGI method that controls (de)serialization and method dispatch.""" if self.support_api_request_version: # Set the version of the API requested based on the header try: request.set_api_version_request() except exception.InvalidAPIVersionString as e: return Fault(webob.exc.HTTPBadRequest( explanation=e.format_message())) except exception.InvalidGlobalAPIVersion as e: return Fault(webob.exc.HTTPNotAcceptable( explanation=e.format_message())) # Identify the action, its arguments, and the requested # content type action_args = self.get_action_args(request.environ) action = action_args.pop('action', None) # NOTE: we filter out InvalidContentTypes early so we # know everything is good from here on out. try: content_type, body = self.get_body(request) accept = request.best_match_content_type() except exception.InvalidContentType: msg = _("Unsupported Content-Type") return Fault(webob.exc.HTTPUnsupportedMediaType(explanation=msg)) # NOTE: Splitting the function up this way allows for # auditing by external tools that wrap the existing # function. If we try to audit __call__(), we can # run into troubles due to the @webob.dec.wsgify() # decorator. 
return self._process_stack(request, action, action_args, content_type, body, accept) def _process_stack(self, request, action, action_args, content_type, body, accept): """Implement the processing stack.""" # Get the implementing method try: meth, extensions = self.get_method(request, action, content_type, body) except (AttributeError, TypeError): return Fault(webob.exc.HTTPNotFound()) except KeyError as ex: msg = _("There is no such action: %s") % ex.args[0] return Fault(webob.exc.HTTPBadRequest(explanation=msg)) except exception.MalformedRequestBody: msg = _("Malformed request body") return Fault(webob.exc.HTTPBadRequest(explanation=msg)) except webob.exc.HTTPMethodNotAllowed as e: return Fault(e) if body: msg = _("Action: '%(action)s', calling method: %(meth)s, body: " "%(body)s") % {'action': action, 'body': six.text_type(body, 'utf-8'), 'meth': str(meth)} LOG.debug(strutils.mask_password(msg)) else: LOG.debug("Calling method '%(meth)s'", {'meth': str(meth)}) # Now, deserialize the request body... 
try: contents = {} if self._should_have_body(request): # allow empty body with PUT and POST if request.content_length == 0: contents = {'body': None} else: contents = self.deserialize(body) except exception.MalformedRequestBody: msg = _("Malformed request body") return Fault(webob.exc.HTTPBadRequest(explanation=msg)) # Update the action args action_args.update(contents) project_id = action_args.pop("project_id", None) context = request.environ.get('masakari.context') if (context and project_id and (project_id != context.project_id)): msg = _("Malformed request URL: URL's project_id '%(project_id)s'" " doesn't match Context's project_id" " '%(context_project_id)s'") % { 'project_id': project_id, 'context_project_id': context.project_id } return Fault(webob.exc.HTTPBadRequest(explanation=msg)) # Run pre-processing extensions response, post = self.pre_process_extensions(extensions, request, action_args) if not response: try: with ResourceExceptionHandler(): action_result = self.dispatch(meth, request, action_args) except Fault as ex: response = ex if not response: # No exceptions; convert action_result into a # ResponseObject resp_obj = None if type(action_result) is dict or action_result is None: resp_obj = ResponseObject(action_result) elif isinstance(action_result, ResponseObject): resp_obj = action_result else: response = action_result # Run post-processing extensions if resp_obj: # Do a preserialize to set up the response object if hasattr(meth, 'wsgi_code'): resp_obj._default_code = meth.wsgi_code # Process post-processing extensions response = self.post_process_extensions(post, resp_obj, request, action_args) if resp_obj and not response: response = resp_obj.serialize(request, accept) if hasattr(response, 'headers'): for hdr, val in list(response.headers.items()): if six.PY3: response.headers[hdr] = str(val) else: # Headers must be utf-8 strings response.headers[hdr] = utils.utf8(val) if not request.api_version_request.is_null(): 
response.headers[API_VERSION_REQUEST_HEADER] = \ 'masakari ' + request.api_version_request.get_string() response.headers.add('Vary', API_VERSION_REQUEST_HEADER) return response def get_method(self, request, action, content_type, body): meth, extensions = self._get_method(request, action, content_type, body) if self.inherits: _meth, parent_ext = self.inherits.get_method(request, action, content_type, body) extensions.extend(parent_ext) return meth, extensions def _get_method(self, request, action, content_type, body): """Look up the action-specific method and its extensions.""" # Look up the method try: if not self.controller: meth = getattr(self, action) else: meth = getattr(self.controller, action) except AttributeError: if (not self.wsgi_actions or action not in _ROUTES_METHODS + ['action']): if self.controller: msg = _("The request method: '%(method)s' with action: " "'%(action)s' is not allowed on this " "resource") % { 'method': request.method, 'action': action } raise webob.exc.HTTPMethodNotAllowed( explanation=msg, body_template='${explanation}') # Propagate the error raise else: return meth, self.wsgi_extensions.get(action, []) if action == 'action': action_name = action_peek(body) else: action_name = action # Look up the action method return (self.wsgi_actions[action_name], self.wsgi_action_extensions.get(action_name, [])) def dispatch(self, method, request, action_args): """Dispatch a call to the action-specific method.""" try: return method(req=request, **action_args) except exception.VersionNotFoundForAPIMethod: # We deliberately don't return any message information # about the exception to the user so it looks as if # the method is simply not implemented. return Fault(webob.exc.HTTPNotFound()) class ResourceV1(Resource): support_api_request_version = True def action(name): """Mark a function as an action. The given name will be taken as the action key in the body. 
This is also overloaded to allow extensions to provide non-extending definitions of create and delete operations. """ def decorator(func): func.wsgi_action = name return func return decorator def extends(*args, **kwargs): """Indicate a function extends an operation. Can be used as either:: @extends def index(...): pass or as:: @extends(action='resize') def _action_resize(...): pass """ def decorator(func): # Store enough information to find what we're extending func.wsgi_extends = (func.__name__, kwargs.get('action')) return func # If we have positional arguments, call the decorator if args: return decorator(*args) # OK, return the decorator instead return decorator class ControllerMetaclass(type): """Controller metaclass. This metaclass automates the task of assembling a dictionary mapping action keys to method names. """ def __new__(mcs, name, bases, cls_dict): """Adds the wsgi_actions dictionary to the class.""" # Find all actions actions = {} extensions = [] versioned_methods = None # start with wsgi actions from base classes for base in bases: actions.update(getattr(base, 'wsgi_actions', {})) if base.__name__ == "Controller": # NOTE: This resets the VER_METHOD_ATTR attribute # between API controller class creations. This allows us # to use a class decorator on the API methods that doesn't # require naming explicitly what method is being versioned as # it can be implicit based on the method decorated. It is a bit # ugly. 
                if VER_METHOD_ATTR in base.__dict__:
                    versioned_methods = getattr(base, VER_METHOD_ATTR)
                    delattr(base, VER_METHOD_ATTR)

        # Harvest @wsgi.action and @wsgi.extends decorated callables
        # from the class body being created.
        for key, value in cls_dict.items():
            if not callable(value):
                continue
            if getattr(value, 'wsgi_action', None):
                actions[value.wsgi_action] = key
            elif getattr(value, 'wsgi_extends', None):
                extensions.append(value.wsgi_extends)

        # Add the actions and extensions to the class dict
        cls_dict['wsgi_actions'] = actions
        cls_dict['wsgi_extensions'] = extensions
        if versioned_methods:
            cls_dict[VER_METHOD_ATTR] = versioned_methods

        return super(ControllerMetaclass, mcs).__new__(mcs, name, bases,
                                                       cls_dict)


@six.add_metaclass(ControllerMetaclass)
class Controller(object):
    """Default controller."""

    # Subclasses set this to the view-builder class used to format
    # responses; instantiated lazily in __init__.
    _view_builder_class = None

    def __init__(self, view_builder=None):
        """Initialize controller with a view builder instance."""
        if view_builder:
            self._view_builder = view_builder
        elif self._view_builder_class:
            self._view_builder = self._view_builder_class()
        else:
            self._view_builder = None

    def __getattribute__(self, key):

        def version_select(*args, **kwargs):
            """Look for the method which matches the name supplied and
            version constraints and calls it with the supplied arguments.

            @return: Returns the result of the method called
            @raises: VersionNotFoundForAPIMethod if there is no method
                     which matches the name and version constraints
            """
            # The first arg to all versioned methods is always the request
            # object. The version for the request is attached to the
            # request object
            if len(args) == 0:
                ver = kwargs['req'].api_version_request
            else:
                ver = args[0].api_version_request

            # func_list is sorted by start_version descending (see
            # api_version below), so the first match is the newest
            # implementation that supports the requested version.
            func_list = self.versioned_methods[key]
            for func in func_list:
                if ver.matches(func.start_version, func.end_version):
                    # Update the version_select wrapper function so
                    # other decorator attributes like wsgi.response
                    # are still respected.
functools.update_wrapper(version_select, func.func) return func.func(self, *args, **kwargs) # No version match raise exception.VersionNotFoundForAPIMethod(version=ver) try: version_meth_dict = object.__getattribute__(self, VER_METHOD_ATTR) except AttributeError: # No versioning on this class return object.__getattribute__(self, key) if version_meth_dict and key in object.__getattribute__( self, VER_METHOD_ATTR): return version_select return object.__getattribute__(self, key) # NOTE: This decorator MUST appear first (the outermost # decorator) on an API method for it to work correctly @classmethod def api_version(cls, min_ver, max_ver=None): """Decorator for versioning api methods. Add the decorator to any method which takes a request object as the first parameter and belongs to a class which inherits from wsgi.Controller. @min_ver: string representing minimum version @max_ver: optional string representing maximum version """ def decorator(f): obj_min_ver = api_version.APIVersionRequest(min_ver) if max_ver: obj_max_ver = api_version.APIVersionRequest(max_ver) else: obj_max_ver = api_version.APIVersionRequest() # Add to list of versioned methods registered func_name = f.__name__ new_func = versioned_method.VersionedMethod( func_name, obj_min_ver, obj_max_ver, f) func_dict = getattr(cls, VER_METHOD_ATTR, {}) if not func_dict: setattr(cls, VER_METHOD_ATTR, func_dict) func_list = func_dict.get(func_name, []) if not func_list: func_dict[func_name] = func_list func_list.append(new_func) # Ensure the list is sorted by minimum version (reversed) # so later when we work through the list in order we find # the method which has the latest version which supports # the version requested. 
            is_intersect = Controller.check_for_versions_intersection(
                func_list)

            if is_intersect:
                # Two implementations of the same method must not claim
                # overlapping version ranges.
                raise exception.ApiVersionsIntersect(
                    name=new_func.name,
                    min_ver=new_func.start_version,
                    max_ver=new_func.end_version,
                )

            # Ensure the list is sorted by minimum version (reversed)
            # so later when we work through the list in order we find
            # the method which has the latest version which supports
            # the version requested.
            func_list.sort(key=lambda f: f.start_version, reverse=True)

            return f

        return decorator

    @staticmethod
    def is_valid_body(body, entity_name):
        # True only when body is truthy, contains entity_name, and the
        # value behind entity_name is dict-like.
        if not (body and entity_name in body):
            return False

        def is_dict(d):
            # Probe for a Mapping-style .get() instead of isinstance,
            # so dict subclasses and dict-likes are accepted.
            try:
                d.get(None)
                return True
            except AttributeError:
                return False

        return is_dict(body[entity_name])

    @staticmethod
    def check_for_versions_intersection(func_list):
        """Determines function list contains version intervals intersections.

        General algorithm:
        https://en.wikipedia.org/wiki/Intersection_algorithm

        :param func_list: list of VersionedMethod objects
        :return: boolean
        """
        # Sweep-line over interval endpoints: +1 at each start, -1 at
        # each end; a running count above 1 means two intervals overlap.
        pairs = []
        counter = 0

        for f in func_list:
            pairs.append((f.start_version, 1, f))
            pairs.append((f.end_version, -1, f))

        def compare(x):
            return x[0]

        pairs.sort(key=compare)

        for p in pairs:
            counter += p[1]

            if counter > 1:
                return True

        return False


class Fault(webob.exc.HTTPException):
    """Wrap webob.exc.HTTPException to provide API friendly response."""

    # Map HTTP status codes to the fault-name key used in the JSON
    # error body.
    _fault_names = {
        http.BAD_REQUEST: "badRequest",
        http.UNAUTHORIZED: "unauthorized",
        http.FORBIDDEN: "forbidden",
        http.NOT_FOUND: "itemNotFound",
        http.METHOD_NOT_ALLOWED: "badMethod",
        http.CONFLICT: "conflictingRequest",
        http.REQUEST_ENTITY_TOO_LARGE: "overLimit",
        http.UNSUPPORTED_MEDIA_TYPE: "badMediaType",
        http.NOT_IMPLEMENTED: "notImplemented",
        http.SERVICE_UNAVAILABLE: "serviceUnavailable",
        # TODO(Dinesh_Bhor) Replace it with symbolic constant when it is
        # defined in six.moves.http_client
        429: "overLimit"
    }

    def __init__(self, exception):
        """Create a Fault for the given webob.exc.exception."""
        self.wrapped_exc = exception
        # Normalize header values to str for webob.
        for key, value in list(self.wrapped_exc.headers.items()):
            self.wrapped_exc.headers[key] = str(value)
        self.status_int = exception.status_int

    @webob.dec.wsgify(RequestClass=Request)
    def __call__(self, req):
        """Generate a WSGI
response based on the exception passed to ctor.""" user_locale = req.best_match_language() # Replace the body with fault details. code = self.wrapped_exc.status_int fault_name = self._fault_names.get(code, "masakariFault") explanation = self.wrapped_exc.explanation LOG.debug("Returning %(code)s to user: %(explanation)s", {'code': code, 'explanation': explanation}) explanation = i18n.translate(explanation, user_locale) fault_data = { fault_name: { 'code': code, 'message': explanation}} if code == http.REQUEST_ENTITY_TOO_LARGE or code == 429: retry = self.wrapped_exc.headers.get('Retry-After', None) if retry: fault_data[fault_name]['retryAfter'] = retry if not req.api_version_request.is_null(): self.wrapped_exc.headers[API_VERSION_REQUEST_HEADER] = \ 'instance-ha ' + req.api_version_request.get_string() self.wrapped_exc.headers.add('Vary', API_VERSION_REQUEST_HEADER) self.wrapped_exc.content_type = 'application/json' self.wrapped_exc.charset = 'UTF-8' self.wrapped_exc.text = JSONDictSerializer().serialize(fault_data) return self.wrapped_exc def __str__(self): return self.wrapped_exc.__str__() masakari-9.0.0/masakari/api/openstack/ha/0000775000175000017500000000000013656750011020256 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/ha/__init__.py0000664000175000017500000000247513656747723022417 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
""" WSGI middleware for OpenStack Compute API. """ import masakari.api.openstack from masakari.api.openstack.ha import extension_info import masakari.conf CONF = masakari.conf.CONF class APIRouterV1(masakari.api.openstack.APIRouterV1): """Routes requests on the OpenStack API to the appropriate controller and method. """ def __init__(self, init_only=None): self._loaded_extension_info = extension_info.LoadedExtensionInfo() super(APIRouterV1, self).__init__(init_only) def _register_extension(self, ext): return self.loaded_extension_info.register_extension(ext.obj) @property def loaded_extension_info(self): return self._loaded_extension_info masakari-9.0.0/masakari/api/openstack/ha/hosts.py0000664000175000017500000001660613656747723022021 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""The Host API extension.""" from oslo_utils import encodeutils from oslo_utils import strutils from six.moves import http_client as http from webob import exc from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import hosts as schema from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as host_api from masakari.i18n import _ from masakari import objects from masakari.policies import hosts as host_policies ALIAS = "os-hosts" class HostsController(wsgi.Controller): """The Host API controller for the OpenStack API.""" def __init__(self): self.api = host_api.HostAPI() @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN, http.NOT_FOUND)) def index(self, req, segment_id): """Returns a list a hosts.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'index') try: filters = {} limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) segment = objects.FailoverSegment.get_by_uuid(context, segment_id) filters['failover_segment_id'] = segment.uuid if 'name' in req.params: filters['name'] = req.params['name'] if 'type' in req.params: filters['type'] = req.params['type'] if 'control_attributes' in req.params: filters['control_attributes'] = req.params[ 'control_attributes'] if 'on_maintenance' in req.params: try: filters['on_maintenance'] = strutils.bool_from_string( req.params['on_maintenance'], strict=True) except ValueError as ex: msg = _("Invalid value for on_maintenance: " "%s") % encodeutils.exception_to_unicode(ex) raise exc.HTTPBadRequest(explanation=msg) if 'reserved' in req.params: try: filters['reserved'] = strutils.bool_from_string( req.params['reserved'], strict=True) except ValueError as ex: msg = _("Invalid value for reserved: " "%s") % encodeutils.exception_to_unicode(ex) raise exc.HTTPBadRequest(explanation=msg) hosts = 
self.api.get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) except exception.MarkerNotFound as ex: raise exc.HTTPBadRequest(explanation=ex.format_message()) except exception.Invalid as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.FailoverSegmentNotFound as ex: raise exc.HTTPNotFound(explanation=ex.format_message()) return {'hosts': hosts} @wsgi.response(http.CREATED) @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN, http.NOT_FOUND, http.CONFLICT)) @validation.schema(schema.create) def create(self, req, segment_id, body): """Creates a host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'create') host_data = body.get('host') try: host = self.api.create_host(context, segment_id, host_data) except exception.HypervisorNotFoundByName as e: raise exc.HTTPBadRequest(explanation=e.message) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.HostExists as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'host': host} @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND)) def show(self, req, segment_id, id): """Shows the details of a host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'detail') try: host = self.api.get_host(context, segment_id, id) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return {'host': host} @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN, http.NOT_FOUND, http.CONFLICT)) @validation.schema(schema.update) def update(self, req, segment_id, id, body): """Updates the existing host.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'update') host_data = body.get('host') try: host = self.api.update_host(context, 
segment_id, id, host_data) except exception.HypervisorNotFoundByName as e: raise exc.HTTPBadRequest(explanation=e.message) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except (exception.HostExists, exception.Conflict) as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'host': host} @wsgi.response(http.NO_CONTENT) @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND, http.CONFLICT)) def delete(self, req, segment_id, id): """Removes a host by id.""" context = req.environ['masakari.context'] context.can(host_policies.HOSTS % 'delete') try: self.api.delete_host(context, segment_id, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.HostNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.Conflict as e: raise exc.HTTPConflict(explanation=e.format_message()) class Hosts(extensions.V1APIExtensionBase): """Hosts controller""" name = "Hosts" alias = ALIAS version = 1 def get_resources(self): parent = {'member_name': 'segment', 'collection_name': 'segments'} resources = [ extensions.ResourceExtension( 'hosts', HostsController(), parent=parent, member_name='host')] return resources def get_controller_extensions(self): return [] masakari-9.0.0/masakari/api/openstack/ha/segments.py0000664000175000017500000001256613656747723022507 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA. # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from six.moves import http_client as http from webob import exc from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import segments as schema from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as segment_api from masakari.policies import segments as segment_policies ALIAS = 'segments' class SegmentsController(wsgi.Controller): """Segments controller for the OpenStack API.""" def __init__(self): self.api = segment_api.FailoverSegmentAPI() @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN)) def index(self, req): """Returns a summary list of failover segments.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'index') try: limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) filters = {} if 'recovery_method' in req.params: filters['recovery_method'] = req.params['recovery_method'] if 'service_type' in req.params: filters['service_type'] = req.params['service_type'] segments = self.api.get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) except exception.MarkerNotFound as e: raise exc.HTTPBadRequest(explanation=e.format_message()) except exception.Invalid as e: raise exc.HTTPBadRequest(explanation=e.format_message()) return {'segments': segments} @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND)) def show(self, req, id): """Return data about the given 
segment id.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'detail') try: segment = self.api.get_segment(context, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) return {'segment': segment} @wsgi.response(http.CREATED) @extensions.expected_errors((http.FORBIDDEN, http.CONFLICT)) @validation.schema(schema.create) def create(self, req, body): """Creates a new failover segment.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'create') segment_data = body['segment'] try: segment = self.api.create_segment(context, segment_data) except exception.FailoverSegmentExists as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'segment': segment} @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND, http.CONFLICT)) @validation.schema(schema.update) def update(self, req, id, body): """Updates the existing segment.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'update') segment_data = body['segment'] try: segment = self.api.update_segment(context, id, segment_data) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except (exception.FailoverSegmentExists, exception.Conflict) as e: raise exc.HTTPConflict(explanation=e.format_message()) return {'segment': segment} @wsgi.response(http.NO_CONTENT) @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND, http.CONFLICT)) def delete(self, req, id): """Removes a segment by uuid.""" context = req.environ['masakari.context'] context.can(segment_policies.SEGMENTS % 'delete') try: self.api.delete_segment(context, id) except exception.FailoverSegmentNotFound as e: raise exc.HTTPNotFound(explanation=e.format_message()) except exception.Conflict as e: raise exc.HTTPConflict(explanation=e.format_message()) class Segments(extensions.V1APIExtensionBase): """Segments Extension.""" name = "Segments" alias 
= ALIAS version = 1 def get_resources(self): member_actions = {'action': 'POST'} resources = [ extensions.ResourceExtension(ALIAS, SegmentsController(), member_name='segment', member_actions=member_actions) ] return resources def get_controller_extensions(self): return [] masakari-9.0.0/masakari/api/openstack/ha/versionsV1.py0000664000175000017500000000356213656747723022735 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from six.moves import http_client import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack.ha import versions from masakari.api.openstack.ha.views import versions as views_versions from masakari.api.openstack import wsgi ALIAS = "versions" class VersionsController(wsgi.Controller): @extensions.expected_errors(http_client.NOT_FOUND) def show(self, req, id='v1'): builder = views_versions.get_view_builder(req) if id not in versions.VERSIONS: raise webob.exc.HTTPNotFound() return builder.build_version(versions.VERSIONS[id]) class Versions(extensions.V1APIExtensionBase): """API Version information.""" name = "Versions" alias = ALIAS version = 1 def get_resources(self): resources = [ extensions.ResourceExtension(ALIAS, VersionsController(), custom_routes_fn=self.version_map)] return resources def get_controller_extensions(self): return [] def version_map(self, mapper, wsgi_resource): mapper.connect("versions", "/", controller=wsgi_resource, action='show', 
conditions={"method": ['GET']}) mapper.redirect("", "/") masakari-9.0.0/masakari/api/openstack/ha/extension_info.py0000664000175000017500000001044613656747723023704 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from six.moves import http_client import webob.exc from masakari.api.openstack import extensions from masakari.api.openstack import wsgi from masakari import exception from masakari.policies import base as base_policies from masakari.policies import extension_info as extension_policies ALIAS = 'extensions' LOG = logging.getLogger(__name__) class FakeExtension(object): def __init__(self, name, alias, description=""): self.name = name self.alias = alias self.__doc__ = description self.version = -1 class ExtensionInfoController(wsgi.Controller): def __init__(self, extension_info): self.extension_info = extension_info def _translate(self, ext): ext_data = {"name": ext.name, "alias": ext.alias, "description": ext.__doc__, "namespace": "", "updated": "", "links": []} return ext_data def _create_fake_ext(self, name, alias, description=""): return FakeExtension(name, alias, description) def _get_extensions(self, context): """Filter extensions list based on policy.""" discoverable_extensions = dict() for alias, ext in self.extension_info.get_extensions().items(): action = ':'.join([ base_policies.MASAKARI_API, alias, 'discoverable']) if context.can(action, fatal=False): 
discoverable_extensions[alias] = ext else: LOG.debug("Filter out extension %s from discover list", alias) return discoverable_extensions @extensions.expected_errors(()) def index(self, req): context = req.environ['masakari.context'] context.can(extension_policies.EXTENSIONS % 'index') discoverable_extensions = self._get_extensions(context) sorted_ext_list = sorted(discoverable_extensions.items()) extensions = [] for _alias, ext in sorted_ext_list: extensions.append(self._translate(ext)) return dict(extensions=extensions) @extensions.expected_errors(http_client.NOT_FOUND) def show(self, req, id): context = req.environ['masakari.context'] context.can(extension_policies.EXTENSIONS % 'detail') try: ext = self._get_extensions(context)[id] except KeyError: raise webob.exc.HTTPNotFound() return dict(extension=self._translate(ext)) class ExtensionInfo(extensions.V1APIExtensionBase): """Extension information.""" name = "Extensions" alias = ALIAS version = 1 def get_resources(self): resources = [ extensions.ResourceExtension( ALIAS, ExtensionInfoController(self.extension_info), member_name='extension')] return resources def get_controller_extensions(self): return [] class LoadedExtensionInfo(object): """Keep track of all loaded API extensions.""" def __init__(self): self.extensions = {} def register_extension(self, ext): if not self._check_extension(ext): return False alias = ext.alias if alias in self.extensions: raise exception.MasakariException( "Found duplicate extension: %s" % alias) self.extensions[alias] = ext return True def _check_extension(self, extension): """Checks for required methods in extension objects.""" try: extension.is_valid() except AttributeError: LOG.exception("Exception loading extension") return False return True def get_extensions(self): return self.extensions masakari-9.0.0/masakari/api/openstack/ha/schemas/0000775000175000017500000000000013656750011021701 5ustar 
zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/ha/schemas/__init__.py0000664000175000017500000000000013656747723024020 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/ha/schemas/hosts.py0000664000175000017500000000342313656747723023435 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from masakari.api.validation import parameter_types _base = { 'type': 'object', 'properties': { 'host': { 'type': 'object', 'properties': { 'name': parameter_types.name, 'type': parameter_types.type, 'control_attributes': parameter_types.description, 'reserved': parameter_types.boolean, 'on_maintenance': parameter_types.boolean }, 'additionalProperties': False } }, 'required': ['host'], 'additionalProperties': False } create = copy.deepcopy(_base) create['properties']['host']['required'] = ['name', 'type', 'control_attributes'] update = copy.deepcopy(_base) update['properties']['host']['anyOf'] = [{'required': ['name']}, {'required': ['type']}, {'required': ['control_attributes']}, {'required': ['reserved']}, {'required': ['on_maintenance']}, ] masakari-9.0.0/masakari/api/openstack/ha/schemas/segments.py0000664000175000017500000000352313656747723024123 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA. # All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from masakari.api.validation import parameter_types _base = { 'type': 'object', 'properties': { 'segment': { 'type': 'object', 'properties': { 'name': parameter_types.name, 'description': parameter_types.description, 'recovery_method': { 'type': 'string', 'enum': ["auto", "reserved_host", "auto_priority", "rh_priority"] }, 'service_type': parameter_types.name }, 'additionalProperties': False } }, 'required': ['segment'], 'additionalProperties': False } create = copy.deepcopy(_base) create['properties']['segment']['required'] = ['name', 'recovery_method', 'service_type'] update = copy.deepcopy(_base) update['properties']['segment']['anyOf'] = [{'required': ['name']}, {'required': ['description']}, {'required': ['recovery_method']}, {'required': ['service_type']}, ] masakari-9.0.0/masakari/api/openstack/ha/schemas/notifications.py0000664000175000017500000000265313656747723025152 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.api.validation import parameter_types from masakari.objects import fields create = { 'type': 'object', 'properties': { 'notification': { 'type': 'object', 'properties': { 'type': { 'type': 'string', 'enum': fields.NotificationType.ALL, }, 'hostname': parameter_types.hostname, 'generated_time': { 'type': 'string', 'format': 'date-time', }, 'payload': parameter_types.payload, }, 'required': ['type', 'hostname', 'generated_time', 'payload'], 'additionalProperties': False } }, 'required': ['notification'], 'additionalProperties': False } masakari-9.0.0/masakari/api/openstack/ha/schemas/payload.py0000664000175000017500000000365513656747723023735 0ustar zuulzuul00000000000000# Copyright 2018 NTT DATA. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from masakari.objects import fields create_compute_host_payload = { 'type': 'object', 'properties': { 'host_status': { 'enum': fields.HostStatusType.ALL, 'type': 'string'}, 'cluster_status': { 'enum': fields.ClusterStatusType.ALL, 'type': 'string'}, 'event': { 'enum': fields.EventType.ALL, 'type': 'string'}, }, 'required': ['event'], 'additionalProperties': False } create_process_payload = { 'type': 'object', 'properties': { 'process_name': { 'type': 'string', 'minLength': 1, 'maxLength': 4096}, 'event': { 'enum': fields.EventType.ALL, 'type': 'string'}, }, 'required': ['process_name', 'event'], 'additionalProperties': False } create_vm_payload = { 'type': 'object', 'properties': { 'instance_uuid': { 'type': 'string', 'format': 'uuid'}, 'vir_domain_event': { 'type': 'string', 'minLength': 1, 'maxLength': 255}, 'event': { 'type': 'string', 'minLength': 1, 'maxLength': 255}, }, 'required': ['instance_uuid', 'vir_domain_event', 'event'], 'additionalProperties': False } masakari-9.0.0/masakari/api/openstack/ha/notifications.py0000664000175000017500000001416113656747723023524 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_utils import timeutils from six.moves import http_client as http from webob import exc from masakari.api import api_version_request from masakari.api.openstack import common from masakari.api.openstack import extensions from masakari.api.openstack.ha.schemas import notifications as schema from masakari.api.openstack.ha.schemas import payload as payload_schema from masakari.api.openstack import wsgi from masakari.api import validation from masakari import exception from masakari.ha import api as notification_api from masakari.i18n import _ from masakari.objects import fields from masakari.policies import notifications as notifications_policies ALIAS = 'notifications' class NotificationsController(wsgi.Controller): """Notifications controller for the OpenStack API.""" def __init__(self): self.api = notification_api.NotificationAPI() @validation.schema(payload_schema.create_process_payload) def _validate_process_payload(self, req, body): pass @validation.schema(payload_schema.create_vm_payload) def _validate_vm_payload(self, req, body): pass @validation.schema(payload_schema.create_compute_host_payload) def _validate_comp_host_payload(self, req, body): pass @wsgi.response(http.ACCEPTED) @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN, http.CONFLICT)) @validation.schema(schema.create) def create(self, req, body): """Creates a new notification.""" context = req.environ['masakari.context'] context.can(notifications_policies.NOTIFICATIONS % 'create') notification_data = body['notification'] if notification_data['type'] == fields.NotificationType.PROCESS: self._validate_process_payload(req, body=notification_data['payload']) if notification_data['type'] == fields.NotificationType.VM: self._validate_vm_payload(req, body=notification_data['payload']) if notification_data['type'] == fields.NotificationType.COMPUTE_HOST: self._validate_comp_host_payload(req, body=notification_data['payload']) try: notification = self.api.create_notification( context, 
notification_data) except exception.HostNotFoundByName as err: raise exc.HTTPBadRequest(explanation=err.format_message()) except (exception.DuplicateNotification, exception.HostOnMaintenanceError) as err: raise exc.HTTPConflict(explanation=err.format_message()) return {'notification': notification} @extensions.expected_errors((http.BAD_REQUEST, http.FORBIDDEN)) def index(self, req): """Returns a summary list of notifications.""" context = req.environ['masakari.context'] context.can(notifications_policies.NOTIFICATIONS % 'index') try: limit, marker = common.get_limit_and_marker(req) sort_keys, sort_dirs = common.get_sort_params(req.params) filters = {} if 'status' in req.params: filters['status'] = req.params['status'] if 'source_host_uuid' in req.params: filters['source_host_uuid'] = req.params['source_host_uuid'] if 'type' in req.params: filters['type'] = req.params['type'] if 'generated-since' in req.params: try: parsed = timeutils.parse_isotime( req.params['generated-since']) except ValueError: msg = _('Invalid generated-since value') raise exc.HTTPBadRequest(explanation=msg) filters['generated-since'] = parsed notifications = self.api.get_all(context, filters, sort_keys, sort_dirs, limit, marker) except exception.MarkerNotFound as err: raise exc.HTTPBadRequest(explanation=err.format_message()) except exception.Invalid as err: raise exc.HTTPBadRequest(explanation=err.format_message()) return {'notifications': notifications} @extensions.expected_errors((http.FORBIDDEN, http.NOT_FOUND)) def show(self, req, id): """Return data about the given notification id.""" context = req.environ['masakari.context'] context.can(notifications_policies.NOTIFICATIONS % 'detail') try: if api_version_request.is_supported(req, min_version='1.1'): notification = ( self.api.get_notification_recovery_workflow_details( context, id)) else: notification = self.api.get_notification(context, id) except exception.NotificationNotFound as err: raise 
exc.HTTPNotFound(explanation=err.format_message()) return {'notification': notification} class Notifications(extensions.V1APIExtensionBase): """Notifications support.""" name = "Notifications" alias = ALIAS version = 1 def get_resources(self): member_actions = {'action': 'POST'} resources = [ extensions.ResourceExtension(ALIAS, NotificationsController(), member_name='notification', member_actions=member_actions) ] return resources def get_controller_extensions(self): return [] masakari-9.0.0/masakari/api/openstack/ha/versions.py0000664000175000017500000000426713656747723022531 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from six.moves import http_client from masakari.api.openstack.ha.views import versions as views_versions from masakari.api.openstack import wsgi CONF = cfg.CONF LINKS = { 'v1.0': { 'html': 'https://docs.openstack.org/' }, } VERSIONS = { "v1.0": { "id": "v1.0", "status": "CURRENT", "version": "1.0", "min_version": "1.0", "updated": "2016-07-01T11:33:21Z", "links": [ { "rel": "describedby", "type": "text/html", "href": LINKS['v1.0']['html'], }, ], "media-types": [ { "base": "application/json", "type": "application/vnd.openstack.masakari+json;version=1", } ], } } class Versions(wsgi.Resource): def __init__(self): super(Versions, self).__init__(None) def index(self, req, body=None): """Return all versions.""" builder = views_versions.get_view_builder(req) return builder.build_versions(VERSIONS) @wsgi.response(http_client.MULTIPLE_CHOICES) def multi(self, req, body=None): """Return multiple choices.""" builder = views_versions.get_view_builder(req) return builder.build_choices(VERSIONS, req) def get_action_args(self, request_environment): """Parse dictionary created by routes library.""" args = {} if request_environment['PATH_INFO'] == '/': args['action'] = 'index' else: args['action'] = 'multi' return args masakari-9.0.0/masakari/api/openstack/ha/views/0000775000175000017500000000000013656750011021413 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/ha/views/__init__.py0000664000175000017500000000000013656747723023532 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/api/openstack/ha/views/versions.py0000664000175000017500000000613713656747723023664 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from masakari.api.openstack import common def get_view_builder(req): base_url = req.application_url return ViewBuilder(base_url) class ViewBuilder(common.ViewBuilder): def __init__(self, base_url): """:param base_url: url of the root wsgi application.""" self.prefix = self._update_masakari_link_prefix(base_url) self.base_url = base_url def build_choices(self, VERSIONS, req): version_objs = [] for version in sorted(VERSIONS): version = VERSIONS[version] version_objs.append({ "id": version['id'], "status": version['status'], "links": [ { "rel": "self", "href": self.generate_href(version['id'], req.path), }, ], "media-types": version['media-types'], }) return dict(choices=version_objs) def build_versions(self, versions): version_objs = [] for version in sorted(versions.keys()): version = versions[version] version_objs.append({ "id": version['id'], "status": version['status'], "version": version['version'], "min_version": version['min_version'], "updated": version['updated'], "links": self._build_links(version), }) return dict(versions=version_objs) def build_version(self, version): reval = copy.deepcopy(version) reval['links'].insert(0, { "rel": "self", "href": self.prefix.rstrip('/') + '/', }) return dict(version=reval) def _build_links(self, version_data): """Generate a container of links that refer to the provided version.""" href = self.generate_href(version_data['id']) links = [ { "rel": "self", "href": href, }, ] return links def generate_href(self, version, path=None): """Create an url that refers to a specific version_number.""" # TODO(Dinesh_Bhor) 
When there will be increment in version then we can # define the default version_number according to the 'version' given # but currently the 'version_number' should be 'v1' by default. version_number = 'v1' path = path or '' return common.url_join(self.prefix, version_number, path) masakari-9.0.0/masakari/api/openstack/extensions.py0000664000175000017500000003233013656747723022460 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import abc import functools import os from oslo_log import log as logging from oslo_utils import importutils import six import webob.dec import webob.exc from masakari.api.openstack import wsgi from masakari import exception from masakari.i18n import _ LOG = logging.getLogger(__name__) class ExtensionDescriptor(object): """Base class that defines the contract for extensions. Note that you don't have to derive from this class to have a valid extension; it is purely a convenience. """ # The name of the extension, e.g., 'Fox In Socks' name = None # The alias for the extension, e.g., 'FOXNSOX' alias = None # Description comes from the docstring for the class # The timestamp when the extension was last updated, e.g., # '2011-01-22T19:25:27Z' updated = None def __init__(self, ext_mgr): """Register extension with the extension manager.""" ext_mgr.register(self) self.ext_mgr = ext_mgr def get_resources(self): """List of extensions.ResourceExtension extension objects. 
Resources define new nouns, and are accessible through URLs. """ resources = [] return resources def get_controller_extensions(self): """List of extensions.ControllerExtension extension objects. Controller extensions are used to extend existing controllers. """ controller_exts = [] return controller_exts def __repr__(self): return "" % ( self.name, self.alias, self.updated) def is_valid(self): """Validate required fields for extensions. Raises an attribute error if the attr is not defined """ for attr in ('name', 'alias', 'updated', 'namespace'): if getattr(self, attr) is None: raise AttributeError("%s is None, needs to be defined" % attr) return True class ExtensionsController(wsgi.Resource): def __init__(self, extension_manager): self.extension_manager = extension_manager super(ExtensionsController, self).__init__(None) def _translate(self, ext): ext_data = {'name': ext.name, 'alias': ext.alias, 'description': ext.__doc__, 'namespace': ext.namespace, 'updated': ext.updated, 'links': []} return ext_data def index(self, req): extensions = [] for ext in self.extension_manager.sorted_extensions(): extensions.append(self._translate(ext)) return dict(extensions=extensions) def show(self, req, id): try: ext = self.extension_manager.extensions[id] except KeyError: raise webob.exc.HTTPNotFound() return dict(extension=self._translate(ext)) def delete(self, req, id): raise webob.exc.HTTPNotFound() def create(self, req, body): raise webob.exc.HTTPNotFound() class ExtensionManager(object): """Load extensions from the configured extension path.""" def sorted_extensions(self): if self.sorted_ext_list is None: self.sorted_ext_list = sorted(self.extensions.items()) for _alias, ext in self.sorted_ext_list: yield ext def is_loaded(self, alias): return alias in self.extensions def register(self, ext): # Do nothing if the extension doesn't check out if not self._check_extension(ext): return alias = ext.alias if alias in self.extensions: raise exception.MasakariException( "Found 
duplicate extension: %s" % alias) self.extensions[alias] = ext self.sorted_ext_list = None def get_resources(self): """Returns a list of ResourceExtension objects.""" resources = [] resources.append(ResourceExtension('extensions', ExtensionsController(self))) for ext in self.sorted_extensions(): try: resources.extend(ext.get_resources()) except AttributeError: pass return resources def get_controller_extensions(self): """Returns a list of ControllerExtension objects.""" controller_exts = [] for ext in self.sorted_extensions(): try: get_ext_method = ext.get_controller_extensions except AttributeError: continue controller_exts.extend(get_ext_method()) return controller_exts def _check_extension(self, extension): """Checks for required methods in extension objects.""" try: extension.is_valid() except AttributeError: LOG.exception("Exception loading extension") return False return True def load_extension(self, ext_factory): """Execute an extension factory. Loads an extension. The 'ext_factory' is the name of a callable that will be imported and called with one argument--the extension manager. The factory callable is expected to call the register() method at least once. """ LOG.debug("Loading extension %s", ext_factory) if isinstance(ext_factory, six.string_types): # Load the factory factory = importutils.import_class(ext_factory) else: factory = ext_factory # Call it LOG.debug("Calling extension factory %s", ext_factory) factory(self) def _load_extensions(self): """Load extensions specified on the command line.""" extensions = list(self.cls_list) for ext_factory in extensions: try: self.load_extension(ext_factory) except Exception as exc: LOG.warning('Failed to load extension %(ext_factory)s: ' '%(exc)s', {'ext_factory': ext_factory, 'exc': exc}) class ControllerExtension(object): """Extend core controllers of masakari OpenStack API. Provide a way to extend existing masakari OpenStack API core controllers. 
""" def __init__(self, extension, collection, controller): self.extension = extension self.collection = collection self.controller = controller class ResourceExtension(object): """Add top level resources to the OpenStack API in masakari.""" def __init__(self, collection, controller=None, parent=None, collection_actions=None, member_actions=None, custom_routes_fn=None, inherits=None, member_name=None): if not collection_actions: collection_actions = {} if not member_actions: member_actions = {} self.collection = collection self.controller = controller self.parent = parent self.collection_actions = collection_actions self.member_actions = member_actions self.custom_routes_fn = custom_routes_fn self.inherits = inherits self.member_name = member_name def load_standard_extensions(ext_mgr, logger, path, package, ext_list=None): """Registers all standard API extensions.""" # Walk through all the modules in our directory... our_dir = path[0] for dirpath, dirnames, filenames in os.walk(our_dir): # Compute the relative package name from the dirpath relpath = os.path.relpath(dirpath, our_dir) if relpath == '.': relpkg = '' else: relpkg = '.%s' % '.'.join(relpath.split(os.sep)) # Now, consider each file in turn, only considering .py files for fname in filenames: root, ext = os.path.splitext(fname) # Skip __init__ and anything that's not .py if ext != '.py' or root == '__init__': continue # Try loading it classname = "%s%s" % (root[0].upper(), root[1:]) classpath = ("%s%s.%s.%s" % (package, relpkg, root, classname)) if ext_list is not None and classname not in ext_list: logger.debug("Skipping extension: %s" % classpath) continue try: ext_mgr.load_extension(classpath) except Exception as exc: logger.warn('Failed to load extension %(classpath)s: %(exc)s', {'classpath': classpath, 'exc': exc}) # Now, let's consider any subdirectories we may have... 
subdirs = [] for dname in dirnames: # Skip it if it does not have __init__.py if not os.path.exists(os.path.join(dirpath, dname, '__init__.py')): continue # If it has extension(), delegate... ext_name = "%s%s.%s.extension" % (package, relpkg, dname) try: ext = importutils.import_class(ext_name) except ImportError: # extension() doesn't exist on it, so we'll explore # the directory for ourselves subdirs.append(dname) else: try: ext(ext_mgr) except Exception as exc: logger.warn('Failed to load extension %(ext_name)s:' '%(exc)s', {'ext_name': ext_name, 'exc': exc}) # Update the list of directories we'll explore... # using os.walk 'the caller can modify the dirnames list in-place, # and walk() will only recurse into the subdirectories whose names # remain in dirnames' # https://docs.python.org/2/library/os.html#os.walk dirnames[:] = subdirs @six.add_metaclass(abc.ABCMeta) class V1APIExtensionBase(object): """Abstract base class for all v1 API extensions. All v1 API extensions must derive from this class and implement the abstract methods get_resources and get_controller_extensions even if they just return an empty list. The extensions must also define the abstract properties. """ def __init__(self, extension_info): self.extension_info = extension_info @abc.abstractmethod def get_resources(self): """Return a list of resources extensions. The extensions should return a list of ResourceExtension objects. This list may be empty. """ pass @abc.abstractmethod def get_controller_extensions(self): """Return a list of controller extensions. The extensions should return a list of ControllerExtension objects. This list may be empty. 
""" pass @abc.abstractproperty def name(self): """Name of the extension.""" pass @abc.abstractproperty def alias(self): """Alias for the extension.""" pass @abc.abstractproperty def version(self): """Version of the extension.""" pass def __repr__(self): return "" % ( self.name, self.alias, self.version) def is_valid(self): """Validate required fields for extensions. Raises an attribute error if the attr is not defined """ for attr in ('name', 'alias', 'version'): if getattr(self, attr) is None: raise AttributeError("%s is None, needs to be defined" % attr) return True def expected_errors(errors): """Decorator for v1 API methods which specifies expected exceptions. Specify which exceptions may occur when an API method is called. If an unexpected exception occurs then return a 500 instead and ask the user of the API to file a bug report. """ def decorator(f): @functools.wraps(f) def wrapped(*args, **kwargs): try: return f(*args, **kwargs) except Exception as exc: if isinstance(exc, webob.exc.WSGIHTTPException): if isinstance(errors, int): t_errors = (errors,) else: t_errors = errors if exc.code in t_errors: raise elif isinstance(exc, exception.Forbidden): raise elif isinstance(exc, exception.ValidationError): raise LOG.exception("Unexpected exception in API method") msg = _('Unexpected API Error. Please report this at ' 'https://bugs.launchpad.net/masakari/ and attach the ' 'Masakari API log if possible.\n%s') % type(exc) raise webob.exc.HTTPInternalServerError(explanation=msg) return wrapped return decorator masakari-9.0.0/masakari/compute/0000775000175000017500000000000013656750011016602 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/compute/__init__.py0000664000175000017500000000136113656747723020734 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_utils import importutils def API(): cls = importutils.import_class("masakari.compute.nova.API") return cls() masakari-9.0.0/masakari/compute/nova.py0000664000175000017500000002336513656747723020150 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Handles all requests to Nova. 
""" import functools import sys from keystoneauth1 import exceptions as keystone_exception import keystoneauth1.loading import keystoneauth1.session from novaclient import api_versions from novaclient import client as nova_client from novaclient import exceptions as nova_exception from oslo_log import log as logging from oslo_utils import encodeutils from requests import exceptions as request_exceptions import six from masakari import conf from masakari import context as ctx from masakari import exception CONF = conf.CONF CONF.import_group('keystone_authtoken', 'keystonemiddleware.auth_token') LOG = logging.getLogger(__name__) NOVA_API_VERSION = "2.53" nova_extensions = [ext for ext in nova_client.discover_extensions(NOVA_API_VERSION) if ext.name in ("list_extensions",)] def _reraise(desired_exc): six.reraise(type(desired_exc), desired_exc, sys.exc_info()[2]) def translate_nova_exception(method): """Transforms a cinder exception but keeps its traceback intact.""" @functools.wraps(method) def wrapper(self, ctx, *args, **kwargs): try: res = method(self, ctx, *args, **kwargs) except (request_exceptions.Timeout, nova_exception.CommandError, keystone_exception.ConnectionError) as exc: err_msg = encodeutils.exception_to_unicode(exc) _reraise(exception.MasakariException(reason=err_msg)) except (keystone_exception.BadRequest, nova_exception.BadRequest) as exc: err_msg = encodeutils.exception_to_unicode(exc) _reraise(exception.InvalidInput(reason=err_msg)) except (keystone_exception.Forbidden, nova_exception.Forbidden) as exc: err_msg = encodeutils.exception_to_unicode(exc) _reraise(exception.Forbidden(err_msg)) except (nova_exception.NotFound) as exc: err_msg = encodeutils.exception_to_unicode(exc) _reraise(exception.NotFound(reason=err_msg)) except nova_exception.Conflict as exc: err_msg = encodeutils.exception_to_unicode(exc) _reraise(exception.Conflict(reason=err_msg)) return res return wrapper def novaclient(context, timeout=None): """Returns a Nova client @param 
timeout: Number of seconds to wait for an answer before raising a Timeout exception (None to disable) """ nova_catalog_info = CONF.nova_catalog_admin_info service_type, service_name, endpoint_type = nova_catalog_info.split(':') context = ctx.RequestContext( CONF.os_privileged_user_name, None, auth_token=CONF.os_privileged_user_password, project_name=CONF.os_privileged_user_tenant, service_catalog=context.service_catalog, global_request_id=context.global_id) # User needs to authenticate to Keystone before querying Nova, so we set # auth_url to the identity service endpoint url = CONF.os_privileged_user_auth_url LOG.debug('Creating a Nova client using "%s" user', CONF.os_privileged_user_name) # Now that we have the correct auth_url, username, password and # project_name, let's build a Keystone session. loader = keystoneauth1.loading.get_plugin_loader( CONF.keystone_authtoken.auth_type) auth = loader.load_from_options( auth_url=url, username=context.user_id, password=context.auth_token, project_name=context.project_name, user_domain_name=CONF.os_user_domain_name, project_domain_name=CONF.os_project_domain_name) keystone_session = keystoneauth1.session.Session(auth=auth) client_obj = nova_client.Client( api_versions.APIVersion(NOVA_API_VERSION), session=keystone_session, insecure=CONF.nova_api_insecure, timeout=timeout, global_request_id=context.global_id, region_name=CONF.os_region_name, endpoint_type=endpoint_type, service_type=service_type, service_name=service_name, cacert=CONF.nova_ca_certificates_file, extensions=nova_extensions) return client_obj class API(object): """API for interacting with novaclient.""" @translate_nova_exception def get_servers(self, context, host): """Get a list of servers running on a specified host.""" opts = { 'host': host, 'all_tenants': True } nova = novaclient(context) LOG.info('Fetch Server list on %s', host) return nova.servers.list(detailed=True, search_opts=opts) @translate_nova_exception def enable_disable_service(self, context, 
host_name, enable=False, reason=None): """Enable or disable the service specified by nova service id.""" nova = novaclient(context) service = nova.services.list(host=host_name, binary='nova-compute')[0] if not enable: LOG.info('Disable nova-compute on %s', host_name) if reason: nova.services.disable_log_reason(service.id, reason) else: nova.services.disable(service.id) else: LOG.info('Enable nova-compute on %s', host_name) nova.services.enable(service.id) @translate_nova_exception def is_service_down(self, context, host_name, binary): """Check whether service is up or down on given host.""" nova = novaclient(context) service = nova.services.list(host=host_name, binary=binary)[0] return service.status == 'disabled' @translate_nova_exception def evacuate_instance(self, context, uuid, target=None): """Evacuate an instance from failed host to specified host.""" msg = ('Call evacuate command for instance %(uuid)s on host ' '%(target)s') LOG.info(msg, {'uuid': uuid, 'target': target}) nova = novaclient(context) nova.servers.evacuate(uuid, host=target) @translate_nova_exception def reset_instance_state(self, context, uuid, status='error'): """Reset the state of an instance to active or error.""" msg = ('Call reset state command on instance %(uuid)s to ' 'status: %(status)s.') LOG.info(msg, {'uuid': uuid, 'status': status}) nova = novaclient(context) nova.servers.reset_state(uuid, status) @translate_nova_exception def get_server(self, context, uuid): """Get a server.""" nova = novaclient(context) msg = ('Call get server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.get(uuid) @translate_nova_exception def stop_server(self, context, uuid): """Stop a server.""" nova = novaclient(context) msg = ('Call stop server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.stop(uuid) @translate_nova_exception def start_server(self, context, uuid): """Start a server.""" nova = novaclient(context) msg = ('Call start server 
command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.start(uuid) @translate_nova_exception def get_aggregate_list(self, context): """Get all aggregate list.""" nova = novaclient(context) LOG.info('Call aggregate-list command to get list of all aggregates.') return nova.aggregates.list() @translate_nova_exception def add_host_to_aggregate(self, context, host, aggregate): """Add host to given aggregate.""" nova = novaclient(context) msg = ("Call add_host command to add host '%(host_name)s' to " "aggregate '%(aggregate_name)s'.") LOG.info(msg, {'host_name': host, 'aggregate_name': aggregate.name}) return nova.aggregates.add_host(aggregate.id, host) @translate_nova_exception def lock_server(self, context, uuid): """Lock a server.""" nova = novaclient(context) msg = ('Call lock server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.lock(uuid) @translate_nova_exception def unlock_server(self, context, uuid): """Unlock a server.""" nova = novaclient(context) msg = ('Call unlock server command for instance %(uuid)s') LOG.info(msg, {'uuid': uuid}) return nova.servers.unlock(uuid) @translate_nova_exception def hypervisor_search(self, context, hypervisor_name): """Search hypervisor with case sensitive hostname.""" nova = novaclient(context) msg = ("Call hypervisor search command to get list of matching " "hypervisor name '%(hypervisor_name)s'") LOG.info(msg, {'hypervisor_name': hypervisor_name}) try: hypervisors_list = nova.hypervisors.search(hypervisor_name) if hypervisor_name not in [host.hypervisor_hostname for host in hypervisors_list]: raise exception.HypervisorNotFoundByName( hypervisor_name=hypervisor_name) except nova_exception.NotFound: raise exception.HypervisorNotFoundByName( hypervisor_name=hypervisor_name) masakari-9.0.0/masakari/ha/0000775000175000017500000000000013656750011015516 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/ha/__init__.py0000664000175000017500000000000013656747723017635 
0ustar zuulzuul00000000000000masakari-9.0.0/masakari/ha/api.py0000664000175000017500000003675413656747723016700 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import datetime import traceback from oslo_log import log as logging from oslo_utils import excutils from oslo_utils import strutils from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari.compute import nova import masakari.conf from masakari.engine import rpcapi as engine_rpcapi from masakari import exception from masakari.i18n import _ from masakari import objects from masakari.objects import fields CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) def is_failover_segment_under_recovery(segment): filters = { 'status': [fields.NotificationStatus.NEW, fields.NotificationStatus.RUNNING, fields.NotificationStatus.ERROR] } return segment.is_under_recovery(filters=filters) class FailoverSegmentAPI(object): def get_segment(self, context, segment_uuid): """Get a single failover segment with the given segment_uuid.""" if uuidutils.is_uuid_like(segment_uuid): LOG.debug("Fetching failover segment by " "UUID", segment_uuid=segment_uuid) segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid ) else: LOG.debug("Failed to fetch failover " "segment by uuid %s", segment_uuid) raise exception.FailoverSegmentNotFound(id=segment_uuid) return segment def get_all(self, context, filters=None, sort_keys=None, sort_dirs=None, 
limit=None, marker=None): """Get all failover segments filtered by one of the given parameters. If there is no filter it will retrieve all segments in the system. The results will be sorted based on the list of sort keys in the 'sort_keys' parameter (first value is primary sort key, second value is secondary sort ket, etc.). For each sort key, the associated sort direction is based on the list of sort directions in the 'sort_dirs' parameter. """ LOG.debug("Searching by: %s", str(filters)) limited_segments = (objects.FailoverSegmentList. get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker)) return limited_segments def create_segment(self, context, segment_data): """Create segment""" segment = objects.FailoverSegment(context=context) # Populate segment object for create segment.name = segment_data.get('name') segment.description = segment_data.get('description') segment.recovery_method = segment_data.get('recovery_method') segment.service_type = segment_data.get('service_type') try: segment.create() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_segment_api(context, segment, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return segment def update_segment(self, context, uuid, segment_data): """Update the properties of a failover segment.""" segment = objects.FailoverSegment.get_by_uuid(context, uuid) if is_failover_segment_under_recovery(segment): msg = _("Failover segment %s can't be updated as " "it is in-use to process notifications.") % uuid LOG.error(msg) raise exception.FailoverSegmentInUse(msg) try: segment.update(segment_data) segment.save() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_segment_api(context, segment, action=fields.EventNotificationAction.SEGMENT_UPDATE, 
phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return segment def delete_segment(self, context, uuid): """Deletes the segment.""" segment = objects.FailoverSegment.get_by_uuid(context, uuid) if is_failover_segment_under_recovery(segment): msg = _("Failover segment (%s) can't be deleted as " "it is in-use to process notifications.") % uuid LOG.error(msg) raise exception.FailoverSegmentInUse(msg) try: segment.destroy() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_segment_api(context, segment, action=fields.EventNotificationAction.SEGMENT_DELETE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) class HostAPI(object): """The Host API to manage hosts""" def _is_valid_host_name(self, context, name): novaclient = nova.API() novaclient.hypervisor_search(context, name) def get_host(self, context, segment_uuid, host_uuid): """Get a host by id""" objects.FailoverSegment.get_by_uuid(context, segment_uuid) if uuidutils.is_uuid_like(host_uuid): LOG.debug("Fetching host by " "UUID", host_uuid=host_uuid) host = objects.Host.get_by_uuid(context, host_uuid) else: LOG.debug("Failed to fetch host by uuid %s", host_uuid) raise exception.HostNotFound(id=host_uuid) return host def get_all(self, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all hosts by filter""" LOG.debug("Searching by: %s", str(filters)) limited_hosts = objects.HostList.get_all(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return limited_hosts def create_host(self, context, segment_uuid, host_data): """Create host""" segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid) host = objects.Host(context=context) # Populate host object for create host.name = host_data.get('name') host.failover_segment_id = segment.uuid host.type = host_data.get('type') host.control_attributes = host_data.get('control_attributes') 
host.on_maintenance = strutils.bool_from_string( host_data.get('on_maintenance', False), strict=True) host.reserved = strutils.bool_from_string( host_data.get('reserved', False), strict=True) self._is_valid_host_name(context, host.name) try: host.create() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_host_api(context, host, action=fields.EventNotificationAction.HOST_CREATE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return host def update_host(self, context, segment_uuid, id, host_data): """Update the host""" segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid) host = objects.Host.get_by_uuid(context, id) if is_failover_segment_under_recovery(segment): msg = _("Host %s can't be updated as " "it is in-use to process notifications.") % host.uuid LOG.error(msg) raise exception.HostInUse(msg) if 'name' in host_data: self._is_valid_host_name(context, host_data.get('name')) if 'on_maintenance' in host_data: host_data['on_maintenance'] = strutils.bool_from_string( host_data['on_maintenance'], strict=True) if 'reserved' in host_data: host_data['reserved'] = strutils.bool_from_string( host_data['reserved'], strict=True) try: host.update(host_data) host.save() except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_host_api(context, host, action=fields.EventNotificationAction.HOST_UPDATE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return host def delete_host(self, context, segment_uuid, id): """Delete the host""" segment = objects.FailoverSegment.get_by_uuid(context, segment_uuid) host = objects.Host.get_by_uuid(context, id, segment_uuid=segment_uuid) if is_failover_segment_under_recovery(segment): msg = _("Host %s can't be deleted as " "it is in-use to process notifications.") % host.uuid LOG.error(msg) raise exception.HostInUse(msg) try: host.destroy() except Exception as e: with 
excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_host_api(context, host, action=fields.EventNotificationAction.HOST_DELETE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) class NotificationAPI(object): def __init__(self): self.engine_rpcapi = engine_rpcapi.EngineAPI() @staticmethod def _is_duplicate_notification(context, notification): # Get all the notifications by filters filters = { 'type': notification.type, 'status': [fields.NotificationStatus.NEW, fields.NotificationStatus.RUNNING], 'source_host_uuid': notification.source_host_uuid, 'generated-since': (notification.generated_time - datetime.timedelta( seconds=CONF.duplicate_notification_detection_interval)) } notifications_list = objects.NotificationList.get_all(context, filters=filters) for db_notification in notifications_list: # if payload is same notification should be considered as # duplicate if db_notification.payload == notification.payload: return True return False def create_notification(self, context, notification_data): """Create notification""" # Check whether host from which the notification came is already # present in failover segment or not host_name = notification_data.get('hostname') host_object = objects.Host.get_by_name(context, host_name) host_on_maintenance = host_object.on_maintenance if host_on_maintenance: message = (_("Notification received from host %(host)s of type " "'%(type)s' is ignored as the host is already under " "maintenance.") % { 'host': host_name, 'type': notification_data.get('type') }) raise exception.HostOnMaintenanceError(message=message) notification = objects.Notification(context=context) # Populate notification object for create notification.type = notification_data.get('type') notification.generated_time = notification_data.get('generated_time') notification.source_host_uuid = host_object.uuid notification.payload = notification_data.get('payload') notification.status = fields.NotificationStatus.NEW if 
self._is_duplicate_notification(context, notification): message = (_("Notification received from host %(host)s of " " type '%(type)s' is duplicate.") % {'host': host_name, 'type': notification.type}) raise exception.DuplicateNotification(message=message) try: notification.create() self.engine_rpcapi.process_notification(context, notification) except Exception as e: with excutils.save_and_reraise_exception(): tb = traceback.format_exc() api_utils.notify_about_notification_api(context, notification, action=fields.EventNotificationAction.NOTIFICATION_CREATE, phase=fields.EventNotificationPhase.ERROR, exception=e, tb=tb) return notification def get_all(self, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all notifications filtered by one of the given parameters. If there is no filter it will retrieve all notifications in the system. The results will be sorted based on the list of sort keys in the 'sort_keys' parameter (first value is primary sort key, second value is secondary sort ket, etc.). For each sort key, the associated sort direction is based on the list of sort directions in the 'sort_dirs' parameter. """ LOG.debug("Searching by: %s", str(filters)) limited_notifications = (objects.NotificationList. 
get_all(context, filters, sort_keys, sort_dirs, limit, marker)) return limited_notifications def get_notification(self, context, notification_uuid): """Get a single notification with the given notification_uuid.""" if uuidutils.is_uuid_like(notification_uuid): LOG.debug("Fetching notification by " "UUID", notification_uuid=notification_uuid) notification = objects.Notification.get_by_uuid(context, notification_uuid) else: LOG.debug("Failed to fetch notification by " "uuid %s", notification_uuid) raise exception.NotificationNotFound(id=notification_uuid) return notification def get_notification_recovery_workflow_details(self, context, notification_uuid): """Get recovery workflow details details of the notification""" notification = self.get_notification(context, notification_uuid) LOG.debug("Fetching recovery workflow details of a notification %s ", notification_uuid) notification = (self.engine_rpcapi. get_notification_recovery_workflow_details( context, notification)) return notification masakari-9.0.0/masakari/i18n.py0000664000175000017500000000202113656747723016272 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """oslo.i18n integration module. See https://docs.openstack.org/oslo.i18n/latest/user/usage.html. 
""" import oslo_i18n DOMAIN = 'masakari' _translators = oslo_i18n.TranslatorFactory(domain=DOMAIN) # The primary translation function using the well-known name "_" _ = _translators.primary def translate(value, user_locale): return oslo_i18n.translate(value, user_locale) def get_available_languages(): return oslo_i18n.get_available_languages(DOMAIN) masakari-9.0.0/masakari/service.py0000664000175000017500000002274013656747723017165 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Generic Node base class for all workers that run on hosts.""" import os import random import sys from oslo_concurrency import processutils from oslo_log import log as logging import oslo_messaging as messaging from oslo_service import service from oslo_utils import importutils import masakari.conf from masakari import context from masakari import exception from masakari.i18n import _ from masakari.objects import base as objects_base from masakari import rpc from masakari import utils from masakari import version from masakari import wsgi LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF def assert_eventlet_uses_monotonic_clock(): import eventlet.hubs as hubs import monotonic hub = hubs.get_hub() if hub.clock is not monotonic.monotonic: raise RuntimeError( 'eventlet hub is not using a monotonic clock - ' 'periodic tasks will be affected by drifts of system time.') class Service(service.Service): """Service object for binaries running on hosts. A service takes a manager and enables rpc by listening to queues based on topic. It also periodically runs tasks on the manager. 
""" def __init__(self, host, binary, topic, manager, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None): super(Service, self).__init__() if not rpc.initialized(): rpc.init(CONF) self.host = host self.binary = binary self.topic = topic self.manager_class_name = manager manager_class = importutils.import_class(self.manager_class_name) self.rpcserver = None self.manager = manager_class(host=self.host) self.periodic_enable = periodic_enable self.periodic_fuzzy_delay = periodic_fuzzy_delay self.periodic_interval_max = periodic_interval_max def __repr__(self): return "<%(cls_name)s: host=%(host)s, binary=%(binary)s, " \ "manager_class_name=%(manager)s>" %\ { 'cls_name': self.__class__.__name__, 'host': self.host, 'binary': self.binary, 'manager': self.manager_class_name } def start(self): assert_eventlet_uses_monotonic_clock() verstr = version.version_string_with_package() LOG.info('Starting %(topic)s (version %(version)s)', { 'topic': self.topic, 'version': verstr }) self.basic_config_check() LOG.debug("Creating RPC server for service %s", self.topic) target = messaging.Target(topic=self.topic, server=self.host) endpoints = [self.manager] serializer = objects_base.MasakariObjectSerializer() self.rpcserver = rpc.get_server(target, endpoints, serializer) self.rpcserver.start() if self.periodic_enable: if self.periodic_fuzzy_delay: initial_delay = random.randint(0, self.periodic_fuzzy_delay) else: initial_delay = None self.tg.add_dynamic_timer( self.periodic_tasks, initial_delay=initial_delay, periodic_interval_max=self.periodic_interval_max) def __getattr__(self, key): manager = self.__dict__.get('manager', None) return getattr(manager, key) @classmethod def create(cls, host=None, binary=None, topic=None, manager=None, periodic_enable=None, periodic_fuzzy_delay=None, periodic_interval_max=None): """Instantiates class and passes back application object. 
:param host: defaults to CONF.host :param binary: defaults to basename of executable :param topic: defaults to bin_name - 'masakari-' part :param manager: defaults to CONF._manager :param periodic_enable: defaults to CONF.periodic_enable :param periodic_fuzzy_delay: defaults to CONF.periodic_fuzzy_delay :param periodic_interval_max: if set, the max time to wait between runs """ if not host: host = CONF.host if not binary: binary = os.path.basename(sys.argv[0]) if not topic: topic = binary.rpartition('masakari-')[2] if not manager: manager_cls = ('%s_manager' % binary.rpartition('masakari-')[2]) manager = CONF.get(manager_cls, None) if periodic_enable is None: periodic_enable = CONF.periodic_enable if periodic_fuzzy_delay is None: periodic_fuzzy_delay = CONF.periodic_fuzzy_delay if periodic_interval_max is None: periodic_interval_max = CONF.periodic_interval_max service_obj = cls(host, binary, topic, manager, periodic_enable=periodic_enable, periodic_fuzzy_delay=periodic_fuzzy_delay, periodic_interval_max=periodic_interval_max) return service_obj def kill(self): """Destroy the service object in the datastore. NOTE: Although this method is not used anywhere else than tests, it is convenient to have it here, so the tests might easily and in clean way stop and remove the service_ref. """ self.stop() def stop(self): # Try to shut the connection down, but if we get any sort of # errors, go ahead and ignore them.. 
as we're shutting down anyway try: self.rpcserver.stop() except Exception: pass super(Service, self).stop() def periodic_tasks(self, raise_on_error=False): """Tasks to be run at a periodic interval.""" ctxt = context.get_admin_context() return self.manager.periodic_tasks(ctxt, raise_on_error=raise_on_error) def basic_config_check(self): """Perform basic config checks before starting processing.""" # Make sure the tempdir exists and is writable try: with utils.tempdir(): pass except Exception as e: LOG.error('Temporary directory is invalid: %s', e) sys.exit(1) def reset(self): self.manager.reset() class WSGIService(service.Service): """Provides ability to launch API from a 'paste' configuration.""" def __init__(self, name, loader=None, use_ssl=False, max_url_len=None): """Initialize, but do not start the WSGI server. :param name: The name of the WSGI server given to the loader. :param loader: Loads the WSGI application using the given name. :returns: None """ self.name = name self.binary = 'masakari-%s' % name self.topic = None self.loader = loader or wsgi.Loader() self.app = self.loader.load_app(name) self.host = getattr(CONF, '%s_listen' % name, "0.0.0.0") self.port = getattr(CONF, '%s_listen_port' % name, 0) self.workers = (getattr(CONF, '%s_workers' % name, None) or processutils.get_worker_count()) if self.workers and self.workers < 1: worker_name = '%s_workers' % name msg = (_("%(worker_name)s value of %(workers)s is invalid, " "must be greater than 0") % {'worker_name': worker_name, 'workers': str(self.workers)}) raise exception.InvalidInput(msg) self.use_ssl = use_ssl self.server = wsgi.Server(name, self.app, host=self.host, port=self.port, use_ssl=self.use_ssl, max_url_len=max_url_len) def reset(self): """Reset server greenpool size to default. :returns: None """ self.server.reset() def start(self): """Start serving this service using loaded configuration. 
Also, retrieve updated port number in case '0' was passed in, which indicates a random port should be used. :returns: None """ self.server.start() def stop(self): """Stop serving this API. :returns: None """ self.server.stop() def wait(self): """Wait for the service to stop serving this API. :returns: None """ self.server.wait() def process_launcher(): return service.ProcessLauncher(CONF, restart_method='mutate') # NOTE: the global launcher is to maintain the existing # functionality of calling service.serve + # service.wait _launcher = None def serve(server, workers=None): global _launcher if _launcher: raise RuntimeError(_('serve() can only be called once')) _launcher = service.launch(CONF, server, workers=workers, restart_method='mutate') def wait(): _launcher.wait() masakari-9.0.0/masakari/test.py0000664000175000017500000001623213656747723016503 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Base classes for our unit tests. Allows overriding of flags for use of fakes, and some black magic for inline callbacks. 
""" import contextlib import datetime import eventlet eventlet.monkey_patch(os=False) # noqa from unittest import mock import fixtures import six import testtools from oslo_config import cfg from oslo_log import log as logging from oslo_serialization import jsonutils from masakari.tests import fixtures as masakari_fixtures from masakari.tests.unit import conf_fixture from masakari.tests.unit import policy_fixture CONF = cfg.CONF logging.register_options(CONF) CONF.set_override('use_stderr', False) logging.setup(CONF, 'masakari') if six.PY2: nested = contextlib.nested else: @contextlib.contextmanager def nested(*contexts): with contextlib.ExitStack() as stack: yield [stack.enter_context(c) for c in contexts] def _patch_mock_to_raise_for_invalid_assert_calls(): def raise_for_invalid_assert_calls(wrapped): def wrapper(_self, name): valid_asserts = [ 'assert_called_with', 'assert_called_once_with', 'assert_has_calls', 'assert_any_calls'] if name.startswith('assert') and name not in valid_asserts: raise AttributeError('%s is not a valid mock assert method' % name) return wrapped(_self, name) return wrapper mock.Mock.__getattr__ = raise_for_invalid_assert_calls( mock.Mock.__getattr__) # NOTE(abhishekk): needs to be called only once at import time # to patch the mock lib _patch_mock_to_raise_for_invalid_assert_calls() class TestCase(testtools.TestCase): """Test case base class for all unit tests. Due to the slowness of DB access, please consider deriving from `NoDBTestCase` first. """ USES_DB = True def setUp(self): """Run before each test method to initialize test environment.""" super(TestCase, self).setUp() self.useFixture(conf_fixture.ConfFixture(CONF)) self.policy = self.useFixture(policy_fixture.PolicyFixture()) if self.USES_DB: self.useFixture(masakari_fixtures.Database()) else: self.useFixture(masakari_fixtures.DatabasePoisonFixture()) def stub_out(self, old, new): """Replace a function for the duration of the test. 
Use the monkey patch fixture to replace a function for the duration of a test. Useful when you want to provide fake methods instead of mocks during testing. This should be used instead of self.stubs.Set (which is based on mox) going forward. """ self.useFixture(fixtures.MonkeyPatch(old, new)) def override_config(self, name, override, group=None): """Cleanly override CONF variables.""" CONF.set_override(name, override, group) self.addCleanup(CONF.clear_override, name, group) def flags(self, **kw): """Override flag variables for a test.""" group = kw.pop('group', None) for k, v in kw.items(): CONF.set_override(k, v, group) def assertJsonEqual(self, expected, observed): """Asserts that 2 complex data structures are json equivalent. We use data structures which serialize down to json throughout the code, and often times we just need to know that these are json equivalent. This means that list order is not important, and should be sorted. Because this is a recursive set of assertions, when failure happens we want to expose both the local failure and the global view of the 2 data structures being compared. So a MismatchError which includes the inner failure as the mismatch, and the passed in expected / observed as matchee / matcher. 
""" if isinstance(expected, six.string_types): expected = jsonutils.loads(expected) if isinstance(observed, six.string_types): observed = jsonutils.loads(observed) def sort_key(x): if isinstance(x, (set, list)) or isinstance(x, datetime.datetime): return str(x) if isinstance(x, dict): items = ((sort_key(key), sort_key(value)) for key, value in x.items()) return sorted(items) return x def inner(expected, observed): if isinstance(expected, dict) and isinstance(observed, dict): self.assertEqual(len(expected), len(observed)) expected_keys = sorted(expected) observed_keys = sorted(observed) self.assertEqual(expected_keys, observed_keys) for key in list(six.iterkeys(expected)): inner(expected[key], observed[key]) elif (isinstance(expected, (list, tuple, set)) and isinstance( observed, (list, tuple, set))): self.assertEqual(len(expected), len(observed)) expected_values_iter = iter(sorted(expected, key=sort_key)) observed_values_iter = iter(sorted(observed, key=sort_key)) for i in range(len(expected)): inner(next(expected_values_iter), next(observed_values_iter)) else: self.assertEqual(expected, observed) try: inner(expected, observed) except testtools.matchers.MismatchError as e: inner_mismatch = e.mismatch # inverting the observed / expected because testtools # error messages assume expected is second. Possibly makes # reading the error messages less confusing. raise testtools.matchers.MismatchError(observed, expected, inner_mismatch, verbose=True) def assertObjEqual(self, expect, actual): actual.obj_reset_changes(recursive=True) expect.obj_reset_changes(recursive=True) self.assertEqual(expect.obj_to_primitive(), actual.obj_to_primitive()) def assertObjectList(self, expected, actual): self.assertEqual(len(expected), len(actual)) for d1, d2 in zip(expected, actual): self.assertObjEqual(d1, d2) class NoDBTestCase(TestCase): """`NoDBTestCase` differs from TestCase in that DB access is not supported. This makes tests run significantly faster. 
If possible, all new tests should derive from this class. """ USES_DB = False masakari-9.0.0/masakari/notifications/0000775000175000017500000000000013656750011017777 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/notifications/__init__.py0000664000175000017500000000000013656747723022116 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/notifications/objects/0000775000175000017500000000000013656750011021430 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/notifications/objects/__init__.py0000664000175000017500000000000013656747723023547 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/notifications/objects/notification.py0000664000175000017500000001553513656747723024521 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. 
from masakari.notifications.objects import base
from masakari.objects import base as masakari_base
from masakari.objects import fields


@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiPayloadBase(base.NotificationPayloadBase):
    """Base payload for failover-segment API notifications.

    Field values are copied from the ``segment`` object handed to
    ``__init__`` according to SCHEMA (see
    ``NotificationPayloadBase.populate_schema``).
    """
    # Each payload field is sourced from the attribute of the same name on
    # the 'segment' kwarg passed to populate_schema().
    SCHEMA = {
        'id': ('segment', 'id'),
        'uuid': ('segment', 'uuid'),
        'name': ('segment', 'name'),
        'service_type': ('segment', 'service_type'),
        'description': ('segment', 'description'),
        'recovery_method': ('segment', 'recovery_method'),
    }
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'id': fields.IntegerField(),
        'uuid': fields.UUIDField(),
        'name': fields.StringField(),
        'service_type': fields.StringField(),
        'description': fields.StringField(nullable=True),
        'recovery_method': fields.FailoverSegmentRecoveryMethodField(),
    }

    def __init__(self, segment, **kwargs):
        # Populate the SCHEMA-listed fields from the given segment object.
        super(SegmentApiPayloadBase, self).__init__(**kwargs)
        self.populate_schema(segment=segment)


@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiPayload(SegmentApiPayloadBase):
    """Segment payload extended with an optional fault payload."""
    # No SCHEMA as all the additional fields are calculated
    VERSION = '1.0'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, segment, fault, **kwargs):
        super(SegmentApiPayload, self).__init__(
            segment=segment,
            fault=fault,
            **kwargs)


@masakari_base.MasakariObjectRegistry.register_notification
class HostApiPayloadBase(base.NotificationPayloadBase):
    """Base payload for host API notifications.

    Field values are copied from the ``host`` object handed to ``__init__``
    according to SCHEMA.
    """
    # Each payload field is sourced from the attribute of the same name on
    # the 'host' kwarg passed to populate_schema().
    SCHEMA = {
        'id': ('host', 'id'),
        'uuid': ('host', 'uuid'),
        'name': ('host', 'name'),
        'failover_segment_id': ('host', 'failover_segment_id'),
        'failover_segment': ('host', 'failover_segment'),
        'type': ('host', 'type'),
        'reserved': ('host', 'reserved'),
        'control_attributes': ('host', 'control_attributes'),
        'on_maintenance': ('host', 'on_maintenance'),
    }
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'id': fields.IntegerField(),
        'uuid': fields.UUIDField(),
        'name': fields.StringField(),
        'failover_segment_id': fields.UUIDField(),
        'failover_segment': fields.ObjectField('FailoverSegment'),
        'type': fields.StringField(),
        'reserved': fields.BooleanField(),
        'control_attributes': fields.StringField(),
        'on_maintenance': fields.BooleanField(),
    }

    def __init__(self, host, **kwargs):
        # Populate the SCHEMA-listed fields from the given host object.
        super(HostApiPayloadBase, self).__init__(**kwargs)
        self.populate_schema(host=host)


@masakari_base.MasakariObjectRegistry.register_notification
class HostApiPayload(HostApiPayloadBase):
    """Host payload extended with an optional fault payload."""
    # No SCHEMA as all the additional fields are calculated
    VERSION = '1.0'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, host, fault, **kwargs):
        super(HostApiPayload, self).__init__(
            host=host,
            fault=fault,
            **kwargs)


@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiPayloadBase(base.NotificationPayloadBase):
    """Base payload for notification API notifications.

    Field values are copied from the ``notification`` object handed to
    ``__init__`` according to SCHEMA.
    """
    # Each payload field is sourced from the attribute of the same name on
    # the 'notification' kwarg passed to populate_schema().
    SCHEMA = {
        'id': ('notification', 'id'),
        'notification_uuid': ('notification', 'notification_uuid'),
        'generated_time': ('notification', 'generated_time'),
        'source_host_uuid': ('notification', 'source_host_uuid'),
        'type': ('notification', 'type'),
        'payload': ('notification', 'payload'),
        'status': ('notification', 'status'),
    }
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'id': fields.IntegerField(),
        'notification_uuid': fields.UUIDField(),
        'generated_time': fields.DateTimeField(),
        'source_host_uuid': fields.UUIDField(),
        'type': fields.NotificationTypeField(),
        'payload': fields.DictOfStringsField(),
        'status': fields.NotificationStatusField(),
    }

    def __init__(self, notification, **kwargs):
        # Populate the SCHEMA-listed fields from the given notification
        # object.
        super(NotificationApiPayloadBase, self).__init__(**kwargs)
        self.populate_schema(notification=notification)


@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiPayload(NotificationApiPayloadBase):
    """Notification payload extended with an optional fault payload."""
    # No SCHEMA as all the additional fields are calculated
    VERSION = '1.0'
    fields = {
        'fault': fields.ObjectField('ExceptionPayload', nullable=True),
    }

    def __init__(self, notification, fault, **kwargs):
        super(NotificationApiPayload, self).__init__(
            notification=notification,
            fault=fault,
            **kwargs)


# The @notification_sample decorators attach sample JSON file names (under
# doc/notification_samples/) for documentation generation.
@base.notification_sample('create-segment-start.json')
@base.notification_sample('create-segment-end.json')
@base.notification_sample('update-segment-start.json')
@base.notification_sample('update-segment-end.json')
@base.notification_sample('delete-segment-start.json')
@base.notification_sample('delete-segment-end.json')
@masakari_base.MasakariObjectRegistry.register_notification
class SegmentApiNotification(base.NotificationBase):
    """Notification emitted for failover-segment API operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('SegmentApiPayload')
    }


@base.notification_sample('create-host-start.json')
@base.notification_sample('create-host-end.json')
@base.notification_sample('update-host-start.json')
@base.notification_sample('update-host-end.json')
@base.notification_sample('delete-host-start.json')
@base.notification_sample('delete-host-end.json')
@masakari_base.MasakariObjectRegistry.register_notification
class HostApiNotification(base.NotificationBase):
    """Notification emitted for host API operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('HostApiPayload')
    }


@base.notification_sample('create-notification-start.json')
@base.notification_sample('create-notification-end.json')
@base.notification_sample('process-notification-start.json')
@base.notification_sample('process-notification-end.json')
@base.notification_sample('process-notification-error.json')
@masakari_base.MasakariObjectRegistry.register_notification
class NotificationApiNotification(base.NotificationBase):
    """Notification emitted for notification API/processing operations."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'payload': fields.ObjectField('NotificationApiPayload')
    }
masakari-9.0.0/masakari/notifications/objects/exception.py0000664000175000017500000000407213656747723024013 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import inspect import six from masakari.notifications.objects import base from masakari.objects import base as masakari_base from masakari.objects import fields @masakari_base.MasakariObjectRegistry.register_notification class ExceptionPayload(base.NotificationPayloadBase): # Version 1.0: Initial version VERSION = '1.0' fields = { 'module_name': fields.StringField(), 'function_name': fields.StringField(), 'exception': fields.StringField(), 'exception_message': fields.StringField(), 'traceback': fields.StringField() } @classmethod def from_exc_and_traceback(cls, fault, traceback): trace = inspect.trace()[-1] # TODO(gibi): apply strutils.mask_password on exception_message and # consider emitting the exception_message only if the safe flag is # true in the exception like in the REST API module = inspect.getmodule(trace[0]) module_name = module.__name__ if module else 'unknown' return cls( function_name=trace[3], module_name=module_name, exception=fault.__class__.__name__, exception_message=six.text_type(fault), traceback=traceback) @base.notification_sample('error-exception.json') @masakari_base.MasakariObjectRegistry.register_notification class ExceptionNotification(base.NotificationBase): # Version 1.0: Initial version VERSION = '1.0' fields = { 'payload': fields.ObjectField('ExceptionPayload') } masakari-9.0.0/masakari/notifications/objects/base.py0000664000175000017500000001435213656747723022741 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in 
compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from masakari.objects import base
from masakari.objects import fields
from masakari import rpc


@base.MasakariObjectRegistry.register_if(False)
class NotificationObject(base.MasakariObject):
    """Base class for every notification related versioned object."""
    # Version 1.0: Initial version
    VERSION = '1.0'

    def __init__(self, **kwargs):
        super(NotificationObject, self).__init__(**kwargs)
        # The notification objects are created on the fly when masakari emits
        # the notification. This causes that every object shows every field as
        # changed. We don't want to send this meaningless information so we
        # reset the object after creation.
        self.obj_reset_changes(recursive=False)


@base.MasakariObjectRegistry.register_notification
class EventType(NotificationObject):
    """Event type of a notification: an action plus an optional phase."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'action': fields.EventNotificationActionField(nullable=False),
        'phase': fields.EventNotificationPhaseField(nullable=True),
    }

    def to_notification_event_type_field(self):
        """Serialize the object to the wire format."""
        # Produces "<action>" or "<action>.<phase>" when a phase is set.
        s = '%s' % (self.action)
        if self.obj_attr_is_set('phase'):
            s += '.%s' % self.phase
        return s


@base.MasakariObjectRegistry.register_if(False)
class NotificationPayloadBase(NotificationObject):
    """Base class for the payload of versioned notifications."""
    # SCHEMA defines how to populate the payload fields. It is a dictionary
    # where every key value pair has the following format:
    # <payload_field_name>: (<data_source_name>,
    #                        <field_of_the_data_source>)
    # The <payload_field_name> is the name where the data will be stored in the
    # payload object, this field has to be defined as a field of the payload.
    # The <data_source_name> shall refer to name of the parameter passed as
    # kwarg to the payload's populate_schema() call and this object will be
    # used as the source of the data. The <field_of_the_data_source> shall be
    # a valid field of the passed argument.
    # The SCHEMA needs to be applied with the populate_schema() call before the
    # notification can be emitted.
    # The value of the payload.<payload_field_name> field will be set by the
    # <data_source_name>.<field_of_the_data_source> field. The
    # <data_source_name> will not be part of the payload object internal or
    # external representation.
    # Payload fields that are not set by the SCHEMA can be filled in the same
    # way as in any versioned object.
    SCHEMA = {}
    # Version 1.0: Initial version
    VERSION = '1.0'

    def __init__(self, **kwargs):
        super(NotificationPayloadBase, self).__init__(**kwargs)
        # An empty SCHEMA means there is nothing to populate, so the payload
        # is considered populated from the start.
        self.populated = not self.SCHEMA

    def populate_schema(self, **kwargs):
        """Populate the object based on the SCHEMA and the source objects

        :param kwargs: A dict contains the source object at the key defined in
                       the SCHEMA
        """
        for key, (obj, field) in self.SCHEMA.items():
            source = kwargs[obj]
            # Only copy attributes the source object actually has set.
            if source.obj_attr_is_set(field):
                setattr(self, key, getattr(source, field))
        self.populated = True
        # the schema population will create changed fields but we don't need
        # this information in the notification
        self.obj_reset_changes(recursive=False)


@base.MasakariObjectRegistry.register_notification
class NotificationPublisher(NotificationObject):
    """Identifies the service (host + binary) that emits a notification."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'host': fields.StringField(nullable=False),
        'binary': fields.StringField(nullable=False),
    }

    @classmethod
    def from_service_obj(cls, service):
        # Convenience constructor from a service object exposing
        # 'host' and 'binary' attributes.
        return cls(host=service.host, binary=service.binary)


@base.MasakariObjectRegistry.register_if(False)
class NotificationBase(NotificationObject):
    """Base class for versioned notifications.

    Every subclass shall define a 'payload' field.
    """
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'priority': fields.EventNotificationPriorityField(),
        'event_type': fields.ObjectField('EventType'),
        'publisher': fields.ObjectField('NotificationPublisher'),
    }

    def _emit(self, context, event_type, publisher_id, payload):
        # Dispatch through the oslo.messaging versioned notifier; the
        # priority field selects the notifier method (e.g. 'info', 'error').
        notifier = rpc.get_versioned_notifier(publisher_id)
        notify = getattr(notifier, self.priority)
        notify(context, event_type=event_type, payload=payload)

    def emit(self, context):
        """Send the notification."""
        # The payload's SCHEMA must have been applied before emitting.
        assert self.payload.populated

        # notification payload will be a newly populated object
        # therefore every field of it will look changed so this does not carry
        # any extra information so we drop this from the payload.
        self.payload.obj_reset_changes(recursive=False)
        self._emit(
            context,
            event_type=self.event_type.to_notification_event_type_field(),
            publisher_id='%s:%s' % (self.publisher.binary,
                                    self.publisher.host),
            payload=self.payload.obj_to_primitive())


def notification_sample(sample):
    """Class decorator for documentation generation purposes.

    This is to attach the notification sample information to the notification
    object for documentation generation purposes.

    :param sample: the path of the sample json file relative to the
                   doc/notification_samples/ directory in the masakari
                   repository root.
    """
    def wrap(cls):
        # Accumulate sample names on the class; stacked decorators append.
        if not getattr(cls, 'samples', None):
            cls.samples = [sample]
        else:
            cls.samples.append(sample)
        return cls
    return wrap
masakari-9.0.0/masakari/db/0000775000175000017500000000000013656750011015513 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/__init__.py0000664000175000017500000000127313656747723017637 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ DB abstraction for Masakari """ from masakari.db.api import * # noqa masakari-9.0.0/masakari/db/api.py0000664000175000017500000003146513656747723016667 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Defines interface for DB access. Functions in this module are imported into the masakari.db namespace. Call these functions from masakari.db namespace, not the masakari.db.api namespace. """ from oslo_db import concurrency import masakari.conf CONF = masakari.conf.CONF _BACKEND_MAPPING = {'sqlalchemy': 'masakari.db.sqlalchemy.api'} IMPL = concurrency.TpoolDbapiWrapper(CONF, backend_mapping=_BACKEND_MAPPING) # The maximum value a signed INT type may have MAX_INT = 0x7FFFFFFF def get_engine(): """Returns database engine""" return IMPL.get_engine() def failover_segment_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all failover segments that match all filters. 
:param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all failover segments """ return IMPL.failover_segment_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def failover_segment_get_by_id(context, segment_id): """Get failover segment by id. :param context: context to query under :param segment_id: id of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFound if failover segment with given ID doesn't exist. """ return IMPL.failover_segment_get_by_id(context, segment_id) def failover_segment_get_by_uuid(context, segment_uuid): """Get failover segment by uuid. :param context: context to query under :param segment_uuid: uuid of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFound if failover segment with given 'segment_uuid' doesn't exist. """ return IMPL.failover_segment_get_by_uuid(context, segment_uuid) def failover_segment_get_by_name(context, name): """Get failover segment by name :param context: context: context to query under :param name: name of failover segment :returns: dictionary-like object containing failover segment :raises exception.FailoverSegmentNotFoundByName if failover segment with given 'name' doesn't exist. """ return IMPL.failover_segment_get_by_name(context, name) def failover_segment_create(context, values): """Insert failover segment to database. 
:param context: context to query under :param values: dictionary of failover segment attributes to create :returns: dictionary-like object containing created failover segment :raises exception.FailoverSegmentExists if failover segment with given name already exist. """ return IMPL.failover_segment_create(context, values) def failover_segment_update(context, segment_uuid, values): """Update failover segment by uuid. :param context: context to query under :param segment_uuid: uuid of segment to be updated :param values: dictionary of values to be updated :returns: dictionary-like object containing updated failover segment :raises exception.FailoverSegmentNotFound if failover segment with given 'segment_uuid' doesn't exist. exception.FailoverSegmentExists if failover segment with given name already exist. """ return IMPL.failover_segment_update(context, segment_uuid, values) def failover_segment_delete(context, segment_uuid): """Delete the failover segment. :param context: context to query under :param segment_uuid: uuid of segment to be deleted :raises exception.FailoverSegmentNotFound if failover segment with 'segment_uuid' doesn't exist. """ return IMPL.failover_segment_delete(context, segment_uuid) def is_failover_segment_under_recovery(context, failover_segment_id, filters=None): """Checks whether failover segment is used for processing any notification :param context: context to query under :param failover_segment_id: uuid of segment :param filters: dictionary of filters; values that are lists, tuples, sets, or frozensets cause an 'IN' test to be performed, while exact matching ('==' operator) is used for other values. :returns: Returns True if any of the host belonging to a failover segment is being used for processing any notifications which are in new, error or running status otherwise it will return False. 
""" return IMPL.is_failover_segment_under_recovery( context, failover_segment_id, filters=filters) # db apis for host def host_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all hosts that match all filters. :param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all hosts """ return IMPL.host_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def host_get_by_uuid(context, host_uuid, segment_uuid=None): """Get host information by uuid. :param context: context to query under :param host_uuid: uuid of host :param segment_uuid: uuid of failover_segment :returns: dictionary-like object containing host :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist """ return IMPL.host_get_by_uuid(context, host_uuid, segment_uuid=segment_uuid) def host_get_by_id(context, host_id): """Get host information by id. :param context: context to query under :param host_id: id of host :returns: dictionary-like object containing host :raises: exception.HostNotFound if host with given ID doesn't exist """ return IMPL.host_get_by_id(context, host_id) def host_get_by_name(context, name): """Get host information by name. 
:param context: context to query under :param name: name of host :returns: dictionary-like object containing host :raises: exception.HostNotFoundByName if host with given 'name' doesn't exist """ return IMPL.host_get_by_name(context, name) def host_create(context, values): """Create a host. :param context: context to query under :param values: dictionary of host attributes to create :returns: dictionary-like object containing created host """ return IMPL.host_create(context, values) def host_update(context, host_uuid, values): """Update host information in the database. :param context: context to query under :param host_uuid: uuid of host to be updated :param values: dictionary of host attributes to be updated :returns: dictionary-like object containing updated host :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist exception.HostExists if host with given 'name' already exist """ return IMPL.host_update(context, host_uuid, values) def host_delete(context, host_uuid): """Delete the host. :param context: context to query under :param host_uuid: uuid of host to be deleted :raises: exception.HostNotFound if host with 'host_uuid' doesn't exist """ return IMPL.host_delete(context, host_uuid) # notification related db apis def notifications_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): """Get all notifications that match all filters. 
:param context: context to query under :param filters: filters for the query in the form of key/value :param sort_keys: list of attributes by which results should be sorted, paired with corresponding item in sort_dirs :param sort_dirs: list of directions in which results should be sorted, paired with corresponding item in sort_keys :param limit: maximum number of items to return :param marker: the last item of the previous page, used to determine the next page of results to return :returns: list of dictionary-like objects containing all notifications """ return IMPL.notifications_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) def notification_get_by_uuid(context, notification_uuid): """Get notification information by uuid. :param context: context to query under :param notification_uuid: uuid of notification :returns: dictionary-like object containing notification :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_get_by_uuid(context, notification_uuid) def notification_get_by_id(context, notification_id): """Get notification information by id. :param context: context to query under :param notification_id: id of notification :returns: dictionary-like object containing notification :raises: exception.NotificationNotFound if notification with given ID doesn't exist """ return IMPL.notification_get_by_id(context, notification_id) def notification_create(context, values): """Create a notification. :param context: context to query under :param values: dictionary of notification attributes to create :returns: dictionary-like object containing created notification """ return IMPL.notification_create(context, values) def notification_update(context, notification_uuid, values): """Update notification information in the database. 
:param context: context to query under :param notification_uuid: uuid of notification to be updated :param values: dictionary of notification attributes to be updated :returns: dictionary-like object containing updated notification :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_update(context, notification_uuid, values) def notification_delete(context, notification_uuid): """Delete the notification. :param context: context to query under :param notification_uuid: uuid of notification to be deleted :raises: exception.NotificationNotFound if notification with given 'notification_uuid' doesn't exist """ return IMPL.notification_delete(context, notification_uuid) def purge_deleted_rows(context, age_in_days, max_rows): """Purge the soft deleted rows. :param context: context to query under :param age_in_days: Purge deleted rows older than age in days :param max_rows: Limit number of records to delete """ return IMPL.purge_deleted_rows(context, age_in_days, max_rows) masakari-9.0.0/masakari/db/migration.py0000664000175000017500000000155413656747723020103 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Database setup and migration commands.""" from masakari.db.sqlalchemy import migration IMPL = migration def db_sync(version=None): """Migrate the database to `version` or the most recent version.""" return IMPL.db_sync(version=version) masakari-9.0.0/masakari/db/sqlalchemy/0000775000175000017500000000000013656750011017655 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/sqlalchemy/__init__.py0000664000175000017500000000000013656747723021774 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/sqlalchemy/api.py0000664000175000017500000005667313656747723021041 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Implementation of SQLAlchemy backend.""" import datetime import sys from oslo_db import api as oslo_db_api from oslo_db import exception as db_exc from oslo_db.sqlalchemy import enginefacade from oslo_db.sqlalchemy import utils as sqlalchemyutils from oslo_log import log as logging from oslo_utils import timeutils from sqlalchemy import or_, and_ from sqlalchemy.ext.compiler import compiles from sqlalchemy import MetaData from sqlalchemy.orm import joinedload from sqlalchemy import sql import sqlalchemy.sql as sa_sql from sqlalchemy.sql import func import masakari.conf from masakari.db.sqlalchemy import models from masakari import exception from masakari.i18n import _ LOG = logging.getLogger(__name__) CONF = masakari.conf.CONF main_context_manager = enginefacade.transaction_context() def _get_db_conf(conf_group, connection=None): kw = dict(conf_group.items()) if connection is not None: kw['connection'] = connection return kw def _context_manager_from_context(context): if context: try: return context.db_connection except AttributeError: pass def get_backend(): """The backend is this module itself.""" return sys.modules[__name__] def configure(conf): main_context_manager.configure(**_get_db_conf(conf.database)) def get_engine(use_slave=False, context=None): """Get a database engine object. :param use_slave: Whether to use the slave connection :param context: The request context that can contain a context manager """ ctxt_mgr = _context_manager_from_context(context) or main_context_manager return ctxt_mgr.get_legacy_facade().get_engine(use_slave=use_slave) def create_context_manager(connection=None): """Create a database context manager object. : param connection: The database connection string """ ctxt_mgr = enginefacade.transaction_context() ctxt_mgr.configure(**_get_db_conf(CONF.database, connection=connection)) return ctxt_mgr def model_query(context, model, args=None, read_deleted=None): """Query helper that accounts for context's `read_deleted` field. 
:param context: MasakariContext of the query. :param model: Model to query. Must be a subclass of ModelBase. :param args: Arguments to query. If None - model is used. :param read_deleted: If not None, overrides context's read_deleted field. Permitted values are 'no', which does not return deleted values; 'only', which only returns deleted values; and 'yes', which does not filter deleted values. """ if read_deleted is None: read_deleted = context.read_deleted query_kwargs = {} if 'no' == read_deleted: query_kwargs['deleted'] = False elif 'only' == read_deleted: query_kwargs['deleted'] = True elif 'yes' == read_deleted: pass else: raise ValueError(_("Unrecognized read_deleted value '%s'") % read_deleted) query = sqlalchemyutils.model_query( model, context.session, args, **query_kwargs) return query def _process_sort_params(sort_keys, sort_dirs, default_keys=['created_at', 'id'], default_dir='desc'): """Process the sort parameters to include default keys. Creates a list of sort keys and a list of sort directions. Adds the default keys to the end of the list if they are not already included. When adding the default keys to the sort keys list, the associated direction is: 1) The first element in the 'sort_dirs' list (if specified), else 2) 'default_dir' value (Note that 'asc' is the default value since this is the default in sqlalchemy.utils.paginate_query) :param sort_keys: List of sort keys to include in the processed list :param sort_dirs: List of sort directions to include in the processed list :param default_keys: List of sort keys that need to be included in the processed list, they are added at the end of the list if not already specified. 
:param default_dir: Sort direction associated with each of the default keys that are not supplied, used when they are added to the processed list :returns: list of sort keys, list of sort directions :raise exception.InvalidInput: If more sort directions than sort keys are specified or if an invalid sort direction is specified """ # Determine direction to use for when adding default keys default_dir_value = default_dir if sort_dirs and len(sort_dirs) != 0: default_dir_value = sort_dirs[0] # Create list of keys (do not modify the input list) result_keys = [] if sort_keys: result_keys = list(sort_keys) # If a list of directions is not provided, use the default sort direction # for all provided keys if sort_dirs: result_dirs = [] # Verify sort direction for sort_dir in sort_dirs: if sort_dir not in ('asc', 'desc'): msg = _("Unknown sort direction, must be 'asc' or 'desc'") raise exception.InvalidInput(reason=msg) result_dirs.append(sort_dir) else: result_dirs = [default_dir_value for _sort_key in result_keys] # Ensure that the key and direction length match while len(result_dirs) < len(result_keys): result_dirs.append(default_dir_value) # Unless more direction are specified, which is an error if len(result_dirs) > len(result_keys): msg = _("Sort direction size exceeds sort key size") raise exception.InvalidInput(reason=msg) # Ensure defaults are included for key in default_keys: if key not in result_keys: result_keys.append(key) result_dirs.append(default_dir_value) return result_keys, result_dirs @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going # to the database since nothing is going to be returned anyway. 
if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query(context, models.FailoverSegment) if 'recovery_method' in filters: query = query.filter(models.FailoverSegment.recovery_method == filters[ 'recovery_method']) if 'service_type' in filters: query = query.filter(models.FailoverSegment.service_type == filters[ 'service_type']) marker_row = None if marker is not None: marker_row = model_query(context, models.FailoverSegment ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.FailoverSegment, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as e: raise exception.InvalidSortKey(e) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_id(context, segment_id): query = model_query(context, models.FailoverSegment).filter_by(id=segment_id) result = query.first() if not result: raise exception.FailoverSegmentNotFound(id=segment_id) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_uuid(context, segment_uuid): return _failover_segment_get_by_uuid(context, segment_uuid) def _failover_segment_get_by_uuid(context, segment_uuid): query = model_query(context, models.FailoverSegment).filter_by(uuid=segment_uuid) result = query.first() if not result: raise exception.FailoverSegmentNotFound(id=segment_uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def failover_segment_get_by_name(context, name): query = model_query(context, models.FailoverSegment).filter_by(name=name) result = query.first() if not result: raise exception.FailoverSegmentNotFoundByName(segment_name=name) return result @oslo_db_api.wrap_db_retry(max_retries=5, 
retry_on_deadlock=True) @main_context_manager.writer def failover_segment_create(context, values): segment = models.FailoverSegment() segment.update(values) try: segment.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.FailoverSegmentExists(name=segment.name) return _failover_segment_get_by_uuid(context, segment.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def failover_segment_update(context, segment_uuid, values): segment = _failover_segment_get_by_uuid(context, segment_uuid) segment.update(values) try: segment.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.FailoverSegmentExists(name=values.get('name')) return _failover_segment_get_by_uuid(context, segment.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def failover_segment_delete(context, segment_uuid): count = model_query(context, models.FailoverSegment ).filter_by(uuid=segment_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.FailoverSegmentNotFound(id=segment_uuid) model_query(context, models.Host).filter_by( failover_segment_id=segment_uuid).soft_delete( synchronize_session=False) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def is_failover_segment_under_recovery(context, failover_segment_id, filters=None): filters = filters or {} # get all hosts against the failover_segment inner_select = model_query( context, models.Host, (models.Host.uuid,)).filter( models.Host.failover_segment_id == failover_segment_id) # check if any host has notification status as new, running or error query = model_query(context, models.Notification, (func.count(models.Notification.id),)) if 'status' in filters: status = filters['status'] if isinstance(status, (list, tuple, set, frozenset)): column_attr = getattr(models.Notification, 'status') query = query.filter(column_attr.in_(status)) else: 
query = query.filter(models.Notification.status == status) query = query.filter( models.Notification.source_host_uuid.in_(inner_select.subquery())) return query.first()[0] > 0 # db apis for host @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_all_by_filters( context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going # to the database since nothing is going to be returned anyway. if limit == 0: return [] sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs) filters = filters or {} query = model_query(context, models.Host).options(joinedload('failover_segment')) if 'failover_segment_id' in filters: query = query.filter(models.Host.failover_segment_id == filters[ 'failover_segment_id']) if 'type' in filters: query = query.filter(models.Host.type == filters['type']) if 'on_maintenance' in filters: query = query.filter(models.Host.on_maintenance == filters[ 'on_maintenance']) if 'reserved' in filters: query = query.filter(models.Host.reserved == filters['reserved']) marker_row = None if marker is not None: marker_row = model_query(context, models.Host ).filter_by(id=marker).first() if not marker_row: raise exception.MarkerNotFound(marker=marker) try: query = sqlalchemyutils.paginate_query(query, models.Host, limit, sort_keys, marker=marker_row, sort_dirs=sort_dirs) except db_exc.InvalidSortKey as e: raise exception.InvalidSortKey(e) return query.all() @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_uuid(context, host_uuid, segment_uuid=None): return _host_get_by_uuid(context, host_uuid, segment_uuid=segment_uuid) def _host_get_by_uuid(context, host_uuid, segment_uuid=None): query = model_query(context, models.Host ).filter_by(uuid=host_uuid ).options(joinedload('failover_segment')) if segment_uuid: query = 
query.filter_by(failover_segment_id=segment_uuid) result = query.first() if not result: if segment_uuid: raise exception.HostNotFoundUnderFailoverSegment( host_uuid=host_uuid, segment_uuid=segment_uuid) else: raise exception.HostNotFound(id=host_uuid) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_id(context, host_id): query = model_query(context, models.Host ).filter_by(id=host_id ).options(joinedload('failover_segment')) result = query.first() if not result: raise exception.HostNotFound(id=host_id) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.reader def host_get_by_name(context, name): query = model_query(context, models.Host ).filter_by(name=name ).options(joinedload('failover_segment')) result = query.first() if not result: raise exception.HostNotFoundByName(host_name=name) return result @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_create(context, values): host = models.Host() host.update(values) try: host.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.HostExists(name=host.name) return _host_get_by_uuid(context, host.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_update(context, host_uuid, values): host = _host_get_by_uuid(context, host_uuid) host.update(values) try: host.save(session=context.session) except db_exc.DBDuplicateEntry: raise exception.HostExists(name=values.get('name')) return _host_get_by_uuid(context, host.uuid) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def host_delete(context, host_uuid): count = model_query(context, models.Host ).filter_by(uuid=host_uuid ).soft_delete(synchronize_session=False) if count == 0: raise exception.HostNotFound(id=host_uuid) # db apis for notifications 
@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.reader
def notifications_get_all_by_filters(
        context, filters=None, sort_keys=None,
        sort_dirs=None, limit=None, marker=None):
    # NOTE(Dinesh_Bhor): If the limit is 0 there is no point in even going
    # to the database since nothing is going to be returned anyway.
    if limit == 0:
        return []

    sort_keys, sort_dirs = _process_sort_params(sort_keys, sort_dirs)
    filters = filters or {}
    query = model_query(context, models.Notification)

    if 'source_host_uuid' in filters:
        query = query.filter(models.Notification.source_host_uuid ==
                             filters['source_host_uuid'])
    if 'type' in filters:
        query = query.filter(models.Notification.type == filters['type'])
    if 'status' in filters:
        status = filters['status']
        # A collection of statuses becomes an IN clause.
        if isinstance(status, (list, tuple, set, frozenset)):
            query = query.filter(models.Notification.status.in_(status))
        else:
            query = query.filter(models.Notification.status == status)
    if 'generated-since' in filters:
        generated_since = timeutils.normalize_time(
            filters['generated-since'])
        query = query.filter(
            models.Notification.generated_time >= generated_since)

    marker_row = None
    if marker is not None:
        marker_row = model_query(
            context, models.Notification).filter_by(id=marker).first()
        if not marker_row:
            raise exception.MarkerNotFound(marker=marker)

    try:
        query = sqlalchemyutils.paginate_query(
            query, models.Notification, limit, sort_keys,
            marker=marker_row, sort_dirs=sort_dirs)
    except db_exc.InvalidSortKey as err:
        raise exception.InvalidSortKey(err)

    return query.all()


@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.reader
def notification_get_by_uuid(context, notification_uuid):
    return _notification_get_by_uuid(context, notification_uuid)


def _notification_get_by_uuid(context, notification_uuid):
    row = model_query(context, models.Notification).filter_by(
        notification_uuid=notification_uuid).first()
    if not row:
        raise exception.NotificationNotFound(id=notification_uuid)
    return row


@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.reader
def notification_get_by_id(context, notification_id):
    row = model_query(context, models.Notification).filter_by(
        id=notification_id).first()
    if not row:
        raise exception.NotificationNotFound(id=notification_id)
    return row


@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.writer
def notification_create(context, values):
    # Persist a new notification record.
    notification = models.Notification()
    notification.update(values)
    notification.save(session=context.session)

    return _notification_get_by_uuid(context,
                                     notification.notification_uuid)


@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.writer
def notification_update(context, notification_uuid, values):
    # Apply the given attributes to an existing notification.
    notification = _notification_get_by_uuid(context, notification_uuid)
    notification.update(values)
    notification.save(session=context.session)

    return _notification_get_by_uuid(context,
                                     notification.notification_uuid)


@oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True)
@main_context_manager.writer
def notification_delete(context, notification_uuid):
    count = model_query(context, models.Notification).filter_by(
        notification_uuid=notification_uuid).soft_delete(
        synchronize_session=False)
    if count == 0:
        raise exception.NotificationNotFound(id=notification_uuid)


class DeleteFromSelect(sa_sql.expression.UpdateBase):
    """A DELETE statement driven by a SELECT of the target ids.

    Compiled by visit_delete_from_select below.
    """

    def __init__(self, table, select, column):
        self.table = table
        self.select = select
        self.column = column


# NOTE(pooja_jadhav): MySQL doesn't yet support subquery with
# 'LIMIT & IN/ALL/ANY/SOME' We need work around this with nesting select.
@compiles(DeleteFromSelect) def visit_delete_from_select(element, compiler, **kw): return "DELETE FROM %s WHERE %s in (SELECT T1.%s FROM (%s) as T1)" % ( compiler.process(element.table, asfrom=True), compiler.process(element.column), element.column.name, compiler.process(element.select)) @oslo_db_api.wrap_db_retry(max_retries=5, retry_on_deadlock=True) @main_context_manager.writer def purge_deleted_rows(context, age_in_days, max_rows): """Purges soft deleted rows Deleted rows get purged from hosts and segment tables based on deleted_at column. As notifications table doesn't delete any of the notification records so rows get purged from notifications based on last updated_at and status column. """ engine = get_engine() conn = engine.connect() metadata = MetaData() metadata.reflect(engine) deleted_age = timeutils.utcnow() - datetime.timedelta(days=age_in_days) total_rows_purged = 0 for table in reversed(metadata.sorted_tables): if 'deleted' not in table.columns.keys(): continue LOG.info('Purging deleted rows older than %(age_in_days)d day(s) ' 'from table %(tbl)s', {'age_in_days': age_in_days, 'tbl': table}) column = table.c.id updated_at_column = table.c.updated_at deleted_at_column = table.c.deleted_at if table.name == 'notifications': status_column = table.c.status query_delete = sql.select([column]).where( and_(updated_at_column < deleted_age, or_( status_column == 'finished', status_column == 'failed', status_column == 'ignored'))).order_by(status_column) else: query_delete = sql.select( [column], deleted_at_column < deleted_age).order_by( deleted_at_column) if max_rows > 0: query_delete = query_delete.limit(max_rows - total_rows_purged) delete_statement = DeleteFromSelect(table, query_delete, column) result = conn.execute(delete_statement) rows = result.rowcount LOG.info('Deleted %(rows)d row(s) from table %(tbl)s', {'rows': rows, 'tbl': table}) total_rows_purged += rows if max_rows > 0 and total_rows_purged == max_rows: break LOG.info('Total deleted rows are 
%(rows)d', {'rows': total_rows_purged}) masakari-9.0.0/masakari/db/sqlalchemy/migration.py0000664000175000017500000000462213656747723022244 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os import threading from oslo_config import cfg from oslo_db import exception as oslo_exception from oslo_db import options from stevedore import driver from masakari import db from masakari.db import api as db_api from masakari import exception from masakari.i18n import _ INIT_VERSION = 0 _IMPL = None _LOCK = threading.Lock() options.set_defaults(cfg.CONF) MIGRATE_REPO_PATH = os.path.join( os.path.abspath(os.path.dirname(__file__)), 'migrate_repo', ) def get_backend(): global _IMPL if _IMPL is None: with _LOCK: if _IMPL is None: _IMPL = driver.DriverManager( "masakari.database.migration_backend", cfg.CONF.database.backend).driver return _IMPL def db_sync(version=None, init_version=INIT_VERSION, engine=None): if engine is None: engine = db_api.get_engine() current_db_version = get_backend().db_version(engine, MIGRATE_REPO_PATH, init_version) if version and int(version) < current_db_version: msg = _('Database schema downgrade is not allowed.') raise exception.InvalidInput(reason=msg) if version and int(version) > db.MAX_INT: message = _('Version should be less than or equal to %(max_version)d.' 
) % {'max_version': db.MAX_INT}
        raise exception.InvalidInput(reason=message)

    try:
        return get_backend().db_sync(engine=engine,
                                     abs_path=MIGRATE_REPO_PATH,
                                     version=version,
                                     init_version=init_version)
    except oslo_exception.DBMigrationError as exc:
        raise exception.InvalidInput(reason=exc)
masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/0000775000175000017500000000000013656750011022332 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/__init__.py0000664000175000017500000000000013656747723024451 0ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/migrate.cfg0000664000175000017500000000172713656747723024462 0ustar zuulzuul00000000000000[db_settings]
# Used to identify which repository this database is versioned under.
# You can use the name of your project.
repository_id=masakari

# The name of the database table used to track the schema version.
# This name shouldn't already be used by your project.
# If this is changed once a database is under version control, you'll need to
# change the table name in each database too.
version_table=migrate_version

# When committing a change script, Migrate will attempt to generate the
# sql for all supported databases; normally, if one of them fails - probably
# because you don't have that database installed - it is ignored and the
# commit continues, perhaps ending successfully.
# Databases in this list MUST compile successfully during a commit, or the
# entire commit will fail. List the databases your application will actually
# be using to ensure your updates to that database work properly.
# This must be a list; example: ['postgres','sqlite'] required_dbs=[] masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/0000775000175000017500000000000013656750011024202 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/001_add_failover_segments_table.py0000664000175000017500000000536513656747723032660 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from migrate import changeset from sqlalchemy import Column, MetaData, Table, Index from sqlalchemy import Integer, DateTime, String, Enum, Text def define_failover_segments_table(meta): failover_segments = Table('failover_segments', meta, Column('created_at', DateTime, nullable=False), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(36), nullable=False), Column('name', String(255), nullable=False), Column('service_type', String(255), nullable=False), Column('description', Text), Column('recovery_method', Enum('auto', 'reserved_host', 'auto_priority', 'rh_priority', name='recovery_methods'), nullable=False), changeset.UniqueConstraint( 'name', 'deleted', name='uniq_segment0name0deleted' ), changeset.UniqueConstraint( 'uuid', name='uniq_segments0uuid'), Index('segments_service_type_idx', 'service_type'), mysql_engine='InnoDB', mysql_charset='utf8', extend_existing=True) return 
failover_segments def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine table = define_failover_segments_table(meta) table.create() masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/006_add_persistence_tables.py0000664000175000017500000000223613656747723031652 0ustar zuulzuul00000000000000# Copyright 2019 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import masakari.conf from masakari.engine import driver CONF = masakari.conf.CONF NOTIFICATION_DRIVER = CONF.notification_driver PERSISTENCE_BACKEND = CONF.taskflow.connection def upgrade(migrate_engine): """Upgrade the engine with persistence tables. """ # Get the taskflow driver configured, default is 'taskflow_driver', # to load persistence tables to store progress details. taskflow_driver = driver.load_masakari_driver(NOTIFICATION_DRIVER) if PERSISTENCE_BACKEND: taskflow_driver.upgrade_backend(PERSISTENCE_BACKEND) masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/005_remove_nullable_mismatch.py0000664000175000017500000000166713656747723032232 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from sqlalchemy import MetaData, Table def upgrade(migrate_engine): meta = MetaData(bind=migrate_engine) failover_segments = Table('failover_segments', meta, autoload=True) hosts = Table('hosts', meta, autoload=True) for table in [failover_segments, hosts]: table.c.created_at.alter(nullable=True) masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/002_add_hosts_table.py0000664000175000017500000000470313656747723030300 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from migrate import ForeignKeyConstraint, UniqueConstraint from sqlalchemy import Column, MetaData, Table, Index from sqlalchemy import Integer, DateTime, String, Boolean, Text def define_hosts_table(meta): failover_segments = Table('failover_segments', meta, autoload=True) hosts = Table('hosts', meta, Column('created_at', DateTime, nullable=False), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('uuid', String(36), nullable=False), Column('name', String(255), nullable=False), Column('reserved', Boolean, default=False), Column('type', String(255), nullable=False), Column('control_attributes', Text, nullable=False), Column('failover_segment_id', String(36), nullable=False), Column('on_maintenance', Boolean, default=False), UniqueConstraint('failover_segment_id', 'name', 'deleted', name='uniq_host0name0deleted'), UniqueConstraint('uuid', name='uniq_host0uuid'), ForeignKeyConstraint(columns=['failover_segment_id'], refcolumns=[failover_segments.c.uuid], name='fk_failover_segments_uuid'), Index('hosts_type_idx', 'type'), mysql_engine='InnoDB', mysql_charset='utf8', extend_existing=True) return hosts def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine table = define_hosts_table(meta) table.create() masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/003_update_unique_constraint_hosts.py0000664000175000017500000000362413656747723033517 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from migrate import ForeignKeyConstraint, UniqueConstraint import sqlalchemy from sqlalchemy import Table def upgrade(migrate_engine): meta = sqlalchemy.MetaData() meta.bind = migrate_engine hosts_table = Table('hosts', meta, autoload=True) failover_segments = Table('failover_segments', meta, autoload=True) # NOTE(Dinesh_Bhor) We need to drop foreign keys first because unique # constraints that we want to delete depend on them. So drop the fk and # recreate it again after unique constraint deletion. cons_fk = ForeignKeyConstraint([hosts_table.c.failover_segment_id], [failover_segments.c.uuid], name="fk_failover_segments_uuid") cons_fk.drop(engine=migrate_engine) cons_unique = UniqueConstraint('failover_segment_id', 'name', 'deleted', name='uniq_host0name0deleted', table=hosts_table) cons_unique.drop(engine=migrate_engine) # Create an updated unique constraint updated_cons_unique = UniqueConstraint('name', 'deleted', name='uniq_host0name0deleted', table=hosts_table) cons_fk.create() updated_cons_unique.create() masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/versions/004_add_notifications_table.py0000664000175000017500000000460113656747723032010 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from migrate.changeset import UniqueConstraint from sqlalchemy import Column, MetaData, Table from sqlalchemy import Integer, DateTime, String, Enum, Text def define_notifications_table(meta): notifications = Table('notifications', meta, Column('created_at', DateTime), Column('updated_at', DateTime), Column('deleted_at', DateTime), Column('deleted', Integer), Column('id', Integer, primary_key=True, nullable=False), Column('notification_uuid', String(36), nullable=False), Column('generated_time', DateTime, nullable=False), Column('source_host_uuid', String(36), nullable=False ), Column('type', String(length=36), nullable=False), Column('payload', Text), Column('status', Enum('new', 'running', 'error', 'failed', 'ignored', 'finished', name='notification_status'), nullable=False), UniqueConstraint('notification_uuid', name='uniq_notifications0uuid'), mysql_engine='InnoDB', mysql_charset='utf8', extend_existing=True) return notifications def upgrade(migrate_engine): meta = MetaData() meta.bind = migrate_engine table = define_notifications_table(meta) table.create() masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/README.txt0000664000175000017500000000015613656747723024052 0ustar zuulzuul00000000000000This is a database migration repository. More information at https://github.com/openstack/sqlalchemy-migrate masakari-9.0.0/masakari/db/sqlalchemy/migrate_repo/manage.py0000664000175000017500000000153113656747723024154 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from migrate.versioning.shell import main from masakari.db.sqlalchemy import migrate_repo if __name__ == '__main__': main(debug='False', repository=os.path.abspath(os.path.dirname(migrate_repo.__file__))) masakari-9.0.0/masakari/db/sqlalchemy/models.py0000664000175000017500000001374313656747723021542 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_db.sqlalchemy import models from oslo_utils import timeutils from sqlalchemy import (Column, DateTime, Index, Integer, Enum, String, schema) from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import orm from sqlalchemy import ForeignKey, Boolean, Text BASE = declarative_base() class MasakariTimestampMixin(object): # Note(tpatil): timeutils.utcnow() method return microseconds part but db # doesn't store it because of which subsequent calls to get resources # from the same db session object instance doesn't return microsecond for # datetime fields. To avoid this discrepancy, removed microseconds from # datetime fields so that there is no need to remove it for create/update # cases in the respective versioned objects. created_at = Column(DateTime, default=lambda: timeutils.utcnow().replace( microsecond=0)) updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow().replace( microsecond=0)) class MasakariAPIBase(MasakariTimestampMixin, models.ModelBase): """Base class for MasakariAPIBase Models.""" metadata = None def __copy__(self): """Implement a safe copy.copy(). SQLAlchemy-mapped objects travel with an object called an InstanceState, which is pegged to that object specifically and tracks everything about that object. It's critical within all attribute operations, including gets and deferred loading. This object definitely cannot be shared among two instances, and must be handled. The copy routine here makes use of session.merge() which already essentially implements a "copy" style of operation, which produces a new instance with a new InstanceState and copies all the data along mapped attributes without using any SQL. The mode we are using here has the caveat that the given object must be "clean", e.g. that it has no database-loaded state that has been updated and not flushed. 
This is a good thing, as creating a copy of an object including non-flushed, pending database state is probably not a good idea; neither represents what the actual row looks like, and only one should be flushed. """ session = orm.Session() copy = session.merge(self, load=False) session.expunge(copy) return copy class FailoverSegment(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a failover segment.""" __tablename__ = 'failover_segments' __table_args__ = ( schema.UniqueConstraint("name", "deleted", name="uniq_segment0name0deleted"), schema.UniqueConstraint('uuid', name='uniq_segments0uuid'), Index('segments_service_type_idx', 'service_type'), ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36), nullable=False) name = Column(String(255), nullable=False) service_type = Column(String(255), nullable=False) description = Column(Text) recovery_method = Column(Enum('auto', 'reserved_host', 'auto_priority', 'rh_priority', name='recovery_methods'), nullable=False) class Host(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a host.""" __tablename__ = 'hosts' __table_args__ = ( schema.UniqueConstraint("name", "deleted", name="uniq_host0name0deleted"), schema.UniqueConstraint('uuid', name='uniq_host0uuid'), Index('hosts_type_idx', 'type'), ) id = Column(Integer, primary_key=True, autoincrement=True) uuid = Column(String(36), nullable=False) name = Column(String(255), nullable=False) reserved = Column(Boolean, default=False) type = Column(String(255), nullable=False) control_attributes = Column(Text, nullable=False) on_maintenance = Column(Boolean, default=False) failover_segment_id = Column(String(36), ForeignKey('failover_segments.uuid'), nullable=False) failover_segment = orm.relationship(FailoverSegment, backref=orm.backref('hosts'), foreign_keys=failover_segment_id, primaryjoin='and_(Host.' 
'failover_segment_id==' 'FailoverSegment.uuid,' 'Host.deleted==0)') class Notification(BASE, MasakariAPIBase, models.SoftDeleteMixin): """Represents a notification.""" __tablename__ = 'notifications' __table_args__ = ( schema.UniqueConstraint('notification_uuid', name='uniq_notification0uuid'), ) id = Column(Integer, primary_key=True, autoincrement=True) notification_uuid = Column(String(36), nullable=False) generated_time = Column(DateTime, nullable=False) type = Column(String(36), nullable=False) payload = Column(Text) status = Column(Enum('new', 'running', 'error', 'failed', 'ignored', 'finished', name='notification_status'), nullable=False) source_host_uuid = Column(String(36), nullable=False) masakari-9.0.0/masakari/objects/0000775000175000017500000000000013656750011016557 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/objects/__init__.py0000664000175000017500000000167613656747723020722 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. def register_all(): # NOTE(Dinesh_Bhor): You must make sure your object gets imported in this # function in order for it to be registered by services that may # need to receive it via RPC. __import__('masakari.objects.host') __import__('masakari.objects.notification') __import__('masakari.objects.segment') masakari-9.0.0/masakari/objects/notification.py0000664000175000017500000001615713656747723021651 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. 
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) NOTIFICATION_OPTIONAL_FIELDS = ['recovery_workflow_details'] @base.MasakariObjectRegistry.register class Notification(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): # Version 1.0: Initial version # Version 1.1: Added recovery_workflow_details field. # Note: This field shouldn't be persisted. VERSION = '1.1' fields = { 'id': fields.IntegerField(), 'notification_uuid': fields.UUIDField(), 'generated_time': fields.DateTimeField(), 'source_host_uuid': fields.UUIDField(), 'type': fields.NotificationTypeField(), 'payload': fields.DictOfStringsField(), 'status': fields.NotificationStatusField(), # NOTE(ShilpaSD): This field shouldn't be stored in db. # The recovery workflow details read from the 'notification_driver' # will be set to this field. 
'recovery_workflow_details': fields.ListOfObjectsField( 'NotificationProgressDetails', default=[]) } @staticmethod def _from_db_object(context, notification, db_notification): for key in notification.fields: if key in NOTIFICATION_OPTIONAL_FIELDS: continue if key != 'payload': setattr(notification, key, db_notification.get(key)) else: payload = db_notification.get("payload") notification.payload = jsonutils.loads(payload) notification.obj_reset_changes() notification._context = context return notification @base.remotable_classmethod def get_by_id(cls, context, id): db_notification = db.notification_get_by_id(context, id) return cls._from_db_object(context, cls(), db_notification) @base.remotable_classmethod def get_by_uuid(cls, context, uuid): db_notification = db.notification_get_by_uuid(context, uuid) return cls._from_db_object(context, cls(), db_notification) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() # NOTE(ShilpaSD): This field doesn't exist in the Notification # db model so don't save it. 
updates.pop('recovery_workflow_details', None) if 'notification_uuid' not in updates: updates['notification_uuid'] = uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for notifications', dict(uuid=updates['notification_uuid'])) if 'payload' in updates: updates['payload'] = jsonutils.dumps(updates['payload']) api_utils.notify_about_notification_api(self._context, self, action=fields.EventNotificationAction.NOTIFICATION_CREATE, phase=fields.EventNotificationPhase.START) db_notification = db.notification_create(self._context, updates) api_utils.notify_about_notification_api(self._context, self, action=fields.EventNotificationAction.NOTIFICATION_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_notification) @base.remotable def save(self): updates = self.masakari_obj_get_changes() updates.pop('id', None) # NOTE(ShilpaSD): This field doesn't exist in the Notification # db model so don't save it. updates.pop('recovery_workflow_details', None) db_notification = db.notification_update(self._context, self.notification_uuid, updates) self._from_db_object(self._context, self, db_notification) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('notification_uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') db.notification_delete(self._context, self.notification_uuid) delattr(self, base.get_attrname('id')) @base.MasakariObjectRegistry.register class NotificationList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('Notification'), } @base.remotable_classmethod def get_all(cls, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.notifications_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker ) return 
base.obj_make_list(context, cls(context), objects.Notification, groups) def notification_sample(sample): """Class decorator to attach the notification sample information to the notification object for documentation generation purposes. :param sample: the path of the sample json file relative to the doc/notification_samples/ directory in the nova repository root. """ def wrap(cls): cls.sample = sample return cls return wrap @base.MasakariObjectRegistry.register class NotificationProgressDetails(base.MasakariObject, base.MasakariObjectDictCompat): VERSION = '1.0' fields = { 'name': fields.StringField(), 'progress': fields.FloatField(), 'progress_details': fields.ListOfDictOfNullableStringsField( default=[]), 'state': fields.StringField() } @classmethod def create(cls, name, progress, progress_details, state,): return cls(name=name, progress=progress, progress_details=progress_details, state=state) masakari-9.0.0/masakari/objects/host.py0000664000175000017500000001421413656747723020130 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_log import log as logging from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) @base.MasakariObjectRegistry.register class Host(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): # Version 1.0: Initial version # Version 1.1: Added 'segment_uuid' parameter to 'get_by_uuid' method VERSION = '1.1' fields = { 'id': fields.IntegerField(), 'uuid': fields.UUIDField(), 'name': fields.StringField(), 'failover_segment_id': fields.UUIDField(), 'failover_segment': fields.ObjectField('FailoverSegment'), 'type': fields.StringField(), 'reserved': fields.BooleanField(), 'control_attributes': fields.StringField(), 'on_maintenance': fields.BooleanField(), } @staticmethod def _from_db_object(context, host, db_host): for key in host.fields: db_value = db_host.get(key) if key == "failover_segment": db_value = objects.FailoverSegment._from_db_object( host._context, objects.FailoverSegment(), db_value) setattr(host, key, db_value) host.obj_reset_changes() host._context = context return host @base.remotable_classmethod def get_by_id(cls, context, id): db_inst = db.host_get_by_id(context, id) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_uuid(cls, context, uuid, segment_uuid=None): db_inst = db.host_get_by_uuid(context, uuid, segment_uuid=segment_uuid) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_name(cls, context, name): db_inst = db.host_get_by_name(context, name) return cls._from_db_object(context, cls(), db_inst) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() if 'uuid' not in updates: updates['uuid'] = 
uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for host', dict(uuid=updates['uuid'])) if 'failover_segment' in updates: raise exception.ObjectActionError(action='create', reason='failover segment ' 'assigned') api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_CREATE, phase=fields.EventNotificationPhase.START) db_host = db.host_create(self._context, updates) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_host) @base.remotable def save(self): updates = self.masakari_obj_get_changes() if 'failover_segment' in updates: raise exception.ObjectActionError(action='save', reason='failover segment ' 'changed') updates.pop('id', None) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_UPDATE, phase=fields.EventNotificationPhase.START) db_host = db.host_update(self._context, self.uuid, updates) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_UPDATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_host) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_DELETE, phase=fields.EventNotificationPhase.START) db.host_delete(self._context, self.uuid) api_utils.notify_about_host_api(self._context, self, action=fields.EventNotificationAction.HOST_DELETE, phase=fields.EventNotificationPhase.END) delattr(self, base.get_attrname('id')) @base.MasakariObjectRegistry.register class HostList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 
'objects': fields.ListOfObjectsField('Host'), } @base.remotable_classmethod def get_all(cls, context, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.host_get_all_by_filters(context, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return base.obj_make_list(context, cls(context), objects.Host, groups) masakari-9.0.0/masakari/objects/fields.py0000664000175000017500000001643613656747723020431 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_versionedobjects import fields # Import fields from oslo.versionedobjects BooleanField = fields.BooleanField IntegerField = fields.IntegerField StringField = fields.StringField EnumField = fields.EnumField UUIDField = fields.UUIDField DateTimeField = fields.DateTimeField DictOfStringsField = fields.DictOfStringsField ObjectField = fields.ObjectField BaseEnumField = fields.BaseEnumField ListOfObjectsField = fields.ListOfObjectsField ListOfStringsField = fields.ListOfStringsField FloatField = fields.FloatField ListOfDictOfNullableStringsField = fields.ListOfDictOfNullableStringsField Field = fields.Field Enum = fields.Enum FieldType = fields.FieldType class BaseMasakariEnum(Enum): def __init__(self, **kwargs): super(BaseMasakariEnum, self).__init__( valid_values=self.__class__.ALL) class FailoverSegmentRecoveryMethod(Enum): """Represents possible recovery_methods for failover segment.""" AUTO = "auto" RESERVED_HOST = "reserved_host" AUTO_PRIORITY = "auto_priority" RH_PRIORITY = "rh_priority" ALL = (AUTO, RESERVED_HOST, AUTO_PRIORITY, RH_PRIORITY) def __init__(self): super(FailoverSegmentRecoveryMethod, self).__init__(valid_values=FailoverSegmentRecoveryMethod.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class NotificationType(Enum): """Represents possible notification types.""" COMPUTE_HOST = "COMPUTE_HOST" VM = "VM" PROCESS = "PROCESS" ALL = (COMPUTE_HOST, VM, PROCESS) def __init__(self): super(NotificationType, self).__init__(valid_values=NotificationType.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class EventType(Enum): """Represents possible event types.""" STARTED = "STARTED" STOPPED = 
"STOPPED" ALL = (STARTED, STOPPED) def __init__(self): super(EventType, self).__init__(valid_values=EventType.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class HostStatusType(Enum): """Represents possible event types for Host status.""" NORMAL = "NORMAL" UNKNOWN = "UNKNOWN" ALL = (NORMAL, UNKNOWN) def __init__(self): super(HostStatusType, self).__init__(valid_values=HostStatusType.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class ClusterStatusType(Enum): """Represents possible event types for Cluster status.""" ONLINE = "ONLINE" OFFLINE = "OFFLINE" ALL = (ONLINE, OFFLINE) def __init__(self): super(ClusterStatusType, self).__init__(valid_values=ClusterStatusType.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class NotificationStatus(Enum): """Represents possible statuses for notifications.""" NEW = "new" RUNNING = "running" ERROR = "error" FAILED = "failed" IGNORED = "ignored" FINISHED = "finished" ALL = (NEW, RUNNING, ERROR, FAILED, IGNORED, FINISHED) def __init__(self): super(NotificationStatus, self).__init__(valid_values=NotificationStatus.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class EventNotificationAction(Enum): # Actions of segments SEGMENT_CREATE = 'segment.create' SEGMENT_UPDATE = 'segment.update' SEGMENT_DELETE = 
'segment.delete' # Actions of hosts HOST_CREATE = 'host.create' HOST_UPDATE = 'host.update' HOST_DELETE = 'host.delete' # Actions of notifications NOTIFICATION_CREATE = 'notification.create' NOTIFICATION_PROCESS = 'notification.process' ALL = (SEGMENT_CREATE, SEGMENT_UPDATE, SEGMENT_DELETE, HOST_CREATE, HOST_UPDATE, HOST_DELETE, NOTIFICATION_CREATE, NOTIFICATION_PROCESS) def __init__(self): super(EventNotificationAction, self).__init__(valid_values=EventNotificationAction.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class EventNotificationPriority(BaseMasakariEnum): AUDIT = 'audit' CRITICAL = 'critical' DEBUG = 'debug' INFO = 'info' ERROR = 'error' SAMPLE = 'sample' WARN = 'warn' ALL = (AUDIT, CRITICAL, DEBUG, INFO, ERROR, SAMPLE, WARN) class EventNotificationPhase(Enum): START = 'start' END = 'end' ERROR = 'error' ALL = (START, END, ERROR) def __init__(self): super(EventNotificationPhase, self).__init__(valid_values=EventNotificationPhase.ALL) @classmethod def index(cls, value): """Return an index into the Enum given a value.""" return cls.ALL.index(value) @classmethod def from_index(cls, index): """Return the Enum value at a given index.""" return cls.ALL[index] class FailoverSegmentRecoveryMethodField(BaseEnumField): AUTO_TYPE = FailoverSegmentRecoveryMethod() class NotificationTypeField(BaseEnumField): AUTO_TYPE = NotificationType() class NotificationStatusField(BaseEnumField): AUTO_TYPE = NotificationStatus() class EventNotificationActionField(BaseEnumField): AUTO_TYPE = EventNotificationAction() class EventNotificationPriorityField(BaseEnumField): AUTO_TYPE = EventNotificationPriority() class EventNotificationPhaseField(BaseEnumField): AUTO_TYPE = EventNotificationPhase() masakari-9.0.0/masakari/objects/segment.py0000664000175000017500000001337113656747723020620 0ustar 
zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from oslo_utils import uuidutils from masakari.api import utils as api_utils from masakari import db from masakari import exception from masakari import objects from masakari.objects import base from masakari.objects import fields LOG = logging.getLogger(__name__) @base.MasakariObjectRegistry.register class FailoverSegment(base.MasakariPersistentObject, base.MasakariObject, base.MasakariObjectDictCompat): VERSION = '1.0' fields = { 'id': fields.IntegerField(), 'uuid': fields.UUIDField(), 'name': fields.StringField(), 'service_type': fields.StringField(), 'description': fields.StringField(nullable=True), 'recovery_method': fields.FailoverSegmentRecoveryMethodField(), } @staticmethod def _from_db_object(context, segment, db_segment): for key in segment.fields: setattr(segment, key, db_segment[key]) segment._context = context segment.obj_reset_changes() return segment @base.remotable_classmethod def get_by_id(cls, context, id): db_inst = db.failover_segment_get_by_id(context, id) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_uuid(cls, context, uuid): db_inst = db.failover_segment_get_by_uuid(context, uuid) return cls._from_db_object(context, cls(), db_inst) @base.remotable_classmethod def get_by_name(cls, context, name): db_inst = db.failover_segment_get_by_name(context, name) return 
cls._from_db_object(context, cls(), db_inst) @base.remotable def create(self): if self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='create', reason='already created') updates = self.masakari_obj_get_changes() if 'uuid' not in updates: updates['uuid'] = uuidutils.generate_uuid() LOG.debug('Generated uuid %(uuid)s for failover segment', dict(uuid=updates['uuid'])) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.START) db_segment = db.failover_segment_create(self._context, updates) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_CREATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_segment) @base.remotable def save(self): updates = self.masakari_obj_get_changes() updates.pop('id', None) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_UPDATE, phase=fields.EventNotificationPhase.START) db_segment = db.failover_segment_update(self._context, self.uuid, updates) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_UPDATE, phase=fields.EventNotificationPhase.END) self._from_db_object(self._context, self, db_segment) @base.remotable def destroy(self): if not self.obj_attr_is_set('id'): raise exception.ObjectActionError(action='destroy', reason='already destroyed') if not self.obj_attr_is_set('uuid'): raise exception.ObjectActionError(action='destroy', reason='no uuid') api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_DELETE, phase=fields.EventNotificationPhase.START) db.failover_segment_delete(self._context, self.uuid) api_utils.notify_about_segment_api(self._context, self, action=fields.EventNotificationAction.SEGMENT_DELETE, phase=fields.EventNotificationPhase.END) delattr(self, base.get_attrname('id')) def 
is_under_recovery(self, filters=None): return db.is_failover_segment_under_recovery(self._context, self.uuid, filters=filters) @base.MasakariObjectRegistry.register class FailoverSegmentList(base.ObjectListBase, base.MasakariObject): VERSION = '1.0' fields = { 'objects': fields.ListOfObjectsField('FailoverSegment'), } @base.remotable_classmethod def get_all(cls, ctxt, filters=None, sort_keys=None, sort_dirs=None, limit=None, marker=None): groups = db.failover_segment_get_all_by_filters(ctxt, filters=filters, sort_keys=sort_keys, sort_dirs=sort_dirs, limit=limit, marker=marker) return base.obj_make_list(ctxt, cls(ctxt), objects.FailoverSegment, groups) masakari-9.0.0/masakari/objects/base.py0000664000175000017500000002434413656747723020072 0ustar zuulzuul00000000000000# Copyright 2016 NTT Data. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
"""Masakari common internal object model""" import datetime from oslo_utils import versionutils from oslo_versionedobjects import base as ovoo_base from oslo_versionedobjects import fields as obj_fields from masakari import objects def get_attrname(name): """Return the mangled name of the attribute's underlying storage.""" return '_obj_' + name class MasakariObjectRegistry(ovoo_base.VersionedObjectRegistry): notification_classes = [] def registration_hook(self, cls, index): # NOTE(Dinesh_Bhor): This is called when an object is registered, # and is responsible for maintaining masakari.objects.$OBJECT # as the highest-versioned implementation of a given object. version = versionutils.convert_version_to_tuple(cls.VERSION) if not hasattr(objects, cls.obj_name()): setattr(objects, cls.obj_name(), cls) else: cur_version = versionutils.convert_version_to_tuple( getattr(objects, cls.obj_name()).VERSION) if version >= cur_version: setattr(objects, cls.obj_name(), cls) @classmethod def register_notification(cls, notification_cls): """Register a class as notification. Use only to register concrete notification or payload classes, do not register base classes intended for inheritance only. """ cls.register_if(False)(notification_cls) cls.notification_classes.append(notification_cls) return notification_cls @classmethod def register_notification_objects(cls): """Register previously decorated notification as normal ovos. This is not intended for production use but only for testing and document generation purposes. """ for notification_cls in cls.notification_classes: cls.register(notification_cls) remotable_classmethod = ovoo_base.remotable_classmethod remotable = ovoo_base.remotable class MasakariObject(ovoo_base.VersionedObject): """Base class and object factory. This forms the base of all objects that can be remoted or instantiated via RPC. Simply defining a class that inherits from this base class will make it remotely instantiatable. 
Objects should implement the necessary "get" classmethod routines as well as "save" object methods as appropriate. """ OBJ_SERIAL_NAMESPACE = 'masakari_object' OBJ_PROJECT_NAMESPACE = 'masakari' def masakari_obj_get_changes(self): """Returns a dict of changed fields with tz unaware datetimes. Any timezone aware datetime field will be converted to UTC timezone and returned as timezone unaware datetime. This will allow us to pass these fields directly to a db update method as they can't have timezone information. """ # Get dirtied/changed fields changes = self.obj_get_changes() # Look for datetime objects that contain timezone information for k, v in changes.items(): if isinstance(v, datetime.datetime) and v.tzinfo: # Remove timezone information and adjust the time according to # the timezone information's offset. changes[k] = v.replace(tzinfo=None) - v.utcoffset() # Return modified dict return changes def obj_reset_changes(self, fields=None, recursive=False): """Reset the list of fields that have been changed. .. note:: - This is NOT "revert to previous values" - Specifying fields on recursive resets will only be honored at the top level. Everything below the top will reset all. :param fields: List of fields to reset, or "all" if None. :param recursive: Call obj_reset_changes(recursive=True) on any sub-objects within the list of fields being reset. 
""" if recursive: for field in self.obj_get_changes(): # Ignore fields not in requested set (if applicable) if fields and field not in fields: continue # Skip any fields that are unset if not self.obj_attr_is_set(field): continue value = getattr(self, field) # Don't reset nulled fields if value is None: continue # Reset straight Object and ListOfObjects fields if isinstance(self.fields[field], obj_fields.ObjectField): value.obj_reset_changes(recursive=True) elif isinstance(self.fields[field], obj_fields.ListOfObjectsField): for thing in value: thing.obj_reset_changes(recursive=True) if fields: self._changed_fields -= set(fields) else: self._changed_fields.clear() class MasakariObjectDictCompat(ovoo_base.VersionedObjectDictCompat): def __iter__(self): for name in self.obj_fields: if (self.obj_attr_is_set(name) or name in self.obj_extra_fields): yield name def keys(self): return list(self) class MasakariTimestampObject(object): """Mixin class for db backed objects with timestamp fields. Sqlalchemy models that inherit from the oslo_db TimestampMixin will include these fields and the corresponding objects will benefit from this mixin. """ fields = { 'created_at': obj_fields.DateTimeField(nullable=True), 'updated_at': obj_fields.DateTimeField(nullable=True), } class MasakariPersistentObject(object): """Mixin class for Persistent objects. This adds the fields that we use in common for most persistent objects. 
""" fields = { 'created_at': obj_fields.DateTimeField(nullable=True), 'updated_at': obj_fields.DateTimeField(nullable=True), 'deleted_at': obj_fields.DateTimeField(nullable=True), 'deleted': obj_fields.BooleanField(default=False), } class ObjectListBase(ovoo_base.ObjectListBase): @classmethod def _obj_primitive_key(cls, field): return 'masakari_object.%s' % field @classmethod def _obj_primitive_field(cls, primitive, field, default=obj_fields.UnspecifiedDefault): key = cls._obj_primitive_key(field) if default == obj_fields.UnspecifiedDefault: return primitive[key] else: return primitive.get(key, default) class MasakariObjectSerializer(ovoo_base.VersionedObjectSerializer): """A Masakari Object Serializer. This implements the Oslo Serializer interface and provides the ability to serialize and deserialize MasakariObject entities. Any service that needs to accept or return MasakariObjects as arguments or result values should pass this to its RPCClient and RPCServer objects. """ OBJ_BASE_CLASS = MasakariObject def __init__(self): super(MasakariObjectSerializer, self).__init__() def obj_make_list(context, list_obj, item_cls, db_list, **extra_args): """Construct an object list from a list of primitives. This calls item_cls._from_db_object() on each item of db_list, and adds the resulting object to list_obj. :param:context: Request context :param:list_obj: An ObjectListBase object :param:item_cls: The MasakariObject class of the objects within the list :param:db_list: The list of primitives to convert to objects :param:extra_args: Extra arguments to pass to _from_db_object() :returns: list_obj """ list_obj.objects = [] for db_item in db_list: item = item_cls._from_db_object(context, item_cls(), db_item, **extra_args) list_obj.objects.append(item) list_obj._context = context list_obj.obj_reset_changes() return list_obj def obj_to_primitive(obj): """Recursively turn an object into a python primitive. 
A MasakariObject becomes a dict, and anything that implements ObjectListBase becomes a list. """ if isinstance(obj, ObjectListBase): return [obj_to_primitive(x) for x in obj] elif isinstance(obj, MasakariObject): result = {} for key in obj.obj_fields: if obj.obj_attr_is_set(key) or key in obj.obj_extra_fields: result[key] = obj_to_primitive(getattr(obj, key)) return result else: return obj def obj_equal_prims(obj_1, obj_2, ignore=None): """Compare two primitives for equivalence ignoring some keys. This operation tests the primitives of two objects for equivalence. Object primitives may contain a list identifying fields that have been changed - this is ignored in the comparison. The ignore parameter lists any other keys to be ignored. :param:obj1: The first object in the comparison :param:obj2: The second object in the comparison :param:ignore: A list of fields to ignore :returns: True if the primitives are equal ignoring changes and specified fields, otherwise False. """ def _strip(prim, keys): if isinstance(prim, dict): for k in keys: prim.pop(k, None) for v in prim.values(): _strip(v, keys) if isinstance(prim, list): for v in prim: _strip(v, keys) return prim if ignore is not None: keys = ['masakari_object.changes'] + ignore else: keys = ['masakari_object.changes'] prim_1 = _strip(obj_1.obj_to_primitive(), keys) prim_2 = _strip(obj_2.obj_to_primitive(), keys) return prim_1 == prim_2 masakari-9.0.0/masakari/cmd/0000775000175000017500000000000013656750011015671 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/cmd/__init__.py0000664000175000017500000000120213656747723020015 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import eventlet eventlet.monkey_patch() masakari-9.0.0/masakari/cmd/api.py0000664000175000017500000000531713656747723017042 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Starter script for Masakari API. 
""" import os import sys from oslo_log import log as logging from oslo_service import _options as service_opts from paste import deploy import six from masakari.common import config import masakari.conf from masakari import config as api_config from masakari import exception from masakari import objects from masakari import rpc from masakari import service from masakari import version CONFIG_FILES = ['api-paste.ini', 'masakari.conf'] CONF = masakari.conf.CONF def _get_config_files(env=None): if env is None: env = os.environ dirname = env.get('OS_MASAKARI_CONFIG_DIR', '/etc/masakari').strip() return [os.path.join(dirname, config_file) for config_file in CONFIG_FILES] def main(): api_config.parse_args(sys.argv) logging.setup(CONF, "masakari") log = logging.getLogger(__name__) objects.register_all() launcher = service.process_launcher() try: server = service.WSGIService("masakari_api", use_ssl=CONF.use_ssl) launcher.launch_service(server, workers=server.workers or 1) except exception.PasteAppNotFound as ex: log.error("Failed to start ``masakari_api`` service. Error: %s", six.text_type(ex)) sys.exit(1) launcher.wait() def initialize_application(): conf_files = _get_config_files() api_config.parse_args([], default_config_files=conf_files) logging.setup(CONF, "masakari") objects.register_all() CONF(sys.argv[1:], project='masakari', version=version.version_string()) # NOTE: Dump conf at debug (log_options option comes from oslo.service) # This is gross but we don't have a public hook into oslo.service to # register these options, so we are doing it manually for now; # remove this when we have a hook method into oslo.service. 
CONF.register_opts(service_opts.service_opts) if CONF.log_options: CONF.log_opt_values(logging.getLogger(__name__), logging.DEBUG) config.set_middleware_defaults() rpc.init(CONF) conf = conf_files[0] return deploy.loadapp('config:%s' % conf, name="masakari_api") masakari-9.0.0/masakari/cmd/engine.py0000664000175000017500000000223613656747723017533 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Starter script for Masakari Engine.""" import sys from oslo_log import log as logging import masakari.conf from masakari import config from masakari import objects from masakari import service from masakari import utils CONF = masakari.conf.CONF def main(): config.parse_args(sys.argv) logging.setup(CONF, "masakari") utils.monkey_patch() objects.register_all() server = service.Service.create(binary='masakari-engine', topic=CONF.masakari_topic) service.serve(server) service.wait() masakari-9.0.0/masakari/cmd/status.py0000664000175000017500000000300113656747723017600 0ustar zuulzuul00000000000000# Copyright (c) 2018 NEC, Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from oslo_upgradecheck import upgradecheck from masakari import conf from masakari.i18n import _ CONF = conf.CONF class Checks(upgradecheck.UpgradeCommands): """Contains upgrade checks Various upgrade checks should be added as separate methods in this class and added to _upgrade_checks tuple. """ def _sample_check(self): """This is sample check added to test the upgrade check framework It needs to be removed after adding any real upgrade check """ return upgradecheck.Result(upgradecheck.Code.SUCCESS, 'Sample detail') _upgrade_checks = ( # Sample check added for now. # Whereas in future real checks must be added here in tuple (_('Sample Check'), _sample_check), ) def main(): return upgradecheck.main( CONF, project='masakari', upgrade_command=Checks()) if __name__ == '__main__': sys.exit(main()) masakari-9.0.0/masakari/cmd/manage.py0000664000175000017500000001501713656747723017517 0ustar zuulzuul00000000000000#!/usr/bin/env python # Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ CLI interface for masakari management. 
""" import logging as python_logging import sys import time from oslo_config import cfg from oslo_db.sqlalchemy import migration from oslo_log import log as logging import six import masakari.conf from masakari import context from masakari import db from masakari.db import api as db_api from masakari.db.sqlalchemy import migration as db_migration from masakari import exception from masakari.i18n import _ from masakari import utils from masakari import version CONF = masakari.conf.CONF logging.register_options(CONF) # Decorators for actions def args(*args, **kwargs): def _decorator(func): func.__dict__.setdefault('args', []).insert(0, (args, kwargs)) return func return _decorator def _db_error(caught_exception): print('%s' % caught_exception) print(_("The above error may show that the database has not " "been created.\nPlease create a database using " "'masakari-manage db sync' before running this command.")) sys.exit(1) class DbCommands(object): """Class for managing the database.""" def __init__(self): pass @args('version', nargs='?', default=None, type=int, help='Database version') def sync(self, version=None): """Sync the database up to the most recent version.""" try: return db_migration.db_sync(version) except exception.InvalidInput as ex: print(ex) sys.exit(1) def version(self): """Print the current database version.""" print(migration.db_version(db_api.get_engine(), db_migration.MIGRATE_REPO_PATH, db_migration.INIT_VERSION)) @args('--age_in_days', type=int, default=30, help='Purge deleted rows older than age in days (default: ' '%(default)d)') @args('--max_rows', type=int, default=-1, help='Limit number of records to delete (default: %(default)d)') def purge(self, age_in_days, max_rows): """Purge rows older than a given age from masakari tables.""" try: max_rows = utils.validate_integer( max_rows, 'max_rows', -1, db.MAX_INT) except exception.Invalid as exc: sys.exit(six.text_type(exc)) try: age_in_days = int(age_in_days) except ValueError: msg = 'Invalid 
value for age, %(age)s' % {'age': age_in_days} sys.exit(six.text_type(msg)) if max_rows == 0: sys.exit(_("Must supply value greater than 0 for max_rows.")) if age_in_days < 0: sys.exit(_("Must supply a non-negative value for age.")) if age_in_days >= (int(time.time()) / 86400): sys.exit(_("Maximal age is count of days since epoch.")) ctx = context.get_admin_context() db_api.purge_deleted_rows(ctx, age_in_days, max_rows) CATEGORIES = { 'db': DbCommands, } def methods_of(obj): """Return non-private methods from an object. Get all callable methods of an object that don't start with underscore :return: a list of tuples of the form (method_name, method) """ result = [] for i in dir(obj): if callable(getattr(obj, i)) and not i.startswith('_'): result.append((i, getattr(obj, i))) return result def add_command_parsers(subparsers): for category in CATEGORIES: command_object = CATEGORIES[category]() parser = subparsers.add_parser(category) parser.set_defaults(command_object=command_object) category_subparsers = parser.add_subparsers(dest='action') for (action, action_fn) in methods_of(command_object): parser = category_subparsers.add_parser(action) action_kwargs = [] for args, kwargs in getattr(action_fn, 'args', []): parser.add_argument(*args, **kwargs) parser.set_defaults(action_fn=action_fn) parser.set_defaults(action_kwargs=action_kwargs) command_opt = cfg.SubCommandOpt('category', title='Command categories', help='Available categories', handler=add_command_parsers) def get_arg_string(args): arg = None if args[0] == '-': # NOTE(Dinesh_Bhor): args starts with FLAGS.oparser.prefix_chars # is optional args. Notice that cfg module takes care of # actual ArgParser so prefix_chars is always '-'. 
if args[1] == '-': # This is long optional arg arg = args[2:] else: arg = args[1:] else: arg = args return arg def fetch_func_args(func): fn_args = [] for args, kwargs in getattr(func, 'args', []): arg = get_arg_string(args[0]) fn_args.append(getattr(CONF.category, arg)) return fn_args def main(): """Parse options and call the appropriate class/method.""" CONF.register_cli_opt(command_opt) script_name = sys.argv[0] if len(sys.argv) < 2: print(_("\nOpenStack masakari version: %(version)s\n") % {'version': version.version_string()}) print(script_name + " category action []") print(_("Available categories:")) for category in CATEGORIES: print(_("\t%s") % category) sys.exit(2) try: CONF(sys.argv[1:], project='masakari', version=version.version_string()) logging.setup(CONF, "masakari") python_logging.captureWarnings(True) except cfg.ConfigDirNotFoundError as details: print(_("Invalid directory: %s") % details) sys.exit(2) except cfg.ConfigFilesNotFoundError as e: cfg_files = ', '.join(e.config_files) print(_("Failed to read configuration file(s): %s") % cfg_files) sys.exit(2) fn = CONF.category.action_fn fn_args = fetch_func_args(fn) fn(*fn_args) masakari-9.0.0/masakari/context.py0000664000175000017500000002255313656747723017213 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ RequestContext: context for requests that persist through all of masakari. 
""" import copy from keystoneauth1.access import service_catalog as ksa_service_catalog from keystoneauth1 import plugin from oslo_context import context from oslo_db.sqlalchemy import enginefacade from oslo_log import log as logging from oslo_utils import timeutils import six from masakari import exception from masakari.i18n import _ from masakari import policy from masakari import utils LOG = logging.getLogger(__name__) class _ContextAuthPlugin(plugin.BaseAuthPlugin): """A keystoneauth auth plugin that uses the values from the Context. Ideally we would use the plugin provided by auth_token middleware however this plugin isn't serialized yet so we construct one from the serialized auth data. """ def __init__(self, auth_token, sc): super(_ContextAuthPlugin, self).__init__() self.auth_token = auth_token self.service_catalog = ksa_service_catalog.ServiceCatalogV2(sc) def get_token(self, *args, **kwargs): return self.auth_token def get_endpoint(self, session, service_type=None, interface=None, region_name=None, service_name=None, **kwargs): return self.service_catalog.url_for(service_type=service_type, service_name=service_name, interface=interface, region_name=region_name) @enginefacade.transaction_context_provider class RequestContext(context.RequestContext): """Security context and request information. Represents the user taking a given action within the system. """ def __init__(self, user_id=None, project_id=None, is_admin=None, read_deleted="no", roles=None, remote_address=None, timestamp=None, request_id=None, auth_token=None, overwrite=True, user_name=None, project_name=None, service_catalog=None, user_auth_plugin=None, **kwargs): """:param read_deleted: 'no' indicates deleted records are hidden, 'yes' indicates deleted records are visible, 'only' indicates that *only* deleted records are visible. :param overwrite: Set to False to ensure that the greenthread local copy of the index is not overwritten. 
:param user_auth_plugin: The auth plugin for the current request's authentication data. :param kwargs: Extra arguments that might be present, but we ignore because they possibly came in from older rpc messages. """ user = kwargs.pop('user', None) tenant = kwargs.pop('tenant', None) super(RequestContext, self).__init__( auth_token=auth_token, user=user_id or user, tenant=project_id or tenant, domain=kwargs.pop('domain', None), user_domain=kwargs.pop('user_domain', None), project_domain=kwargs.pop('project_domain', None), is_admin=is_admin, read_only=kwargs.pop('read_only', False), show_deleted=kwargs.pop('show_deleted', False), request_id=request_id, resource_uuid=kwargs.pop('resource_uuid', None), overwrite=overwrite, roles=roles, is_admin_project=kwargs.pop('is_admin_project', True), global_request_id=kwargs.pop('global_request_id', None)) # oslo_context's RequestContext.to_dict() generates this field, we can # safely ignore this as we don't use it. kwargs.pop('user_identity', None) if kwargs: LOG.debug('Arguments dropped when creating context: %s', str(kwargs)) # FIXME: user_id and project_id duplicate information that is # already present in the oslo_context's RequestContext. We need to # get rid of them. 
self.user_id = user_id self.project_id = project_id self.read_deleted = read_deleted self.remote_address = remote_address if not timestamp: timestamp = timeutils.utcnow() if isinstance(timestamp, six.string_types): timestamp = timeutils.parse_strtime(timestamp) self.timestamp = timestamp if service_catalog: # Only include required parts of service_catalog self.service_catalog = [ s for s in service_catalog if s.get('type') in ( 'compute', 'identity')] else: # if list is empty or none self.service_catalog = [] self.user_name = user_name self.project_name = project_name self.is_admin = is_admin self.user_auth_plugin = user_auth_plugin if self.is_admin is None: self.is_admin = policy.check_is_admin(self) def get_auth_plugin(self): if self.user_auth_plugin: return self.user_auth_plugin else: return _ContextAuthPlugin(self.auth_token, self.service_catalog) def _get_read_deleted(self): return self._read_deleted def _set_read_deleted(self, read_deleted): if read_deleted not in ('no', 'yes', 'only'): raise ValueError(_("read_deleted can only be one of 'no', " "'yes' or 'only', not %r") % read_deleted) self._read_deleted = read_deleted def _del_read_deleted(self): del self._read_deleted read_deleted = property(_get_read_deleted, _set_read_deleted, _del_read_deleted) def to_dict(self): values = super(RequestContext, self).to_dict() # FIXME: defensive hasattr() checks need to be # removed once we figure out why we are seeing stack # traces values.update({ 'user_id': getattr(self, 'user_id', None), 'project_id': getattr(self, 'project_id', None), 'is_admin': getattr(self, 'is_admin', None), 'read_deleted': getattr(self, 'read_deleted', 'no'), 'remote_address': getattr(self, 'remote_address', None), 'timestamp': utils.strtime(self.timestamp) if hasattr( self, 'timestamp') else None, 'request_id': getattr(self, 'request_id', None), 'user_name': getattr(self, 'user_name', None), 'service_catalog': getattr(self, 'service_catalog', None), 'project_name': getattr(self, 
'project_name', None) }) return values @classmethod def from_dict(cls, values): return cls(**values) def elevated(self, read_deleted=None): """Return a version of this context with admin flag set.""" context = copy.copy(self) # context.roles must be deepcopied to leave original roles # without changes context.roles = copy.deepcopy(self.roles) context.is_admin = True if 'admin' not in context.roles: context.roles.append('admin') if read_deleted is not None: context.read_deleted = read_deleted return context def can(self, action, target=None, fatal=True): """Verifies that the given action is valid on the target in this context. :param action: string representing the action to be checked. :param target: dictionary representing the object of the action for object creation this should be a dictionary representing the location of the object e.g. ``{'project_id': context.project_id}``. If None, then this default target will be considered: {'project_id': self.project_id, 'user_id': self.user_id} :param fatal: if False, will return False when an exception.Forbidden occurs. :raises masakari.exception.Forbidden: if verification fails and fatal is True. :return: returns a non-False value (not necessarily "True") if authorized and False if not authorized and fatal is False. """ if target is None: target = {'project_id': self.project_id, 'user_id': self.user_id} try: return policy.authorize(self, action, target) except exception.Forbidden: if fatal: raise return False def to_policy_values(self): policy = super(RequestContext, self).to_policy_values() policy['is_admin'] = self.is_admin return policy def __str__(self): return "" % self.to_dict() def get_admin_context(read_deleted="no"): return RequestContext(user_id=None, project_id=None, is_admin=True, read_deleted=read_deleted, overwrite=False) masakari-9.0.0/masakari/policy.py0000664000175000017500000001673113656747723017027 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Policy Engine For Masakari.""" import copy import logging import re import sys from oslo_config import cfg from oslo_policy import policy from oslo_utils import excutils from masakari import exception # from masakari.i18n import _LE, _LW from masakari import policies CONF = cfg.CONF LOG = logging.getLogger(__name__) _ENFORCER = None # saved_file_rules and used to compare with new rules to determine the # rules whether were updated. saved_file_rules = [] KEY_EXPR = re.compile(r'%\((\w+)\)s') def reset(): global _ENFORCER if _ENFORCER: _ENFORCER.clear() _ENFORCER = None def init(policy_file=None, rules=None, default_rule=None, use_conf=True): """Init an Enforcer class. :param policy_file: Custom policy file to use, if none is specified, `CONF.policy_file` will be used. :param rules: Default dictionary / Rules to use. It will be considered just in the first instantiation. :param default_rule: Default rule to use, CONF.default_rule will be used if none is specified. :param use_conf: Whether to load rules from config file. """ global _ENFORCER global saved_file_rules if not _ENFORCER: _ENFORCER = policy.Enforcer(CONF, policy_file=policy_file, rules=rules, default_rule=default_rule, use_conf=use_conf) register_rules(_ENFORCER) _ENFORCER.load_rules() # Only the rules which are loaded from file may be changed. 
current_file_rules = _ENFORCER.file_rules current_file_rules = _serialize_rules(current_file_rules) # Checks whether the rules are updated in the runtime if saved_file_rules != current_file_rules: _warning_for_deprecated_user_based_rules(current_file_rules) saved_file_rules = copy.deepcopy(current_file_rules) def _serialize_rules(rules): """Serialize all the Rule object as string which is used to compare the rules list. """ result = [(rule_name, str(rule)) for rule_name, rule in rules.items()] return sorted(result, key=lambda rule: rule[0]) def _warning_for_deprecated_user_based_rules(rules): """Warning user based policy enforcement used in the rule but the rule doesn't support it. """ for rule in rules: if 'user_id' in KEY_EXPR.findall(rule[1]): LOG.debug(("The user_id attribute isn't supported in the rule%s'. " "All the user_id based policy enforcement will be " "removed in the future."), rule[0]) def set_rules(rules, overwrite=True, use_conf=False): """Set rules based on the provided dict of rules. :param rules: New rules to use. It should be an instance of dict. :param overwrite: Whether to overwrite current rules or update them with the new rules. :param use_conf: Whether to reload rules from config file. """ init(use_conf=False) _ENFORCER.set_rules(rules, overwrite, use_conf) def authorize(context, action, target, do_raise=True, exc=None): """Verifies that the action is valid on the target in this context. :param context: masakari context :param action: string representing the action to be checked this should be colon separated for clarity. i.e. ``os_masakari_api:segments``, ``os_masakari_api:os-hosts``, ``os_masakari_api:notifications``, ``os_masakari_api:extensions`` :param target: dictionary representing the object of the action for object creation this should be a dictionary representing the location of the object e.g. 
``{'project_id': context.project_id}`` :param do_raise: if True (the default), raises PolicyNotAuthorized; if False, returns False :param exc: Class of the exception to raise if the check fails. Any remaining arguments passed to :meth:`authorize` (both positional and keyword arguments) will be passed to the exception class. If not specified, :class:`PolicyNotAuthorized` will be used. :raises masakari.exception.PolicyNotAuthorized: if verification fails and do_raise is True. Or if 'exc' is specified it will raise an exception of that type. :return: returns a non-False value (not necessarily "True") if authorized, and the exact value False if not authorized and do_raise is False. """ init() credentials = context.to_policy_values() if not exc: exc = exception.PolicyNotAuthorized try: result = _ENFORCER.authorize(action, target, credentials, do_raise=do_raise, exc=exc, action=action) except policy.PolicyNotRegistered: with excutils.save_and_reraise_exception(): LOG.debug('Policy not registered') except Exception: with excutils.save_and_reraise_exception(): LOG.debug('Policy check for %(action)s failed with credentials ' '%(credentials)s', {'action': action, 'credentials': credentials}) return result def check_is_admin(context): """Whether or not roles contains 'admin' role according to policy setting. 
""" init() # the target is user-self credentials = context.to_policy_values() target = credentials return _ENFORCER.authorize('context_is_admin', target, credentials) @policy.register('is_admin') class IsAdminCheck(policy.Check): """An explicit check for is_admin.""" def __init__(self, kind, match): """Initialize the check.""" self.expected = (match.lower() == 'true') super(IsAdminCheck, self).__init__(kind, str(self.expected)) def __call__(self, target, creds, enforcer): """Determine whether is_admin matches the requested value.""" return creds['is_admin'] == self.expected def get_rules(): if _ENFORCER: return _ENFORCER.rules def register_rules(enforcer): enforcer.register_defaults(policies.list_rules()) def get_enforcer(): # This method is for use by oslopolicy CLI scripts. Those scripts need the # 'output-file' and 'namespace' options, but having those in sys.argv means # loading the Masakari config options will fail as those are not expected # to be present. So we pass in an arg list with those stripped out. conf_args = [] # Start at 1 because cfg.CONF expects the equivalent of sys.argv[1:] i = 1 while i < len(sys.argv): if sys.argv[i].strip('-') in ['namespace', 'output-file']: i += 2 continue conf_args.append(sys.argv[i]) i += 1 cfg.CONF(conf_args, project='masakari') init() return _ENFORCER masakari-9.0.0/masakari/engine/0000775000175000017500000000000013656750011016373 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/engine/utils.py0000664000175000017500000000446013656747723020131 0ustar zuulzuul00000000000000# Copyright (c) 2018 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import socket

from masakari.notifications.objects import base as notification_base
from masakari.notifications.objects import exception as notification_exception
from masakari.notifications.objects import notification as event_notification
from masakari.objects import fields


def _get_fault_and_priority_from_exc_and_tb(exception, tb):
    """Derive the notification fault payload and priority from an exception.

    :param exception: the thrown exception, or None if the operation
        succeeded
    :param tb: the traceback associated with the exception (unused when
        exception is None)
    :returns: ``(fault, priority)`` tuple — ``(None, INFO)`` when there is
        no exception, otherwise an ExceptionPayload and ERROR priority.
    """
    fault = None
    priority = fields.EventNotificationPriority.INFO

    if exception:
        priority = fields.EventNotificationPriority.ERROR
        fault = notification_exception.ExceptionPayload.from_exc_and_traceback(
            exception, tb)

    return fault, priority


def notify_about_notification_update(context, notification, action, phase=None,
                                     binary='masakari-engine', exception=None,
                                     tb=None):
    """Send versioned notification about a notification update.

    :param context: the request context used to emit the notification
    :param notification: Notification object
    :param action: the name of the action
    :param phase: the phase of the action
    :param binary: the binary emitting the notification
    :param exception: the thrown exception (used in error notifications)
    :param tb: the traceback (used in error notifications)
    """
    fault, priority = _get_fault_and_priority_from_exc_and_tb(exception, tb)
    payload = event_notification.NotificationApiPayload(
        notification=notification, fault=fault)
    # The publisher host is the local hostname of the emitting service.
    engine_notification = event_notification.NotificationApiNotification(
        context=context,
        priority=priority,
        publisher=notification_base.NotificationPublisher(
            context=context, host=socket.gethostname(), binary=binary),
        event_type=notification_base.EventType(
            action=action, phase=phase),
        payload=payload)
    engine_notification.emit(context)
masakari-9.0.0/masakari/engine/__init__.py0000664000175000017500000000000013656747723020512 0ustar  zuulzuul00000000000000masakari-9.0.0/masakari/engine/drivers/0000775000175000017500000000000013656750011020051 5ustar  zuulzuul00000000000000masakari-9.0.0/masakari/engine/drivers/__init__.py0000664000175000017500000000125013656747723022200 0ustar  zuulzuul00000000000000# Copyright 2016 NTT DATA
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__import__('pkg_resources').declare_namespace(__name__) masakari-9.0.0/masakari/engine/drivers/taskflow/0000775000175000017500000000000013656750011021703 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/engine/drivers/taskflow/__init__.py0000664000175000017500000000131413656747723024033 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from masakari.engine.drivers.taskflow import driver TaskFlowDriver = driver.TaskFlowDriver masakari-9.0.0/masakari/engine/drivers/taskflow/instance_failure.py0000664000175000017500000002065013656747723025613 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from eventlet import timeout as etimeout

from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import strutils
from taskflow.patterns import linear_flow

import masakari.conf
from masakari.engine.drivers.taskflow import base
from masakari import exception


CONF = masakari.conf.CONF

LOG = logging.getLogger(__name__)

# Taskflow action name for the instance recovery workflow.
ACTION = "instance:recovery"

TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows


class StopInstanceTask(base.MasakariTask):
    """Task that stops the failed instance before it is restarted.

    Skips recovery entirely (SkipInstanceRecoveryException) when the
    instance is not HA_Enabled and the process_all_instances option is
    off; ignores recovery (IgnoreInstanceRecoveryException) for paused
    or rescued instances.
    """

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["instance_uuid"]
        super(StopInstanceTask, self).__init__(context, novaclient, **kwargs)

    def execute(self, instance_uuid):
        """Stop the instance for recovery.

        :param instance_uuid: UUID of the instance to stop
        :raises exception.SkipInstanceRecoveryException: instance is not
            HA_Enabled and process_all_instances is disabled
        :raises exception.IgnoreInstanceRecoveryException: instance is in
            'paused' or 'rescued' vm_state
        :raises exception.InstanceRecoveryFailureException: instance did not
            reach 'stopped' within CONF.wait_period_after_power_off seconds
        """
        instance = self.novaclient.get_server(self.context, instance_uuid)

        # If an instance is not HA_Enabled and "process_all_instances" config
        # option is also disabled, then there is no need to take any recovery
        # action.
        if not CONF.instance_failure.process_all_instances and not (
                strutils.bool_from_string(
                    instance.metadata.get('HA_Enabled', False))):
            msg = ("Skipping recovery for instance: %(instance_uuid)s as it is"
                   " not Ha_Enabled") % {'instance_uuid': instance_uuid}
            LOG.info(msg)
            self.update_details(msg, 1.0)
            raise exception.SkipInstanceRecoveryException()

        vm_state = getattr(instance, 'OS-EXT-STS:vm_state')
        if vm_state in ['paused', 'rescued']:
            msg = ("Recovery of instance '%(instance_uuid)s' is ignored as it "
                   "is in '%(vm_state)s' state.") % {
                'instance_uuid': instance_uuid,
                'vm_state': vm_state
            }
            LOG.warning(msg)
            self.update_details(msg, 1.0)
            raise exception.IgnoreInstanceRecoveryException(msg)

        if vm_state != 'stopped':
            if vm_state == 'resized':
                # A 'resized' instance cannot be stopped directly; reset it
                # to 'active' first.
                self.novaclient.reset_instance_state(
                    self.context, instance.id, 'active')

            msg = "Stopping instance: %s" % instance_uuid
            self.update_details(msg)

            self.novaclient.stop_server(self.context, instance.id)

        def _wait_for_power_off():
            # Poll until nova reports the instance as 'stopped'.
            new_instance = self.novaclient.get_server(self.context,
                                                      instance_uuid)
            vm_state = getattr(new_instance, 'OS-EXT-STS:vm_state')
            if vm_state == 'stopped':
                raise loopingcall.LoopingCallDone()

        periodic_call = loopingcall.FixedIntervalLoopingCall(
            _wait_for_power_off)

        try:
            # add a timeout to the periodic call.
            periodic_call.start(interval=CONF.verify_interval)
            etimeout.with_timeout(CONF.wait_period_after_power_off,
                                  periodic_call.wait)

            msg = "Stopped instance: '%s'" % instance_uuid
            self.update_details(msg, 1.0)
        except etimeout.Timeout:
            msg = "Failed to stop instance %(instance)s" % {
                'instance': instance.id
            }
            self.update_details(msg, 1.0)
            raise exception.InstanceRecoveryFailureException(
                message=msg)
        finally:
            # stop the periodic call, in case of exceptions or Timeout.
            periodic_call.stop()


class StartInstanceTask(base.MasakariTask):
    """Task that starts the instance previously stopped by StopInstanceTask."""

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["instance_uuid"]
        super(StartInstanceTask, self).__init__(context, novaclient, **kwargs)

    def execute(self, instance_uuid):
        """Start the instance.

        :param instance_uuid: UUID of the instance to start
        :raises exception.InstanceRecoveryFailureException: instance is not
            in 'stopped' vm_state when this task runs
        """
        msg = "Starting instance: '%s'" % instance_uuid
        self.update_details(msg)

        instance = self.novaclient.get_server(self.context, instance_uuid)
        vm_state = getattr(instance, 'OS-EXT-STS:vm_state')
        if vm_state == 'stopped':
            self.novaclient.start_server(self.context, instance.id)
            msg = "Instance started: '%s'" % instance_uuid
            self.update_details(msg, 1.0)
        else:
            msg = ("Invalid state for Instance %(instance)s. Expected state: "
                   "'STOPPED', Actual state: '%(actual_state)s'") % {
                'instance': instance_uuid,
                'actual_state': vm_state
            }
            self.update_details(msg, 1.0)
            raise exception.InstanceRecoveryFailureException(
                message=msg)


class ConfirmInstanceActiveTask(base.MasakariTask):
    """Task that waits until the restarted instance reaches 'active' state."""

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["instance_uuid"]
        super(ConfirmInstanceActiveTask, self).__init__(context, novaclient,
                                                        **kwargs)

    def execute(self, instance_uuid):
        """Poll nova until the instance vm_state becomes 'active'.

        :param instance_uuid: UUID of the instance to confirm
        :raises exception.InstanceRecoveryFailureException: instance did not
            become 'active' within CONF.wait_period_after_power_on seconds
        """
        def _wait_for_active():
            new_instance = self.novaclient.get_server(self.context,
                                                      instance_uuid)
            vm_state = getattr(new_instance, 'OS-EXT-STS:vm_state')
            if vm_state == 'active':
                raise loopingcall.LoopingCallDone()

        periodic_call = loopingcall.FixedIntervalLoopingCall(
            _wait_for_active)
        try:
            msg = "Confirming instance '%s' vm_state is ACTIVE" % instance_uuid
            self.update_details(msg)

            # add a timeout to the periodic call.
            periodic_call.start(interval=CONF.verify_interval)
            etimeout.with_timeout(CONF.wait_period_after_power_on,
                                  periodic_call.wait)

            msg = "Confirmed instance '%s' vm_state is ACTIVE" % instance_uuid
            self.update_details(msg, 1.0)
        except etimeout.Timeout:
            msg = "Failed to start instance %(instance)s" % {
                'instance': instance_uuid
            }
            self.update_details(msg, 1.0)
            raise exception.InstanceRecoveryFailureException(
                message=msg)
        finally:
            # stop the periodic call, in case of exceptions or Timeout.
            periodic_call.stop()


def get_instance_recovery_flow(context, novaclient, process_what):
    """Constructs and returns the engine entrypoint flow.

    This flow will do the following:

    1. Stop the instance
    2. Start the instance.
    3. Confirm instance is in active state.

    The pre/main/post task lists are read from the configured
    instance_failure_recovery_tasks so operators can plug in custom tasks.
    """
    flow_name = ACTION.replace(":", "_") + "_engine"
    nested_flow = linear_flow.Flow(flow_name)

    task_dict = TASKFLOW_CONF.instance_failure_recovery_tasks

    instance_recovery_workflow_pre = linear_flow.Flow('pre_tasks')
    for plugin in base.get_recovery_flow(task_dict['pre'],
                                         context=context,
                                         novaclient=novaclient):
        instance_recovery_workflow_pre.add(plugin)

    instance_recovery_workflow_main = linear_flow.Flow('main_tasks')
    for plugin in base.get_recovery_flow(task_dict['main'],
                                         context=context,
                                         novaclient=novaclient):
        instance_recovery_workflow_main.add(plugin)

    instance_recovery_workflow_post = linear_flow.Flow('post_tasks')
    for plugin in base.get_recovery_flow(task_dict['post'],
                                         context=context,
                                         novaclient=novaclient):
        instance_recovery_workflow_post.add(plugin)

    nested_flow.add(instance_recovery_workflow_pre)
    nested_flow.add(instance_recovery_workflow_main)
    nested_flow.add(instance_recovery_workflow_post)

    return base.load_taskflow_into_engine(ACTION, nested_flow,
                                          process_what)
masakari-9.0.0/masakari/engine/drivers/taskflow/no_op.py0000664000175000017500000000176713656747723023412 0ustar  zuulzuul00000000000000# Copyright 2018 NTT DATA.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging from taskflow import task LOG = logging.getLogger(__name__) class Noop(task.Task): def __init__(self, context, novaclient, **kwargs): self.context = context self.novaclient = novaclient super(Noop, self).__init__() def execute(self, **kwargs): LOG.info("Custom task executed successfully..!!") return masakari-9.0.0/masakari/engine/drivers/taskflow/driver.py0000664000175000017500000003552513656747723023602 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Driver TaskFlowDriver: Execute notification workflows using taskflow. 
""" from collections import OrderedDict import contextlib from oslo_log import log as logging from oslo_utils import excutils from taskflow import exceptions from taskflow.persistence import backends from masakari.compute import nova import masakari.conf from masakari.engine import driver from masakari.engine.drivers.taskflow import base from masakari.engine.drivers.taskflow import host_failure from masakari.engine.drivers.taskflow import instance_failure from masakari.engine.drivers.taskflow import process_failure from masakari import exception from masakari.i18n import _ from masakari import objects from masakari.objects import fields CONF = masakari.conf.CONF TASKFLOW_CONF = CONF.taskflow_driver_recovery_flows PERSISTENCE_BACKEND = CONF.taskflow.connection LOG = logging.getLogger(__name__) class TaskFlowDriver(driver.NotificationDriver): def __init__(self): super(TaskFlowDriver, self).__init__() def _execute_auto_workflow(self, context, novaclient, process_what): flow_engine = host_failure.get_auto_flow(context, novaclient, process_what) # Attaching this listener will capture all of the notifications # that taskflow sends out and redirect them to a more useful # log for masakari's debugging (or error reporting) usage. 
with base.DynamicLogListener(flow_engine, logger=LOG): flow_engine.run() def _execute_rh_workflow(self, context, novaclient, process_what, **kwargs): if not kwargs['reserved_host_list']: msg = _('No reserved_hosts available for evacuation.') raise exception.ReservedHostsUnavailable(message=msg) process_what['reserved_host_list'] = kwargs.pop('reserved_host_list') flow_engine = host_failure.get_rh_flow(context, novaclient, process_what, **kwargs) with base.DynamicLogListener(flow_engine, logger=LOG): try: flow_engine.run() except exception.LockAlreadyAcquired as ex: raise exception.HostRecoveryFailureException(ex.message) def _execute_auto_priority_workflow(self, context, novaclient, process_what, **kwargs): try: self._execute_auto_workflow(context, novaclient, process_what) except Exception as ex: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(ex, exception.SkipHostRecoveryException): ctxt.reraise = True return # Caught generic Exception to make sure that any failure # should lead to execute 'reserved_host' recovery workflow. msg = ("Failed to evacuate all instances from " "failed_host: '%(failed_host)s' using " "'%(auto)s' workflow, retrying using " "'%(reserved_host)s' workflow.") LOG.warning(msg, { 'failed_host': process_what['host_name'], 'auto': fields.FailoverSegmentRecoveryMethod.AUTO, 'reserved_host': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST }) self._execute_rh_workflow(context, novaclient, process_what, **kwargs) def _execute_rh_priority_workflow(self, context, novaclient, process_what, **kwargs): try: self._execute_rh_workflow(context, novaclient, process_what, **kwargs) except Exception as ex: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(ex, exception.SkipHostRecoveryException): ctxt.reraise = True return # Caught generic Exception to make sure that any failure # should lead to execute 'auto' recovery workflow. 
msg = ("Failed to evacuate all instances from " "failed_host '%(failed_host)s' using " "'%(reserved_host)s' workflow, retrying using " "'%(auto)s' workflow") LOG.warning(msg, { 'failed_host': process_what['host_name'], 'reserved_host': fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, 'auto': fields.FailoverSegmentRecoveryMethod.AUTO }) self._execute_auto_workflow(context, novaclient, process_what) def execute_host_failure(self, context, host_name, recovery_method, notification_uuid, **kwargs): novaclient = nova.API() # get flow for host failure process_what = { 'host_name': host_name, 'notification_uuid': notification_uuid } try: if recovery_method == fields.FailoverSegmentRecoveryMethod.AUTO: self._execute_auto_workflow(context, novaclient, process_what) elif recovery_method == ( fields.FailoverSegmentRecoveryMethod.RESERVED_HOST): self._execute_rh_workflow(context, novaclient, process_what, **kwargs) elif recovery_method == ( fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY): self._execute_auto_priority_workflow( context, novaclient, process_what, **kwargs) else: self._execute_rh_priority_workflow(context, novaclient, process_what, **kwargs) except Exception as exc: with excutils.save_and_reraise_exception(reraise=False) as ctxt: if isinstance(exc, (exception.SkipHostRecoveryException, exception.HostRecoveryFailureException, exception.ReservedHostsUnavailable)): ctxt.reraise = True return msg = _("Failed to execute host failure flow for " "notification '%s'.") % notification_uuid raise exception.MasakariException(msg) def execute_instance_failure(self, context, instance_uuid, notification_uuid): novaclient = nova.API() # get flow for instance failure process_what = { 'instance_uuid': instance_uuid, 'notification_uuid': notification_uuid } try: flow_engine = instance_failure.get_instance_recovery_flow( context, novaclient, process_what) except Exception: msg = (_('Failed to create instance failure flow.'), notification_uuid) LOG.exception(msg) raise 
exception.MasakariException(msg) # Attaching this listener will capture all of the notifications that # taskflow sends out and redirect them to a more useful log for # masakari's debugging (or error reporting) usage. with base.DynamicLogListener(flow_engine, logger=LOG): flow_engine.run() def execute_process_failure(self, context, process_name, host_name, notification_uuid): novaclient = nova.API() # get flow for process failure process_what = { 'process_name': process_name, 'host_name': host_name, 'notification_uuid': notification_uuid } # TODO(abhishekk) We need to create a map for process_name and # respective python-client so that we can pass appropriate client # as a input to the process. if process_name == "nova-compute": recovery_flow = process_failure.get_compute_process_recovery_flow else: LOG.warning("Skipping recovery for process: %s.", process_name) raise exception.SkipProcessRecoveryException() try: flow_engine = recovery_flow(context, novaclient, process_what) except Exception: msg = (_('Failed to create process failure flow.'), notification_uuid) LOG.exception(msg) raise exception.MasakariException(msg) # Attaching this listener will capture all of the notifications that # taskflow sends out and redirect them to a more useful log for # masakari's debugging (or error reporting) usage. with base.DynamicLogListener(flow_engine, logger=LOG): flow_engine.run() @contextlib.contextmanager def upgrade_backend(self, persistence_backend): try: backend = backends.fetch(persistence_backend) with contextlib.closing(backend.get_connection()) as conn: conn.upgrade() except exceptions.NotFound as e: raise e def _get_taskflow_sequence(self, context, recovery_method, notification): # Get the taskflow sequence based on the recovery method. 
novaclient = nova.API() task_list = [] # Get linear task flow based on notification type if notification.type == fields.NotificationType.VM: tasks = TASKFLOW_CONF.instance_failure_recovery_tasks elif notification.type == fields.NotificationType.PROCESS: tasks = TASKFLOW_CONF.process_failure_recovery_tasks elif notification.type == fields.NotificationType.COMPUTE_HOST: if recovery_method in [ fields.FailoverSegmentRecoveryMethod.AUTO, fields.FailoverSegmentRecoveryMethod.AUTO_PRIORITY]: tasks = TASKFLOW_CONF.host_auto_failure_recovery_tasks elif recovery_method in [ fields.FailoverSegmentRecoveryMethod.RESERVED_HOST, fields.FailoverSegmentRecoveryMethod.RH_PRIORITY]: tasks = TASKFLOW_CONF.host_rh_failure_recovery_tasks for plugin in base.get_recovery_flow( tasks['pre'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) for plugin in base.get_recovery_flow( tasks['main'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) for plugin in base.get_recovery_flow( tasks['post'], context=context, novaclient=novaclient, update_host_method=None): task_list.append(plugin.name) return task_list def get_notification_recovery_workflow_details(self, context, recovery_method, notification): """Retrieve progress details in notification""" backend = backends.fetch(PERSISTENCE_BACKEND) with contextlib.closing(backend.get_connection()) as conn: progress_details = [] flow_details = conn.get_flows_for_book( notification.notification_uuid) for flow in flow_details: od = OrderedDict() atom_details = list(conn.get_atoms_for_flow(flow.uuid)) # TODO(ShilpaSD): In case recovery_method is auto_priority/ # rh_priority, there is no way to figure out whether the # recovery was done successfully using AUTO or RH flow. 
# Taskflow stores 'retry_instance_evacuate_engine_retry' task # in case of RH flow so if # 'retry_instance_evacuate_engine_retry' is stored in the # given flow details then the sorting of task details should # happen based on the RH flow. # This logic won't be required after LP #1815738 is fixed. if recovery_method in ['AUTO_PRIORITY', 'RH_PRIORITY']: persisted_task_list = [atom.name for atom in atom_details] if ('retry_instance_evacuate_engine_retry' in persisted_task_list): recovery_method = ( fields.FailoverSegmentRecoveryMethod. RESERVED_HOST) else: recovery_method = ( fields.FailoverSegmentRecoveryMethod.AUTO) # TODO(ShilpaSD): Taskflow doesn't support to return task # details in the same sequence in which all tasks are # executed. Reported this issue in LP #1815738. To resolve # this issue load the tasks based on the recovery method and # later sort it based on this task list so progress_details # can be returned in the expected order. task_list = self._get_taskflow_sequence(context, recovery_method, notification) for task in task_list: for atom in atom_details: if task == atom.name: od[atom.name] = atom for key, value in od.items(): # Add progress_details only if tasks are executed and meta # is available in which progress_details are stored. if value.meta and value.meta.get("progress_details"): progress_details_obj = ( objects.NotificationProgressDetails.create( value.name, value.meta['progress'], value.meta['progress_details']['details'] ['progress_details'], value.state)) progress_details.append(progress_details_obj) return progress_details masakari-9.0.0/masakari/engine/drivers/taskflow/process_failure.py0000664000175000017500000001237513656747723025472 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from eventlet import timeout as etimeout from oslo_config import cfg from oslo_log import log as logging from oslo_service import loopingcall from taskflow.patterns import linear_flow import masakari.conf from masakari.engine.drivers.taskflow import base from masakari import exception CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) ACTION = "process:recovery" TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows class DisableComputeNodeTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["process_name", "host_name"] super(DisableComputeNodeTask, self).__init__(context, novaclient, **kwargs) def execute(self, process_name, host_name): msg = "Disabling compute service on host: '%s'" % host_name self.update_details(msg) if not self.novaclient.is_service_down(self.context, host_name, process_name): # disable compute node on given host self.novaclient.enable_disable_service(self.context, host_name) msg = "Disabled compute service on host: '%s'" % host_name self.update_details(msg, 1.0) else: msg = ("Skipping recovery for process %(process_name)s as it is " "already disabled") % {'process_name': process_name} LOG.info(msg) self.update_details(msg, 1.0) class ConfirmComputeNodeDisabledTask(base.MasakariTask): def __init__(self, context, novaclient, **kwargs): kwargs['requires'] = ["process_name", "host_name"] super(ConfirmComputeNodeDisabledTask, self).__init__(context, novaclient, **kwargs) def execute(self, process_name, host_name): def _wait_for_disable(): service_disabled = self.novaclient.is_service_down( 
self.context, host_name, process_name) if service_disabled: raise loopingcall.LoopingCallDone() periodic_call = loopingcall.FixedIntervalLoopingCall( _wait_for_disable) try: msg = "Confirming compute service is disabled on host: '%s'" % ( host_name) self.update_details(msg) # add a timeout to the periodic call. periodic_call.start(interval=CONF.verify_interval) etimeout.with_timeout( CONF.wait_period_after_service_update, periodic_call.wait) msg = "Confirmed compute service is disabled on host: '%s'" % ( host_name) self.update_details(msg, 1.0) except etimeout.Timeout: msg = "Failed to disable service %(process_name)s" % { 'process_name': process_name } self.update_details(msg, 1.0) raise exception.ProcessRecoveryFailureException( message=msg) finally: # stop the periodic call, in case of exceptions or Timeout. periodic_call.stop() def get_compute_process_recovery_flow(context, novaclient, process_what): """Constructs and returns the engine entrypoint flow. This flow will do the following: 1. Disable nova-compute process 2. 
Confirm nova-compute process is disabled """ flow_name = ACTION.replace(":", "_") + "_engine" nested_flow = linear_flow.Flow(flow_name) task_dict = TASKFLOW_CONF.process_failure_recovery_tasks process_recovery_workflow_pre = linear_flow.Flow('pre_tasks') for plugin in base.get_recovery_flow(task_dict['pre'], context=context, novaclient=novaclient): process_recovery_workflow_pre.add(plugin) process_recovery_workflow_main = linear_flow.Flow('main_tasks') for plugin in base.get_recovery_flow(task_dict['main'], context=context, novaclient=novaclient): process_recovery_workflow_main.add(plugin) process_recovery_workflow_post = linear_flow.Flow('post_tasks') for plugin in base.get_recovery_flow(task_dict['post'], context=context, novaclient=novaclient): process_recovery_workflow_post.add(plugin) nested_flow.add(process_recovery_workflow_pre) nested_flow.add(process_recovery_workflow_main) nested_flow.add(process_recovery_workflow_post) return base.load_taskflow_into_engine(ACTION, nested_flow, process_what) masakari-9.0.0/masakari/engine/drivers/taskflow/base.py0000664000175000017500000001223713656747723023214 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import contextlib
import os

from oslo_log import log as logging
from oslo_utils import timeutils
from stevedore import named
# For more information please visit: https://wiki.openstack.org/wiki/TaskFlow
import taskflow.engines
from taskflow import exceptions
from taskflow import formatters
from taskflow.listeners import base
from taskflow.listeners import logging as logging_listener
from taskflow.persistence import backends
from taskflow.persistence import models
from taskflow import task

import masakari.conf
from masakari import exception

CONF = masakari.conf.CONF
# Connection string for the optional taskflow persistence backend; when
# empty/None, flows run without a persistence logbook.
PERSISTENCE_BACKEND = CONF.taskflow.connection
LOG = logging.getLogger(__name__)


class MasakariTask(task.Task):
    """The root task class for all masakari tasks.

    It automatically names the given task using the module and class that
    implement the given task as the task name.
    """

    def __init__(self, context, novaclient, **kwargs):
        # Only 'requires', 'rebind' and 'provides' are forwarded to the
        # taskflow base class; any other kwargs are intentionally dropped.
        requires = kwargs.get('requires')
        rebind = kwargs.get('rebind')
        provides = kwargs.get('provides')
        super(MasakariTask, self).__init__(self.__class__.__name__,
                                           requires=requires,
                                           rebind=rebind,
                                           provides=provides)
        self.context = context
        self.novaclient = novaclient
        # Accumulated list of progress-detail dicts; grows with every
        # update_details() call over the life of the task.
        self.progress = []

    def update_details(self, progress_data, progress=0.0):
        """Record a progress step and emit an 'update_progress' notification.

        :param progress_data: human-readable message describing the step
        :param progress: fractional progress in [0.0, 1.0]
        """
        progress_details = {
            'timestamp': str(timeutils.utcnow()),
            'progress': progress,
            'message': progress_data
        }
        self.progress.append(progress_details)
        # NOTE(review): self._notifier is provided by the taskflow Task base
        # class machinery, not defined in this module — confirm availability.
        self._notifier.notify('update_progress', {'progress': progress,
                              "progress_details": self.progress})


class SpecialFormatter(formatters.FailureFormatter):
    """Failure formatter that suppresses tracebacks for expected 'skip'
    exceptions so they are not logged as errors."""

    # Exception is an excepted case, don't include traceback in log if fails.
    _NO_TRACE_EXCEPTIONS = (exception.SkipInstanceRecoveryException,
                            exception.SkipHostRecoveryException)

    def __init__(self, engine):
        super(SpecialFormatter, self).__init__(engine)

    def format(self, fail, atom_matcher):
        """Format a failure; omit the traceback for expected exceptions."""
        if fail.check(*self._NO_TRACE_EXCEPTIONS) is not None:
            exc_info = None
            exc_details = '%s%s' % (os.linesep, fail.pformat(traceback=False))
            return (exc_info, exc_details)
        else:
            return super(SpecialFormatter, self).format(fail, atom_matcher)


class DynamicLogListener(logging_listener.DynamicLoggingListener):
    """This is used to attach to taskflow engines while they are running.

    It provides a bunch of useful features that expose the actions happening
    inside a taskflow engine, which can be useful for developers for
    debugging, for operations folks for monitoring and tracking of the
    resource actions and more...
    """

    def __init__(self, engine,
                 task_listen_for=base.DEFAULT_LISTEN_FOR,
                 flow_listen_for=base.DEFAULT_LISTEN_FOR,
                 retry_listen_for=base.DEFAULT_LISTEN_FOR,
                 logger=LOG):
        # Wire in SpecialFormatter so expected skip-exceptions are logged
        # without tracebacks.
        super(DynamicLogListener, self).__init__(
            engine,
            task_listen_for=task_listen_for,
            flow_listen_for=flow_listen_for,
            retry_listen_for=retry_listen_for,
            log=logger,
            fail_formatter=SpecialFormatter(engine))


def get_recovery_flow(task_list, **kwargs):
    """Create and yield extension objects for the provided task_list.

    Each named task is loaded through the stevedore
    'masakari.task_flow.tasks' entry-point namespace and instantiated with
    ``kwargs``; the instantiated task objects are yielded in name order.
    """
    extensions = named.NamedExtensionManager(
        'masakari.task_flow.tasks', names=task_list,
        name_order=True, invoke_on_load=True, invoke_kwds=kwargs)
    for extension in extensions.extensions:
        yield extension.obj


def load_taskflow_into_engine(action, nested_flow, process_what):
    """Load ``nested_flow`` into a taskflow engine.

    When a persistence backend is configured, the logbook keyed by the
    notification UUID is reused (or created) so progress survives restarts.

    :param action: logbook name for a newly-created book
    :param nested_flow: the flow to run
    :param process_what: store dict; must contain 'notification_uuid'
    """
    book = None
    backend = None
    if PERSISTENCE_BACKEND:
        backend = backends.fetch(PERSISTENCE_BACKEND)
        with contextlib.closing(backend.get_connection()) as conn:
            try:
                book = conn.get_logbook(process_what['notification_uuid'])
            except exceptions.NotFound:
                pass
    if book is None:
        book = models.LogBook(action, process_what['notification_uuid'])
    return taskflow.engines.load(nested_flow, store=process_what,
                                 backend=backend, book=book)
import eventlet
from eventlet import greenpool
from eventlet import timeout as etimeout
from oslo_config import cfg
from oslo_log import log as logging
from oslo_service import loopingcall
from oslo_utils import excutils
from oslo_utils import strutils
from taskflow.patterns import linear_flow
from taskflow import retry

import masakari.conf
from masakari.engine.drivers.taskflow import base
from masakari import exception
from masakari import utils

CONF = masakari.conf.CONF
LOG = logging.getLogger(__name__)

ACTION = 'instance:evacuate'

# Instance power_state
SHUTDOWN = 4

TASKFLOW_CONF = cfg.CONF.taskflow_driver_recovery_flows


class DisableComputeServiceTask(base.MasakariTask):
    """Disable the nova-compute service on the failed host so the scheduler
    stops placing new instances there before evacuation begins."""

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["host_name"]
        super(DisableComputeServiceTask, self).__init__(context, novaclient,
                                                        **kwargs)

    def execute(self, host_name):
        msg = "Disabling compute service on host: '%s'" % host_name
        self.update_details(msg)
        self.novaclient.enable_disable_service(self.context, host_name)
        # Sleep until nova-compute service is marked as disabled.
        log_msg = ("Sleeping %(wait)s sec before starting recovery "
                   "thread until nova recognizes the node down.")
        LOG.info(log_msg, {'wait': CONF.wait_period_after_service_update})
        eventlet.sleep(CONF.wait_period_after_service_update)
        msg = "Disabled compute service on host: '%s'" % host_name
        self.update_details(msg, 1.0)


class PrepareHAEnabledInstancesTask(base.MasakariTask):
    """Get all HA_Enabled instances."""
    default_provides = set(["instance_list"])

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["host_name"]
        super(PrepareHAEnabledInstancesTask, self).__init__(context,
                                                            novaclient,
                                                            **kwargs)

    def execute(self, host_name):
        """Return the UUIDs of instances on ``host_name`` to evacuate.

        HA_Enabled instances are always selected; when
        CONF.host_failure.evacuate_all_instances is set, non-HA instances
        are appended as well. Raises SkipHostRecoveryException when
        nothing needs evacuating.
        """
        def _filter_instances(instance_list):
            # Partition instances by their 'HA_Enabled' metadata flag.
            ha_enabled_instances = []
            non_ha_enabled_instances = []

            for instance in instance_list:
                is_instance_ha_enabled = strutils.bool_from_string(
                    instance.metadata.get('HA_Enabled', False))
                # Optionally skip instances already in 'error' vm_state;
                # note non-HA error instances are dropped silently here.
                if CONF.host_failure.ignore_instances_in_error_state and (
                        getattr(instance, "OS-EXT-STS:vm_state") == "error"):
                    if is_instance_ha_enabled:
                        msg = ("Ignoring recovery of HA_Enabled instance "
                               "'%(instance_id)s' as it is in 'error' state."
                               ) % {'instance_id': instance.id}
                        LOG.info(msg)
                        self.update_details(msg, 0.4)
                    continue

                if is_instance_ha_enabled:
                    ha_enabled_instances.append(instance)
                else:
                    non_ha_enabled_instances.append(instance)

            msg = "Total HA Enabled instances count: '%d'" % len(
                ha_enabled_instances)
            self.update_details(msg, 0.6)

            if CONF.host_failure.evacuate_all_instances:
                msg = ("Total Non-HA Enabled instances count: '%d'" % len(
                    non_ha_enabled_instances))
                self.update_details(msg, 0.7)

                ha_enabled_instances.extend(non_ha_enabled_instances)

                msg = ("All instances (HA Enabled/Non-HA Enabled) should be "
                       "considered for evacuation. Total count is: '%d'") % (
                    len(ha_enabled_instances))
                self.update_details(msg, 0.8)

            return ha_enabled_instances

        msg = "Preparing instances for evacuation"
        self.update_details(msg)

        instance_list = self.novaclient.get_servers(self.context, host_name)

        msg = ("Total instances running on failed host '%(host_name)s' is "
               "%(instance_list)d") % {'host_name': host_name,
                                       'instance_list': len(instance_list)}
        self.update_details(msg, 0.3)

        instance_list = _filter_instances(instance_list)

        if not instance_list:
            msg = ("Skipped host '%s' recovery as no instances needs to be "
                   "evacuated" % host_name)
            self.update_details(msg, 1.0)
            LOG.info(msg)
            raise exception.SkipHostRecoveryException(message=msg)

        # List of instance UUID
        instance_list = [instance.id for instance in instance_list]

        msg = "Instances to be evacuated are: '%s'" % ','.join(instance_list)
        self.update_details(msg, 1.0)

        return {
            "instance_list": instance_list,
        }
class EvacuateInstancesTask(base.MasakariTask):
    """Evacuate the prepared instances off the failed host (optionally onto a
    reserved host) and confirm each evacuation completed."""

    def __init__(self, context, novaclient, **kwargs):
        kwargs['requires'] = ["host_name", "instance_list"]
        # Callback used to flip a reserved host's 'reserved' flag off once
        # it is put into service; supplied by the flow builder.
        self.update_host_method = kwargs['update_host_method']
        super(EvacuateInstancesTask, self).__init__(context, novaclient,
                                                    **kwargs)

    def _get_state_and_host_of_instance(self, context, instance):
        """Refetch ``instance`` and return (old_vm_state, new_vm_state,
        current hypervisor hostname)."""
        new_instance = self.novaclient.get_server(context, instance.id)
        instance_host = getattr(new_instance,
                                "OS-EXT-SRV-ATTR:hypervisor_hostname")
        old_vm_state = getattr(instance, "OS-EXT-STS:vm_state")
        new_vm_state = getattr(new_instance, "OS-EXT-STS:vm_state")

        return (old_vm_state, new_vm_state, instance_host)

    def _stop_after_evacuation(self, context, instance):
        """Stop ``instance`` and poll until its vm_state becomes 'stopped',
        re-raising etimeout.Timeout if it does not within the deadline."""
        def _wait_for_stop_confirmation():
            old_vm_state, new_vm_state, instance_host = (
                self._get_state_and_host_of_instance(context, instance))

            if new_vm_state == 'stopped':
                raise loopingcall.LoopingCallDone()

        periodic_call_stopped = loopingcall.FixedIntervalLoopingCall(
            _wait_for_stop_confirmation)

        try:
            self.novaclient.stop_server(context, instance.id)
            # confirm instance is stopped after recovery
            periodic_call_stopped.start(interval=CONF.verify_interval)
            etimeout.with_timeout(
                CONF.wait_period_after_power_off,
                periodic_call_stopped.wait)
        except etimeout.Timeout:
            # Timeout propagates to the caller so it can record the failure.
            with excutils.save_and_reraise_exception():
                periodic_call_stopped.stop()
                msg = ("Instance '%(uuid)s' is successfully evacuated but "
                       "failed to stop.") % {'uuid': instance.id}
                LOG.warning(msg)
        else:
            periodic_call_stopped.stop()

    def _evacuate_and_confirm(self, context, instance, host_name,
                              failed_evacuation_instances,
                              reserved_host=None):
        """Evacuate one instance and confirm it landed elsewhere.

        Failures of any kind append the instance id to
        ``failed_evacuation_instances`` (shared across green threads)
        instead of raising.
        """
        # Before locking the instance check whether it is already locked
        # by user, if yes don't lock the instance
        instance_already_locked = self.novaclient.get_server(
            context, instance.id).locked

        if not instance_already_locked:
            # lock the instance so that until evacuation and confirmation
            # is not complete, user won't be able to perform any actions
            # on the instance.
            self.novaclient.lock_server(context, instance.id)

        def _wait_for_evacuation_confirmation():
            # Evacuation is confirmed once the instance reports a different
            # hypervisor and a consistent vm_state transition.
            old_vm_state, new_vm_state, instance_host = (
                self._get_state_and_host_of_instance(context, instance))

            if instance_host != host_name:
                if ((old_vm_state == 'error' and
                    new_vm_state == 'active') or
                        old_vm_state == new_vm_state):
                    raise loopingcall.LoopingCallDone()

        def _wait_for_evacuation():
            periodic_call = loopingcall.FixedIntervalLoopingCall(
                _wait_for_evacuation_confirmation)

            try:
                # add a timeout to the periodic call.
                periodic_call.start(interval=CONF.verify_interval)
                etimeout.with_timeout(
                    CONF.wait_period_after_evacuation,
                    periodic_call.wait)
            except etimeout.Timeout:
                # Instance is not evacuated in the expected time_limit.
                failed_evacuation_instances.append(instance.id)
            else:
                # stop the periodic call, in case of exceptions or
                # Timeout.
                periodic_call.stop()

        try:
            vm_state = getattr(instance, "OS-EXT-STS:vm_state")
            task_state = getattr(instance, "OS-EXT-STS:task_state")

            # Nova evacuates an instance only when vm_state is in active,
            # stopped or error state. If an instance is in other than active,
            # error and stopped vm_state, masakari resets the instance state
            # to *error* so that the instance can be evacuated.
            stop_instance = True
            if vm_state not in ['active', 'error', 'stopped']:
                self.novaclient.reset_instance_state(context, instance.id)
                instance = self.novaclient.get_server(context, instance.id)
                power_state = getattr(instance, "OS-EXT-STS:power_state")
                if vm_state == 'resized' and power_state != SHUTDOWN:
                    stop_instance = False
            elif task_state is not None:
                # Nova fails evacuation when the instance's task_state is not
                # none. In this case, masakari resets the instance's vm_state
                # to 'error' and task_state to none.
                self.novaclient.reset_instance_state(context, instance.id)
                instance = self.novaclient.get_server(context, instance.id)
                if vm_state == 'active':
                    stop_instance = False

            # evacuate the instance
            self.novaclient.evacuate_instance(context, instance.id,
                                              target=reserved_host)

            _wait_for_evacuation()

            if vm_state != 'active':
                if stop_instance:
                    # NOTE(review): passes self.context rather than the local
                    # 'context' parameter used everywhere else in this
                    # method — looks inconsistent; confirm intent.
                    self._stop_after_evacuation(self.context, instance)
                    # If the instance was in 'error' state before failure
                    # it should be set to 'error' after recovery.
                    if vm_state == 'error':
                        self.novaclient.reset_instance_state(
                            context, instance.id)
        except etimeout.Timeout:
            # Instance is not stop in the expected time_limit.
            failed_evacuation_instances.append(instance.id)
        except Exception:
            # Exception is raised while resetting instance state or
            # evacuating the instance itself.
            failed_evacuation_instances.append(instance.id)
        finally:
            if not instance_already_locked:
                # Unlock the server after evacuation and confirmation
                self.novaclient.unlock_server(context, instance.id)

    def execute(self, host_name, instance_list, reserved_host=None):
        """Fan out evacuation of ``instance_list`` from ``host_name``.

        Raises HostRecoveryFailureException if any instance fails to
        evacuate.
        """
        msg = ("Start evacuation of instances from failed host '%(host_name)s'"
               ", instance uuids are: '%(instance_list)s'") % {
            'host_name': host_name, 'instance_list': ','.join(instance_list)}
        self.update_details(msg)

        def _do_evacuate(context, host_name, instance_list,
                         reserved_host=None):
            failed_evacuation_instances = []

            if reserved_host:
                msg = "Enabling reserved host: '%s'" % reserved_host
                self.update_details(msg, 0.1)
                if CONF.host_failure.add_reserved_host_to_aggregate:
                    # Assign reserved_host to an aggregate to which the failed
                    # compute host belongs to.
                    aggregates = self.novaclient.get_aggregate_list(context)
                    for aggregate in aggregates:
                        if host_name in aggregate.hosts:
                            try:
                                msg = ("Add host %(reserved_host)s to "
                                       "aggregate %(aggregate)s") % {
                                    'reserved_host': reserved_host,
                                    'aggregate': aggregate.name}
                                self.update_details(msg, 0.2)

                                self.novaclient.add_host_to_aggregate(
                                    context, reserved_host, aggregate)

                                msg = ("Added host %(reserved_host)s to "
                                       "aggregate %(aggregate)s") % {
                                    'reserved_host': reserved_host,
                                    'aggregate': aggregate.name}
                                self.update_details(msg, 0.3)
                            except exception.Conflict:
                                msg = ("Host '%(reserved_host)s' already has "
                                       "been added to aggregate "
                                       "'%(aggregate)s'.") % {
                                    'reserved_host': reserved_host,
                                    'aggregate': aggregate.name}
                                self.update_details(msg, 1.0)
                                LOG.info(msg)

                            # A failed compute host can be associated with
                            # multiple aggregates but operators will not
                            # associate it with multiple aggregates in real
                            # deployment so adding reserved_host to the very
                            # first aggregate from the list.
                            break

                self.novaclient.enable_disable_service(
                    context, reserved_host, enable=True)

                # Set reserved property of reserved_host to False
                self.update_host_method(self.context, reserved_host)

            # Evacuate instances concurrently on a green thread pool.
            thread_pool = greenpool.GreenPool(
                CONF.host_failure_recovery_threads)

            for instance_id in instance_list:
                msg = "Evacuation of instance started: '%s'" % instance_id
                self.update_details(msg, 0.5)
                instance = self.novaclient.get_server(self.context,
                                                      instance_id)
                thread_pool.spawn_n(self._evacuate_and_confirm, context,
                                    instance, host_name,
                                    failed_evacuation_instances,
                                    reserved_host)

            thread_pool.waitall()

            evacuated_instances = list(set(instance_list).difference(set(
                failed_evacuation_instances)))

            if evacuated_instances:
                evacuated_instances.sort()
                msg = ("Successfully evacuate instances '%(instance_list)s' "
                       "from host '%(host_name)s'") % {
                    'instance_list': ','.join(evacuated_instances),
                    'host_name': host_name}
                self.update_details(msg, 0.7)

            if failed_evacuation_instances:
                msg = ("Failed to evacuate instances "
                       "'%(failed_evacuation_instances)s' from host "
                       "'%(host_name)s'") % {
                    'failed_evacuation_instances':
                        ','.join(failed_evacuation_instances),
                    'host_name': host_name}
                self.update_details(msg, 0.7)
                raise exception.HostRecoveryFailureException(
                    message=msg)

            msg = "Evacuation process completed!"
            self.update_details(msg, 1.0)

        lock_name = reserved_host if reserved_host else None

        # Serialize evacuations targeting the same reserved host so two
        # concurrent host-failure workflows don't race on its capacity.
        @utils.synchronized(lock_name)
        def do_evacuate_with_reserved_host(context, host_name, instance_list,
                                           reserved_host):
            _do_evacuate(self.context, host_name, instance_list,
                         reserved_host=reserved_host)

        if lock_name:
            do_evacuate_with_reserved_host(self.context, host_name,
                                           instance_list, reserved_host)
        else:
            # No need to acquire lock on reserved_host when recovery_method is
            # 'auto' as the selection of compute host will be decided by nova.
            _do_evacuate(self.context, host_name, instance_list)
def get_auto_flow(context, novaclient, process_what):
    """Constructs and returns the engine entrypoint flow.

    This flow will do the following:

    1. Disable compute service on source host
    2. Get all HA_Enabled instances.
    3. Evacuate all the HA_Enabled instances.
    4. Confirm evacuation of instances.
    """
    flow_name = ACTION.replace(":", "_") + "_engine"
    nested_flow = linear_flow.Flow(flow_name)

    task_dict = TASKFLOW_CONF.host_auto_failure_recovery_tasks

    # Assemble the pre -> main -> post sub-flows from the configured
    # recovery task plugins; 'auto' recovery lets nova pick the target
    # host, so no update_host_method callback is needed.
    for phase in ('pre', 'main', 'post'):
        phase_flow = linear_flow.Flow('%s_tasks' % phase)
        for plugin in base.get_recovery_flow(task_dict[phase],
                                             context=context,
                                             novaclient=novaclient,
                                             update_host_method=None):
            phase_flow.add(plugin)
        nested_flow.add(phase_flow)

    return base.load_taskflow_into_engine(ACTION, nested_flow, process_what)


def get_rh_flow(context, novaclient, process_what, **kwargs):
    """Constructs and returns the engine entrypoint flow.

    This flow will do the following:

    1. Disable compute service on source host
    2. Get all HA_Enabled instances.
    3. Evacuate all the HA_Enabled instances using reserved_host.
    4. Confirm evacuation of instances.
    """
    flow_name = ACTION.replace(":", "_") + "_engine"
    nested_flow = linear_flow.Flow(flow_name)

    task_dict = TASKFLOW_CONF.host_rh_failure_recovery_tasks

    def _populate(flow, plugin_names):
        # Fill 'flow' with the recovery task plugins named in
        # 'plugin_names' and hand the flow back for chaining.
        for plugin in base.get_recovery_flow(plugin_names, context=context,
                                             novaclient=novaclient,
                                             **kwargs):
            flow.add(plugin)
        return flow

    nested_flow.add(_populate(linear_flow.Flow('pre_tasks'),
                              task_dict['pre']))
    # The main phase retries across the reserved hosts: each attempt
    # rebinds one entry of 'reserved_host_list' as 'reserved_host'.
    nested_flow.add(_populate(
        linear_flow.Flow(
            "retry_%s" % flow_name,
            retry=retry.ParameterizedForEach(
                rebind=['reserved_host_list'],
                provides='reserved_host')),
        task_dict['main']))
    nested_flow.add(_populate(linear_flow.Flow('post_tasks'),
                              task_dict['post']))

    return base.load_taskflow_into_engine(ACTION, nested_flow, process_what)
""" Driver base-class: (Beginning of) the contract that masakari drivers must follow, and shared types that support that contract """ import abc import sys from oslo_log import log as logging import six from stevedore import driver import masakari.conf from masakari import utils CONF = masakari.conf.CONF LOG = logging.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) class NotificationDriver(object): @abc.abstractmethod def execute_host_failure(self, context, host_name, recovery_method, notification_uuid, **kwargs): pass @abc.abstractmethod def execute_instance_failure(self, context, instance_uuid, notification_uuid): pass @abc.abstractmethod def execute_process_failure(self, context, process_name, host_name, notification_uuid): pass @abc.abstractmethod def get_notification_recovery_workflow_details(self, context, recovery_method, notification_uuid): pass @abc.abstractmethod def upgrade_backend(self, backend): pass def load_masakari_driver(masakari_driver=None): """Load a masakari driver module. Load the masakari driver module specified by the notification_driver configuration option or, if supplied, the driver name supplied as an argument. :param masakari_driver: a masakari driver name to override the config opt :returns: a NotificationDriver instance """ if not masakari_driver: masakari_driver = CONF.notification_driver if not masakari_driver: LOG.error("Notification driver option required, but not specified") sys.exit(1) LOG.info("Loading masakari notification driver '%s'", masakari_driver) try: notification_driver = driver.DriverManager('masakari.driver', masakari_driver, invoke_on_load=True).driver return utils.check_isinstance(notification_driver, NotificationDriver) except ImportError: LOG.exception("Failed to load notification driver '%s'.", masakari_driver) sys.exit(1) masakari-9.0.0/masakari/engine/manager.py0000664000175000017500000004215513656747723020406 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. 
def update_host_method(context, host_name, reserved=False):
    """Persist the 'reserved' flag of the host identified by host_name.

    Looks the host up by name and stores the given ``reserved`` value;
    the default (False) releases the host from the reserved pool.
    """
    host = objects.Host.get_by_name(context, host_name)
    host.reserved = reserved
    host.save()
    def _handle_notification_type_process(self, context, notification):
        """Handle a PROCESS-type notification.

        'STARTED' events are informational; 'STOPPED' events put the host
        into maintenance and run the process-failure recovery workflow.
        Emits an end/error versioned notification and returns the
        resulting NotificationStatus value.
        """
        notification_status = fields.NotificationStatus.FINISHED
        notification_event = notification.payload.get('event')
        process_name = notification.payload.get('process_name')
        exception_info = None

        if notification_event.upper() == 'STARTED':
            LOG.info("Notification type '%(type)s' received for host "
                     "'%(host_uuid)s': '%(process_name)s' has been "
                     "%(event)s.",
                     {'type': notification.type,
                      'host_uuid': notification.source_host_uuid,
                      'process_name': process_name,
                      'event': notification_event})
        elif notification_event.upper() == 'STOPPED':
            host_obj = objects.Host.get_by_uuid(
                context, notification.source_host_uuid)
            host_name = host_obj.name

            # Mark host on_maintenance mode as True
            update_data = {
                'on_maintenance': True,
            }
            host_obj.update(update_data)
            host_obj.save()

            try:
                self.driver.execute_process_failure(
                    context, process_name, host_name,
                    notification.notification_uuid)
            except exception.SkipProcessRecoveryException as e:
                # Recovery intentionally skipped; treated as success.
                notification_status = fields.NotificationStatus.FINISHED
            except (exception.MasakariException,
                    exception.ProcessRecoveryFailureException) as e:
                notification_status = fields.NotificationStatus.ERROR
                LOG.error("Failed to process notification '%(uuid)s'."
                          " Reason: %(error)s",
                          {"uuid": notification.notification_uuid,
                           "error": e.message})
                exception_info = e
        else:
            LOG.warning("Invalid event: %(event)s received for "
                        "notification type: %(notification_type)s",
                        {'event': notification_event,
                         'notification_type': notification.type})
            notification_status = fields.NotificationStatus.IGNORED

        if exception_info:
            tb = traceback.format_exc()
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.ERROR,
                exception=str(exception_info),
                tb=tb)
        else:
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.END)

        return notification_status

    def _handle_notification_type_instance(self, context, notification):
        """Handle a VM (instance)-type notification.

        Unrecognized libvirt events are ignored outright (no versioned
        notification is emitted in that early-return path); otherwise the
        instance-failure recovery workflow runs and an end/error versioned
        notification is emitted. Returns the resulting NotificationStatus.
        """
        if not virt_events.is_valid_event(notification.payload):
            LOG.info("Notification '%(uuid)s' received with payload "
                     "%(payload)s is ignored.",
                     {"uuid": notification.notification_uuid,
                      "payload": notification.payload})
            return fields.NotificationStatus.IGNORED

        notification_status = fields.NotificationStatus.FINISHED
        exception_info = None
        try:
            self.driver.execute_instance_failure(
                context, notification.payload.get('instance_uuid'),
                notification.notification_uuid)
        except exception.IgnoreInstanceRecoveryException as e:
            notification_status = fields.NotificationStatus.IGNORED
            exception_info = e
        except exception.SkipInstanceRecoveryException as e:
            # Recovery intentionally skipped; treated as success.
            notification_status = fields.NotificationStatus.FINISHED
        except (exception.MasakariException,
                exception.InstanceRecoveryFailureException) as e:
            notification_status = fields.NotificationStatus.ERROR
            LOG.error("Failed to process notification '%(uuid)s'."
                      " Reason: %(error)s",
                      {"uuid": notification.notification_uuid,
                       "error": e.message})
            exception_info = e

        if exception_info:
            tb = traceback.format_exc()
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.ERROR,
                exception=str(exception_info),
                tb=tb)
        else:
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.END)

        return notification_status

    def _handle_notification_type_host(self, context, notification):
        """Handle a COMPUTE_HOST-type notification.

        Only 'NORMAL' host_status with a 'STOPPED' event triggers host
        recovery: the host is put into maintenance, any reserved flag is
        cleared, and (for non-AUTO segments) a list of candidate reserved
        hosts is passed to the host-failure workflow. Returns the
        resulting NotificationStatus.
        """
        host_status = notification.payload.get('host_status')
        notification_status = fields.NotificationStatus.FINISHED
        notification_event = notification.payload.get('event')
        exception_info = None

        if host_status.upper() != fields.HostStatusType.NORMAL:
            # NOTE(shilpasd): Avoid host recovery for host_status other than
            # 'NORMAL' otherwise it could lead to unsafe evacuation of
            # instances running on the failed source host.
            LOG.warning("Notification '%(uuid)s' ignored as host_status"
                        "is '%(host_status)s'",
                        {'uuid': notification.notification_uuid,
                         'host_status': host_status.upper()})
            notification_status = fields.NotificationStatus.IGNORED
        elif notification_event.upper() == 'STARTED':
            LOG.info("Notification type '%(type)s' received for host "
                     "'%(host_uuid)s' has been %(event)s.",
                     {'type': notification.type,
                      'host_uuid': notification.source_host_uuid,
                      'event': notification_event})
        elif notification_event.upper() == 'STOPPED':
            host_obj = objects.Host.get_by_uuid(
                context, notification.source_host_uuid)
            host_name = host_obj.name
            recovery_method = host_obj.failover_segment.recovery_method

            # Mark host on_maintenance mode as True
            update_data = {
                'on_maintenance': True,
            }

            # Set reserved flag to False if this host is reserved
            if host_obj.reserved:
                update_data['reserved'] = False

            host_obj.update(update_data)
            host_obj.save()

            reserved_host_list = None
            if not recovery_method == (
                    fields.FailoverSegmentRecoveryMethod.AUTO):
                reserved_host_object_list = objects.HostList.get_all(
                    context, filters={
                        'failover_segment_id': host_obj.failover_segment_id,
                        'reserved': True,
                        'on_maintenance': False
                    })
                # Create list of host name from reserved_host_object_list
                reserved_host_list = [host.name for host in
                                      reserved_host_object_list]

            try:
                self.driver.execute_host_failure(
                    context, host_name, recovery_method,
                    notification.notification_uuid,
                    update_host_method=update_host_method,
                    reserved_host_list=reserved_host_list)
            except exception.SkipHostRecoveryException as e:
                # Recovery intentionally skipped; treated as success.
                notification_status = fields.NotificationStatus.FINISHED
            except (exception.HostRecoveryFailureException,
                    exception.ReservedHostsUnavailable,
                    exception.MasakariException) as e:
                notification_status = fields.NotificationStatus.ERROR
                LOG.error("Failed to process notification '%(uuid)s'."
                          " Reason: %(error)s",
                          {"uuid": notification.notification_uuid,
                           "error": e.message})
                exception_info = e
        else:
            LOG.warning("Invalid event: %(event)s received for "
                        "notification type: %(type)s",
                        {'event': notification_event,
                         'type': notification.type})
            notification_status = fields.NotificationStatus.IGNORED

        if exception_info:
            tb = traceback.format_exc()
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.ERROR,
                exception=str(exception_info),
                tb=tb)
        else:
            engine_utils.notify_about_notification_update(context,
                notification,
                action=fields.EventNotificationAction.NOTIFICATION_PROCESS,
                phase=fields.EventNotificationPhase.END)

        return notification_status
" Reason: %(error)s", {"uuid": notification.notification_uuid, "error": e.message}) exception_info = e else: LOG.warning("Invalid event: %(event)s received for " "notification type: %(type)s", {'event': notification_event, 'type': notification.type}) notification_status = fields.NotificationStatus.IGNORED if exception_info: tb = traceback.format_exc() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.ERROR, exception=str(exception_info), tb=tb) else: engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.END) return notification_status def _process_notification(self, context, notification): @utils.synchronized(notification.source_host_uuid, blocking=True) def do_process_notification(notification): LOG.info('Processing notification %(notification_uuid)s of ' 'type: %(type)s', {'notification_uuid': notification.notification_uuid, 'type': notification.type}) # Get notification from db notification_db = objects.Notification.get_by_uuid(context, notification.notification_uuid) # NOTE(tpatil): To fix bug 1773132, process notification only # if the notification status is New and the current notification # from DB status is Not New to avoid recovering from failure twice if (notification.status == fields.NotificationStatus.NEW and notification_db.status != fields.NotificationStatus.NEW): LOG.warning("Processing of notification is skipped to avoid " "recovering from failure twice. 
" "Notification received is '%(uuid)s' " "and it's status is '%(new_status)s' and the " "current status of same notification in db " "is '%(old_status)s'", {"uuid": notification.notification_uuid, "new_status": notification.status, "old_status": notification_db.status}) return update_data = { 'status': fields.NotificationStatus.RUNNING, } notification.update(update_data) notification.save() if notification.type == fields.NotificationType.PROCESS: notification_status = self._handle_notification_type_process( context, notification) elif notification.type == fields.NotificationType.VM: notification_status = self._handle_notification_type_instance( context, notification) elif notification.type == fields.NotificationType.COMPUTE_HOST: notification_status = self._handle_notification_type_host( context, notification) LOG.info("Notification %(notification_uuid)s exits with " "status: %(status)s.", {'notification_uuid': notification.notification_uuid, 'status': notification_status}) update_data = { 'status': notification_status } notification.update(update_data) notification.save() engine_utils.notify_about_notification_update(context, notification, action=fields.EventNotificationAction.NOTIFICATION_PROCESS, phase=fields.EventNotificationPhase.START) do_process_notification(notification) def process_notification(self, context, notification=None): """Processes the notification""" self._process_notification(context, notification) @periodic_task.periodic_task( spacing=CONF.process_unfinished_notifications_interval) def _process_unfinished_notifications(self, context): filters = { 'status': [fields.NotificationStatus.ERROR, fields.NotificationStatus.NEW] } notifications_list = objects.NotificationList.get_all(context, filters=filters) for notification in notifications_list: if (notification.status == fields.NotificationStatus.ERROR or (notification.status == fields.NotificationStatus.NEW and timeutils.is_older_than( notification.generated_time, 
CONF.retry_notification_new_status_interval))): self._process_notification(context, notification) # get updated notification from db after workflow execution notification_db = objects.Notification.get_by_uuid( context, notification.notification_uuid) if notification_db.status == fields.NotificationStatus.ERROR: # update notification status as failed notification_status = fields.NotificationStatus.FAILED update_data = { 'status': notification_status } notification_db.update(update_data) notification_db.save() LOG.error( "Periodic task 'process_unfinished_notifications': " "Notification %(notification_uuid)s exits with " "status: %(status)s.", {'notification_uuid': notification.notification_uuid, 'status': notification_status}) def get_notification_recovery_workflow_details(self, context, notification): """Retrieve recovery workflow details of the notification""" try: host_obj = objects.Host.get_by_uuid( context, notification.source_host_uuid) recovery_method = host_obj.failover_segment.recovery_method progress_details = ( self.driver.get_notification_recovery_workflow_details( context, recovery_method, notification)) notification['recovery_workflow_details'] = progress_details except Exception: msg = (_('Failed to fetch notification recovery workflow details ' 'for %s') % notification.notification_uuid) LOG.exception(msg) raise exception.MasakariException(msg) return notification masakari-9.0.0/masakari/engine/instance_events.py0000664000175000017500000000231413656747723022155 0ustar zuulzuul00000000000000# Copyright 2017 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. 
You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ VM libvirt events These are the events which needs to be processed by masakari in case of instance recovery failure. """ INSTANCE_EVENTS = { # Add QEMU guest agent events here. 'QEMU_GUEST_AGENT_ERROR': ['STOPPED_FAILED'], # Add more events and vir_domain_events here. 'LIFECYCLE': ['STOPPED_FAILED'], 'IO_ERROR': ['IO_ERROR_REPORT'] } def is_valid_event(payload): vir_domain_event_list = INSTANCE_EVENTS.get(payload.get('event')) if vir_domain_event_list and payload.get( 'vir_domain_event') in vir_domain_event_list: return True return False masakari-9.0.0/masakari/engine/rpcapi.py0000664000175000017500000000403513656747723020245 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import oslo_messaging as messaging import masakari.conf from masakari.objects import base as objects_base from masakari import rpc CONF = masakari.conf.CONF class EngineAPI(rpc.RPCAPI): """Client side of the engine rpc API. API version history: .. code-block:: none 1.0 - Initial version. 
1.1 - Added get_notification_recovery_workflow_details method to retrieve progress details from notification driver. """ RPC_API_VERSION = '1.1' TOPIC = CONF.masakari_topic BINARY = 'masakari-engine' def __init__(self): super(EngineAPI, self).__init__() target = messaging.Target(topic=self.TOPIC, version=self.RPC_API_VERSION) serializer = objects_base.MasakariObjectSerializer() self.client = rpc.get_client(target, serializer=serializer) def process_notification(self, context, notification): version = '1.0' cctxt = self.client.prepare(version=version) cctxt.cast(context, 'process_notification', notification=notification) def get_notification_recovery_workflow_details(self, context, notification): version = '1.1' cctxt = self.client.prepare(version=version) return cctxt.call(context, 'get_notification_recovery_workflow_details', notification=notification) masakari-9.0.0/masakari/conf/0000775000175000017500000000000013656750011016053 5ustar zuulzuul00000000000000masakari-9.0.0/masakari/conf/__init__.py0000664000175000017500000000256513656747723020214 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg from masakari.conf import api from masakari.conf import base from masakari.conf import database from masakari.conf import engine from masakari.conf import engine_driver from masakari.conf import exceptions from masakari.conf import nova from masakari.conf import osapi_v1 from masakari.conf import paths from masakari.conf import service from masakari.conf import ssl from masakari.conf import wsgi CONF = cfg.CONF api.register_opts(CONF) base.register_opts(CONF) database.register_opts(CONF) engine.register_opts(CONF) engine_driver.register_opts(CONF) exceptions.register_opts(CONF) nova.register_opts(CONF) osapi_v1.register_opts(CONF) paths.register_opts(CONF) ssl.register_opts(CONF) service.register_opts(CONF) wsgi.register_opts(CONF) masakari-9.0.0/masakari/conf/api.py0000664000175000017500000000504513656747723017222 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg auth_opts = [ cfg.StrOpt("auth_strategy", default="keystone", choices=("keystone", "noauth2"), help=""" This determines the strategy to use for authentication: keystone or noauth2. 'noauth2' is designed for testing only, as it does no actual credential checking. 'noauth2' provides administrative credentials only if 'admin' is specified as the username. * Possible values: Either 'keystone' (default) or 'noauth2'. 
* Services that use this: ``masakari-api`` * Related options: None """), cfg.BoolOpt("use_forwarded_for", default=False, help=""" When True, the 'X-Forwarded-For' header is treated as the canonical remote address. When False (the default), the 'remote_address' header is used. You should only enable this if you have an HTML sanitizing proxy. * Possible values: True, False (default) * Services that use this: ``masakari-api`` * Related options: None """), ] osapi_opts = [ cfg.IntOpt("osapi_max_limit", default=1000, help=""" As a query can potentially return many thousands of items, you can limit the maximum number of items in a single response by setting this option. * Possible values: Any positive integer. Default is 1000. * Services that use this: ``masakari-api`` * Related options: None """), cfg.StrOpt("osapi_masakari_link_prefix", help=""" This string is prepended to the normal URL that is returned in links to the OpenStack Masakari API. If it is empty (the default), the URLs are returned unchanged. * Possible values: Any string, including an empty string (the default). * Services that use this: ``masakari-api`` * Related options: None """), ] ALL_OPTS = (auth_opts + osapi_opts) def register_opts(conf): conf.register_opts(ALL_OPTS) def list_opts(): return {"DEFAULT": ALL_OPTS} masakari-9.0.0/masakari/conf/database.py0000664000175000017500000000165313656747723020216 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from masakari.conf import paths from oslo_db import options as oslo_db_options _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def( 'masakari.sqlite') def register_opts(conf): oslo_db_options.set_defaults(conf, connection=_DEFAULT_SQL_CONNECTION) def list_opts(): return {'DEFAULT': []} masakari-9.0.0/masakari/conf/engine.py0000664000175000017500000001025513656747723017715 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg rpcapi_opts = [ cfg.StrOpt("masakari_topic", default="ha_engine", deprecated_for_removal=True, deprecated_since='3.0.0', deprecated_reason=""" Configurable RPC topic provides little value and it can easily break Masakari if operator configures it to the same topic used by other OpenStack services.""", help=""" This is the message queue topic that the masakari engine 'listens' on. It is used when the masakari engine is started up to configure the queue, and whenever an RPC call to the masakari engine is made. * Possible values: Any string, but there is almost never any reason to ever change this value from its default of 'engine'. 
* Services that use this: ``masakari-engine`` * Related options: None """), ] driver_opts = [ cfg.StrOpt( 'notification_driver', default='taskflow_driver', help=""" Defines which driver to use for executing notification workflows. """), ] notification_opts = [ cfg.IntOpt('duplicate_notification_detection_interval', default=180, min=0, help="Interval in seconds for identifying duplicate " "notifications. If the notification received is identical " "to the previous ones whose status is either new or " "running and if it's created_timestamp and the current " "timestamp is less than this config option value, then " "the notification will be considered as duplicate and " "it will be ignored." ), cfg.IntOpt('wait_period_after_service_update', default=180, help='Number of seconds to wait after a service is enabled ' 'or disabled.'), cfg.IntOpt('wait_period_after_evacuation', default=90, help='Wait until instance is evacuated'), cfg.IntOpt('verify_interval', default=1, help='The monitoring interval for looping'), cfg.IntOpt('wait_period_after_power_off', default=180, help='Number of seconds to wait for instance to shut down'), cfg.IntOpt('wait_period_after_power_on', default=60, help='Number of seconds to wait for instance to start'), cfg.IntOpt('process_unfinished_notifications_interval', default=120, help='Interval in seconds for processing notifications which ' 'are in error or new state.'), cfg.IntOpt('retry_notification_new_status_interval', default=60, mutable=True, help="Interval in seconds for identifying notifications which " "are in new state. 
If the notification is in new state " "till this config option value after it's " "generated_time, then it is considered that notification " "is ignored by the messaging queue and will be processed " "by 'process_unfinished_notifications' periodic task."), cfg.IntOpt('host_failure_recovery_threads', default=3, min=1, help="Number of threads to be used for evacuating and " "confirming instances during execution of host_failure " "workflow."), ] ALL_OPTS = (rpcapi_opts + notification_opts + driver_opts) def register_opts(conf): conf.register_opts(ALL_OPTS) def list_opts(): return {'DEFAULT': ALL_OPTS} masakari-9.0.0/masakari/conf/opts.py0000664000175000017500000000642013656747723017434 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ This is the single point of entry to generate the sample configuration file for Masakari. It collects all the necessary info from the other modules in this package. 
It is assumed that: * every other module in this package has a 'list_opts' function which return a dict where * the keys are strings which are the group names * the value of each key is a list of config options for that group * the masakari.conf package doesn't have further packages with config options * this module is only used in the context of sample file generation """ import collections import importlib import os import pkgutil from masakari.conf import engine_driver LIST_OPTS_FUNC_NAME = "list_opts" _recovery_workflow_opts = [ ('taskflow_driver_recovery_flows', engine_driver.taskflow_driver_recovery_flows) ] def _tupleize(dct): """Take the dict of options and convert to the 2-tuple format.""" return [(key, val) for key, val in dct.items()] def list_opts(): opts = collections.defaultdict(list) module_names = _list_module_names() imported_modules = _import_modules(module_names) _append_config_options(imported_modules, opts) return _tupleize(opts) def list_recovery_workflow_opts(): """Return a list of oslo_config options available for recovery workflow""" return [(key, val) for key, val in _recovery_workflow_opts] def _list_module_names(): module_names = [] package_path = os.path.dirname(os.path.abspath(__file__)) for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]): if modname == "opts" or ispkg: continue else: module_names.append(modname) return module_names def _import_modules(module_names): imported_modules = [] for modname in module_names: mod = importlib.import_module("masakari.conf." + modname) if not hasattr(mod, LIST_OPTS_FUNC_NAME): msg = "The module 'masakari.conf.%s' should have a '%s' "\ "function which returns the config options." 
% \ (modname, LIST_OPTS_FUNC_NAME) raise Exception(msg) else: imported_modules.append(mod) return imported_modules def _process_old_opts(configs): """Convert old-style 2-tuple configs to dicts.""" if isinstance(configs, tuple): configs = [configs] return {label: options for label, options in configs} def _append_config_options(imported_modules, config_options): for mod in imported_modules: configs = mod.list_opts() if not isinstance(configs, dict): configs = _process_old_opts(configs) for key, val in configs.items(): config_options[key].extend(val) masakari-9.0.0/masakari/conf/wsgi.py0000664000175000017500000001000713656747723017414 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg wsgi_group = cfg.OptGroup( 'wsgi', title='WSGI Options') api_paste_config = cfg.StrOpt( 'api_paste_config', default="api-paste.ini", help='File name for the paste.deploy config for masakari-api', deprecated_group='DEFAULT') # TODO(abhishekk): It is not possible to rename this to 'log_format' # yet, as doing so would cause a conflict if '[DEFAULT] log_format' # were used. When 'deprecated_group' is removed after Ocata, this # should be changed. wsgi_log_format = cfg.StrOpt( 'wsgi_log_format', default='%(client_ip)s "%(request_line)s" status: %(status_code)s' ' len: %(body_length)s time: %(wall_seconds).7f', help='A python format string that is used as the template to ' 'generate log lines. 
The following values can be formatted ' 'into it: client_ip, date_time, request_line, status_code, ' 'body_length, wall_seconds.', deprecated_group='DEFAULT') secure_proxy_ssl_header = cfg.StrOpt( 'secure_proxy_ssl_header', help='The HTTP header used to determine the scheme for the ' 'original request, even if it was removed by an SSL ' 'terminating proxy. Typical value is ' '"HTTP_X_FORWARDED_PROTO".', deprecated_group='DEFAULT') ssl_ca_file = cfg.StrOpt( 'ssl_ca_file', help='CA certificate file to use to verify connecting clients', deprecated_group='DEFAULT') ssl_cert_file = cfg.StrOpt( 'ssl_cert_file', help='SSL certificate of API server', deprecated_group='DEFAULT') ssl_key_file = cfg.StrOpt( 'ssl_key_file', help='SSL private key of API server', deprecated_group='DEFAULT') tcp_keepidle = cfg.IntOpt( 'tcp_keepidle', default=600, help='Sets the value of TCP_KEEPIDLE in seconds for each ' 'server socket. Not supported on OS X.', deprecated_group='DEFAULT') default_pool_size = cfg.IntOpt( 'default_pool_size', default=1000, help='Size of the pool of greenthreads used by wsgi', deprecated_group='DEFAULT', deprecated_name='wsgi_default_pool_size') max_header_line = cfg.IntOpt( 'max_header_line', default=16384, help='Maximum line size of message headers to be accepted. ' 'max_header_line may need to be increased when using ' 'large tokens (typically those generated by the ' 'Keystone v3 API with big service catalogs).', deprecated_group='DEFAULT') keep_alive = cfg.BoolOpt( 'keep_alive', default=True, help='If False, closes the client socket connection explicitly.', deprecated_group='DEFAULT', deprecated_name='wsgi_keep_alive') client_socket_timeout = cfg.IntOpt( 'client_socket_timeout', default=900, help="Timeout for client connections' socket operations. " "If an incoming connection is idle for this number of " "seconds it will be closed. 
A value of '0' means " "wait forever.", deprecated_group='DEFAULT') ALL_OPTS = [api_paste_config, wsgi_log_format, secure_proxy_ssl_header, ssl_ca_file, ssl_cert_file, ssl_key_file, tcp_keepidle, default_pool_size, max_header_line, keep_alive, client_socket_timeout ] def register_opts(conf): conf.register_group(wsgi_group) conf.register_opts(ALL_OPTS, group=wsgi_group) def list_opts(): return {wsgi_group: ALL_OPTS} masakari-9.0.0/masakari/conf/paths.py0000664000175000017500000000331513656747723017566 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
import os import sys from oslo_config import cfg path_opts = [ cfg.StrOpt('pybasedir', default=os.path.abspath(os.path.join(os.path.dirname(__file__), '../../')), help='Directory where the masakari python module is installed'), cfg.StrOpt('bindir', default=os.path.join(sys.prefix, 'local', 'bin'), help='Directory where masakari binaries are installed'), cfg.StrOpt('state_path', default='$pybasedir', help="Top-level directory for maintaining masakari's state"), ] def basedir_def(*args): """Return an uninterpolated path relative to $pybasedir.""" return os.path.join('$pybasedir', *args) def bindir_def(*args): """Return an uninterpolated path relative to $bindir.""" return os.path.join('$bindir', *args) def state_path_def(*args): """Return an uninterpolated path relative to $state_path.""" return os.path.join('$state_path', *args) def register_opts(conf): conf.register_opts(path_opts) def list_opts(): return {"DEFAULT": path_opts} masakari-9.0.0/masakari/conf/nova.py0000664000175000017500000000526013656747723017413 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from keystoneauth1 import loading as ks_loading from oslo_config import cfg nova_opts = [ cfg.StrOpt('nova_catalog_admin_info', default='compute:nova:publicURL', help='Match this value when searching for nova in the ' 'service catalog. 
Format is: separated values of ' 'the form: ' '::'), cfg.StrOpt('os_region_name', help='Region name of this node'), cfg.StrOpt('nova_ca_certificates_file', help='Location of ca certificates file to use for nova client ' 'requests.'), cfg.BoolOpt('nova_api_insecure', default=False, help='Allow to perform insecure SSL requests to nova'), cfg.StrOpt('os_privileged_user_name', help='OpenStack privileged account username. Used for requests ' 'to other services (such as Nova) that require an account ' 'with special rights.'), cfg.StrOpt('os_privileged_user_password', help='Password associated with the OpenStack privileged ' 'account.', secret=True), cfg.StrOpt('os_privileged_user_tenant', help='Tenant name associated with the OpenStack privileged ' 'account.'), cfg.URIOpt('os_privileged_user_auth_url', help='Auth URL associated with the OpenStack privileged ' 'account.'), cfg.StrOpt('os_user_domain_name', default="default", help='User domain name associated with the OpenStack ' 'privileged account.'), cfg.StrOpt('os_project_domain_name', default="default", help='Project domain name associated with the OpenStack ' 'privileged account.'), ] def register_opts(conf): conf.register_opts(nova_opts) ks_loading.register_session_conf_options(conf, 'DEFAULT') def list_opts(): return { 'DEFAULT': nova_opts } masakari-9.0.0/masakari/conf/engine_driver.py0000664000175000017500000002171713656747723021275 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from oslo_config import types instance_recovery_group = cfg.OptGroup( 'instance_failure', title='Instance failure recovery options', help="Configuration options for instance failure recovery") host_recovery_group = cfg.OptGroup( 'host_failure', title='Host failure recovery options', help="Configuration options for host failure recovery") customized_recovery_flow_group = cfg.OptGroup( 'taskflow_driver_recovery_flows', title='Customized recovery flow Options', help="Configuration options for customizing various failure recovery" "workflow tasks.") taskflow_group = cfg.OptGroup( 'taskflow', title='Taskflow driver options', help="Configuration options for taskflow driver") host_failure_opts = [ cfg.BoolOpt('evacuate_all_instances', default=True, help=""" Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be allowed for evacuation from a failed source compute node. When set to True, it will evacuate all instances from a failed source compute node. First preference will be given to those instances which contain 'HA_Enabled=True' metadata key, and then it will evacuate the remaining ones. When set to False, it will evacuate only those instances which contain 'HA_Enabled=True' metadata key."""), cfg.BoolOpt('ignore_instances_in_error_state', default=False, help=""" Operators can decide whether error instances should be allowed for evacuation from a failed source compute node or not. When set to True, it will ignore error instances from evacuation from a failed source compute node. When set to False, it will evacuate error instances along with other instances from a failed source compute node."""), cfg.BoolOpt("add_reserved_host_to_aggregate", default=False, help=""" Operators can decide whether reserved_host should be added to aggregate group of failed compute host. 
When set to True, reserved host will be added to the aggregate group of failed compute host. When set to False, the reserved_host will not be added to the aggregate group of failed compute host."""), ] instance_failure_options = [ cfg.BoolOpt('process_all_instances', default=False, help=""" Operators can decide whether all instances or only those instances which contain metadata key 'HA_Enabled=True' should be taken into account to recover from instance failure events. When set to True, it will execute instance failure recovery actions for an instance irrespective of whether that particular instance contains metadata key 'HA_Enabled=True' or not. When set to False, it will only execute instance failure recovery actions for an instance which contain metadata key 'HA_Enabled=True'."""), ] taskflow_options = [ cfg.StrOpt('connection', help=""" The SQLAlchemy connection string to use to connect to the taskflow database. """), ] taskflow_driver_recovery_flows = [ cfg.Opt('host_auto_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_service_task'], 'main': ['prepare_HA_enabled_instances_task'], 'post': ['evacuate_instances_task']}, help=(""" This option allows operator to customize tasks to be executed for host failure auto recovery workflow. Provide list of strings reflecting to the task classes that should be included to the host failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. disable_compute_service_task 2. prepare_HA_enabled_instances_task 3. 
evacuate_instances_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('host_rh_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_service_task'], 'main': ['prepare_HA_enabled_instances_task', 'evacuate_instances_task'], 'post': []}, help=(""" This option allows operator to customize tasks to be executed for host failure reserved_host recovery workflow. Provide list of strings reflecting to the task classes that should be included to the host failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. disable_compute_service_task 2. prepare_HA_enabled_instances_task 3. evacuate_instances_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('instance_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['stop_instance_task'], 'main': ['start_instance_task'], 'post': ['confirm_instance_active_task']}, help=(""" This option allows operator to customize tasks to be executed for instance failure recovery workflow. Provide list of strings reflecting to the task classes that should be included to the instance failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below three tasks will be part of this config option:- 1. stop_instance_task 2. start_instance_task 3. 
confirm_instance_active_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")), cfg.Opt('process_failure_recovery_tasks', type=types.Dict( bounds=False, value_type=types.List(bounds=True, item_type=types.String(quotes=True))), default={'pre': ['disable_compute_node_task'], 'main': ['confirm_compute_node_disabled_task'], 'post': []}, help=(""" This option allows operator to customize tasks to be executed for process failure recovery workflow. Provide list of strings reflecting to the task classes that should be included to the process failure recovery workflow. The full classname path of all task classes should be defined in the 'masakari.task_flow.tasks' of setup.cfg and these classes may be implemented by OpenStack Masaskari project team, deployer or third party. By default below two tasks will be part of this config option:- 1. disable_compute_node_task 2. confirm_compute_node_disabled_task The allowed values for this option is comma separated dictionary of object names in between ``{`` and ``}``.""")) ] def register_opts(conf): conf.register_group(instance_recovery_group) conf.register_group(host_recovery_group) conf.register_group(customized_recovery_flow_group) conf.register_group(taskflow_group) conf.register_opts(instance_failure_options, group=instance_recovery_group) conf.register_opts(host_failure_opts, group=host_recovery_group) conf.register_opts(taskflow_driver_recovery_flows, group=customized_recovery_flow_group) conf.register_opts(taskflow_options, group=taskflow_group) def list_opts(): return { instance_recovery_group.name: instance_failure_options, host_recovery_group.name: host_failure_opts, taskflow_group.name: taskflow_options } def customized_recovery_flow_list_opts(): return { customized_recovery_flow_group.name: taskflow_driver_recovery_flows } masakari-9.0.0/masakari/conf/service.py0000664000175000017500000000524513656747723020113 0ustar zuulzuul00000000000000# Copyright 2016 
NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import socket from oslo_config import cfg service_opts = [ cfg.HostAddressOpt('host', default=socket.gethostname(), help=''' Hostname, FQDN or IP address of this host. Must be valid within AMQP key. Possible values: * String with hostname, FQDN or IP address. Default is hostname of this host. '''), cfg.StrOpt('engine_manager', default='masakari.engine.manager.MasakariManager', help='Full class name for the Manager for masakari engine'), cfg.IntOpt('report_interval', default=10, help='Seconds between nodes reporting state to datastore'), cfg.BoolOpt('periodic_enable', default=True, help='Enable periodic tasks'), cfg.IntOpt('periodic_interval_max', default=300, help='Max interval time between periodic tasks execution in ' 'seconds.'), cfg.IntOpt('periodic_fuzzy_delay', default=60, help='Range of seconds to randomly delay when starting the' ' periodic task scheduler to reduce stampeding.' ' (Disable by setting to 0)'), cfg.BoolOpt('use_ssl', default=False, help='Use APIs with SSL enabled'), cfg.HostAddressOpt('masakari_api_listen', default="0.0.0.0", help='The IP address on which the Masakari API will listen.'), cfg.IntOpt('masakari_api_listen_port', default=15868, min=1, max=65535, help='The port on which the Masakari API will listen.'), cfg.IntOpt('masakari_api_workers', help='Number of workers for Masakari API service. 
The default ' 'will be the number of CPUs available.'), cfg.IntOpt('service_down_time', default=60, help='Maximum time since last check-in for up service'), ] def register_opts(conf): conf.register_opts(service_opts) def list_opts(): return {'DEFAULT': service_opts} masakari-9.0.0/masakari/conf/ssl.py0000664000175000017500000000140113656747723017242 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_service import sslutils def register_opts(conf): sslutils.register_opts(conf) def list_opts(): return sslutils.list_opts() masakari-9.0.0/masakari/conf/exceptions.py0000664000175000017500000000165413656747723020634 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. 
from oslo_config import cfg exc_log_opts = [ cfg.BoolOpt('fatal_exception_format_errors', default=False, help='Make exception message format errors fatal'), ] def register_opts(conf): conf.register_opts(exc_log_opts) def list_opts(): return {'DEFAULT': exc_log_opts} masakari-9.0.0/masakari/conf/base.py0000664000175000017500000000273413656747723017365 0ustar zuulzuul00000000000000# Copyright 2016 NTT DATA # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg base_options = [ cfg.StrOpt( 'tempdir', help='Explicitly specify the temporary working directory.'), cfg.BoolOpt( 'monkey_patch', default=False, help=""" Determine if monkey patching should be applied. Related options: * ``monkey_patch_modules``: This must have values set for this option to have any effect """), cfg.ListOpt( 'monkey_patch_modules', default=['masakari.api:masakari.cmd'], help=""" List of modules/decorators to monkey patch. This option allows you to patch a decorator for all functions in specified modules. Related options: * ``monkey_patch``: This must be set to ``True`` for this option to have any effect """), ] def register_opts(conf): conf.register_opts(base_options) def list_opts(): return {'DEFAULT': base_options} masakari-9.0.0/masakari/conf/osapi_v1.py0000664000175000017500000000577013656747723020177 0ustar zuulzuul00000000000000# Copyright (c) 2016 NTT Data # All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg api_opts = [ cfg.ListOpt("extensions_blacklist", default=[], deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This option is a list of all of the v2.1 API extensions to never load. However, it will be removed in the near future, after which the all the functionality that was previously in extensions will be part of the standard API, and thus always accessible. * Possible values: A list of strings, each being the alias of an extension that you do not wish to load. * Services that use this: ``masakari-api`` * Related options: enabled, extensions_whitelist """), cfg.ListOpt("extensions_whitelist", default=[], deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This is a list of extensions. If it is empty, then *all* extensions except those specified in the extensions_blacklist option will be loaded. If it is not empty, then only those extensions in this list will be loaded, provided that they are also not in the extensions_blacklist option. Once this deprecated option is removed, after which the all the functionality that was previously in extensions will be part of the standard API, and thus always accessible. * Possible values: A list of strings, each being the alias of an extension that you wish to load, or an empty list, which indicates that all extensions are to be run. 
* Services that use this: ``masakari-api`` * Related options: enabled, extensions_blacklist """), cfg.StrOpt("project_id_regex", default=None, deprecated_for_removal=True, deprecated_group="osapi_v1", help=""" *DEPRECATED* This option is a string representing a regular expression (regex) that matches the project_id as contained in URLs. If not set, it will match normal UUIDs created by keystone. * Possible values: A string representing any legal regular expression * Services that use this: ``masakari-api`` * Related options: None """), ] api_opts_group = cfg.OptGroup(name="osapi_v1", title="API v1 Options") def register_opts(conf): conf.register_group(api_opts_group) conf.register_opts(api_opts, api_opts_group) def list_opts(): return {api_opts_group: api_opts} masakari-9.0.0/PKG-INFO0000664000175000017500000001060613656750011014436 0ustar zuulzuul00000000000000Metadata-Version: 1.1 Name: masakari Version: 9.0.0 Summary: Virtual Machine High Availability (VMHA) service for OpenStack Home-page: https://docs.openstack.org/masakari/latest/ Author: OpenStack Author-email: openstack-discuss@lists.openstack.org License: UNKNOWN Description: ======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. 
NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. * Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s --adminurl http://:/v1/%\(tenant_id\)s --internalurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.json`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. 
code-block:: shell-session masakari-api Configure masakari database --------------------------- #. Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. code-block:: shell-session masakari-manage db sync Features -------- * TODO Platform: UNKNOWN Classifier: Environment :: OpenStack Classifier: Intended Audience :: Information Technology Classifier: Intended Audience :: System Administrators Classifier: License :: OSI Approved :: Apache Software License Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 masakari-9.0.0/bindep.txt0000664000175000017500000000044013656747723015356 0ustar zuulzuul00000000000000# This is a cross-platform list tracking distribution packages needed for install and tests; # see https://docs.openstack.org/infra/bindep/ for additional information. # libpq-dev is needed by openstack-tox-py37 build only. libpq-dev [platform:dpkg test] python-dev [platform:dpkg test] masakari-9.0.0/test-requirements.txt0000664000175000017500000000126513656747723017623 0ustar zuulzuul00000000000000# The order of packages is significant, because pip processes them in the order # of appearance. Changing the order has an impact on the overall integration # process, which may cause wedges in the gate later. 
hacking>=1.1.0,<1.2.0 # Apache-2.0 coverage!=4.4,>=4.0 # Apache-2.0 ddt>=1.0.1 # MIT pep8>=1.5.7 psycopg2>=2.7 # LGPL/ZPL PyMySQL>=0.7.6 # MIT License python-subunit>=1.0.0 # Apache-2.0/BSD openstacksdk>=0.35.0 # Apache-2.0 os-api-ref>=1.4.0 # Apache-2.0 oslotest>=3.2.0 # Apache-2.0 stestr>=1.0.0 # Apache-2.0 SQLAlchemy-Utils>=0.33.10 # Apache-2.0 requests-mock>=1.2.0 # Apache-2.0 testresources>=2.0.0 # Apache-2.0/BSD testscenarios>=0.4 # Apache-2.0/BSD testtools>=2.2.0 # MIT masakari-9.0.0/lower-constraints.txt0000664000175000017500000000425613656747723017623 0ustar zuulzuul00000000000000alabaster==0.7.10 alembic==0.9.8 amqp==2.2.2 appdirs==1.4.3 automaton==1.14.0 Babel==2.3.4 cachetools==2.0.1 certifi==2018.1.18 chardet==3.0.4 cliff==2.11.0 cmd2==0.8.1 contextlib2==0.5.5 coverage==4.0 ddt==1.0.1 debtcollector==1.19.0 decorator==4.2.1 docutils==0.14 dogpile.cache==0.6.5 dulwich==0.19.0 enum-compat==0.0.2 eventlet==0.20.0 extras==1.0.0 fasteners==0.14.1 fixtures==3.0.0 flake8==2.5.5 future==0.16.0 futurist==1.6.0 greenlet==0.4.13 hacking==0.12.0 idna==2.6 imagesize==1.0.0 iso8601==0.1.11 Jinja2==2.10 jsonschema==2.6.0 keystoneauth1==3.4.0 keystonemiddleware==4.17.0 kombu==4.1.0 linecache2==1.0.0 Mako==1.0.7 MarkupSafe==1.0 mccabe==0.2.1 microversion-parse==0.2.1 mock==2.0.0 monotonic==1.4 mox3==0.25.0 msgpack==0.5.6 netaddr==0.7.19 netifaces==0.10.6 networkx==1.11 openstackdocstheme==1.18.1 openstacksdk==0.35.0 # Apache-2.0 os-api-ref==1.4.0 os-client-config==1.29.0 oslo.cache==1.29.0 oslo.concurrency==3.26.0 oslo.config==5.2.0 oslo.context==2.19.2 oslo.db==4.27.0 oslo.i18n==3.15.3 oslo.log==3.36.0 oslo.messaging==5.29.0 oslo.middleware==3.31.0 oslo.policy==1.30.0 oslo.serialization==2.25.0 oslo.service==1.24.0 oslo.upgradecheck==0.1.0 oslo.utils==3.33.0 oslo.versionedobjects==1.31.2 oslosphinx==4.7.0 oslotest==3.2.0 Paste==2.0.3 PasteDeploy==1.5.2 pbr==2.0.0 pep8==1.5.7 pika-pool==0.1.3 pika==0.10.0 prettytable==0.7.2 psycopg2==2.7 pycadf==2.7.0 pyflakes==0.8.1 
Pygments==2.2.0 pyinotify==0.9.6 PyMySQL==0.7.6 pyparsing==2.2.0 pyperclip==1.6.0 python-dateutil==2.7.0 python-editor==1.0.3 python-keystoneclient==3.15.0 python-mimeparse==1.6.0 python-novaclient==9.1.0 python-subunit==1.0.0 pytz==2018.3 PyYAML==3.12 reno==2.5.0 repoze.lru==0.7 requests-mock==1.2.0 requests==2.18.4 requestsexceptions==1.4.0 rfc3986==1.1.0 Routes==2.4.1 simplejson==3.13.2 six==1.10.0 snowballstemmer==1.2.1 Sphinx==1.6.2 sphinxcontrib-websupport==1.0.1 sqlalchemy-migrate==0.11.0 SQLAlchemy==1.2.19 sqlparse==0.2.4 statsd==3.2.2 stestr==1.0.0 stevedore==1.20.0 SQLAlchemy-Utils==0.33.10 taskflow==2.16.0 Tempita==0.5.2 tenacity==4.9.0 testresources==2.0.0 testscenarios==0.4 testtools==2.2.0 traceback2==1.4.0 unittest2==1.1.0 urllib3==1.22 vine==1.1.4 voluptuous==0.11.1 WebOb==1.7.1 wrapt==1.10.11 masakari-9.0.0/AUTHORS0000664000175000017500000000526013656750010014410 0ustar zuulzuul00000000000000Abhishek Kekane Andreas Jaeger Arthur Dayne Chuck Short Corey Bryant Deepak Dinesh Bhor Doug Hellmann Ghanshyam Ghanshyam Mann Hangdong Zhang Ian Wienand James Page Jean-Philippe Evrard Ji-Wei Kengo Takahara Kengo Takahara Louie KWAN Neha Alhat Nguyen Hai Nguyen Hung Phuong OpenStack Release Bot Rikimaru Honjo Rikimaru Honjo Sampath Priyankara Sean McGinnis Shilpa Shilpa Devharakar Takahiro Izumi Takashi Kajinami Takashi NATSUME Van Hung Pham Vu Cong Tuan Yaguo Zhou Yukinori Sagara akhiljain23 avnish bhagyashris binhong.hua caoyuan chenjiao deepakmourya dineshbhor ghanshyam ghanshyam hussainchachuliya jayashri bidwe juhoson lcsong liyingjun lkwan melissaml niraj singh nirajsingh niwa.keiji.z03 openstack pandatt pengyuesheng pooja jadhav poojajadhav rajat29 sampathP sapd shangxiaobj shilpa shilpa.devharakar suzhengwei tpatil tpatil uchenily zhangbailin masakari-9.0.0/babel.cfg0000664000175000017500000000002113656747723015075 0ustar zuulzuul00000000000000[python: **.py] masakari-9.0.0/CONTRIBUTING.rst0000664000175000017500000000121613656747723016017 0ustar 
zuulzuul00000000000000If you would like to contribute to the development of OpenStack, you must follow the steps in this page: https://docs.openstack.org/infra/manual/developers.html If you already have a good understanding of how the system works and your OpenStack accounts are set up, you can skip to the development workflow section of this documentation to learn how changes to OpenStack should be submitted for review via the Gerrit tool: https://docs.openstack.org/infra/manual/developers.html#development-workflow Pull requests submitted through GitHub will be ignored. Bugs should be filed on Launchpad, not GitHub: https://bugs.launchpad.net/masakari masakari-9.0.0/README.rst0000664000175000017500000000553213656747723015052 0ustar zuulzuul00000000000000======== Masakari ======== Virtual Machine High Availability (VMHA) service for OpenStack Masakari provides Virtual Machine High Availability (VMHA) service for OpenStack clouds by automatically recovering the KVM-based Virtual Machine(VM)s from failure events such as VM process down, provisioning process down, and nova-compute host failure. It also provides API service for manage and control the automated rescue mechanism. NOTE: Use masakari only if instance path is configured on shared storage system i.e, 'instances_path' config option of nova has a path of shared directory otherwise instance data will be lost after the evacuation of instance from failed host if, * instance is booted from image * flavor using ephemeral disks is used Original version of Masakari: https://github.com/ntt-sic/masakari Tokyo Summit Session: https://www.youtube.com/watch?v=BmjNKceW_9A Masakari is distributed under the terms of the Apache License, Version 2.0. The full terms and conditions of this license are detailed in the LICENSE file. 
* Free software: Apache license 2.0 * Documentation: https://docs.openstack.org/masakari/latest * Release notes: https://docs.openstack.org/releasenotes/masakari/ * Source: https://opendev.org/openstack/masakari * Bugs: https://bugs.launchpad.net/masakari Configure masakari-api ---------------------- #. Create masakari user: .. code-block:: shell-session openstack user create --password-prompt masakari (give password as masakari) #. Add admin role to masakari user: .. code-block:: shell-session openstack role add --project service --user masakari admin #. Create new service: .. code-block:: shell-session openstack service create --name masakari --description "masakari high availability" instance-ha #. Create endpoint for masakari service: .. code-block:: shell-session openstack endpoint create --region RegionOne masakari --publicurl http://:/v1/%\(tenant_id\)s --adminurl http://:/v1/%\(tenant_id\)s --internalurl http://:/v1/%\(tenant_id\)s #. Clone masakari using .. code-block:: shell-session git clone https://github.com/openstack/masakari.git #. Run setup.py from masakari .. code-block:: shell-session sudo python setup.py install #. Create directory ``/etc/masakari`` #. Copy ``masakari.conf``, ``api-paste.ini`` and ``policy.json`` file from ``masakari/etc/`` to ``/etc/masakari`` folder #. To run masakari-api simply use following binary: .. code-block:: shell-session masakari-api Configure masakari database --------------------------- #. Create 'masakari' database #. After running setup.py for masakari (``sudo python setup.py install``), run ``masakari-manage`` command to sync the database .. 
code-block:: shell-session masakari-manage db sync Features -------- * TODO masakari-9.0.0/roles/0000775000175000017500000000000013656750011014462 5ustar zuulzuul00000000000000masakari-9.0.0/roles/devstack-config/0000775000175000017500000000000013656750011017531 5ustar zuulzuul00000000000000masakari-9.0.0/roles/devstack-config/tasks/0000775000175000017500000000000013656750011020656 5ustar zuulzuul00000000000000masakari-9.0.0/roles/devstack-config/tasks/main.yml0000664000175000017500000000064213656747723022347 0ustar zuulzuul00000000000000- name: Collect devstack stackenv file fetch: flat: yes dest: "{{ zuul.executor.log_root }}/{{ inventory_hostname }}/confs/devstack/-stackenv" src: "/opt/stack/devstack/.stackenv" - name: Collect devstack config files synchronize: dest: "{{ zuul.executor.log_root }}/{{ inventory_hostname }}/confs/devstack/" mode: pull src: "/opt/stack/devstack/{{ item }}" with_items: - local.conf masakari-9.0.0/.zuul.yaml0000664000175000017500000000712113656747723015320 0ustar zuulzuul00000000000000- nodeset: name: masakari-openstack-multi-nodes nodes: - name: controller label: ubuntu-bionic - name: compute1 label: ubuntu-bionic groups: # Nodes running the compute service - name: compute nodes: - controller - compute1 # Nodes that are not the controller - name: subnode nodes: - compute1 # Switch node for multinode networking setup - name: switch nodes: - controller # Peer nodes for multinode networking setup - name: peers nodes: - compute1 - job: name: masakari-functional-devstack-multinode parent: devstack description: | Base multinodes job for devstack-based functional tests nodeset: masakari-openstack-multi-nodes pre-run: playbooks/devstack/pre.yaml run: playbooks/devstack/run.yaml post-run: playbooks/devstack/post.yaml roles: - zuul: openstack-infra/devstack timeout: 9000 required-projects: - openstack/cinder - openstack/glance - openstack/keystone - openstack/neutron - openstack/nova - openstack/placement - openstack/horizon - 
openstack/python-masakariclient - openstack/masakari - openstack/masakari-monitors vars: test_matrix_configs: [neutron] devstack_services: horizon: false swift: false devstack_localrc: USE_PYTHON3: true DATABASE_TYPE: mysql zuul_work_dir: src/opendev.org/openstack/masakari host-vars: controller: devstack_plugins: masakari: https://opendev.org/openstack/masakari devstack_services: mysql: true rabbit: true tls-proxy: false horizon: false swift: false # Keystone services key: true # Glance services g-api: true # Nova services n-api: true n-api-meta: true n-cond: true n-cpu: true n-novnc: true n-sch: true placement-api: true # Legacy Neutron services q-agt: true q-dhcp: true q-l3: true q-meta: true q-metering: true q-svc: true # Cinder services c-api: true c-sch: true c-vol: true cinder: true # Masakari services masakari-api: true masakari-engine: true masakari-monitors: true tox_install_siblings: false tox_envlist: functional group-vars: subnode: devstack_localrc: DATABASE_TYPE: mysql devstack_plugins: masakari: https://opendev.org/openstack/masakari devstack_services: q-agt: true n-api: false n-api-meta: false n-cauth: false n-cond: false n-cpu: true n-novnc: false n-sch: false horizon: false tls-proxy: false # Masakari services masakari-monitors: true masakari-api: false masakari-engine: false - project: templates: - check-requirements - openstack-cover-jobs - openstack-lower-constraints-jobs - openstack-python3-ussuri-jobs - publish-openstack-docs-pti - release-notes-jobs-python3 check: jobs: - masakari-functional-devstack-multinode: voting: true - project: masakari-systemfault-integration-ci: jobs: - masakari-build-test - job: name: masakari-build-test parent: base masakari-9.0.0/setup.py0000664000175000017500000000200613656747723015066 0ustar zuulzuul00000000000000# Copyright (c) 2013 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT import setuptools # In python < 2.7.4, a lazy loading of package `pbr` will break # setuptools if some other modules registered functions in `atexit`. # solution from: http://bugs.python.org/issue15881#msg170215 try: import multiprocessing # noqa except ImportError: pass setuptools.setup( setup_requires=['pbr>=2.0.0'], pbr=True) masakari-9.0.0/.stestr.conf0000664000175000017500000000006513656747723015630 0ustar zuulzuul00000000000000[DEFAULT] test_path=./masakari/tests/unit top_dir=./ masakari-9.0.0/devstack/0000775000175000017500000000000013656750011015142 5ustar zuulzuul00000000000000masakari-9.0.0/devstack/settings0000664000175000017500000000470413656747723016752 0ustar zuulzuul00000000000000# Settings needed for Masakari plugin # ----------------------------------- # Set up default directories MASAKARI_DIR=${MASAKARI_DIR:-${DEST}/masakari} MASAKARI_REPO=${GIT_BASE}/openstack/masakari.git} MASAKARI_BRANCH=${MASAKARI_BRANCH:-master} MASAKARI_DASHBOARD_REPO=${MASAKARI_DASHBOARD_REPO:-${GIT_BASE}/openstack/masakari-dashboard.git} MASAKARI_DASHBOARD_BRANCH=${MASAKARI_DASHBOARD_BRANCH:-master} MASAKARI_MONITORS_DIR=${MASAKARI_MONITORS_DIR:-${DEST}/masakari-monitors} MASAKARI_MONITORS_REPO=${MASAKARI_MONITORS_REPO:-${GIT_BASE}/openstack/masakari-monitors.git} MASAKARI_MONITORS_BRANCH=${MASAKARI_MONITORS_BRANCH:-master} # Set up configuration directory and files MASAKARI_CONF_DIR=${MASAKARI_CONF_DIR:-/etc/masakari} MASAKARI_DASHBOARD_DIR=$DEST/masakari-dashboard 
MASAKARI_CONF=${MASAKARI_CONF:-${MASAKARI_CONF_DIR}/masakari.conf} MASAKARI_API_PASTE_INI=${MASAKARI_API_PASTE_INI:-${MASAKARI_CONF_DIR}/api-paste.ini} MASAKARI_POLICY_JSON=${MASAKARI_POLICY_JSON:-${MASAKARI_CONF_DIR}/policy.json} MASAKARI_MONITORS_CONF_DIR=${MASAKARI_MONITORS_CONF_DIR:-/etc/masakarimonitors} MASAKARI_MONITORS_CONF=${MASAKARI_MONITORS_CONF:-${MASAKARI_MONITORS_CONF_DIR}/masakarimonitors.conf} MASAKARI_LOCAL_CONF_DIR=${MASAKARI_LOCAL_CONF_DIR:-${MASAKARI_DIR}/etc/masakari} MASAKARI_LOCAL_API_PASTE_INI=${MASAKARI_LOCAL_API_PASTE_INI:-${MASAKARI_LOCAL_CONF_DIR}/api-paste.ini} MASAKARI_LOCAL_POLICY_JSON=${MASAKARI_LOCAL_POLICY_JSON:-${MASAKARI_LOCAL_CONF_DIR}/policy.json} MASAKARI_AUTH_CACHE_DIR=${MASAKARI_AUTH_CACHE_DIR:-/var/cache/masakari} MASAKARI_SERVICE_HOST=${MASAKARI_SERVICE_HOST:-$SERVICE_HOST} MASAKARI_SERVICE_PROTOCOL=${MASAKARI_SERVICE_PROTOCOL:-http} # set environment variables USERNAME="masakari" SERVICE_DOMAIN_ID="default" # Support entry points installation of console scripts if [[ -d $MASAKARI_DIR/bin ]]; then MASAKARI_BIN_DIR=$MASAKARI_DIR/bin MASAKARI_MONITORS_BIN_DIR=$MASAKARI_MONITORS_DIR/bin else MASAKARI_BIN_DIR=$(get_python_exec_prefix) fi MASAKARI_MANAGE=$MASAKARI_BIN_DIR/masakari-manage # Public facing bits MASAKARI_SERVICE_PORT=${MASAKARI_SERVICE_PORT:-15868} MASAKARI_SERVICE_PORT_INT=${MASAKARI_SERVICE_PORT_INT:-25868} MASAKARI_UWSGI=$MASAKARI_BIN_DIR/masakari-wsgi MASAKARI_UWSGI_CONF=$MASAKARI_CONF_DIR/masakari-api-uwsgi.ini enable_service masakari masakari-api masakari-engine enable_service masakari-monitors masakari-processmonitor masakari-instancemonitor masakari-hostmonitor enable_service masakari-introspectiveinstancemonitor masakari-9.0.0/devstack/plugin.sh0000664000175000017500000003524113656747723017021 0ustar zuulzuul00000000000000#!/bin/bash # # lib/masakari # Functions to control the configuration and operation of the **Masakari** service # Dependencies: # ``functions`` file # ``DEST``, ``STACK_USER`` must be 
defined # ``SERVICE_{HOST|PROTOCOL|TOKEN}`` must be defined # ``stack.sh`` calls the entry points in this order: # # masakari-api # install - install_masakari # post-config - configure_masakari # extra - init_masakari start_masakari # unstack - stop_masakari cleanup_masakari # # masakari-engine # install - install_masakari # post-config - configure_masakari # extra - init_masakari start_masakari # unstack - stop_masakari cleanup_masakari # # masakari-monitors # post-config - configure_masakarimonitors # extra - run_masakarimonitors # unstack - stop_masakari_monitors cleanup_masakari_monitors # Save trace setting XTRACE=$(set +o | grep xtrace) set +o xtrace if is_service_enabled tls-proxy; then MASAKARI_SERVICE_PROTOCOL="https" fi # Toggle for deploying Masakari under a wsgi server. MASAKARI_USE_MOD_WSGI=${MASAKARI_USE_MOD_WSGI:-True} # Functions # --------- # setup_masakari_logging() - Adds logging configuration to conf files function setup_masakari_logging { local CONF=$1 iniset $CONF DEFAULT debug $ENABLE_DEBUG_LOG_LEVEL iniset $CONF DEFAULT use_syslog $SYSLOG if [ "$LOG_COLOR" == "True" ] && [ "$SYSLOG" == "False" ]; then # Add color to logging output setup_colorized_logging $CONF DEFAULT tenant user fi } # create_masakari_accounts() - Set up common required masakari accounts # Tenant User Roles # ------------------------------------------------------------------ # service masakari admin # if enabled function create_masakari_accounts { if [[ "$ENABLED_SERVICES" =~ "masakari" ]]; then create_service_user "$USERNAME" "admin" local masakari_service=$(get_or_create_service "masakari" \ "instance-ha" "OpenStack High Availability") if [ "$MASAKARI_USE_MOD_WSGI" == "False" ]; then get_or_create_endpoint $masakari_service \ "$REGION_NAME" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST:$MASAKARI_SERVICE_PORT/v1/\$(tenant_id)s" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST:$MASAKARI_SERVICE_PORT/v1/\$(tenant_id)s" \ 
"$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST:$MASAKARI_SERVICE_PORT/v1/\$(tenant_id)s" else get_or_create_endpoint $masakari_service \ "$REGION_NAME" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST/instance-ha/v1/\$(tenant_id)s" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST/instance-ha/v1/\$(tenant_id)s" \ "$MASAKARI_SERVICE_PROTOCOL://$SERVICE_HOST/instance-ha/v1/\$(tenant_id)s" fi fi } # stack.sh entry points # --------------------- # cleanup_masakari() - Remove residual data files, anything left over from previous # runs that a clean run would need to clean up function cleanup_masakari { # Clean up dirs rm -fr $MASAKARI_AUTH_CACHE_DIR/* rm -fr $MASAKARI_CONF_DIR/* if [ "$MASAKARI_USE_MOD_WSGI" == "True" ]; then remove_uwsgi_config "$MASAKARI_UWSGI_CONF" "$MASAKARI_UWSGI" fi } # cleanup_masakari_monitors() - Remove residual data files, anything left over from previous # runs that a clean run would need to clean up function cleanup_masakari_monitors { # Clean up dirs rm -fr $MASAKARI_MONITORS_CONF_DIR/* } # iniset_conditional() - Sets the value in the inifile, but only if it's # actually got a value function iniset_conditional { local FILE=$1 local SECTION=$2 local OPTION=$3 local VALUE=$4 if [[ -n "$VALUE" ]]; then iniset ${FILE} ${SECTION} ${OPTION} ${VALUE} fi } # configure_masakari() - Set config files, create data dirs, etc function configure_masakari { setup_develop $MASAKARI_DIR # Create the masakari conf dir and cache dirs if they don't exist sudo install -d -o $STACK_USER ${MASAKARI_CONF_DIR} ${MASAKARI_AUTH_CACHE_DIR} # Copy api-paste file over to the masakari conf dir cp $MASAKARI_LOCAL_API_PASTE_INI $MASAKARI_API_PASTE_INI # (Re)create masakari conf files rm -f $MASAKARI_CONF # (Re)create masakari api conf file if needed if is_service_enabled masakari-api; then oslo-config-generator --namespace keystonemiddleware.auth_token \ --namespace masakari \ --namespace oslo.db \ > $MASAKARI_CONF # Set common configuration values (but only if they're defined) 
iniset $MASAKARI_CONF DEFAULT masakari_api_workers "$API_WORKERS" iniset $MASAKARI_CONF database connection `database_connection_url masakari` # Set taskflow connection to store the recovery workflow details in db iniset $MASAKARI_CONF taskflow connection `database_connection_url masakari` setup_masakari_logging $MASAKARI_CONF configure_auth_token_middleware $MASAKARI_CONF masakari $MASAKARI_AUTH_CACHE_DIR fi # Set os_privileged_user credentials (used for connecting nova service) iniset $MASAKARI_CONF DEFAULT os_privileged_user_name nova iniset $MASAKARI_CONF DEFAULT os_privileged_user_auth_url "${KEYSTONE_AUTH_PROTOCOL}://${KEYSTONE_AUTH_HOST}/identity" iniset $MASAKARI_CONF DEFAULT os_privileged_user_password "$SERVICE_PASSWORD" iniset $MASAKARI_CONF DEFAULT os_privileged_user_tenant "$SERVICE_PROJECT_NAME" iniset $MASAKARI_CONF DEFAULT graceful_shutdown_timeout "$SERVICE_GRACEFUL_SHUTDOWN_TIMEOUT" iniset_rpc_backend masakari $MASAKARI_CONF DEFAULT if is_service_enabled tls-proxy; then iniset $MASAKARI_CONF DEFAULT masakari_api_listen_port $MASAKARI_SERVICE_PORT_INT fi if [ "$MASAKARI_USE_MOD_WSGI" == "True" ]; then write_uwsgi_config "$MASAKARI_UWSGI_CONF" "$MASAKARI_UWSGI" "/instance-ha" fi } # configure_masakarimonitors() - Set config files, create data dirs, etc function configure_masakarimonitors { git_clone $MASAKARI_MONITORS_REPO $MASAKARI_MONITORS_DIR $MASAKARI_MONITORS_BRANCH # Create masakarimonitors conf dir and cache dirs if they don't exist sudo install -d -o $STACK_USER ${MASAKARI_MONITORS_CONF_DIR} setup_develop $MASAKARI_MONITORS_DIR # (Re)create masakarimonitors conf files rm -f $MASAKARI_MONITORS_CONF # (Re)create masakarimonitors api conf file if needed oslo-config-generator --namespace masakarimonitors.conf \ --namespace oslo.log \ --namespace oslo.middleware \ > $MASAKARI_MONITORS_CONF iniset $MASAKARI_MONITORS_CONF api auth_url "${KEYSTONE_AUTH_PROTOCOL}://${KEYSTONE_AUTH_HOST}/identity" iniset $MASAKARI_MONITORS_CONF api password 
"$SERVICE_PASSWORD" iniset $MASAKARI_MONITORS_CONF api project_name "$SERVICE_PROJECT_NAME" iniset $MASAKARI_MONITORS_CONF api username "$USERNAME" iniset $MASAKARI_MONITORS_CONF api user_domain_id "$SERVICE_DOMAIN_ID" iniset $MASAKARI_MONITORS_CONF api project_domain_id "$SERVICE_DOMAIN_ID" iniset $MASAKARI_MONITORS_CONF api region "$REGION_NAME" iniset $MASAKARI_MONITORS_CONF process process_list_path "/etc/masakarimonitors/process_list.yaml" cp $DEST/masakari-monitors/etc/masakarimonitors/process_list.yaml.sample \ $DEST/masakari-monitors/etc/masakarimonitors/process_list.yaml cp $DEST/masakari-monitors/etc/masakarimonitors/process_list.yaml \ $MASAKARI_MONITORS_CONF_DIR cp $DEST/masakari-monitors/etc/masakarimonitors/processmonitor.conf.sample \ $DEST/masakari-monitors/etc/masakarimonitors/processmonitor.conf cp $DEST/masakari-monitors/etc/masakarimonitors/processmonitor.conf $MASAKARI_MONITORS_CONF_DIR sed -i 's/start nova-compute/start devstack@n-cpu/g' $MASAKARI_MONITORS_CONF_DIR/process_list.yaml sed -i 's/restart nova-compute/restart devstack@n-cpu/g' $MASAKARI_MONITORS_CONF_DIR/process_list.yaml sed -i 's/start masakari-instancemonitor/start devstack@masakari-instancemonitor/g' \ $MASAKARI_MONITORS_CONF_DIR/process_list.yaml sed -i 's/restart masakari-instancemonitor/restart devstack@masakari-instancemonitor/g' \ $MASAKARI_MONITORS_CONF_DIR/process_list.yaml # NOTE(neha-alhat): remove monitoring of host-monitor process as # devstack support for host-monitor is not added yet. sed -i '43,52d' $MASAKARI_MONITORS_CONF_DIR/process_list.yaml } # install_masakari() - Collect source and prepare function install_masakari { setup_develop $MASAKARI_DIR } # init_masakari() - Initializes Masakari Database as a Service function init_masakari { # (Re)Create masakari db recreate_database masakari # Initialize the masakari database $MASAKARI_MANAGE db sync # Add an admin user to the 'tempest' alt_demo tenant. # This is needed to test the guest_log functionality. 
# The first part mimics the tempest setup, so make sure we have that. ALT_USERNAME=${ALT_USERNAME:-alt_demo} ALT_TENANT_NAME=${ALT_TENANT_NAME:-alt_demo} get_or_create_project ${ALT_TENANT_NAME} default get_or_create_user ${ALT_USERNAME} "$ADMIN_PASSWORD" "default" "alt_demo@example.com" get_or_add_user_project_role Member ${ALT_USERNAME} ${ALT_TENANT_NAME} # The second part adds an admin user to the tenant. ADMIN_ALT_USERNAME=${ADMIN_ALT_USERNAME:-admin_${ALT_USERNAME}} get_or_create_user ${ADMIN_ALT_USERNAME} "$ADMIN_PASSWORD" "default" "admin_alt_demo@example.com" get_or_add_user_project_role admin ${ADMIN_ALT_USERNAME} ${ALT_TENANT_NAME} } # start_masakari() - Start running processes function start_masakari { local masakari_url if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then if [ "$MASAKARI_USE_MOD_WSGI" == "False" ]; then run_process masakari-api "$MASAKARI_BIN_DIR/masakari-api --config-file=$MASAKARI_CONF --debug" masakari_url=$MASAKARI_SERVICE_PROTOCOL://$MASAKARI_SERVICE_HOST:$MASAKARI_SERVICE_PORT # Start proxy if tls enabled if is_service_enabled tls_proxy; then start_tls_proxy masakari-service '*' $MASAKARI_SERVICE_PORT $SERVICE_HOST $MASAKARI_SERVICE_PORT_INT fi else run_process "masakari-api" "$MASAKARI_BIN_DIR/uwsgi --procname-prefix masakari-api --ini $MASAKARI_UWSGI_CONF" masakari_url=$MASAKARI_SERVICE_PROTOCOL://$MASAKARI_SERVICE_HOST/instance-ha/v1 fi echo "Waiting for Masakari API to start..." if ! 
wait_for_service $SERVICE_TIMEOUT $masakari_url; then die $LINENO "masakari-api did not start" fi fi if [[ "$ENABLED_SERVICES" =~ "masakari-engine" ]]; then run_process masakari-engine "$MASAKARI_BIN_DIR/masakari-engine --config-file=$MASAKARI_CONF --debug" fi } #install masakari-dashboard function install_masakaridashboard { git_clone $MASAKARI_DASHBOARD_REPO $MASAKARI_DASHBOARD_DIR $MASAKARI_DASHBOARD_BRANCH setup_develop $MASAKARI_DASHBOARD_DIR ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/local/enabled/_50_masakaridashboard.py \ $HORIZON_DIR/openstack_dashboard/local/enabled ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/local/local_settings.d/_50_masakari.py \ $HORIZON_DIR/openstack_dashboard/local/local_settings.d ln -fs $MASAKARI_DASHBOARD_DIR/masakaridashboard/conf/masakari_policy.json \ $HORIZON_DIR/openstack_dashboard/conf } #uninstall masakari-dashboard function uninstall_masakaridashboard { sudo rm -f $DEST/horizon/openstack_dashboard/local/enabled/_50_masakaridashboard.py sudo rm -f $DEST/horizon/openstack_dashboard/local/local_settings.d/_50_masakari.py sudo rm -f $DEST/horizon/openstack_dashboard/conf/masakari_policy.json restart_apache_server } # stop_masakari() - Stop running processes function stop_masakari { # Kill the masakari services local serv for serv in masakari-engine masakari-api; do stop_process $serv done } #run masakari-monitors function run_masakarimonitors { run_process masakari-processmonitor "$MASAKARI_BIN_DIR/masakari-processmonitor" run_process masakari-instancemonitor "$MASAKARI_BIN_DIR/masakari-instancemonitor" run_process masakari-introspectiveinstancemonitor "$MASAKARI_BIN_DIR/masakari-introspectiveinstancemonitor" } # stop_masakari_monitors() - Stop running processes function stop_masakari_monitors { # Kill the masakari-monitors services local serv for serv in masakari-processmonitor masakari-instancemonitor masakari-introspectiveinstancemonitor; do stop_process $serv done } # Dispatcher for masakari plugin if 
is_service_enabled masakari; then if [[ "$1" == "stack" && "$2" == "install" ]]; then echo_summary "Installing Masakari" if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then install_masakari fi elif [[ "$1" == "stack" && "$2" == "post-config" ]]; then echo_summary "Configuring Masakari" if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then configure_masakari if is_service_enabled key; then create_masakari_accounts fi fi elif [[ "$1" == "stack" && "$2" == "extra" ]]; then # Initialize and Start the masakari API and masakari taskmgr components if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then init_masakari echo_summary "Starting Masakari" start_masakari fi if is_service_enabled horizon; then # install masakari-dashboard echo_summary "Installing masakari-dashboard" install_masakaridashboard fi fi if [[ "$1" == "unstack" ]]; then if is_service_enabled horizon; then echo_summary "Uninstall masakari-dashboard" uninstall_masakaridashboard fi if [[ "$ENABLED_SERVICES" =~ "masakari-api" ]]; then stop_masakari cleanup_masakari fi fi fi if is_service_enabled masakari-monitors; then if [[ "$1" == "stack" && "$2" == "post-config" ]]; then if is_service_enabled n-cpu; then # Configure masakari-monitors echo_summary "Configure masakari-monitors" configure_masakarimonitors fi elif [[ "$1" == "stack" && "$2" == "extra" ]]; then if is_service_enabled n-cpu; then # Run masakari-monitors echo_summary "Running masakari-monitors" run_masakarimonitors fi fi if [[ "$1" == "unstack" ]]; then if is_service_enabled n-cpu; then echo_summary "Uninstall masakari-monitors" stop_masakari_monitors cleanup_masakari_monitors fi fi fi # Restore xtrace $XTRACE # Tell emacs to use shell-script-mode ## Local variables: ## mode: shell-script ## End: masakari-9.0.0/devstack/README.rst0000664000175000017500000000156613656747723016661 0ustar zuulzuul00000000000000============================= Enabling Masakari in DevStack ============================= To enable Masakari in DevStack, perform the following 
steps: Download DevStack ================= .. sourcecode:: bash export DEVSTACK_DIR=~/devstack git clone https://opendev.org/openstack/devstack.git $DEVSTACK_DIR Enable the Masakari plugin ========================== Enable the plugin by adding the following section to ``$DEVSTACK_DIR/local.conf`` .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Optionally, a git refspec (branch or tag or commit) may be provided as follows: .. sourcecode:: bash [[local|localrc]] enable_plugin masakari https://opendev.org/openstack/masakari Run the DevStack utility ======================== .. sourcecode:: bash cd $DEVSTACK_DIR ./stack.sh masakari-9.0.0/LICENSE0000664000175000017500000002363713656747723014376 0ustar zuulzuul00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. 
Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. 
Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. 
masakari-9.0.0/playbooks/0000775000175000017500000000000013656750011015341 5ustar zuulzuul00000000000000masakari-9.0.0/playbooks/devstack/0000775000175000017500000000000013656750011017145 5ustar zuulzuul00000000000000masakari-9.0.0/playbooks/devstack/pre.yaml0000664000175000017500000000023013656747723020632 0ustar zuulzuul00000000000000- hosts: all roles: - orchestrate-devstack - role: bindep bindep_profile: test bindep_dir: "{{ zuul_work_dir }}" - ensure-tox masakari-9.0.0/playbooks/devstack/run.yaml0000664000175000017500000000004713656747723020656 0ustar zuulzuul00000000000000- hosts: controller roles: - tox masakari-9.0.0/playbooks/devstack/post.yaml0000664000175000017500000000010713656747723021034 0ustar zuulzuul00000000000000- hosts: all roles: - fetch-subunit-output - devstack-config masakari-9.0.0/.coveragerc0000664000175000017500000000014113656747723015473 0ustar zuulzuul00000000000000[run] branch = True source = masakari omit = masakari/openstack/* [report] ignore_errors = True